From cfd452353744bbacdd073b9196c32bb905b8f829 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Mon, 18 Nov 2024 10:17:55 +0100 Subject: [PATCH 001/156] feat: add merge_if_module_configured (#12608) --- crates/rpc/rpc-builder/src/lib.rs | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/crates/rpc/rpc-builder/src/lib.rs b/crates/rpc/rpc-builder/src/lib.rs index ab68d3c88e49..0d86c838d51c 100644 --- a/crates/rpc/rpc-builder/src/lib.rs +++ b/crates/rpc/rpc-builder/src/lib.rs @@ -1999,6 +1999,29 @@ impl TransportRpcModules { &self.config } + /// Merge the given [`Methods`] in all configured transport modules if the given + /// [`RethRpcModule`] is configured for the transport. + /// + /// Fails if any of the methods in other is present already. + pub fn merge_if_module_configured( + &mut self, + module: RethRpcModule, + other: impl Into, + ) -> Result<(), RegisterMethodError> { + let other = other.into(); + if self.module_config().contains_http(&module) { + self.merge_http(other.clone())?; + } + if self.module_config().contains_ws(&module) { + self.merge_ws(other.clone())?; + } + if self.module_config().contains_ipc(&module) { + self.merge_ipc(other)?; + } + + Ok(()) + } + /// Merge the given [Methods] in the configured http methods. /// /// Fails if any of the methods in other is present already. From 5056a081123d40e6f4bcdc1bf7338e01b3c5ee9c Mon Sep 17 00:00:00 2001 From: Emilia Hane Date: Mon, 18 Nov 2024 10:03:54 +0100 Subject: [PATCH 002/156] fix(deps): Fix dev-deps for `reth-primitives` (#12612) --- .github/assets/check_wasm.sh | 1 + crates/primitives/Cargo.toml | 4 ++-- testing/testing-utils/Cargo.toml | 2 +- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/assets/check_wasm.sh b/.github/assets/check_wasm.sh index 0e704857edb3..35f4bdda5b83 100755 --- a/.github/assets/check_wasm.sh +++ b/.github/assets/check_wasm.sh @@ -68,6 +68,7 @@ exclude_crates=( reth-static-file # tokio reth-transaction-pool # c-kzg reth-trie-parallel # tokio + reth-testing-utils ) # Array to hold the results diff --git a/crates/primitives/Cargo.toml b/crates/primitives/Cargo.toml index 34d04c94edcd..c9043a2bd11e 100644 --- a/crates/primitives/Cargo.toml +++ b/crates/primitives/Cargo.toml @@ -62,11 +62,11 @@ arbitrary = { workspace = true, features = ["derive"], optional = true } [dev-dependencies] # eth -reth-chainspec.workspace = true +reth-chainspec = { workspace = true, features = ["arbitrary"] } reth-codecs = { workspace = true, features = ["test-utils"] } reth-primitives-traits = { workspace = true, features = ["arbitrary"] } reth-testing-utils.workspace = true -reth-trie-common.workspace = true +reth-trie-common = { workspace = true, features = ["arbitrary"] } revm-primitives = { workspace = true, features = ["arbitrary"] } alloy-eips = { workspace = true, features = ["arbitrary"] } diff --git a/testing/testing-utils/Cargo.toml b/testing/testing-utils/Cargo.toml index 3e0f58a7bd08..d0de37bf77f1 100644 --- a/testing/testing-utils/Cargo.toml +++ b/testing/testing-utils/Cargo.toml @@ -12,7 +12,7 @@ repository.workspace = true workspace = true [dependencies] -reth-primitives = { workspace = true, features = ["secp256k1"] } +reth-primitives = { workspace = true, features = ["secp256k1", "arbitrary"] } alloy-genesis.workspace = true alloy-primitives.workspace = true From 4b4f9cf40626c8714f0a0caa152a970fa6e5dfa0 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Mon, 18 Nov 2024 11:53:12 +0100 Subject: [PATCH 003/156] feat: install op debug exeuction witness (#12622) 
--- Cargo.lock | 1 + crates/node/builder/src/rpc.rs | 32 ++++++++++++++++++++++++---- crates/optimism/node/Cargo.toml | 1 + crates/optimism/node/src/node.rs | 34 ++++++++++++++++++++---------- crates/optimism/rpc/src/witness.rs | 2 +- 5 files changed, 54 insertions(+), 16 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 23503d907563..ded071c5dc27 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8335,6 +8335,7 @@ dependencies = [ "reth-primitives", "reth-provider", "reth-revm", + "reth-rpc-server-types", "reth-tracing", "reth-transaction-pool", "reth-trie-db", diff --git a/crates/node/builder/src/rpc.rs b/crates/node/builder/src/rpc.rs index adee942748c1..fda8b66f8d79 100644 --- a/crates/node/builder/src/rpc.rs +++ b/crates/node/builder/src/rpc.rs @@ -399,7 +399,7 @@ where } } -impl NodeAddOns for RpcAddOns +impl RpcAddOns where N: FullNodeComponents< Types: ProviderNodeTypes, @@ -408,9 +408,16 @@ where EthApi: EthApiTypes + FullEthApiServer + AddDevSigners + Unpin + 'static, EV: EngineValidatorBuilder, { - type Handle = RpcHandle; - - async fn launch_add_ons(self, ctx: AddOnsContext<'_, N>) -> eyre::Result { + /// Launches the RPC servers with the given context and an additional hook for extending + /// modules. + pub async fn launch_add_ons_with( + self, + ctx: AddOnsContext<'_, N>, + ext: F, + ) -> eyre::Result> + where + F: FnOnce(&mut TransportRpcModules) -> eyre::Result<()>, + { let Self { eth_api_builder, engine_validator_builder, hooks, _pd: _ } = self; let engine_validator = engine_validator_builder.build(&ctx).await?; @@ -467,6 +474,7 @@ where let RpcHooks { on_rpc_started, extend_rpc_modules } = hooks; + ext(ctx.modules)?; extend_rpc_modules.extend_rpc_modules(ctx)?; let server_config = config.rpc.rpc_server_config(); @@ -513,6 +521,22 @@ where } } +impl NodeAddOns for RpcAddOns +where + N: FullNodeComponents< + Types: ProviderNodeTypes, + PayloadBuilder: PayloadBuilder::Engine>, + >, + EthApi: EthApiTypes + FullEthApiServer + AddDevSigners + Unpin + 'static, + EV: EngineValidatorBuilder, +{ + type Handle = RpcHandle; + + async fn launch_add_ons(self, ctx: AddOnsContext<'_, N>) -> eyre::Result { + self.launch_add_ons_with(ctx, |_| Ok(())).await + } +} + /// Helper trait implemented for add-ons producing [`RpcHandle`]. Used by common node launcher /// implementations. pub trait RethRpcAddOns: diff --git a/crates/optimism/node/Cargo.toml b/crates/optimism/node/Cargo.toml index 9a80c83deec1..03ea75a26cdd 100644 --- a/crates/optimism/node/Cargo.toml +++ b/crates/optimism/node/Cargo.toml @@ -29,6 +29,7 @@ reth-evm.workspace = true reth-revm = { workspace = true, features = ["std"] } reth-beacon-consensus.workspace = true reth-trie-db.workspace = true +reth-rpc-server-types.workspace = true # op-reth reth-optimism-payload-builder.workspace = true diff --git a/crates/optimism/node/src/node.rs b/crates/optimism/node/src/node.rs index 0c2186c72684..238953c9d571 100644 --- a/crates/optimism/node/src/node.rs +++ b/crates/optimism/node/src/node.rs @@ -1,7 +1,11 @@ //! Optimism Node types config. 
-use std::sync::Arc; - +use crate::{ + args::RollupArgs, + engine::OpEngineValidator, + txpool::{OpTransactionPool, OpTransactionValidator}, + OpEngineTypes, +}; use alloy_consensus::Header; use reth_basic_payload_builder::{BasicPayloadJobGenerator, BasicPayloadJobGeneratorConfig}; use reth_chainspec::{EthChainSpec, Hardforks}; @@ -23,23 +27,21 @@ use reth_optimism_chainspec::OpChainSpec; use reth_optimism_consensus::OpBeaconConsensus; use reth_optimism_evm::{OpEvmConfig, OpExecutionStrategyFactory}; use reth_optimism_payload_builder::builder::OpPayloadTransactions; -use reth_optimism_rpc::OpEthApi; +use reth_optimism_rpc::{ + witness::{DebugExecutionWitnessApiServer, OpDebugWitnessApi}, + OpEthApi, +}; use reth_payload_builder::{PayloadBuilderHandle, PayloadBuilderService}; use reth_primitives::{Block, Receipt, TransactionSigned, TxType}; use reth_provider::CanonStateSubscriptions; +use reth_rpc_server_types::RethRpcModule; use reth_tracing::tracing::{debug, info}; use reth_transaction_pool::{ blobstore::DiskFileBlobStore, CoinbaseTipOrdering, TransactionPool, TransactionValidationTaskExecutor, }; use reth_trie_db::MerklePatriciaTrie; - -use crate::{ - args::RollupArgs, - engine::OpEngineValidator, - txpool::{OpTransactionPool, OpTransactionValidator}, - OpEngineTypes, -}; +use std::sync::Arc; /// Optimism primitive types. #[derive(Debug, Default, Clone)] @@ -163,7 +165,17 @@ where self, ctx: reth_node_api::AddOnsContext<'_, N>, ) -> eyre::Result { - self.0.launch_add_ons(ctx).await + // install additional OP specific rpc methods + let debug_ext = + OpDebugWitnessApi::new(ctx.node.provider().clone(), ctx.node.evm_config().clone()); + + self.0 + .launch_add_ons_with(ctx, move |modules| { + debug!(target: "reth::cli", "Installing debug payload witness rpc endpoint"); + modules.merge_if_module_configured(RethRpcModule::Debug, debug_ext.into_rpc())?; + Ok(()) + }) + .await } } diff --git a/crates/optimism/rpc/src/witness.rs b/crates/optimism/rpc/src/witness.rs index 0521fa9025df..ed9d77e73e84 100644 --- a/crates/optimism/rpc/src/witness.rs +++ b/crates/optimism/rpc/src/witness.rs @@ -11,7 +11,7 @@ use reth_optimism_chainspec::OpChainSpec; use reth_optimism_payload_builder::OpPayloadBuilder; use reth_primitives::SealedHeader; use reth_provider::{BlockReaderIdExt, ProviderError, ProviderResult, StateProviderFactory}; -use reth_rpc_api::DebugExecutionWitnessApiServer; +pub use reth_rpc_api::DebugExecutionWitnessApiServer; use reth_rpc_server_types::{result::internal_rpc_err, ToRpcResult}; use std::{fmt::Debug, sync::Arc}; From 1acdf9b2d43958eec0bd7b2dd449978d8b4d7862 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Mon, 18 Nov 2024 12:33:38 +0100 Subject: [PATCH 004/156] fix: add additional op checks for chain specific check (#12623) --- crates/node/builder/src/launch/common.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/crates/node/builder/src/launch/common.rs b/crates/node/builder/src/launch/common.rs index 972fdc640dfc..41fbf93e05d1 100644 --- a/crates/node/builder/src/launch/common.rs +++ b/crates/node/builder/src/launch/common.rs @@ -819,7 +819,10 @@ where /// This checks for OP-Mainnet and ensures we have all the necessary data to progress (past /// bedrock height) fn ensure_chain_specific_db_checks(&self) -> ProviderResult<()> { - if self.chain_id() == Chain::optimism_mainnet() { + if self.chain_spec().is_optimism() && + !self.is_dev() && + self.chain_id() == Chain::optimism_mainnet() + { let latest = self.blockchain_db().last_block_number()?; // bedrock 
height if latest < 105235063 { From 8aaac6d7c264912917b834f651874a9db04ce982 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Mon, 18 Nov 2024 12:47:30 +0100 Subject: [PATCH 005/156] feat: add TokioTaskExecutor::boxed (#12619) --- crates/tasks/src/lib.rs | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/crates/tasks/src/lib.rs b/crates/tasks/src/lib.rs index 28b5eaba9ffb..340e925ec56b 100644 --- a/crates/tasks/src/lib.rs +++ b/crates/tasks/src/lib.rs @@ -111,6 +111,13 @@ dyn_clone::clone_trait_object!(TaskSpawner); #[non_exhaustive] pub struct TokioTaskExecutor; +impl TokioTaskExecutor { + /// Converts the instance to a boxed [`TaskSpawner`]. + pub fn boxed(self) -> Box { + Box::new(self) + } +} + impl TaskSpawner for TokioTaskExecutor { fn spawn(&self, fut: BoxFuture<'static, ()>) -> JoinHandle<()> { tokio::task::spawn(fut) From 4daec16272d9f4b2c641b92ca2f42c1fbcb9b9db Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Mon, 18 Nov 2024 12:52:36 +0100 Subject: [PATCH 006/156] feat: add EthereumEthApiTypes (#12618) --- crates/rpc/rpc/src/eth/helpers/types.rs | 18 +++++++++++++++++- crates/rpc/rpc/src/eth/mod.rs | 5 ++++- 2 files changed, 21 insertions(+), 2 deletions(-) diff --git a/crates/rpc/rpc/src/eth/helpers/types.rs b/crates/rpc/rpc/src/eth/helpers/types.rs index d1ce84bc0b77..8f135a9103bd 100644 --- a/crates/rpc/rpc/src/eth/helpers/types.rs +++ b/crates/rpc/rpc/src/eth/helpers/types.rs @@ -4,11 +4,27 @@ use alloy_consensus::{Signed, Transaction as _, TxEip4844Variant, TxEnvelope}; use alloy_network::{Ethereum, Network}; use alloy_rpc_types_eth::{Transaction, TransactionInfo}; use reth_primitives::{TransactionSigned, TransactionSignedEcRecovered}; +use reth_rpc_eth_api::EthApiTypes; use reth_rpc_eth_types::EthApiError; use reth_rpc_types_compat::TransactionCompat; +/// A standalone [`EthApiTypes`] implementation for Ethereum. +#[derive(Debug, Clone, Copy, Default)] +pub struct EthereumEthApiTypes(EthTxBuilder); + +impl EthApiTypes for EthereumEthApiTypes { + type Error = EthApiError; + type NetworkTypes = Ethereum; + type TransactionCompat = EthTxBuilder; + + fn tx_resp_builder(&self) -> &Self::TransactionCompat { + &self.0 + } +} + /// Builds RPC transaction response for l1. 
-#[derive(Debug, Clone, Copy)] +#[derive(Debug, Clone, Copy, Default)] +#[non_exhaustive] pub struct EthTxBuilder; impl TransactionCompat for EthTxBuilder diff --git a/crates/rpc/rpc/src/eth/mod.rs b/crates/rpc/rpc/src/eth/mod.rs index 4d1833add3e7..d8a5b95f55e7 100644 --- a/crates/rpc/rpc/src/eth/mod.rs +++ b/crates/rpc/rpc/src/eth/mod.rs @@ -13,6 +13,9 @@ pub use core::EthApi; pub use filter::EthFilter; pub use pubsub::EthPubSub; -pub use helpers::{signer::DevSigner, types::EthTxBuilder}; +pub use helpers::{ + signer::DevSigner, + types::{EthTxBuilder, EthereumEthApiTypes}, +}; pub use reth_rpc_eth_api::{EthApiServer, EthApiTypes, FullEthApiServer, RpcNodeCore}; From 378e097aeadfce3b6506146a3af70b406b35e1ef Mon Sep 17 00:00:00 2001 From: Tien Nguyen <116023870+htiennv@users.noreply.github.com> Date: Mon, 18 Nov 2024 18:56:10 +0700 Subject: [PATCH 007/156] chore(sdk): Add InMemorySize as super trait (#12615) --- crates/optimism/primitives/src/tx_type.rs | 10 +++++++++- crates/primitives-traits/src/receipt.rs | 3 +++ .../primitives-traits/src/transaction/signed.rs | 3 ++- crates/primitives-traits/src/tx_type.rs | 3 +++ crates/primitives/src/receipt.rs | 17 +++++++++++++++++ crates/primitives/src/transaction/mod.rs | 14 ++++++++------ crates/primitives/src/transaction/tx_type.rs | 9 +++++++++ crates/transaction-pool/src/test_utils/mock.rs | 1 + 8 files changed, 52 insertions(+), 8 deletions(-) diff --git a/crates/optimism/primitives/src/tx_type.rs b/crates/optimism/primitives/src/tx_type.rs index 1b505920120d..70f5fd32d8e5 100644 --- a/crates/optimism/primitives/src/tx_type.rs +++ b/crates/optimism/primitives/src/tx_type.rs @@ -13,7 +13,7 @@ use derive_more::{ Display, }; use op_alloy_consensus::OpTxType as AlloyOpTxType; -use reth_primitives_traits::TxType; +use reth_primitives_traits::{InMemorySize, TxType}; #[cfg(feature = "reth-codec")] use alloy_consensus::constants::EIP7702_TX_TYPE_ID; @@ -57,6 +57,14 @@ impl TxType for OpTxType { } } +impl InMemorySize for OpTxType { + /// Calculates a heuristic for the in-memory size of the [`OpTxType`]. + #[inline] + fn size(&self) -> usize { + core::mem::size_of::() + } +} + impl From for U8 { fn from(tx_type: OpTxType) -> Self { Self::from(u8::from(tx_type)) diff --git a/crates/primitives-traits/src/receipt.rs b/crates/primitives-traits/src/receipt.rs index 31bded015d41..b34590dff0e9 100644 --- a/crates/primitives-traits/src/receipt.rs +++ b/crates/primitives-traits/src/receipt.rs @@ -7,6 +7,8 @@ use core::fmt; use reth_codecs::Compact; use serde::{Deserialize, Serialize}; +use crate::InMemorySize; + /// Helper trait that unifies all behaviour required by receipt to support full node operations. pub trait FullReceipt: Receipt + Compact {} @@ -25,6 +27,7 @@ pub trait Receipt: + alloy_rlp::Encodable + alloy_rlp::Decodable + Serialize + + InMemorySize + for<'de> Deserialize<'de> { /// Returns transaction type. diff --git a/crates/primitives-traits/src/transaction/signed.rs b/crates/primitives-traits/src/transaction/signed.rs index 958d5cd6c77e..7b6abbaec0f3 100644 --- a/crates/primitives-traits/src/transaction/signed.rs +++ b/crates/primitives-traits/src/transaction/signed.rs @@ -8,7 +8,7 @@ use alloy_primitives::{keccak256, Address, PrimitiveSignature, TxHash, B256}; use reth_codecs::Compact; use revm_primitives::TxEnv; -use crate::{FullTransaction, MaybeArbitrary, Transaction}; +use crate::{FullTransaction, InMemorySize, MaybeArbitrary, Transaction}; /// Helper trait that unifies all behaviour required by block to support full node operations. 
pub trait FullSignedTx: SignedTransaction + Compact {} @@ -35,6 +35,7 @@ pub trait SignedTransaction: + Decodable2718 + alloy_consensus::Transaction + MaybeArbitrary + + InMemorySize { /// Transaction type that is signed. type Transaction: Transaction; diff --git a/crates/primitives-traits/src/tx_type.rs b/crates/primitives-traits/src/tx_type.rs index b1828ad57d9e..d9ef687759e7 100644 --- a/crates/primitives-traits/src/tx_type.rs +++ b/crates/primitives-traits/src/tx_type.rs @@ -3,6 +3,8 @@ use core::fmt; use alloy_primitives::{U64, U8}; use reth_codecs::Compact; +use crate::InMemorySize; + /// Helper trait that unifies all behaviour required by transaction type ID to support full node /// operations. pub trait FullTxType: TxType + Compact {} @@ -29,6 +31,7 @@ pub trait TxType: + TryFrom + alloy_rlp::Encodable + alloy_rlp::Decodable + + InMemorySize { /// Returns `true` if this is a legacy transaction. fn is_legacy(&self) -> bool; diff --git a/crates/primitives/src/receipt.rs b/crates/primitives/src/receipt.rs index b61ee7c14d2f..f4567de421e5 100644 --- a/crates/primitives/src/receipt.rs +++ b/crates/primitives/src/receipt.rs @@ -1,5 +1,6 @@ use alloc::{vec, vec::Vec}; use core::cmp::Ordering; +use reth_primitives_traits::InMemorySize; use alloy_consensus::{ constants::{EIP1559_TX_TYPE_ID, EIP2930_TX_TYPE_ID, EIP4844_TX_TYPE_ID, EIP7702_TX_TYPE_ID}, @@ -109,6 +110,22 @@ impl ReceiptExt for Receipt { } } +impl InMemorySize for Receipt { + /// Calculates a heuristic for the in-memory size of the [Receipt]. + #[inline] + fn size(&self) -> usize { + let total_size = self.tx_type.size() + + core::mem::size_of::() + + core::mem::size_of::() + + self.logs.capacity() * core::mem::size_of::(); + + #[cfg(feature = "optimism")] + return total_size + 2 * core::mem::size_of::>(); + #[cfg(not(feature = "optimism"))] + total_size + } +} + /// A collection of receipts organized as a two-dimensional vector. #[derive( Clone, diff --git a/crates/primitives/src/transaction/mod.rs b/crates/primitives/src/transaction/mod.rs index 015621cdcce1..aa57ef8d81e8 100644 --- a/crates/primitives/src/transaction/mod.rs +++ b/crates/primitives/src/transaction/mod.rs @@ -1274,12 +1274,6 @@ impl TransactionSigned { initial_tx } - /// Calculate a heuristic for the in-memory size of the [`TransactionSigned`]. - #[inline] - pub fn size(&self) -> usize { - mem::size_of::() + self.transaction.size() + mem::size_of::() - } - /// Decodes legacy transaction from the data buffer into a tuple. /// /// This expects `rlp(legacy_tx)` @@ -1447,6 +1441,14 @@ impl SignedTransaction for TransactionSigned { } } +impl InMemorySize for TransactionSigned { + /// Calculate a heuristic for the in-memory size of the [`TransactionSigned`]. 
+ #[inline] + fn size(&self) -> usize { + mem::size_of::() + self.transaction.size() + mem::size_of::() + } +} + impl alloy_consensus::Transaction for TransactionSigned { fn chain_id(&self) -> Option { self.deref().chain_id() diff --git a/crates/primitives/src/transaction/tx_type.rs b/crates/primitives/src/transaction/tx_type.rs index 3445cb184c17..caa6d8728541 100644 --- a/crates/primitives/src/transaction/tx_type.rs +++ b/crates/primitives/src/transaction/tx_type.rs @@ -5,6 +5,7 @@ use alloy_consensus::constants::{ use alloy_primitives::{U64, U8}; use alloy_rlp::{Decodable, Encodable}; use derive_more::Display; +use reth_primitives_traits::InMemorySize; use serde::{Deserialize, Serialize}; /// Identifier parameter for legacy transaction @@ -118,6 +119,14 @@ impl reth_primitives_traits::TxType for TxType { } } +impl InMemorySize for TxType { + /// Calculates a heuristic for the in-memory size of the [`TxType`]. + #[inline] + fn size(&self) -> usize { + core::mem::size_of::() + } +} + impl From for u8 { fn from(value: TxType) -> Self { match value { diff --git a/crates/transaction-pool/src/test_utils/mock.rs b/crates/transaction-pool/src/test_utils/mock.rs index fc43349f3f18..69f1835edcf2 100644 --- a/crates/transaction-pool/src/test_utils/mock.rs +++ b/crates/transaction-pool/src/test_utils/mock.rs @@ -28,6 +28,7 @@ use reth_primitives::{ transaction::TryFromRecoveredTransactionError, PooledTransactionsElementEcRecovered, Transaction, TransactionSigned, TransactionSignedEcRecovered, TxType, }; +use reth_primitives_traits::InMemorySize; use std::{ops::Range, sync::Arc, time::Instant, vec::IntoIter}; /// A transaction pool implementation using [`MockOrdering`] for transaction ordering. From 626224e301f3fe1bd1faf2a20c486af13692d8f8 Mon Sep 17 00:00:00 2001 From: Hai | RISE <150876604+hai-rise@users.noreply.github.com> Date: Mon, 18 Nov 2024 18:57:22 +0700 Subject: [PATCH 008/156] chore: refactor `MockTransaction` (#12627) --- crates/net/network/src/transactions/mod.rs | 2 +- .../network/tests/it/big_pooled_txs_req.rs | 2 +- crates/transaction-pool/src/pool/mod.rs | 2 +- crates/transaction-pool/src/pool/txpool.rs | 12 +- .../transaction-pool/src/test_utils/mock.rs | 156 ++---------------- crates/transaction-pool/tests/it/blobs.rs | 13 +- crates/transaction-pool/tests/it/evict.rs | 7 +- crates/transaction-pool/tests/it/listeners.rs | 4 +- crates/transaction-pool/tests/it/pending.rs | 4 +- 9 files changed, 40 insertions(+), 162 deletions(-) diff --git a/crates/net/network/src/transactions/mod.rs b/crates/net/network/src/transactions/mod.rs index 36abcd3d6177..0ccb4252ac37 100644 --- a/crates/net/network/src/transactions/mod.rs +++ b/crates/net/network/src/transactions/mod.rs @@ -2230,7 +2230,7 @@ mod tests { .add_transaction(reth_transaction_pool::TransactionOrigin::External, tx.clone()) .await; - let request = GetPooledTransactions(vec![tx.get_hash()]); + let request = GetPooledTransactions(vec![*tx.get_hash()]); let (send, receive) = oneshot::channel::>(); diff --git a/crates/net/network/tests/it/big_pooled_txs_req.rs b/crates/net/network/tests/it/big_pooled_txs_req.rs index 4d65e3f63baa..9e0f69160b60 100644 --- a/crates/net/network/tests/it/big_pooled_txs_req.rs +++ b/crates/net/network/tests/it/big_pooled_txs_req.rs @@ -35,7 +35,7 @@ async fn test_large_tx_req() { tx }) .collect(); - let txs_hashes: Vec = txs.iter().map(|tx| tx.get_hash()).collect(); + let txs_hashes: Vec = txs.iter().map(|tx| *tx.get_hash()).collect(); // setup testnet let mut net = Testnet::create_with(2, 
MockEthProvider::default()).await; diff --git a/crates/transaction-pool/src/pool/mod.rs b/crates/transaction-pool/src/pool/mod.rs index 6441ed687f2a..3f7ecfa7836f 100644 --- a/crates/transaction-pool/src/pool/mod.rs +++ b/crates/transaction-pool/src/pool/mod.rs @@ -1302,7 +1302,7 @@ mod tests { // Insert the sidecar into the blob store if the current index is within the blob limit. if n < blob_limit.max_txs { - blob_store.insert(tx.get_hash(), sidecar.clone()).unwrap(); + blob_store.insert(*tx.get_hash(), sidecar.clone()).unwrap(); } // Add the transaction to the pool with external origin and valid outcome. diff --git a/crates/transaction-pool/src/pool/txpool.rs b/crates/transaction-pool/src/pool/txpool.rs index 040deb15fcbd..537162ac76c9 100644 --- a/crates/transaction-pool/src/pool/txpool.rs +++ b/crates/transaction-pool/src/pool/txpool.rs @@ -2486,8 +2486,7 @@ mod tests { let tx = MockTransaction::eip1559().inc_price().inc_limit(); let first = f.validated(tx.clone()); pool.insert_tx(first, on_chain_balance, on_chain_nonce).unwrap(); - let tx = - MockTransaction::eip4844().set_sender(tx.get_sender()).inc_price_by(100).inc_limit(); + let tx = MockTransaction::eip4844().set_sender(tx.sender()).inc_price_by(100).inc_limit(); let blob = f.validated(tx); let err = pool.insert_tx(blob, on_chain_balance, on_chain_nonce).unwrap_err(); assert!(matches!(err, InsertErr::TxTypeConflict { .. }), "{err:?}"); @@ -2502,8 +2501,7 @@ mod tests { let tx = MockTransaction::eip4844().inc_price().inc_limit(); let first = f.validated(tx.clone()); pool.insert_tx(first, on_chain_balance, on_chain_nonce).unwrap(); - let tx = - MockTransaction::eip1559().set_sender(tx.get_sender()).inc_price_by(100).inc_limit(); + let tx = MockTransaction::eip1559().set_sender(tx.sender()).inc_price_by(100).inc_limit(); let tx = f.validated(tx); let err = pool.insert_tx(tx, on_chain_balance, on_chain_nonce).unwrap_err(); assert!(matches!(err, InsertErr::TxTypeConflict { .. }), "{err:?}"); @@ -2622,7 +2620,7 @@ mod tests { assert_eq!( pool.max_account_slots, - pool.tx_count(f.ids.sender_id(&tx.get_sender()).unwrap()) + pool.tx_count(f.ids.sender_id(tx.get_sender()).unwrap()) ); let err = @@ -2654,7 +2652,7 @@ mod tests { assert_eq!( pool.max_account_slots, - pool.tx_count(f.ids.sender_id(&tx.get_sender()).unwrap()) + pool.tx_count(f.ids.sender_id(tx.get_sender()).unwrap()) ); pool.insert_tx( @@ -2829,7 +2827,7 @@ mod tests { let mut changed_senders = HashMap::default(); changed_senders.insert( id.sender, - SenderInfo { state_nonce: next.get_nonce(), balance: U256::from(1_000) }, + SenderInfo { state_nonce: next.nonce(), balance: U256::from(1_000) }, ); let outcome = pool.update_accounts(changed_senders); assert_eq!(outcome.discarded.len(), 1); diff --git a/crates/transaction-pool/src/test_utils/mock.rs b/crates/transaction-pool/src/test_utils/mock.rs index 69f1835edcf2..56acbb107f3b 100644 --- a/crates/transaction-pool/src/test_utils/mock.rs +++ b/crates/transaction-pool/src/test_utils/mock.rs @@ -69,7 +69,7 @@ macro_rules! get_value { MockTransaction::Legacy { $field, .. } | MockTransaction::Eip1559 { $field, .. } | MockTransaction::Eip4844 { $field, .. } | - MockTransaction::Eip2930 { $field, .. } => $field.clone(), + MockTransaction::Eip2930 { $field, .. } => $field, } }; } @@ -91,7 +91,7 @@ macro_rules! make_setters_getters { } /// Gets the value of the specified field. 
- pub fn [](&self) -> $t { + pub const fn [](&self) -> &$t { get_value!(self => $name) } )*} @@ -582,30 +582,15 @@ impl PoolTransaction for MockTransaction { } fn hash(&self) -> &TxHash { - match self { - Self::Legacy { hash, .. } | - Self::Eip1559 { hash, .. } | - Self::Eip4844 { hash, .. } | - Self::Eip2930 { hash, .. } => hash, - } + self.get_hash() } fn sender(&self) -> Address { - match self { - Self::Legacy { sender, .. } | - Self::Eip1559 { sender, .. } | - Self::Eip4844 { sender, .. } | - Self::Eip2930 { sender, .. } => *sender, - } + *self.get_sender() } fn nonce(&self) -> u64 { - match self { - Self::Legacy { nonce, .. } | - Self::Eip1559 { nonce, .. } | - Self::Eip4844 { nonce, .. } | - Self::Eip2930 { nonce, .. } => *nonce, - } + *self.get_nonce() } fn cost(&self) -> U256 { @@ -622,7 +607,7 @@ impl PoolTransaction for MockTransaction { } fn gas_limit(&self) -> u64 { - self.get_gas_limit() + *self.get_gas_limit() } fn max_fee_per_gas(&self) -> u128 { @@ -703,22 +688,12 @@ impl PoolTransaction for MockTransaction { /// Returns the input data associated with the transaction. fn input(&self) -> &[u8] { - match self { - Self::Legacy { .. } => &[], - Self::Eip1559 { input, .. } | - Self::Eip4844 { input, .. } | - Self::Eip2930 { input, .. } => input, - } + self.get_input() } /// Returns the size of the transaction. fn size(&self) -> usize { - match self { - Self::Legacy { size, .. } | - Self::Eip1559 { size, .. } | - Self::Eip4844 { size, .. } | - Self::Eip2930 { size, .. } => *size, - } + *self.get_size() } /// Returns the transaction type as a byte identifier. @@ -1007,109 +982,14 @@ impl proptest::arbitrary::Arbitrary for MockTransaction { fn arbitrary_with(_: Self::Parameters) -> Self::Strategy { use proptest::prelude::Strategy; use proptest_arbitrary_interop::arb; - use reth_primitives_traits::size::InMemorySize; - - arb::<(Transaction, Address, B256)>() - .prop_map(|(tx, sender, tx_hash)| match &tx { - Transaction::Legacy(TxLegacy { - chain_id, - nonce, - gas_price, - gas_limit, - to, - value, - input, - }) => Self::Legacy { - chain_id: *chain_id, - sender, - hash: tx_hash, - nonce: *nonce, - gas_price: *gas_price, - gas_limit: { *gas_limit }, - to: *to, - value: *value, - input: input.clone(), - size: tx.size(), - }, - - Transaction::Eip2930(TxEip2930 { - chain_id, - nonce, - gas_price, - gas_limit, - to, - value, - access_list, - input, - }) => Self::Eip2930 { - chain_id: *chain_id, - sender, - hash: tx_hash, - nonce: *nonce, - gas_price: *gas_price, - gas_limit: { *gas_limit }, - to: *to, - value: *value, - input: input.clone(), - access_list: access_list.clone(), - size: tx.size(), - }, - Transaction::Eip1559(TxEip1559 { - chain_id, - nonce, - gas_limit, - max_fee_per_gas, - max_priority_fee_per_gas, - to, - value, - input, - access_list, - }) => Self::Eip1559 { - chain_id: *chain_id, - sender, - hash: tx_hash, - nonce: *nonce, - max_fee_per_gas: *max_fee_per_gas, - max_priority_fee_per_gas: *max_priority_fee_per_gas, - gas_limit: { *gas_limit }, - to: *to, - value: *value, - input: input.clone(), - access_list: access_list.clone(), - size: tx.size(), - }, - Transaction::Eip4844(TxEip4844 { - chain_id, - nonce, - gas_limit, - max_fee_per_gas, - max_priority_fee_per_gas, - to, - value, - input, - max_fee_per_blob_gas, - access_list, - blob_versioned_hashes: _, - }) => Self::Eip4844 { - chain_id: *chain_id, - sender, - hash: tx_hash, - nonce: *nonce, - max_fee_per_gas: *max_fee_per_gas, - max_priority_fee_per_gas: *max_priority_fee_per_gas, - max_fee_per_blob_gas: 
*max_fee_per_blob_gas, - gas_limit: { *gas_limit }, - to: *to, - value: *value, - input: input.clone(), - access_list: access_list.clone(), - // only generate a sidecar if it is a 4844 tx - also for the sake of - // performance just use a default sidecar - sidecar: BlobTransactionSidecar::default(), - size: tx.size(), - }, - #[allow(unreachable_patterns)] - _ => unimplemented!(), + + arb::<(TransactionSigned, Address)>() + .prop_map(|(signed_transaction, signer)| { + TransactionSignedEcRecovered::from_signed_transaction(signed_transaction, signer) + .try_into() + .expect( + "Failed to create an Arbitrary MockTransaction via TransactionSignedEcRecovered", + ) }) .boxed() } @@ -1128,8 +1008,8 @@ pub struct MockTransactionFactory { impl MockTransactionFactory { /// Generates a transaction ID for the given [`MockTransaction`]. pub fn tx_id(&mut self, tx: &MockTransaction) -> TransactionId { - let sender = self.ids.sender_id_or_create(tx.get_sender()); - TransactionId::new(sender, tx.get_nonce()) + let sender = self.ids.sender_id_or_create(tx.sender()); + TransactionId::new(sender, tx.nonce()) } /// Validates a [`MockTransaction`] and returns a [`MockValidTx`]. diff --git a/crates/transaction-pool/tests/it/blobs.rs b/crates/transaction-pool/tests/it/blobs.rs index 0cdc6d088c04..9417c62278b7 100644 --- a/crates/transaction-pool/tests/it/blobs.rs +++ b/crates/transaction-pool/tests/it/blobs.rs @@ -3,7 +3,7 @@ use reth_transaction_pool::{ error::PoolErrorKind, test_utils::{MockTransaction, MockTransactionFactory, TestPoolBuilder}, - TransactionOrigin, TransactionPool, + PoolTransaction, TransactionOrigin, TransactionPool, }; #[tokio::test(flavor = "multi_thread")] @@ -16,23 +16,22 @@ async fn blobs_exclusive() { .add_transaction(TransactionOrigin::External, blob_tx.transaction.clone()) .await .unwrap(); - assert_eq!(hash, blob_tx.transaction.get_hash()); + assert_eq!(hash, *blob_tx.transaction.get_hash()); let mut best_txns = txpool.best_transactions(); assert_eq!(best_txns.next().unwrap().transaction.get_hash(), blob_tx.transaction.get_hash()); assert!(best_txns.next().is_none()); - let eip1559_tx = MockTransaction::eip1559() - .set_sender(blob_tx.transaction.get_sender()) - .inc_price_by(10_000); + let eip1559_tx = + MockTransaction::eip1559().set_sender(blob_tx.transaction.sender()).inc_price_by(10_000); let res = txpool.add_transaction(TransactionOrigin::External, eip1559_tx.clone()).await.unwrap_err(); - assert_eq!(res.hash, eip1559_tx.get_hash()); + assert_eq!(res.hash, *eip1559_tx.get_hash()); match res.kind { PoolErrorKind::ExistingConflictingTransactionType(addr, tx_type) => { - assert_eq!(addr, eip1559_tx.get_sender()); + assert_eq!(addr, eip1559_tx.sender()); assert_eq!(tx_type, eip1559_tx.tx_type()); } _ => unreachable!(), diff --git a/crates/transaction-pool/tests/it/evict.rs b/crates/transaction-pool/tests/it/evict.rs index fea50962fd9e..3b74b8cb2300 100644 --- a/crates/transaction-pool/tests/it/evict.rs +++ b/crates/transaction-pool/tests/it/evict.rs @@ -8,7 +8,8 @@ use reth_transaction_pool::{ test_utils::{ MockFeeRange, MockTransactionDistribution, MockTransactionRatio, TestPool, TestPoolBuilder, }, - BlockInfo, PoolConfig, SubPoolLimit, TransactionOrigin, TransactionPool, TransactionPoolExt, + BlockInfo, PoolConfig, PoolTransaction, SubPoolLimit, TransactionOrigin, TransactionPool, + TransactionPoolExt, }; #[tokio::test(flavor = "multi_thread")] @@ -87,7 +88,7 @@ async fn only_blobs_eviction() { let set = set.into_vec(); // ensure that the first nonce is 0 - 
assert_eq!(set[0].get_nonce(), 0); + assert_eq!(set[0].nonce(), 0); // and finally insert it into the pool let results = pool.add_transactions(TransactionOrigin::External, set).await; @@ -194,7 +195,7 @@ async fn mixed_eviction() { ); let set = set.into_inner().into_vec(); - assert_eq!(set[0].get_nonce(), 0); + assert_eq!(set[0].nonce(), 0); let results = pool.add_transactions(TransactionOrigin::External, set).await; for (i, result) in results.iter().enumerate() { diff --git a/crates/transaction-pool/tests/it/listeners.rs b/crates/transaction-pool/tests/it/listeners.rs index ad13af22a6a2..0f8a0b19e2bc 100644 --- a/crates/transaction-pool/tests/it/listeners.rs +++ b/crates/transaction-pool/tests/it/listeners.rs @@ -33,11 +33,11 @@ async fn txpool_listener_all() { let added_result = txpool.add_transaction(TransactionOrigin::External, transaction.transaction.clone()).await; - assert_matches!(added_result, Ok(hash) if hash == transaction.transaction.get_hash()); + assert_matches!(added_result, Ok(hash) if hash == *transaction.transaction.get_hash()); assert_matches!( all_tx_events.next().await, - Some(FullTransactionEvent::Pending(hash)) if hash == transaction.transaction.get_hash() + Some(FullTransactionEvent::Pending(hash)) if hash == *transaction.transaction.get_hash() ); } diff --git a/crates/transaction-pool/tests/it/pending.rs b/crates/transaction-pool/tests/it/pending.rs index 0b6349b24cc3..be559c71eec4 100644 --- a/crates/transaction-pool/tests/it/pending.rs +++ b/crates/transaction-pool/tests/it/pending.rs @@ -12,7 +12,7 @@ async fn txpool_new_pending_txs() { let added_result = txpool.add_transaction(TransactionOrigin::External, transaction.transaction.clone()).await; - assert_matches!(added_result, Ok(hash) if hash == transaction.transaction.get_hash()); + assert_matches!(added_result, Ok(hash) if hash == *transaction.transaction.get_hash()); let mut best_txns = txpool.best_transactions(); assert_matches!(best_txns.next(), Some(tx) if tx.transaction.get_hash() == transaction.transaction.get_hash()); @@ -20,6 +20,6 @@ async fn txpool_new_pending_txs() { let transaction = mock_tx_factory.create_eip1559(); let added_result = txpool.add_transaction(TransactionOrigin::External, transaction.transaction.clone()).await; - assert_matches!(added_result, Ok(hash) if hash == transaction.transaction.get_hash()); + assert_matches!(added_result, Ok(hash) if hash == *transaction.transaction.get_hash()); assert_matches!(best_txns.next(), Some(tx) if tx.transaction.get_hash() == transaction.transaction.get_hash()); } From a84f58bcb8e329999d842716e604ff97477c8970 Mon Sep 17 00:00:00 2001 From: Oliver Date: Mon, 18 Nov 2024 13:00:57 +0100 Subject: [PATCH 009/156] chore: use keccak256 from alloy (#12628) --- crates/trie/common/src/key.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/crates/trie/common/src/key.rs b/crates/trie/common/src/key.rs index 9e440d199fa1..71f8019bff54 100644 --- a/crates/trie/common/src/key.rs +++ b/crates/trie/common/src/key.rs @@ -1,5 +1,4 @@ -use alloy_primitives::B256; -use revm_primitives::keccak256; +use alloy_primitives::{keccak256, B256}; /// Trait for hashing keys in state. 
pub trait KeyHasher: Default + Clone + Send + Sync + 'static { From b5bb3157c31eb98363a9a0a541c67acd8e3a59a2 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Mon, 18 Nov 2024 14:19:30 +0100 Subject: [PATCH 010/156] test: add tests for base l1 blockinfo (#12609) --- crates/optimism/rpc/src/eth/receipt.rs | 47 ++++++++++++++++++++++++-- 1 file changed, 44 insertions(+), 3 deletions(-) diff --git a/crates/optimism/rpc/src/eth/receipt.rs b/crates/optimism/rpc/src/eth/receipt.rs index f3d16b4adb5f..a801a408fd5d 100644 --- a/crates/optimism/rpc/src/eth/receipt.rs +++ b/crates/optimism/rpc/src/eth/receipt.rs @@ -233,13 +233,12 @@ impl OpReceiptBuilder { #[cfg(test)] mod test { + use super::*; use alloy_primitives::hex; use op_alloy_network::eip2718::Decodable2718; - use reth_optimism_chainspec::OP_MAINNET; + use reth_optimism_chainspec::{BASE_MAINNET, OP_MAINNET}; use reth_primitives::{Block, BlockBody}; - use super::*; - /// OP Mainnet transaction at index 0 in block 124665056. /// /// @@ -342,4 +341,46 @@ mod test { "incorrect l1 blob base fee scalar" ); } + + // + #[test] + fn base_receipt_gas_fields() { + // https://basescan.org/tx/0x510fd4c47d78ba9f97c91b0f2ace954d5384c169c9545a77a373cf3ef8254e6e + let system = hex!("7ef8f8a0389e292420bcbf9330741f72074e39562a09ff5a00fd22e4e9eee7e34b81bca494deaddeaddeaddeaddeaddeaddeaddeaddead00019442000000000000000000000000000000000000158080830f424080b8a4440a5e20000008dd00101c120000000000000004000000006721035b00000000014189960000000000000000000000000000000000000000000000000000000349b4dcdc000000000000000000000000000000000000000000000000000000004ef9325cc5991ce750960f636ca2ffbb6e209bb3ba91412f21dd78c14ff154d1930f1f9a0000000000000000000000005050f69a9786f081509234f1a7f4684b5e5b76c9"); + let tx_0 = TransactionSigned::decode_2718(&mut &system[..]).unwrap(); + + let block = Block { + body: BlockBody { transactions: vec![tx_0], ..Default::default() }, + ..Default::default() + }; + let l1_block_info = + reth_optimism_evm::extract_l1_info(&block.body).expect("should extract l1 info"); + + // https://basescan.org/tx/0xf9420cbaf66a2dda75a015488d37262cbfd4abd0aad7bb2be8a63e14b1fa7a94 + let tx = hex!("02f86c8221058034839a4ae283021528942f16386bb37709016023232523ff6d9daf444be380841249c58bc080a001b927eda2af9b00b52a57be0885e0303c39dd2831732e14051c2336470fd468a0681bf120baf562915841a48601c2b54a6742511e535cf8f71c95115af7ff63bd"); + let tx_1 = TransactionSigned::decode_2718(&mut &tx[..]).unwrap(); + + let receipt_meta = OpReceiptFieldsBuilder::new(1730216981) + .l1_block_info(&BASE_MAINNET, &tx_1, l1_block_info) + .expect("should parse revm l1 info") + .build(); + + let L1BlockInfo { + l1_gas_price, + l1_gas_used, + l1_fee, + l1_fee_scalar, + l1_base_fee_scalar, + l1_blob_base_fee, + l1_blob_base_fee_scalar, + } = receipt_meta.l1_block_info; + + assert_eq!(l1_gas_price, Some(14121491676), "incorrect l1 base fee (former gas price)"); + assert_eq!(l1_gas_used, Some(1600), "incorrect l1 gas used"); + assert_eq!(l1_fee, Some(191150293412), "incorrect l1 fee"); + assert!(l1_fee_scalar.is_none(), "incorrect l1 fee scalar"); + assert_eq!(l1_base_fee_scalar, Some(2269), "incorrect l1 base fee scalar"); + assert_eq!(l1_blob_base_fee, Some(1324954204), "incorrect l1 blob base fee"); + assert_eq!(l1_blob_base_fee_scalar, Some(1055762), "incorrect l1 blob base fee scalar"); + } } From 32a4d9ea0838626799efe3fd7ef09cfb4380ffac Mon Sep 17 00:00:00 2001 From: wizard <112275929+famouswizard@users.noreply.github.com> Date: Mon, 18 Nov 2024 16:27:35 +0300 Subject: [PATCH 011/156] Fix grammatical 
error in lib.rs (#12632) --- bin/reth/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bin/reth/src/lib.rs b/bin/reth/src/lib.rs index 6b71f48de123..53c592063eca 100644 --- a/bin/reth/src/lib.rs +++ b/bin/reth/src/lib.rs @@ -15,7 +15,7 @@ //! - `min-error-logs`: Disables all logs below `error` level. //! - `min-warn-logs`: Disables all logs below `warn` level. //! - `min-info-logs`: Disables all logs below `info` level. This can speed up the node, since fewer -//! calls to the logging component is made. +//! calls to the logging component are made. //! - `min-debug-logs`: Disables all logs below `debug` level. //! - `min-trace-logs`: Disables all logs below `trace` level. From 66887bbfaa2f2cb906f117001d9452d2c1bae6b6 Mon Sep 17 00:00:00 2001 From: Dmitry <98899785+mdqst@users.noreply.github.com> Date: Mon, 18 Nov 2024 16:28:15 +0300 Subject: [PATCH 012/156] Typo Update profiling.md (#12631) --- book/developers/profiling.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/book/developers/profiling.md b/book/developers/profiling.md index f1fdf520eb2e..956bc5633030 100644 --- a/book/developers/profiling.md +++ b/book/developers/profiling.md @@ -25,7 +25,7 @@ In this tutorial, we will be reviewing: [Jemalloc](https://jemalloc.net/) is a general-purpose allocator that is used [across the industry in production](https://engineering.fb.com/2011/01/03/core-data/scalable-memory-allocation-using-jemalloc/), well known for its performance benefits, predictability, and profiling capabilities. We've seen significant performance benefits in reth when using jemalloc, but will be primarily focusing on its profiling capabilities. -Jemalloc also provides tools for analyzing and visualizing its the allocation profiles it generates, notably `jeprof`. +Jemalloc also provides tools for analyzing and visualizing its allocation profiles it generates, notably `jeprof`. #### Enabling jemalloc in reth From 773f558ad647c6c0af2f8e7d573f064508beb16d Mon Sep 17 00:00:00 2001 From: Dmitry <98899785+mdqst@users.noreply.github.com> Date: Mon, 18 Nov 2024 16:37:13 +0300 Subject: [PATCH 013/156] Fix Minor Documentation Errors (#12601) --- book/run/config.md | 6 +++--- book/run/sync-op-mainnet.md | 2 +- book/run/transactions.md | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/book/run/config.md b/book/run/config.md index 10fd40ca7630..bb28d855de8d 100644 --- a/book/run/config.md +++ b/book/run/config.md @@ -36,7 +36,7 @@ The defaults shipped with Reth try to be relatively reasonable, but may not be o ### `headers` -The headers section controls both the behavior of the header stage, which download historical headers, as well as the primary downloader that fetches headers over P2P. +The headers section controls both the behavior of the header stage, which downloads historical headers, as well as the primary downloader that fetches headers over P2P. ```toml [stages.headers] @@ -65,7 +65,7 @@ commit_threshold = 10000 ### `bodies` -The bodies section controls both the behavior of the bodies stage, which download historical block bodies, as well as the primary downloader that fetches block bodies over P2P. +The bodies section controls both the behavior of the bodies stage, which downloads historical block bodies, as well as the primary downloader that fetches block bodies over P2P. 
```toml [stages.bodies] @@ -102,7 +102,7 @@ The sender recovery stage recovers the address of transaction senders using tran ```toml [stages.sender_recovery] -# The amount of transactions to recover senders for before +# The number of transactions to recover senders for before # writing the results to disk. # # Lower thresholds correspond to more frequent disk I/O (writes), diff --git a/book/run/sync-op-mainnet.md b/book/run/sync-op-mainnet.md index 2a862314a1d5..0e2090acbcb5 100644 --- a/book/run/sync-op-mainnet.md +++ b/book/run/sync-op-mainnet.md @@ -1,6 +1,6 @@ # Sync OP Mainnet -To sync OP mainnet, bedrock state needs to be imported as a starting point. There are currently two ways: +To sync OP mainnet, Bedrock state needs to be imported as a starting point. There are currently two ways: * Minimal bootstrap **(recommended)**: only state snapshot at Bedrock block is imported without any OVM historical data. * Full bootstrap **(not recommended)**: state, blocks and receipts are imported. *Not recommended for now: [storage consistency issue](https://github.com/paradigmxyz/reth/pull/11099) tldr: sudden crash may break the node diff --git a/book/run/transactions.md b/book/run/transactions.md index 61327b57300a..edb3a24d76f2 100644 --- a/book/run/transactions.md +++ b/book/run/transactions.md @@ -38,7 +38,7 @@ Alongside the `accessList` parameter and legacy parameters (except `gasPrice`), The base fee is burned, while the priority fee is paid to the miner who includes the transaction, incentivizing miners to include transactions with higher priority fees per gas. -## EIP-4844 Transaction +## EIP-4844 Transactions [EIP-4844](https://eips.ethereum.org/EIPS/eip-4844) transactions (type `0x3`) was introduced in Ethereum's Dencun fork. This provides a temporary but significant scaling relief for rollups by allowing them to initially scale to 0.375 MB per slot, with a separate fee market allowing fees to be very low while usage of this system is limited. 
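Taken together, the first and third patches above give add-ons a single place to install chain-specific RPC methods: `launch_add_ons_with` hands the assembled `TransportRpcModules` to a caller-supplied closure, and `merge_if_module_configured` copies the new `Methods` only into the transports (HTTP, WS, IPC) that actually have the given `RethRpcModule` enabled. Below is a minimal sketch of that hook, condensed from the OP node wiring in patch 003; the surrounding builder types, `ctx`, and the `debug_ext` witness API are assumed to be set up exactly as shown there.

```rust
// Sketch only, condensed from the `OpAddOns::launch_add_ons` change in patch 003.
// `self.0` is the inner `RpcAddOns` and `ctx` is the `AddOnsContext` passed in.
let debug_ext =
    OpDebugWitnessApi::new(ctx.node.provider().clone(), ctx.node.evm_config().clone());

self.0
    .launch_add_ons_with(ctx, move |modules| {
        // Merged only into the transports that have the `debug` module configured;
        // fails with a `RegisterMethodError` if any method name is already registered.
        modules.merge_if_module_configured(RethRpcModule::Debug, debug_ext.into_rpc())?;
        Ok(())
    })
    .await
```

The default path stays unchanged: as patch 003 shows, the plain `NodeAddOns` impl simply calls `launch_add_ons_with(ctx, |_| Ok(()))`, so nodes that need no extra modules pay nothing for the hook.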
From 292e9d9812cd74662801252d6a9e08fe3b0ef738 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Mon, 18 Nov 2024 14:28:43 +0100 Subject: [PATCH 014/156] test(tx-pool): add more unit tests for blob pool (#12605) --- crates/transaction-pool/src/pool/blob.rs | 98 ++++++++++++++++++++++++ 1 file changed, 98 insertions(+) diff --git a/crates/transaction-pool/src/pool/blob.rs b/crates/transaction-pool/src/pool/blob.rs index ac39c6ab781a..e6c0cb245c3f 100644 --- a/crates/transaction-pool/src/pool/blob.rs +++ b/crates/transaction-pool/src/pool/blob.rs @@ -693,4 +693,102 @@ mod tests { ); } } + + #[test] + fn test_empty_pool_operations() { + let mut pool: BlobTransactions = BlobTransactions::default(); + + // Ensure pool is empty + assert!(pool.is_empty()); + assert_eq!(pool.len(), 0); + assert_eq!(pool.size(), 0); + + // Attempt to remove a non-existent transaction + let non_existent_id = TransactionId::new(0.into(), 0); + assert!(pool.remove_transaction(&non_existent_id).is_none()); + + // Check contains method on empty pool + assert!(!pool.contains(&non_existent_id)); + } + + #[test] + fn test_transaction_removal() { + let mut factory = MockTransactionFactory::default(); + let mut pool = BlobTransactions::default(); + + // Add a transaction + let tx = factory.validated_arc(MockTransaction::eip4844()); + let tx_id = *tx.id(); + pool.add_transaction(tx); + + // Remove the transaction + let removed = pool.remove_transaction(&tx_id); + assert!(removed.is_some()); + assert_eq!(*removed.unwrap().id(), tx_id); + assert!(pool.is_empty()); + } + + #[test] + fn test_satisfy_attributes_empty_pool() { + let pool: BlobTransactions = BlobTransactions::default(); + let attributes = BestTransactionsAttributes { blob_fee: Some(100), basefee: 100 }; + // Satisfy attributes on an empty pool should return an empty vector + let satisfied = pool.satisfy_attributes(attributes); + assert!(satisfied.is_empty()); + } + + #[test] + #[should_panic(expected = "transaction is not a blob tx")] + fn test_add_non_blob_transaction() { + // Ensure that adding a non-blob transaction causes a panic + let mut factory = MockTransactionFactory::default(); + let mut pool = BlobTransactions::default(); + let tx = factory.validated_arc(MockTransaction::eip1559()); // Not a blob transaction + pool.add_transaction(tx); + } + + #[test] + #[should_panic(expected = "transaction already included")] + fn test_add_duplicate_blob_transaction() { + // Ensure that adding a duplicate blob transaction causes a panic + let mut factory = MockTransactionFactory::default(); + let mut pool = BlobTransactions::default(); + let tx = factory.validated_arc(MockTransaction::eip4844()); + pool.add_transaction(tx.clone()); // First addition + pool.add_transaction(tx); // Attempt to add the same transaction again + } + + #[test] + fn test_remove_transactions_until_limit() { + // Test truncating the pool until it satisfies the given size limit + let mut factory = MockTransactionFactory::default(); + let mut pool = BlobTransactions::default(); + let tx1 = factory.validated_arc(MockTransaction::eip4844().with_size(100)); + let tx2 = factory.validated_arc(MockTransaction::eip4844().with_size(200)); + let tx3 = factory.validated_arc(MockTransaction::eip4844().with_size(300)); + + // Add transactions to the pool + pool.add_transaction(tx1); + pool.add_transaction(tx2); + pool.add_transaction(tx3); + + // Set a size limit that requires truncation + let limit = SubPoolLimit { max_txs: 2, max_size: 300 }; + let removed = 
pool.truncate_pool(limit); + + // Check that only one transaction was removed to satisfy the limit + assert_eq!(removed.len(), 1); + assert_eq!(pool.len(), 2); + assert!(pool.size() <= limit.max_size); + } + + #[test] + fn test_empty_pool_invariants() { + // Ensure that the invariants hold for an empty pool + let pool: BlobTransactions = BlobTransactions::default(); + pool.assert_invariants(); + assert!(pool.is_empty()); + assert_eq!(pool.size(), 0); + assert_eq!(pool.len(), 0); + } } From cee11dfb7c7a321a49151d013573299018fc58fe Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Mon, 18 Nov 2024 14:28:59 +0100 Subject: [PATCH 015/156] test(tx-pool): add more unit tests for pending pool (#12603) --- crates/transaction-pool/src/pool/pending.rs | 100 ++++++++++++++++++++ 1 file changed, 100 insertions(+) diff --git a/crates/transaction-pool/src/pool/pending.rs b/crates/transaction-pool/src/pool/pending.rs index f4bce8c85a63..ee2bcd96e849 100644 --- a/crates/transaction-pool/src/pool/pending.rs +++ b/crates/transaction-pool/src/pool/pending.rs @@ -880,4 +880,104 @@ mod tests { } } } + + #[test] + fn test_empty_pool_behavior() { + let mut pool = PendingPool::::new(MockOrdering::default()); + + // Ensure the pool is empty + assert!(pool.is_empty()); + assert_eq!(pool.len(), 0); + assert_eq!(pool.size(), 0); + + // Verify that attempting to truncate an empty pool does not panic and returns an empty vec + let removed = pool.truncate_pool(SubPoolLimit { max_txs: 10, max_size: 1000 }); + assert!(removed.is_empty()); + + // Verify that retrieving transactions from an empty pool yields nothing + let all_txs: Vec<_> = pool.all().collect(); + assert!(all_txs.is_empty()); + } + + #[test] + fn test_add_remove_transaction() { + let mut f = MockTransactionFactory::default(); + let mut pool = PendingPool::new(MockOrdering::default()); + + // Add a transaction and check if it's in the pool + let tx = f.validated_arc(MockTransaction::eip1559()); + pool.add_transaction(tx.clone(), 0); + assert!(pool.contains(tx.id())); + assert_eq!(pool.len(), 1); + + // Remove the transaction and ensure it's no longer in the pool + let removed_tx = pool.remove_transaction(tx.id()).unwrap(); + assert_eq!(removed_tx.id(), tx.id()); + assert!(!pool.contains(tx.id())); + assert_eq!(pool.len(), 0); + } + + #[test] + fn test_reorder_on_basefee_update() { + let mut f = MockTransactionFactory::default(); + let mut pool = PendingPool::new(MockOrdering::default()); + + // Add two transactions with different fees + let tx1 = f.validated_arc(MockTransaction::eip1559().inc_price()); + let tx2 = f.validated_arc(MockTransaction::eip1559().inc_price_by(20)); + pool.add_transaction(tx1.clone(), 0); + pool.add_transaction(tx2.clone(), 0); + + // Ensure the transactions are in the correct order + let mut best = pool.best(); + assert_eq!(best.next().unwrap().hash(), tx2.hash()); + assert_eq!(best.next().unwrap().hash(), tx1.hash()); + + // Update the base fee to a value higher than tx1's fee, causing it to be removed + let removed = pool.update_base_fee((tx1.max_fee_per_gas() + 1) as u64); + assert_eq!(removed.len(), 1); + assert_eq!(removed[0].hash(), tx1.hash()); + + // Verify that only tx2 remains in the pool + assert_eq!(pool.len(), 1); + assert!(pool.contains(tx2.id())); + assert!(!pool.contains(tx1.id())); + } + + #[test] + #[should_panic(expected = "transaction already included")] + fn test_handle_duplicates() { + let mut f = MockTransactionFactory::default(); + let mut pool = 
PendingPool::new(MockOrdering::default()); + + // Add the same transaction twice and ensure it only appears once + let tx = f.validated_arc(MockTransaction::eip1559()); + pool.add_transaction(tx.clone(), 0); + assert!(pool.contains(tx.id())); + assert_eq!(pool.len(), 1); + + // Attempt to add the same transaction again, which should be ignored + pool.add_transaction(tx, 0); + } + + #[test] + fn test_update_blob_fee() { + let mut f = MockTransactionFactory::default(); + let mut pool = PendingPool::new(MockOrdering::default()); + + // Add transactions with varying blob fees + let tx1 = f.validated_arc(MockTransaction::eip4844().set_blob_fee(50).clone()); + let tx2 = f.validated_arc(MockTransaction::eip4844().set_blob_fee(150).clone()); + pool.add_transaction(tx1.clone(), 0); + pool.add_transaction(tx2.clone(), 0); + + // Update the blob fee to a value that causes tx1 to be removed + let removed = pool.update_blob_fee(100); + assert_eq!(removed.len(), 1); + assert_eq!(removed[0].hash(), tx1.hash()); + + // Verify that only tx2 remains in the pool + assert!(pool.contains(tx2.id())); + assert!(!pool.contains(tx1.id())); + } } From ff22c8eef83c54f5893d7ae8abe72a1bfdf77516 Mon Sep 17 00:00:00 2001 From: Emilia Hane Date: Mon, 18 Nov 2024 14:30:44 +0100 Subject: [PATCH 016/156] chore(sdk): Define `MaybeSerde` (#12577) Co-authored-by: Matthias Seitz --- Cargo.lock | 73 +++++++++---------- crates/evm/execution-types/Cargo.toml | 3 +- crates/exex/exex/Cargo.toml | 3 +- crates/net/eth-wire-types/Cargo.toml | 1 + crates/net/eth-wire/Cargo.toml | 3 +- crates/net/network/Cargo.toml | 1 + crates/optimism/cli/Cargo.toml | 3 +- crates/optimism/primitives/Cargo.toml | 41 ++++++++--- crates/optimism/primitives/src/tx_type.rs | 10 +-- crates/primitives-traits/Cargo.toml | 19 ++++- crates/primitives-traits/src/account.rs | 7 +- crates/primitives-traits/src/block/body.rs | 5 +- crates/primitives-traits/src/block/header.rs | 10 ++- crates/primitives-traits/src/block/mod.rs | 14 +--- crates/primitives-traits/src/lib.rs | 13 ++++ crates/primitives-traits/src/node.rs | 8 +- crates/primitives-traits/src/receipt.rs | 7 +- .../primitives-traits/src/transaction/mod.rs | 9 +-- .../src/transaction/signed.rs | 5 +- crates/primitives/Cargo.toml | 2 +- crates/revm/Cargo.toml | 1 + crates/storage/codecs/Cargo.toml | 2 +- crates/storage/db-api/Cargo.toml | 2 +- crates/storage/db-models/Cargo.toml | 2 +- crates/storage/db/Cargo.toml | 2 +- crates/storage/provider/Cargo.toml | 1 + crates/transaction-pool/Cargo.toml | 1 + crates/trie/common/Cargo.toml | 2 +- 28 files changed, 143 insertions(+), 107 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ded071c5dc27..56f2864c5fa1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -182,9 +182,9 @@ dependencies = [ [[package]] name = "alloy-eip7702" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69fb9fd842fdf10a524bbf2c4de6942ad869c1c8c3d128a1b09e67ed5f7cedbd" +checksum = "5f6cee6a35793f3db8a5ffe60e86c695f321d081a567211245f503e8c498fce8" dependencies = [ "alloy-primitives", "alloy-rlp", @@ -776,9 +776,9 @@ dependencies = [ [[package]] name = "alloy-trie" -version = "0.7.3" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40d8e28db02c006f7abb20f345ffb3cc99c465e36f676ba262534e654ae76042" +checksum = "b6b2e366c0debf0af77766c23694a3f863b02633050e71e096e257ffbd395e50" dependencies = [ "alloy-primitives", "alloy-rlp", @@ -1525,9 +1525,9 @@ dependencies = [ [[package]] name = 
"bstr" -version = "1.10.0" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40723b8fb387abc38f4f4a37c09073622e41dd12327033091ef8950659e6dc0c" +checksum = "1a68f1f47cdf0ec8ee4b941b2eee2a80cb796db73118c0dd09ac63fbe405be22" dependencies = [ "memchr", "regex-automata 0.4.9", @@ -1651,9 +1651,9 @@ dependencies = [ [[package]] name = "cc" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1aeb932158bd710538c73702db6945cb68a8fb08c519e6e12706b94263b36db8" +checksum = "fd9de9f2205d5ef3fd67e685b0df337994ddd4495e2a28d185500d0e1edfea47" dependencies = [ "jobserver", "libc", @@ -1752,9 +1752,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.20" +version = "4.5.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b97f376d85a664d5837dbae44bf546e6477a679ff6610010f17276f686d867e8" +checksum = "fb3b4b9e5a7c7514dfa52869339ee98b3156b0bfb4e8a77c4ff4babb64b1604f" dependencies = [ "clap_builder", "clap_derive", @@ -1762,9 +1762,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.20" +version = "4.5.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19bc80abd44e4bed93ca373a0704ccbd1b710dc5749406201bb018272808dc54" +checksum = "b17a95aa67cc7b5ebd32aa5370189aa0d79069ef1c64ce893bd30fb24bff20ec" dependencies = [ "anstream", "anstyle", @@ -1786,9 +1786,9 @@ dependencies = [ [[package]] name = "clap_lex" -version = "0.7.2" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1462739cb27611015575c0c11df5df7601141071f07518d56fcc1be504cbec97" +checksum = "afb84c814227b90d6895e01398aee0d8033c00e7466aca416fb6a8e0eb19d8a7" [[package]] name = "coins-bip32" @@ -1859,14 +1859,14 @@ dependencies = [ [[package]] name = "comfy-table" -version = "7.1.1" +version = "7.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b34115915337defe99b2aff5c2ce6771e5fbc4079f4b506301f5cf394c8452f7" +checksum = "24f165e7b643266ea80cb858aed492ad9280e3e05ce24d4a99d7d7b889b6a4d9" dependencies = [ - "crossterm 0.27.0", + "crossterm", "strum", "strum_macros", - "unicode-width", + "unicode-width 0.2.0", ] [[package]] @@ -2107,19 +2107,6 @@ version = "0.8.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "22ec99545bb0ed0ea7bb9b8e1e9122ea386ff8a48c0922e43f36d45ab09e0e80" -[[package]] -name = "crossterm" -version = "0.27.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f476fe445d41c9e991fd07515a6f463074b782242ccf4a5b7b1d1012e70824df" -dependencies = [ - "bitflags 2.6.0", - "crossterm_winapi", - "libc", - "parking_lot", - "winapi", -] - [[package]] name = "crossterm" version = "0.28.1" @@ -3198,9 +3185,9 @@ dependencies = [ [[package]] name = "flate2" -version = "1.0.34" +version = "1.0.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1b589b4dc103969ad3cf85c950899926ec64300a1a46d76c03a6072957036f0" +checksum = "c936bfdafb507ebbf50b8074c54fa31c5be9a1e7e5f467dd659697041407d07c" dependencies = [ "crc32fast", "miniz_oxide", @@ -6165,7 +6152,7 @@ dependencies = [ "bitflags 2.6.0", "cassowary", "compact_str", - "crossterm 0.28.1", + "crossterm", "instability", "itertools 0.13.0", "lru", @@ -6174,7 +6161,7 @@ dependencies = [ "strum_macros", "unicode-segmentation", "unicode-truncate", - "unicode-width", + "unicode-width 0.1.14", ] [[package]] @@ -6665,7 +6652,7 @@ dependencies = [ "backon", 
"clap", "comfy-table", - "crossterm 0.28.1", + "crossterm", "eyre", "fdlimit", "futures", @@ -8397,6 +8384,7 @@ dependencies = [ "reth-primitives", "reth-primitives-traits", "rstest", + "serde", ] [[package]] @@ -9629,6 +9617,7 @@ checksum = "8f4b84ba6e838ceb47b41de5194a60244fac43d9fe03b71dbe8c5a201081d6d1" dependencies = [ "bytemuck", "byteorder", + "serde", ] [[package]] @@ -9903,9 +9892,9 @@ dependencies = [ [[package]] name = "scc" -version = "2.2.4" +version = "2.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8d25269dd3a12467afe2e510f69fb0b46b698e5afb296b59f2145259deaf8e8" +checksum = "66b202022bb57c049555430e11fc22fea12909276a80a4c3d368da36ac1d88ed" dependencies = [ "sdd", ] @@ -11349,7 +11338,7 @@ checksum = "b3644627a5af5fa321c95b9b235a72fd24cd29c648c2c379431e6628655627bf" dependencies = [ "itertools 0.13.0", "unicode-segmentation", - "unicode-width", + "unicode-width 0.1.14", ] [[package]] @@ -11358,6 +11347,12 @@ version = "0.1.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af" +[[package]] +name = "unicode-width" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fc81956842c57dac11422a97c3b8195a1ff727f06e85c84ed2e8aa277c9a0fd" + [[package]] name = "unicode-xid" version = "0.2.6" diff --git a/crates/evm/execution-types/Cargo.toml b/crates/evm/execution-types/Cargo.toml index 13b0aef8ad45..4d2d8214ff98 100644 --- a/crates/evm/execution-types/Cargo.toml +++ b/crates/evm/execution-types/Cargo.toml @@ -40,7 +40,8 @@ serde = [ "revm/serde", "alloy-eips/serde", "alloy-primitives/serde", - "rand/serde" + "rand/serde", + "reth-primitives-traits/serde", ] serde-bincode-compat = [ "reth-primitives/serde-bincode-compat", diff --git a/crates/exex/exex/Cargo.toml b/crates/exex/exex/Cargo.toml index f7ab4fce5df0..3cbeb115b066 100644 --- a/crates/exex/exex/Cargo.toml +++ b/crates/exex/exex/Cargo.toml @@ -78,5 +78,6 @@ serde = [ "alloy-primitives/serde", "parking_lot/serde", "rand/serde", - "secp256k1/serde" + "secp256k1/serde", + "reth-primitives-traits/serde", ] diff --git a/crates/net/eth-wire-types/Cargo.toml b/crates/net/eth-wire-types/Cargo.toml index f9759ffc25af..8b89603167d4 100644 --- a/crates/net/eth-wire-types/Cargo.toml +++ b/crates/net/eth-wire-types/Cargo.toml @@ -65,4 +65,5 @@ serde = [ "alloy-primitives/serde", "bytes/serde", "rand/serde", + "reth-primitives-traits/serde", ] diff --git a/crates/net/eth-wire/Cargo.toml b/crates/net/eth-wire/Cargo.toml index 3999f658e0a6..ffbd3017fa62 100644 --- a/crates/net/eth-wire/Cargo.toml +++ b/crates/net/eth-wire/Cargo.toml @@ -87,7 +87,8 @@ serde = [ "rand/serde", "secp256k1/serde", "reth-codecs/serde", - "alloy-chains/serde" + "alloy-chains/serde", + "reth-primitives-traits/serde", ] [[test]] diff --git a/crates/net/network/Cargo.toml b/crates/net/network/Cargo.toml index dde0b4a0b230..ad8e65dffc69 100644 --- a/crates/net/network/Cargo.toml +++ b/crates/net/network/Cargo.toml @@ -121,6 +121,7 @@ serde = [ "rand/serde", "smallvec/serde", "url/serde", + "reth-primitives-traits/serde", ] test-utils = [ "dep:reth-provider", diff --git a/crates/optimism/cli/Cargo.toml b/crates/optimism/cli/Cargo.toml index 198e5377ec4d..d090075927aa 100644 --- a/crates/optimism/cli/Cargo.toml +++ b/crates/optimism/cli/Cargo.toml @@ -119,5 +119,6 @@ serde = [ "alloy-primitives/serde", "op-alloy-consensus?/serde", "reth-execution-types/serde", - "reth-provider/serde" + 
"reth-provider/serde", + "reth-optimism-primitives/serde", ] diff --git a/crates/optimism/primitives/Cargo.toml b/crates/optimism/primitives/Cargo.toml index 216e559a201e..4c6d9f51406f 100644 --- a/crates/optimism/primitives/Cargo.toml +++ b/crates/optimism/primitives/Cargo.toml @@ -12,21 +12,44 @@ description = "OP primitive types" workspace = true [dependencies] +# reth +reth-primitives-traits.workspace = true +reth-codecs = { workspace = true, optional = true } +reth-primitives = { workspace = true, features = ["reth-codec"], optional = true } + +# ethereum alloy-primitives.workspace = true alloy-consensus.workspace = true -op-alloy-consensus.workspace = true alloy-eips.workspace = true alloy-rlp.workspace = true -derive_more.workspace = true + +# op +op-alloy-consensus.workspace = true + +# codec bytes.workspace = true -reth-primitives-traits.workspace = true -reth-codecs = { workspace = true, optional = true } -reth-primitives = { workspace = true, features = ["reth-codec"], optional = true } +serde = { workspace = true, optional = true } -[features] -default = ["reth-codec"] -reth-codec = ["dep:reth-codecs", "dep:reth-primitives"] +# misc +derive_more.workspace = true [dev-dependencies] reth-codecs = { workspace = true, features = ["test-utils"] } -rstest.workspace = true \ No newline at end of file +rstest.workspace = true + +[features] +default = ["reth-codec"] +reth-codec = [ + "dep:reth-codecs", + "dep:reth-primitives" +] +serde = [ + "dep:serde", + "reth-primitives-traits/serde", + "alloy-primitives/serde", + "alloy-consensus/serde", + "alloy-eips/serde", + "bytes/serde", + "reth-codecs/serde", + "op-alloy-consensus/serde", +] diff --git a/crates/optimism/primitives/src/tx_type.rs b/crates/optimism/primitives/src/tx_type.rs index 70f5fd32d8e5..9ddfe77b192f 100644 --- a/crates/optimism/primitives/src/tx_type.rs +++ b/crates/optimism/primitives/src/tx_type.rs @@ -3,8 +3,9 @@ //! This type is required because a `Compact` impl is needed on the deposit tx type. use core::fmt::Debug; -use std::convert::TryFrom; +#[cfg(feature = "reth-codec")] +use alloy_consensus::constants::EIP7702_TX_TYPE_ID; use alloy_primitives::{U64, U8}; use alloy_rlp::{Decodable, Encodable, Error}; use bytes::BufMut; @@ -13,10 +14,6 @@ use derive_more::{ Display, }; use op_alloy_consensus::OpTxType as AlloyOpTxType; -use reth_primitives_traits::{InMemorySize, TxType}; - -#[cfg(feature = "reth-codec")] -use alloy_consensus::constants::EIP7702_TX_TYPE_ID; #[cfg(feature = "reth-codec")] use op_alloy_consensus::DEPOSIT_TX_TYPE_ID; #[cfg(feature = "reth-codec")] @@ -24,8 +21,9 @@ use reth_primitives::transaction::{ COMPACT_EXTENDED_IDENTIFIER_FLAG, COMPACT_IDENTIFIER_EIP1559, COMPACT_IDENTIFIER_EIP2930, COMPACT_IDENTIFIER_LEGACY, }; +use reth_primitives_traits::{InMemorySize, TxType}; -/// Wrapper type for `AlloyOpTxType` to implement `TxType` trait. +/// Wrapper type for [`op_alloy_consensus::OpTxType`] to implement [`TxType`] trait. #[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Display, Ord, Hash, From, Into)] #[into(u8)] pub struct OpTxType(AlloyOpTxType); diff --git a/crates/primitives-traits/Cargo.toml b/crates/primitives-traits/Cargo.toml index 651583f8e4d0..20430fbc8829 100644 --- a/crates/primitives-traits/Cargo.toml +++ b/crates/primitives-traits/Cargo.toml @@ -12,15 +12,16 @@ description = "Common types in reth." 
workspace = true [dependencies] +# reth reth-codecs.workspace = true -alloy-consensus = { workspace = true, features = ["serde"] } +# ethereum +alloy-consensus.workspace = true alloy-eips.workspace = true alloy-genesis.workspace = true alloy-primitives.workspace = true alloy-rlp.workspace = true - -revm-primitives = { workspace = true, features = ["serde"] } +revm-primitives.workspace = true # misc byteorder = "1" @@ -76,7 +77,19 @@ arbitrary = [ "reth-codecs/arbitrary" ] serde-bincode-compat = [ + "serde", "serde_with", "alloy-consensus/serde-bincode-compat", "alloy-eips/serde-bincode-compat" ] +serde = [ + "alloy-consensus/serde", + "alloy-eips/serde", + "alloy-primitives/serde", + "bytes/serde", + "rand/serde", + "reth-codecs/serde", + "revm-primitives/serde", + "roaring/serde", + "revm-primitives/serde", +] \ No newline at end of file diff --git a/crates/primitives-traits/src/account.rs b/crates/primitives-traits/src/account.rs index ae58973edd71..927e39a52e17 100644 --- a/crates/primitives-traits/src/account.rs +++ b/crates/primitives-traits/src/account.rs @@ -6,7 +6,6 @@ use bytes::Buf; use derive_more::Deref; use reth_codecs::{add_arbitrary_tests, Compact}; use revm_primitives::{AccountInfo, Bytecode as RevmBytecode, BytecodeDecodeError, JumpTable}; -use serde::{Deserialize, Serialize}; /// Identifier for [`LegacyRaw`](RevmBytecode::LegacyRaw). const LEGACY_RAW_BYTECODE_ID: u8 = 0; @@ -24,7 +23,8 @@ const EOF_BYTECODE_ID: u8 = 3; const EIP7702_BYTECODE_ID: u8 = 4; /// An Ethereum account. -#[derive(Clone, Copy, Debug, PartialEq, Eq, Default, Serialize, Deserialize, Compact)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[derive(Clone, Copy, Debug, PartialEq, Eq, Default, Compact)] #[cfg_attr(any(test, feature = "arbitrary"), derive(arbitrary::Arbitrary))] #[add_arbitrary_tests(compact)] pub struct Account { @@ -60,7 +60,8 @@ impl Account { /// Bytecode for an account. /// /// A wrapper around [`revm::primitives::Bytecode`][RevmBytecode] with encoding/decoding support. -#[derive(Debug, Clone, Default, PartialEq, Eq, Serialize, Deserialize, Deref)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[derive(Debug, Clone, Default, PartialEq, Eq, Deref)] pub struct Bytecode(pub RevmBytecode); impl Bytecode { diff --git a/crates/primitives-traits/src/block/body.rs b/crates/primitives-traits/src/block/body.rs index e9aadf409571..074efc4d5141 100644 --- a/crates/primitives-traits/src/block/body.rs +++ b/crates/primitives-traits/src/block/body.rs @@ -1,6 +1,6 @@ //! Block body abstraction. -use crate::InMemorySize; +use crate::{InMemorySize, MaybeSerde}; use alloc::fmt; use alloy_consensus::Transaction; @@ -15,11 +15,10 @@ pub trait BlockBody: + fmt::Debug + PartialEq + Eq - + serde::Serialize - + for<'de> serde::Deserialize<'de> + alloy_rlp::Encodable + alloy_rlp::Decodable + InMemorySize + + MaybeSerde { /// Ordered list of signed transactions as committed in block. // todo: requires trait for signed transaction diff --git a/crates/primitives-traits/src/block/header.rs b/crates/primitives-traits/src/block/header.rs index 779df4425388..524835879f31 100644 --- a/crates/primitives-traits/src/block/header.rs +++ b/crates/primitives-traits/src/block/header.rs @@ -1,10 +1,12 @@ //! Block header data primitive. 
-use crate::InMemorySize; -use alloy_primitives::Sealable; use core::fmt; + +use alloy_primitives::Sealable; use reth_codecs::Compact; +use crate::{InMemorySize, MaybeSerde}; + /// Helper trait that unifies all behaviour required by block header to support full node /// operations. pub trait FullBlockHeader: BlockHeader + Compact {} @@ -26,6 +28,7 @@ pub trait BlockHeader: + alloy_consensus::BlockHeader + Sealable + InMemorySize + + MaybeSerde { } @@ -38,12 +41,11 @@ impl BlockHeader for T where + fmt::Debug + PartialEq + Eq - + serde::Serialize - + for<'de> serde::Deserialize<'de> + alloy_rlp::Encodable + alloy_rlp::Decodable + alloy_consensus::BlockHeader + Sealable + InMemorySize + + MaybeSerde { } diff --git a/crates/primitives-traits/src/block/mod.rs b/crates/primitives-traits/src/block/mod.rs index 6bef9ea167fd..5b1faeafbb72 100644 --- a/crates/primitives-traits/src/block/mod.rs +++ b/crates/primitives-traits/src/block/mod.rs @@ -7,7 +7,7 @@ use alloc::fmt; use reth_codecs::Compact; -use crate::{BlockHeader, FullBlockHeader, InMemorySize}; +use crate::{BlockHeader, FullBlockHeader, InMemorySize, MaybeSerde}; /// Helper trait that unifies all behaviour required by block to support full node operations. pub trait FullBlock: Block + Compact {} @@ -20,17 +20,7 @@ impl FullBlock for T where T: Block + Compact {} // senders #[auto_impl::auto_impl(&, Arc)] pub trait Block: - Send - + Sync - + Unpin - + Clone - + Default - + fmt::Debug - + PartialEq - + Eq - + serde::Serialize - + for<'a> serde::Deserialize<'a> - + InMemorySize + Send + Sync + Unpin + Clone + Default + fmt::Debug + PartialEq + Eq + InMemorySize + MaybeSerde { /// Header part of the block. type Header: BlockHeader + 'static; diff --git a/crates/primitives-traits/src/lib.rs b/crates/primitives-traits/src/lib.rs index 584181f2c95b..1c848b814137 100644 --- a/crates/primitives-traits/src/lib.rs +++ b/crates/primitives-traits/src/lib.rs @@ -92,3 +92,16 @@ pub trait MaybeArbitrary {} impl MaybeArbitrary for T where T: for<'a> arbitrary::Arbitrary<'a> {} #[cfg(not(any(feature = "test-utils", feature = "arbitrary")))] impl MaybeArbitrary for T {} + +/// Helper trait that requires de-/serialize implementation since `serde` feature is enabled. +#[cfg(feature = "serde")] +pub trait MaybeSerde: serde::Serialize + for<'de> serde::Deserialize<'de> {} +/// Noop. Helper trait that would require de-/serialize implementation if `serde` feature were +/// enabled. +#[cfg(not(feature = "serde"))] +pub trait MaybeSerde {} + +#[cfg(feature = "serde")] +impl MaybeSerde for T where T: serde::Serialize + for<'de> serde::Deserialize<'de> {} +#[cfg(not(feature = "serde"))] +impl MaybeSerde for T {} diff --git a/crates/primitives-traits/src/node.rs b/crates/primitives-traits/src/node.rs index ca490ac15aa8..35c8ea0f6937 100644 --- a/crates/primitives-traits/src/node.rs +++ b/crates/primitives-traits/src/node.rs @@ -1,17 +1,17 @@ use core::fmt; -use crate::{BlockBody, FullBlock, FullReceipt, FullSignedTx, FullTxType}; +use crate::{BlockBody, FullBlock, FullReceipt, FullSignedTx, FullTxType, MaybeSerde}; /// Configures all the primitive types of the node. pub trait NodePrimitives: Send + Sync + Unpin + Clone + Default + fmt::Debug + 'static { /// Block primitive. - type Block: Send + Sync + Unpin + Clone + Default + fmt::Debug + 'static; + type Block: Send + Sync + Unpin + Clone + Default + fmt::Debug + MaybeSerde + 'static; /// Signed version of the transaction type. 
- type SignedTx: Send + Sync + Unpin + Clone + Default + fmt::Debug + 'static; + type SignedTx: Send + Sync + Unpin + Clone + Default + fmt::Debug + MaybeSerde + 'static; /// Transaction envelope type ID. type TxType: Send + Sync + Unpin + Clone + Default + fmt::Debug + 'static; /// A receipt. - type Receipt: Send + Sync + Unpin + Clone + Default + fmt::Debug + 'static; + type Receipt: Send + Sync + Unpin + Clone + Default + fmt::Debug + MaybeSerde + 'static; } impl NodePrimitives for () { diff --git a/crates/primitives-traits/src/receipt.rs b/crates/primitives-traits/src/receipt.rs index b34590dff0e9..64839ecb8b4b 100644 --- a/crates/primitives-traits/src/receipt.rs +++ b/crates/primitives-traits/src/receipt.rs @@ -1,13 +1,11 @@ //! Receipt abstraction +use crate::{InMemorySize, MaybeSerde}; use alloc::vec::Vec; use alloy_consensus::TxReceipt; use alloy_primitives::B256; use core::fmt; use reth_codecs::Compact; -use serde::{Deserialize, Serialize}; - -use crate::InMemorySize; /// Helper trait that unifies all behaviour required by receipt to support full node operations. pub trait FullReceipt: Receipt + Compact {} @@ -26,9 +24,8 @@ pub trait Receipt: + TxReceipt + alloy_rlp::Encodable + alloy_rlp::Decodable - + Serialize + + MaybeSerde + InMemorySize - + for<'de> Deserialize<'de> { /// Returns transaction type. fn tx_type(&self) -> u8; diff --git a/crates/primitives-traits/src/transaction/mod.rs b/crates/primitives-traits/src/transaction/mod.rs index 33ee36090acb..9d60be0c32e5 100644 --- a/crates/primitives-traits/src/transaction/mod.rs +++ b/crates/primitives-traits/src/transaction/mod.rs @@ -6,9 +6,8 @@ use core::{fmt, hash::Hash}; use alloy_primitives::B256; use reth_codecs::Compact; -use serde::{Deserialize, Serialize}; -use crate::{FullTxType, InMemorySize, MaybeArbitrary, TxType}; +use crate::{FullTxType, InMemorySize, MaybeArbitrary, MaybeSerde, TxType}; /// Helper trait that unifies all behaviour required by transaction to support full node operations. pub trait FullTransaction: Transaction + Compact {} @@ -26,10 +25,9 @@ pub trait Transaction: + Eq + PartialEq + Hash - + Serialize - + for<'de> Deserialize<'de> + TransactionExt + InMemorySize + + MaybeSerde + MaybeArbitrary { } @@ -44,10 +42,9 @@ impl Transaction for T where + Eq + PartialEq + Hash - + Serialize - + for<'de> Deserialize<'de> + TransactionExt + InMemorySize + + MaybeSerde + MaybeArbitrary { } diff --git a/crates/primitives-traits/src/transaction/signed.rs b/crates/primitives-traits/src/transaction/signed.rs index 7b6abbaec0f3..d860dbb92fca 100644 --- a/crates/primitives-traits/src/transaction/signed.rs +++ b/crates/primitives-traits/src/transaction/signed.rs @@ -8,7 +8,7 @@ use alloy_primitives::{keccak256, Address, PrimitiveSignature, TxHash, B256}; use reth_codecs::Compact; use revm_primitives::TxEnv; -use crate::{FullTransaction, InMemorySize, MaybeArbitrary, Transaction}; +use crate::{FullTransaction, InMemorySize, MaybeArbitrary, MaybeSerde, Transaction}; /// Helper trait that unifies all behaviour required by block to support full node operations. 
pub trait FullSignedTx: SignedTransaction + Compact {} @@ -27,13 +27,12 @@ pub trait SignedTransaction: + PartialEq + Eq + Hash - + serde::Serialize - + for<'a> serde::Deserialize<'a> + alloy_rlp::Encodable + alloy_rlp::Decodable + Encodable2718 + Decodable2718 + alloy_consensus::Transaction + + MaybeSerde + MaybeArbitrary + InMemorySize { diff --git a/crates/primitives/Cargo.toml b/crates/primitives/Cargo.toml index c9043a2bd11e..89282c8f93d7 100644 --- a/crates/primitives/Cargo.toml +++ b/crates/primitives/Cargo.toml @@ -13,7 +13,7 @@ workspace = true [dependencies] # reth -reth-primitives-traits.workspace = true +reth-primitives-traits = { workspace = true, features = ["serde"] } reth-ethereum-forks.workspace = true reth-static-file-types.workspace = true revm-primitives = { workspace = true, features = ["serde"] } diff --git a/crates/revm/Cargo.toml b/crates/revm/Cargo.toml index d1202cd8b2cf..4bc78b7b0562 100644 --- a/crates/revm/Cargo.toml +++ b/crates/revm/Cargo.toml @@ -59,4 +59,5 @@ serde = [ "alloy-eips/serde", "alloy-primitives/serde", "alloy-consensus/serde", + "reth-primitives-traits/serde", ] diff --git a/crates/storage/codecs/Cargo.toml b/crates/storage/codecs/Cargo.toml index 20a0673dff62..57fe9f726c7b 100644 --- a/crates/storage/codecs/Cargo.toml +++ b/crates/storage/codecs/Cargo.toml @@ -81,7 +81,7 @@ serde = [ "alloy-primitives/serde", "alloy-trie?/serde", "bytes/serde", - "op-alloy-consensus?/serde" + "op-alloy-consensus?/serde", ] arbitrary = [ "alloy-consensus?/arbitrary", diff --git a/crates/storage/db-api/Cargo.toml b/crates/storage/db-api/Cargo.toml index 9b8589cb6aa8..bcc3e7789847 100644 --- a/crates/storage/db-api/Cargo.toml +++ b/crates/storage/db-api/Cargo.toml @@ -16,7 +16,7 @@ workspace = true reth-codecs.workspace = true reth-db-models.workspace = true reth-primitives = { workspace = true, features = ["reth-codec"] } -reth-primitives-traits.workspace = true +reth-primitives-traits = { workspace = true, features = ["serde"] } reth-prune-types.workspace = true reth-stages-types.workspace = true reth-storage-errors.workspace = true diff --git a/crates/storage/db-models/Cargo.toml b/crates/storage/db-models/Cargo.toml index 59d95c2263d0..44c0c3d962a5 100644 --- a/crates/storage/db-models/Cargo.toml +++ b/crates/storage/db-models/Cargo.toml @@ -14,7 +14,7 @@ workspace = true [dependencies] # reth reth-codecs.workspace = true -reth-primitives-traits.workspace = true +reth-primitives-traits = { workspace = true, features = ["serde"] } # ethereum alloy-primitives.workspace = true diff --git a/crates/storage/db/Cargo.toml b/crates/storage/db/Cargo.toml index 6042b5faa815..7dca8aa84752 100644 --- a/crates/storage/db/Cargo.toml +++ b/crates/storage/db/Cargo.toml @@ -15,7 +15,7 @@ workspace = true # reth reth-db-api.workspace = true reth-primitives = { workspace = true, features = ["reth-codec"] } -reth-primitives-traits.workspace = true +reth-primitives-traits = { workspace = true, features = ["serde"] } reth-fs-util.workspace = true reth-storage-errors.workspace = true reth-nippy-jar.workspace = true diff --git a/crates/storage/provider/Cargo.toml b/crates/storage/provider/Cargo.toml index 399e3e000b99..eff0540638a5 100644 --- a/crates/storage/provider/Cargo.toml +++ b/crates/storage/provider/Cargo.toml @@ -110,6 +110,7 @@ serde = [ "rand/serde", "revm/serde", "reth-codecs/serde", + "reth-optimism-primitives?/serde", ] test-utils = [ "reth-db/test-utils", diff --git a/crates/transaction-pool/Cargo.toml b/crates/transaction-pool/Cargo.toml index 
22df82536826..7c0f34765591 100644 --- a/crates/transaction-pool/Cargo.toml +++ b/crates/transaction-pool/Cargo.toml @@ -88,6 +88,7 @@ serde = [ "rand?/serde", "revm/serde", "smallvec/serde", + "reth-primitives-traits/serde", ] test-utils = [ "rand", diff --git a/crates/trie/common/Cargo.toml b/crates/trie/common/Cargo.toml index 0616e2597109..49d09d6f39bc 100644 --- a/crates/trie/common/Cargo.toml +++ b/crates/trie/common/Cargo.toml @@ -12,7 +12,7 @@ description = "Commonly used types for trie usage in reth." workspace = true [dependencies] -reth-primitives-traits.workspace = true +reth-primitives-traits = { workspace = true, features = ["serde"] } reth-codecs.workspace = true alloy-primitives.workspace = true From dc45aa9fffc869f1c865697a58843cc87113de57 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 18 Nov 2024 13:40:44 +0000 Subject: [PATCH 017/156] chore(deps): weekly `cargo update` (#12611) Co-authored-by: github-merge-queue <118344674+github-merge-queue@users.noreply.github.com> --- Cargo.lock | 33 +++++++++++++++++---------------- 1 file changed, 17 insertions(+), 16 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 56f2864c5fa1..054ad19dad5a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4579,9 +4579,9 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.162" +version = "0.2.164" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18d287de67fe55fd7e1581fe933d965a5a9477b38e949cfa9f8574ef01506398" +checksum = "433bfe06b8c75da9b2e3fbea6e5329ff87748f0b144ef75306e674c3f6f7c13f" [[package]] name = "libloading" @@ -5286,9 +5286,9 @@ checksum = "b410bbe7e14ab526a0e86877eb47c6996a2bd7746f027ba551028c925390e4e9" [[package]] name = "op-alloy-consensus" -version = "0.6.5" +version = "0.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bff54d1d790eca1f3aedbd666162e9c42eceff90b9f9d24b352ed9c2df1e901a" +checksum = "862db7293434837c1ca32ef509806a7b330bd24605da95438cd6e928a58b4b2c" dependencies = [ "alloy-consensus", "alloy-eips", @@ -5304,9 +5304,9 @@ dependencies = [ [[package]] name = "op-alloy-genesis" -version = "0.6.5" +version = "0.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae84fd64fbc53b3e958ea5a96d7f5633e4a111092e41c51672c2d91835c09efb" +checksum = "8ebd0391a3123b47e44ccca8a6f63a39ead2d7ea52e4fc132ff1297f6184314e" dependencies = [ "alloy-consensus", "alloy-eips", @@ -5318,9 +5318,9 @@ dependencies = [ [[package]] name = "op-alloy-network" -version = "0.6.5" +version = "0.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d71e777450ee3e9c5177e00865e9b4496472b623c50f146fc907b667c6b4ab37" +checksum = "6fd5d57f04f7ce1ba8be7704ba87fe7bea151a94ffc971f5a8a68b3bdf962471" dependencies = [ "alloy-consensus", "alloy-network", @@ -5333,9 +5333,9 @@ dependencies = [ [[package]] name = "op-alloy-protocol" -version = "0.6.5" +version = "0.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e854d2d4958d0a213731560172e8455536329ee9574473ff79fa953da91eb6a" +checksum = "0220768efb59871af53e1685b90983c9f3090cdf45df3d0107348362ba7055ee" dependencies = [ "alloy-consensus", "alloy-eips", @@ -5343,6 +5343,7 @@ dependencies = [ "alloy-rlp", "alloy-serde", "async-trait", + "brotli", "derive_more 1.0.0", "op-alloy-consensus", "op-alloy-genesis", @@ -5353,9 +5354,9 @@ dependencies = [ [[package]] name = "op-alloy-rpc-types" -version = "0.6.5" +version = "0.6.6" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "981b7f8ab11fe85ba3c1723702f000429b8d0c16b5883c93d577895f262cbac6" +checksum = "03db591ad512fdc70170fcb2bff3517b64811443f9fb65d3a1a6344c60acdbf0" dependencies = [ "alloy-consensus", "alloy-eips", @@ -5372,9 +5373,9 @@ dependencies = [ [[package]] name = "op-alloy-rpc-types-engine" -version = "0.6.5" +version = "0.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a227b16c9c5df68b112c8db9d268ebf46b3e26c744b4d59d4949575cd603a292" +checksum = "dd1a11a9cf2f2e8ed9ae11c93dce5990ff81ff98f17995772f567b586a864812" dependencies = [ "alloy-eips", "alloy-primitives", @@ -9756,9 +9757,9 @@ dependencies = [ [[package]] name = "rustls" -version = "0.23.16" +version = "0.23.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eee87ff5d9b36712a58574e12e9f0ea80f915a5b0ac518d322b24a465617925e" +checksum = "7f1a745511c54ba6d4465e8d5dfbd81b45791756de28d4981af70d6dca128f1e" dependencies = [ "log", "once_cell", From 26ce7fbdb2aeb37e9009065f55ab826b2f4d2b56 Mon Sep 17 00:00:00 2001 From: Federico Gimenez Date: Mon, 18 Nov 2024 14:56:14 +0100 Subject: [PATCH 018/156] feat(trie): add extend method to MultiProof (#12467) --- crates/trie/common/Cargo.toml | 1 + crates/trie/common/src/proofs.rs | 78 +++++++++++++++++++++++++++++++- 2 files changed, 78 insertions(+), 1 deletion(-) diff --git a/crates/trie/common/Cargo.toml b/crates/trie/common/Cargo.toml index 49d09d6f39bc..0161fc7ff3d9 100644 --- a/crates/trie/common/Cargo.toml +++ b/crates/trie/common/Cargo.toml @@ -34,6 +34,7 @@ plain_hasher = { version = "0.2", optional = true } arbitrary = { workspace = true, features = ["derive"], optional = true } [dev-dependencies] +alloy-primitives = { workspace = true, features = ["getrandom"] } arbitrary = { workspace = true, features = ["derive"] } proptest.workspace = true proptest-arbitrary-interop.workspace = true diff --git a/crates/trie/common/src/proofs.rs b/crates/trie/common/src/proofs.rs index f6eaf3960ec0..d0a5cd220420 100644 --- a/crates/trie/common/src/proofs.rs +++ b/crates/trie/common/src/proofs.rs @@ -12,7 +12,7 @@ use alloy_trie::{ use itertools::Itertools; use reth_primitives_traits::Account; use serde::{Deserialize, Serialize}; -use std::collections::HashMap; +use std::collections::{hash_map, HashMap}; /// The state multiproof of target accounts and multiproofs of their storage tries. /// Multiproof is effectively a state subtrie that only contains the nodes @@ -76,6 +76,24 @@ impl MultiProof { } Ok(AccountProof { address, info, proof, storage_root, storage_proofs }) } + + /// Extends this multiproof with another one, merging both account and storage + /// proofs. + pub fn extend(&mut self, other: Self) { + self.account_subtree.extend_from(other.account_subtree); + + for (hashed_address, storage) in other.storages { + match self.storages.entry(hashed_address) { + hash_map::Entry::Occupied(mut entry) => { + debug_assert_eq!(entry.get().root, storage.root); + entry.get_mut().subtree.extend_from(storage.subtree); + } + hash_map::Entry::Vacant(entry) => { + entry.insert(storage); + } + } + } + } } /// The merkle multiproof of storage trie. 
@@ -255,3 +273,61 @@ pub mod triehash { } } } + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_multiproof_extend_account_proofs() { + let mut proof1 = MultiProof::default(); + let mut proof2 = MultiProof::default(); + + let addr1 = B256::random(); + let addr2 = B256::random(); + + proof1.account_subtree.insert( + Nibbles::unpack(addr1), + alloy_rlp::encode_fixed_size(&U256::from(42)).to_vec().into(), + ); + proof2.account_subtree.insert( + Nibbles::unpack(addr2), + alloy_rlp::encode_fixed_size(&U256::from(43)).to_vec().into(), + ); + + proof1.extend(proof2); + + assert!(proof1.account_subtree.contains_key(&Nibbles::unpack(addr1))); + assert!(proof1.account_subtree.contains_key(&Nibbles::unpack(addr2))); + } + + #[test] + fn test_multiproof_extend_storage_proofs() { + let mut proof1 = MultiProof::default(); + let mut proof2 = MultiProof::default(); + + let addr = B256::random(); + let root = B256::random(); + + let mut subtree1 = ProofNodes::default(); + subtree1.insert( + Nibbles::from_nibbles(vec![0]), + alloy_rlp::encode_fixed_size(&U256::from(42)).to_vec().into(), + ); + proof1.storages.insert(addr, StorageMultiProof { root, subtree: subtree1 }); + + let mut subtree2 = ProofNodes::default(); + subtree2.insert( + Nibbles::from_nibbles(vec![1]), + alloy_rlp::encode_fixed_size(&U256::from(43)).to_vec().into(), + ); + proof2.storages.insert(addr, StorageMultiProof { root, subtree: subtree2 }); + + proof1.extend(proof2); + + let storage = proof1.storages.get(&addr).unwrap(); + assert_eq!(storage.root, root); + assert!(storage.subtree.contains_key(&Nibbles::from_nibbles(vec![0]))); + assert!(storage.subtree.contains_key(&Nibbles::from_nibbles(vec![1]))); + } +} From 8339c716b4edeecfa44620fce67978cad2f05342 Mon Sep 17 00:00:00 2001 From: Federico Gimenez Date: Mon, 18 Nov 2024 14:58:31 +0100 Subject: [PATCH 019/156] feat(engine): introduce sync implementation of StateRootTask (#12378) --- Cargo.lock | 4 +- crates/engine/tree/Cargo.toml | 8 +- crates/engine/tree/benches/channel_perf.rs | 132 ++++++++++++++++ crates/engine/tree/src/tree/root.rs | 167 +++++++++++++++------ 4 files changed, 257 insertions(+), 54 deletions(-) create mode 100644 crates/engine/tree/benches/channel_perf.rs diff --git a/Cargo.lock b/Cargo.lock index 054ad19dad5a..e83e20a03997 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7224,9 +7224,10 @@ dependencies = [ "alloy-rlp", "alloy-rpc-types-engine", "assert_matches", + "criterion", + "crossbeam-channel", "futures", "metrics", - "pin-project", "reth-beacon-consensus", "reth-blockchain-tree", "reth-blockchain-tree-api", @@ -7261,7 +7262,6 @@ dependencies = [ "revm-primitives", "thiserror 1.0.69", "tokio", - "tokio-stream", "tracing", ] diff --git a/crates/engine/tree/Cargo.toml b/crates/engine/tree/Cargo.toml index 278457145e70..d6e1c80a7261 100644 --- a/crates/engine/tree/Cargo.toml +++ b/crates/engine/tree/Cargo.toml @@ -45,9 +45,7 @@ revm-primitives.workspace = true # common futures.workspace = true -pin-project.workspace = true tokio = { workspace = true, features = ["macros", "sync"] } -tokio-stream.workspace = true thiserror.workspace = true # metrics @@ -82,6 +80,12 @@ reth-chainspec.workspace = true alloy-rlp.workspace = true assert_matches.workspace = true +criterion.workspace = true +crossbeam-channel = "0.5.13" + +[[bench]] +name = "channel_perf" +harness = false [features] test-utils = [ diff --git a/crates/engine/tree/benches/channel_perf.rs b/crates/engine/tree/benches/channel_perf.rs new file mode 100644 index 000000000000..c1c65e0a68e1 --- 
/dev/null +++ b/crates/engine/tree/benches/channel_perf.rs @@ -0,0 +1,132 @@ +//! Benchmark comparing `std::sync::mpsc` and `crossbeam` channels for `StateRootTask`. + +#![allow(missing_docs)] + +use criterion::{criterion_group, criterion_main, BatchSize, BenchmarkId, Criterion}; +use revm_primitives::{ + Account, AccountInfo, AccountStatus, Address, EvmState, EvmStorage, EvmStorageSlot, HashMap, + B256, U256, +}; +use std::thread; + +/// Creates a mock state with the specified number of accounts for benchmarking +fn create_bench_state(num_accounts: usize) -> EvmState { + let mut state_changes = HashMap::default(); + + for i in 0..num_accounts { + let storage = + EvmStorage::from_iter([(U256::from(i), EvmStorageSlot::new(U256::from(i + 1)))]); + + let account = Account { + info: AccountInfo { + balance: U256::from(100), + nonce: 10, + code_hash: B256::random(), + code: Default::default(), + }, + storage, + status: AccountStatus::Loaded, + }; + + let address = Address::random(); + state_changes.insert(address, account); + } + + state_changes +} + +/// Simulated `StateRootTask` with `std::sync::mpsc` +struct StdStateRootTask { + rx: std::sync::mpsc::Receiver, +} + +impl StdStateRootTask { + const fn new(rx: std::sync::mpsc::Receiver) -> Self { + Self { rx } + } + + fn run(self) { + while let Ok(state) = self.rx.recv() { + criterion::black_box(state); + } + } +} + +/// Simulated `StateRootTask` with `crossbeam-channel` +struct CrossbeamStateRootTask { + rx: crossbeam_channel::Receiver, +} + +impl CrossbeamStateRootTask { + const fn new(rx: crossbeam_channel::Receiver) -> Self { + Self { rx } + } + + fn run(self) { + while let Ok(state) = self.rx.recv() { + criterion::black_box(state); + } + } +} + +/// Benchmarks the performance of different channel implementations for state streaming +fn bench_state_stream(c: &mut Criterion) { + let mut group = c.benchmark_group("state_stream_channels"); + group.sample_size(10); + + for size in &[1, 10, 100] { + let bench_setup = || { + let states: Vec<_> = (0..100).map(|_| create_bench_state(*size)).collect(); + states + }; + + group.bench_with_input(BenchmarkId::new("std_channel", size), size, |b, _| { + b.iter_batched( + bench_setup, + |states| { + let (tx, rx) = std::sync::mpsc::channel(); + let task = StdStateRootTask::new(rx); + + let processor = thread::spawn(move || { + task.run(); + }); + + for state in states { + tx.send(state).unwrap(); + } + drop(tx); + + processor.join().unwrap(); + }, + BatchSize::LargeInput, + ); + }); + + group.bench_with_input(BenchmarkId::new("crossbeam_channel", size), size, |b, _| { + b.iter_batched( + bench_setup, + |states| { + let (tx, rx) = crossbeam_channel::unbounded(); + let task = CrossbeamStateRootTask::new(rx); + + let processor = thread::spawn(move || { + task.run(); + }); + + for state in states { + tx.send(state).unwrap(); + } + drop(tx); + + processor.join().unwrap(); + }, + BatchSize::LargeInput, + ); + }); + } + + group.finish(); +} + +criterion_group!(benches, bench_state_stream); +criterion_main!(benches); diff --git a/crates/engine/tree/src/tree/root.rs b/crates/engine/tree/src/tree/root.rs index fbf6c3481384..45cf5a780310 100644 --- a/crates/engine/tree/src/tree/root.rs +++ b/crates/engine/tree/src/tree/root.rs @@ -1,18 +1,13 @@ //! State root task related functionality. 
-use futures::Stream; -use pin_project::pin_project; use reth_provider::providers::ConsistentDbView; use reth_trie::{updates::TrieUpdates, TrieInput}; use reth_trie_parallel::root::ParallelStateRootError; use revm_primitives::{EvmState, B256}; -use std::{ - future::Future, - pin::Pin, - sync::{mpsc, Arc}, - task::{Context, Poll}, +use std::sync::{ + mpsc::{self, Receiver, RecvError}, + Arc, }; -use tokio_stream::wrappers::UnboundedReceiverStream; use tracing::debug; /// Result of the state root calculation @@ -28,12 +23,43 @@ pub(crate) struct StateRootHandle { #[allow(dead_code)] impl StateRootHandle { + /// Creates a new handle from a receiver. + pub(crate) const fn new(rx: mpsc::Receiver) -> Self { + Self { rx } + } + /// Waits for the state root calculation to complete. pub(crate) fn wait_for_result(self) -> StateRootResult { self.rx.recv().expect("state root task was dropped without sending result") } } +/// Common configuration for state root tasks +#[derive(Debug)] +pub(crate) struct StateRootConfig { + /// View over the state in the database. + pub consistent_view: ConsistentDbView, + /// Latest trie input. + pub input: Arc, +} + +/// Wrapper for std channel receiver to maintain compatibility with `UnboundedReceiverStream` +#[allow(dead_code)] +pub(crate) struct StdReceiverStream { + rx: Receiver, +} + +#[allow(dead_code)] +impl StdReceiverStream { + pub(crate) const fn new(rx: Receiver) -> Self { + Self { rx } + } + + pub(crate) fn recv(&self) -> Result { + self.rx.recv() + } +} + /// Standalone task that receives a transaction state stream and updates relevant /// data structures to calculate state root. /// @@ -42,15 +68,12 @@ impl StateRootHandle { /// fetches the proofs for relevant accounts from the database and reveal them /// to the tree. /// Then it updates relevant leaves according to the result of the transaction. -#[pin_project] +#[allow(dead_code)] pub(crate) struct StateRootTask { - /// View over the state in the database. - consistent_view: ConsistentDbView, /// Incoming state updates. - #[pin] - state_stream: UnboundedReceiverStream, - /// Latest trie input. - input: Arc, + state_stream: StdReceiverStream, + /// Task configuration. + config: StateRootConfig, } #[allow(dead_code)] @@ -60,65 +83,109 @@ where { /// Creates a new `StateRootTask`. pub(crate) const fn new( - consistent_view: ConsistentDbView, - input: Arc, - state_stream: UnboundedReceiverStream, + config: StateRootConfig, + state_stream: StdReceiverStream, ) -> Self { - Self { consistent_view, state_stream, input } + Self { config, state_stream } } /// Spawns the state root task and returns a handle to await its result. pub(crate) fn spawn(self) -> StateRootHandle { - let (tx, rx) = mpsc::channel(); - - // Spawn the task that will process state updates and calculate the root - tokio::spawn(async move { - debug!(target: "engine::tree", "Starting state root task"); - let result = self.await; - let _ = tx.send(result); - }); + let (tx, rx) = mpsc::sync_channel(1); + std::thread::Builder::new() + .name("State Root Task".to_string()) + .spawn(move || { + debug!(target: "engine::tree", "Starting state root task"); + let result = self.run(); + let _ = tx.send(result); + }) + .expect("failed to spawn state root thread"); - StateRootHandle { rx } + StateRootHandle::new(rx) } /// Handles state updates. 
fn on_state_update( - _view: &ConsistentDbView, - _input: &Arc, + _view: &reth_provider::providers::ConsistentDbView, + _input: &std::sync::Arc, _state: EvmState, ) { + // Default implementation of state update handling // TODO: calculate hashed state update and dispatch proof gathering for it. } } -impl Future for StateRootTask +#[allow(dead_code)] +impl StateRootTask where Factory: Send + 'static, { - type Output = StateRootResult; - - fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll { - let mut this = self.project(); - - // Process all items until the stream is closed - loop { - match this.state_stream.as_mut().poll_next(cx) { - Poll::Ready(Some(state)) => { - Self::on_state_update(this.consistent_view, this.input, state); - } - Poll::Ready(None) => { - // stream closed, return final result - return Poll::Ready(Ok((B256::default(), TrieUpdates::default()))); - } - Poll::Pending => { - return Poll::Pending; - } - } + fn run(self) -> StateRootResult { + while let Ok(state) = self.state_stream.recv() { + Self::on_state_update(&self.config.consistent_view, &self.config.input, state); } // TODO: // * keep track of proof calculation // * keep track of intermediate root computation // * return final state root result + Ok((B256::default(), TrieUpdates::default())) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use reth_provider::{providers::ConsistentDbView, test_utils::MockEthProvider}; + use reth_trie::TrieInput; + use revm_primitives::{ + Account, AccountInfo, AccountStatus, Address, EvmState, EvmStorage, EvmStorageSlot, + HashMap, B256, U256, + }; + use std::sync::Arc; + + fn create_mock_config() -> StateRootConfig { + let factory = MockEthProvider::default(); + let view = ConsistentDbView::new(factory, None); + let input = Arc::new(TrieInput::default()); + StateRootConfig { consistent_view: view, input } + } + + fn create_mock_state() -> revm_primitives::EvmState { + let mut state_changes: EvmState = HashMap::default(); + let storage = EvmStorage::from_iter([(U256::from(1), EvmStorageSlot::new(U256::from(2)))]); + let account = Account { + info: AccountInfo { + balance: U256::from(100), + nonce: 10, + code_hash: B256::random(), + code: Default::default(), + }, + storage, + status: AccountStatus::Loaded, + }; + + let address = Address::random(); + state_changes.insert(address, account); + + state_changes + } + + #[test] + fn test_state_root_task() { + let config = create_mock_config(); + let (tx, rx) = std::sync::mpsc::channel(); + let stream = StdReceiverStream::new(rx); + + let task = StateRootTask::new(config, stream); + let handle = task.spawn(); + + for _ in 0..10 { + tx.send(create_mock_state()).expect("failed to send state"); + } + drop(tx); + + let result = handle.wait_for_result(); + assert!(result.is_ok(), "sync block execution failed"); } } From 2dc75fb9966a9afc93a235d42ee2a6195144cebb Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Mon, 18 Nov 2024 18:09:44 +0400 Subject: [PATCH 020/156] fix: correctly prune transactions during on-disk reorgs (#12630) --- crates/storage/provider/src/writer/mod.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/crates/storage/provider/src/writer/mod.rs b/crates/storage/provider/src/writer/mod.rs index 1c3894e9cfdb..17dea5a6d51f 100644 --- a/crates/storage/provider/src/writer/mod.rs +++ b/crates/storage/provider/src/writer/mod.rs @@ -285,7 +285,8 @@ where let tx_range = self .database() .transaction_range_by_block_range(block_number + 1..=highest_static_file_block)?; - let total_txs = 
tx_range.end().saturating_sub(*tx_range.start()); + // We are using end + 1 - start here because the returned range is inclusive. + let total_txs = (tx_range.end() + 1).saturating_sub(*tx_range.start()); // IMPORTANT: we use `block_number+1` to make sure we remove only what is ABOVE the block debug!(target: "provider::storage_writer", ?block_number, "Removing blocks from database above block_number"); From 1d2934ba698ef90d07ae4961ce7e72b0a078e48d Mon Sep 17 00:00:00 2001 From: Federico Gimenez Date: Mon, 18 Nov 2024 16:02:39 +0100 Subject: [PATCH 021/156] feat(trie): turn TrieWitness methods into public functions (#12510) --- crates/trie/trie/src/witness.rs | 264 ++++++++++++++++---------------- 1 file changed, 131 insertions(+), 133 deletions(-) diff --git a/crates/trie/trie/src/witness.rs b/crates/trie/trie/src/witness.rs index 8290f158062c..b2364b385e10 100644 --- a/crates/trie/trie/src/witness.rs +++ b/crates/trie/trie/src/witness.rs @@ -118,16 +118,15 @@ where account_rlp.clone() }); let key = Nibbles::unpack(hashed_address); - account_trie_nodes.extend( - self.target_nodes( - key.clone(), - value, - account_multiproof - .account_subtree - .matching_nodes_iter(&key) - .sorted_by(|a, b| a.0.cmp(b.0)), - )?, - ); + account_trie_nodes.extend(target_nodes( + key.clone(), + value, + Some(&mut self.witness), + account_multiproof + .account_subtree + .matching_nodes_iter(&key) + .sorted_by(|a, b| a.0.cmp(b.0)), + )?); // Gather and record storage trie nodes for this account. let mut storage_trie_nodes = BTreeMap::default(); @@ -138,19 +137,18 @@ where .and_then(|s| s.storage.get(&hashed_slot)) .filter(|v| !v.is_zero()) .map(|v| alloy_rlp::encode_fixed_size(v).to_vec()); - storage_trie_nodes.extend( - self.target_nodes( - slot_nibbles.clone(), - slot_value, - storage_multiproof - .subtree - .matching_nodes_iter(&slot_nibbles) - .sorted_by(|a, b| a.0.cmp(b.0)), - )?, - ); + storage_trie_nodes.extend(target_nodes( + slot_nibbles.clone(), + slot_value, + Some(&mut self.witness), + storage_multiproof + .subtree + .matching_nodes_iter(&slot_nibbles) + .sorted_by(|a, b| a.0.cmp(b.0)), + )?); } - Self::next_root_from_proofs(storage_trie_nodes, |key: Nibbles| { + next_root_from_proofs(storage_trie_nodes, |key: Nibbles| { // Right pad the target with 0s. let mut padded_key = key.pack(); padded_key.resize(32, 0); @@ -177,7 +175,7 @@ where })?; } - Self::next_root_from_proofs(account_trie_nodes, |key: Nibbles| { + next_root_from_proofs(account_trie_nodes, |key: Nibbles| { // Right pad the target with 0s. let mut padded_key = key.pack(); padded_key.resize(32, 0); @@ -197,63 +195,6 @@ where Ok(self.witness) } - /// Decodes and unrolls all nodes from the proof. Returns only sibling nodes - /// in the path of the target and the final leaf node with updated value. - fn target_nodes<'b>( - &mut self, - key: Nibbles, - value: Option>, - proof: impl IntoIterator, - ) -> Result>>, TrieWitnessError> { - let mut trie_nodes = BTreeMap::default(); - let mut proof_iter = proof.into_iter().enumerate().peekable(); - while let Some((idx, (path, encoded))) = proof_iter.next() { - // Record the node in witness. - self.witness.insert(keccak256(encoded.as_ref()), encoded.clone()); - - let mut next_path = path.clone(); - match TrieNode::decode(&mut &encoded[..])? 
{ - TrieNode::Branch(branch) => { - next_path.push(key[path.len()]); - let children = branch_node_children(path.clone(), &branch); - for (child_path, value) in children { - if !key.starts_with(&child_path) { - let value = if value.len() < B256::len_bytes() { - Either::Right(value.to_vec()) - } else { - Either::Left(B256::from_slice(&value[1..])) - }; - trie_nodes.insert(child_path, value); - } - } - } - TrieNode::Extension(extension) => { - next_path.extend_from_slice(&extension.key); - } - TrieNode::Leaf(leaf) => { - next_path.extend_from_slice(&leaf.key); - if next_path != key { - trie_nodes.insert( - next_path.clone(), - Either::Right(leaf.value.as_slice().to_vec()), - ); - } - } - TrieNode::EmptyRoot => { - if idx != 0 || proof_iter.peek().is_some() { - return Err(TrieWitnessError::UnexpectedEmptyRoot(next_path)) - } - } - }; - } - - if let Some(value) = value { - trie_nodes.insert(key, Either::Right(value)); - } - - Ok(trie_nodes) - } - /// Retrieve proof targets for incoming hashed state. /// This method will aggregate all accounts and slots present in the hash state as well as /// select all existing slots from the database for the accounts that have been destroyed. @@ -283,73 +224,130 @@ where } Ok(proof_targets) } +} - fn next_root_from_proofs( - trie_nodes: BTreeMap>>, - mut trie_node_provider: impl FnMut(Nibbles) -> Result, - ) -> Result { - // Ignore branch child hashes in the path of leaves or lower child hashes. - let mut keys = trie_nodes.keys().peekable(); - let mut ignored = HashSet::::default(); - while let Some(key) = keys.next() { - if keys.peek().is_some_and(|next| next.starts_with(key)) { - ignored.insert(key.clone()); +/// Decodes and unrolls all nodes from the proof. Returns only sibling nodes +/// in the path of the target and the final leaf node with updated value. +pub fn target_nodes<'b>( + key: Nibbles, + value: Option>, + mut witness: Option<&mut HashMap>, + proof: impl IntoIterator, +) -> Result>>, TrieWitnessError> { + let mut trie_nodes = BTreeMap::default(); + let mut proof_iter = proof.into_iter().enumerate().peekable(); + while let Some((idx, (path, encoded))) = proof_iter.next() { + // Record the node in witness. + if let Some(witness) = witness.as_mut() { + witness.insert(keccak256(encoded.as_ref()), encoded.clone()); + } + + let mut next_path = path.clone(); + match TrieNode::decode(&mut &encoded[..])? { + TrieNode::Branch(branch) => { + next_path.push(key[path.len()]); + let children = branch_node_children(path.clone(), &branch); + for (child_path, value) in children { + if !key.starts_with(&child_path) { + let value = if value.len() < B256::len_bytes() { + Either::Right(value.to_vec()) + } else { + Either::Left(B256::from_slice(&value[1..])) + }; + trie_nodes.insert(child_path, value); + } + } + } + TrieNode::Extension(extension) => { + next_path.extend_from_slice(&extension.key); + } + TrieNode::Leaf(leaf) => { + next_path.extend_from_slice(&leaf.key); + if next_path != key { + trie_nodes + .insert(next_path.clone(), Either::Right(leaf.value.as_slice().to_vec())); + } + } + TrieNode::EmptyRoot => { + if idx != 0 || proof_iter.peek().is_some() { + return Err(TrieWitnessError::UnexpectedEmptyRoot(next_path)) + } } + }; + } + + if let Some(value) = value { + trie_nodes.insert(key, Either::Right(value)); + } + + Ok(trie_nodes) +} + +/// Computes the next root hash of a trie by processing a set of trie nodes and +/// their provided values. 
+pub fn next_root_from_proofs( + trie_nodes: BTreeMap>>, + mut trie_node_provider: impl FnMut(Nibbles) -> Result, +) -> Result { + // Ignore branch child hashes in the path of leaves or lower child hashes. + let mut keys = trie_nodes.keys().peekable(); + let mut ignored = HashSet::::default(); + while let Some(key) = keys.next() { + if keys.peek().is_some_and(|next| next.starts_with(key)) { + ignored.insert(key.clone()); } + } - let mut hash_builder = HashBuilder::default(); - let mut trie_nodes = trie_nodes.into_iter().filter(|e| !ignored.contains(&e.0)).peekable(); - while let Some((path, value)) = trie_nodes.next() { - match value { - Either::Left(branch_hash) => { - let parent_branch_path = path.slice(..path.len() - 1); - if hash_builder.key.starts_with(&parent_branch_path) || - trie_nodes - .peek() - .is_some_and(|next| next.0.starts_with(&parent_branch_path)) - { - hash_builder.add_branch(path, branch_hash, false); - } else { - // Parent is a branch node that needs to be turned into an extension node. - let mut path = path.clone(); - loop { - let node = trie_node_provider(path.clone())?; - match TrieNode::decode(&mut &node[..])? { - TrieNode::Branch(branch) => { - let children = branch_node_children(path, &branch); - for (child_path, value) in children { - if value.len() < B256::len_bytes() { - hash_builder.add_leaf(child_path, value); - } else { - let hash = B256::from_slice(&value[1..]); - hash_builder.add_branch(child_path, hash, false); - } + let mut hash_builder = HashBuilder::default(); + let mut trie_nodes = trie_nodes.into_iter().filter(|e| !ignored.contains(&e.0)).peekable(); + while let Some((path, value)) = trie_nodes.next() { + match value { + Either::Left(branch_hash) => { + let parent_branch_path = path.slice(..path.len() - 1); + if hash_builder.key.starts_with(&parent_branch_path) || + trie_nodes.peek().is_some_and(|next| next.0.starts_with(&parent_branch_path)) + { + hash_builder.add_branch(path, branch_hash, false); + } else { + // Parent is a branch node that needs to be turned into an extension node. + let mut path = path.clone(); + loop { + let node = trie_node_provider(path.clone())?; + match TrieNode::decode(&mut &node[..])? { + TrieNode::Branch(branch) => { + let children = branch_node_children(path, &branch); + for (child_path, value) in children { + if value.len() < B256::len_bytes() { + hash_builder.add_leaf(child_path, value); + } else { + let hash = B256::from_slice(&value[1..]); + hash_builder.add_branch(child_path, hash, false); } - break - } - TrieNode::Leaf(leaf) => { - let mut child_path = path; - child_path.extend_from_slice(&leaf.key); - hash_builder.add_leaf(child_path, &leaf.value); - break - } - TrieNode::Extension(ext) => { - path.extend_from_slice(&ext.key); - } - TrieNode::EmptyRoot => { - return Err(TrieWitnessError::UnexpectedEmptyRoot(path)) } + break + } + TrieNode::Leaf(leaf) => { + let mut child_path = path; + child_path.extend_from_slice(&leaf.key); + hash_builder.add_leaf(child_path, &leaf.value); + break + } + TrieNode::Extension(ext) => { + path.extend_from_slice(&ext.key); + } + TrieNode::EmptyRoot => { + return Err(TrieWitnessError::UnexpectedEmptyRoot(path)) } } } } - Either::Right(leaf_value) => { - hash_builder.add_leaf(path, &leaf_value); - } + } + Either::Right(leaf_value) => { + hash_builder.add_leaf(path, &leaf_value); } } - Ok(hash_builder.root()) } + Ok(hash_builder.root()) } /// Returned branch node children with keys in order. 
From 8aa9b71ef78e39759c3907ab4df3ebd4f6f43cdd Mon Sep 17 00:00:00 2001 From: Roman Krasiuk Date: Mon, 18 Nov 2024 16:51:46 +0100 Subject: [PATCH 022/156] chore(witness): simplify wiped storage retrieval (#12637) --- crates/trie/trie/src/witness.rs | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/crates/trie/trie/src/witness.rs b/crates/trie/trie/src/witness.rs index b2364b385e10..6f6a66a16eb9 100644 --- a/crates/trie/trie/src/witness.rs +++ b/crates/trie/trie/src/witness.rs @@ -213,11 +213,10 @@ where let mut storage_cursor = self.hashed_cursor_factory.hashed_storage_cursor(*hashed_address)?; // position cursor at the start - if let Some((hashed_slot, _)) = storage_cursor.seek(B256::ZERO)? { - storage_keys.insert(hashed_slot); - } - while let Some((hashed_slot, _)) = storage_cursor.next()? { + let mut current_entry = storage_cursor.seek(B256::ZERO)?; + while let Some((hashed_slot, _)) = current_entry { storage_keys.insert(hashed_slot); + current_entry = storage_cursor.next()?; } } proof_targets.insert(*hashed_address, storage_keys); From 55b51364b094077dd93d6e44477216f21e13d583 Mon Sep 17 00:00:00 2001 From: Emilia Hane Date: Mon, 18 Nov 2024 17:27:39 +0100 Subject: [PATCH 023/156] fix(discv5): warning discv5 config socket override (#12636) --- crates/net/discv5/src/config.rs | 24 ++++++++++++++---------- 1 file changed, 14 insertions(+), 10 deletions(-) diff --git a/crates/net/discv5/src/config.rs b/crates/net/discv5/src/config.rs index 4a534afbef53..61ab94b4f2fc 100644 --- a/crates/net/discv5/src/config.rs +++ b/crates/net/discv5/src/config.rs @@ -412,11 +412,13 @@ pub fn discv5_sockets_wrt_rlpx_addr( discv5_addr_ipv6.map(|ip| SocketAddrV6::new(ip, discv5_port_ipv6, 0, 0)); if let Some(discv5_addr) = discv5_addr_ipv4 { - warn!(target: "net::discv5", - %discv5_addr, - %rlpx_addr, - "Overwriting discv5 IPv4 address with RLPx IPv4 address, limited to one advertised IP address per IP version" - ); + if discv5_addr != rlpx_addr { + warn!(target: "net::discv5", + %discv5_addr, + %rlpx_addr, + "Overwriting discv5 IPv4 address with RLPx IPv4 address, limited to one advertised IP address per IP version" + ); + } } // overwrite discv5 ipv4 addr with RLPx address. this is since there is no @@ -429,11 +431,13 @@ pub fn discv5_sockets_wrt_rlpx_addr( discv5_addr_ipv4.map(|ip| SocketAddrV4::new(ip, discv5_port_ipv4)); if let Some(discv5_addr) = discv5_addr_ipv6 { - warn!(target: "net::discv5", - %discv5_addr, - %rlpx_addr, - "Overwriting discv5 IPv6 address with RLPx IPv6 address, limited to one advertised IP address per IP version" - ); + if discv5_addr != rlpx_addr { + warn!(target: "net::discv5", + %discv5_addr, + %rlpx_addr, + "Overwriting discv5 IPv6 address with RLPx IPv6 address, limited to one advertised IP address per IP version" + ); + } } // overwrite discv5 ipv6 addr with RLPx address. 
this is since there is no From f1279b35493d4086ee0c71d48416e6b22adfb11e Mon Sep 17 00:00:00 2001 From: Jennifer Date: Mon, 18 Nov 2024 20:34:09 +0000 Subject: [PATCH 024/156] Run kurtosis e2e test 2x/day (#12641) --- .github/assets/kurtosis_network_params.yaml | 2 -- .github/workflows/kurtosis.yml | 4 ++-- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/.github/assets/kurtosis_network_params.yaml b/.github/assets/kurtosis_network_params.yaml index 9c104de49500..e8cc1b51dc81 100644 --- a/.github/assets/kurtosis_network_params.yaml +++ b/.github/assets/kurtosis_network_params.yaml @@ -2,8 +2,6 @@ participants: - el_type: geth cl_type: lighthouse - el_type: reth - el_extra_params: - - --engine.experimental el_image: "ghcr.io/paradigmxyz/reth:kurtosis-ci" cl_type: teku additional_services: diff --git a/.github/workflows/kurtosis.yml b/.github/workflows/kurtosis.yml index 74d26dbd3eea..3e1b74321116 100644 --- a/.github/workflows/kurtosis.yml +++ b/.github/workflows/kurtosis.yml @@ -5,8 +5,8 @@ name: kurtosis on: workflow_dispatch: schedule: - # every day - - cron: "0 1 * * *" + # run every 12 hours + - cron: "0 */12 * * *" env: CARGO_TERM_COLOR: always From 7fb862cbde4137b372458e7302f270e220b44843 Mon Sep 17 00:00:00 2001 From: witty <131909329+0xwitty@users.noreply.github.com> Date: Mon, 18 Nov 2024 23:59:58 +0300 Subject: [PATCH 025/156] Typo Update private-testnet.md (#12633) --- book/run/private-testnet.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/book/run/private-testnet.md b/book/run/private-testnet.md index 3a987e52c73a..28253ca9f010 100644 --- a/book/run/private-testnet.md +++ b/book/run/private-testnet.md @@ -6,7 +6,7 @@ This guide uses [Kurtosis' ethereum-package](https://github.com/ethpandaops/ethe * Go [here](https://docs.kurtosis.com/install/) to install Kurtosis * Go [here](https://docs.docker.com/get-docker/) to install Docker -The [`ethereum-package`](https://github.com/ethpandaops/ethereum-package) is a [package](https://docs.kurtosis.com/advanced-concepts/packages) for a general purpose Ethereum testnet definition used for instantiating private testnets at any scale over Docker or Kubernetes, locally or in the cloud. This guide will go through how to spin up a local private testnet with Reth various CL clients locally. Specifically, you will instantiate a 2-node network over Docker with Reth/Lighthouse and Reth/Teku client combinations. +The [`ethereum-package`](https://github.com/ethpandaops/ethereum-package) is a [package](https://docs.kurtosis.com/advanced-concepts/packages) for a general purpose Ethereum testnet definition used for instantiating private testnets at any scale over Docker or Kubernetes, locally or in the cloud. This guide will go through how to spin up a local private testnet with Reth and various CL clients locally. Specifically, you will instantiate a 2-node network over Docker with Reth/Lighthouse and Reth/Teku client combinations. To see all possible configurations and flags you can use, including metrics and observability tools (e.g. Grafana, Prometheus, etc), go [here](https://github.com/ethpandaops/ethereum-package#configuration). 
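For reference, a minimal network params file for the 2-node Reth/Lighthouse and Reth/Teku setup described in the guide text above could look like the sketch below. It mirrors the layout of `.github/assets/kurtosis_network_params.yaml` as touched by PATCH 024/156 above; the local file name and the `kurtosis run` invocation in the trailing comment are assumptions drawn from the ethereum-package documentation, not something defined in these patches.

    # network_params.yaml (hypothetical local file, modeled on the CI asset in PATCH 024/156)
    participants:
      - el_type: reth
        cl_type: lighthouse
      - el_type: reth
        cl_type: teku
    # Optionally pin a specific reth image per participant, as the CI asset does:
    #   el_image: "ghcr.io/paradigmxyz/reth:kurtosis-ci"
    #
    # Assumed invocation (check the ethereum-package README for the authoritative flags):
    #   kurtosis run github.com/ethpandaops/ethereum-package --args-file ./network_params.yaml
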
From 641d1288e9314ecc360605bcda9c2cac7885a64e Mon Sep 17 00:00:00 2001 From: Emilia Hane Date: Mon, 18 Nov 2024 22:43:26 +0100 Subject: [PATCH 026/156] chore(sdk): limit `FillTxEnv` to super trait of `FullSignedTx` (#12614) --- Cargo.lock | 70 ++++++++++--------- crates/optimism/payload/src/payload.rs | 2 +- crates/primitives-traits/src/lib.rs | 1 + .../src/transaction/execute.rs | 10 +++ .../primitives-traits/src/transaction/mod.rs | 1 + .../src/transaction/signed.rs | 16 +++-- crates/primitives/src/transaction/mod.rs | 2 + 7 files changed, 60 insertions(+), 42 deletions(-) create mode 100644 crates/primitives-traits/src/transaction/execute.rs diff --git a/Cargo.lock b/Cargo.lock index e83e20a03997..20fc477ea22d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4964,9 +4964,9 @@ dependencies = [ [[package]] name = "mockall" -version = "0.13.0" +version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4c28b3fb6d753d28c20e826cd46ee611fda1cf3cde03a443a974043247c065a" +checksum = "39a6bfcc6c8c7eed5ee98b9c3e33adc726054389233e201c95dab2d41a3839d2" dependencies = [ "cfg-if", "downcast", @@ -4978,9 +4978,9 @@ dependencies = [ [[package]] name = "mockall_derive" -version = "0.13.0" +version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "341014e7f530314e9a1fdbc7400b244efea7122662c96bfa248c31da5bfb2020" +checksum = "25ca3004c2efe9011bd4e461bd8256445052b9615405b4f7ea43fc8ca5c20898" dependencies = [ "cfg-if", "proc-macro2", @@ -5286,9 +5286,9 @@ checksum = "b410bbe7e14ab526a0e86877eb47c6996a2bd7746f027ba551028c925390e4e9" [[package]] name = "op-alloy-consensus" -version = "0.6.6" +version = "0.6.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "862db7293434837c1ca32ef509806a7b330bd24605da95438cd6e928a58b4b2c" +checksum = "72da577a88d35b893fae6467112651f26ef023434c196b2a0b3dc75bc853e0e4" dependencies = [ "alloy-consensus", "alloy-eips", @@ -5299,14 +5299,14 @@ dependencies = [ "derive_more 1.0.0", "serde", "serde_with", - "spin", + "thiserror 2.0.3", ] [[package]] name = "op-alloy-genesis" -version = "0.6.6" +version = "0.6.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ebd0391a3123b47e44ccca8a6f63a39ead2d7ea52e4fc132ff1297f6184314e" +checksum = "818180672dd14ca6642fb57942e1cbd602669f42b6e0222b7ea9bbcae065d67e" dependencies = [ "alloy-consensus", "alloy-eips", @@ -5314,13 +5314,14 @@ dependencies = [ "alloy-sol-types", "serde", "serde_repr", + "thiserror 2.0.3", ] [[package]] name = "op-alloy-network" -version = "0.6.6" +version = "0.6.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6fd5d57f04f7ce1ba8be7704ba87fe7bea151a94ffc971f5a8a68b3bdf962471" +checksum = "12f82e805bad171ceae2af45efaecf8d0b50622cff3473e3c998ff1dd340de35" dependencies = [ "alloy-consensus", "alloy-network", @@ -5333,10 +5334,11 @@ dependencies = [ [[package]] name = "op-alloy-protocol" -version = "0.6.6" +version = "0.6.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0220768efb59871af53e1685b90983c9f3090cdf45df3d0107348362ba7055ee" +checksum = "1803a1ac96203b8f713b1fa9b7509c46c645ca7bc22b582761a7495e999d4301" dependencies = [ + "alloc-no-stdlib", "alloy-consensus", "alloy-eips", "alloy-primitives", @@ -5344,19 +5346,20 @@ dependencies = [ "alloy-serde", "async-trait", "brotli", - "derive_more 1.0.0", + "miniz_oxide", "op-alloy-consensus", "op-alloy-genesis", "serde", + "thiserror 2.0.3", "tracing", "unsigned-varint", ] 
[[package]] name = "op-alloy-rpc-types" -version = "0.6.6" +version = "0.6.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03db591ad512fdc70170fcb2bff3517b64811443f9fb65d3a1a6344c60acdbf0" +checksum = "a838c125256e02e2f9da88c51e263b02a06cda7e60382fe2551a3385b516f5bb" dependencies = [ "alloy-consensus", "alloy-eips", @@ -5373,9 +5376,9 @@ dependencies = [ [[package]] name = "op-alloy-rpc-types-engine" -version = "0.6.6" +version = "0.6.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd1a11a9cf2f2e8ed9ae11c93dce5990ff81ff98f17995772f567b586a864812" +checksum = "c227fcc7d81d4023363ba12406e57ebcc1c7cbb1075c38ea471ae32138d4706d" dependencies = [ "alloy-eips", "alloy-primitives", @@ -5387,6 +5390,7 @@ dependencies = [ "op-alloy-protocol", "serde", "snap", + "thiserror 2.0.3", ] [[package]] @@ -5466,9 +5470,9 @@ dependencies = [ [[package]] name = "parity-scale-codec" -version = "3.6.12" +version = "3.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "306800abfa29c7f16596b5970a588435e3d5b3149683d00c12b699cc19f895ee" +checksum = "8be4817d39f3272f69c59fe05d0535ae6456c2dc2fa1ba02910296c7e0a5c590" dependencies = [ "arbitrary", "arrayvec", @@ -5477,19 +5481,20 @@ dependencies = [ "bytes", "impl-trait-for-tuples", "parity-scale-codec-derive", + "rustversion", "serde", ] [[package]] name = "parity-scale-codec-derive" -version = "3.6.12" +version = "3.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d830939c76d294956402033aee57a6da7b438f2294eb94864c37b0569053a42c" +checksum = "8781a75c6205af67215f382092b6e0a4ff3734798523e69073d4bcd294ec767b" dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.87", ] [[package]] @@ -9744,9 +9749,9 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.40" +version = "0.38.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "99e4ea3e1cdc4b559b8e5650f9c8e5998e3e5c1343b4eaf034565f32318d63c0" +checksum = "d7f649912bc1495e167a6edee79151c84b1bad49748cb4f1f1167f459f6224f6" dependencies = [ "bitflags 2.6.0", "errno", @@ -10057,9 +10062,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.132" +version = "1.0.133" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d726bfaff4b320266d395898905d0eba0345aae23b54aee3a737e260fd46db03" +checksum = "c7fceb2473b9166b2294ef05efcb65a3db80803f0b03ef86a5fc88a2b85ee377" dependencies = [ "indexmap 2.6.0", "itoa", @@ -10384,9 +10389,6 @@ name = "spin" version = "0.9.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" -dependencies = [ - "lock_api", -] [[package]] name = "spki" @@ -10990,9 +10992,9 @@ dependencies = [ [[package]] name = "tower-http" -version = "0.6.1" +version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8437150ab6bbc8c5f0f519e3d5ed4aa883a83dd4cdd3d1b21f9482936046cb97" +checksum = "403fa3b783d4b626a8ad51d766ab03cb6d2dbfc46b1c5d4448395e6628dc9697" dependencies = [ "async-compression", "base64 0.22.1", @@ -11592,9 +11594,9 @@ dependencies = [ [[package]] name = "wasmtimer" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb4f099acbc1043cc752b91615b24b02d7f6fcd975bd781fed9f50b3c3e15bf7" +checksum = "0048ad49a55b9deb3953841fa1fc5858f0efbcb7a18868c899a360269fac1b23" dependencies = [ 
"futures", "js-sys", diff --git a/crates/optimism/payload/src/payload.rs b/crates/optimism/payload/src/payload.rs index 36f11ee628b3..1a951abadcae 100644 --- a/crates/optimism/payload/src/payload.rs +++ b/crates/optimism/payload/src/payload.rs @@ -7,7 +7,7 @@ use alloy_eips::{ use alloy_primitives::{keccak256, Address, Bytes, B256, B64, U256}; use alloy_rlp::Encodable; use alloy_rpc_types_engine::{ExecutionPayloadEnvelopeV2, ExecutionPayloadV1, PayloadId}; -use op_alloy_consensus::eip1559::{decode_holocene_extra_data, EIP1559ParamError}; +use op_alloy_consensus::{decode_holocene_extra_data, EIP1559ParamError}; /// Re-export for use in downstream arguments. pub use op_alloy_rpc_types_engine::OpPayloadAttributes; use op_alloy_rpc_types_engine::{OpExecutionPayloadEnvelopeV3, OpExecutionPayloadEnvelopeV4}; diff --git a/crates/primitives-traits/src/lib.rs b/crates/primitives-traits/src/lib.rs index 1c848b814137..33becad2feae 100644 --- a/crates/primitives-traits/src/lib.rs +++ b/crates/primitives-traits/src/lib.rs @@ -26,6 +26,7 @@ pub use receipt::{FullReceipt, Receipt}; pub mod transaction; pub use transaction::{ + execute::FillTxEnv, signed::{FullSignedTx, SignedTransaction}, FullTransaction, Transaction, TransactionExt, }; diff --git a/crates/primitives-traits/src/transaction/execute.rs b/crates/primitives-traits/src/transaction/execute.rs new file mode 100644 index 000000000000..c7350f1941be --- /dev/null +++ b/crates/primitives-traits/src/transaction/execute.rs @@ -0,0 +1,10 @@ +//! Abstraction of an executable transaction. + +use alloy_primitives::Address; +use revm_primitives::TxEnv; + +/// Loads transaction into execution environment. +pub trait FillTxEnv { + /// Fills [`TxEnv`] with an [`Address`] and transaction. + fn fill_tx_env(&self, tx_env: &mut TxEnv, sender: Address); +} diff --git a/crates/primitives-traits/src/transaction/mod.rs b/crates/primitives-traits/src/transaction/mod.rs index 9d60be0c32e5..53b772785713 100644 --- a/crates/primitives-traits/src/transaction/mod.rs +++ b/crates/primitives-traits/src/transaction/mod.rs @@ -1,5 +1,6 @@ //! Transaction abstraction +pub mod execute; pub mod signed; use core::{fmt, hash::Hash}; diff --git a/crates/primitives-traits/src/transaction/signed.rs b/crates/primitives-traits/src/transaction/signed.rs index d860dbb92fca..633b0caf7b2d 100644 --- a/crates/primitives-traits/src/transaction/signed.rs +++ b/crates/primitives-traits/src/transaction/signed.rs @@ -6,14 +6,19 @@ use core::hash::Hash; use alloy_eips::eip2718::{Decodable2718, Encodable2718}; use alloy_primitives::{keccak256, Address, PrimitiveSignature, TxHash, B256}; use reth_codecs::Compact; -use revm_primitives::TxEnv; -use crate::{FullTransaction, InMemorySize, MaybeArbitrary, MaybeSerde, Transaction}; +use crate::{FillTxEnv, FullTransaction, InMemorySize, MaybeArbitrary, MaybeSerde, Transaction}; /// Helper trait that unifies all behaviour required by block to support full node operations. -pub trait FullSignedTx: SignedTransaction + Compact {} +pub trait FullSignedTx: + SignedTransaction + FillTxEnv + Compact +{ +} -impl FullSignedTx for T where T: SignedTransaction + Compact {} +impl FullSignedTx for T where + T: SignedTransaction + FillTxEnv + Compact +{ +} /// A signed transaction. #[auto_impl::auto_impl(&, Arc)] @@ -71,9 +76,6 @@ pub trait SignedTransaction: fn recalculate_hash(&self) -> B256 { keccak256(self.encoded_2718()) } - - /// Fills [`TxEnv`] with an [`Address`] and transaction. 
- fn fill_tx_env(&self, tx_env: &mut TxEnv, sender: Address); } /// Helper trait used in testing. diff --git a/crates/primitives/src/transaction/mod.rs b/crates/primitives/src/transaction/mod.rs index aa57ef8d81e8..41522744a2ff 100644 --- a/crates/primitives/src/transaction/mod.rs +++ b/crates/primitives/src/transaction/mod.rs @@ -1360,7 +1360,9 @@ impl SignedTransaction for TransactionSigned { let signature_hash = self.signature_hash(); recover_signer_unchecked(&self.signature, signature_hash) } +} +impl reth_primitives_traits::FillTxEnv for TransactionSigned { fn fill_tx_env(&self, tx_env: &mut TxEnv, sender: Address) { tx_env.caller = sender; match self.as_ref() { From e859e1711d815c866273bb68f465e7c73bc85b28 Mon Sep 17 00:00:00 2001 From: AJStonewee Date: Mon, 18 Nov 2024 20:13:03 -0400 Subject: [PATCH 027/156] docs: small fix in HARDFORK-CHECKLIST.md (#12646) --- HARDFORK-CHECKLIST.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/HARDFORK-CHECKLIST.md b/HARDFORK-CHECKLIST.md index 80ebfc20c98f..17c639f0d5e4 100644 --- a/HARDFORK-CHECKLIST.md +++ b/HARDFORK-CHECKLIST.md @@ -17,5 +17,5 @@ ### Updates to the engine API - Add new endpoints to the `EngineApi` trait and implement endpoints. -- Update the `ExceuctionPayload` + `ExecutionPayloadSidecar` to `Block` conversion if there are any additional parameters. -- Update version specific validation checks in the `EngineValidator` trait. \ No newline at end of file +- Update the `ExecutionPayload` + `ExecutionPayloadSidecar` to `Block` conversion if there are any additional parameters. +- Update version specific validation checks in the `EngineValidator` trait. From 06bf5c77839972a8df7f9195499a67d38e840d29 Mon Sep 17 00:00:00 2001 From: Emilia Hane Date: Tue, 19 Nov 2024 10:27:23 +0100 Subject: [PATCH 028/156] chore(sdk): make `Chain` generic over data primitives (#12635) --- crates/ethereum/node/src/node.rs | 16 +---- crates/evm/execution-types/src/chain.rs | 60 ++++++++++--------- crates/optimism/evm/src/lib.rs | 2 +- crates/optimism/node/src/node.rs | 2 +- crates/primitives-traits/src/node.rs | 43 +++++++++++-- crates/primitives/src/lib.rs | 13 +++- .../transaction-pool/src/blobstore/tracker.rs | 2 +- 7 files changed, 86 insertions(+), 52 deletions(-) diff --git a/crates/ethereum/node/src/node.rs b/crates/ethereum/node/src/node.rs index 5265329f19a9..1615ef0e6867 100644 --- a/crates/ethereum/node/src/node.rs +++ b/crates/ethereum/node/src/node.rs @@ -13,8 +13,7 @@ use reth_evm::execute::BasicBlockExecutorProvider; use reth_evm_ethereum::execute::EthExecutionStrategyFactory; use reth_network::{NetworkHandle, PeersInfo}; use reth_node_api::{ - AddOnsContext, ConfigureEvm, EngineValidator, FullNodeComponents, NodePrimitives, - NodeTypesWithDB, + AddOnsContext, ConfigureEvm, EngineValidator, FullNodeComponents, NodeTypesWithDB, }; use reth_node_builder::{ components::{ @@ -26,7 +25,7 @@ use reth_node_builder::{ BuilderContext, Node, NodeAdapter, NodeComponentsBuilder, PayloadBuilderConfig, PayloadTypes, }; use reth_payload_builder::{PayloadBuilderHandle, PayloadBuilderService}; -use reth_primitives::{Block, Receipt, TransactionSigned, TxType}; +use reth_primitives::EthPrimitives; use reth_provider::CanonStateSubscriptions; use reth_rpc::EthApi; use reth_tracing::tracing::{debug, info}; @@ -38,17 +37,6 @@ use reth_trie_db::MerklePatriciaTrie; use crate::{EthEngineTypes, EthEvmConfig}; -/// Ethereum primitive types. 
-#[derive(Debug, Default, Clone)] -pub struct EthPrimitives; - -impl NodePrimitives for EthPrimitives { - type Block = Block; - type SignedTx = TransactionSigned; - type TxType = TxType; - type Receipt = Receipt; -} - /// Type configuration for a regular Ethereum node. #[derive(Debug, Default, Clone, Copy)] #[non_exhaustive] diff --git a/crates/evm/execution-types/src/chain.rs b/crates/evm/execution-types/src/chain.rs index dc633e2d7ab7..b32b53b885e8 100644 --- a/crates/evm/execution-types/src/chain.rs +++ b/crates/evm/execution-types/src/chain.rs @@ -7,9 +7,10 @@ use alloy_primitives::{Address, BlockHash, BlockNumber, TxHash}; use core::{fmt, ops::RangeInclusive}; use reth_execution_errors::{BlockExecutionError, InternalBlockExecutionError}; use reth_primitives::{ - Receipt, SealedBlock, SealedBlockWithSenders, SealedHeader, TransactionSigned, + SealedBlock, SealedBlockWithSenders, SealedHeader, TransactionSigned, TransactionSignedEcRecovered, }; +use reth_primitives_traits::NodePrimitives; use reth_trie::updates::TrieUpdates; use revm::db::BundleState; @@ -25,7 +26,7 @@ use revm::db::BundleState; /// A chain of blocks should not be empty. #[derive(Clone, Debug, Default, PartialEq, Eq)] #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] -pub struct Chain { +pub struct Chain { /// All blocks in this chain. blocks: BTreeMap, /// The outcome of block execution for this chain. @@ -34,14 +35,14 @@ pub struct Chain { /// chain, ranging from the [`Chain::first`] block to the [`Chain::tip`] block, inclusive. /// /// Additionally, it includes the individual state changes that led to the current state. - execution_outcome: ExecutionOutcome, + execution_outcome: ExecutionOutcome, /// State trie updates after block is added to the chain. /// NOTE: Currently, trie updates are present only for /// single-block chains that extend the canonical chain. trie_updates: Option, } -impl Chain { +impl Chain { /// Create new Chain from blocks and state. /// /// # Warning @@ -49,7 +50,7 @@ impl Chain { /// A chain of blocks should not be empty. pub fn new( blocks: impl IntoIterator, - execution_outcome: ExecutionOutcome, + execution_outcome: ExecutionOutcome, trie_updates: Option, ) -> Self { let blocks = blocks.into_iter().map(|b| (b.number, b)).collect::>(); @@ -61,7 +62,7 @@ impl Chain { /// Create new Chain from a single block and its state. pub fn from_block( block: SealedBlockWithSenders, - execution_outcome: ExecutionOutcome, + execution_outcome: ExecutionOutcome, trie_updates: Option, ) -> Self { Self::new([block], execution_outcome, trie_updates) @@ -93,12 +94,12 @@ impl Chain { } /// Get execution outcome of this chain - pub const fn execution_outcome(&self) -> &ExecutionOutcome { + pub const fn execution_outcome(&self) -> &ExecutionOutcome { &self.execution_outcome } /// Get mutable execution outcome of this chain - pub fn execution_outcome_mut(&mut self) -> &mut ExecutionOutcome { + pub fn execution_outcome_mut(&mut self) -> &mut ExecutionOutcome { &mut self.execution_outcome } @@ -132,7 +133,7 @@ impl Chain { pub fn execution_outcome_at_block( &self, block_number: BlockNumber, - ) -> Option { + ) -> Option> { if self.tip().number == block_number { return Some(self.execution_outcome.clone()) } @@ -149,19 +150,21 @@ impl Chain { /// 1. The blocks contained in the chain. /// 2. The execution outcome representing the final state. /// 3. The optional trie updates. 
- pub fn into_inner(self) -> (ChainBlocks<'static>, ExecutionOutcome, Option) { + pub fn into_inner( + self, + ) -> (ChainBlocks<'static>, ExecutionOutcome, Option) { (ChainBlocks { blocks: Cow::Owned(self.blocks) }, self.execution_outcome, self.trie_updates) } /// Destructure the chain into its inner components: /// 1. A reference to the blocks contained in the chain. /// 2. A reference to the execution outcome representing the final state. - pub const fn inner(&self) -> (ChainBlocks<'_>, &ExecutionOutcome) { + pub const fn inner(&self) -> (ChainBlocks<'_>, &ExecutionOutcome) { (ChainBlocks { blocks: Cow::Borrowed(&self.blocks) }, &self.execution_outcome) } /// Returns an iterator over all the receipts of the blocks in the chain. - pub fn block_receipts_iter(&self) -> impl Iterator>> + '_ { + pub fn block_receipts_iter(&self) -> impl Iterator>> + '_ { self.execution_outcome.receipts().iter() } @@ -173,7 +176,7 @@ impl Chain { /// Returns an iterator over all blocks and their receipts in the chain. pub fn blocks_and_receipts( &self, - ) -> impl Iterator>)> + '_ { + ) -> impl Iterator>)> + '_ { self.blocks_iter().zip(self.block_receipts_iter()) } @@ -219,7 +222,7 @@ impl Chain { } /// Get all receipts for the given block. - pub fn receipts_by_block_hash(&self, block_hash: BlockHash) -> Option> { + pub fn receipts_by_block_hash(&self, block_hash: BlockHash) -> Option> { let num = self.block_number(block_hash)?; self.execution_outcome.receipts_by_block(num).iter().map(Option::as_ref).collect() } @@ -227,7 +230,7 @@ impl Chain { /// Get all receipts with attachment. /// /// Attachment includes block number, block hash, transaction hash and transaction index. - pub fn receipts_with_attachment(&self) -> Vec { + pub fn receipts_with_attachment(&self) -> Vec> { let mut receipt_attach = Vec::with_capacity(self.blocks().len()); for ((block_num, block), receipts) in self.blocks().iter().zip(self.execution_outcome.receipts().iter()) @@ -250,7 +253,7 @@ impl Chain { pub fn append_block( &mut self, block: SealedBlockWithSenders, - execution_outcome: ExecutionOutcome, + execution_outcome: ExecutionOutcome, ) { self.blocks.insert(block.number, block); self.execution_outcome.extend(execution_outcome); @@ -300,7 +303,7 @@ impl Chain { /// /// If chain doesn't have any blocks. #[track_caller] - pub fn split(mut self, split_at: ChainSplitTarget) -> ChainSplit { + pub fn split(mut self, split_at: ChainSplitTarget) -> ChainSplit { let chain_tip = *self.blocks.last_entry().expect("chain is never empty").key(); let block_number = match split_at { ChainSplitTarget::Hash(block_hash) => { @@ -454,11 +457,11 @@ impl IntoIterator for ChainBlocks<'_> { /// Used to hold receipts and their attachment. #[derive(Default, Clone, Debug, PartialEq, Eq)] -pub struct BlockReceipts { +pub struct BlockReceipts { /// Block identifier pub block: BlockNumHash, /// Transaction identifier and receipt. - pub tx_receipts: Vec<(TxHash, Receipt)>, + pub tx_receipts: Vec<(TxHash, T)>, } /// The target block where the chain should be split. @@ -484,26 +487,26 @@ impl From for ChainSplitTarget { /// Result of a split chain. #[derive(Clone, Debug, PartialEq, Eq)] -pub enum ChainSplit { +pub enum ChainSplit { /// Chain is not split. Pending chain is returned. /// Given block split is higher than last block. /// Or in case of split by hash when hash is unknown. - NoSplitPending(Chain), + NoSplitPending(Chain), /// Chain is not split. Canonical chain is returned. /// Given block split is lower than first block. 
- NoSplitCanonical(Chain), + NoSplitCanonical(Chain), /// Chain is split into two: `[canonical]` and `[pending]` /// The target of this chain split [`ChainSplitTarget`] belongs to the `canonical` chain. Split { /// Contains lower block numbers that are considered canonicalized. It ends with /// the [`ChainSplitTarget`] block. The state of this chain is now empty and no longer /// usable. - canonical: Chain, + canonical: Chain, /// Right contains all subsequent blocks __after__ the [`ChainSplitTarget`] that are still /// pending. /// /// The state of the original chain is moved here. - pending: Chain, + pending: Chain, }, } @@ -678,7 +681,7 @@ mod tests { block3.set_parent_hash(block2_hash); - let mut chain1 = + let mut chain1: Chain = Chain { blocks: BTreeMap::from([(1, block1), (2, block2)]), ..Default::default() }; let chain2 = @@ -692,7 +695,7 @@ mod tests { #[test] fn test_number_split() { - let execution_outcome1 = ExecutionOutcome::new( + let execution_outcome1: ExecutionOutcome = ExecutionOutcome::new( BundleState::new( vec![( Address::new([2; 20]), @@ -739,7 +742,8 @@ mod tests { let mut block_state_extended = execution_outcome1; block_state_extended.extend(execution_outcome2); - let chain = Chain::new(vec![block1.clone(), block2.clone()], block_state_extended, None); + let chain: Chain = + Chain::new(vec![block1.clone(), block2.clone()], block_state_extended, None); let (split1_execution_outcome, split2_execution_outcome) = chain.execution_outcome.clone().split_at(2); @@ -838,7 +842,7 @@ mod tests { // Create a Chain object with a BTreeMap of blocks mapped to their block numbers, // including block1_hash and block2_hash, and the execution_outcome - let chain = Chain { + let chain: Chain = Chain { blocks: BTreeMap::from([(10, block1), (11, block2)]), execution_outcome: execution_outcome.clone(), ..Default::default() diff --git a/crates/optimism/evm/src/lib.rs b/crates/optimism/evm/src/lib.rs index 9569c1cb8b54..55dc3fc7debe 100644 --- a/crates/optimism/evm/src/lib.rs +++ b/crates/optimism/evm/src/lib.rs @@ -602,7 +602,7 @@ mod tests { // Create a Chain object with a BTreeMap of blocks mapped to their block numbers, // including block1_hash and block2_hash, and the execution_outcome - let chain = Chain::new([block1, block2], execution_outcome.clone(), None); + let chain: Chain = Chain::new([block1, block2], execution_outcome.clone(), None); // Assert that the proper receipt vector is returned for block1_hash assert_eq!(chain.receipts_by_block_hash(block1_hash), Some(vec![&receipt1])); diff --git a/crates/optimism/node/src/node.rs b/crates/optimism/node/src/node.rs index 238953c9d571..699239a43b2c 100644 --- a/crates/optimism/node/src/node.rs +++ b/crates/optimism/node/src/node.rs @@ -44,7 +44,7 @@ use reth_trie_db::MerklePatriciaTrie; use std::sync::Arc; /// Optimism primitive types. -#[derive(Debug, Default, Clone)] +#[derive(Debug, Default, Clone, PartialEq, Eq)] pub struct OpPrimitives; impl NodePrimitives for OpPrimitives { diff --git a/crates/primitives-traits/src/node.rs b/crates/primitives-traits/src/node.rs index 35c8ea0f6937..c11a19a105a8 100644 --- a/crates/primitives-traits/src/node.rs +++ b/crates/primitives-traits/src/node.rs @@ -3,15 +3,44 @@ use core::fmt; use crate::{BlockBody, FullBlock, FullReceipt, FullSignedTx, FullTxType, MaybeSerde}; /// Configures all the primitive types of the node. 
-pub trait NodePrimitives: Send + Sync + Unpin + Clone + Default + fmt::Debug + 'static { +pub trait NodePrimitives: + Send + Sync + Unpin + Clone + Default + fmt::Debug + PartialEq + Eq + 'static +{ /// Block primitive. - type Block: Send + Sync + Unpin + Clone + Default + fmt::Debug + MaybeSerde + 'static; + type Block: Send + + Sync + + Unpin + + Clone + + Default + + fmt::Debug + + PartialEq + + Eq + + MaybeSerde + + 'static; /// Signed version of the transaction type. - type SignedTx: Send + Sync + Unpin + Clone + Default + fmt::Debug + MaybeSerde + 'static; + type SignedTx: Send + + Sync + + Unpin + + Clone + + Default + + fmt::Debug + + PartialEq + + Eq + + MaybeSerde + + 'static; /// Transaction envelope type ID. - type TxType: Send + Sync + Unpin + Clone + Default + fmt::Debug + 'static; + type TxType: Send + Sync + Unpin + Clone + Default + fmt::Debug + PartialEq + Eq + 'static; /// A receipt. - type Receipt: Send + Sync + Unpin + Clone + Default + fmt::Debug + MaybeSerde + 'static; + type Receipt: Send + + Sync + + Unpin + + Clone + + Default + + fmt::Debug + + PartialEq + + Eq + + MaybeSerde + + 'static; } impl NodePrimitives for () { @@ -22,7 +51,9 @@ impl NodePrimitives for () { } /// Helper trait that sets trait bounds on [`NodePrimitives`]. -pub trait FullNodePrimitives: Send + Sync + Unpin + Clone + Default + fmt::Debug + 'static { +pub trait FullNodePrimitives: + Send + Sync + Unpin + Clone + Default + fmt::Debug + PartialEq + Eq + 'static +{ /// Block primitive. type Block: FullBlock>; /// Signed version of the transaction type. diff --git a/crates/primitives/src/lib.rs b/crates/primitives/src/lib.rs index 45067d60079c..2618f671927f 100644 --- a/crates/primitives/src/lib.rs +++ b/crates/primitives/src/lib.rs @@ -40,7 +40,7 @@ pub use receipt::{ }; pub use reth_primitives_traits::{ logs_bloom, Account, Bytecode, GotExpected, GotExpectedBoxed, HeaderError, Log, LogData, - SealedHeader, StorageEntry, + NodePrimitives, SealedHeader, StorageEntry, }; pub use static_file::StaticFileSegment; @@ -74,3 +74,14 @@ pub mod serde_bincode_compat { transaction::{serde_bincode_compat as transaction, serde_bincode_compat::*}, }; } + +/// Temp helper struct for integrating [`NodePrimitives`]. 
+#[derive(Debug, Clone, Default, PartialEq, Eq)] +pub struct EthPrimitives; + +impl NodePrimitives for EthPrimitives { + type Block = crate::Block; + type SignedTx = crate::TransactionSigned; + type TxType = crate::TxType; + type Receipt = crate::Receipt; +} diff --git a/crates/transaction-pool/src/blobstore/tracker.rs b/crates/transaction-pool/src/blobstore/tracker.rs index f22dcf5706e5..63d6e30eea05 100644 --- a/crates/transaction-pool/src/blobstore/tracker.rs +++ b/crates/transaction-pool/src/blobstore/tracker.rs @@ -178,7 +178,7 @@ mod tests { }; // Extract blocks from the chain - let chain = Chain::new(vec![block1, block2], Default::default(), None); + let chain: Chain = Chain::new(vec![block1, block2], Default::default(), None); let blocks = chain.into_inner().0; // Add new chain blocks to the tracker From 496bf0bf715f0a1fafc198f8d72ccd71913d1a40 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Tue, 19 Nov 2024 11:13:59 +0100 Subject: [PATCH 029/156] chore: bump version 1.1.2 (#12651) --- Cargo.lock | 240 ++++++++++++++++++++++++++--------------------------- Cargo.toml | 2 +- 2 files changed, 121 insertions(+), 121 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 20fc477ea22d..f0d116c29aed 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2596,7 +2596,7 @@ dependencies = [ [[package]] name = "ef-tests" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", "alloy-eips", @@ -5395,7 +5395,7 @@ dependencies = [ [[package]] name = "op-reth" -version = "1.1.1" +version = "1.1.2" dependencies = [ "clap", "reth-cli-util", @@ -6342,7 +6342,7 @@ dependencies = [ [[package]] name = "reth" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", "alloy-eips", @@ -6415,7 +6415,7 @@ dependencies = [ [[package]] name = "reth-basic-payload-builder" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", "alloy-eips", @@ -6443,7 +6443,7 @@ dependencies = [ [[package]] name = "reth-beacon-consensus" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", "alloy-eips", @@ -6497,7 +6497,7 @@ dependencies = [ [[package]] name = "reth-bench" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-eips", "alloy-json-rpc", @@ -6533,7 +6533,7 @@ dependencies = [ [[package]] name = "reth-blockchain-tree" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", "alloy-eips", @@ -6571,7 +6571,7 @@ dependencies = [ [[package]] name = "reth-blockchain-tree-api" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-eips", "alloy-primitives", @@ -6584,7 +6584,7 @@ dependencies = [ [[package]] name = "reth-chain-state" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", "alloy-eips", @@ -6613,7 +6613,7 @@ dependencies = [ [[package]] name = "reth-chainspec" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-chains", "alloy-consensus", @@ -6634,7 +6634,7 @@ dependencies = [ [[package]] name = "reth-cli" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-genesis", "clap", @@ -6647,7 +6647,7 @@ dependencies = [ [[package]] name = "reth-cli-commands" -version = "1.1.1" +version = "1.1.2" dependencies = [ "ahash", "alloy-consensus", @@ -6714,7 +6714,7 @@ dependencies = [ [[package]] name = "reth-cli-runner" -version = "1.1.1" +version = "1.1.2" dependencies = [ "reth-tasks", "tokio", @@ -6723,7 +6723,7 @@ dependencies = [ [[package]] name = "reth-cli-util" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-eips", "alloy-primitives", @@ -6741,7 +6741,7 @@ 
dependencies = [ [[package]] name = "reth-codecs" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", "alloy-eips", @@ -6763,7 +6763,7 @@ dependencies = [ [[package]] name = "reth-codecs-derive" -version = "1.1.1" +version = "1.1.2" dependencies = [ "convert_case", "proc-macro2", @@ -6774,7 +6774,7 @@ dependencies = [ [[package]] name = "reth-config" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-primitives", "eyre", @@ -6790,7 +6790,7 @@ dependencies = [ [[package]] name = "reth-consensus" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", "alloy-eips", @@ -6803,7 +6803,7 @@ dependencies = [ [[package]] name = "reth-consensus-common" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", "alloy-eips", @@ -6819,7 +6819,7 @@ dependencies = [ [[package]] name = "reth-consensus-debug-client" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", "alloy-eips", @@ -6842,7 +6842,7 @@ dependencies = [ [[package]] name = "reth-db" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", "alloy-primitives", @@ -6883,7 +6883,7 @@ dependencies = [ [[package]] name = "reth-db-api" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", "alloy-genesis", @@ -6911,7 +6911,7 @@ dependencies = [ [[package]] name = "reth-db-common" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", "alloy-genesis", @@ -6940,7 +6940,7 @@ dependencies = [ [[package]] name = "reth-db-models" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-eips", "alloy-primitives", @@ -6957,7 +6957,7 @@ dependencies = [ [[package]] name = "reth-discv4" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-primitives", "alloy-rlp", @@ -6984,7 +6984,7 @@ dependencies = [ [[package]] name = "reth-discv5" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-primitives", "alloy-rlp", @@ -7008,7 +7008,7 @@ dependencies = [ [[package]] name = "reth-dns-discovery" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-chains", "alloy-primitives", @@ -7036,7 +7036,7 @@ dependencies = [ [[package]] name = "reth-downloaders" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", "alloy-eips", @@ -7075,7 +7075,7 @@ dependencies = [ [[package]] name = "reth-e2e-test-utils" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", "alloy-eips", @@ -7113,7 +7113,7 @@ dependencies = [ [[package]] name = "reth-ecies" -version = "1.1.1" +version = "1.1.2" dependencies = [ "aes", "alloy-primitives", @@ -7143,7 +7143,7 @@ dependencies = [ [[package]] name = "reth-engine-local" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-primitives", "alloy-rpc-types-engine", @@ -7174,7 +7174,7 @@ dependencies = [ [[package]] name = "reth-engine-primitives" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-primitives", "alloy-rpc-types-engine", @@ -7192,7 +7192,7 @@ dependencies = [ [[package]] name = "reth-engine-service" -version = "1.1.1" +version = "1.1.2" dependencies = [ "futures", "pin-project", @@ -7221,7 +7221,7 @@ dependencies = [ [[package]] name = "reth-engine-tree" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", "alloy-eips", @@ -7272,7 +7272,7 @@ dependencies = [ [[package]] name = "reth-engine-util" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", "alloy-eips", @@ -7303,7 +7303,7 @@ dependencies = [ [[package]] name = "reth-errors" -version = "1.1.1" +version = 
"1.1.2" dependencies = [ "reth-blockchain-tree-api", "reth-consensus", @@ -7315,7 +7315,7 @@ dependencies = [ [[package]] name = "reth-eth-wire" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-chains", "alloy-eips", @@ -7352,7 +7352,7 @@ dependencies = [ [[package]] name = "reth-eth-wire-types" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-chains", "alloy-consensus", @@ -7376,7 +7376,7 @@ dependencies = [ [[package]] name = "reth-ethereum-cli" -version = "1.1.1" +version = "1.1.2" dependencies = [ "clap", "eyre", @@ -7387,7 +7387,7 @@ dependencies = [ [[package]] name = "reth-ethereum-consensus" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", "alloy-eips", @@ -7402,7 +7402,7 @@ dependencies = [ [[package]] name = "reth-ethereum-engine-primitives" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-eips", "alloy-primitives", @@ -7421,7 +7421,7 @@ dependencies = [ [[package]] name = "reth-ethereum-forks" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-chains", "alloy-consensus", @@ -7441,7 +7441,7 @@ dependencies = [ [[package]] name = "reth-ethereum-payload-builder" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", "alloy-eips", @@ -7467,7 +7467,7 @@ dependencies = [ [[package]] name = "reth-etl" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-primitives", "rayon", @@ -7477,7 +7477,7 @@ dependencies = [ [[package]] name = "reth-evm" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", "alloy-eips", @@ -7505,7 +7505,7 @@ dependencies = [ [[package]] name = "reth-evm-ethereum" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", "alloy-eips", @@ -7528,7 +7528,7 @@ dependencies = [ [[package]] name = "reth-execution-errors" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-eips", "alloy-primitives", @@ -7543,7 +7543,7 @@ dependencies = [ [[package]] name = "reth-execution-types" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-eips", "alloy-primitives", @@ -7561,7 +7561,7 @@ dependencies = [ [[package]] name = "reth-exex" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", "alloy-eips", @@ -7604,7 +7604,7 @@ dependencies = [ [[package]] name = "reth-exex-test-utils" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-eips", "eyre", @@ -7637,7 +7637,7 @@ dependencies = [ [[package]] name = "reth-exex-types" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-eips", "alloy-primitives", @@ -7653,7 +7653,7 @@ dependencies = [ [[package]] name = "reth-fs-util" -version = "1.1.1" +version = "1.1.2" dependencies = [ "serde", "serde_json", @@ -7662,7 +7662,7 @@ dependencies = [ [[package]] name = "reth-invalid-block-hooks" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", "alloy-primitives", @@ -7687,7 +7687,7 @@ dependencies = [ [[package]] name = "reth-ipc" -version = "1.1.1" +version = "1.1.2" dependencies = [ "async-trait", "bytes", @@ -7709,7 +7709,7 @@ dependencies = [ [[package]] name = "reth-libmdbx" -version = "1.1.1" +version = "1.1.2" dependencies = [ "bitflags 2.6.0", "byteorder", @@ -7730,7 +7730,7 @@ dependencies = [ [[package]] name = "reth-mdbx-sys" -version = "1.1.1" +version = "1.1.2" dependencies = [ "bindgen", "cc", @@ -7738,7 +7738,7 @@ dependencies = [ [[package]] name = "reth-metrics" -version = "1.1.1" +version = "1.1.2" dependencies = [ "futures", "metrics", @@ -7749,14 +7749,14 @@ dependencies = [ [[package]] name = 
"reth-net-banlist" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-primitives", ] [[package]] name = "reth-net-nat" -version = "1.1.1" +version = "1.1.2" dependencies = [ "futures-util", "if-addrs", @@ -7770,7 +7770,7 @@ dependencies = [ [[package]] name = "reth-network" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", "alloy-eips", @@ -7832,7 +7832,7 @@ dependencies = [ [[package]] name = "reth-network-api" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-primitives", "alloy-rpc-types-admin", @@ -7854,7 +7854,7 @@ dependencies = [ [[package]] name = "reth-network-p2p" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", "alloy-eips", @@ -7876,7 +7876,7 @@ dependencies = [ [[package]] name = "reth-network-peers" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-primitives", "alloy-rlp", @@ -7892,7 +7892,7 @@ dependencies = [ [[package]] name = "reth-network-types" -version = "1.1.1" +version = "1.1.2" dependencies = [ "humantime-serde", "reth-ethereum-forks", @@ -7905,7 +7905,7 @@ dependencies = [ [[package]] name = "reth-nippy-jar" -version = "1.1.1" +version = "1.1.2" dependencies = [ "anyhow", "bincode", @@ -7923,7 +7923,7 @@ dependencies = [ [[package]] name = "reth-node-api" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", "alloy-rpc-types-engine", @@ -7944,7 +7944,7 @@ dependencies = [ [[package]] name = "reth-node-builder" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", "alloy-primitives", @@ -8009,7 +8009,7 @@ dependencies = [ [[package]] name = "reth-node-core" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", "alloy-eips", @@ -8059,7 +8059,7 @@ dependencies = [ [[package]] name = "reth-node-ethereum" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", "alloy-contract", @@ -8104,7 +8104,7 @@ dependencies = [ [[package]] name = "reth-node-events" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", "alloy-eips", @@ -8127,7 +8127,7 @@ dependencies = [ [[package]] name = "reth-node-metrics" -version = "1.1.1" +version = "1.1.2" dependencies = [ "eyre", "http", @@ -8153,7 +8153,7 @@ dependencies = [ [[package]] name = "reth-node-types" -version = "1.1.1" +version = "1.1.2" dependencies = [ "reth-chainspec", "reth-db-api", @@ -8164,7 +8164,7 @@ dependencies = [ [[package]] name = "reth-optimism-chainspec" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-chains", "alloy-consensus", @@ -8184,7 +8184,7 @@ dependencies = [ [[package]] name = "reth-optimism-cli" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", "alloy-eips", @@ -8234,7 +8234,7 @@ dependencies = [ [[package]] name = "reth-optimism-consensus" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", "alloy-primitives", @@ -8251,7 +8251,7 @@ dependencies = [ [[package]] name = "reth-optimism-evm" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", "alloy-eips", @@ -8278,7 +8278,7 @@ dependencies = [ [[package]] name = "reth-optimism-forks" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-chains", "alloy-primitives", @@ -8289,7 +8289,7 @@ dependencies = [ [[package]] name = "reth-optimism-node" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", "alloy-eips", @@ -8340,7 +8340,7 @@ dependencies = [ [[package]] name = "reth-optimism-payload-builder" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", 
"alloy-eips", @@ -8377,7 +8377,7 @@ dependencies = [ [[package]] name = "reth-optimism-primitives" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", "alloy-eips", @@ -8395,7 +8395,7 @@ dependencies = [ [[package]] name = "reth-optimism-rpc" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", "alloy-eips", @@ -8439,7 +8439,7 @@ dependencies = [ [[package]] name = "reth-optimism-storage" -version = "1.1.1" +version = "1.1.2" dependencies = [ "reth-codecs", "reth-db-api", @@ -8450,7 +8450,7 @@ dependencies = [ [[package]] name = "reth-payload-builder" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", "alloy-primitives", @@ -8472,7 +8472,7 @@ dependencies = [ [[package]] name = "reth-payload-builder-primitives" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-rpc-types-engine", "async-trait", @@ -8485,7 +8485,7 @@ dependencies = [ [[package]] name = "reth-payload-primitives" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-eips", "alloy-primitives", @@ -8503,7 +8503,7 @@ dependencies = [ [[package]] name = "reth-payload-util" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", "alloy-primitives", @@ -8512,7 +8512,7 @@ dependencies = [ [[package]] name = "reth-payload-validator" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-rpc-types", "reth-chainspec", @@ -8522,7 +8522,7 @@ dependencies = [ [[package]] name = "reth-primitives" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", "alloy-eips", @@ -8569,7 +8569,7 @@ dependencies = [ [[package]] name = "reth-primitives-traits" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", "alloy-eips", @@ -8597,7 +8597,7 @@ dependencies = [ [[package]] name = "reth-provider" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", "alloy-eips", @@ -8646,7 +8646,7 @@ dependencies = [ [[package]] name = "reth-prune" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-primitives", "assert_matches", @@ -8676,7 +8676,7 @@ dependencies = [ [[package]] name = "reth-prune-types" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-primitives", "arbitrary", @@ -8696,7 +8696,7 @@ dependencies = [ [[package]] name = "reth-revm" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", "alloy-eips", @@ -8714,7 +8714,7 @@ dependencies = [ [[package]] name = "reth-rpc" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", "alloy-dyn-abi", @@ -8786,7 +8786,7 @@ dependencies = [ [[package]] name = "reth-rpc-api" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-eips", "alloy-json-rpc", @@ -8810,7 +8810,7 @@ dependencies = [ [[package]] name = "reth-rpc-api-testing-util" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-eips", "alloy-primitives", @@ -8829,7 +8829,7 @@ dependencies = [ [[package]] name = "reth-rpc-builder" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", "alloy-eips", @@ -8880,7 +8880,7 @@ dependencies = [ [[package]] name = "reth-rpc-engine-api" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-eips", "alloy-primitives", @@ -8918,7 +8918,7 @@ dependencies = [ [[package]] name = "reth-rpc-eth-api" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", "alloy-dyn-abi", @@ -8960,7 +8960,7 @@ dependencies = [ [[package]] name = "reth-rpc-eth-types" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", "alloy-eips", @@ 
-9002,7 +9002,7 @@ dependencies = [ [[package]] name = "reth-rpc-layer" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-rpc-types-engine", "http", @@ -9019,7 +9019,7 @@ dependencies = [ [[package]] name = "reth-rpc-server-types" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-eips", "alloy-primitives", @@ -9034,7 +9034,7 @@ dependencies = [ [[package]] name = "reth-rpc-types-compat" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", "alloy-eips", @@ -9052,7 +9052,7 @@ dependencies = [ [[package]] name = "reth-stages" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", "alloy-primitives", @@ -9102,7 +9102,7 @@ dependencies = [ [[package]] name = "reth-stages-api" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-primitives", "aquamarine", @@ -9130,7 +9130,7 @@ dependencies = [ [[package]] name = "reth-stages-types" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-primitives", "arbitrary", @@ -9147,7 +9147,7 @@ dependencies = [ [[package]] name = "reth-static-file" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-primitives", "assert_matches", @@ -9169,7 +9169,7 @@ dependencies = [ [[package]] name = "reth-static-file-types" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-primitives", "clap", @@ -9180,7 +9180,7 @@ dependencies = [ [[package]] name = "reth-storage-api" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", "alloy-eips", @@ -9201,7 +9201,7 @@ dependencies = [ [[package]] name = "reth-storage-errors" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-eips", "alloy-primitives", @@ -9213,7 +9213,7 @@ dependencies = [ [[package]] name = "reth-tasks" -version = "1.1.1" +version = "1.1.2" dependencies = [ "auto_impl", "dyn-clone", @@ -9230,7 +9230,7 @@ dependencies = [ [[package]] name = "reth-testing-utils" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", "alloy-eips", @@ -9243,7 +9243,7 @@ dependencies = [ [[package]] name = "reth-tokio-util" -version = "1.1.1" +version = "1.1.2" dependencies = [ "tokio", "tokio-stream", @@ -9252,7 +9252,7 @@ dependencies = [ [[package]] name = "reth-tracing" -version = "1.1.1" +version = "1.1.2" dependencies = [ "clap", "eyre", @@ -9266,7 +9266,7 @@ dependencies = [ [[package]] name = "reth-transaction-pool" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", "alloy-eips", @@ -9313,7 +9313,7 @@ dependencies = [ [[package]] name = "reth-trie" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", "alloy-primitives", @@ -9343,7 +9343,7 @@ dependencies = [ [[package]] name = "reth-trie-common" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", "alloy-genesis", @@ -9367,7 +9367,7 @@ dependencies = [ [[package]] name = "reth-trie-db" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-consensus", "alloy-primitives", @@ -9396,7 +9396,7 @@ dependencies = [ [[package]] name = "reth-trie-parallel" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-primitives", "alloy-rlp", @@ -9423,7 +9423,7 @@ dependencies = [ [[package]] name = "reth-trie-sparse" -version = "1.1.1" +version = "1.1.2" dependencies = [ "alloy-primitives", "alloy-rlp", diff --git a/Cargo.toml b/Cargo.toml index 2f2f9aa884ac..58cdd1f8ca7b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,5 +1,5 @@ [workspace.package] -version = "1.1.1" +version = "1.1.2" edition = "2021" rust-version = "1.82" license = "MIT OR Apache-2.0" From 
206ba29f0b9e74ad77a2b0a3f5ebec07bc8b7462 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Tue, 19 Nov 2024 11:06:33 +0100 Subject: [PATCH 030/156] tx-pool: add `all` method for `AllPoolTransactions` (#12643) --- crates/transaction-pool/src/traits.rs | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/crates/transaction-pool/src/traits.rs b/crates/transaction-pool/src/traits.rs index 6c247a84cdb4..a7e9010d693b 100644 --- a/crates/transaction-pool/src/traits.rs +++ b/crates/transaction-pool/src/traits.rs @@ -551,6 +551,11 @@ impl AllPoolTransactions { pub fn queued_recovered(&self) -> impl Iterator + '_ { self.queued.iter().map(|tx| tx.transaction.clone().into()) } + + /// Returns an iterator over all transactions, both pending and queued. + pub fn all(&self) -> impl Iterator + '_ { + self.pending.iter().chain(self.queued.iter()).map(|tx| tx.transaction.clone().into()) + } } impl Default for AllPoolTransactions { From b78f20f5cb0c79ada804c70c162b76fc70bee2c0 Mon Sep 17 00:00:00 2001 From: Federico Gimenez Date: Tue, 19 Nov 2024 11:07:30 +0100 Subject: [PATCH 031/156] fix: do not delegate is_optimism check for Ethereum ChainSpec (#12650) --- crates/chainspec/src/api.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/chainspec/src/api.rs b/crates/chainspec/src/api.rs index f0cc31bb44dc..94b4285f92dd 100644 --- a/crates/chainspec/src/api.rs +++ b/crates/chainspec/src/api.rs @@ -109,6 +109,6 @@ impl EthChainSpec for ChainSpec { } fn is_optimism(&self) -> bool { - self.chain.is_optimism() + false } } From e924bdab37dd05f66395f5f316bb3e318337c8fb Mon Sep 17 00:00:00 2001 From: "0xriazaka.eth" <168359025+0xriazaka@users.noreply.github.com> Date: Tue, 19 Nov 2024 11:42:40 +0100 Subject: [PATCH 032/156] Header validator (#12648) Co-authored-by: Arsenii Kulikov --- crates/consensus/consensus/src/lib.rs | 73 ++++++++------- crates/consensus/consensus/src/noop.rs | 6 +- crates/consensus/consensus/src/test_utils.rs | 42 ++++----- crates/ethereum/consensus/src/lib.rs | 46 +++++----- .../src/headers/reverse_headers.rs | 2 +- crates/net/p2p/src/headers/downloader.rs | 4 +- crates/net/p2p/src/test_utils/headers.rs | 2 +- crates/optimism/consensus/src/lib.rs | 88 ++++++++++--------- 8 files changed, 139 insertions(+), 124 deletions(-) diff --git a/crates/consensus/consensus/src/lib.rs b/crates/consensus/consensus/src/lib.rs index ec296f3ed499..e059305911f6 100644 --- a/crates/consensus/consensus/src/lib.rs +++ b/crates/consensus/consensus/src/lib.rs @@ -46,7 +46,42 @@ impl<'a> PostExecutionInput<'a> { /// Consensus is a protocol that chooses canonical chain. #[auto_impl::auto_impl(&, Arc)] -pub trait Consensus: Debug + Send + Sync { +pub trait Consensus: HeaderValidator + Debug + Send + Sync { + /// Ensures that body field values match the header. + fn validate_body_against_header( + &self, + body: &B, + header: &SealedHeader, + ) -> Result<(), ConsensusError>; + + /// Validate a block disregarding world state, i.e. things that can be checked before sender + /// recovery and execution. + /// + /// See the Yellow Paper sections 4.3.2 "Holistic Validity", 4.3.4 "Block Header Validity", and + /// 11.1 "Ommer Validation". + /// + /// **This should not be called for the genesis block**. + /// + /// Note: validating blocks does not include other validations of the Consensus + fn validate_block_pre_execution(&self, block: &SealedBlock) + -> Result<(), ConsensusError>; + + /// Validate a block considering world state, i.e. 
things that can not be checked before + /// execution. + /// + /// See the Yellow Paper sections 4.3.2 "Holistic Validity". + /// + /// Note: validating blocks does not include other validations of the Consensus + fn validate_block_post_execution( + &self, + block: &BlockWithSenders, + input: PostExecutionInput<'_>, + ) -> Result<(), ConsensusError>; +} + +/// HeaderValidator is a protocol that validates headers and their relationships. +#[auto_impl::auto_impl(&, Arc)] +pub trait HeaderValidator: Debug + Send + Sync { /// Validate if header is correct and follows consensus specification. /// /// This is called on standalone header to check if all hashes are correct. @@ -60,7 +95,8 @@ pub trait Consensus: Debug + Send + Sync { /// /// **This should not be called for the genesis block**. /// - /// Note: Validating header against its parent does not include other Consensus validations. + /// Note: Validating header against its parent does not include other HeaderValidator + /// validations. fn validate_header_against_parent( &self, header: &SealedHeader, @@ -99,43 +135,12 @@ pub trait Consensus: Debug + Send + Sync { /// /// Some consensus engines may want to do additional checks here. /// - /// Note: validating headers with TD does not include other Consensus validation. + /// Note: validating headers with TD does not include other HeaderValidator validation. fn validate_header_with_total_difficulty( &self, header: &H, total_difficulty: U256, ) -> Result<(), ConsensusError>; - - /// Ensures that body field values match the header. - fn validate_body_against_header( - &self, - body: &B, - header: &SealedHeader, - ) -> Result<(), ConsensusError>; - - /// Validate a block disregarding world state, i.e. things that can be checked before sender - /// recovery and execution. - /// - /// See the Yellow Paper sections 4.3.2 "Holistic Validity", 4.3.4 "Block Header Validity", and - /// 11.1 "Ommer Validation". - /// - /// **This should not be called for the genesis block**. - /// - /// Note: validating blocks does not include other validations of the Consensus - fn validate_block_pre_execution(&self, block: &SealedBlock) - -> Result<(), ConsensusError>; - - /// Validate a block considering world state, i.e. things that can not be checked before - /// execution. - /// - /// See the Yellow Paper sections 4.3.2 "Holistic Validity". 
- /// - /// Note: validating blocks does not include other validations of the Consensus - fn validate_block_post_execution( - &self, - block: &BlockWithSenders, - input: PostExecutionInput<'_>, - ) -> Result<(), ConsensusError>; } /// Consensus Errors diff --git a/crates/consensus/consensus/src/noop.rs b/crates/consensus/consensus/src/noop.rs index 9b72f89b176a..6d12af08d519 100644 --- a/crates/consensus/consensus/src/noop.rs +++ b/crates/consensus/consensus/src/noop.rs @@ -1,4 +1,4 @@ -use crate::{Consensus, ConsensusError, PostExecutionInput}; +use crate::{Consensus, ConsensusError, HeaderValidator, PostExecutionInput}; use alloy_primitives::U256; use reth_primitives::{BlockWithSenders, SealedBlock, SealedHeader}; @@ -7,7 +7,7 @@ use reth_primitives::{BlockWithSenders, SealedBlock, SealedHeader}; #[non_exhaustive] pub struct NoopConsensus; -impl Consensus for NoopConsensus { +impl HeaderValidator for NoopConsensus { fn validate_header(&self, _header: &SealedHeader) -> Result<(), ConsensusError> { Ok(()) } @@ -27,7 +27,9 @@ impl Consensus for NoopConsensus { ) -> Result<(), ConsensusError> { Ok(()) } +} +impl Consensus for NoopConsensus { fn validate_body_against_header( &self, _body: &B, diff --git a/crates/consensus/consensus/src/test_utils.rs b/crates/consensus/consensus/src/test_utils.rs index 52926ec323e7..ba683dd255f8 100644 --- a/crates/consensus/consensus/src/test_utils.rs +++ b/crates/consensus/consensus/src/test_utils.rs @@ -1,4 +1,4 @@ -use crate::{Consensus, ConsensusError, PostExecutionInput}; +use crate::{Consensus, ConsensusError, HeaderValidator, PostExecutionInput}; use alloy_primitives::U256; use core::sync::atomic::{AtomicBool, Ordering}; use reth_primitives::{BlockWithSenders, SealedBlock, SealedHeader}; @@ -47,18 +47,21 @@ impl TestConsensus { } impl Consensus for TestConsensus { - fn validate_header(&self, _header: &SealedHeader) -> Result<(), ConsensusError> { - if self.fail_validation() { + fn validate_body_against_header( + &self, + _body: &B, + _header: &SealedHeader, + ) -> Result<(), ConsensusError> { + if self.fail_body_against_header() { Err(ConsensusError::BaseFeeMissing) } else { Ok(()) } } - fn validate_header_against_parent( + fn validate_block_pre_execution( &self, - _header: &SealedHeader, - _parent: &SealedHeader, + _block: &SealedBlock, ) -> Result<(), ConsensusError> { if self.fail_validation() { Err(ConsensusError::BaseFeeMissing) @@ -67,10 +70,10 @@ impl Consensus for TestConsensus { } } - fn validate_header_with_total_difficulty( + fn validate_block_post_execution( &self, - _header: &H, - _total_difficulty: U256, + _block: &BlockWithSenders, + _input: PostExecutionInput<'_>, ) -> Result<(), ConsensusError> { if self.fail_validation() { Err(ConsensusError::BaseFeeMissing) @@ -78,22 +81,21 @@ impl Consensus for TestConsensus { Ok(()) } } +} - fn validate_body_against_header( - &self, - _body: &B, - _header: &SealedHeader, - ) -> Result<(), ConsensusError> { - if self.fail_body_against_header() { +impl HeaderValidator for TestConsensus { + fn validate_header(&self, _header: &SealedHeader) -> Result<(), ConsensusError> { + if self.fail_validation() { Err(ConsensusError::BaseFeeMissing) } else { Ok(()) } } - fn validate_block_pre_execution( + fn validate_header_against_parent( &self, - _block: &SealedBlock, + _header: &SealedHeader, + _parent: &SealedHeader, ) -> Result<(), ConsensusError> { if self.fail_validation() { Err(ConsensusError::BaseFeeMissing) @@ -102,10 +104,10 @@ impl Consensus for TestConsensus { } } - fn validate_block_post_execution( + 
fn validate_header_with_total_difficulty( &self, - _block: &BlockWithSenders, - _input: PostExecutionInput<'_>, + _header: &H, + _total_difficulty: U256, ) -> Result<(), ConsensusError> { if self.fail_validation() { Err(ConsensusError::BaseFeeMissing) diff --git a/crates/ethereum/consensus/src/lib.rs b/crates/ethereum/consensus/src/lib.rs index 7198a7036725..ffabe5b1952c 100644 --- a/crates/ethereum/consensus/src/lib.rs +++ b/crates/ethereum/consensus/src/lib.rs @@ -11,7 +11,7 @@ use alloy_consensus::{Header, EMPTY_OMMER_ROOT_HASH}; use alloy_primitives::U256; use reth_chainspec::{EthChainSpec, EthereumHardfork, EthereumHardforks}; -use reth_consensus::{Consensus, ConsensusError, PostExecutionInput}; +use reth_consensus::{Consensus, ConsensusError, HeaderValidator, PostExecutionInput}; use reth_consensus_common::validation::{ validate_4844_header_standalone, validate_against_parent_4844, validate_against_parent_eip1559_base_fee, validate_against_parent_hash_number, @@ -92,6 +92,30 @@ impl EthBeaconConsensus impl Consensus for EthBeaconConsensus +{ + fn validate_body_against_header( + &self, + body: &BlockBody, + header: &SealedHeader, + ) -> Result<(), ConsensusError> { + validate_body_against_header(body, header) + } + + fn validate_block_pre_execution(&self, block: &SealedBlock) -> Result<(), ConsensusError> { + validate_block_pre_execution(block, &self.chain_spec) + } + + fn validate_block_post_execution( + &self, + block: &BlockWithSenders, + input: PostExecutionInput<'_>, + ) -> Result<(), ConsensusError> { + validate_block_post_execution(block, &self.chain_spec, input.receipts, input.requests) + } +} + +impl HeaderValidator + for EthBeaconConsensus { fn validate_header(&self, header: &SealedHeader) -> Result<(), ConsensusError> { validate_header_gas(header)?; @@ -210,26 +234,6 @@ impl Consensu Ok(()) } - - fn validate_body_against_header( - &self, - body: &BlockBody, - header: &SealedHeader, - ) -> Result<(), ConsensusError> { - validate_body_against_header(body, header) - } - - fn validate_block_pre_execution(&self, block: &SealedBlock) -> Result<(), ConsensusError> { - validate_block_pre_execution(block, &self.chain_spec) - } - - fn validate_block_post_execution( - &self, - block: &BlockWithSenders, - input: PostExecutionInput<'_>, - ) -> Result<(), ConsensusError> { - validate_block_post_execution(block, &self.chain_spec, input.receipts, input.requests) - } } #[cfg(test)] diff --git a/crates/net/downloaders/src/headers/reverse_headers.rs b/crates/net/downloaders/src/headers/reverse_headers.rs index 0f8111e43958..2d79e0a7af6d 100644 --- a/crates/net/downloaders/src/headers/reverse_headers.rs +++ b/crates/net/downloaders/src/headers/reverse_headers.rs @@ -9,7 +9,7 @@ use futures::{stream::Stream, FutureExt}; use futures_util::{stream::FuturesUnordered, StreamExt}; use rayon::prelude::*; use reth_config::config::HeadersConfig; -use reth_consensus::Consensus; +use reth_consensus::{Consensus, HeaderValidator}; use reth_network_p2p::{ error::{DownloadError, DownloadResult, PeerRequestResult}, headers::{ diff --git a/crates/net/p2p/src/headers/downloader.rs b/crates/net/p2p/src/headers/downloader.rs index f02d9461fc1b..03ab467bafb3 100644 --- a/crates/net/p2p/src/headers/downloader.rs +++ b/crates/net/p2p/src/headers/downloader.rs @@ -4,7 +4,7 @@ use alloy_consensus::BlockHeader; use alloy_eips::BlockHashOrNumber; use alloy_primitives::B256; use futures::Stream; -use reth_consensus::Consensus; +use reth_consensus::HeaderValidator; use reth_primitives::SealedHeader; use 
reth_primitives_traits::BlockWithParent; /// A downloader capable of fetching and yielding block headers. @@ -83,7 +83,7 @@ impl SyncTarget { /// /// Returns Ok(false) if the pub fn validate_header_download( - consensus: &dyn Consensus, + consensus: &dyn HeaderValidator, header: &SealedHeader, parent: &SealedHeader, ) -> DownloadResult<()> { diff --git a/crates/net/p2p/src/test_utils/headers.rs b/crates/net/p2p/src/test_utils/headers.rs index bc5262abef4e..5809ad6bdd40 100644 --- a/crates/net/p2p/src/test_utils/headers.rs +++ b/crates/net/p2p/src/test_utils/headers.rs @@ -147,7 +147,7 @@ impl Stream for TestDownload { let empty: SealedHeader = SealedHeader::default(); if let Err(error) = - Consensus::<_>::validate_header_against_parent(&this.consensus, &empty, &empty) + >::validate_header_against_parent(&this.consensus, &empty, &empty) { this.done = true; return Poll::Ready(Some(Err(DownloadError::HeaderValidation { diff --git a/crates/optimism/consensus/src/lib.rs b/crates/optimism/consensus/src/lib.rs index 72f67dcb4501..e8b7959dd277 100644 --- a/crates/optimism/consensus/src/lib.rs +++ b/crates/optimism/consensus/src/lib.rs @@ -12,7 +12,7 @@ use alloy_consensus::{Header, EMPTY_OMMER_ROOT_HASH}; use alloy_primitives::{B64, U256}; use reth_chainspec::EthereumHardforks; -use reth_consensus::{Consensus, ConsensusError, PostExecutionInput}; +use reth_consensus::{Consensus, ConsensusError, HeaderValidator, PostExecutionInput}; use reth_consensus_common::validation::{ validate_against_parent_4844, validate_against_parent_eip1559_base_fee, validate_against_parent_hash_number, validate_against_parent_timestamp, @@ -47,6 +47,50 @@ impl OpBeaconConsensus { } impl Consensus for OpBeaconConsensus { + fn validate_body_against_header( + &self, + body: &BlockBody, + header: &SealedHeader, + ) -> Result<(), ConsensusError> { + validate_body_against_header(body, header) + } + + fn validate_block_pre_execution(&self, block: &SealedBlock) -> Result<(), ConsensusError> { + // Check ommers hash + let ommers_hash = reth_primitives::proofs::calculate_ommers_root(&block.body.ommers); + if block.header.ommers_hash != ommers_hash { + return Err(ConsensusError::BodyOmmersHashDiff( + GotExpected { got: ommers_hash, expected: block.header.ommers_hash }.into(), + )) + } + + // Check transaction root + if let Err(error) = block.ensure_transaction_root_valid() { + return Err(ConsensusError::BodyTransactionRootDiff(error.into())) + } + + // EIP-4895: Beacon chain push withdrawals as operations + if self.chain_spec.is_shanghai_active_at_timestamp(block.timestamp) { + validate_shanghai_withdrawals(block)?; + } + + if self.chain_spec.is_cancun_active_at_timestamp(block.timestamp) { + validate_cancun_gas(block)?; + } + + Ok(()) + } + + fn validate_block_post_execution( + &self, + block: &BlockWithSenders, + input: PostExecutionInput<'_>, + ) -> Result<(), ConsensusError> { + validate_block_post_execution(block, &self.chain_spec, input.receipts) + } +} + +impl HeaderValidator for OpBeaconConsensus { fn validate_header(&self, header: &SealedHeader) -> Result<(), ConsensusError> { validate_header_gas(header)?; validate_header_base_fee(header, &self.chain_spec) @@ -118,46 +162,4 @@ impl Consensus for OpBeaconConsensus { Ok(()) } - - fn validate_body_against_header( - &self, - body: &BlockBody, - header: &SealedHeader, - ) -> Result<(), ConsensusError> { - validate_body_against_header(body, header) - } - - fn validate_block_pre_execution(&self, block: &SealedBlock) -> Result<(), ConsensusError> { - // Check ommers hash - let 
ommers_hash = reth_primitives::proofs::calculate_ommers_root(&block.body.ommers); - if block.header.ommers_hash != ommers_hash { - return Err(ConsensusError::BodyOmmersHashDiff( - GotExpected { got: ommers_hash, expected: block.header.ommers_hash }.into(), - )) - } - - // Check transaction root - if let Err(error) = block.ensure_transaction_root_valid() { - return Err(ConsensusError::BodyTransactionRootDiff(error.into())) - } - - // EIP-4895: Beacon chain push withdrawals as operations - if self.chain_spec.is_shanghai_active_at_timestamp(block.timestamp) { - validate_shanghai_withdrawals(block)?; - } - - if self.chain_spec.is_cancun_active_at_timestamp(block.timestamp) { - validate_cancun_gas(block)?; - } - - Ok(()) - } - - fn validate_block_post_execution( - &self, - block: &BlockWithSenders, - input: PostExecutionInput<'_>, - ) -> Result<(), ConsensusError> { - validate_block_post_execution(block, &self.chain_spec, input.receipts) - } } From 0db10a13a53bf477777f4af7fba2bed805c915cf Mon Sep 17 00:00:00 2001 From: Cypher Pepe <125112044+cypherpepe@users.noreply.github.com> Date: Tue, 19 Nov 2024 15:05:30 +0300 Subject: [PATCH 033/156] fix: typos in troubleshooting.md (#12652) --- book/run/troubleshooting.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/book/run/troubleshooting.md b/book/run/troubleshooting.md index 7368b6631abb..cab39cb1165c 100644 --- a/book/run/troubleshooting.md +++ b/book/run/troubleshooting.md @@ -8,7 +8,7 @@ This page tries to answer how to deal with the most popular issues. If you're: 1. Running behind the tip -2. Have slow canonical commit time according to the `Canonical Commit Latency time` chart on [Grafana dashboard](./observability.md#prometheus--grafana) (more than 2-3 seconds) +2. Have slow canonical commit time according to the `Canonical Commit Latency Time` chart on [Grafana dashboard](./observability.md#prometheus--grafana) (more than 2-3 seconds) 3. Seeing warnings in your logs such as ```console 2023-11-08T15:17:24.789731Z WARN providers::db: Transaction insertion took too long block_number=18528075 tx_num=2150227643 hash=0xb7de1d6620efbdd3aa8547c47a0ff09a7fd3e48ba3fd2c53ce94c6683ed66e7c elapsed=6.793759034s @@ -48,7 +48,7 @@ equal to the [freshly synced node](../installation/installation.md#hardware-requ mv reth_compact.dat $(reth db path)/mdbx.dat ``` 7. Start Reth -8. Confirm that the values on the `Freelist` chart is near zero and the values on the `Canonical Commit Latency time` chart +8. Confirm that the values on the `Freelist` chart are near zero and the values on the `Canonical Commit Latency Time` chart is less than 1 second. 9. 
Delete original database ```bash From da77ffc9515dd575bab6238e65e089053072fe67 Mon Sep 17 00:00:00 2001 From: Emilia Hane Date: Tue, 19 Nov 2024 13:53:34 +0100 Subject: [PATCH 034/156] chore(sdk): Move `reth_optimism_node::OpPrimitives` into `reth-optimism-primitives` (#12649) --- Cargo.lock | 81 +++++++++++----------- crates/optimism/evm/Cargo.toml | 1 + crates/optimism/evm/src/lib.rs | 4 +- crates/optimism/node/src/node.rs | 30 +++----- crates/optimism/payload/src/payload.rs | 2 +- crates/optimism/primitives/Cargo.toml | 5 +- crates/optimism/primitives/src/lib.rs | 16 +++++ crates/primitives-traits/src/block/body.rs | 10 ++- crates/primitives-traits/src/lib.rs | 2 +- 9 files changed, 84 insertions(+), 67 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index f0d116c29aed..adacf448f20e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4579,9 +4579,9 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.164" +version = "0.2.162" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "433bfe06b8c75da9b2e3fbea6e5329ff87748f0b144ef75306e674c3f6f7c13f" +checksum = "18d287de67fe55fd7e1581fe933d965a5a9477b38e949cfa9f8574ef01506398" [[package]] name = "libloading" @@ -4964,9 +4964,9 @@ dependencies = [ [[package]] name = "mockall" -version = "0.13.1" +version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39a6bfcc6c8c7eed5ee98b9c3e33adc726054389233e201c95dab2d41a3839d2" +checksum = "d4c28b3fb6d753d28c20e826cd46ee611fda1cf3cde03a443a974043247c065a" dependencies = [ "cfg-if", "downcast", @@ -4978,9 +4978,9 @@ dependencies = [ [[package]] name = "mockall_derive" -version = "0.13.1" +version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25ca3004c2efe9011bd4e461bd8256445052b9615405b4f7ea43fc8ca5c20898" +checksum = "341014e7f530314e9a1fdbc7400b244efea7122662c96bfa248c31da5bfb2020" dependencies = [ "cfg-if", "proc-macro2", @@ -5286,9 +5286,9 @@ checksum = "b410bbe7e14ab526a0e86877eb47c6996a2bd7746f027ba551028c925390e4e9" [[package]] name = "op-alloy-consensus" -version = "0.6.7" +version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72da577a88d35b893fae6467112651f26ef023434c196b2a0b3dc75bc853e0e4" +checksum = "bff54d1d790eca1f3aedbd666162e9c42eceff90b9f9d24b352ed9c2df1e901a" dependencies = [ "alloy-consensus", "alloy-eips", @@ -5299,14 +5299,14 @@ dependencies = [ "derive_more 1.0.0", "serde", "serde_with", - "thiserror 2.0.3", + "spin", ] [[package]] name = "op-alloy-genesis" -version = "0.6.7" +version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "818180672dd14ca6642fb57942e1cbd602669f42b6e0222b7ea9bbcae065d67e" +checksum = "ae84fd64fbc53b3e958ea5a96d7f5633e4a111092e41c51672c2d91835c09efb" dependencies = [ "alloy-consensus", "alloy-eips", @@ -5314,14 +5314,13 @@ dependencies = [ "alloy-sol-types", "serde", "serde_repr", - "thiserror 2.0.3", ] [[package]] name = "op-alloy-network" -version = "0.6.7" +version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12f82e805bad171ceae2af45efaecf8d0b50622cff3473e3c998ff1dd340de35" +checksum = "d71e777450ee3e9c5177e00865e9b4496472b623c50f146fc907b667c6b4ab37" dependencies = [ "alloy-consensus", "alloy-network", @@ -5334,32 +5333,29 @@ dependencies = [ [[package]] name = "op-alloy-protocol" -version = "0.6.7" +version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"1803a1ac96203b8f713b1fa9b7509c46c645ca7bc22b582761a7495e999d4301" +checksum = "1e854d2d4958d0a213731560172e8455536329ee9574473ff79fa953da91eb6a" dependencies = [ - "alloc-no-stdlib", "alloy-consensus", "alloy-eips", "alloy-primitives", "alloy-rlp", "alloy-serde", "async-trait", - "brotli", - "miniz_oxide", + "derive_more 1.0.0", "op-alloy-consensus", "op-alloy-genesis", "serde", - "thiserror 2.0.3", "tracing", "unsigned-varint", ] [[package]] name = "op-alloy-rpc-types" -version = "0.6.7" +version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a838c125256e02e2f9da88c51e263b02a06cda7e60382fe2551a3385b516f5bb" +checksum = "981b7f8ab11fe85ba3c1723702f000429b8d0c16b5883c93d577895f262cbac6" dependencies = [ "alloy-consensus", "alloy-eips", @@ -5376,9 +5372,9 @@ dependencies = [ [[package]] name = "op-alloy-rpc-types-engine" -version = "0.6.7" +version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c227fcc7d81d4023363ba12406e57ebcc1c7cbb1075c38ea471ae32138d4706d" +checksum = "a227b16c9c5df68b112c8db9d268ebf46b3e26c744b4d59d4949575cd603a292" dependencies = [ "alloy-eips", "alloy-primitives", @@ -5390,7 +5386,6 @@ dependencies = [ "op-alloy-protocol", "serde", "snap", - "thiserror 2.0.3", ] [[package]] @@ -5470,9 +5465,9 @@ dependencies = [ [[package]] name = "parity-scale-codec" -version = "3.7.0" +version = "3.6.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8be4817d39f3272f69c59fe05d0535ae6456c2dc2fa1ba02910296c7e0a5c590" +checksum = "306800abfa29c7f16596b5970a588435e3d5b3149683d00c12b699cc19f895ee" dependencies = [ "arbitrary", "arrayvec", @@ -5481,20 +5476,19 @@ dependencies = [ "bytes", "impl-trait-for-tuples", "parity-scale-codec-derive", - "rustversion", "serde", ] [[package]] name = "parity-scale-codec-derive" -version = "3.7.0" +version = "3.6.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8781a75c6205af67215f382092b6e0a4ff3734798523e69073d4bcd294ec767b" +checksum = "d830939c76d294956402033aee57a6da7b438f2294eb94864c37b0569053a42c" dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.87", + "syn 1.0.109", ] [[package]] @@ -8268,6 +8262,7 @@ dependencies = [ "reth-optimism-chainspec", "reth-optimism-consensus", "reth-optimism-forks", + "reth-optimism-primitives", "reth-primitives", "reth-prune-types", "reth-revm", @@ -8387,6 +8382,7 @@ dependencies = [ "derive_more 1.0.0", "op-alloy-consensus", "reth-codecs", + "reth-node-types", "reth-primitives", "reth-primitives-traits", "rstest", @@ -9749,9 +9745,9 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.41" +version = "0.38.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7f649912bc1495e167a6edee79151c84b1bad49748cb4f1f1167f459f6224f6" +checksum = "99e4ea3e1cdc4b559b8e5650f9c8e5998e3e5c1343b4eaf034565f32318d63c0" dependencies = [ "bitflags 2.6.0", "errno", @@ -9762,9 +9758,9 @@ dependencies = [ [[package]] name = "rustls" -version = "0.23.17" +version = "0.23.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f1a745511c54ba6d4465e8d5dfbd81b45791756de28d4981af70d6dca128f1e" +checksum = "eee87ff5d9b36712a58574e12e9f0ea80f915a5b0ac518d322b24a465617925e" dependencies = [ "log", "once_cell", @@ -10062,9 +10058,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.133" +version = "1.0.132" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"c7fceb2473b9166b2294ef05efcb65a3db80803f0b03ef86a5fc88a2b85ee377" +checksum = "d726bfaff4b320266d395898905d0eba0345aae23b54aee3a737e260fd46db03" dependencies = [ "indexmap 2.6.0", "itoa", @@ -10389,6 +10385,9 @@ name = "spin" version = "0.9.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" +dependencies = [ + "lock_api", +] [[package]] name = "spki" @@ -10992,9 +10991,9 @@ dependencies = [ [[package]] name = "tower-http" -version = "0.6.2" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "403fa3b783d4b626a8ad51d766ab03cb6d2dbfc46b1c5d4448395e6628dc9697" +checksum = "8437150ab6bbc8c5f0f519e3d5ed4aa883a83dd4cdd3d1b21f9482936046cb97" dependencies = [ "async-compression", "base64 0.22.1", @@ -11594,9 +11593,9 @@ dependencies = [ [[package]] name = "wasmtimer" -version = "0.4.1" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0048ad49a55b9deb3953841fa1fc5858f0efbcb7a18868c899a360269fac1b23" +checksum = "bb4f099acbc1043cc752b91615b24b02d7f6fcd975bd781fed9f50b3c3e15bf7" dependencies = [ "futures", "js-sys", diff --git a/crates/optimism/evm/Cargo.toml b/crates/optimism/evm/Cargo.toml index f6b22ad14c8d..98496bb26534 100644 --- a/crates/optimism/evm/Cargo.toml +++ b/crates/optimism/evm/Cargo.toml @@ -48,6 +48,7 @@ reth-primitives = { workspace = true, features = ["test-utils"] } reth-optimism-chainspec.workspace = true alloy-genesis.workspace = true alloy-consensus.workspace = true +reth-optimism-primitives.workspace = true [features] default = ["std"] diff --git a/crates/optimism/evm/src/lib.rs b/crates/optimism/evm/src/lib.rs index 55dc3fc7debe..be1fb6d32272 100644 --- a/crates/optimism/evm/src/lib.rs +++ b/crates/optimism/evm/src/lib.rs @@ -211,6 +211,7 @@ mod tests { AccountRevertInit, BundleStateInit, Chain, ExecutionOutcome, RevertsInit, }; use reth_optimism_chainspec::BASE_MAINNET; + use reth_optimism_primitives::OpPrimitives; use reth_primitives::{Account, Log, Receipt, Receipts, SealedBlockWithSenders, TxType}; use reth_revm::{ @@ -602,7 +603,8 @@ mod tests { // Create a Chain object with a BTreeMap of blocks mapped to their block numbers, // including block1_hash and block2_hash, and the execution_outcome - let chain: Chain = Chain::new([block1, block2], execution_outcome.clone(), None); + let chain: Chain = + Chain::new([block1, block2], execution_outcome.clone(), None); // Assert that the proper receipt vector is returned for block1_hash assert_eq!(chain.receipts_by_block_hash(block1_hash), Some(vec![&receipt1])); diff --git a/crates/optimism/node/src/node.rs b/crates/optimism/node/src/node.rs index 699239a43b2c..70f32c01ffd9 100644 --- a/crates/optimism/node/src/node.rs +++ b/crates/optimism/node/src/node.rs @@ -1,18 +1,14 @@ //! Optimism Node types config. 
-use crate::{ - args::RollupArgs, - engine::OpEngineValidator, - txpool::{OpTransactionPool, OpTransactionValidator}, - OpEngineTypes, -}; +use std::sync::Arc; + use alloy_consensus::Header; use reth_basic_payload_builder::{BasicPayloadJobGenerator, BasicPayloadJobGeneratorConfig}; use reth_chainspec::{EthChainSpec, Hardforks}; use reth_evm::{execute::BasicBlockExecutorProvider, ConfigureEvm}; use reth_network::{NetworkConfig, NetworkHandle, NetworkManager, PeersInfo}; use reth_node_api::{ - AddOnsContext, EngineValidator, FullNodeComponents, NodeAddOns, NodePrimitives, PayloadBuilder, + AddOnsContext, EngineValidator, FullNodeComponents, NodeAddOns, PayloadBuilder, }; use reth_node_builder::{ components::{ @@ -32,7 +28,6 @@ use reth_optimism_rpc::{ OpEthApi, }; use reth_payload_builder::{PayloadBuilderHandle, PayloadBuilderService}; -use reth_primitives::{Block, Receipt, TransactionSigned, TxType}; use reth_provider::CanonStateSubscriptions; use reth_rpc_server_types::RethRpcModule; use reth_tracing::tracing::{debug, info}; @@ -41,18 +36,13 @@ use reth_transaction_pool::{ TransactionValidationTaskExecutor, }; use reth_trie_db::MerklePatriciaTrie; -use std::sync::Arc; -/// Optimism primitive types. -#[derive(Debug, Default, Clone, PartialEq, Eq)] -pub struct OpPrimitives; - -impl NodePrimitives for OpPrimitives { - type Block = Block; - type SignedTx = TransactionSigned; - type TxType = TxType; - type Receipt = Receipt; -} +use crate::{ + args::RollupArgs, + engine::OpEngineValidator, + txpool::{OpTransactionPool, OpTransactionValidator}, + OpEngineTypes, +}; /// Type configuration for a regular Optimism node. #[derive(Debug, Default, Clone)] @@ -125,7 +115,7 @@ where } impl NodeTypes for OpNode { - type Primitives = OpPrimitives; + type Primitives = reth_primitives::EthPrimitives; // todo: replace with OpPrimitives when EthPrimitives is only used in reth-ethereum-* crates type ChainSpec = OpChainSpec; type StateCommitment = MerklePatriciaTrie; } diff --git a/crates/optimism/payload/src/payload.rs b/crates/optimism/payload/src/payload.rs index 1a951abadcae..36f11ee628b3 100644 --- a/crates/optimism/payload/src/payload.rs +++ b/crates/optimism/payload/src/payload.rs @@ -7,7 +7,7 @@ use alloy_eips::{ use alloy_primitives::{keccak256, Address, Bytes, B256, B64, U256}; use alloy_rlp::Encodable; use alloy_rpc_types_engine::{ExecutionPayloadEnvelopeV2, ExecutionPayloadV1, PayloadId}; -use op_alloy_consensus::{decode_holocene_extra_data, EIP1559ParamError}; +use op_alloy_consensus::eip1559::{decode_holocene_extra_data, EIP1559ParamError}; /// Re-export for use in downstream arguments. 
pub use op_alloy_rpc_types_engine::OpPayloadAttributes; use op_alloy_rpc_types_engine::{OpExecutionPayloadEnvelopeV3, OpExecutionPayloadEnvelopeV4}; diff --git a/crates/optimism/primitives/Cargo.toml b/crates/optimism/primitives/Cargo.toml index 4c6d9f51406f..ade6d4eb6bc5 100644 --- a/crates/optimism/primitives/Cargo.toml +++ b/crates/optimism/primitives/Cargo.toml @@ -13,9 +13,10 @@ workspace = true [dependencies] # reth +reth-node-types.workspace = true +reth-primitives.workspace = true reth-primitives-traits.workspace = true reth-codecs = { workspace = true, optional = true } -reth-primitives = { workspace = true, features = ["reth-codec"], optional = true } # ethereum alloy-primitives.workspace = true @@ -41,7 +42,7 @@ rstest.workspace = true default = ["reth-codec"] reth-codec = [ "dep:reth-codecs", - "dep:reth-primitives" + "reth-primitives/reth-codec" ] serde = [ "dep:serde", diff --git a/crates/optimism/primitives/src/lib.rs b/crates/optimism/primitives/src/lib.rs index a0745e7ac7d5..5f6b1848e648 100644 --- a/crates/optimism/primitives/src/lib.rs +++ b/crates/optimism/primitives/src/lib.rs @@ -9,3 +9,19 @@ pub mod bedrock; pub mod tx_type; + +pub use tx_type::OpTxType; + +use reth_node_types::NodePrimitives; +use reth_primitives::{Block, Receipt, TransactionSigned}; + +/// Optimism primitive types. +#[derive(Debug, Default, Clone, PartialEq, Eq)] +pub struct OpPrimitives; + +impl NodePrimitives for OpPrimitives { + type Block = Block; + type SignedTx = TransactionSigned; + type TxType = OpTxType; + type Receipt = Receipt; +} diff --git a/crates/primitives-traits/src/block/body.rs b/crates/primitives-traits/src/block/body.rs index 074efc4d5141..66c9c2d2e3a2 100644 --- a/crates/primitives-traits/src/block/body.rs +++ b/crates/primitives-traits/src/block/body.rs @@ -1,8 +1,16 @@ //! Block body abstraction. -use crate::{InMemorySize, MaybeSerde}; use alloc::fmt; + use alloy_consensus::Transaction; +use reth_codecs::Compact; + +use crate::{FullSignedTx, InMemorySize, MaybeSerde}; + +/// Helper trait that unifies all behaviour required by transaction to support full node operations. +pub trait FullBlockBody: BlockBody + Compact {} + +impl FullBlockBody for T where T: BlockBody + Compact {} /// Abstraction for block's body. 
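// The `FullBlockBody` trait added above follows the same "alias trait + blanket impl"
// pattern reth already uses for `FullSignedTx` and `FullNodePrimitives`: bundle a set of
// bounds under one name so downstream `where` clauses stay short. A minimal, std-only
// sketch of that pattern (all names below are illustrative, not reth's API):
mod alias_trait_sketch {
    use std::fmt::Debug;

    pub trait Encode {
        fn encode(&self) -> Vec<u8>;
    }

    // Alias trait: anything that is `Debug + Encode` counts as a "full" body.
    pub trait FullBody: Debug + Encode {}
    impl<T> FullBody for T where T: Debug + Encode {}

    // Consumers bound on the single alias instead of repeating the whole bound list.
    pub fn persist<B: FullBody>(body: &B) -> Vec<u8> {
        body.encode()
    }
}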
#[auto_impl::auto_impl(&, Arc)] diff --git a/crates/primitives-traits/src/lib.rs b/crates/primitives-traits/src/lib.rs index 33becad2feae..acee2fd04d1e 100644 --- a/crates/primitives-traits/src/lib.rs +++ b/crates/primitives-traits/src/lib.rs @@ -36,7 +36,7 @@ pub use integer_list::{IntegerList, IntegerListError}; pub mod block; pub use block::{ - body::BlockBody, + body::{BlockBody, FullBlockBody}, header::{BlockHeader, FullBlockHeader}, Block, FullBlock, }; From 03992a53ec7f504d3e0d211160d8aa00e699b4fd Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 19 Nov 2024 13:32:52 +0000 Subject: [PATCH 035/156] chore(deps): bump dcarbone/install-jq-action from 2 to 3 (#12645) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/lint.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index fa7b4f9f45c2..7e6b8747fff5 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -71,7 +71,7 @@ jobs: - uses: Swatinem/rust-cache@v2 with: cache-on-failure: true - - uses: dcarbone/install-jq-action@v2 + - uses: dcarbone/install-jq-action@v3 - name: Run Wasm checks run: .github/assets/check_wasm.sh From 6615fd2efc8f3f94f1a4924ff3be2f65c474aba9 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Tue, 19 Nov 2024 14:46:31 +0100 Subject: [PATCH 036/156] chore: re-export header from primitives traits (#12657) --- crates/primitives-traits/src/header/mod.rs | 2 +- crates/primitives-traits/src/header/sealed.rs | 9 ++++----- crates/primitives-traits/src/lib.rs | 2 +- crates/primitives/src/lib.rs | 4 ++-- 4 files changed, 8 insertions(+), 9 deletions(-) diff --git a/crates/primitives-traits/src/header/mod.rs b/crates/primitives-traits/src/header/mod.rs index b36a74471ff5..ea5f7eafb518 100644 --- a/crates/primitives-traits/src/header/mod.rs +++ b/crates/primitives-traits/src/header/mod.rs @@ -1,5 +1,5 @@ mod sealed; -pub use sealed::{BlockWithParent, SealedHeader}; +pub use sealed::{BlockWithParent, Header, SealedHeader}; mod error; pub use error::HeaderError; diff --git a/crates/primitives-traits/src/header/sealed.rs b/crates/primitives-traits/src/header/sealed.rs index f4a365e15128..d9931fc95c5b 100644 --- a/crates/primitives-traits/src/header/sealed.rs +++ b/crates/primitives-traits/src/header/sealed.rs @@ -1,16 +1,15 @@ -use core::mem; - -use alloy_consensus::{Header, Sealed}; +use crate::InMemorySize; +pub use alloy_consensus::Header; +use alloy_consensus::Sealed; use alloy_eips::BlockNumHash; use alloy_primitives::{keccak256, BlockHash, Sealable, B256}; use alloy_rlp::{Decodable, Encodable}; use bytes::BufMut; +use core::mem; use derive_more::{AsRef, Deref}; use reth_codecs::add_arbitrary_tests; use serde::{Deserialize, Serialize}; -use crate::InMemorySize; - /// A helper struct to store the block number/hash and its parent hash. 
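// With the re-export above (and the matching one in `reth_primitives` later in this patch),
// downstream code can pull the consensus header type from the reth primitives crates instead
// of importing `alloy_consensus::Header` directly, e.g.:
// use reth_primitives_traits::Header;   // or, equivalently: use reth_primitives::Header;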
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] pub struct BlockWithParent { diff --git a/crates/primitives-traits/src/lib.rs b/crates/primitives-traits/src/lib.rs index acee2fd04d1e..819825d635f1 100644 --- a/crates/primitives-traits/src/lib.rs +++ b/crates/primitives-traits/src/lib.rs @@ -60,7 +60,7 @@ pub use tx_type::{FullTxType, TxType}; pub mod header; #[cfg(any(test, feature = "arbitrary", feature = "test-utils"))] pub use header::test_utils; -pub use header::{BlockWithParent, HeaderError, SealedHeader}; +pub use header::{BlockWithParent, Header, HeaderError, SealedHeader}; /// Bincode-compatible serde implementations for common abstracted types in Reth. /// diff --git a/crates/primitives/src/lib.rs b/crates/primitives/src/lib.rs index 2618f671927f..b2f438920183 100644 --- a/crates/primitives/src/lib.rs +++ b/crates/primitives/src/lib.rs @@ -39,8 +39,8 @@ pub use receipt::{ gas_spent_by_transactions, Receipt, ReceiptWithBloom, ReceiptWithBloomRef, Receipts, }; pub use reth_primitives_traits::{ - logs_bloom, Account, Bytecode, GotExpected, GotExpectedBoxed, HeaderError, Log, LogData, - NodePrimitives, SealedHeader, StorageEntry, + logs_bloom, Account, Bytecode, GotExpected, GotExpectedBoxed, Header, HeaderError, Log, + LogData, NodePrimitives, SealedHeader, StorageEntry, }; pub use static_file::StaticFileSegment; From 66a9d3e424a0f59d7fa71a6e801891ef8e372f69 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Tue, 19 Nov 2024 16:33:19 +0100 Subject: [PATCH 037/156] fix: run upkeep manually (#12664) --- crates/node/builder/src/launch/common.rs | 15 +++--- crates/node/metrics/src/recorder.rs | 69 +++++++++++++++++++++--- crates/node/metrics/src/server.rs | 2 +- 3 files changed, 71 insertions(+), 15 deletions(-) diff --git a/crates/node/builder/src/launch/common.rs b/crates/node/builder/src/launch/common.rs index 41fbf93e05d1..e01d117e7bcb 100644 --- a/crates/node/builder/src/launch/common.rs +++ b/crates/node/builder/src/launch/common.rs @@ -2,6 +2,11 @@ use std::{sync::Arc, thread::available_parallelism}; +use crate::{ + components::{NodeComponents, NodeComponentsBuilder}, + hooks::OnComponentInitializedHook, + BuilderContext, NodeAdapter, +}; use alloy_primitives::{BlockNumber, B256}; use eyre::{Context, OptionExt}; use rayon::ThreadPoolBuilder; @@ -34,6 +39,7 @@ use reth_node_core::{ use reth_node_metrics::{ chain::ChainSpecInfo, hooks::Hooks, + recorder::install_prometheus_recorder, server::{MetricServer, MetricServerConfig}, version::VersionInfo, }; @@ -58,12 +64,6 @@ use tokio::sync::{ oneshot, watch, }; -use crate::{ - components::{NodeComponents, NodeComponentsBuilder}, - hooks::OnComponentInitializedHook, - BuilderContext, NodeAdapter, -}; - /// Allows to set a tree viewer for a configured blockchain provider. // TODO: remove this helper trait once the engine revamp is done, the new // blockchain provider won't require a TreeViewer. @@ -509,6 +509,9 @@ where /// Starts the prometheus endpoint. 
pub async fn start_prometheus_endpoint(&self) -> eyre::Result<()> { + // ensure recorder runs upkeep periodically + install_prometheus_recorder().spawn_upkeep(); + let listen_addr = self.node_config().metrics; if let Some(addr) = listen_addr { info!(target: "reth::cli", "Starting metrics endpoint at {}", addr); diff --git a/crates/node/metrics/src/recorder.rs b/crates/node/metrics/src/recorder.rs index a7421ab355c0..e62b98c81cd4 100644 --- a/crates/node/metrics/src/recorder.rs +++ b/crates/node/metrics/src/recorder.rs @@ -3,25 +3,78 @@ use eyre::WrapErr; use metrics_exporter_prometheus::{PrometheusBuilder, PrometheusHandle}; use metrics_util::layers::{PrefixLayer, Stack}; -use std::sync::LazyLock; +use std::sync::{atomic::AtomicBool, LazyLock}; /// Installs the Prometheus recorder as the global recorder. -pub fn install_prometheus_recorder() -> &'static PrometheusHandle { +/// +/// Note: This must be installed before any metrics are `described`. +/// +/// Caution: This only configures the global recorder and does not spawn the exporter. +/// Callers must run [`PrometheusRecorder::spawn_upkeep`] manually. +pub fn install_prometheus_recorder() -> &'static PrometheusRecorder { &PROMETHEUS_RECORDER_HANDLE } /// The default Prometheus recorder handle. We use a global static to ensure that it is only /// installed once. -static PROMETHEUS_RECORDER_HANDLE: LazyLock = +static PROMETHEUS_RECORDER_HANDLE: LazyLock = LazyLock::new(|| PrometheusRecorder::install().unwrap()); -/// Prometheus recorder installer +/// A handle to the Prometheus recorder. +/// +/// This is intended to be used as the global recorder. +/// Callers must ensure that [`PrometheusRecorder::spawn_upkeep`] is called once. #[derive(Debug)] -pub struct PrometheusRecorder; +pub struct PrometheusRecorder { + handle: PrometheusHandle, + upkeep: AtomicBool, +} impl PrometheusRecorder { + const fn new(handle: PrometheusHandle) -> Self { + Self { handle, upkeep: AtomicBool::new(false) } + } + + /// Returns a reference to the [`PrometheusHandle`]. + pub const fn handle(&self) -> &PrometheusHandle { + &self.handle + } + + /// Spawns the upkeep task if there hasn't been one spawned already. + /// + /// ## Panics + /// + /// This method must be called from within an existing Tokio runtime or it will panic. + /// + /// See also [`PrometheusHandle::run_upkeep`] + pub fn spawn_upkeep(&self) { + if self + .upkeep + .compare_exchange( + false, + true, + std::sync::atomic::Ordering::SeqCst, + std::sync::atomic::Ordering::Acquire, + ) + .is_err() + { + return; + } + + let handle = self.handle.clone(); + tokio::spawn(async move { + loop { + tokio::time::sleep(std::time::Duration::from_secs(5)).await; + handle.run_upkeep(); + } + }); + } + /// Installs Prometheus as the metrics recorder. - pub fn install() -> eyre::Result { + /// + /// Caution: This only configures the global recorder and does not spawn the exporter. + /// Callers must run [`Self::spawn_upkeep`] manually. 
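// A minimal, std-only sketch of the "spawn upkeep at most once" guard introduced above.
// The atomic flag and the 5-second loop mirror the patch; `run_upkeep` here is only a
// stand-in callback, while the real code drives `PrometheusHandle::run_upkeep` from a
// tokio task.
mod upkeep_sketch {
    use std::sync::atomic::{AtomicBool, Ordering};
    use std::{thread, time::Duration};

    pub struct Recorder {
        upkeep: AtomicBool,
    }

    impl Recorder {
        pub const fn new() -> Self {
            Self { upkeep: AtomicBool::new(false) }
        }

        pub fn spawn_upkeep(&self, run_upkeep: fn()) {
            // Only the first caller flips the flag; every later call returns early.
            if self
                .upkeep
                .compare_exchange(false, true, Ordering::SeqCst, Ordering::Acquire)
                .is_err()
            {
                return;
            }
            thread::spawn(move || loop {
                thread::sleep(Duration::from_secs(5));
                run_upkeep();
            });
        }
    }
}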
+ pub fn install() -> eyre::Result { let recorder = PrometheusBuilder::new().build_recorder(); let handle = recorder.handle(); @@ -31,7 +84,7 @@ impl PrometheusRecorder { .install() .wrap_err("Couldn't set metrics recorder.")?; - Ok(handle) + Ok(Self::new(handle)) } } @@ -52,7 +105,7 @@ mod tests { process.describe(); process.collect(); - let metrics = PROMETHEUS_RECORDER_HANDLE.render(); + let metrics = PROMETHEUS_RECORDER_HANDLE.handle.render(); assert!(metrics.contains("process_cpu_seconds_total"), "{metrics:?}"); } } diff --git a/crates/node/metrics/src/server.rs b/crates/node/metrics/src/server.rs index 87521349d4de..22c064d62f82 100644 --- a/crates/node/metrics/src/server.rs +++ b/crates/node/metrics/src/server.rs @@ -103,7 +103,7 @@ impl MetricServer { let hook = hook.clone(); let service = tower::service_fn(move |_| { (hook)(); - let metrics = handle.render(); + let metrics = handle.handle().render(); let mut response = Response::new(metrics); response .headers_mut() From 1e7189d3e4f2b7e744bf59906a791784621cf6e9 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Tue, 19 Nov 2024 19:39:28 +0400 Subject: [PATCH 038/156] feat: trait-based storage API (#12616) Co-authored-by: joshie <93316087+joshieDo@users.noreply.github.com> --- Cargo.lock | 5 + bin/reth/src/commands/debug_cmd/execution.rs | 2 +- crates/cli/commands/Cargo.toml | 1 + crates/cli/commands/src/common.rs | 26 ++++- crates/cli/commands/src/import.rs | 2 +- crates/cli/commands/src/stage/unwind.rs | 2 +- crates/e2e-test-utils/Cargo.toml | 2 + crates/e2e-test-utils/src/lib.rs | 15 ++- crates/ethereum/node/src/node.rs | 11 +- crates/exex/test-utils/src/lib.rs | 16 ++- crates/node/builder/src/builder/mod.rs | 29 +++-- crates/node/builder/src/launch/common.rs | 29 +++-- crates/node/builder/src/launch/engine.rs | 6 +- crates/node/builder/src/launch/mod.rs | 10 +- crates/node/builder/src/node.rs | 2 + crates/node/builder/src/setup.rs | 9 +- crates/node/types/src/lib.rs | 106 +++++++++++++----- .../cli/src/commands/build_pipeline.rs | 3 +- crates/optimism/node/Cargo.toml | 1 + crates/optimism/node/src/node.rs | 61 +++++++++- crates/primitives-traits/src/block/mod.rs | 4 +- crates/primitives/src/lib.rs | 9 ++ crates/stages/stages/src/stages/bodies.rs | 5 +- .../stages/stages/src/test_utils/test_db.rs | 4 +- crates/storage/provider/Cargo.toml | 3 + .../provider/src/providers/database/chain.rs | 26 +++++ .../provider/src/providers/database/mod.rs | 22 +++- .../src/providers/database/provider.rs | 74 ++++++------ crates/storage/provider/src/providers/mod.rs | 32 +++++- .../storage/provider/src/test_utils/mock.rs | 11 +- crates/storage/provider/src/test_utils/mod.rs | 12 +- crates/storage/provider/src/traits/block.rs | 2 +- crates/storage/storage-api/Cargo.toml | 1 + crates/storage/storage-api/src/chain.rs | 72 ++++++++++++ crates/storage/storage-api/src/lib.rs | 3 + examples/custom-engine-types/src/main.rs | 15 ++- 36 files changed, 485 insertions(+), 148 deletions(-) create mode 100644 crates/storage/provider/src/providers/database/chain.rs create mode 100644 crates/storage/storage-api/src/chain.rs diff --git a/Cargo.lock b/Cargo.lock index adacf448f20e..1b38e0566557 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6683,6 +6683,7 @@ dependencies = [ "reth-network", "reth-network-p2p", "reth-network-peers", + "reth-node-api", "reth-node-builder", "reth-node-core", "reth-node-events", @@ -7089,10 +7090,12 @@ dependencies = [ "reth-db", "reth-engine-local", "reth-network-peers", + "reth-node-api", "reth-node-builder", 
"reth-payload-builder", "reth-payload-builder-primitives", "reth-payload-primitives", + "reth-primitives", "reth-provider", "reth-rpc-layer", "reth-stages-types", @@ -8626,6 +8629,7 @@ dependencies = [ "reth-node-types", "reth-optimism-primitives", "reth-primitives", + "reth-primitives-traits", "reth-prune-types", "reth-stages-types", "reth-storage-api", @@ -9189,6 +9193,7 @@ dependencies = [ "reth-db-models", "reth-execution-types", "reth-primitives", + "reth-primitives-traits", "reth-prune-types", "reth-stages-types", "reth-storage-errors", diff --git a/bin/reth/src/commands/debug_cmd/execution.rs b/bin/reth/src/commands/debug_cmd/execution.rs index da928645b9f2..a6203ea2a73e 100644 --- a/bin/reth/src/commands/debug_cmd/execution.rs +++ b/bin/reth/src/commands/debug_cmd/execution.rs @@ -59,7 +59,7 @@ pub struct Command { } impl> Command { - fn build_pipeline, Client>( + fn build_pipeline + CliNodeTypes, Client>( &self, config: &Config, client: Client, diff --git a/crates/cli/commands/Cargo.toml b/crates/cli/commands/Cargo.toml index 7e27d9b4e2ed..90acb82d71d7 100644 --- a/crates/cli/commands/Cargo.toml +++ b/crates/cli/commands/Cargo.toml @@ -32,6 +32,7 @@ reth-fs-util.workspace = true reth-network = { workspace = true, features = ["serde"] } reth-network-p2p.workspace = true reth-network-peers = { workspace = true, features = ["secp256k1"] } +reth-node-api.workspace = true reth-node-builder.workspace = true reth-node-core.workspace = true reth-node-events.workspace = true diff --git a/crates/cli/commands/src/common.rs b/crates/cli/commands/src/common.rs index 0e4eb2723c35..e557f15da6bf 100644 --- a/crates/cli/commands/src/common.rs +++ b/crates/cli/commands/src/common.rs @@ -10,12 +10,16 @@ use reth_db::{init_db, open_db_read_only, DatabaseEnv}; use reth_db_common::init::init_genesis; use reth_downloaders::{bodies::noop::NoopBodiesDownloader, headers::noop::NoopHeaderDownloader}; use reth_evm::noop::NoopBlockExecutorProvider; +use reth_node_api::FullNodePrimitives; use reth_node_builder::{NodeTypesWithDBAdapter, NodeTypesWithEngine}; use reth_node_core::{ args::{DatabaseArgs, DatadirArgs}, dirs::{ChainPath, DataDirPath}, }; -use reth_provider::{providers::StaticFileProvider, ProviderFactory, StaticFileProviderFactory}; +use reth_provider::{ + providers::{NodeTypesForProvider, StaticFileProvider}, + ProviderFactory, StaticFileProviderFactory, +}; use reth_stages::{sets::DefaultStages, Pipeline, PipelineTarget}; use reth_static_file::StaticFileProducer; use std::{path::PathBuf, sync::Arc}; @@ -191,5 +195,21 @@ impl AccessRights { /// Helper trait with a common set of requirements for the /// [`NodeTypes`](reth_node_builder::NodeTypes) in CLI. 
-pub trait CliNodeTypes: NodeTypesWithEngine {} -impl CliNodeTypes for N where N: NodeTypesWithEngine {} +pub trait CliNodeTypes: + NodeTypesWithEngine + + NodeTypesForProvider< + Primitives: FullNodePrimitives< + Block: reth_node_api::Block, + >, + > +{ +} +impl CliNodeTypes for N where + N: NodeTypesWithEngine + + NodeTypesForProvider< + Primitives: FullNodePrimitives< + Block: reth_node_api::Block, + >, + > +{ +} diff --git a/crates/cli/commands/src/import.rs b/crates/cli/commands/src/import.rs index 539211a22f7a..c1f6408b49b0 100644 --- a/crates/cli/commands/src/import.rs +++ b/crates/cli/commands/src/import.rs @@ -167,7 +167,7 @@ pub fn build_import_pipeline( executor: E, ) -> eyre::Result<(Pipeline, impl Stream)> where - N: ProviderNodeTypes, + N: ProviderNodeTypes + CliNodeTypes, C: Consensus + 'static, E: BlockExecutorProvider, { diff --git a/crates/cli/commands/src/stage/unwind.rs b/crates/cli/commands/src/stage/unwind.rs index e71861a988de..4f47a70b02d4 100644 --- a/crates/cli/commands/src/stage/unwind.rs +++ b/crates/cli/commands/src/stage/unwind.rs @@ -113,7 +113,7 @@ impl> Command Ok(()) } - fn build_pipeline>( + fn build_pipeline + CliNodeTypes>( self, config: Config, provider_factory: ProviderFactory, diff --git a/crates/e2e-test-utils/Cargo.toml b/crates/e2e-test-utils/Cargo.toml index c4c74ebcdf12..9c40e2ba99d2 100644 --- a/crates/e2e-test-utils/Cargo.toml +++ b/crates/e2e-test-utils/Cargo.toml @@ -19,7 +19,9 @@ reth-rpc-layer.workspace = true reth-payload-builder = { workspace = true, features = ["test-utils"] } reth-payload-builder-primitives.workspace = true reth-payload-primitives.workspace = true +reth-primitives.workspace = true reth-provider.workspace = true +reth-node-api.workspace = true reth-node-builder = { workspace = true, features = ["test-utils"] } reth-tokio-util.workspace = true reth-stages-types.workspace = true diff --git a/crates/e2e-test-utils/src/lib.rs b/crates/e2e-test-utils/src/lib.rs index 1e9b39058e63..df459f641b43 100644 --- a/crates/e2e-test-utils/src/lib.rs +++ b/crates/e2e-test-utils/src/lib.rs @@ -5,12 +5,12 @@ use std::sync::Arc; use node::NodeTestContext; use reth::{ args::{DiscoveryArgs, NetworkArgs, RpcServerArgs}, - builder::{NodeBuilder, NodeConfig, NodeHandle}, + builder::{FullNodePrimitives, NodeBuilder, NodeConfig, NodeHandle}, network::PeersHandleProvider, rpc::server_types::RpcModuleSelection, tasks::TaskManager, }; -use reth_chainspec::{EthChainSpec, EthereumHardforks}; +use reth_chainspec::EthChainSpec; use reth_db::{test_utils::TempDatabase, DatabaseEnv}; use reth_engine_local::LocalPayloadAttributesBuilder; use reth_node_builder::{ @@ -18,7 +18,7 @@ use reth_node_builder::{ FullNodeTypesAdapter, Node, NodeAdapter, NodeComponents, NodeTypesWithDBAdapter, NodeTypesWithEngine, PayloadAttributesBuilder, PayloadTypes, }; -use reth_provider::providers::{BlockchainProvider, BlockchainProvider2}; +use reth_provider::providers::{BlockchainProvider, BlockchainProvider2, NodeTypesForProvider}; use tracing::{span, Level}; use wallet::Wallet; @@ -53,12 +53,14 @@ pub async fn setup( attributes_generator: impl Fn(u64) -> <::Engine as PayloadTypes>::PayloadBuilderAttributes + Copy + 'static, ) -> eyre::Result<(Vec>, TaskManager, Wallet)> where - N: Default + Node> + NodeTypesWithEngine, + N: Default + Node> + NodeTypesForProvider + NodeTypesWithEngine, N::ComponentsBuilder: NodeComponentsBuilder< TmpNodeAdapter, Components: NodeComponents, Network: PeersHandleProvider>, >, N::AddOns: RethRpcAddOns>, + N::Primitives: + FullNodePrimitives>, { 
let tasks = TaskManager::current(); let exec = tasks.executor(); @@ -120,7 +122,8 @@ pub async fn setup_engine( where N: Default + Node>>> - + NodeTypesWithEngine, + + NodeTypesWithEngine + + NodeTypesForProvider, N::ComponentsBuilder: NodeComponentsBuilder< TmpNodeAdapter>>, Components: NodeComponents< @@ -132,6 +135,8 @@ where LocalPayloadAttributesBuilder: PayloadAttributesBuilder< <::Engine as PayloadTypes>::PayloadAttributes, >, + N::Primitives: + FullNodePrimitives>, { let tasks = TaskManager::current(); let exec = tasks.executor(); diff --git a/crates/ethereum/node/src/node.rs b/crates/ethereum/node/src/node.rs index 1615ef0e6867..a2ae2374b966 100644 --- a/crates/ethereum/node/src/node.rs +++ b/crates/ethereum/node/src/node.rs @@ -26,7 +26,7 @@ use reth_node_builder::{ }; use reth_payload_builder::{PayloadBuilderHandle, PayloadBuilderService}; use reth_primitives::EthPrimitives; -use reth_provider::CanonStateSubscriptions; +use reth_provider::{CanonStateSubscriptions, EthStorage}; use reth_rpc::EthApi; use reth_tracing::tracing::{debug, info}; use reth_transaction_pool::{ @@ -74,6 +74,7 @@ impl NodeTypes for EthereumNode { type Primitives = EthPrimitives; type ChainSpec = ChainSpec; type StateCommitment = MerklePatriciaTrie; + type Storage = EthStorage; } impl NodeTypesWithEngine for EthereumNode { @@ -94,7 +95,13 @@ pub type EthereumAddOns = RpcAddOns< impl Node for EthereumNode where - Types: NodeTypesWithDB + NodeTypesWithEngine, + Types: NodeTypesWithDB + + NodeTypesWithEngine< + Engine = EthEngineTypes, + ChainSpec = ChainSpec, + Primitives = EthPrimitives, + Storage = EthStorage, + >, N: FullNodeTypes, { type ComponentsBuilder = ComponentsBuilder< diff --git a/crates/exex/test-utils/src/lib.rs b/crates/exex/test-utils/src/lib.rs index 5c3468a3c1c7..5b2267505c57 100644 --- a/crates/exex/test-utils/src/lib.rs +++ b/crates/exex/test-utils/src/lib.rs @@ -45,10 +45,10 @@ use reth_node_ethereum::{ EthEngineTypes, EthEvmConfig, }; use reth_payload_builder::noop::NoopPayloadBuilderService; -use reth_primitives::{Head, SealedBlockWithSenders}; +use reth_primitives::{EthPrimitives, Head, SealedBlockWithSenders}; use reth_provider::{ providers::{BlockchainProvider, StaticFileProvider}, - BlockReader, ProviderFactory, + BlockReader, EthStorage, ProviderFactory, }; use reth_tasks::TaskManager; use reth_transaction_pool::test_utils::{testing_pool, TestPool}; @@ -118,9 +118,10 @@ where pub struct TestNode; impl NodeTypes for TestNode { - type Primitives = (); + type Primitives = EthPrimitives; type ChainSpec = ChainSpec; type StateCommitment = reth_trie_db::MerklePatriciaTrie; + type Storage = EthStorage; } impl NodeTypesWithEngine for TestNode { @@ -129,7 +130,14 @@ impl NodeTypesWithEngine for TestNode { impl Node for TestNode where - N: FullNodeTypes>, + N: FullNodeTypes< + Types: NodeTypesWithEngine< + Engine = EthEngineTypes, + ChainSpec = ChainSpec, + Primitives = EthPrimitives, + Storage = EthStorage, + >, + >, { type ComponentsBuilder = ComponentsBuilder< N, diff --git a/crates/node/builder/src/builder/mod.rs b/crates/node/builder/src/builder/mod.rs index 89892ed5985f..3ad90a493f13 100644 --- a/crates/node/builder/src/builder/mod.rs +++ b/crates/node/builder/src/builder/mod.rs @@ -22,8 +22,8 @@ use reth_network::{ NetworkHandle, NetworkManager, }; use reth_node_api::{ - FullNodeTypes, FullNodeTypesAdapter, NodeAddOns, NodeTypes, NodeTypesWithDBAdapter, - NodeTypesWithEngine, + FullNodePrimitives, FullNodeTypes, FullNodeTypesAdapter, NodeAddOns, NodeTypes, + NodeTypesWithDBAdapter, 
NodeTypesWithEngine, }; use reth_node_core::{ cli::config::{PayloadBuilderConfig, RethTransactionPoolConfig}, @@ -31,7 +31,10 @@ use reth_node_core::{ node_config::NodeConfig, primitives::Head, }; -use reth_provider::{providers::BlockchainProvider, ChainSpecProvider, FullProvider}; +use reth_provider::{ + providers::{BlockchainProvider, NodeTypesForProvider}, + ChainSpecProvider, FullProvider, +}; use reth_tasks::TaskExecutor; use reth_transaction_pool::{PoolConfig, TransactionPool}; use revm_primitives::EnvKzgSettings; @@ -240,7 +243,7 @@ where /// Configures the types of the node. pub fn with_types(self) -> NodeBuilderWithTypes> where - T: NodeTypesWithEngine, + T: NodeTypesWithEngine + NodeTypesForProvider, { self.with_types_and_provider() } @@ -250,7 +253,7 @@ where self, ) -> NodeBuilderWithTypes, P>> where - T: NodeTypesWithEngine, + T: NodeTypesWithEngine + NodeTypesForProvider, P: FullProvider>, { NodeBuilderWithTypes::new(self.config, self.database) @@ -264,7 +267,7 @@ where node: N, ) -> NodeBuilderWithComponents, N::ComponentsBuilder, N::AddOns> where - N: Node, ChainSpec = ChainSpec>, + N: Node, ChainSpec = ChainSpec> + NodeTypesForProvider, { self.with_types().with_components(node.components_builder()).with_add_ons(node.add_ons()) } @@ -301,7 +304,7 @@ where /// Configures the types of the node. pub fn with_types(self) -> WithLaunchContext>> where - T: NodeTypesWithEngine, + T: NodeTypesWithEngine + NodeTypesForProvider, { WithLaunchContext { builder: self.builder.with_types(), task_executor: self.task_executor } } @@ -313,7 +316,7 @@ where NodeBuilderWithTypes, P>>, > where - T: NodeTypesWithEngine, + T: NodeTypesWithEngine + NodeTypesForProvider, P: FullProvider>, { WithLaunchContext { @@ -332,7 +335,7 @@ where NodeBuilderWithComponents, N::ComponentsBuilder, N::AddOns>, > where - N: Node, ChainSpec = ChainSpec>, + N: Node, ChainSpec = ChainSpec> + NodeTypesForProvider, { self.with_types().with_components(node.components_builder()).with_add_ons(node.add_ons()) } @@ -355,13 +358,15 @@ where >, > where - N: Node, ChainSpec = ChainSpec>, + N: Node, ChainSpec = ChainSpec> + NodeTypesForProvider, N::AddOns: RethRpcAddOns< NodeAdapter< RethFullAdapter, >>::Components, >, >, + N::Primitives: + FullNodePrimitives>, { self.node(node).launch().await } @@ -549,9 +554,11 @@ where impl WithLaunchContext, CB, AO>> where DB: Database + DatabaseMetrics + DatabaseMetadata + Clone + Unpin + 'static, - T: NodeTypesWithEngine, + T: NodeTypesWithEngine + NodeTypesForProvider, CB: NodeComponentsBuilder>, AO: RethRpcAddOns, CB::Components>>, + T::Primitives: + FullNodePrimitives>, { /// Launches the node with the [`DefaultNodeLauncher`] that sets up engine API consensus and rpc pub async fn launch( diff --git a/crates/node/builder/src/launch/common.rs b/crates/node/builder/src/launch/common.rs index e01d117e7bcb..903b09803542 100644 --- a/crates/node/builder/src/launch/common.rs +++ b/crates/node/builder/src/launch/common.rs @@ -26,7 +26,7 @@ use reth_evm::noop::NoopBlockExecutorProvider; use reth_fs_util as fs; use reth_invalid_block_hooks::InvalidBlockWitnessHook; use reth_network_p2p::headers::client::HeadersClient; -use reth_node_api::{FullNodeTypes, NodeTypes, NodeTypesWithDB}; +use reth_node_api::{FullNodePrimitives, FullNodeTypes, NodeTypes, NodeTypesWithDB}; use reth_node_core::{ args::InvalidBlockHookType, dirs::{ChainPath, DataDirPath}, @@ -404,9 +404,12 @@ where /// Returns the [`ProviderFactory`] for the attached storage after executing a consistent check /// between the database and 
static files. **It may execute a pipeline unwind if it fails this /// check.** - pub async fn create_provider_factory>( - &self, - ) -> eyre::Result> { + pub async fn create_provider_factory(&self) -> eyre::Result> + where + N: ProviderNodeTypes, + N::Primitives: + FullNodePrimitives>, + { let factory = ProviderFactory::new( self.right().clone(), self.chain_spec(), @@ -467,9 +470,14 @@ where } /// Creates a new [`ProviderFactory`] and attaches it to the launch context. - pub async fn with_provider_factory>( + pub async fn with_provider_factory( self, - ) -> eyre::Result, ProviderFactory>>> { + ) -> eyre::Result, ProviderFactory>>> + where + N: ProviderNodeTypes, + N::Primitives: + FullNodePrimitives>, + { let factory = self.create_provider_factory().await?; let ctx = LaunchContextWith { inner: self.inner, @@ -482,7 +490,7 @@ where impl LaunchContextWith, ProviderFactory>> where - T: NodeTypesWithDB, + T: ProviderNodeTypes, { /// Returns access to the underlying database. pub const fn database(&self) -> &T::DB { @@ -748,10 +756,7 @@ impl Attached::ChainSpec>, WithComponents>, > where - T: FullNodeTypes< - Provider: WithTree, - Types: NodeTypes, - >, + T: FullNodeTypes, CB: NodeComponentsBuilder, { /// Returns the configured `ProviderFactory`. @@ -913,7 +918,7 @@ impl where T: FullNodeTypes< Provider: WithTree + StateProviderFactory + ChainSpecProvider, - Types: NodeTypes, + Types: ProviderNodeTypes, >, CB: NodeComponentsBuilder, { diff --git a/crates/node/builder/src/launch/engine.rs b/crates/node/builder/src/launch/engine.rs index 86ab0b9a3d74..5a8405047b0d 100644 --- a/crates/node/builder/src/launch/engine.rs +++ b/crates/node/builder/src/launch/engine.rs @@ -19,8 +19,8 @@ use reth_exex::ExExManagerHandle; use reth_network::{NetworkSyncUpdater, SyncState}; use reth_network_api::{BlockDownloaderProvider, NetworkEventListenerProvider}; use reth_node_api::{ - BuiltPayload, FullNodeTypes, NodeTypesWithEngine, PayloadAttributesBuilder, PayloadBuilder, - PayloadTypes, + BuiltPayload, FullNodePrimitives, FullNodeTypes, NodeTypesWithEngine, PayloadAttributesBuilder, + PayloadBuilder, PayloadTypes, }; use reth_node_core::{ dirs::{ChainPath, DataDirPath}, @@ -77,6 +77,8 @@ where LocalPayloadAttributesBuilder: PayloadAttributesBuilder< <::Engine as PayloadTypes>::PayloadAttributes, >, + Types::Primitives: + FullNodePrimitives>, { type Node = NodeHandle, AO>; diff --git a/crates/node/builder/src/launch/mod.rs b/crates/node/builder/src/launch/mod.rs index 4f9e850c97f1..c4146f48306e 100644 --- a/crates/node/builder/src/launch/mod.rs +++ b/crates/node/builder/src/launch/mod.rs @@ -17,18 +17,18 @@ use reth_beacon_consensus::{ BeaconConsensusEngine, }; use reth_blockchain_tree::{noop::NoopBlockchainTree, BlockchainTreeConfig}; -use reth_chainspec::{EthChainSpec, EthereumHardforks}; +use reth_chainspec::EthChainSpec; use reth_consensus_debug_client::{DebugConsensusClient, EtherscanBlockProvider, RpcBlockProvider}; use reth_engine_util::EngineMessageStreamExt; use reth_exex::ExExManagerHandle; use reth_network::{BlockDownloaderProvider, NetworkEventListenerProvider}; -use reth_node_api::{AddOnsContext, FullNodeTypes, NodeTypesWithDB, NodeTypesWithEngine}; +use reth_node_api::{AddOnsContext, FullNodePrimitives, FullNodeTypes, NodeTypesWithEngine}; use reth_node_core::{ dirs::{ChainPath, DataDirPath}, exit::NodeExitFuture, }; use reth_node_events::{cl::ConsensusLayerHealthEvents, node}; -use reth_provider::providers::BlockchainProvider; +use reth_provider::providers::{BlockchainProvider, ProviderNodeTypes}; 
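// Sketch of what the new `Storage` associated type asks of implementers, mirroring the
// `EthereumNode` impl earlier in this series. `MyNode` is hypothetical and the import paths
// are assumed re-exports; treat this as an illustration of the trait shape, not exact code.
use reth_chainspec::ChainSpec;
use reth_node_api::NodeTypes;
use reth_primitives::EthPrimitives;
use reth_provider::EthStorage;
use reth_trie_db::MerklePatriciaTrie;

struct MyNode;

impl NodeTypes for MyNode {
    type Primitives = EthPrimitives;
    type ChainSpec = ChainSpec;
    type StateCommitment = MerklePatriciaTrie;
    // New in this series: the type responsible for writing chain primitives to storage.
    type Storage = EthStorage;
}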
use reth_rpc::eth::RpcNodeCore; use reth_tasks::TaskExecutor; use reth_tracing::tracing::{debug, info}; @@ -98,10 +98,12 @@ impl DefaultNodeLauncher { impl LaunchNode> for DefaultNodeLauncher where - Types: NodeTypesWithDB + NodeTypesWithEngine, + Types: ProviderNodeTypes + NodeTypesWithEngine, T: FullNodeTypes, Types = Types>, CB: NodeComponentsBuilder, AO: RethRpcAddOns>, + Types::Primitives: + FullNodePrimitives>, { type Node = NodeHandle, AO>; diff --git a/crates/node/builder/src/node.rs b/crates/node/builder/src/node.rs index 62c710ea8022..ce7d12fee3d3 100644 --- a/crates/node/builder/src/node.rs +++ b/crates/node/builder/src/node.rs @@ -71,6 +71,8 @@ where type ChainSpec = ::ChainSpec; type StateCommitment = ::StateCommitment; + + type Storage = ::Storage; } impl NodeTypesWithEngine for AnyNode diff --git a/crates/node/builder/src/setup.rs b/crates/node/builder/src/setup.rs index db188402ca8b..337e37eeedd4 100644 --- a/crates/node/builder/src/setup.rs +++ b/crates/node/builder/src/setup.rs @@ -14,6 +14,7 @@ use reth_exex::ExExManagerHandle; use reth_network_p2p::{ bodies::downloader::BodyDownloader, headers::downloader::HeaderDownloader, EthBlockClient, }; +use reth_node_api::FullNodePrimitives; use reth_provider::{providers::ProviderNodeTypes, ProviderFactory}; use reth_stages::{prelude::DefaultStages, stages::ExecutionStage, Pipeline, StageSet}; use reth_static_file::StaticFileProducer; @@ -40,6 +41,8 @@ where N: ProviderNodeTypes, Client: EthBlockClient + 'static, Executor: BlockExecutorProvider, + N::Primitives: + FullNodePrimitives>, { // building network downloaders using the fetch client let header_downloader = ReverseHeadersDownloaderBuilder::new(config.headers) @@ -85,8 +88,12 @@ pub fn build_pipeline( where N: ProviderNodeTypes, H: HeaderDownloader
+ 'static, - B: BodyDownloader + 'static, + B: BodyDownloader< + Body = <::Block as reth_node_api::Block>::Body, + > + 'static, Executor: BlockExecutorProvider, + N::Primitives: + FullNodePrimitives>, { let mut builder = Pipeline::::builder(); diff --git a/crates/node/types/src/lib.rs b/crates/node/types/src/lib.rs index f8770a3c0147..2da8180a9562 100644 --- a/crates/node/types/src/lib.rs +++ b/crates/node/types/src/lib.rs @@ -9,12 +9,11 @@ #![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] #![cfg_attr(not(feature = "std"), no_std)] +use core::{fmt::Debug, marker::PhantomData}; pub use reth_primitives_traits::{ Block, BlockBody, FullBlock, FullNodePrimitives, FullReceipt, FullSignedTx, NodePrimitives, }; -use core::marker::PhantomData; - use reth_chainspec::EthChainSpec; use reth_db_api::{ database_metrics::{DatabaseMetadata, DatabaseMetrics}, @@ -35,6 +34,8 @@ pub trait NodeTypes: Send + Sync + Unpin + 'static { type ChainSpec: EthChainSpec; /// The type used to perform state commitment operations. type StateCommitment: StateCommitment; + /// The type responsible for writing chain primitives to storage. + type Storage: Default + Send + Sync + Unpin + Debug + 'static; } /// The type that configures an Ethereum-like node with an engine for consensus. @@ -86,6 +87,7 @@ where type Primitives = Types::Primitives; type ChainSpec = Types::ChainSpec; type StateCommitment = Types::StateCommitment; + type Storage = Types::Storage; } impl NodeTypesWithEngine for NodeTypesWithDBAdapter @@ -105,86 +107,128 @@ where } /// A [`NodeTypes`] type builder. -#[derive(Default, Debug)] -pub struct AnyNodeTypes

(PhantomData

, PhantomData, PhantomData); +#[derive(Debug)] +pub struct AnyNodeTypes

( + PhantomData

, + PhantomData, + PhantomData, + PhantomData, +); + +impl Default for AnyNodeTypes { + fn default() -> Self { + Self::new() + } +} + +impl AnyNodeTypes { + /// Creates a new instance of [`AnyNodeTypes`]. + pub const fn new() -> Self { + Self(PhantomData, PhantomData, PhantomData, PhantomData) + } -impl AnyNodeTypes { /// Sets the `Primitives` associated type. - pub const fn primitives(self) -> AnyNodeTypes { - AnyNodeTypes::(PhantomData::, PhantomData::, PhantomData::) + pub const fn primitives(self) -> AnyNodeTypes { + AnyNodeTypes::new() } /// Sets the `ChainSpec` associated type. - pub const fn chain_spec(self) -> AnyNodeTypes { - AnyNodeTypes::(PhantomData::

, PhantomData::, PhantomData::) + pub const fn chain_spec(self) -> AnyNodeTypes { + AnyNodeTypes::new() } /// Sets the `StateCommitment` associated type. - pub const fn state_commitment(self) -> AnyNodeTypes { - AnyNodeTypes::(PhantomData::

, PhantomData::, PhantomData::) + pub const fn state_commitment(self) -> AnyNodeTypes { + AnyNodeTypes::new() + } + + /// Sets the `Storage` associated type. + pub const fn storage(self) -> AnyNodeTypes { + AnyNodeTypes::new() } } -impl NodeTypes for AnyNodeTypes +impl NodeTypes for AnyNodeTypes where P: NodePrimitives + Send + Sync + Unpin + 'static, C: EthChainSpec + 'static, - S: StateCommitment, + SC: StateCommitment, + S: Default + Send + Sync + Unpin + Debug + 'static, { type Primitives = P; type ChainSpec = C; - type StateCommitment = S; + type StateCommitment = SC; + type Storage = S; } /// A [`NodeTypesWithEngine`] type builder. -#[derive(Default, Debug)] -pub struct AnyNodeTypesWithEngine

{ +#[derive(Debug)] +pub struct AnyNodeTypesWithEngine

{ /// Embedding the basic node types. - base: AnyNodeTypes, + _base: AnyNodeTypes, /// Phantom data for the engine. _engine: PhantomData, } -impl AnyNodeTypesWithEngine { +impl Default for AnyNodeTypesWithEngine { + fn default() -> Self { + Self::new() + } +} + +impl AnyNodeTypesWithEngine { + /// Creates a new instance of [`AnyNodeTypesWithEngine`]. + pub const fn new() -> Self { + Self { _base: AnyNodeTypes::new(), _engine: PhantomData } + } + /// Sets the `Primitives` associated type. - pub const fn primitives(self) -> AnyNodeTypesWithEngine { - AnyNodeTypesWithEngine { base: self.base.primitives::(), _engine: PhantomData } + pub const fn primitives(self) -> AnyNodeTypesWithEngine { + AnyNodeTypesWithEngine::new() } /// Sets the `Engine` associated type. - pub const fn engine(self) -> AnyNodeTypesWithEngine { - AnyNodeTypesWithEngine { base: self.base, _engine: PhantomData:: } + pub const fn engine(self) -> AnyNodeTypesWithEngine { + AnyNodeTypesWithEngine::new() } /// Sets the `ChainSpec` associated type. - pub const fn chain_spec(self) -> AnyNodeTypesWithEngine { - AnyNodeTypesWithEngine { base: self.base.chain_spec::(), _engine: PhantomData } + pub const fn chain_spec(self) -> AnyNodeTypesWithEngine { + AnyNodeTypesWithEngine::new() } /// Sets the `StateCommitment` associated type. - pub const fn state_commitment(self) -> AnyNodeTypesWithEngine { - AnyNodeTypesWithEngine { base: self.base.state_commitment::(), _engine: PhantomData } + pub const fn state_commitment(self) -> AnyNodeTypesWithEngine { + AnyNodeTypesWithEngine::new() + } + + /// Sets the `Storage` associated type. + pub const fn storage(self) -> AnyNodeTypesWithEngine { + AnyNodeTypesWithEngine::new() } } -impl NodeTypes for AnyNodeTypesWithEngine +impl NodeTypes for AnyNodeTypesWithEngine where P: NodePrimitives + Send + Sync + Unpin + 'static, E: EngineTypes + Send + Sync + Unpin, C: EthChainSpec + 'static, - S: StateCommitment, + SC: StateCommitment, + S: Default + Send + Sync + Unpin + Debug + 'static, { type Primitives = P; type ChainSpec = C; - type StateCommitment = S; + type StateCommitment = SC; + type Storage = S; } -impl NodeTypesWithEngine for AnyNodeTypesWithEngine +impl NodeTypesWithEngine for AnyNodeTypesWithEngine where P: NodePrimitives + Send + Sync + Unpin + 'static, E: EngineTypes + Send + Sync + Unpin, C: EthChainSpec + 'static, - S: StateCommitment, + SC: StateCommitment, + S: Default + Send + Sync + Unpin + Debug + 'static, { type Engine = E; } diff --git a/crates/optimism/cli/src/commands/build_pipeline.rs b/crates/optimism/cli/src/commands/build_pipeline.rs index 88dc09897179..8ebefdcc0b40 100644 --- a/crates/optimism/cli/src/commands/build_pipeline.rs +++ b/crates/optimism/cli/src/commands/build_pipeline.rs @@ -1,5 +1,6 @@ use alloy_primitives::B256; use futures_util::{Stream, StreamExt}; +use reth_cli_commands::common::CliNodeTypes; use reth_config::Config; use reth_consensus::Consensus; use reth_downloaders::{ @@ -38,7 +39,7 @@ pub(crate) async fn build_import_pipeline( disable_exec: bool, ) -> eyre::Result<(Pipeline, impl Stream)> where - N: ProviderNodeTypes, + N: CliNodeTypes + ProviderNodeTypes, C: Consensus + 'static, { if !file_client.has_canonical_blocks() { diff --git a/crates/optimism/node/Cargo.toml b/crates/optimism/node/Cargo.toml index 03ea75a26cdd..2e3e9fb4f1d1 100644 --- a/crates/optimism/node/Cargo.toml +++ b/crates/optimism/node/Cargo.toml @@ -13,6 +13,7 @@ workspace = true [dependencies] # reth reth-chainspec.workspace = true +reth-db.workspace = true 
reth-engine-local.workspace = true reth-primitives.workspace = true reth-payload-builder.workspace = true diff --git a/crates/optimism/node/src/node.rs b/crates/optimism/node/src/node.rs index 70f32c01ffd9..6cdffd09059b 100644 --- a/crates/optimism/node/src/node.rs +++ b/crates/optimism/node/src/node.rs @@ -5,10 +5,12 @@ use std::sync::Arc; use alloy_consensus::Header; use reth_basic_payload_builder::{BasicPayloadJobGenerator, BasicPayloadJobGeneratorConfig}; use reth_chainspec::{EthChainSpec, Hardforks}; +use reth_db::transaction::{DbTx, DbTxMut}; use reth_evm::{execute::BasicBlockExecutorProvider, ConfigureEvm}; use reth_network::{NetworkConfig, NetworkHandle, NetworkManager, PeersInfo}; use reth_node_api::{ - AddOnsContext, EngineValidator, FullNodeComponents, NodeAddOns, PayloadBuilder, + AddOnsContext, EngineValidator, FullNodeComponents, FullNodePrimitives, NodeAddOns, + PayloadBuilder, }; use reth_node_builder::{ components::{ @@ -28,7 +30,11 @@ use reth_optimism_rpc::{ OpEthApi, }; use reth_payload_builder::{PayloadBuilderHandle, PayloadBuilderService}; -use reth_provider::CanonStateSubscriptions; +use reth_primitives::{Block, BlockBody, Receipt, TransactionSigned, TxType}; +use reth_provider::{ + providers::ChainStorage, BlockBodyWriter, CanonStateSubscriptions, DBProvider, EthStorage, + ProviderResult, +}; use reth_rpc_server_types::RethRpcModule; use reth_tracing::tracing::{debug, info}; use reth_transaction_pool::{ @@ -43,7 +49,42 @@ use crate::{ txpool::{OpTransactionPool, OpTransactionValidator}, OpEngineTypes, }; +/// Optimism primitive types. +#[derive(Debug, Default, Clone, PartialEq, Eq)] +pub struct OpPrimitives; + +impl FullNodePrimitives for OpPrimitives { + type Block = Block; + type SignedTx = TransactionSigned; + type TxType = TxType; + type Receipt = Receipt; +} + +/// Storage implementation for Optimism. +#[derive(Debug, Default, Clone)] +pub struct OpStorage(EthStorage); + +impl> BlockBodyWriter for OpStorage { + fn write_block_bodies( + &self, + provider: &Provider, + bodies: Vec<(u64, Option)>, + ) -> ProviderResult<()> { + self.0.write_block_bodies(provider, bodies) + } +} +impl ChainStorage for OpStorage { + fn writer( + &self, + ) -> impl reth_provider::ChainStorageWriter, OpPrimitives> + where + TX: DbTxMut + DbTx + 'static, + Types: NodeTypes, + { + self + } +} /// Type configuration for a regular Optimism node. 
#[derive(Debug, Default, Clone)] #[non_exhaustive] @@ -90,7 +131,14 @@ impl OpNode { impl Node for OpNode where - N: FullNodeTypes>, + N: FullNodeTypes< + Types: NodeTypesWithEngine< + Engine = OpEngineTypes, + ChainSpec = OpChainSpec, + Primitives = OpPrimitives, + Storage = OpStorage, + >, + >, { type ComponentsBuilder = ComponentsBuilder< N, @@ -115,9 +163,10 @@ where } impl NodeTypes for OpNode { - type Primitives = reth_primitives::EthPrimitives; // todo: replace with OpPrimitives when EthPrimitives is only used in reth-ethereum-* crates + type Primitives = OpPrimitives; type ChainSpec = OpChainSpec; type StateCommitment = MerklePatriciaTrie; + type Storage = OpStorage; } impl NodeTypesWithEngine for OpNode { @@ -144,7 +193,7 @@ impl OpAddOns { impl NodeAddOns for OpAddOns where N: FullNodeComponents< - Types: NodeTypes, + Types: NodeTypes, PayloadBuilder: PayloadBuilder::Engine>, >, OpEngineValidator: EngineValidator<::Engine>, @@ -172,7 +221,7 @@ where impl RethRpcAddOns for OpAddOns where N: FullNodeComponents< - Types: NodeTypes, + Types: NodeTypes, PayloadBuilder: PayloadBuilder::Engine>, >, OpEngineValidator: EngineValidator<::Engine>, diff --git a/crates/primitives-traits/src/block/mod.rs b/crates/primitives-traits/src/block/mod.rs index 5b1faeafbb72..67658c39e07d 100644 --- a/crates/primitives-traits/src/block/mod.rs +++ b/crates/primitives-traits/src/block/mod.rs @@ -10,9 +10,9 @@ use reth_codecs::Compact; use crate::{BlockHeader, FullBlockHeader, InMemorySize, MaybeSerde}; /// Helper trait that unifies all behaviour required by block to support full node operations. -pub trait FullBlock: Block + Compact {} +pub trait FullBlock: Block {} -impl FullBlock for T where T: Block + Compact {} +impl FullBlock for T where T: Block {} /// Abstraction of block data type. // todo: make sealable super-trait, depends on diff --git a/crates/primitives/src/lib.rs b/crates/primitives/src/lib.rs index b2f438920183..c3682ecba1da 100644 --- a/crates/primitives/src/lib.rs +++ b/crates/primitives/src/lib.rs @@ -79,6 +79,15 @@ pub mod serde_bincode_compat { #[derive(Debug, Clone, Default, PartialEq, Eq)] pub struct EthPrimitives; +#[cfg(feature = "reth-codec")] +impl reth_primitives_traits::FullNodePrimitives for EthPrimitives { + type Block = crate::Block; + type SignedTx = crate::TransactionSigned; + type TxType = crate::TxType; + type Receipt = crate::Receipt; +} + +#[cfg(not(feature = "reth-codec"))] impl NodePrimitives for EthPrimitives { type Block = crate::Block; type SignedTx = crate::TransactionSigned; diff --git a/crates/stages/stages/src/stages/bodies.rs b/crates/stages/stages/src/stages/bodies.rs index 07b975749726..80185eade878 100644 --- a/crates/stages/stages/src/stages/bodies.rs +++ b/crates/stages/stages/src/stages/bodies.rs @@ -198,7 +198,10 @@ where // Write bodies to database. This will NOT write transactions to database as we've already // written them directly to static files. 
provider.append_block_bodies( - buffer.into_iter().map(|response| (response.block_number(), response.into_body())), + buffer + .into_iter() + .map(|response| (response.block_number(), response.into_body())) + .collect(), )?; // The stage is "done" if: diff --git a/crates/stages/stages/src/test_utils/test_db.rs b/crates/stages/stages/src/test_utils/test_db.rs index 772e9cb78d07..2f9712f84364 100644 --- a/crates/stages/stages/src/test_utils/test_db.rs +++ b/crates/stages/stages/src/test_utils/test_db.rs @@ -15,7 +15,7 @@ use reth_db_api::{ DatabaseError as DbError, }; use reth_primitives::{ - Account, Receipt, SealedBlock, SealedHeader, StaticFileSegment, StorageEntry, + Account, EthPrimitives, Receipt, SealedBlock, SealedHeader, StaticFileSegment, StorageEntry, }; use reth_provider::{ providers::{StaticFileProvider, StaticFileProviderRWRefMut, StaticFileWriter}, @@ -142,7 +142,7 @@ impl TestStageDB { /// Insert header to static file if `writer` exists, otherwise to DB. pub fn insert_header( - writer: Option<&mut StaticFileProviderRWRefMut<'_, ()>>, + writer: Option<&mut StaticFileProviderRWRefMut<'_, EthPrimitives>>, tx: &TX, header: &SealedHeader, td: U256, diff --git a/crates/storage/provider/Cargo.toml b/crates/storage/provider/Cargo.toml index eff0540638a5..674f02adabc3 100644 --- a/crates/storage/provider/Cargo.toml +++ b/crates/storage/provider/Cargo.toml @@ -17,6 +17,7 @@ reth-chainspec.workspace = true reth-blockchain-tree-api.workspace = true reth-execution-types.workspace = true reth-primitives = { workspace = true, features = ["reth-codec", "secp256k1"] } +reth-primitives-traits.workspace = true reth-fs-util.workspace = true reth-errors.workspace = true reth-storage-errors.workspace = true @@ -111,6 +112,7 @@ serde = [ "revm/serde", "reth-codecs/serde", "reth-optimism-primitives?/serde", + "reth-primitives-traits/serde", ] test-utils = [ "reth-db/test-utils", @@ -122,6 +124,7 @@ test-utils = [ "reth-evm/test-utils", "reth-network-p2p/test-utils", "reth-primitives/test-utils", + "reth-primitives-traits/test-utils", "reth-codecs/test-utils", "reth-db-api/test-utils", "reth-trie-db/test-utils", diff --git a/crates/storage/provider/src/providers/database/chain.rs b/crates/storage/provider/src/providers/database/chain.rs new file mode 100644 index 000000000000..8f9a6395a9dd --- /dev/null +++ b/crates/storage/provider/src/providers/database/chain.rs @@ -0,0 +1,26 @@ +use crate::{providers::NodeTypes, DatabaseProvider}; +use reth_db::transaction::{DbTx, DbTxMut}; +use reth_node_types::FullNodePrimitives; +use reth_primitives::EthPrimitives; +use reth_storage_api::{ChainStorageWriter, EthStorage}; + +/// Trait that provides access to implementations of [`ChainStorage`] +pub trait ChainStorage: Send + Sync { + /// Provides access to the chain writer. 
+ fn writer(&self) -> impl ChainStorageWriter, Primitives> + where + TX: DbTxMut + DbTx + 'static, + Types: NodeTypes; +} + +impl ChainStorage for EthStorage { + fn writer( + &self, + ) -> impl ChainStorageWriter, EthPrimitives> + where + TX: DbTxMut + DbTx + 'static, + Types: NodeTypes, + { + self + } +} diff --git a/crates/storage/provider/src/providers/database/mod.rs b/crates/storage/provider/src/providers/database/mod.rs index 94c83bbb4422..a64bb2578dd6 100644 --- a/crates/storage/provider/src/providers/database/mod.rs +++ b/crates/storage/provider/src/providers/database/mod.rs @@ -44,6 +44,9 @@ use super::ProviderNodeTypes; mod metrics; +mod chain; +pub use chain::*; + /// A common provider that fetches data from a database or static file. /// /// This provider implements most provider or provider factory traits. @@ -56,19 +59,22 @@ pub struct ProviderFactory { static_file_provider: StaticFileProvider, /// Optional pruning configuration prune_modes: PruneModes, + /// The node storage handler. + storage: Arc, } impl fmt::Debug for ProviderFactory where - N: NodeTypesWithDB, + N: NodeTypesWithDB, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let Self { db, chain_spec, static_file_provider, prune_modes } = self; + let Self { db, chain_spec, static_file_provider, prune_modes, storage } = self; f.debug_struct("ProviderFactory") .field("db", &db) .field("chain_spec", &chain_spec) .field("static_file_provider", &static_file_provider) .field("prune_modes", &prune_modes) + .field("storage", &storage) .finish() } } @@ -80,7 +86,13 @@ impl ProviderFactory { chain_spec: Arc, static_file_provider: StaticFileProvider, ) -> Self { - Self { db, chain_spec, static_file_provider, prune_modes: PruneModes::none() } + Self { + db, + chain_spec, + static_file_provider, + prune_modes: PruneModes::none(), + storage: Default::default(), + } } /// Enables metrics on the static file provider. 
@@ -121,6 +133,7 @@ impl>> ProviderFactory { chain_spec, static_file_provider, prune_modes: PruneModes::none(), + storage: Default::default(), }) } } @@ -139,6 +152,7 @@ impl ProviderFactory { self.chain_spec.clone(), self.static_file_provider.clone(), self.prune_modes.clone(), + self.storage.clone(), )) } @@ -153,6 +167,7 @@ impl ProviderFactory { self.chain_spec.clone(), self.static_file_provider.clone(), self.prune_modes.clone(), + self.storage.clone(), ))) } @@ -617,6 +632,7 @@ impl Clone for ProviderFactory { chain_spec: self.chain_spec.clone(), static_file_provider: self.static_file_provider.clone(), prune_modes: self.prune_modes.clone(), + storage: self.storage.clone(), } } } diff --git a/crates/storage/provider/src/providers/database/provider.rs b/crates/storage/provider/src/providers/database/provider.rs index b93112e70843..4690e27821ea 100644 --- a/crates/storage/provider/src/providers/database/provider.rs +++ b/crates/storage/provider/src/providers/database/provider.rs @@ -1,19 +1,24 @@ use crate::{ bundle_state::StorageRevertsIter, - providers::{database::metrics, static_file::StaticFileWriter, StaticFileProvider}, + providers::{ + database::{chain::ChainStorage, metrics}, + static_file::StaticFileWriter, + ProviderNodeTypes, StaticFileProvider, + }, to_range, traits::{ AccountExtReader, BlockSource, ChangeSetReader, ReceiptProvider, StageCheckpointWriter, }, writer::UnifiedStorageWriter, - AccountReader, BlockExecutionWriter, BlockHashReader, BlockNumReader, BlockReader, BlockWriter, - BundleStateInit, ChainStateBlockReader, ChainStateBlockWriter, DBProvider, EvmEnvProvider, - HashingWriter, HeaderProvider, HeaderSyncGap, HeaderSyncGapProvider, HistoricalStateProvider, - HistoricalStateProviderRef, HistoryWriter, LatestStateProvider, LatestStateProviderRef, - OriginalValuesKnown, ProviderError, PruneCheckpointReader, PruneCheckpointWriter, RevertsInit, - StageCheckpointReader, StateChangeWriter, StateProviderBox, StateReader, StateWriter, - StaticFileProviderFactory, StatsReader, StorageReader, StorageTrieWriter, TransactionVariant, - TransactionsProvider, TransactionsProviderExt, TrieWriter, WithdrawalsProvider, + AccountReader, BlockBodyWriter, BlockExecutionWriter, BlockHashReader, BlockNumReader, + BlockReader, BlockWriter, BundleStateInit, ChainStateBlockReader, ChainStateBlockWriter, + DBProvider, EvmEnvProvider, HashingWriter, HeaderProvider, HeaderSyncGap, + HeaderSyncGapProvider, HistoricalStateProvider, HistoricalStateProviderRef, HistoryWriter, + LatestStateProvider, LatestStateProviderRef, OriginalValuesKnown, ProviderError, + PruneCheckpointReader, PruneCheckpointWriter, RevertsInit, StageCheckpointReader, + StateChangeWriter, StateProviderBox, StateReader, StateWriter, StaticFileProviderFactory, + StatsReader, StorageReader, StorageTrieWriter, TransactionVariant, TransactionsProvider, + TransactionsProviderExt, TrieWriter, WithdrawalsProvider, }; use alloy_consensus::Header; use alloy_eips::{ @@ -47,6 +52,7 @@ use reth_primitives::{ SealedBlockWithSenders, SealedHeader, StaticFileSegment, StorageEntry, TransactionMeta, TransactionSigned, TransactionSignedEcRecovered, TransactionSignedNoHash, }; +use reth_primitives_traits::{BlockBody as _, FullNodePrimitives}; use reth_prune_types::{PruneCheckpoint, PruneModes, PruneSegment}; use reth_stages_types::{StageCheckpoint, StageId}; use reth_storage_api::{StateProvider, StorageChangeSetReader, TryIntoHistoricalStateProvider}; @@ -138,6 +144,8 @@ pub struct DatabaseProvider { static_file_provider: StaticFileProvider, /// 
Pruning configuration prune_modes: PruneModes, + /// Node storage handler. + storage: Arc, } impl DatabaseProvider { @@ -224,8 +232,9 @@ impl DatabaseProvider { chain_spec: Arc, static_file_provider: StaticFileProvider, prune_modes: PruneModes, + storage: Arc, ) -> Self { - Self { tx, chain_spec, static_file_provider, prune_modes } + Self { tx, chain_spec, static_file_provider, prune_modes, storage } } } @@ -277,9 +286,7 @@ impl TryIntoHistoricalStateProvider for Databa } } -impl + 'static> - DatabaseProvider -{ +impl DatabaseProvider { // TODO: uncomment below, once `reth debug_cmd` has been feature gated with dev. // #[cfg(any(test, feature = "test-utils"))] /// Inserts an historical block. **Used for setting up test environments** @@ -367,8 +374,9 @@ impl DatabaseProvider { chain_spec: Arc, static_file_provider: StaticFileProvider, prune_modes: PruneModes, + storage: Arc, ) -> Self { - Self { tx, chain_spec, static_file_provider, prune_modes } + Self { tx, chain_spec, static_file_provider, prune_modes, storage } } /// Consume `DbTx` or `DbTxMut`. @@ -2899,8 +2907,8 @@ impl StateReader for DatabaseProvider { } } -impl + 'static> - BlockExecutionWriter for DatabaseProvider +impl BlockExecutionWriter + for DatabaseProvider { fn take_block_and_execution_range( &self, @@ -3101,10 +3109,11 @@ impl + } } -impl + 'static> BlockWriter +impl BlockWriter for DatabaseProvider { - type Body = BlockBody; + type Body = + <::Block as reth_primitives_traits::Block>::Body; /// Inserts the block into the database, always modifying the following tables: /// * [`CanonicalHeaders`](tables::CanonicalHeaders) @@ -3266,45 +3275,32 @@ impl + fn append_block_bodies( &self, - bodies: impl Iterator)>, + bodies: Vec<(BlockNumber, Option)>, ) -> ProviderResult<()> { let mut block_indices_cursor = self.tx.cursor_write::()?; let mut tx_block_cursor = self.tx.cursor_write::()?; - let mut ommers_cursor = self.tx.cursor_write::()?; - let mut withdrawals_cursor = self.tx.cursor_write::()?; // Get id for the next tx_num of zero if there are no transactions. 
let mut next_tx_num = tx_block_cursor.last()?.map(|(id, _)| id + 1).unwrap_or_default(); - for (block_number, body) in bodies { - let tx_count = body.as_ref().map(|b| b.transactions.len() as u64).unwrap_or_default(); + for (block_number, body) in &bodies { + let tx_count = body.as_ref().map(|b| b.transactions().len() as u64).unwrap_or_default(); let block_indices = StoredBlockBodyIndices { first_tx_num: next_tx_num, tx_count }; // insert block meta - block_indices_cursor.append(block_number, block_indices)?; + block_indices_cursor.append(*block_number, block_indices)?; next_tx_num += tx_count; let Some(body) = body else { continue }; // write transaction block index - if !body.transactions.is_empty() { - tx_block_cursor.append(block_indices.last_tx_num(), block_number)?; - } - - // Write ommers if any - if !body.ommers.is_empty() { - ommers_cursor.append(block_number, StoredBlockOmmers { ommers: body.ommers })?; - } - - // Write withdrawals if any - if let Some(withdrawals) = body.withdrawals { - if !withdrawals.is_empty() { - withdrawals_cursor - .append(block_number, StoredBlockWithdrawals { withdrawals })?; - } + if !body.transactions().is_empty() { + tx_block_cursor.append(block_indices.last_tx_num(), *block_number)?; } } + self.storage.writer().write_block_bodies(self, bodies)?; + Ok(()) } diff --git a/crates/storage/provider/src/providers/mod.rs b/crates/storage/provider/src/providers/mod.rs index 3bf3e7b247f6..d049243377e9 100644 --- a/crates/storage/provider/src/providers/mod.rs +++ b/crates/storage/provider/src/providers/mod.rs @@ -22,7 +22,7 @@ use reth_chain_state::{ChainInfoTracker, ForkChoiceNotifications, ForkChoiceSubs use reth_chainspec::{ChainInfo, EthereumHardforks}; use reth_db_api::models::{AccountBeforeTx, StoredBlockBodyIndices}; use reth_evm::ConfigureEvmEnv; -use reth_node_types::NodeTypesWithDB; +use reth_node_types::{FullNodePrimitives, NodeTypes, NodeTypesWithDB}; use reth_primitives::{ Account, Block, BlockWithSenders, Receipt, SealedBlock, SealedBlockWithSenders, SealedHeader, TransactionMeta, TransactionSigned, TransactionSignedNoHash, @@ -37,6 +37,7 @@ use std::{ sync::Arc, time::Instant, }; + use tracing::trace; mod database; @@ -67,10 +68,35 @@ pub use blockchain_provider::BlockchainProvider2; mod consistent; pub use consistent::ConsistentProvider; +/// Helper trait to bound [`NodeTypes`] so that combined with database they satisfy +/// [`ProviderNodeTypes`]. +pub trait NodeTypesForProvider +where + Self: NodeTypes< + ChainSpec: EthereumHardforks, + Storage: ChainStorage, + Primitives: FullNodePrimitives, + >, +{ +} + +impl NodeTypesForProvider for T where + T: NodeTypes< + ChainSpec: EthereumHardforks, + Storage: ChainStorage, + Primitives: FullNodePrimitives, + > +{ +} + /// Helper trait keeping common requirements of providers for [`NodeTypesWithDB`]. -pub trait ProviderNodeTypes: NodeTypesWithDB {} +pub trait ProviderNodeTypes +where + Self: NodeTypesForProvider + NodeTypesWithDB, +{ +} -impl ProviderNodeTypes for T where T: NodeTypesWithDB {} +impl ProviderNodeTypes for T where T: NodeTypesForProvider + NodeTypesWithDB {} /// The main type for interacting with the blockchain. 
/// diff --git a/crates/storage/provider/src/test_utils/mock.rs b/crates/storage/provider/src/test_utils/mock.rs index 9661ab2057ca..43bb1e809422 100644 --- a/crates/storage/provider/src/test_utils/mock.rs +++ b/crates/storage/provider/src/test_utils/mock.rs @@ -1,9 +1,9 @@ use crate::{ traits::{BlockSource, ReceiptProvider}, AccountReader, BlockHashReader, BlockIdReader, BlockNumReader, BlockReader, BlockReaderIdExt, - ChainSpecProvider, ChangeSetReader, DatabaseProvider, EvmEnvProvider, HeaderProvider, - ReceiptProviderIdExt, StateProvider, StateProviderBox, StateProviderFactory, StateReader, - StateRootProvider, TransactionVariant, TransactionsProvider, WithdrawalsProvider, + ChainSpecProvider, ChangeSetReader, DatabaseProvider, EthStorage, EvmEnvProvider, + HeaderProvider, ReceiptProviderIdExt, StateProvider, StateProviderBox, StateProviderFactory, + StateReader, StateRootProvider, TransactionVariant, TransactionsProvider, WithdrawalsProvider, }; use alloy_consensus::{constants::EMPTY_ROOT_HASH, Header}; use alloy_eips::{ @@ -23,7 +23,7 @@ use reth_evm::ConfigureEvmEnv; use reth_execution_types::ExecutionOutcome; use reth_node_types::NodeTypes; use reth_primitives::{ - Account, Block, BlockWithSenders, Bytecode, GotExpected, Receipt, SealedBlock, + Account, Block, BlockWithSenders, Bytecode, EthPrimitives, GotExpected, Receipt, SealedBlock, SealedBlockWithSenders, SealedHeader, TransactionMeta, TransactionSigned, TransactionSignedNoHash, }; @@ -158,9 +158,10 @@ impl MockEthProvider { pub struct MockNode; impl NodeTypes for MockNode { - type Primitives = (); + type Primitives = EthPrimitives; type ChainSpec = ChainSpec; type StateCommitment = MerklePatriciaTrie; + type Storage = EthStorage; } impl DatabaseProviderFactory for MockEthProvider { diff --git a/crates/storage/provider/src/test_utils/mod.rs b/crates/storage/provider/src/test_utils/mod.rs index c0e80930b318..2c3795573c20 100644 --- a/crates/storage/provider/src/test_utils/mod.rs +++ b/crates/storage/provider/src/test_utils/mod.rs @@ -1,4 +1,7 @@ -use crate::{providers::StaticFileProvider, HashingWriter, ProviderFactory, TrieWriter}; +use crate::{ + providers::{ProviderNodeTypes, StaticFileProvider}, + HashingWriter, ProviderFactory, TrieWriter, +}; use alloy_primitives::B256; use reth_chainspec::{ChainSpec, MAINNET}; use reth_db::{ @@ -6,7 +9,7 @@ use reth_db::{ DatabaseEnv, }; use reth_errors::ProviderResult; -use reth_node_types::{NodeTypesWithDB, NodeTypesWithDBAdapter}; +use reth_node_types::NodeTypesWithDBAdapter; use reth_primitives::{Account, StorageEntry}; use reth_trie::StateRoot; use reth_trie_db::DatabaseStateRoot; @@ -22,10 +25,11 @@ pub use reth_chain_state::test_utils::TestCanonStateSubscriptions; /// Mock [`reth_node_types::NodeTypes`] for testing. pub type MockNodeTypes = reth_node_types::AnyNodeTypesWithEngine< - (), + reth_primitives::EthPrimitives, reth_ethereum_engine_primitives::EthEngineTypes, reth_chainspec::ChainSpec, reth_trie_db::MerklePatriciaTrie, + crate::EthStorage, >; /// Mock [`reth_node_types::NodeTypesWithDB`] for testing. @@ -51,7 +55,7 @@ pub fn create_test_provider_factory_with_chain_spec( } /// Inserts the genesis alloc from the provided chain spec into the trie. 
-pub fn insert_genesis>( +pub fn insert_genesis>( provider_factory: &ProviderFactory, chain_spec: Arc, ) -> ProviderResult { diff --git a/crates/storage/provider/src/traits/block.rs b/crates/storage/provider/src/traits/block.rs index 50fb032923d8..a0dae1783eae 100644 --- a/crates/storage/provider/src/traits/block.rs +++ b/crates/storage/provider/src/traits/block.rs @@ -50,7 +50,7 @@ pub trait BlockWriter: Send + Sync { /// Bodies are passed as [`Option`]s, if body is `None` the corresponding block is empty. fn append_block_bodies( &self, - bodies: impl Iterator)>, + bodies: Vec<(BlockNumber, Option)>, ) -> ProviderResult<()>; /// Appends a batch of sealed blocks to the blockchain, including sender information, and diff --git a/crates/storage/storage-api/Cargo.toml b/crates/storage/storage-api/Cargo.toml index 2b13f6332f87..c059eb0d6e9b 100644 --- a/crates/storage/storage-api/Cargo.toml +++ b/crates/storage/storage-api/Cargo.toml @@ -18,6 +18,7 @@ reth-db-models.workspace = true reth-db-api.workspace = true reth-execution-types.workspace = true reth-primitives.workspace = true +reth-primitives-traits.workspace = true reth-prune-types.workspace = true reth-stages-types.workspace = true reth-storage-errors.workspace = true diff --git a/crates/storage/storage-api/src/chain.rs b/crates/storage/storage-api/src/chain.rs new file mode 100644 index 000000000000..099f61f1bcb5 --- /dev/null +++ b/crates/storage/storage-api/src/chain.rs @@ -0,0 +1,72 @@ +use crate::DBProvider; +use alloy_primitives::BlockNumber; +use reth_db::{ + cursor::DbCursorRW, + models::{StoredBlockOmmers, StoredBlockWithdrawals}, + tables, + transaction::DbTxMut, +}; +use reth_primitives_traits::{Block, BlockBody, FullNodePrimitives}; +use reth_storage_errors::provider::ProviderResult; + +/// Trait that implements how block bodies are written to the storage. +/// +/// Note: Within the current abstraction, this should only write to tables unrelated to +/// transactions. Writing of transactions is handled separately. +#[auto_impl::auto_impl(&, Arc)] +pub trait BlockBodyWriter { + /// Writes a set of block bodies to the storage. + fn write_block_bodies( + &self, + provider: &Provider, + bodies: Vec<(BlockNumber, Option)>, + ) -> ProviderResult<()>; +} + +/// Trait that implements how chain-specific types are written to the storage. +pub trait ChainStorageWriter: + BlockBodyWriter::Body> +{ +} +impl ChainStorageWriter for T where + T: BlockBodyWriter::Body> +{ +} + +/// Ethereum storage implementation. 
+#[derive(Debug, Default, Clone, Copy)] +pub struct EthStorage; + +impl BlockBodyWriter for EthStorage +where + Provider: DBProvider, +{ + fn write_block_bodies( + &self, + provider: &Provider, + bodies: Vec<(u64, Option)>, + ) -> ProviderResult<()> { + let mut ommers_cursor = provider.tx_ref().cursor_write::()?; + let mut withdrawals_cursor = + provider.tx_ref().cursor_write::()?; + + for (block_number, body) in bodies { + let Some(body) = body else { continue }; + + // Write ommers if any + if !body.ommers.is_empty() { + ommers_cursor.append(block_number, StoredBlockOmmers { ommers: body.ommers })?; + } + + // Write withdrawals if any + if let Some(withdrawals) = body.withdrawals { + if !withdrawals.is_empty() { + withdrawals_cursor + .append(block_number, StoredBlockWithdrawals { withdrawals })?; + } + } + } + + Ok(()) + } +} diff --git a/crates/storage/storage-api/src/lib.rs b/crates/storage/storage-api/src/lib.rs index 7b7ad761476a..be52a817e932 100644 --- a/crates/storage/storage-api/src/lib.rs +++ b/crates/storage/storage-api/src/lib.rs @@ -22,6 +22,9 @@ pub use block_id::*; mod block_hash; pub use block_hash::*; +mod chain; +pub use chain::*; + mod header; pub use header::*; diff --git a/examples/custom-engine-types/src/main.rs b/examples/custom-engine-types/src/main.rs index 704ecb7e3c4b..c21e893e05aa 100644 --- a/examples/custom-engine-types/src/main.rs +++ b/examples/custom-engine-types/src/main.rs @@ -42,7 +42,8 @@ use reth::{ PayloadBuilderConfig, }, network::NetworkHandle, - providers::{CanonStateSubscriptions, StateProviderFactory}, + primitives::EthPrimitives, + providers::{CanonStateSubscriptions, EthStorage, StateProviderFactory}, rpc::eth::EthApi, tasks::TaskManager, transaction_pool::TransactionPool, @@ -227,9 +228,10 @@ struct MyCustomNode; /// Configure the node types impl NodeTypes for MyCustomNode { - type Primitives = (); + type Primitives = EthPrimitives; type ChainSpec = ChainSpec; type StateCommitment = MerklePatriciaTrie; + type Storage = EthStorage; } /// Configure the node types with the custom engine types @@ -254,7 +256,14 @@ pub type MyNodeAddOns = RpcAddOns< /// This provides a preset configuration for the node impl Node for MyCustomNode where - N: FullNodeTypes>, + N: FullNodeTypes< + Types: NodeTypesWithEngine< + Engine = CustomEngineTypes, + ChainSpec = ChainSpec, + Primitives = EthPrimitives, + Storage = EthStorage, + >, + >, { type ComponentsBuilder = ComponentsBuilder< N, From 50c875b33c089db92c4a9c088e0427c003bcefe2 Mon Sep 17 00:00:00 2001 From: Alexey Shekhirin Date: Tue, 19 Nov 2024 15:39:41 +0000 Subject: [PATCH 039/156] feat(trie): short-circuit account/storage reveal in sparse trie (#12663) --- crates/trie/sparse/src/state.rs | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/crates/trie/sparse/src/state.rs b/crates/trie/sparse/src/state.rs index d7557a7a365d..0b0db1401150 100644 --- a/crates/trie/sparse/src/state.rs +++ b/crates/trie/sparse/src/state.rs @@ -42,6 +42,10 @@ impl SparseStateTrie { account: B256, proof: impl IntoIterator, ) -> SparseStateTrieResult<()> { + if self.revealed.contains_key(&account) { + return Ok(()); + } + let mut proof = proof.into_iter().peekable(); let Some(root_node) = self.validate_proof(&mut proof)? 
else { return Ok(()) }; @@ -69,6 +73,10 @@ impl SparseStateTrie { slot: B256, proof: impl IntoIterator, ) -> SparseStateTrieResult<()> { + if self.revealed.get(&account).is_some_and(|v| v.contains(&slot)) { + return Ok(()); + } + let mut proof = proof.into_iter().peekable(); let Some(root_node) = self.validate_proof(&mut proof)? else { return Ok(()) }; From 3408059393bcf03f6727f790ec52f28114e25d02 Mon Sep 17 00:00:00 2001 From: Roman Krasiuk Date: Tue, 19 Nov 2024 17:01:44 +0100 Subject: [PATCH 040/156] feat(trie): introduce `TRIE_ACCOUNT_RLP_MAX_SIZE` constant (#12638) --- crates/trie/parallel/src/proof.rs | 4 ++-- crates/trie/parallel/src/root.rs | 4 ++-- crates/trie/trie/src/constants.rs | 24 ++++++++++++++++++++++++ crates/trie/trie/src/lib.rs | 4 ++++ crates/trie/trie/src/proof.rs | 4 ++-- crates/trie/trie/src/trie.rs | 4 ++-- crates/trie/trie/src/witness.rs | 4 ++-- 7 files changed, 38 insertions(+), 10 deletions(-) create mode 100644 crates/trie/trie/src/constants.rs diff --git a/crates/trie/parallel/src/proof.rs b/crates/trie/parallel/src/proof.rs index bafb9917c600..88321c821a8d 100644 --- a/crates/trie/parallel/src/proof.rs +++ b/crates/trie/parallel/src/proof.rs @@ -17,7 +17,7 @@ use reth_trie::{ proof::StorageProof, trie_cursor::{InMemoryTrieCursorFactory, TrieCursorFactory}, walker::TrieWalker, - HashBuilder, MultiProof, Nibbles, TrieAccount, TrieInput, + HashBuilder, MultiProof, Nibbles, TrieAccount, TrieInput, TRIE_ACCOUNT_RLP_MAX_SIZE, }; use reth_trie_common::proof::ProofRetainer; use reth_trie_db::{DatabaseHashedCursorFactory, DatabaseTrieCursorFactory}; @@ -153,7 +153,7 @@ where let mut hash_builder = HashBuilder::default().with_proof_retainer(retainer); let mut storages = HashMap::default(); - let mut account_rlp = Vec::with_capacity(128); + let mut account_rlp = Vec::with_capacity(TRIE_ACCOUNT_RLP_MAX_SIZE); let mut account_node_iter = TrieNodeIter::new( walker, hashed_cursor_factory.hashed_account_cursor().map_err(ProviderError::Database)?, diff --git a/crates/trie/parallel/src/root.rs b/crates/trie/parallel/src/root.rs index e432b91062ca..7a316d8b15fb 100644 --- a/crates/trie/parallel/src/root.rs +++ b/crates/trie/parallel/src/root.rs @@ -14,7 +14,7 @@ use reth_trie::{ trie_cursor::{InMemoryTrieCursorFactory, TrieCursorFactory}, updates::TrieUpdates, walker::TrieWalker, - HashBuilder, Nibbles, StorageRoot, TrieAccount, TrieInput, + HashBuilder, Nibbles, StorageRoot, TrieAccount, TrieInput, TRIE_ACCOUNT_RLP_MAX_SIZE, }; use reth_trie_db::{DatabaseHashedCursorFactory, DatabaseTrieCursorFactory}; use std::{collections::HashMap, sync::Arc}; @@ -149,7 +149,7 @@ where ); let mut hash_builder = HashBuilder::default().with_updates(retain_updates); - let mut account_rlp = Vec::with_capacity(128); + let mut account_rlp = Vec::with_capacity(TRIE_ACCOUNT_RLP_MAX_SIZE); while let Some(node) = account_node_iter.try_next().map_err(ProviderError::Database)? { match node { TrieElement::Branch(node) => { diff --git a/crates/trie/trie/src/constants.rs b/crates/trie/trie/src/constants.rs new file mode 100644 index 000000000000..7354290d9596 --- /dev/null +++ b/crates/trie/trie/src/constants.rs @@ -0,0 +1,24 @@ +/// The maximum size of RLP encoded trie account in bytes. 
+/// 2 (header) + 4 * 1 (field lens) + 8 (nonce) + 32 * 3 (balance, storage root, code hash) +pub const TRIE_ACCOUNT_RLP_MAX_SIZE: usize = 110; + +#[cfg(test)] +mod tests { + use super::*; + use alloy_primitives::{B256, U256}; + use alloy_rlp::Encodable; + use reth_trie_common::TrieAccount; + + #[test] + fn account_rlp_max_size() { + let account = TrieAccount { + nonce: u64::MAX, + balance: U256::MAX, + storage_root: B256::from_slice(&[u8::MAX; 32]), + code_hash: B256::from_slice(&[u8::MAX; 32]), + }; + let mut encoded = Vec::new(); + account.encode(&mut encoded); + assert_eq!(encoded.len(), TRIE_ACCOUNT_RLP_MAX_SIZE); + } +} diff --git a/crates/trie/trie/src/lib.rs b/crates/trie/trie/src/lib.rs index bb568ae8b8cf..26bdc751124f 100644 --- a/crates/trie/trie/src/lib.rs +++ b/crates/trie/trie/src/lib.rs @@ -13,6 +13,10 @@ )] #![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] +/// Constants related to the trie computation. +mod constants; +pub use constants::*; + /// The implementation of a container for storing intermediate changes to a trie. /// The container indicates when the trie has been modified. pub mod prefix_set; diff --git a/crates/trie/trie/src/proof.rs b/crates/trie/trie/src/proof.rs index e99d686aca7f..895a3de153dc 100644 --- a/crates/trie/trie/src/proof.rs +++ b/crates/trie/trie/src/proof.rs @@ -4,7 +4,7 @@ use crate::{ prefix_set::{PrefixSetMut, TriePrefixSetsMut}, trie_cursor::TrieCursorFactory, walker::TrieWalker, - HashBuilder, Nibbles, + HashBuilder, Nibbles, TRIE_ACCOUNT_RLP_MAX_SIZE, }; use alloy_primitives::{ keccak256, @@ -104,7 +104,7 @@ where let mut hash_builder = HashBuilder::default().with_proof_retainer(retainer); let mut storages = HashMap::default(); - let mut account_rlp = Vec::with_capacity(128); + let mut account_rlp = Vec::with_capacity(TRIE_ACCOUNT_RLP_MAX_SIZE); let mut account_node_iter = TrieNodeIter::new(walker, hashed_account_cursor); while let Some(account_node) = account_node_iter.try_next()? { match account_node { diff --git a/crates/trie/trie/src/trie.rs b/crates/trie/trie/src/trie.rs index 1bf8cf1ce797..74faf7bbc60f 100644 --- a/crates/trie/trie/src/trie.rs +++ b/crates/trie/trie/src/trie.rs @@ -7,7 +7,7 @@ use crate::{ trie_cursor::TrieCursorFactory, updates::{StorageTrieUpdates, TrieUpdates}, walker::TrieWalker, - HashBuilder, Nibbles, TrieAccount, + HashBuilder, Nibbles, TrieAccount, TRIE_ACCOUNT_RLP_MAX_SIZE, }; use alloy_consensus::EMPTY_ROOT_HASH; use alloy_primitives::{keccak256, Address, B256}; @@ -178,7 +178,7 @@ where } }; - let mut account_rlp = Vec::with_capacity(128); + let mut account_rlp = Vec::with_capacity(TRIE_ACCOUNT_RLP_MAX_SIZE); let mut hashed_entries_walked = 0; let mut updated_storage_nodes = 0; while let Some(node) = account_node_iter.try_next()? { diff --git a/crates/trie/trie/src/witness.rs b/crates/trie/trie/src/witness.rs index 6f6a66a16eb9..46f85c4d82e4 100644 --- a/crates/trie/trie/src/witness.rs +++ b/crates/trie/trie/src/witness.rs @@ -3,7 +3,7 @@ use crate::{ prefix_set::TriePrefixSetsMut, proof::{Proof, StorageProof}, trie_cursor::TrieCursorFactory, - HashedPostState, + HashedPostState, TRIE_ACCOUNT_RLP_MAX_SIZE, }; use alloy_consensus::EMPTY_ROOT_HASH; use alloy_primitives::{ @@ -97,7 +97,7 @@ where // Attempt to compute state root from proofs and gather additional // information for the witness. 
- let mut account_rlp = Vec::with_capacity(128); + let mut account_rlp = Vec::with_capacity(TRIE_ACCOUNT_RLP_MAX_SIZE); let mut account_trie_nodes = BTreeMap::default(); for (hashed_address, hashed_slots) in proof_targets { let storage_multiproof = account_multiproof From 565fd4d1336e7ae24fef4d8f38a0feae9a712f47 Mon Sep 17 00:00:00 2001 From: Tien Nguyen <116023870+htiennv@users.noreply.github.com> Date: Wed, 20 Nov 2024 00:09:22 +0700 Subject: [PATCH 041/156] chore(exex): emit warn log when WAL grows beyond a certain number of blocks (#12634) --- crates/exex/exex/src/manager.rs | 15 ++++++++++++++- crates/exex/exex/src/wal/cache.rs | 5 +++++ crates/exex/exex/src/wal/mod.rs | 5 +++++ 3 files changed, 24 insertions(+), 1 deletion(-) diff --git a/crates/exex/exex/src/manager.rs b/crates/exex/exex/src/manager.rs index a17de660862b..e8902e0f3525 100644 --- a/crates/exex/exex/src/manager.rs +++ b/crates/exex/exex/src/manager.rs @@ -10,7 +10,7 @@ use reth_chainspec::Head; use reth_metrics::{metrics::Counter, Metrics}; use reth_primitives::SealedHeader; use reth_provider::HeaderProvider; -use reth_tracing::tracing::debug; +use reth_tracing::tracing::{debug, warn}; use std::{ collections::VecDeque, fmt::Debug, @@ -35,6 +35,12 @@ use tokio_util::sync::{PollSendError, PollSender, ReusableBoxFuture}; /// or 17 minutes of 1-second blocks. pub const DEFAULT_EXEX_MANAGER_CAPACITY: usize = 1024; +/// The maximum number of blocks allowed in the WAL before emitting a warning. +/// +/// This constant defines the threshold for the Write-Ahead Log (WAL) size. If the number of blocks +/// in the WAL exceeds this limit, a warning is logged to indicate potential issues. +pub const WAL_BLOCKS_WARNING: usize = 128; + /// The source of the notification. /// /// This distinguishment is needed to not commit any pipeline notificatations to [WAL](`Wal`), @@ -377,6 +383,13 @@ where .unwrap(); self.wal.finalize(lowest_finished_height)?; + if self.wal.num_blocks() > WAL_BLOCKS_WARNING { + warn!( + target: "exex::manager", + blocks = ?self.wal.num_blocks(), + "WAL contains too many blocks and is not getting cleared. That will lead to increased disk space usage. Check that you emit the FinishedHeight event from your ExExes." + ); + } } else { let unfinalized_exexes = exex_finished_heights .into_iter() diff --git a/crates/exex/exex/src/wal/cache.rs b/crates/exex/exex/src/wal/cache.rs index 882b65e15892..86943f33cfa0 100644 --- a/crates/exex/exex/src/wal/cache.rs +++ b/crates/exex/exex/src/wal/cache.rs @@ -35,6 +35,11 @@ impl BlockCache { self.notification_max_blocks.is_empty() } + /// Returns the number of blocks in the cache. + pub(super) fn num_blocks(&self) -> usize { + self.committed_blocks.len() + } + /// Removes all files from the cache that has notifications with a tip block less than or equal /// to the given block number. /// diff --git a/crates/exex/exex/src/wal/mod.rs b/crates/exex/exex/src/wal/mod.rs index a2e8ee8e6c6e..41a7829a70f3 100644 --- a/crates/exex/exex/src/wal/mod.rs +++ b/crates/exex/exex/src/wal/mod.rs @@ -66,6 +66,11 @@ impl Wal { ) -> eyre::Result> + '_>> { self.inner.iter_notifications() } + + /// Returns the number of blocks in the WAL. + pub fn num_blocks(&self) -> usize { + self.inner.block_cache().num_blocks() + } } /// Inner type for the WAL. 
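The warning introduced in this patch points operators at the `FinishedHeight` event: the manager only finalizes the WAL up to the lowest height that every ExEx has reported as finished, so an ExEx that never emits the event lets the WAL grow past `WAL_BLOCKS_WARNING`. Below is a minimal sketch of an ExEx loop that acknowledges committed chains; it assumes the surrounding reth ExEx API (`ExExContext`, `ExExEvent::FinishedHeight`, the notification stream) rather than anything added by this series, so treat the exact signatures as illustrative only.

use futures::TryStreamExt;
use reth_exex::{ExExContext, ExExEvent};
use reth_node_api::FullNodeComponents;

async fn minimal_exex<Node: FullNodeComponents>(mut ctx: ExExContext<Node>) -> eyre::Result<()> {
    // Drain notifications and acknowledge each committed chain so the manager
    // can finalize the WAL up to the reported height and keep it below
    // WAL_BLOCKS_WARNING.
    while let Some(notification) = ctx.notifications.try_next().await? {
        if let Some(committed_chain) = notification.committed_chain() {
            ctx.events.send(ExExEvent::FinishedHeight(committed_chain.tip().num_hash()))?;
        }
    }
    Ok(())
}
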
From d49f91378b19f17e5592eb56aca835656f234252 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Tue, 19 Nov 2024 18:31:56 +0100 Subject: [PATCH 042/156] chore: use jsonrpsee server crate directly (#12673) --- Cargo.lock | 2 +- Cargo.toml | 1 + crates/node/metrics/Cargo.toml | 2 +- crates/node/metrics/src/server.rs | 10 +++++----- 4 files changed, 8 insertions(+), 7 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 1b38e0566557..fb9018a55b73 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8128,7 +8128,7 @@ version = "1.1.2" dependencies = [ "eyre", "http", - "jsonrpsee", + "jsonrpsee-server", "metrics", "metrics-exporter-prometheus", "metrics-process", diff --git a/Cargo.toml b/Cargo.toml index 58cdd1f8ca7b..002b85f125a6 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -566,6 +566,7 @@ if-addrs = "0.13" # rpc jsonrpsee = "0.24" jsonrpsee-core = "0.24" +jsonrpsee-server = "0.24" jsonrpsee-http-client = "0.24" jsonrpsee-types = "0.24" diff --git a/crates/node/metrics/Cargo.toml b/crates/node/metrics/Cargo.toml index a823db9b467d..7e271f93ce56 100644 --- a/crates/node/metrics/Cargo.toml +++ b/crates/node/metrics/Cargo.toml @@ -21,7 +21,7 @@ metrics-util.workspace = true tokio.workspace = true -jsonrpsee = { workspace = true, features = ["server"] } +jsonrpsee-server.workspace = true http.workspace = true tower.workspace = true diff --git a/crates/node/metrics/src/server.rs b/crates/node/metrics/src/server.rs index 22c064d62f82..313329fb56a5 100644 --- a/crates/node/metrics/src/server.rs +++ b/crates/node/metrics/src/server.rs @@ -113,12 +113,12 @@ impl MetricServer { let mut shutdown = signal.clone().ignore_guard(); tokio::task::spawn(async move { - if let Err(error) = - jsonrpsee::server::serve_with_graceful_shutdown(io, service, &mut shutdown) + let _ = + jsonrpsee_server::serve_with_graceful_shutdown(io, service, &mut shutdown) .await - { - tracing::debug!(%error, "failed to serve request") - } + .inspect_err( + |error| tracing::debug!(%error, "failed to serve request"), + ); }); } }); From 2b21bcf42546746bc5d4581c562f49b36071e3a7 Mon Sep 17 00:00:00 2001 From: Emilia Hane Date: Tue, 19 Nov 2024 18:44:10 +0100 Subject: [PATCH 043/156] chore(sdk): Add adapter type for `NodePrimitives::Receipt` (#12674) --- crates/primitives-traits/src/lib.rs | 2 +- crates/primitives-traits/src/node.rs | 3 +++ 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/crates/primitives-traits/src/lib.rs b/crates/primitives-traits/src/lib.rs index 819825d635f1..79dff4ae36bf 100644 --- a/crates/primitives-traits/src/lib.rs +++ b/crates/primitives-traits/src/lib.rs @@ -80,7 +80,7 @@ pub use size::InMemorySize; /// Node traits pub mod node; -pub use node::{FullNodePrimitives, NodePrimitives}; +pub use node::{FullNodePrimitives, NodePrimitives, ReceiptTy}; /// Helper trait that requires arbitrary implementation if the feature is enabled. #[cfg(any(feature = "test-utils", feature = "arbitrary"))] diff --git a/crates/primitives-traits/src/node.rs b/crates/primitives-traits/src/node.rs index c11a19a105a8..180920d39345 100644 --- a/crates/primitives-traits/src/node.rs +++ b/crates/primitives-traits/src/node.rs @@ -73,3 +73,6 @@ where type TxType = T::TxType; type Receipt = T::Receipt; } + +/// Helper adapter type for accessing [`NodePrimitives`] receipt type. 
+pub type ReceiptTy = ::Receipt; From 37181c357a2e4e91fb7048671cdb09f5b5afdaaf Mon Sep 17 00:00:00 2001 From: Federico Gimenez Date: Tue, 19 Nov 2024 18:58:46 +0100 Subject: [PATCH 044/156] feat(executor): add init methods to set TxEnv overrides (#12551) Co-authored-by: Matthias Seitz --- crates/ethereum/evm/src/execute.rs | 14 ++++++++-- crates/evm/src/either.rs | 8 ++++++ crates/evm/src/execute.rs | 44 +++++++++++++++++++++++++++--- crates/evm/src/lib.rs | 15 ++++++++++ crates/optimism/evm/src/execute.rs | 14 ++++++++-- 5 files changed, 87 insertions(+), 8 deletions(-) diff --git a/crates/ethereum/evm/src/execute.rs b/crates/ethereum/evm/src/execute.rs index fa14e260d651..e339268a99a7 100644 --- a/crates/ethereum/evm/src/execute.rs +++ b/crates/ethereum/evm/src/execute.rs @@ -18,7 +18,7 @@ use reth_evm::{ }, state_change::post_block_balance_increments, system_calls::{OnStateHook, SystemCaller}, - ConfigureEvm, + ConfigureEvm, TxEnvOverrides, }; use reth_primitives::{BlockWithSenders, Receipt}; use reth_revm::db::State; @@ -83,6 +83,8 @@ where chain_spec: Arc, /// How to create an EVM. evm_config: EvmConfig, + /// Optional overrides for the transactions environment. + tx_env_overrides: Option>, /// Current state for block execution. state: State, /// Utility to call system smart contracts. @@ -96,7 +98,7 @@ where /// Creates a new [`EthExecutionStrategy`] pub fn new(state: State, chain_spec: Arc, evm_config: EvmConfig) -> Self { let system_caller = SystemCaller::new(evm_config.clone(), chain_spec.clone()); - Self { state, chain_spec, evm_config, system_caller } + Self { state, chain_spec, evm_config, system_caller, tx_env_overrides: None } } } @@ -130,6 +132,10 @@ where { type Error = BlockExecutionError; + fn init(&mut self, tx_env_overrides: Box) { + self.tx_env_overrides = Some(tx_env_overrides); + } + fn apply_pre_execution_changes( &mut self, block: &BlockWithSenders, @@ -172,6 +178,10 @@ where self.evm_config.fill_tx_env(evm.tx_mut(), transaction, *sender); + if let Some(tx_env_overrides) = &mut self.tx_env_overrides { + tx_env_overrides.apply(evm.tx_mut()); + } + // Execute transaction. 
let result_and_state = evm.transact().map_err(move |err| { let new_err = err.map_db_err(|e| e.into()); diff --git a/crates/evm/src/either.rs b/crates/evm/src/either.rs index 82f84301f03a..85bc7e7f9a79 100644 --- a/crates/evm/src/either.rs +++ b/crates/evm/src/either.rs @@ -6,6 +6,7 @@ use crate::{ execute::{BatchExecutor, BlockExecutorProvider, Executor}, system_calls::OnStateHook, }; +use alloc::boxed::Box; use alloy_primitives::BlockNumber; use reth_execution_errors::BlockExecutionError; use reth_execution_types::{BlockExecutionInput, BlockExecutionOutput, ExecutionOutcome}; @@ -70,6 +71,13 @@ where type Output = BlockExecutionOutput; type Error = BlockExecutionError; + fn init(&mut self, tx_env_overrides: Box) { + match self { + Self::Left(a) => a.init(tx_env_overrides), + Self::Right(b) => b.init(tx_env_overrides), + } + } + fn execute(self, input: Self::Input<'_>) -> Result { match self { Self::Left(a) => a.execute(input), diff --git a/crates/evm/src/execute.rs b/crates/evm/src/execute.rs index 677a15dfa1b4..42c756f4d93f 100644 --- a/crates/evm/src/execute.rs +++ b/crates/evm/src/execute.rs @@ -6,9 +6,8 @@ pub use reth_execution_errors::{ }; pub use reth_execution_types::{BlockExecutionInput, BlockExecutionOutput, ExecutionOutcome}; pub use reth_storage_errors::provider::ProviderError; -use revm::db::states::bundle_state::BundleRetention; -use crate::system_calls::OnStateHook; +use crate::{system_calls::OnStateHook, TxEnvOverrides}; use alloc::{boxed::Box, vec::Vec}; use alloy_eips::eip7685::Requests; use alloy_primitives::BlockNumber; @@ -17,7 +16,10 @@ use reth_consensus::ConsensusError; use reth_primitives::{BlockWithSenders, Receipt}; use reth_prune_types::PruneModes; use reth_revm::batch::BlockBatchRecord; -use revm::{db::BundleState, State}; +use revm::{ + db::{states::bundle_state::BundleRetention, BundleState}, + State, +}; use revm_primitives::{db::Database, U256}; /// A general purpose executor trait that executes an input (e.g. block) and produces an output @@ -32,6 +34,9 @@ pub trait Executor { /// The error type returned by the executor. type Error; + /// Initialize the executor with the given transaction environment overrides. + fn init(&mut self, _tx_env_overrides: Box) {} + /// Consumes the type and executes the block. /// /// # Note @@ -184,6 +189,9 @@ where /// The error type returned by this strategy's methods. type Error: From + core::error::Error; + /// Initialize the strategy with the given transaction environment overrides. + fn init(&mut self, _tx_env_overrides: Box) {} + /// Applies any necessary changes before executing the block's transactions. 
fn apply_pre_execution_changes( &mut self, @@ -329,6 +337,10 @@ where type Output = BlockExecutionOutput; type Error = S::Error; + fn init(&mut self, env_overrides: Box) { + self.strategy.init(env_overrides); + } + fn execute(mut self, input: Self::Input<'_>) -> Result { let BlockExecutionInput { block, total_difficulty } = input; @@ -480,7 +492,7 @@ mod tests { use alloy_primitives::U256; use reth_chainspec::{ChainSpec, MAINNET}; use revm::db::{CacheDB, EmptyDBTyped}; - use revm_primitives::bytes; + use revm_primitives::{bytes, TxEnv}; use std::sync::Arc; #[derive(Clone, Default)] @@ -703,4 +715,28 @@ mod tests { assert_eq!(block_execution_output.requests, expected_apply_post_execution_changes_result); assert_eq!(block_execution_output.state, expected_finish_result); } + + #[test] + fn test_tx_env_overrider() { + let strategy_factory = TestExecutorStrategyFactory { + execute_transactions_result: ExecuteOutput { + receipts: vec![Receipt::default()], + gas_used: 10, + }, + apply_post_execution_changes_result: Requests::new(vec![bytes!("deadbeef")]), + finish_result: BundleState::default(), + }; + let provider = BasicBlockExecutorProvider::new(strategy_factory); + let db = CacheDB::>::default(); + + // if we want to apply tx env overrides the executor must be mut. + let mut executor = provider.executor(db); + // execute consumes the executor, so we can only call it once. + // let result = executor.execute(BlockExecutionInput::new(&Default::default(), U256::ZERO)); + executor.init(Box::new(|tx_env: &mut TxEnv| { + tx_env.nonce.take(); + })); + let result = executor.execute(BlockExecutionInput::new(&Default::default(), U256::ZERO)); + assert!(result.is_ok()); + } } diff --git a/crates/evm/src/lib.rs b/crates/evm/src/lib.rs index d20dbe4594a2..f01701d5989d 100644 --- a/crates/evm/src/lib.rs +++ b/crates/evm/src/lib.rs @@ -212,3 +212,18 @@ pub struct NextBlockEnvAttributes { /// The randomness value for the next block. pub prev_randao: B256, } + +/// Function hook that allows to modify a transaction environment. +pub trait TxEnvOverrides { + /// Apply the overrides by modifying the given `TxEnv`. + fn apply(&mut self, env: &mut TxEnv); +} + +impl TxEnvOverrides for F +where + F: FnMut(&mut TxEnv), +{ + fn apply(&mut self, env: &mut TxEnv) { + self(env) + } +} diff --git a/crates/optimism/evm/src/execute.rs b/crates/optimism/evm/src/execute.rs index b4c2e16f593d..a9a4b301573e 100644 --- a/crates/optimism/evm/src/execute.rs +++ b/crates/optimism/evm/src/execute.rs @@ -15,7 +15,7 @@ use reth_evm::{ }, state_change::post_block_balance_increments, system_calls::{OnStateHook, SystemCaller}, - ConfigureEvm, + ConfigureEvm, TxEnvOverrides, }; use reth_optimism_chainspec::OpChainSpec; use reth_optimism_consensus::validate_block_post_execution; @@ -78,6 +78,8 @@ where chain_spec: Arc, /// How to create an EVM. evm_config: EvmConfig, + /// Optional overrides for the transactions environment. + tx_env_overrides: Option>, /// Current state for block execution. state: State, /// Utility to call system smart contracts. 
@@ -91,7 +93,7 @@ where /// Creates a new [`OpExecutionStrategy`] pub fn new(state: State, chain_spec: Arc, evm_config: EvmConfig) -> Self { let system_caller = SystemCaller::new(evm_config.clone(), chain_spec.clone()); - Self { state, chain_spec, evm_config, system_caller } + Self { state, chain_spec, evm_config, system_caller, tx_env_overrides: None } } } @@ -119,6 +121,10 @@ where { type Error = BlockExecutionError; + fn init(&mut self, tx_env_overrides: Box) { + self.tx_env_overrides = Some(tx_env_overrides); + } + fn apply_pre_execution_changes( &mut self, block: &BlockWithSenders, @@ -197,6 +203,10 @@ where self.evm_config.fill_tx_env(evm.tx_mut(), transaction, *sender); + if let Some(tx_env_overrides) = &mut self.tx_env_overrides { + tx_env_overrides.apply(evm.tx_mut()); + } + // Execute transaction. let result_and_state = evm.transact().map_err(move |err| { let new_err = err.map_db_err(|e| e.into()); From 8c467e42917d19d6fd7efe729283cc7f6c642584 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Tue, 19 Nov 2024 19:07:17 +0100 Subject: [PATCH 045/156] chore: genericify some net tx types (#12677) --- crates/net/network/src/transactions/mod.rs | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/crates/net/network/src/transactions/mod.rs b/crates/net/network/src/transactions/mod.rs index 0ccb4252ac37..125818da33af 100644 --- a/crates/net/network/src/transactions/mod.rs +++ b/crates/net/network/src/transactions/mod.rs @@ -1449,9 +1449,9 @@ impl PropagationMode { /// A transaction that's about to be propagated to multiple peers. #[derive(Debug, Clone)] -struct PropagateTransaction { +struct PropagateTransaction { size: usize, - transaction: Arc, + transaction: Arc, } // === impl PropagateTransaction === @@ -1477,9 +1477,9 @@ impl PropagateTransaction { /// Helper type to construct the appropriate message to send to the peer based on whether the peer /// should receive them in full or as pooled #[derive(Debug, Clone)] -enum PropagateTransactionsBuilder { +enum PropagateTransactionsBuilder { Pooled(PooledTransactionsHashesBuilder), - Full(FullTransactionsBuilder), + Full(FullTransactionsBuilder), } impl PropagateTransactionsBuilder { @@ -1528,11 +1528,11 @@ impl PropagateTransactionsBuilder { } /// Represents how the transactions should be sent to a peer if any. -struct PropagateTransactions { +struct PropagateTransactions { /// The pooled transaction hashes to send. pooled: Option, /// The transactions to send in full. - full: Option>>, + full: Option>>, } /// Helper type for constructing the full transaction message that enforces the @@ -1540,11 +1540,11 @@ struct PropagateTransactions { /// and enforces other propagation rules for EIP-4844 and tracks those transactions that can't be /// broadcasted in full. #[derive(Debug, Clone)] -struct FullTransactionsBuilder { +struct FullTransactionsBuilder { /// The soft limit to enforce for a single broadcast message of full transactions. total_size: usize, /// All transactions to be broadcasted. 
- transactions: Vec>, + transactions: Vec>, /// Transactions that didn't fit into the broadcast message pooled: PooledTransactionsHashesBuilder, } From 7c7baca9807e9a9556cb217d3df8cd03c21ba4ed Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Tue, 19 Nov 2024 19:25:01 +0100 Subject: [PATCH 046/156] chore: group tx manager functions (#12679) --- crates/net/network/src/network.rs | 2 +- crates/net/network/src/transactions/mod.rs | 831 +++++++++++---------- 2 files changed, 417 insertions(+), 416 deletions(-) diff --git a/crates/net/network/src/network.rs b/crates/net/network/src/network.rs index 1715fa63e2f4..2fa3fd90efe7 100644 --- a/crates/net/network/src/network.rs +++ b/crates/net/network/src/network.rs @@ -252,7 +252,7 @@ impl PeersInfo for NetworkHandle { } } -impl Peers for NetworkHandle { +impl Peers for NetworkHandle { fn add_trusted_peer_id(&self, peer: PeerId) { self.send_message(NetworkHandleMessage::AddTrustedPeerId(peer)); } diff --git a/crates/net/network/src/transactions/mod.rs b/crates/net/network/src/transactions/mod.rs index 125818da33af..241f01ae8abb 100644 --- a/crates/net/network/src/transactions/mod.rs +++ b/crates/net/network/src/transactions/mod.rs @@ -313,15 +313,367 @@ impl TransactionsManager { } } -// === impl TransactionsManager === +impl TransactionsManager { + /// Returns a new handle that can send commands to this type. + pub fn handle(&self) -> TransactionsHandle { + TransactionsHandle { manager_tx: self.command_tx.clone() } + } + + /// Returns `true` if [`TransactionsManager`] has capacity to request pending hashes. Returns + /// `false` if [`TransactionsManager`] is operating close to full capacity. + fn has_capacity_for_fetching_pending_hashes(&self) -> bool { + self.pending_pool_imports_info + .has_capacity(self.pending_pool_imports_info.max_pending_pool_imports) && + self.transaction_fetcher.has_capacity_for_fetching_pending_hashes() + } + + fn report_peer_bad_transactions(&self, peer_id: PeerId) { + self.report_peer(peer_id, ReputationChangeKind::BadTransactions); + self.metrics.reported_bad_transactions.increment(1); + } + + fn report_peer(&self, peer_id: PeerId, kind: ReputationChangeKind) { + trace!(target: "net::tx", ?peer_id, ?kind, "reporting reputation change"); + self.network.reputation_change(peer_id, kind); + } + + fn report_already_seen(&self, peer_id: PeerId) { + trace!(target: "net::tx", ?peer_id, "Penalizing peer for already seen transaction"); + self.network.reputation_change(peer_id, ReputationChangeKind::AlreadySeenTransaction); + } + + /// Clear the transaction + fn on_good_import(&mut self, hash: TxHash) { + self.transactions_by_peers.remove(&hash); + } + + /// Penalize the peers that intentionally sent the bad transaction, and cache it to avoid + /// fetching or importing it again. + /// + /// Errors that count as bad transactions are: + /// + /// - intrinsic gas too low + /// - exceeds gas limit + /// - gas uint overflow + /// - exceeds max init code size + /// - oversized data + /// - signer account has bytecode + /// - chain id mismatch + /// - old legacy chain id + /// - tx type not supported + /// + /// (and additionally for blobs txns...) 
+ /// + /// - no blobs + /// - too many blobs + /// - invalid kzg proof + /// - kzg error + /// - not blob transaction (tx type mismatch) + /// - wrong versioned kzg commitment hash + fn on_bad_import(&mut self, err: PoolError) { + let peers = self.transactions_by_peers.remove(&err.hash); + + // if we're _currently_ syncing, we ignore a bad transaction + if !err.is_bad_transaction() || self.network.is_syncing() { + return + } + // otherwise we penalize the peer that sent the bad transaction, with the assumption that + // the peer should have known that this transaction is bad (e.g. violating consensus rules) + if let Some(peers) = peers { + for peer_id in peers { + self.report_peer_bad_transactions(peer_id); + } + } + self.metrics.bad_imports.increment(1); + self.bad_imports.insert(err.hash); + } + + /// Runs an operation to fetch hashes that are cached in [`TransactionFetcher`]. + fn on_fetch_hashes_pending_fetch(&mut self) { + // try drain transaction hashes pending fetch + let info = &self.pending_pool_imports_info; + let max_pending_pool_imports = info.max_pending_pool_imports; + let has_capacity_wrt_pending_pool_imports = + |divisor| info.has_capacity(max_pending_pool_imports / divisor); + + self.transaction_fetcher + .on_fetch_pending_hashes(&self.peers, has_capacity_wrt_pending_pool_imports); + } + + fn on_request_error(&self, peer_id: PeerId, req_err: RequestError) { + let kind = match req_err { + RequestError::UnsupportedCapability => ReputationChangeKind::BadProtocol, + RequestError::Timeout => ReputationChangeKind::Timeout, + RequestError::ChannelClosed | RequestError::ConnectionDropped => { + // peer is already disconnected + return + } + RequestError::BadResponse => return self.report_peer_bad_transactions(peer_id), + }; + self.report_peer(peer_id, kind); + } + + #[inline] + fn update_poll_metrics(&self, start: Instant, poll_durations: TxManagerPollDurations) { + let metrics = &self.metrics; + + let TxManagerPollDurations { + acc_network_events, + acc_pending_imports, + acc_tx_events, + acc_imported_txns, + acc_fetch_events, + acc_pending_fetch, + acc_cmds, + } = poll_durations; + + // update metrics for whole poll function + metrics.duration_poll_tx_manager.set(start.elapsed().as_secs_f64()); + // update metrics for nested expressions + metrics.acc_duration_poll_network_events.set(acc_network_events.as_secs_f64()); + metrics.acc_duration_poll_pending_pool_imports.set(acc_pending_imports.as_secs_f64()); + metrics.acc_duration_poll_transaction_events.set(acc_tx_events.as_secs_f64()); + metrics.acc_duration_poll_imported_transactions.set(acc_imported_txns.as_secs_f64()); + metrics.acc_duration_poll_fetch_events.set(acc_fetch_events.as_secs_f64()); + metrics.acc_duration_fetch_pending_hashes.set(acc_pending_fetch.as_secs_f64()); + metrics.acc_duration_poll_commands.set(acc_cmds.as_secs_f64()); + } +} + +impl TransactionsManager +where + Pool: TransactionPool, + N: NetworkPrimitives, +{ + /// Processes a batch import results. 
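// The bad-import path shown above follows a simple policy: while the node is
// syncing (or when the pool error is not a consensus-level "bad transaction")
// nothing happens; otherwise every peer that sent the offending hash is
// penalized and the hash is cached so it is not fetched or imported again.
// A toy model of that policy, with plain integers and byte arrays standing in
// for reth's `PeerId`, `TxHash` and reputation handling:

use std::collections::{HashMap, HashSet};

type TxHash = [u8; 32];
type PeerId = u64;

#[derive(Default)]
struct BadImportTracker {
    /// Which peers sent us each in-flight transaction.
    transactions_by_peers: HashMap<TxHash, Vec<PeerId>>,
    /// Hashes that failed import and must not be re-fetched.
    bad_imports: HashSet<TxHash>,
    /// Peers penalized so far (stand-in for reputation changes).
    penalized: Vec<PeerId>,
    /// Whether the node is currently syncing.
    syncing: bool,
}

impl BadImportTracker {
    fn on_bad_import(&mut self, hash: TxHash, is_bad_transaction: bool) {
        let peers = self.transactions_by_peers.remove(&hash);
        // ignore while syncing, or when the error does not imply a bad transaction
        if !is_bad_transaction || self.syncing {
            return
        }
        // penalize everyone who gave us the offending transaction
        if let Some(peers) = peers {
            self.penalized.extend(peers);
        }
        // cache the hash so it is neither fetched nor imported again
        self.bad_imports.insert(hash);
    }
}

fn main() {
    let hash = [1u8; 32];
    let mut tracker = BadImportTracker {
        transactions_by_peers: HashMap::from([(hash, vec![7, 9])]),
        ..Default::default()
    };
    tracker.on_bad_import(hash, true);
    assert_eq!(tracker.penalized, vec![7, 9]);
    assert!(tracker.bad_imports.contains(&hash));
}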
+ fn on_batch_import_result(&mut self, batch_results: Vec>) { + for res in batch_results { + match res { + Ok(hash) => { + self.on_good_import(hash); + } + Err(err) => { + self.on_bad_import(err); + } + } + } + } + + /// Request handler for an incoming `NewPooledTransactionHashes` + fn on_new_pooled_transaction_hashes( + &mut self, + peer_id: PeerId, + msg: NewPooledTransactionHashes, + ) { + // If the node is initially syncing, ignore transactions + if self.network.is_initially_syncing() { + return + } + if self.network.tx_gossip_disabled() { + return + } + + // get handle to peer's session, if the session is still active + let Some(peer) = self.peers.get_mut(&peer_id) else { + trace!( + peer_id = format!("{peer_id:#}"), + ?msg, + "discarding announcement from inactive peer" + ); + + return + }; + let client = peer.client_version.clone(); + + // keep track of the transactions the peer knows + let mut count_txns_already_seen_by_peer = 0; + for tx in msg.iter_hashes().copied() { + if !peer.seen_transactions.insert(tx) { + count_txns_already_seen_by_peer += 1; + } + } + if count_txns_already_seen_by_peer > 0 { + // this may occur if transactions are sent or announced to a peer, at the same time as + // the peer sends/announces those hashes to us. this is because, marking + // txns as seen by a peer is done optimistically upon sending them to the + // peer. + self.metrics.messages_with_hashes_already_seen_by_peer.increment(1); + self.metrics + .occurrences_hash_already_seen_by_peer + .increment(count_txns_already_seen_by_peer); + + trace!(target: "net::tx", + %count_txns_already_seen_by_peer, + peer_id=format!("{peer_id:#}"), + ?client, + "Peer sent hashes that have already been marked as seen by peer" + ); + + self.report_already_seen(peer_id); + } + + // 1. filter out spam + let (validation_outcome, mut partially_valid_msg) = + self.transaction_fetcher.filter_valid_message.partially_filter_valid_entries(msg); + + if validation_outcome == FilterOutcome::ReportPeer { + self.report_peer(peer_id, ReputationChangeKind::BadAnnouncement); + } + + // 2. filter out transactions pending import to pool + partially_valid_msg.retain_by_hash(|hash| !self.transactions_by_peers.contains_key(hash)); + + // 3. filter out known hashes + // + // known txns have already been successfully fetched or received over gossip. + // + // most hashes will be filtered out here since this the mempool protocol is a gossip + // protocol, healthy peers will send many of the same hashes. + // + let hashes_count_pre_pool_filter = partially_valid_msg.len(); + self.pool.retain_unknown(&mut partially_valid_msg); + if hashes_count_pre_pool_filter > partially_valid_msg.len() { + let already_known_hashes_count = + hashes_count_pre_pool_filter - partially_valid_msg.len(); + self.metrics + .occurrences_hashes_already_in_pool + .increment(already_known_hashes_count as u64); + } + + if partially_valid_msg.is_empty() { + // nothing to request + return + } + + // 4. filter out invalid entries (spam) + // + // validates messages with respect to the given network, e.g. 
allowed tx types + // + let (validation_outcome, mut valid_announcement_data) = if partially_valid_msg + .msg_version() + .expect("partially valid announcement should have version") + .is_eth68() + { + // validate eth68 announcement data + self.transaction_fetcher + .filter_valid_message + .filter_valid_entries_68(partially_valid_msg) + } else { + // validate eth66 announcement data + self.transaction_fetcher + .filter_valid_message + .filter_valid_entries_66(partially_valid_msg) + }; + + if validation_outcome == FilterOutcome::ReportPeer { + self.report_peer(peer_id, ReputationChangeKind::BadAnnouncement); + } + + if valid_announcement_data.is_empty() { + // no valid announcement data + return + } + + // 5. filter out already seen unknown hashes + // + // seen hashes are already in the tx fetcher, pending fetch. + // + // for any seen hashes add the peer as fallback. unseen hashes are loaded into the tx + // fetcher, hence they should be valid at this point. + let bad_imports = &self.bad_imports; + self.transaction_fetcher.filter_unseen_and_pending_hashes( + &mut valid_announcement_data, + |hash| bad_imports.contains(hash), + &peer_id, + |peer_id| self.peers.contains_key(&peer_id), + &client, + ); + + if valid_announcement_data.is_empty() { + // nothing to request + return + } + + trace!(target: "net::tx::propagation", + peer_id=format!("{peer_id:#}"), + hashes_len=valid_announcement_data.iter().count(), + hashes=?valid_announcement_data.keys().collect::>(), + msg_version=%valid_announcement_data.msg_version(), + client_version=%client, + "received previously unseen and pending hashes in announcement from peer" + ); + + // only send request for hashes to idle peer, otherwise buffer hashes storing peer as + // fallback + if !self.transaction_fetcher.is_idle(&peer_id) { + // load message version before announcement data is destructed in packing + let msg_version = valid_announcement_data.msg_version(); + let (hashes, _version) = valid_announcement_data.into_request_hashes(); + + trace!(target: "net::tx", + peer_id=format!("{peer_id:#}"), + hashes=?*hashes, + %msg_version, + %client, + "buffering hashes announced by busy peer" + ); + + self.transaction_fetcher.buffer_hashes(hashes, Some(peer_id)); + + return + } + + // load message version before announcement data type is destructed in packing + let msg_version = valid_announcement_data.msg_version(); + // + // demand recommended soft limit on response, however the peer may enforce an arbitrary + // limit on the response (2MB) + // + // request buffer is shrunk via call to pack request! + let init_capacity_req = + self.transaction_fetcher.approx_capacity_get_pooled_transactions_req(msg_version); + let mut hashes_to_request = RequestTxHashes::with_capacity(init_capacity_req); + let surplus_hashes = + self.transaction_fetcher.pack_request(&mut hashes_to_request, valid_announcement_data); -impl TransactionsManager -where - Pool: TransactionPool, -{ - /// Returns a new handle that can send commands to this type. 
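// The announcement handler above is essentially a staged filter over the
// announced hashes: drop hashes already being imported, hashes the pool
// already knows, and hashes that previously failed import, then pack what is
// left into a size-limited request and buffer the surplus for a later fetch.
// A condensed, self-contained model of those stages (plain byte arrays stand
// in for `TxHash`; the real code also validates announcement versions and
// tracks fallback peers, which is omitted here):

use std::collections::HashSet;

type TxHash = [u8; 32];

fn filter_announced_hashes(
    announced: Vec<TxHash>,
    pending_import: &HashSet<TxHash>,
    known_to_pool: &HashSet<TxHash>,
    bad_imports: &HashSet<TxHash>,
    soft_request_limit: usize,
) -> (Vec<TxHash>, Vec<TxHash>) {
    let mut to_request = Vec::new();
    let mut to_buffer = Vec::new();

    for hash in announced {
        // collapsed version of the "pending import", "already in pool" and
        // "previously failed import" stages
        if pending_import.contains(&hash) ||
            known_to_pool.contains(&hash) ||
            bad_imports.contains(&hash)
        {
            continue
        }
        // pack up to the soft limit, buffer the surplus (cf. `pack_request`
        // and `buffer_hashes` in the code above)
        if to_request.len() < soft_request_limit {
            to_request.push(hash);
        } else {
            to_buffer.push(hash);
        }
    }

    (to_request, to_buffer)
}

fn main() {
    let announced: Vec<TxHash> = (0u8..5).map(|i| [i; 32]).collect();
    let known_to_pool: HashSet<TxHash> = [[1u8; 32]].into_iter().collect();
    let (request, buffer) =
        filter_announced_hashes(announced, &HashSet::new(), &known_to_pool, &HashSet::new(), 3);
    assert_eq!(request.len(), 3);
    assert_eq!(buffer.len(), 1);
}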
- pub fn handle(&self) -> TransactionsHandle { - TransactionsHandle { manager_tx: self.command_tx.clone() } + if !surplus_hashes.is_empty() { + trace!(target: "net::tx", + peer_id=format!("{peer_id:#}"), + surplus_hashes=?*surplus_hashes, + %msg_version, + %client, + "some hashes in announcement from peer didn't fit in `GetPooledTransactions` request, buffering surplus hashes" + ); + + self.transaction_fetcher.buffer_hashes(surplus_hashes, Some(peer_id)); + } + + trace!(target: "net::tx", + peer_id=format!("{peer_id:#}"), + hashes=?*hashes_to_request, + %msg_version, + %client, + "sending hashes in `GetPooledTransactions` request to peer's session" + ); + + // request the missing transactions + // + // get handle to peer's session again, at this point we know it exists + let Some(peer) = self.peers.get_mut(&peer_id) else { return }; + if let Some(failed_to_request_hashes) = + self.transaction_fetcher.request_transactions_from_peer(hashes_to_request, peer) + { + let conn_eth_version = peer.version; + + trace!(target: "net::tx", + peer_id=format!("{peer_id:#}"), + failed_to_request_hashes=?*failed_to_request_hashes, + %conn_eth_version, + %client, + "sending `GetPooledTransactions` request to peer's session failed, buffering hashes" + ); + self.transaction_fetcher.buffer_hashes(failed_to_request_hashes, Some(peer_id)); + } } } @@ -329,32 +681,6 @@ impl TransactionsManager where Pool: TransactionPool + 'static, { - #[inline] - fn update_poll_metrics(&self, start: Instant, poll_durations: TxManagerPollDurations) { - let metrics = &self.metrics; - - let TxManagerPollDurations { - acc_network_events, - acc_pending_imports, - acc_tx_events, - acc_imported_txns, - acc_fetch_events, - acc_pending_fetch, - acc_cmds, - } = poll_durations; - - // update metrics for whole poll function - metrics.duration_poll_tx_manager.set(start.elapsed().as_secs_f64()); - // update metrics for nested expressions - metrics.acc_duration_poll_network_events.set(acc_network_events.as_secs_f64()); - metrics.acc_duration_poll_pending_pool_imports.set(acc_pending_imports.as_secs_f64()); - metrics.acc_duration_poll_transaction_events.set(acc_tx_events.as_secs_f64()); - metrics.acc_duration_poll_imported_transactions.set(acc_imported_txns.as_secs_f64()); - metrics.acc_duration_poll_fetch_events.set(acc_fetch_events.as_secs_f64()); - metrics.acc_duration_fetch_pending_hashes.set(acc_pending_fetch.as_secs_f64()); - metrics.acc_duration_poll_commands.set(acc_cmds.as_secs_f64()); - } - /// Request handler for an incoming request for transactions fn on_get_pooled_transactions( &mut self, @@ -575,294 +901,77 @@ where } // send full transactions - self.network.send_transactions(peer_id, new_full_transactions); - } - - // Update propagated transactions metrics - self.metrics.propagated_transactions.increment(propagated.0.len() as u64); - - Some(propagated) - } - - /// Propagate the transaction hashes to the given peer - /// - /// Note: This will only send the hashes for transactions that exist in the pool. - fn propagate_hashes_to( - &mut self, - hashes: Vec, - peer_id: PeerId, - propagation_mode: PropagationMode, - ) { - trace!(target: "net::tx", "Start propagating transactions as hashes"); - - // This fetches a transactions from the pool, including the blob transactions, which are - // only ever sent as hashes. 
- let propagated = { - let Some(peer) = self.peers.get_mut(&peer_id) else { - // no such peer - return - }; - - let to_propagate: Vec = - self.pool.get_all(hashes).into_iter().map(PropagateTransaction::new).collect(); - - let mut propagated = PropagatedTransactions::default(); - - // check if transaction is known to peer - let mut hashes = PooledTransactionsHashesBuilder::new(peer.version); - - if propagation_mode.is_forced() { - hashes.extend(to_propagate) - } else { - for tx in to_propagate { - if !peer.seen_transactions.contains(&tx.hash()) { - // Include if the peer hasn't seen it - hashes.push(&tx); - } - } - } - - let new_pooled_hashes = hashes.build(); - - if new_pooled_hashes.is_empty() { - // nothing to propagate - return - } - - for hash in new_pooled_hashes.iter_hashes().copied() { - propagated.0.entry(hash).or_default().push(PropagateKind::Hash(peer_id)); - } - - trace!(target: "net::tx::propagation", ?peer_id, ?new_pooled_hashes, "Propagating transactions to peer"); - - // send hashes of transactions - self.network.send_transactions_hashes(peer_id, new_pooled_hashes); - - // Update propagated transactions metrics - self.metrics.propagated_transactions.increment(propagated.0.len() as u64); - - propagated - }; - - // notify pool so events get fired - self.pool.on_propagated(propagated); - } - - /// Request handler for an incoming `NewPooledTransactionHashes` - fn on_new_pooled_transaction_hashes( - &mut self, - peer_id: PeerId, - msg: NewPooledTransactionHashes, - ) { - // If the node is initially syncing, ignore transactions - if self.network.is_initially_syncing() { - return - } - if self.network.tx_gossip_disabled() { - return - } - - // get handle to peer's session, if the session is still active - let Some(peer) = self.peers.get_mut(&peer_id) else { - trace!( - peer_id = format!("{peer_id:#}"), - ?msg, - "discarding announcement from inactive peer" - ); - - return - }; - let client = peer.client_version.clone(); - - // keep track of the transactions the peer knows - let mut count_txns_already_seen_by_peer = 0; - for tx in msg.iter_hashes().copied() { - if !peer.seen_transactions.insert(tx) { - count_txns_already_seen_by_peer += 1; - } - } - if count_txns_already_seen_by_peer > 0 { - // this may occur if transactions are sent or announced to a peer, at the same time as - // the peer sends/announces those hashes to us. this is because, marking - // txns as seen by a peer is done optimistically upon sending them to the - // peer. - self.metrics.messages_with_hashes_already_seen_by_peer.increment(1); - self.metrics - .occurrences_hash_already_seen_by_peer - .increment(count_txns_already_seen_by_peer); - - trace!(target: "net::tx", - %count_txns_already_seen_by_peer, - peer_id=format!("{peer_id:#}"), - ?client, - "Peer sent hashes that have already been marked as seen by peer" - ); - - self.report_already_seen(peer_id); - } - - // 1. filter out spam - let (validation_outcome, mut partially_valid_msg) = - self.transaction_fetcher.filter_valid_message.partially_filter_valid_entries(msg); - - if validation_outcome == FilterOutcome::ReportPeer { - self.report_peer(peer_id, ReputationChangeKind::BadAnnouncement); - } - - // 2. filter out transactions pending import to pool - partially_valid_msg.retain_by_hash(|hash| !self.transactions_by_peers.contains_key(hash)); - - // 3. filter out known hashes - // - // known txns have already been successfully fetched or received over gossip. 
- // - // most hashes will be filtered out here since this the mempool protocol is a gossip - // protocol, healthy peers will send many of the same hashes. - // - let hashes_count_pre_pool_filter = partially_valid_msg.len(); - self.pool.retain_unknown(&mut partially_valid_msg); - if hashes_count_pre_pool_filter > partially_valid_msg.len() { - let already_known_hashes_count = - hashes_count_pre_pool_filter - partially_valid_msg.len(); - self.metrics - .occurrences_hashes_already_in_pool - .increment(already_known_hashes_count as u64); - } - - if partially_valid_msg.is_empty() { - // nothing to request - return - } - - // 4. filter out invalid entries (spam) - // - // validates messages with respect to the given network, e.g. allowed tx types - // - let (validation_outcome, mut valid_announcement_data) = if partially_valid_msg - .msg_version() - .expect("partially valid announcement should have version") - .is_eth68() - { - // validate eth68 announcement data - self.transaction_fetcher - .filter_valid_message - .filter_valid_entries_68(partially_valid_msg) - } else { - // validate eth66 announcement data - self.transaction_fetcher - .filter_valid_message - .filter_valid_entries_66(partially_valid_msg) - }; - - if validation_outcome == FilterOutcome::ReportPeer { - self.report_peer(peer_id, ReputationChangeKind::BadAnnouncement); + self.network.send_transactions(peer_id, new_full_transactions); } - if valid_announcement_data.is_empty() { - // no valid announcement data - return - } + // Update propagated transactions metrics + self.metrics.propagated_transactions.increment(propagated.0.len() as u64); - // 5. filter out already seen unknown hashes - // - // seen hashes are already in the tx fetcher, pending fetch. - // - // for any seen hashes add the peer as fallback. unseen hashes are loaded into the tx - // fetcher, hence they should be valid at this point. - let bad_imports = &self.bad_imports; - self.transaction_fetcher.filter_unseen_and_pending_hashes( - &mut valid_announcement_data, - |hash| bad_imports.contains(hash), - &peer_id, - |peer_id| self.peers.contains_key(&peer_id), - &client, - ); + Some(propagated) + } - if valid_announcement_data.is_empty() { - // nothing to request - return - } + /// Propagate the transaction hashes to the given peer + /// + /// Note: This will only send the hashes for transactions that exist in the pool. + fn propagate_hashes_to( + &mut self, + hashes: Vec, + peer_id: PeerId, + propagation_mode: PropagationMode, + ) { + trace!(target: "net::tx", "Start propagating transactions as hashes"); - trace!(target: "net::tx::propagation", - peer_id=format!("{peer_id:#}"), - hashes_len=valid_announcement_data.iter().count(), - hashes=?valid_announcement_data.keys().collect::>(), - msg_version=%valid_announcement_data.msg_version(), - client_version=%client, - "received previously unseen and pending hashes in announcement from peer" - ); + // This fetches a transactions from the pool, including the blob transactions, which are + // only ever sent as hashes. 
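// Both propagation paths above apply the same per-peer rule: under basic
// propagation a transaction is skipped if the peer has already seen it, while
// forced propagation sends it regardless. A small stand-alone sketch of that
// rule (byte arrays stand in for `TxHash`; the real builders also distinguish
// full transactions from hash-only announcements, which is not modelled here):

use std::collections::HashSet;

type TxHash = [u8; 32];

#[derive(Clone, Copy, PartialEq)]
enum PropagationMode {
    /// Skip transactions the peer has already seen.
    Basic,
    /// Send everything, regardless of what the peer has seen.
    Forced,
}

fn hashes_to_announce(
    hashes: &[TxHash],
    seen_by_peer: &HashSet<TxHash>,
    mode: PropagationMode,
) -> Vec<TxHash> {
    hashes
        .iter()
        .copied()
        .filter(|hash| mode == PropagationMode::Forced || !seen_by_peer.contains(hash))
        .collect()
}

fn main() {
    let a = [1u8; 32];
    let b = [2u8; 32];
    let seen: HashSet<TxHash> = [a].into_iter().collect();

    // basic propagation skips what the peer already knows
    assert_eq!(hashes_to_announce(&[a, b], &seen, PropagationMode::Basic), vec![b]);
    // forced propagation sends everything
    assert_eq!(hashes_to_announce(&[a, b], &seen, PropagationMode::Forced), vec![a, b]);
}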
+ let propagated = { + let Some(peer) = self.peers.get_mut(&peer_id) else { + // no such peer + return + }; - // only send request for hashes to idle peer, otherwise buffer hashes storing peer as - // fallback - if !self.transaction_fetcher.is_idle(&peer_id) { - // load message version before announcement data is destructed in packing - let msg_version = valid_announcement_data.msg_version(); - let (hashes, _version) = valid_announcement_data.into_request_hashes(); + let to_propagate: Vec = + self.pool.get_all(hashes).into_iter().map(PropagateTransaction::new).collect(); - trace!(target: "net::tx", - peer_id=format!("{peer_id:#}"), - hashes=?*hashes, - %msg_version, - %client, - "buffering hashes announced by busy peer" - ); + let mut propagated = PropagatedTransactions::default(); - self.transaction_fetcher.buffer_hashes(hashes, Some(peer_id)); + // check if transaction is known to peer + let mut hashes = PooledTransactionsHashesBuilder::new(peer.version); - return - } + if propagation_mode.is_forced() { + hashes.extend(to_propagate) + } else { + for tx in to_propagate { + if !peer.seen_transactions.contains(&tx.hash()) { + // Include if the peer hasn't seen it + hashes.push(&tx); + } + } + } - // load message version before announcement data type is destructed in packing - let msg_version = valid_announcement_data.msg_version(); - // - // demand recommended soft limit on response, however the peer may enforce an arbitrary - // limit on the response (2MB) - // - // request buffer is shrunk via call to pack request! - let init_capacity_req = - self.transaction_fetcher.approx_capacity_get_pooled_transactions_req(msg_version); - let mut hashes_to_request = RequestTxHashes::with_capacity(init_capacity_req); - let surplus_hashes = - self.transaction_fetcher.pack_request(&mut hashes_to_request, valid_announcement_data); + let new_pooled_hashes = hashes.build(); - if !surplus_hashes.is_empty() { - trace!(target: "net::tx", - peer_id=format!("{peer_id:#}"), - surplus_hashes=?*surplus_hashes, - %msg_version, - %client, - "some hashes in announcement from peer didn't fit in `GetPooledTransactions` request, buffering surplus hashes" - ); + if new_pooled_hashes.is_empty() { + // nothing to propagate + return + } - self.transaction_fetcher.buffer_hashes(surplus_hashes, Some(peer_id)); - } + for hash in new_pooled_hashes.iter_hashes().copied() { + propagated.0.entry(hash).or_default().push(PropagateKind::Hash(peer_id)); + } - trace!(target: "net::tx", - peer_id=format!("{peer_id:#}"), - hashes=?*hashes_to_request, - %msg_version, - %client, - "sending hashes in `GetPooledTransactions` request to peer's session" - ); + trace!(target: "net::tx::propagation", ?peer_id, ?new_pooled_hashes, "Propagating transactions to peer"); - // request the missing transactions - // - // get handle to peer's session again, at this point we know it exists - let Some(peer) = self.peers.get_mut(&peer_id) else { return }; - if let Some(failed_to_request_hashes) = - self.transaction_fetcher.request_transactions_from_peer(hashes_to_request, peer) - { - let conn_eth_version = peer.version; + // send hashes of transactions + self.network.send_transactions_hashes(peer_id, new_pooled_hashes); - trace!(target: "net::tx", - peer_id=format!("{peer_id:#}"), - failed_to_request_hashes=?*failed_to_request_hashes, - %conn_eth_version, - %client, - "sending `GetPooledTransactions` request to peer's session failed, buffering hashes" - ); - self.transaction_fetcher.buffer_hashes(failed_to_request_hashes, Some(peer_id)); - } + // Update 
propagated transactions metrics + self.metrics.propagated_transactions.increment(propagated.0.len() as u64); + + propagated + }; + + // notify pool so events get fired + self.pool.on_propagated(propagated); } /// Handles dedicated transaction events related to the `eth` protocol. @@ -1136,20 +1245,6 @@ where } } - /// Processes a batch import results. - fn on_batch_import_result(&mut self, batch_results: Vec>) { - for res in batch_results { - match res { - Ok(hash) => { - self.on_good_import(hash); - } - Err(err) => { - self.on_bad_import(err); - } - } - } - } - /// Processes a [`FetchEvent`]. fn on_fetch_event(&mut self, fetch_event: FetchEvent) { match fetch_event { @@ -1165,100 +1260,6 @@ where } } } - - /// Runs an operation to fetch hashes that are cached in [`TransactionFetcher`]. - fn on_fetch_hashes_pending_fetch(&mut self) { - // try drain transaction hashes pending fetch - let info = &self.pending_pool_imports_info; - let max_pending_pool_imports = info.max_pending_pool_imports; - let has_capacity_wrt_pending_pool_imports = - |divisor| info.has_capacity(max_pending_pool_imports / divisor); - - self.transaction_fetcher - .on_fetch_pending_hashes(&self.peers, has_capacity_wrt_pending_pool_imports); - } - - fn report_peer_bad_transactions(&self, peer_id: PeerId) { - self.report_peer(peer_id, ReputationChangeKind::BadTransactions); - self.metrics.reported_bad_transactions.increment(1); - } - - fn report_peer(&self, peer_id: PeerId, kind: ReputationChangeKind) { - trace!(target: "net::tx", ?peer_id, ?kind, "reporting reputation change"); - self.network.reputation_change(peer_id, kind); - } - - fn on_request_error(&self, peer_id: PeerId, req_err: RequestError) { - let kind = match req_err { - RequestError::UnsupportedCapability => ReputationChangeKind::BadProtocol, - RequestError::Timeout => ReputationChangeKind::Timeout, - RequestError::ChannelClosed | RequestError::ConnectionDropped => { - // peer is already disconnected - return - } - RequestError::BadResponse => return self.report_peer_bad_transactions(peer_id), - }; - self.report_peer(peer_id, kind); - } - - fn report_already_seen(&self, peer_id: PeerId) { - trace!(target: "net::tx", ?peer_id, "Penalizing peer for already seen transaction"); - self.network.reputation_change(peer_id, ReputationChangeKind::AlreadySeenTransaction); - } - - /// Clear the transaction - fn on_good_import(&mut self, hash: TxHash) { - self.transactions_by_peers.remove(&hash); - } - - /// Penalize the peers that intentionally sent the bad transaction, and cache it to avoid - /// fetching or importing it again. - /// - /// Errors that count as bad transactions are: - /// - /// - intrinsic gas too low - /// - exceeds gas limit - /// - gas uint overflow - /// - exceeds max init code size - /// - oversized data - /// - signer account has bytecode - /// - chain id mismatch - /// - old legacy chain id - /// - tx type not supported - /// - /// (and additionally for blobs txns...) - /// - /// - no blobs - /// - too many blobs - /// - invalid kzg proof - /// - kzg error - /// - not blob transaction (tx type mismatch) - /// - wrong versioned kzg commitment hash - fn on_bad_import(&mut self, err: PoolError) { - let peers = self.transactions_by_peers.remove(&err.hash); - - // if we're _currently_ syncing, we ignore a bad transaction - if !err.is_bad_transaction() || self.network.is_syncing() { - return - } - // otherwise we penalize the peer that sent the bad transaction, with the assumption that - // the peer should have known that this transaction is bad (e.g. 
violating consensus rules) - if let Some(peers) = peers { - for peer_id in peers { - self.report_peer_bad_transactions(peer_id); - } - } - self.metrics.bad_imports.increment(1); - self.bad_imports.insert(err.hash); - } - - /// Returns `true` if [`TransactionsManager`] has capacity to request pending hashes. Returns - /// `false` if [`TransactionsManager`] is operating close to full capacity. - fn has_capacity_for_fetching_pending_hashes(&self) -> bool { - self.pending_pool_imports_info - .has_capacity(self.pending_pool_imports_info.max_pending_pool_imports) && - self.transaction_fetcher.has_capacity_for_fetching_pending_hashes() - } } /// An endless future. Preemption ensure that future is non-blocking, nonetheless. See From fcb5050f87054a5bf3cbb0d3b677a4f42ffdbabd Mon Sep 17 00:00:00 2001 From: Emilia Hane Date: Tue, 19 Nov 2024 20:59:21 +0100 Subject: [PATCH 047/156] Add SDK codeowners (#12685) --- .github/CODEOWNERS | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 488e6c90cf7f..5a1d1df72611 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -20,11 +20,11 @@ crates/fs-util/ @onbjerg @emhane crates/metrics/ @onbjerg crates/net/ @emhane @mattsse @Rjected crates/net/downloaders/ @onbjerg @rkrasiuk @emhane -crates/node/ @mattsse @Rjected @onbjerg +crates/node/ @mattsse @Rjected @onbjerg @emhane @klkvr crates/optimism/ @mattsse @Rjected @fgimenez @emhane crates/payload/ @mattsse @Rjected -crates/primitives/ @Rjected -crates/primitives-traits/ @Rjected @joshieDo +crates/primitives/ @Rjected @emhane @mattsse @klkvr +crates/primitives-traits/ @Rjected @joshieDo @emhane @mattsse @klkvr crates/prune/ @shekhirin @joshieDo crates/revm/ @mattsse @rakita crates/rpc/ @mattsse @Rjected @emhane From aa34a2795b1a2ed22a9ef58bf935092de77afc20 Mon Sep 17 00:00:00 2001 From: Emilia Hane Date: Tue, 19 Nov 2024 21:16:45 +0100 Subject: [PATCH 048/156] chore(sdk): make `reth-chain-state` types generic over receipt (#12667) --- Cargo.lock | 1 + crates/blockchain-tree/src/shareable.rs | 6 +- crates/chain-state/src/in_memory.rs | 164 ++++++++++++----------- crates/chain-state/src/lib.rs | 3 + crates/chain-state/src/memory_overlay.rs | 16 +-- crates/chain-state/src/notifications.rs | 41 +++--- crates/chain-state/src/test_utils.rs | 16 ++- crates/exex/types/Cargo.toml | 7 +- crates/exex/types/src/notification.rs | 15 ++- crates/primitives/src/lib.rs | 2 +- 10 files changed, 144 insertions(+), 127 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index fb9018a55b73..19e90852175d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7644,6 +7644,7 @@ dependencies = [ "reth-chain-state", "reth-execution-types", "reth-primitives", + "reth-primitives-traits", "serde", "serde_with", ] diff --git a/crates/blockchain-tree/src/shareable.rs b/crates/blockchain-tree/src/shareable.rs index 8e6cceccdd19..ec1f3cccf97d 100644 --- a/crates/blockchain-tree/src/shareable.rs +++ b/crates/blockchain-tree/src/shareable.rs @@ -13,8 +13,8 @@ use reth_evm::execute::BlockExecutorProvider; use reth_node_types::NodeTypesWithDB; use reth_primitives::{Receipt, SealedBlock, SealedBlockWithSenders, SealedHeader}; use reth_provider::{ - providers::ProviderNodeTypes, BlockchainTreePendingStateProvider, CanonStateSubscriptions, - FullExecutionDataProvider, ProviderError, + providers::ProviderNodeTypes, BlockchainTreePendingStateProvider, CanonStateNotifications, + CanonStateSubscriptions, FullExecutionDataProvider, ProviderError, }; use reth_storage_errors::provider::ProviderResult; 
use std::{collections::BTreeMap, sync::Arc}; @@ -188,7 +188,7 @@ where N: ProviderNodeTypes, E: Send + Sync, { - fn subscribe_to_canonical_state(&self) -> reth_provider::CanonStateNotifications { + fn subscribe_to_canonical_state(&self) -> CanonStateNotifications { trace!(target: "blockchain_tree", "Registered subscriber for canonical state"); self.tree.read().subscribe_canon_state() } diff --git a/crates/chain-state/src/in_memory.rs b/crates/chain-state/src/in_memory.rs index 47443b36c67b..e07eaeaa5d9e 100644 --- a/crates/chain-state/src/in_memory.rs +++ b/crates/chain-state/src/in_memory.rs @@ -12,7 +12,7 @@ use reth_chainspec::ChainInfo; use reth_execution_types::{Chain, ExecutionOutcome}; use reth_metrics::{metrics::Gauge, Metrics}; use reth_primitives::{ - BlockWithSenders, Receipt, Receipts, SealedBlock, SealedBlockWithSenders, SealedHeader, + BlockWithSenders, NodePrimitives, Receipts, SealedBlock, SealedBlockWithSenders, SealedHeader, TransactionMeta, TransactionSigned, }; use reth_storage_api::StateProviderBox; @@ -50,22 +50,22 @@ pub(crate) struct InMemoryStateMetrics { /// This holds, because only lookup by number functions need to acquire the numbers lock first to /// get the block hash. #[derive(Debug, Default)] -pub(crate) struct InMemoryState { +pub(crate) struct InMemoryState { /// All canonical blocks that are not on disk yet. - blocks: RwLock>>, + blocks: RwLock>>>, /// Mapping of block numbers to block hashes. numbers: RwLock>, /// The pending block that has not yet been made canonical. - pending: watch::Sender>, + pending: watch::Sender>>, /// Metrics for the in-memory state. metrics: InMemoryStateMetrics, } -impl InMemoryState { +impl InMemoryState { pub(crate) fn new( - blocks: HashMap>, + blocks: HashMap>>, numbers: BTreeMap, - pending: Option, + pending: Option>, ) -> Self { let (pending, _) = watch::channel(pending); let this = Self { @@ -95,12 +95,12 @@ impl InMemoryState { } /// Returns the state for a given block hash. - pub(crate) fn state_by_hash(&self, hash: B256) -> Option> { + pub(crate) fn state_by_hash(&self, hash: B256) -> Option>> { self.blocks.read().get(&hash).cloned() } /// Returns the state for a given block number. - pub(crate) fn state_by_number(&self, number: u64) -> Option> { + pub(crate) fn state_by_number(&self, number: u64) -> Option>> { let hash = self.hash_by_number(number)?; self.state_by_hash(hash) } @@ -111,14 +111,14 @@ impl InMemoryState { } /// Returns the current chain head state. - pub(crate) fn head_state(&self) -> Option> { + pub(crate) fn head_state(&self) -> Option>> { let hash = *self.numbers.read().last_key_value()?.1; self.state_by_hash(hash) } /// Returns the pending state corresponding to the current head plus one, /// from the payload received in newPayload that does not have a FCU yet. - pub(crate) fn pending_state(&self) -> Option { + pub(crate) fn pending_state(&self) -> Option> { self.pending.borrow().clone() } @@ -131,17 +131,17 @@ impl InMemoryState { /// Inner type to provide in memory state. It includes a chain tracker to be /// advanced internally by the tree. #[derive(Debug)] -pub(crate) struct CanonicalInMemoryStateInner { +pub(crate) struct CanonicalInMemoryStateInner { /// Tracks certain chain information, such as the canonical head, safe head, and finalized /// head. pub(crate) chain_info_tracker: ChainInfoTracker, /// Tracks blocks at the tip of the chain that have not been persisted to disk yet. 
- pub(crate) in_memory_state: InMemoryState, + pub(crate) in_memory_state: InMemoryState, /// A broadcast stream that emits events when the canonical chain is updated. - pub(crate) canon_state_notification_sender: CanonStateNotificationSender, + pub(crate) canon_state_notification_sender: CanonStateNotificationSender, } -impl CanonicalInMemoryStateInner { +impl CanonicalInMemoryStateInner { /// Clears all entries in the in memory state. fn clear(&self) { { @@ -162,17 +162,17 @@ impl CanonicalInMemoryStateInner { /// all canonical blocks not on disk yet and keeps track of the block range that /// is in memory. #[derive(Debug, Clone)] -pub struct CanonicalInMemoryState { - pub(crate) inner: Arc, +pub struct CanonicalInMemoryState { + pub(crate) inner: Arc>, } -impl CanonicalInMemoryState { +impl CanonicalInMemoryState { /// Create a new in-memory state with the given blocks, numbers, pending state, and optional /// finalized header. pub fn new( - blocks: HashMap>, + blocks: HashMap>>, numbers: BTreeMap, - pending: Option, + pending: Option>, finalized: Option, safe: Option, ) -> Self { @@ -236,7 +236,7 @@ impl CanonicalInMemoryState { /// Updates the pending block with the given block. /// /// Note: This assumes that the parent block of the pending block is canonical. - pub fn set_pending_block(&self, pending: ExecutedBlock) { + pub fn set_pending_block(&self, pending: ExecutedBlock) { // fetch the state of the pending block's parent block let parent = self.state_by_hash(pending.block().parent_hash); let pending = BlockState::with_parent(pending, parent); @@ -252,7 +252,7 @@ impl CanonicalInMemoryState { /// them to their parent blocks. fn update_blocks(&self, new_blocks: I, reorged: I) where - I: IntoIterator, + I: IntoIterator>, { { // acquire locks, starting with the numbers lock @@ -288,7 +288,7 @@ impl CanonicalInMemoryState { } /// Update the in memory state with the given chain update. - pub fn update_chain(&self, new_chain: NewCanonicalChain) { + pub fn update_chain(&self, new_chain: NewCanonicalChain) { match new_chain { NewCanonicalChain::Commit { new } => { self.update_blocks(new, vec![]); @@ -359,22 +359,22 @@ impl CanonicalInMemoryState { } /// Returns in memory state corresponding the given hash. - pub fn state_by_hash(&self, hash: B256) -> Option> { + pub fn state_by_hash(&self, hash: B256) -> Option>> { self.inner.in_memory_state.state_by_hash(hash) } /// Returns in memory state corresponding the block number. - pub fn state_by_number(&self, number: u64) -> Option> { + pub fn state_by_number(&self, number: u64) -> Option>> { self.inner.in_memory_state.state_by_number(number) } /// Returns the in memory head state. - pub fn head_state(&self) -> Option> { + pub fn head_state(&self) -> Option>> { self.inner.in_memory_state.head_state() } /// Returns the in memory pending state. - pub fn pending_state(&self) -> Option { + pub fn pending_state(&self) -> Option> { self.inner.in_memory_state.pending_state() } @@ -479,14 +479,14 @@ impl CanonicalInMemoryState { /// Returns a tuple with the `SealedBlock` corresponding to the pending /// state and a vector of its `Receipt`s. - pub fn pending_block_and_receipts(&self) -> Option<(SealedBlock, Vec)> { + pub fn pending_block_and_receipts(&self) -> Option<(SealedBlock, Vec)> { self.pending_state().map(|block_state| { (block_state.block_ref().block().clone(), block_state.executed_block_receipts()) }) } /// Subscribe to new blocks events. 
- pub fn subscribe_canon_state(&self) -> CanonStateNotifications { + pub fn subscribe_canon_state(&self) -> CanonStateNotifications { self.inner.canon_state_notification_sender.subscribe() } @@ -501,7 +501,7 @@ impl CanonicalInMemoryState { } /// Attempts to send a new [`CanonStateNotification`] to all active Receiver handles. - pub fn notify_canon_state(&self, event: CanonStateNotification) { + pub fn notify_canon_state(&self, event: CanonStateNotification) { self.inner.canon_state_notification_sender.send(event).ok(); } @@ -513,7 +513,7 @@ impl CanonicalInMemoryState { &self, hash: B256, historical: StateProviderBox, - ) -> MemoryOverlayStateProvider { + ) -> MemoryOverlayStateProvider { let in_memory = if let Some(state) = self.state_by_hash(hash) { state.chain().map(|block_state| block_state.block()).collect() } else { @@ -527,7 +527,7 @@ impl CanonicalInMemoryState { /// oldest (highest to lowest). /// /// This iterator contains a snapshot of the in-memory state at the time of the call. - pub fn canonical_chain(&self) -> impl Iterator> { + pub fn canonical_chain(&self) -> impl Iterator>> { self.inner.in_memory_state.head_state().into_iter().flat_map(|head| head.iter()) } @@ -577,22 +577,22 @@ impl CanonicalInMemoryState { /// State after applying the given block, this block is part of the canonical chain that partially /// stored in memory and can be traced back to a canonical block on disk. #[derive(Debug, PartialEq, Eq, Clone)] -pub struct BlockState { +pub struct BlockState { /// The executed block that determines the state after this block has been executed. - block: ExecutedBlock, + block: ExecutedBlock, /// The block's parent block if it exists. - parent: Option>, + parent: Option>>, } #[allow(dead_code)] -impl BlockState { +impl BlockState { /// [`BlockState`] constructor. - pub const fn new(block: ExecutedBlock) -> Self { + pub const fn new(block: ExecutedBlock) -> Self { Self { block, parent: None } } /// [`BlockState`] constructor with parent. - pub const fn with_parent(block: ExecutedBlock, parent: Option>) -> Self { + pub const fn with_parent(block: ExecutedBlock, parent: Option>) -> Self { Self { block, parent } } @@ -606,12 +606,12 @@ impl BlockState { } /// Returns the executed block that determines the state. - pub fn block(&self) -> ExecutedBlock { + pub fn block(&self) -> ExecutedBlock { self.block.clone() } /// Returns a reference to the executed block that determines the state. - pub const fn block_ref(&self) -> &ExecutedBlock { + pub const fn block_ref(&self) -> &ExecutedBlock { &self.block } @@ -646,7 +646,7 @@ impl BlockState { } /// Returns the `Receipts` of executed block that determines the state. - pub fn receipts(&self) -> &Receipts { + pub fn receipts(&self) -> &Receipts { &self.block.execution_outcome().receipts } @@ -654,7 +654,7 @@ impl BlockState { /// We assume that the `Receipts` in the executed block `ExecutionOutcome` /// has only one element corresponding to the executed block associated to /// the state. - pub fn executed_block_receipts(&self) -> Vec { + pub fn executed_block_receipts(&self) -> Vec { let receipts = self.receipts(); debug_assert!( @@ -713,7 +713,7 @@ impl BlockState { /// /// This merges the state of all blocks that are part of the chain that the this block is /// the head of. This includes all blocks that connect back to the canonical block on disk. 
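// `state_provider` above layers the collected in-memory blocks (newest to
// oldest) on top of a historical provider: lookups consult the in-memory chain
// first and only fall back to disk if nothing in memory answers. A toy model
// of that overlay lookup, using string-keyed balances instead of reth's
// provider traits and `MemoryOverlayStateProvider`:

use std::collections::HashMap;

struct MemoryOverlay {
    /// Per-block account balances for the in-memory chain, newest first.
    in_memory: Vec<HashMap<&'static str, u64>>,
    /// The historical (on-disk) state the overlay falls back to.
    historical: HashMap<&'static str, u64>,
}

impl MemoryOverlay {
    fn balance(&self, account: &str) -> Option<u64> {
        // newest in-memory block wins
        for block_state in &self.in_memory {
            if let Some(balance) = block_state.get(account) {
                return Some(*balance)
            }
        }
        // nothing in memory touched the account: ask the historical state
        self.historical.get(account).copied()
    }
}

fn main() {
    let overlay = MemoryOverlay {
        in_memory: vec![
            HashMap::from([("alice", 30)]),             // newest in-memory block
            HashMap::from([("alice", 20), ("bob", 5)]), // its parent
        ],
        historical: HashMap::from([("alice", 10), ("carol", 7)]),
    };
    assert_eq!(overlay.balance("alice"), Some(30)); // newest in-memory value wins
    assert_eq!(overlay.balance("bob"), Some(5));    // found in an older in-memory block
    assert_eq!(overlay.balance("carol"), Some(7));  // falls back to disk
}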
- pub fn state_provider(&self, historical: StateProviderBox) -> MemoryOverlayStateProvider { + pub fn state_provider(&self, historical: StateProviderBox) -> MemoryOverlayStateProvider { let in_memory = self.chain().map(|block_state| block_state.block()).collect(); MemoryOverlayStateProvider::new(historical, in_memory) @@ -771,25 +771,25 @@ impl BlockState { /// Represents an executed block stored in-memory. #[derive(Clone, Debug, PartialEq, Eq, Default)] -pub struct ExecutedBlock { +pub struct ExecutedBlock { /// Sealed block the rest of fields refer to. pub block: Arc, /// Block's senders. pub senders: Arc>, /// Block's execution outcome. - pub execution_output: Arc, + pub execution_output: Arc>, /// Block's hashed state. pub hashed_state: Arc, /// Trie updates that result of applying the block. pub trie: Arc, } -impl ExecutedBlock { +impl ExecutedBlock { /// [`ExecutedBlock`] constructor. pub const fn new( block: Arc, senders: Arc>, - execution_output: Arc, + execution_output: Arc>, hashed_state: Arc, trie: Arc, ) -> Self { @@ -814,7 +814,7 @@ impl ExecutedBlock { } /// Returns a reference to the block's execution outcome - pub fn execution_outcome(&self) -> &ExecutionOutcome { + pub fn execution_outcome(&self) -> &ExecutionOutcome { &self.execution_output } @@ -831,23 +831,23 @@ impl ExecutedBlock { /// Non-empty chain of blocks. #[derive(Debug)] -pub enum NewCanonicalChain { +pub enum NewCanonicalChain { /// A simple append to the current canonical head Commit { /// all blocks that lead back to the canonical head - new: Vec, + new: Vec>, }, /// A reorged chain consists of two chains that trace back to a shared ancestor block at which /// point they diverge. Reorg { /// All blocks of the _new_ chain - new: Vec, + new: Vec>, /// All blocks of the _old_ chain - old: Vec, + old: Vec>, }, } -impl NewCanonicalChain { +impl NewCanonicalChain { /// Returns the length of the new chain. 
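// Like the earlier change that made the network's `PropagateTransaction` types
// generic over the transaction (`T = TransactionSigned`), this patch threads a
// `N: NodePrimitives = EthPrimitives` parameter through the chain-state types
// so other node types can plug in their own receipt without breaking existing
// call sites. A stripped-down sketch of that defaulted-type-parameter pattern,
// with placeholder traits and structs standing in for reth's real ones:

use std::fmt::Debug;

/// Placeholder for `NodePrimitives`: here it only associates a receipt type.
trait NodePrimitives {
    type Receipt: Debug + Clone;
}

/// Placeholder Ethereum primitives, used as the default everywhere.
struct EthPrimitives;

#[derive(Debug, Clone)]
struct EthReceipt {
    success: bool,
}

impl NodePrimitives for EthPrimitives {
    type Receipt = EthReceipt;
}

/// Generic over the primitives, with `EthPrimitives` as default so code that
/// still writes the bare type name keeps compiling unchanged.
struct BlockState<N: NodePrimitives = EthPrimitives> {
    receipts: Vec<N::Receipt>,
}

impl<N: NodePrimitives> BlockState<N> {
    fn receipts(&self) -> &[N::Receipt] {
        &self.receipts
    }
}

fn main() {
    // Pre-existing call sites: no type parameter spelled out.
    let eth: BlockState = BlockState { receipts: vec![EthReceipt { success: true }] };
    assert!(eth.receipts()[0].success);

    // A different node type supplies its own receipt.
    #[derive(Debug, Clone)]
    struct OpReceipt;
    struct OpPrimitives;
    impl NodePrimitives for OpPrimitives {
        type Receipt = OpReceipt;
    }
    let op: BlockState<OpPrimitives> = BlockState { receipts: vec![OpReceipt] };
    assert_eq!(op.receipts().len(), 1);
}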
pub fn new_block_count(&self) -> usize { match self { @@ -864,7 +864,7 @@ impl NewCanonicalChain { } /// Converts the new chain into a notification that will be emitted to listeners - pub fn to_chain_notification(&self) -> CanonStateNotification { + pub fn to_chain_notification(&self) -> CanonStateNotification { match self { Self::Commit { new } => { let new = Arc::new(new.iter().fold(Chain::default(), |mut chain, exec| { @@ -917,7 +917,7 @@ mod tests { use alloy_primitives::{map::HashSet, BlockNumber, Bytes, StorageKey, StorageValue}; use rand::Rng; use reth_errors::ProviderResult; - use reth_primitives::{Account, Bytecode, Receipt}; + use reth_primitives::{Account, Bytecode, EthPrimitives, Receipt}; use reth_storage_api::{ AccountReader, BlockHashReader, StateProofProvider, StateProvider, StateRootProvider, StorageRootProvider, @@ -925,7 +925,7 @@ mod tests { use reth_trie::{AccountProof, HashedStorage, MultiProof, StorageProof, TrieInput}; fn create_mock_state( - test_block_builder: &mut TestBlockBuilder, + test_block_builder: &mut TestBlockBuilder, block_number: u64, parent_hash: B256, ) -> BlockState { @@ -935,7 +935,7 @@ mod tests { } fn create_mock_state_chain( - test_block_builder: &mut TestBlockBuilder, + test_block_builder: &mut TestBlockBuilder, num_blocks: u64, ) -> Vec { let mut chain = Vec::with_capacity(num_blocks as usize); @@ -1065,7 +1065,7 @@ mod tests { fn test_in_memory_state_impl_state_by_hash() { let mut state_by_hash = HashMap::default(); let number = rand::thread_rng().gen::(); - let mut test_block_builder = TestBlockBuilder::default(); + let mut test_block_builder: TestBlockBuilder = TestBlockBuilder::default(); let state = Arc::new(create_mock_state(&mut test_block_builder, number, B256::random())); state_by_hash.insert(state.hash(), state.clone()); @@ -1081,7 +1081,7 @@ mod tests { let mut hash_by_number = BTreeMap::new(); let number = rand::thread_rng().gen::(); - let mut test_block_builder = TestBlockBuilder::default(); + let mut test_block_builder: TestBlockBuilder = TestBlockBuilder::default(); let state = Arc::new(create_mock_state(&mut test_block_builder, number, B256::random())); let hash = state.hash(); @@ -1098,7 +1098,7 @@ mod tests { fn test_in_memory_state_impl_head_state() { let mut state_by_hash = HashMap::default(); let mut hash_by_number = BTreeMap::new(); - let mut test_block_builder = TestBlockBuilder::default(); + let mut test_block_builder: TestBlockBuilder = TestBlockBuilder::default(); let state1 = Arc::new(create_mock_state(&mut test_block_builder, 1, B256::random())); let hash1 = state1.hash(); let state2 = Arc::new(create_mock_state(&mut test_block_builder, 2, hash1)); @@ -1118,7 +1118,7 @@ mod tests { #[test] fn test_in_memory_state_impl_pending_state() { let pending_number = rand::thread_rng().gen::(); - let mut test_block_builder = TestBlockBuilder::default(); + let mut test_block_builder: TestBlockBuilder = TestBlockBuilder::default(); let pending_state = create_mock_state(&mut test_block_builder, pending_number, B256::random()); let pending_hash = pending_state.hash(); @@ -1135,7 +1135,8 @@ mod tests { #[test] fn test_in_memory_state_impl_no_pending_state() { - let in_memory_state = InMemoryState::new(HashMap::default(), BTreeMap::new(), None); + let in_memory_state: InMemoryState = + InMemoryState::new(HashMap::default(), BTreeMap::new(), None); assert_eq!(in_memory_state.pending_state(), None); } @@ -1143,7 +1144,7 @@ mod tests { #[test] fn test_state_new() { let number = rand::thread_rng().gen::(); - let mut test_block_builder = 
TestBlockBuilder::default(); + let mut test_block_builder: TestBlockBuilder = TestBlockBuilder::default(); let block = test_block_builder.get_executed_block_with_number(number, B256::random()); let state = BlockState::new(block.clone()); @@ -1154,7 +1155,7 @@ mod tests { #[test] fn test_state_block() { let number = rand::thread_rng().gen::(); - let mut test_block_builder = TestBlockBuilder::default(); + let mut test_block_builder: TestBlockBuilder = TestBlockBuilder::default(); let block = test_block_builder.get_executed_block_with_number(number, B256::random()); let state = BlockState::new(block.clone()); @@ -1165,7 +1166,7 @@ mod tests { #[test] fn test_state_hash() { let number = rand::thread_rng().gen::(); - let mut test_block_builder = TestBlockBuilder::default(); + let mut test_block_builder: TestBlockBuilder = TestBlockBuilder::default(); let block = test_block_builder.get_executed_block_with_number(number, B256::random()); let state = BlockState::new(block.clone()); @@ -1176,7 +1177,7 @@ mod tests { #[test] fn test_state_number() { let number = rand::thread_rng().gen::(); - let mut test_block_builder = TestBlockBuilder::default(); + let mut test_block_builder: TestBlockBuilder = TestBlockBuilder::default(); let block = test_block_builder.get_executed_block_with_number(number, B256::random()); let state = BlockState::new(block); @@ -1187,7 +1188,7 @@ mod tests { #[test] fn test_state_state_root() { let number = rand::thread_rng().gen::(); - let mut test_block_builder = TestBlockBuilder::default(); + let mut test_block_builder: TestBlockBuilder = TestBlockBuilder::default(); let block = test_block_builder.get_executed_block_with_number(number, B256::random()); let state = BlockState::new(block.clone()); @@ -1198,7 +1199,7 @@ mod tests { #[test] fn test_state_receipts() { let receipts = Receipts { receipt_vec: vec![vec![Some(Receipt::default())]] }; - let mut test_block_builder = TestBlockBuilder::default(); + let mut test_block_builder: TestBlockBuilder = TestBlockBuilder::default(); let block = test_block_builder.get_executed_block_with_receipts(receipts.clone(), B256::random()); @@ -1209,8 +1210,8 @@ mod tests { #[test] fn test_in_memory_state_chain_update() { - let state = CanonicalInMemoryState::empty(); - let mut test_block_builder = TestBlockBuilder::default(); + let state: CanonicalInMemoryState = CanonicalInMemoryState::empty(); + let mut test_block_builder: TestBlockBuilder = TestBlockBuilder::default(); let block1 = test_block_builder.get_executed_block_with_number(0, B256::random()); let block2 = test_block_builder.get_executed_block_with_number(0, B256::random()); let chain = NewCanonicalChain::Commit { new: vec![block1.clone()] }; @@ -1234,8 +1235,8 @@ mod tests { #[test] fn test_in_memory_state_set_pending_block() { - let state = CanonicalInMemoryState::empty(); - let mut test_block_builder = TestBlockBuilder::default(); + let state: CanonicalInMemoryState = CanonicalInMemoryState::empty(); + let mut test_block_builder: TestBlockBuilder = TestBlockBuilder::default(); // First random block let block1 = test_block_builder.get_executed_block_with_number(0, B256::random()); @@ -1286,7 +1287,7 @@ mod tests { #[test] fn test_canonical_in_memory_state_state_provider() { - let mut test_block_builder = TestBlockBuilder::default(); + let mut test_block_builder: TestBlockBuilder = TestBlockBuilder::default(); let block1 = test_block_builder.get_executed_block_with_number(1, B256::random()); let block2 = test_block_builder.get_executed_block_with_number(2, block1.block().hash()); 
let block3 = test_block_builder.get_executed_block_with_number(3, block2.block().hash()); @@ -1333,14 +1334,15 @@ mod tests { #[test] fn test_canonical_in_memory_state_canonical_chain_empty() { - let state = CanonicalInMemoryState::empty(); + let state: CanonicalInMemoryState = CanonicalInMemoryState::empty(); let chain: Vec<_> = state.canonical_chain().collect(); assert!(chain.is_empty()); } #[test] fn test_canonical_in_memory_state_canonical_chain_single_block() { - let block = TestBlockBuilder::default().get_executed_block_with_number(1, B256::random()); + let block = TestBlockBuilder::::default() + .get_executed_block_with_number(1, B256::random()); let hash = block.block().hash(); let mut blocks = HashMap::default(); blocks.insert(hash, Arc::new(BlockState::new(block))); @@ -1359,7 +1361,7 @@ mod tests { fn test_canonical_in_memory_state_canonical_chain_multiple_blocks() { let mut parent_hash = B256::random(); let mut block_builder = TestBlockBuilder::default(); - let state = CanonicalInMemoryState::empty(); + let state: CanonicalInMemoryState = CanonicalInMemoryState::empty(); for i in 1..=3 { let block = block_builder.get_executed_block_with_number(i, parent_hash); @@ -1381,7 +1383,7 @@ mod tests { fn test_canonical_in_memory_state_canonical_chain_with_pending_block() { let mut parent_hash = B256::random(); let mut block_builder = TestBlockBuilder::default(); - let state = CanonicalInMemoryState::empty(); + let state: CanonicalInMemoryState = CanonicalInMemoryState::empty(); for i in 1..=2 { let block = block_builder.get_executed_block_with_number(i, parent_hash); @@ -1401,7 +1403,7 @@ mod tests { #[test] fn test_block_state_parent_blocks() { - let mut test_block_builder = TestBlockBuilder::default(); + let mut test_block_builder: TestBlockBuilder = TestBlockBuilder::default(); let chain = create_mock_state_chain(&mut test_block_builder, 4); let parents = chain[3].parent_state_chain(); @@ -1422,7 +1424,7 @@ mod tests { #[test] fn test_block_state_single_block_state_chain() { let single_block_number = 1; - let mut test_block_builder = TestBlockBuilder::default(); + let mut test_block_builder: TestBlockBuilder = TestBlockBuilder::default(); let single_block = create_mock_state(&mut test_block_builder, single_block_number, B256::random()); let single_block_hash = single_block.block().block.hash(); @@ -1438,7 +1440,7 @@ mod tests { #[test] fn test_block_state_chain() { - let mut test_block_builder = TestBlockBuilder::default(); + let mut test_block_builder: TestBlockBuilder = TestBlockBuilder::default(); let chain = create_mock_state_chain(&mut test_block_builder, 3); let block_state_chain = chain[2].chain().collect::>(); @@ -1460,7 +1462,7 @@ mod tests { #[test] fn test_to_chain_notification() { // Generate 4 blocks - let mut test_block_builder = TestBlockBuilder::default(); + let mut test_block_builder: TestBlockBuilder = TestBlockBuilder::default(); let block0 = test_block_builder.get_executed_block_with_number(0, B256::random()); let block1 = test_block_builder.get_executed_block_with_number(1, block0.block.hash()); let block1a = test_block_builder.get_executed_block_with_number(1, block0.block.hash()); diff --git a/crates/chain-state/src/lib.rs b/crates/chain-state/src/lib.rs index bd9b43a59eae..519469d67f60 100644 --- a/crates/chain-state/src/lib.rs +++ b/crates/chain-state/src/lib.rs @@ -27,3 +27,6 @@ pub use memory_overlay::{MemoryOverlayStateProvider, MemoryOverlayStateProviderR #[cfg(any(test, feature = "test-utils"))] /// Common test helpers pub mod test_utils; + +// todo: 
remove when generic data prim integration complete +pub use reth_primitives::EthPrimitives; diff --git a/crates/chain-state/src/memory_overlay.rs b/crates/chain-state/src/memory_overlay.rs index ada0faee4907..88cd411d38b2 100644 --- a/crates/chain-state/src/memory_overlay.rs +++ b/crates/chain-state/src/memory_overlay.rs @@ -5,7 +5,7 @@ use alloy_primitives::{ Address, BlockNumber, Bytes, StorageKey, StorageValue, B256, }; use reth_errors::ProviderResult; -use reth_primitives::{Account, Bytecode}; +use reth_primitives::{Account, Bytecode, NodePrimitives}; use reth_storage_api::{ AccountReader, BlockHashReader, StateProofProvider, StateProvider, StateRootProvider, StorageRootProvider, @@ -18,11 +18,11 @@ use std::sync::OnceLock; /// A state provider that stores references to in-memory blocks along with their state as well as a /// reference of the historical state provider for fallback lookups. #[allow(missing_debug_implementations)] -pub struct MemoryOverlayStateProviderRef<'a> { +pub struct MemoryOverlayStateProviderRef<'a, N: NodePrimitives = reth_primitives::EthPrimitives> { /// Historical state provider for state lookups that are not found in in-memory blocks. pub(crate) historical: Box, /// The collection of executed parent blocks. Expected order is newest to oldest. - pub(crate) in_memory: Vec, + pub(crate) in_memory: Vec>, /// Lazy-loaded in-memory trie data. pub(crate) trie_state: OnceLock, } @@ -30,11 +30,11 @@ pub struct MemoryOverlayStateProviderRef<'a> { /// A state provider that stores references to in-memory blocks along with their state as well as /// the historical state provider for fallback lookups. #[allow(missing_debug_implementations)] -pub struct MemoryOverlayStateProvider { +pub struct MemoryOverlayStateProvider { /// Historical state provider for state lookups that are not found in in-memory blocks. pub(crate) historical: Box, /// The collection of executed parent blocks. Expected order is newest to oldest. - pub(crate) in_memory: Vec, + pub(crate) in_memory: Vec>, /// Lazy-loaded in-memory trie data. pub(crate) trie_state: OnceLock, } @@ -49,7 +49,7 @@ macro_rules! impl_state_provider { /// - `in_memory` - the collection of executed ancestor blocks in reverse. /// - `historical` - a historical state provider for the latest ancestor block stored in the /// database. - pub fn new(historical: $historical_type, in_memory: Vec) -> Self { + pub fn new(historical: $historical_type, in_memory: Vec>) -> Self { Self { historical, in_memory, trie_state: OnceLock::new() } } @@ -230,8 +230,8 @@ macro_rules! impl_state_provider { }; } -impl_state_provider!([], MemoryOverlayStateProvider, Box); -impl_state_provider!([<'a>], MemoryOverlayStateProviderRef<'a>, Box); +impl_state_provider!([], MemoryOverlayStateProvider, Box); +impl_state_provider!([<'a, N: NodePrimitives>], MemoryOverlayStateProviderRef<'a, N>, Box); /// The collection of data necessary for trie-related operations for [`MemoryOverlayStateProvider`]. 
#[derive(Clone, Default, Debug)] diff --git a/crates/chain-state/src/notifications.rs b/crates/chain-state/src/notifications.rs index 582e1d2a05d4..84fb120d4b28 100644 --- a/crates/chain-state/src/notifications.rs +++ b/crates/chain-state/src/notifications.rs @@ -3,7 +3,7 @@ use auto_impl::auto_impl; use derive_more::{Deref, DerefMut}; use reth_execution_types::{BlockReceipts, Chain}; -use reth_primitives::{SealedBlockWithSenders, SealedHeader}; +use reth_primitives::{NodePrimitives, SealedBlockWithSenders, SealedHeader}; use std::{ pin::Pin, sync::Arc, @@ -17,10 +17,12 @@ use tokio_stream::{ use tracing::debug; /// Type alias for a receiver that receives [`CanonStateNotification`] -pub type CanonStateNotifications = broadcast::Receiver; +pub type CanonStateNotifications = + broadcast::Receiver>; /// Type alias for a sender that sends [`CanonStateNotification`] -pub type CanonStateNotificationSender = broadcast::Sender; +pub type CanonStateNotificationSender = + broadcast::Sender>; /// A type that allows to register chain related event subscriptions. #[auto_impl(&, Arc)] @@ -41,13 +43,13 @@ pub trait CanonStateSubscriptions: Send + Sync { /// A Stream of [`CanonStateNotification`]. #[derive(Debug)] #[pin_project::pin_project] -pub struct CanonStateNotificationStream { +pub struct CanonStateNotificationStream { #[pin] - st: BroadcastStream, + st: BroadcastStream>, } -impl Stream for CanonStateNotificationStream { - type Item = CanonStateNotification; +impl Stream for CanonStateNotificationStream { + type Item = CanonStateNotification; fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { loop { @@ -68,11 +70,11 @@ impl Stream for CanonStateNotificationStream { /// The notification contains at least one [`Chain`] with the imported segment. If some blocks were /// reverted (e.g. during a reorg), the old chain is also returned. #[derive(Clone, Debug, PartialEq, Eq)] -pub enum CanonStateNotification { +pub enum CanonStateNotification { /// The canonical chain was extended. Commit { /// The newly added chain segment. - new: Arc, + new: Arc>, }, /// A chain segment was reverted or reorged. /// @@ -82,18 +84,18 @@ pub enum CanonStateNotification { /// chain segment. Reorg { /// The chain segment that was reverted. - old: Arc, + old: Arc>, /// The chain segment that was added on top of the canonical chain, minus the reverted /// blocks. /// /// In the case of a revert, not a reorg, this chain segment is empty. - new: Arc, + new: Arc>, }, } -impl CanonStateNotification { +impl CanonStateNotification { /// Get the chain segment that was reverted, if any. - pub fn reverted(&self) -> Option> { + pub fn reverted(&self) -> Option>> { match self { Self::Commit { .. } => None, Self::Reorg { old, .. } => Some(old.clone()), @@ -101,7 +103,7 @@ impl CanonStateNotification { } /// Get the newly imported chain segment, if any. - pub fn committed(&self) -> Arc { + pub fn committed(&self) -> Arc> { match self { Self::Commit { new } | Self::Reorg { new, .. } => new.clone(), } @@ -122,7 +124,7 @@ impl CanonStateNotification { /// /// The boolean in the tuple (2nd element) denotes whether the receipt was from the reverted /// chain segment. 
- pub fn block_receipts(&self) -> Vec<(BlockReceipts, bool)> { + pub fn block_receipts(&self) -> Vec<(BlockReceipts, bool)> { let mut receipts = Vec::new(); // get old receipts @@ -212,7 +214,7 @@ mod tests { block2.set_block_number(2); block2.set_hash(block2_hash); - let chain = Arc::new(Chain::new( + let chain: Arc = Arc::new(Chain::new( vec![block1.clone(), block2.clone()], ExecutionOutcome::default(), None, @@ -250,7 +252,7 @@ mod tests { block3.set_block_number(3); block3.set_hash(block3_hash); - let old_chain = + let old_chain: Arc = Arc::new(Chain::new(vec![block1.clone()], ExecutionOutcome::default(), None)); let new_chain = Arc::new(Chain::new( vec![block2.clone(), block3.clone()], @@ -313,7 +315,7 @@ mod tests { let execution_outcome = ExecutionOutcome { receipts, ..Default::default() }; // Create a new chain segment with `block1` and `block2` and the execution outcome. - let new_chain = + let new_chain: Arc = Arc::new(Chain::new(vec![block1.clone(), block2.clone()], execution_outcome, None)); // Create a commit notification containing the new chain segment. @@ -361,7 +363,8 @@ mod tests { ExecutionOutcome { receipts: old_receipts, ..Default::default() }; // Create an old chain segment to be reverted, containing `old_block1`. - let old_chain = Arc::new(Chain::new(vec![old_block1.clone()], old_execution_outcome, None)); + let old_chain: Arc = + Arc::new(Chain::new(vec![old_block1.clone()], old_execution_outcome, None)); // Define block2 for the new chain segment, which will be committed. let mut new_block1 = SealedBlockWithSenders::default(); diff --git a/crates/chain-state/src/test_utils.rs b/crates/chain-state/src/test_utils.rs index 60a90e43fee0..63689f07f039 100644 --- a/crates/chain-state/src/test_utils.rs +++ b/crates/chain-state/src/test_utils.rs @@ -1,3 +1,5 @@ +use core::marker::PhantomData; + use crate::{ in_memory::ExecutedBlock, CanonStateNotification, CanonStateNotifications, CanonStateSubscriptions, @@ -12,8 +14,8 @@ use reth_chainspec::{ChainSpec, EthereumHardfork, MIN_TRANSACTION_GAS}; use reth_execution_types::{Chain, ExecutionOutcome}; use reth_primitives::{ proofs::{calculate_receipt_root, calculate_transaction_root, calculate_withdrawals_root}, - BlockBody, Receipt, Receipts, SealedBlock, SealedBlockWithSenders, SealedHeader, Transaction, - TransactionSigned, TransactionSignedEcRecovered, + BlockBody, NodePrimitives, Receipt, Receipts, SealedBlock, SealedBlockWithSenders, + SealedHeader, Transaction, TransactionSigned, TransactionSignedEcRecovered, }; use reth_trie::{root::state_root_unhashed, updates::TrieUpdates, HashedPostState}; use revm::{db::BundleState, primitives::AccountInfo}; @@ -27,7 +29,7 @@ use tokio::sync::broadcast::{self, Sender}; /// Functionality to build blocks for tests and help with assertions about /// their execution. #[derive(Debug)] -pub struct TestBlockBuilder { +pub struct TestBlockBuilder { /// The account that signs all the block's transactions. pub signer: Address, /// Private key for signing. 
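The hunks on either side of this point make `TestBlockBuilder` generic over a primitives type by adding a `PhantomData` field and threading the parameter through its hand-written `Default` impl. A minimal, self-contained sketch of that pattern, with illustrative stand-in names rather than the reth types:

use core::marker::PhantomData;

/// Illustrative marker type standing in for a primitives configuration.
struct MyPrimitives;

/// A builder that is generic over a marker type but never stores a value of it.
struct Builder<P = MyPrimitives> {
    nonce: u64,
    // Ties the otherwise-unused type parameter to the struct without changing its layout.
    _prims: PhantomData<P>,
}

impl<P> Default for Builder<P> {
    fn default() -> Self {
        // A hand-written impl avoids the `P: Default` bound that `#[derive(Default)]` would add.
        Self { nonce: 0, _prims: PhantomData }
    }
}

fn main() {
    // Callers happy with the default parameter need no turbofish...
    let b: Builder = Builder::default();
    // ...while others can name the primitives type explicitly.
    let _explicit = Builder::<MyPrimitives>::default();
    println!("nonce: {}", b.nonce);
}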
@@ -40,9 +42,10 @@ pub struct TestBlockBuilder { pub signer_build_account_info: AccountInfo, /// Chain spec of the blocks generated by this builder pub chain_spec: ChainSpec, + _prims: PhantomData, } -impl Default for TestBlockBuilder { +impl Default for TestBlockBuilder { fn default() -> Self { let initial_account_info = AccountInfo::from_balance(U256::from(10).pow(U256::from(18))); let signer_pk = PrivateKeySigner::random(); @@ -53,6 +56,7 @@ impl Default for TestBlockBuilder { signer_pk, signer_execute_account_info: initial_account_info.clone(), signer_build_account_info: initial_account_info, + _prims: PhantomData, } } } @@ -289,8 +293,8 @@ impl TestBlockBuilder { } /// A test `ChainEventSubscriptions` #[derive(Clone, Debug, Default)] -pub struct TestCanonStateSubscriptions { - canon_notif_tx: Arc>>>, +pub struct TestCanonStateSubscriptions { + canon_notif_tx: Arc>>>>, } impl TestCanonStateSubscriptions { diff --git a/crates/exex/types/Cargo.toml b/crates/exex/types/Cargo.toml index 51097d6109c1..3b67fd5aa500 100644 --- a/crates/exex/types/Cargo.toml +++ b/crates/exex/types/Cargo.toml @@ -15,6 +15,7 @@ workspace = true # reth reth-chain-state.workspace = true reth-execution-types.workspace = true +reth-primitives-traits.workspace = true # reth alloy-primitives.workspace = true @@ -38,11 +39,13 @@ serde = [ "reth-execution-types/serde", "alloy-eips/serde", "alloy-primitives/serde", - "rand/serde" + "rand/serde", + "reth-primitives-traits/serde", ] serde-bincode-compat = [ "reth-execution-types/serde-bincode-compat", "serde_with", "reth-primitives/serde-bincode-compat", - "alloy-eips/serde-bincode-compat" + "alloy-eips/serde-bincode-compat", + "reth-primitives-traits/serde-bincode-compat", ] diff --git a/crates/exex/types/src/notification.rs b/crates/exex/types/src/notification.rs index 61d42a3319be..fb0762f04b3e 100644 --- a/crates/exex/types/src/notification.rs +++ b/crates/exex/types/src/notification.rs @@ -2,27 +2,28 @@ use std::sync::Arc; use reth_chain_state::CanonStateNotification; use reth_execution_types::Chain; +use reth_primitives_traits::NodePrimitives; /// Notifications sent to an `ExEx`. #[derive(Debug, Clone, PartialEq, Eq)] #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] -pub enum ExExNotification { +pub enum ExExNotification { /// Chain got committed without a reorg, and only the new chain is returned. ChainCommitted { /// The new chain after commit. - new: Arc, + new: Arc>, }, /// Chain got reorged, and both the old and the new chains are returned. ChainReorged { /// The old chain before reorg. - old: Arc, + old: Arc>, /// The new chain after reorg. - new: Arc, + new: Arc>, }, /// Chain got reverted, and only the old chain is returned. ChainReverted { /// The old chain before reversion. - old: Arc, + old: Arc>, }, } @@ -60,8 +61,8 @@ impl ExExNotification { } } -impl From for ExExNotification { - fn from(notification: CanonStateNotification) -> Self { +impl From> for ExExNotification

{ + fn from(notification: CanonStateNotification

) -> Self { match notification { CanonStateNotification::Commit { new } => Self::ChainCommitted { new }, CanonStateNotification::Reorg { old, new } => Self::ChainReorged { old, new }, diff --git a/crates/primitives/src/lib.rs b/crates/primitives/src/lib.rs index c3682ecba1da..027bf97cfa5c 100644 --- a/crates/primitives/src/lib.rs +++ b/crates/primitives/src/lib.rs @@ -76,7 +76,7 @@ pub mod serde_bincode_compat { } /// Temp helper struct for integrating [`NodePrimitives`]. -#[derive(Debug, Clone, Default, PartialEq, Eq)] +#[derive(Debug, Clone, Default, PartialEq, Eq, serde::Serialize, serde::Deserialize)] pub struct EthPrimitives; #[cfg(feature = "reth-codec")] From 02a90e1c0b48c0af5842b92a49860e8446b5a0e2 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Tue, 19 Nov 2024 21:46:28 +0100 Subject: [PATCH 049/156] chore: rm allowance from oog error (#12686) --- crates/rpc/rpc-eth-types/src/error/mod.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/rpc/rpc-eth-types/src/error/mod.rs b/crates/rpc/rpc-eth-types/src/error/mod.rs index 893bbdd6b9ce..187e2d943f70 100644 --- a/crates/rpc/rpc-eth-types/src/error/mod.rs +++ b/crates/rpc/rpc-eth-types/src/error/mod.rs @@ -362,7 +362,7 @@ pub enum RpcInvalidTransactionError { SenderNoEOA, /// Gas limit was exceeded during execution. /// Contains the gas limit. - #[error("out of gas: gas required exceeds allowance: {0}")] + #[error("out of gas: gas required exceeds: {0}")] BasicOutOfGas(u64), /// Gas limit was exceeded during memory expansion. /// Contains the gas limit. From 10caa9f8465043f6a0ab31f4543d86136aa1c419 Mon Sep 17 00:00:00 2001 From: joshieDo <93316087+joshieDo@users.noreply.github.com> Date: Wed, 20 Nov 2024 01:12:43 +0000 Subject: [PATCH 050/156] fix: use `body.recover_signers_unchecked` instead on `try_with_senders_unchecked` (#12668) --- crates/primitives/src/block.rs | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/crates/primitives/src/block.rs b/crates/primitives/src/block.rs index 94dd578493c9..5c47c49f4375 100644 --- a/crates/primitives/src/block.rs +++ b/crates/primitives/src/block.rs @@ -69,7 +69,7 @@ impl Block { let senders = if self.body.transactions.len() == senders.len() { senders } else { - let Some(senders) = self.body.recover_signers() else { return Err(self) }; + let Some(senders) = self.body.recover_signers_unchecked() else { return Err(self) }; senders }; @@ -379,7 +379,7 @@ impl SealedBlock { let senders = if self.body.transactions.len() == senders.len() { senders } else { - let Some(senders) = self.body.recover_signers() else { return Err(self) }; + let Some(senders) = self.body.recover_signers_unchecked() else { return Err(self) }; senders }; @@ -616,6 +616,15 @@ impl BlockBody { TransactionSigned::recover_signers(&self.transactions, self.transactions.len()) } + /// Recover signer addresses for all transactions in the block body _without ensuring that the + /// signature has a low `s` value_. + /// + /// Returns `None`, if some transaction's signature is invalid, see also + /// [`TransactionSigned::recover_signer_unchecked`]. + pub fn recover_signers_unchecked(&self) -> Option> { + TransactionSigned::recover_signers_unchecked(&self.transactions, self.transactions.len()) + } + /// Returns whether or not the block body contains any blob transactions. 
#[inline] pub fn has_blob_transactions(&self) -> bool { From 942ba7e823600828190aaf111a9a6bb9803a703e Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Wed, 20 Nov 2024 09:27:09 +0100 Subject: [PATCH 051/156] tx-pool: impl `From` for `Destination` (#12689) --- crates/transaction-pool/src/pool/txpool.rs | 4 ++-- crates/transaction-pool/src/pool/update.rs | 6 ++++++ 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/crates/transaction-pool/src/pool/txpool.rs b/crates/transaction-pool/src/pool/txpool.rs index 537162ac76c9..576672b91af5 100644 --- a/crates/transaction-pool/src/pool/txpool.rs +++ b/crates/transaction-pool/src/pool/txpool.rs @@ -1328,7 +1328,7 @@ impl AllTransactions { id: *tx.transaction.id(), hash: *tx.transaction.hash(), current: current_pool, - destination: Destination::Pool(tx.subpool), + destination: tx.subpool.into(), }) } } @@ -1738,7 +1738,7 @@ impl AllTransactions { id: *id, hash: *tx.transaction.hash(), current: current_pool, - destination: Destination::Pool(tx.subpool), + destination: tx.subpool.into(), }) } } diff --git a/crates/transaction-pool/src/pool/update.rs b/crates/transaction-pool/src/pool/update.rs index a5cce8291fab..d62b1792e7b7 100644 --- a/crates/transaction-pool/src/pool/update.rs +++ b/crates/transaction-pool/src/pool/update.rs @@ -26,3 +26,9 @@ pub(crate) enum Destination { /// Move transaction to pool Pool(SubPool), } + +impl From for Destination { + fn from(sub_pool: SubPool) -> Self { + Self::Pool(sub_pool) + } +} From 3b120283192f276562de4f9019e6c3b59dc1efc4 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Wed, 20 Nov 2024 10:13:17 +0100 Subject: [PATCH 052/156] text(tx-pool): add unit tests for `DiskFileBlobStore` (#12692) --- crates/transaction-pool/src/blobstore/disk.rs | 137 +++++++++++++++++- crates/transaction-pool/src/blobstore/mod.rs | 2 +- 2 files changed, 134 insertions(+), 5 deletions(-) diff --git a/crates/transaction-pool/src/blobstore/disk.rs b/crates/transaction-pool/src/blobstore/disk.rs index 9d02276db85a..67c36a659981 100644 --- a/crates/transaction-pool/src/blobstore/disk.rs +++ b/crates/transaction-pool/src/blobstore/disk.rs @@ -75,10 +75,7 @@ impl BlobStore for DiskFileBlobStore { } fn cleanup(&self) -> BlobStoreCleanupStat { - let txs_to_delete = { - let mut txs_to_delete = self.inner.txs_to_delete.write(); - std::mem::take(&mut *txs_to_delete) - }; + let txs_to_delete = std::mem::take(&mut *self.inner.txs_to_delete.write()); let mut stat = BlobStoreCleanupStat::default(); let mut subsize = 0; debug!(target:"txpool::blob", num_blobs=%txs_to_delete.len(), "Removing blobs from disk"); @@ -554,4 +551,136 @@ mod tests { assert_eq!(store.data_size_hint(), Some(0)); assert_eq!(store.inner.size_tracker.num_blobs.load(Ordering::Relaxed), 0); } + + #[test] + fn disk_insert_and_retrieve() { + let (store, _dir) = tmp_store(); + + let (tx, blob) = rng_blobs(1).into_iter().next().unwrap(); + store.insert(tx, blob.clone()).unwrap(); + + assert!(store.is_cached(&tx)); + let retrieved_blob = store.get(tx).unwrap().map(Arc::unwrap_or_clone).unwrap(); + assert_eq!(retrieved_blob, blob); + } + + #[test] + fn disk_delete_blob() { + let (store, _dir) = tmp_store(); + + let (tx, blob) = rng_blobs(1).into_iter().next().unwrap(); + store.insert(tx, blob).unwrap(); + assert!(store.is_cached(&tx)); + + store.delete(tx).unwrap(); + assert!(store.inner.txs_to_delete.read().contains(&tx)); + store.cleanup(); + + let result = 
store.get(tx).unwrap(); + assert_eq!( + result, + Some(Arc::new(BlobTransactionSidecar { + blobs: vec![], + commitments: vec![], + proofs: vec![] + })) + ); + } + + #[test] + fn disk_insert_all_and_delete_all() { + let (store, _dir) = tmp_store(); + + let blobs = rng_blobs(5); + let txs = blobs.iter().map(|(tx, _)| *tx).collect::>(); + store.insert_all(blobs.clone()).unwrap(); + + for (tx, _) in &blobs { + assert!(store.is_cached(tx)); + } + + store.delete_all(txs.clone()).unwrap(); + store.cleanup(); + + for tx in txs { + let result = store.get(tx).unwrap(); + assert_eq!( + result, + Some(Arc::new(BlobTransactionSidecar { + blobs: vec![], + commitments: vec![], + proofs: vec![] + })) + ); + } + } + + #[test] + fn disk_get_all_blobs() { + let (store, _dir) = tmp_store(); + + let blobs = rng_blobs(3); + let txs = blobs.iter().map(|(tx, _)| *tx).collect::>(); + store.insert_all(blobs.clone()).unwrap(); + + let retrieved_blobs = store.get_all(txs.clone()).unwrap(); + for (tx, blob) in retrieved_blobs { + assert!(blobs.contains(&(tx, Arc::unwrap_or_clone(blob)))); + } + + store.delete_all(txs).unwrap(); + store.cleanup(); + } + + #[test] + fn disk_get_exact_blobs_success() { + let (store, _dir) = tmp_store(); + + let blobs = rng_blobs(3); + let txs = blobs.iter().map(|(tx, _)| *tx).collect::>(); + store.insert_all(blobs.clone()).unwrap(); + + let retrieved_blobs = store.get_exact(txs).unwrap(); + for (retrieved_blob, (_, original_blob)) in retrieved_blobs.into_iter().zip(blobs) { + assert_eq!(Arc::unwrap_or_clone(retrieved_blob), original_blob); + } + } + + #[test] + fn disk_get_exact_blobs_failure() { + let (store, _dir) = tmp_store(); + + let blobs = rng_blobs(2); + let txs = blobs.iter().map(|(tx, _)| *tx).collect::>(); + store.insert_all(blobs).unwrap(); + + // Try to get a blob that was never inserted + let missing_tx = TxHash::random(); + let result = store.get_exact(vec![txs[0], missing_tx]); + assert!(result.is_err()); + } + + #[test] + fn disk_data_size_hint() { + let (store, _dir) = tmp_store(); + assert_eq!(store.data_size_hint(), Some(0)); + + let blobs = rng_blobs(2); + store.insert_all(blobs).unwrap(); + assert!(store.data_size_hint().unwrap() > 0); + } + + #[test] + fn disk_cleanup_stat() { + let (store, _dir) = tmp_store(); + + let blobs = rng_blobs(3); + let txs = blobs.iter().map(|(tx, _)| *tx).collect::>(); + store.insert_all(blobs).unwrap(); + + store.delete_all(txs).unwrap(); + let stat = store.cleanup(); + assert_eq!(stat.delete_succeed, 3); + assert_eq!(stat.delete_failed, 0); + } } diff --git a/crates/transaction-pool/src/blobstore/mod.rs b/crates/transaction-pool/src/blobstore/mod.rs index f1612bcd022e..a21cea6e06c4 100644 --- a/crates/transaction-pool/src/blobstore/mod.rs +++ b/crates/transaction-pool/src/blobstore/mod.rs @@ -152,7 +152,7 @@ impl PartialEq for BlobStoreSize { } /// Statistics for the cleanup operation. 
-#[derive(Debug, Clone, Default)] +#[derive(Debug, Clone, Default, PartialEq, Eq)] pub struct BlobStoreCleanupStat { /// the number of successfully deleted blobs pub delete_succeed: usize, From 2c885eee21a18e2f4ca078fe742eac7aeb1203cd Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Wed, 20 Nov 2024 10:46:47 +0100 Subject: [PATCH 053/156] chore: rm unused windows import (#12697) --- crates/storage/nippy-jar/src/lib.rs | 5 ----- 1 file changed, 5 deletions(-) diff --git a/crates/storage/nippy-jar/src/lib.rs b/crates/storage/nippy-jar/src/lib.rs index b1d174feb2c3..98eddf22ee96 100644 --- a/crates/storage/nippy-jar/src/lib.rs +++ b/crates/storage/nippy-jar/src/lib.rs @@ -20,11 +20,6 @@ use std::{ ops::Range, path::{Path, PathBuf}, }; - -// Windows specific extension for std::fs -#[cfg(windows)] -use std::os::windows::prelude::OpenOptionsExt; - use tracing::*; /// Compression algorithms supported by `NippyJar`. From 11847b4f1e29a886b87890c1334dde5080e62469 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Wed, 20 Nov 2024 10:53:53 +0100 Subject: [PATCH 054/156] text(tx-pool): add unit tests for tx pool state (#12690) --- crates/transaction-pool/src/pool/state.rs | 76 +++++++++++++++++++---- 1 file changed, 64 insertions(+), 12 deletions(-) diff --git a/crates/transaction-pool/src/pool/state.rs b/crates/transaction-pool/src/pool/state.rs index d0a3b10f8cb9..d65fc05b03f6 100644 --- a/crates/transaction-pool/src/pool/state.rs +++ b/crates/transaction-pool/src/pool/state.rs @@ -46,8 +46,6 @@ bitflags::bitflags! { } } -// === impl TxState === - impl TxState { /// The state of a transaction is considered `pending`, if the transaction has: /// - _No_ parked ancestors @@ -89,8 +87,6 @@ pub enum SubPool { Pending, } -// === impl SubPool === - impl SubPool { /// Whether this transaction is to be moved to the pending sub-pool. 
#[inline] @@ -126,16 +122,15 @@ impl SubPool { impl From for SubPool { fn from(value: TxState) -> Self { if value.is_pending() { - return Self::Pending - } - if value.is_blob() { + Self::Pending + } else if value.is_blob() { // all _non-pending_ blob transactions are in the blob sub-pool - return Self::Blob + Self::Blob + } else if value.bits() < TxState::BASE_FEE_POOL_BITS.bits() { + Self::Queued + } else { + Self::BaseFee } - if value.bits() < TxState::BASE_FEE_POOL_BITS.bits() { - return Self::Queued - } - Self::BaseFee } } @@ -204,4 +199,61 @@ mod tests { assert!(state.is_blob()); assert!(!state.is_pending()); } + + #[test] + fn test_tx_state_no_nonce_gap() { + let mut state = TxState::default(); + state |= TxState::NO_NONCE_GAPS; + assert!(!state.has_nonce_gap()); + } + + #[test] + fn test_tx_state_with_nonce_gap() { + let state = TxState::default(); + assert!(state.has_nonce_gap()); + } + + #[test] + fn test_tx_state_enough_balance() { + let mut state = TxState::default(); + state.insert(TxState::ENOUGH_BALANCE); + assert!(state.contains(TxState::ENOUGH_BALANCE)); + } + + #[test] + fn test_tx_state_not_too_much_gas() { + let mut state = TxState::default(); + state.insert(TxState::NOT_TOO_MUCH_GAS); + assert!(state.contains(TxState::NOT_TOO_MUCH_GAS)); + } + + #[test] + fn test_tx_state_enough_fee_cap_block() { + let mut state = TxState::default(); + state.insert(TxState::ENOUGH_FEE_CAP_BLOCK); + assert!(state.contains(TxState::ENOUGH_FEE_CAP_BLOCK)); + } + + #[test] + fn test_tx_base_fee() { + let state = TxState::BASE_FEE_POOL_BITS; + assert_eq!(SubPool::BaseFee, state.into()); + } + + #[test] + fn test_blob_transaction_only() { + let state = TxState::BLOB_TRANSACTION; + assert_eq!(SubPool::Blob, state.into()); + assert!(state.is_blob()); + assert!(!state.is_pending()); + } + + #[test] + fn test_blob_transaction_with_base_fee_bits() { + let mut state = TxState::BASE_FEE_POOL_BITS; + state.insert(TxState::BLOB_TRANSACTION); + assert_eq!(SubPool::Blob, state.into()); + assert!(state.is_blob()); + assert!(!state.is_pending()); + } } From 7b13a22698da1d9e6aab5496b09570883c813b00 Mon Sep 17 00:00:00 2001 From: Hai | RISE <150876604+hai-rise@users.noreply.github.com> Date: Wed, 20 Nov 2024 16:56:12 +0700 Subject: [PATCH 055/156] perf(tx-pool): avoid copying tx cost (#12629) --- crates/transaction-pool/src/pool/txpool.rs | 6 +- .../transaction-pool/src/test_utils/mock.rs | 79 ++++++++++++------- crates/transaction-pool/src/traits.rs | 6 +- crates/transaction-pool/src/validate/eth.rs | 5 +- crates/transaction-pool/src/validate/mod.rs | 2 +- examples/network-txpool/src/main.rs | 2 +- 6 files changed, 62 insertions(+), 38 deletions(-) diff --git a/crates/transaction-pool/src/pool/txpool.rs b/crates/transaction-pool/src/pool/txpool.rs index 576672b91af5..86bf5f741c3d 100644 --- a/crates/transaction-pool/src/pool/txpool.rs +++ b/crates/transaction-pool/src/pool/txpool.rs @@ -657,7 +657,7 @@ impl TxPool { InsertErr::Overdraft { transaction } => Err(PoolError::new( *transaction.hash(), PoolErrorKind::InvalidTransaction(InvalidPoolTransactionError::Overdraft { - cost: transaction.cost(), + cost: *transaction.cost(), balance: on_chain_balance, }), )), @@ -1229,7 +1229,7 @@ impl AllTransactions { tx.state.insert(TxState::NO_NONCE_GAPS); tx.state.insert(TxState::NO_PARKED_ANCESTORS); tx.cumulative_cost = U256::ZERO; - if tx.transaction.cost() > info.balance { + if tx.transaction.cost() > &info.balance { // sender lacks sufficient funds to pay for this transaction 
tx.state.remove(TxState::ENOUGH_BALANCE); } else { @@ -1542,7 +1542,7 @@ impl AllTransactions { } } } - } else if new_blob_tx.cost() > on_chain_balance { + } else if new_blob_tx.cost() > &on_chain_balance { // the transaction would go into overdraft return Err(InsertErr::Overdraft { transaction: Arc::new(new_blob_tx) }) } diff --git a/crates/transaction-pool/src/test_utils/mock.rs b/crates/transaction-pool/src/test_utils/mock.rs index 56acbb107f3b..72304910e15d 100644 --- a/crates/transaction-pool/src/test_utils/mock.rs +++ b/crates/transaction-pool/src/test_utils/mock.rs @@ -59,6 +59,8 @@ macro_rules! set_value { *$field = new_value; } } + // Ensure the tx cost is always correct after each mutation. + $this.update_cost(); }; } @@ -123,6 +125,8 @@ pub enum MockTransaction { input: Bytes, /// The size of the transaction, returned in the implementation of [`PoolTransaction`]. size: usize, + /// The cost of the transaction, returned in the implementation of [`PoolTransaction`]. + cost: U256, }, /// EIP-2930 transaction type. Eip2930 { @@ -148,6 +152,8 @@ pub enum MockTransaction { access_list: AccessList, /// The size of the transaction, returned in the implementation of [`PoolTransaction`]. size: usize, + /// The cost of the transaction, returned in the implementation of [`PoolTransaction`]. + cost: U256, }, /// EIP-1559 transaction type. Eip1559 { @@ -175,6 +181,8 @@ pub enum MockTransaction { input: Bytes, /// The size of the transaction, returned in the implementation of [`PoolTransaction`]. size: usize, + /// The cost of the transaction, returned in the implementation of [`PoolTransaction`]. + cost: U256, }, /// EIP-4844 transaction type. Eip4844 { @@ -206,6 +214,8 @@ pub enum MockTransaction { sidecar: BlobTransactionSidecar, /// The size of the transaction, returned in the implementation of [`PoolTransaction`]. size: usize, + /// The cost of the transaction, returned in the implementation of [`PoolTransaction`]. + cost: U256, }, } @@ -235,6 +245,7 @@ impl MockTransaction { value: Default::default(), input: Default::default(), size: Default::default(), + cost: U256::ZERO, } } @@ -252,6 +263,7 @@ impl MockTransaction { gas_price: 0, access_list: Default::default(), size: Default::default(), + cost: U256::ZERO, } } @@ -270,6 +282,7 @@ impl MockTransaction { input: Bytes::new(), access_list: Default::default(), size: Default::default(), + cost: U256::ZERO, } } @@ -290,6 +303,7 @@ impl MockTransaction { access_list: Default::default(), sidecar: Default::default(), size: Default::default(), + cost: U256::ZERO, } } @@ -560,6 +574,19 @@ impl MockTransaction { pub const fn is_eip2930(&self) -> bool { matches!(self, Self::Eip2930 { .. }) } + + fn update_cost(&mut self) { + match self { + Self::Legacy { cost, gas_limit, gas_price, value, .. } | + Self::Eip2930 { cost, gas_limit, gas_price, value, .. } => { + *cost = U256::from(*gas_limit) * U256::from(*gas_price) + *value + } + Self::Eip1559 { cost, gas_limit, max_fee_per_gas, value, .. } | + Self::Eip4844 { cost, gas_limit, max_fee_per_gas, value, .. } => { + *cost = U256::from(*gas_limit) * U256::from(*max_fee_per_gas) + *value + } + }; + } } impl PoolTransaction for MockTransaction { @@ -593,16 +620,16 @@ impl PoolTransaction for MockTransaction { *self.get_nonce() } - fn cost(&self) -> U256 { + // Having `get_cost` from `make_setters_getters` would be cleaner but we didn't + // want to also generate the error-prone cost setters. 
For now cost should be + // correct at construction and auto-updated per field update via `update_cost`, + // not to be manually set. + fn cost(&self) -> &U256 { match self { - Self::Legacy { gas_price, value, gas_limit, .. } | - Self::Eip2930 { gas_limit, gas_price, value, .. } => { - U256::from(*gas_limit) * U256::from(*gas_price) + *value - } - Self::Eip1559 { max_fee_per_gas, value, gas_limit, .. } | - Self::Eip4844 { max_fee_per_gas, value, gas_limit, .. } => { - U256::from(*gas_limit) * U256::from(*max_fee_per_gas) + *value - } + Self::Legacy { cost, .. } | + Self::Eip2930 { cost, .. } | + Self::Eip1559 { cost, .. } | + Self::Eip4844 { cost, .. } => cost, } } @@ -783,6 +810,7 @@ impl TryFrom for MockTransaction { value, input, size, + cost: U256::from(gas_limit) * U256::from(gas_price) + value, }), Transaction::Eip2930(TxEip2930 { chain_id, @@ -805,6 +833,7 @@ impl TryFrom for MockTransaction { input, access_list, size, + cost: U256::from(gas_limit) * U256::from(gas_price) + value, }), Transaction::Eip1559(TxEip1559 { chain_id, @@ -829,6 +858,7 @@ impl TryFrom for MockTransaction { input, access_list, size, + cost: U256::from(gas_limit) * U256::from(max_fee_per_gas) + value, }), Transaction::Eip4844(TxEip4844 { chain_id, @@ -857,6 +887,7 @@ impl TryFrom for MockTransaction { access_list, sidecar: BlobTransactionSidecar::default(), size, + cost: U256::from(gas_limit) * U256::from(max_fee_per_gas) + value, }), _ => unreachable!("Invalid transaction type"), } @@ -888,28 +919,24 @@ impl From for Transaction { match mock { MockTransaction::Legacy { chain_id, - hash: _, - sender: _, nonce, gas_price, gas_limit, to, value, input, - size: _, + .. } => Self::Legacy(TxLegacy { chain_id, nonce, gas_price, gas_limit, to, value, input }), MockTransaction::Eip2930 { chain_id, - hash: _, - sender: _, nonce, - to, + gas_price, gas_limit, - input, + to, value, - gas_price, access_list, - size: _, + input, + .. } => Self::Eip2930(TxEip2930 { chain_id, nonce, @@ -922,17 +949,15 @@ impl From for Transaction { }), MockTransaction::Eip1559 { chain_id, - hash: _, - sender: _, nonce, + gas_limit, max_fee_per_gas, max_priority_fee_per_gas, - gas_limit, to, value, access_list, input, - size: _, + .. } => Self::Eip1559(TxEip1559 { chain_id, nonce, @@ -946,19 +971,17 @@ impl From for Transaction { }), MockTransaction::Eip4844 { chain_id, - hash: _, - sender: _, nonce, + gas_limit, max_fee_per_gas, max_priority_fee_per_gas, - max_fee_per_blob_gas, - gas_limit, to, value, access_list, - input, sidecar, - size: _, + max_fee_per_blob_gas, + input, + .. } => Self::Eip4844(TxEip4844 { chain_id, nonce, diff --git a/crates/transaction-pool/src/traits.rs b/crates/transaction-pool/src/traits.rs index a7e9010d693b..23f28cc3fa73 100644 --- a/crates/transaction-pool/src/traits.rs +++ b/crates/transaction-pool/src/traits.rs @@ -961,7 +961,7 @@ pub trait PoolTransaction: fmt::Debug + Send + Sync + Clone { /// For legacy transactions: `gas_price * gas_limit + tx_value`. /// For EIP-4844 blob transactions: `max_fee_per_gas * gas_limit + tx_value + /// max_blob_fee_per_gas * blob_gas_used`. - fn cost(&self) -> U256; + fn cost(&self) -> &U256; /// Amount of gas that should be used in executing this transaction. This is paid up-front. fn gas_limit(&self) -> u64; @@ -1228,8 +1228,8 @@ impl PoolTransaction for EthPooledTransaction { /// For legacy transactions: `gas_price * gas_limit + tx_value`. /// For EIP-4844 blob transactions: `max_fee_per_gas * gas_limit + tx_value + /// max_blob_fee_per_gas * blob_gas_used`. 
- fn cost(&self) -> U256 { - self.cost + fn cost(&self) -> &U256 { + &self.cost } /// Amount of gas that should be used in executing this transaction. This is paid up-front. diff --git a/crates/transaction-pool/src/validate/eth.rs b/crates/transaction-pool/src/validate/eth.rs index d5f7101eb550..70298487694f 100644 --- a/crates/transaction-pool/src/validate/eth.rs +++ b/crates/transaction-pool/src/validate/eth.rs @@ -384,11 +384,12 @@ where let cost = transaction.cost(); // Checks for max cost - if cost > account.balance { + if cost > &account.balance { + let expected = *cost; return TransactionValidationOutcome::Invalid( transaction, InvalidTransactionError::InsufficientFunds( - GotExpected { got: account.balance, expected: cost }.into(), + GotExpected { got: account.balance, expected }.into(), ) .into(), ) diff --git a/crates/transaction-pool/src/validate/mod.rs b/crates/transaction-pool/src/validate/mod.rs index 8a5ecc9c4192..35e3a85537ee 100644 --- a/crates/transaction-pool/src/validate/mod.rs +++ b/crates/transaction-pool/src/validate/mod.rs @@ -312,7 +312,7 @@ impl ValidPoolTransaction { /// /// For EIP-1559 transactions: `max_fee_per_gas * gas_limit + tx_value`. /// For legacy transactions: `gas_price * gas_limit + tx_value`. - pub fn cost(&self) -> U256 { + pub fn cost(&self) -> &U256 { self.transaction.cost() } diff --git a/examples/network-txpool/src/main.rs b/examples/network-txpool/src/main.rs index 6f8d69eab021..e66185ad828c 100644 --- a/examples/network-txpool/src/main.rs +++ b/examples/network-txpool/src/main.rs @@ -82,7 +82,7 @@ impl TransactionValidator for OkValidator { ) -> TransactionValidationOutcome { // Always return valid TransactionValidationOutcome::Valid { - balance: transaction.cost(), + balance: *transaction.cost(), state_nonce: transaction.nonce(), transaction: ValidTransaction::Valid(transaction), propagate: false, From b178f3a160f826b262e78081563e5dee73b3718e Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Wed, 20 Nov 2024 11:34:16 +0100 Subject: [PATCH 056/156] chore: add receipts to networkprimitives (#12699) --- crates/net/eth-wire-types/src/primitives.rs | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/crates/net/eth-wire-types/src/primitives.rs b/crates/net/eth-wire-types/src/primitives.rs index eab36c3b6a7c..c8b62cb0a82d 100644 --- a/crates/net/eth-wire-types/src/primitives.rs +++ b/crates/net/eth-wire-types/src/primitives.rs @@ -70,6 +70,18 @@ pub trait NetworkPrimitives: + PartialEq + Eq + 'static; + + /// The transaction type which peers return in `GetReceipts` messages. + type Receipt: Encodable + + Decodable + + Send + + Sync + + Unpin + + Clone + + Debug + + PartialEq + + Eq + + 'static; } /// Primitive types used by Ethereum network. 
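The associated type added above lets each network definition state which receipt type peers return in response to `GetReceipts` requests, so downstream code can be written once, generic over it. A stripped-down, dependency-free sketch of the idea; the trait and types here are stand-ins, and the real `NetworkPrimitives` bounds also require RLP `Encodable`/`Decodable`:

use std::fmt::Debug;

// Trimmed stand-in for the kind of trait extended above.
trait WireTypes: Send + Sync + 'static {
    type Receipt: Clone + Debug + PartialEq + Eq + Send + Sync + Unpin + 'static;
}

#[derive(Clone, Debug, PartialEq, Eq)]
struct DummyReceipt {
    success: bool,
    cumulative_gas_used: u64,
}

struct DummyNetwork;

impl WireTypes for DummyNetwork {
    // Each network definition picks the receipt type exchanged on the wire.
    type Receipt = DummyReceipt;
}

// Downstream code is written once, generic over the network's receipt type.
fn answer_get_receipts<N: WireTypes>(stored: &[N::Receipt]) -> Vec<N::Receipt> {
    stored.to_vec()
}

fn main() {
    let stored = vec![DummyReceipt { success: true, cumulative_gas_used: 21_000 }];
    let response = answer_get_receipts::<DummyNetwork>(&stored);
    assert_eq!(response, stored);
}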
@@ -83,4 +95,5 @@ impl NetworkPrimitives for EthNetworkPrimitives { type Block = reth_primitives::Block; type BroadcastedTransaction = reth_primitives::TransactionSigned; type PooledTransaction = reth_primitives::PooledTransactionsElement; + type Receipt = reth_primitives::Receipt; } From 9c7536484c852cd5afe97a51d803e492a95b377c Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Wed, 20 Nov 2024 11:35:10 +0100 Subject: [PATCH 057/156] chore: bump op-alloy (#12696) --- Cargo.lock | 36 ++++++++++++++------------ Cargo.toml | 8 +++--- crates/optimism/payload/src/payload.rs | 2 +- 3 files changed, 24 insertions(+), 22 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 19e90852175d..0326a37f4994 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5286,9 +5286,9 @@ checksum = "b410bbe7e14ab526a0e86877eb47c6996a2bd7746f027ba551028c925390e4e9" [[package]] name = "op-alloy-consensus" -version = "0.6.5" +version = "0.6.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bff54d1d790eca1f3aedbd666162e9c42eceff90b9f9d24b352ed9c2df1e901a" +checksum = "72da577a88d35b893fae6467112651f26ef023434c196b2a0b3dc75bc853e0e4" dependencies = [ "alloy-consensus", "alloy-eips", @@ -5299,14 +5299,14 @@ dependencies = [ "derive_more 1.0.0", "serde", "serde_with", - "spin", + "thiserror 2.0.3", ] [[package]] name = "op-alloy-genesis" -version = "0.6.5" +version = "0.6.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae84fd64fbc53b3e958ea5a96d7f5633e4a111092e41c51672c2d91835c09efb" +checksum = "818180672dd14ca6642fb57942e1cbd602669f42b6e0222b7ea9bbcae065d67e" dependencies = [ "alloy-consensus", "alloy-eips", @@ -5314,13 +5314,14 @@ dependencies = [ "alloy-sol-types", "serde", "serde_repr", + "thiserror 2.0.3", ] [[package]] name = "op-alloy-network" -version = "0.6.5" +version = "0.6.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d71e777450ee3e9c5177e00865e9b4496472b623c50f146fc907b667c6b4ab37" +checksum = "12f82e805bad171ceae2af45efaecf8d0b50622cff3473e3c998ff1dd340de35" dependencies = [ "alloy-consensus", "alloy-network", @@ -5333,29 +5334,32 @@ dependencies = [ [[package]] name = "op-alloy-protocol" -version = "0.6.5" +version = "0.6.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e854d2d4958d0a213731560172e8455536329ee9574473ff79fa953da91eb6a" +checksum = "1803a1ac96203b8f713b1fa9b7509c46c645ca7bc22b582761a7495e999d4301" dependencies = [ + "alloc-no-stdlib", "alloy-consensus", "alloy-eips", "alloy-primitives", "alloy-rlp", "alloy-serde", "async-trait", - "derive_more 1.0.0", + "brotli", + "miniz_oxide", "op-alloy-consensus", "op-alloy-genesis", "serde", + "thiserror 2.0.3", "tracing", "unsigned-varint", ] [[package]] name = "op-alloy-rpc-types" -version = "0.6.5" +version = "0.6.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "981b7f8ab11fe85ba3c1723702f000429b8d0c16b5883c93d577895f262cbac6" +checksum = "a838c125256e02e2f9da88c51e263b02a06cda7e60382fe2551a3385b516f5bb" dependencies = [ "alloy-consensus", "alloy-eips", @@ -5372,9 +5376,9 @@ dependencies = [ [[package]] name = "op-alloy-rpc-types-engine" -version = "0.6.5" +version = "0.6.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a227b16c9c5df68b112c8db9d268ebf46b3e26c744b4d59d4949575cd603a292" +checksum = "c227fcc7d81d4023363ba12406e57ebcc1c7cbb1075c38ea471ae32138d4706d" dependencies = [ "alloy-eips", "alloy-primitives", @@ -5386,6 +5390,7 @@ dependencies = [ 
"op-alloy-protocol", "serde", "snap", + "thiserror 2.0.3", ] [[package]] @@ -10391,9 +10396,6 @@ name = "spin" version = "0.9.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" -dependencies = [ - "lock_api", -] [[package]] name = "spki" diff --git a/Cargo.toml b/Cargo.toml index 002b85f125a6..f2565a1c92ff 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -471,10 +471,10 @@ alloy-transport-ipc = { version = "0.6.4", default-features = false } alloy-transport-ws = { version = "0.6.4", default-features = false } # op -op-alloy-rpc-types = "0.6.5" -op-alloy-rpc-types-engine = "0.6.5" -op-alloy-network = "0.6.5" -op-alloy-consensus = "0.6.5" +op-alloy-rpc-types = "0.6.7" +op-alloy-rpc-types-engine = "0.6.7" +op-alloy-network = "0.6.7" +op-alloy-consensus = "0.6.7" # misc aquamarine = "0.6" diff --git a/crates/optimism/payload/src/payload.rs b/crates/optimism/payload/src/payload.rs index 36f11ee628b3..1a951abadcae 100644 --- a/crates/optimism/payload/src/payload.rs +++ b/crates/optimism/payload/src/payload.rs @@ -7,7 +7,7 @@ use alloy_eips::{ use alloy_primitives::{keccak256, Address, Bytes, B256, B64, U256}; use alloy_rlp::Encodable; use alloy_rpc_types_engine::{ExecutionPayloadEnvelopeV2, ExecutionPayloadV1, PayloadId}; -use op_alloy_consensus::eip1559::{decode_holocene_extra_data, EIP1559ParamError}; +use op_alloy_consensus::{decode_holocene_extra_data, EIP1559ParamError}; /// Re-export for use in downstream arguments. pub use op_alloy_rpc_types_engine::OpPayloadAttributes; use op_alloy_rpc_types_engine::{OpExecutionPayloadEnvelopeV3, OpExecutionPayloadEnvelopeV4}; From ce4a32017a113051e6a872cbf593d5fd5d710856 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Wed, 20 Nov 2024 11:48:09 +0100 Subject: [PATCH 058/156] chore: rm unused codec derive (#12700) --- crates/cli/commands/src/test_vectors/compact.rs | 4 +--- crates/optimism/storage/src/lib.rs | 4 +--- crates/primitives/src/receipt.rs | 3 --- crates/storage/db-api/src/models/mod.rs | 4 +--- 4 files changed, 3 insertions(+), 12 deletions(-) diff --git a/crates/cli/commands/src/test_vectors/compact.rs b/crates/cli/commands/src/test_vectors/compact.rs index c498718e9fcb..5490f568d3a8 100644 --- a/crates/cli/commands/src/test_vectors/compact.rs +++ b/crates/cli/commands/src/test_vectors/compact.rs @@ -22,8 +22,7 @@ use reth_db::{ }; use reth_fs_util as fs; use reth_primitives::{ - Account, Log, LogData, Receipt, ReceiptWithBloom, StorageEntry, Transaction, - TransactionSignedNoHash, TxType, + Account, Log, LogData, Receipt, StorageEntry, Transaction, TransactionSignedNoHash, TxType, }; use reth_prune_types::{PruneCheckpoint, PruneMode}; use reth_stages_types::{ @@ -76,7 +75,6 @@ compact_types!( // reth-primitives Account, Receipt, - ReceiptWithBloom, // reth_codecs::alloy Authorization, GenesisAccount, diff --git a/crates/optimism/storage/src/lib.rs b/crates/optimism/storage/src/lib.rs index c3b8a71feea1..391f26093ba6 100644 --- a/crates/optimism/storage/src/lib.rs +++ b/crates/optimism/storage/src/lib.rs @@ -16,7 +16,7 @@ mod tests { CompactClientVersion, CompactU256, CompactU64, StoredBlockBodyIndices, StoredBlockOmmers, StoredBlockWithdrawals, }; - use reth_primitives::{Account, Receipt, ReceiptWithBloom}; + use reth_primitives::{Account, Receipt}; use reth_prune_types::{PruneCheckpoint, PruneMode, PruneSegment}; use reth_stages_types::{ AccountHashingCheckpoint, CheckpointBlockRange, EntitiesCheckpoint, ExecutionCheckpoint, @@ -40,7 +40,6 @@ mod 
tests { assert_eq!(PruneMode::bitflag_encoded_bytes(), 1); assert_eq!(PruneSegment::bitflag_encoded_bytes(), 1); assert_eq!(Receipt::bitflag_encoded_bytes(), 2); - assert_eq!(ReceiptWithBloom::bitflag_encoded_bytes(), 0); assert_eq!(StageCheckpoint::bitflag_encoded_bytes(), 1); assert_eq!(StageUnitCheckpoint::bitflag_encoded_bytes(), 1); assert_eq!(StoredBlockBodyIndices::bitflag_encoded_bytes(), 1); @@ -65,7 +64,6 @@ mod tests { validate_bitflag_backwards_compat!(PruneMode, UnusedBits::Zero); validate_bitflag_backwards_compat!(PruneSegment, UnusedBits::Zero); validate_bitflag_backwards_compat!(Receipt, UnusedBits::NotZero); - validate_bitflag_backwards_compat!(ReceiptWithBloom, UnusedBits::Zero); validate_bitflag_backwards_compat!(StageCheckpoint, UnusedBits::NotZero); validate_bitflag_backwards_compat!(StageUnitCheckpoint, UnusedBits::Zero); validate_bitflag_backwards_compat!(StoredBlockBodyIndices, UnusedBits::Zero); diff --git a/crates/primitives/src/receipt.rs b/crates/primitives/src/receipt.rs index f4567de421e5..93c0af1d9714 100644 --- a/crates/primitives/src/receipt.rs +++ b/crates/primitives/src/receipt.rs @@ -72,7 +72,6 @@ impl Receipt { } } -// todo: replace with alloy receipt impl TxReceipt for Receipt { fn status_or_post_state(&self) -> Eip658Value { self.success.into() @@ -191,8 +190,6 @@ impl From for ReceiptWithBloom { /// [`Receipt`] with calculated bloom filter. #[derive(Clone, Debug, PartialEq, Eq, Default, Serialize, Deserialize)] #[cfg_attr(any(test, feature = "arbitrary"), derive(arbitrary::Arbitrary))] -#[cfg_attr(any(test, feature = "reth-codec"), derive(reth_codecs::Compact))] -#[cfg_attr(any(test, feature = "reth-codec"), reth_codecs::add_arbitrary_tests(compact))] pub struct ReceiptWithBloom { /// Bloom filter build from logs. 
pub bloom: Bloom, diff --git a/crates/storage/db-api/src/models/mod.rs b/crates/storage/db-api/src/models/mod.rs index 00787194c718..7b1cd5a1ddb3 100644 --- a/crates/storage/db-api/src/models/mod.rs +++ b/crates/storage/db-api/src/models/mod.rs @@ -312,7 +312,7 @@ mod tests { fn test_ensure_backwards_compatibility() { use super::*; use reth_codecs::{test_utils::UnusedBits, validate_bitflag_backwards_compat}; - use reth_primitives::{Account, Receipt, ReceiptWithBloom}; + use reth_primitives::{Account, Receipt}; use reth_prune_types::{PruneCheckpoint, PruneMode, PruneSegment}; use reth_stages_types::{ AccountHashingCheckpoint, CheckpointBlockRange, EntitiesCheckpoint, @@ -333,7 +333,6 @@ mod tests { assert_eq!(PruneMode::bitflag_encoded_bytes(), 1); assert_eq!(PruneSegment::bitflag_encoded_bytes(), 1); assert_eq!(Receipt::bitflag_encoded_bytes(), 1); - assert_eq!(ReceiptWithBloom::bitflag_encoded_bytes(), 0); assert_eq!(StageCheckpoint::bitflag_encoded_bytes(), 1); assert_eq!(StageUnitCheckpoint::bitflag_encoded_bytes(), 1); assert_eq!(StoredBlockBodyIndices::bitflag_encoded_bytes(), 1); @@ -355,7 +354,6 @@ mod tests { validate_bitflag_backwards_compat!(PruneMode, UnusedBits::Zero); validate_bitflag_backwards_compat!(PruneSegment, UnusedBits::Zero); validate_bitflag_backwards_compat!(Receipt, UnusedBits::Zero); - validate_bitflag_backwards_compat!(ReceiptWithBloom, UnusedBits::Zero); validate_bitflag_backwards_compat!(StageCheckpoint, UnusedBits::NotZero); validate_bitflag_backwards_compat!(StageUnitCheckpoint, UnusedBits::Zero); validate_bitflag_backwards_compat!(StoredBlockBodyIndices, UnusedBits::Zero); From 6977cf045349519d4acc7a0200cf0bf75968ce18 Mon Sep 17 00:00:00 2001 From: ftupas <35031356+ftupas@users.noreply.github.com> Date: Wed, 20 Nov 2024 11:56:44 +0100 Subject: [PATCH 059/156] feat: add `TaskSpawner` to spawn validation requests as blocking (#12543) Co-authored-by: Matthias Seitz --- crates/rpc/rpc-builder/src/lib.rs | 2 + crates/rpc/rpc/src/validation.rs | 144 ++++++++++++++++++------------ 2 files changed, 91 insertions(+), 55 deletions(-) diff --git a/crates/rpc/rpc-builder/src/lib.rs b/crates/rpc/rpc-builder/src/lib.rs index 0d86c838d51c..207bc9ec5be5 100644 --- a/crates/rpc/rpc-builder/src/lib.rs +++ b/crates/rpc/rpc-builder/src/lib.rs @@ -1252,6 +1252,7 @@ where Arc::new(self.consensus.clone()), self.block_executor.clone(), self.config.flashbots.clone(), + Box::new(self.executor.clone()), ) } } @@ -1416,6 +1417,7 @@ where Arc::new(self.consensus.clone()), self.block_executor.clone(), self.config.flashbots.clone(), + Box::new(self.executor.clone()), ) .into_rpc() .into(), diff --git a/crates/rpc/rpc/src/validation.rs b/crates/rpc/rpc/src/validation.rs index b997dec1e015..a5e29bb739f9 100644 --- a/crates/rpc/rpc/src/validation.rs +++ b/crates/rpc/rpc/src/validation.rs @@ -5,13 +5,13 @@ use alloy_rpc_types_beacon::relay::{ BuilderBlockValidationRequestV3, BuilderBlockValidationRequestV4, }; use alloy_rpc_types_engine::{ - BlobsBundleV1, CancunPayloadFields, ExecutionPayload, ExecutionPayloadSidecar, + BlobsBundleV1, CancunPayloadFields, ExecutionPayload, ExecutionPayloadSidecar, PayloadError, }; use async_trait::async_trait; use jsonrpsee::core::RpcResult; use reth_chainspec::{ChainSpecProvider, EthereumHardforks}; use reth_consensus::{Consensus, PostExecutionInput}; -use reth_errors::{BlockExecutionError, ConsensusError, ProviderError, RethError}; +use reth_errors::{BlockExecutionError, ConsensusError, ProviderError}; use reth_ethereum_consensus::GAS_LIMIT_BOUND_DIVISOR; 
use reth_evm::execute::{BlockExecutorProvider, Executor}; use reth_payload_validator::ExecutionPayloadValidator; @@ -22,16 +22,16 @@ use reth_provider::{ }; use reth_revm::{cached::CachedReads, database::StateProviderDatabase}; use reth_rpc_api::BlockSubmissionValidationApiServer; -use reth_rpc_eth_types::EthApiError; -use reth_rpc_server_types::{result::internal_rpc_err, ToRpcResult}; +use reth_rpc_server_types::result::internal_rpc_err; +use reth_tasks::TaskSpawner; use reth_trie::HashedPostState; use revm_primitives::{Address, B256, U256}; use serde::{Deserialize, Serialize}; use std::{collections::HashSet, sync::Arc}; -use tokio::sync::RwLock; +use tokio::sync::{oneshot, RwLock}; /// The type that implements the `validation` rpc namespace trait -#[derive(Debug, derive_more::Deref)] +#[derive(Clone, Debug, derive_more::Deref)] pub struct ValidationApi { #[deref] inner: Arc>, @@ -47,6 +47,7 @@ where consensus: Arc, executor_provider: E, config: ValidationApiConfig, + task_spawner: Box, ) -> Self { let ValidationApiConfig { disallow } = config; @@ -58,6 +59,7 @@ where executor_provider, disallow, cached_state: Default::default(), + task_spawner, }); Self { inner } @@ -338,55 +340,23 @@ where Ok(versioned_hashes) } -} - -#[async_trait] -impl BlockSubmissionValidationApiServer for ValidationApi -where - Provider: BlockReaderIdExt - + ChainSpecProvider - + StateProviderFactory - + HeaderProvider - + AccountReader - + WithdrawalsProvider - + Clone - + 'static, - E: BlockExecutorProvider, -{ - async fn validate_builder_submission_v1( - &self, - _request: BuilderBlockValidationRequest, - ) -> RpcResult<()> { - Err(internal_rpc_err("unimplemented")) - } - async fn validate_builder_submission_v2( - &self, - _request: BuilderBlockValidationRequestV2, - ) -> RpcResult<()> { - Err(internal_rpc_err("unimplemented")) - } - - /// Validates a block submitted to the relay + /// Core logic for validating the builder submission v3 async fn validate_builder_submission_v3( &self, request: BuilderBlockValidationRequestV3, - ) -> RpcResult<()> { + ) -> Result<(), ValidationApiError> { let block = self .payload_validator .ensure_well_formed_payload( ExecutionPayload::V3(request.request.execution_payload), ExecutionPayloadSidecar::v3(CancunPayloadFields { parent_beacon_block_root: request.parent_beacon_block_root, - versioned_hashes: self - .validate_blobs_bundle(request.request.blobs_bundle) - .map_err(|e| RethError::Other(e.into())) - .to_rpc_result()?, + versioned_hashes: self.validate_blobs_bundle(request.request.blobs_bundle)?, }), - ) - .to_rpc_result()? + )? 
.try_seal_with_senders() - .map_err(|_| EthApiError::InvalidTransactionSignature)?; + .map_err(|_| ValidationApiError::InvalidTransactionSignature)?; self.validate_message_against_block( block, @@ -394,15 +364,13 @@ where request.registered_gas_limit, ) .await - .map_err(|e| RethError::Other(e.into())) - .to_rpc_result() } - /// Validates a block submitted to the relay + /// Core logic for validating the builder submission v4 async fn validate_builder_submission_v4( &self, request: BuilderBlockValidationRequestV4, - ) -> RpcResult<()> { + ) -> Result<(), ValidationApiError> { let block = self .payload_validator .ensure_well_formed_payload( @@ -411,16 +379,13 @@ where CancunPayloadFields { parent_beacon_block_root: request.parent_beacon_block_root, versioned_hashes: self - .validate_blobs_bundle(request.request.blobs_bundle) - .map_err(|e| RethError::Other(e.into())) - .to_rpc_result()?, + .validate_blobs_bundle(request.request.blobs_bundle)?, }, request.request.execution_requests.into(), ), - ) - .to_rpc_result()? + )? .try_seal_with_senders() - .map_err(|_| EthApiError::InvalidTransactionSignature)?; + .map_err(|_| ValidationApiError::InvalidTransactionSignature)?; self.validate_message_against_block( block, @@ -428,8 +393,70 @@ where request.registered_gas_limit, ) .await - .map_err(|e| RethError::Other(e.into())) - .to_rpc_result() + } +} + +#[async_trait] +impl BlockSubmissionValidationApiServer for ValidationApi +where + Provider: BlockReaderIdExt + + ChainSpecProvider + + StateProviderFactory + + HeaderProvider + + AccountReader + + WithdrawalsProvider + + Clone + + 'static, + E: BlockExecutorProvider, +{ + async fn validate_builder_submission_v1( + &self, + _request: BuilderBlockValidationRequest, + ) -> RpcResult<()> { + Err(internal_rpc_err("unimplemented")) + } + + async fn validate_builder_submission_v2( + &self, + _request: BuilderBlockValidationRequestV2, + ) -> RpcResult<()> { + Err(internal_rpc_err("unimplemented")) + } + + /// Validates a block submitted to the relay + async fn validate_builder_submission_v3( + &self, + request: BuilderBlockValidationRequestV3, + ) -> RpcResult<()> { + let this = self.clone(); + let (tx, rx) = oneshot::channel(); + + self.task_spawner.spawn_blocking(Box::pin(async move { + let result = Self::validate_builder_submission_v3(&this, request) + .await + .map_err(|err| internal_rpc_err(err.to_string())); + let _ = tx.send(result); + })); + + rx.await.map_err(|_| internal_rpc_err("Internal blocking task error"))? + } + + /// Validates a block submitted to the relay + async fn validate_builder_submission_v4( + &self, + request: BuilderBlockValidationRequestV4, + ) -> RpcResult<()> { + let this = self.clone(); + let (tx, rx) = oneshot::channel(); + + self.task_spawner.spawn_blocking(Box::pin(async move { + let result = Self::validate_builder_submission_v4(&this, request) + .await + .map_err(|err| internal_rpc_err(err.to_string())); + let _ = tx.send(result); + })); + + rx.await.map_err(|_| internal_rpc_err("Internal blocking task error"))? } } @@ -450,6 +477,8 @@ pub struct ValidationApiInner { /// latest head block state. Uses async `RwLock` to safely handle concurrent validation /// requests. cached_state: RwLock<(B256, CachedReads)>, + /// Task spawner for blocking operations + task_spawner: Box, } /// Configuration for validation API. 
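The wrappers above follow a common shape: clone the handler, hand the CPU-heavy validation to a blocking task, and await the outcome over a oneshot channel so the async RPC threads stay responsive. A minimal, self-contained sketch of that shape using plain tokio; reth routes the spawn through its `TaskSpawner` abstraction instead, and all names below are illustrative:

use tokio::sync::oneshot;

// Stand-in for the CPU-heavy work done by the real validation endpoint.
fn validate_payload(payload: &[u8]) -> Result<(), String> {
    if payload.is_empty() {
        return Err("empty payload".to_string());
    }
    Ok(())
}

async fn handle_validation_request(payload: Vec<u8>) -> Result<(), String> {
    let (tx, rx) = oneshot::channel();

    // Run the heavy work on tokio's blocking pool and send the outcome back.
    tokio::task::spawn_blocking(move || {
        let result = validate_payload(&payload);
        // The receiver may have been dropped if the caller went away; ignore that case.
        let _ = tx.send(result);
    });

    // Await the result; a dropped sender surfaces as an internal error.
    rx.await.map_err(|_| "validation task dropped before responding".to_string())?
}

#[tokio::main]
async fn main() {
    assert!(handle_validation_request(vec![1, 2, 3]).await.is_ok());
    assert!(handle_validation_request(Vec::new()).await.is_err());
}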
@@ -476,6 +505,9 @@ pub enum ValidationApiError { ProposerPayment, #[error("invalid blobs bundle")] InvalidBlobsBundle, + /// When the transaction signature is invalid + #[error("invalid transaction signature")] + InvalidTransactionSignature, #[error("block accesses blacklisted address: {_0}")] Blacklist(Address), #[error(transparent)] @@ -486,4 +518,6 @@ pub enum ValidationApiError { Provider(#[from] ProviderError), #[error(transparent)] Execution(#[from] BlockExecutionError), + #[error(transparent)] + Payload(#[from] PayloadError), } From 868f3acdbcd4e7df4c351eff38fd644c1f285ba3 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Wed, 20 Nov 2024 15:07:24 +0400 Subject: [PATCH 060/156] feat: integrate `HeaderValidator` + make `FileClient` generic over block (#12681) --- bin/reth/src/commands/debug_cmd/execution.rs | 2 +- .../consensus/beacon/src/engine/test_utils.rs | 2 +- crates/consensus/consensus/src/lib.rs | 23 +++++- crates/net/downloaders/src/bodies/bodies.rs | 3 +- crates/net/downloaders/src/bodies/task.rs | 7 +- crates/net/downloaders/src/file_client.rs | 76 ++++++++++--------- crates/net/downloaders/src/file_codec.rs | 20 +++-- .../src/headers/reverse_headers.rs | 6 +- crates/net/downloaders/src/headers/task.rs | 7 +- crates/net/downloaders/src/test_utils/mod.rs | 2 +- crates/net/p2p/src/bodies/downloader.rs | 4 +- crates/net/p2p/src/headers/client.rs | 3 +- crates/net/p2p/src/headers/downloader.rs | 4 +- crates/node/builder/src/setup.rs | 4 +- crates/primitives-traits/src/block/body.rs | 5 +- crates/primitives-traits/src/block/mod.rs | 14 +++- crates/stages/stages/src/lib.rs | 2 +- crates/stages/stages/src/sets.rs | 40 +++++++--- crates/stages/stages/src/stages/headers.rs | 8 +- 19 files changed, 143 insertions(+), 89 deletions(-) diff --git a/bin/reth/src/commands/debug_cmd/execution.rs b/bin/reth/src/commands/debug_cmd/execution.rs index a6203ea2a73e..0210142be713 100644 --- a/bin/reth/src/commands/debug_cmd/execution.rs +++ b/bin/reth/src/commands/debug_cmd/execution.rs @@ -73,7 +73,7 @@ impl> Command { { // building network downloaders using the fetch client let header_downloader = ReverseHeadersDownloaderBuilder::new(config.stages.headers) - .build(client.clone(), Arc::clone(&consensus)) + .build(client.clone(), consensus.clone().as_header_validator()) .into_task_with(task_executor); let body_downloader = BodiesDownloaderBuilder::new(config.stages.bodies) diff --git a/crates/consensus/beacon/src/engine/test_utils.rs b/crates/consensus/beacon/src/engine/test_utils.rs index 64daba2b453d..0ebef1efe6e6 100644 --- a/crates/consensus/beacon/src/engine/test_utils.rs +++ b/crates/consensus/beacon/src/engine/test_utils.rs @@ -370,7 +370,7 @@ where .with_tip_sender(tip_tx), TestPipelineConfig::Real => { let header_downloader = ReverseHeadersDownloaderBuilder::default() - .build(client.clone(), consensus.clone()) + .build(client.clone(), consensus.clone().as_header_validator()) .into_task(); let body_downloader = BodiesDownloaderBuilder::default() diff --git a/crates/consensus/consensus/src/lib.rs b/crates/consensus/consensus/src/lib.rs index e059305911f6..da90439af7ff 100644 --- a/crates/consensus/consensus/src/lib.rs +++ b/crates/consensus/consensus/src/lib.rs @@ -11,7 +11,7 @@ extern crate alloc; -use alloc::{fmt::Debug, vec::Vec}; +use alloc::{fmt::Debug, sync::Arc, vec::Vec}; use alloy_consensus::Header; use alloy_eips::eip7685::Requests; use alloy_primitives::{BlockHash, BlockNumber, Bloom, B256, U256}; @@ -46,7 +46,9 @@ impl<'a> PostExecutionInput<'a> { /// Consensus is a 
protocol that chooses canonical chain. #[auto_impl::auto_impl(&, Arc)] -pub trait Consensus: HeaderValidator + Debug + Send + Sync { +pub trait Consensus: + AsHeaderValidator + HeaderValidator + Debug + Send + Sync +{ /// Ensures that body field values match the header. fn validate_body_against_header( &self, @@ -143,6 +145,23 @@ pub trait HeaderValidator: Debug + Send + Sync { ) -> Result<(), ConsensusError>; } +/// Helper trait to cast `Arc` to `Arc` +pub trait AsHeaderValidator: HeaderValidator { + /// Converts the [`Arc`] of self to [`Arc`] of [`HeaderValidator`] + fn as_header_validator<'a>(self: Arc) -> Arc + 'a> + where + Self: 'a; +} + +impl, H> AsHeaderValidator for T { + fn as_header_validator<'a>(self: Arc) -> Arc + 'a> + where + Self: 'a, + { + self + } +} + /// Consensus Errors #[derive(Debug, PartialEq, Eq, Clone, derive_more::Display, derive_more::Error)] pub enum ConsensusError { diff --git a/crates/net/downloaders/src/bodies/bodies.rs b/crates/net/downloaders/src/bodies/bodies.rs index bebc51ad7725..82f45dd23bfe 100644 --- a/crates/net/downloaders/src/bodies/bodies.rs +++ b/crates/net/downloaders/src/bodies/bodies.rs @@ -20,6 +20,7 @@ use reth_tasks::{TaskSpawner, TokioTaskExecutor}; use std::{ cmp::Ordering, collections::BinaryHeap, + fmt::Debug, mem, ops::RangeInclusive, pin::Pin, @@ -298,7 +299,7 @@ where impl BodyDownloader for BodiesDownloader where - B: BodiesClient + 'static, + B: BodiesClient + 'static, Provider: HeaderProvider + Unpin + 'static, { type Body = B::Body; diff --git a/crates/net/downloaders/src/bodies/task.rs b/crates/net/downloaders/src/bodies/task.rs index de1638f3e665..a2b63c8ed186 100644 --- a/crates/net/downloaders/src/bodies/task.rs +++ b/crates/net/downloaders/src/bodies/task.rs @@ -8,6 +8,7 @@ use reth_network_p2p::{ }; use reth_tasks::{TaskSpawner, TokioTaskExecutor}; use std::{ + fmt::Debug, future::Future, ops::RangeInclusive, pin::Pin, @@ -47,10 +48,10 @@ impl TaskDownloader { /// use reth_network_p2p::bodies::client::BodiesClient; /// use reth_primitives_traits::InMemorySize; /// use reth_storage_api::HeaderProvider; - /// use std::sync::Arc; + /// use std::{fmt::Debug, sync::Arc}; /// /// fn t< - /// B: BodiesClient + 'static, + /// B: BodiesClient + 'static, /// Provider: HeaderProvider + Unpin + 'static, /// >( /// client: Arc, @@ -90,7 +91,7 @@ impl TaskDownloader { } } -impl BodyDownloader for TaskDownloader { +impl BodyDownloader for TaskDownloader { type Body = B; fn set_download_range(&mut self, range: RangeInclusive) -> DownloadResult<()> { diff --git a/crates/net/downloaders/src/file_client.rs b/crates/net/downloaders/src/file_client.rs index 486d4a05127a..ff352bc23049 100644 --- a/crates/net/downloaders/src/file_client.rs +++ b/crates/net/downloaders/src/file_client.rs @@ -1,8 +1,8 @@ use std::{collections::HashMap, io, path::Path}; -use alloy_consensus::Header; +use alloy_consensus::BlockHeader; use alloy_eips::BlockHashOrNumber; -use alloy_primitives::{BlockHash, BlockNumber, B256}; +use alloy_primitives::{BlockHash, BlockNumber, Sealable, B256}; use futures::Future; use itertools::Either; use reth_network_p2p::{ @@ -13,7 +13,8 @@ use reth_network_p2p::{ priority::Priority, }; use reth_network_peers::PeerId; -use reth_primitives::{BlockBody, SealedHeader}; +use reth_primitives::SealedHeader; +use reth_primitives_traits::{Block, BlockBody, FullBlock}; use thiserror::Error; use tokio::{fs::File, io::AsyncReadExt}; use tokio_stream::StreamExt; @@ -40,15 +41,15 @@ pub const DEFAULT_BYTE_LEN_CHUNK_CHAIN_FILE: u64 = 1_000_000_000; 
/// /// This reads the entire file into memory, so it is not suitable for large files. #[derive(Debug)] -pub struct FileClient { +pub struct FileClient { /// The buffered headers retrieved when fetching new bodies. - headers: HashMap, + headers: HashMap, /// A mapping between block hash and number. hash_to_number: HashMap, /// The buffered bodies retrieved when fetching new headers. - bodies: HashMap, + bodies: HashMap, } /// An error that can occur when constructing and using a [`FileClient`]. @@ -73,7 +74,7 @@ impl From<&'static str> for FileClientError { } } -impl FileClient { +impl FileClient { /// Create a new file client from a file path. pub async fn new>(path: P) -> Result { let file = File::open(path).await?; @@ -114,7 +115,7 @@ impl FileClient { /// Clones and returns the highest header of this client has or `None` if empty. Seals header /// before returning. - pub fn tip_header(&self) -> Option { + pub fn tip_header(&self) -> Option> { self.headers.get(&self.max_block()?).map(|h| SealedHeader::seal(h.clone())) } @@ -137,13 +138,13 @@ impl FileClient { } /// Use the provided bodies as the file client's block body buffer. - pub fn with_bodies(mut self, bodies: HashMap) -> Self { + pub fn with_bodies(mut self, bodies: HashMap) -> Self { self.bodies = bodies; self } /// Use the provided headers as the file client's block body buffer. - pub fn with_headers(mut self, headers: HashMap) -> Self { + pub fn with_headers(mut self, headers: HashMap) -> Self { self.headers = headers; for (number, header) in &self.headers { self.hash_to_number.insert(header.hash_slow(), *number); @@ -162,14 +163,14 @@ impl FileClient { } /// Returns an iterator over headers in the client. - pub fn headers_iter(&self) -> impl Iterator { + pub fn headers_iter(&self) -> impl Iterator { self.headers.values() } /// Returns a mutable iterator over bodies in the client. /// /// Panics, if file client headers and bodies are not mapping 1-1. - pub fn bodies_iter_mut(&mut self) -> impl Iterator { + pub fn bodies_iter_mut(&mut self) -> impl Iterator { let bodies = &mut self.bodies; let numbers = &self.hash_to_number; bodies.iter_mut().map(|(hash, body)| (numbers[hash], body)) @@ -177,27 +178,28 @@ impl FileClient { /// Returns the current number of transactions in the client. pub fn total_transactions(&self) -> usize { - self.bodies.iter().fold(0, |acc, (_, body)| acc + body.transactions.len()) + self.bodies.iter().fold(0, |acc, (_, body)| acc + body.transactions().len()) } } -impl FromReader for FileClient { +impl FromReader for FileClient { type Error = FileClientError; /// Initialize the [`FileClient`] from bytes that have been read from file. 
- fn from_reader( - reader: B, + fn from_reader( + reader: R, num_bytes: u64, ) -> impl Future, Self::Error>> where - B: AsyncReadExt + Unpin, + R: AsyncReadExt + Unpin, { let mut headers = HashMap::default(); let mut hash_to_number = HashMap::default(); let mut bodies = HashMap::default(); // use with_capacity to make sure the internal buffer contains the entire chunk - let mut stream = FramedRead::with_capacity(reader, BlockFileCodec, num_bytes as usize); + let mut stream = + FramedRead::with_capacity(reader, BlockFileCodec::::default(), num_bytes as usize); trace!(target: "downloaders::file", target_num_bytes=num_bytes, @@ -225,13 +227,13 @@ impl FromReader for FileClient { } Err(err) => return Err(err), }; - let block_number = block.header.number; - let block_hash = block.header.hash_slow(); + let block_number = block.header().number(); + let block_hash = block.header().hash_slow(); // add to the internal maps - headers.insert(block.header.number, block.header.clone()); - hash_to_number.insert(block_hash, block.header.number); - bodies.insert(block_hash, block.into()); + headers.insert(block.header().number(), block.header().clone()); + hash_to_number.insert(block_hash, block.header().number()); + bodies.insert(block_hash, block.body().clone()); if log_interval == 0 { trace!(target: "downloaders::file", @@ -260,9 +262,9 @@ impl FromReader for FileClient { } } -impl HeadersClient for FileClient { - type Header = Header; - type Output = HeadersFut; +impl HeadersClient for FileClient { + type Header = B::Header; + type Output = HeadersFut; fn get_headers_with_priority( &self, @@ -311,9 +313,9 @@ impl HeadersClient for FileClient { } } -impl BodiesClient for FileClient { - type Body = BlockBody; - type Output = BodiesFut; +impl BodiesClient for FileClient { + type Body = B::Body; + type Output = BodiesFut; fn get_block_bodies_with_priority( &self, @@ -336,7 +338,7 @@ impl BodiesClient for FileClient { } } -impl DownloadClient for FileClient { +impl DownloadClient for FileClient { fn report_bad_message(&self, _peer_id: PeerId) { warn!("Reported a bad message on a file client, the file may be corrupted or invalid"); // noop @@ -542,7 +544,7 @@ mod tests { // create an empty file let file = tempfile::tempfile().unwrap(); - let client = + let client: Arc = Arc::new(FileClient::from_file(file.into()).await.unwrap().with_bodies(bodies.clone())); let mut downloader = BodiesDownloaderBuilder::default().build( client.clone(), @@ -567,14 +569,14 @@ mod tests { let p0 = child_header(&p1); let file = tempfile::tempfile().unwrap(); - let client = Arc::new(FileClient::from_file(file.into()).await.unwrap().with_headers( - HashMap::from([ + let client: Arc = Arc::new( + FileClient::from_file(file.into()).await.unwrap().with_headers(HashMap::from([ (0u64, p0.clone().unseal()), (1, p1.clone().unseal()), (2, p2.clone().unseal()), (3, p3.clone().unseal()), - ]), - )); + ])), + ); let mut downloader = ReverseHeadersDownloaderBuilder::default() .stream_batch_size(3) @@ -596,7 +598,7 @@ mod tests { // Generate some random blocks let (file, headers, _) = generate_bodies_file(0..=19).await; // now try to read them back - let client = Arc::new(FileClient::from_file(file).await.unwrap()); + let client: Arc = Arc::new(FileClient::from_file(file).await.unwrap()); // construct headers downloader and use first header let mut header_downloader = ReverseHeadersDownloaderBuilder::default() @@ -621,7 +623,7 @@ mod tests { let (file, headers, mut bodies) = generate_bodies_file(0..=19).await; // now try to read them back - 
let client = Arc::new(FileClient::from_file(file).await.unwrap()); + let client: Arc = Arc::new(FileClient::from_file(file).await.unwrap()); // insert headers in db for the bodies downloader insert_headers(factory.db_ref().db(), &headers); diff --git a/crates/net/downloaders/src/file_codec.rs b/crates/net/downloaders/src/file_codec.rs index 3e754f9cf49b..57a15b6c888c 100644 --- a/crates/net/downloaders/src/file_codec.rs +++ b/crates/net/downloaders/src/file_codec.rs @@ -3,7 +3,6 @@ use crate::file_client::FileClientError; use alloy_primitives::bytes::{Buf, BytesMut}; use alloy_rlp::{Decodable, Encodable}; -use reth_primitives::Block; use tokio_util::codec::{Decoder, Encoder}; /// Codec for reading raw block bodies from a file. @@ -19,10 +18,16 @@ use tokio_util::codec::{Decoder, Encoder}; /// /// It's recommended to use [`with_capacity`](tokio_util::codec::FramedRead::with_capacity) to set /// the capacity of the framed reader to the size of the file. -pub(crate) struct BlockFileCodec; +pub(crate) struct BlockFileCodec(std::marker::PhantomData); -impl Decoder for BlockFileCodec { - type Item = Block; +impl Default for BlockFileCodec { + fn default() -> Self { + Self(std::marker::PhantomData) + } +} + +impl Decoder for BlockFileCodec { + type Item = B; type Error = FileClientError; fn decode(&mut self, src: &mut BytesMut) -> Result, Self::Error> { @@ -31,18 +36,17 @@ impl Decoder for BlockFileCodec { } let buf_slice = &mut src.as_ref(); - let body = - Block::decode(buf_slice).map_err(|err| FileClientError::Rlp(err, src.to_vec()))?; + let body = B::decode(buf_slice).map_err(|err| FileClientError::Rlp(err, src.to_vec()))?; src.advance(src.len() - buf_slice.len()); Ok(Some(body)) } } -impl Encoder for BlockFileCodec { +impl Encoder for BlockFileCodec { type Error = FileClientError; - fn encode(&mut self, item: Block, dst: &mut BytesMut) -> Result<(), Self::Error> { + fn encode(&mut self, item: B, dst: &mut BytesMut) -> Result<(), Self::Error> { item.encode(dst); Ok(()) } diff --git a/crates/net/downloaders/src/headers/reverse_headers.rs b/crates/net/downloaders/src/headers/reverse_headers.rs index 2d79e0a7af6d..63a20ff27f5b 100644 --- a/crates/net/downloaders/src/headers/reverse_headers.rs +++ b/crates/net/downloaders/src/headers/reverse_headers.rs @@ -9,7 +9,7 @@ use futures::{stream::Stream, FutureExt}; use futures_util::{stream::FuturesUnordered, StreamExt}; use rayon::prelude::*; use reth_config::config::HeadersConfig; -use reth_consensus::{Consensus, HeaderValidator}; +use reth_consensus::HeaderValidator; use reth_network_p2p::{ error::{DownloadError, DownloadResult, PeerRequestResult}, headers::{ @@ -68,7 +68,7 @@ impl From for ReverseHeadersDownloaderError { #[derive(Debug)] pub struct ReverseHeadersDownloader { /// Consensus client used to validate headers - consensus: Arc>, + consensus: Arc>, /// Client used to download headers. client: Arc, /// The local head of the chain. 
@@ -1165,7 +1165,7 @@ impl ReverseHeadersDownloaderBuilder { pub fn build( self, client: H, - consensus: Arc>, + consensus: Arc>, ) -> ReverseHeadersDownloader where H: HeadersClient + 'static, diff --git a/crates/net/downloaders/src/headers/task.rs b/crates/net/downloaders/src/headers/task.rs index 81c4cd80da3f..3dbfd5e3615e 100644 --- a/crates/net/downloaders/src/headers/task.rs +++ b/crates/net/downloaders/src/headers/task.rs @@ -8,6 +8,7 @@ use reth_network_p2p::headers::{ use reth_primitives::SealedHeader; use reth_tasks::{TaskSpawner, TokioTaskExecutor}; use std::{ + fmt::Debug, future::Future, pin::Pin, task::{ready, Context, Poll}, @@ -44,10 +45,10 @@ impl TaskDownloader { /// # use std::sync::Arc; /// # use reth_downloaders::headers::reverse_headers::ReverseHeadersDownloader; /// # use reth_downloaders::headers::task::TaskDownloader; - /// # use reth_consensus::Consensus; + /// # use reth_consensus::HeaderValidator; /// # use reth_network_p2p::headers::client::HeadersClient; /// # use reth_primitives_traits::BlockHeader; - /// # fn t + 'static>(consensus:Arc>, client: Arc) { + /// # fn t + 'static>(consensus:Arc>, client: Arc) { /// let downloader = ReverseHeadersDownloader::::builder().build( /// client, /// consensus @@ -82,7 +83,7 @@ impl TaskDownloader { } } -impl HeaderDownloader for TaskDownloader { +impl HeaderDownloader for TaskDownloader { type Header = H; fn update_sync_gap(&mut self, head: SealedHeader, target: SyncTarget) { diff --git a/crates/net/downloaders/src/test_utils/mod.rs b/crates/net/downloaders/src/test_utils/mod.rs index 7755c5e6017c..635383ce3f34 100644 --- a/crates/net/downloaders/src/test_utils/mod.rs +++ b/crates/net/downloaders/src/test_utils/mod.rs @@ -43,7 +43,7 @@ pub(crate) async fn generate_bodies_file( let raw_block_bodies = create_raw_bodies(headers.iter().cloned(), &mut bodies.clone()); let file: File = tempfile::tempfile().unwrap().into(); - let mut writer = FramedWrite::new(file, BlockFileCodec); + let mut writer = FramedWrite::new(file, BlockFileCodec::default()); // rlp encode one after the other for block in raw_block_bodies { diff --git a/crates/net/p2p/src/bodies/downloader.rs b/crates/net/p2p/src/bodies/downloader.rs index f335b21438b7..7008c08e522e 100644 --- a/crates/net/p2p/src/bodies/downloader.rs +++ b/crates/net/p2p/src/bodies/downloader.rs @@ -2,7 +2,7 @@ use super::response::BlockResponse; use crate::error::DownloadResult; use alloy_primitives::BlockNumber; use futures::Stream; -use std::ops::RangeInclusive; +use std::{fmt::Debug, ops::RangeInclusive}; /// Body downloader return type. pub type BodyDownloaderResult = DownloadResult>>; @@ -16,7 +16,7 @@ pub trait BodyDownloader: Send + Sync + Stream> + Unpin { /// The type of the body that is being downloaded. - type Body: Send + Sync + Unpin + 'static; + type Body: Debug + Send + Sync + Unpin + 'static; /// Method for setting the download range. 
fn set_download_range(&mut self, range: RangeInclusive) -> DownloadResult<()>; diff --git a/crates/net/p2p/src/headers/client.rs b/crates/net/p2p/src/headers/client.rs index 3e8f9296e076..4be6208c4a2c 100644 --- a/crates/net/p2p/src/headers/client.rs +++ b/crates/net/p2p/src/headers/client.rs @@ -50,7 +50,8 @@ impl HeadersRequest { } /// The headers future type -pub type HeadersFut = Pin>> + Send + Sync>>; +pub type HeadersFut = + Pin>> + Send + Sync>>; /// The block headers downloader client #[auto_impl::auto_impl(&, Arc, Box)] diff --git a/crates/net/p2p/src/headers/downloader.rs b/crates/net/p2p/src/headers/downloader.rs index 03ab467bafb3..eca03bdb4e79 100644 --- a/crates/net/p2p/src/headers/downloader.rs +++ b/crates/net/p2p/src/headers/downloader.rs @@ -7,6 +7,8 @@ use futures::Stream; use reth_consensus::HeaderValidator; use reth_primitives::SealedHeader; use reth_primitives_traits::BlockWithParent; +use std::fmt::Debug; + /// A downloader capable of fetching and yielding block headers. /// /// A downloader represents a distinct strategy for submitting requests to download block headers, @@ -21,7 +23,7 @@ pub trait HeaderDownloader: + Unpin { /// The header type being downloaded. - type Header: Send + Sync + Unpin + 'static; + type Header: Debug + Send + Sync + Unpin + 'static; /// Updates the gap to sync which ranges from local head to the sync target /// diff --git a/crates/node/builder/src/setup.rs b/crates/node/builder/src/setup.rs index 337e37eeedd4..400e3d844565 100644 --- a/crates/node/builder/src/setup.rs +++ b/crates/node/builder/src/setup.rs @@ -27,7 +27,7 @@ use tokio::sync::watch; pub fn build_networked_pipeline( config: &StageConfig, client: Client, - consensus: Arc, + consensus: Arc>, provider_factory: ProviderFactory, task_executor: &TaskExecutor, metrics_tx: reth_stages::MetricEventsSender, @@ -46,7 +46,7 @@ where { // building network downloaders using the fetch client let header_downloader = ReverseHeadersDownloaderBuilder::new(config.headers) - .build(client.clone(), Arc::clone(&consensus)) + .build(client.clone(), consensus.clone().as_header_validator()) .into_task_with(task_executor); let body_downloader = BodiesDownloaderBuilder::new(config.bodies) diff --git a/crates/primitives-traits/src/block/body.rs b/crates/primitives-traits/src/block/body.rs index 66c9c2d2e3a2..11c4dd785dd8 100644 --- a/crates/primitives-traits/src/block/body.rs +++ b/crates/primitives-traits/src/block/body.rs @@ -3,14 +3,13 @@ use alloc::fmt; use alloy_consensus::Transaction; -use reth_codecs::Compact; use crate::{FullSignedTx, InMemorySize, MaybeSerde}; /// Helper trait that unifies all behaviour required by transaction to support full node operations. -pub trait FullBlockBody: BlockBody + Compact {} +pub trait FullBlockBody: BlockBody {} -impl FullBlockBody for T where T: BlockBody + Compact {} +impl FullBlockBody for T where T: BlockBody {} /// Abstraction for block's body. 
#[auto_impl::auto_impl(&, Arc)] diff --git a/crates/primitives-traits/src/block/mod.rs b/crates/primitives-traits/src/block/mod.rs index 67658c39e07d..01ed75bd9673 100644 --- a/crates/primitives-traits/src/block/mod.rs +++ b/crates/primitives-traits/src/block/mod.rs @@ -5,14 +5,20 @@ pub mod header; use alloc::fmt; -use reth_codecs::Compact; +use alloy_rlp::{Decodable, Encodable}; -use crate::{BlockHeader, FullBlockHeader, InMemorySize, MaybeSerde}; +use crate::{BlockHeader, FullBlockBody, FullBlockHeader, InMemorySize, MaybeSerde}; /// Helper trait that unifies all behaviour required by block to support full node operations. -pub trait FullBlock: Block {} +pub trait FullBlock: + Block + Encodable + Decodable +{ +} -impl FullBlock for T where T: Block {} +impl FullBlock for T where + T: Block + Encodable + Decodable +{ +} /// Abstraction of block data type. // todo: make sealable super-trait, depends on diff --git a/crates/stages/stages/src/lib.rs b/crates/stages/stages/src/lib.rs index 38a0f209dbdd..ce6a96cf3496 100644 --- a/crates/stages/stages/src/lib.rs +++ b/crates/stages/stages/src/lib.rs @@ -37,7 +37,7 @@ //! # let consensus: Arc = Arc::new(TestConsensus::default()); //! # let headers_downloader = ReverseHeadersDownloaderBuilder::default().build( //! # Arc::new(TestHeadersClient::default()), -//! # consensus.clone() +//! # consensus.clone().as_header_validator() //! # ); //! # let provider_factory = create_test_provider_factory(); //! # let bodies_downloader = BodiesDownloaderBuilder::default().build( diff --git a/crates/stages/stages/src/sets.rs b/crates/stages/stages/src/sets.rs index a25fcd4e1e57..d04a96470a03 100644 --- a/crates/stages/stages/src/sets.rs +++ b/crates/stages/stages/src/sets.rs @@ -76,7 +76,11 @@ use tokio::sync::watch; /// - [`PruneStage`] (execute) /// - [`FinishStage`] #[derive(Debug)] -pub struct DefaultStages { +pub struct DefaultStages +where + H: HeaderDownloader, + B: BodyDownloader, +{ /// Configuration for the online stages online: OnlineStages, /// Executor factory needs for execution stage @@ -87,13 +91,17 @@ pub struct DefaultStages { prune_modes: PruneModes, } -impl DefaultStages { +impl DefaultStages +where + H: HeaderDownloader, + B: BodyDownloader, +{ /// Create a new set of default stages with default values. #[allow(clippy::too_many_arguments)] pub fn new( provider: Provider, tip: watch::Receiver, - consensus: Arc, + consensus: Arc>, header_downloader: H, body_downloader: B, executor_factory: E, @@ -122,6 +130,8 @@ impl DefaultStages { impl DefaultStages where E: BlockExecutorProvider, + H: HeaderDownloader, + B: BodyDownloader, { /// Appends the default offline stages and default finish stage to the given builder. pub fn add_offline_stages( @@ -164,13 +174,17 @@ where /// These stages *can* be run without network access if the specified downloaders are /// themselves offline. #[derive(Debug)] -pub struct OnlineStages { +pub struct OnlineStages +where + H: HeaderDownloader, + B: BodyDownloader, +{ /// Sync gap provider for the headers stage. provider: Provider, /// The tip for the headers stage. tip: watch::Receiver, /// The consensus engine used to validate incoming data. - consensus: Arc, + consensus: Arc>, /// The block header downloader header_downloader: H, /// The block body downloader @@ -179,12 +193,16 @@ pub struct OnlineStages { stages_config: StageConfig, } -impl OnlineStages { +impl OnlineStages +where + H: HeaderDownloader, + B: BodyDownloader, +{ /// Create a new set of online stages with default values. 
pub fn new( provider: Provider, tip: watch::Receiver, - consensus: Arc, + consensus: Arc>, header_downloader: H, body_downloader: B, stages_config: StageConfig, @@ -196,7 +214,7 @@ impl OnlineStages { impl OnlineStages where P: HeaderSyncGapProvider + 'static, - H: HeaderDownloader + 'static, + H: HeaderDownloader
+ 'static, B: BodyDownloader + 'static, { /// Create a new builder using the given headers stage. @@ -229,7 +247,7 @@ where provider, header_downloader, tip, - consensus.clone(), + consensus.clone().as_header_validator(), stages_config.etl, )) .add_stage(bodies) @@ -239,7 +257,7 @@ where impl StageSet for OnlineStages where P: HeaderSyncGapProvider + 'static, - H: HeaderDownloader + 'static, + H: HeaderDownloader
+ 'static, B: BodyDownloader + 'static, HeaderStage: Stage, BodyStage: Stage, @@ -250,7 +268,7 @@ where self.provider, self.header_downloader, self.tip, - self.consensus.clone(), + self.consensus.clone().as_header_validator(), self.stages_config.etl.clone(), )) .add_stage(BodyStage::new(self.body_downloader)) diff --git a/crates/stages/stages/src/stages/headers.rs b/crates/stages/stages/src/stages/headers.rs index 1ec55f7fd801..100fe4e979a7 100644 --- a/crates/stages/stages/src/stages/headers.rs +++ b/crates/stages/stages/src/stages/headers.rs @@ -1,7 +1,7 @@ use alloy_primitives::{BlockHash, BlockNumber, Bytes, B256}; use futures_util::StreamExt; use reth_config::config::EtlConfig; -use reth_consensus::Consensus; +use reth_consensus::HeaderValidator; use reth_db::{tables, transaction::DbTx, RawKey, RawTable, RawValue}; use reth_db_api::{ cursor::{DbCursorRO, DbCursorRW}, @@ -48,7 +48,7 @@ pub struct HeaderStage { /// The tip for the stage. tip: watch::Receiver, /// Consensus client implementation - consensus: Arc, + consensus: Arc>, /// Current sync gap. sync_gap: Option, /// ETL collector with `HeaderHash` -> `BlockNumber` @@ -63,14 +63,14 @@ pub struct HeaderStage { impl HeaderStage where - Downloader: HeaderDownloader, + Downloader: HeaderDownloader
, { /// Create a new header stage pub fn new( database: Provider, downloader: Downloader, tip: watch::Receiver, - consensus: Arc, + consensus: Arc>, etl_config: EtlConfig, ) -> Self { Self { From f12d7a92647dd2fd60286104c3b73cc63f40e206 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Wed, 20 Nov 2024 12:15:48 +0100 Subject: [PATCH 061/156] chore: use ethereum-forks types directly (#12702) --- Cargo.lock | 1 + crates/net/network/Cargo.toml | 1 + crates/net/network/src/builder.rs | 9 ++- crates/net/network/src/cache.rs | 3 +- crates/net/network/src/config.rs | 18 +++--- crates/net/network/src/discovery.rs | 24 ++++--- crates/net/network/src/error.rs | 6 +- crates/net/network/src/eth_requests.rs | 20 +++--- crates/net/network/src/fetch/client.rs | 12 ++-- crates/net/network/src/fetch/mod.rs | 20 +++--- crates/net/network/src/flattened_response.rs | 5 +- crates/net/network/src/import.rs | 6 +- crates/net/network/src/listener.rs | 3 +- crates/net/network/src/manager.rs | 62 +++++++++---------- crates/net/network/src/message.rs | 9 ++- crates/net/network/src/network.rs | 25 ++++---- crates/net/network/src/peers.rs | 28 ++++----- crates/net/network/src/protocol.rs | 13 ++-- crates/net/network/src/session/active.rs | 17 +++-- crates/net/network/src/session/conn.rs | 9 ++- crates/net/network/src/session/counter.rs | 3 +- crates/net/network/src/session/handle.rs | 14 ++--- crates/net/network/src/session/mod.rs | 15 +++-- crates/net/network/src/state.rs | 40 ++++++------ crates/net/network/src/swarm.rs | 30 +++++---- crates/net/network/src/test_utils/init.rs | 3 +- crates/net/network/src/test_utils/testnet.rs | 30 +++++---- crates/net/network/src/transactions/config.rs | 3 +- .../net/network/src/transactions/fetcher.rs | 36 +++++------ crates/net/network/src/transactions/mod.rs | 43 ++++++------- .../network/src/transactions/validation.rs | 3 +- 31 files changed, 234 insertions(+), 277 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 0326a37f4994..09bcccf652e9 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7802,6 +7802,7 @@ dependencies = [ "reth-ecies", "reth-eth-wire", "reth-eth-wire-types", + "reth-ethereum-forks", "reth-fs-util", "reth-metrics", "reth-net-banlist", diff --git a/crates/net/network/Cargo.toml b/crates/net/network/Cargo.toml index ad8e65dffc69..ab9e89c2ca84 100644 --- a/crates/net/network/Cargo.toml +++ b/crates/net/network/Cargo.toml @@ -23,6 +23,7 @@ reth-network-p2p.workspace = true reth-discv4.workspace = true reth-discv5.workspace = true reth-dns-discovery.workspace = true +reth-ethereum-forks.workspace = true reth-eth-wire.workspace = true reth-eth-wire-types.workspace = true reth-ecies.workspace = true diff --git a/crates/net/network/src/builder.rs b/crates/net/network/src/builder.rs index 31038906b25f..da003a2e2907 100644 --- a/crates/net/network/src/builder.rs +++ b/crates/net/network/src/builder.rs @@ -1,15 +1,14 @@ //! Builder support for configuring the entire setup. 
-use reth_eth_wire::{EthNetworkPrimitives, NetworkPrimitives}; -use reth_network_api::test_utils::PeersHandleProvider; -use reth_transaction_pool::TransactionPool; -use tokio::sync::mpsc; - use crate::{ eth_requests::EthRequestHandler, transactions::{TransactionsManager, TransactionsManagerConfig}, NetworkHandle, NetworkManager, }; +use reth_eth_wire::{EthNetworkPrimitives, NetworkPrimitives}; +use reth_network_api::test_utils::PeersHandleProvider; +use reth_transaction_pool::TransactionPool; +use tokio::sync::mpsc; /// We set the max channel capacity of the `EthRequestHandler` to 256 /// 256 requests with malicious 10MB body requests is 2.6GB which can be absorbed by the node. diff --git a/crates/net/network/src/cache.rs b/crates/net/network/src/cache.rs index 758b49167908..32389ec4b7b1 100644 --- a/crates/net/network/src/cache.rs +++ b/crates/net/network/src/cache.rs @@ -1,11 +1,10 @@ //! Network cache support use core::hash::BuildHasher; -use std::{fmt, hash::Hash}; - use derive_more::{Deref, DerefMut}; use itertools::Itertools; use schnellru::{ByLength, Limiter, RandomState, Unlimited}; +use std::{fmt, hash::Hash}; /// A minimal LRU cache based on a [`LruMap`](schnellru::LruMap) with limited capacity. /// diff --git a/crates/net/network/src/config.rs b/crates/net/network/src/config.rs index db7b384c2b34..e54000895a79 100644 --- a/crates/net/network/src/config.rs +++ b/crates/net/network/src/config.rs @@ -1,7 +1,11 @@ //! Network config support -use std::{collections::HashSet, net::SocketAddr, sync::Arc}; - +use crate::{ + error::NetworkError, + import::{BlockImport, ProofOfStakeBlockImport}, + transactions::TransactionsManagerConfig, + NetworkHandle, NetworkManager, +}; use reth_chainspec::{ChainSpecProvider, EthChainSpec, Hardforks}; use reth_discv4::{Discv4Config, Discv4ConfigBuilder, NatResolver, DEFAULT_DISCOVERY_ADDRESS}; use reth_discv5::NetworkStackId; @@ -9,19 +13,13 @@ use reth_dns_discovery::DnsDiscoveryConfig; use reth_eth_wire::{ EthNetworkPrimitives, HelloMessage, HelloMessageWithProtocols, NetworkPrimitives, Status, }; +use reth_ethereum_forks::{ForkFilter, Head}; use reth_network_peers::{mainnet_nodes, pk2id, sepolia_nodes, PeerId, TrustedPeer}; use reth_network_types::{PeersConfig, SessionsConfig}; -use reth_primitives::{ForkFilter, Head}; use reth_storage_api::{noop::NoopBlockReader, BlockNumReader, BlockReader, HeaderProvider}; use reth_tasks::{TaskSpawner, TokioTaskExecutor}; use secp256k1::SECP256K1; - -use crate::{ - error::NetworkError, - import::{BlockImport, ProofOfStakeBlockImport}, - transactions::TransactionsManagerConfig, - NetworkHandle, NetworkManager, -}; +use std::{collections::HashSet, net::SocketAddr, sync::Arc}; // re-export for convenience use crate::protocol::{IntoRlpxSubProtocol, RlpxSubProtocols}; diff --git a/crates/net/network/src/discovery.rs b/crates/net/network/src/discovery.rs index 5b2bb788f478..c0b9ffa7630b 100644 --- a/crates/net/network/src/discovery.rs +++ b/crates/net/network/src/discovery.rs @@ -1,13 +1,9 @@ //! Discovery support for the network. 
-use std::{ - collections::VecDeque, - net::{IpAddr, SocketAddr}, - pin::Pin, - sync::Arc, - task::{ready, Context, Poll}, +use crate::{ + cache::LruMap, + error::{NetworkError, ServiceKind}, }; - use enr::Enr; use futures::StreamExt; use reth_discv4::{DiscoveryUpdate, Discv4, Discv4Config}; @@ -15,20 +11,22 @@ use reth_discv5::{DiscoveredPeer, Discv5}; use reth_dns_discovery::{ DnsDiscoveryConfig, DnsDiscoveryHandle, DnsDiscoveryService, DnsNodeRecordUpdate, DnsResolver, }; +use reth_ethereum_forks::{EnrForkIdEntry, ForkId}; use reth_network_api::{DiscoveredEvent, DiscoveryEvent}; use reth_network_peers::{NodeRecord, PeerId}; use reth_network_types::PeerAddr; -use reth_primitives::{EnrForkIdEntry, ForkId}; use secp256k1::SecretKey; +use std::{ + collections::VecDeque, + net::{IpAddr, SocketAddr}, + pin::Pin, + sync::Arc, + task::{ready, Context, Poll}, +}; use tokio::{sync::mpsc, task::JoinHandle}; use tokio_stream::{wrappers::ReceiverStream, Stream}; use tracing::trace; -use crate::{ - cache::LruMap, - error::{NetworkError, ServiceKind}, -}; - /// Default max capacity for cache of discovered peers. /// /// Default is 10 000 peers. diff --git a/crates/net/network/src/error.rs b/crates/net/network/src/error.rs index 2709c4a29075..8156392b22f1 100644 --- a/crates/net/network/src/error.rs +++ b/crates/net/network/src/error.rs @@ -1,7 +1,6 @@ //! Possible errors when interacting with the network. -use std::{fmt, io, io::ErrorKind, net::SocketAddr}; - +use crate::session::PendingSessionHandshakeError; use reth_dns_discovery::resolver::ResolveError; use reth_ecies::ECIESErrorImpl; use reth_eth_wire::{ @@ -9,8 +8,7 @@ use reth_eth_wire::{ DisconnectReason, }; use reth_network_types::BackoffKind; - -use crate::session::PendingSessionHandshakeError; +use std::{fmt, io, io::ErrorKind, net::SocketAddr}; /// Service kind. #[derive(Debug, PartialEq, Eq, Copy, Clone)] diff --git a/crates/net/network/src/eth_requests.rs b/crates/net/network/src/eth_requests.rs index 8121b9675ed3..0f9348a42ce4 100644 --- a/crates/net/network/src/eth_requests.rs +++ b/crates/net/network/src/eth_requests.rs @@ -1,12 +1,9 @@ //! Blocks/Headers management for the p2p network. -use std::{ - future::Future, - pin::Pin, - task::{Context, Poll}, - time::Duration, +use crate::{ + budget::DEFAULT_BUDGET_TRY_DRAIN_DOWNLOADERS, metered_poll_nested_stream_with_budget, + metrics::EthRequestHandlerMetrics, }; - use alloy_consensus::Header; use alloy_eips::BlockHashOrNumber; use alloy_rlp::Encodable; @@ -20,14 +17,15 @@ use reth_network_p2p::error::RequestResult; use reth_network_peers::PeerId; use reth_primitives::BlockBody; use reth_storage_api::{BlockReader, HeaderProvider, ReceiptProvider}; +use std::{ + future::Future, + pin::Pin, + task::{Context, Poll}, + time::Duration, +}; use tokio::sync::{mpsc::Receiver, oneshot}; use tokio_stream::wrappers::ReceiverStream; -use crate::{ - budget::DEFAULT_BUDGET_TRY_DRAIN_DOWNLOADERS, metered_poll_nested_stream_with_budget, - metrics::EthRequestHandlerMetrics, -}; - // Limits: /// Maximum number of receipts to serve. diff --git a/crates/net/network/src/fetch/client.rs b/crates/net/network/src/fetch/client.rs index 584c079b8d86..e24ea167f5fe 100644 --- a/crates/net/network/src/fetch/client.rs +++ b/crates/net/network/src/fetch/client.rs @@ -1,10 +1,6 @@ //! A client implementation that can interact with the network and download data. 
-use std::sync::{ - atomic::{AtomicUsize, Ordering}, - Arc, -}; - +use crate::{fetch::DownloadRequest, flattened_response::FlattenedResponse}; use alloy_primitives::B256; use futures::{future, future::Either}; use reth_eth_wire::{EthNetworkPrimitives, NetworkPrimitives}; @@ -18,10 +14,12 @@ use reth_network_p2p::{ }; use reth_network_peers::PeerId; use reth_network_types::ReputationChangeKind; +use std::sync::{ + atomic::{AtomicUsize, Ordering}, + Arc, +}; use tokio::sync::{mpsc::UnboundedSender, oneshot}; -use crate::{fetch::DownloadRequest, flattened_response::FlattenedResponse}; - #[cfg_attr(doc, aquamarine::aquamarine)] /// Front-end API for fetching data from the network. /// diff --git a/crates/net/network/src/fetch/mod.rs b/crates/net/network/src/fetch/mod.rs index 8af6300b7056..c5474587adfb 100644 --- a/crates/net/network/src/fetch/mod.rs +++ b/crates/net/network/src/fetch/mod.rs @@ -4,15 +4,7 @@ mod client; pub use client::FetchClient; -use std::{ - collections::{HashMap, VecDeque}, - sync::{ - atomic::{AtomicU64, AtomicUsize, Ordering}, - Arc, - }, - task::{Context, Poll}, -}; - +use crate::message::BlockRequest; use alloy_primitives::B256; use futures::StreamExt; use reth_eth_wire::{EthNetworkPrimitives, GetBlockBodies, GetBlockHeaders, NetworkPrimitives}; @@ -24,11 +16,17 @@ use reth_network_p2p::{ }; use reth_network_peers::PeerId; use reth_network_types::ReputationChangeKind; +use std::{ + collections::{HashMap, VecDeque}, + sync::{ + atomic::{AtomicU64, AtomicUsize, Ordering}, + Arc, + }, + task::{Context, Poll}, +}; use tokio::sync::{mpsc, mpsc::UnboundedSender, oneshot}; use tokio_stream::wrappers::UnboundedReceiverStream; -use crate::message::BlockRequest; - type InflightHeadersRequest = Request>>; type InflightBodiesRequest = Request, PeerRequestResult>>; diff --git a/crates/net/network/src/flattened_response.rs b/crates/net/network/src/flattened_response.rs index 78c3c35f5981..df2a9db78ae4 100644 --- a/crates/net/network/src/flattened_response.rs +++ b/crates/net/network/src/flattened_response.rs @@ -1,10 +1,9 @@ +use futures::Future; +use pin_project::pin_project; use std::{ pin::Pin, task::{Context, Poll}, }; - -use futures::Future; -use pin_project::pin_project; use tokio::sync::oneshot::{error::RecvError, Receiver}; /// Flatten a [Receiver] message in order to get rid of the [RecvError] result diff --git a/crates/net/network/src/import.rs b/crates/net/network/src/import.rs index 749b3c347b37..f63bf2dd7a8c 100644 --- a/crates/net/network/src/import.rs +++ b/crates/net/network/src/import.rs @@ -1,10 +1,8 @@ //! This module provides an abstraction over block import in the form of the `BlockImport` trait. -use std::task::{Context, Poll}; - -use reth_network_peers::PeerId; - use crate::message::NewBlockMessage; +use reth_network_peers::PeerId; +use std::task::{Context, Poll}; /// Abstraction over block import. pub trait BlockImport: std::fmt::Debug + Send + Sync { diff --git a/crates/net/network/src/listener.rs b/crates/net/network/src/listener.rs index e5094f689481..9fcc15a104b5 100644 --- a/crates/net/network/src/listener.rs +++ b/crates/net/network/src/listener.rs @@ -1,13 +1,12 @@ //! Contains connection-oriented interfaces. +use futures::{ready, Stream}; use std::{ io, net::SocketAddr, pin::Pin, task::{Context, Poll}, }; - -use futures::{ready, Stream}; use tokio::net::{TcpListener, TcpStream}; /// A tcp connection listener. 
diff --git a/crates/net/network/src/manager.rs b/crates/net/network/src/manager.rs index 0738be1bcac2..c9caa412274e 100644 --- a/crates/net/network/src/manager.rs +++ b/crates/net/network/src/manager.rs @@ -15,18 +15,26 @@ //! (IP+port) of our node is published via discovery, remote peers can initiate inbound connections //! to the local node. Once a (tcp) connection is established, both peers start to authenticate a [RLPx session](https://github.com/ethereum/devp2p/blob/master/rlpx.md) via a handshake. If the handshake was successful, both peers announce their capabilities and are now ready to exchange sub-protocol messages via the `RLPx` session. -use std::{ - net::SocketAddr, - path::Path, - pin::Pin, - sync::{ - atomic::{AtomicU64, AtomicUsize, Ordering}, - Arc, - }, - task::{Context, Poll}, - time::{Duration, Instant}, +use crate::{ + budget::{DEFAULT_BUDGET_TRY_DRAIN_NETWORK_HANDLE_CHANNEL, DEFAULT_BUDGET_TRY_DRAIN_SWARM}, + config::NetworkConfig, + discovery::Discovery, + error::{NetworkError, ServiceKind}, + eth_requests::IncomingEthRequest, + import::{BlockImport, BlockImportOutcome, BlockValidation}, + listener::ConnectionListener, + message::{NewBlockMessage, PeerMessage}, + metrics::{DisconnectMetrics, NetworkMetrics, NETWORK_POOL_TRANSACTIONS_SCOPE}, + network::{NetworkHandle, NetworkHandleMessage}, + peers::PeersManager, + poll_nested_stream_with_budget, + protocol::IntoRlpxSubProtocol, + session::SessionManager, + state::NetworkState, + swarm::{Swarm, SwarmEvent}, + transactions::NetworkTransactionEvent, + FetchClient, NetworkBuilder, }; - use futures::{Future, StreamExt}; use parking_lot::Mutex; use reth_eth_wire::{ @@ -44,31 +52,21 @@ use reth_storage_api::BlockNumReader; use reth_tasks::shutdown::GracefulShutdown; use reth_tokio_util::EventSender; use secp256k1::SecretKey; +use std::{ + net::SocketAddr, + path::Path, + pin::Pin, + sync::{ + atomic::{AtomicU64, AtomicUsize, Ordering}, + Arc, + }, + task::{Context, Poll}, + time::{Duration, Instant}, +}; use tokio::sync::mpsc::{self, error::TrySendError}; use tokio_stream::wrappers::UnboundedReceiverStream; use tracing::{debug, error, trace, warn}; -use crate::{ - budget::{DEFAULT_BUDGET_TRY_DRAIN_NETWORK_HANDLE_CHANNEL, DEFAULT_BUDGET_TRY_DRAIN_SWARM}, - config::NetworkConfig, - discovery::Discovery, - error::{NetworkError, ServiceKind}, - eth_requests::IncomingEthRequest, - import::{BlockImport, BlockImportOutcome, BlockValidation}, - listener::ConnectionListener, - message::{NewBlockMessage, PeerMessage}, - metrics::{DisconnectMetrics, NetworkMetrics, NETWORK_POOL_TRANSACTIONS_SCOPE}, - network::{NetworkHandle, NetworkHandleMessage}, - peers::PeersManager, - poll_nested_stream_with_budget, - protocol::IntoRlpxSubProtocol, - session::SessionManager, - state::NetworkState, - swarm::{Swarm, SwarmEvent}, - transactions::NetworkTransactionEvent, - FetchClient, NetworkBuilder, -}; - #[cfg_attr(doc, aquamarine::aquamarine)] /// Manages the _entire_ state of the network. /// diff --git a/crates/net/network/src/message.rs b/crates/net/network/src/message.rs index 3040577415c5..4821e2592922 100644 --- a/crates/net/network/src/message.rs +++ b/crates/net/network/src/message.rs @@ -3,11 +3,6 @@ //! An `RLPx` stream is multiplexed via the prepended message-id of a framed message. //! 
Capabilities are exchanged via the `RLPx` `Hello` message as pairs of `(id, version)`, -use std::{ - sync::Arc, - task::{ready, Context, Poll}, -}; - use alloy_consensus::BlockHeader; use alloy_primitives::{Bytes, B256}; use futures::FutureExt; @@ -20,6 +15,10 @@ use reth_eth_wire::{ use reth_network_api::PeerRequest; use reth_network_p2p::error::{RequestError, RequestResult}; use reth_primitives::{PooledTransactionsElement, ReceiptWithBloom}; +use std::{ + sync::Arc, + task::{ready, Context, Poll}, +}; use tokio::sync::oneshot; /// Internal form of a `NewBlock` message diff --git a/crates/net/network/src/network.rs b/crates/net/network/src/network.rs index 2fa3fd90efe7..0af0cb1ad460 100644 --- a/crates/net/network/src/network.rs +++ b/crates/net/network/src/network.rs @@ -1,11 +1,7 @@ -use std::{ - net::SocketAddr, - sync::{ - atomic::{AtomicBool, AtomicU64, AtomicUsize, Ordering}, - Arc, - }, +use crate::{ + config::NetworkMode, protocol::RlpxSubProtocol, swarm::NetworkConnectionState, + transactions::TransactionsHandle, FetchClient, }; - use alloy_primitives::B256; use enr::Enr; use parking_lot::Mutex; @@ -15,6 +11,7 @@ use reth_eth_wire::{ DisconnectReason, EthNetworkPrimitives, NetworkPrimitives, NewBlock, NewPooledTransactionHashes, SharedTransactions, }; +use reth_ethereum_forks::Head; use reth_network_api::{ test_utils::{PeersHandle, PeersHandleProvider}, BlockDownloaderProvider, DiscoveryEvent, NetworkError, NetworkEvent, @@ -24,20 +21,22 @@ use reth_network_api::{ use reth_network_p2p::sync::{NetworkSyncUpdater, SyncState, SyncStateProvider}; use reth_network_peers::{NodeRecord, PeerId}; use reth_network_types::{PeerAddr, PeerKind, Reputation, ReputationChangeKind}; -use reth_primitives::{Head, TransactionSigned}; +use reth_primitives::TransactionSigned; use reth_tokio_util::{EventSender, EventStream}; use secp256k1::SecretKey; +use std::{ + net::SocketAddr, + sync::{ + atomic::{AtomicBool, AtomicU64, AtomicUsize, Ordering}, + Arc, + }, +}; use tokio::sync::{ mpsc::{self, UnboundedSender}, oneshot, }; use tokio_stream::wrappers::UnboundedReceiverStream; -use crate::{ - config::NetworkMode, protocol::RlpxSubProtocol, swarm::NetworkConnectionState, - transactions::TransactionsHandle, FetchClient, -}; - /// A _shareable_ network frontend. Used to interact with the network. /// /// See also [`NetworkManager`](crate::NetworkManager). diff --git a/crates/net/network/src/peers.rs b/crates/net/network/src/peers.rs index 4855ff5e7431..d4b762e3e12c 100644 --- a/crates/net/network/src/peers.rs +++ b/crates/net/network/src/peers.rs @@ -1,16 +1,13 @@ //! 
Peer related implementations -use std::{ - collections::{hash_map::Entry, HashMap, HashSet, VecDeque}, - fmt::Display, - io::{self}, - net::{IpAddr, SocketAddr}, - task::{Context, Poll}, - time::Duration, +use crate::{ + error::SessionError, + session::{Direction, PendingSessionHandshakeError}, + swarm::NetworkConnectionState, }; - use futures::StreamExt; use reth_eth_wire::{errors::EthStreamError, DisconnectReason}; +use reth_ethereum_forks::ForkId; use reth_net_banlist::BanList; use reth_network_api::test_utils::{PeerCommand, PeersHandle}; use reth_network_peers::{NodeRecord, PeerId}; @@ -22,7 +19,14 @@ use reth_network_types::{ ConnectionsConfig, Peer, PeerAddr, PeerConnectionState, PeerKind, PeersConfig, ReputationChangeKind, ReputationChangeOutcome, ReputationChangeWeights, }; -use reth_primitives::ForkId; +use std::{ + collections::{hash_map::Entry, HashMap, HashSet, VecDeque}, + fmt::Display, + io::{self}, + net::{IpAddr, SocketAddr}, + task::{Context, Poll}, + time::Duration, +}; use thiserror::Error; use tokio::{ sync::mpsc, @@ -31,12 +35,6 @@ use tokio::{ use tokio_stream::wrappers::UnboundedReceiverStream; use tracing::{trace, warn}; -use crate::{ - error::SessionError, - session::{Direction, PendingSessionHandshakeError}, - swarm::NetworkConnectionState, -}; - /// Maintains the state of _all_ the peers known to the network. /// /// This is supposed to be owned by the network itself, but can be reached via the [`PeersHandle`]. diff --git a/crates/net/network/src/protocol.rs b/crates/net/network/src/protocol.rs index eeffd1c95f4f..aa0749c2c7b9 100644 --- a/crates/net/network/src/protocol.rs +++ b/crates/net/network/src/protocol.rs @@ -2,19 +2,18 @@ //! //! See also -use std::{ - fmt, - net::SocketAddr, - ops::{Deref, DerefMut}, - pin::Pin, -}; - use alloy_primitives::bytes::BytesMut; use futures::Stream; use reth_eth_wire::{ capability::SharedCapabilities, multiplex::ProtocolConnection, protocol::Protocol, }; use reth_network_api::{Direction, PeerId}; +use std::{ + fmt, + net::SocketAddr, + ops::{Deref, DerefMut}, + pin::Pin, +}; /// A trait that allows to offer additional RLPx-based application-level protocols when establishing /// a peer-to-peer connection. diff --git a/crates/net/network/src/session/active.rs b/crates/net/network/src/session/active.rs index f979a912cd46..76701f7e2abf 100644 --- a/crates/net/network/src/session/active.rs +++ b/crates/net/network/src/session/active.rs @@ -11,6 +11,14 @@ use std::{ time::{Duration, Instant}, }; +use crate::{ + message::{NewBlockMessage, PeerMessage, PeerResponse, PeerResponseResult}, + session::{ + conn::EthRlpxConnection, + handle::{ActiveSessionMessage, SessionCommand}, + SessionId, + }, +}; use alloy_primitives::Sealable; use futures::{stream::Fuse, SinkExt, StreamExt}; use metrics::Gauge; @@ -34,15 +42,6 @@ use tokio_stream::wrappers::ReceiverStream; use tokio_util::sync::PollSender; use tracing::{debug, trace}; -use crate::{ - message::{NewBlockMessage, PeerMessage, PeerResponse, PeerResponseResult}, - session::{ - conn::EthRlpxConnection, - handle::{ActiveSessionMessage, SessionCommand}, - SessionId, - }, -}; - // Constants for timeout updating. /// Minimum timeout value diff --git a/crates/net/network/src/session/conn.rs b/crates/net/network/src/session/conn.rs index 5329f01028b3..45b83d1c487b 100644 --- a/crates/net/network/src/session/conn.rs +++ b/crates/net/network/src/session/conn.rs @@ -1,10 +1,5 @@ //! 
Connection types for a session -use std::{ - pin::Pin, - task::{Context, Poll}, -}; - use futures::{Sink, Stream}; use reth_ecies::stream::ECIESStream; use reth_eth_wire::{ @@ -13,6 +8,10 @@ use reth_eth_wire::{ multiplex::{ProtocolProxy, RlpxSatelliteStream}, EthMessage, EthNetworkPrimitives, EthStream, EthVersion, NetworkPrimitives, P2PStream, }; +use std::{ + pin::Pin, + task::{Context, Poll}, +}; use tokio::net::TcpStream; /// The type of the underlying peer network connection. diff --git a/crates/net/network/src/session/counter.rs b/crates/net/network/src/session/counter.rs index 0d8f764f206d..052cf1e25707 100644 --- a/crates/net/network/src/session/counter.rs +++ b/crates/net/network/src/session/counter.rs @@ -1,8 +1,7 @@ +use super::ExceedsSessionLimit; use reth_network_api::Direction; use reth_network_types::SessionLimits; -use super::ExceedsSessionLimit; - /// Keeps track of all sessions. #[derive(Debug, Clone)] pub struct SessionCounter { diff --git a/crates/net/network/src/session/handle.rs b/crates/net/network/src/session/handle.rs index f80428630d9b..d167dc0e6ec4 100644 --- a/crates/net/network/src/session/handle.rs +++ b/crates/net/network/src/session/handle.rs @@ -1,7 +1,10 @@ //! Session handles. -use std::{io, net::SocketAddr, sync::Arc, time::Instant}; - +use crate::{ + message::PeerMessage, + session::{conn::EthRlpxConnection, Direction, SessionId}, + PendingSessionHandshakeError, +}; use reth_ecies::ECIESError; use reth_eth_wire::{ capability::CapabilityMessage, errors::EthStreamError, Capabilities, DisconnectReason, @@ -10,17 +13,12 @@ use reth_eth_wire::{ use reth_network_api::PeerInfo; use reth_network_peers::{NodeRecord, PeerId}; use reth_network_types::PeerKind; +use std::{io, net::SocketAddr, sync::Arc, time::Instant}; use tokio::sync::{ mpsc::{self, error::SendError}, oneshot, }; -use crate::{ - message::PeerMessage, - session::{conn::EthRlpxConnection, Direction, SessionId}, - PendingSessionHandshakeError, -}; - /// A handler attached to a peer session that's not authenticated yet, pending Handshake and hello /// message which exchanges the `capabilities` of the peer. 
/// diff --git a/crates/net/network/src/session/mod.rs b/crates/net/network/src/session/mod.rs index a95f0e889101..816c540cee22 100644 --- a/crates/net/network/src/session/mod.rs +++ b/crates/net/network/src/session/mod.rs @@ -23,6 +23,12 @@ use std::{ time::{Duration, Instant}, }; +use crate::{ + message::PeerMessage, + metrics::SessionManagerMetrics, + protocol::{IntoRlpxSubProtocol, RlpxSubProtocolHandlers, RlpxSubProtocols}, + session::active::ActiveSession, +}; use counter::SessionCounter; use futures::{future::Either, io, FutureExt, StreamExt}; use reth_ecies::{stream::ECIESStream, ECIESError}; @@ -31,11 +37,11 @@ use reth_eth_wire::{ Capabilities, DisconnectReason, EthVersion, HelloMessageWithProtocols, NetworkPrimitives, Status, UnauthedEthStream, UnauthedP2PStream, }; +use reth_ethereum_forks::{ForkFilter, ForkId, ForkTransition, Head}; use reth_metrics::common::mpsc::MeteredPollSender; use reth_network_api::{PeerRequest, PeerRequestSender}; use reth_network_peers::PeerId; use reth_network_types::SessionsConfig; -use reth_primitives::{ForkFilter, ForkId, ForkTransition, Head}; use reth_tasks::TaskSpawner; use rustc_hash::FxHashMap; use secp256k1::SecretKey; @@ -48,13 +54,6 @@ use tokio_stream::wrappers::ReceiverStream; use tokio_util::sync::PollSender; use tracing::{debug, instrument, trace}; -use crate::{ - message::PeerMessage, - metrics::SessionManagerMetrics, - protocol::{IntoRlpxSubProtocol, RlpxSubProtocolHandlers, RlpxSubProtocols}, - session::active::ActiveSession, -}; - /// Internal identifier for active sessions. #[derive(Debug, Clone, Copy, PartialOrd, PartialEq, Eq, Hash)] pub struct SessionId(usize); diff --git a/crates/net/network/src/state.rs b/crates/net/network/src/state.rs index c51f115c52f5..473c76c260f0 100644 --- a/crates/net/network/src/state.rs +++ b/crates/net/network/src/state.rs @@ -1,17 +1,13 @@ //! Keeps track of the state of the network. -use std::{ - collections::{HashMap, VecDeque}, - fmt, - net::{IpAddr, SocketAddr}, - ops::Deref, - sync::{ - atomic::{AtomicU64, AtomicUsize}, - Arc, - }, - task::{Context, Poll}, +use crate::{ + cache::LruCache, + discovery::Discovery, + fetch::{BlockResponseOutcome, FetchAction, StateFetcher}, + message::{BlockRequest, NewBlockMessage, PeerResponse, PeerResponseResult}, + peers::{PeerAction, PeersManager}, + FetchClient, }; - use alloy_consensus::BlockHeader; use alloy_primitives::B256; use rand::seq::SliceRandom; @@ -19,23 +15,25 @@ use reth_eth_wire::{ BlockHashNumber, Capabilities, DisconnectReason, EthNetworkPrimitives, NetworkPrimitives, NewBlockHashes, Status, }; +use reth_ethereum_forks::ForkId; use reth_network_api::{DiscoveredEvent, DiscoveryEvent, PeerRequest, PeerRequestSender}; use reth_network_peers::PeerId; use reth_network_types::{PeerAddr, PeerKind}; -use reth_primitives::ForkId; use reth_primitives_traits::Block; +use std::{ + collections::{HashMap, VecDeque}, + fmt, + net::{IpAddr, SocketAddr}, + ops::Deref, + sync::{ + atomic::{AtomicU64, AtomicUsize}, + Arc, + }, + task::{Context, Poll}, +}; use tokio::sync::oneshot; use tracing::{debug, trace}; -use crate::{ - cache::LruCache, - discovery::Discovery, - fetch::{BlockResponseOutcome, FetchAction, StateFetcher}, - message::{BlockRequest, NewBlockMessage, PeerResponse, PeerResponseResult}, - peers::{PeerAction, PeersManager}, - FetchClient, -}; - /// Cache limit of blocks to keep track of for a single peer. 
const PEER_BLOCK_CACHE_LIMIT: u32 = 512; diff --git a/crates/net/network/src/swarm.rs b/crates/net/network/src/swarm.rs index 655934f207ac..47447783f428 100644 --- a/crates/net/network/src/swarm.rs +++ b/crates/net/network/src/swarm.rs @@ -1,11 +1,11 @@ -use std::{ - io, - net::SocketAddr, - pin::Pin, - sync::Arc, - task::{Context, Poll}, +use crate::{ + listener::{ConnectionListener, ListenerEvent}, + message::PeerMessage, + peers::InboundConnectionError, + protocol::IntoRlpxSubProtocol, + session::{Direction, PendingSessionHandshakeError, SessionEvent, SessionId, SessionManager}, + state::{NetworkState, StateAction}, }; - use futures::Stream; use reth_eth_wire::{ capability::CapabilityMessage, errors::EthStreamError, Capabilities, DisconnectReason, @@ -13,16 +13,14 @@ use reth_eth_wire::{ }; use reth_network_api::{PeerRequest, PeerRequestSender}; use reth_network_peers::PeerId; -use tracing::trace; - -use crate::{ - listener::{ConnectionListener, ListenerEvent}, - message::PeerMessage, - peers::InboundConnectionError, - protocol::IntoRlpxSubProtocol, - session::{Direction, PendingSessionHandshakeError, SessionEvent, SessionId, SessionManager}, - state::{NetworkState, StateAction}, +use std::{ + io, + net::SocketAddr, + pin::Pin, + sync::Arc, + task::{Context, Poll}, }; +use tracing::trace; #[cfg_attr(doc, aquamarine::aquamarine)] /// Contains the connectivity related state of the network. diff --git a/crates/net/network/src/test_utils/init.rs b/crates/net/network/src/test_utils/init.rs index 767f6818091a..87ccbb5f9d79 100644 --- a/crates/net/network/src/test_utils/init.rs +++ b/crates/net/network/src/test_utils/init.rs @@ -1,7 +1,6 @@ -use std::{net::SocketAddr, time::Duration}; - use enr::{k256::ecdsa::SigningKey, Enr, EnrPublicKey}; use reth_network_peers::PeerId; +use std::{net::SocketAddr, time::Duration}; /// The timeout for tests that create a `GethInstance` pub const GETH_TIMEOUT: Duration = Duration::from_secs(60); diff --git a/crates/net/network/src/test_utils/testnet.rs b/crates/net/network/src/test_utils/testnet.rs index d92272a871e0..a64084f2cf9b 100644 --- a/crates/net/network/src/test_utils/testnet.rs +++ b/crates/net/network/src/test_utils/testnet.rs @@ -1,13 +1,13 @@ //! A network implementation for testing purposes. -use std::{ - fmt, - future::Future, - net::{Ipv4Addr, SocketAddr, SocketAddrV4}, - pin::Pin, - task::{Context, Poll}, +use crate::{ + builder::ETH_REQUEST_CHANNEL_CAPACITY, + error::NetworkError, + eth_requests::EthRequestHandler, + protocol::IntoRlpxSubProtocol, + transactions::{TransactionsHandle, TransactionsManager, TransactionsManagerConfig}, + NetworkConfig, NetworkConfigBuilder, NetworkHandle, NetworkManager, }; - use futures::{FutureExt, StreamExt}; use pin_project::pin_project; use reth_chainspec::{Hardforks, MAINNET}; @@ -27,6 +27,13 @@ use reth_transaction_pool::{ EthTransactionPool, TransactionPool, TransactionValidationTaskExecutor, }; use secp256k1::SecretKey; +use std::{ + fmt, + future::Future, + net::{Ipv4Addr, SocketAddr, SocketAddrV4}, + pin::Pin, + task::{Context, Poll}, +}; use tokio::{ sync::{ mpsc::{channel, unbounded_channel}, @@ -35,15 +42,6 @@ use tokio::{ task::JoinHandle, }; -use crate::{ - builder::ETH_REQUEST_CHANNEL_CAPACITY, - error::NetworkError, - eth_requests::EthRequestHandler, - protocol::IntoRlpxSubProtocol, - transactions::{TransactionsHandle, TransactionsManager, TransactionsManagerConfig}, - NetworkConfig, NetworkConfigBuilder, NetworkHandle, NetworkManager, -}; - /// A test network consisting of multiple peers. 
pub struct Testnet { /// All running peers in the network. diff --git a/crates/net/network/src/transactions/config.rs b/crates/net/network/src/transactions/config.rs index b838f7cfe71b..db59ffac5cc0 100644 --- a/crates/net/network/src/transactions/config.rs +++ b/crates/net/network/src/transactions/config.rs @@ -1,5 +1,3 @@ -use derive_more::Constructor; - use super::{ DEFAULT_MAX_COUNT_TRANSACTIONS_SEEN_BY_PEER, DEFAULT_SOFT_LIMIT_BYTE_SIZE_POOLED_TRANSACTIONS_RESP_ON_PACK_GET_POOLED_TRANSACTIONS_REQ, @@ -9,6 +7,7 @@ use crate::transactions::constants::tx_fetcher::{ DEFAULT_MAX_CAPACITY_CACHE_PENDING_FETCH, DEFAULT_MAX_COUNT_CONCURRENT_REQUESTS, DEFAULT_MAX_COUNT_CONCURRENT_REQUESTS_PER_PEER, }; +use derive_more::Constructor; /// Configuration for managing transactions within the network. #[derive(Debug, Clone)] diff --git a/crates/net/network/src/transactions/fetcher.rs b/crates/net/network/src/transactions/fetcher.rs index 4c4119c85c07..0833f677409d 100644 --- a/crates/net/network/src/transactions/fetcher.rs +++ b/crates/net/network/src/transactions/fetcher.rs @@ -25,13 +25,18 @@ //! before it's re-tried. Nonetheless, the capacity of the buffered hashes cache must be large //! enough to buffer many hashes during network failure, to allow for recovery. -use std::{ - collections::HashMap, - pin::Pin, - task::{ready, Context, Poll}, - time::Duration, +use super::{ + config::TransactionFetcherConfig, + constants::{tx_fetcher::*, SOFT_LIMIT_COUNT_HASHES_IN_GET_POOLED_TRANSACTIONS_REQUEST}, + MessageFilter, PeerMetadata, PooledTransactions, + SOFT_LIMIT_BYTE_SIZE_POOLED_TRANSACTIONS_RESPONSE, +}; +use crate::{ + cache::{LruCache, LruMap}, + duration_metered_exec, + metrics::TransactionFetcherMetrics, + transactions::{validation, PartiallyFilterMessage}, }; - use alloy_primitives::TxHash; use derive_more::{Constructor, Deref}; use futures::{stream::FuturesUnordered, Future, FutureExt, Stream, StreamExt}; @@ -47,23 +52,16 @@ use reth_primitives::PooledTransactionsElement; use schnellru::ByLength; #[cfg(debug_assertions)] use smallvec::{smallvec, SmallVec}; +use std::{ + collections::HashMap, + pin::Pin, + task::{ready, Context, Poll}, + time::Duration, +}; use tokio::sync::{mpsc::error::TrySendError, oneshot, oneshot::error::RecvError}; use tracing::{debug, trace}; use validation::FilterOutcome; -use super::{ - config::TransactionFetcherConfig, - constants::{tx_fetcher::*, SOFT_LIMIT_COUNT_HASHES_IN_GET_POOLED_TRANSACTIONS_REQUEST}, - MessageFilter, PeerMetadata, PooledTransactions, - SOFT_LIMIT_BYTE_SIZE_POOLED_TRANSACTIONS_RESPONSE, -}; -use crate::{ - cache::{LruCache, LruMap}, - duration_metered_exec, - metrics::TransactionFetcherMetrics, - transactions::{validation, PartiallyFilterMessage}, -}; - /// The type responsible for fetching missing transactions from peers. 
/// /// This will keep track of unique transaction hashes that are currently being fetched and submits diff --git a/crates/net/network/src/transactions/mod.rs b/crates/net/network/src/transactions/mod.rs index 241f01ae8abb..b499f0ac422c 100644 --- a/crates/net/network/src/transactions/mod.rs +++ b/crates/net/network/src/transactions/mod.rs @@ -18,20 +18,19 @@ pub use validation::*; pub(crate) use fetcher::{FetchEvent, TransactionFetcher}; use self::constants::{tx_manager::*, DEFAULT_SOFT_LIMIT_BYTE_SIZE_TRANSACTIONS_BROADCAST_MESSAGE}; -use constants::SOFT_LIMIT_COUNT_HASHES_IN_NEW_POOLED_TRANSACTIONS_BROADCAST_MESSAGE; - -use std::{ - collections::{hash_map::Entry, HashMap, HashSet}, - pin::Pin, - sync::{ - atomic::{AtomicUsize, Ordering}, - Arc, +use crate::{ + budget::{ + DEFAULT_BUDGET_TRY_DRAIN_NETWORK_TRANSACTION_EVENTS, + DEFAULT_BUDGET_TRY_DRAIN_PENDING_POOL_IMPORTS, DEFAULT_BUDGET_TRY_DRAIN_POOL_IMPORTS, + DEFAULT_BUDGET_TRY_DRAIN_STREAM, }, - task::{Context, Poll}, - time::{Duration, Instant}, + cache::LruCache, + duration_metered_exec, metered_poll_nested_stream_with_budget, + metrics::{TransactionsManagerMetrics, NETWORK_POOL_TRANSACTIONS_SCOPE}, + NetworkHandle, }; - use alloy_primitives::{TxHash, B256}; +use constants::SOFT_LIMIT_COUNT_HASHES_IN_NEW_POOLED_TRANSACTIONS_BROADCAST_MESSAGE; use futures::{stream::FuturesUnordered, Future, StreamExt}; use reth_eth_wire::{ DedupPayload, EthNetworkPrimitives, EthVersion, GetPooledTransactions, HandleMempoolData, @@ -56,22 +55,20 @@ use reth_transaction_pool::{ GetPooledTransactionLimit, PoolTransaction, PropagateKind, PropagatedTransactions, TransactionPool, ValidPoolTransaction, }; +use std::{ + collections::{hash_map::Entry, HashMap, HashSet}, + pin::Pin, + sync::{ + atomic::{AtomicUsize, Ordering}, + Arc, + }, + task::{Context, Poll}, + time::{Duration, Instant}, +}; use tokio::sync::{mpsc, oneshot, oneshot::error::RecvError}; use tokio_stream::wrappers::{ReceiverStream, UnboundedReceiverStream}; use tracing::{debug, trace}; -use crate::{ - budget::{ - DEFAULT_BUDGET_TRY_DRAIN_NETWORK_TRANSACTION_EVENTS, - DEFAULT_BUDGET_TRY_DRAIN_PENDING_POOL_IMPORTS, DEFAULT_BUDGET_TRY_DRAIN_POOL_IMPORTS, - DEFAULT_BUDGET_TRY_DRAIN_STREAM, - }, - cache::LruCache, - duration_metered_exec, metered_poll_nested_stream_with_budget, - metrics::{TransactionsManagerMetrics, NETWORK_POOL_TRANSACTIONS_SCOPE}, - NetworkHandle, -}; - /// The future for importing transactions into the pool. /// /// Resolves with the result of each transaction import. diff --git a/crates/net/network/src/transactions/validation.rs b/crates/net/network/src/transactions/validation.rs index 7bfe07761a21..1575d9f3374a 100644 --- a/crates/net/network/src/transactions/validation.rs +++ b/crates/net/network/src/transactions/validation.rs @@ -2,8 +2,6 @@ //! and [`NewPooledTransactionHashes68`](reth_eth_wire::NewPooledTransactionHashes68) //! announcements. Validation and filtering of announcements is network dependent. -use std::{fmt, fmt::Display, mem}; - use crate::metrics::{AnnouncedTxTypesMetrics, TxTypesCounter}; use alloy_primitives::{Signature, TxHash}; use derive_more::{Deref, DerefMut}; @@ -12,6 +10,7 @@ use reth_eth_wire::{ MAX_MESSAGE_SIZE, }; use reth_primitives::TxType; +use std::{fmt, fmt::Display, mem}; use tracing::trace; /// The size of a decoded signature in bytes. 
From 402f96600b20b2f103ecab32fee372a4a4a4f579 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Wed, 20 Nov 2024 13:18:58 +0100 Subject: [PATCH 062/156] chore: move txtype constants to codecs (#12705) --- crates/optimism/primitives/src/tx_type.rs | 35 ++++++------- crates/primitives/src/receipt.rs | 4 +- crates/primitives/src/transaction/mod.rs | 20 +++----- crates/primitives/src/transaction/tx_type.rs | 54 +++++++------------- crates/storage/codecs/src/lib.rs | 5 +- crates/storage/codecs/src/txtype.rs | 15 ++++++ 6 files changed, 60 insertions(+), 73 deletions(-) create mode 100644 crates/storage/codecs/src/txtype.rs diff --git a/crates/optimism/primitives/src/tx_type.rs b/crates/optimism/primitives/src/tx_type.rs index 9ddfe77b192f..c6e7fcc0a806 100644 --- a/crates/optimism/primitives/src/tx_type.rs +++ b/crates/optimism/primitives/src/tx_type.rs @@ -2,25 +2,15 @@ //! `OpTxType` implements `reth_primitives_traits::TxType`. //! This type is required because a `Compact` impl is needed on the deposit tx type. -use core::fmt::Debug; - -#[cfg(feature = "reth-codec")] -use alloy_consensus::constants::EIP7702_TX_TYPE_ID; use alloy_primitives::{U64, U8}; use alloy_rlp::{Decodable, Encodable, Error}; use bytes::BufMut; +use core::fmt::Debug; use derive_more::{ derive::{From, Into}, Display, }; use op_alloy_consensus::OpTxType as AlloyOpTxType; -#[cfg(feature = "reth-codec")] -use op_alloy_consensus::DEPOSIT_TX_TYPE_ID; -#[cfg(feature = "reth-codec")] -use reth_primitives::transaction::{ - COMPACT_EXTENDED_IDENTIFIER_FLAG, COMPACT_IDENTIFIER_EIP1559, COMPACT_IDENTIFIER_EIP2930, - COMPACT_IDENTIFIER_LEGACY, -}; use reth_primitives_traits::{InMemorySize, TxType}; /// Wrapper type for [`op_alloy_consensus::OpTxType`] to implement [`TxType`] trait. @@ -145,16 +135,17 @@ impl reth_codecs::Compact for OpTxType { where B: bytes::BufMut + AsMut<[u8]>, { + use reth_codecs::txtype::*; match self.0 { AlloyOpTxType::Legacy => COMPACT_IDENTIFIER_LEGACY, AlloyOpTxType::Eip2930 => COMPACT_IDENTIFIER_EIP2930, AlloyOpTxType::Eip1559 => COMPACT_IDENTIFIER_EIP1559, AlloyOpTxType::Eip7702 => { - buf.put_u8(EIP7702_TX_TYPE_ID); + buf.put_u8(alloy_consensus::constants::EIP7702_TX_TYPE_ID); COMPACT_EXTENDED_IDENTIFIER_FLAG } AlloyOpTxType::Deposit => { - buf.put_u8(DEPOSIT_TX_TYPE_ID); + buf.put_u8(op_alloy_consensus::DEPOSIT_TX_TYPE_ID); COMPACT_EXTENDED_IDENTIFIER_FLAG } } @@ -164,14 +155,16 @@ impl reth_codecs::Compact for OpTxType { use bytes::Buf; ( match identifier { - COMPACT_IDENTIFIER_LEGACY => Self(AlloyOpTxType::Legacy), - COMPACT_IDENTIFIER_EIP2930 => Self(AlloyOpTxType::Eip2930), - COMPACT_IDENTIFIER_EIP1559 => Self(AlloyOpTxType::Eip1559), - COMPACT_EXTENDED_IDENTIFIER_FLAG => { + reth_codecs::txtype::COMPACT_IDENTIFIER_LEGACY => Self(AlloyOpTxType::Legacy), + reth_codecs::txtype::COMPACT_IDENTIFIER_EIP2930 => Self(AlloyOpTxType::Eip2930), + reth_codecs::txtype::COMPACT_IDENTIFIER_EIP1559 => Self(AlloyOpTxType::Eip1559), + reth_codecs::txtype::COMPACT_EXTENDED_IDENTIFIER_FLAG => { let extended_identifier = buf.get_u8(); match extended_identifier { - EIP7702_TX_TYPE_ID => Self(AlloyOpTxType::Eip7702), - DEPOSIT_TX_TYPE_ID => Self(AlloyOpTxType::Deposit), + alloy_consensus::constants::EIP7702_TX_TYPE_ID => { + Self(AlloyOpTxType::Eip7702) + } + op_alloy_consensus::DEPOSIT_TX_TYPE_ID => Self(AlloyOpTxType::Deposit), _ => panic!("Unsupported OpTxType identifier: {extended_identifier}"), } } @@ -185,8 +178,10 @@ impl reth_codecs::Compact for OpTxType { #[cfg(test)] mod tests { use super::*; + use 
alloy_consensus::constants::EIP7702_TX_TYPE_ID; use bytes::BytesMut; - use reth_codecs::Compact; + use op_alloy_consensus::DEPOSIT_TX_TYPE_ID; + use reth_codecs::{txtype::*, Compact}; use rstest::rstest; #[test] diff --git a/crates/primitives/src/receipt.rs b/crates/primitives/src/receipt.rs index 93c0af1d9714..77a44dc39e53 100644 --- a/crates/primitives/src/receipt.rs +++ b/crates/primitives/src/receipt.rs @@ -393,7 +393,7 @@ impl Decodable for ReceiptWithBloom { Self::decode_receipt(buf, TxType::Eip7702) } #[cfg(feature = "optimism")] - crate::transaction::DEPOSIT_TX_TYPE_ID => { + op_alloy_consensus::DEPOSIT_TX_TYPE_ID => { buf.advance(1); Self::decode_receipt(buf, TxType::Deposit) } @@ -529,7 +529,7 @@ impl ReceiptWithBloomEncoder<'_> { } #[cfg(feature = "optimism")] TxType::Deposit => { - out.put_u8(crate::transaction::DEPOSIT_TX_TYPE_ID); + out.put_u8(op_alloy_consensus::DEPOSIT_TX_TYPE_ID); } } out.put_slice(payload.as_ref()); diff --git a/crates/primitives/src/transaction/mod.rs b/crates/primitives/src/transaction/mod.rs index 41522744a2ff..d50aea14c46f 100644 --- a/crates/primitives/src/transaction/mod.rs +++ b/crates/primitives/src/transaction/mod.rs @@ -57,17 +57,9 @@ pub mod signature; pub(crate) mod util; mod variant; +use alloc::vec::Vec; #[cfg(feature = "optimism")] use op_alloy_consensus::TxDeposit; -#[cfg(feature = "optimism")] -pub use tx_type::DEPOSIT_TX_TYPE_ID; -#[cfg(any(test, feature = "reth-codec"))] -pub use tx_type::{ - COMPACT_EXTENDED_IDENTIFIER_FLAG, COMPACT_IDENTIFIER_EIP1559, COMPACT_IDENTIFIER_EIP2930, - COMPACT_IDENTIFIER_LEGACY, -}; - -use alloc::vec::Vec; use reth_primitives_traits::{transaction::TransactionExt, SignedTransaction}; use revm_primitives::{AuthorizationList, TxEnv}; @@ -594,19 +586,19 @@ impl reth_codecs::Compact for Transaction { use bytes::Buf; match identifier { - COMPACT_IDENTIFIER_LEGACY => { + reth_codecs::txtype::COMPACT_IDENTIFIER_LEGACY => { let (tx, buf) = TxLegacy::from_compact(buf, buf.len()); (Self::Legacy(tx), buf) } - COMPACT_IDENTIFIER_EIP2930 => { + reth_codecs::txtype::COMPACT_IDENTIFIER_EIP2930 => { let (tx, buf) = TxEip2930::from_compact(buf, buf.len()); (Self::Eip2930(tx), buf) } - COMPACT_IDENTIFIER_EIP1559 => { + reth_codecs::txtype::COMPACT_IDENTIFIER_EIP1559 => { let (tx, buf) = TxEip1559::from_compact(buf, buf.len()); (Self::Eip1559(tx), buf) } - COMPACT_EXTENDED_IDENTIFIER_FLAG => { + reth_codecs::txtype::COMPACT_EXTENDED_IDENTIFIER_FLAG => { // An identifier of 3 indicates that the transaction type did not fit into // the backwards compatible 2 bit identifier, their transaction types are // larger than 2 bits (eg. 4844 and Deposit Transactions). 
In this case, @@ -623,7 +615,7 @@ impl reth_codecs::Compact for Transaction { (Self::Eip7702(tx), buf) } #[cfg(feature = "optimism")] - DEPOSIT_TX_TYPE_ID => { + op_alloy_consensus::DEPOSIT_TX_TYPE_ID => { let (tx, buf) = TxDeposit::from_compact(buf, buf.len()); (Self::Deposit(tx), buf) } diff --git a/crates/primitives/src/transaction/tx_type.rs b/crates/primitives/src/transaction/tx_type.rs index caa6d8728541..597487564dfc 100644 --- a/crates/primitives/src/transaction/tx_type.rs +++ b/crates/primitives/src/transaction/tx_type.rs @@ -8,28 +8,6 @@ use derive_more::Display; use reth_primitives_traits::InMemorySize; use serde::{Deserialize, Serialize}; -/// Identifier parameter for legacy transaction -#[cfg(any(test, feature = "reth-codec"))] -pub const COMPACT_IDENTIFIER_LEGACY: usize = 0; - -/// Identifier parameter for EIP-2930 transaction -#[cfg(any(test, feature = "reth-codec"))] -pub const COMPACT_IDENTIFIER_EIP2930: usize = 1; - -/// Identifier parameter for EIP-1559 transaction -#[cfg(any(test, feature = "reth-codec"))] -pub const COMPACT_IDENTIFIER_EIP1559: usize = 2; - -/// For backwards compatibility purposes only 2 bits of the type are encoded in the identifier -/// parameter. In the case of a [`COMPACT_EXTENDED_IDENTIFIER_FLAG`], the full transaction type is -/// read from the buffer as a single byte. -#[cfg(any(test, feature = "reth-codec"))] -pub const COMPACT_EXTENDED_IDENTIFIER_FLAG: usize = 3; - -/// Identifier for [`TxDeposit`](op_alloy_consensus::TxDeposit) transaction. -#[cfg(feature = "optimism")] -pub const DEPOSIT_TX_TYPE_ID: u8 = 126; - /// Transaction Type /// /// Currently being used as 2-bit type when encoding it to `reth_codecs::Compact` on @@ -136,7 +114,7 @@ impl From for u8 { TxType::Eip4844 => EIP4844_TX_TYPE_ID, TxType::Eip7702 => EIP7702_TX_TYPE_ID, #[cfg(feature = "optimism")] - TxType::Deposit => DEPOSIT_TX_TYPE_ID, + TxType::Deposit => op_alloy_consensus::DEPOSIT_TX_TYPE_ID, } } } @@ -195,6 +173,8 @@ impl reth_codecs::Compact for TxType { where B: bytes::BufMut + AsMut<[u8]>, { + use reth_codecs::txtype::*; + match self { Self::Legacy => COMPACT_IDENTIFIER_LEGACY, Self::Eip2930 => COMPACT_IDENTIFIER_EIP2930, @@ -209,7 +189,7 @@ impl reth_codecs::Compact for TxType { } #[cfg(feature = "optimism")] Self::Deposit => { - buf.put_u8(DEPOSIT_TX_TYPE_ID); + buf.put_u8(op_alloy_consensus::DEPOSIT_TX_TYPE_ID); COMPACT_EXTENDED_IDENTIFIER_FLAG } } @@ -222,16 +202,16 @@ impl reth_codecs::Compact for TxType { use bytes::Buf; ( match identifier { - COMPACT_IDENTIFIER_LEGACY => Self::Legacy, - COMPACT_IDENTIFIER_EIP2930 => Self::Eip2930, - COMPACT_IDENTIFIER_EIP1559 => Self::Eip1559, - COMPACT_EXTENDED_IDENTIFIER_FLAG => { + reth_codecs::txtype::COMPACT_IDENTIFIER_LEGACY => Self::Legacy, + reth_codecs::txtype::COMPACT_IDENTIFIER_EIP2930 => Self::Eip2930, + reth_codecs::txtype::COMPACT_IDENTIFIER_EIP1559 => Self::Eip1559, + reth_codecs::txtype::COMPACT_EXTENDED_IDENTIFIER_FLAG => { let extended_identifier = buf.get_u8(); match extended_identifier { EIP4844_TX_TYPE_ID => Self::Eip4844, EIP7702_TX_TYPE_ID => Self::Eip7702, #[cfg(feature = "optimism")] - DEPOSIT_TX_TYPE_ID => Self::Deposit, + op_alloy_consensus::DEPOSIT_TX_TYPE_ID => Self::Deposit, _ => panic!("Unsupported TxType identifier: {extended_identifier}"), } } @@ -274,19 +254,21 @@ impl Decodable for TxType { #[cfg(test)] mod tests { + use super::*; use alloy_primitives::hex; - use reth_codecs::Compact; + use reth_codecs::{txtype::*, Compact}; use rstest::rstest; - use super::*; - #[rstest] 
#[case(U64::from(LEGACY_TX_TYPE_ID), Ok(TxType::Legacy))] #[case(U64::from(EIP2930_TX_TYPE_ID), Ok(TxType::Eip2930))] #[case(U64::from(EIP1559_TX_TYPE_ID), Ok(TxType::Eip1559))] #[case(U64::from(EIP4844_TX_TYPE_ID), Ok(TxType::Eip4844))] #[case(U64::from(EIP7702_TX_TYPE_ID), Ok(TxType::Eip7702))] - #[cfg_attr(feature = "optimism", case(U64::from(DEPOSIT_TX_TYPE_ID), Ok(TxType::Deposit)))] + #[cfg_attr( + feature = "optimism", + case(U64::from(op_alloy_consensus::DEPOSIT_TX_TYPE_ID), Ok(TxType::Deposit)) + )] #[case(U64::MAX, Err("invalid tx type"))] fn test_u64_to_tx_type(#[case] input: U64, #[case] expected: Result) { let tx_type_result = TxType::try_from(input); @@ -299,7 +281,7 @@ mod tests { #[case(TxType::Eip1559, COMPACT_IDENTIFIER_EIP1559, vec![])] #[case(TxType::Eip4844, COMPACT_EXTENDED_IDENTIFIER_FLAG, vec![EIP4844_TX_TYPE_ID])] #[case(TxType::Eip7702, COMPACT_EXTENDED_IDENTIFIER_FLAG, vec![EIP7702_TX_TYPE_ID])] - #[cfg_attr(feature = "optimism", case(TxType::Deposit, COMPACT_EXTENDED_IDENTIFIER_FLAG, vec![DEPOSIT_TX_TYPE_ID]))] + #[cfg_attr(feature = "optimism", case(TxType::Deposit, COMPACT_EXTENDED_IDENTIFIER_FLAG, vec![op_alloy_consensus::DEPOSIT_TX_TYPE_ID]))] fn test_txtype_to_compact( #[case] tx_type: TxType, #[case] expected_identifier: usize, @@ -318,7 +300,7 @@ mod tests { #[case(TxType::Eip1559, COMPACT_IDENTIFIER_EIP1559, vec![])] #[case(TxType::Eip4844, COMPACT_EXTENDED_IDENTIFIER_FLAG, vec![EIP4844_TX_TYPE_ID])] #[case(TxType::Eip7702, COMPACT_EXTENDED_IDENTIFIER_FLAG, vec![EIP7702_TX_TYPE_ID])] - #[cfg_attr(feature = "optimism", case(TxType::Deposit, COMPACT_EXTENDED_IDENTIFIER_FLAG, vec![DEPOSIT_TX_TYPE_ID]))] + #[cfg_attr(feature = "optimism", case(TxType::Deposit, COMPACT_EXTENDED_IDENTIFIER_FLAG, vec![op_alloy_consensus::DEPOSIT_TX_TYPE_ID]))] fn test_txtype_from_compact( #[case] expected_type: TxType, #[case] identifier: usize, @@ -337,7 +319,7 @@ mod tests { #[case(&[EIP4844_TX_TYPE_ID], Ok(TxType::Eip4844))] #[case(&[EIP7702_TX_TYPE_ID], Ok(TxType::Eip7702))] #[case(&[u8::MAX], Err(alloy_rlp::Error::InputTooShort))] - #[cfg_attr(feature = "optimism", case(&[DEPOSIT_TX_TYPE_ID], Ok(TxType::Deposit)))] + #[cfg_attr(feature = "optimism", case(&[op_alloy_consensus::DEPOSIT_TX_TYPE_ID], Ok(TxType::Deposit)))] fn decode_tx_type(#[case] input: &[u8], #[case] expected: Result) { let tx_type_result = TxType::decode(&mut &input[..]); assert_eq!(tx_type_result, expected) diff --git a/crates/storage/codecs/src/lib.rs b/crates/storage/codecs/src/lib.rs index 284c6454f838..86d397ad24f0 100644 --- a/crates/storage/codecs/src/lib.rs +++ b/crates/storage/codecs/src/lib.rs @@ -17,13 +17,14 @@ #![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] #![cfg_attr(not(feature = "std"), no_std)] +extern crate alloc; + pub use reth_codecs_derive::*; use serde as _; use alloy_primitives::{Address, Bloom, Bytes, FixedBytes, U256}; use bytes::{Buf, BufMut}; -extern crate alloc; use alloc::vec::Vec; #[cfg(feature = "test-utils")] @@ -33,6 +34,8 @@ pub mod alloy; #[cfg(any(test, feature = "alloy"))] mod alloy; +pub mod txtype; + #[cfg(any(test, feature = "test-utils"))] pub mod test_utils; diff --git a/crates/storage/codecs/src/txtype.rs b/crates/storage/codecs/src/txtype.rs new file mode 100644 index 000000000000..ce392b59cd08 --- /dev/null +++ b/crates/storage/codecs/src/txtype.rs @@ -0,0 +1,15 @@ +//! Commonly used constants for transaction types. 
+ +/// Identifier parameter for legacy transaction +pub const COMPACT_IDENTIFIER_LEGACY: usize = 0; + +/// Identifier parameter for EIP-2930 transaction +pub const COMPACT_IDENTIFIER_EIP2930: usize = 1; + +/// Identifier parameter for EIP-1559 transaction +pub const COMPACT_IDENTIFIER_EIP1559: usize = 2; + +/// For backwards compatibility purposes only 2 bits of the type are encoded in the identifier +/// parameter. In the case of a [`COMPACT_EXTENDED_IDENTIFIER_FLAG`], the full transaction type is +/// read from the buffer as a single byte. +pub const COMPACT_EXTENDED_IDENTIFIER_FLAG: usize = 3; From f9b764f6e80661f283f81d69d32e0429392c5263 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Wed, 20 Nov 2024 13:28:47 +0100 Subject: [PATCH 063/156] chore: misc lint suggestion (#12706) --- crates/net/network/src/flattened_response.rs | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/crates/net/network/src/flattened_response.rs b/crates/net/network/src/flattened_response.rs index df2a9db78ae4..61dae9c7c724 100644 --- a/crates/net/network/src/flattened_response.rs +++ b/crates/net/network/src/flattened_response.rs @@ -23,10 +23,7 @@ where fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll { let this = self.project(); - this.receiver.poll(cx).map(|r| match r { - Ok(r) => r, - Err(err) => Err(err.into()), - }) + this.receiver.poll(cx).map(|r| r.unwrap_or_else(|err| Err(err.into()))) } } From e3702cfc87449294da061f02a571a23061b8ab50 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Wed, 20 Nov 2024 13:28:58 +0100 Subject: [PATCH 064/156] chore: move tracing futures to workspace (#12707) --- Cargo.toml | 1 + crates/rpc/rpc/Cargo.toml | 2 +- crates/tasks/Cargo.toml | 2 +- 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index f2565a1c92ff..e4ca1b7bc283 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -555,6 +555,7 @@ hyper = "1.3" hyper-util = "0.1.5" pin-project = "1.0.12" reqwest = { version = "0.12", default-features = false } +tracing-futures = "0.2" tower = "0.4" tower-http = "0.6" diff --git a/crates/rpc/rpc/Cargo.toml b/crates/rpc/rpc/Cargo.toml index 5418cd1eb3a7..804ecd11120e 100644 --- a/crates/rpc/rpc/Cargo.toml +++ b/crates/rpc/rpc/Cargo.toml @@ -82,7 +82,7 @@ parking_lot.workspace = true # misc tracing.workspace = true -tracing-futures = "0.2" +tracing-futures.workspace = true futures.workspace = true rand.workspace = true serde.workspace = true diff --git a/crates/tasks/Cargo.toml b/crates/tasks/Cargo.toml index 82c80c0932b8..68d8e9589791 100644 --- a/crates/tasks/Cargo.toml +++ b/crates/tasks/Cargo.toml @@ -15,7 +15,7 @@ workspace = true # async tokio = { workspace = true, features = ["sync", "rt"] } -tracing-futures = "0.2" +tracing-futures.workspace = true futures-util.workspace = true # metrics From 0d4b1e73d43bbdf901673c63ba2a1e4194e89bfe Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Wed, 20 Nov 2024 15:19:54 +0100 Subject: [PATCH 065/156] test(tx-pool): add more unit tests for tx-pool best (#12691) --- crates/transaction-pool/src/pool/best.rs | 124 ++++++++++++++++++++++- 1 file changed, 123 insertions(+), 1 deletion(-) diff --git a/crates/transaction-pool/src/pool/best.rs b/crates/transaction-pool/src/pool/best.rs index 7c2e5a025b7f..171faccf7c2a 100644 --- a/crates/transaction-pool/src/pool/best.rs +++ b/crates/transaction-pool/src/pool/best.rs @@ -401,7 +401,7 @@ mod tests { use crate::{ pool::pending::PendingPool, test_utils::{MockOrdering, MockTransaction, 
MockTransactionFactory}, - Priority, + BestTransactions, Priority, }; use alloy_primitives::U256; use reth_payload_util::{PayloadTransactionsChain, PayloadTransactionsFixed}; @@ -897,5 +897,127 @@ mod tests { assert_eq!(block.next(()).unwrap().signer(), address_regular); } + #[test] + fn test_best_with_fees_iter_no_blob_fee_required() { + // Tests transactions without blob fees where base fees are checked. + let mut pool = PendingPool::new(MockOrdering::default()); + let mut f = MockTransactionFactory::default(); + + let base_fee: u64 = 10; + let base_fee_per_blob_gas: u64 = 0; // No blob fee requirement + + // Insert transactions with max_fee_per_gas above the base fee + for nonce in 0..5 { + let tx = MockTransaction::eip1559() + .rng_hash() + .with_nonce(nonce) + .with_max_fee(base_fee as u128 + 5); + let valid_tx = f.validated(tx); + pool.add_transaction(Arc::new(valid_tx), 0); + } + + let mut best = pool.best_with_basefee_and_blobfee(base_fee, base_fee_per_blob_gas); + + // All transactions should be returned as no blob fee requirement is imposed + for nonce in 0..5 { + let tx = best.next().expect("Transaction should be returned"); + assert_eq!(tx.nonce(), nonce); + } + + // Ensure no more transactions are left + assert!(best.next().is_none()); + } + + #[test] + fn test_best_with_fees_iter_mix_of_blob_and_non_blob_transactions() { + // Tests mixed scenarios with both blob and non-blob transactions. + let mut pool = PendingPool::new(MockOrdering::default()); + let mut f = MockTransactionFactory::default(); + + let base_fee: u64 = 10; + let base_fee_per_blob_gas: u64 = 15; + + // Add a non-blob transaction that satisfies the base fee + let tx_non_blob = + MockTransaction::eip1559().rng_hash().with_nonce(0).with_max_fee(base_fee as u128 + 5); + pool.add_transaction(Arc::new(f.validated(tx_non_blob.clone())), 0); + + // Add a blob transaction that satisfies both base fee and blob fee + let tx_blob = MockTransaction::eip4844() + .rng_hash() + .with_nonce(1) + .with_max_fee(base_fee as u128 + 5) + .with_blob_fee(base_fee_per_blob_gas as u128 + 5); + pool.add_transaction(Arc::new(f.validated(tx_blob.clone())), 0); + + let mut best = pool.best_with_basefee_and_blobfee(base_fee, base_fee_per_blob_gas); + + // Verify both transactions are returned + let tx = best.next().expect("Transaction should be returned"); + assert_eq!(tx.transaction, tx_non_blob); + + let tx = best.next().expect("Transaction should be returned"); + assert_eq!(tx.transaction, tx_blob); + + // Ensure no more transactions are left + assert!(best.next().is_none()); + } + + #[test] + fn test_best_transactions_with_skipping_blobs() { + // Tests the skip_blobs functionality to ensure blob transactions are skipped. 
+ let mut pool = PendingPool::new(MockOrdering::default()); + let mut f = MockTransactionFactory::default(); + + // Add a blob transaction + let tx_blob = MockTransaction::eip4844().rng_hash().with_nonce(0).with_blob_fee(100); + let valid_blob_tx = f.validated(tx_blob); + pool.add_transaction(Arc::new(valid_blob_tx), 0); + + // Add a non-blob transaction + let tx_non_blob = MockTransaction::eip1559().rng_hash().with_nonce(1).with_max_fee(200); + let valid_non_blob_tx = f.validated(tx_non_blob.clone()); + pool.add_transaction(Arc::new(valid_non_blob_tx), 0); + + let mut best = pool.best(); + best.skip_blobs(); + + // Only the non-blob transaction should be returned + let tx = best.next().expect("Transaction should be returned"); + assert_eq!(tx.transaction, tx_non_blob); + + // Ensure no more transactions are left + assert!(best.next().is_none()); + } + + #[test] + fn test_best_transactions_no_updates() { + // Tests the no_updates functionality to ensure it properly clears the + // new_transaction_receiver. + let mut pool = PendingPool::new(MockOrdering::default()); + let mut f = MockTransactionFactory::default(); + + // Add a transaction + let tx = MockTransaction::eip1559().rng_hash().with_nonce(0).with_max_fee(100); + let valid_tx = f.validated(tx); + pool.add_transaction(Arc::new(valid_tx), 0); + + let mut best = pool.best(); + + // Use a broadcast channel for transaction updates + let (_tx_sender, tx_receiver) = + tokio::sync::broadcast::channel::>(1000); + best.new_transaction_receiver = Some(tx_receiver); + + // Ensure receiver is set + assert!(best.new_transaction_receiver.is_some()); + + // Call no_updates to clear the receiver + best.no_updates(); + + // Ensure receiver is cleared + assert!(best.new_transaction_receiver.is_none()); + } + // TODO: Same nonce test } From ce0bcee416088251436a8e273563296dcf89863b Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Wed, 20 Nov 2024 15:25:16 +0100 Subject: [PATCH 066/156] chore: move tx builder fns (#12709) --- crates/net/network/src/transactions/mod.rs | 66 +++++++++++----------- 1 file changed, 34 insertions(+), 32 deletions(-) diff --git a/crates/net/network/src/transactions/mod.rs b/crates/net/network/src/transactions/mod.rs index b499f0ac422c..d72b657bffcb 100644 --- a/crates/net/network/src/transactions/mod.rs +++ b/crates/net/network/src/transactions/mod.rs @@ -1480,7 +1480,7 @@ enum PropagateTransactionsBuilder { Full(FullTransactionsBuilder), } -impl PropagateTransactionsBuilder { +impl PropagateTransactionsBuilder { /// Create a builder for pooled transactions fn pooled(version: EthVersion) -> Self { Self::Pooled(PooledTransactionsHashesBuilder::new(version)) @@ -1491,21 +1491,6 @@ impl PropagateTransactionsBuilder { Self::Full(FullTransactionsBuilder::new(version)) } - /// Appends all transactions - fn extend<'a>(&mut self, txs: impl IntoIterator) { - for tx in txs { - self.push(tx); - } - } - - /// Appends a transaction to the list. - fn push(&mut self, transaction: &PropagateTransaction) { - match self { - Self::Pooled(builder) => builder.push(transaction), - Self::Full(builder) => builder.push(transaction), - } - } - /// Returns true if no transactions are recorded. fn is_empty(&self) -> bool { match self { @@ -1515,7 +1500,7 @@ impl PropagateTransactionsBuilder { } /// Consumes the type and returns the built messages that should be sent to the peer. 
- fn build(self) -> PropagateTransactions { + fn build(self) -> PropagateTransactions { match self { Self::Pooled(pooled) => { PropagateTransactions { pooled: Some(pooled.build()), full: None } @@ -1525,6 +1510,23 @@ impl PropagateTransactionsBuilder { } } +impl PropagateTransactionsBuilder { + /// Appends all transactions + fn extend<'a>(&mut self, txs: impl IntoIterator) { + for tx in txs { + self.push(tx); + } + } + + /// Appends a transaction to the list. + fn push(&mut self, transaction: &PropagateTransaction) { + match self { + Self::Pooled(builder) => builder.push(transaction), + Self::Full(builder) => builder.push(transaction), + } + } +} + /// Represents how the transactions should be sent to a peer if any. struct PropagateTransactions { /// The pooled transaction hashes to send. @@ -1547,9 +1549,7 @@ struct FullTransactionsBuilder { pooled: PooledTransactionsHashesBuilder, } -// === impl FullTransactionsBuilder === - -impl FullTransactionsBuilder { +impl FullTransactionsBuilder { /// Create a builder for the negotiated version of the peer's session fn new(version: EthVersion) -> Self { Self { @@ -1559,6 +1559,20 @@ impl FullTransactionsBuilder { } } + /// Returns whether or not any transactions are in the [`FullTransactionsBuilder`]. + fn is_empty(&self) -> bool { + self.transactions.is_empty() && self.pooled.is_empty() + } + + /// Returns the messages that should be propagated to the peer. + fn build(self) -> PropagateTransactions { + let pooled = Some(self.pooled.build()).filter(|pooled| !pooled.is_empty()); + let full = Some(self.transactions).filter(|full| !full.is_empty()); + PropagateTransactions { pooled, full } + } +} + +impl FullTransactionsBuilder { /// Appends all transactions. fn extend(&mut self, txs: impl IntoIterator) { for tx in txs { @@ -1600,18 +1614,6 @@ impl FullTransactionsBuilder { self.total_size = new_size; self.transactions.push(Arc::clone(&transaction.transaction)); } - - /// Returns whether or not any transactions are in the [`FullTransactionsBuilder`]. - fn is_empty(&self) -> bool { - self.transactions.is_empty() && self.pooled.is_empty() - } - - /// Returns the messages that should be propagated to the peer. - fn build(self) -> PropagateTransactions { - let pooled = Some(self.pooled.build()).filter(|pooled| !pooled.is_empty()); - let full = Some(self.transactions).filter(|full| !full.is_empty()); - PropagateTransactions { pooled, full } - } } /// A helper type to create the pooled transactions message based on the negotiated version of the From b3b083fb82df484db030f2b14f46d640ec57bf9c Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Wed, 20 Nov 2024 15:33:56 +0100 Subject: [PATCH 067/156] chore: add generics to broadcast (#12714) --- crates/net/eth-wire-types/src/broadcast.rs | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/crates/net/eth-wire-types/src/broadcast.rs b/crates/net/eth-wire-types/src/broadcast.rs index f37c5e74a046..7d74085d355a 100644 --- a/crates/net/eth-wire-types/src/broadcast.rs +++ b/crates/net/eth-wire-types/src/broadcast.rs @@ -90,9 +90,9 @@ generate_tests!(#[rlp, 25] NewBlock, EthNewBlockTests); #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] #[cfg_attr(any(test, feature = "arbitrary"), derive(arbitrary::Arbitrary))] #[add_arbitrary_tests(rlp, 10)] -pub struct Transactions( +pub struct Transactions( /// New transactions for the peer to include in its mempool. 
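The hunks above split the builder impls so that construction, `is_empty`, and `build` no longer sit in the same block as the methods that actually need the transaction type. A self-contained sketch of that impl-splitting pattern with a simplified builder — the type and the `Clone` bound are illustrative stand-ins, not the reth definitions:

// Constructors and bound-free accessors live in one impl block, while
// methods that need extra capabilities from the generic live in another.
struct Builder<T> {
    items: Vec<T>,
}

impl<T> Builder<T> {
    fn new() -> Self {
        Self { items: Vec::new() }
    }

    fn is_empty(&self) -> bool {
        self.items.is_empty()
    }

    fn build(self) -> Vec<T> {
        self.items
    }
}

// Only `push` needs `Clone`, so only this impl carries the bound.
impl<T: Clone> Builder<T> {
    fn push(&mut self, item: &T) {
        self.items.push(item.clone());
    }
}

fn main() {
    let mut b = Builder::new();
    b.push(&1u32);
    assert!(!b.is_empty());
    assert_eq!(b.build(), vec![1u32]);
}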
- pub Vec, + pub Vec, ); impl Transactions { @@ -102,14 +102,14 @@ impl Transactions { } } -impl From> for Transactions { - fn from(txs: Vec) -> Self { +impl From> for Transactions { + fn from(txs: Vec) -> Self { Self(txs) } } -impl From for Vec { - fn from(txs: Transactions) -> Self { +impl From> for Vec { + fn from(txs: Transactions) -> Self { txs.0 } } @@ -121,9 +121,9 @@ impl From for Vec { #[derive(Clone, Debug, PartialEq, Eq, RlpEncodableWrapper, RlpDecodableWrapper)] #[cfg_attr(any(test, feature = "arbitrary"), derive(arbitrary::Arbitrary))] #[add_arbitrary_tests(rlp, 20)] -pub struct SharedTransactions( +pub struct SharedTransactions( /// New transactions for the peer to include in its mempool. - pub Vec>, + pub Vec>, ); /// A wrapper type for all different new pooled transaction types From 68abcb1fe9a47e8d42f30f433e6deb7c671698cf Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Wed, 20 Nov 2024 15:39:19 +0100 Subject: [PATCH 068/156] chore: rm unused file (#12713) --- crates/net/eth-wire-types/src/message.rs | 1 - crates/net/eth-wire-types/src/primitives.rs | 3 +-- crates/net/eth-wire-types/src/response.rs | 29 --------------------- 3 files changed, 1 insertion(+), 32 deletions(-) delete mode 100644 crates/net/eth-wire-types/src/response.rs diff --git a/crates/net/eth-wire-types/src/message.rs b/crates/net/eth-wire-types/src/message.rs index f83e21124e34..93d42fb3ea02 100644 --- a/crates/net/eth-wire-types/src/message.rs +++ b/crates/net/eth-wire-types/src/message.rs @@ -12,7 +12,6 @@ use super::{ NewPooledTransactionHashes68, NodeData, PooledTransactions, Receipts, Status, Transactions, }; use crate::{EthNetworkPrimitives, EthVersion, NetworkPrimitives, SharedTransactions}; - use alloy_primitives::bytes::{Buf, BufMut}; use alloy_rlp::{length_of_length, Decodable, Encodable, Header}; use std::{fmt::Debug, sync::Arc}; diff --git a/crates/net/eth-wire-types/src/primitives.rs b/crates/net/eth-wire-types/src/primitives.rs index c8b62cb0a82d..ff7ab1c801bd 100644 --- a/crates/net/eth-wire-types/src/primitives.rs +++ b/crates/net/eth-wire-types/src/primitives.rs @@ -1,9 +1,8 @@ //! Abstraction over primitive types in network messages. -use std::fmt::Debug; - use alloy_rlp::{Decodable, Encodable}; use reth_primitives_traits::{Block, BlockHeader}; +use std::fmt::Debug; /// Abstraction over primitive types which might appear in network messages. See /// [`crate::EthMessage`] for more context. diff --git a/crates/net/eth-wire-types/src/response.rs b/crates/net/eth-wire-types/src/response.rs deleted file mode 100644 index dfcf5ed56a8c..000000000000 --- a/crates/net/eth-wire-types/src/response.rs +++ /dev/null @@ -1,29 +0,0 @@ -use crate::{ - BlockBodies, BlockHeaders, NodeData, PooledTransactions, Receipts, RequestPair, Status, -}; - -// This type is analogous to the `zebra_network::Response` type. -/// An ethereum network response for version 66. -#[derive(Clone, Debug, Eq, PartialEq)] -pub enum Response { - /// The request does not have a response. - Nil, - - /// The [`Status`](super::Status) message response in the eth protocol handshake. - Status(Status), - - /// The response to a [`Request::GetBlockHeaders`](super::Request::GetBlockHeaders) request. - BlockHeaders(RequestPair), - - /// The response to a [`Request::GetBlockBodies`](super::Request::GetBlockBodies) request. - BlockBodies(RequestPair), - - /// The response to a [`Request::GetPooledTransactions`](super::Request::GetPooledTransactions) request. 
- PooledTransactions(RequestPair), - - /// The response to a [`Request::GetNodeData`](super::Request::GetNodeData) request. - NodeData(RequestPair), - - /// The response to a [`Request::GetReceipts`](super::Request::GetReceipts) request. - Receipts(RequestPair), -} From 04729f3c6655426f23e6ae8edd2abc1d2ca84e74 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Wed, 20 Nov 2024 15:39:32 +0100 Subject: [PATCH 069/156] chore: introduce network primitives to transactions handle (#12711) --- crates/net/network/src/transactions/mod.rs | 63 ++++++++++++---------- 1 file changed, 34 insertions(+), 29 deletions(-) diff --git a/crates/net/network/src/transactions/mod.rs b/crates/net/network/src/transactions/mod.rs index d72b657bffcb..5bdf200e20ff 100644 --- a/crates/net/network/src/transactions/mod.rs +++ b/crates/net/network/src/transactions/mod.rs @@ -82,42 +82,26 @@ pub type PoolImportFuture = Pin>> /// For example [`TransactionsHandle::get_peer_transaction_hashes`] returns the transaction hashes /// known by a specific peer. #[derive(Debug, Clone)] -pub struct TransactionsHandle { +pub struct TransactionsHandle { /// Command channel to the [`TransactionsManager`] - manager_tx: mpsc::UnboundedSender, + manager_tx: mpsc::UnboundedSender>, } /// Implementation of the `TransactionsHandle` API for use in testnet via type /// [`PeerHandle`](crate::test_utils::PeerHandle). -impl TransactionsHandle { - fn send(&self, cmd: TransactionsCommand) { +impl TransactionsHandle { + fn send(&self, cmd: TransactionsCommand) { let _ = self.manager_tx.send(cmd); } /// Fetch the [`PeerRequestSender`] for the given peer. - async fn peer_handle(&self, peer_id: PeerId) -> Result, RecvError> { - let (tx, rx) = oneshot::channel(); - self.send(TransactionsCommand::GetPeerSender { peer_id, peer_request_sender: tx }); - rx.await - } - - /// Requests the transactions directly from the given peer. - /// - /// Returns `None` if the peer is not connected. - /// - /// **Note**: this returns the response from the peer as received. - pub async fn get_pooled_transactions_from( + async fn peer_handle( &self, peer_id: PeerId, - hashes: Vec, - ) -> Result>, RequestError> { - let Some(peer) = self.peer_handle(peer_id).await? else { return Ok(None) }; - + ) -> Result>>, RecvError> { let (tx, rx) = oneshot::channel(); - let request = PeerRequest::GetPooledTransactions { request: hashes.into(), response: tx }; - peer.try_send(request).ok(); - - rx.await?.map(|res| Some(res.0)) + self.send(TransactionsCommand::GetPeerSender { peer_id, peer_request_sender: tx }); + rx.await } /// Manually propagate the transaction that belongs to the hash. @@ -179,6 +163,27 @@ impl TransactionsHandle { } } +impl TransactionsHandle { + /// Requests the transactions directly from the given peer. + /// + /// Returns `None` if the peer is not connected. + /// + /// **Note**: this returns the response from the peer as received. + pub async fn get_pooled_transactions_from( + &self, + peer_id: PeerId, + hashes: Vec, + ) -> Result>, RequestError> { + let Some(peer) = self.peer_handle(peer_id).await? else { return Ok(None) }; + + let (tx, rx) = oneshot::channel(); + let request = PeerRequest::GetPooledTransactions { request: hashes.into(), response: tx }; + peer.try_send(request).ok(); + + rx.await?.map(|res| Some(res.0)) + } +} + /// Manages transactions on top of the p2p network. /// /// This can be spawned to another task and is supposed to be run as background service. 
@@ -235,12 +240,12 @@ pub struct TransactionsManager, + command_tx: mpsc::UnboundedSender>, /// Incoming commands from [`TransactionsHandle`]. /// /// This will only receive commands if a user manually sends a command to the manager through /// the [`TransactionsHandle`] to interact with this type directly. - command_rx: UnboundedReceiverStream, + command_rx: UnboundedReceiverStream>, /// A stream that yields new __pending__ transactions. /// /// A transaction is considered __pending__ if it is executable on the current state of the @@ -312,7 +317,7 @@ impl TransactionsManager { impl TransactionsManager { /// Returns a new handle that can send commands to this type. - pub fn handle(&self) -> TransactionsHandle { + pub fn handle(&self) -> TransactionsHandle { TransactionsHandle { manager_tx: self.command_tx.clone() } } @@ -1732,7 +1737,7 @@ impl PeerMetadata { /// Commands to send to the [`TransactionsManager`] #[derive(Debug)] -enum TransactionsCommand { +enum TransactionsCommand { /// Propagate a transaction hash to the network. PropagateHash(B256), /// Propagate transaction hashes to a specific peer. @@ -1751,7 +1756,7 @@ enum TransactionsCommand { /// Requests a clone of the sender sender channel to the peer. GetPeerSender { peer_id: PeerId, - peer_request_sender: oneshot::Sender>, + peer_request_sender: oneshot::Sender>>>, }, } From 8df9045fd8de4d7f9d71e4798dec2d302e964702 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Wed, 20 Nov 2024 17:02:02 +0100 Subject: [PATCH 070/156] fix: use correct timestamp for op receipt (#12716) --- crates/optimism/rpc/src/eth/receipt.rs | 26 +++++++++++++++++++------- 1 file changed, 19 insertions(+), 7 deletions(-) diff --git a/crates/optimism/rpc/src/eth/receipt.rs b/crates/optimism/rpc/src/eth/receipt.rs index a801a408fd5d..2cc771d0e44b 100644 --- a/crates/optimism/rpc/src/eth/receipt.rs +++ b/crates/optimism/rpc/src/eth/receipt.rs @@ -54,10 +54,10 @@ where /// L1 fee and data gas for a non-deposit transaction, or deposit nonce and receipt version for a /// deposit transaction. -#[derive(Debug, Default, Clone)] +#[derive(Debug, Clone)] pub struct OpReceiptFieldsBuilder { /// Block timestamp. - pub l1_block_timestamp: u64, + pub block_timestamp: u64, /// The L1 fee for transaction. pub l1_fee: Option, /// L1 gas used by transaction. @@ -84,8 +84,19 @@ pub struct OpReceiptFieldsBuilder { impl OpReceiptFieldsBuilder { /// Returns a new builder. - pub fn new(block_timestamp: u64) -> Self { - Self { l1_block_timestamp: block_timestamp, ..Default::default() } + pub const fn new(block_timestamp: u64) -> Self { + Self { + block_timestamp, + l1_fee: None, + l1_data_gas: None, + l1_fee_scalar: None, + l1_base_fee: None, + deposit_nonce: None, + deposit_receipt_version: None, + l1_base_fee_scalar: None, + l1_blob_base_fee: None, + l1_blob_base_fee_scalar: None, + } } /// Applies [`L1BlockInfo`](revm::L1BlockInfo). @@ -96,7 +107,7 @@ impl OpReceiptFieldsBuilder { l1_block_info: revm::L1BlockInfo, ) -> Result { let raw_tx = tx.encoded_2718(); - let timestamp = self.l1_block_timestamp; + let timestamp = self.block_timestamp; self.l1_fee = Some( l1_block_info @@ -140,7 +151,7 @@ impl OpReceiptFieldsBuilder { /// Builds the [`OpTransactionReceiptFields`] object. 
pub const fn build(self) -> OpTransactionReceiptFields { let Self { - l1_block_timestamp: _, // used to compute other fields + block_timestamp: _, // used to compute other fields l1_fee, l1_data_gas: l1_gas_used, l1_fee_scalar, @@ -187,6 +198,7 @@ impl OpReceiptBuilder { all_receipts: &[Receipt], l1_block_info: revm::L1BlockInfo, ) -> Result { + let timestamp = meta.timestamp; let core_receipt = build_receipt(transaction, meta, receipt, all_receipts, |receipt_with_bloom| { match receipt.tx_type { @@ -211,7 +223,7 @@ impl OpReceiptBuilder { } })?; - let op_receipt_fields = OpReceiptFieldsBuilder::default() + let op_receipt_fields = OpReceiptFieldsBuilder::new(timestamp) .l1_block_info(chain_spec, transaction, l1_block_info)? .deposit_nonce(receipt.deposit_nonce) .deposit_version(receipt.deposit_receipt_version) From a0d7503eb1ffd467d333a05c4ff21ef5771323cf Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Wed, 20 Nov 2024 18:52:06 +0100 Subject: [PATCH 071/156] feat: use network primitives pooled transaction AT (#12718) --- crates/net/eth-wire-types/src/message.rs | 6 +++++- crates/net/network-api/src/events.rs | 2 +- crates/net/network/src/manager.rs | 6 +++--- crates/net/network/src/message.rs | 6 +++--- crates/net/network/src/network.rs | 4 ++-- crates/net/network/src/transactions/mod.rs | 10 ++++------ 6 files changed, 18 insertions(+), 16 deletions(-) diff --git a/crates/net/eth-wire-types/src/message.rs b/crates/net/eth-wire-types/src/message.rs index 93d42fb3ea02..3d34b8cae80c 100644 --- a/crates/net/eth-wire-types/src/message.rs +++ b/crates/net/eth-wire-types/src/message.rs @@ -207,7 +207,11 @@ pub enum EthMessage { /// Represents a `GetPooledTransactions` request-response pair. GetPooledTransactions(RequestPair), /// Represents a `PooledTransactions` request-response pair. - PooledTransactions(RequestPair), + #[cfg_attr( + feature = "serde", + serde(bound = "N::PooledTransaction: serde::Serialize + serde::de::DeserializeOwned") + )] + PooledTransactions(RequestPair>), /// Represents a `GetNodeData` request-response pair. GetNodeData(RequestPair), /// Represents a `NodeData` request-response pair. diff --git a/crates/net/network-api/src/events.rs b/crates/net/network-api/src/events.rs index af392b6f9ead..624c43f5e1ba 100644 --- a/crates/net/network-api/src/events.rs +++ b/crates/net/network-api/src/events.rs @@ -154,7 +154,7 @@ pub enum PeerRequest { /// The request for pooled transactions. request: GetPooledTransactions, /// The channel to send the response for pooled transactions. - response: oneshot::Sender>, + response: oneshot::Sender>>, }, /// Requests `NodeData` from the peer. /// diff --git a/crates/net/network/src/manager.rs b/crates/net/network/src/manager.rs index c9caa412274e..c1db91773e38 100644 --- a/crates/net/network/src/manager.rs +++ b/crates/net/network/src/manager.rs @@ -90,7 +90,7 @@ pub struct NetworkManager { event_sender: EventSender>>, /// Sender half to send events to the /// [`TransactionsManager`](crate::transactions::TransactionsManager) task, if configured. - to_transactions_manager: Option>, + to_transactions_manager: Option>>, /// Sender half to send events to the /// [`EthRequestHandler`](crate::eth_requests::EthRequestHandler) task, if configured. /// @@ -120,7 +120,7 @@ pub struct NetworkManager { impl NetworkManager { /// Sets the dedicated channel for events indented for the /// [`TransactionsManager`](crate::transactions::TransactionsManager). 
- pub fn set_transactions(&mut self, tx: mpsc::UnboundedSender) { + pub fn set_transactions(&mut self, tx: mpsc::UnboundedSender>) { self.to_transactions_manager = Some(UnboundedMeteredSender::new(tx, NETWORK_POOL_TRANSACTIONS_SCOPE)); } @@ -409,7 +409,7 @@ impl NetworkManager { /// Sends an event to the [`TransactionsManager`](crate::transactions::TransactionsManager) if /// configured. - fn notify_tx_manager(&self, event: NetworkTransactionEvent) { + fn notify_tx_manager(&self, event: NetworkTransactionEvent) { if let Some(ref tx) = self.to_transactions_manager { let _ = tx.send(event); } diff --git a/crates/net/network/src/message.rs b/crates/net/network/src/message.rs index 4821e2592922..c2511f4e16a8 100644 --- a/crates/net/network/src/message.rs +++ b/crates/net/network/src/message.rs @@ -14,7 +14,7 @@ use reth_eth_wire::{ }; use reth_network_api::PeerRequest; use reth_network_p2p::error::{RequestError, RequestResult}; -use reth_primitives::{PooledTransactionsElement, ReceiptWithBloom}; +use reth_primitives::ReceiptWithBloom; use std::{ sync::Arc, task::{ready, Context, Poll}, @@ -89,7 +89,7 @@ pub enum PeerResponse { /// Represents a response to a request for pooled transactions. PooledTransactions { /// The receiver channel for the response to a pooled transactions request. - response: oneshot::Receiver>, + response: oneshot::Receiver>>, }, /// Represents a response to a request for `NodeData`. NodeData { @@ -146,7 +146,7 @@ pub enum PeerResponseResult { /// Represents a result containing block bodies or an error. BlockBodies(RequestResult>), /// Represents a result containing pooled transactions or an error. - PooledTransactions(RequestResult>), + PooledTransactions(RequestResult>), /// Represents a result containing node data or an error. NodeData(RequestResult>), /// Represents a result containing receipts or an error. diff --git a/crates/net/network/src/network.rs b/crates/net/network/src/network.rs index 0af0cb1ad460..496b4250ffd5 100644 --- a/crates/net/network/src/network.rs +++ b/crates/net/network/src/network.rs @@ -140,7 +140,7 @@ impl NetworkHandle { /// Send message to get the [`TransactionsHandle`]. /// /// Returns `None` if no transaction task is installed. - pub async fn transactions_handle(&self) -> Option { + pub async fn transactions_handle(&self) -> Option> { let (tx, rx) = oneshot::channel(); let _ = self.manager().send(NetworkHandleMessage::GetTransactionsHandle(tx)); rx.await.unwrap() @@ -504,7 +504,7 @@ pub(crate) enum NetworkHandleMessage>), /// Retrieves the `TransactionsHandle` via a oneshot sender. - GetTransactionsHandle(oneshot::Sender>), + GetTransactionsHandle(oneshot::Sender>>), /// Initiates a graceful shutdown of the network via a oneshot sender. Shutdown(oneshot::Sender<()>), /// Sets the network state between hibernation and active. diff --git a/crates/net/network/src/transactions/mod.rs b/crates/net/network/src/transactions/mod.rs index 5bdf200e20ff..f7a6fb8805e6 100644 --- a/crates/net/network/src/transactions/mod.rs +++ b/crates/net/network/src/transactions/mod.rs @@ -161,9 +161,7 @@ impl TransactionsHandle { let res = self.get_transaction_hashes(vec![peer]).await?; Ok(res.into_values().next().unwrap_or_default()) } -} -impl TransactionsHandle { /// Requests the transactions directly from the given peer. /// /// Returns `None` if the peer is not connected. 
@@ -173,7 +171,7 @@ impl TransactionsHandle { &self, peer_id: PeerId, hashes: Vec, - ) -> Result>, RequestError> { + ) -> Result>, RequestError> { let Some(peer) = self.peer_handle(peer_id).await? else { return Ok(None) }; let (tx, rx) = oneshot::channel(); @@ -1762,7 +1760,7 @@ enum TransactionsCommand { /// All events related to transactions emitted by the network. #[derive(Debug)] -pub enum NetworkTransactionEvent { +pub enum NetworkTransactionEvent { /// Represents the event of receiving a list of transactions from a peer. /// /// This indicates transactions that were broadcasted to us from the peer. @@ -1786,10 +1784,10 @@ pub enum NetworkTransactionEvent { /// The received `GetPooledTransactions` request. request: GetPooledTransactions, /// The sender for responding to the request with a result of `PooledTransactions`. - response: oneshot::Sender>, + response: oneshot::Sender>>, }, /// Represents the event of receiving a `GetTransactionsHandle` request. - GetTransactionsHandle(oneshot::Sender>), + GetTransactionsHandle(oneshot::Sender>>), } /// Tracks stats about the [`TransactionsManager`]. From 749f98e021099b6ff53232532b4941de524a258d Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Wed, 20 Nov 2024 22:56:35 +0400 Subject: [PATCH 072/156] chore: add header and body generics to `SealedBlockWithSenders` (#12717) --- crates/blockchain-tree/src/state.rs | 18 +++++++++--------- crates/chain-state/src/notifications.rs | 10 +++++----- crates/evm/execution-types/src/chain.rs | 8 ++++---- crates/exex/exex/src/manager.rs | 12 ++++++------ crates/optimism/evm/src/lib.rs | 2 +- crates/primitives/src/block.rs | 22 +++++++++++++++------- 6 files changed, 40 insertions(+), 32 deletions(-) diff --git a/crates/blockchain-tree/src/state.rs b/crates/blockchain-tree/src/state.rs index ca8af6f9b581..a8e43240f4fa 100644 --- a/crates/blockchain-tree/src/state.rs +++ b/crates/blockchain-tree/src/state.rs @@ -184,7 +184,7 @@ mod tests { let mut tree_state = TreeState::new(0, vec![], 5); // Create a chain with two blocks - let block = SealedBlockWithSenders::default(); + let block: SealedBlockWithSenders = Default::default(); let block1_hash = B256::random(); let block2_hash = B256::random(); @@ -254,8 +254,8 @@ mod tests { let block1_hash = B256::random(); let block2_hash = B256::random(); - let mut block1 = SealedBlockWithSenders::default(); - let mut block2 = SealedBlockWithSenders::default(); + let mut block1: SealedBlockWithSenders = Default::default(); + let mut block2: SealedBlockWithSenders = Default::default(); block1.block.header.set_hash(block1_hash); block1.block.header.set_block_number(9); @@ -296,8 +296,8 @@ mod tests { let block1_hash = B256::random(); let block2_hash = B256::random(); - let mut block1 = SealedBlockWithSenders::default(); - let mut block2 = SealedBlockWithSenders::default(); + let mut block1: SealedBlockWithSenders = Default::default(); + let mut block2: SealedBlockWithSenders = Default::default(); block1.block.header.set_hash(block1_hash); block1.block.header.set_block_number(9); @@ -336,7 +336,7 @@ mod tests { // Create a block with a random hash and add it to the buffer let block_hash = B256::random(); - let mut block = SealedBlockWithSenders::default(); + let mut block: SealedBlockWithSenders = Default::default(); block.block.header.set_hash(block_hash); // Add the block to the buffered blocks in the TreeState @@ -363,8 +363,8 @@ mod tests { let ancestor_hash = B256::random(); let descendant_hash = B256::random(); - let mut ancestor_block = 
SealedBlockWithSenders::default(); - let mut descendant_block = SealedBlockWithSenders::default(); + let mut ancestor_block: SealedBlockWithSenders = Default::default(); + let mut descendant_block: SealedBlockWithSenders = Default::default(); ancestor_block.block.header.set_hash(ancestor_hash); descendant_block.block.header.set_hash(descendant_hash); @@ -397,7 +397,7 @@ mod tests { let receipt1 = Receipt::default(); let receipt2 = Receipt::default(); - let mut block = SealedBlockWithSenders::default(); + let mut block: SealedBlockWithSenders = Default::default(); block.block.header.set_hash(block_hash); let receipts = vec![receipt1, receipt2]; diff --git a/crates/chain-state/src/notifications.rs b/crates/chain-state/src/notifications.rs index 84fb120d4b28..865f2bd65847 100644 --- a/crates/chain-state/src/notifications.rs +++ b/crates/chain-state/src/notifications.rs @@ -202,7 +202,7 @@ mod tests { #[test] fn test_commit_notification() { - let block = SealedBlockWithSenders::default(); + let block: SealedBlockWithSenders = Default::default(); let block1_hash = B256::new([0x01; 32]); let block2_hash = B256::new([0x02; 32]); @@ -235,7 +235,7 @@ mod tests { #[test] fn test_reorg_notification() { - let block = SealedBlockWithSenders::default(); + let block: SealedBlockWithSenders = Default::default(); let block1_hash = B256::new([0x01; 32]); let block2_hash = B256::new([0x02; 32]); let block3_hash = B256::new([0x03; 32]); @@ -277,7 +277,7 @@ mod tests { #[test] fn test_block_receipts_commit() { // Create a default block instance for use in block definitions. - let block = SealedBlockWithSenders::default(); + let block: SealedBlockWithSenders = Default::default(); // Define unique hashes for two blocks to differentiate them in the chain. let block1_hash = B256::new([0x01; 32]); @@ -343,7 +343,7 @@ mod tests { #[test] fn test_block_receipts_reorg() { // Define block1 for the old chain segment, which will be reverted. - let mut old_block1 = SealedBlockWithSenders::default(); + let mut old_block1: SealedBlockWithSenders = Default::default(); old_block1.set_block_number(1); old_block1.set_hash(B256::new([0x01; 32])); old_block1.block.body.transactions.push(TransactionSigned::default()); @@ -367,7 +367,7 @@ mod tests { Arc::new(Chain::new(vec![old_block1.clone()], old_execution_outcome, None)); // Define block2 for the new chain segment, which will be committed. 
- let mut new_block1 = SealedBlockWithSenders::default(); + let mut new_block1: SealedBlockWithSenders = Default::default(); new_block1.set_block_number(2); new_block1.set_hash(B256::new([0x02; 32])); new_block1.block.body.transactions.push(TransactionSigned::default()); diff --git a/crates/evm/execution-types/src/chain.rs b/crates/evm/execution-types/src/chain.rs index b32b53b885e8..2c672884d60a 100644 --- a/crates/evm/execution-types/src/chain.rs +++ b/crates/evm/execution-types/src/chain.rs @@ -663,7 +663,7 @@ mod tests { #[test] fn chain_append() { - let block = SealedBlockWithSenders::default(); + let block: SealedBlockWithSenders = Default::default(); let block1_hash = B256::new([0x01; 32]); let block2_hash = B256::new([0x02; 32]); let block3_hash = B256::new([0x03; 32]); @@ -727,13 +727,13 @@ mod tests { vec![], ); - let mut block1 = SealedBlockWithSenders::default(); + let mut block1: SealedBlockWithSenders = Default::default(); let block1_hash = B256::new([15; 32]); block1.set_block_number(1); block1.set_hash(block1_hash); block1.senders.push(Address::new([4; 20])); - let mut block2 = SealedBlockWithSenders::default(); + let mut block2: SealedBlockWithSenders = Default::default(); let block2_hash = B256::new([16; 32]); block2.set_block_number(2); block2.set_hash(block2_hash); @@ -797,7 +797,7 @@ mod tests { use reth_primitives::{Receipt, Receipts, TxType}; // Create a default SealedBlockWithSenders object - let block = SealedBlockWithSenders::default(); + let block: SealedBlockWithSenders = Default::default(); // Define block hashes for block1 and block2 let block1_hash = B256::new([0x01; 32]); diff --git a/crates/exex/exex/src/manager.rs b/crates/exex/exex/src/manager.rs index e8902e0f3525..c8c06021efae 100644 --- a/crates/exex/exex/src/manager.rs +++ b/crates/exex/exex/src/manager.rs @@ -736,7 +736,7 @@ mod tests { ExExManager::new((), vec![exex_handle], 10, wal, empty_finalized_header_stream()); // Define the notification for testing - let mut block1 = SealedBlockWithSenders::default(); + let mut block1: SealedBlockWithSenders = Default::default(); block1.block.header.set_hash(B256::new([0x01; 32])); block1.block.header.set_block_number(10); @@ -754,7 +754,7 @@ mod tests { assert_eq!(exex_manager.next_id, 1); // Push another notification - let mut block2 = SealedBlockWithSenders::default(); + let mut block2: SealedBlockWithSenders = Default::default(); block2.block.header.set_hash(B256::new([0x02; 32])); block2.block.header.set_block_number(20); @@ -792,7 +792,7 @@ mod tests { ); // Push some notifications to fill part of the buffer - let mut block1 = SealedBlockWithSenders::default(); + let mut block1: SealedBlockWithSenders = Default::default(); block1.block.header.set_hash(B256::new([0x01; 32])); block1.block.header.set_block_number(10); @@ -1051,11 +1051,11 @@ mod tests { assert_eq!(exex_handle.next_notification_id, 0); // Setup two blocks for the chain commit notification - let mut block1 = SealedBlockWithSenders::default(); + let mut block1: SealedBlockWithSenders = Default::default(); block1.block.header.set_hash(B256::new([0x01; 32])); block1.block.header.set_block_number(10); - let mut block2 = SealedBlockWithSenders::default(); + let mut block2: SealedBlockWithSenders = Default::default(); block2.block.header.set_hash(B256::new([0x02; 32])); block2.block.header.set_block_number(11); @@ -1104,7 +1104,7 @@ mod tests { // Set finished_height to a value higher than the block tip exex_handle.finished_height = Some(BlockNumHash::new(15, B256::random())); - let mut block1 = 
SealedBlockWithSenders::default(); + let mut block1: SealedBlockWithSenders = Default::default(); block1.block.header.set_hash(B256::new([0x01; 32])); block1.block.header.set_block_number(10); diff --git a/crates/optimism/evm/src/lib.rs b/crates/optimism/evm/src/lib.rs index be1fb6d32272..52b974e6c862 100644 --- a/crates/optimism/evm/src/lib.rs +++ b/crates/optimism/evm/src/lib.rs @@ -551,7 +551,7 @@ mod tests { #[test] fn receipts_by_block_hash() { // Create a default SealedBlockWithSenders object - let block = SealedBlockWithSenders::default(); + let block: SealedBlockWithSenders = Default::default(); // Define block hashes for block1 and block2 let block1_hash = B256::new([0x01; 32]); diff --git a/crates/primitives/src/block.rs b/crates/primitives/src/block.rs index 5c47c49f4375..57c63d53a43e 100644 --- a/crates/primitives/src/block.rs +++ b/crates/primitives/src/block.rs @@ -2,7 +2,7 @@ use crate::{GotExpected, SealedHeader, TransactionSigned, TransactionSignedEcRec use alloc::vec::Vec; use alloy_consensus::Header; use alloy_eips::{eip2718::Encodable2718, eip4895::Withdrawals}; -use alloy_primitives::{Address, Bytes, B256}; +use alloy_primitives::{Address, Bytes, Sealable, B256}; use alloy_rlp::{Decodable, Encodable, RlpDecodable, RlpEncodable}; use derive_more::{Deref, DerefMut}; #[cfg(any(test, feature = "arbitrary"))] @@ -493,22 +493,30 @@ where } /// Sealed block with senders recovered from transactions. -#[derive(Debug, Clone, PartialEq, Eq, Default, Serialize, Deserialize, Deref, DerefMut)] -pub struct SealedBlockWithSenders { +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Deref, DerefMut)] +pub struct SealedBlockWithSenders { /// Sealed block #[deref] #[deref_mut] - pub block: SealedBlock, + pub block: SealedBlock, /// List of senders that match transactions from block. pub senders: Vec
, } -impl SealedBlockWithSenders { +impl Default for SealedBlockWithSenders { + fn default() -> Self { + Self { block: SealedBlock::default(), senders: Default::default() } + } +} + +impl SealedBlockWithSenders { /// New sealed block with sender. Return none if len of tx and senders does not match - pub fn new(block: SealedBlock, senders: Vec
) -> Option { - (block.body.transactions.len() == senders.len()).then_some(Self { block, senders }) + pub fn new(block: SealedBlock, senders: Vec
) -> Option { + (block.body.transactions().len() == senders.len()).then_some(Self { block, senders }) } +} +impl SealedBlockWithSenders { /// Split Structure to its components #[inline] pub fn into_components(self) -> (SealedBlock, Vec
) { From 0c5984179e0304c11ad4c5bc0057508b65441338 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Thu, 21 Nov 2024 02:48:39 +0400 Subject: [PATCH 073/156] refactor(storage): unify blocks insertion logic (#12694) --- Cargo.lock | 1 + bin/reth/src/commands/debug_cmd/merkle.rs | 4 +- crates/blockchain-tree/src/blockchain_tree.rs | 23 ++- crates/blockchain-tree/src/externals.rs | 14 +- crates/blockchain-tree/src/shareable.rs | 8 +- .../commands/src/init_state/without_evm.rs | 15 +- crates/consensus/beacon/src/engine/mod.rs | 9 +- crates/engine/local/src/service.rs | 4 +- crates/engine/service/src/service.rs | 4 +- crates/engine/tree/Cargo.toml | 2 + crates/engine/tree/src/persistence.rs | 16 +- crates/exex/exex/src/backfill/test_utils.rs | 6 +- crates/exex/exex/src/manager.rs | 4 +- crates/exex/exex/src/notifications.rs | 5 +- crates/node/builder/src/launch/common.rs | 5 +- crates/stages/stages/src/stages/bodies.rs | 39 +---- .../stages/src/stages/hashing_account.rs | 5 +- crates/storage/db-api/src/models/mod.rs | 5 +- .../src/providers/database/metrics.rs | 28 --- .../provider/src/providers/database/mod.rs | 17 +- .../src/providers/database/provider.rs | 163 ++++++++---------- crates/storage/provider/src/providers/mod.rs | 5 +- crates/storage/provider/src/traits/block.rs | 34 +++- crates/storage/provider/src/writer/mod.rs | 79 +-------- 24 files changed, 226 insertions(+), 269 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 09bcccf652e9..017b84f6e49d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7254,6 +7254,7 @@ dependencies = [ "reth-payload-primitives", "reth-payload-validator", "reth-primitives", + "reth-primitives-traits", "reth-provider", "reth-prune", "reth-prune-types", diff --git a/bin/reth/src/commands/debug_cmd/merkle.rs b/bin/reth/src/commands/debug_cmd/merkle.rs index db4cd952e8df..bb8a6a2c4a10 100644 --- a/bin/reth/src/commands/debug_cmd/merkle.rs +++ b/bin/reth/src/commands/debug_cmd/merkle.rs @@ -21,7 +21,7 @@ use reth_node_ethereum::EthExecutorProvider; use reth_provider::{ providers::ProviderNodeTypes, writer::UnifiedStorageWriter, BlockNumReader, BlockWriter, ChainSpecProvider, DatabaseProviderFactory, HeaderProvider, LatestStateProviderRef, - OriginalValuesKnown, ProviderError, ProviderFactory, StateWriter, + OriginalValuesKnown, ProviderError, ProviderFactory, StateWriter, StorageLocation, }; use reth_revm::database::StateProviderDatabase; use reth_stages::{ @@ -148,7 +148,7 @@ impl> Command { .map_err(|block| eyre::eyre!("Error sealing block with senders: {block:?}"))?; trace!(target: "reth::cli", block_number, "Executing block"); - provider_rw.insert_block(sealed_block.clone())?; + provider_rw.insert_block(sealed_block.clone(), StorageLocation::Database)?; td += sealed_block.difficulty; let mut executor = executor_provider.batch_executor(StateProviderDatabase::new( diff --git a/crates/blockchain-tree/src/blockchain_tree.rs b/crates/blockchain-tree/src/blockchain_tree.rs index 1a8a390e99dc..8e192492593a 100644 --- a/crates/blockchain-tree/src/blockchain_tree.rs +++ b/crates/blockchain-tree/src/blockchain_tree.rs @@ -1,6 +1,7 @@ //! 
Implementation of [`BlockchainTree`] use crate::{ + externals::TreeNodeTypes, metrics::{MakeCanonicalAction, MakeCanonicalDurationsRecorder, TreeMetrics}, state::{SidechainId, TreeState}, AppendableChain, BlockIndices, BlockchainTreeConfig, ExecutionData, TreeExternals, @@ -21,10 +22,10 @@ use reth_primitives::{ SealedHeader, StaticFileSegment, }; use reth_provider::{ - providers::ProviderNodeTypes, BlockExecutionWriter, BlockNumReader, BlockWriter, - CanonStateNotification, CanonStateNotificationSender, CanonStateNotifications, - ChainSpecProvider, ChainSplit, ChainSplitTarget, DBProvider, DisplayBlocksChain, - HeaderProvider, ProviderError, StaticFileProviderFactory, + BlockExecutionWriter, BlockNumReader, BlockWriter, CanonStateNotification, + CanonStateNotificationSender, CanonStateNotifications, ChainSpecProvider, ChainSplit, + ChainSplitTarget, DBProvider, DisplayBlocksChain, HeaderProvider, ProviderError, + StaticFileProviderFactory, }; use reth_stages_api::{MetricEvent, MetricEventsSender}; use reth_storage_errors::provider::{ProviderResult, RootMismatch}; @@ -93,7 +94,7 @@ impl BlockchainTree { impl BlockchainTree where - N: ProviderNodeTypes, + N: TreeNodeTypes, E: BlockExecutorProvider, { /// Builds the blockchain tree for the node. @@ -1386,16 +1387,18 @@ mod tests { use reth_db_api::transaction::DbTxMut; use reth_evm::test_utils::MockExecutorProvider; use reth_evm_ethereum::execute::EthExecutorProvider; + use reth_node_types::FullNodePrimitives; use reth_primitives::{ proofs::{calculate_receipt_root, calculate_transaction_root}, Account, BlockBody, Transaction, TransactionSigned, TransactionSignedEcRecovered, }; use reth_provider::{ + providers::ProviderNodeTypes, test_utils::{ blocks::BlockchainTestData, create_test_provider_factory_with_chain_spec, MockNodeTypesWithDB, }, - ProviderFactory, + ProviderFactory, StorageLocation, }; use reth_revm::primitives::AccountInfo; use reth_stages_api::StageCheckpoint; @@ -1420,7 +1423,12 @@ mod tests { TreeExternals::new(provider_factory, consensus, executor_factory) } - fn setup_genesis(factory: &ProviderFactory, mut genesis: SealedBlock) { + fn setup_genesis< + N: ProviderNodeTypes>, + >( + factory: &ProviderFactory, + mut genesis: SealedBlock, + ) { // insert genesis to db. 
genesis.header.set_block_number(10); @@ -1551,6 +1559,7 @@ mod tests { SealedBlock::new(chain_spec.sealed_genesis_header(), Default::default()) .try_seal_with_senders() .unwrap(), + StorageLocation::Database, ) .unwrap(); let account = Account { balance: initial_signer_balance, ..Default::default() }; diff --git a/crates/blockchain-tree/src/externals.rs b/crates/blockchain-tree/src/externals.rs index 4e22fcb78b6b..76b65824854c 100644 --- a/crates/blockchain-tree/src/externals.rs +++ b/crates/blockchain-tree/src/externals.rs @@ -4,8 +4,8 @@ use alloy_primitives::{BlockHash, BlockNumber}; use reth_consensus::Consensus; use reth_db::{static_file::HeaderMask, tables}; use reth_db_api::{cursor::DbCursorRO, transaction::DbTx}; -use reth_node_types::NodeTypesWithDB; -use reth_primitives::StaticFileSegment; +use reth_node_types::{Block, FullNodePrimitives, NodeTypesWithDB}; +use reth_primitives::{BlockBody, StaticFileSegment}; use reth_provider::{ providers::ProviderNodeTypes, ChainStateBlockReader, ChainStateBlockWriter, ProviderFactory, StaticFileProviderFactory, StatsReader, @@ -13,6 +13,16 @@ use reth_provider::{ use reth_storage_errors::provider::ProviderResult; use std::{collections::BTreeMap, sync::Arc}; +/// A helper trait with requirements for [`ProviderNodeTypes`] to be used within [`TreeExternals`]. +pub trait TreeNodeTypes: + ProviderNodeTypes>> +{ +} +impl TreeNodeTypes for T where + T: ProviderNodeTypes>> +{ +} + /// A container for external components. /// /// This is a simple container for external components used throughout the blockchain tree diff --git a/crates/blockchain-tree/src/shareable.rs b/crates/blockchain-tree/src/shareable.rs index ec1f3cccf97d..f997e0a062d3 100644 --- a/crates/blockchain-tree/src/shareable.rs +++ b/crates/blockchain-tree/src/shareable.rs @@ -1,5 +1,7 @@ //! Wrapper around `BlockchainTree` that allows for it to be shared. 
+use crate::externals::TreeNodeTypes; + use super::BlockchainTree; use alloy_eips::BlockNumHash; use alloy_primitives::{BlockHash, BlockNumber}; @@ -36,7 +38,7 @@ impl ShareableBlockchainTree { impl BlockchainTreeEngine for ShareableBlockchainTree where - N: ProviderNodeTypes, + N: TreeNodeTypes, E: BlockExecutorProvider, { fn buffer_block(&self, block: SealedBlockWithSenders) -> Result<(), InsertBlockError> { @@ -107,7 +109,7 @@ where impl BlockchainTreeViewer for ShareableBlockchainTree where - N: ProviderNodeTypes, + N: TreeNodeTypes, E: BlockExecutorProvider, { fn header_by_hash(&self, hash: BlockHash) -> Option { @@ -170,7 +172,7 @@ where impl BlockchainTreePendingStateProvider for ShareableBlockchainTree where - N: ProviderNodeTypes, + N: TreeNodeTypes, E: BlockExecutorProvider, { fn find_pending_state_provider( diff --git a/crates/cli/commands/src/init_state/without_evm.rs b/crates/cli/commands/src/init_state/without_evm.rs index c6e1f9a51dd1..e3594a593638 100644 --- a/crates/cli/commands/src/init_state/without_evm.rs +++ b/crates/cli/commands/src/init_state/without_evm.rs @@ -3,12 +3,10 @@ use alloy_rlp::Decodable; use alloy_consensus::Header; use reth_node_builder::NodePrimitives; -use reth_primitives::{ - BlockBody, SealedBlock, SealedBlockWithSenders, SealedHeader, StaticFileSegment, -}; +use reth_primitives::{SealedBlock, SealedBlockWithSenders, SealedHeader, StaticFileSegment}; use reth_provider::{ providers::StaticFileProvider, BlockWriter, StageCheckpointWriter, StaticFileProviderFactory, - StaticFileWriter, + StaticFileWriter, StorageLocation, }; use reth_stages::{StageCheckpoint, StageId}; @@ -33,7 +31,9 @@ pub fn setup_without_evm( total_difficulty: U256, ) -> Result<(), eyre::Error> where - Provider: StaticFileProviderFactory + StageCheckpointWriter + BlockWriter, + Provider: StaticFileProviderFactory + + StageCheckpointWriter + + BlockWriter, { info!(target: "reth::cli", "Setting up dummy EVM chain before importing state."); @@ -64,11 +64,12 @@ fn append_first_block( total_difficulty: U256, ) -> Result<(), eyre::Error> where - Provider: BlockWriter + StaticFileProviderFactory, + Provider: BlockWriter + StaticFileProviderFactory, { provider_rw.insert_block( - SealedBlockWithSenders::new(SealedBlock::new(header.clone(), BlockBody::default()), vec![]) + SealedBlockWithSenders::new(SealedBlock::new(header.clone(), Default::default()), vec![]) .expect("no senders or txes"), + StorageLocation::Database, )?; let sf_provider = provider_rw.static_file_provider(); diff --git a/crates/consensus/beacon/src/engine/mod.rs b/crates/consensus/beacon/src/engine/mod.rs index 0b93ae0f29ae..2ad06e68b67d 100644 --- a/crates/consensus/beacon/src/engine/mod.rs +++ b/crates/consensus/beacon/src/engine/mod.rs @@ -1991,7 +1991,8 @@ mod tests { use alloy_rpc_types_engine::{ForkchoiceState, ForkchoiceUpdated, PayloadStatus}; use assert_matches::assert_matches; use reth_chainspec::{ChainSpecBuilder, MAINNET}; - use reth_provider::{BlockWriter, ProviderFactory}; + use reth_node_types::FullNodePrimitives; + use reth_provider::{BlockWriter, ProviderFactory, StorageLocation}; use reth_rpc_types_compat::engine::payload::block_to_payload_v1; use reth_stages::{ExecOutput, PipelineError, StageError}; use reth_stages_api::StageCheckpoint; @@ -2169,7 +2170,10 @@ mod tests { assert_matches!(rx.await, Ok(Ok(()))); } - fn insert_blocks<'a, N: ProviderNodeTypes>( + fn insert_blocks< + 'a, + N: ProviderNodeTypes>, + >( provider_factory: ProviderFactory, mut blocks: impl Iterator, ) { @@ -2179,6 +2183,7 @@ mod 
tests { provider .insert_block( b.clone().try_seal_with_senders().expect("invalid tx signature in block"), + StorageLocation::Database, ) .map(drop) }) diff --git a/crates/engine/local/src/service.rs b/crates/engine/local/src/service.rs index 4e4826be31d7..3575bc133c6c 100644 --- a/crates/engine/local/src/service.rs +++ b/crates/engine/local/src/service.rs @@ -27,7 +27,7 @@ use reth_engine_tree::{ EngineApiKind, EngineApiRequest, EngineApiRequestHandler, EngineRequestHandler, FromEngine, RequestHandlerEvent, }, - persistence::PersistenceHandle, + persistence::{PersistenceHandle, PersistenceNodeTypes}, tree::{EngineApiTreeHandler, InvalidBlockHook, TreeConfig}, }; use reth_evm::execute::BlockExecutorProvider; @@ -59,7 +59,7 @@ where impl LocalEngineService where - N: EngineNodeTypes, + N: EngineNodeTypes + PersistenceNodeTypes, { /// Constructor for [`LocalEngineService`]. #[allow(clippy::too_many_arguments)] diff --git a/crates/engine/service/src/service.rs b/crates/engine/service/src/service.rs index cec9d981f1bf..49233439e0a8 100644 --- a/crates/engine/service/src/service.rs +++ b/crates/engine/service/src/service.rs @@ -8,7 +8,7 @@ use reth_engine_tree::{ backfill::PipelineSync, download::BasicBlockDownloader, engine::{EngineApiKind, EngineApiRequest, EngineApiRequestHandler, EngineHandler}, - persistence::PersistenceHandle, + persistence::{PersistenceHandle, PersistenceNodeTypes}, tree::{EngineApiTreeHandler, InvalidBlockHook, TreeConfig}, }; pub use reth_engine_tree::{ @@ -59,7 +59,7 @@ where impl EngineService where - N: EngineNodeTypes, + N: EngineNodeTypes + PersistenceNodeTypes, Client: EthBlockClient + 'static, E: BlockExecutorProvider + 'static, { diff --git a/crates/engine/tree/Cargo.toml b/crates/engine/tree/Cargo.toml index d6e1c80a7261..70be84a9f799 100644 --- a/crates/engine/tree/Cargo.toml +++ b/crates/engine/tree/Cargo.toml @@ -27,6 +27,7 @@ reth-payload-builder-primitives.workspace = true reth-payload-primitives.workspace = true reth-payload-validator.workspace = true reth-primitives.workspace = true +reth-primitives-traits.workspace = true reth-provider.workspace = true reth-prune.workspace = true reth-revm.workspace = true @@ -107,4 +108,5 @@ test-utils = [ "reth-provider/test-utils", "reth-trie/test-utils", "reth-prune-types?/test-utils", + "reth-primitives-traits/test-utils", ] diff --git a/crates/engine/tree/src/persistence.rs b/crates/engine/tree/src/persistence.rs index e0c9e0362d04..0199ae3f4613 100644 --- a/crates/engine/tree/src/persistence.rs +++ b/crates/engine/tree/src/persistence.rs @@ -2,6 +2,8 @@ use crate::metrics::PersistenceMetrics; use alloy_eips::BlockNumHash; use reth_chain_state::ExecutedBlock; use reth_errors::ProviderError; +use reth_primitives::BlockBody; +use reth_primitives_traits::{Block, FullNodePrimitives}; use reth_provider::{ providers::ProviderNodeTypes, writer::UnifiedStorageWriter, BlockHashReader, ChainStateBlockWriter, DatabaseProviderFactory, ProviderFactory, StaticFileProviderFactory, @@ -16,6 +18,16 @@ use thiserror::Error; use tokio::sync::oneshot; use tracing::{debug, error}; +/// A helper trait with requirements for [`ProviderNodeTypes`] to be used within +/// [`PersistenceService`]. +pub trait PersistenceNodeTypes: + ProviderNodeTypes>> +{ +} +impl PersistenceNodeTypes for T where + T: ProviderNodeTypes>> +{ +} /// Writes parts of reth's in memory tree state to the database and static files. 
/// /// This is meant to be a spawned service that listens for various incoming persistence operations, @@ -60,7 +72,7 @@ impl PersistenceService { } } -impl PersistenceService { +impl PersistenceService { /// This is the main loop, that will listen to database events and perform the requested /// database actions pub fn run(mut self) -> Result<(), PersistenceError> { @@ -198,7 +210,7 @@ impl PersistenceHandle { } /// Create a new [`PersistenceHandle`], and spawn the persistence service. - pub fn spawn_service( + pub fn spawn_service( provider_factory: ProviderFactory, pruner: PrunerWithFactory>, sync_metrics_tx: MetricEventsSender, diff --git a/crates/exex/exex/src/backfill/test_utils.rs b/crates/exex/exex/src/backfill/test_utils.rs index 80af408c5c8f..5d0f88f517dc 100644 --- a/crates/exex/exex/src/backfill/test_utils.rs +++ b/crates/exex/exex/src/backfill/test_utils.rs @@ -9,6 +9,7 @@ use reth_evm::execute::{ BatchExecutor, BlockExecutionInput, BlockExecutionOutput, BlockExecutorProvider, Executor, }; use reth_evm_ethereum::execute::EthExecutorProvider; +use reth_node_api::FullNodePrimitives; use reth_primitives::{ Block, BlockBody, BlockWithSenders, Receipt, SealedBlockWithSenders, Transaction, }; @@ -57,7 +58,7 @@ pub(crate) fn execute_block_and_commit_to_database( block: &BlockWithSenders, ) -> eyre::Result> where - N: ProviderNodeTypes, + N: ProviderNodeTypes>, { let provider = provider_factory.provider()?; @@ -161,7 +162,7 @@ pub(crate) fn blocks_and_execution_outputs( key_pair: Keypair, ) -> eyre::Result)>> where - N: ProviderNodeTypes, + N: ProviderNodeTypes>, { let (block1, block2) = blocks(chain_spec.clone(), key_pair)?; @@ -183,6 +184,7 @@ pub(crate) fn blocks_and_execution_outcome( ) -> eyre::Result<(Vec, ExecutionOutcome)> where N: ProviderNodeTypes, + N::Primitives: FullNodePrimitives, { let (block1, block2) = blocks(chain_spec.clone(), key_pair)?; diff --git a/crates/exex/exex/src/manager.rs b/crates/exex/exex/src/manager.rs index c8c06021efae..e3d3a3c06901 100644 --- a/crates/exex/exex/src/manager.rs +++ b/crates/exex/exex/src/manager.rs @@ -657,7 +657,7 @@ mod tests { use reth_primitives::SealedBlockWithSenders; use reth_provider::{ providers::BlockchainProvider2, test_utils::create_test_provider_factory, BlockReader, - BlockWriter, Chain, DatabaseProviderFactory, TransactionVariant, + BlockWriter, Chain, DatabaseProviderFactory, StorageLocation, TransactionVariant, }; use reth_testing_utils::generators::{self, random_block, BlockParams}; @@ -1238,7 +1238,7 @@ mod tests { .seal_with_senders() .unwrap(); let provider_rw = provider_factory.database_provider_rw().unwrap(); - provider_rw.insert_block(block.clone()).unwrap(); + provider_rw.insert_block(block.clone(), StorageLocation::Database).unwrap(); provider_rw.commit().unwrap(); let provider = BlockchainProvider2::new(provider_factory).unwrap(); diff --git a/crates/exex/exex/src/notifications.rs b/crates/exex/exex/src/notifications.rs index 14cfe9be4d92..baf504166d19 100644 --- a/crates/exex/exex/src/notifications.rs +++ b/crates/exex/exex/src/notifications.rs @@ -403,7 +403,7 @@ mod tests { use reth_primitives::Block; use reth_provider::{ providers::BlockchainProvider2, test_utils::create_test_provider_factory, BlockWriter, - Chain, DatabaseProviderFactory, + Chain, DatabaseProviderFactory, StorageLocation, }; use reth_testing_utils::generators::{self, random_block, BlockParams}; use tokio::sync::mpsc; @@ -431,6 +431,7 @@ mod tests { let provider_rw = provider_factory.provider_rw()?; provider_rw.insert_block( 
node_head_block.clone().seal_with_senders().ok_or_eyre("failed to recover senders")?, + StorageLocation::Database, )?; provider_rw.commit()?; @@ -574,7 +575,7 @@ mod tests { ..Default::default() }; let provider_rw = provider.database_provider_rw()?; - provider_rw.insert_block(node_head_block)?; + provider_rw.insert_block(node_head_block, StorageLocation::Database)?; provider_rw.commit()?; let node_head_notification = ExExNotification::ChainCommitted { new: Arc::new( diff --git a/crates/node/builder/src/launch/common.rs b/crates/node/builder/src/launch/common.rs index 903b09803542..225f2029c28a 100644 --- a/crates/node/builder/src/launch/common.rs +++ b/crates/node/builder/src/launch/common.rs @@ -12,7 +12,8 @@ use eyre::{Context, OptionExt}; use rayon::ThreadPoolBuilder; use reth_beacon_consensus::EthBeaconConsensus; use reth_blockchain_tree::{ - BlockchainTree, BlockchainTreeConfig, ShareableBlockchainTree, TreeExternals, + externals::TreeNodeTypes, BlockchainTree, BlockchainTreeConfig, ShareableBlockchainTree, + TreeExternals, }; use reth_chainspec::{Chain, EthChainSpec, EthereumHardforks}; use reth_config::{config::EtlConfig, PruneConfig}; @@ -631,7 +632,7 @@ impl Attached::ChainSpec>, WithMeteredProviders>, > where - T: FullNodeTypes, + T: FullNodeTypes, { /// Returns access to the underlying database. pub const fn database(&self) -> &::DB { diff --git a/crates/stages/stages/src/stages/bodies.rs b/crates/stages/stages/src/stages/bodies.rs index 80185eade878..b6eab349e161 100644 --- a/crates/stages/stages/src/stages/bodies.rs +++ b/crates/stages/stages/src/stages/bodies.rs @@ -19,6 +19,7 @@ use reth_primitives::StaticFileSegment; use reth_provider::{ providers::{StaticFileProvider, StaticFileWriter}, BlockReader, BlockWriter, DBProvider, ProviderError, StaticFileProviderFactory, StatsReader, + StorageLocation, }; use reth_stages_api::{ EntitiesCheckpoint, ExecInput, ExecOutput, Stage, StageCheckpoint, StageError, StageId, @@ -122,7 +123,7 @@ where let (from_block, to_block) = input.next_block_range().into_inner(); // Get id for the next tx_num of zero if there are no transactions. - let mut next_tx_num = provider + let next_tx_num = provider .tx_ref() .cursor_read::()? .last()? @@ -130,8 +131,6 @@ where .unwrap_or_default(); let static_file_provider = provider.static_file_provider(); - let mut static_file_producer = - static_file_provider.get_writer(from_block, StaticFileSegment::Transactions)?; // Make sure Transactions static file is at the same height. If it's further, this // input execution was interrupted previously and we need to unwind the static file. @@ -145,6 +144,8 @@ where // stage run. So, our only solution is to unwind the static files and proceed from the // database expected height. Ordering::Greater => { + let mut static_file_producer = + static_file_provider.get_writer(from_block, StaticFileSegment::Transactions)?; static_file_producer .prune_transactions(next_static_file_tx_num - next_tx_num, from_block - 1)?; // Since this is a database <-> static file inconsistency, we commit the change @@ -168,40 +169,16 @@ where let buffer = self.buffer.take().ok_or(StageError::MissingDownloadBuffer)?; trace!(target: "sync::stages::bodies", bodies_len = buffer.len(), "Writing blocks"); - let mut highest_block = from_block; - - // Firstly, write transactions to static files - for response in &buffer { - let block_number = response.block_number(); - - // Increment block on static file header. 
- if block_number > 0 { - static_file_producer.increment_block(block_number)?; - } - - match response { - BlockResponse::Full(block) => { - // Write transactions - for transaction in block.body.transactions() { - static_file_producer.append_transaction(next_tx_num, transaction)?; - - // Increment transaction id for each transaction. - next_tx_num += 1; - } - } - BlockResponse::Empty(_) => {} - }; - - highest_block = block_number; - } + let highest_block = buffer.last().map(|r| r.block_number()).unwrap_or(from_block); - // Write bodies to database. This will NOT write transactions to database as we've already - // written them directly to static files. + // Write bodies to database. provider.append_block_bodies( buffer .into_iter() .map(|response| (response.block_number(), response.into_body())) .collect(), + // We are writing transactions directly to static files. + StorageLocation::StaticFiles, )?; // The stage is "done" if: diff --git a/crates/stages/stages/src/stages/hashing_account.rs b/crates/stages/stages/src/stages/hashing_account.rs index 1ca0e1aa1325..ecca1e0716c1 100644 --- a/crates/stages/stages/src/stages/hashing_account.rs +++ b/crates/stages/stages/src/stages/hashing_account.rs @@ -61,7 +61,10 @@ impl AccountHashingStage { pub fn seed( provider: &reth_provider::DatabaseProvider, opts: SeedOpts, - ) -> Result, StageError> { + ) -> Result, StageError> + where + N::Primitives: reth_primitives_traits::FullNodePrimitives, + { use alloy_primitives::U256; use reth_db_api::models::AccountBeforeTx; use reth_provider::{StaticFileProviderFactory, StaticFileWriter}; diff --git a/crates/storage/db-api/src/models/mod.rs b/crates/storage/db-api/src/models/mod.rs index 7b1cd5a1ddb3..5d18711922ed 100644 --- a/crates/storage/db-api/src/models/mod.rs +++ b/crates/storage/db-api/src/models/mod.rs @@ -8,7 +8,9 @@ use alloy_consensus::Header; use alloy_genesis::GenesisAccount; use alloy_primitives::{Address, Bytes, Log, B256, U256}; use reth_codecs::{add_arbitrary_tests, Compact}; -use reth_primitives::{Account, Bytecode, Receipt, StorageEntry, TransactionSignedNoHash, TxType}; +use reth_primitives::{ + Account, Bytecode, Receipt, StorageEntry, TransactionSigned, TransactionSignedNoHash, TxType, +}; use reth_prune_types::{PruneCheckpoint, PruneSegment}; use reth_stages_types::StageCheckpoint; use reth_trie_common::{StoredNibbles, StoredNibblesSubKey, *}; @@ -225,6 +227,7 @@ impl_compression_for_compact!( Bytecode, AccountBeforeTx, TransactionSignedNoHash, + TransactionSigned, CompactU256, StageCheckpoint, PruneCheckpoint, diff --git a/crates/storage/provider/src/providers/database/metrics.rs b/crates/storage/provider/src/providers/database/metrics.rs index 7e9ee7202c01..4ee8f1ce5b12 100644 --- a/crates/storage/provider/src/providers/database/metrics.rs +++ b/crates/storage/provider/src/providers/database/metrics.rs @@ -22,14 +22,6 @@ impl Default for DurationsRecorder { } impl DurationsRecorder { - /// Saves the provided duration for future logging and instantly reports as a metric with - /// `action` label. - pub(crate) fn record_duration(&mut self, action: Action, duration: Duration) { - self.actions.push((action, duration)); - self.current_metrics.record_duration(action, duration); - self.latest = Some(self.start.elapsed()); - } - /// Records the duration since last record, saves it for future logging and instantly reports as /// a metric with `action` label. 
pub(crate) fn record_relative(&mut self, action: Action) { @@ -56,11 +48,6 @@ pub(crate) enum Action { InsertHeaders, InsertHeaderNumbers, InsertHeaderTerminalDifficulties, - InsertBlockOmmers, - InsertTransactionSenders, - InsertTransactions, - InsertTransactionHashNumbers, - InsertBlockWithdrawals, InsertBlockBodyIndices, InsertTransactionBlocks, GetNextTxNum, @@ -95,16 +82,6 @@ struct DatabaseProviderMetrics { insert_header_numbers: Histogram, /// Duration of insert header TD insert_header_td: Histogram, - /// Duration of insert block ommers - insert_block_ommers: Histogram, - /// Duration of insert tx senders - insert_tx_senders: Histogram, - /// Duration of insert transactions - insert_transactions: Histogram, - /// Duration of insert transaction hash numbers - insert_tx_hash_numbers: Histogram, - /// Duration of insert block withdrawals - insert_block_withdrawals: Histogram, /// Duration of insert block body indices insert_block_body_indices: Histogram, /// Duration of insert transaction blocks @@ -131,11 +108,6 @@ impl DatabaseProviderMetrics { Action::InsertHeaders => self.insert_headers.record(duration), Action::InsertHeaderNumbers => self.insert_header_numbers.record(duration), Action::InsertHeaderTerminalDifficulties => self.insert_header_td.record(duration), - Action::InsertBlockOmmers => self.insert_block_ommers.record(duration), - Action::InsertTransactionSenders => self.insert_tx_senders.record(duration), - Action::InsertTransactions => self.insert_transactions.record(duration), - Action::InsertTransactionHashNumbers => self.insert_tx_hash_numbers.record(duration), - Action::InsertBlockWithdrawals => self.insert_block_withdrawals.record(duration), Action::InsertBlockBodyIndices => self.insert_block_body_indices.record(duration), Action::InsertTransactionBlocks => self.insert_tx_blocks.record(duration), Action::GetNextTxNum => self.get_next_tx_num.record(duration), diff --git a/crates/storage/provider/src/providers/database/mod.rs b/crates/storage/provider/src/providers/database/mod.rs index a64bb2578dd6..cc50aa351457 100644 --- a/crates/storage/provider/src/providers/database/mod.rs +++ b/crates/storage/provider/src/providers/database/mod.rs @@ -644,7 +644,7 @@ mod tests { providers::{StaticFileProvider, StaticFileWriter}, test_utils::{blocks::TEST_BLOCK, create_test_provider_factory, MockNodeTypesWithDB}, BlockHashReader, BlockNumReader, BlockWriter, DBProvider, HeaderSyncGapProvider, - TransactionsProvider, + StorageLocation, TransactionsProvider, }; use alloy_primitives::{TxNumber, B256, U256}; use assert_matches::assert_matches; @@ -715,7 +715,10 @@ mod tests { { let provider = factory.provider_rw().unwrap(); assert_matches!( - provider.insert_block(block.clone().try_seal_with_senders().unwrap()), + provider.insert_block( + block.clone().try_seal_with_senders().unwrap(), + StorageLocation::Database + ), Ok(_) ); assert_matches!( @@ -733,7 +736,10 @@ mod tests { }; let provider = factory.with_prune_modes(prune_modes).provider_rw().unwrap(); assert_matches!( - provider.insert_block(block.clone().try_seal_with_senders().unwrap(),), + provider.insert_block( + block.clone().try_seal_with_senders().unwrap(), + StorageLocation::Database + ), Ok(_) ); assert_matches!(provider.transaction_sender(0), Ok(None)); @@ -754,7 +760,10 @@ mod tests { let provider = factory.provider_rw().unwrap(); assert_matches!( - provider.insert_block(block.clone().try_seal_with_senders().unwrap()), + provider.insert_block( + block.clone().try_seal_with_senders().unwrap(), + StorageLocation::Database + 
), Ok(_) ); diff --git a/crates/storage/provider/src/providers/database/provider.rs b/crates/storage/provider/src/providers/database/provider.rs index 4690e27821ea..66bc4c053415 100644 --- a/crates/storage/provider/src/providers/database/provider.rs +++ b/crates/storage/provider/src/providers/database/provider.rs @@ -17,8 +17,8 @@ use crate::{ LatestStateProvider, LatestStateProviderRef, OriginalValuesKnown, ProviderError, PruneCheckpointReader, PruneCheckpointWriter, RevertsInit, StageCheckpointReader, StateChangeWriter, StateProviderBox, StateReader, StateWriter, StaticFileProviderFactory, - StatsReader, StorageReader, StorageTrieWriter, TransactionVariant, TransactionsProvider, - TransactionsProviderExt, TrieWriter, WithdrawalsProvider, + StatsReader, StorageLocation, StorageReader, StorageTrieWriter, TransactionVariant, + TransactionsProvider, TransactionsProviderExt, TrieWriter, WithdrawalsProvider, }; use alloy_consensus::Header; use alloy_eips::{ @@ -37,7 +37,7 @@ use reth_db_api::{ database::Database, models::{ sharded_key, storage_sharded_key::StorageShardedKey, AccountBeforeTx, BlockNumberAddress, - ShardedKey, StoredBlockBodyIndices, StoredBlockOmmers, StoredBlockWithdrawals, + ShardedKey, StoredBlockBodyIndices, }, table::Table, transaction::{DbTx, DbTxMut}, @@ -52,7 +52,7 @@ use reth_primitives::{ SealedBlockWithSenders, SealedHeader, StaticFileSegment, StorageEntry, TransactionMeta, TransactionSigned, TransactionSignedEcRecovered, TransactionSignedNoHash, }; -use reth_primitives_traits::{BlockBody as _, FullNodePrimitives}; +use reth_primitives_traits::{BlockBody as _, FullNodePrimitives, SignedTransaction}; use reth_prune_types::{PruneCheckpoint, PruneModes, PruneSegment}; use reth_stages_types::{StageCheckpoint, StageId}; use reth_storage_api::{StateProvider, StorageChangeSetReader, TryIntoHistoricalStateProvider}; @@ -73,10 +73,9 @@ use std::{ fmt::Debug, ops::{Deref, DerefMut, Range, RangeBounds, RangeInclusive}, sync::{mpsc, Arc}, - time::{Duration, Instant}, }; use tokio::sync::watch; -use tracing::{debug, error, trace, warn}; +use tracing::{debug, error, trace}; /// A [`DatabaseProvider`] that holds a read-only database transaction. pub type DatabaseProviderRO = DatabaseProvider<::TX, N>; @@ -292,7 +291,7 @@ impl DatabaseProvi /// Inserts an historical block. **Used for setting up test environments** pub fn insert_historical_block( &self, - block: SealedBlockWithSenders, + block: SealedBlockWithSenders::Body>, ) -> ProviderResult { let ttd = if block.number == 0 { block.difficulty @@ -316,7 +315,7 @@ impl DatabaseProvi writer.append_header(block.header.as_ref(), ttd, &block.hash())?; - self.insert_block(block) + self.insert_block(block, StorageLocation::Database) } } @@ -3137,7 +3136,8 @@ impl BlockWriter /// [`TransactionHashNumbers`](tables::TransactionHashNumbers). fn insert_block( &self, - block: SealedBlockWithSenders, + block: SealedBlockWithSenders, + write_transactions_to: StorageLocation, ) -> ProviderResult { let block_number = block.number; @@ -3166,15 +3166,6 @@ impl BlockWriter self.tx.put::(block_number, ttd.into())?; durations_recorder.record_relative(metrics::Action::InsertHeaderTerminalDifficulties); - // insert body ommers data - if !block.body.ommers.is_empty() { - self.tx.put::( - block_number, - StoredBlockOmmers { ommers: block.block.body.ommers }, - )?; - durations_recorder.record_relative(metrics::Action::InsertBlockOmmers); - } - let mut next_tx_num = self .tx .cursor_read::()? 
@@ -3184,84 +3175,28 @@ impl BlockWriter durations_recorder.record_relative(metrics::Action::GetNextTxNum); let first_tx_num = next_tx_num; - let tx_count = block.block.body.transactions.len() as u64; + let tx_count = block.block.body.transactions().len() as u64; // Ensures we have all the senders for the block's transactions. - let mut tx_senders_elapsed = Duration::default(); - let mut transactions_elapsed = Duration::default(); - let mut tx_hash_numbers_elapsed = Duration::default(); - for (transaction, sender) in - block.block.body.transactions.into_iter().zip(block.senders.iter()) + block.block.body.transactions().iter().zip(block.senders.iter()) { - let hash = transaction.hash(); - - if self - .prune_modes - .sender_recovery - .as_ref() - .filter(|prune_mode| prune_mode.is_full()) - .is_none() - { - let start = Instant::now(); - self.tx.put::(next_tx_num, *sender)?; - tx_senders_elapsed += start.elapsed(); - } + let hash = transaction.tx_hash(); - let start = Instant::now(); - self.tx.put::(next_tx_num, transaction.into())?; - let elapsed = start.elapsed(); - if elapsed > Duration::from_secs(1) { - warn!( - target: "providers::db", - ?block_number, - tx_num = %next_tx_num, - hash = %hash, - ?elapsed, - "Transaction insertion took too long" - ); + if self.prune_modes.sender_recovery.as_ref().is_none_or(|m| !m.is_full()) { + self.tx.put::(next_tx_num, *sender)?; } - transactions_elapsed += elapsed; - if self - .prune_modes - .transaction_lookup - .filter(|prune_mode| prune_mode.is_full()) - .is_none() - { - let start = Instant::now(); - self.tx.put::(hash, next_tx_num)?; - tx_hash_numbers_elapsed += start.elapsed(); + if self.prune_modes.transaction_lookup.is_none_or(|m| !m.is_full()) { + self.tx.put::(*hash, next_tx_num)?; } next_tx_num += 1; } - durations_recorder - .record_duration(metrics::Action::InsertTransactionSenders, tx_senders_elapsed); - durations_recorder - .record_duration(metrics::Action::InsertTransactions, transactions_elapsed); - durations_recorder.record_duration( - metrics::Action::InsertTransactionHashNumbers, - tx_hash_numbers_elapsed, - ); - if let Some(withdrawals) = block.block.body.withdrawals { - if !withdrawals.is_empty() { - self.tx.put::( - block_number, - StoredBlockWithdrawals { withdrawals }, - )?; - durations_recorder.record_relative(metrics::Action::InsertBlockWithdrawals); - } - } - - let block_indices = StoredBlockBodyIndices { first_tx_num, tx_count }; - self.tx.put::(block_number, block_indices)?; - durations_recorder.record_relative(metrics::Action::InsertBlockBodyIndices); - - if !block_indices.is_empty() { - self.tx.put::(block_indices.last_tx_num(), block_number)?; - durations_recorder.record_relative(metrics::Action::InsertTransactionBlocks); - } + self.append_block_bodies( + vec![(block_number, Some(block.block.body))], + write_transactions_to, + )?; debug!( target: "providers::db", @@ -3270,33 +3205,83 @@ impl BlockWriter "Inserted block" ); - Ok(block_indices) + Ok(StoredBlockBodyIndices { first_tx_num, tx_count }) } fn append_block_bodies( &self, bodies: Vec<(BlockNumber, Option)>, + write_transactions_to: StorageLocation, ) -> ProviderResult<()> { + let Some(from_block) = bodies.first().map(|(block, _)| *block) else { return Ok(()) }; + + // Initialize writer if we will be writing transactions to staticfiles + let mut tx_static_writer = write_transactions_to + .static_files() + .then(|| { + self.static_file_provider.get_writer(from_block, StaticFileSegment::Transactions) + }) + .transpose()?; + let mut block_indices_cursor = 
self.tx.cursor_write::()?; let mut tx_block_cursor = self.tx.cursor_write::()?; + // Initialize cursor if we will be writing transactions to database + let mut tx_cursor = write_transactions_to + .database() + .then(|| { + self.tx.cursor_write::::Transaction, + >>() + }) + .transpose()?; + // Get id for the next tx_num of zero if there are no transactions. let mut next_tx_num = tx_block_cursor.last()?.map(|(id, _)| id + 1).unwrap_or_default(); for (block_number, body) in &bodies { + // Increment block on static file header. + if let Some(writer) = tx_static_writer.as_mut() { + writer.increment_block(*block_number)?; + } + let tx_count = body.as_ref().map(|b| b.transactions().len() as u64).unwrap_or_default(); let block_indices = StoredBlockBodyIndices { first_tx_num: next_tx_num, tx_count }; + let mut durations_recorder = metrics::DurationsRecorder::default(); + // insert block meta block_indices_cursor.append(*block_number, block_indices)?; - next_tx_num += tx_count; + durations_recorder.record_relative(metrics::Action::InsertBlockBodyIndices); + let Some(body) = body else { continue }; // write transaction block index if !body.transactions().is_empty() { tx_block_cursor.append(block_indices.last_tx_num(), *block_number)?; + durations_recorder.record_relative(metrics::Action::InsertTransactionBlocks); + } + + // write transactions + for transaction in body.transactions() { + if let Some(writer) = tx_static_writer.as_mut() { + writer.append_transaction(next_tx_num, transaction)?; + } + if let Some(cursor) = tx_cursor.as_mut() { + cursor.append(next_tx_num, transaction.clone())?; + } + + // Increment transaction id for each transaction. + next_tx_num += 1; } + + debug!( + target: "providers::db", + ?block_number, + actions = ?durations_recorder.actions, + "Inserted block body" + ); } self.storage.writer().write_block_bodies(self, bodies)?; @@ -3307,7 +3292,7 @@ impl BlockWriter /// TODO(joshie): this fn should be moved to `UnifiedStorageWriter` eventually fn append_blocks_with_state( &self, - blocks: Vec, + blocks: Vec>, execution_outcome: ExecutionOutcome, hashed_state: HashedPostStateSorted, trie_updates: TrieUpdates, @@ -3326,7 +3311,7 @@ impl BlockWriter // Insert the blocks for block in blocks { - self.insert_block(block)?; + self.insert_block(block, StorageLocation::Database)?; durations_recorder.record_relative(metrics::Action::InsertBlock); } diff --git a/crates/storage/provider/src/providers/mod.rs b/crates/storage/provider/src/providers/mod.rs index d049243377e9..d530917909ca 100644 --- a/crates/storage/provider/src/providers/mod.rs +++ b/crates/storage/provider/src/providers/mod.rs @@ -20,6 +20,7 @@ use reth_blockchain_tree_api::{ }; use reth_chain_state::{ChainInfoTracker, ForkChoiceNotifications, ForkChoiceSubscriptions}; use reth_chainspec::{ChainInfo, EthereumHardforks}; +use reth_db::table::Value; use reth_db_api::models::{AccountBeforeTx, StoredBlockBodyIndices}; use reth_evm::ConfigureEvmEnv; use reth_node_types::{FullNodePrimitives, NodeTypes, NodeTypesWithDB}; @@ -75,7 +76,7 @@ where Self: NodeTypes< ChainSpec: EthereumHardforks, Storage: ChainStorage, - Primitives: FullNodePrimitives, + Primitives: FullNodePrimitives, >, { } @@ -84,7 +85,7 @@ impl NodeTypesForProvider for T where T: NodeTypes< ChainSpec: EthereumHardforks, Storage: ChainStorage, - Primitives: FullNodePrimitives, + Primitives: FullNodePrimitives, > { } diff --git a/crates/storage/provider/src/traits/block.rs b/crates/storage/provider/src/traits/block.rs index a0dae1783eae..c84534e7a5da 100644 --- 
a/crates/storage/provider/src/traits/block.rs +++ b/crates/storage/provider/src/traits/block.rs @@ -1,3 +1,4 @@ +use alloy_consensus::Header; use alloy_primitives::BlockNumber; use reth_db_api::models::StoredBlockBodyIndices; use reth_execution_types::{Chain, ExecutionOutcome}; @@ -6,6 +7,29 @@ use reth_storage_errors::provider::ProviderResult; use reth_trie::{updates::TrieUpdates, HashedPostStateSorted}; use std::ops::RangeInclusive; +/// An enum that represents the storage location for a piece of data. +#[derive(Debug, Copy, Clone, PartialEq, Eq)] +pub enum StorageLocation { + /// Write only to static files. + StaticFiles, + /// Write only to the database. + Database, + /// Write to both the database and static files. + Both, +} + +impl StorageLocation { + /// Returns true if the storage location includes static files. + pub const fn static_files(&self) -> bool { + matches!(self, Self::StaticFiles | Self::Both) + } + + /// Returns true if the storage location includes the database. + pub const fn database(&self) -> bool { + matches!(self, Self::Database | Self::Both) + } +} + /// BlockExecution Writer #[auto_impl::auto_impl(&, Arc, Box)] pub trait BlockExecutionWriter: BlockWriter + Send + Sync { @@ -40,8 +64,11 @@ pub trait BlockWriter: Send + Sync { /// /// Return [StoredBlockBodyIndices] that contains indices of the first and last transactions and /// transition in the block. - fn insert_block(&self, block: SealedBlockWithSenders) - -> ProviderResult; + fn insert_block( + &self, + block: SealedBlockWithSenders, + write_transactions_to: StorageLocation, + ) -> ProviderResult; /// Appends a batch of block bodies extending the canonical chain. This is invoked during /// `Bodies` stage and does not write to `TransactionHashNumbers` and `TransactionSenders` @@ -51,6 +78,7 @@ pub trait BlockWriter: Send + Sync { fn append_block_bodies( &self, bodies: Vec<(BlockNumber, Option)>, + write_transactions_to: StorageLocation, ) -> ProviderResult<()>; /// Appends a batch of sealed blocks to the blockchain, including sender information, and @@ -69,7 +97,7 @@ pub trait BlockWriter: Send + Sync { /// Returns `Ok(())` on success, or an error if any operation fails. 
fn append_blocks_with_state( &self, - blocks: Vec, + blocks: Vec>, execution_outcome: ExecutionOutcome, hashed_state: HashedPostStateSorted, trie_updates: TrieUpdates, diff --git a/crates/storage/provider/src/writer/mod.rs b/crates/storage/provider/src/writer/mod.rs index 17dea5a6d51f..3878cf2a9e33 100644 --- a/crates/storage/provider/src/writer/mod.rs +++ b/crates/storage/provider/src/writer/mod.rs @@ -2,7 +2,7 @@ use crate::{ providers::{StaticFileProvider, StaticFileProviderRWRefMut, StaticFileWriter as SfWriter}, writer::static_file::StaticFileWriter, BlockExecutionWriter, BlockWriter, HistoryWriter, StateChangeWriter, StateWriter, - StaticFileProviderFactory, TrieWriter, + StaticFileProviderFactory, StorageLocation, TrieWriter, }; use alloy_consensus::Header; use alloy_primitives::{BlockNumber, B256, U256}; @@ -15,7 +15,7 @@ use reth_db::{ }; use reth_errors::{ProviderError, ProviderResult}; use reth_execution_types::ExecutionOutcome; -use reth_primitives::{SealedBlock, StaticFileSegment, TransactionSignedNoHash}; +use reth_primitives::{BlockBody, SealedBlock, StaticFileSegment}; use reth_stages_types::{StageCheckpoint, StageId}; use reth_storage_api::{ DBProvider, HeaderProvider, ReceiptWriter, StageCheckpointWriter, TransactionsProviderExt, @@ -148,7 +148,7 @@ impl UnifiedStorageWriter<'_, (), ()> { impl UnifiedStorageWriter<'_, ProviderDB, &StaticFileProvider> where ProviderDB: DBProvider - + BlockWriter + + BlockWriter + TransactionsProviderExt + StateChangeWriter + TrieWriter @@ -195,7 +195,7 @@ where for block in blocks { let sealed_block = block.block().clone().try_with_senders_unchecked(block.senders().clone()).unwrap(); - self.database().insert_block(sealed_block)?; + self.database().insert_block(sealed_block, StorageLocation::Both)?; self.save_header_and_transactions(block.block.clone())?; // Write state and changesets to the database. @@ -246,25 +246,8 @@ where .save_stage_checkpoint(StageId::Headers, StageCheckpoint::new(block.number))?; } - { - let transactions_writer = - self.static_file().get_writer(block.number, StaticFileSegment::Transactions)?; - let mut storage_writer = - UnifiedStorageWriter::from(self.database(), transactions_writer); - let no_hash_transactions = block - .body - .transactions - .clone() - .into_iter() - .map(TransactionSignedNoHash::from) - .collect(); - storage_writer.append_transactions_from_blocks( - block.header().number, - std::iter::once(&no_hash_transactions), - )?; - self.database() - .save_stage_checkpoint(StageId::Bodies, StageCheckpoint::new(block.number))?; - } + self.database() + .save_stage_checkpoint(StageId::Bodies, StageCheckpoint::new(block.number))?; Ok(()) } @@ -378,56 +361,6 @@ where Ok(td) } - - /// Appends transactions to static files, using the - /// [`BlockBodyIndices`](tables::BlockBodyIndices) table to determine the transaction number - /// when appending to static files. - /// - /// NOTE: The static file writer used to construct this [`UnifiedStorageWriter`] MUST be a - /// writer for the Transactions segment. 
- pub fn append_transactions_from_blocks( - &mut self, - initial_block_number: BlockNumber, - transactions: impl Iterator, - ) -> ProviderResult<()> - where - T: Borrow>, - { - self.ensure_static_file_segment(StaticFileSegment::Transactions)?; - - let mut bodies_cursor = - self.database().tx_ref().cursor_read::()?; - - let mut last_tx_idx = None; - for (idx, transactions) in transactions.enumerate() { - let block_number = initial_block_number + idx as u64; - - let mut first_tx_index = - bodies_cursor.seek_exact(block_number)?.map(|(_, indices)| indices.first_tx_num()); - - // If there are no indices, that means there have been no transactions - // - // So instead of returning an error, use zero - if block_number == initial_block_number && first_tx_index.is_none() { - first_tx_index = Some(0); - } - - let mut tx_index = first_tx_index - .or(last_tx_idx) - .ok_or(ProviderError::BlockBodyIndicesNotFound(block_number))?; - - for tx in transactions.borrow() { - self.static_file_mut().append_transaction(tx_index, tx)?; - tx_index += 1; - } - - self.static_file_mut().increment_block(block_number)?; - - // update index - last_tx_idx = Some(tx_index); - } - Ok(()) - } } impl From 80268a1ce775cb8a9955a581526bd5bc2d5a999f Mon Sep 17 00:00:00 2001 From: Emilia Hane Date: Thu, 21 Nov 2024 10:30:21 +0100 Subject: [PATCH 074/156] chore(sdk): move `reth_primitives_traits::TxType` into transaction module (#12722) --- crates/primitives-traits/src/lib.rs | 5 +---- crates/primitives-traits/src/transaction/mod.rs | 1 + crates/primitives-traits/src/{ => transaction}/tx_type.rs | 2 ++ 3 files changed, 4 insertions(+), 4 deletions(-) rename crates/primitives-traits/src/{ => transaction}/tx_type.rs (96%) diff --git a/crates/primitives-traits/src/lib.rs b/crates/primitives-traits/src/lib.rs index 79dff4ae36bf..38e83f8ccdfd 100644 --- a/crates/primitives-traits/src/lib.rs +++ b/crates/primitives-traits/src/lib.rs @@ -28,6 +28,7 @@ pub mod transaction; pub use transaction::{ execute::FillTxEnv, signed::{FullSignedTx, SignedTransaction}, + tx_type::{FullTxType, TxType}, FullTransaction, Transaction, TransactionExt, }; @@ -52,10 +53,6 @@ pub use alloy_primitives::{logs_bloom, Log, LogData}; mod storage; pub use storage::StorageEntry; -/// Transaction types -pub mod tx_type; -pub use tx_type::{FullTxType, TxType}; - /// Common header types pub mod header; #[cfg(any(test, feature = "arbitrary", feature = "test-utils"))] diff --git a/crates/primitives-traits/src/transaction/mod.rs b/crates/primitives-traits/src/transaction/mod.rs index 53b772785713..8bd0027a8b21 100644 --- a/crates/primitives-traits/src/transaction/mod.rs +++ b/crates/primitives-traits/src/transaction/mod.rs @@ -2,6 +2,7 @@ pub mod execute; pub mod signed; +pub mod tx_type; use core::{fmt, hash::Hash}; diff --git a/crates/primitives-traits/src/tx_type.rs b/crates/primitives-traits/src/transaction/tx_type.rs similarity index 96% rename from crates/primitives-traits/src/tx_type.rs rename to crates/primitives-traits/src/transaction/tx_type.rs index d9ef687759e7..dc3dba7fdcf9 100644 --- a/crates/primitives-traits/src/tx_type.rs +++ b/crates/primitives-traits/src/transaction/tx_type.rs @@ -1,3 +1,5 @@ +//! Abstraction of transaction envelope type ID. 
+ use core::fmt; use alloy_primitives::{U64, U8}; From 33730536f5e5de1403f2deb3c7dbe551879b9724 Mon Sep 17 00:00:00 2001 From: Ivan Date: Thu, 21 Nov 2024 12:35:32 +0300 Subject: [PATCH 075/156] Do not print whole block in `debug!` logs (#12729) --- crates/ethereum/payload/src/lib.rs | 2 +- crates/optimism/payload/src/builder.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/ethereum/payload/src/lib.rs b/crates/ethereum/payload/src/lib.rs index 4ec1e212c8d5..80f6786c404f 100644 --- a/crates/ethereum/payload/src/lib.rs +++ b/crates/ethereum/payload/src/lib.rs @@ -447,7 +447,7 @@ where }; let sealed_block = Arc::new(block.seal_slow()); - debug!(target: "payload_builder", ?sealed_block, "sealed built block"); + debug!(target: "payload_builder", sealed_block_header = ?sealed_block.header, "sealed built block"); // create the executed block data let executed = ExecutedBlock { diff --git a/crates/optimism/payload/src/builder.rs b/crates/optimism/payload/src/builder.rs index 3644d8f71a54..1050a55eb6e9 100644 --- a/crates/optimism/payload/src/builder.rs +++ b/crates/optimism/payload/src/builder.rs @@ -424,7 +424,7 @@ where }; let sealed_block = Arc::new(block.seal_slow()); - debug!(target: "payload_builder", ?sealed_block, "sealed built block"); + debug!(target: "payload_builder", sealed_block_header = ?sealed_block.header, "sealed built block"); // create the executed block data let executed = ExecutedBlock { From 1b874dcc6c352c05b5e2f1085001a090b8f03448 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Thu, 21 Nov 2024 11:14:31 +0100 Subject: [PATCH 076/156] feat: use broadcast tx generic (#12733) --- crates/net/eth-wire-types/src/message.rs | 8 ++++++-- crates/net/network/src/message.rs | 4 ++-- crates/net/network/src/network.rs | 5 ++--- crates/net/network/src/transactions/mod.rs | 2 +- 4 files changed, 11 insertions(+), 8 deletions(-) diff --git a/crates/net/eth-wire-types/src/message.rs b/crates/net/eth-wire-types/src/message.rs index 3d34b8cae80c..9a8667203108 100644 --- a/crates/net/eth-wire-types/src/message.rs +++ b/crates/net/eth-wire-types/src/message.rs @@ -182,7 +182,11 @@ pub enum EthMessage { )] NewBlock(Box>), /// Represents a Transactions message broadcast to the network. - Transactions(Transactions), + #[cfg_attr( + feature = "serde", + serde(bound = "N::BroadcastedTransaction: serde::Serialize + serde::de::DeserializeOwned") + )] + Transactions(Transactions), /// Represents a `NewPooledTransactionHashes` message for eth/66 version. NewPooledTransactionHashes66(NewPooledTransactionHashes66), /// Represents a `NewPooledTransactionHashes` message for eth/68 version. @@ -302,7 +306,7 @@ pub enum EthBroadcastMessage { /// Represents a new block broadcast message. NewBlock(Arc>), /// Represents a transactions broadcast message. - Transactions(SharedTransactions), + Transactions(SharedTransactions), } // === impl EthBroadcastMessage === diff --git a/crates/net/network/src/message.rs b/crates/net/network/src/message.rs index c2511f4e16a8..199498b0b4c1 100644 --- a/crates/net/network/src/message.rs +++ b/crates/net/network/src/message.rs @@ -48,9 +48,9 @@ pub enum PeerMessage { /// Broadcast new block. NewBlock(NewBlockMessage), /// Received transactions _from_ the peer - ReceivedTransaction(Transactions), + ReceivedTransaction(Transactions), /// Broadcast transactions _from_ local _to_ a peer. 
- SendTransactions(SharedTransactions), + SendTransactions(SharedTransactions), /// Send new pooled transactions PooledTransactions(NewPooledTransactionHashes), /// All `eth` request variants. diff --git a/crates/net/network/src/network.rs b/crates/net/network/src/network.rs index 496b4250ffd5..eadeccb15493 100644 --- a/crates/net/network/src/network.rs +++ b/crates/net/network/src/network.rs @@ -21,7 +21,6 @@ use reth_network_api::{ use reth_network_p2p::sync::{NetworkSyncUpdater, SyncState, SyncStateProvider}; use reth_network_peers::{NodeRecord, PeerId}; use reth_network_types::{PeerAddr, PeerKind, Reputation, ReputationChangeKind}; -use reth_primitives::TransactionSigned; use reth_tokio_util::{EventSender, EventStream}; use secp256k1::SecretKey; use std::{ @@ -130,7 +129,7 @@ impl NetworkHandle { } /// Send full transactions to the peer - pub fn send_transactions(&self, peer_id: PeerId, msg: Vec>) { + pub fn send_transactions(&self, peer_id: PeerId, msg: Vec>) { self.send_message(NetworkHandleMessage::SendTransaction { peer_id, msg: SharedTransactions(msg), @@ -466,7 +465,7 @@ pub(crate) enum NetworkHandleMessage, }, /// Sends a list of transaction hashes to the given peer. SendPooledTransactionHashes { diff --git a/crates/net/network/src/transactions/mod.rs b/crates/net/network/src/transactions/mod.rs index f7a6fb8805e6..c3ffea58d01d 100644 --- a/crates/net/network/src/transactions/mod.rs +++ b/crates/net/network/src/transactions/mod.rs @@ -1768,7 +1768,7 @@ pub enum NetworkTransactionEvent { /// The ID of the peer from which the transactions were received. peer_id: PeerId, /// The received transactions. - msg: Transactions, + msg: Transactions, }, /// Represents the event of receiving a list of transaction hashes from a peer. IncomingPooledTransactionHashes { From b9169399f303f3d1b50fe467e4ee9a6553a5d2ea Mon Sep 17 00:00:00 2001 From: Jun Song <87601811+syjn99@users.noreply.github.com> Date: Thu, 21 Nov 2024 19:22:42 +0900 Subject: [PATCH 077/156] chore: add pretty printing for pruned segment info (#12710) --- crates/node/events/src/node.rs | 6 +++++- crates/prune/types/src/pruner.rs | 10 +++++++--- 2 files changed, 12 insertions(+), 4 deletions(-) diff --git a/crates/node/events/src/node.rs b/crates/node/events/src/node.rs index 285e28d0f2e2..d7f78ba1f7e1 100644 --- a/crates/node/events/src/node.rs +++ b/crates/node/events/src/node.rs @@ -307,7 +307,11 @@ impl NodeState { info!(tip_block_number, "Pruner started"); } PrunerEvent::Finished { tip_block_number, elapsed, stats } => { - info!(tip_block_number, ?elapsed, ?stats, "Pruner finished"); + let stats = format!( + "[{}]", + stats.iter().map(|item| item.to_string()).collect::>().join(", ") + ); + info!(tip_block_number, ?elapsed, %stats, "Pruner finished"); } } } diff --git a/crates/prune/types/src/pruner.rs b/crates/prune/types/src/pruner.rs index 3046dda06790..fb9079257298 100644 --- a/crates/prune/types/src/pruner.rs +++ b/crates/prune/types/src/pruner.rs @@ -1,5 +1,6 @@ use crate::{PruneCheckpoint, PruneLimiter, PruneMode, PruneSegment}; use alloy_primitives::{BlockNumber, TxNumber}; +use derive_more::Display; /// Pruner run output. #[derive(Debug)] @@ -17,7 +18,8 @@ impl From for PrunerOutput { } /// Represents information of a pruner run for a segment. 
-#[derive(Debug, Clone, PartialEq, Eq)] +#[derive(Debug, Clone, PartialEq, Eq, Display)] +#[display("(table={segment}, pruned={pruned}, status={progress})")] pub struct PrunedSegmentInfo { /// The pruned segment pub segment: PruneSegment, @@ -77,16 +79,18 @@ impl SegmentOutputCheckpoint { } /// Progress of pruning. -#[derive(Debug, PartialEq, Eq, Clone, Copy)] +#[derive(Debug, PartialEq, Eq, Clone, Copy, Display)] pub enum PruneProgress { /// There is more data to prune. + #[display("HasMoreData({_0})")] HasMoreData(PruneInterruptReason), /// Pruning has been finished. + #[display("Finished")] Finished, } /// Reason for interrupting a prune run. -#[derive(Debug, PartialEq, Eq, Clone, Copy)] +#[derive(Debug, PartialEq, Eq, Clone, Copy, Display)] pub enum PruneInterruptReason { /// Prune run timed out. Timeout, From c2e6938606863e5a6f2fe5fb2ef7d993a1a1c69c Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Thu, 21 Nov 2024 11:41:34 +0100 Subject: [PATCH 078/156] chore: move prune event type (#12732) --- Cargo.lock | 2 +- crates/node/events/Cargo.toml | 2 +- crates/node/events/src/node.rs | 2 +- crates/prune/prune/src/event.rs | 12 ------------ crates/prune/prune/src/lib.rs | 2 -- crates/prune/types/src/event.rs | 22 ++++++++++++++++++++++ crates/prune/types/src/lib.rs | 2 ++ 7 files changed, 27 insertions(+), 17 deletions(-) delete mode 100644 crates/prune/prune/src/event.rs create mode 100644 crates/prune/types/src/event.rs diff --git a/Cargo.lock b/Cargo.lock index 017b84f6e49d..02302e14bf73 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8122,7 +8122,7 @@ dependencies = [ "reth-engine-primitives", "reth-network-api", "reth-primitives-traits", - "reth-prune", + "reth-prune-types", "reth-stages", "reth-static-file-types", "reth-storage-api", diff --git a/crates/node/events/Cargo.toml b/crates/node/events/Cargo.toml index 4b4d912a27b8..03f3ab172883 100644 --- a/crates/node/events/Cargo.toml +++ b/crates/node/events/Cargo.toml @@ -16,7 +16,7 @@ reth-storage-api.workspace = true reth-beacon-consensus.workspace = true reth-network-api.workspace = true reth-stages.workspace = true -reth-prune.workspace = true +reth-prune-types.workspace = true reth-static-file-types.workspace = true reth-primitives-traits.workspace = true reth-engine-primitives.workspace = true diff --git a/crates/node/events/src/node.rs b/crates/node/events/src/node.rs index d7f78ba1f7e1..4528bdeaa94c 100644 --- a/crates/node/events/src/node.rs +++ b/crates/node/events/src/node.rs @@ -9,7 +9,7 @@ use reth_beacon_consensus::{BeaconConsensusEngineEvent, ConsensusEngineLiveSyncP use reth_engine_primitives::ForkchoiceStatus; use reth_network_api::{NetworkEvent, PeersInfo}; use reth_primitives_traits::{format_gas, format_gas_throughput}; -use reth_prune::PrunerEvent; +use reth_prune_types::PrunerEvent; use reth_stages::{EntitiesCheckpoint, ExecOutput, PipelineEvent, StageCheckpoint, StageId}; use reth_static_file_types::StaticFileProducerEvent; use std::{ diff --git a/crates/prune/prune/src/event.rs b/crates/prune/prune/src/event.rs deleted file mode 100644 index 4f5806e592ee..000000000000 --- a/crates/prune/prune/src/event.rs +++ /dev/null @@ -1,12 +0,0 @@ -use alloy_primitives::BlockNumber; -use reth_prune_types::PrunedSegmentInfo; -use std::time::Duration; - -/// An event emitted by a [Pruner][crate::Pruner]. -#[derive(Debug, PartialEq, Eq, Clone)] -pub enum PrunerEvent { - /// Emitted when pruner started running. - Started { tip_block_number: BlockNumber }, - /// Emitted when pruner finished running. 
- Finished { tip_block_number: BlockNumber, elapsed: Duration, stats: Vec }, -} diff --git a/crates/prune/prune/src/lib.rs b/crates/prune/prune/src/lib.rs index 5a43afeb5026..e6bcbe5e8121 100644 --- a/crates/prune/prune/src/lib.rs +++ b/crates/prune/prune/src/lib.rs @@ -12,7 +12,6 @@ mod builder; mod db_ext; mod error; -mod event; mod metrics; mod pruner; pub mod segments; @@ -20,7 +19,6 @@ pub mod segments; use crate::metrics::Metrics; pub use builder::PrunerBuilder; pub use error::PrunerError; -pub use event::PrunerEvent; pub use pruner::{Pruner, PrunerResult, PrunerWithFactory, PrunerWithResult}; // Re-export prune types diff --git a/crates/prune/types/src/event.rs b/crates/prune/types/src/event.rs new file mode 100644 index 000000000000..bac5f0d512cc --- /dev/null +++ b/crates/prune/types/src/event.rs @@ -0,0 +1,22 @@ +use crate::PrunedSegmentInfo; +use alloy_primitives::BlockNumber; +use std::time::Duration; + +/// An event emitted by a pruner. +#[derive(Debug, PartialEq, Eq, Clone)] +pub enum PrunerEvent { + /// Emitted when pruner started running. + Started { + /// The tip block number before pruning. + tip_block_number: BlockNumber, + }, + /// Emitted when pruner finished running. + Finished { + /// The tip block number before pruning. + tip_block_number: BlockNumber, + /// The elapsed time for the pruning process. + elapsed: Duration, + /// Collected pruning stats. + stats: Vec, + }, +} diff --git a/crates/prune/types/src/lib.rs b/crates/prune/types/src/lib.rs index 0722e760fafb..82a41f0c2b15 100644 --- a/crates/prune/types/src/lib.rs +++ b/crates/prune/types/src/lib.rs @@ -9,6 +9,7 @@ #![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] mod checkpoint; +mod event; mod limiter; mod mode; mod pruner; @@ -16,6 +17,7 @@ mod segment; mod target; pub use checkpoint::PruneCheckpoint; +pub use event::PrunerEvent; pub use limiter::PruneLimiter; pub use mode::PruneMode; pub use pruner::{ From d00920c42102635b27276b1b767449f74af61975 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Thu, 21 Nov 2024 12:08:07 +0100 Subject: [PATCH 079/156] chore: relax tryfrom error (#12735) --- crates/net/eth-wire-types/src/transactions.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/net/eth-wire-types/src/transactions.rs b/crates/net/eth-wire-types/src/transactions.rs index 8db96c10042a..26f62b7f76a1 100644 --- a/crates/net/eth-wire-types/src/transactions.rs +++ b/crates/net/eth-wire-types/src/transactions.rs @@ -5,7 +5,7 @@ use alloy_primitives::B256; use alloy_rlp::{RlpDecodableWrapper, RlpEncodableWrapper}; use derive_more::{Constructor, Deref, IntoIterator}; use reth_codecs_derive::add_arbitrary_tests; -use reth_primitives::{transaction::TransactionConversionError, PooledTransactionsElement}; +use reth_primitives::PooledTransactionsElement; /// A list of transaction hashes that the peer would like transaction bodies for. 
#[derive(Clone, Debug, PartialEq, Eq, RlpEncodableWrapper, RlpDecodableWrapper, Default)] @@ -60,9 +60,9 @@ impl PooledTransactions { impl TryFrom> for PooledTransactions where - T: TryFrom, + T: TryFrom, { - type Error = TransactionConversionError; + type Error = T::Error; fn try_from(txs: Vec) -> Result { txs.into_iter().map(T::try_from).collect() From ac1867b63113f98ff6c392efd465c4ae6e13998a Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Thu, 21 Nov 2024 12:22:35 +0100 Subject: [PATCH 080/156] feat: add a way to convert consensus to pooled variant (#12734) --- crates/transaction-pool/src/test_utils/mock.rs | 6 ++++++ crates/transaction-pool/src/traits.rs | 11 +++++++++++ 2 files changed, 17 insertions(+) diff --git a/crates/transaction-pool/src/test_utils/mock.rs b/crates/transaction-pool/src/test_utils/mock.rs index 72304910e15d..344781b1f583 100644 --- a/crates/transaction-pool/src/test_utils/mock.rs +++ b/crates/transaction-pool/src/test_utils/mock.rs @@ -608,6 +608,12 @@ impl PoolTransaction for MockTransaction { pooled.into() } + fn try_consensus_into_pooled( + tx: Self::Consensus, + ) -> Result { + Self::Pooled::try_from(tx).map_err(|_| TryFromRecoveredTransactionError::BlobSidecarMissing) + } + fn hash(&self) -> &TxHash { self.get_hash() } diff --git a/crates/transaction-pool/src/traits.rs b/crates/transaction-pool/src/traits.rs index 23f28cc3fa73..bcde571b07b9 100644 --- a/crates/transaction-pool/src/traits.rs +++ b/crates/transaction-pool/src/traits.rs @@ -946,6 +946,11 @@ pub trait PoolTransaction: fmt::Debug + Send + Sync + Clone { pooled.into() } + /// Tries to convert the `Consensus` type into the `Pooled` type. + fn try_consensus_into_pooled( + tx: Self::Consensus, + ) -> Result; + /// Hash of the transaction. fn hash(&self) -> &TxHash; @@ -1207,6 +1212,12 @@ impl PoolTransaction for EthPooledTransaction { type Pooled = PooledTransactionsElementEcRecovered; + fn try_consensus_into_pooled( + tx: Self::Consensus, + ) -> Result { + Self::Pooled::try_from(tx).map_err(|_| TryFromRecoveredTransactionError::BlobSidecarMissing) + } + /// Returns hash of the transaction. 
fn hash(&self) -> &TxHash { self.transaction.hash_ref() From 42aea7b9f688c29da1012fa0c3738df65c840f4c Mon Sep 17 00:00:00 2001 From: Alexey Shekhirin Date: Thu, 21 Nov 2024 11:49:42 +0000 Subject: [PATCH 081/156] feat(trie): retain branch nodes in sparse trie (#12291) --- Cargo.lock | 8 +- Cargo.toml | 4 +- crates/trie/sparse/Cargo.toml | 2 + crates/trie/sparse/src/trie.rs | 646 ++++++++++++++++++++++++--------- 4 files changed, 493 insertions(+), 167 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 02302e14bf73..10b7f2fbda53 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4590,7 +4590,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4" dependencies = [ "cfg-if", - "windows-targets 0.52.6", + "windows-targets 0.48.5", ] [[package]] @@ -9441,7 +9441,9 @@ dependencies = [ "itertools 0.13.0", "pretty_assertions", "proptest", + "proptest-arbitrary-interop", "rand 0.8.5", + "reth-primitives-traits", "reth-testing-utils", "reth-tracing", "reth-trie", @@ -11181,7 +11183,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3637e734239e12ab152cd269302500bd063f37624ee210cd04b4936ed671f3b1" dependencies = [ "cc", - "windows-targets 0.52.6", + "windows-targets 0.48.5", ] [[package]] @@ -11672,7 +11674,7 @@ version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.48.0", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index e4ca1b7bc283..702bbc3090bc 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -638,7 +638,7 @@ tracy-client = "0.17.3" #alloy-transport-ipc = { git = "https://github.com/alloy-rs/alloy", rev = "a971b3a" } #alloy-transport-ws = { git = "https://github.com/alloy-rs/alloy", rev = "a971b3a" } +#op-alloy-consensus = { git = "https://github.com/alloy-rs/op-alloy", rev = "6a042e7681b1" } +#op-alloy-network = { git = "https://github.com/alloy-rs/op-alloy", rev = "6a042e7681b1" } #op-alloy-rpc-types = { git = "https://github.com/alloy-rs/op-alloy", rev = "6a042e7681b1" } #op-alloy-rpc-types-engine = { git = "https://github.com/alloy-rs/op-alloy", rev = "6a042e7681b1" } -#op-alloy-network = { git = "https://github.com/alloy-rs/op-alloy", rev = "6a042e7681b1" } -#op-alloy-consensus = { git = "https://github.com/alloy-rs/op-alloy", rev = "6a042e7681b1" } diff --git a/crates/trie/sparse/Cargo.toml b/crates/trie/sparse/Cargo.toml index 1c5bb7d8a33e..3301975961e2 100644 --- a/crates/trie/sparse/Cargo.toml +++ b/crates/trie/sparse/Cargo.toml @@ -27,6 +27,7 @@ smallvec = { workspace = true, features = ["const_new"] } thiserror.workspace = true [dev-dependencies] +reth-primitives-traits = { workspace = true, features = ["arbitrary"] } reth-testing-utils.workspace = true reth-trie = { workspace = true, features = ["test-utils"] } reth-trie-common = { workspace = true, features = ["test-utils", "arbitrary"] } @@ -35,6 +36,7 @@ assert_matches.workspace = true criterion.workspace = true itertools.workspace = true pretty_assertions = "1.4" +proptest-arbitrary-interop.workspace = true proptest.workspace = true rand.workspace = true diff --git a/crates/trie/sparse/src/trie.rs b/crates/trie/sparse/src/trie.rs index 696934d3edb9..dff290271759 100644 --- a/crates/trie/sparse/src/trie.rs +++ b/crates/trie/sparse/src/trie.rs @@ -1,17 +1,21 @@ use crate::{SparseTrieError, SparseTrieResult}; -use alloy_primitives::{hex, keccak256, 
map::HashMap, B256}; +use alloy_primitives::{ + hex, keccak256, + map::{HashMap, HashSet}, + B256, +}; use alloy_rlp::Decodable; use reth_tracing::tracing::debug; use reth_trie::{ prefix_set::{PrefixSet, PrefixSetMut}, - RlpNode, + BranchNodeCompact, RlpNode, }; use reth_trie_common::{ BranchNodeRef, ExtensionNodeRef, LeafNodeRef, Nibbles, TrieMask, TrieNode, CHILD_INDEX_RANGE, EMPTY_ROOT_HASH, }; use smallvec::SmallVec; -use std::fmt; +use std::{borrow::Cow, fmt}; /// Inner representation of the sparse trie. /// Sparse trie is blind by default until nodes are revealed. @@ -21,13 +25,13 @@ pub enum SparseTrie { #[default] Blind, /// The trie nodes have been revealed. - Revealed(RevealedSparseTrie), + Revealed(Box), } impl SparseTrie { /// Creates new revealed empty trie. pub fn revealed_empty() -> Self { - Self::Revealed(RevealedSparseTrie::default()) + Self::Revealed(Box::default()) } /// Returns `true` if the sparse trie has no revealed nodes. @@ -51,7 +55,7 @@ impl SparseTrie { /// Mutable reference to [`RevealedSparseTrie`]. pub fn reveal_root(&mut self, root: TrieNode) -> SparseTrieResult<&mut RevealedSparseTrie> { if self.is_blind() { - *self = Self::Revealed(RevealedSparseTrie::from_root(root)?) + *self = Self::Revealed(Box::new(RevealedSparseTrie::from_root(root)?)) } Ok(self.as_revealed_mut().unwrap()) } @@ -87,6 +91,7 @@ pub struct RevealedSparseTrie { prefix_set: PrefixSetMut, /// Reusable buffer for RLP encoding of nodes. rlp_buf: Vec, + updates: Option, } impl fmt::Debug for RevealedSparseTrie { @@ -96,6 +101,7 @@ impl fmt::Debug for RevealedSparseTrie { .field("values", &self.values) .field("prefix_set", &self.prefix_set) .field("rlp_buf", &hex::encode(&self.rlp_buf)) + .field("updates", &self.updates) .finish() } } @@ -107,6 +113,7 @@ impl Default for RevealedSparseTrie { values: HashMap::default(), prefix_set: PrefixSetMut::default(), rlp_buf: Vec::new(), + updates: None, } } } @@ -119,11 +126,30 @@ impl RevealedSparseTrie { values: HashMap::default(), prefix_set: PrefixSetMut::default(), rlp_buf: Vec::new(), + updates: None, }; this.reveal_node(Nibbles::default(), node)?; Ok(this) } + /// Makes the sparse trie to store updated branch nodes. + pub fn with_updates(mut self, retain_updates: bool) -> Self { + if retain_updates { + self.updates = Some(SparseTrieUpdates::default()); + } + self + } + + /// Returns a reference to the retained sparse node updates without taking them. + pub fn updates_ref(&self) -> Cow<'_, SparseTrieUpdates> { + self.updates.as_ref().map_or(Cow::Owned(SparseTrieUpdates::default()), Cow::Borrowed) + } + + /// Takes and returns the retained sparse node updates + pub fn take_updates(&mut self) -> SparseTrieUpdates { + self.updates.take().unwrap_or_default() + } + /// Reveal the trie node only if it was not known already. pub fn reveal_node(&mut self, path: Nibbles, node: TrieNode) -> SparseTrieResult<()> { // TODO: revise all inserts to not overwrite existing entries @@ -146,10 +172,7 @@ impl RevealedSparseTrie { match self.nodes.get(&path) { // Blinded and non-existent nodes can be replaced. Some(SparseNode::Hash(_)) | None => { - self.nodes.insert( - path, - SparseNode::Branch { state_mask: branch.state_mask, hash: None }, - ); + self.nodes.insert(path, SparseNode::new_branch(branch.state_mask)); } // Branch node already exists, or an extension node was placed where a // branch node was before. 
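The retained-updates API introduced above is exercised by the test module later in this patch; a minimal sketch of the flow, with `key` and `account_rlp` standing in for a hashed path (`Nibbles`) and an RLP-encoded account, looks like:

    // Hedged sketch mirroring the test usage added later in this patch.
    let mut sparse = RevealedSparseTrie::default().with_updates(true);
    sparse.update_leaf(key, account_rlp).unwrap();
    let root = sparse.root();
    assert_ne!(root, EMPTY_ROOT_HASH);
    // Branch-node additions and removals accumulated since construction.
    let updates = sparse.take_updates();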
@@ -165,7 +188,7 @@ impl RevealedSparseTrie { let mut child_path = path.clone(); child_path.extend_from_slice_unchecked(&ext.key); self.reveal_node_or_hash(child_path, &ext.child)?; - self.nodes.insert(path, SparseNode::Extension { key: ext.key, hash: None }); + self.nodes.insert(path, SparseNode::new_ext(ext.key)); } // Extension node already exists, or an extension node was placed where a branch // node was before. @@ -390,7 +413,7 @@ impl RevealedSparseTrie { SparseNode::Branch { .. } => removed_node.node, } } - SparseNode::Branch { mut state_mask, hash: _ } => { + SparseNode::Branch { mut state_mask, hash: _, store_in_db_trie: _ } => { // If the node is a branch node, we need to check the number of children left // after deleting the child at the given nibble. @@ -452,6 +475,10 @@ impl RevealedSparseTrie { self.nodes.remove(&child_path); } + if let Some(updates) = self.updates.as_mut() { + updates.removed_nodes.insert(removed_path.clone()); + } + new_node } // If more than one child is left set in the branch, we just re-insert it @@ -558,11 +585,11 @@ impl RevealedSparseTrie { pub fn root(&mut self) -> B256 { // take the current prefix set. let mut prefix_set = std::mem::take(&mut self.prefix_set).freeze(); - let root_rlp = self.rlp_node_allocate(Nibbles::default(), &mut prefix_set); - if let Some(root_hash) = root_rlp.as_hash() { + let rlp_node = self.rlp_node_allocate(Nibbles::default(), &mut prefix_set); + if let Some(root_hash) = rlp_node.as_hash() { root_hash } else { - keccak256(root_rlp) + keccak256(rlp_node) } } @@ -608,7 +635,7 @@ impl RevealedSparseTrie { paths.push((path, level + 1)); } } - SparseNode::Branch { state_mask, hash } => { + SparseNode::Branch { state_mask, hash, .. } => { if hash.is_some() && !prefix_set.contains(&path) { continue } @@ -644,48 +671,70 @@ impl RevealedSparseTrie { let mut prefix_set_contains = |path: &Nibbles| *is_in_prefix_set.get_or_insert_with(|| prefix_set.contains(path)); - let rlp_node = match self.nodes.get_mut(&path).unwrap() { - SparseNode::Empty => RlpNode::word_rlp(&EMPTY_ROOT_HASH), - SparseNode::Hash(hash) => RlpNode::word_rlp(hash), + let (rlp_node, calculated, node_type) = match self.nodes.get_mut(&path).unwrap() { + SparseNode::Empty => { + (RlpNode::word_rlp(&EMPTY_ROOT_HASH), false, SparseNodeType::Empty) + } + SparseNode::Hash(hash) => (RlpNode::word_rlp(hash), false, SparseNodeType::Hash), SparseNode::Leaf { key, hash } => { self.rlp_buf.clear(); let mut path = path.clone(); path.extend_from_slice_unchecked(key); if let Some(hash) = hash.filter(|_| !prefix_set_contains(&path)) { - RlpNode::word_rlp(&hash) + (RlpNode::word_rlp(&hash), false, SparseNodeType::Leaf) } else { let value = self.values.get(&path).unwrap(); let rlp_node = LeafNodeRef { key, value }.rlp(&mut self.rlp_buf); *hash = rlp_node.as_hash(); - rlp_node + (rlp_node, true, SparseNodeType::Leaf) } } SparseNode::Extension { key, hash } => { let mut child_path = path.clone(); child_path.extend_from_slice_unchecked(key); if let Some(hash) = hash.filter(|_| !prefix_set_contains(&path)) { - RlpNode::word_rlp(&hash) + ( + RlpNode::word_rlp(&hash), + false, + SparseNodeType::Extension { store_in_db_trie: true }, + ) } else if buffers.rlp_node_stack.last().is_some_and(|e| e.0 == child_path) { - let (_, child) = buffers.rlp_node_stack.pop().unwrap(); + let (_, child, _, node_type) = buffers.rlp_node_stack.pop().unwrap(); self.rlp_buf.clear(); let rlp_node = ExtensionNodeRef::new(key, &child).rlp(&mut self.rlp_buf); *hash = rlp_node.as_hash(); - rlp_node + + ( + rlp_node, + 
true, + SparseNodeType::Extension { + // Inherit the `store_in_db_trie` flag from the child node, which is + // always the branch node + store_in_db_trie: node_type.store_in_db_trie(), + }, + ) } else { // need to get rlp node for child first buffers.path_stack.extend([(path, is_in_prefix_set), (child_path, None)]); continue } } - SparseNode::Branch { state_mask, hash } => { - if let Some(hash) = hash.filter(|_| !prefix_set_contains(&path)) { - buffers.rlp_node_stack.push((path, RlpNode::word_rlp(&hash))); + SparseNode::Branch { state_mask, hash, store_in_db_trie } => { + if let Some((hash, store_in_db_trie)) = + hash.zip(*store_in_db_trie).filter(|_| !prefix_set_contains(&path)) + { + buffers.rlp_node_stack.push(( + path, + RlpNode::word_rlp(&hash), + false, + SparseNodeType::Branch { store_in_db_trie }, + )); continue } buffers.branch_child_buf.clear(); // Walk children in a reverse order from `f` to `0`, so we pop the `0` first - // from the stack. + // from the stack and keep walking in the sorted order. for bit in CHILD_INDEX_RANGE.rev() { if state_mask.is_bit_set(bit) { let mut child = path.clone(); @@ -698,11 +747,43 @@ impl RevealedSparseTrie { .branch_value_stack_buf .resize(buffers.branch_child_buf.len(), Default::default()); let mut added_children = false; + + // TODO(alexey): set the `TrieMask` bits directly + let mut tree_mask_values = Vec::new(); + let mut hash_mask_values = Vec::new(); + let mut hashes = Vec::new(); for (i, child_path) in buffers.branch_child_buf.iter().enumerate() { if buffers.rlp_node_stack.last().is_some_and(|e| &e.0 == child_path) { - let (_, child) = buffers.rlp_node_stack.pop().unwrap(); - // Insert children in the resulting buffer in a normal order, because - // initially we iterated in reverse. + let (_, child, calculated, node_type) = + buffers.rlp_node_stack.pop().unwrap(); + + // Update the masks only if we need to retain trie updates + if self.updates.is_some() { + // Set the trie mask + if node_type.store_in_db_trie() { + // A branch or an extension node explicitly set the + // `store_in_db_trie` flag + tree_mask_values.push(true); + } else { + // Set the flag according to whether a child node was + // pre-calculated + // (`calculated = false`), meaning that it wasn't in the + // database + tree_mask_values.push(!calculated); + } + + // Set the hash mask. If a child node has a hash value AND is a + // branch node, set the hash mask + // and save the hash. + let hash = child.as_hash().filter(|_| node_type.is_branch()); + hash_mask_values.push(hash.is_some()); + if let Some(hash) = hash { + hashes.push(hash); + } + } + + // Insert children in the resulting buffer in a normal order, + // because initially we iterated in reverse. 
buffers.branch_value_stack_buf [buffers.branch_child_buf.len() - i - 1] = child; added_children = true; @@ -717,21 +798,101 @@ impl RevealedSparseTrie { } self.rlp_buf.clear(); - let rlp_node = BranchNodeRef::new(&buffers.branch_value_stack_buf, *state_mask) - .rlp(&mut self.rlp_buf); + let branch_node_ref = + BranchNodeRef::new(&buffers.branch_value_stack_buf, *state_mask); + let rlp_node = branch_node_ref.rlp(&mut self.rlp_buf); *hash = rlp_node.as_hash(); - rlp_node + + let store_in_db_trie_value = if let Some(updates) = self.updates.as_mut() { + let mut tree_mask_values = tree_mask_values.into_iter().rev(); + let mut hash_mask_values = hash_mask_values.into_iter().rev(); + let mut tree_mask = TrieMask::default(); + let mut hash_mask = TrieMask::default(); + for (i, child) in branch_node_ref.children() { + if child.is_some() { + if hash_mask_values.next().unwrap() { + hash_mask.set_bit(i); + } + if tree_mask_values.next().unwrap() { + tree_mask.set_bit(i); + } + } + } + + // Store in DB trie if there are either any children that are stored in the + // DB trie, or any children represent hashed values + let store_in_db_trie = !tree_mask.is_empty() || !hash_mask.is_empty(); + if store_in_db_trie { + hashes.reverse(); + let branch_node = BranchNodeCompact::new( + *state_mask, + tree_mask, + hash_mask, + hashes, + hash.filter(|_| path.len() == 0), + ); + updates.updated_nodes.insert(path.clone(), branch_node); + } + + store_in_db_trie + } else { + false + }; + *store_in_db_trie = Some(store_in_db_trie_value); + + ( + rlp_node, + true, + SparseNodeType::Branch { store_in_db_trie: store_in_db_trie_value }, + ) } }; - buffers.rlp_node_stack.push((path, rlp_node)); + buffers.rlp_node_stack.push((path, rlp_node, calculated, node_type)); } + debug_assert_eq!(buffers.rlp_node_stack.len(), 1); buffers.rlp_node_stack.pop().unwrap().1 } } +/// Enum representing sparse trie node type. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +enum SparseNodeType { + /// Empty trie node. + Empty, + /// The hash of the node that was not revealed. + Hash, + /// Sparse leaf node. + Leaf, + /// Sparse extension node. + Extension { + /// A flag indicating whether the extension node should be stored in the database. + store_in_db_trie: bool, + }, + /// Sparse branch node. + Branch { + /// A flag indicating whether the branch node should be stored in the database. + store_in_db_trie: bool, + }, +} + +impl SparseNodeType { + const fn is_branch(&self) -> bool { + matches!(self, Self::Branch { .. }) + } + + const fn store_in_db_trie(&self) -> bool { + match *self { + Self::Extension { store_in_db_trie } | Self::Branch { store_in_db_trie } => { + store_in_db_trie + } + _ => false, + } + } +} + /// Enum representing trie nodes in sparse trie. -#[derive(PartialEq, Eq, Clone, Debug)] +#[derive(Debug, Clone, PartialEq, Eq)] pub enum SparseNode { /// Empty trie node. Empty, @@ -760,6 +921,9 @@ pub enum SparseNode { /// Pre-computed hash of the sparse node. /// Can be reused unless this trie path has been updated. hash: Option, + /// Pre-computed flag indicating whether the trie node should be stored in the database. + /// Can be reused unless this trie path has been updated. + store_in_db_trie: Option, }, } @@ -776,7 +940,7 @@ impl SparseNode { /// Create new [`SparseNode::Branch`] from state mask. pub const fn new_branch(state_mask: TrieMask) -> Self { - Self::Branch { state_mask, hash: None } + Self::Branch { state_mask, hash: None, store_in_db_trie: None } } /// Create new [`SparseNode::Branch`] with two bits set. 
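The branch-node handling above folds the per-child flags into a tree mask and a hash mask; a minimal sketch using only the `TrieMask` operations already appearing in this hunk (two populated children at nibbles 0 and 1) looks like:

    // Hedged sketch: set one bit per populated child nibble, as done above.
    let mut tree_mask = TrieMask::default();
    let mut hash_mask = TrieMask::default();
    for nibble in [0u8, 1u8] {
        tree_mask.set_bit(nibble);
        hash_mask.set_bit(nibble);
    }
    // The branch is retained in the database trie if either mask has any bit set.
    let store_in_db_trie = !tree_mask.is_empty() || !hash_mask.is_empty();
    assert!(store_in_db_trie);
    assert!(tree_mask == TrieMask::new(0b11));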
@@ -785,7 +949,7 @@ impl SparseNode { // set bits for both children (1u16 << bit_a) | (1u16 << bit_b), ); - Self::Branch { state_mask, hash: None } + Self::Branch { state_mask, hash: None, store_in_db_trie: None } } /// Create new [`SparseNode::Extension`] from the key slice. @@ -812,7 +976,7 @@ struct RlpNodeBuffers { /// Stack of paths we need rlp nodes for and whether the path is in the prefix set. path_stack: Vec<(Nibbles, Option)>, /// Stack of rlp nodes - rlp_node_stack: Vec<(Nibbles, RlpNode)>, + rlp_node_stack: Vec<(Nibbles, RlpNode, bool, SparseNodeType)>, /// Reusable branch child path branch_child_buf: SmallVec<[Nibbles; 16]>, /// Reusable branch value stack @@ -831,37 +995,101 @@ impl RlpNodeBuffers { } } +/// The aggregation of sparse trie updates. +#[derive(Debug, Clone, Default, PartialEq, Eq)] +pub struct SparseTrieUpdates { + updated_nodes: HashMap, + removed_nodes: HashSet, +} + #[cfg(test)] mod tests { use std::collections::BTreeMap; use super::*; use alloy_primitives::{map::HashSet, U256}; + use alloy_rlp::Encodable; use assert_matches::assert_matches; use itertools::Itertools; use prop::sample::SizeRange; use proptest::prelude::*; + use proptest_arbitrary_interop::arb; use rand::seq::IteratorRandom; - use reth_trie::{BranchNode, ExtensionNode, LeafNode}; + use reth_primitives_traits::Account; + use reth_trie::{ + hashed_cursor::{noop::NoopHashedAccountCursor, HashedPostStateAccountCursor}, + node_iter::{TrieElement, TrieNodeIter}, + trie_cursor::noop::NoopAccountTrieCursor, + walker::TrieWalker, + BranchNode, ExtensionNode, HashedPostState, LeafNode, TrieAccount, + }; use reth_trie_common::{ proof::{ProofNodes, ProofRetainer}, HashBuilder, }; + /// Pad nibbles to the length of a B256 hash with zeros on the left. + fn pad_nibbles_left(nibbles: Nibbles) -> Nibbles { + let mut base = + Nibbles::from_nibbles_unchecked(vec![0; B256::len_bytes() * 2 - nibbles.len()]); + base.extend_from_slice_unchecked(&nibbles); + base + } + + /// Pad nibbles to the length of a B256 hash with zeros on the right. + fn pad_nibbles_right(mut nibbles: Nibbles) -> Nibbles { + nibbles.extend_from_slice_unchecked(&vec![0; B256::len_bytes() * 2 - nibbles.len()]); + nibbles + } + /// Calculate the state root by feeding the provided state to the hash builder and retaining the /// proofs for the provided targets. /// /// Returns the state root and the retained proof nodes. 
- fn hash_builder_root_with_proofs>( - state: impl IntoIterator, + fn run_hash_builder( + state: impl IntoIterator + Clone, proof_targets: impl IntoIterator, - ) -> (B256, ProofNodes) { - let mut hash_builder = - HashBuilder::default().with_proof_retainer(ProofRetainer::from_iter(proof_targets)); - for (key, value) in state { - hash_builder.add_leaf(key, value.as_ref()); + ) -> HashBuilder { + let mut account_rlp = Vec::new(); + + let mut hash_builder = HashBuilder::default() + .with_updates(true) + .with_proof_retainer(ProofRetainer::from_iter(proof_targets)); + + let mut prefix_set = PrefixSetMut::default(); + prefix_set.extend_keys(state.clone().into_iter().map(|(nibbles, _)| nibbles)); + let walker = TrieWalker::new(NoopAccountTrieCursor::default(), prefix_set.freeze()) + .with_deletions_retained(true); + let hashed_post_state = HashedPostState::default() + .with_accounts(state.into_iter().map(|(nibbles, account)| { + (nibbles.pack().into_inner().unwrap().into(), Some(account)) + })) + .into_sorted(); + let mut node_iter = TrieNodeIter::new( + walker, + HashedPostStateAccountCursor::new( + NoopHashedAccountCursor::default(), + hashed_post_state.accounts(), + ), + ); + + while let Some(node) = node_iter.try_next().unwrap() { + match node { + TrieElement::Branch(branch) => { + hash_builder.add_branch(branch.key, branch.value, branch.children_are_in_trie); + } + TrieElement::Leaf(key, account) => { + let account = TrieAccount::from((account, EMPTY_ROOT_HASH)); + account.encode(&mut account_rlp); + + hash_builder.add_leaf(Nibbles::unpack(key), &account_rlp); + account_rlp.clear(); + } + } } - (hash_builder.root(), hash_builder.take_proof_nodes()) + hash_builder.root(); + + hash_builder } /// Assert that the sparse trie nodes and the proof nodes from the hash builder are equal. 
@@ -915,58 +1143,80 @@ mod tests { #[test] fn sparse_trie_empty_update_one() { - let path = Nibbles::unpack(B256::with_last_byte(42)); - let value = alloy_rlp::encode_fixed_size(&U256::from(1)); + let key = Nibbles::unpack(B256::with_last_byte(42)); + let value = || Account::default(); + let value_encoded = || { + let mut account_rlp = Vec::new(); + TrieAccount::from((value(), EMPTY_ROOT_HASH)).encode(&mut account_rlp); + account_rlp + }; - let (hash_builder_root, hash_builder_proof_nodes) = - hash_builder_root_with_proofs([(path.clone(), &value)], [path.clone()]); + let mut hash_builder = run_hash_builder([(key.clone(), value())], [key.clone()]); - let mut sparse = RevealedSparseTrie::default(); - sparse.update_leaf(path, value.to_vec()).unwrap(); + let mut sparse = RevealedSparseTrie::default().with_updates(true); + sparse.update_leaf(key, value_encoded()).unwrap(); let sparse_root = sparse.root(); + let sparse_updates = sparse.take_updates(); - assert_eq!(sparse_root, hash_builder_root); - assert_eq_sparse_trie_proof_nodes(&sparse, hash_builder_proof_nodes); + assert_eq!(sparse_root, hash_builder.root()); + assert_eq!(sparse_updates.updated_nodes, hash_builder.updated_branch_nodes.take().unwrap()); + assert_eq_sparse_trie_proof_nodes(&sparse, hash_builder.take_proof_nodes()); } #[test] fn sparse_trie_empty_update_multiple_lower_nibbles() { + reth_tracing::init_test_tracing(); + let paths = (0..=16).map(|b| Nibbles::unpack(B256::with_last_byte(b))).collect::>(); - let value = alloy_rlp::encode_fixed_size(&U256::from(1)); + let value = || Account::default(); + let value_encoded = || { + let mut account_rlp = Vec::new(); + TrieAccount::from((value(), EMPTY_ROOT_HASH)).encode(&mut account_rlp); + account_rlp + }; - let (hash_builder_root, hash_builder_proof_nodes) = hash_builder_root_with_proofs( - paths.iter().cloned().zip(std::iter::repeat_with(|| value.clone())), + let mut hash_builder = run_hash_builder( + paths.iter().cloned().zip(std::iter::repeat_with(value)), paths.clone(), ); - let mut sparse = RevealedSparseTrie::default(); + let mut sparse = RevealedSparseTrie::default().with_updates(true); for path in &paths { - sparse.update_leaf(path.clone(), value.to_vec()).unwrap(); + sparse.update_leaf(path.clone(), value_encoded()).unwrap(); } let sparse_root = sparse.root(); + let sparse_updates = sparse.take_updates(); - assert_eq!(sparse_root, hash_builder_root); - assert_eq_sparse_trie_proof_nodes(&sparse, hash_builder_proof_nodes); + assert_eq!(sparse_root, hash_builder.root()); + assert_eq!(sparse_updates.updated_nodes, hash_builder.updated_branch_nodes.take().unwrap()); + assert_eq_sparse_trie_proof_nodes(&sparse, hash_builder.take_proof_nodes()); } #[test] fn sparse_trie_empty_update_multiple_upper_nibbles() { let paths = (239..=255).map(|b| Nibbles::unpack(B256::repeat_byte(b))).collect::>(); - let value = alloy_rlp::encode_fixed_size(&U256::from(1)); + let value = || Account::default(); + let value_encoded = || { + let mut account_rlp = Vec::new(); + TrieAccount::from((value(), EMPTY_ROOT_HASH)).encode(&mut account_rlp); + account_rlp + }; - let (hash_builder_root, hash_builder_proof_nodes) = hash_builder_root_with_proofs( - paths.iter().cloned().zip(std::iter::repeat_with(|| value.clone())), + let mut hash_builder = run_hash_builder( + paths.iter().cloned().zip(std::iter::repeat_with(value)), paths.clone(), ); - let mut sparse = RevealedSparseTrie::default(); + let mut sparse = RevealedSparseTrie::default().with_updates(true); for path in &paths { - 
sparse.update_leaf(path.clone(), value.to_vec()).unwrap(); + sparse.update_leaf(path.clone(), value_encoded()).unwrap(); } let sparse_root = sparse.root(); + let sparse_updates = sparse.take_updates(); - assert_eq!(sparse_root, hash_builder_root); - assert_eq_sparse_trie_proof_nodes(&sparse, hash_builder_proof_nodes); + assert_eq!(sparse_root, hash_builder.root()); + assert_eq!(sparse_updates.updated_nodes, hash_builder.updated_branch_nodes.take().unwrap()); + assert_eq_sparse_trie_proof_nodes(&sparse, hash_builder.take_proof_nodes()); } #[test] @@ -980,55 +1230,79 @@ mod tests { }) }) .collect::>(); - let value = alloy_rlp::encode_fixed_size(&U256::from(1)); + let value = || Account::default(); + let value_encoded = || { + let mut account_rlp = Vec::new(); + TrieAccount::from((value(), EMPTY_ROOT_HASH)).encode(&mut account_rlp); + account_rlp + }; - let (hash_builder_root, hash_builder_proof_nodes) = hash_builder_root_with_proofs( - paths.iter().sorted_unstable().cloned().zip(std::iter::repeat_with(|| value.clone())), + let mut hash_builder = run_hash_builder( + paths.iter().sorted_unstable().cloned().zip(std::iter::repeat_with(value)), paths.clone(), ); - let mut sparse = RevealedSparseTrie::default(); + let mut sparse = RevealedSparseTrie::default().with_updates(true); for path in &paths { - sparse.update_leaf(path.clone(), value.to_vec()).unwrap(); + sparse.update_leaf(path.clone(), value_encoded()).unwrap(); } let sparse_root = sparse.root(); + let sparse_updates = sparse.take_updates(); - assert_eq!(sparse_root, hash_builder_root); - assert_eq_sparse_trie_proof_nodes(&sparse, hash_builder_proof_nodes); + assert_eq!(sparse_root, hash_builder.root()); + pretty_assertions::assert_eq!( + BTreeMap::from_iter(sparse_updates.updated_nodes), + BTreeMap::from_iter(hash_builder.updated_branch_nodes.take().unwrap()) + ); + assert_eq_sparse_trie_proof_nodes(&sparse, hash_builder.take_proof_nodes()); } #[test] fn sparse_trie_empty_update_repeated() { let paths = (0..=255).map(|b| Nibbles::unpack(B256::repeat_byte(b))).collect::>(); - let old_value = alloy_rlp::encode_fixed_size(&U256::from(1)); - let new_value = alloy_rlp::encode_fixed_size(&U256::from(2)); + let old_value = Account { nonce: 1, ..Default::default() }; + let old_value_encoded = { + let mut account_rlp = Vec::new(); + TrieAccount::from((old_value, EMPTY_ROOT_HASH)).encode(&mut account_rlp); + account_rlp + }; + let new_value = Account { nonce: 2, ..Default::default() }; + let new_value_encoded = { + let mut account_rlp = Vec::new(); + TrieAccount::from((new_value, EMPTY_ROOT_HASH)).encode(&mut account_rlp); + account_rlp + }; - let (hash_builder_root, hash_builder_proof_nodes) = hash_builder_root_with_proofs( - paths.iter().cloned().zip(std::iter::repeat_with(|| old_value.clone())), + let mut hash_builder = run_hash_builder( + paths.iter().cloned().zip(std::iter::repeat_with(|| old_value)), paths.clone(), ); - let mut sparse = RevealedSparseTrie::default(); + let mut sparse = RevealedSparseTrie::default().with_updates(true); for path in &paths { - sparse.update_leaf(path.clone(), old_value.to_vec()).unwrap(); + sparse.update_leaf(path.clone(), old_value_encoded.clone()).unwrap(); } let sparse_root = sparse.root(); + let sparse_updates = sparse.updates_ref(); - assert_eq!(sparse_root, hash_builder_root); - assert_eq_sparse_trie_proof_nodes(&sparse, hash_builder_proof_nodes); + assert_eq!(sparse_root, hash_builder.root()); + assert_eq!(sparse_updates.updated_nodes, hash_builder.updated_branch_nodes.take().unwrap()); + 
assert_eq_sparse_trie_proof_nodes(&sparse, hash_builder.take_proof_nodes()); - let (hash_builder_root, hash_builder_proof_nodes) = hash_builder_root_with_proofs( - paths.iter().cloned().zip(std::iter::repeat_with(|| new_value.clone())), + let mut hash_builder = run_hash_builder( + paths.iter().cloned().zip(std::iter::repeat_with(|| new_value)), paths.clone(), ); for path in &paths { - sparse.update_leaf(path.clone(), new_value.to_vec()).unwrap(); + sparse.update_leaf(path.clone(), new_value_encoded.clone()).unwrap(); } let sparse_root = sparse.root(); + let sparse_updates = sparse.take_updates(); - assert_eq!(sparse_root, hash_builder_root); - assert_eq_sparse_trie_proof_nodes(&sparse, hash_builder_proof_nodes); + assert_eq!(sparse_root, hash_builder.root()); + assert_eq!(sparse_updates.updated_nodes, hash_builder.updated_branch_nodes.take().unwrap()); + assert_eq_sparse_trie_proof_nodes(&sparse, hash_builder.take_proof_nodes()); } #[test] @@ -1311,30 +1585,43 @@ mod tests { // to test the sparse trie updates. const KEY_NIBBLES_LEN: usize = 3; - fn test(updates: Vec<(HashMap>, HashSet)>) { + fn test(updates: Vec<(HashMap, HashSet)>) { { let mut state = BTreeMap::default(); - let mut sparse = RevealedSparseTrie::default(); + let mut sparse = RevealedSparseTrie::default().with_updates(true); for (update, keys_to_delete) in updates { // Insert state updates into the sparse trie and calculate the root - for (key, value) in update.clone() { - sparse.update_leaf(key, value).unwrap(); + for (key, account) in update.clone() { + let account = TrieAccount::from((account, EMPTY_ROOT_HASH)); + let mut account_rlp = Vec::new(); + account.encode(&mut account_rlp); + sparse.update_leaf(key, account_rlp).unwrap(); } - let sparse_root = sparse.root(); + // We need to clone the sparse trie, so that all updated branch nodes are + // preserved, and not only those that were changed after the last call to + // `root()`. + let mut updated_sparse = sparse.clone(); + let sparse_root = updated_sparse.root(); + let sparse_updates = updated_sparse.take_updates(); // Insert state updates into the hash builder and calculate the root state.extend(update); - let (hash_builder_root, hash_builder_proof_nodes) = - hash_builder_root_with_proofs( - state.clone(), - state.keys().cloned().collect::>(), - ); + let mut hash_builder = + run_hash_builder(state.clone(), state.keys().cloned().collect::>()); // Assert that the sparse trie root matches the hash builder root - assert_eq!(sparse_root, hash_builder_root); + assert_eq!(sparse_root, hash_builder.root()); + // Assert that the sparse trie updates match the hash builder updates + pretty_assertions::assert_eq!( + sparse_updates.updated_nodes, + hash_builder.updated_branch_nodes.take().unwrap() + ); // Assert that the sparse trie nodes match the hash builder proof nodes - assert_eq_sparse_trie_proof_nodes(&sparse, hash_builder_proof_nodes); + assert_eq_sparse_trie_proof_nodes( + &updated_sparse, + hash_builder.take_proof_nodes(), + ); // Delete some keys from both the hash builder and the sparse trie and check // that the sparse trie root still matches the hash builder root @@ -1343,34 +1630,36 @@ mod tests { sparse.remove_leaf(&key).unwrap(); } - let sparse_root = sparse.root(); + // We need to clone the sparse trie, so that all updated branch nodes are + // preserved, and not only those that were changed after the last call to + // `root()`. 
+ let mut updated_sparse = sparse.clone(); + let sparse_root = updated_sparse.root(); + let sparse_updates = updated_sparse.take_updates(); - let (hash_builder_root, hash_builder_proof_nodes) = - hash_builder_root_with_proofs( - state.clone(), - state.keys().cloned().collect::>(), - ); + let mut hash_builder = + run_hash_builder(state.clone(), state.keys().cloned().collect::>()); // Assert that the sparse trie root matches the hash builder root - assert_eq!(sparse_root, hash_builder_root); + assert_eq!(sparse_root, hash_builder.root()); + // Assert that the sparse trie updates match the hash builder updates + pretty_assertions::assert_eq!( + sparse_updates.updated_nodes, + hash_builder.updated_branch_nodes.take().unwrap() + ); // Assert that the sparse trie nodes match the hash builder proof nodes - assert_eq_sparse_trie_proof_nodes(&sparse, hash_builder_proof_nodes); + assert_eq_sparse_trie_proof_nodes( + &updated_sparse, + hash_builder.take_proof_nodes(), + ); } } } - /// Pad nibbles of length [`KEY_NIBBLES_LEN`] with zeros to the length of a B256 hash. - fn pad_nibbles(nibbles: Nibbles) -> Nibbles { - let mut base = - Nibbles::from_nibbles_unchecked([0; { B256::len_bytes() / 2 - KEY_NIBBLES_LEN }]); - base.extend_from_slice_unchecked(&nibbles); - base - } - fn transform_updates( - updates: Vec>>, + updates: Vec>, mut rng: impl Rng, - ) -> Vec<(HashMap>, HashSet)> { + ) -> Vec<(HashMap, HashSet)> { let mut keys = HashSet::new(); updates .into_iter() @@ -1393,8 +1682,8 @@ mod tests { proptest!(ProptestConfig::with_cases(10), |( updates in proptest::collection::vec( proptest::collection::hash_map( - any_with::(SizeRange::new(KEY_NIBBLES_LEN..=KEY_NIBBLES_LEN)).prop_map(pad_nibbles), - any::>(), + any_with::(SizeRange::new(KEY_NIBBLES_LEN..=KEY_NIBBLES_LEN)).prop_map(pad_nibbles_right), + arb::(), 1..100, ).prop_map(HashMap::from_iter), 1..100, @@ -1417,24 +1706,28 @@ mod tests { /// replacing it. 
#[test] fn sparse_trie_reveal_node_1() { - let key1 = || Nibbles::from_nibbles_unchecked([0x00]); - let key2 = || Nibbles::from_nibbles_unchecked([0x01]); - let key3 = || Nibbles::from_nibbles_unchecked([0x02]); - let value = || alloy_rlp::encode_fixed_size(&B256::repeat_byte(1)); + let key1 = || pad_nibbles_right(Nibbles::from_nibbles_unchecked([0x00])); + let key2 = || pad_nibbles_right(Nibbles::from_nibbles_unchecked([0x01])); + let key3 = || pad_nibbles_right(Nibbles::from_nibbles_unchecked([0x02])); + let value = || Account::default(); + let value_encoded = || { + let mut account_rlp = Vec::new(); + TrieAccount::from((value(), EMPTY_ROOT_HASH)).encode(&mut account_rlp); + account_rlp + }; // Generate the proof for the root node and initialize the sparse trie with it - let (_, proof_nodes) = hash_builder_root_with_proofs( - [(key1(), value()), (key3(), value())], - [Nibbles::default()], - ); + let proof_nodes = + run_hash_builder([(key1(), value()), (key3(), value())], [Nibbles::default()]) + .take_proof_nodes(); let mut sparse = RevealedSparseTrie::from_root( TrieNode::decode(&mut &proof_nodes.nodes_sorted()[0].1[..]).unwrap(), ) .unwrap(); // Generate the proof for the first key and reveal it in the sparse trie - let (_, proof_nodes) = - hash_builder_root_with_proofs([(key1(), value()), (key3(), value())], [key1()]); + let proof_nodes = + run_hash_builder([(key1(), value()), (key3(), value())], [key1()]).take_proof_nodes(); for (path, node) in proof_nodes.nodes_sorted() { sparse.reveal_node(path, TrieNode::decode(&mut &node[..]).unwrap()).unwrap(); } @@ -1446,7 +1739,7 @@ mod tests { ); // Insert the leaf for the second key - sparse.update_leaf(key2(), value().to_vec()).unwrap(); + sparse.update_leaf(key2(), value_encoded()).unwrap(); // Check that the branch node was updated and another nibble was set assert_eq!( @@ -1455,8 +1748,8 @@ mod tests { ); // Generate the proof for the third key and reveal it in the sparse trie - let (_, proof_nodes_3) = - hash_builder_root_with_proofs([(key1(), value()), (key3(), value())], [key3()]); + let proof_nodes_3 = + run_hash_builder([(key1(), value()), (key3(), value())], [key3()]).take_proof_nodes(); for (path, node) in proof_nodes_3.nodes_sorted() { sparse.reveal_node(path, TrieNode::decode(&mut &node[..]).unwrap()).unwrap(); } @@ -1469,10 +1762,11 @@ mod tests { // Generate the nodes for the full trie with all three key using the hash builder, and // compare them to the sparse trie - let (_, proof_nodes) = hash_builder_root_with_proofs( + let proof_nodes = run_hash_builder( [(key1(), value()), (key2(), value()), (key3(), value())], [key1(), key2(), key3()], - ); + ) + .take_proof_nodes(); assert_eq_sparse_trie_proof_nodes(&sparse, proof_nodes); } @@ -1489,16 +1783,17 @@ mod tests { /// into an extension node, so it should ignore this node. 
#[test] fn sparse_trie_reveal_node_2() { - let key1 = || Nibbles::from_nibbles_unchecked([0x00, 0x00]); - let key2 = || Nibbles::from_nibbles_unchecked([0x01, 0x01]); - let key3 = || Nibbles::from_nibbles_unchecked([0x01, 0x02]); - let value = || alloy_rlp::encode_fixed_size(&B256::repeat_byte(1)); + let key1 = || pad_nibbles_right(Nibbles::from_nibbles_unchecked([0x00, 0x00])); + let key2 = || pad_nibbles_right(Nibbles::from_nibbles_unchecked([0x01, 0x01])); + let key3 = || pad_nibbles_right(Nibbles::from_nibbles_unchecked([0x01, 0x02])); + let value = || Account::default(); // Generate the proof for the root node and initialize the sparse trie with it - let (_, proof_nodes) = hash_builder_root_with_proofs( + let proof_nodes = run_hash_builder( [(key1(), value()), (key2(), value()), (key3(), value())], [Nibbles::default()], - ); + ) + .take_proof_nodes(); let mut sparse = RevealedSparseTrie::from_root( TrieNode::decode(&mut &proof_nodes.nodes_sorted()[0].1[..]).unwrap(), ) @@ -1506,10 +1801,11 @@ mod tests { // Generate the proof for the children of the root branch node and reveal it in the sparse // trie - let (_, proof_nodes) = hash_builder_root_with_proofs( + let proof_nodes = run_hash_builder( [(key1(), value()), (key2(), value()), (key3(), value())], [key1(), Nibbles::from_nibbles_unchecked([0x01])], - ); + ) + .take_proof_nodes(); for (path, node) in proof_nodes.nodes_sorted() { sparse.reveal_node(path, TrieNode::decode(&mut &node[..]).unwrap()).unwrap(); } @@ -1530,10 +1826,9 @@ mod tests { ); // Generate the proof for the third key and reveal it in the sparse trie - let (_, proof_nodes) = hash_builder_root_with_proofs( - [(key1(), value()), (key2(), value()), (key3(), value())], - [key2()], - ); + let proof_nodes = + run_hash_builder([(key1(), value()), (key2(), value()), (key3(), value())], [key2()]) + .take_proof_nodes(); for (path, node) in proof_nodes.nodes_sorted() { sparse.reveal_node(path, TrieNode::decode(&mut &node[..]).unwrap()).unwrap(); } @@ -1555,16 +1850,20 @@ mod tests { /// overwritten with the extension node from the proof. 
#[test] fn sparse_trie_reveal_node_3() { - let key1 = || Nibbles::from_nibbles_unchecked([0x00, 0x01]); - let key2 = || Nibbles::from_nibbles_unchecked([0x00, 0x02]); - let key3 = || Nibbles::from_nibbles_unchecked([0x01, 0x00]); - let value = || alloy_rlp::encode_fixed_size(&B256::repeat_byte(1)); + let key1 = || pad_nibbles_right(Nibbles::from_nibbles_unchecked([0x00, 0x01])); + let key2 = || pad_nibbles_right(Nibbles::from_nibbles_unchecked([0x00, 0x02])); + let key3 = || pad_nibbles_right(Nibbles::from_nibbles_unchecked([0x01, 0x00])); + let value = || Account::default(); + let value_encoded = || { + let mut account_rlp = Vec::new(); + TrieAccount::from((value(), EMPTY_ROOT_HASH)).encode(&mut account_rlp); + account_rlp + }; // Generate the proof for the root node and initialize the sparse trie with it - let (_, proof_nodes) = hash_builder_root_with_proofs( - [(key1(), value()), (key2(), value())], - [Nibbles::default()], - ); + let proof_nodes = + run_hash_builder([(key1(), value()), (key2(), value())], [Nibbles::default()]) + .take_proof_nodes(); let mut sparse = RevealedSparseTrie::from_root( TrieNode::decode(&mut &proof_nodes.nodes_sorted()[0].1[..]).unwrap(), ) @@ -1577,17 +1876,17 @@ mod tests { ); // Insert the leaf with a different prefix - sparse.update_leaf(key3(), value().to_vec()).unwrap(); + sparse.update_leaf(key3(), value_encoded()).unwrap(); // Check that the extension node was turned into a branch node assert_matches!( sparse.nodes.get(&Nibbles::default()), - Some(SparseNode::Branch { state_mask, hash: None }) if *state_mask == TrieMask::new(0b11) + Some(SparseNode::Branch { state_mask, hash: None, store_in_db_trie: None }) if *state_mask == TrieMask::new(0b11) ); // Generate the proof for the first key and reveal it in the sparse trie - let (_, proof_nodes) = - hash_builder_root_with_proofs([(key1(), value()), (key2(), value())], [key1()]); + let proof_nodes = + run_hash_builder([(key1(), value()), (key2(), value())], [key1()]).take_proof_nodes(); for (path, node) in proof_nodes.nodes_sorted() { sparse.reveal_node(path, TrieNode::decode(&mut &node[..]).unwrap()).unwrap(); } @@ -1595,7 +1894,7 @@ mod tests { // Check that the branch node wasn't overwritten by the extension node in the proof assert_matches!( sparse.nodes.get(&Nibbles::default()), - Some(SparseNode::Branch { state_mask, hash: None }) if *state_mask == TrieMask::new(0b11) + Some(SparseNode::Branch { state_mask, hash: None, store_in_db_trie: None }) if *state_mask == TrieMask::new(0b11) ); } @@ -1671,4 +1970,27 @@ mod tests { ] ); } + + #[test] + fn hash_builder_branch_hash_mask() { + let key1 = || pad_nibbles_left(Nibbles::from_nibbles_unchecked([0x00])); + let key2 = || pad_nibbles_left(Nibbles::from_nibbles_unchecked([0x01])); + let value = || Account { bytecode_hash: Some(B256::repeat_byte(1)), ..Default::default() }; + let value_encoded = || { + let mut account_rlp = Vec::new(); + TrieAccount::from((value(), EMPTY_ROOT_HASH)).encode(&mut account_rlp); + account_rlp + }; + + let mut hash_builder = + run_hash_builder([(key1(), value()), (key2(), value())], [Nibbles::default()]); + let mut sparse = RevealedSparseTrie::default(); + sparse.update_leaf(key1(), value_encoded()).unwrap(); + sparse.update_leaf(key2(), value_encoded()).unwrap(); + let sparse_root = sparse.root(); + let sparse_updates = sparse.take_updates(); + + assert_eq!(sparse_root, hash_builder.root()); + assert_eq!(sparse_updates.updated_nodes, hash_builder.updated_branch_nodes.take().unwrap()); + } } From 
fa7ad036ea5c23bf4322e9d0e516cf0e99a3af1f Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Thu, 21 Nov 2024 14:27:02 +0100 Subject: [PATCH 082/156] chore: force disable 4844 for op pool (#12740) --- crates/optimism/node/src/node.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/crates/optimism/node/src/node.rs b/crates/optimism/node/src/node.rs index 6cdffd09059b..bdc3d0d3a44a 100644 --- a/crates/optimism/node/src/node.rs +++ b/crates/optimism/node/src/node.rs @@ -282,6 +282,7 @@ where let validator = TransactionValidationTaskExecutor::eth_builder(Arc::new( ctx.chain_spec().inner.clone(), )) + .no_eip4844() .with_head_timestamp(ctx.head().timestamp) .kzg_settings(ctx.kzg_settings()?) .with_additional_tasks( From 4eca2fa1eea6450bc197554afb03a984095a9aca Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Thu, 21 Nov 2024 14:28:08 +0100 Subject: [PATCH 083/156] chore: rm network event handling from node events (#12736) --- bin/reth/src/commands/debug_cmd/execution.rs | 11 +++-------- crates/node/builder/src/launch/engine.rs | 3 +-- crates/node/builder/src/launch/mod.rs | 4 +--- crates/node/events/src/node.rs | 19 +------------------ 4 files changed, 6 insertions(+), 31 deletions(-) diff --git a/bin/reth/src/commands/debug_cmd/execution.rs b/bin/reth/src/commands/debug_cmd/execution.rs index 0210142be713..dd060ac2ab61 100644 --- a/bin/reth/src/commands/debug_cmd/execution.rs +++ b/bin/reth/src/commands/debug_cmd/execution.rs @@ -4,7 +4,7 @@ use crate::{args::NetworkArgs, utils::get_single_header}; use alloy_eips::BlockHashOrNumber; use alloy_primitives::{BlockNumber, B256}; use clap::Parser; -use futures::{stream::select as stream_select, StreamExt}; +use futures::StreamExt; use reth_beacon_consensus::EthBeaconConsensus; use reth_chainspec::ChainSpec; use reth_cli::chainspec::ChainSpecParser; @@ -19,7 +19,7 @@ use reth_downloaders::{ headers::reverse_headers::ReverseHeadersDownloaderBuilder, }; use reth_exex::ExExManagerHandle; -use reth_network::{BlockDownloaderProvider, NetworkEventListenerProvider, NetworkHandle}; +use reth_network::{BlockDownloaderProvider, NetworkHandle}; use reth_network_api::NetworkInfo; use reth_network_p2p::{headers::client::HeadersClient, EthBlockClient}; use reth_node_api::NodeTypesWithDBAdapter; @@ -207,17 +207,12 @@ impl> Command { return Ok(()) } - let pipeline_events = pipeline.events(); - let events = stream_select( - network.event_listener().map(Into::into), - pipeline_events.map(Into::into), - ); ctx.task_executor.spawn_critical( "events task", reth_node_events::node::handle_events( Some(Box::new(network)), latest_block_number, - events, + pipeline.events().map(Into::into), ), ); diff --git a/crates/node/builder/src/launch/engine.rs b/crates/node/builder/src/launch/engine.rs index 5a8405047b0d..f485be2c22db 100644 --- a/crates/node/builder/src/launch/engine.rs +++ b/crates/node/builder/src/launch/engine.rs @@ -17,7 +17,7 @@ use reth_engine_tree::{ use reth_engine_util::EngineMessageStreamExt; use reth_exex::ExExManagerHandle; use reth_network::{NetworkSyncUpdater, SyncState}; -use reth_network_api::{BlockDownloaderProvider, NetworkEventListenerProvider}; +use reth_network_api::BlockDownloaderProvider; use reth_node_api::{ BuiltPayload, FullNodePrimitives, FullNodeTypes, NodeTypesWithEngine, PayloadAttributesBuilder, PayloadBuilder, PayloadTypes, @@ -256,7 +256,6 @@ where info!(target: "reth::cli", "Consensus engine initialized"); let events = stream_select!( - ctx.components().network().event_listener().map(Into::into), 
beacon_engine_handle.event_listener().map(Into::into), pipeline_events.map(Into::into), if ctx.node_config().debug.tip.is_none() && !ctx.is_dev() { diff --git a/crates/node/builder/src/launch/mod.rs b/crates/node/builder/src/launch/mod.rs index c4146f48306e..be317e4be318 100644 --- a/crates/node/builder/src/launch/mod.rs +++ b/crates/node/builder/src/launch/mod.rs @@ -21,7 +21,7 @@ use reth_chainspec::EthChainSpec; use reth_consensus_debug_client::{DebugConsensusClient, EtherscanBlockProvider, RpcBlockProvider}; use reth_engine_util::EngineMessageStreamExt; use reth_exex::ExExManagerHandle; -use reth_network::{BlockDownloaderProvider, NetworkEventListenerProvider}; +use reth_network::BlockDownloaderProvider; use reth_node_api::{AddOnsContext, FullNodePrimitives, FullNodeTypes, NodeTypesWithEngine}; use reth_node_core::{ dirs::{ChainPath, DataDirPath}, @@ -262,8 +262,6 @@ where info!(target: "reth::cli", "Consensus engine initialized"); let events = stream_select!( - ctx.components().network().event_listener().map(Into::into), - beacon_engine_handle.event_listener().map(Into::into), pipeline_events.map(Into::into), if ctx.node_config().debug.tip.is_none() && !ctx.is_dev() { Either::Left( diff --git a/crates/node/events/src/node.rs b/crates/node/events/src/node.rs index 4528bdeaa94c..edd85501ec0a 100644 --- a/crates/node/events/src/node.rs +++ b/crates/node/events/src/node.rs @@ -7,7 +7,7 @@ use alloy_rpc_types_engine::ForkchoiceState; use futures::Stream; use reth_beacon_consensus::{BeaconConsensusEngineEvent, ConsensusEngineLiveSyncProgress}; use reth_engine_primitives::ForkchoiceStatus; -use reth_network_api::{NetworkEvent, PeersInfo}; +use reth_network_api::PeersInfo; use reth_primitives_traits::{format_gas, format_gas_throughput}; use reth_prune_types::PrunerEvent; use reth_stages::{EntitiesCheckpoint, ExecOutput, PipelineEvent, StageCheckpoint, StageId}; @@ -211,12 +211,6 @@ impl NodeState { } } - fn handle_network_event(&self, _: NetworkEvent) { - // NOTE(onbjerg): This used to log established/disconnecting sessions, but this is already - // logged in the networking component. I kept this stub in case we want to catch other - // networking events later on. - } - fn handle_consensus_engine_event(&mut self, event: BeaconConsensusEngineEvent) { match event { BeaconConsensusEngineEvent::ForkchoiceUpdated(state, status) => { @@ -358,8 +352,6 @@ struct CurrentStage { /// A node event. #[derive(Debug)] pub enum NodeEvent { - /// A network event. - Network(NetworkEvent), /// A sync pipeline event. Pipeline(PipelineEvent), /// A consensus engine event. 
@@ -375,12 +367,6 @@ pub enum NodeEvent { Other(String), } -impl From for NodeEvent { - fn from(event: NetworkEvent) -> Self { - Self::Network(event) - } -} - impl From for NodeEvent { fn from(event: PipelineEvent) -> Self { Self::Pipeline(event) @@ -527,9 +513,6 @@ where while let Poll::Ready(Some(event)) = this.events.as_mut().poll_next(cx) { match event { - NodeEvent::Network(event) => { - this.state.handle_network_event(event); - } NodeEvent::Pipeline(event) => { this.state.handle_pipeline_event(event); } From 9fbe3468e842e62e3968d3a3e80d87c9db1e755b Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Thu, 21 Nov 2024 14:29:09 +0100 Subject: [PATCH 084/156] chore: use TransactionSigned trait bound for tx msg building (#12737) --- crates/net/network/src/transactions/mod.rs | 42 ++++++++++++---------- 1 file changed, 23 insertions(+), 19 deletions(-) diff --git a/crates/net/network/src/transactions/mod.rs b/crates/net/network/src/transactions/mod.rs index c3ffea58d01d..2a5496deead3 100644 --- a/crates/net/network/src/transactions/mod.rs +++ b/crates/net/network/src/transactions/mod.rs @@ -49,6 +49,7 @@ use reth_network_p2p::{ use reth_network_peers::PeerId; use reth_network_types::ReputationChangeKind; use reth_primitives::{PooledTransactionsElement, TransactionSigned, TransactionSignedEcRecovered}; +use reth_primitives_traits::{SignedTransaction, TransactionExt, TxType}; use reth_tokio_util::EventStream; use reth_transaction_pool::{ error::{PoolError, PoolResult}, @@ -1455,13 +1456,7 @@ struct PropagateTransaction { transaction: Arc, } -// === impl PropagateTransaction === - impl PropagateTransaction { - fn hash(&self) -> TxHash { - self.transaction.hash() - } - /// Create a new instance from a pooled transaction fn new(tx: Arc>) -> Self where @@ -1475,10 +1470,16 @@ impl PropagateTransaction { } } +impl PropagateTransaction { + fn hash(&self) -> TxHash { + *self.transaction.tx_hash() + } +} + /// Helper type to construct the appropriate message to send to the peer based on whether the peer /// should receive them in full or as pooled #[derive(Debug, Clone)] -enum PropagateTransactionsBuilder { +enum PropagateTransactionsBuilder { Pooled(PooledTransactionsHashesBuilder), Full(FullTransactionsBuilder), } @@ -1513,16 +1514,16 @@ impl PropagateTransactionsBuilder { } } -impl PropagateTransactionsBuilder { +impl PropagateTransactionsBuilder { /// Appends all transactions - fn extend<'a>(&mut self, txs: impl IntoIterator) { + fn extend<'a>(&mut self, txs: impl IntoIterator>) { for tx in txs { self.push(tx); } } /// Appends a transaction to the list. - fn push(&mut self, transaction: &PropagateTransaction) { + fn push(&mut self, transaction: &PropagateTransaction) { match self { Self::Pooled(builder) => builder.push(transaction), Self::Full(builder) => builder.push(transaction), @@ -1531,7 +1532,7 @@ impl PropagateTransactionsBuilder { } /// Represents how the transactions should be sent to a peer if any. -struct PropagateTransactions { +struct PropagateTransactions { /// The pooled transaction hashes to send. pooled: Option, /// The transactions to send in full. @@ -1543,7 +1544,7 @@ struct PropagateTransactions { /// and enforces other propagation rules for EIP-4844 and tracks those transactions that can't be /// broadcasted in full. #[derive(Debug, Clone)] -struct FullTransactionsBuilder { +struct FullTransactionsBuilder { /// The soft limit to enforce for a single broadcast message of full transactions. total_size: usize, /// All transactions to be broadcasted. 
@@ -1575,9 +1576,9 @@ impl FullTransactionsBuilder { } } -impl FullTransactionsBuilder { +impl FullTransactionsBuilder { /// Appends all transactions. - fn extend(&mut self, txs: impl IntoIterator) { + fn extend(&mut self, txs: impl IntoIterator>) { for tx in txs { self.push(&tx) } @@ -1591,7 +1592,7 @@ impl FullTransactionsBuilder { /// /// If the transaction is unsuitable for broadcast or would exceed the softlimit, it is appended /// to list of pooled transactions, (e.g. 4844 transactions). - fn push(&mut self, transaction: &PropagateTransaction) { + fn push(&mut self, transaction: &PropagateTransaction) { // Do not send full 4844 transaction hashes to peers. // // Nodes MUST NOT automatically broadcast blob transactions to their peers. @@ -1600,7 +1601,7 @@ impl FullTransactionsBuilder { // via `GetPooledTransactions`. // // From: - if transaction.transaction.is_eip4844() { + if transaction.transaction.transaction().tx_type().is_eip4844() { self.pooled.push(transaction); return } @@ -1651,19 +1652,22 @@ impl PooledTransactionsHashesBuilder { } /// Appends all hashes - fn extend(&mut self, txs: impl IntoIterator) { + fn extend( + &mut self, + txs: impl IntoIterator>, + ) { for tx in txs { self.push(&tx); } } - fn push(&mut self, tx: &PropagateTransaction) { + fn push(&mut self, tx: &PropagateTransaction) { match self { Self::Eth66(msg) => msg.0.push(tx.hash()), Self::Eth68(msg) => { msg.hashes.push(tx.hash()); msg.sizes.push(tx.size); - msg.types.push(tx.transaction.tx_type().into()); + msg.types.push(tx.transaction.transaction().tx_type().into()); } } } From 4f946733c356613cb155e9a64abcff8251696823 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Thu, 21 Nov 2024 14:30:04 +0100 Subject: [PATCH 085/156] feat: add is_broadcastable_in_full to txtype (#12739) --- crates/primitives-traits/src/transaction/tx_type.rs | 10 ++++++++++ crates/primitives/src/transaction/tx_type.rs | 8 ++++++++ 2 files changed, 18 insertions(+) diff --git a/crates/primitives-traits/src/transaction/tx_type.rs b/crates/primitives-traits/src/transaction/tx_type.rs index dc3dba7fdcf9..866242098d3f 100644 --- a/crates/primitives-traits/src/transaction/tx_type.rs +++ b/crates/primitives-traits/src/transaction/tx_type.rs @@ -49,4 +49,14 @@ pub trait TxType: /// Returns `true` if this is an eip-7702 transaction. fn is_eip7702(&self) -> bool; + + /// Returns whether this transaction type can be __broadcasted__ as full transaction over the + /// network. + /// + /// Some transactions are not broadcastable as objects and only allowed to be broadcasted as + /// hashes, e.g. because they missing context (e.g. blob sidecar). + fn is_broadcastable_in_full(&self) -> bool { + // EIP-4844 transactions are not broadcastable in full, only hashes are allowed. 
+ !self.is_eip4844() + } } diff --git a/crates/primitives/src/transaction/tx_type.rs b/crates/primitives/src/transaction/tx_type.rs index 597487564dfc..0e344374d202 100644 --- a/crates/primitives/src/transaction/tx_type.rs +++ b/crates/primitives/src/transaction/tx_type.rs @@ -257,8 +257,16 @@ mod tests { use super::*; use alloy_primitives::hex; use reth_codecs::{txtype::*, Compact}; + use reth_primitives_traits::TxType as _; use rstest::rstest; + #[test] + fn is_broadcastable() { + assert!(TxType::Legacy.is_broadcastable_in_full()); + assert!(TxType::Eip1559.is_broadcastable_in_full()); + assert!(!TxType::Eip4844.is_broadcastable_in_full()); + } + #[rstest] #[case(U64::from(LEGACY_TX_TYPE_ID), Ok(TxType::Legacy))] #[case(U64::from(EIP2930_TX_TYPE_ID), Ok(TxType::Eip2930))] From 9d3f8cc6a27e59396ede209f3d439eb334e24eed Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Thu, 21 Nov 2024 14:31:03 +0100 Subject: [PATCH 086/156] docs: add additional eth validator docs (#12742) --- crates/transaction-pool/src/validate/eth.rs | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/crates/transaction-pool/src/validate/eth.rs b/crates/transaction-pool/src/validate/eth.rs index 70298487694f..ca7452225755 100644 --- a/crates/transaction-pool/src/validate/eth.rs +++ b/crates/transaction-pool/src/validate/eth.rs @@ -107,6 +107,19 @@ where } /// A [`TransactionValidator`] implementation that validates ethereum transaction. +/// +/// It supports all known ethereum transaction types: +/// - Legacy +/// - EIP-2718 +/// - EIP-1559 +/// - EIP-4844 +/// - EIP-7702 +/// +/// And enforces additional constraints such as: +/// - Maximum transaction size +/// - Maximum gas limit +/// +/// And adheres to the configured [`LocalTransactionConfig`]. #[derive(Debug)] pub(crate) struct EthTransactionValidatorInner { /// Spec of the chain From c73dadacb2c0d324c17fa690807a0f77722e6ed2 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Thu, 21 Nov 2024 19:20:29 +0400 Subject: [PATCH 087/156] refactor: unify code paths for trie unwind (#12741) --- bin/reth/src/commands/debug_cmd/execution.rs | 9 +- .../src/providers/database/provider.rs | 241 +++++++----------- 2 files changed, 93 insertions(+), 157 deletions(-) diff --git a/bin/reth/src/commands/debug_cmd/execution.rs b/bin/reth/src/commands/debug_cmd/execution.rs index dd060ac2ab61..efe4a2f7c221 100644 --- a/bin/reth/src/commands/debug_cmd/execution.rs +++ b/bin/reth/src/commands/debug_cmd/execution.rs @@ -25,8 +25,7 @@ use reth_network_p2p::{headers::client::HeadersClient, EthBlockClient}; use reth_node_api::NodeTypesWithDBAdapter; use reth_node_ethereum::EthExecutorProvider; use reth_provider::{ - providers::ProviderNodeTypes, BlockExecutionWriter, ChainSpecProvider, ProviderFactory, - StageCheckpointReader, + providers::ProviderNodeTypes, ChainSpecProvider, ProviderFactory, StageCheckpointReader, }; use reth_prune::PruneModes; use reth_stages::{ @@ -230,11 +229,7 @@ impl> Command { trace!(target: "reth::cli", from = next_block, to = target_block, tip = ?target_block_hash, ?result, "Pipeline finished"); // Unwind the pipeline without committing. - { - provider_factory - .provider_rw()? 
- .take_block_and_execution_range(next_block..=target_block)?; - } + provider_factory.provider_rw()?.unwind_trie_state_range(next_block..=target_block)?; // Update latest block current_max_block = target_block; diff --git a/crates/storage/provider/src/providers/database/provider.rs b/crates/storage/provider/src/providers/database/provider.rs index 66bc4c053415..92cc8df2f5c2 100644 --- a/crates/storage/provider/src/providers/database/provider.rs +++ b/crates/storage/provider/src/providers/database/provider.rs @@ -243,6 +243,95 @@ impl AsRef for DatabaseProvider { } } +impl DatabaseProvider { + /// Unwinds trie state for the given range. + /// + /// This includes calculating the resulted state root and comparing it with the parent block + /// state root. + pub fn unwind_trie_state_range( + &self, + range: RangeInclusive, + ) -> ProviderResult<()> { + let changed_accounts = self + .tx + .cursor_read::()? + .walk_range(range.clone())? + .collect::, _>>()?; + + // Unwind account hashes. Add changed accounts to account prefix set. + let hashed_addresses = self.unwind_account_hashing(changed_accounts.iter())?; + let mut account_prefix_set = PrefixSetMut::with_capacity(hashed_addresses.len()); + let mut destroyed_accounts = HashSet::default(); + for (hashed_address, account) in hashed_addresses { + account_prefix_set.insert(Nibbles::unpack(hashed_address)); + if account.is_none() { + destroyed_accounts.insert(hashed_address); + } + } + + // Unwind account history indices. + self.unwind_account_history_indices(changed_accounts.iter())?; + let storage_range = BlockNumberAddress::range(range.clone()); + + let changed_storages = self + .tx + .cursor_read::()? + .walk_range(storage_range)? + .collect::, _>>()?; + + // Unwind storage hashes. Add changed account and storage keys to corresponding prefix + // sets. + let mut storage_prefix_sets = HashMap::::default(); + let storage_entries = self.unwind_storage_hashing(changed_storages.iter().copied())?; + for (hashed_address, hashed_slots) in storage_entries { + account_prefix_set.insert(Nibbles::unpack(hashed_address)); + let mut storage_prefix_set = PrefixSetMut::with_capacity(hashed_slots.len()); + for slot in hashed_slots { + storage_prefix_set.insert(Nibbles::unpack(slot)); + } + storage_prefix_sets.insert(hashed_address, storage_prefix_set.freeze()); + } + + // Unwind storage history indices. + self.unwind_storage_history_indices(changed_storages.iter().copied())?; + + // Calculate the reverted merkle root. + // This is the same as `StateRoot::incremental_root_with_updates`, only the prefix sets + // are pre-loaded. + let prefix_sets = TriePrefixSets { + account_prefix_set: account_prefix_set.freeze(), + storage_prefix_sets, + destroyed_accounts, + }; + let (new_state_root, trie_updates) = StateRoot::from_tx(&self.tx) + .with_prefix_sets(prefix_sets) + .root_with_updates() + .map_err(Into::::into)?; + + let parent_number = range.start().saturating_sub(1); + let parent_state_root = self + .header_by_number(parent_number)? + .ok_or_else(|| ProviderError::HeaderNotFound(parent_number.into()))? + .state_root; + + // state root should be always correct as we are reverting state. + // but for sake of double verification we will check it again. + if new_state_root != parent_state_root { + let parent_hash = self + .block_hash(parent_number)? 
+ .ok_or_else(|| ProviderError::HeaderNotFound(parent_number.into()))?; + return Err(ProviderError::UnwindStateRootMismatch(Box::new(RootMismatch { + root: GotExpected { got: new_state_root, expected: parent_state_root }, + block_number: parent_number, + block_hash: parent_hash, + }))) + } + self.write_trie_updates(&trie_updates)?; + + Ok(()) + } +} + impl TryIntoHistoricalStateProvider for DatabaseProvider { fn try_into_history_at_block( self, @@ -2913,81 +3002,7 @@ impl BlockExecutio &self, range: RangeInclusive, ) -> ProviderResult { - let changed_accounts = self - .tx - .cursor_read::()? - .walk_range(range.clone())? - .collect::, _>>()?; - - // Unwind account hashes. Add changed accounts to account prefix set. - let hashed_addresses = self.unwind_account_hashing(changed_accounts.iter())?; - let mut account_prefix_set = PrefixSetMut::with_capacity(hashed_addresses.len()); - let mut destroyed_accounts = HashSet::default(); - for (hashed_address, account) in hashed_addresses { - account_prefix_set.insert(Nibbles::unpack(hashed_address)); - if account.is_none() { - destroyed_accounts.insert(hashed_address); - } - } - - // Unwind account history indices. - self.unwind_account_history_indices(changed_accounts.iter())?; - let storage_range = BlockNumberAddress::range(range.clone()); - - let changed_storages = self - .tx - .cursor_read::()? - .walk_range(storage_range)? - .collect::, _>>()?; - - // Unwind storage hashes. Add changed account and storage keys to corresponding prefix - // sets. - let mut storage_prefix_sets = HashMap::::default(); - let storage_entries = self.unwind_storage_hashing(changed_storages.iter().copied())?; - for (hashed_address, hashed_slots) in storage_entries { - account_prefix_set.insert(Nibbles::unpack(hashed_address)); - let mut storage_prefix_set = PrefixSetMut::with_capacity(hashed_slots.len()); - for slot in hashed_slots { - storage_prefix_set.insert(Nibbles::unpack(slot)); - } - storage_prefix_sets.insert(hashed_address, storage_prefix_set.freeze()); - } - - // Unwind storage history indices. - self.unwind_storage_history_indices(changed_storages.iter().copied())?; - - // Calculate the reverted merkle root. - // This is the same as `StateRoot::incremental_root_with_updates`, only the prefix sets - // are pre-loaded. - let prefix_sets = TriePrefixSets { - account_prefix_set: account_prefix_set.freeze(), - storage_prefix_sets, - destroyed_accounts, - }; - let (new_state_root, trie_updates) = StateRoot::from_tx(&self.tx) - .with_prefix_sets(prefix_sets) - .root_with_updates() - .map_err(Into::::into)?; - - let parent_number = range.start().saturating_sub(1); - let parent_state_root = self - .header_by_number(parent_number)? - .ok_or_else(|| ProviderError::HeaderNotFound(parent_number.into()))? - .state_root; - - // state root should be always correct as we are reverting state. - // but for sake of double verification we will check it again. - if new_state_root != parent_state_root { - let parent_hash = self - .block_hash(parent_number)? 
- .ok_or_else(|| ProviderError::HeaderNotFound(parent_number.into()))?; - return Err(ProviderError::UnwindStateRootMismatch(Box::new(RootMismatch { - root: GotExpected { got: new_state_root, expected: parent_state_root }, - block_number: parent_number, - block_hash: parent_hash, - }))) - } - self.write_trie_updates(&trie_updates)?; + self.unwind_trie_state_range(range.clone())?; // get blocks let blocks = self.take_block_range(range.clone())?; @@ -3012,81 +3027,7 @@ impl BlockExecutio &self, range: RangeInclusive, ) -> ProviderResult<()> { - let changed_accounts = self - .tx - .cursor_read::()? - .walk_range(range.clone())? - .collect::, _>>()?; - - // Unwind account hashes. Add changed accounts to account prefix set. - let hashed_addresses = self.unwind_account_hashing(changed_accounts.iter())?; - let mut account_prefix_set = PrefixSetMut::with_capacity(hashed_addresses.len()); - let mut destroyed_accounts = HashSet::default(); - for (hashed_address, account) in hashed_addresses { - account_prefix_set.insert(Nibbles::unpack(hashed_address)); - if account.is_none() { - destroyed_accounts.insert(hashed_address); - } - } - - // Unwind account history indices. - self.unwind_account_history_indices(changed_accounts.iter())?; - - let storage_range = BlockNumberAddress::range(range.clone()); - let changed_storages = self - .tx - .cursor_read::()? - .walk_range(storage_range)? - .collect::, _>>()?; - - // Unwind storage hashes. Add changed account and storage keys to corresponding prefix - // sets. - let mut storage_prefix_sets = HashMap::::default(); - let storage_entries = self.unwind_storage_hashing(changed_storages.iter().copied())?; - for (hashed_address, hashed_slots) in storage_entries { - account_prefix_set.insert(Nibbles::unpack(hashed_address)); - let mut storage_prefix_set = PrefixSetMut::with_capacity(hashed_slots.len()); - for slot in hashed_slots { - storage_prefix_set.insert(Nibbles::unpack(slot)); - } - storage_prefix_sets.insert(hashed_address, storage_prefix_set.freeze()); - } - - // Unwind storage history indices. - self.unwind_storage_history_indices(changed_storages.iter().copied())?; - - // Calculate the reverted merkle root. - // This is the same as `StateRoot::incremental_root_with_updates`, only the prefix sets - // are pre-loaded. - let prefix_sets = TriePrefixSets { - account_prefix_set: account_prefix_set.freeze(), - storage_prefix_sets, - destroyed_accounts, - }; - let (new_state_root, trie_updates) = StateRoot::from_tx(&self.tx) - .with_prefix_sets(prefix_sets) - .root_with_updates() - .map_err(Into::::into)?; - - let parent_number = range.start().saturating_sub(1); - let parent_state_root = self - .header_by_number(parent_number)? - .ok_or_else(|| ProviderError::HeaderNotFound(parent_number.into()))? - .state_root; - - // state root should be always correct as we are reverting state. - // but for sake of double verification we will check it again. - if new_state_root != parent_state_root { - let parent_hash = self - .block_hash(parent_number)? 
- .ok_or_else(|| ProviderError::HeaderNotFound(parent_number.into()))?; - return Err(ProviderError::UnwindStateRootMismatch(Box::new(RootMismatch { - root: GotExpected { got: new_state_root, expected: parent_state_root }, - block_number: parent_number, - block_hash: parent_hash, - }))) - } - self.write_trie_updates(&trie_updates)?; + self.unwind_trie_state_range(range.clone())?; // get blocks let blocks = self.take_block_range(range.clone())?; From 54ff4c73498c2b8e774eecd7b5e19fcbf54673c7 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Thu, 21 Nov 2024 16:35:43 +0100 Subject: [PATCH 088/156] feat: relax more tx manager bounds (#12744) --- crates/net/network/src/transactions/mod.rs | 322 +++++++++++---------- crates/optimism/node/src/txpool.rs | 7 +- crates/transaction-pool/src/traits.rs | 7 +- 3 files changed, 180 insertions(+), 156 deletions(-) diff --git a/crates/net/network/src/transactions/mod.rs b/crates/net/network/src/transactions/mod.rs index 2a5496deead3..a4eef2fa99c4 100644 --- a/crates/net/network/src/transactions/mod.rs +++ b/crates/net/network/src/transactions/mod.rs @@ -48,7 +48,7 @@ use reth_network_p2p::{ }; use reth_network_peers::PeerId; use reth_network_types::ReputationChangeKind; -use reth_primitives::{PooledTransactionsElement, TransactionSigned, TransactionSignedEcRecovered}; +use reth_primitives::{PooledTransactionsElement, TransactionSigned}; use reth_primitives_traits::{SignedTransaction, TransactionExt, TxType}; use reth_tokio_util::EventStream; use reth_transaction_pool::{ @@ -678,40 +678,13 @@ where } } -impl TransactionsManager +impl TransactionsManager where - Pool: TransactionPool + 'static, + Pool: TransactionPool, + N: NetworkPrimitives, + <::Transaction as PoolTransaction>::Consensus: + Into, { - /// Request handler for an incoming request for transactions - fn on_get_pooled_transactions( - &mut self, - peer_id: PeerId, - request: GetPooledTransactions, - response: oneshot::Sender>, - ) { - if let Some(peer) = self.peers.get_mut(&peer_id) { - if self.network.tx_gossip_disabled() { - let _ = response.send(Ok(PooledTransactions::default())); - return - } - let transactions = self.pool.get_pooled_transaction_elements( - request.0, - GetPooledTransactionLimit::ResponseSizeSoftLimit( - self.transaction_fetcher.info.soft_limit_byte_size_pooled_transactions_response, - ), - ); - - trace!(target: "net::tx::propagation", sent_txs=?transactions.iter().map(|tx| *tx.hash()), "Sending requested transactions to peer"); - - // we sent a response at which point we assume that the peer is aware of the - // transactions - peer.seen_transactions.extend(transactions.iter().map(|tx| *tx.hash())); - - let resp = PooledTransactions(transactions); - let _ = response.send(Ok(resp)); - } - } - /// Invoked when transactions in the local mempool are considered __pending__. /// /// When a transaction in the local mempool is moved to the pending pool, we propagate them to @@ -737,110 +710,6 @@ where self.propagate_all(hashes); } - /// Propagates the given transactions to the peers - /// - /// This fetches all transaction from the pool, including the 4844 blob transactions but - /// __without__ their sidecar, because 4844 transactions are only ever announced as hashes. 
- fn propagate_all(&mut self, hashes: Vec) { - let propagated = self.propagate_transactions( - self.pool.get_all(hashes).into_iter().map(PropagateTransaction::new).collect(), - PropagationMode::Basic, - ); - - // notify pool so events get fired - self.pool.on_propagated(propagated); - } - - /// Propagate the transactions to all connected peers either as full objects or hashes. - /// - /// The message for new pooled hashes depends on the negotiated version of the stream. - /// See [`NewPooledTransactionHashes`] - /// - /// Note: EIP-4844 are disallowed from being broadcast in full and are only ever sent as hashes, see also . - fn propagate_transactions( - &mut self, - to_propagate: Vec, - propagation_mode: PropagationMode, - ) -> PropagatedTransactions { - let mut propagated = PropagatedTransactions::default(); - if self.network.tx_gossip_disabled() { - return propagated - } - - // send full transactions to a set of the connected peers based on the configured mode - let max_num_full = self.config.propagation_mode.full_peer_count(self.peers.len()); - - // Note: Assuming ~random~ order due to random state of the peers map hasher - for (peer_idx, (peer_id, peer)) in self.peers.iter_mut().enumerate() { - // determine whether to send full tx objects or hashes. - let mut builder = if peer_idx > max_num_full { - PropagateTransactionsBuilder::pooled(peer.version) - } else { - PropagateTransactionsBuilder::full(peer.version) - }; - - if propagation_mode.is_forced() { - builder.extend(to_propagate.iter()); - } else { - // Iterate through the transactions to propagate and fill the hashes and full - // transaction lists, before deciding whether or not to send full transactions to - // the peer. - for tx in &to_propagate { - // Only proceed if the transaction is not in the peer's list of seen - // transactions - if !peer.seen_transactions.contains(&tx.hash()) { - builder.push(tx); - } - } - } - - if builder.is_empty() { - trace!(target: "net::tx", ?peer_id, "Nothing to propagate to peer; has seen all transactions"); - continue - } - - let PropagateTransactions { pooled, full } = builder.build(); - - // send hashes if any - if let Some(mut new_pooled_hashes) = pooled { - // enforce tx soft limit per message for the (unlikely) event the number of - // hashes exceeds it - new_pooled_hashes - .truncate(SOFT_LIMIT_COUNT_HASHES_IN_NEW_POOLED_TRANSACTIONS_BROADCAST_MESSAGE); - - for hash in new_pooled_hashes.iter_hashes().copied() { - propagated.0.entry(hash).or_default().push(PropagateKind::Hash(*peer_id)); - // mark transaction as seen by peer - peer.seen_transactions.insert(hash); - } - - trace!(target: "net::tx", ?peer_id, num_txs=?new_pooled_hashes.len(), "Propagating tx hashes to peer"); - - // send hashes of transactions - self.network.send_transactions_hashes(*peer_id, new_pooled_hashes); - } - - // send full transactions, if any - if let Some(new_full_transactions) = full { - for tx in &new_full_transactions { - propagated.0.entry(tx.hash()).or_default().push(PropagateKind::Full(*peer_id)); - // mark transaction as seen by peer - peer.seen_transactions.insert(tx.hash()); - } - - trace!(target: "net::tx", ?peer_id, num_txs=?new_full_transactions.len(), "Propagating full transactions to peer"); - - // send full transactions - self.network.send_transactions(*peer_id, new_full_transactions); - } - } - - // Update propagated transactions metrics - self.metrics.propagated_transactions.increment(propagated.0.len() as u64); - - propagated - } - /// Propagate the full transactions to a specific peer. 
/// /// Returns the propagated transactions. @@ -896,9 +765,9 @@ where // send full transactions, if any if let Some(new_full_transactions) = full { for tx in &new_full_transactions { - propagated.0.entry(tx.hash()).or_default().push(PropagateKind::Full(peer_id)); + propagated.0.entry(*tx.tx_hash()).or_default().push(PropagateKind::Full(peer_id)); // mark transaction as seen by peer - peer.seen_transactions.insert(tx.hash()); + peer.seen_transactions.insert(*tx.tx_hash()); } // send full transactions @@ -930,8 +799,12 @@ where return }; - let to_propagate: Vec = - self.pool.get_all(hashes).into_iter().map(PropagateTransaction::new).collect(); + let to_propagate = self + .pool + .get_all(hashes) + .into_iter() + .map(PropagateTransaction::new) + .collect::>(); let mut propagated = PropagatedTransactions::default(); @@ -975,6 +848,150 @@ where self.pool.on_propagated(propagated); } + /// Propagate the transactions to all connected peers either as full objects or hashes. + /// + /// The message for new pooled hashes depends on the negotiated version of the stream. + /// See [`NewPooledTransactionHashes`] + /// + /// Note: EIP-4844 are disallowed from being broadcast in full and are only ever sent as hashes, see also . + fn propagate_transactions( + &mut self, + to_propagate: Vec>, + propagation_mode: PropagationMode, + ) -> PropagatedTransactions { + let mut propagated = PropagatedTransactions::default(); + if self.network.tx_gossip_disabled() { + return propagated + } + + // send full transactions to a set of the connected peers based on the configured mode + let max_num_full = self.config.propagation_mode.full_peer_count(self.peers.len()); + + // Note: Assuming ~random~ order due to random state of the peers map hasher + for (peer_idx, (peer_id, peer)) in self.peers.iter_mut().enumerate() { + // determine whether to send full tx objects or hashes. + let mut builder = if peer_idx > max_num_full { + PropagateTransactionsBuilder::pooled(peer.version) + } else { + PropagateTransactionsBuilder::full(peer.version) + }; + + if propagation_mode.is_forced() { + builder.extend(to_propagate.iter()); + } else { + // Iterate through the transactions to propagate and fill the hashes and full + // transaction lists, before deciding whether or not to send full transactions to + // the peer. 
+ for tx in &to_propagate { + // Only proceed if the transaction is not in the peer's list of seen + // transactions + if !peer.seen_transactions.contains(&tx.hash()) { + builder.push(tx); + } + } + } + + if builder.is_empty() { + trace!(target: "net::tx", ?peer_id, "Nothing to propagate to peer; has seen all transactions"); + continue + } + + let PropagateTransactions { pooled, full } = builder.build(); + + // send hashes if any + if let Some(mut new_pooled_hashes) = pooled { + // enforce tx soft limit per message for the (unlikely) event the number of + // hashes exceeds it + new_pooled_hashes + .truncate(SOFT_LIMIT_COUNT_HASHES_IN_NEW_POOLED_TRANSACTIONS_BROADCAST_MESSAGE); + + for hash in new_pooled_hashes.iter_hashes().copied() { + propagated.0.entry(hash).or_default().push(PropagateKind::Hash(*peer_id)); + // mark transaction as seen by peer + peer.seen_transactions.insert(hash); + } + + trace!(target: "net::tx", ?peer_id, num_txs=?new_pooled_hashes.len(), "Propagating tx hashes to peer"); + + // send hashes of transactions + self.network.send_transactions_hashes(*peer_id, new_pooled_hashes); + } + + // send full transactions, if any + if let Some(new_full_transactions) = full { + for tx in &new_full_transactions { + propagated + .0 + .entry(*tx.tx_hash()) + .or_default() + .push(PropagateKind::Full(*peer_id)); + // mark transaction as seen by peer + peer.seen_transactions.insert(*tx.tx_hash()); + } + + trace!(target: "net::tx", ?peer_id, num_txs=?new_full_transactions.len(), "Propagating full transactions to peer"); + + // send full transactions + self.network.send_transactions(*peer_id, new_full_transactions); + } + } + + // Update propagated transactions metrics + self.metrics.propagated_transactions.increment(propagated.0.len() as u64); + + propagated + } + + /// Propagates the given transactions to the peers + /// + /// This fetches all transaction from the pool, including the 4844 blob transactions but + /// __without__ their sidecar, because 4844 transactions are only ever announced as hashes. + fn propagate_all(&mut self, hashes: Vec) { + let propagated = self.propagate_transactions( + self.pool.get_all(hashes).into_iter().map(PropagateTransaction::new).collect(), + PropagationMode::Basic, + ); + + // notify pool so events get fired + self.pool.on_propagated(propagated); + } +} + +impl TransactionsManager +where + Pool: TransactionPool + 'static, + <::Transaction as PoolTransaction>::Consensus: Into, +{ + /// Request handler for an incoming request for transactions + fn on_get_pooled_transactions( + &mut self, + peer_id: PeerId, + request: GetPooledTransactions, + response: oneshot::Sender>, + ) { + if let Some(peer) = self.peers.get_mut(&peer_id) { + if self.network.tx_gossip_disabled() { + let _ = response.send(Ok(PooledTransactions::default())); + return + } + let transactions = self.pool.get_pooled_transaction_elements( + request.0, + GetPooledTransactionLimit::ResponseSizeSoftLimit( + self.transaction_fetcher.info.soft_limit_byte_size_pooled_transactions_response, + ), + ); + + trace!(target: "net::tx::propagation", sent_txs=?transactions.iter().map(|tx| *tx.hash()), "Sending requested transactions to peer"); + + // we sent a response at which point we assume that the peer is aware of the + // transactions + peer.seen_transactions.extend(transactions.iter().map(|tx| *tx.hash())); + + let resp = PooledTransactions(transactions); + let _ = response.send(Ok(resp)); + } + } + /// Handles dedicated transaction events related to the `eth` protocol. 
fn on_network_tx_event(&mut self, event: NetworkTransactionEvent) { match event { @@ -1273,6 +1290,7 @@ where impl Future for TransactionsManager where Pool: TransactionPool + Unpin + 'static, + <::Transaction as PoolTransaction>::Consensus: Into, { type Output = (); @@ -1456,21 +1474,18 @@ struct PropagateTransaction { transaction: Arc, } -impl PropagateTransaction { +impl PropagateTransaction { /// Create a new instance from a pooled transaction - fn new(tx: Arc>) -> Self + fn new
<P>
(tx: Arc>) -> Self where - T: PoolTransaction>, + P: PoolTransaction>, { let size = tx.encoded_length(); - let recovered: TransactionSignedEcRecovered = - tx.transaction.clone().into_consensus().into(); - let transaction = Arc::new(recovered.into_signed()); + let transaction = tx.transaction.clone().into_consensus().into(); + let transaction = Arc::new(transaction); Self { size, transaction } } -} -impl PropagateTransaction { fn hash(&self) -> TxHash { *self.transaction.tx_hash() } @@ -2372,7 +2387,8 @@ mod tests { #[test] fn test_transaction_builder_empty() { - let mut builder = PropagateTransactionsBuilder::pooled(EthVersion::Eth68); + let mut builder = + PropagateTransactionsBuilder::::pooled(EthVersion::Eth68); assert!(builder.is_empty()); let mut factory = MockTransactionFactory::default(); @@ -2388,7 +2404,8 @@ mod tests { #[test] fn test_transaction_builder_large() { - let mut builder = PropagateTransactionsBuilder::full(EthVersion::Eth68); + let mut builder = + PropagateTransactionsBuilder::::full(EthVersion::Eth68); assert!(builder.is_empty()); let mut factory = MockTransactionFactory::default(); @@ -2416,7 +2433,8 @@ mod tests { #[test] fn test_transaction_builder_eip4844() { - let mut builder = PropagateTransactionsBuilder::full(EthVersion::Eth68); + let mut builder = + PropagateTransactionsBuilder::::full(EthVersion::Eth68); assert!(builder.is_empty()); let mut factory = MockTransactionFactory::default(); diff --git a/crates/optimism/node/src/txpool.rs b/crates/optimism/node/src/txpool.rs index 0edfeec73227..7df5888fb751 100644 --- a/crates/optimism/node/src/txpool.rs +++ b/crates/optimism/node/src/txpool.rs @@ -3,7 +3,9 @@ use alloy_eips::eip2718::Encodable2718; use parking_lot::RwLock; use reth_chainspec::ChainSpec; use reth_optimism_evm::RethL1BlockInfo; -use reth_primitives::{Block, GotExpected, InvalidTransactionError, SealedBlock}; +use reth_primitives::{ + Block, GotExpected, InvalidTransactionError, SealedBlock, TransactionSigned, +}; use reth_provider::{BlockReaderIdExt, StateProviderFactory}; use reth_revm::L1BlockInfo; use reth_transaction_pool::{ @@ -140,7 +142,8 @@ where let l1_block_info = self.block_info.l1_block_info.read().clone(); let mut encoded = Vec::with_capacity(valid_tx.transaction().encoded_length()); - valid_tx.transaction().clone().into_consensus().into().encode_2718(&mut encoded); + let tx: TransactionSigned = valid_tx.transaction().clone().into_consensus().into(); + tx.encode_2718(&mut encoded); let cost_addition = match l1_block_info.l1_tx_data_fee( &self.chain_spec(), diff --git a/crates/transaction-pool/src/traits.rs b/crates/transaction-pool/src/traits.rs index bcde571b07b9..68a911f2e2e1 100644 --- a/crates/transaction-pool/src/traits.rs +++ b/crates/transaction-pool/src/traits.rs @@ -20,7 +20,8 @@ use reth_eth_wire_types::HandleMempoolData; use reth_execution_types::ChangedAccount; use reth_primitives::{ kzg::KzgSettings, transaction::TryFromRecoveredTransactionError, PooledTransactionsElement, - PooledTransactionsElementEcRecovered, SealedBlock, Transaction, TransactionSignedEcRecovered, + PooledTransactionsElementEcRecovered, SealedBlock, Transaction, TransactionSigned, + TransactionSignedEcRecovered, }; #[cfg(feature = "serde")] use serde::{Deserialize, Serialize}; @@ -1068,7 +1069,9 @@ pub trait PoolTransaction: fmt::Debug + Send + Sync + Clone { /// Super trait for transactions that can be converted to and from Eth transactions pub trait EthPoolTransaction: PoolTransaction< - Consensus: From + Into, + Consensus: From + + Into + + Into, 
Pooled: From + Into, > { From 96f7572404321c718940360cf8b7423f5e5b7615 Mon Sep 17 00:00:00 2001 From: Alexey Shekhirin Date: Thu, 21 Nov 2024 15:52:38 +0000 Subject: [PATCH 089/156] chore(net): downgrade pending sesion timeout log to trace (#12745) --- crates/net/network/src/session/mod.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/net/network/src/session/mod.rs b/crates/net/network/src/session/mod.rs index 816c540cee22..a020c540e385 100644 --- a/crates/net/network/src/session/mod.rs +++ b/crates/net/network/src/session/mod.rs @@ -807,7 +807,7 @@ pub(crate) async fn pending_session_with_timeout( F: Future, { if tokio::time::timeout(timeout, f).await.is_err() { - debug!(target: "net::session", ?remote_addr, ?direction, "pending session timed out"); + trace!(target: "net::session", ?remote_addr, ?direction, "pending session timed out"); let event = PendingSessionEvent::Disconnected { remote_addr, session_id, From ad7885b48cbc8b6270cf10c2615ad9fbeccb28ef Mon Sep 17 00:00:00 2001 From: Emilia Hane Date: Thu, 21 Nov 2024 17:10:06 +0100 Subject: [PATCH 090/156] chore(sdk): Define helper trait `MaybeCompact` (#12683) --- Cargo.toml | 1 + crates/primitives-traits/Cargo.toml | 26 +++++--- crates/primitives-traits/src/account.rs | 59 ++++++++++++------- crates/primitives-traits/src/block/body.rs | 1 - crates/primitives-traits/src/block/header.rs | 7 +-- crates/primitives-traits/src/block/mod.rs | 8 +-- crates/primitives-traits/src/header/sealed.rs | 17 +++--- crates/primitives-traits/src/integer_list.rs | 27 +++++---- crates/primitives-traits/src/lib.rs | 14 +++++ crates/primitives-traits/src/receipt.rs | 11 ++-- crates/primitives-traits/src/storage.rs | 10 ++-- .../primitives-traits/src/transaction/mod.rs | 7 +-- .../src/transaction/signed.rs | 11 ++-- .../src/transaction/tx_type.rs | 7 +-- crates/primitives/Cargo.toml | 7 ++- crates/storage/db-api/Cargo.toml | 2 +- crates/storage/db-models/Cargo.toml | 2 +- crates/storage/db/Cargo.toml | 2 +- crates/storage/provider/Cargo.toml | 2 +- 19 files changed, 134 insertions(+), 87 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 702bbc3090bc..ad17ea4ad0ca 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -528,6 +528,7 @@ tracing = "0.1.0" tracing-appender = "0.2" url = "2.3" zstd = "0.13" +byteorder = "1" # metrics metrics = "0.24.0" diff --git a/crates/primitives-traits/Cargo.toml b/crates/primitives-traits/Cargo.toml index 20430fbc8829..b686a2e98ba3 100644 --- a/crates/primitives-traits/Cargo.toml +++ b/crates/primitives-traits/Cargo.toml @@ -13,7 +13,7 @@ workspace = true [dependencies] # reth -reth-codecs.workspace = true +reth-codecs = { workspace = true, optional = true } # ethereum alloy-consensus.workspace = true @@ -24,16 +24,16 @@ alloy-rlp.workspace = true revm-primitives.workspace = true # misc -byteorder = "1" +byteorder = { workspace = true, optional = true } +bytes.workspace = true derive_more.workspace = true roaring = "0.10.2" serde_with = { workspace = true, optional = true } auto_impl.workspace = true # required by reth-codecs -bytes.workspace = true -modular-bitfield.workspace = true -serde.workspace = true +modular-bitfield = { workspace = true, optional = true } +serde = { workspace = true, optional = true} # arbitrary utils arbitrary = { workspace = true, features = ["derive"], optional = true } @@ -50,6 +50,8 @@ proptest.workspace = true rand.workspace = true serde_json.workspace = true test-fuzz.workspace = true +modular-bitfield.workspace = true +serde.workspace = true [features] default = ["std"] 
@@ -59,11 +61,11 @@ std = [ "alloy-genesis/std", "alloy-primitives/std", "revm-primitives/std", - "serde/std" + "serde?/std" ] test-utils = [ "arbitrary", - "reth-codecs/test-utils" + "reth-codecs?/test-utils" ] arbitrary = [ "std", @@ -74,7 +76,7 @@ arbitrary = [ "dep:proptest-arbitrary-interop", "alloy-eips/arbitrary", "revm-primitives/arbitrary", - "reth-codecs/arbitrary" + "reth-codecs?/arbitrary" ] serde-bincode-compat = [ "serde", @@ -83,13 +85,19 @@ serde-bincode-compat = [ "alloy-eips/serde-bincode-compat" ] serde = [ + "dep:serde", "alloy-consensus/serde", "alloy-eips/serde", "alloy-primitives/serde", "bytes/serde", "rand/serde", - "reth-codecs/serde", + "reth-codecs?/serde", "revm-primitives/serde", "roaring/serde", "revm-primitives/serde", +] +reth-codec = [ + "dep:reth-codecs", + "dep:modular-bitfield", + "dep:byteorder", ] \ No newline at end of file diff --git a/crates/primitives-traits/src/account.rs b/crates/primitives-traits/src/account.rs index 927e39a52e17..c8504f3b63cb 100644 --- a/crates/primitives-traits/src/account.rs +++ b/crates/primitives-traits/src/account.rs @@ -1,32 +1,34 @@ use alloy_consensus::constants::KECCAK_EMPTY; use alloy_genesis::GenesisAccount; use alloy_primitives::{keccak256, Bytes, B256, U256}; -use byteorder::{BigEndian, ReadBytesExt}; -use bytes::Buf; use derive_more::Deref; -use reth_codecs::{add_arbitrary_tests, Compact}; -use revm_primitives::{AccountInfo, Bytecode as RevmBytecode, BytecodeDecodeError, JumpTable}; +use revm_primitives::{AccountInfo, Bytecode as RevmBytecode, BytecodeDecodeError}; -/// Identifier for [`LegacyRaw`](RevmBytecode::LegacyRaw). -const LEGACY_RAW_BYTECODE_ID: u8 = 0; +#[cfg(any(test, feature = "reth-codec"))] +/// Identifiers used in [`Compact`](reth_codecs::Compact) encoding of [`Bytecode`]. +pub mod compact_ids { + /// Identifier for [`LegacyRaw`](revm_primitives::Bytecode::LegacyRaw). + pub const LEGACY_RAW_BYTECODE_ID: u8 = 0; -/// Identifier for removed bytecode variant. -const REMOVED_BYTECODE_ID: u8 = 1; + /// Identifier for removed bytecode variant. + pub const REMOVED_BYTECODE_ID: u8 = 1; -/// Identifier for [`LegacyAnalyzed`](RevmBytecode::LegacyAnalyzed). -const LEGACY_ANALYZED_BYTECODE_ID: u8 = 2; + /// Identifier for [`LegacyAnalyzed`](revm_primitives::Bytecode::LegacyAnalyzed). + pub const LEGACY_ANALYZED_BYTECODE_ID: u8 = 2; -/// Identifier for [`Eof`](RevmBytecode::Eof). -const EOF_BYTECODE_ID: u8 = 3; + /// Identifier for [`Eof`](revm_primitives::Bytecode::Eof). + pub const EOF_BYTECODE_ID: u8 = 3; -/// Identifier for [`Eip7702`](RevmBytecode::Eip7702). -const EIP7702_BYTECODE_ID: u8 = 4; + /// Identifier for [`Eip7702`](revm_primitives::Bytecode::Eip7702). + pub const EIP7702_BYTECODE_ID: u8 = 4; +} /// An Ethereum account. #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] -#[derive(Clone, Copy, Debug, PartialEq, Eq, Default, Compact)] +#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)] #[cfg_attr(any(test, feature = "arbitrary"), derive(arbitrary::Arbitrary))] -#[add_arbitrary_tests(compact)] +#[cfg_attr(any(test, feature = "reth-codec"), derive(reth_codecs::Compact))] +#[cfg_attr(any(test, feature = "reth-codec"), reth_codecs::add_arbitrary_tests(compact))] pub struct Account { /// Account nonce. 
pub nonce: u64, @@ -85,11 +87,17 @@ impl Bytecode { } } -impl Compact for Bytecode { +#[cfg(any(test, feature = "reth-codec"))] +impl reth_codecs::Compact for Bytecode { fn to_compact(&self, buf: &mut B) -> usize where B: bytes::BufMut + AsMut<[u8]>, { + use compact_ids::{ + EIP7702_BYTECODE_ID, EOF_BYTECODE_ID, LEGACY_ANALYZED_BYTECODE_ID, + LEGACY_RAW_BYTECODE_ID, + }; + let bytecode = match &self.0 { RevmBytecode::LegacyRaw(bytes) => bytes, RevmBytecode::LegacyAnalyzed(analyzed) => analyzed.bytecode(), @@ -128,7 +136,12 @@ impl Compact for Bytecode { // A panic will be triggered if a bytecode variant of 1 or greater than 2 is passed from the // database. fn from_compact(mut buf: &[u8], _: usize) -> (Self, &[u8]) { - let len = buf.read_u32::().expect("could not read bytecode length"); + use byteorder::ReadBytesExt; + use bytes::Buf; + + use compact_ids::*; + + let len = buf.read_u32::().expect("could not read bytecode length"); let bytes = Bytes::from(buf.copy_to_bytes(len as usize)); let variant = buf.read_u8().expect("could not read bytecode variant"); let decoded = match variant { @@ -139,8 +152,8 @@ impl Compact for Bytecode { LEGACY_ANALYZED_BYTECODE_ID => Self(unsafe { RevmBytecode::new_analyzed( bytes, - buf.read_u64::().unwrap() as usize, - JumpTable::from_slice(buf), + buf.read_u64::().unwrap() as usize, + revm_primitives::JumpTable::from_slice(buf), ) }), EOF_BYTECODE_ID | EIP7702_BYTECODE_ID => { @@ -187,9 +200,11 @@ impl From for AccountInfo { #[cfg(test)] mod tests { - use super::*; use alloy_primitives::{hex_literal::hex, B256, U256}; - use revm_primitives::LegacyAnalyzedBytecode; + use reth_codecs::Compact; + use revm_primitives::{JumpTable, LegacyAnalyzedBytecode}; + + use super::*; #[test] fn test_account() { diff --git a/crates/primitives-traits/src/block/body.rs b/crates/primitives-traits/src/block/body.rs index 11c4dd785dd8..ff41536ba3f0 100644 --- a/crates/primitives-traits/src/block/body.rs +++ b/crates/primitives-traits/src/block/body.rs @@ -28,7 +28,6 @@ pub trait BlockBody: + MaybeSerde { /// Ordered list of signed transactions as committed in block. - // todo: requires trait for signed transaction type Transaction: Transaction; /// Returns reference to transactions in block. diff --git a/crates/primitives-traits/src/block/header.rs b/crates/primitives-traits/src/block/header.rs index 524835879f31..695e63ed10ee 100644 --- a/crates/primitives-traits/src/block/header.rs +++ b/crates/primitives-traits/src/block/header.rs @@ -3,15 +3,14 @@ use core::fmt; use alloy_primitives::Sealable; -use reth_codecs::Compact; -use crate::{InMemorySize, MaybeSerde}; +use crate::{InMemorySize, MaybeCompact, MaybeSerde}; /// Helper trait that unifies all behaviour required by block header to support full node /// operations. -pub trait FullBlockHeader: BlockHeader + Compact {} +pub trait FullBlockHeader: BlockHeader + MaybeCompact {} -impl FullBlockHeader for T where T: BlockHeader + Compact {} +impl FullBlockHeader for T where T: BlockHeader + MaybeCompact {} /// Abstraction of a block header. 
pub trait BlockHeader: diff --git a/crates/primitives-traits/src/block/mod.rs b/crates/primitives-traits/src/block/mod.rs index 01ed75bd9673..3f4fbd343eec 100644 --- a/crates/primitives-traits/src/block/mod.rs +++ b/crates/primitives-traits/src/block/mod.rs @@ -5,18 +5,18 @@ pub mod header; use alloc::fmt; -use alloy_rlp::{Decodable, Encodable}; - use crate::{BlockHeader, FullBlockBody, FullBlockHeader, InMemorySize, MaybeSerde}; /// Helper trait that unifies all behaviour required by block to support full node operations. pub trait FullBlock: - Block + Encodable + Decodable + Block + alloy_rlp::Encodable + alloy_rlp::Decodable { } impl FullBlock for T where - T: Block + Encodable + Decodable + T: Block + + alloy_rlp::Encodable + + alloy_rlp::Decodable { } diff --git a/crates/primitives-traits/src/header/sealed.rs b/crates/primitives-traits/src/header/sealed.rs index d9931fc95c5b..f0a6869ed1e9 100644 --- a/crates/primitives-traits/src/header/sealed.rs +++ b/crates/primitives-traits/src/header/sealed.rs @@ -1,17 +1,19 @@ -use crate::InMemorySize; pub use alloy_consensus::Header; + +use core::mem; + use alloy_consensus::Sealed; use alloy_eips::BlockNumHash; use alloy_primitives::{keccak256, BlockHash, Sealable, B256}; use alloy_rlp::{Decodable, Encodable}; use bytes::BufMut; -use core::mem; use derive_more::{AsRef, Deref}; -use reth_codecs::add_arbitrary_tests; -use serde::{Deserialize, Serialize}; + +use crate::InMemorySize; /// A helper struct to store the block number/hash and its parent hash. -#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] pub struct BlockWithParent { /// Parent hash. pub parent: B256, @@ -21,8 +23,9 @@ pub struct BlockWithParent { /// A [`Header`] that is sealed at a precalculated hash, use [`SealedHeader::unseal()`] if you want /// to modify header. -#[derive(Debug, Clone, PartialEq, Eq, Hash, AsRef, Deref, Serialize, Deserialize)] -#[add_arbitrary_tests(rlp)] +#[derive(Debug, Clone, PartialEq, Eq, Hash, AsRef, Deref)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[cfg_attr(any(test, feature = "reth-codec"), reth_codecs::add_arbitrary_tests(rlp))] pub struct SealedHeader { /// Locked Header hash. hash: BlockHash, diff --git a/crates/primitives-traits/src/integer_list.rs b/crates/primitives-traits/src/integer_list.rs index 682fa0cf822f..6fc6d75899ce 100644 --- a/crates/primitives-traits/src/integer_list.rs +++ b/crates/primitives-traits/src/integer_list.rs @@ -1,13 +1,9 @@ use alloc::vec::Vec; -use bytes::BufMut; use core::fmt; + +use bytes::BufMut; use derive_more::Deref; use roaring::RoaringTreemap; -use serde::{ - de::{SeqAccess, Visitor}, - ser::SerializeSeq, - Deserialize, Deserializer, Serialize, Serializer, -}; /// A data structure that uses Roaring Bitmaps to efficiently store a list of integers. 
/// @@ -90,11 +86,14 @@ impl IntegerList { } } -impl Serialize for IntegerList { +#[cfg(feature = "serde")] +impl serde::Serialize for IntegerList { fn serialize(&self, serializer: S) -> Result where - S: Serializer, + S: serde::Serializer, { + use serde::ser::SerializeSeq; + let mut seq = serializer.serialize_seq(Some(self.len() as usize))?; for e in &self.0 { seq.serialize_element(&e)?; @@ -103,8 +102,11 @@ impl Serialize for IntegerList { } } +#[cfg(feature = "serde")] struct IntegerListVisitor; -impl<'de> Visitor<'de> for IntegerListVisitor { + +#[cfg(feature = "serde")] +impl<'de> serde::de::Visitor<'de> for IntegerListVisitor { type Value = IntegerList; fn expecting(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { @@ -113,7 +115,7 @@ impl<'de> Visitor<'de> for IntegerListVisitor { fn visit_seq(self, mut seq: E) -> Result where - E: SeqAccess<'de>, + E: serde::de::SeqAccess<'de>, { let mut list = IntegerList::empty(); while let Some(item) = seq.next_element()? { @@ -123,10 +125,11 @@ impl<'de> Visitor<'de> for IntegerListVisitor { } } -impl<'de> Deserialize<'de> for IntegerList { +#[cfg(feature = "serde")] +impl<'de> serde::Deserialize<'de> for IntegerList { fn deserialize(deserializer: D) -> Result where - D: Deserializer<'de>, + D: serde::Deserializer<'de>, { deserializer.deserialize_byte_buf(IntegerListVisitor) } diff --git a/crates/primitives-traits/src/lib.rs b/crates/primitives-traits/src/lib.rs index 38e83f8ccdfd..5c969152d8d6 100644 --- a/crates/primitives-traits/src/lib.rs +++ b/crates/primitives-traits/src/lib.rs @@ -103,3 +103,17 @@ pub trait MaybeSerde {} impl MaybeSerde for T where T: serde::Serialize + for<'de> serde::Deserialize<'de> {} #[cfg(not(feature = "serde"))] impl MaybeSerde for T {} + +/// Helper trait that requires database encoding implementation since `reth-codec` feature is +/// enabled. +#[cfg(feature = "reth-codec")] +pub trait MaybeCompact: reth_codecs::Compact {} +/// Noop. Helper trait that would require database encoding implementation if `reth-codec` feature +/// were enabled. +#[cfg(not(feature = "reth-codec"))] +pub trait MaybeCompact {} + +#[cfg(feature = "reth-codec")] +impl MaybeCompact for T where T: reth_codecs::Compact {} +#[cfg(not(feature = "reth-codec"))] +impl MaybeCompact for T {} diff --git a/crates/primitives-traits/src/receipt.rs b/crates/primitives-traits/src/receipt.rs index 64839ecb8b4b..4370d2ac00f7 100644 --- a/crates/primitives-traits/src/receipt.rs +++ b/crates/primitives-traits/src/receipt.rs @@ -1,16 +1,17 @@ //! Receipt abstraction -use crate::{InMemorySize, MaybeSerde}; +use core::fmt; + use alloc::vec::Vec; use alloy_consensus::TxReceipt; use alloy_primitives::B256; -use core::fmt; -use reth_codecs::Compact; + +use crate::{InMemorySize, MaybeCompact, MaybeSerde}; /// Helper trait that unifies all behaviour required by receipt to support full node operations. -pub trait FullReceipt: Receipt + Compact {} +pub trait FullReceipt: Receipt + MaybeCompact {} -impl FullReceipt for T where T: ReceiptExt + Compact {} +impl FullReceipt for T where T: ReceiptExt + MaybeCompact {} /// Abstraction of a receipt. #[auto_impl::auto_impl(&, Arc)] diff --git a/crates/primitives-traits/src/storage.rs b/crates/primitives-traits/src/storage.rs index 39b6155ee284..c6b9b1e11c75 100644 --- a/crates/primitives-traits/src/storage.rs +++ b/crates/primitives-traits/src/storage.rs @@ -1,13 +1,12 @@ use alloy_primitives::{B256, U256}; -use reth_codecs::{add_arbitrary_tests, Compact}; -use serde::{Deserialize, Serialize}; /// Account storage entry. 
/// /// `key` is the subkey when used as a value in the `StorageChangeSets` table. -#[derive(Debug, Default, Copy, Clone, PartialEq, Eq, Serialize, Deserialize, PartialOrd, Ord)] +#[derive(Debug, Default, Copy, Clone, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] #[cfg_attr(any(test, feature = "arbitrary"), derive(arbitrary::Arbitrary))] -#[add_arbitrary_tests(compact)] +#[cfg_attr(any(test, feature = "reth-codec"), reth_codecs::add_arbitrary_tests(compact))] pub struct StorageEntry { /// Storage key. pub key: B256, @@ -31,7 +30,8 @@ impl From<(B256, U256)> for StorageEntry { // NOTE: Removing reth_codec and manually encode subkey // and compress second part of the value. If we have compression // over whole value (Even SubKey) that would mess up fetching of values with seek_by_key_subkey -impl Compact for StorageEntry { +#[cfg(any(test, feature = "reth-codec"))] +impl reth_codecs::Compact for StorageEntry { fn to_compact(&self, buf: &mut B) -> usize where B: bytes::BufMut + AsMut<[u8]>, diff --git a/crates/primitives-traits/src/transaction/mod.rs b/crates/primitives-traits/src/transaction/mod.rs index 8bd0027a8b21..7647c94496f6 100644 --- a/crates/primitives-traits/src/transaction/mod.rs +++ b/crates/primitives-traits/src/transaction/mod.rs @@ -7,14 +7,13 @@ pub mod tx_type; use core::{fmt, hash::Hash}; use alloy_primitives::B256; -use reth_codecs::Compact; -use crate::{FullTxType, InMemorySize, MaybeArbitrary, MaybeSerde, TxType}; +use crate::{FullTxType, InMemorySize, MaybeArbitrary, MaybeCompact, MaybeSerde, TxType}; /// Helper trait that unifies all behaviour required by transaction to support full node operations. -pub trait FullTransaction: Transaction + Compact {} +pub trait FullTransaction: Transaction + MaybeCompact {} -impl FullTransaction for T where T: Transaction + Compact {} +impl FullTransaction for T where T: Transaction + MaybeCompact {} /// Abstraction of a transaction. pub trait Transaction: diff --git a/crates/primitives-traits/src/transaction/signed.rs b/crates/primitives-traits/src/transaction/signed.rs index 633b0caf7b2d..563f3a6f3366 100644 --- a/crates/primitives-traits/src/transaction/signed.rs +++ b/crates/primitives-traits/src/transaction/signed.rs @@ -5,18 +5,19 @@ use core::hash::Hash; use alloy_eips::eip2718::{Decodable2718, Encodable2718}; use alloy_primitives::{keccak256, Address, PrimitiveSignature, TxHash, B256}; -use reth_codecs::Compact; -use crate::{FillTxEnv, FullTransaction, InMemorySize, MaybeArbitrary, MaybeSerde, Transaction}; +use crate::{ + FillTxEnv, FullTransaction, InMemorySize, MaybeArbitrary, MaybeCompact, MaybeSerde, Transaction, +}; /// Helper trait that unifies all behaviour required by block to support full node operations. pub trait FullSignedTx: - SignedTransaction + FillTxEnv + Compact + SignedTransaction + FillTxEnv + MaybeCompact { } impl FullSignedTx for T where - T: SignedTransaction + FillTxEnv + Compact + T: SignedTransaction + FillTxEnv + MaybeCompact { } @@ -41,7 +42,7 @@ pub trait SignedTransaction: + MaybeArbitrary + InMemorySize { - /// Transaction type that is signed. + /// Unsigned transaction type. type Transaction: Transaction; /// Returns reference to transaction hash. 
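[Editor's sketch] The `MaybeCompact` helper introduced in this patch mirrors the existing `MaybeSerde` trick: the supertrait bound on `reth_codecs::Compact` only exists when the `reth-codec` feature is enabled, so the `Full*` traits can keep a single definition either way. Below is a minimal, self-contained sketch of that pattern under assumed names — `Compactish`, `MaybeCompactish`, `FullItem`, and the `codec` feature are illustrative stand-ins, not items from the crate.

// Sketch of the feature-gated marker-trait pattern behind `MaybeCompact`.
// `Compactish` stands in for `reth_codecs::Compact`; the `codec` feature
// name and all identifiers here are illustrative.

/// Stand-in for the real `Compact` encoding trait.
pub trait Compactish {
    fn to_bytes(&self) -> Vec<u8>;
}

/// With the feature on, the marker actually requires `Compactish`.
#[cfg(feature = "codec")]
pub trait MaybeCompactish: Compactish {}
#[cfg(feature = "codec")]
impl<T: Compactish> MaybeCompactish for T {}

/// With the feature off, the marker is a no-op satisfied by every type.
#[cfg(not(feature = "codec"))]
pub trait MaybeCompactish {}
#[cfg(not(feature = "codec"))]
impl<T> MaybeCompactish for T {}

/// Downstream "full" traits can bound on the marker unconditionally,
/// the way the `Full*` traits in this patch bound on `MaybeCompact`.
pub trait FullItem: MaybeCompactish + core::fmt::Debug {}
impl<T: MaybeCompactish + core::fmt::Debug> FullItem for T {}
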
diff --git a/crates/primitives-traits/src/transaction/tx_type.rs b/crates/primitives-traits/src/transaction/tx_type.rs index 866242098d3f..931fcb773bf4 100644 --- a/crates/primitives-traits/src/transaction/tx_type.rs +++ b/crates/primitives-traits/src/transaction/tx_type.rs @@ -3,15 +3,14 @@ use core::fmt; use alloy_primitives::{U64, U8}; -use reth_codecs::Compact; -use crate::InMemorySize; +use crate::{InMemorySize, MaybeCompact}; /// Helper trait that unifies all behaviour required by transaction type ID to support full node /// operations. -pub trait FullTxType: TxType + Compact {} +pub trait FullTxType: TxType + MaybeCompact {} -impl FullTxType for T where T: TxType + Compact {} +impl FullTxType for T where T: TxType + MaybeCompact {} /// Trait representing the behavior of a transaction type. pub trait TxType: diff --git a/crates/primitives/Cargo.toml b/crates/primitives/Cargo.toml index 89282c8f93d7..ebfa26aef0a4 100644 --- a/crates/primitives/Cargo.toml +++ b/crates/primitives/Cargo.toml @@ -105,7 +105,12 @@ std = [ "serde/std", "alloy-trie/std" ] -reth-codec = ["dep:reth-codecs", "dep:zstd", "dep:modular-bitfield", "std"] +reth-codec = [ + "dep:reth-codecs", + "dep:zstd", + "dep:modular-bitfield", "std", + "reth-primitives-traits/reth-codec", +] asm-keccak = ["alloy-primitives/asm-keccak", "revm-primitives/asm-keccak"] arbitrary = [ "dep:arbitrary", diff --git a/crates/storage/db-api/Cargo.toml b/crates/storage/db-api/Cargo.toml index bcc3e7789847..3aa908a60093 100644 --- a/crates/storage/db-api/Cargo.toml +++ b/crates/storage/db-api/Cargo.toml @@ -16,7 +16,7 @@ workspace = true reth-codecs.workspace = true reth-db-models.workspace = true reth-primitives = { workspace = true, features = ["reth-codec"] } -reth-primitives-traits = { workspace = true, features = ["serde"] } +reth-primitives-traits = { workspace = true, features = ["serde", "reth-codec"] } reth-prune-types.workspace = true reth-stages-types.workspace = true reth-storage-errors.workspace = true diff --git a/crates/storage/db-models/Cargo.toml b/crates/storage/db-models/Cargo.toml index 44c0c3d962a5..0997c08b784d 100644 --- a/crates/storage/db-models/Cargo.toml +++ b/crates/storage/db-models/Cargo.toml @@ -14,7 +14,7 @@ workspace = true [dependencies] # reth reth-codecs.workspace = true -reth-primitives-traits = { workspace = true, features = ["serde"] } +reth-primitives-traits = { workspace = true, features = ["serde", "reth-codec"] } # ethereum alloy-primitives.workspace = true diff --git a/crates/storage/db/Cargo.toml b/crates/storage/db/Cargo.toml index 7dca8aa84752..af72bc43f7e9 100644 --- a/crates/storage/db/Cargo.toml +++ b/crates/storage/db/Cargo.toml @@ -15,7 +15,7 @@ workspace = true # reth reth-db-api.workspace = true reth-primitives = { workspace = true, features = ["reth-codec"] } -reth-primitives-traits = { workspace = true, features = ["serde"] } +reth-primitives-traits = { workspace = true, features = ["serde", "reth-codec"] } reth-fs-util.workspace = true reth-storage-errors.workspace = true reth-nippy-jar.workspace = true diff --git a/crates/storage/provider/Cargo.toml b/crates/storage/provider/Cargo.toml index 674f02adabc3..974de01e0045 100644 --- a/crates/storage/provider/Cargo.toml +++ b/crates/storage/provider/Cargo.toml @@ -17,7 +17,7 @@ reth-chainspec.workspace = true reth-blockchain-tree-api.workspace = true reth-execution-types.workspace = true reth-primitives = { workspace = true, features = ["reth-codec", "secp256k1"] } -reth-primitives-traits.workspace = true +reth-primitives-traits = { 
workspace = true, features = ["reth-codec"] } reth-fs-util.workspace = true reth-errors.workspace = true reth-storage-errors.workspace = true From 2c7b404c245dc465fbd956193400afb8df63be60 Mon Sep 17 00:00:00 2001 From: Hai | RISE <150876604+hai-rise@users.noreply.github.com> Date: Thu, 21 Nov 2024 23:46:44 +0700 Subject: [PATCH 091/156] perf(evm-config): return `&Arc` (#12748) --- crates/ethereum/evm/src/lib.rs | 2 +- crates/optimism/evm/src/lib.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/ethereum/evm/src/lib.rs b/crates/ethereum/evm/src/lib.rs index c8ed58df03ba..206230cd00ef 100644 --- a/crates/ethereum/evm/src/lib.rs +++ b/crates/ethereum/evm/src/lib.rs @@ -55,7 +55,7 @@ impl EthEvmConfig { } /// Returns the chain spec associated with this configuration. - pub fn chain_spec(&self) -> &ChainSpec { + pub const fn chain_spec(&self) -> &Arc { &self.chain_spec } } diff --git a/crates/optimism/evm/src/lib.rs b/crates/optimism/evm/src/lib.rs index 52b974e6c862..310746275103 100644 --- a/crates/optimism/evm/src/lib.rs +++ b/crates/optimism/evm/src/lib.rs @@ -50,7 +50,7 @@ impl OpEvmConfig { } /// Returns the chain spec associated with this configuration. - pub fn chain_spec(&self) -> &OpChainSpec { + pub const fn chain_spec(&self) -> &Arc { &self.chain_spec } } From 2093d2bd9a72074be19f410116a9fb256eb15eed Mon Sep 17 00:00:00 2001 From: Emilia Hane Date: Thu, 21 Nov 2024 18:03:05 +0100 Subject: [PATCH 092/156] chore(sdk): Add `NodePrimitives::BlockHeader` and `NodePrimitives::BlockBody` (#12647) --- .github/assets/check_wasm.sh | 1 + Cargo.lock | 1 + crates/optimism/node/Cargo.toml | 5 ++++ crates/optimism/node/src/node.rs | 16 +++-------- crates/optimism/primitives/Cargo.toml | 15 +++++++++-- crates/optimism/primitives/src/lib.rs | 10 ++++--- crates/primitives-traits/Cargo.toml | 2 +- crates/primitives-traits/src/node.rs | 38 ++++++++++++++++++++++++--- crates/primitives/src/lib.rs | 11 ++------ 9 files changed, 68 insertions(+), 31 deletions(-) diff --git a/.github/assets/check_wasm.sh b/.github/assets/check_wasm.sh index 35f4bdda5b83..11e5b5e00b9e 100755 --- a/.github/assets/check_wasm.sh +++ b/.github/assets/check_wasm.sh @@ -47,6 +47,7 @@ exclude_crates=( reth-optimism-node reth-optimism-payload-builder reth-optimism-rpc + reth-optimism-primitives reth-rpc reth-rpc-api reth-rpc-api-testing-util diff --git a/Cargo.lock b/Cargo.lock index 10b7f2fbda53..9ea3283dc91e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8328,6 +8328,7 @@ dependencies = [ "reth-optimism-forks", "reth-optimism-node", "reth-optimism-payload-builder", + "reth-optimism-primitives", "reth-optimism-rpc", "reth-payload-builder", "reth-payload-util", diff --git a/crates/optimism/node/Cargo.toml b/crates/optimism/node/Cargo.toml index 2e3e9fb4f1d1..18ceee8ef8b7 100644 --- a/crates/optimism/node/Cargo.toml +++ b/crates/optimism/node/Cargo.toml @@ -39,6 +39,7 @@ reth-optimism-rpc.workspace = true reth-optimism-chainspec.workspace = true reth-optimism-consensus.workspace = true reth-optimism-forks.workspace = true +reth-optimism-primitives.workspace = true # revm with required optimism features revm = { workspace = true, features = ["secp256k1", "blst", "c-kzg"] } @@ -119,3 +120,7 @@ test-utils = [ "revm/test-utils", "reth-optimism-node/test-utils", ] +reth-codec = [ + "reth-primitives/reth-codec", + "reth-optimism-primitives/reth-codec", +] \ No newline at end of file diff --git a/crates/optimism/node/src/node.rs b/crates/optimism/node/src/node.rs index bdc3d0d3a44a..46841a2a5b99 100644 --- 
a/crates/optimism/node/src/node.rs +++ b/crates/optimism/node/src/node.rs @@ -9,8 +9,7 @@ use reth_db::transaction::{DbTx, DbTxMut}; use reth_evm::{execute::BasicBlockExecutorProvider, ConfigureEvm}; use reth_network::{NetworkConfig, NetworkHandle, NetworkManager, PeersInfo}; use reth_node_api::{ - AddOnsContext, EngineValidator, FullNodeComponents, FullNodePrimitives, NodeAddOns, - PayloadBuilder, + AddOnsContext, EngineValidator, FullNodeComponents, NodeAddOns, PayloadBuilder, }; use reth_node_builder::{ components::{ @@ -25,12 +24,13 @@ use reth_optimism_chainspec::OpChainSpec; use reth_optimism_consensus::OpBeaconConsensus; use reth_optimism_evm::{OpEvmConfig, OpExecutionStrategyFactory}; use reth_optimism_payload_builder::builder::OpPayloadTransactions; +use reth_optimism_primitives::OpPrimitives; use reth_optimism_rpc::{ witness::{DebugExecutionWitnessApiServer, OpDebugWitnessApi}, OpEthApi, }; use reth_payload_builder::{PayloadBuilderHandle, PayloadBuilderService}; -use reth_primitives::{Block, BlockBody, Receipt, TransactionSigned, TxType}; +use reth_primitives::BlockBody; use reth_provider::{ providers::ChainStorage, BlockBodyWriter, CanonStateSubscriptions, DBProvider, EthStorage, ProviderResult, @@ -49,16 +49,6 @@ use crate::{ txpool::{OpTransactionPool, OpTransactionValidator}, OpEngineTypes, }; -/// Optimism primitive types. -#[derive(Debug, Default, Clone, PartialEq, Eq)] -pub struct OpPrimitives; - -impl FullNodePrimitives for OpPrimitives { - type Block = Block; - type SignedTx = TransactionSigned; - type TxType = TxType; - type Receipt = Receipt; -} /// Storage implementation for Optimism. #[derive(Debug, Default, Clone)] diff --git a/crates/optimism/primitives/Cargo.toml b/crates/optimism/primitives/Cargo.toml index ade6d4eb6bc5..fc368807736f 100644 --- a/crates/optimism/primitives/Cargo.toml +++ b/crates/optimism/primitives/Cargo.toml @@ -39,10 +39,21 @@ reth-codecs = { workspace = true, features = ["test-utils"] } rstest.workspace = true [features] -default = ["reth-codec"] +default = ["std", "reth-codec"] +std = [ + "reth-primitives-traits/std", + "reth-primitives/std", + "reth-node-types/std", + "reth-codecs/std", + "alloy-consensus/std", + "alloy-eips/std", + "alloy-primitives/std", + "serde/std", +] reth-codec = [ "dep:reth-codecs", - "reth-primitives/reth-codec" + "reth-primitives/reth-codec", + "reth-primitives-traits/reth-codec", ] serde = [ "dep:serde", diff --git a/crates/optimism/primitives/src/lib.rs b/crates/optimism/primitives/src/lib.rs index 5f6b1848e648..26499bb43af3 100644 --- a/crates/optimism/primitives/src/lib.rs +++ b/crates/optimism/primitives/src/lib.rs @@ -6,21 +6,25 @@ issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" )] #![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] +#![cfg_attr(not(feature = "std"), no_std)] pub mod bedrock; pub mod tx_type; pub use tx_type::OpTxType; -use reth_node_types::NodePrimitives; -use reth_primitives::{Block, Receipt, TransactionSigned}; +use alloy_consensus::Header; +use reth_node_types::FullNodePrimitives; +use reth_primitives::{Block, BlockBody, Receipt, TransactionSigned}; /// Optimism primitive types. 
#[derive(Debug, Default, Clone, PartialEq, Eq)] pub struct OpPrimitives; -impl NodePrimitives for OpPrimitives { +impl FullNodePrimitives for OpPrimitives { type Block = Block; + type BlockHeader = Header; + type BlockBody = BlockBody; type SignedTx = TransactionSigned; type TxType = OpTxType; type Receipt = Receipt; diff --git a/crates/primitives-traits/Cargo.toml b/crates/primitives-traits/Cargo.toml index b686a2e98ba3..df4491b2d126 100644 --- a/crates/primitives-traits/Cargo.toml +++ b/crates/primitives-traits/Cargo.toml @@ -100,4 +100,4 @@ reth-codec = [ "dep:reth-codecs", "dep:modular-bitfield", "dep:byteorder", -] \ No newline at end of file +] diff --git a/crates/primitives-traits/src/node.rs b/crates/primitives-traits/src/node.rs index 180920d39345..7cb321e9af3b 100644 --- a/crates/primitives-traits/src/node.rs +++ b/crates/primitives-traits/src/node.rs @@ -1,6 +1,8 @@ use core::fmt; -use crate::{BlockBody, FullBlock, FullReceipt, FullSignedTx, FullTxType, MaybeSerde}; +use crate::{ + FullBlock, FullBlockBody, FullBlockHeader, FullReceipt, FullSignedTx, FullTxType, MaybeSerde, +}; /// Configures all the primitive types of the node. pub trait NodePrimitives: @@ -17,6 +19,28 @@ pub trait NodePrimitives: + Eq + MaybeSerde + 'static; + /// Block header primitive. + type BlockHeader: Send + + Sync + + Unpin + + Clone + + Default + + fmt::Debug + + PartialEq + + Eq + + MaybeSerde + + 'static; + /// Block body primitive. + type BlockBody: Send + + Sync + + Unpin + + Clone + + Default + + fmt::Debug + + PartialEq + + Eq + + MaybeSerde + + 'static; /// Signed version of the transaction type. type SignedTx: Send + Sync @@ -45,6 +69,8 @@ pub trait NodePrimitives: impl NodePrimitives for () { type Block = (); + type BlockHeader = (); + type BlockBody = (); type SignedTx = (); type TxType = (); type Receipt = (); @@ -55,7 +81,11 @@ pub trait FullNodePrimitives: Send + Sync + Unpin + Clone + Default + fmt::Debug + PartialEq + Eq + 'static { /// Block primitive. - type Block: FullBlock>; + type Block: FullBlock
<Header = Self::BlockHeader, Body = Self::BlockBody>
; + /// Block header primitive. + type BlockHeader: FullBlockHeader + 'static; + /// Block body primitive. + type BlockBody: FullBlockBody + 'static; /// Signed version of the transaction type. type SignedTx: FullSignedTx; /// Transaction envelope type ID. @@ -66,9 +96,11 @@ pub trait FullNodePrimitives: impl NodePrimitives for T where - T: FullNodePrimitives, + T: FullNodePrimitives, { type Block = T::Block; + type BlockHeader = T::BlockHeader; + type BlockBody = T::BlockBody; type SignedTx = T::SignedTx; type TxType = T::TxType; type Receipt = T::Receipt; diff --git a/crates/primitives/src/lib.rs b/crates/primitives/src/lib.rs index 027bf97cfa5c..c46c437dd714 100644 --- a/crates/primitives/src/lib.rs +++ b/crates/primitives/src/lib.rs @@ -79,17 +79,10 @@ pub mod serde_bincode_compat { #[derive(Debug, Clone, Default, PartialEq, Eq, serde::Serialize, serde::Deserialize)] pub struct EthPrimitives; -#[cfg(feature = "reth-codec")] impl reth_primitives_traits::FullNodePrimitives for EthPrimitives { type Block = crate::Block; - type SignedTx = crate::TransactionSigned; - type TxType = crate::TxType; - type Receipt = crate::Receipt; -} - -#[cfg(not(feature = "reth-codec"))] -impl NodePrimitives for EthPrimitives { - type Block = crate::Block; + type BlockHeader = alloy_consensus::Header; + type BlockBody = crate::BlockBody; type SignedTx = crate::TransactionSigned; type TxType = crate::TxType; type Receipt = crate::Receipt; From f8d683e80ee08412e19cc212a4c2bf8ebeca245b Mon Sep 17 00:00:00 2001 From: Nils Date: Thu, 21 Nov 2024 18:03:15 +0100 Subject: [PATCH 093/156] Improve metrics hooks setup (fixes #12672) (#12684) Co-authored-by: Matthias Seitz --- Cargo.lock | 3 - crates/cli/commands/src/stage/run.rs | 19 ++++- crates/node/builder/src/launch/common.rs | 17 +++- crates/node/metrics/Cargo.toml | 4 - crates/node/metrics/src/hooks.rs | 98 ++++++++++++++++-------- crates/node/metrics/src/server.rs | 8 +- 6 files changed, 99 insertions(+), 50 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 9ea3283dc91e..db7adde04793 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8143,10 +8143,7 @@ dependencies = [ "metrics-util", "procfs 0.16.0", "reqwest", - "reth-db-api", "reth-metrics", - "reth-primitives-traits", - "reth-provider", "reth-tasks", "socket2", "tikv-jemalloc-ctl", diff --git a/crates/cli/commands/src/stage/run.rs b/crates/cli/commands/src/stage/run.rs index f3c3bbef9651..c852eea05a7e 100644 --- a/crates/cli/commands/src/stage/run.rs +++ b/crates/cli/commands/src/stage/run.rs @@ -11,6 +11,7 @@ use reth_cli::chainspec::ChainSpecParser; use reth_cli_runner::CliContext; use reth_cli_util::get_secret_key; use reth_config::config::{HashingConfig, SenderRecoveryConfig, TransactionLookupConfig}; +use reth_db_api::database_metrics::DatabaseMetrics; use reth_downloaders::{ bodies::bodies::BodiesDownloaderBuilder, headers::reverse_headers::ReverseHeadersDownloaderBuilder, @@ -132,10 +133,20 @@ impl> Command }, ChainSpecInfo { name: provider_factory.chain_spec().chain().to_string() }, ctx.task_executor, - Hooks::new( - provider_factory.db_ref().clone(), - provider_factory.static_file_provider(), - ), + Hooks::builder() + .with_hook({ + let db = provider_factory.db_ref().clone(); + move || db.report_metrics() + }) + .with_hook({ + let sfp = provider_factory.static_file_provider(); + move || { + if let Err(error) = sfp.report_metrics() { + error!(%error, "Failed to report metrics from static file provider"); + } + } + }) + .build(), ); MetricServer::new(config).serve().await?; diff --git 
a/crates/node/builder/src/launch/common.rs b/crates/node/builder/src/launch/common.rs index 225f2029c28a..9cc841f6fea8 100644 --- a/crates/node/builder/src/launch/common.rs +++ b/crates/node/builder/src/launch/common.rs @@ -18,7 +18,7 @@ use reth_blockchain_tree::{ use reth_chainspec::{Chain, EthChainSpec, EthereumHardforks}; use reth_config::{config::EtlConfig, PruneConfig}; use reth_consensus::Consensus; -use reth_db_api::database::Database; +use reth_db_api::{database::Database, database_metrics::DatabaseMetrics}; use reth_db_common::init::{init_genesis, InitDatabaseError}; use reth_downloaders::{bodies::noop::NoopBodiesDownloader, headers::noop::NoopHeaderDownloader}; use reth_engine_local::MiningMode; @@ -536,7 +536,20 @@ where }, ChainSpecInfo { name: self.left().config.chain.chain().to_string() }, self.task_executor().clone(), - Hooks::new(self.database().clone(), self.static_file_provider()), + Hooks::builder() + .with_hook({ + let db = self.database().clone(); + move || db.report_metrics() + }) + .with_hook({ + let sfp = self.static_file_provider(); + move || { + if let Err(error) = sfp.report_metrics() { + error!(%error, "Failed to report metrics for the static file provider"); + } + } + }) + .build(), ); MetricServer::new(config).serve().await?; diff --git a/crates/node/metrics/Cargo.toml b/crates/node/metrics/Cargo.toml index 7e271f93ce56..3d79d11db7d7 100644 --- a/crates/node/metrics/Cargo.toml +++ b/crates/node/metrics/Cargo.toml @@ -8,9 +8,6 @@ homepage.workspace = true repository.workspace = true [dependencies] -reth-db-api.workspace = true -reth-primitives-traits.workspace = true -reth-provider.workspace = true reth-metrics.workspace = true reth-tasks.workspace = true @@ -37,7 +34,6 @@ procfs = "0.16.0" [dev-dependencies] reqwest.workspace = true socket2 = { version = "0.5", default-features = false } -reth-provider = { workspace = true, features = ["test-utils"] } [lints] workspace = true diff --git a/crates/node/metrics/src/hooks.rs b/crates/node/metrics/src/hooks.rs index 21d12614f625..3b6d23a39007 100644 --- a/crates/node/metrics/src/hooks.rs +++ b/crates/node/metrics/src/hooks.rs @@ -1,20 +1,59 @@ use metrics_process::Collector; -use reth_db_api::database_metrics::DatabaseMetrics; -use reth_primitives_traits::NodePrimitives; -use reth_provider::providers::StaticFileProvider; -use std::{ - fmt::{self}, - sync::Arc, -}; +use std::{fmt, sync::Arc}; -pub(crate) trait Hook: Fn() + Send + Sync {} -impl Hook for T {} +/// The simple alias for function types that are `'static`, `Send`, and `Sync`. +pub trait Hook: Fn() + Send + Sync + 'static {} +impl Hook for T {} -impl fmt::Debug for Hooks { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let hooks_len = self.inner.len(); - f.debug_struct("Hooks") - .field("inner", &format!("Arc>>, len: {}", hooks_len)) +/// A builder-like type to create a new [`Hooks`] instance. +pub struct HooksBuilder { + hooks: Vec>>, +} + +impl HooksBuilder { + /// Registers a [`Hook`]. + pub fn with_hook(self, hook: impl Hook) -> Self { + self.with_boxed_hook(Box::new(hook)) + } + + /// Registers a [`Hook`] by calling the provided closure. + pub fn install_hook(self, f: F) -> Self + where + F: FnOnce() -> H, + H: Hook, + { + self.with_hook(f()) + } + + /// Registers a [`Hook`]. + #[inline] + pub fn with_boxed_hook(mut self, hook: Box>) -> Self { + self.hooks.push(hook); + self + } + + /// Builds the [`Hooks`] collection from the registered hooks. 
+ pub fn build(self) -> Hooks { + Hooks { inner: Arc::new(self.hooks) } + } +} + +impl Default for HooksBuilder { + fn default() -> Self { + Self { + hooks: vec![ + Box::new(|| Collector::default().collect()), + Box::new(collect_memory_stats), + Box::new(collect_io_stats), + ], + } + } +} + +impl std::fmt::Debug for HooksBuilder { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("HooksBuilder") + .field("hooks", &format_args!("Vec>, len: {}", self.hooks.len())) .finish() } } @@ -26,24 +65,10 @@ pub struct Hooks { } impl Hooks { - /// Create a new set of hooks - pub fn new(db: Metrics, static_file_provider: StaticFileProvider) -> Self - where - Metrics: DatabaseMetrics + 'static + Send + Sync, - N: NodePrimitives, - { - let hooks: Vec>> = vec![ - Box::new(move || db.report_metrics()), - Box::new(move || { - let _ = static_file_provider.report_metrics().map_err( - |error| tracing::error!(%error, "Failed to report static file provider metrics"), - ); - }), - Box::new(move || Collector::default().collect()), - Box::new(collect_memory_stats), - Box::new(collect_io_stats), - ]; - Self { inner: Arc::new(hooks) } + /// Creates a new [`HooksBuilder`] instance. + #[inline] + pub fn builder() -> HooksBuilder { + HooksBuilder::default() } pub(crate) fn iter(&self) -> impl Iterator>> { @@ -51,6 +76,15 @@ impl Hooks { } } +impl fmt::Debug for Hooks { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let hooks_len = self.inner.len(); + f.debug_struct("Hooks") + .field("inner", &format_args!("Arc>>, len: {}", hooks_len)) + .finish() + } +} + #[cfg(all(feature = "jemalloc", unix))] fn collect_memory_stats() { use metrics::gauge; diff --git a/crates/node/metrics/src/server.rs b/crates/node/metrics/src/server.rs index 313329fb56a5..313b578f800d 100644 --- a/crates/node/metrics/src/server.rs +++ b/crates/node/metrics/src/server.rs @@ -206,7 +206,6 @@ const fn describe_io_stats() {} mod tests { use super::*; use reqwest::Client; - use reth_provider::{test_utils::create_test_provider_factory, StaticFileProviderFactory}; use reth_tasks::TaskManager; use socket2::{Domain, Socket, Type}; use std::net::{SocketAddr, TcpListener}; @@ -236,8 +235,7 @@ mod tests { let tasks = TaskManager::current(); let executor = tasks.executor(); - let factory = create_test_provider_factory(); - let hooks = Hooks::new(factory.db_ref().clone(), factory.static_file_provider()); + let hooks = Hooks::builder().build(); let listen_addr = get_random_available_addr(); let config = @@ -252,7 +250,7 @@ mod tests { // Check the response body let body = response.text().await.unwrap(); - assert!(body.contains("reth_db_table_size")); - assert!(body.contains("reth_jemalloc_metadata")); + assert!(body.contains("reth_process_cpu_seconds_total")); + assert!(body.contains("reth_process_start_time_seconds")); } } From 3d477e7d6fde555856529abca3de7b658662e125 Mon Sep 17 00:00:00 2001 From: Emilia Hane Date: Thu, 21 Nov 2024 18:53:57 +0100 Subject: [PATCH 094/156] Replace use of fully qualified syntax (#12751) --- crates/e2e-test-utils/src/lib.rs | 6 ++---- crates/node/builder/src/builder/mod.rs | 6 ++---- crates/node/builder/src/launch/common.rs | 6 ++---- crates/node/builder/src/launch/engine.rs | 3 +-- crates/node/builder/src/launch/mod.rs | 3 +-- crates/node/builder/src/setup.rs | 6 ++---- 6 files changed, 10 insertions(+), 20 deletions(-) diff --git a/crates/e2e-test-utils/src/lib.rs b/crates/e2e-test-utils/src/lib.rs index df459f641b43..73a7e39f1a49 100644 --- a/crates/e2e-test-utils/src/lib.rs 
+++ b/crates/e2e-test-utils/src/lib.rs @@ -59,8 +59,7 @@ where Components: NodeComponents, Network: PeersHandleProvider>, >, N::AddOns: RethRpcAddOns>, - N::Primitives: - FullNodePrimitives>, + N::Primitives: FullNodePrimitives, { let tasks = TaskManager::current(); let exec = tasks.executor(); @@ -135,8 +134,7 @@ where LocalPayloadAttributesBuilder: PayloadAttributesBuilder< <::Engine as PayloadTypes>::PayloadAttributes, >, - N::Primitives: - FullNodePrimitives>, + N::Primitives: FullNodePrimitives, { let tasks = TaskManager::current(); let exec = tasks.executor(); diff --git a/crates/node/builder/src/builder/mod.rs b/crates/node/builder/src/builder/mod.rs index 3ad90a493f13..65ae704fe831 100644 --- a/crates/node/builder/src/builder/mod.rs +++ b/crates/node/builder/src/builder/mod.rs @@ -365,8 +365,7 @@ where >>::Components, >, >, - N::Primitives: - FullNodePrimitives>, + N::Primitives: FullNodePrimitives, { self.node(node).launch().await } @@ -557,8 +556,7 @@ where T: NodeTypesWithEngine + NodeTypesForProvider, CB: NodeComponentsBuilder>, AO: RethRpcAddOns, CB::Components>>, - T::Primitives: - FullNodePrimitives>, + T::Primitives: FullNodePrimitives, { /// Launches the node with the [`DefaultNodeLauncher`] that sets up engine API consensus and rpc pub async fn launch( diff --git a/crates/node/builder/src/launch/common.rs b/crates/node/builder/src/launch/common.rs index 9cc841f6fea8..47ec68ff0d7d 100644 --- a/crates/node/builder/src/launch/common.rs +++ b/crates/node/builder/src/launch/common.rs @@ -408,8 +408,7 @@ where pub async fn create_provider_factory(&self) -> eyre::Result> where N: ProviderNodeTypes, - N::Primitives: - FullNodePrimitives>, + N::Primitives: FullNodePrimitives, { let factory = ProviderFactory::new( self.right().clone(), @@ -476,8 +475,7 @@ where ) -> eyre::Result, ProviderFactory>>> where N: ProviderNodeTypes, - N::Primitives: - FullNodePrimitives>, + N::Primitives: FullNodePrimitives, { let factory = self.create_provider_factory().await?; let ctx = LaunchContextWith { diff --git a/crates/node/builder/src/launch/engine.rs b/crates/node/builder/src/launch/engine.rs index f485be2c22db..ef1edc899ebe 100644 --- a/crates/node/builder/src/launch/engine.rs +++ b/crates/node/builder/src/launch/engine.rs @@ -77,8 +77,7 @@ where LocalPayloadAttributesBuilder: PayloadAttributesBuilder< <::Engine as PayloadTypes>::PayloadAttributes, >, - Types::Primitives: - FullNodePrimitives>, + Types::Primitives: FullNodePrimitives, { type Node = NodeHandle, AO>; diff --git a/crates/node/builder/src/launch/mod.rs b/crates/node/builder/src/launch/mod.rs index be317e4be318..a1819948ee48 100644 --- a/crates/node/builder/src/launch/mod.rs +++ b/crates/node/builder/src/launch/mod.rs @@ -102,8 +102,7 @@ where T: FullNodeTypes, Types = Types>, CB: NodeComponentsBuilder, AO: RethRpcAddOns>, - Types::Primitives: - FullNodePrimitives>, + Types::Primitives: FullNodePrimitives, { type Node = NodeHandle, AO>; diff --git a/crates/node/builder/src/setup.rs b/crates/node/builder/src/setup.rs index 400e3d844565..71f0ceb56cdd 100644 --- a/crates/node/builder/src/setup.rs +++ b/crates/node/builder/src/setup.rs @@ -41,8 +41,7 @@ where N: ProviderNodeTypes, Client: EthBlockClient + 'static, Executor: BlockExecutorProvider, - N::Primitives: - FullNodePrimitives>, + N::Primitives: FullNodePrimitives, { // building network downloaders using the fetch client let header_downloader = ReverseHeadersDownloaderBuilder::new(config.headers) @@ -92,8 +91,7 @@ where Body = <::Block as reth_node_api::Block>::Body, > + 
'static, Executor: BlockExecutorProvider, - N::Primitives: - FullNodePrimitives>, + N::Primitives: FullNodePrimitives, { let mut builder = Pipeline::::builder(); From c2323b4e49c2dc11a5f71253c2e0f1fc27ae7df0 Mon Sep 17 00:00:00 2001 From: Emilia Hane Date: Thu, 21 Nov 2024 19:17:16 +0100 Subject: [PATCH 095/156] chore(sdk): add adapters for header and body to `NodeTypes` (#12723) --- Cargo.lock | 6 +++--- crates/node/types/src/lib.rs | 6 ++++++ 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index db7adde04793..eabde10a0a70 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4590,7 +4590,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4" dependencies = [ "cfg-if", - "windows-targets 0.48.5", + "windows-targets 0.52.6", ] [[package]] @@ -11181,7 +11181,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3637e734239e12ab152cd269302500bd063f37624ee210cd04b4936ed671f3b1" dependencies = [ "cc", - "windows-targets 0.48.5", + "windows-targets 0.52.6", ] [[package]] @@ -11672,7 +11672,7 @@ version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" dependencies = [ - "windows-sys 0.48.0", + "windows-sys 0.59.0", ] [[package]] diff --git a/crates/node/types/src/lib.rs b/crates/node/types/src/lib.rs index 2da8180a9562..2e5558a33bfc 100644 --- a/crates/node/types/src/lib.rs +++ b/crates/node/types/src/lib.rs @@ -232,3 +232,9 @@ where { type Engine = E; } + +/// Helper adapter type for accessing [`NodePrimitives::BlockHeader`] on [`NodeTypes`]. +pub type HeaderTy = <::Primitives as NodePrimitives>::BlockHeader; + +/// Helper adapter type for accessing [`NodePrimitives::BlockBody`] on [`NodeTypes`]. +pub type BodyTy = <::Primitives as NodePrimitives>::BlockBody; From edeacbecfbf662eeb51a8a28c253df8f4d3123fa Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Thu, 21 Nov 2024 19:46:39 +0100 Subject: [PATCH 096/156] fix: bad databaseargs default (#12747) --- crates/storage/db/src/implementation/mdbx/mod.rs | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/crates/storage/db/src/implementation/mdbx/mod.rs b/crates/storage/db/src/implementation/mdbx/mod.rs index 10f3b2282301..006213e4cb91 100644 --- a/crates/storage/db/src/implementation/mdbx/mod.rs +++ b/crates/storage/db/src/implementation/mdbx/mod.rs @@ -66,7 +66,7 @@ impl DatabaseEnvKind { } /// Arguments for database initialization. -#[derive(Clone, Debug, Default)] +#[derive(Clone, Debug)] pub struct DatabaseArguments { /// Client version that accesses the database. client_version: ClientVersion, @@ -99,6 +99,12 @@ pub struct DatabaseArguments { exclusive: Option, } +impl Default for DatabaseArguments { + fn default() -> Self { + Self::new(ClientVersion::default()) + } +} + impl DatabaseArguments { /// Create new database arguments with given client version. 
pub fn new(client_version: ClientVersion) -> Self { From 0558235b98a8fa16aa745c6f3ecb65bb9bc8e00a Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Thu, 21 Nov 2024 23:47:33 +0400 Subject: [PATCH 097/156] refactor: unify logic for blocks removal (#12743) Co-authored-by: joshieDo <93316087+joshieDo@users.noreply.github.com> --- crates/blockchain-tree/src/blockchain_tree.rs | 4 +- crates/cli/commands/src/stage/unwind.rs | 46 +- crates/optimism/node/src/node.rs | 8 + crates/stages/stages/src/stages/bodies.rs | 190 +++---- .../provider/src/providers/database/mod.rs | 15 - .../src/providers/database/provider.rs | 478 ++++-------------- crates/storage/provider/src/traits/block.rs | 35 +- crates/storage/provider/src/writer/mod.rs | 8 +- crates/storage/storage-api/src/chain.rs | 19 + 9 files changed, 267 insertions(+), 536 deletions(-) diff --git a/crates/blockchain-tree/src/blockchain_tree.rs b/crates/blockchain-tree/src/blockchain_tree.rs index 8e192492593a..c778e0508dac 100644 --- a/crates/blockchain-tree/src/blockchain_tree.rs +++ b/crates/blockchain-tree/src/blockchain_tree.rs @@ -25,7 +25,7 @@ use reth_provider::{ BlockExecutionWriter, BlockNumReader, BlockWriter, CanonStateNotification, CanonStateNotificationSender, CanonStateNotifications, ChainSpecProvider, ChainSplit, ChainSplitTarget, DBProvider, DisplayBlocksChain, HeaderProvider, ProviderError, - StaticFileProviderFactory, + StaticFileProviderFactory, StorageLocation, }; use reth_stages_api::{MetricEvent, MetricEventsSender}; use reth_storage_errors::provider::{ProviderResult, RootMismatch}; @@ -1333,7 +1333,7 @@ where info!(target: "blockchain_tree", "REORG: revert canonical from database by unwinding chain blocks {:?}", revert_range); // read block and execution result from database. and remove traces of block from tables. let blocks_and_execution = provider_rw - .take_block_and_execution_range(revert_range) + .take_block_and_execution_above(revert_until, StorageLocation::Database) .map_err(|e| CanonicalError::CanonicalRevert(e.to_string()))?; provider_rw.commit()?; diff --git a/crates/cli/commands/src/stage/unwind.rs b/crates/cli/commands/src/stage/unwind.rs index 4f47a70b02d4..2d29121d0698 100644 --- a/crates/cli/commands/src/stage/unwind.rs +++ b/crates/cli/commands/src/stage/unwind.rs @@ -2,7 +2,7 @@ use crate::common::{AccessRights, CliNodeTypes, Environment, EnvironmentArgs}; use alloy_eips::BlockHashOrNumber; -use alloy_primitives::{BlockNumber, B256}; +use alloy_primitives::B256; use clap::{Parser, Subcommand}; use reth_beacon_consensus::EthBeaconConsensus; use reth_chainspec::{EthChainSpec, EthereumHardforks}; @@ -17,6 +17,7 @@ use reth_node_core::args::NetworkArgs; use reth_provider::{ providers::ProviderNodeTypes, BlockExecutionWriter, BlockNumReader, ChainSpecProvider, ChainStateBlockReader, ChainStateBlockWriter, ProviderFactory, StaticFileProviderFactory, + StorageLocation, }; use reth_prune::PruneModes; use reth_stages::{ @@ -25,7 +26,7 @@ use reth_stages::{ ExecutionStageThresholds, Pipeline, StageSet, }; use reth_static_file::StaticFileProducer; -use std::{ops::RangeInclusive, sync::Arc}; +use std::sync::Arc; use tokio::sync::watch; use tracing::info; @@ -52,16 +53,13 @@ impl> Command pub async fn execute>(self) -> eyre::Result<()> { let Environment { provider_factory, config, .. 
} = self.env.init::(AccessRights::RW)?; - let range = self.command.unwind_range(provider_factory.clone())?; - if *range.start() == 0 { - eyre::bail!("Cannot unwind genesis block") - } + let target = self.command.unwind_target(provider_factory.clone())?; let highest_static_file_block = provider_factory .static_file_provider() .get_highest_static_files() .max() - .filter(|highest_static_file_block| highest_static_file_block >= range.start()); + .filter(|highest_static_file_block| *highest_static_file_block > target); // Execute a pipeline unwind if the start of the range overlaps the existing static // files. If that's the case, then copy all available data from MDBX to static files, and @@ -75,9 +73,9 @@ impl> Command } if let Some(highest_static_file_block) = highest_static_file_block { - info!(target: "reth::cli", ?range, ?highest_static_file_block, "Executing a pipeline unwind."); + info!(target: "reth::cli", ?target, ?highest_static_file_block, "Executing a pipeline unwind."); } else { - info!(target: "reth::cli", ?range, "Executing a pipeline unwind."); + info!(target: "reth::cli", ?target, "Executing a pipeline unwind."); } // This will build an offline-only pipeline if the `offline` flag is enabled @@ -86,29 +84,25 @@ impl> Command // Move all applicable data from database to static files. pipeline.move_to_static_files()?; - pipeline.unwind((*range.start()).saturating_sub(1), None)?; + pipeline.unwind(target, None)?; } else { - info!(target: "reth::cli", ?range, "Executing a database unwind."); + info!(target: "reth::cli", ?target, "Executing a database unwind."); let provider = provider_factory.provider_rw()?; - let _ = provider - .take_block_and_execution_range(range.clone()) + provider + .remove_block_and_execution_above(target, StorageLocation::Both) .map_err(|err| eyre::eyre!("Transaction error on unwind: {err}"))?; // update finalized block if needed let last_saved_finalized_block_number = provider.last_finalized_block_number()?; - let range_min = - range.clone().min().ok_or(eyre::eyre!("Could not fetch lower range end"))?; - if last_saved_finalized_block_number.is_none() || - Some(range_min) < last_saved_finalized_block_number - { - provider.save_finalized_block_number(BlockNumber::from(range_min))?; + if last_saved_finalized_block_number.is_none_or(|f| f > target) { + provider.save_finalized_block_number(target)?; } provider.commit()?; } - info!(target: "reth::cli", range=?range.clone(), count=range.count(), "Unwound blocks"); + info!(target: "reth::cli", ?target, "Unwound blocks"); Ok(()) } @@ -183,13 +177,11 @@ enum Subcommands { } impl Subcommands { - /// Returns the block range to unwind. - /// - /// This returns an inclusive range: [target..=latest] - fn unwind_range>>( + /// Returns the block to unwind to. The returned block will stay in database. 
+ fn unwind_target>>( &self, factory: ProviderFactory, - ) -> eyre::Result> { + ) -> eyre::Result { let provider = factory.provider()?; let last = provider.last_block_number()?; let target = match self { @@ -200,11 +192,11 @@ impl Subcommands { BlockHashOrNumber::Number(num) => *num, }, Self::NumBlocks { amount } => last.saturating_sub(*amount), - } + 1; + }; if target > last { eyre::bail!("Target block number is higher than the latest block number") } - Ok(target..=last) + Ok(target) } } diff --git a/crates/optimism/node/src/node.rs b/crates/optimism/node/src/node.rs index 46841a2a5b99..82b2ce2ebc2b 100644 --- a/crates/optimism/node/src/node.rs +++ b/crates/optimism/node/src/node.rs @@ -62,6 +62,14 @@ impl> BlockBodyWriter for ) -> ProviderResult<()> { self.0.write_block_bodies(provider, bodies) } + + fn remove_block_bodies_above( + &self, + provider: &Provider, + block: alloy_primitives::BlockNumber, + ) -> ProviderResult<()> { + self.0.remove_block_bodies_above(provider, block) + } } impl ChainStorage for OpStorage { diff --git a/crates/stages/stages/src/stages/bodies.rs b/crates/stages/stages/src/stages/bodies.rs index b6eab349e161..e541b9081040 100644 --- a/crates/stages/stages/src/stages/bodies.rs +++ b/crates/stages/stages/src/stages/bodies.rs @@ -10,10 +10,7 @@ use tracing::*; use alloy_primitives::TxNumber; use reth_db::{tables, transaction::DbTx}; -use reth_db_api::{ - cursor::{DbCursorRO, DbCursorRW}, - transaction::DbTxMut, -}; +use reth_db_api::{cursor::DbCursorRO, transaction::DbTxMut}; use reth_network_p2p::bodies::{downloader::BodyDownloader, response::BlockResponse}; use reth_primitives::StaticFileSegment; use reth_provider::{ @@ -70,6 +67,82 @@ impl BodyStage { pub const fn new(downloader: D) -> Self { Self { downloader, buffer: None } } + + /// Ensures that static files and database are in sync. + fn ensure_consistency( + &self, + provider: &Provider, + unwind_block: Option, + ) -> Result<(), StageError> + where + Provider: DBProvider + BlockReader + StaticFileProviderFactory, + { + // Get id for the next tx_num of zero if there are no transactions. + let next_tx_num = provider + .tx_ref() + .cursor_read::()? + .last()? + .map(|(id, _)| id + 1) + .unwrap_or_default(); + + let static_file_provider = provider.static_file_provider(); + + // Make sure Transactions static file is at the same height. If it's further, this + // input execution was interrupted previously and we need to unwind the static file. + let next_static_file_tx_num = static_file_provider + .get_highest_static_file_tx(StaticFileSegment::Transactions) + .map(|id| id + 1) + .unwrap_or_default(); + + match next_static_file_tx_num.cmp(&next_tx_num) { + // If static files are ahead, we are currently unwinding the stage or we didn't reach + // the database commit in a previous stage run. So, our only solution is to unwind the + // static files and proceed from the database expected height. + Ordering::Greater => { + let highest_db_block = + provider.tx_ref().entries::()? as u64; + let mut static_file_producer = + static_file_provider.latest_writer(StaticFileSegment::Transactions)?; + static_file_producer + .prune_transactions(next_static_file_tx_num - next_tx_num, highest_db_block)?; + // Since this is a database <-> static file inconsistency, we commit the change + // straight away. + static_file_producer.commit()?; + } + // If static files are behind, then there was some corruption or loss of files. 
This + // error will trigger an unwind, that will bring the database to the same height as the + // static files. + Ordering::Less => { + // If we are already in the process of unwind, this might be fine because we will + // fix the inconsistency right away. + if let Some(unwind_to) = unwind_block { + let next_tx_num_after_unwind = provider + .tx_ref() + .get::(unwind_to)? + .map(|b| b.next_tx_num()) + .ok_or(ProviderError::BlockBodyIndicesNotFound(unwind_to))?; + + // This means we need a deeper unwind. + if next_tx_num_after_unwind > next_static_file_tx_num { + return Err(missing_static_data_error( + next_static_file_tx_num.saturating_sub(1), + &static_file_provider, + provider, + )?) + } + } else { + return Err(missing_static_data_error( + next_static_file_tx_num.saturating_sub(1), + &static_file_provider, + provider, + )?) + } + } + Ordering::Equal => {} + } + + Ok(()) + } } impl Stage for BodyStage @@ -122,50 +195,9 @@ where } let (from_block, to_block) = input.next_block_range().into_inner(); - // Get id for the next tx_num of zero if there are no transactions. - let next_tx_num = provider - .tx_ref() - .cursor_read::()? - .last()? - .map(|(id, _)| id + 1) - .unwrap_or_default(); - - let static_file_provider = provider.static_file_provider(); - - // Make sure Transactions static file is at the same height. If it's further, this - // input execution was interrupted previously and we need to unwind the static file. - let next_static_file_tx_num = static_file_provider - .get_highest_static_file_tx(StaticFileSegment::Transactions) - .map(|id| id + 1) - .unwrap_or_default(); - - match next_static_file_tx_num.cmp(&next_tx_num) { - // If static files are ahead, then we didn't reach the database commit in a previous - // stage run. So, our only solution is to unwind the static files and proceed from the - // database expected height. - Ordering::Greater => { - let mut static_file_producer = - static_file_provider.get_writer(from_block, StaticFileSegment::Transactions)?; - static_file_producer - .prune_transactions(next_static_file_tx_num - next_tx_num, from_block - 1)?; - // Since this is a database <-> static file inconsistency, we commit the change - // straight away. - static_file_producer.commit()?; - } - // If static files are behind, then there was some corruption or loss of files. This - // error will trigger an unwind, that will bring the database to the same height as the - // static files. - Ordering::Less => { - return Err(missing_static_data_error( - next_static_file_tx_num.saturating_sub(1), - &static_file_provider, - provider, - )?) 
- } - Ordering::Equal => {} - } + self.ensure_consistency(provider, None)?; - debug!(target: "sync::stages::bodies", stage_progress = from_block, target = to_block, start_tx_id = next_tx_num, "Commencing sync"); + debug!(target: "sync::stages::bodies", stage_progress = from_block, target = to_block, "Commencing sync"); let buffer = self.buffer.take().ok_or(StageError::MissingDownloadBuffer)?; trace!(target: "sync::stages::bodies", bodies_len = buffer.len(), "Writing blocks"); @@ -200,66 +232,8 @@ where ) -> Result { self.buffer.take(); - let static_file_provider = provider.static_file_provider(); - let tx = provider.tx_ref(); - // Cursors to unwind bodies, ommers - let mut body_cursor = tx.cursor_write::()?; - let mut ommers_cursor = tx.cursor_write::()?; - let mut withdrawals_cursor = tx.cursor_write::()?; - // Cursors to unwind transitions - let mut tx_block_cursor = tx.cursor_write::()?; - - let mut rev_walker = body_cursor.walk_back(None)?; - while let Some((number, block_meta)) = rev_walker.next().transpose()? { - if number <= input.unwind_to { - break - } - - // Delete the ommers entry if any - if ommers_cursor.seek_exact(number)?.is_some() { - ommers_cursor.delete_current()?; - } - - // Delete the withdrawals entry if any - if withdrawals_cursor.seek_exact(number)?.is_some() { - withdrawals_cursor.delete_current()?; - } - - // Delete all transaction to block values. - if !block_meta.is_empty() && - tx_block_cursor.seek_exact(block_meta.last_tx_num())?.is_some() - { - tx_block_cursor.delete_current()?; - } - - // Delete the current body value - rev_walker.delete_current()?; - } - - let mut static_file_producer = - static_file_provider.latest_writer(StaticFileSegment::Transactions)?; - - // Unwind from static files. Get the current last expected transaction from DB, and match it - // on static file - let db_tx_num = - body_cursor.last()?.map(|(_, block_meta)| block_meta.last_tx_num()).unwrap_or_default(); - let static_file_tx_num: u64 = static_file_provider - .get_highest_static_file_tx(StaticFileSegment::Transactions) - .unwrap_or_default(); - - // If there are more transactions on database, then we are missing static file data and we - // need to unwind further. - if db_tx_num > static_file_tx_num { - return Err(missing_static_data_error( - static_file_tx_num, - &static_file_provider, - provider, - )?) - } - - // Unwinds static file - static_file_producer - .prune_transactions(static_file_tx_num.saturating_sub(db_tx_num), input.unwind_to)?; + self.ensure_consistency(provider, Some(input.unwind_to))?; + provider.remove_bodies_above(input.unwind_to, StorageLocation::Both)?; Ok(UnwindOutput { checkpoint: StageCheckpoint::new(input.unwind_to) @@ -268,6 +242,8 @@ where } } +/// Called when database is ahead of static files. Attempts to find the first block we are missing +/// transactions for. 
fn missing_static_data_error( last_tx_num: TxNumber, static_file_provider: &StaticFileProvider, diff --git a/crates/storage/provider/src/providers/database/mod.rs b/crates/storage/provider/src/providers/database/mod.rs index cc50aa351457..354eb10c1036 100644 --- a/crates/storage/provider/src/providers/database/mod.rs +++ b/crates/storage/provider/src/providers/database/mod.rs @@ -781,21 +781,6 @@ mod tests { let db_senders = provider.senders_by_tx_range(range); assert_eq!(db_senders, Ok(vec![])); - - let result = provider.take_block_transaction_range(0..=0); - assert_eq!( - result, - Ok(vec![( - 0, - block - .body - .transactions - .iter() - .cloned() - .map(|tx| tx.into_ecrecovered().unwrap()) - .collect() - )]) - ) } } diff --git a/crates/storage/provider/src/providers/database/provider.rs b/crates/storage/provider/src/providers/database/provider.rs index 92cc8df2f5c2..8c390b06c08f 100644 --- a/crates/storage/provider/src/providers/database/provider.rs +++ b/crates/storage/provider/src/providers/database/provider.rs @@ -26,7 +26,7 @@ use alloy_eips::{ BlockHashOrNumber, }; use alloy_primitives::{keccak256, Address, BlockHash, BlockNumber, TxHash, TxNumber, B256, U256}; -use itertools::{izip, Itertools}; +use itertools::Itertools; use rayon::slice::ParallelSliceMut; use reth_chainspec::{ChainInfo, ChainSpecProvider, EthChainSpec, EthereumHardforks}; use reth_db::{ @@ -41,7 +41,7 @@ use reth_db_api::{ }, table::Table, transaction::{DbTx, DbTxMut}, - DatabaseError, DbTxUnwindExt, + DatabaseError, }; use reth_evm::ConfigureEvmEnv; use reth_execution_types::{Chain, ExecutionOutcome}; @@ -50,7 +50,7 @@ use reth_node_types::NodeTypes; use reth_primitives::{ Account, Block, BlockBody, BlockWithSenders, Bytecode, GotExpected, Receipt, SealedBlock, SealedBlockWithSenders, SealedHeader, StaticFileSegment, StorageEntry, TransactionMeta, - TransactionSigned, TransactionSignedEcRecovered, TransactionSignedNoHash, + TransactionSigned, TransactionSignedNoHash, }; use reth_primitives_traits::{BlockBody as _, FullNodePrimitives, SignedTransaction}; use reth_prune_types::{PruneCheckpoint, PruneModes, PruneSegment}; @@ -75,7 +75,7 @@ use std::{ sync::{mpsc, Arc}, }; use tokio::sync::watch; -use tracing::{debug, error, trace}; +use tracing::{debug, trace}; /// A [`DatabaseProvider`] that holds a read-only database transaction. pub type DatabaseProviderRO = DatabaseProvider<::TX, N>; @@ -881,276 +881,6 @@ impl DatabaseProvider { Ok(self.tx.commit()?) } - /// Remove requested block transactions, without returning them. - /// - /// This will remove block data for the given range from the following tables: - /// * [`BlockBodyIndices`](tables::BlockBodyIndices) - /// * [`Transactions`](tables::Transactions) - /// * [`TransactionSenders`](tables::TransactionSenders) - /// * [`TransactionHashNumbers`](tables::TransactionHashNumbers) - /// * [`TransactionBlocks`](tables::TransactionBlocks) - pub fn remove_block_transaction_range( - &self, - range: impl RangeBounds + Clone, - ) -> ProviderResult<()> { - // Raad range of block bodies to get all transactions id's of this range. 
- let block_bodies = self.take::(range)?; - - if block_bodies.is_empty() { - return Ok(()) - } - - // Compute the first and last tx ID in the range - let first_transaction = block_bodies.first().expect("If we have headers").1.first_tx_num(); - let last_transaction = block_bodies.last().expect("Not empty").1.last_tx_num(); - - // If this is the case then all of the blocks in the range are empty - if last_transaction < first_transaction { - return Ok(()) - } - - // Get transactions so we can then remove - let transactions = self - .take::(first_transaction..=last_transaction)? - .into_iter() - .map(|(id, tx)| (id, tx.into())) - .collect::>(); - - // remove senders - self.remove::(first_transaction..=last_transaction)?; - - // Remove TransactionHashNumbers - let mut tx_hash_cursor = self.tx.cursor_write::()?; - for (_, tx) in &transactions { - if tx_hash_cursor.seek_exact(tx.hash())?.is_some() { - tx_hash_cursor.delete_current()?; - } - } - - // Remove TransactionBlocks index if there are transaction present - if !transactions.is_empty() { - let tx_id_range = transactions.first().unwrap().0..=transactions.last().unwrap().0; - self.remove::(tx_id_range)?; - } - - Ok(()) - } - - /// Get requested blocks transaction with senders, also removing them from the database - /// - /// This will remove block data for the given range from the following tables: - /// * [`BlockBodyIndices`](tables::BlockBodyIndices) - /// * [`Transactions`](tables::Transactions) - /// * [`TransactionSenders`](tables::TransactionSenders) - /// * [`TransactionHashNumbers`](tables::TransactionHashNumbers) - /// * [`TransactionBlocks`](tables::TransactionBlocks) - pub fn take_block_transaction_range( - &self, - range: impl RangeBounds + Clone, - ) -> ProviderResult)>> { - // Raad range of block bodies to get all transactions id's of this range. - let block_bodies = self.get::(range)?; - - if block_bodies.is_empty() { - return Ok(Vec::new()) - } - - // Compute the first and last tx ID in the range - let first_transaction = block_bodies.first().expect("If we have headers").1.first_tx_num(); - let last_transaction = block_bodies.last().expect("Not empty").1.last_tx_num(); - - // If this is the case then all of the blocks in the range are empty - if last_transaction < first_transaction { - return Ok(block_bodies.into_iter().map(|(n, _)| (n, Vec::new())).collect()) - } - - // Get transactions and senders - let transactions = self - .take::(first_transaction..=last_transaction)? 
- .into_iter() - .map(|(id, tx)| (id, tx.into())) - .collect::>(); - - let mut senders = - self.take::(first_transaction..=last_transaction)?; - - recover_block_senders(&mut senders, &transactions, first_transaction, last_transaction)?; - - // Remove TransactionHashNumbers - let mut tx_hash_cursor = self.tx.cursor_write::()?; - for (_, tx) in &transactions { - if tx_hash_cursor.seek_exact(tx.hash())?.is_some() { - tx_hash_cursor.delete_current()?; - } - } - - // Remove TransactionBlocks index if there are transaction present - if !transactions.is_empty() { - let tx_id_range = transactions.first().unwrap().0..=transactions.last().unwrap().0; - self.remove::(tx_id_range)?; - } - - // Merge transaction into blocks - let mut block_tx = Vec::with_capacity(block_bodies.len()); - let mut senders = senders.into_iter(); - let mut transactions = transactions.into_iter(); - for (block_number, block_body) in block_bodies { - let mut one_block_tx = Vec::with_capacity(block_body.tx_count as usize); - for _ in block_body.tx_num_range() { - let tx = transactions.next(); - let sender = senders.next(); - - let recovered = match (tx, sender) { - (Some((tx_id, tx)), Some((sender_tx_id, sender))) => { - if tx_id == sender_tx_id { - Ok(TransactionSignedEcRecovered::from_signed_transaction(tx, sender)) - } else { - Err(ProviderError::MismatchOfTransactionAndSenderId { tx_id }) - } - } - (Some((tx_id, _)), _) | (_, Some((tx_id, _))) => { - Err(ProviderError::MismatchOfTransactionAndSenderId { tx_id }) - } - (None, None) => Err(ProviderError::BlockBodyTransactionCount), - }?; - one_block_tx.push(recovered) - } - block_tx.push((block_number, one_block_tx)); - } - - Ok(block_tx) - } - - /// Remove the given range of blocks, without returning any of the blocks. - /// - /// This will remove block data for the given range from the following tables: - /// * [`HeaderNumbers`](tables::HeaderNumbers) - /// * [`CanonicalHeaders`](tables::CanonicalHeaders) - /// * [`BlockOmmers`](tables::BlockOmmers) - /// * [`BlockWithdrawals`](tables::BlockWithdrawals) - /// * [`HeaderTerminalDifficulties`](tables::HeaderTerminalDifficulties) - /// - /// This will also remove transaction data according to - /// [`remove_block_transaction_range`](Self::remove_block_transaction_range). - pub fn remove_block_range( - &self, - range: impl RangeBounds + Clone, - ) -> ProviderResult<()> { - let block_headers = self.remove::(range.clone())?; - if block_headers == 0 { - return Ok(()) - } - - self.tx.unwind_table_by_walker::( - range.clone(), - )?; - self.remove::(range.clone())?; - self.remove::(range.clone())?; - self.remove::(range.clone())?; - self.remove_block_transaction_range(range.clone())?; - self.remove::(range)?; - - Ok(()) - } - - /// Remove the given range of blocks, and return them. - /// - /// This will remove block data for the given range from the following tables: - /// * [`HeaderNumbers`](tables::HeaderNumbers) - /// * [`CanonicalHeaders`](tables::CanonicalHeaders) - /// * [`BlockOmmers`](tables::BlockOmmers) - /// * [`BlockWithdrawals`](tables::BlockWithdrawals) - /// * [`HeaderTerminalDifficulties`](tables::HeaderTerminalDifficulties) - /// - /// This will also remove transaction data according to - /// [`take_block_transaction_range`](Self::take_block_transaction_range). 
- pub fn take_block_range( - &self, - range: impl RangeBounds + Clone, - ) -> ProviderResult> - where - N::ChainSpec: EthereumHardforks, - { - // For blocks we need: - // - // - Headers - // - Bodies (transactions) - // - Uncles/ommers - // - Withdrawals - // - Signers - - let block_headers = self.take::(range.clone())?; - if block_headers.is_empty() { - return Ok(Vec::new()) - } - - self.tx.unwind_table_by_walker::( - range.clone(), - )?; - let block_header_hashes = self.take::(range.clone())?; - let block_ommers = self.take::(range.clone())?; - let block_withdrawals = self.take::(range.clone())?; - let block_tx = self.take_block_transaction_range(range.clone())?; - - let mut blocks = Vec::with_capacity(block_headers.len()); - - // rm HeaderTerminalDifficulties - self.remove::(range)?; - - // merge all into block - let block_header_iter = block_headers.into_iter(); - let block_header_hashes_iter = block_header_hashes.into_iter(); - let block_tx_iter = block_tx.into_iter(); - - // Ommers can be empty for some blocks - let mut block_ommers_iter = block_ommers.into_iter(); - let mut block_withdrawals_iter = block_withdrawals.into_iter(); - let mut block_ommers = block_ommers_iter.next(); - let mut block_withdrawals = block_withdrawals_iter.next(); - - for ((main_block_number, header), (_, header_hash), (_, tx)) in - izip!(block_header_iter, block_header_hashes_iter, block_tx_iter) - { - let header = SealedHeader::new(header, header_hash); - - let (transactions, senders) = tx.into_iter().map(|tx| tx.to_components()).unzip(); - - // Ommers can be missing - let mut ommers = Vec::new(); - if let Some((block_number, _)) = block_ommers.as_ref() { - if *block_number == main_block_number { - ommers = block_ommers.take().unwrap().1.ommers; - block_ommers = block_ommers_iter.next(); - } - }; - - // withdrawal can be missing - let shanghai_is_active = - self.chain_spec.is_shanghai_active_at_timestamp(header.timestamp); - let mut withdrawals = Some(Withdrawals::default()); - if shanghai_is_active { - if let Some((block_number, _)) = block_withdrawals.as_ref() { - if *block_number == main_block_number { - withdrawals = Some(block_withdrawals.take().unwrap().1.withdrawals); - block_withdrawals = block_withdrawals_iter.next(); - } - } - } else { - withdrawals = None - } - - blocks.push(SealedBlockWithSenders { - block: SealedBlock { - header, - body: BlockBody { transactions, ommers, withdrawals }, - }, - senders, - }) - } - - Ok(blocks) - } - /// Load shard and remove it. If list is empty, last shard was full or /// there are no shards at all. fn take_shard(&self, key: T::Key) -> ProviderResult> @@ -2998,52 +2728,48 @@ impl StateReader for DatabaseProvider { impl BlockExecutionWriter for DatabaseProvider { - fn take_block_and_execution_range( + fn take_block_and_execution_above( &self, - range: RangeInclusive, + block: BlockNumber, + remove_transactions_from: StorageLocation, ) -> ProviderResult { - self.unwind_trie_state_range(range.clone())?; + let range = block + 1..=self.last_block_number()?; - // get blocks - let blocks = self.take_block_range(range.clone())?; - let unwind_to = blocks.first().map(|b| b.number.saturating_sub(1)); + self.unwind_trie_state_range(range.clone())?; // get execution res let execution_state = self.take_state(range.clone())?; + let blocks = self.sealed_block_with_senders_range(range)?; + // remove block bodies it is needed for both get block range and get block execution results // that is why it is deleted afterwards. 
- self.remove::(range)?; + self.remove_blocks_above(block, remove_transactions_from)?; // Update pipeline progress - if let Some(fork_number) = unwind_to { - self.update_pipeline_stages(fork_number, true)?; - } + self.update_pipeline_stages(block, true)?; Ok(Chain::new(blocks, execution_state, None)) } - fn remove_block_and_execution_range( + fn remove_block_and_execution_above( &self, - range: RangeInclusive, + block: BlockNumber, + remove_transactions_from: StorageLocation, ) -> ProviderResult<()> { - self.unwind_trie_state_range(range.clone())?; + let range = block + 1..=self.last_block_number()?; - // get blocks - let blocks = self.take_block_range(range.clone())?; - let unwind_to = blocks.first().map(|b| b.number.saturating_sub(1)); + self.unwind_trie_state_range(range.clone())?; // remove execution res - self.remove_state(range.clone())?; + self.remove_state(range)?; // remove block bodies it is needed for both get block range and get block execution results // that is why it is deleted afterwards. - self.remove::(range)?; + self.remove_blocks_above(block, remove_transactions_from)?; // Update pipeline progress - if let Some(block_number) = unwind_to { - self.update_pipeline_stages(block_number, true)?; - } + self.update_pipeline_stages(block, true)?; Ok(()) } @@ -3230,6 +2956,92 @@ impl BlockWriter Ok(()) } + fn remove_blocks_above( + &self, + block: BlockNumber, + remove_transactions_from: StorageLocation, + ) -> ProviderResult<()> { + let mut canonical_headers_cursor = self.tx.cursor_write::()?; + let mut rev_headers = canonical_headers_cursor.walk_back(None)?; + + while let Some(Ok((number, hash))) = rev_headers.next() { + if number <= block { + break + } + self.tx.delete::(hash, None)?; + rev_headers.delete_current()?; + } + self.remove::(block + 1..)?; + self.remove::(block + 1..)?; + + // First transaction to be removed + let unwind_tx_from = self + .tx + .get::(block)? + .map(|b| b.next_tx_num()) + .ok_or(ProviderError::BlockBodyIndicesNotFound(block))?; + + // Last transaction to be removed + let unwind_tx_to = self + .tx + .cursor_read::()? + .last()? + // shouldn't happen because this was OK above + .ok_or(ProviderError::BlockBodyIndicesNotFound(block))? + .1 + .last_tx_num(); + + if unwind_tx_from < unwind_tx_to { + for (hash, _) in self.transaction_hashes_by_range(unwind_tx_from..(unwind_tx_to + 1))? { + self.tx.delete::(hash, None)?; + } + } + + self.remove::(unwind_tx_from..)?; + + self.remove_bodies_above(block, remove_transactions_from)?; + + Ok(()) + } + + fn remove_bodies_above( + &self, + block: BlockNumber, + remove_transactions_from: StorageLocation, + ) -> ProviderResult<()> { + self.storage.writer().remove_block_bodies_above(self, block)?; + + // First transaction to be removed + let unwind_tx_from = self + .tx + .get::(block)? + .map(|b| b.next_tx_num()) + .ok_or(ProviderError::BlockBodyIndicesNotFound(block))?; + + self.remove::(block + 1..)?; + self.remove::(unwind_tx_from..)?; + + if remove_transactions_from.database() { + self.remove::(unwind_tx_from..)?; + } + + if remove_transactions_from.static_files() { + let static_file_tx_num = self + .static_file_provider + .get_highest_static_file_tx(StaticFileSegment::Transactions); + + if let Some(static_tx) = static_file_tx_num { + if static_tx >= unwind_tx_from { + self.static_file_provider + .latest_writer(StaticFileSegment::Transactions)? 
+ .prune_transactions(static_tx - unwind_tx_from + 1, block)?; + } + } + } + + Ok(()) + } + /// TODO(joshie): this fn should be moved to `UnifiedStorageWriter` eventually fn append_blocks_with_state( &self, @@ -3381,79 +3193,3 @@ impl DBProvider for DatabaseProvider self.prune_modes_ref() } } - -/// Helper method to recover senders for any blocks in the db which do not have senders. This -/// compares the length of the input senders [`Vec`], with the length of given transactions [`Vec`], -/// and will add to the input senders vec if there are more transactions. -/// -/// NOTE: This will modify the input senders list, which is why a mutable reference is required. -fn recover_block_senders( - senders: &mut Vec<(u64, Address)>, - transactions: &[(u64, TransactionSigned)], - first_transaction: u64, - last_transaction: u64, -) -> ProviderResult<()> { - // Recover senders manually if not found in db - // NOTE: Transactions are always guaranteed to be in the database whereas - // senders might be pruned. - if senders.len() != transactions.len() { - if senders.len() > transactions.len() { - error!(target: "providers::db", senders=%senders.len(), transactions=%transactions.len(), - first_tx=%first_transaction, last_tx=%last_transaction, - "unexpected senders and transactions mismatch"); - } - let missing = transactions.len().saturating_sub(senders.len()); - senders.reserve(missing); - // Find all missing senders, their corresponding tx numbers and indexes to the original - // `senders` vector at which the recovered senders will be inserted. - let mut missing_senders = Vec::with_capacity(missing); - { - let mut senders = senders.iter().peekable(); - - // `transactions` contain all entries. `senders` contain _some_ of the senders for - // these transactions. Both are sorted and indexed by `TxNumber`. - // - // The general idea is to iterate on both `transactions` and `senders`, and advance - // the `senders` iteration only if it matches the current `transactions` entry's - // `TxNumber`. Otherwise, add the transaction to the list of missing senders. - for (i, (tx_number, transaction)) in transactions.iter().enumerate() { - if let Some((sender_tx_number, _)) = senders.peek() { - if sender_tx_number == tx_number { - // If current sender's `TxNumber` matches current transaction's - // `TxNumber`, advance the senders iterator. - senders.next(); - } else { - // If current sender's `TxNumber` doesn't match current transaction's - // `TxNumber`, add it to missing senders. 
- missing_senders.push((i, tx_number, transaction)); - } - } else { - // If there's no more senders left, but we're still iterating over - // transactions, add them to missing senders - missing_senders.push((i, tx_number, transaction)); - } - } - } - - // Recover senders - let recovered_senders = TransactionSigned::recover_signers( - missing_senders.iter().map(|(_, _, tx)| *tx).collect::>(), - missing_senders.len(), - ) - .ok_or(ProviderError::SenderRecoveryError)?; - - // Insert recovered senders along with tx numbers at the corresponding indexes to the - // original `senders` vector - for ((i, tx_number, _), sender) in missing_senders.into_iter().zip(recovered_senders) { - // Insert will put recovered senders at necessary positions and shift the rest - senders.insert(i, (*tx_number, sender)); - } - - // Debug assertions which are triggered during the test to ensure that all senders are - // present and sorted - debug_assert_eq!(senders.len(), transactions.len(), "missing one or more senders"); - debug_assert!(senders.iter().tuple_windows().all(|(a, b)| a.0 < b.0), "senders not sorted"); - } - - Ok(()) -} diff --git a/crates/storage/provider/src/traits/block.rs b/crates/storage/provider/src/traits/block.rs index c84534e7a5da..c2ce477051d4 100644 --- a/crates/storage/provider/src/traits/block.rs +++ b/crates/storage/provider/src/traits/block.rs @@ -5,7 +5,6 @@ use reth_execution_types::{Chain, ExecutionOutcome}; use reth_primitives::SealedBlockWithSenders; use reth_storage_errors::provider::ProviderResult; use reth_trie::{updates::TrieUpdates, HashedPostStateSorted}; -use std::ops::RangeInclusive; /// An enum that represents the storage location for a piece of data. #[derive(Debug, Copy, Clone, PartialEq, Eq)] @@ -33,16 +32,22 @@ impl StorageLocation { /// BlockExecution Writer #[auto_impl::auto_impl(&, Arc, Box)] pub trait BlockExecutionWriter: BlockWriter + Send + Sync { - /// Take range of blocks and its execution result - fn take_block_and_execution_range( + /// Take all of the blocks above the provided number and their execution result + /// + /// The passed block number will stay in the database. + fn take_block_and_execution_above( &self, - range: RangeInclusive, + block: BlockNumber, + remove_transactions_from: StorageLocation, ) -> ProviderResult; - /// Remove range of blocks and its execution result - fn remove_block_and_execution_range( + /// Remove all of the blocks above the provided number and their execution result + /// + /// The passed block number will stay in the database. + fn remove_block_and_execution_above( &self, - range: RangeInclusive, + block: BlockNumber, + remove_transactions_from: StorageLocation, ) -> ProviderResult<()>; } @@ -81,6 +86,22 @@ pub trait BlockWriter: Send + Sync { write_transactions_to: StorageLocation, ) -> ProviderResult<()>; + /// Removes all blocks above the given block number from the database. + /// + /// Note: This does not remove state or execution data. + fn remove_blocks_above( + &self, + block: BlockNumber, + remove_transactions_from: StorageLocation, + ) -> ProviderResult<()>; + + /// Removes all block bodies above the given block number from the database. + fn remove_bodies_above( + &self, + block: BlockNumber, + remove_transactions_from: StorageLocation, + ) -> ProviderResult<()>; + /// Appends a batch of sealed blocks to the blockchain, including sender information, and /// updates the post-state. 
/// diff --git a/crates/storage/provider/src/writer/mod.rs b/crates/storage/provider/src/writer/mod.rs index 3878cf2a9e33..30c5f0d5291d 100644 --- a/crates/storage/provider/src/writer/mod.rs +++ b/crates/storage/provider/src/writer/mod.rs @@ -273,9 +273,7 @@ where // IMPORTANT: we use `block_number+1` to make sure we remove only what is ABOVE the block debug!(target: "provider::storage_writer", ?block_number, "Removing blocks from database above block_number"); - self.database().remove_block_and_execution_range( - block_number + 1..=self.database().last_block_number()?, - )?; + self.database().remove_block_and_execution_above(block_number, StorageLocation::Both)?; // IMPORTANT: we use `highest_static_file_block.saturating_sub(block_number)` to make sure // we remove only what is ABOVE the block. @@ -287,10 +285,6 @@ where .get_writer(block_number, StaticFileSegment::Headers)? .prune_headers(highest_static_file_block.saturating_sub(block_number))?; - self.static_file() - .get_writer(block_number, StaticFileSegment::Transactions)? - .prune_transactions(total_txs, block_number)?; - if !self.database().prune_modes_ref().has_receipts_pruning() { self.static_file() .get_writer(block_number, StaticFileSegment::Receipts)? diff --git a/crates/storage/storage-api/src/chain.rs b/crates/storage/storage-api/src/chain.rs index 099f61f1bcb5..d5228bdddf72 100644 --- a/crates/storage/storage-api/src/chain.rs +++ b/crates/storage/storage-api/src/chain.rs @@ -5,6 +5,7 @@ use reth_db::{ models::{StoredBlockOmmers, StoredBlockWithdrawals}, tables, transaction::DbTxMut, + DbTxUnwindExt, }; use reth_primitives_traits::{Block, BlockBody, FullNodePrimitives}; use reth_storage_errors::provider::ProviderResult; @@ -21,6 +22,13 @@ pub trait BlockBodyWriter { provider: &Provider, bodies: Vec<(BlockNumber, Option)>, ) -> ProviderResult<()>; + + /// Removes all block bodies above the given block number from the database. + fn remove_block_bodies_above( + &self, + provider: &Provider, + block: BlockNumber, + ) -> ProviderResult<()>; } /// Trait that implements how chain-specific types are written to the storage. 
@@ -69,4 +77,15 @@ where Ok(()) } + + fn remove_block_bodies_above( + &self, + provider: &Provider, + block: BlockNumber, + ) -> ProviderResult<()> { + provider.tx_ref().unwind_table_by_num::(block)?; + provider.tx_ref().unwind_table_by_num::(block)?; + + Ok(()) + } } From 367478c6f1334876d1aed1c50319e9d714bb18e3 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Thu, 21 Nov 2024 21:21:28 +0100 Subject: [PATCH 098/156] chore: use hash ref (#12756) --- crates/net/network/src/transactions/mod.rs | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/crates/net/network/src/transactions/mod.rs b/crates/net/network/src/transactions/mod.rs index a4eef2fa99c4..30c75f63e5dd 100644 --- a/crates/net/network/src/transactions/mod.rs +++ b/crates/net/network/src/transactions/mod.rs @@ -736,7 +736,7 @@ where // Iterate through the transactions to propagate and fill the hashes and full // transaction for tx in to_propagate { - if !peer.seen_transactions.contains(&tx.hash()) { + if !peer.seen_transactions.contains(tx.tx_hash()) { // Only include if the peer hasn't seen the transaction full_transactions.push(&tx); } @@ -815,7 +815,7 @@ where hashes.extend(to_propagate) } else { for tx in to_propagate { - if !peer.seen_transactions.contains(&tx.hash()) { + if !peer.seen_transactions.contains(tx.tx_hash()) { // Include if the peer hasn't seen it hashes.push(&tx); } @@ -885,7 +885,7 @@ where for tx in &to_propagate { // Only proceed if the transaction is not in the peer's list of seen // transactions - if !peer.seen_transactions.contains(&tx.hash()) { + if !peer.seen_transactions.contains(tx.tx_hash()) { builder.push(tx); } } @@ -1486,8 +1486,8 @@ impl PropagateTransaction { Self { size, transaction } } - fn hash(&self) -> TxHash { - *self.transaction.tx_hash() + fn tx_hash(&self) -> &TxHash { + self.transaction.tx_hash() } } @@ -1678,9 +1678,9 @@ impl PooledTransactionsHashesBuilder { fn push(&mut self, tx: &PropagateTransaction) { match self { - Self::Eth66(msg) => msg.0.push(tx.hash()), + Self::Eth66(msg) => msg.0.push(*tx.tx_hash()), Self::Eth68(msg) => { - msg.hashes.push(tx.hash()); + msg.hashes.push(*tx.tx_hash()); msg.sizes.push(tx.size); msg.types.push(tx.transaction.transaction().tx_type().into()); } From 1061e46816e3a91212d40c6469adb81ea6d8c86c Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Thu, 21 Nov 2024 21:44:50 +0100 Subject: [PATCH 099/156] chore: use new is_broadcastable_in_full (#12757) Co-authored-by: Dan Cline <6798349+Rjected@users.noreply.github.com> --- crates/net/network/src/transactions/mod.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/crates/net/network/src/transactions/mod.rs b/crates/net/network/src/transactions/mod.rs index 30c75f63e5dd..20525325ec11 100644 --- a/crates/net/network/src/transactions/mod.rs +++ b/crates/net/network/src/transactions/mod.rs @@ -1607,6 +1607,7 @@ impl FullTransactionsBuilder { /// /// If the transaction is unsuitable for broadcast or would exceed the softlimit, it is appended /// to list of pooled transactions, (e.g. 4844 transactions). + /// See also [`TxType::is_broadcastable_in_full`]. fn push(&mut self, transaction: &PropagateTransaction) { // Do not send full 4844 transaction hashes to peers. // @@ -1616,7 +1617,7 @@ impl FullTransactionsBuilder { // via `GetPooledTransactions`. 
// // From: - if transaction.transaction.transaction().tx_type().is_eip4844() { + if !transaction.transaction.transaction().tx_type().is_broadcastable_in_full() { self.pooled.push(transaction); return } From 6f6fb005ab681f97a80a0d8ce8e8a364f363d114 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Thu, 21 Nov 2024 23:56:18 +0100 Subject: [PATCH 100/156] chore: remove feature gated import (#12761) --- crates/primitives/src/transaction/mod.rs | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/crates/primitives/src/transaction/mod.rs b/crates/primitives/src/transaction/mod.rs index d50aea14c46f..ea436a92cb52 100644 --- a/crates/primitives/src/transaction/mod.rs +++ b/crates/primitives/src/transaction/mod.rs @@ -1,7 +1,5 @@ //! Transaction types. -#[cfg(any(test, feature = "reth-codec"))] -use alloy_consensus::constants::{EIP4844_TX_TYPE_ID, EIP7702_TX_TYPE_ID}; use alloy_consensus::{ transaction::RlpEcdsaTx, SignableTransaction, Transaction as _, TxEip1559, TxEip2930, TxEip4844, TxEip7702, TxLegacy, @@ -606,11 +604,11 @@ impl reth_codecs::Compact for Transaction { // reading the full 8 bits (single byte) and match on this transaction type. let identifier = buf.get_u8(); match identifier { - EIP4844_TX_TYPE_ID => { + alloy_consensus::constants::EIP4844_TX_TYPE_ID => { let (tx, buf) = TxEip4844::from_compact(buf, buf.len()); (Self::Eip4844(tx), buf) } - EIP7702_TX_TYPE_ID => { + alloy_consensus::constants::EIP7702_TX_TYPE_ID => { let (tx, buf) = TxEip7702::from_compact(buf, buf.len()); (Self::Eip7702(tx), buf) } From 4442b5d6fa4b5e8f09b5467143e9429de9378e64 Mon Sep 17 00:00:00 2001 From: Steven <112043913+stevencartavia@users.noreply.github.com> Date: Thu, 21 Nov 2024 16:40:29 -0600 Subject: [PATCH 101/156] feat: convert hash field to `OnceLock` on `TransactionSigned` (#12596) Co-authored-by: joshieDo <93316087+joshieDo@users.noreply.github.com> --- .../src/commands/debug_cmd/build_block.rs | 2 +- crates/chain-state/src/notifications.rs | 20 ++++-- crates/engine/util/src/reorg.rs | 2 +- crates/ethereum/payload/src/lib.rs | 2 +- crates/evm/execution-types/src/chain.rs | 2 +- crates/net/network/src/transactions/mod.rs | 2 +- crates/net/network/tests/it/txgossip.rs | 2 +- crates/optimism/rpc/src/eth/block.rs | 2 +- crates/optimism/rpc/src/eth/transaction.rs | 3 +- crates/primitives/src/alloy_compat.rs | 2 +- crates/primitives/src/transaction/mod.rs | 61 ++++++++++++------- crates/primitives/src/transaction/pooled.rs | 34 ++++++----- crates/primitives/src/transaction/sidecar.rs | 7 ++- crates/primitives/src/transaction/variant.rs | 4 +- .../src/segments/user/transaction_lookup.rs | 2 +- crates/rpc/rpc-eth-api/src/helpers/call.rs | 2 +- crates/rpc/rpc-eth-api/src/helpers/trace.rs | 2 +- crates/rpc/rpc/src/debug.rs | 6 +- crates/rpc/rpc/src/eth/helpers/block.rs | 2 +- crates/rpc/rpc/src/eth/helpers/types.rs | 3 +- crates/stages/stages/src/stages/tx_lookup.rs | 2 +- .../provider/src/providers/database/mod.rs | 7 ++- .../src/providers/database/provider.rs | 6 +- .../storage/provider/src/test_utils/blocks.rs | 2 +- .../transaction-pool/src/blobstore/tracker.rs | 12 ++-- crates/transaction-pool/src/maintain.rs | 4 +- .../transaction-pool/src/test_utils/mock.rs | 2 +- docs/crates/network.md | 4 +- examples/db-access/src/main.rs | 13 ++-- 29 files changed, 127 insertions(+), 87 deletions(-) diff --git a/bin/reth/src/commands/debug_cmd/build_block.rs b/bin/reth/src/commands/debug_cmd/build_block.rs index adb2c83b1b2f..aa89b4112c3b 100644 --- 
a/bin/reth/src/commands/debug_cmd/build_block.rs +++ b/bin/reth/src/commands/debug_cmd/build_block.rs @@ -201,7 +201,7 @@ impl> Command { let encoded_length = pooled.encode_2718_len(); // insert the blob into the store - blob_store.insert(transaction.hash, sidecar)?; + blob_store.insert(transaction.hash(), sidecar)?; encoded_length } diff --git a/crates/chain-state/src/notifications.rs b/crates/chain-state/src/notifications.rs index 865f2bd65847..03d740d3d133 100644 --- a/crates/chain-state/src/notifications.rs +++ b/crates/chain-state/src/notifications.rs @@ -196,7 +196,7 @@ impl Stream for ForkChoiceStream { #[cfg(test)] mod tests { use super::*; - use alloy_primitives::B256; + use alloy_primitives::{b256, B256}; use reth_execution_types::ExecutionOutcome; use reth_primitives::{Receipt, Receipts, TransactionSigned, TxType}; @@ -332,7 +332,11 @@ mod tests { block_receipts[0].0, BlockReceipts { block: block1.num_hash(), - tx_receipts: vec![(B256::default(), receipt1)] + tx_receipts: vec![( + // Transaction hash of a Transaction::default() + b256!("20b5378c6fe992c118b557d2f8e8bbe0b7567f6fe5483a8f0f1c51e93a9d91ab"), + receipt1 + )] } ); @@ -403,7 +407,11 @@ mod tests { block_receipts[0].0, BlockReceipts { block: old_block1.num_hash(), - tx_receipts: vec![(B256::default(), old_receipt)] + tx_receipts: vec![( + // Transaction hash of a Transaction::default() + b256!("20b5378c6fe992c118b557d2f8e8bbe0b7567f6fe5483a8f0f1c51e93a9d91ab"), + old_receipt + )] } ); // Confirm this is from the reverted segment. @@ -415,7 +423,11 @@ mod tests { block_receipts[1].0, BlockReceipts { block: new_block1.num_hash(), - tx_receipts: vec![(B256::default(), new_receipt)] + tx_receipts: vec![( + // Transaction hash of a Transaction::default() + b256!("20b5378c6fe992c118b557d2f8e8bbe0b7567f6fe5483a8f0f1c51e93a9d91ab"), + new_receipt + )] } ); // Confirm this is from the committed segment. 
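The expected hashes in these tests change because a default `TransactionSigned` now derives its hash lazily from its encoding instead of reporting `B256::ZERO`. The pattern behind the new field is the standard lazy-initialization idiom; a self-contained sketch under that assumption, with a toy digest standing in for `keccak256` of the encoded transaction:

// Illustrative sketch only: a signed payload whose hash is computed on first
// access and cached afterwards, mirroring the `OnceLock<TxHash>` field change.
use std::sync::OnceLock;

struct LazilyHashed {
    payload: Vec<u8>,
    hash: OnceLock<[u8; 32]>,
}

impl LazilyHashed {
    fn new_unhashed(payload: Vec<u8>) -> Self {
        Self { payload, hash: OnceLock::new() }
    }

    // First call computes the digest; later calls return the cached value.
    fn hash_ref(&self) -> &[u8; 32] {
        self.hash.get_or_init(|| {
            // Toy stand-in for keccak256 over the canonical encoding.
            let mut out = [0u8; 32];
            for (i, b) in self.payload.iter().enumerate() {
                out[i % 32] ^= b;
            }
            out
        })
    }
}

fn main() {
    let tx = LazilyHashed::new_unhashed(vec![0xde, 0xad, 0xbe, 0xef]);
    let first = *tx.hash_ref();        // digest computed here
    assert_eq!(first, *tx.hash_ref()); // second call returns the cached value
}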
diff --git a/crates/engine/util/src/reorg.rs b/crates/engine/util/src/reorg.rs index ec69bbd00241..fd80fa9e1658 100644 --- a/crates/engine/util/src/reorg.rs +++ b/crates/engine/util/src/reorg.rs @@ -339,7 +339,7 @@ where // Treat error as fatal Err(error) => { return Err(RethError::Execution(BlockExecutionError::Validation( - BlockValidationError::EVM { hash: tx.hash, error: Box::new(error) }, + BlockValidationError::EVM { hash: tx.hash(), error: Box::new(error) }, ))) } }; diff --git a/crates/ethereum/payload/src/lib.rs b/crates/ethereum/payload/src/lib.rs index 80f6786c404f..ac6427caf362 100644 --- a/crates/ethereum/payload/src/lib.rs +++ b/crates/ethereum/payload/src/lib.rs @@ -399,7 +399,7 @@ where // grab the blob sidecars from the executed txs blob_sidecars = pool .get_all_blobs_exact( - executed_txs.iter().filter(|tx| tx.is_eip4844()).map(|tx| tx.hash).collect(), + executed_txs.iter().filter(|tx| tx.is_eip4844()).map(|tx| tx.hash()).collect(), ) .map_err(PayloadBuilderError::other)?; diff --git a/crates/evm/execution-types/src/chain.rs b/crates/evm/execution-types/src/chain.rs index 2c672884d60a..200a37423cfa 100644 --- a/crates/evm/execution-types/src/chain.rs +++ b/crates/evm/execution-types/src/chain.rs @@ -441,7 +441,7 @@ impl ChainBlocks<'_> { /// Returns an iterator over all transaction hashes in the block #[inline] pub fn transaction_hashes(&self) -> impl Iterator + '_ { - self.blocks.values().flat_map(|block| block.transactions().map(|tx| tx.hash)) + self.blocks.values().flat_map(|block| block.transactions().map(|tx| tx.hash())) } } diff --git a/crates/net/network/src/transactions/mod.rs b/crates/net/network/src/transactions/mod.rs index 20525325ec11..1c93ae549716 100644 --- a/crates/net/network/src/transactions/mod.rs +++ b/crates/net/network/src/transactions/mod.rs @@ -2178,7 +2178,7 @@ mod tests { .await; assert!(!pool.is_empty()); - assert!(pool.get(&signed_tx.hash).is_some()); + assert!(pool.get(signed_tx.hash_ref()).is_some()); handle.terminate().await; } diff --git a/crates/net/network/tests/it/txgossip.rs b/crates/net/network/tests/it/txgossip.rs index 2e2ee4a031a0..98624c4c6098 100644 --- a/crates/net/network/tests/it/txgossip.rs +++ b/crates/net/network/tests/it/txgossip.rs @@ -95,7 +95,7 @@ async fn test_4844_tx_gossip_penalization() { let peer0_reputation_after = peer1.peer_handle().peer_by_id(*peer0.peer_id()).await.unwrap().reputation(); assert_ne!(peer0_reputation_before, peer0_reputation_after); - assert_eq!(received, txs[1].transaction().hash); + assert_eq!(received, txs[1].transaction().hash()); // this will return an [`Empty`] error because blob txs are disallowed to be broadcasted assert!(peer1_tx_listener.try_recv().is_err()); diff --git a/crates/optimism/rpc/src/eth/block.rs b/crates/optimism/rpc/src/eth/block.rs index 6678fbe5df4f..22d26e824b3b 100644 --- a/crates/optimism/rpc/src/eth/block.rs +++ b/crates/optimism/rpc/src/eth/block.rs @@ -48,7 +48,7 @@ where .enumerate() .map(|(idx, (ref tx, receipt))| -> Result<_, _> { let meta = TransactionMeta { - tx_hash: tx.hash, + tx_hash: tx.hash(), index: idx as u64, block_hash, block_number, diff --git a/crates/optimism/rpc/src/eth/transaction.rs b/crates/optimism/rpc/src/eth/transaction.rs index 11e33817229f..dad151c41c40 100644 --- a/crates/optimism/rpc/src/eth/transaction.rs +++ b/crates/optimism/rpc/src/eth/transaction.rs @@ -84,7 +84,8 @@ where tx_info: TransactionInfo, ) -> Result { let from = tx.signer(); - let TransactionSigned { transaction, signature, hash } = tx.into_signed(); + let hash = 
tx.hash(); + let TransactionSigned { transaction, signature, .. } = tx.into_signed(); let mut deposit_receipt_version = None; let mut deposit_nonce = None; diff --git a/crates/primitives/src/alloy_compat.rs b/crates/primitives/src/alloy_compat.rs index 462b27f9c73c..a72c83996c01 100644 --- a/crates/primitives/src/alloy_compat.rs +++ b/crates/primitives/src/alloy_compat.rs @@ -156,7 +156,7 @@ impl TryFrom for TransactionSigned { _ => return Err(ConversionError::Custom("unknown transaction type".to_string())), }; - Ok(Self { transaction, signature, hash }) + Ok(Self { transaction, signature, hash: hash.into() }) } } diff --git a/crates/primitives/src/transaction/mod.rs b/crates/primitives/src/transaction/mod.rs index ea436a92cb52..5900abb42c9a 100644 --- a/crates/primitives/src/transaction/mod.rs +++ b/crates/primitives/src/transaction/mod.rs @@ -14,11 +14,14 @@ use alloy_primitives::{ keccak256, Address, Bytes, ChainId, PrimitiveSignature as Signature, TxHash, TxKind, B256, U256, }; use alloy_rlp::{Decodable, Encodable, Error as RlpError, Header}; -use core::mem; +use core::{ + hash::{Hash, Hasher}, + mem, +}; use derive_more::{AsRef, Deref}; use once_cell as _; #[cfg(not(feature = "std"))] -use once_cell::sync::Lazy as LazyLock; +use once_cell::sync::{Lazy as LazyLock, OnceCell as OnceLock}; #[cfg(feature = "optimism")] use op_alloy_consensus::DepositTransaction; use rayon::prelude::{IntoParallelIterator, ParallelIterator}; @@ -26,7 +29,7 @@ use reth_primitives_traits::InMemorySize; use serde::{Deserialize, Serialize}; use signature::decode_with_eip155_chain_id; #[cfg(feature = "std")] -use std::sync::LazyLock; +use std::sync::{LazyLock, OnceLock}; pub use error::{ InvalidTransactionError, TransactionConversionError, TryFromRecoveredTransactionError, @@ -1078,10 +1081,11 @@ impl From for TransactionSignedNoHash { /// Signed transaction. #[cfg_attr(any(test, feature = "reth-codec"), reth_codecs::add_arbitrary_tests(rlp))] -#[derive(Debug, Clone, PartialEq, Eq, Hash, AsRef, Deref, Serialize, Deserialize)] +#[derive(Debug, Clone, Eq, AsRef, Deref, Serialize, Deserialize)] pub struct TransactionSigned { /// Transaction hash - pub hash: TxHash, + #[serde(skip)] + pub hash: OnceLock, /// The transaction signature values pub signature: Signature, /// Raw transaction info @@ -1106,6 +1110,21 @@ impl AsRef for TransactionSigned { } } +impl Hash for TransactionSigned { + fn hash(&self, state: &mut H) { + self.signature.hash(state); + self.transaction.hash(state); + } +} + +impl PartialEq for TransactionSigned { + fn eq(&self, other: &Self) -> bool { + self.signature == other.signature && + self.transaction == other.transaction && + self.hash_ref() == other.hash_ref() + } +} + // === impl TransactionSigned === impl TransactionSigned { @@ -1120,13 +1139,13 @@ impl TransactionSigned { } /// Transaction hash. Used to identify transaction. - pub const fn hash(&self) -> TxHash { - self.hash + pub fn hash(&self) -> TxHash { + *self.hash_ref() } /// Reference to transaction hash. Used to identify transaction. - pub const fn hash_ref(&self) -> &TxHash { - &self.hash + pub fn hash_ref(&self) -> &TxHash { + self.hash.get_or_init(|| self.recalculate_hash()) } /// Recover signer from signature and hash. @@ -1259,9 +1278,7 @@ impl TransactionSigned { /// /// This will also calculate the transaction hash using its encoding. 
pub fn from_transaction_and_signature(transaction: Transaction, signature: Signature) -> Self { - let mut initial_tx = Self { transaction, hash: Default::default(), signature }; - initial_tx.hash = initial_tx.recalculate_hash(); - initial_tx + Self { transaction, signature, hash: Default::default() } } /// Decodes legacy transaction from the data buffer into a tuple. @@ -1321,7 +1338,8 @@ impl TransactionSigned { // so decoding methods do not need to manually advance the buffer pub fn decode_rlp_legacy_transaction(data: &mut &[u8]) -> alloy_rlp::Result { let (transaction, hash, signature) = Self::decode_rlp_legacy_transaction_tuple(data)?; - let signed = Self { transaction: Transaction::Legacy(transaction), hash, signature }; + let signed = + Self { transaction: Transaction::Legacy(transaction), hash: hash.into(), signature }; Ok(signed) } } @@ -1330,7 +1348,7 @@ impl SignedTransaction for TransactionSigned { type Transaction = Transaction; fn tx_hash(&self) -> &TxHash { - &self.hash + self.hash_ref() } fn transaction(&self) -> &Self::Transaction { @@ -1608,19 +1626,19 @@ impl Decodable2718 for TransactionSigned { TxType::Legacy => Err(Eip2718Error::UnexpectedType(0)), TxType::Eip2930 => { let (tx, signature, hash) = TxEip2930::rlp_decode_signed(buf)?.into_parts(); - Ok(Self { transaction: Transaction::Eip2930(tx), signature, hash }) + Ok(Self { transaction: Transaction::Eip2930(tx), signature, hash: hash.into() }) } TxType::Eip1559 => { let (tx, signature, hash) = TxEip1559::rlp_decode_signed(buf)?.into_parts(); - Ok(Self { transaction: Transaction::Eip1559(tx), signature, hash }) + Ok(Self { transaction: Transaction::Eip1559(tx), signature, hash: hash.into() }) } TxType::Eip7702 => { let (tx, signature, hash) = TxEip7702::rlp_decode_signed(buf)?.into_parts(); - Ok(Self { transaction: Transaction::Eip7702(tx), signature, hash }) + Ok(Self { transaction: Transaction::Eip7702(tx), signature, hash: hash.into() }) } TxType::Eip4844 => { let (tx, signature, hash) = TxEip4844::rlp_decode_signed(buf)?.into_parts(); - Ok(Self { transaction: Transaction::Eip4844(tx), signature, hash }) + Ok(Self { transaction: Transaction::Eip4844(tx), signature, hash: hash.into() }) } #[cfg(feature = "optimism")] TxType::Deposit => Ok(Self::from_transaction_and_signature( @@ -1661,7 +1679,6 @@ impl<'a> arbitrary::Arbitrary<'a> for TransactionSigned { #[cfg(feature = "optimism")] let signature = if transaction.is_deposit() { TxDeposit::signature() } else { signature }; - Ok(Self::from_transaction_and_signature(transaction, signature)) } } @@ -1900,7 +1917,7 @@ pub mod serde_bincode_compat { impl<'a> From<&'a super::TransactionSigned> for TransactionSigned<'a> { fn from(value: &'a super::TransactionSigned) -> Self { Self { - hash: value.hash, + hash: value.hash(), signature: value.signature, transaction: Transaction::from(&value.transaction), } @@ -1910,7 +1927,7 @@ pub mod serde_bincode_compat { impl<'a> From> for super::TransactionSigned { fn from(value: TransactionSigned<'a>) -> Self { Self { - hash: value.hash, + hash: value.hash.into(), signature: value.signature, transaction: value.transaction.into(), } @@ -2203,7 +2220,7 @@ mod tests { ) { let expected = TransactionSigned::from_transaction_and_signature(transaction, signature); if let Some(hash) = hash { - assert_eq!(hash, expected.hash); + assert_eq!(hash, expected.hash()); } assert_eq!(bytes.len(), expected.length()); diff --git a/crates/primitives/src/transaction/pooled.rs b/crates/primitives/src/transaction/pooled.rs index 86cd40a8fe6b..05ad4afa87f1 
100644 --- a/crates/primitives/src/transaction/pooled.rs +++ b/crates/primitives/src/transaction/pooled.rs @@ -69,17 +69,18 @@ impl PooledTransactionsElement { /// [`PooledTransactionsElement`]. Since [`BlobTransaction`] is disallowed to be broadcasted on /// p2p, return an err if `tx` is [`Transaction::Eip4844`]. pub fn try_from_broadcast(tx: TransactionSigned) -> Result { + let hash = tx.hash(); match tx { - TransactionSigned { transaction: Transaction::Legacy(tx), signature, hash } => { + TransactionSigned { transaction: Transaction::Legacy(tx), signature, .. } => { Ok(Self::Legacy { transaction: tx, signature, hash }) } - TransactionSigned { transaction: Transaction::Eip2930(tx), signature, hash } => { + TransactionSigned { transaction: Transaction::Eip2930(tx), signature, .. } => { Ok(Self::Eip2930 { transaction: tx, signature, hash }) } - TransactionSigned { transaction: Transaction::Eip1559(tx), signature, hash } => { + TransactionSigned { transaction: Transaction::Eip1559(tx), signature, .. } => { Ok(Self::Eip1559 { transaction: tx, signature, hash }) } - TransactionSigned { transaction: Transaction::Eip7702(tx), signature, hash } => { + TransactionSigned { transaction: Transaction::Eip7702(tx), signature, .. } => { Ok(Self::Eip7702 { transaction: tx, signature, hash }) } // Not supported because missing blob sidecar @@ -99,9 +100,10 @@ impl PooledTransactionsElement { tx: TransactionSigned, sidecar: BlobTransactionSidecar, ) -> Result { + let hash = tx.hash(); Ok(match tx { // If the transaction is an EIP-4844 transaction... - TransactionSigned { transaction: Transaction::Eip4844(tx), signature, hash } => { + TransactionSigned { transaction: Transaction::Eip4844(tx), signature, .. } => { // Construct a `PooledTransactionsElement::BlobTransaction` with provided sidecar. Self::BlobTransaction(BlobTransaction { signature, @@ -187,23 +189,25 @@ impl PooledTransactionsElement { /// Returns the inner [`TransactionSigned`]. 
pub fn into_transaction(self) -> TransactionSigned { match self { - Self::Legacy { transaction, signature, hash } => { - TransactionSigned { transaction: Transaction::Legacy(transaction), signature, hash } - } + Self::Legacy { transaction, signature, hash } => TransactionSigned { + transaction: Transaction::Legacy(transaction), + signature, + hash: hash.into(), + }, Self::Eip2930 { transaction, signature, hash } => TransactionSigned { transaction: Transaction::Eip2930(transaction), signature, - hash, + hash: hash.into(), }, Self::Eip1559 { transaction, signature, hash } => TransactionSigned { transaction: Transaction::Eip1559(transaction), signature, - hash, + hash: hash.into(), }, Self::Eip7702 { transaction, signature, hash } => TransactionSigned { transaction: Transaction::Eip7702(transaction), signature, - hash, + hash: hash.into(), }, Self::BlobTransaction(blob_tx) => blob_tx.into_parts().0, } @@ -460,7 +464,7 @@ impl Decodable2718 for PooledTransactionsElement { } tx_type => { let typed_tx = TransactionSigned::typed_decode(tx_type, buf)?; - + let hash = typed_tx.hash(); match typed_tx.transaction { Transaction::Legacy(_) => Err(RlpError::Custom( "legacy transactions should not be a result of typed decoding", @@ -473,17 +477,17 @@ impl Decodable2718 for PooledTransactionsElement { Transaction::Eip2930(tx) => Ok(Self::Eip2930 { transaction: tx, signature: typed_tx.signature, - hash: typed_tx.hash, + hash }), Transaction::Eip1559(tx) => Ok(Self::Eip1559 { transaction: tx, signature: typed_tx.signature, - hash: typed_tx.hash, + hash }), Transaction::Eip7702(tx) => Ok(Self::Eip7702 { transaction: tx, signature: typed_tx.signature, - hash: typed_tx.hash, + hash }), #[cfg(feature = "optimism")] Transaction::Deposit(_) => Err(RlpError::Custom("Optimism deposit transaction cannot be decoded to PooledTransactionsElement").into()) diff --git a/crates/primitives/src/transaction/sidecar.rs b/crates/primitives/src/transaction/sidecar.rs index 48a02f4e7405..ec8c9b7f0eb7 100644 --- a/crates/primitives/src/transaction/sidecar.rs +++ b/crates/primitives/src/transaction/sidecar.rs @@ -31,7 +31,8 @@ impl BlobTransaction { tx: TransactionSigned, sidecar: BlobTransactionSidecar, ) -> Result { - let TransactionSigned { transaction, signature, hash } = tx; + let hash = tx.hash(); + let TransactionSigned { transaction, signature, .. 
} = tx; match transaction { Transaction::Eip4844(transaction) => Ok(Self { hash, @@ -39,7 +40,7 @@ impl BlobTransaction { signature, }), transaction => { - let tx = TransactionSigned { transaction, signature, hash }; + let tx = TransactionSigned { transaction, signature, hash: hash.into() }; Err((tx, sidecar)) } } @@ -61,7 +62,7 @@ impl BlobTransaction { pub fn into_parts(self) -> (TransactionSigned, BlobTransactionSidecar) { let transaction = TransactionSigned { transaction: Transaction::Eip4844(self.transaction.tx), - hash: self.hash, + hash: self.hash.into(), signature: self.signature, }; diff --git a/crates/primitives/src/transaction/variant.rs b/crates/primitives/src/transaction/variant.rs index 888c83946cab..dd47df9a8693 100644 --- a/crates/primitives/src/transaction/variant.rs +++ b/crates/primitives/src/transaction/variant.rs @@ -36,8 +36,8 @@ impl TransactionSignedVariant { pub fn hash(&self) -> B256 { match self { Self::SignedNoHash(tx) => tx.hash(), - Self::Signed(tx) => tx.hash, - Self::SignedEcRecovered(tx) => tx.hash, + Self::Signed(tx) => tx.hash(), + Self::SignedEcRecovered(tx) => tx.hash(), } } diff --git a/crates/prune/prune/src/segments/user/transaction_lookup.rs b/crates/prune/prune/src/segments/user/transaction_lookup.rs index 2df8cccf3056..ada4019302ef 100644 --- a/crates/prune/prune/src/segments/user/transaction_lookup.rs +++ b/crates/prune/prune/src/segments/user/transaction_lookup.rs @@ -142,7 +142,7 @@ mod tests { for block in &blocks { tx_hash_numbers.reserve_exact(block.body.transactions.len()); for transaction in &block.body.transactions { - tx_hash_numbers.push((transaction.hash, tx_hash_numbers.len() as u64)); + tx_hash_numbers.push((transaction.hash(), tx_hash_numbers.len() as u64)); } } let tx_hash_numbers_len = tx_hash_numbers.len(); diff --git a/crates/rpc/rpc-eth-api/src/helpers/call.rs b/crates/rpc/rpc-eth-api/src/helpers/call.rs index 1eade554fc1c..d7e74c37b567 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/call.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/call.rs @@ -628,7 +628,7 @@ pub trait Call: LoadState> + SpawnBlocking { cfg.clone(), block_env.clone(), block_txs, - tx.hash, + tx.hash(), )?; let env = EnvWithHandlerCfg::new_with_cfg_env( diff --git a/crates/rpc/rpc-eth-api/src/helpers/trace.rs b/crates/rpc/rpc-eth-api/src/helpers/trace.rs index 104042d17a2b..a1e6084da55d 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/trace.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/trace.rs @@ -204,7 +204,7 @@ pub trait Trace: LoadState> { cfg.clone(), block_env.clone(), block_txs, - tx.hash, + tx.hash(), )?; let env = EnvWithHandlerCfg::new_with_cfg_env( diff --git a/crates/rpc/rpc/src/debug.rs b/crates/rpc/rpc/src/debug.rs index 78040b48c5f4..dd6bf9bbc24e 100644 --- a/crates/rpc/rpc/src/debug.rs +++ b/crates/rpc/rpc/src/debug.rs @@ -107,7 +107,7 @@ where let mut transactions = block.transactions_with_sender().enumerate().peekable(); let mut inspector = None; while let Some((index, (signer, tx))) = transactions.next() { - let tx_hash = tx.hash; + let tx_hash = tx.hash(); let env = EnvWithHandlerCfg { env: Env::boxed( @@ -255,7 +255,7 @@ where cfg.clone(), block_env.clone(), block_txs, - tx.hash, + tx.hash(), )?; let env = EnvWithHandlerCfg { @@ -274,7 +274,7 @@ where Some(TransactionContext { block_hash: Some(block_hash), tx_index: Some(index), - tx_hash: Some(tx.hash), + tx_hash: Some(tx.hash()), }), &mut None, ) diff --git a/crates/rpc/rpc/src/eth/helpers/block.rs b/crates/rpc/rpc/src/eth/helpers/block.rs index fd3b9db9da28..bc1e9344799e 100644 --- 
a/crates/rpc/rpc/src/eth/helpers/block.rs +++ b/crates/rpc/rpc/src/eth/helpers/block.rs @@ -42,7 +42,7 @@ where .enumerate() .map(|(idx, (tx, receipt))| { let meta = TransactionMeta { - tx_hash: tx.hash, + tx_hash: tx.hash(), index: idx as u64, block_hash, block_number, diff --git a/crates/rpc/rpc/src/eth/helpers/types.rs b/crates/rpc/rpc/src/eth/helpers/types.rs index 8f135a9103bd..157213b54e66 100644 --- a/crates/rpc/rpc/src/eth/helpers/types.rs +++ b/crates/rpc/rpc/src/eth/helpers/types.rs @@ -41,7 +41,8 @@ where tx_info: TransactionInfo, ) -> Result { let from = tx.signer(); - let TransactionSigned { transaction, signature, hash } = tx.into_signed(); + let hash = tx.hash(); + let TransactionSigned { transaction, signature, .. } = tx.into_signed(); let inner: TxEnvelope = match transaction { reth_primitives::Transaction::Legacy(tx) => { diff --git a/crates/stages/stages/src/stages/tx_lookup.rs b/crates/stages/stages/src/stages/tx_lookup.rs index 3fdcbd0da649..5208cc936ce6 100644 --- a/crates/stages/stages/src/stages/tx_lookup.rs +++ b/crates/stages/stages/src/stages/tx_lookup.rs @@ -383,7 +383,7 @@ mod tests { for block in &blocks[..=max_processed_block] { for transaction in &block.body.transactions { if block.number > max_pruned_block { - tx_hash_numbers.push((transaction.hash, tx_hash_number)); + tx_hash_numbers.push((transaction.hash(), tx_hash_number)); } tx_hash_number += 1; } diff --git a/crates/storage/provider/src/providers/database/mod.rs b/crates/storage/provider/src/providers/database/mod.rs index 354eb10c1036..7d94fb98a80a 100644 --- a/crates/storage/provider/src/providers/database/mod.rs +++ b/crates/storage/provider/src/providers/database/mod.rs @@ -725,7 +725,10 @@ mod tests { provider.transaction_sender(0), Ok(Some(sender)) if sender == block.body.transactions[0].recover_signer().unwrap() ); - assert_matches!(provider.transaction_id(block.body.transactions[0].hash), Ok(Some(0))); + assert_matches!( + provider.transaction_id(block.body.transactions[0].hash()), + Ok(Some(0)) + ); } { @@ -743,7 +746,7 @@ mod tests { Ok(_) ); assert_matches!(provider.transaction_sender(0), Ok(None)); - assert_matches!(provider.transaction_id(block.body.transactions[0].hash), Ok(None)); + assert_matches!(provider.transaction_id(block.body.transactions[0].hash()), Ok(None)); } } diff --git a/crates/storage/provider/src/providers/database/provider.rs b/crates/storage/provider/src/providers/database/provider.rs index 8c390b06c08f..d35e0a971a37 100644 --- a/crates/storage/provider/src/providers/database/provider.rs +++ b/crates/storage/provider/src/providers/database/provider.rs @@ -551,7 +551,7 @@ impl DatabaseProvider { .map(|tx| match transaction_kind { TransactionVariant::NoHash => TransactionSigned { // Caller explicitly asked for no hash, so we don't calculate it - hash: B256::ZERO, + hash: Default::default(), signature: tx.signature, transaction: tx.transaction, }, @@ -1500,7 +1500,7 @@ impl> Transaction fn transaction_by_hash(&self, hash: TxHash) -> ProviderResult> { if let Some(id) = self.transaction_id(hash)? { Ok(self.transaction_by_id_no_hash(id)?.map(|tx| TransactionSigned { - hash, + hash: hash.into(), signature: tx.signature, transaction: tx.transaction, })) @@ -1518,7 +1518,7 @@ impl> Transaction if let Some(transaction_id) = self.transaction_id(tx_hash)? { if let Some(tx) = self.transaction_by_id_no_hash(transaction_id)? 
{ let transaction = TransactionSigned { - hash: tx_hash, + hash: tx_hash.into(), signature: tx.signature, transaction: tx.transaction, }; diff --git a/crates/storage/provider/src/test_utils/blocks.rs b/crates/storage/provider/src/test_utils/blocks.rs index 3259eee2bfbf..2b8dc0f85cac 100644 --- a/crates/storage/provider/src/test_utils/blocks.rs +++ b/crates/storage/provider/src/test_utils/blocks.rs @@ -89,7 +89,7 @@ pub(crate) static TEST_BLOCK: LazyLock = LazyLock::new(|| SealedBlo ), body: BlockBody { transactions: vec![TransactionSigned { - hash: hex!("3541dd1d17e76adeb25dcf2b0a9b60a1669219502e58dcf26a2beafbfb550397").into(), + hash: b256!("3541dd1d17e76adeb25dcf2b0a9b60a1669219502e58dcf26a2beafbfb550397").into(), signature: Signature::new( U256::from_str( "51983300959770368863831494747186777928121405155922056726144551509338672451120", diff --git a/crates/transaction-pool/src/blobstore/tracker.rs b/crates/transaction-pool/src/blobstore/tracker.rs index 63d6e30eea05..d58abe9b4628 100644 --- a/crates/transaction-pool/src/blobstore/tracker.rs +++ b/crates/transaction-pool/src/blobstore/tracker.rs @@ -43,7 +43,7 @@ impl BlobStoreCanonTracker { .body .transactions() .filter(|tx| tx.transaction.is_eip4844()) - .map(|tx| tx.hash); + .map(|tx| tx.hash()); (*num, iter) }); self.add_blocks(blob_txs); @@ -128,18 +128,18 @@ mod tests { body: BlockBody { transactions: vec![ TransactionSigned { - hash: tx1_hash, + hash: tx1_hash.into(), transaction: Transaction::Eip4844(Default::default()), ..Default::default() }, TransactionSigned { - hash: tx2_hash, + hash: tx2_hash.into(), transaction: Transaction::Eip4844(Default::default()), ..Default::default() }, // Another transaction that is not EIP-4844 TransactionSigned { - hash: B256::random(), + hash: B256::random().into(), transaction: Transaction::Eip7702(Default::default()), ..Default::default() }, @@ -161,12 +161,12 @@ mod tests { body: BlockBody { transactions: vec![ TransactionSigned { - hash: tx3_hash, + hash: tx3_hash.into(), transaction: Transaction::Eip1559(Default::default()), ..Default::default() }, TransactionSigned { - hash: tx2_hash, + hash: tx2_hash.into(), transaction: Transaction::Eip2930(Default::default()), ..Default::default() }, diff --git a/crates/transaction-pool/src/maintain.rs b/crates/transaction-pool/src/maintain.rs index 271c63a388a4..47e70e914331 100644 --- a/crates/transaction-pool/src/maintain.rs +++ b/crates/transaction-pool/src/maintain.rs @@ -317,7 +317,7 @@ pub async fn maintain_transaction_pool( // find all transactions that were mined in the old chain but not in the new chain let pruned_old_transactions = old_blocks .transactions_ecrecovered() - .filter(|tx| !new_mined_transactions.contains(&tx.hash)) + .filter(|tx| !new_mined_transactions.contains(tx.hash_ref())) .filter_map(|tx| { if tx.is_eip4844() { // reorged blobs no longer include the blob, which is necessary for @@ -325,7 +325,7 @@ pub async fn maintain_transaction_pool( // been validated previously, we still need the blob in order to // accurately set the transaction's // encoded-length which is propagated over the network. 
- pool.get_blob(tx.hash) + pool.get_blob(TransactionSigned::hash(&tx)) .ok() .flatten() .map(Arc::unwrap_or_clone) diff --git a/crates/transaction-pool/src/test_utils/mock.rs b/crates/transaction-pool/src/test_utils/mock.rs index 344781b1f583..009543642ffd 100644 --- a/crates/transaction-pool/src/test_utils/mock.rs +++ b/crates/transaction-pool/src/test_utils/mock.rs @@ -911,7 +911,7 @@ impl From for MockTransaction { impl From for TransactionSignedEcRecovered { fn from(tx: MockTransaction) -> Self { let signed_tx = TransactionSigned { - hash: *tx.hash(), + hash: (*tx.hash()).into(), signature: Signature::test_signature(), transaction: tx.clone().into(), }; diff --git a/docs/crates/network.md b/docs/crates/network.md index a6ac24305658..be2c7cb3b143 100644 --- a/docs/crates/network.md +++ b/docs/crates/network.md @@ -991,9 +991,9 @@ fn import_transactions(&mut self, peer_id: PeerId, transactions: Vec { // transaction was already inserted entry.get_mut().push(peer_id); diff --git a/examples/db-access/src/main.rs b/examples/db-access/src/main.rs index 0f7d1a269f3c..f3b7fdf58421 100644 --- a/examples/db-access/src/main.rs +++ b/examples/db-access/src/main.rs @@ -92,16 +92,17 @@ fn txs_provider_example(provider: T) -> eyre::Result<() // Can query the tx by hash let tx_by_hash = - provider.transaction_by_hash(tx.hash)?.ok_or(eyre::eyre!("txhash not found"))?; + provider.transaction_by_hash(tx.hash())?.ok_or(eyre::eyre!("txhash not found"))?; assert_eq!(tx, tx_by_hash); // Can query the tx by hash with info about the block it was included in - let (tx, meta) = - provider.transaction_by_hash_with_meta(tx.hash)?.ok_or(eyre::eyre!("txhash not found"))?; - assert_eq!(tx.hash, meta.tx_hash); + let (tx, meta) = provider + .transaction_by_hash_with_meta(tx.hash())? + .ok_or(eyre::eyre!("txhash not found"))?; + assert_eq!(tx.hash(), meta.tx_hash); // Can reverse lookup the key too - let id = provider.transaction_id(tx.hash)?.ok_or(eyre::eyre!("txhash not found"))?; + let id = provider.transaction_id(tx.hash())?.ok_or(eyre::eyre!("txhash not found"))?; assert_eq!(id, txid); // Can find the block of a transaction given its key @@ -171,7 +172,7 @@ fn receipts_provider_example Date: Fri, 22 Nov 2024 00:29:08 +0100 Subject: [PATCH 102/156] chore: remove txext trait (#12760) --- crates/net/network/src/transactions/mod.rs | 6 +-- crates/primitives-traits/src/lib.rs | 2 +- .../primitives-traits/src/transaction/mod.rs | 29 +++---------- .../src/transaction/signed.rs | 42 +++++-------------- crates/primitives/src/transaction/mod.rs | 37 ++++------------ 5 files changed, 27 insertions(+), 89 deletions(-) diff --git a/crates/net/network/src/transactions/mod.rs b/crates/net/network/src/transactions/mod.rs index 1c93ae549716..9628dbb4f1ba 100644 --- a/crates/net/network/src/transactions/mod.rs +++ b/crates/net/network/src/transactions/mod.rs @@ -49,7 +49,7 @@ use reth_network_p2p::{ use reth_network_peers::PeerId; use reth_network_types::ReputationChangeKind; use reth_primitives::{PooledTransactionsElement, TransactionSigned}; -use reth_primitives_traits::{SignedTransaction, TransactionExt, TxType}; +use reth_primitives_traits::{SignedTransaction, TxType}; use reth_tokio_util::EventStream; use reth_transaction_pool::{ error::{PoolError, PoolResult}, @@ -1617,7 +1617,7 @@ impl FullTransactionsBuilder { // via `GetPooledTransactions`. 
// // From: - if !transaction.transaction.transaction().tx_type().is_broadcastable_in_full() { + if !transaction.transaction.tx_type().is_broadcastable_in_full() { self.pooled.push(transaction); return } @@ -1683,7 +1683,7 @@ impl PooledTransactionsHashesBuilder { Self::Eth68(msg) => { msg.hashes.push(*tx.tx_hash()); msg.sizes.push(tx.size); - msg.types.push(tx.transaction.transaction().tx_type().into()); + msg.types.push(tx.transaction.tx_type().into()); } } } diff --git a/crates/primitives-traits/src/lib.rs b/crates/primitives-traits/src/lib.rs index 5c969152d8d6..c149c6cd7e47 100644 --- a/crates/primitives-traits/src/lib.rs +++ b/crates/primitives-traits/src/lib.rs @@ -29,7 +29,7 @@ pub use transaction::{ execute::FillTxEnv, signed::{FullSignedTx, SignedTransaction}, tx_type::{FullTxType, TxType}, - FullTransaction, Transaction, TransactionExt, + FullTransaction, Transaction, }; mod integer_list; diff --git a/crates/primitives-traits/src/transaction/mod.rs b/crates/primitives-traits/src/transaction/mod.rs index 7647c94496f6..f176382146b7 100644 --- a/crates/primitives-traits/src/transaction/mod.rs +++ b/crates/primitives-traits/src/transaction/mod.rs @@ -4,16 +4,13 @@ pub mod execute; pub mod signed; pub mod tx_type; +use crate::{InMemorySize, MaybeArbitrary, MaybeCompact, MaybeSerde}; use core::{fmt, hash::Hash}; -use alloy_primitives::B256; - -use crate::{FullTxType, InMemorySize, MaybeArbitrary, MaybeCompact, MaybeSerde, TxType}; - /// Helper trait that unifies all behaviour required by transaction to support full node operations. -pub trait FullTransaction: Transaction + MaybeCompact {} +pub trait FullTransaction: Transaction + MaybeCompact {} -impl FullTransaction for T where T: Transaction + MaybeCompact {} +impl FullTransaction for T where T: Transaction + MaybeCompact {} /// Abstraction of a transaction. pub trait Transaction: @@ -26,7 +23,7 @@ pub trait Transaction: + Eq + PartialEq + Hash - + TransactionExt + + alloy_consensus::Transaction + InMemorySize + MaybeSerde + MaybeArbitrary @@ -43,25 +40,9 @@ impl Transaction for T where + Eq + PartialEq + Hash - + TransactionExt + + alloy_consensus::Transaction + InMemorySize + MaybeSerde + MaybeArbitrary { } - -/// Extension trait of [`alloy_consensus::Transaction`]. -#[auto_impl::auto_impl(&, Arc)] -pub trait TransactionExt: alloy_consensus::Transaction { - /// Transaction envelope type ID. - type Type: TxType; - - /// Heavy operation that return signature hash over rlp encoded transaction. - /// It is only for signature signing or signer recovery. - fn signature_hash(&self) -> B256; - - /// Returns the transaction type. - fn tx_type(&self) -> Self::Type { - Self::Type::try_from(self.ty()).expect("should decode tx type id") - } -} diff --git a/crates/primitives-traits/src/transaction/signed.rs b/crates/primitives-traits/src/transaction/signed.rs index 563f3a6f3366..64acbd3415c0 100644 --- a/crates/primitives-traits/src/transaction/signed.rs +++ b/crates/primitives-traits/src/transaction/signed.rs @@ -1,25 +1,15 @@ //! API of a signed transaction. 
+use crate::{FillTxEnv, InMemorySize, MaybeArbitrary, MaybeCompact, MaybeSerde, TxType}; use alloc::fmt; -use core::hash::Hash; - use alloy_eips::eip2718::{Decodable2718, Encodable2718}; use alloy_primitives::{keccak256, Address, PrimitiveSignature, TxHash, B256}; - -use crate::{ - FillTxEnv, FullTransaction, InMemorySize, MaybeArbitrary, MaybeCompact, MaybeSerde, Transaction, -}; +use core::hash::Hash; /// Helper trait that unifies all behaviour required by block to support full node operations. -pub trait FullSignedTx: - SignedTransaction + FillTxEnv + MaybeCompact -{ -} +pub trait FullSignedTx: SignedTransaction + FillTxEnv + MaybeCompact {} -impl FullSignedTx for T where - T: SignedTransaction + FillTxEnv + MaybeCompact -{ -} +impl FullSignedTx for T where T: SignedTransaction + FillTxEnv + MaybeCompact {} /// A signed transaction. #[auto_impl::auto_impl(&, Arc)] @@ -42,15 +32,17 @@ pub trait SignedTransaction: + MaybeArbitrary + InMemorySize { - /// Unsigned transaction type. - type Transaction: Transaction; + /// Transaction envelope type ID. + type Type: TxType; + + /// Returns the transaction type. + fn tx_type(&self) -> Self::Type { + Self::Type::try_from(self.ty()).expect("should decode tx type id") + } /// Returns reference to transaction hash. fn tx_hash(&self) -> &TxHash; - /// Returns reference to transaction. - fn transaction(&self) -> &Self::Transaction; - /// Returns reference to signature. fn signature(&self) -> &PrimitiveSignature; @@ -78,15 +70,3 @@ pub trait SignedTransaction: keccak256(self.encoded_2718()) } } - -/// Helper trait used in testing. -#[cfg(feature = "test-utils")] -pub trait SignedTransactionTesting: SignedTransaction { - /// Create a new signed transaction from a transaction and its signature. - /// - /// This will also calculate the transaction hash using its encoding. - fn from_transaction_and_signature( - transaction: Self::Transaction, - signature: PrimitiveSignature, - ) -> Self; -} diff --git a/crates/primitives/src/transaction/mod.rs b/crates/primitives/src/transaction/mod.rs index 5900abb42c9a..1e313ca8b2b0 100644 --- a/crates/primitives/src/transaction/mod.rs +++ b/crates/primitives/src/transaction/mod.rs @@ -1,5 +1,6 @@ //! Transaction types. 
+use alloc::vec::Vec; use alloy_consensus::{ transaction::RlpEcdsaTx, SignableTransaction, Transaction as _, TxEip1559, TxEip2930, TxEip4844, TxEip7702, TxLegacy, @@ -24,21 +25,23 @@ use once_cell as _; use once_cell::sync::{Lazy as LazyLock, OnceCell as OnceLock}; #[cfg(feature = "optimism")] use op_alloy_consensus::DepositTransaction; +#[cfg(feature = "optimism")] +use op_alloy_consensus::TxDeposit; use rayon::prelude::{IntoParallelIterator, ParallelIterator}; -use reth_primitives_traits::InMemorySize; +use reth_primitives_traits::{InMemorySize, SignedTransaction}; +use revm_primitives::{AuthorizationList, TxEnv}; use serde::{Deserialize, Serialize}; use signature::decode_with_eip155_chain_id; #[cfg(feature = "std")] use std::sync::{LazyLock, OnceLock}; +pub use compat::FillTxEnv; pub use error::{ InvalidTransactionError, TransactionConversionError, TryFromRecoveredTransactionError, }; pub use meta::TransactionMeta; pub use pooled::{PooledTransactionsElement, PooledTransactionsElementEcRecovered}; pub use sidecar::BlobTransaction; - -pub use compat::FillTxEnv; pub use signature::{recover_signer, recover_signer_unchecked}; pub use tx_type::TxType; pub use variant::TransactionSignedVariant; @@ -58,12 +61,6 @@ pub mod signature; pub(crate) mod util; mod variant; -use alloc::vec::Vec; -#[cfg(feature = "optimism")] -use op_alloy_consensus::TxDeposit; -use reth_primitives_traits::{transaction::TransactionExt, SignedTransaction}; -use revm_primitives::{AuthorizationList, TxEnv}; - /// Either a transaction hash or number. pub type TxHashOrNumber = BlockHashOrNumber; @@ -839,22 +836,6 @@ impl alloy_consensus::Transaction for Transaction { } } -impl TransactionExt for Transaction { - type Type = TxType; - - fn signature_hash(&self) -> B256 { - match self { - Self::Legacy(tx) => tx.signature_hash(), - Self::Eip2930(tx) => tx.signature_hash(), - Self::Eip1559(tx) => tx.signature_hash(), - Self::Eip4844(tx) => tx.signature_hash(), - Self::Eip7702(tx) => tx.signature_hash(), - #[cfg(feature = "optimism")] - _ => todo!("use op type for op"), - } - } -} - /// Signed transaction without its Hash. Used type for inserting into the DB. /// /// This can by converted to [`TransactionSigned`] by calling [`TransactionSignedNoHash::hash`]. 
@@ -1345,16 +1326,12 @@ impl TransactionSigned { } impl SignedTransaction for TransactionSigned { - type Transaction = Transaction; + type Type = TxType; fn tx_hash(&self) -> &TxHash { self.hash_ref() } - fn transaction(&self) -> &Self::Transaction { - &self.transaction - } - fn signature(&self) -> &Signature { &self.signature } From 0eaef1f1dcbe229b5964cf1622b3018b4a1f80b9 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 22 Nov 2024 00:48:38 +0100 Subject: [PATCH 103/156] chore: rm unused error variants (#12763) --- crates/storage/errors/src/provider.rs | 14 +------------- 1 file changed, 1 insertion(+), 13 deletions(-) diff --git a/crates/storage/errors/src/provider.rs b/crates/storage/errors/src/provider.rs index b6fcee545d55..152427a128bf 100644 --- a/crates/storage/errors/src/provider.rs +++ b/crates/storage/errors/src/provider.rs @@ -1,6 +1,6 @@ use crate::{db::DatabaseError, lockfile::StorageLockError, writer::UnifiedStorageWriterError}; use alloy_eips::BlockHashOrNumber; -use alloy_primitives::{Address, BlockHash, BlockNumber, TxNumber, B256, U256}; +use alloy_primitives::{Address, BlockHash, BlockNumber, TxNumber, B256}; use derive_more::Display; use reth_primitives::{GotExpected, StaticFileSegment, TxHashOrNumber}; @@ -81,15 +81,6 @@ pub enum ProviderError { /// Unable to find the safe block. #[display("safe block does not exist")] SafeBlockNotFound, - /// Mismatch of sender and transaction. - #[display("mismatch of sender and transaction id {tx_id}")] - MismatchOfTransactionAndSenderId { - /// The transaction ID. - tx_id: TxNumber, - }, - /// Block body wrong transaction count. - #[display("stored block indices does not match transaction count")] - BlockBodyTransactionCount, /// Thrown when the cache service task dropped. #[display("cache service task stopped")] CacheServiceUnavailable, @@ -139,9 +130,6 @@ pub enum ProviderError { /// Static File Provider was initialized as read-only. #[display("cannot get a writer on a read-only environment.")] ReadOnlyStaticFileAccess, - /// Error encountered when the block number conversion from U256 to u64 causes an overflow. - #[display("failed to convert block number U256 to u64: {_0}")] - BlockNumberOverflow(U256), /// Consistent view error. 
#[display("failed to initialize consistent view: {_0}")] ConsistentView(Box), From f211aacf551afbf707b7fb6a40d17c8d2264110e Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 22 Nov 2024 01:07:04 +0100 Subject: [PATCH 104/156] chore: rm tx alias re-export (#12762) --- crates/primitives/src/lib.rs | 2 +- crates/primitives/src/transaction/mod.rs | 4 ---- crates/storage/errors/src/provider.rs | 20 ++++++++------------ 3 files changed, 9 insertions(+), 17 deletions(-) diff --git a/crates/primitives/src/lib.rs b/crates/primitives/src/lib.rs index c46c437dd714..7999588e49d8 100644 --- a/crates/primitives/src/lib.rs +++ b/crates/primitives/src/lib.rs @@ -48,7 +48,7 @@ pub use transaction::{ util::secp256k1::{public_key_to_address, recover_signer_unchecked, sign_message}, BlobTransaction, InvalidTransactionError, PooledTransactionsElement, PooledTransactionsElementEcRecovered, Transaction, TransactionMeta, TransactionSigned, - TransactionSignedEcRecovered, TransactionSignedNoHash, TxHashOrNumber, TxType, + TransactionSignedEcRecovered, TransactionSignedNoHash, TxType, }; // Re-exports diff --git a/crates/primitives/src/transaction/mod.rs b/crates/primitives/src/transaction/mod.rs index 1e313ca8b2b0..1ac7b4394e0a 100644 --- a/crates/primitives/src/transaction/mod.rs +++ b/crates/primitives/src/transaction/mod.rs @@ -6,7 +6,6 @@ use alloy_consensus::{ TxEip4844, TxEip7702, TxLegacy, }; use alloy_eips::{ - eip1898::BlockHashOrNumber, eip2718::{Decodable2718, Eip2718Error, Eip2718Result, Encodable2718}, eip2930::AccessList, eip7702::SignedAuthorization, @@ -61,9 +60,6 @@ pub mod signature; pub(crate) mod util; mod variant; -/// Either a transaction hash or number. -pub type TxHashOrNumber = BlockHashOrNumber; - /// Expected number of transactions where we can expect a speed-up by recovering the senders in /// parallel. pub static PARALLEL_SENDER_RECOVERY_THRESHOLD: LazyLock = diff --git a/crates/storage/errors/src/provider.rs b/crates/storage/errors/src/provider.rs index 152427a128bf..9e6720b84403 100644 --- a/crates/storage/errors/src/provider.rs +++ b/crates/storage/errors/src/provider.rs @@ -1,13 +1,9 @@ use crate::{db::DatabaseError, lockfile::StorageLockError, writer::UnifiedStorageWriterError}; -use alloy_eips::BlockHashOrNumber; +use alloc::{boxed::Box, string::String}; +use alloy_eips::{BlockHashOrNumber, HashOrNumber}; use alloy_primitives::{Address, BlockHash, BlockNumber, TxNumber, B256}; use derive_more::Display; -use reth_primitives::{GotExpected, StaticFileSegment, TxHashOrNumber}; - -#[cfg(feature = "std")] -use std::path::PathBuf; - -use alloc::{boxed::Box, string::String}; +use reth_primitives::{GotExpected, StaticFileSegment}; /// Provider result type. pub type ProviderResult = Result; @@ -66,12 +62,12 @@ pub enum ProviderError { /// when required header related data was not found but was required. #[display("no header found for {_0:?}")] HeaderNotFound(BlockHashOrNumber), - /// The specific transaction is missing. + /// The specific transaction identified by hash or id is missing. #[display("no transaction found for {_0:?}")] - TransactionNotFound(TxHashOrNumber), - /// The specific receipt is missing + TransactionNotFound(HashOrNumber), + /// The specific receipt for a transaction identified by hash or id is missing #[display("no receipt found for {_0:?}")] - ReceiptNotFound(TxHashOrNumber), + ReceiptNotFound(HashOrNumber), /// Unable to find the best block. 
#[display("best block does not exist")] BestBlockNotFound, @@ -111,7 +107,7 @@ pub enum ProviderError { /// Static File is not found at specified path. #[cfg(feature = "std")] #[display("not able to find {_0} static file at {_1:?}")] - MissingStaticFilePath(StaticFileSegment, PathBuf), + MissingStaticFilePath(StaticFileSegment, std::path::PathBuf), /// Static File is not found for requested block. #[display("not able to find {_0} static file for block number {_1}")] MissingStaticFileBlock(StaticFileSegment, BlockNumber), From d3b68656c2118e7ae6f76d8a132579068f74246f Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Fri, 22 Nov 2024 12:30:56 +0400 Subject: [PATCH 105/156] fix: always truncate static files (#12765) --- .../provider/src/providers/database/provider.rs | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/crates/storage/provider/src/providers/database/provider.rs b/crates/storage/provider/src/providers/database/provider.rs index d35e0a971a37..ff80213fdf31 100644 --- a/crates/storage/provider/src/providers/database/provider.rs +++ b/crates/storage/provider/src/providers/database/provider.rs @@ -3030,13 +3030,13 @@ impl BlockWriter .static_file_provider .get_highest_static_file_tx(StaticFileSegment::Transactions); - if let Some(static_tx) = static_file_tx_num { - if static_tx >= unwind_tx_from { - self.static_file_provider - .latest_writer(StaticFileSegment::Transactions)? - .prune_transactions(static_tx - unwind_tx_from + 1, block)?; - } - } + let to_delete = static_file_tx_num + .map(|static_tx| (static_tx + 1).saturating_sub(unwind_tx_from)) + .unwrap_or_default(); + + self.static_file_provider + .latest_writer(StaticFileSegment::Transactions)? + .prune_transactions(to_delete, block)?; } Ok(()) From 7d24aa40e82dab393cff77dffaf976f059153f83 Mon Sep 17 00:00:00 2001 From: Federico Gimenez Date: Fri, 22 Nov 2024 09:44:56 +0100 Subject: [PATCH 106/156] chore(trie): log proof result send error (#12749) --- crates/trie/parallel/src/proof.rs | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/crates/trie/parallel/src/proof.rs b/crates/trie/parallel/src/proof.rs index 88321c821a8d..dcb1a0231dd1 100644 --- a/crates/trie/parallel/src/proof.rs +++ b/crates/trie/parallel/src/proof.rs @@ -22,7 +22,7 @@ use reth_trie::{ use reth_trie_common::proof::ProofRetainer; use reth_trie_db::{DatabaseHashedCursorFactory, DatabaseTrieCursorFactory}; use std::sync::Arc; -use tracing::debug; +use tracing::{debug, error}; #[cfg(feature = "metrics")] use crate::metrics::ParallelStateRootMetrics; @@ -126,7 +126,9 @@ where )) }) })(); - let _ = tx.send(result); + if let Err(err) = tx.send(result) { + error!(target: "trie::parallel", ?hashed_address, err_content = ?err.0, "Failed to send proof result"); + } }); storage_proofs.insert(hashed_address, rx); } From 3765ae244408ed8bf8243e2b7f48e46d7f872f2b Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 22 Nov 2024 11:37:34 +0100 Subject: [PATCH 107/156] feat: add TransactionSigned::new fns (#12768) --- crates/blockchain-tree/src/blockchain_tree.rs | 2 +- crates/chain-state/src/test_utils.rs | 3 +- crates/consensus/common/src/validation.rs | 2 +- crates/net/eth-wire-types/src/blocks.rs | 10 +++---- crates/net/eth-wire-types/src/transactions.rs | 28 +++++++++--------- crates/net/network/tests/it/requests.rs | 2 +- crates/net/network/tests/it/txgossip.rs | 5 +--- crates/optimism/evm/src/execute.rs | 8 ++--- crates/optimism/node/src/txpool.rs | 2 +- crates/optimism/node/tests/it/priority.rs | 5 +--- 
crates/primitives/src/transaction/mod.rs | 29 +++++++++++-------- crates/transaction-pool/src/test_utils/gen.rs | 2 +- crates/transaction-pool/src/traits.rs | 10 +++---- testing/testing-utils/src/generators.rs | 4 +-- 14 files changed, 55 insertions(+), 57 deletions(-) diff --git a/crates/blockchain-tree/src/blockchain_tree.rs b/crates/blockchain-tree/src/blockchain_tree.rs index c778e0508dac..67b200e64844 100644 --- a/crates/blockchain-tree/src/blockchain_tree.rs +++ b/crates/blockchain-tree/src/blockchain_tree.rs @@ -1570,7 +1570,7 @@ mod tests { let single_tx_cost = U256::from(INITIAL_BASE_FEE * MIN_TRANSACTION_GAS); let mock_tx = |nonce: u64| -> TransactionSignedEcRecovered { - TransactionSigned::from_transaction_and_signature( + TransactionSigned::new_unhashed( Transaction::Eip1559(TxEip1559 { chain_id: chain_spec.chain.id(), nonce, diff --git a/crates/chain-state/src/test_utils.rs b/crates/chain-state/src/test_utils.rs index 63689f07f039..af0c363fe486 100644 --- a/crates/chain-state/src/test_utils.rs +++ b/crates/chain-state/src/test_utils.rs @@ -102,8 +102,7 @@ impl TestBlockBuilder { let signature_hash = tx.signature_hash(); let signature = self.signer_pk.sign_hash_sync(&signature_hash).unwrap(); - TransactionSigned::from_transaction_and_signature(tx, signature) - .with_signer(self.signer) + TransactionSigned::new_unhashed(tx, signature).with_signer(self.signer) }; let num_txs = rng.gen_range(0..5); diff --git a/crates/consensus/common/src/validation.rs b/crates/consensus/common/src/validation.rs index 62357b4b9b12..6042f16bf50f 100644 --- a/crates/consensus/common/src/validation.rs +++ b/crates/consensus/common/src/validation.rs @@ -450,7 +450,7 @@ mod tests { let signature = Signature::new(U256::default(), U256::default(), true); - TransactionSigned::from_transaction_and_signature(request, signature) + TransactionSigned::new_unhashed(request, signature) } /// got test block diff --git a/crates/net/eth-wire-types/src/blocks.rs b/crates/net/eth-wire-types/src/blocks.rs index 06549e769e66..97bbe36b3d61 100644 --- a/crates/net/eth-wire-types/src/blocks.rs +++ b/crates/net/eth-wire-types/src/blocks.rs @@ -342,7 +342,7 @@ mod tests { message: BlockBodies(vec![ BlockBody { transactions: vec![ - TransactionSigned::from_transaction_and_signature(Transaction::Legacy(TxLegacy { + TransactionSigned::new_unhashed(Transaction::Legacy(TxLegacy { chain_id: Some(1), nonce: 0x8u64, gas_price: 0x4a817c808, @@ -356,7 +356,7 @@ mod tests { false, ), ), - TransactionSigned::from_transaction_and_signature(Transaction::Legacy(TxLegacy { + TransactionSigned::new_unhashed(Transaction::Legacy(TxLegacy { chain_id: Some(1), nonce: 0x9u64, gas_price: 0x4a817c809, @@ -413,7 +413,7 @@ mod tests { message: BlockBodies(vec![ BlockBody { transactions: vec![ - TransactionSigned::from_transaction_and_signature(Transaction::Legacy( + TransactionSigned::new_unhashed(Transaction::Legacy( TxLegacy { chain_id: Some(1), nonce: 0x8u64, @@ -423,13 +423,13 @@ mod tests { value: U256::from(0x200u64), input: Default::default(), }), - Signature::new( + Signature::new( U256::from_str("0x64b1702d9298fee62dfeccc57d322a463ad55ca201256d01f62b45b2e1c21c12").unwrap(), U256::from_str("0x64b1702d9298fee62dfeccc57d322a463ad55ca201256d01f62b45b2e1c21c10").unwrap(), false, ), ), - TransactionSigned::from_transaction_and_signature( + TransactionSigned::new_unhashed( Transaction::Legacy(TxLegacy { chain_id: Some(1), nonce: 0x9u64, diff --git a/crates/net/eth-wire-types/src/transactions.rs b/crates/net/eth-wire-types/src/transactions.rs index 
26f62b7f76a1..ca76f0a8c7ed 100644 --- a/crates/net/eth-wire-types/src/transactions.rs +++ b/crates/net/eth-wire-types/src/transactions.rs @@ -130,7 +130,7 @@ mod tests { let expected = hex!("f8d7820457f8d2f867088504a817c8088302e2489435353535353535353535353535353535353535358202008025a064b1702d9298fee62dfeccc57d322a463ad55ca201256d01f62b45b2e1c21c12a064b1702d9298fee62dfeccc57d322a463ad55ca201256d01f62b45b2e1c21c10f867098504a817c809830334509435353535353535353535353535353535353535358202d98025a052f8f61201b2b11a78d6e866abc9c3db2ae8631fa656bfe5cb53668255367afba052f8f61201b2b11a78d6e866abc9c3db2ae8631fa656bfe5cb53668255367afb"); let mut data = vec![]; let txs = vec![ - TransactionSigned::from_transaction_and_signature( + TransactionSigned::new_unhashed( Transaction::Legacy(TxLegacy { chain_id: Some(1), nonce: 0x8u64, @@ -152,7 +152,7 @@ mod tests { false, ), ), - TransactionSigned::from_transaction_and_signature( + TransactionSigned::new_unhashed( Transaction::Legacy(TxLegacy { chain_id: Some(1), nonce: 0x09u64, @@ -196,7 +196,7 @@ mod tests { fn decode_pooled_transactions() { let data = hex!("f8d7820457f8d2f867088504a817c8088302e2489435353535353535353535353535353535353535358202008025a064b1702d9298fee62dfeccc57d322a463ad55ca201256d01f62b45b2e1c21c12a064b1702d9298fee62dfeccc57d322a463ad55ca201256d01f62b45b2e1c21c10f867098504a817c809830334509435353535353535353535353535353535353535358202d98025a052f8f61201b2b11a78d6e866abc9c3db2ae8631fa656bfe5cb53668255367afba052f8f61201b2b11a78d6e866abc9c3db2ae8631fa656bfe5cb53668255367afb"); let txs = vec![ - TransactionSigned::from_transaction_and_signature( + TransactionSigned::new_unhashed( Transaction::Legacy(TxLegacy { chain_id: Some(1), nonce: 0x8u64, @@ -218,7 +218,7 @@ mod tests { false, ), ), - TransactionSigned::from_transaction_and_signature( + TransactionSigned::new_unhashed( Transaction::Legacy(TxLegacy { chain_id: Some(1), nonce: 0x09u64, @@ -260,7 +260,7 @@ mod tests { let decoded_transactions = RequestPair::::decode(&mut &data[..]).unwrap(); let txs = vec![ - TransactionSigned::from_transaction_and_signature( + TransactionSigned::new_unhashed( Transaction::Legacy(TxLegacy { chain_id: Some(4), nonce: 15u64, @@ -282,7 +282,7 @@ mod tests { true, ), ), - TransactionSigned::from_transaction_and_signature( + TransactionSigned::new_unhashed( Transaction::Eip1559(TxEip1559 { chain_id: 4, nonce: 26u64, @@ -306,7 +306,7 @@ mod tests { true, ), ), - TransactionSigned::from_transaction_and_signature( + TransactionSigned::new_unhashed( Transaction::Legacy(TxLegacy { chain_id: Some(4), nonce: 3u64, @@ -328,7 +328,7 @@ mod tests { false, ), ), - TransactionSigned::from_transaction_and_signature( + TransactionSigned::new_unhashed( Transaction::Legacy(TxLegacy { chain_id: Some(4), nonce: 1u64, @@ -350,7 +350,7 @@ mod tests { false, ), ), - TransactionSigned::from_transaction_and_signature( + TransactionSigned::new_unhashed( Transaction::Legacy(TxLegacy { chain_id: Some(4), nonce: 2u64, @@ -397,7 +397,7 @@ mod tests { fn encode_pooled_transactions_network() { let expected = 
hex!("f9022980f90225f8650f84832156008287fb94cf7f9e66af820a19257a2108375b180b0ec491678204d2802ca035b7bfeb9ad9ece2cbafaaf8e202e706b4cfaeb233f46198f00b44d4a566a981a0612638fb29427ca33b9a3be2a0a561beecfe0269655be160d35e72d366a6a860b87502f872041a8459682f008459682f0d8252089461815774383099e24810ab832a5b2a5425c154d58829a2241af62c000080c001a059e6b67f48fb32e7e570dfb11e042b5ad2e55e3ce3ce9cd989c7e06e07feeafda0016b83f4f980694ed2eee4d10667242b1f40dc406901b34125b008d334d47469f86b0384773594008398968094d3e8763675e4c425df46cc3b5c0f6cbdac39604687038d7ea4c68000802ba0ce6834447c0a4193c40382e6c57ae33b241379c5418caac9cdc18d786fd12071a03ca3ae86580e94550d7c071e3a02eadb5a77830947c9225165cf9100901bee88f86b01843b9aca00830186a094d3e8763675e4c425df46cc3b5c0f6cbdac3960468702769bb01b2a00802ba0e24d8bd32ad906d6f8b8d7741e08d1959df021698b19ee232feba15361587d0aa05406ad177223213df262cb66ccbb2f46bfdccfdfbbb5ffdda9e2c02d977631daf86b02843b9aca00830186a094d3e8763675e4c425df46cc3b5c0f6cbdac39604687038d7ea4c68000802ba00eb96ca19e8a77102767a41fc85a36afd5c61ccb09911cec5d3e86e193d9c5aea03a456401896b1b6055311536bf00a718568c744d8c1f9df59879e8350220ca18"); let txs = vec![ - TransactionSigned::from_transaction_and_signature( + TransactionSigned::new_unhashed( Transaction::Legacy(TxLegacy { chain_id: Some(4), nonce: 15u64, @@ -419,7 +419,7 @@ mod tests { true, ), ), - TransactionSigned::from_transaction_and_signature( + TransactionSigned::new_unhashed( Transaction::Eip1559(TxEip1559 { chain_id: 4, nonce: 26u64, @@ -443,7 +443,7 @@ mod tests { true, ), ), - TransactionSigned::from_transaction_and_signature( + TransactionSigned::new_unhashed( Transaction::Legacy(TxLegacy { chain_id: Some(4), nonce: 3u64, @@ -465,7 +465,7 @@ mod tests { false, ), ), - TransactionSigned::from_transaction_and_signature( + TransactionSigned::new_unhashed( Transaction::Legacy(TxLegacy { chain_id: Some(4), nonce: 1u64, @@ -487,7 +487,7 @@ mod tests { false, ), ), - TransactionSigned::from_transaction_and_signature( + TransactionSigned::new_unhashed( Transaction::Legacy(TxLegacy { chain_id: Some(4), nonce: 2u64, diff --git a/crates/net/network/tests/it/requests.rs b/crates/net/network/tests/it/requests.rs index 54e1f4e12b4d..0dd38c959de9 100644 --- a/crates/net/network/tests/it/requests.rs +++ b/crates/net/network/tests/it/requests.rs @@ -33,7 +33,7 @@ pub fn rng_transaction(rng: &mut impl rand::RngCore) -> TransactionSigned { }); let signature = Signature::new(U256::default(), U256::default(), true); - TransactionSigned::from_transaction_and_signature(request, signature) + TransactionSigned::new_unhashed(request, signature) } #[tokio::test(flavor = "multi_thread")] diff --git a/crates/net/network/tests/it/txgossip.rs b/crates/net/network/tests/it/txgossip.rs index 98624c4c6098..ebde61ef8ea1 100644 --- a/crates/net/network/tests/it/txgossip.rs +++ b/crates/net/network/tests/it/txgossip.rs @@ -132,10 +132,7 @@ async fn test_sending_invalid_transactions() { value: Default::default(), input: Default::default(), }; - let tx = TransactionSigned::from_transaction_and_signature( - tx.into(), - Signature::test_signature(), - ); + let tx = TransactionSigned::new_unhashed(tx.into(), Signature::test_signature()); peer0.network().send_transactions(*peer1.peer_id(), vec![Arc::new(tx)]); } diff --git a/crates/optimism/evm/src/execute.rs b/crates/optimism/evm/src/execute.rs index a9a4b301573e..042b8e291934 100644 --- a/crates/optimism/evm/src/execute.rs +++ b/crates/optimism/evm/src/execute.rs @@ -377,7 +377,7 @@ mod tests { let chain_spec = 
Arc::new(OpChainSpecBuilder::base_mainnet().regolith_activated().build()); - let tx = TransactionSigned::from_transaction_and_signature( + let tx = TransactionSigned::new_unhashed( Transaction::Eip1559(TxEip1559 { chain_id: chain_spec.chain.id(), nonce: 0, @@ -388,7 +388,7 @@ mod tests { Signature::test_signature(), ); - let tx_deposit = TransactionSigned::from_transaction_and_signature( + let tx_deposit = TransactionSigned::new_unhashed( Transaction::Deposit(op_alloy_consensus::TxDeposit { from: addr, to: addr.into(), @@ -461,7 +461,7 @@ mod tests { let chain_spec = Arc::new(OpChainSpecBuilder::base_mainnet().canyon_activated().build()); - let tx = TransactionSigned::from_transaction_and_signature( + let tx = TransactionSigned::new_unhashed( Transaction::Eip1559(TxEip1559 { chain_id: chain_spec.chain.id(), nonce: 0, @@ -472,7 +472,7 @@ mod tests { Signature::test_signature(), ); - let tx_deposit = TransactionSigned::from_transaction_and_signature( + let tx_deposit = TransactionSigned::new_unhashed( Transaction::Deposit(op_alloy_consensus::TxDeposit { from: addr, to: addr.into(), diff --git a/crates/optimism/node/src/txpool.rs b/crates/optimism/node/src/txpool.rs index 7df5888fb751..a5616569c86f 100644 --- a/crates/optimism/node/src/txpool.rs +++ b/crates/optimism/node/src/txpool.rs @@ -265,7 +265,7 @@ mod tests { input: Default::default(), }); let signature = Signature::test_signature(); - let signed_tx = TransactionSigned::from_transaction_and_signature(deposit_tx, signature); + let signed_tx = TransactionSigned::new_unhashed(deposit_tx, signature); let signed_recovered = TransactionSignedEcRecovered::from_signed_transaction(signed_tx, signer); let len = signed_recovered.encode_2718_len(); diff --git a/crates/optimism/node/tests/it/priority.rs b/crates/optimism/node/tests/it/priority.rs index f1260d2da01f..c1df9180ce39 100644 --- a/crates/optimism/node/tests/it/priority.rs +++ b/crates/optimism/node/tests/it/priority.rs @@ -63,10 +63,7 @@ impl OpPayloadTransactions for CustomTxPriority { }; let signature = sender.sign_transaction_sync(&mut end_of_block_tx).unwrap(); let end_of_block_tx = TransactionSignedEcRecovered::from_signed_transaction( - TransactionSigned::from_transaction_and_signature( - Transaction::Eip1559(end_of_block_tx), - signature, - ), + TransactionSigned::new_unhashed(Transaction::Eip1559(end_of_block_tx), signature), sender.address(), ); diff --git a/crates/primitives/src/transaction/mod.rs b/crates/primitives/src/transaction/mod.rs index 1ac7b4394e0a..ba0d3f316175 100644 --- a/crates/primitives/src/transaction/mod.rs +++ b/crates/primitives/src/transaction/mod.rs @@ -914,7 +914,7 @@ impl TransactionSignedNoHash { #[inline] pub fn with_hash(self) -> TransactionSigned { let Self { signature, transaction } = self; - TransactionSigned::from_transaction_and_signature(transaction, signature) + TransactionSigned::new_unhashed(transaction, signature) } /// Recovers a list of signers from a transaction list iterator @@ -1105,6 +1105,18 @@ impl PartialEq for TransactionSigned { // === impl TransactionSigned === impl TransactionSigned { + /// Creates a new signed transaction from the given parts. + pub fn new(transaction: Transaction, signature: Signature, hash: B256) -> Self { + Self { hash: hash.into(), signature, transaction } + } + + /// Creates a new signed transaction from the given transaction and signature without the hash. + /// + /// Note: this only calculates the hash on the first [`TransactionSigned::hash`] call. 
+ pub fn new_unhashed(transaction: Transaction, signature: Signature) -> Self { + Self { hash: Default::default(), signature, transaction } + } + /// Transaction signature. pub const fn signature(&self) -> &Signature { &self.signature @@ -1251,13 +1263,6 @@ impl TransactionSigned { keccak256(self.encoded_2718()) } - /// Create a new signed transaction from a transaction and its signature. - /// - /// This will also calculate the transaction hash using its encoding. - pub fn from_transaction_and_signature(transaction: Transaction, signature: Signature) -> Self { - Self { transaction, signature, hash: Default::default() } - } - /// Decodes legacy transaction from the data buffer into a tuple. /// /// This expects `rlp(legacy_tx)` @@ -1614,7 +1619,7 @@ impl Decodable2718 for TransactionSigned { Ok(Self { transaction: Transaction::Eip4844(tx), signature, hash: hash.into() }) } #[cfg(feature = "optimism")] - TxType::Deposit => Ok(Self::from_transaction_and_signature( + TxType::Deposit => Ok(Self::new_unhashed( Transaction::Deposit(TxDeposit::rlp_decode(buf)?), TxDeposit::signature(), )), @@ -1652,7 +1657,7 @@ impl<'a> arbitrary::Arbitrary<'a> for TransactionSigned { #[cfg(feature = "optimism")] let signature = if transaction.is_deposit() { TxDeposit::signature() } else { signature }; - Ok(Self::from_transaction_and_signature(transaction, signature)) + Ok(Self::new_unhashed(transaction, signature)) } } @@ -2191,7 +2196,7 @@ mod tests { signature: Signature, hash: Option, ) { - let expected = TransactionSigned::from_transaction_and_signature(transaction, signature); + let expected = TransactionSigned::new_unhashed(transaction, signature); if let Some(hash) = hash { assert_eq!(hash, expected.hash()); } @@ -2288,7 +2293,7 @@ mod tests { let signature = crate::sign_message(B256::from_slice(&key_pair.secret_bytes()[..]), tx.signature_hash()).unwrap(); - TransactionSigned::from_transaction_and_signature(tx, signature) + TransactionSigned::new_unhashed(tx, signature) }).collect(); let parallel_senders = TransactionSigned::recover_signers(&txes, txes.len()).unwrap(); diff --git a/crates/transaction-pool/src/test_utils/gen.rs b/crates/transaction-pool/src/test_utils/gen.rs index 858098ec91ad..95a179aec814 100644 --- a/crates/transaction-pool/src/test_utils/gen.rs +++ b/crates/transaction-pool/src/test_utils/gen.rs @@ -199,7 +199,7 @@ impl TransactionBuilder { /// Signs the provided transaction using the specified signer and returns a signed transaction. fn signed(transaction: Transaction, signer: B256) -> TransactionSigned { let signature = sign_message(signer, transaction.signature_hash()).unwrap(); - TransactionSigned::from_transaction_and_signature(transaction, signature) + TransactionSigned::new_unhashed(transaction, signature) } /// Sets the signer for the transaction builder. 
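As a rough usage sketch (not part of the committed diff; it only assumes the `reth_primitives` and `alloy_primitives` items already used above), the two constructors introduced in this patch differ only in when the transaction hash is computed:

    use alloy_primitives::{PrimitiveSignature as Signature, B256};
    use reth_primitives::{Transaction, TransactionSigned};

    /// Builds a signed transaction: stores a known hash directly, otherwise
    /// defers hashing to the first `hash()` call on the returned value.
    fn build_signed(
        tx: Transaction,
        signature: Signature,
        known_hash: Option<B256>,
    ) -> TransactionSigned {
        match known_hash {
            // Hash already known (e.g. carried alongside a decoded payload).
            Some(hash) => TransactionSigned::new(tx, signature, hash),
            // Hash unknown: computed lazily from the encoding when first requested.
            None => TransactionSigned::new_unhashed(tx, signature),
        }
    }
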
diff --git a/crates/transaction-pool/src/traits.rs b/crates/transaction-pool/src/traits.rs index 68a911f2e2e1..cfdfcc07dd9d 100644 --- a/crates/transaction-pool/src/traits.rs +++ b/crates/transaction-pool/src/traits.rs @@ -1584,7 +1584,7 @@ mod tests { ..Default::default() }); let signature = Signature::test_signature(); - let signed_tx = TransactionSigned::from_transaction_and_signature(tx, signature); + let signed_tx = TransactionSigned::new_unhashed(tx, signature); let transaction = TransactionSignedEcRecovered::from_signed_transaction(signed_tx, Default::default()); let pooled_tx = EthPooledTransaction::new(transaction.clone(), 200); @@ -1606,7 +1606,7 @@ mod tests { ..Default::default() }); let signature = Signature::test_signature(); - let signed_tx = TransactionSigned::from_transaction_and_signature(tx, signature); + let signed_tx = TransactionSigned::new_unhashed(tx, signature); let transaction = TransactionSignedEcRecovered::from_signed_transaction(signed_tx, Default::default()); let pooled_tx = EthPooledTransaction::new(transaction.clone(), 200); @@ -1628,7 +1628,7 @@ mod tests { ..Default::default() }); let signature = Signature::test_signature(); - let signed_tx = TransactionSigned::from_transaction_and_signature(tx, signature); + let signed_tx = TransactionSigned::new_unhashed(tx, signature); let transaction = TransactionSignedEcRecovered::from_signed_transaction(signed_tx, Default::default()); let pooled_tx = EthPooledTransaction::new(transaction.clone(), 200); @@ -1652,7 +1652,7 @@ mod tests { ..Default::default() }); let signature = Signature::test_signature(); - let signed_tx = TransactionSigned::from_transaction_and_signature(tx, signature); + let signed_tx = TransactionSigned::new_unhashed(tx, signature); let transaction = TransactionSignedEcRecovered::from_signed_transaction(signed_tx, Default::default()); let pooled_tx = EthPooledTransaction::new(transaction.clone(), 300); @@ -1676,7 +1676,7 @@ mod tests { ..Default::default() }); let signature = Signature::test_signature(); - let signed_tx = TransactionSigned::from_transaction_and_signature(tx, signature); + let signed_tx = TransactionSigned::new_unhashed(tx, signature); let transaction = TransactionSignedEcRecovered::from_signed_transaction(signed_tx, Default::default()); let pooled_tx = EthPooledTransaction::new(transaction.clone(), 200); diff --git a/testing/testing-utils/src/generators.rs b/testing/testing-utils/src/generators.rs index 582298feab9f..d8f3a29790bb 100644 --- a/testing/testing-utils/src/generators.rs +++ b/testing/testing-utils/src/generators.rs @@ -149,7 +149,7 @@ pub fn sign_tx_with_key_pair(key_pair: Keypair, tx: Transaction) -> TransactionS let signature = sign_message(B256::from_slice(&key_pair.secret_bytes()[..]), tx.signature_hash()).unwrap(); - TransactionSigned::from_transaction_and_signature(tx, signature) + TransactionSigned::new_unhashed(tx, signature) } /// Generates a set of [Keypair]s based on the desired count. 
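A minimal round-trip sketch for the helper just above (not part of the diff; it uses only items already in scope in this generators module, and assumes `recover_signer` returns an `Option` as in the test below):

    /// Signing with a key pair and then recovering the signer should yield the
    /// address derived from that key pair's public key.
    fn assert_sign_recover_roundtrip(key_pair: Keypair, tx: Transaction) {
        let expected = public_key_to_address(key_pair.public_key());
        let signed = sign_tx_with_key_pair(key_pair, tx);
        assert_eq!(signed.recover_signer(), Some(expected));
    }
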
@@ -479,7 +479,7 @@ mod tests { sign_message(B256::from_slice(&key_pair.secret_bytes()[..]), signature_hash) .unwrap(); - let signed = TransactionSigned::from_transaction_and_signature(tx.clone(), signature); + let signed = TransactionSigned::new_unhashed(tx.clone(), signature); let recovered = signed.recover_signer().unwrap(); let expected = public_key_to_address(key_pair.public_key()); From a163929724670e7ef785d2d1c8d2572c4ce1299c Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 22 Nov 2024 11:58:20 +0100 Subject: [PATCH 108/156] feat: add signed conversions (#12772) --- crates/primitives/src/transaction/mod.rs | 37 +++++++++++++++++++++++- 1 file changed, 36 insertions(+), 1 deletion(-) diff --git a/crates/primitives/src/transaction/mod.rs b/crates/primitives/src/transaction/mod.rs index ba0d3f316175..6a085ac8d242 100644 --- a/crates/primitives/src/transaction/mod.rs +++ b/crates/primitives/src/transaction/mod.rs @@ -2,7 +2,7 @@ use alloc::vec::Vec; use alloy_consensus::{ - transaction::RlpEcdsaTx, SignableTransaction, Transaction as _, TxEip1559, TxEip2930, + transaction::RlpEcdsaTx, SignableTransaction, Signed, Transaction as _, TxEip1559, TxEip2930, TxEip4844, TxEip7702, TxLegacy, }; use alloy_eips::{ @@ -1263,6 +1263,12 @@ impl TransactionSigned { keccak256(self.encoded_2718()) } + /// Splits the transaction into parts. + pub fn into_parts(self) -> (Transaction, Signature, B256) { + let hash = self.hash(); + (self.transaction, self.signature, hash) + } + /// Decodes legacy transaction from the data buffer into a tuple. /// /// This expects `rlp(legacy_tx)` @@ -1631,6 +1637,35 @@ impl Decodable2718 for TransactionSigned { } } +macro_rules! impl_from_signed { + ($($tx:ident),*) => { + $( + impl From> for TransactionSigned { + fn from(value: Signed<$tx>) -> Self { + let(tx,sig,hash) = value.into_parts(); + Self::new(tx.into(), sig, hash) + } + } + )* + }; +} + +impl_from_signed!(TxLegacy, TxEip2930, TxEip1559, TxEip7702, TxEip4844); + +impl From> for TransactionSigned { + fn from(value: Signed) -> Self { + let (tx, sig, hash) = value.into_parts(); + Self::new(tx, sig, hash) + } +} + +impl From for Signed { + fn from(value: TransactionSigned) -> Self { + let (tx, sig, hash) = value.into_parts(); + Self::new_unchecked(tx, sig, hash) + } +} + #[cfg(any(test, feature = "arbitrary"))] impl<'a> arbitrary::Arbitrary<'a> for TransactionSigned { fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result { From ef3e0b360ff972ba76e5ffbcf3c29f7d753ab2bf Mon Sep 17 00:00:00 2001 From: Hai | RISE <150876604+hai-rise@users.noreply.github.com> Date: Fri, 22 Nov 2024 17:31:06 +0700 Subject: [PATCH 109/156] perf(op-payload): remove unneeded clone (#12771) --- crates/optimism/payload/src/builder.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/optimism/payload/src/builder.rs b/crates/optimism/payload/src/builder.rs index 1050a55eb6e9..5926cfd34c56 100644 --- a/crates/optimism/payload/src/builder.rs +++ b/crates/optimism/payload/src/builder.rs @@ -350,7 +350,7 @@ where let block_number = ctx.block_number(); let execution_outcome = ExecutionOutcome::new( state.take_bundle(), - vec![info.receipts.clone()].into(), + vec![info.receipts].into(), block_number, Vec::new(), ); From f2126f2c0562e4bdf896343f45e803b88ac9396b Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 22 Nov 2024 12:06:42 +0100 Subject: [PATCH 110/156] chore: move withencoded struct (#12770) --- crates/primitives-traits/src/encoded.rs | 55 ++++++++++++++++++++++++ 
crates/primitives-traits/src/lib.rs | 3 +- crates/primitives/src/transaction/mod.rs | 55 +----------------------- 3 files changed, 58 insertions(+), 55 deletions(-) create mode 100644 crates/primitives-traits/src/encoded.rs diff --git a/crates/primitives-traits/src/encoded.rs b/crates/primitives-traits/src/encoded.rs new file mode 100644 index 000000000000..b162fc93343a --- /dev/null +++ b/crates/primitives-traits/src/encoded.rs @@ -0,0 +1,55 @@ +use alloy_primitives::Bytes; + +/// Generic wrapper with encoded Bytes, such as transaction data. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct WithEncoded(Bytes, pub T); + +impl From<(Bytes, T)> for WithEncoded { + fn from(value: (Bytes, T)) -> Self { + Self(value.0, value.1) + } +} + +impl WithEncoded { + /// Wraps the value with the bytes. + pub const fn new(bytes: Bytes, value: T) -> Self { + Self(bytes, value) + } + + /// Get the encoded bytes + pub fn encoded_bytes(&self) -> Bytes { + self.0.clone() + } + + /// Get the underlying value + pub const fn value(&self) -> &T { + &self.1 + } + + /// Returns ownership of the underlying value. + pub fn into_value(self) -> T { + self.1 + } + + /// Transform the value + pub fn transform>(self) -> WithEncoded { + WithEncoded(self.0, self.1.into()) + } + + /// Split the wrapper into [`Bytes`] and value tuple + pub fn split(self) -> (Bytes, T) { + (self.0, self.1) + } + + /// Maps the inner value to a new value using the given function. + pub fn map U>(self, op: F) -> WithEncoded { + WithEncoded(self.0, op(self.1)) + } +} + +impl WithEncoded> { + /// returns `None` if the inner value is `None`, otherwise returns `Some(WithEncoded)`. + pub fn transpose(self) -> Option> { + self.1.map(|v| WithEncoded(self.0, v)) + } +} diff --git a/crates/primitives-traits/src/lib.rs b/crates/primitives-traits/src/lib.rs index c149c6cd7e47..4d068b2ff4db 100644 --- a/crates/primitives-traits/src/lib.rs +++ b/crates/primitives-traits/src/lib.rs @@ -14,7 +14,6 @@ extern crate alloc; /// Common constants. pub mod constants; - pub use constants::gas_units::{format_gas, format_gas_throughput}; /// Minimal account @@ -42,7 +41,9 @@ pub use block::{ Block, FullBlock, }; +mod encoded; mod withdrawal; +pub use encoded::WithEncoded; mod error; pub use error::{GotExpected, GotExpectedBoxed}; diff --git a/crates/primitives/src/transaction/mod.rs b/crates/primitives/src/transaction/mod.rs index 6a085ac8d242..2e274311a03c 100644 --- a/crates/primitives/src/transaction/mod.rs +++ b/crates/primitives/src/transaction/mod.rs @@ -40,6 +40,7 @@ pub use error::{ }; pub use meta::TransactionMeta; pub use pooled::{PooledTransactionsElement, PooledTransactionsElementEcRecovered}; +pub use reth_primitives_traits::WithEncoded; pub use sidecar::BlobTransaction; pub use signature::{recover_signer, recover_signer_unchecked}; pub use tx_type::TxType; @@ -1764,60 +1765,6 @@ impl Decodable for TransactionSignedEcRecovered { } } -/// Generic wrapper with encoded Bytes, such as transaction data. -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct WithEncoded(Bytes, pub T); - -impl From<(Bytes, T)> for WithEncoded { - fn from(value: (Bytes, T)) -> Self { - Self(value.0, value.1) - } -} - -impl WithEncoded { - /// Wraps the value with the bytes. - pub const fn new(bytes: Bytes, value: T) -> Self { - Self(bytes, value) - } - - /// Get the encoded bytes - pub fn encoded_bytes(&self) -> Bytes { - self.0.clone() - } - - /// Get the underlying value - pub const fn value(&self) -> &T { - &self.1 - } - - /// Returns ownership of the underlying value. 
- pub fn into_value(self) -> T { - self.1 - } - - /// Transform the value - pub fn transform>(self) -> WithEncoded { - WithEncoded(self.0, self.1.into()) - } - - /// Split the wrapper into [`Bytes`] and value tuple - pub fn split(self) -> (Bytes, T) { - (self.0, self.1) - } - - /// Maps the inner value to a new value using the given function. - pub fn map U>(self, op: F) -> WithEncoded { - WithEncoded(self.0, op(self.1)) - } -} - -impl WithEncoded> { - /// returns `None` if the inner value is `None`, otherwise returns `Some(WithEncoded)`. - pub fn transpose(self) -> Option> { - self.1.map(|v| WithEncoded(self.0, v)) - } -} - /// Bincode-compatible transaction type serde implementations. #[cfg(feature = "serde-bincode-compat")] pub mod serde_bincode_compat { From 3d93b81a7ed4496d64a719ad5808d1abd0d29ca0 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 22 Nov 2024 12:19:03 +0100 Subject: [PATCH 111/156] chore: replace pooled elements with signed (#12773) --- crates/primitives/src/transaction/pooled.rs | 205 +++++++------------- 1 file changed, 68 insertions(+), 137 deletions(-) diff --git a/crates/primitives/src/transaction/pooled.rs b/crates/primitives/src/transaction/pooled.rs index 05ad4afa87f1..00f62c24372c 100644 --- a/crates/primitives/src/transaction/pooled.rs +++ b/crates/primitives/src/transaction/pooled.rs @@ -3,14 +3,15 @@ use super::{error::TransactionConversionError, signature::recover_signer, TxEip7702}; use crate::{BlobTransaction, Transaction, TransactionSigned, TransactionSignedEcRecovered}; -use alloy_eips::eip4844::BlobTransactionSidecar; - use alloy_consensus::{ constants::EIP4844_TX_TYPE_ID, transaction::{RlpEcdsaTx, TxEip1559, TxEip2930, TxEip4844, TxLegacy}, - SignableTransaction, TxEip4844WithSidecar, + SignableTransaction, Signed, Transaction as _, TxEip4844WithSidecar, +}; +use alloy_eips::{ + eip2718::{Decodable2718, Eip2718Result, Encodable2718}, + eip4844::BlobTransactionSidecar, }; -use alloy_eips::eip2718::{Decodable2718, Eip2718Result, Encodable2718}; use alloy_primitives::{Address, PrimitiveSignature as Signature, TxHash, B256}; use alloy_rlp::{Decodable, Encodable, Error as RlpError, Header}; use bytes::Buf; @@ -22,42 +23,14 @@ use serde::{Deserialize, Serialize}; #[cfg_attr(any(test, feature = "reth-codec"), reth_codecs::add_arbitrary_tests)] #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] pub enum PooledTransactionsElement { - /// A legacy transaction - Legacy { - /// The inner transaction - transaction: TxLegacy, - /// The signature - signature: Signature, - /// The hash of the transaction - hash: TxHash, - }, - /// An EIP-2930 typed transaction - Eip2930 { - /// The inner transaction - transaction: TxEip2930, - /// The signature - signature: Signature, - /// The hash of the transaction - hash: TxHash, - }, - /// An EIP-1559 typed transaction - Eip1559 { - /// The inner transaction - transaction: TxEip1559, - /// The signature - signature: Signature, - /// The hash of the transaction - hash: TxHash, - }, - /// An EIP-7702 typed transaction - Eip7702 { - /// The inner transaction - transaction: TxEip7702, - /// The signature - signature: Signature, - /// The hash of the transaction - hash: TxHash, - }, + /// An untagged [`TxLegacy`]. + Legacy(Signed), + /// A [`TxEip2930`] tagged with type 1. + Eip2930(Signed), + /// A [`TxEip1559`] tagged with type 2. + Eip1559(Signed), + /// A [`TxEip7702`] tagged with type 4. + Eip7702(Signed), /// A blob transaction, which includes the transaction, blob data, commitments, and proofs. 
BlobTransaction(BlobTransaction), } @@ -72,16 +45,16 @@ impl PooledTransactionsElement { let hash = tx.hash(); match tx { TransactionSigned { transaction: Transaction::Legacy(tx), signature, .. } => { - Ok(Self::Legacy { transaction: tx, signature, hash }) + Ok(Self::Legacy(Signed::new_unchecked(tx, signature, hash))) } TransactionSigned { transaction: Transaction::Eip2930(tx), signature, .. } => { - Ok(Self::Eip2930 { transaction: tx, signature, hash }) + Ok(Self::Eip2930(Signed::new_unchecked(tx, signature, hash))) } TransactionSigned { transaction: Transaction::Eip1559(tx), signature, .. } => { - Ok(Self::Eip1559 { transaction: tx, signature, hash }) + Ok(Self::Eip1559(Signed::new_unchecked(tx, signature, hash))) } TransactionSigned { transaction: Transaction::Eip7702(tx), signature, .. } => { - Ok(Self::Eip7702 { transaction: tx, signature, hash }) + Ok(Self::Eip7702(Signed::new_unchecked(tx, signature, hash))) } // Not supported because missing blob sidecar tx @ TransactionSigned { transaction: Transaction::Eip4844(_), .. } => Err(tx), @@ -121,10 +94,10 @@ impl PooledTransactionsElement { /// It is only for signature signing or signer recovery. pub fn signature_hash(&self) -> B256 { match self { - Self::Legacy { transaction, .. } => transaction.signature_hash(), - Self::Eip2930 { transaction, .. } => transaction.signature_hash(), - Self::Eip1559 { transaction, .. } => transaction.signature_hash(), - Self::Eip7702 { transaction, .. } => transaction.signature_hash(), + Self::Legacy(tx) => tx.signature_hash(), + Self::Eip2930(tx) => tx.signature_hash(), + Self::Eip1559(tx) => tx.signature_hash(), + Self::Eip7702(tx) => tx.signature_hash(), Self::BlobTransaction(blob_tx) => blob_tx.transaction.signature_hash(), } } @@ -132,10 +105,10 @@ impl PooledTransactionsElement { /// Reference to transaction hash. Used to identify transaction. pub const fn hash(&self) -> &TxHash { match self { - Self::Legacy { hash, .. } | - Self::Eip2930 { hash, .. } | - Self::Eip1559 { hash, .. } | - Self::Eip7702 { hash, .. } => hash, + Self::Legacy(tx) => tx.hash(), + Self::Eip2930(tx) => tx.hash(), + Self::Eip1559(tx) => tx.hash(), + Self::Eip7702(tx) => tx.hash(), Self::BlobTransaction(tx) => &tx.hash, } } @@ -143,21 +116,21 @@ impl PooledTransactionsElement { /// Returns the signature of the transaction. pub const fn signature(&self) -> &Signature { match self { - Self::Legacy { signature, .. } | - Self::Eip2930 { signature, .. } | - Self::Eip1559 { signature, .. } | - Self::Eip7702 { signature, .. } => signature, + Self::Legacy(tx) => tx.signature(), + Self::Eip2930(tx) => tx.signature(), + Self::Eip1559(tx) => tx.signature(), + Self::Eip7702(tx) => tx.signature(), Self::BlobTransaction(blob_tx) => &blob_tx.signature, } } /// Returns the transaction nonce. - pub const fn nonce(&self) -> u64 { + pub fn nonce(&self) -> u64 { match self { - Self::Legacy { transaction, .. } => transaction.nonce, - Self::Eip2930 { transaction, .. } => transaction.nonce, - Self::Eip1559 { transaction, .. } => transaction.nonce, - Self::Eip7702 { transaction, .. } => transaction.nonce, + Self::Legacy(tx) => tx.tx().nonce(), + Self::Eip2930(tx) => tx.tx().nonce(), + Self::Eip1559(tx) => tx.tx().nonce(), + Self::Eip7702(tx) => tx.tx().nonce(), Self::BlobTransaction(blob_tx) => blob_tx.transaction.tx.nonce, } } @@ -189,26 +162,10 @@ impl PooledTransactionsElement { /// Returns the inner [`TransactionSigned`]. 
pub fn into_transaction(self) -> TransactionSigned { match self { - Self::Legacy { transaction, signature, hash } => TransactionSigned { - transaction: Transaction::Legacy(transaction), - signature, - hash: hash.into(), - }, - Self::Eip2930 { transaction, signature, hash } => TransactionSigned { - transaction: Transaction::Eip2930(transaction), - signature, - hash: hash.into(), - }, - Self::Eip1559 { transaction, signature, hash } => TransactionSigned { - transaction: Transaction::Eip1559(transaction), - signature, - hash: hash.into(), - }, - Self::Eip7702 { transaction, signature, hash } => TransactionSigned { - transaction: Transaction::Eip7702(transaction), - signature, - hash: hash.into(), - }, + Self::Legacy(tx) => tx.into(), + Self::Eip2930(tx) => tx.into(), + Self::Eip1559(tx) => tx.into(), + Self::Eip7702(tx) => tx.into(), Self::BlobTransaction(blob_tx) => blob_tx.into_parts().0, } } @@ -222,7 +179,7 @@ impl PooledTransactionsElement { /// Returns the [`TxLegacy`] variant if the transaction is a legacy transaction. pub const fn as_legacy(&self) -> Option<&TxLegacy> { match self { - Self::Legacy { transaction, .. } => Some(transaction), + Self::Legacy(tx) => Some(tx.tx()), _ => None, } } @@ -230,7 +187,7 @@ impl PooledTransactionsElement { /// Returns the [`TxEip2930`] variant if the transaction is an EIP-2930 transaction. pub const fn as_eip2930(&self) -> Option<&TxEip2930> { match self { - Self::Eip2930 { transaction, .. } => Some(transaction), + Self::Eip2930(tx) => Some(tx.tx()), _ => None, } } @@ -238,7 +195,7 @@ impl PooledTransactionsElement { /// Returns the [`TxEip1559`] variant if the transaction is an EIP-1559 transaction. pub const fn as_eip1559(&self) -> Option<&TxEip1559> { match self { - Self::Eip1559 { transaction, .. } => Some(transaction), + Self::Eip1559(tx) => Some(tx.tx()), _ => None, } } @@ -254,7 +211,7 @@ impl PooledTransactionsElement { /// Returns the [`TxEip7702`] variant if the transaction is an EIP-7702 transaction. pub const fn as_eip7702(&self) -> Option<&TxEip7702> { match self { - Self::Eip7702 { transaction, .. } => Some(transaction), + Self::Eip7702(tx) => Some(tx.tx()), _ => None, } } @@ -286,9 +243,9 @@ impl PooledTransactionsElement { /// This is also commonly referred to as the "Gas Tip Cap" (`GasTipCap`). pub const fn max_priority_fee_per_gas(&self) -> Option { match self { - Self::Legacy { .. } | Self::Eip2930 { .. } => None, - Self::Eip1559 { transaction, .. } => Some(transaction.max_priority_fee_per_gas), - Self::Eip7702 { transaction, .. } => Some(transaction.max_priority_fee_per_gas), + Self::Legacy(_) | Self::Eip2930(_) => None, + Self::Eip1559(tx) => Some(tx.tx().max_priority_fee_per_gas), + Self::Eip7702(tx) => Some(tx.tx().max_priority_fee_per_gas), Self::BlobTransaction(tx) => Some(tx.transaction.tx.max_priority_fee_per_gas), } } @@ -298,10 +255,10 @@ impl PooledTransactionsElement { /// This is also commonly referred to as the "Gas Fee Cap" (`GasFeeCap`). pub const fn max_fee_per_gas(&self) -> u128 { match self { - Self::Legacy { transaction, .. } => transaction.gas_price, - Self::Eip2930 { transaction, .. } => transaction.gas_price, - Self::Eip1559 { transaction, .. } => transaction.max_fee_per_gas, - Self::Eip7702 { transaction, .. 
} => transaction.max_fee_per_gas, + Self::Legacy(tx) => tx.tx().gas_price, + Self::Eip2930(tx) => tx.tx().gas_price, + Self::Eip1559(tx) => tx.tx().max_fee_per_gas, + Self::Eip7702(tx) => tx.tx().max_fee_per_gas, Self::BlobTransaction(tx) => tx.transaction.tx.max_fee_per_gas, } } @@ -391,28 +348,20 @@ impl Decodable for PooledTransactionsElement { impl Encodable2718 for PooledTransactionsElement { fn type_flag(&self) -> Option { match self { - Self::Legacy { .. } => None, - Self::Eip2930 { .. } => Some(0x01), - Self::Eip1559 { .. } => Some(0x02), - Self::BlobTransaction { .. } => Some(0x03), - Self::Eip7702 { .. } => Some(0x04), + Self::Legacy(_) => None, + Self::Eip2930(_) => Some(0x01), + Self::Eip1559(_) => Some(0x02), + Self::BlobTransaction(_) => Some(0x03), + Self::Eip7702(_) => Some(0x04), } } fn encode_2718_len(&self) -> usize { match self { - Self::Legacy { transaction, signature, .. } => { - transaction.eip2718_encoded_length(signature) - } - Self::Eip2930 { transaction, signature, .. } => { - transaction.eip2718_encoded_length(signature) - } - Self::Eip1559 { transaction, signature, .. } => { - transaction.eip2718_encoded_length(signature) - } - Self::Eip7702 { transaction, signature, .. } => { - transaction.eip2718_encoded_length(signature) - } + Self::Legacy(tx) => tx.eip2718_encoded_length(), + Self::Eip2930(tx) => tx.eip2718_encoded_length(), + Self::Eip1559(tx) => tx.eip2718_encoded_length(), + Self::Eip7702(tx) => tx.eip2718_encoded_length(), Self::BlobTransaction(BlobTransaction { transaction, signature, .. }) => { transaction.eip2718_encoded_length(signature) } @@ -421,18 +370,10 @@ impl Encodable2718 for PooledTransactionsElement { fn encode_2718(&self, out: &mut dyn alloy_rlp::BufMut) { match self { - Self::Legacy { transaction, signature, .. } => { - transaction.eip2718_encode(signature, out) - } - Self::Eip2930 { transaction, signature, .. } => { - transaction.eip2718_encode(signature, out) - } - Self::Eip1559 { transaction, signature, .. } => { - transaction.eip2718_encode(signature, out) - } - Self::Eip7702 { transaction, signature, .. } => { - transaction.eip2718_encode(signature, out) - } + Self::Legacy(tx) => tx.eip2718_encode(out), + Self::Eip2930(tx) => tx.eip2718_encode(out), + Self::Eip1559(tx) => tx.eip2718_encode(out), + Self::Eip7702(tx) => tx.eip2718_encode(out), Self::BlobTransaction(BlobTransaction { transaction, signature, .. 
}) => { transaction.eip2718_encode(signature, out) } @@ -474,21 +415,11 @@ impl Decodable2718 for PooledTransactionsElement { Transaction::Eip4844(_) => Err(RlpError::Custom( "EIP-4844 transactions can only be decoded with transaction type 0x03", ).into()), - Transaction::Eip2930(tx) => Ok(Self::Eip2930 { - transaction: tx, - signature: typed_tx.signature, - hash - }), - Transaction::Eip1559(tx) => Ok(Self::Eip1559 { - transaction: tx, - signature: typed_tx.signature, - hash - }), - Transaction::Eip7702(tx) => Ok(Self::Eip7702 { - transaction: tx, - signature: typed_tx.signature, - hash - }), + Transaction::Eip2930(tx) => Ok(Self::Eip2930 ( + Signed::new_unchecked(tx, typed_tx.signature, hash) + )), + Transaction::Eip1559(tx) => Ok(Self::Eip1559( Signed::new_unchecked(tx, typed_tx.signature, hash))), + Transaction::Eip7702(tx) => Ok(Self::Eip7702( Signed::new_unchecked(tx, typed_tx.signature, hash))), #[cfg(feature = "optimism")] Transaction::Deposit(_) => Err(RlpError::Custom("Optimism deposit transaction cannot be decoded to PooledTransactionsElement").into()) } @@ -501,7 +432,7 @@ impl Decodable2718 for PooledTransactionsElement { let (transaction, hash, signature) = TransactionSigned::decode_rlp_legacy_transaction_tuple(buf)?; - Ok(Self::Legacy { transaction, signature, hash }) + Ok(Self::Legacy(Signed::new_unchecked(transaction, signature, hash))) } } From 7f5fd80cb7d69d1e5d14abf6851111adae762970 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Fri, 22 Nov 2024 15:49:25 +0400 Subject: [PATCH 112/156] feat: integrate `SignedTx` AT into `StaticFileProviderRW` (#12764) --- Cargo.lock | 3 + crates/blockchain-tree/src/blockchain_tree.rs | 2 +- crates/blockchain-tree/src/externals.rs | 6 +- crates/cli/commands/src/common.rs | 12 +--- crates/consensus/beacon/Cargo.toml | 3 + .../beacon/src/engine/hooks/static_file.rs | 14 +++-- crates/consensus/beacon/src/engine/mod.rs | 2 +- crates/engine/tree/src/persistence.rs | 6 +- crates/exex/exex/src/backfill/test_utils.rs | 6 +- crates/node/builder/src/setup.rs | 4 +- crates/optimism/primitives/src/lib.rs | 4 +- crates/primitives-traits/src/node.rs | 58 +++++++++++-------- crates/primitives/src/lib.rs | 2 +- .../stages/src/stages/hashing_account.rs | 3 +- crates/static-file/static-file/Cargo.toml | 2 + .../static-file/src/segments/transactions.rs | 16 +++-- .../static-file/src/static_file_producer.rs | 8 ++- .../src/providers/blockchain_provider.rs | 3 +- .../src/providers/database/provider.rs | 11 ++-- .../provider/src/providers/static_file/mod.rs | 10 ++-- .../src/providers/static_file/writer.rs | 5 +- 21 files changed, 105 insertions(+), 75 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index eabde10a0a70..e4a6687defba 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6456,6 +6456,7 @@ dependencies = [ "reth-blockchain-tree", "reth-blockchain-tree-api", "reth-chainspec", + "reth-codecs", "reth-config", "reth-consensus", "reth-db", @@ -9159,8 +9160,10 @@ dependencies = [ "assert_matches", "parking_lot", "rayon", + "reth-codecs", "reth-db", "reth-db-api", + "reth-primitives-traits", "reth-provider", "reth-prune-types", "reth-stages", diff --git a/crates/blockchain-tree/src/blockchain_tree.rs b/crates/blockchain-tree/src/blockchain_tree.rs index 67b200e64844..d2ff0f5c8445 100644 --- a/crates/blockchain-tree/src/blockchain_tree.rs +++ b/crates/blockchain-tree/src/blockchain_tree.rs @@ -1424,7 +1424,7 @@ mod tests { } fn setup_genesis< - N: ProviderNodeTypes>, + N: ProviderNodeTypes>, >( factory: &ProviderFactory, mut genesis: SealedBlock, diff --git 
a/crates/blockchain-tree/src/externals.rs b/crates/blockchain-tree/src/externals.rs index 76b65824854c..bf5a243a5a5c 100644 --- a/crates/blockchain-tree/src/externals.rs +++ b/crates/blockchain-tree/src/externals.rs @@ -4,7 +4,7 @@ use alloy_primitives::{BlockHash, BlockNumber}; use reth_consensus::Consensus; use reth_db::{static_file::HeaderMask, tables}; use reth_db_api::{cursor::DbCursorRO, transaction::DbTx}; -use reth_node_types::{Block, FullNodePrimitives, NodeTypesWithDB}; +use reth_node_types::{FullNodePrimitives, NodeTypesWithDB}; use reth_primitives::{BlockBody, StaticFileSegment}; use reth_provider::{ providers::ProviderNodeTypes, ChainStateBlockReader, ChainStateBlockWriter, ProviderFactory, @@ -15,11 +15,11 @@ use std::{collections::BTreeMap, sync::Arc}; /// A helper trait with requirements for [`ProviderNodeTypes`] to be used within [`TreeExternals`]. pub trait TreeNodeTypes: - ProviderNodeTypes>> + ProviderNodeTypes> { } impl TreeNodeTypes for T where - T: ProviderNodeTypes>> + T: ProviderNodeTypes> { } diff --git a/crates/cli/commands/src/common.rs b/crates/cli/commands/src/common.rs index e557f15da6bf..251e01a105a9 100644 --- a/crates/cli/commands/src/common.rs +++ b/crates/cli/commands/src/common.rs @@ -197,19 +197,11 @@ impl AccessRights { /// [`NodeTypes`](reth_node_builder::NodeTypes) in CLI. pub trait CliNodeTypes: NodeTypesWithEngine - + NodeTypesForProvider< - Primitives: FullNodePrimitives< - Block: reth_node_api::Block, - >, - > + + NodeTypesForProvider> { } impl CliNodeTypes for N where N: NodeTypesWithEngine - + NodeTypesForProvider< - Primitives: FullNodePrimitives< - Block: reth_node_api::Block, - >, - > + + NodeTypesForProvider> { } diff --git a/crates/consensus/beacon/Cargo.toml b/crates/consensus/beacon/Cargo.toml index 245ebe8541e0..65994557c06d 100644 --- a/crates/consensus/beacon/Cargo.toml +++ b/crates/consensus/beacon/Cargo.toml @@ -14,6 +14,8 @@ workspace = true # reth reth-ethereum-consensus.workspace = true reth-blockchain-tree-api.workspace = true +reth-codecs.workspace = true +reth-db-api.workspace = true reth-primitives.workspace = true reth-stages-api.workspace = true reth-errors.workspace = true @@ -80,6 +82,7 @@ assert_matches.workspace = true [features] optimism = [ "reth-blockchain-tree/optimism", + "reth-codecs/optimism", "reth-chainspec", "reth-db-api/optimism", "reth-db/optimism", diff --git a/crates/consensus/beacon/src/engine/hooks/static_file.rs b/crates/consensus/beacon/src/engine/hooks/static_file.rs index 99854209cb30..7cd286f659c3 100644 --- a/crates/consensus/beacon/src/engine/hooks/static_file.rs +++ b/crates/consensus/beacon/src/engine/hooks/static_file.rs @@ -6,8 +6,10 @@ use crate::{ }; use alloy_primitives::BlockNumber; use futures::FutureExt; +use reth_codecs::Compact; +use reth_db_api::table::Value; use reth_errors::RethResult; -use reth_primitives::static_file::HighestStaticFiles; +use reth_primitives::{static_file::HighestStaticFiles, NodePrimitives}; use reth_provider::{ BlockReader, ChainStateBlockReader, DatabaseProviderFactory, StageCheckpointReader, StaticFileProviderFactory, @@ -33,8 +35,9 @@ impl StaticFileHook where Provider: StaticFileProviderFactory + DatabaseProviderFactory< - Provider: StaticFileProviderFactory - + StageCheckpointReader + Provider: StaticFileProviderFactory< + Primitives: NodePrimitives, + > + StageCheckpointReader + BlockReader + ChainStateBlockReader, > + 'static, @@ -148,8 +151,9 @@ impl EngineHook for StaticFileHook where Provider: StaticFileProviderFactory + DatabaseProviderFactory< - 
Provider: StaticFileProviderFactory - + StageCheckpointReader + Provider: StaticFileProviderFactory< + Primitives: NodePrimitives, + > + StageCheckpointReader + BlockReader + ChainStateBlockReader, > + 'static, diff --git a/crates/consensus/beacon/src/engine/mod.rs b/crates/consensus/beacon/src/engine/mod.rs index 2ad06e68b67d..0fedbdd452de 100644 --- a/crates/consensus/beacon/src/engine/mod.rs +++ b/crates/consensus/beacon/src/engine/mod.rs @@ -2172,7 +2172,7 @@ mod tests { fn insert_blocks< 'a, - N: ProviderNodeTypes>, + N: ProviderNodeTypes>, >( provider_factory: ProviderFactory, mut blocks: impl Iterator, diff --git a/crates/engine/tree/src/persistence.rs b/crates/engine/tree/src/persistence.rs index 0199ae3f4613..86d18ceb48ce 100644 --- a/crates/engine/tree/src/persistence.rs +++ b/crates/engine/tree/src/persistence.rs @@ -3,7 +3,7 @@ use alloy_eips::BlockNumHash; use reth_chain_state::ExecutedBlock; use reth_errors::ProviderError; use reth_primitives::BlockBody; -use reth_primitives_traits::{Block, FullNodePrimitives}; +use reth_primitives_traits::FullNodePrimitives; use reth_provider::{ providers::ProviderNodeTypes, writer::UnifiedStorageWriter, BlockHashReader, ChainStateBlockWriter, DatabaseProviderFactory, ProviderFactory, StaticFileProviderFactory, @@ -21,11 +21,11 @@ use tracing::{debug, error}; /// A helper trait with requirements for [`ProviderNodeTypes`] to be used within /// [`PersistenceService`]. pub trait PersistenceNodeTypes: - ProviderNodeTypes>> + ProviderNodeTypes> { } impl PersistenceNodeTypes for T where - T: ProviderNodeTypes>> + T: ProviderNodeTypes> { } /// Writes parts of reth's in memory tree state to the database and static files. diff --git a/crates/exex/exex/src/backfill/test_utils.rs b/crates/exex/exex/src/backfill/test_utils.rs index 5d0f88f517dc..169d2d758de7 100644 --- a/crates/exex/exex/src/backfill/test_utils.rs +++ b/crates/exex/exex/src/backfill/test_utils.rs @@ -58,7 +58,7 @@ pub(crate) fn execute_block_and_commit_to_database( block: &BlockWithSenders, ) -> eyre::Result> where - N: ProviderNodeTypes>, + N: ProviderNodeTypes>, { let provider = provider_factory.provider()?; @@ -162,7 +162,7 @@ pub(crate) fn blocks_and_execution_outputs( key_pair: Keypair, ) -> eyre::Result)>> where - N: ProviderNodeTypes>, + N: ProviderNodeTypes>, { let (block1, block2) = blocks(chain_spec.clone(), key_pair)?; @@ -184,7 +184,7 @@ pub(crate) fn blocks_and_execution_outcome( ) -> eyre::Result<(Vec, ExecutionOutcome)> where N: ProviderNodeTypes, - N::Primitives: FullNodePrimitives, + N::Primitives: FullNodePrimitives, { let (block1, block2) = blocks(chain_spec.clone(), key_pair)?; diff --git a/crates/node/builder/src/setup.rs b/crates/node/builder/src/setup.rs index 71f0ceb56cdd..3258ba8fe544 100644 --- a/crates/node/builder/src/setup.rs +++ b/crates/node/builder/src/setup.rs @@ -14,7 +14,7 @@ use reth_exex::ExExManagerHandle; use reth_network_p2p::{ bodies::downloader::BodyDownloader, headers::downloader::HeaderDownloader, EthBlockClient, }; -use reth_node_api::FullNodePrimitives; +use reth_node_api::{FullNodePrimitives, NodePrimitives}; use reth_provider::{providers::ProviderNodeTypes, ProviderFactory}; use reth_stages::{prelude::DefaultStages, stages::ExecutionStage, Pipeline, StageSet}; use reth_static_file::StaticFileProducer; @@ -88,7 +88,7 @@ where N: ProviderNodeTypes, H: HeaderDownloader
+ 'static, B: BodyDownloader< - Body = <::Block as reth_node_api::Block>::Body, + Body = <::Block as reth_node_api::Block>::Body, > + 'static, Executor: BlockExecutorProvider, N::Primitives: FullNodePrimitives, diff --git a/crates/optimism/primitives/src/lib.rs b/crates/optimism/primitives/src/lib.rs index 26499bb43af3..334440ea1062 100644 --- a/crates/optimism/primitives/src/lib.rs +++ b/crates/optimism/primitives/src/lib.rs @@ -14,14 +14,14 @@ pub mod tx_type; pub use tx_type::OpTxType; use alloy_consensus::Header; -use reth_node_types::FullNodePrimitives; +use reth_node_types::NodePrimitives; use reth_primitives::{Block, BlockBody, Receipt, TransactionSigned}; /// Optimism primitive types. #[derive(Debug, Default, Clone, PartialEq, Eq)] pub struct OpPrimitives; -impl FullNodePrimitives for OpPrimitives { +impl NodePrimitives for OpPrimitives { type Block = Block; type BlockHeader = Header; type BlockBody = BlockBody; diff --git a/crates/primitives-traits/src/node.rs b/crates/primitives-traits/src/node.rs index 7cb321e9af3b..19f6bd8456a9 100644 --- a/crates/primitives-traits/src/node.rs +++ b/crates/primitives-traits/src/node.rs @@ -77,33 +77,45 @@ impl NodePrimitives for () { } /// Helper trait that sets trait bounds on [`NodePrimitives`]. -pub trait FullNodePrimitives: - Send + Sync + Unpin + Clone + Default + fmt::Debug + PartialEq + Eq + 'static +pub trait FullNodePrimitives +where + Self: NodePrimitives< + Block: FullBlock
, + BlockHeader: FullBlockHeader, + BlockBody: FullBlockBody, + SignedTx: FullSignedTx, + TxType: FullTxType, + Receipt: FullReceipt, + > + Send + + Sync + + Unpin + + Clone + + Default + + fmt::Debug + + PartialEq + + Eq + + 'static, { - /// Block primitive. - type Block: FullBlock
; - /// Block header primitive. - type BlockHeader: FullBlockHeader + 'static; - /// Block body primitive. - type BlockBody: FullBlockBody + 'static; - /// Signed version of the transaction type. - type SignedTx: FullSignedTx; - /// Transaction envelope type ID. - type TxType: FullTxType; - /// A receipt. - type Receipt: FullReceipt; } -impl NodePrimitives for T -where - T: FullNodePrimitives, +impl FullNodePrimitives for T where + T: NodePrimitives< + Block: FullBlock
, + BlockHeader: FullBlockHeader, + BlockBody: FullBlockBody, + SignedTx: FullSignedTx, + TxType: FullTxType, + Receipt: FullReceipt, + > + Send + + Sync + + Unpin + + Clone + + Default + + fmt::Debug + + PartialEq + + Eq + + 'static { - type Block = T::Block; - type BlockHeader = T::BlockHeader; - type BlockBody = T::BlockBody; - type SignedTx = T::SignedTx; - type TxType = T::TxType; - type Receipt = T::Receipt; } /// Helper adapter type for accessing [`NodePrimitives`] receipt type. diff --git a/crates/primitives/src/lib.rs b/crates/primitives/src/lib.rs index 7999588e49d8..203880209a2f 100644 --- a/crates/primitives/src/lib.rs +++ b/crates/primitives/src/lib.rs @@ -79,7 +79,7 @@ pub mod serde_bincode_compat { #[derive(Debug, Clone, Default, PartialEq, Eq, serde::Serialize, serde::Deserialize)] pub struct EthPrimitives; -impl reth_primitives_traits::FullNodePrimitives for EthPrimitives { +impl reth_primitives_traits::NodePrimitives for EthPrimitives { type Block = crate::Block; type BlockHeader = alloy_consensus::Header; type BlockBody = crate::BlockBody; diff --git a/crates/stages/stages/src/stages/hashing_account.rs b/crates/stages/stages/src/stages/hashing_account.rs index ecca1e0716c1..e6b1e548455f 100644 --- a/crates/stages/stages/src/stages/hashing_account.rs +++ b/crates/stages/stages/src/stages/hashing_account.rs @@ -63,7 +63,8 @@ impl AccountHashingStage { opts: SeedOpts, ) -> Result, StageError> where - N::Primitives: reth_primitives_traits::FullNodePrimitives, + N::Primitives: + reth_primitives_traits::FullNodePrimitives, { use alloy_primitives::U256; use reth_db_api::models::AccountBeforeTx; diff --git a/crates/static-file/static-file/Cargo.toml b/crates/static-file/static-file/Cargo.toml index d22b116cdc59..89f60687895e 100644 --- a/crates/static-file/static-file/Cargo.toml +++ b/crates/static-file/static-file/Cargo.toml @@ -13,12 +13,14 @@ workspace = true [dependencies] # reth +reth-codecs.workspace = true reth-db.workspace = true reth-db-api.workspace = true reth-provider.workspace = true reth-storage-errors.workspace = true reth-tokio-util.workspace = true reth-prune-types.workspace = true +reth-primitives-traits.workspace = true reth-static-file-types.workspace = true reth-stages-types.workspace = true diff --git a/crates/static-file/static-file/src/segments/transactions.rs b/crates/static-file/static-file/src/segments/transactions.rs index 59ec94be9e47..168ae94817b6 100644 --- a/crates/static-file/static-file/src/segments/transactions.rs +++ b/crates/static-file/static-file/src/segments/transactions.rs @@ -1,7 +1,9 @@ use crate::segments::Segment; use alloy_primitives::BlockNumber; -use reth_db::tables; +use reth_codecs::Compact; +use reth_db::{table::Value, tables}; use reth_db_api::{cursor::DbCursorRO, transaction::DbTx}; +use reth_primitives_traits::NodePrimitives; use reth_provider::{ providers::StaticFileWriter, BlockReader, DBProvider, StaticFileProviderFactory, }; @@ -13,8 +15,11 @@ use std::ops::RangeInclusive; #[derive(Debug, Default)] pub struct Transactions; -impl Segment - for Transactions +impl Segment for Transactions +where + Provider: StaticFileProviderFactory> + + DBProvider + + BlockReader, { fn segment(&self) -> StaticFileSegment { StaticFileSegment::Transactions @@ -38,8 +43,9 @@ impl Segment()?; + let mut transactions_cursor = provider.tx_ref().cursor_read::::Primitives as NodePrimitives>::SignedTx, + >>()?; let transactions_walker = transactions_cursor.walk_range(block_body_indices.tx_num_range())?; diff --git 
a/crates/static-file/static-file/src/static_file_producer.rs b/crates/static-file/static-file/src/static_file_producer.rs index 8959819e8217..371a344d8727 100644 --- a/crates/static-file/static-file/src/static_file_producer.rs +++ b/crates/static-file/static-file/src/static_file_producer.rs @@ -4,6 +4,9 @@ use crate::{segments, segments::Segment, StaticFileProducerEvent}; use alloy_primitives::BlockNumber; use parking_lot::Mutex; use rayon::prelude::*; +use reth_codecs::Compact; +use reth_db::table::Value; +use reth_primitives_traits::NodePrimitives; use reth_provider::{ providers::StaticFileWriter, BlockReader, ChainStateBlockReader, DBProvider, DatabaseProviderFactory, StageCheckpointReader, StaticFileProviderFactory, @@ -86,7 +89,10 @@ impl StaticFileProducerInner where Provider: StaticFileProviderFactory + DatabaseProviderFactory< - Provider: StaticFileProviderFactory + StageCheckpointReader + BlockReader, + Provider: StaticFileProviderFactory< + Primitives: NodePrimitives, + > + StageCheckpointReader + + BlockReader, >, { /// Listen for events on the `static_file_producer`. diff --git a/crates/storage/provider/src/providers/blockchain_provider.rs b/crates/storage/provider/src/providers/blockchain_provider.rs index 083e7fb596b6..74009ffff596 100644 --- a/crates/storage/provider/src/providers/blockchain_provider.rs +++ b/crates/storage/provider/src/providers/blockchain_provider.rs @@ -889,8 +889,7 @@ mod tests { static_file_provider.latest_writer(StaticFileSegment::Transactions)?; transactions_writer.increment_block(block.number)?; for tx in block.body.transactions() { - let tx: TransactionSignedNoHash = tx.clone().into(); - transactions_writer.append_transaction(tx_num, &tx)?; + transactions_writer.append_transaction(tx_num, tx)?; tx_num += 1; } diff --git a/crates/storage/provider/src/providers/database/provider.rs b/crates/storage/provider/src/providers/database/provider.rs index ff80213fdf31..e6c3842976fd 100644 --- a/crates/storage/provider/src/providers/database/provider.rs +++ b/crates/storage/provider/src/providers/database/provider.rs @@ -48,11 +48,11 @@ use reth_execution_types::{Chain, ExecutionOutcome}; use reth_network_p2p::headers::downloader::SyncTarget; use reth_node_types::NodeTypes; use reth_primitives::{ - Account, Block, BlockBody, BlockWithSenders, Bytecode, GotExpected, Receipt, SealedBlock, - SealedBlockWithSenders, SealedHeader, StaticFileSegment, StorageEntry, TransactionMeta, - TransactionSigned, TransactionSignedNoHash, + Account, Block, BlockBody, BlockWithSenders, Bytecode, GotExpected, NodePrimitives, Receipt, + SealedBlock, SealedBlockWithSenders, SealedHeader, StaticFileSegment, StorageEntry, + TransactionMeta, TransactionSigned, TransactionSignedNoHash, }; -use reth_primitives_traits::{BlockBody as _, FullNodePrimitives, SignedTransaction}; +use reth_primitives_traits::{BlockBody as _, SignedTransaction}; use reth_prune_types::{PruneCheckpoint, PruneModes, PruneSegment}; use reth_stages_types::{StageCheckpoint, StageId}; use reth_storage_api::{StateProvider, StorageChangeSetReader, TryIntoHistoricalStateProvider}; @@ -2778,8 +2778,7 @@ impl BlockExecutio impl BlockWriter for DatabaseProvider { - type Body = - <::Block as reth_primitives_traits::Block>::Body; + type Body = <::Block as reth_primitives_traits::Block>::Body; /// Inserts the block into the database, always modifying the following tables: /// * [`CanonicalHeaders`](tables::CanonicalHeaders) diff --git a/crates/storage/provider/src/providers/static_file/mod.rs 
b/crates/storage/provider/src/providers/static_file/mod.rs index 30b8d0344da1..58a9e3bb378c 100644 --- a/crates/storage/provider/src/providers/static_file/mod.rs +++ b/crates/storage/provider/src/providers/static_file/mod.rs @@ -68,7 +68,7 @@ mod tests { use reth_db_api::transaction::DbTxMut; use reth_primitives::{ static_file::{find_fixed_range, SegmentRangeInclusive, DEFAULT_BLOCKS_PER_STATIC_FILE}, - Receipt, TransactionSignedNoHash, + EthPrimitives, Receipt, TransactionSigned, }; use reth_storage_api::{ReceiptProvider, TransactionsProvider}; use reth_testing_utils::generators::{self, random_header_range}; @@ -304,20 +304,20 @@ mod tests { /// * `10..=19`: no txs/receipts /// * `20..=29`: only one tx/receipt fn setup_tx_based_scenario( - sf_rw: &StaticFileProvider<()>, + sf_rw: &StaticFileProvider, segment: StaticFileSegment, blocks_per_file: u64, ) { fn setup_block_ranges( - writer: &mut StaticFileProviderRWRefMut<'_, ()>, - sf_rw: &StaticFileProvider<()>, + writer: &mut StaticFileProviderRWRefMut<'_, EthPrimitives>, + sf_rw: &StaticFileProvider, segment: StaticFileSegment, block_range: &Range, mut tx_count: u64, next_tx_num: &mut u64, ) { let mut receipt = Receipt::default(); - let mut tx = TransactionSignedNoHash::default(); + let mut tx = TransactionSigned::default(); for block in block_range.clone() { writer.increment_block(block).unwrap(); diff --git a/crates/storage/provider/src/providers/static_file/writer.rs b/crates/storage/provider/src/providers/static_file/writer.rs index 5951dbb751fc..83954bde3521 100644 --- a/crates/storage/provider/src/providers/static_file/writer.rs +++ b/crates/storage/provider/src/providers/static_file/writer.rs @@ -558,7 +558,10 @@ impl StaticFileProviderRW { /// empty blocks and this function wouldn't be called. /// /// Returns the current [`TxNumber`] as seen in the static file. - pub fn append_transaction(&mut self, tx_num: TxNumber, tx: impl Compact) -> ProviderResult<()> { + pub fn append_transaction(&mut self, tx_num: TxNumber, tx: &N::SignedTx) -> ProviderResult<()> + where + N::SignedTx: Compact, + { let start = Instant::now(); self.ensure_no_queued_prune()?; From 73cd92f5f51fed011e3b340c881ff9f404534090 Mon Sep 17 00:00:00 2001 From: Hai | RISE <150876604+hai-rise@users.noreply.github.com> Date: Fri, 22 Nov 2024 20:31:44 +0700 Subject: [PATCH 113/156] chore(pending-pool): remove unused `all` txs (#12777) --- crates/transaction-pool/src/pool/pending.rs | 15 +++------------ 1 file changed, 3 insertions(+), 12 deletions(-) diff --git a/crates/transaction-pool/src/pool/pending.rs b/crates/transaction-pool/src/pool/pending.rs index ee2bcd96e849..89e673aad998 100644 --- a/crates/transaction-pool/src/pool/pending.rs +++ b/crates/transaction-pool/src/pool/pending.rs @@ -8,7 +8,7 @@ use crate::{ }; use std::{ cmp::Ordering, - collections::{hash_map::Entry, BTreeMap, BTreeSet, HashMap}, + collections::{hash_map::Entry, BTreeMap, HashMap}, ops::Bound::Unbounded, sync::Arc, }; @@ -34,8 +34,6 @@ pub struct PendingPool { submission_id: u64, /// _All_ Transactions that are currently inside the pool grouped by their identifier. by_id: BTreeMap>, - /// _All_ transactions sorted by priority - all: BTreeSet>, /// The highest nonce transactions for each sender - like the `independent` set, but the /// highest instead of lowest nonce. 
highest_nonces: HashMap>, @@ -61,7 +59,6 @@ impl PendingPool { ordering, submission_id: 0, by_id: Default::default(), - all: Default::default(), independent_transactions: Default::default(), highest_nonces: Default::default(), size_of: Default::default(), @@ -78,7 +75,6 @@ impl PendingPool { fn clear_transactions(&mut self) -> BTreeMap> { self.independent_transactions.clear(); self.highest_nonces.clear(); - self.all.clear(); self.size_of.reset(); std::mem::take(&mut self.by_id) } @@ -194,7 +190,6 @@ impl PendingPool { } else { self.size_of += tx.transaction.size(); self.update_independents_and_highest_nonces(&tx); - self.all.insert(tx.clone()); self.by_id.insert(id, tx); } } @@ -240,7 +235,6 @@ impl PendingPool { self.size_of += tx.transaction.size(); self.update_independents_and_highest_nonces(&tx); - self.all.insert(tx.clone()); self.by_id.insert(id, tx); } } @@ -307,7 +301,6 @@ impl PendingPool { let tx = PendingTransaction { submission_id, transaction: tx, priority }; self.update_independents_and_highest_nonces(&tx); - self.all.insert(tx.clone()); // send the new transaction to any existing pendingpool static file iterators if self.new_transaction_notifier.receiver_count() > 0 { @@ -337,7 +330,6 @@ impl PendingPool { let tx = self.by_id.remove(id)?; self.size_of -= tx.transaction.size(); - self.all.remove(&tx); if let Some(highest) = self.highest_nonces.get(&id.sender) { if highest.transaction.nonce() == id.nonce { @@ -538,13 +530,12 @@ impl PendingPool { /// Asserts that the bijection between `by_id` and `all` is valid. #[cfg(any(test, feature = "test-utils"))] pub(crate) fn assert_invariants(&self) { - assert_eq!(self.by_id.len(), self.all.len(), "by_id.len() != all.len()"); assert!( - self.independent_transactions.len() <= self.all.len(), + self.independent_transactions.len() <= self.by_id.len(), "independent.len() > all.len()" ); assert!( - self.highest_nonces.len() <= self.all.len(), + self.highest_nonces.len() <= self.by_id.len(), "independent_descendants.len() > all.len()" ); assert_eq!( From 26ecda223f21fc36c5043a4673da5fdebae428f2 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 22 Nov 2024 14:33:43 +0100 Subject: [PATCH 114/156] chore: some additional pool docs (#12776) --- crates/transaction-pool/src/traits.rs | 70 ++++++++++++++++----------- 1 file changed, 41 insertions(+), 29 deletions(-) diff --git a/crates/transaction-pool/src/traits.rs b/crates/transaction-pool/src/traits.rs index cfdfcc07dd9d..c45584c50a76 100644 --- a/crates/transaction-pool/src/traits.rs +++ b/crates/transaction-pool/src/traits.rs @@ -921,7 +921,15 @@ impl BestTransactionsAttributes { } } -/// Trait for transaction types used inside the pool +/// Trait for transaction types used inside the pool. +/// +/// This supports two transaction formats +/// - Consensus format: the form the transaction takes when it is included in a block. +/// - Pooled format: the form the transaction takes when it is gossiping around the network. +/// +/// This distinction is necessary for the EIP-4844 blob transactions, which require an additional +/// sidecar when they are gossiped around the network. It is expected that the `Consensus` format is +/// a subset of the `Pooled` format. pub trait PoolTransaction: fmt::Debug + Send + Sync + Clone { /// Associated error type for the `try_from_consensus` method. 
type TryFromConsensusError; @@ -1066,7 +1074,11 @@ pub trait PoolTransaction: fmt::Debug + Send + Sync + Clone { } } -/// Super trait for transactions that can be converted to and from Eth transactions +/// Super trait for transactions that can be converted to and from Eth transactions intended for the +/// ethereum style pool. +/// +/// This extends the [`PoolTransaction`] trait with additional methods that are specific to the +/// Ethereum pool. pub trait EthPoolTransaction: PoolTransaction< Consensus: From @@ -1097,9 +1109,9 @@ pub trait EthPoolTransaction: /// This type is essentially a wrapper around [`TransactionSignedEcRecovered`] with additional /// fields derived from the transaction that are frequently used by the pools for ordering. #[derive(Debug, Clone, PartialEq, Eq)] -pub struct EthPooledTransaction { - /// `EcRecovered` transaction info - pub(crate) transaction: TransactionSignedEcRecovered, +pub struct EthPooledTransaction { + /// `EcRecovered` transaction, the consensus format. + pub(crate) transaction: T, /// For EIP-1559 transactions: `max_fee_per_gas * gas_limit + tx_value`. /// For legacy transactions: `gas_price * gas_limit + tx_value`. @@ -1115,30 +1127,6 @@ pub struct EthPooledTransaction { pub(crate) blob_sidecar: EthBlobTransactionSidecar, } -/// Represents the blob sidecar of the [`EthPooledTransaction`]. -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum EthBlobTransactionSidecar { - /// This transaction does not have a blob sidecar - None, - /// This transaction has a blob sidecar (EIP-4844) but it is missing - /// - /// It was either extracted after being inserted into the pool or re-injected after reorg - /// without the blob sidecar - Missing, - /// The eip-4844 transaction was pulled from the network and still has its blob sidecar - Present(BlobTransactionSidecar), -} - -impl EthBlobTransactionSidecar { - /// Returns the blob sidecar if it is present - pub const fn maybe_sidecar(&self) -> Option<&BlobTransactionSidecar> { - match self { - Self::Present(sidecar) => Some(sidecar), - _ => None, - } - } -} - impl EthPooledTransaction { /// Create new instance of [Self]. /// @@ -1403,6 +1391,30 @@ impl From for TransactionSignedEcRecovered { } } +/// Represents the blob sidecar of the [`EthPooledTransaction`]. +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum EthBlobTransactionSidecar { + /// This transaction does not have a blob sidecar + None, + /// This transaction has a blob sidecar (EIP-4844) but it is missing + /// + /// It was either extracted after being inserted into the pool or re-injected after reorg + /// without the blob sidecar + Missing, + /// The eip-4844 transaction was pulled from the network and still has its blob sidecar + Present(BlobTransactionSidecar), +} + +impl EthBlobTransactionSidecar { + /// Returns the blob sidecar if it is present + pub const fn maybe_sidecar(&self) -> Option<&BlobTransactionSidecar> { + match self { + Self::Present(sidecar) => Some(sidecar), + _ => None, + } + } +} + /// Represents the current status of the pool. 
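The trait documentation above distinguishes the consensus encoding (the form included in a block) from the pooled encoding (the form gossiped over the network, carrying the blob sidecar for EIP-4844). A minimal sketch of how that split surfaces for callers — the helper below is hypothetical, and the exact signature of `try_from_consensus` is assumed from the associated types named in the trait:

    use reth_transaction_pool::PoolTransaction;

    // Hypothetical helper, not part of this patch: accept a consensus-format
    // transaction and convert it into the pool's transaction type.
    fn import_consensus_tx<T: PoolTransaction>(
        tx: T::Consensus,
    ) -> Result<T, T::TryFromConsensusError> {
        // EIP-4844 transactions are the notable case where the two formats
        // differ: the pooled form carries the blob sidecar, the consensus
        // form does not, which is why the conversion is fallible in general.
        T::try_from_consensus(tx)
    }
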
#[derive(Debug, Clone, Copy, Default)] pub struct PoolSize { From 852fba65248b0eb77f6b72a16197df613bfff963 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 22 Nov 2024 14:34:09 +0100 Subject: [PATCH 115/156] chore: replace blobtx fields with signed (#12774) --- crates/primitives/src/transaction/pooled.rs | 57 ++++++++++---------- crates/primitives/src/transaction/sidecar.rs | 40 +++++--------- crates/rpc/rpc/src/eth/bundle.rs | 2 +- 3 files changed, 45 insertions(+), 54 deletions(-) diff --git a/crates/primitives/src/transaction/pooled.rs b/crates/primitives/src/transaction/pooled.rs index 00f62c24372c..c6ab623829bd 100644 --- a/crates/primitives/src/transaction/pooled.rs +++ b/crates/primitives/src/transaction/pooled.rs @@ -5,8 +5,8 @@ use super::{error::TransactionConversionError, signature::recover_signer, TxEip7 use crate::{BlobTransaction, Transaction, TransactionSigned, TransactionSignedEcRecovered}; use alloy_consensus::{ constants::EIP4844_TX_TYPE_ID, - transaction::{RlpEcdsaTx, TxEip1559, TxEip2930, TxEip4844, TxLegacy}, - SignableTransaction, Signed, Transaction as _, TxEip4844WithSidecar, + transaction::{TxEip1559, TxEip2930, TxEip4844, TxLegacy}, + Signed, Transaction as _, TxEip4844WithSidecar, }; use alloy_eips::{ eip2718::{Decodable2718, Eip2718Result, Encodable2718}, @@ -78,11 +78,11 @@ impl PooledTransactionsElement { // If the transaction is an EIP-4844 transaction... TransactionSigned { transaction: Transaction::Eip4844(tx), signature, .. } => { // Construct a `PooledTransactionsElement::BlobTransaction` with provided sidecar. - Self::BlobTransaction(BlobTransaction { + Self::BlobTransaction(BlobTransaction(Signed::new_unchecked( + TxEip4844WithSidecar { tx, sidecar }, signature, hash, - transaction: TxEip4844WithSidecar { tx, sidecar }, - }) + ))) } // If the transaction is not EIP-4844, return an error with the original // transaction. @@ -98,7 +98,7 @@ impl PooledTransactionsElement { Self::Eip2930(tx) => tx.signature_hash(), Self::Eip1559(tx) => tx.signature_hash(), Self::Eip7702(tx) => tx.signature_hash(), - Self::BlobTransaction(blob_tx) => blob_tx.transaction.signature_hash(), + Self::BlobTransaction(tx) => tx.signature_hash(), } } @@ -109,7 +109,7 @@ impl PooledTransactionsElement { Self::Eip2930(tx) => tx.hash(), Self::Eip1559(tx) => tx.hash(), Self::Eip7702(tx) => tx.hash(), - Self::BlobTransaction(tx) => &tx.hash, + Self::BlobTransaction(tx) => tx.0.hash(), } } @@ -120,7 +120,7 @@ impl PooledTransactionsElement { Self::Eip2930(tx) => tx.signature(), Self::Eip1559(tx) => tx.signature(), Self::Eip7702(tx) => tx.signature(), - Self::BlobTransaction(blob_tx) => &blob_tx.signature, + Self::BlobTransaction(tx) => tx.0.signature(), } } @@ -131,7 +131,7 @@ impl PooledTransactionsElement { Self::Eip2930(tx) => tx.tx().nonce(), Self::Eip1559(tx) => tx.tx().nonce(), Self::Eip7702(tx) => tx.tx().nonce(), - Self::BlobTransaction(blob_tx) => blob_tx.transaction.tx.nonce, + Self::BlobTransaction(tx) => tx.tx().nonce(), } } @@ -203,7 +203,7 @@ impl PooledTransactionsElement { /// Returns the [`TxEip4844`] variant if the transaction is an EIP-4844 transaction. pub const fn as_eip4844(&self) -> Option<&TxEip4844> { match self { - Self::BlobTransaction(tx) => Some(&tx.transaction.tx), + Self::BlobTransaction(tx) => Some(tx.0.tx().tx()), _ => None, } } @@ -232,7 +232,7 @@ impl PooledTransactionsElement { /// This is also commonly referred to as the "Blob Gas Fee Cap" (`BlobGasFeeCap`). 
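The hunks below switch `BlobTransaction` to a single `Signed<TxEip4844WithSidecar>` wrapper, so the EIP-4844 fields are reached through that wrapper rather than through dedicated struct fields. A short sketch of the new access path (the helper name is hypothetical; the field chain mirrors the diff):

    use reth_primitives::PooledTransactionsElement;

    // `.0` is the `Signed` wrapper, `tx()` yields the `TxEip4844WithSidecar`,
    // and its `.tx` field is the inner `TxEip4844`.
    fn blob_fee_cap(tx: &PooledTransactionsElement) -> Option<u128> {
        match tx {
            PooledTransactionsElement::BlobTransaction(blob) => {
                Some(blob.0.tx().tx.max_fee_per_blob_gas)
            }
            _ => None,
        }
    }
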
pub const fn max_fee_per_blob_gas(&self) -> Option { match self { - Self::BlobTransaction(tx) => Some(tx.transaction.tx.max_fee_per_blob_gas), + Self::BlobTransaction(tx) => Some(tx.0.tx().tx.max_fee_per_blob_gas), _ => None, } } @@ -246,7 +246,7 @@ impl PooledTransactionsElement { Self::Legacy(_) | Self::Eip2930(_) => None, Self::Eip1559(tx) => Some(tx.tx().max_priority_fee_per_gas), Self::Eip7702(tx) => Some(tx.tx().max_priority_fee_per_gas), - Self::BlobTransaction(tx) => Some(tx.transaction.tx.max_priority_fee_per_gas), + Self::BlobTransaction(tx) => Some(tx.0.tx().tx.max_priority_fee_per_gas), } } @@ -259,7 +259,7 @@ impl PooledTransactionsElement { Self::Eip2930(tx) => tx.tx().gas_price, Self::Eip1559(tx) => tx.tx().max_fee_per_gas, Self::Eip7702(tx) => tx.tx().max_fee_per_gas, - Self::BlobTransaction(tx) => tx.transaction.tx.max_fee_per_gas, + Self::BlobTransaction(tx) => tx.0.tx().tx.max_fee_per_gas, } } } @@ -362,9 +362,7 @@ impl Encodable2718 for PooledTransactionsElement { Self::Eip2930(tx) => tx.eip2718_encoded_length(), Self::Eip1559(tx) => tx.eip2718_encoded_length(), Self::Eip7702(tx) => tx.eip2718_encoded_length(), - Self::BlobTransaction(BlobTransaction { transaction, signature, .. }) => { - transaction.eip2718_encoded_length(signature) - } + Self::BlobTransaction(tx) => tx.eip2718_encoded_length(), } } @@ -374,9 +372,7 @@ impl Encodable2718 for PooledTransactionsElement { Self::Eip2930(tx) => tx.eip2718_encode(out), Self::Eip1559(tx) => tx.eip2718_encode(out), Self::Eip7702(tx) => tx.eip2718_encode(out), - Self::BlobTransaction(BlobTransaction { transaction, signature, .. }) => { - transaction.eip2718_encode(signature, out) - } + Self::BlobTransaction(tx) => tx.eip2718_encode(out), } } @@ -457,15 +453,22 @@ impl<'a> arbitrary::Arbitrary<'a> for PooledTransactionsElement { // Attempt to create a `TransactionSigned` with arbitrary data. let tx_signed = TransactionSigned::arbitrary(u)?; // Attempt to create a `PooledTransactionsElement` with arbitrary data, handling the Result. - match Self::try_from(tx_signed) { - Ok(Self::BlobTransaction(mut tx)) => { - // Successfully converted to a BlobTransaction, now generate a sidecar. - tx.transaction.sidecar = alloy_eips::eip4844::BlobTransactionSidecar::arbitrary(u)?; - Ok(Self::BlobTransaction(tx)) + match Self::try_from_broadcast(tx_signed) { + Ok(tx) => Ok(tx), + Err(tx) => { + let (tx, sig, hash) = tx.into_parts(); + match tx { + Transaction::Eip4844(tx) => { + let sidecar = BlobTransactionSidecar::arbitrary(u)?; + Ok(Self::BlobTransaction(BlobTransaction(Signed::new_unchecked( + TxEip4844WithSidecar { tx, sidecar }, + sig, + hash, + )))) + } + _ => Err(arbitrary::Error::IncorrectFormat), + } } - Ok(tx) => Ok(tx), // Successfully converted, but not a BlobTransaction. - Err(_) => Err(arbitrary::Error::IncorrectFormat), /* Conversion failed, return an - * arbitrary error. 
*/ } } } diff --git a/crates/primitives/src/transaction/sidecar.rs b/crates/primitives/src/transaction/sidecar.rs index ec8c9b7f0eb7..5eeeef09fc3c 100644 --- a/crates/primitives/src/transaction/sidecar.rs +++ b/crates/primitives/src/transaction/sidecar.rs @@ -1,9 +1,9 @@ #![cfg_attr(docsrs, doc(cfg(feature = "c-kzg")))] use crate::{Transaction, TransactionSigned}; -use alloy_consensus::{transaction::RlpEcdsaTx, TxEip4844WithSidecar}; +use alloy_consensus::{transaction::RlpEcdsaTx, Signed, TxEip4844WithSidecar}; use alloy_eips::eip4844::BlobTransactionSidecar; -use alloy_primitives::{PrimitiveSignature as Signature, TxHash}; +use derive_more::Deref; use serde::{Deserialize, Serialize}; /// A response to `GetPooledTransactions` that includes blob data, their commitments, and their @@ -11,16 +11,8 @@ use serde::{Deserialize, Serialize}; /// /// This is defined in [EIP-4844](https://eips.ethereum.org/EIPS/eip-4844#networking) as an element /// of a `PooledTransactions` response. -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -pub struct BlobTransaction { - /// The transaction hash. - pub hash: TxHash, - /// The transaction signature. - pub signature: Signature, - /// The transaction payload with the sidecar. - #[serde(flatten)] - pub transaction: TxEip4844WithSidecar, -} +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Deref)] +pub struct BlobTransaction(pub Signed); impl BlobTransaction { /// Constructs a new [`BlobTransaction`] from a [`TransactionSigned`] and a @@ -34,11 +26,11 @@ impl BlobTransaction { let hash = tx.hash(); let TransactionSigned { transaction, signature, .. } = tx; match transaction { - Transaction::Eip4844(transaction) => Ok(Self { - hash, - transaction: TxEip4844WithSidecar { tx: transaction, sidecar }, + Transaction::Eip4844(transaction) => Ok(Self(Signed::new_unchecked( + TxEip4844WithSidecar { tx: transaction, sidecar }, signature, - }), + hash, + ))), transaction => { let tx = TransactionSigned { transaction, signature, hash: hash.into() }; Err((tx, sidecar)) @@ -54,19 +46,16 @@ impl BlobTransaction { &self, proof_settings: &c_kzg::KzgSettings, ) -> Result<(), alloy_eips::eip4844::BlobTransactionValidationError> { - self.transaction.validate_blob(proof_settings) + self.tx().validate_blob(proof_settings) } /// Splits the [`BlobTransaction`] into its [`TransactionSigned`] and [`BlobTransactionSidecar`] /// components. pub fn into_parts(self) -> (TransactionSigned, BlobTransactionSidecar) { - let transaction = TransactionSigned { - transaction: Transaction::Eip4844(self.transaction.tx), - hash: self.hash.into(), - signature: self.signature, - }; - - (transaction, self.transaction.sidecar) + let (transaction, signature, hash) = self.0.into_parts(); + let (transaction, sidecar) = transaction.into_parts(); + let transaction = TransactionSigned::new(transaction.into(), signature, hash); + (transaction, sidecar) } /// Decodes a [`BlobTransaction`] from RLP. 
This expects the encoding to be: @@ -80,8 +69,7 @@ impl BlobTransaction { pub(crate) fn decode_inner(data: &mut &[u8]) -> alloy_rlp::Result { let (transaction, signature, hash) = TxEip4844WithSidecar::rlp_decode_signed(data)?.into_parts(); - - Ok(Self { transaction, hash, signature }) + Ok(Self(Signed::new_unchecked(transaction, signature, hash))) } } diff --git a/crates/rpc/rpc/src/eth/bundle.rs b/crates/rpc/rpc/src/eth/bundle.rs index f92bd075a3b1..ee2b3ed5e7cd 100644 --- a/crates/rpc/rpc/src/eth/bundle.rs +++ b/crates/rpc/rpc/src/eth/bundle.rs @@ -87,7 +87,7 @@ where .iter() .filter_map(|(tx, _)| { if let PooledTransactionsElement::BlobTransaction(tx) = tx { - Some(tx.transaction.tx.blob_gas()) + Some(tx.tx().tx().blob_gas()) } else { None } From 3384c84f6fec12d71d8714326f93a2a250d9e82a Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 22 Nov 2024 14:47:40 +0100 Subject: [PATCH 116/156] chore: more useful tx conversions (#12778) --- crates/primitives/src/transaction/mod.rs | 25 ++++++++++++++++++++++-- 1 file changed, 23 insertions(+), 2 deletions(-) diff --git a/crates/primitives/src/transaction/mod.rs b/crates/primitives/src/transaction/mod.rs index 2e274311a03c..4091e51f3ed9 100644 --- a/crates/primitives/src/transaction/mod.rs +++ b/crates/primitives/src/transaction/mod.rs @@ -3,7 +3,7 @@ use alloc::vec::Vec; use alloy_consensus::{ transaction::RlpEcdsaTx, SignableTransaction, Signed, Transaction as _, TxEip1559, TxEip2930, - TxEip4844, TxEip7702, TxLegacy, + TxEip4844, TxEip4844Variant, TxEip7702, TxLegacy, TypedTransaction, }; use alloy_eips::{ eip2718::{Decodable2718, Eip2718Error, Eip2718Result, Encodable2718}, @@ -833,6 +833,27 @@ impl alloy_consensus::Transaction for Transaction { } } +impl From for Transaction { + fn from(value: TxEip4844Variant) -> Self { + match value { + TxEip4844Variant::TxEip4844(tx) => tx.into(), + TxEip4844Variant::TxEip4844WithSidecar(tx) => tx.tx.into(), + } + } +} + +impl From for Transaction { + fn from(value: TypedTransaction) -> Self { + match value { + TypedTransaction::Legacy(tx) => tx.into(), + TypedTransaction::Eip2930(tx) => tx.into(), + TypedTransaction::Eip1559(tx) => tx.into(), + TypedTransaction::Eip4844(tx) => tx.into(), + TypedTransaction::Eip7702(tx) => tx.into(), + } + } +} + /// Signed transaction without its Hash. Used type for inserting into the DB. /// /// This can by converted to [`TransactionSigned`] by calling [`TransactionSignedNoHash::hash`]. @@ -1651,7 +1672,7 @@ macro_rules! 
impl_from_signed { }; } -impl_from_signed!(TxLegacy, TxEip2930, TxEip1559, TxEip7702, TxEip4844); +impl_from_signed!(TxLegacy, TxEip2930, TxEip1559, TxEip7702, TxEip4844, TypedTransaction); impl From> for TransactionSigned { fn from(value: Signed) -> Self { From 87ecb434135294565b51a0ddc36d9aab146a9e23 Mon Sep 17 00:00:00 2001 From: Steven <112043913+stevencartavia@users.noreply.github.com> Date: Fri, 22 Nov 2024 07:52:08 -0600 Subject: [PATCH 117/156] replace BlockWithSenders with fn (#12695) --- crates/chain-state/src/in_memory.rs | 2 +- crates/primitives/src/block.rs | 11 ++++++++--- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/crates/chain-state/src/in_memory.rs b/crates/chain-state/src/in_memory.rs index e07eaeaa5d9e..24f394a761f5 100644 --- a/crates/chain-state/src/in_memory.rs +++ b/crates/chain-state/src/in_memory.rs @@ -619,7 +619,7 @@ impl BlockState { pub fn block_with_senders(&self) -> BlockWithSenders { let block = self.block.block().clone(); let senders = self.block.senders().clone(); - BlockWithSenders { block: block.unseal(), senders } + BlockWithSenders::new_unchecked(block.unseal(), senders) } /// Returns the sealed block with senders for the state. diff --git a/crates/primitives/src/block.rs b/crates/primitives/src/block.rs index 57c63d53a43e..a93b1cf538a7 100644 --- a/crates/primitives/src/block.rs +++ b/crates/primitives/src/block.rs @@ -73,7 +73,7 @@ impl Block { senders }; - Ok(BlockWithSenders { block: self, senders }) + Ok(BlockWithSenders::new_unchecked(self, senders)) } /// **Expensive**. Transform into a [`BlockWithSenders`] by recovering senders in the contained @@ -82,7 +82,7 @@ impl Block { /// Returns `None` if a transaction is invalid. pub fn with_recovered_senders(self) -> Option { let senders = self.senders()?; - Some(BlockWithSenders { block: self, senders }) + Some(BlockWithSenders::new_unchecked(self, senders)) } } @@ -214,6 +214,11 @@ pub struct BlockWithSenders { } impl BlockWithSenders { + /// New block with senders + pub const fn new_unchecked(block: Block, senders: Vec
) -> Self { + Self { block, senders } + } + /// New block with senders. Return none if len of tx and senders does not match pub fn new(block: Block, senders: Vec
) -> Option { (block.body.transactions.len() == senders.len()).then_some(Self { block, senders }) @@ -527,7 +532,7 @@ impl SealedBlockWithSenders { #[inline] pub fn unseal(self) -> BlockWithSenders { let Self { block, senders } = self; - BlockWithSenders { block: block.unseal(), senders } + BlockWithSenders::new_unchecked(block.unseal(), senders) } /// Returns an iterator over all transactions in the block. From 64728e0856dc4267ac7f5a65567f3fde9fc559b8 Mon Sep 17 00:00:00 2001 From: Hai | RISE <150876604+hai-rise@users.noreply.github.com> Date: Fri, 22 Nov 2024 21:15:43 +0700 Subject: [PATCH 118/156] refactor: simplify withdrawals outcome (#12780) --- crates/ethereum/payload/src/lib.rs | 12 ++++--- crates/optimism/payload/src/builder.rs | 24 ++++++-------- crates/payload/basic/src/lib.rs | 43 ++++---------------------- 3 files changed, 23 insertions(+), 56 deletions(-) diff --git a/crates/ethereum/payload/src/lib.rs b/crates/ethereum/payload/src/lib.rs index ac6427caf362..f38c93613dc8 100644 --- a/crates/ethereum/payload/src/lib.rs +++ b/crates/ethereum/payload/src/lib.rs @@ -14,7 +14,7 @@ use alloy_eips::{eip4844::MAX_DATA_GAS_PER_BLOCK, eip7685::Requests, merge::BEAC use alloy_primitives::U256; use reth_basic_payload_builder::{ commit_withdrawals, is_better_payload, BuildArguments, BuildOutcome, PayloadBuilder, - PayloadConfig, WithdrawalsOutcome, + PayloadConfig, }; use reth_chain_state::ExecutedBlock; use reth_chainspec::ChainSpec; @@ -356,8 +356,8 @@ where None }; - let WithdrawalsOutcome { withdrawals_root, withdrawals } = - commit_withdrawals(&mut db, &chain_spec, attributes.timestamp, attributes.withdrawals)?; + let withdrawals_root = + commit_withdrawals(&mut db, &chain_spec, attributes.timestamp, &attributes.withdrawals)?; // merge all transitions into bundle state, this would apply the withdrawal balance changes // and 4788 contract call @@ -443,7 +443,11 @@ where // seal the block let block = Block { header, - body: BlockBody { transactions: executed_txs, ommers: vec![], withdrawals }, + body: BlockBody { + transactions: executed_txs, + ommers: vec![], + withdrawals: Some(attributes.withdrawals.clone()), + }, }; let sealed_block = Arc::new(block.seal_slow()); diff --git a/crates/optimism/payload/src/builder.rs b/crates/optimism/payload/src/builder.rs index 5926cfd34c56..eaf9e86e7738 100644 --- a/crates/optimism/payload/src/builder.rs +++ b/crates/optimism/payload/src/builder.rs @@ -4,7 +4,7 @@ use std::{fmt::Display, sync::Arc}; use alloy_consensus::{Header, Transaction, EMPTY_OMMER_ROOT_HASH}; use alloy_eips::merge::BEACON_NONCE; -use alloy_primitives::{Address, Bytes, U256}; +use alloy_primitives::{Address, Bytes, B256, U256}; use alloy_rpc_types_debug::ExecutionWitness; use alloy_rpc_types_engine::PayloadId; use reth_basic_payload_builder::*; @@ -318,13 +318,13 @@ where } } - let withdrawals_outcome = ctx.commit_withdrawals(state)?; + let withdrawals_root = ctx.commit_withdrawals(state)?; // merge all transitions into bundle state, this would apply the withdrawal balance changes // and 4788 contract call state.merge_transitions(BundleRetention::Reverts); - Ok(BuildOutcomeKind::Better { payload: ExecutedPayload { info, withdrawals_outcome } }) + Ok(BuildOutcomeKind::Better { payload: ExecutedPayload { info, withdrawals_root } }) } /// Builds the payload on top of the state. @@ -338,10 +338,7 @@ where DB: Database + AsRef

, P: StateRootProvider, { - let ExecutedPayload { - info, - withdrawals_outcome: WithdrawalsOutcome { withdrawals, withdrawals_root }, - } = match self.execute(&mut state, &ctx)? { + let ExecutedPayload { info, withdrawals_root } = match self.execute(&mut state, &ctx)? { BuildOutcomeKind::Better { payload } | BuildOutcomeKind::Freeze(payload) => payload, BuildOutcomeKind::Cancelled => return Ok(BuildOutcomeKind::Cancelled), BuildOutcomeKind::Aborted { fees } => return Ok(BuildOutcomeKind::Aborted { fees }), @@ -419,7 +416,7 @@ where body: BlockBody { transactions: info.executed_transactions, ommers: vec![], - withdrawals, + withdrawals: Some(ctx.attributes().payload_attributes.withdrawals.clone()), }, }; @@ -501,8 +498,8 @@ impl OpPayloadTransactions for () { pub struct ExecutedPayload { /// Tracked execution info pub info: ExecutionInfo, - /// Outcome after committing withdrawals. - pub withdrawals_outcome: WithdrawalsOutcome, + /// Withdrawal hash. + pub withdrawals_root: Option, } /// This acts as the container for executed transactions and its byproducts (receipts, gas used) @@ -652,10 +649,7 @@ impl OpPayloadBuilderCtx { } /// Commits the withdrawals from the payload attributes to the state. - pub fn commit_withdrawals( - &self, - db: &mut State, - ) -> Result + pub fn commit_withdrawals(&self, db: &mut State) -> Result, ProviderError> where DB: Database, { @@ -663,7 +657,7 @@ impl OpPayloadBuilderCtx { db, &self.chain_spec, self.attributes().payload_attributes.timestamp, - self.attributes().payload_attributes.withdrawals.clone(), + &self.attributes().payload_attributes.withdrawals, ) } diff --git a/crates/payload/basic/src/lib.rs b/crates/payload/basic/src/lib.rs index e3193ec6deb1..0ab411d3e600 100644 --- a/crates/payload/basic/src/lib.rs +++ b/crates/payload/basic/src/lib.rs @@ -979,31 +979,6 @@ impl Default for MissingPayloadBehaviour { } } -/// Represents the outcome of committing withdrawals to the runtime database and post state. -/// Pre-shanghai these are `None` values. -#[derive(Default, Debug)] -pub struct WithdrawalsOutcome { - /// committed withdrawals, if any. - pub withdrawals: Option, - /// withdrawals root if any. - pub withdrawals_root: Option, -} - -impl WithdrawalsOutcome { - /// No withdrawals pre shanghai - pub const fn pre_shanghai() -> Self { - Self { withdrawals: None, withdrawals_root: None } - } - - /// No withdrawals - pub fn empty() -> Self { - Self { - withdrawals: Some(Withdrawals::default()), - withdrawals_root: Some(EMPTY_WITHDRAWALS), - } - } -} - /// Executes the withdrawals and commits them to the _runtime_ Database and `BundleState`. /// /// Returns the withdrawals root. 
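With `WithdrawalsOutcome` removed, callers receive only the optional withdrawals root and take the withdrawals for the block body straight from the payload attributes. A condensed call-site sketch, assembled from the hunks in this patch rather than quoted verbatim:

    // `commit_withdrawals` (exported by `reth_basic_payload_builder`) now
    // returns `Ok(None)` before Shanghai and `Ok(Some(EMPTY_WITHDRAWALS))`
    // for an empty withdrawal list, as shown in the hunk below.
    let withdrawals_root =
        commit_withdrawals(&mut db, &chain_spec, attributes.timestamp, &attributes.withdrawals)?;

    // The block body clones the withdrawals directly from the attributes.
    let body = BlockBody {
        transactions: executed_txs,
        ommers: vec![],
        withdrawals: Some(attributes.withdrawals.clone()),
    };
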
@@ -1013,32 +988,26 @@ pub fn commit_withdrawals( db: &mut State, chain_spec: &ChainSpec, timestamp: u64, - withdrawals: Withdrawals, -) -> Result + withdrawals: &Withdrawals, +) -> Result, DB::Error> where DB: Database, ChainSpec: EthereumHardforks, { if !chain_spec.is_shanghai_active_at_timestamp(timestamp) { - return Ok(WithdrawalsOutcome::pre_shanghai()) + return Ok(None) } if withdrawals.is_empty() { - return Ok(WithdrawalsOutcome::empty()) + return Ok(Some(EMPTY_WITHDRAWALS)) } let balance_increments = - post_block_withdrawals_balance_increments(chain_spec, timestamp, &withdrawals); + post_block_withdrawals_balance_increments(chain_spec, timestamp, withdrawals); db.increment_balances(balance_increments)?; - let withdrawals_root = proofs::calculate_withdrawals_root(&withdrawals); - - // calculate withdrawals root - Ok(WithdrawalsOutcome { - withdrawals: Some(withdrawals), - withdrawals_root: Some(withdrawals_root), - }) + Ok(Some(proofs::calculate_withdrawals_root(withdrawals))) } /// Checks if the new payload is better than the current best. From f2860006f7692e833700b1e95a7a579659cf2243 Mon Sep 17 00:00:00 2001 From: Tien Nguyen <116023870+htiennv@users.noreply.github.com> Date: Fri, 22 Nov 2024 21:28:59 +0700 Subject: [PATCH 119/156] chore: replace TransactionSigned struct inits with new functions (#12779) --- .../network/tests/it/big_pooled_txs_req.rs | 7 +-- crates/primitives/src/transaction/sidecar.rs | 2 +- crates/rpc/rpc/src/eth/core.rs | 28 +++++----- .../stages/src/stages/sender_recovery.rs | 6 +-- .../src/providers/database/provider.rs | 23 +++------ .../storage/provider/src/test_utils/blocks.rs | 20 ++++---- .../transaction-pool/src/blobstore/tracker.rs | 51 ++++++++++--------- .../transaction-pool/src/test_utils/mock.rs | 7 +-- 8 files changed, 63 insertions(+), 81 deletions(-) diff --git a/crates/net/network/tests/it/big_pooled_txs_req.rs b/crates/net/network/tests/it/big_pooled_txs_req.rs index 9e0f69160b60..328229e87e14 100644 --- a/crates/net/network/tests/it/big_pooled_txs_req.rs +++ b/crates/net/network/tests/it/big_pooled_txs_req.rs @@ -26,11 +26,8 @@ async fn test_large_tx_req() { // replace rng txhash with real txhash let mut tx = MockTransaction::eip1559(); - let ts = TransactionSigned { - hash: Default::default(), - signature: Signature::test_signature(), - transaction: tx.clone().into(), - }; + let ts = + TransactionSigned::new_unhashed(tx.clone().into(), Signature::test_signature()); tx.set_hash(ts.recalculate_hash()); tx }) diff --git a/crates/primitives/src/transaction/sidecar.rs b/crates/primitives/src/transaction/sidecar.rs index 5eeeef09fc3c..c1b1b029afcb 100644 --- a/crates/primitives/src/transaction/sidecar.rs +++ b/crates/primitives/src/transaction/sidecar.rs @@ -32,7 +32,7 @@ impl BlobTransaction { hash, ))), transaction => { - let tx = TransactionSigned { transaction, signature, hash: hash.into() }; + let tx = TransactionSigned::new(transaction, signature, hash); Err((tx, sidecar)) } } diff --git a/crates/rpc/rpc/src/eth/core.rs b/crates/rpc/rpc/src/eth/core.rs index d6c8f522cda0..dac371529426 100644 --- a/crates/rpc/rpc/src/eth/core.rs +++ b/crates/rpc/rpc/src/eth/core.rs @@ -407,7 +407,7 @@ impl EthApiInner DatabaseProvider { let body = transactions .into_iter() .map(|tx| match transaction_kind { - TransactionVariant::NoHash => TransactionSigned { - // Caller explicitly asked for no hash, so we don't calculate it - hash: Default::default(), - signature: tx.signature, - transaction: tx.transaction, - }, + TransactionVariant::NoHash => { + 
TransactionSigned::new_unhashed(tx.transaction, tx.signature) + } TransactionVariant::WithHash => tx.with_hash(), }) .collect(); @@ -1499,11 +1496,9 @@ impl> Transaction fn transaction_by_hash(&self, hash: TxHash) -> ProviderResult> { if let Some(id) = self.transaction_id(hash)? { - Ok(self.transaction_by_id_no_hash(id)?.map(|tx| TransactionSigned { - hash: hash.into(), - signature: tx.signature, - transaction: tx.transaction, - })) + Ok(self + .transaction_by_id_no_hash(id)? + .map(|tx| TransactionSigned::new(tx.transaction, tx.signature, hash))) } else { Ok(None) } @@ -1517,11 +1512,7 @@ impl> Transaction let mut transaction_cursor = self.tx.cursor_read::()?; if let Some(transaction_id) = self.transaction_id(tx_hash)? { if let Some(tx) = self.transaction_by_id_no_hash(transaction_id)? { - let transaction = TransactionSigned { - hash: tx_hash.into(), - signature: tx.signature, - transaction: tx.transaction, - }; + let transaction = TransactionSigned::new(tx.transaction, tx.signature, tx_hash); if let Some(block_number) = transaction_cursor.seek(transaction_id).map(|b| b.map(|(_, bn)| bn))? { diff --git a/crates/storage/provider/src/test_utils/blocks.rs b/crates/storage/provider/src/test_utils/blocks.rs index 2b8dc0f85cac..fdded2807aab 100644 --- a/crates/storage/provider/src/test_utils/blocks.rs +++ b/crates/storage/provider/src/test_utils/blocks.rs @@ -88,9 +88,14 @@ pub(crate) static TEST_BLOCK: LazyLock = LazyLock::new(|| SealedBlo hex!("cf7b274520720b50e6a4c3e5c4d553101f44945396827705518ce17cb7219a42").into(), ), body: BlockBody { - transactions: vec![TransactionSigned { - hash: b256!("3541dd1d17e76adeb25dcf2b0a9b60a1669219502e58dcf26a2beafbfb550397").into(), - signature: Signature::new( + transactions: vec![TransactionSigned::new( + Transaction::Legacy(TxLegacy { + gas_price: 10, + gas_limit: 400_000, + to: TxKind::Call(hex!("095e7baea6a6c7c4c2dfeb977efac326af552d87").into()), + ..Default::default() + }), + Signature::new( U256::from_str( "51983300959770368863831494747186777928121405155922056726144551509338672451120", ) @@ -101,13 +106,8 @@ pub(crate) static TEST_BLOCK: LazyLock = LazyLock::new(|| SealedBlo .unwrap(), false, ), - transaction: Transaction::Legacy(TxLegacy { - gas_price: 10, - gas_limit: 400_000, - to: TxKind::Call(hex!("095e7baea6a6c7c4c2dfeb977efac326af552d87").into()), - ..Default::default() - }), - }], + b256!("3541dd1d17e76adeb25dcf2b0a9b60a1669219502e58dcf26a2beafbfb550397"), + )], ..Default::default() }, }); diff --git a/crates/transaction-pool/src/blobstore/tracker.rs b/crates/transaction-pool/src/blobstore/tracker.rs index d58abe9b4628..0f48c89a4995 100644 --- a/crates/transaction-pool/src/blobstore/tracker.rs +++ b/crates/transaction-pool/src/blobstore/tracker.rs @@ -82,6 +82,7 @@ pub enum BlobStoreUpdates { #[cfg(test)] mod tests { use alloy_consensus::Header; + use alloy_primitives::PrimitiveSignature as Signature; use reth_execution_types::Chain; use reth_primitives::{ BlockBody, SealedBlock, SealedBlockWithSenders, SealedHeader, Transaction, @@ -127,22 +128,22 @@ mod tests { ), body: BlockBody { transactions: vec![ - TransactionSigned { - hash: tx1_hash.into(), - transaction: Transaction::Eip4844(Default::default()), - ..Default::default() - }, - TransactionSigned { - hash: tx2_hash.into(), - transaction: Transaction::Eip4844(Default::default()), - ..Default::default() - }, + TransactionSigned::new( + Transaction::Eip4844(Default::default()), + Signature::test_signature(), + tx1_hash, + ), + TransactionSigned::new( + 
Transaction::Eip4844(Default::default()), + Signature::test_signature(), + tx2_hash, + ), // Another transaction that is not EIP-4844 - TransactionSigned { - hash: B256::random().into(), - transaction: Transaction::Eip7702(Default::default()), - ..Default::default() - }, + TransactionSigned::new( + Transaction::Eip7702(Default::default()), + Signature::test_signature(), + B256::random(), + ), ], ..Default::default() }, @@ -160,16 +161,16 @@ mod tests { ), body: BlockBody { transactions: vec![ - TransactionSigned { - hash: tx3_hash.into(), - transaction: Transaction::Eip1559(Default::default()), - ..Default::default() - }, - TransactionSigned { - hash: tx2_hash.into(), - transaction: Transaction::Eip2930(Default::default()), - ..Default::default() - }, + TransactionSigned::new( + Transaction::Eip1559(Default::default()), + Signature::test_signature(), + tx3_hash, + ), + TransactionSigned::new( + Transaction::Eip2930(Default::default()), + Signature::test_signature(), + tx2_hash, + ), ], ..Default::default() }, diff --git a/crates/transaction-pool/src/test_utils/mock.rs b/crates/transaction-pool/src/test_utils/mock.rs index 009543642ffd..849bde265482 100644 --- a/crates/transaction-pool/src/test_utils/mock.rs +++ b/crates/transaction-pool/src/test_utils/mock.rs @@ -910,11 +910,8 @@ impl From for MockTransaction { impl From for TransactionSignedEcRecovered { fn from(tx: MockTransaction) -> Self { - let signed_tx = TransactionSigned { - hash: (*tx.hash()).into(), - signature: Signature::test_signature(), - transaction: tx.clone().into(), - }; + let signed_tx = + TransactionSigned::new(tx.clone().into(), Signature::test_signature(), *tx.hash()); Self::from_signed_transaction(signed_tx, tx.sender()) } From 9a2eacdb9c929c4e25fe7e2b657303124928cce1 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 22 Nov 2024 15:53:14 +0100 Subject: [PATCH 120/156] chore: move traits used by legacy engine to separate mod (#12784) --- crates/storage/storage-api/src/legacy.rs | 83 ++++++++++++++++++++++++ crates/storage/storage-api/src/lib.rs | 3 + crates/storage/storage-api/src/state.rs | 79 +--------------------- 3 files changed, 88 insertions(+), 77 deletions(-) create mode 100644 crates/storage/storage-api/src/legacy.rs diff --git a/crates/storage/storage-api/src/legacy.rs b/crates/storage/storage-api/src/legacy.rs new file mode 100644 index 000000000000..e53a5d8bfa2b --- /dev/null +++ b/crates/storage/storage-api/src/legacy.rs @@ -0,0 +1,83 @@ +//! Traits used by the legacy execution engine. +//! +//! This module is scheduled for removal in the future. + +use alloy_eips::BlockNumHash; +use alloy_primitives::{BlockHash, BlockNumber}; +use auto_impl::auto_impl; +use reth_execution_types::ExecutionOutcome; +use reth_storage_errors::provider::{ProviderError, ProviderResult}; + +/// Blockchain trait provider that gives access to the blockchain state that is not yet committed +/// (pending). +pub trait BlockchainTreePendingStateProvider: Send + Sync { + /// Returns a state provider that includes all state changes of the given (pending) block hash. + /// + /// In other words, the state provider will return the state after all transactions of the given + /// hash have been executed. + fn pending_state_provider( + &self, + block_hash: BlockHash, + ) -> ProviderResult> { + self.find_pending_state_provider(block_hash) + .ok_or(ProviderError::StateForHashNotFound(block_hash)) + } + + /// Returns state provider if a matching block exists. 
+ fn find_pending_state_provider( + &self, + block_hash: BlockHash, + ) -> Option>; +} + +/// Provides data required for post-block execution. +/// +/// This trait offers methods to access essential post-execution data, including the state changes +/// in accounts and storage, as well as block hashes for both the pending and canonical chains. +/// +/// The trait includes: +/// * [`ExecutionOutcome`] - Captures all account and storage changes in the pending chain. +/// * Block hashes - Provides access to the block hashes of both the pending chain and canonical +/// blocks. +#[auto_impl(&, Box)] +pub trait ExecutionDataProvider: Send + Sync { + /// Return the execution outcome. + fn execution_outcome(&self) -> &ExecutionOutcome; + /// Return block hash by block number of pending or canonical chain. + fn block_hash(&self, block_number: BlockNumber) -> Option; +} + +impl ExecutionDataProvider for ExecutionOutcome { + fn execution_outcome(&self) -> &ExecutionOutcome { + self + } + + /// Always returns [None] because we don't have any information about the block header. + fn block_hash(&self, _block_number: BlockNumber) -> Option { + None + } +} + +/// Fork data needed for execution on it. +/// +/// It contains a canonical fork, the block on what pending chain was forked from. +#[auto_impl(&, Box)] +pub trait BlockExecutionForkProvider { + /// Return canonical fork, the block on what post state was forked from. + /// + /// Needed to create state provider. + fn canonical_fork(&self) -> BlockNumHash; +} + +/// Provides comprehensive post-execution state data required for further execution. +/// +/// This trait is used to create a state provider over the pending state and is a combination of +/// [`ExecutionDataProvider`] and [`BlockExecutionForkProvider`]. +/// +/// The pending state includes: +/// * `ExecutionOutcome`: Contains all changes to accounts and storage within the pending chain. +/// * Block hashes: Represents hashes of both the pending chain and canonical blocks. +/// * Canonical fork: Denotes the block from which the pending chain forked. +pub trait FullExecutionDataProvider: ExecutionDataProvider + BlockExecutionForkProvider {} + +impl FullExecutionDataProvider for T where T: ExecutionDataProvider + BlockExecutionForkProvider {} diff --git a/crates/storage/storage-api/src/lib.rs b/crates/storage/storage-api/src/lib.rs index be52a817e932..de09e66f1281 100644 --- a/crates/storage/storage-api/src/lib.rs +++ b/crates/storage/storage-api/src/lib.rs @@ -67,3 +67,6 @@ mod hashing; pub use hashing::*; mod stats; pub use stats::*; + +mod legacy; +pub use legacy::*; diff --git a/crates/storage/storage-api/src/state.rs b/crates/storage/storage-api/src/state.rs index d37940f04787..3174489fc4ac 100644 --- a/crates/storage/storage-api/src/state.rs +++ b/crates/storage/storage-api/src/state.rs @@ -3,12 +3,11 @@ use super::{ StorageRootProvider, }; use alloy_consensus::constants::KECCAK_EMPTY; -use alloy_eips::{BlockId, BlockNumHash, BlockNumberOrTag}; +use alloy_eips::{BlockId, BlockNumberOrTag}; use alloy_primitives::{Address, BlockHash, BlockNumber, StorageKey, StorageValue, B256, U256}; use auto_impl::auto_impl; -use reth_execution_types::ExecutionOutcome; use reth_primitives::Bytecode; -use reth_storage_errors::provider::{ProviderError, ProviderResult}; +use reth_storage_errors::provider::ProviderResult; /// Type alias of boxed [`StateProvider`]. 
pub type StateProviderBox = Box; @@ -167,77 +166,3 @@ pub trait StateProviderFactory: BlockIdReader + Send + Sync { /// If the block couldn't be found, returns `None`. fn pending_state_by_hash(&self, block_hash: B256) -> ProviderResult>; } - -/// Blockchain trait provider that gives access to the blockchain state that is not yet committed -/// (pending). -pub trait BlockchainTreePendingStateProvider: Send + Sync { - /// Returns a state provider that includes all state changes of the given (pending) block hash. - /// - /// In other words, the state provider will return the state after all transactions of the given - /// hash have been executed. - fn pending_state_provider( - &self, - block_hash: BlockHash, - ) -> ProviderResult> { - self.find_pending_state_provider(block_hash) - .ok_or(ProviderError::StateForHashNotFound(block_hash)) - } - - /// Returns state provider if a matching block exists. - fn find_pending_state_provider( - &self, - block_hash: BlockHash, - ) -> Option>; -} - -/// Provides data required for post-block execution. -/// -/// This trait offers methods to access essential post-execution data, including the state changes -/// in accounts and storage, as well as block hashes for both the pending and canonical chains. -/// -/// The trait includes: -/// * [`ExecutionOutcome`] - Captures all account and storage changes in the pending chain. -/// * Block hashes - Provides access to the block hashes of both the pending chain and canonical -/// blocks. -#[auto_impl(&, Box)] -pub trait ExecutionDataProvider: Send + Sync { - /// Return the execution outcome. - fn execution_outcome(&self) -> &ExecutionOutcome; - /// Return block hash by block number of pending or canonical chain. - fn block_hash(&self, block_number: BlockNumber) -> Option; -} - -impl ExecutionDataProvider for ExecutionOutcome { - fn execution_outcome(&self) -> &ExecutionOutcome { - self - } - - /// Always returns [None] because we don't have any information about the block header. - fn block_hash(&self, _block_number: BlockNumber) -> Option { - None - } -} - -/// Fork data needed for execution on it. -/// -/// It contains a canonical fork, the block on what pending chain was forked from. -#[auto_impl(&, Box)] -pub trait BlockExecutionForkProvider { - /// Return canonical fork, the block on what post state was forked from. - /// - /// Needed to create state provider. - fn canonical_fork(&self) -> BlockNumHash; -} - -/// Provides comprehensive post-execution state data required for further execution. -/// -/// This trait is used to create a state provider over the pending state and is a combination of -/// [`ExecutionDataProvider`] and [`BlockExecutionForkProvider`]. -/// -/// The pending state includes: -/// * `ExecutionOutcome`: Contains all changes to accounts and storage within the pending chain. -/// * Block hashes: Represents hashes of both the pending chain and canonical blocks. -/// * Canonical fork: Denotes the block from which the pending chain forked. 
-pub trait FullExecutionDataProvider: ExecutionDataProvider + BlockExecutionForkProvider {} - -impl FullExecutionDataProvider for T where T: ExecutionDataProvider + BlockExecutionForkProvider {} From cbd9d6dc05f4c79d871f76852533a217f8ffc137 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 22 Nov 2024 16:11:03 +0100 Subject: [PATCH 121/156] chore: remove no hash usage in tests (#12782) --- crates/primitives/src/transaction/mod.rs | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/crates/primitives/src/transaction/mod.rs b/crates/primitives/src/transaction/mod.rs index 4091e51f3ed9..2c639c7ffebd 100644 --- a/crates/primitives/src/transaction/mod.rs +++ b/crates/primitives/src/transaction/mod.rs @@ -1995,7 +1995,7 @@ pub mod serde_bincode_compat { mod tests { use crate::{ transaction::{TxEip1559, TxKind, TxLegacy}, - Transaction, TransactionSigned, TransactionSignedEcRecovered, TransactionSignedNoHash, + Transaction, TransactionSigned, TransactionSignedEcRecovered, }; use alloy_consensus::Transaction as _; use alloy_eips::eip2718::{Decodable2718, Encodable2718}; @@ -2378,17 +2378,17 @@ mod tests { input: Bytes::from(input), }); - let tx_signed_no_hash = TransactionSignedNoHash { signature, transaction }; - test_transaction_signed_to_from_compact(tx_signed_no_hash); + let tx = TransactionSigned::new_unhashed(transaction, signature); + test_transaction_signed_to_from_compact(tx); } } - fn test_transaction_signed_to_from_compact(tx_signed_no_hash: TransactionSignedNoHash) { + fn test_transaction_signed_to_from_compact(tx: TransactionSigned) { // zstd aware `to_compact` let mut buff: Vec = Vec::new(); - let written_bytes = tx_signed_no_hash.to_compact(&mut buff); - let (decoded, _) = TransactionSignedNoHash::from_compact(&buff, written_bytes); - assert_eq!(tx_signed_no_hash, decoded); + let written_bytes = tx.to_compact(&mut buff); + let (decoded, _) = TransactionSigned::from_compact(&buff, written_bytes); + assert_eq!(tx, decoded); } #[test] From ba1a1687b02902799a8486170ec15974a6be44ba Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 22 Nov 2024 16:12:36 +0100 Subject: [PATCH 122/156] chore: simplify typed to signed tx (#12781) --- crates/rpc/rpc-eth-types/src/simulate.rs | 33 +++--------------------- 1 file changed, 3 insertions(+), 30 deletions(-) diff --git a/crates/rpc/rpc-eth-types/src/simulate.rs b/crates/rpc/rpc-eth-types/src/simulate.rs index b24ec9e86bcb..5a0daa1b42f0 100644 --- a/crates/rpc/rpc-eth-types/src/simulate.rs +++ b/crates/rpc/rpc-eth-types/src/simulate.rs @@ -1,6 +1,6 @@ //! Utilities for serving `eth_simulateV1` -use alloy_consensus::{Transaction as _, TxEip4844Variant, TxType, TypedTransaction}; +use alloy_consensus::{Transaction as _, TxType}; use alloy_primitives::PrimitiveSignature as Signature; use alloy_rpc_types_eth::{ simulate::{SimCallResult, SimulateError, SimulatedBlock}, @@ -10,7 +10,7 @@ use alloy_rpc_types_eth::{ use jsonrpsee_types::ErrorObject; use reth_primitives::{ proofs::{calculate_receipt_root, calculate_transaction_root}, - BlockBody, BlockWithSenders, Receipt, Transaction, TransactionSigned, TransactionSignedNoHash, + BlockBody, BlockWithSenders, Receipt, TransactionSigned, }; use reth_revm::database::StateProviderDatabase; use reth_rpc_server_types::result::rpc_err; @@ -135,34 +135,7 @@ where // Create an empty signature for the transaction. 
let signature = Signature::new(Default::default(), Default::default(), false); - - let tx = match tx { - TypedTransaction::Legacy(tx) => { - TransactionSignedNoHash { transaction: Transaction::Legacy(tx), signature } - .with_hash() - } - TypedTransaction::Eip2930(tx) => { - TransactionSignedNoHash { transaction: Transaction::Eip2930(tx), signature } - .with_hash() - } - TypedTransaction::Eip1559(tx) => { - TransactionSignedNoHash { transaction: Transaction::Eip1559(tx), signature } - .with_hash() - } - TypedTransaction::Eip4844(tx) => { - let tx = match tx { - TxEip4844Variant::TxEip4844(tx) => tx, - TxEip4844Variant::TxEip4844WithSidecar(tx) => tx.tx, - }; - TransactionSignedNoHash { transaction: Transaction::Eip4844(tx), signature } - .with_hash() - } - TypedTransaction::Eip7702(tx) => { - TransactionSignedNoHash { transaction: Transaction::Eip7702(tx), signature } - .with_hash() - } - }; - + let tx = TransactionSigned::new_unhashed(tx.into(), signature); transactions.push(tx); } From 7b156f058cf2b32659a85fd9c9955c9cee94214c Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 22 Nov 2024 16:29:04 +0100 Subject: [PATCH 123/156] feat: impl alloy transaction for pooled tx (#12787) --- crates/primitives/src/transaction/pooled.rs | 229 +++++++++++++++----- 1 file changed, 178 insertions(+), 51 deletions(-) diff --git a/crates/primitives/src/transaction/pooled.rs b/crates/primitives/src/transaction/pooled.rs index c6ab623829bd..bb8406147034 100644 --- a/crates/primitives/src/transaction/pooled.rs +++ b/crates/primitives/src/transaction/pooled.rs @@ -6,13 +6,17 @@ use crate::{BlobTransaction, Transaction, TransactionSigned, TransactionSignedEc use alloy_consensus::{ constants::EIP4844_TX_TYPE_ID, transaction::{TxEip1559, TxEip2930, TxEip4844, TxLegacy}, - Signed, Transaction as _, TxEip4844WithSidecar, + Signed, TxEip4844WithSidecar, }; use alloy_eips::{ eip2718::{Decodable2718, Eip2718Result, Encodable2718}, + eip2930::AccessList, eip4844::BlobTransactionSidecar, + eip7702::SignedAuthorization, +}; +use alloy_primitives::{ + Address, Bytes, ChainId, PrimitiveSignature as Signature, TxHash, TxKind, B256, U256, }; -use alloy_primitives::{Address, PrimitiveSignature as Signature, TxHash, B256}; use alloy_rlp::{Decodable, Encodable, Error as RlpError, Header}; use bytes::Buf; use derive_more::{AsRef, Deref}; @@ -124,17 +128,6 @@ impl PooledTransactionsElement { } } - /// Returns the transaction nonce. - pub fn nonce(&self) -> u64 { - match self { - Self::Legacy(tx) => tx.tx().nonce(), - Self::Eip2930(tx) => tx.tx().nonce(), - Self::Eip1559(tx) => tx.tx().nonce(), - Self::Eip7702(tx) => tx.tx().nonce(), - Self::BlobTransaction(tx) => tx.tx().nonce(), - } - } - /// Recover signer from signature and hash. /// /// Returns `None` if the transaction's signature is invalid, see also [`Self::recover_signer`]. @@ -224,44 +217,6 @@ impl PooledTransactionsElement { pub fn blob_gas_used(&self) -> Option { self.as_eip4844().map(TxEip4844::blob_gas) } - - /// Max fee per blob gas for eip4844 transaction [`TxEip4844`]. - /// - /// Returns `None` for non-eip4844 transactions. - /// - /// This is also commonly referred to as the "Blob Gas Fee Cap" (`BlobGasFeeCap`). 
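The inherent fee accessors being removed here are superseded by the `alloy_consensus::Transaction` impl added further down, so callers reach the same fields through the trait. A small usage sketch (the helper and the base fee value are illustrative only):

    use alloy_consensus::Transaction;
    use reth_primitives::PooledTransactionsElement;

    // Reads fee fields via the trait methods implemented in this patch.
    fn describe_fees(tx: &PooledTransactionsElement) -> (u128, Option<u128>, u128) {
        (
            tx.max_fee_per_gas(),
            tx.max_fee_per_blob_gas(),       // `Some(..)` only for blob transactions
            tx.effective_gas_price(Some(7)), // example base fee of 7 wei
        )
    }
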
- pub const fn max_fee_per_blob_gas(&self) -> Option { - match self { - Self::BlobTransaction(tx) => Some(tx.0.tx().tx.max_fee_per_blob_gas), - _ => None, - } - } - - /// Max priority fee per gas for eip1559 transaction, for legacy and eip2930 transactions this - /// is `None` - /// - /// This is also commonly referred to as the "Gas Tip Cap" (`GasTipCap`). - pub const fn max_priority_fee_per_gas(&self) -> Option { - match self { - Self::Legacy(_) | Self::Eip2930(_) => None, - Self::Eip1559(tx) => Some(tx.tx().max_priority_fee_per_gas), - Self::Eip7702(tx) => Some(tx.tx().max_priority_fee_per_gas), - Self::BlobTransaction(tx) => Some(tx.0.tx().tx.max_priority_fee_per_gas), - } - } - - /// Max fee per gas for eip1559 transaction, for legacy transactions this is `gas_price`. - /// - /// This is also commonly referred to as the "Gas Fee Cap" (`GasFeeCap`). - pub const fn max_fee_per_gas(&self) -> u128 { - match self { - Self::Legacy(tx) => tx.tx().gas_price, - Self::Eip2930(tx) => tx.tx().gas_price, - Self::Eip1559(tx) => tx.tx().max_fee_per_gas, - Self::Eip7702(tx) => tx.tx().max_fee_per_gas, - Self::BlobTransaction(tx) => tx.0.tx().tx.max_fee_per_gas, - } - } } impl Encodable for PooledTransactionsElement { @@ -432,6 +387,178 @@ impl Decodable2718 for PooledTransactionsElement { } } +impl alloy_consensus::Transaction for PooledTransactionsElement { + fn chain_id(&self) -> Option { + match self { + Self::Legacy(tx) => tx.tx().chain_id(), + Self::Eip2930(tx) => tx.tx().chain_id(), + Self::Eip1559(tx) => tx.tx().chain_id(), + Self::Eip7702(tx) => tx.tx().chain_id(), + Self::BlobTransaction(tx) => tx.tx().chain_id(), + } + } + + fn nonce(&self) -> u64 { + match self { + Self::Legacy(tx) => tx.tx().nonce(), + Self::Eip2930(tx) => tx.tx().nonce(), + Self::Eip1559(tx) => tx.tx().nonce(), + Self::Eip7702(tx) => tx.tx().nonce(), + Self::BlobTransaction(tx) => tx.tx().nonce(), + } + } + + fn gas_limit(&self) -> u64 { + match self { + Self::Legacy(tx) => tx.tx().gas_limit(), + Self::Eip2930(tx) => tx.tx().gas_limit(), + Self::Eip1559(tx) => tx.tx().gas_limit(), + Self::Eip7702(tx) => tx.tx().gas_limit(), + Self::BlobTransaction(tx) => tx.tx().gas_limit(), + } + } + + fn gas_price(&self) -> Option { + match self { + Self::Legacy(tx) => tx.tx().gas_price(), + Self::Eip2930(tx) => tx.tx().gas_price(), + Self::Eip1559(tx) => tx.tx().gas_price(), + Self::Eip7702(tx) => tx.tx().gas_price(), + Self::BlobTransaction(tx) => tx.tx().gas_price(), + } + } + + fn max_fee_per_gas(&self) -> u128 { + match self { + Self::Legacy(tx) => tx.tx().max_fee_per_gas(), + Self::Eip2930(tx) => tx.tx().max_fee_per_gas(), + Self::Eip1559(tx) => tx.tx().max_fee_per_gas(), + Self::Eip7702(tx) => tx.tx().max_fee_per_gas(), + Self::BlobTransaction(tx) => tx.tx().max_fee_per_gas(), + } + } + + fn max_priority_fee_per_gas(&self) -> Option { + match self { + Self::Legacy(tx) => tx.tx().max_priority_fee_per_gas(), + Self::Eip2930(tx) => tx.tx().max_priority_fee_per_gas(), + Self::Eip1559(tx) => tx.tx().max_priority_fee_per_gas(), + Self::Eip7702(tx) => tx.tx().max_priority_fee_per_gas(), + Self::BlobTransaction(tx) => tx.tx().max_priority_fee_per_gas(), + } + } + + fn max_fee_per_blob_gas(&self) -> Option { + match self { + Self::Legacy(tx) => tx.tx().max_fee_per_blob_gas(), + Self::Eip2930(tx) => tx.tx().max_fee_per_blob_gas(), + Self::Eip1559(tx) => tx.tx().max_fee_per_blob_gas(), + Self::Eip7702(tx) => tx.tx().max_fee_per_blob_gas(), + Self::BlobTransaction(tx) => tx.tx().max_fee_per_blob_gas(), + } + } + + fn 
priority_fee_or_price(&self) -> u128 { + match self { + Self::Legacy(tx) => tx.tx().priority_fee_or_price(), + Self::Eip2930(tx) => tx.tx().priority_fee_or_price(), + Self::Eip1559(tx) => tx.tx().priority_fee_or_price(), + Self::Eip7702(tx) => tx.tx().priority_fee_or_price(), + Self::BlobTransaction(tx) => tx.tx().priority_fee_or_price(), + } + } + + fn effective_gas_price(&self, base_fee: Option) -> u128 { + match self { + Self::Legacy(tx) => tx.tx().effective_gas_price(base_fee), + Self::Eip2930(tx) => tx.tx().effective_gas_price(base_fee), + Self::Eip1559(tx) => tx.tx().effective_gas_price(base_fee), + Self::Eip7702(tx) => tx.tx().effective_gas_price(base_fee), + Self::BlobTransaction(tx) => tx.tx().effective_gas_price(base_fee), + } + } + + fn is_dynamic_fee(&self) -> bool { + match self { + Self::Legacy(tx) => tx.tx().is_dynamic_fee(), + Self::Eip2930(tx) => tx.tx().is_dynamic_fee(), + Self::Eip1559(tx) => tx.tx().is_dynamic_fee(), + Self::Eip7702(tx) => tx.tx().is_dynamic_fee(), + Self::BlobTransaction(tx) => tx.tx().is_dynamic_fee(), + } + } + + fn kind(&self) -> TxKind { + match self { + Self::Legacy(tx) => tx.tx().kind(), + Self::Eip2930(tx) => tx.tx().kind(), + Self::Eip1559(tx) => tx.tx().kind(), + Self::Eip7702(tx) => tx.tx().kind(), + Self::BlobTransaction(tx) => tx.tx().kind(), + } + } + + fn value(&self) -> U256 { + match self { + Self::Legacy(tx) => tx.tx().value(), + Self::Eip2930(tx) => tx.tx().value(), + Self::Eip1559(tx) => tx.tx().value(), + Self::Eip7702(tx) => tx.tx().value(), + Self::BlobTransaction(tx) => tx.tx().value(), + } + } + + fn input(&self) -> &Bytes { + match self { + Self::Legacy(tx) => tx.tx().input(), + Self::Eip2930(tx) => tx.tx().input(), + Self::Eip1559(tx) => tx.tx().input(), + Self::Eip7702(tx) => tx.tx().input(), + Self::BlobTransaction(tx) => tx.tx().input(), + } + } + + fn ty(&self) -> u8 { + match self { + Self::Legacy(tx) => tx.tx().ty(), + Self::Eip2930(tx) => tx.tx().ty(), + Self::Eip1559(tx) => tx.tx().ty(), + Self::Eip7702(tx) => tx.tx().ty(), + Self::BlobTransaction(tx) => tx.tx().ty(), + } + } + + fn access_list(&self) -> Option<&AccessList> { + match self { + Self::Legacy(tx) => tx.tx().access_list(), + Self::Eip2930(tx) => tx.tx().access_list(), + Self::Eip1559(tx) => tx.tx().access_list(), + Self::Eip7702(tx) => tx.tx().access_list(), + Self::BlobTransaction(tx) => tx.tx().access_list(), + } + } + + fn blob_versioned_hashes(&self) -> Option<&[B256]> { + match self { + Self::Legacy(tx) => tx.tx().blob_versioned_hashes(), + Self::Eip2930(tx) => tx.tx().blob_versioned_hashes(), + Self::Eip1559(tx) => tx.tx().blob_versioned_hashes(), + Self::Eip7702(tx) => tx.tx().blob_versioned_hashes(), + Self::BlobTransaction(tx) => tx.tx().blob_versioned_hashes(), + } + } + + fn authorization_list(&self) -> Option<&[SignedAuthorization]> { + match self { + Self::Legacy(tx) => tx.tx().authorization_list(), + Self::Eip2930(tx) => tx.tx().authorization_list(), + Self::Eip1559(tx) => tx.tx().authorization_list(), + Self::Eip7702(tx) => tx.tx().authorization_list(), + Self::BlobTransaction(tx) => tx.tx().authorization_list(), + } + } +} + impl TryFrom for PooledTransactionsElement { type Error = TransactionConversionError; From 362e2ed0af251b4c90246cc78b0d0e8349d6c758 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 22 Nov 2024 16:35:50 +0100 Subject: [PATCH 124/156] chore: rename transaction_by_id_no_hash fn (#12783) --- crates/stages/stages/src/stages/sender_recovery.rs | 2 +- .../storage/provider/src/providers/blockchain_provider.rs | 6 +++--- 
crates/storage/provider/src/providers/consistent.rs | 4 ++-- crates/storage/provider/src/providers/database/mod.rs | 6 +++--- .../storage/provider/src/providers/database/provider.rs | 8 ++++---- crates/storage/provider/src/providers/mod.rs | 4 ++-- crates/storage/provider/src/providers/static_file/jar.rs | 2 +- .../storage/provider/src/providers/static_file/manager.rs | 6 +++--- crates/storage/provider/src/test_utils/mock.rs | 2 +- crates/storage/provider/src/test_utils/noop.rs | 2 +- crates/storage/storage-api/src/transactions.rs | 2 +- 11 files changed, 22 insertions(+), 22 deletions(-) diff --git a/crates/stages/stages/src/stages/sender_recovery.rs b/crates/stages/stages/src/stages/sender_recovery.rs index 0b8e2faaebd7..d611062b565d 100644 --- a/crates/stages/stages/src/stages/sender_recovery.rs +++ b/crates/stages/stages/src/stages/sender_recovery.rs @@ -667,7 +667,7 @@ mod tests { while let Some((_, body)) = body_cursor.next()? { for tx_id in body.tx_num_range() { let transaction: TransactionSigned = provider - .transaction_by_id_no_hash(tx_id)? + .transaction_by_id_unhashed(tx_id)? .map(|tx| { TransactionSigned::new_unhashed(tx.transaction, tx.signature) }) diff --git a/crates/storage/provider/src/providers/blockchain_provider.rs b/crates/storage/provider/src/providers/blockchain_provider.rs index 74009ffff596..744120dd0c06 100644 --- a/crates/storage/provider/src/providers/blockchain_provider.rs +++ b/crates/storage/provider/src/providers/blockchain_provider.rs @@ -339,11 +339,11 @@ impl TransactionsProvider for BlockchainProvider2 { self.consistent_provider()?.transaction_by_id(id) } - fn transaction_by_id_no_hash( + fn transaction_by_id_unhashed( &self, id: TxNumber, ) -> ProviderResult> { - self.consistent_provider()?.transaction_by_id_no_hash(id) + self.consistent_provider()?.transaction_by_id_unhashed(id) } fn transaction_by_hash(&self, hash: TxHash) -> ProviderResult> { @@ -2588,7 +2588,7 @@ mod tests { ), ( ONE, - transaction_by_id_no_hash, + transaction_by_id_unhashed, |block: &SealedBlock, tx_num: TxNumber, _: B256, _: &Vec>| ( tx_num, Some(Into::::into( diff --git a/crates/storage/provider/src/providers/consistent.rs b/crates/storage/provider/src/providers/consistent.rs index 3b2599f49991..7d52dfcc4bb0 100644 --- a/crates/storage/provider/src/providers/consistent.rs +++ b/crates/storage/provider/src/providers/consistent.rs @@ -945,13 +945,13 @@ impl TransactionsProvider for ConsistentProvider { ) } - fn transaction_by_id_no_hash( + fn transaction_by_id_unhashed( &self, id: TxNumber, ) -> ProviderResult> { self.get_in_memory_or_storage_by_tx( id.into(), - |provider| provider.transaction_by_id_no_hash(id), + |provider| provider.transaction_by_id_unhashed(id), |tx_index, _, block_state| { Ok(block_state .block_ref() diff --git a/crates/storage/provider/src/providers/database/mod.rs b/crates/storage/provider/src/providers/database/mod.rs index 7d94fb98a80a..491c79d7aa64 100644 --- a/crates/storage/provider/src/providers/database/mod.rs +++ b/crates/storage/provider/src/providers/database/mod.rs @@ -433,15 +433,15 @@ impl TransactionsProvider for ProviderFactory { ) } - fn transaction_by_id_no_hash( + fn transaction_by_id_unhashed( &self, id: TxNumber, ) -> ProviderResult> { self.static_file_provider.get_with_static_file_or_database( StaticFileSegment::Transactions, id, - |static_file| static_file.transaction_by_id_no_hash(id), - || self.provider()?.transaction_by_id_no_hash(id), + |static_file| static_file.transaction_by_id_unhashed(id), + || 
self.provider()?.transaction_by_id_unhashed(id), ) } diff --git a/crates/storage/provider/src/providers/database/provider.rs b/crates/storage/provider/src/providers/database/provider.rs index bcb9fa415ec4..279637abd84f 100644 --- a/crates/storage/provider/src/providers/database/provider.rs +++ b/crates/storage/provider/src/providers/database/provider.rs @@ -1482,14 +1482,14 @@ impl> Transaction ) } - fn transaction_by_id_no_hash( + fn transaction_by_id_unhashed( &self, id: TxNumber, ) -> ProviderResult> { self.static_file_provider.get_with_static_file_or_database( StaticFileSegment::Transactions, id, - |static_file| static_file.transaction_by_id_no_hash(id), + |static_file| static_file.transaction_by_id_unhashed(id), || Ok(self.tx.get::(id)?), ) } @@ -1497,7 +1497,7 @@ impl> Transaction fn transaction_by_hash(&self, hash: TxHash) -> ProviderResult> { if let Some(id) = self.transaction_id(hash)? { Ok(self - .transaction_by_id_no_hash(id)? + .transaction_by_id_unhashed(id)? .map(|tx| TransactionSigned::new(tx.transaction, tx.signature, hash))) } else { Ok(None) @@ -1511,7 +1511,7 @@ impl> Transaction ) -> ProviderResult> { let mut transaction_cursor = self.tx.cursor_read::()?; if let Some(transaction_id) = self.transaction_id(tx_hash)? { - if let Some(tx) = self.transaction_by_id_no_hash(transaction_id)? { + if let Some(tx) = self.transaction_by_id_unhashed(transaction_id)? { let transaction = TransactionSigned::new(tx.transaction, tx.signature, tx_hash); if let Some(block_number) = transaction_cursor.seek(transaction_id).map(|b| b.map(|(_, bn)| bn))? diff --git a/crates/storage/provider/src/providers/mod.rs b/crates/storage/provider/src/providers/mod.rs index d530917909ca..4d641bb290e3 100644 --- a/crates/storage/provider/src/providers/mod.rs +++ b/crates/storage/provider/src/providers/mod.rs @@ -425,11 +425,11 @@ impl TransactionsProvider for BlockchainProvider { self.database.transaction_by_id(id) } - fn transaction_by_id_no_hash( + fn transaction_by_id_unhashed( &self, id: TxNumber, ) -> ProviderResult> { - self.database.transaction_by_id_no_hash(id) + self.database.transaction_by_id_unhashed(id) } fn transaction_by_hash(&self, hash: TxHash) -> ProviderResult> { diff --git a/crates/storage/provider/src/providers/static_file/jar.rs b/crates/storage/provider/src/providers/static_file/jar.rs index e87829b11338..b3ff20d91978 100644 --- a/crates/storage/provider/src/providers/static_file/jar.rs +++ b/crates/storage/provider/src/providers/static_file/jar.rs @@ -221,7 +221,7 @@ impl TransactionsProvider for StaticFileJarProvider<'_, N> { .map(|tx| tx.with_hash())) } - fn transaction_by_id_no_hash( + fn transaction_by_id_unhashed( &self, num: TxNumber, ) -> ProviderResult> { diff --git a/crates/storage/provider/src/providers/static_file/manager.rs b/crates/storage/provider/src/providers/static_file/manager.rs index bee42fdac831..7bf0c49893e7 100644 --- a/crates/storage/provider/src/providers/static_file/manager.rs +++ b/crates/storage/provider/src/providers/static_file/manager.rs @@ -1463,12 +1463,12 @@ impl TransactionsProvider for StaticFileProvider { }) } - fn transaction_by_id_no_hash( + fn transaction_by_id_unhashed( &self, num: TxNumber, ) -> ProviderResult> { self.get_segment_provider_from_transaction(StaticFileSegment::Transactions, num, None) - .and_then(|provider| provider.transaction_by_id_no_hash(num)) + .and_then(|provider| provider.transaction_by_id_unhashed(num)) .or_else(|err| { if let ProviderError::MissingStaticFileTx(_, _) = err { Ok(None) @@ -1541,7 +1541,7 @@ impl 
TransactionsProvider for StaticFileProvider { } fn transaction_sender(&self, id: TxNumber) -> ProviderResult> { - Ok(self.transaction_by_id_no_hash(id)?.and_then(|tx| tx.recover_signer())) + Ok(self.transaction_by_id_unhashed(id)?.and_then(|tx| tx.recover_signer())) } } diff --git a/crates/storage/provider/src/test_utils/mock.rs b/crates/storage/provider/src/test_utils/mock.rs index 43bb1e809422..77a4b75a0e2b 100644 --- a/crates/storage/provider/src/test_utils/mock.rs +++ b/crates/storage/provider/src/test_utils/mock.rs @@ -263,7 +263,7 @@ impl TransactionsProvider for MockEthProvider { Ok(transaction) } - fn transaction_by_id_no_hash( + fn transaction_by_id_unhashed( &self, id: TxNumber, ) -> ProviderResult> { diff --git a/crates/storage/provider/src/test_utils/noop.rs b/crates/storage/provider/src/test_utils/noop.rs index d12539a2c27f..966bab5944cc 100644 --- a/crates/storage/provider/src/test_utils/noop.rs +++ b/crates/storage/provider/src/test_utils/noop.rs @@ -200,7 +200,7 @@ impl TransactionsProvider for NoopProvider { Ok(None) } - fn transaction_by_id_no_hash( + fn transaction_by_id_unhashed( &self, _id: TxNumber, ) -> ProviderResult> { diff --git a/crates/storage/storage-api/src/transactions.rs b/crates/storage/storage-api/src/transactions.rs index f2c44e9e140b..a639fcedde5a 100644 --- a/crates/storage/storage-api/src/transactions.rs +++ b/crates/storage/storage-api/src/transactions.rs @@ -31,7 +31,7 @@ pub trait TransactionsProvider: BlockNumReader + Send + Sync { fn transaction_by_id(&self, id: TxNumber) -> ProviderResult>; /// Get transaction by id without computing the hash. - fn transaction_by_id_no_hash( + fn transaction_by_id_unhashed( &self, id: TxNumber, ) -> ProviderResult>; From 7f95f1bf072a6d7a4804db7f4067fd397df86267 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Fri, 22 Nov 2024 19:52:51 +0400 Subject: [PATCH 125/156] feat: generic static file masks (#12785) --- crates/blockchain-tree/src/externals.rs | 4 +- crates/cli/commands/src/db/get.rs | 6 +- crates/stages/stages/src/stages/bodies.rs | 6 +- crates/storage/db/Cargo.toml | 1 - crates/storage/db/src/static_file/mask.rs | 69 ++++++++----------- crates/storage/db/src/static_file/masks.rs | 45 ++++++++---- crates/storage/db/src/static_file/mod.rs | 1 + .../provider/src/providers/static_file/jar.rs | 22 +++--- .../src/providers/static_file/manager.rs | 18 ++--- 9 files changed, 94 insertions(+), 78 deletions(-) diff --git a/crates/blockchain-tree/src/externals.rs b/crates/blockchain-tree/src/externals.rs index bf5a243a5a5c..2b9dae9a3dfd 100644 --- a/crates/blockchain-tree/src/externals.rs +++ b/crates/blockchain-tree/src/externals.rs @@ -2,7 +2,7 @@ use alloy_primitives::{BlockHash, BlockNumber}; use reth_consensus::Consensus; -use reth_db::{static_file::HeaderMask, tables}; +use reth_db::{static_file::BlockHashMask, tables}; use reth_db_api::{cursor::DbCursorRO, transaction::DbTx}; use reth_node_types::{FullNodePrimitives, NodeTypesWithDB}; use reth_primitives::{BlockBody, StaticFileSegment}; @@ -85,7 +85,7 @@ impl TreeExternals { hashes.extend(range.clone().zip(static_file_provider.fetch_range_with_predicate( StaticFileSegment::Headers, range, - |cursor, number| cursor.get_one::>(number.into()), + |cursor, number| cursor.get_one::(number.into()), |_| true, )?)); } diff --git a/crates/cli/commands/src/db/get.rs b/crates/cli/commands/src/db/get.rs index e9fc034519f5..8f9a5f1d322e 100644 --- a/crates/cli/commands/src/db/get.rs +++ b/crates/cli/commands/src/db/get.rs @@ -2,7 +2,9 @@ use alloy_consensus::Header; use 
alloy_primitives::{hex, BlockHash}; use clap::Parser; use reth_db::{ - static_file::{ColumnSelectorOne, ColumnSelectorTwo, HeaderMask, ReceiptMask, TransactionMask}, + static_file::{ + ColumnSelectorOne, ColumnSelectorTwo, HeaderWithHashMask, ReceiptMask, TransactionMask, + }, tables, RawKey, RawTable, Receipts, TableViewer, Transactions, }; use reth_db_api::table::{Decompress, DupSort, Table}; @@ -61,7 +63,7 @@ impl Command { Subcommand::StaticFile { segment, key, raw } => { let (key, mask): (u64, _) = match segment { StaticFileSegment::Headers => { - (table_key::(&key)?, >::MASK) + (table_key::(&key)?, >::MASK) } StaticFileSegment::Transactions => ( table_key::(&key)?, diff --git a/crates/stages/stages/src/stages/bodies.rs b/crates/stages/stages/src/stages/bodies.rs index e541b9081040..b90729c71311 100644 --- a/crates/stages/stages/src/stages/bodies.rs +++ b/crates/stages/stages/src/stages/bodies.rs @@ -523,9 +523,9 @@ mod tests { }, }; use alloy_consensus::Header; - use alloy_primitives::{BlockHash, BlockNumber, TxNumber, B256}; + use alloy_primitives::{BlockNumber, TxNumber, B256}; use futures_util::Stream; - use reth_db::{static_file::HeaderMask, tables}; + use reth_db::{static_file::HeaderWithHashMask, tables}; use reth_db_api::{ cursor::DbCursorRO, models::{StoredBlockBodyIndices, StoredBlockOmmers}, @@ -813,7 +813,7 @@ mod tests { for header in static_file_provider.fetch_range_iter( StaticFileSegment::Headers, *range.start()..*range.end() + 1, - |cursor, number| cursor.get_two::>(number.into()), + |cursor, number| cursor.get_two::>(number.into()), )? { let (header, hash) = header?; self.headers.push_back(SealedHeader::new(header, hash)); diff --git a/crates/storage/db/Cargo.toml b/crates/storage/db/Cargo.toml index af72bc43f7e9..5ff9fb43a3d9 100644 --- a/crates/storage/db/Cargo.toml +++ b/crates/storage/db/Cargo.toml @@ -48,7 +48,6 @@ page_size = { version = "0.6.0", optional = true } thiserror.workspace = true tempfile = { workspace = true, optional = true } derive_more.workspace = true -paste.workspace = true rustc-hash = { workspace = true, optional = true } sysinfo = { version = "0.31", default-features = false, features = ["system"] } parking_lot = { workspace = true, optional = true } diff --git a/crates/storage/db/src/static_file/mask.rs b/crates/storage/db/src/static_file/mask.rs index f5d35a193d70..38831ea34cab 100644 --- a/crates/storage/db/src/static_file/mask.rs +++ b/crates/storage/db/src/static_file/mask.rs @@ -1,38 +1,5 @@ use reth_db_api::table::Decompress; -/// Generic Mask helper struct for selecting specific column values to read and decompress. -/// -/// #### Explanation: -/// -/// A `NippyJar` static file row can contain multiple column values. To specify the column values -/// to be read, a mask is utilized. -/// -/// For example, a static file with three columns, if the first and last columns are queried, the -/// mask `0b101` would be passed. To select only the second column, the mask `0b010` would be used. -/// -/// Since each static file has its own column distribution, different wrapper types are necessary. -/// For instance, `B256` might be the third column in the `Header` segment, while being the second -/// column in another segment. Hence, `Mask` would only be applicable to one of these -/// scenarios. -/// -/// Alongside, the column selector traits (eg. [`ColumnSelectorOne`]) this provides a structured way -/// to tie the types to be decoded to the mask necessary to query them. 
-#[derive(Debug)] -pub struct Mask(std::marker::PhantomData<(FIRST, SECOND, THIRD)>); - -macro_rules! add_segments { - ($($segment:tt),+) => { - paste::paste! { - $( - #[doc = concat!("Mask for ", stringify!($segment), " static file segment. See [`Mask`] for more.")] - #[derive(Debug)] - pub struct [<$segment Mask>](Mask); - )+ - } - }; -} -add_segments!(Header, Receipt, Transaction); - /// Trait for specifying a mask to select one column value. pub trait ColumnSelectorOne { /// First desired column value @@ -66,21 +33,45 @@ pub trait ColumnSelectorThree { #[macro_export] /// Add mask to select `N` column values from a specific static file segment row. macro_rules! add_static_file_mask { - ($mask_struct:tt, $type1:ty, $mask:expr) => { - impl ColumnSelectorOne for $mask_struct<$type1> { + ($(#[$attr:meta])* $mask_struct:ident $(<$generic:ident>)?, $type1:ty, $mask:expr) => { + $(#[$attr])* + #[derive(Debug)] + pub struct $mask_struct$(<$generic>)?$((std::marker::PhantomData<$generic>))?; + + impl$(<$generic>)? ColumnSelectorOne for $mask_struct$(<$generic>)? + where + $type1: Send + Sync + std::fmt::Debug + reth_db_api::table::Decompress, + { type FIRST = $type1; const MASK: usize = $mask; } }; - ($mask_struct:tt, $type1:ty, $type2:ty, $mask:expr) => { - impl ColumnSelectorTwo for $mask_struct<$type1, $type2> { + ($(#[$attr:meta])* $mask_struct:ident $(<$generic:ident>)?, $type1:ty, $type2:ty, $mask:expr) => { + $(#[$attr])* + #[derive(Debug)] + pub struct $mask_struct$(<$generic>)?$((std::marker::PhantomData<$generic>))?; + + impl$(<$generic>)? ColumnSelectorTwo for $mask_struct$(<$generic>)? + where + $type1: Send + Sync + std::fmt::Debug + reth_db_api::table::Decompress, + $type2: Send + Sync + std::fmt::Debug + reth_db_api::table::Decompress, + { type FIRST = $type1; type SECOND = $type2; const MASK: usize = $mask; } }; - ($mask_struct:tt, $type1:ty, $type2:ty, $type3:ty, $mask:expr) => { - impl ColumnSelectorThree for $mask_struct<$type1, $type2, $type3> { + ($(#[$attr:meta])* $mask_struct:ident $(<$generic:ident>)?, $type1:ty, $type2:ty, $type3:ty, $mask:expr) => { + $(#[$attr])* + #[derive(Debug)] + pub struct $mask_struct$(<$generic>)?$((std::marker::PhantomData<$generic>))?; + + impl$(<$generic>)? ColumnSelectorThree for $mask_struct$(<$generic>)? 
+ where + $type1: Send + Sync + std::fmt::Debug + reth_db_api::table::Decompress, + $type2: Send + Sync + std::fmt::Debug + reth_db_api::table::Decompress, + $type3: Send + Sync + std::fmt::Debug + reth_db_api::table::Decompress, + { type FIRST = $type1; type SECOND = $type2; type THIRD = $type3; diff --git a/crates/storage/db/src/static_file/masks.rs b/crates/storage/db/src/static_file/masks.rs index 405606389bad..17833e7ee293 100644 --- a/crates/storage/db/src/static_file/masks.rs +++ b/crates/storage/db/src/static_file/masks.rs @@ -1,23 +1,44 @@ -use super::{ReceiptMask, TransactionMask}; use crate::{ add_static_file_mask, - static_file::mask::{ColumnSelectorOne, ColumnSelectorTwo, HeaderMask}, - HeaderTerminalDifficulties, RawValue, Receipts, Transactions, + static_file::mask::{ColumnSelectorOne, ColumnSelectorTwo}, + HeaderTerminalDifficulties, }; -use alloy_consensus::Header; use alloy_primitives::BlockHash; use reth_db_api::table::Table; // HEADER MASKS -add_static_file_mask!(HeaderMask, Header, 0b001); -add_static_file_mask!(HeaderMask, ::Value, 0b010); -add_static_file_mask!(HeaderMask, BlockHash, 0b100); -add_static_file_mask!(HeaderMask, Header, BlockHash, 0b101); -add_static_file_mask!(HeaderMask, ::Value, BlockHash, 0b110); +add_static_file_mask! { + #[doc = "Mask for selecting a single header from Headers static file segment"] + HeaderMask, H, 0b001 +} +add_static_file_mask! { + #[doc = "Mask for selecting a total difficulty value from Headers static file segment"] + TotalDifficultyMask, ::Value, 0b010 +} +add_static_file_mask! { + #[doc = "Mask for selecting a block hash value from Headers static file segment"] + BlockHashMask, BlockHash, 0b100 +} +add_static_file_mask! { + #[doc = "Mask for selecting a header along with block hash from Headers static file segment"] + HeaderWithHashMask, H, BlockHash, 0b101 +} +add_static_file_mask! { + #[doc = "Mask for selecting a total difficulty along with block hash from Headers static file segment"] + TDWithHashMask, + ::Value, + BlockHash, + 0b110 +} // RECEIPT MASKS -add_static_file_mask!(ReceiptMask, ::Value, 0b1); +add_static_file_mask! { + #[doc = "Mask for selecting a single receipt from Receipts static file segment"] + ReceiptMask, R, 0b1 +} // TRANSACTION MASKS -add_static_file_mask!(TransactionMask, ::Value, 0b1); -add_static_file_mask!(TransactionMask, RawValue<::Value>, 0b1); +add_static_file_mask! { + #[doc = "Mask for selecting a single transaction from Transactions static file segment"] + TransactionMask, T, 0b1 +} diff --git a/crates/storage/db/src/static_file/mod.rs b/crates/storage/db/src/static_file/mod.rs index 071835f566bb..8491bd6ed779 100644 --- a/crates/storage/db/src/static_file/mod.rs +++ b/crates/storage/db/src/static_file/mod.rs @@ -17,6 +17,7 @@ use reth_primitives::{ }; mod masks; +pub use masks::*; /// Alias type for a map of [`StaticFileSegment`] and sorted lists of existing static file ranges. 
type SortedStaticFiles = diff --git a/crates/storage/provider/src/providers/static_file/jar.rs b/crates/storage/provider/src/providers/static_file/jar.rs index b3ff20d91978..9bde4a5f7603 100644 --- a/crates/storage/provider/src/providers/static_file/jar.rs +++ b/crates/storage/provider/src/providers/static_file/jar.rs @@ -10,8 +10,10 @@ use alloy_consensus::Header; use alloy_eips::BlockHashOrNumber; use alloy_primitives::{Address, BlockHash, BlockNumber, TxHash, TxNumber, B256, U256}; use reth_chainspec::ChainInfo; -use reth_db::static_file::{HeaderMask, ReceiptMask, StaticFileCursor, TransactionMask}; -use reth_db_api::models::CompactU256; +use reth_db::static_file::{ + BlockHashMask, HeaderMask, HeaderWithHashMask, ReceiptMask, StaticFileCursor, TDWithHashMask, + TotalDifficultyMask, TransactionMask, +}; use reth_node_types::NodePrimitives; use reth_primitives::{ Receipt, SealedHeader, TransactionMeta, TransactionSigned, TransactionSignedNoHash, @@ -90,7 +92,7 @@ impl HeaderProvider for StaticFileJarProvider<'_, N> { fn header(&self, block_hash: &BlockHash) -> ProviderResult> { Ok(self .cursor()? - .get_two::>(block_hash.into())? + .get_two::>(block_hash.into())? .filter(|(_, hash)| hash == block_hash) .map(|(header, _)| header)) } @@ -102,13 +104,13 @@ impl HeaderProvider for StaticFileJarProvider<'_, N> { fn header_td(&self, block_hash: &BlockHash) -> ProviderResult> { Ok(self .cursor()? - .get_two::>(block_hash.into())? + .get_two::(block_hash.into())? .filter(|(_, hash)| hash == block_hash) .map(|(td, _)| td.into())) } fn header_td_by_number(&self, num: BlockNumber) -> ProviderResult> { - Ok(self.cursor()?.get_one::>(num.into())?.map(Into::into)) + Ok(self.cursor()?.get_one::(num.into())?.map(Into::into)) } fn headers_range(&self, range: impl RangeBounds) -> ProviderResult> { @@ -129,7 +131,7 @@ impl HeaderProvider for StaticFileJarProvider<'_, N> { fn sealed_header(&self, number: BlockNumber) -> ProviderResult> { Ok(self .cursor()? - .get_two::>(number.into())? + .get_two::>(number.into())? .map(|(header, hash)| SealedHeader::new(header, hash))) } @@ -145,7 +147,7 @@ impl HeaderProvider for StaticFileJarProvider<'_, N> { for number in range { if let Some((header, hash)) = - cursor.get_two::>(number.into())? + cursor.get_two::>(number.into())? { let sealed = SealedHeader::new(header, hash); if !predicate(&sealed) { @@ -160,7 +162,7 @@ impl HeaderProvider for StaticFileJarProvider<'_, N> { impl BlockHashReader for StaticFileJarProvider<'_, N> { fn block_hash(&self, number: u64) -> ProviderResult> { - self.cursor()?.get_one::>(number.into()) + self.cursor()?.get_one::(number.into()) } fn canonical_hashes_range( @@ -172,7 +174,7 @@ impl BlockHashReader for StaticFileJarProvider<'_, N> { let mut hashes = Vec::with_capacity((end - start) as usize); for number in start..end { - if let Some(hash) = cursor.get_one::>(number.into())? { + if let Some(hash) = cursor.get_one::(number.into())? { hashes.push(hash) } } @@ -200,7 +202,7 @@ impl BlockNumReader for StaticFileJarProvider<'_, N> { let mut cursor = self.cursor()?; Ok(cursor - .get_one::>((&hash).into())? + .get_one::((&hash).into())? 
.and_then(|res| (res == hash).then(|| cursor.number()).flatten())) } } diff --git a/crates/storage/provider/src/providers/static_file/manager.rs b/crates/storage/provider/src/providers/static_file/manager.rs index 7bf0c49893e7..8ecc33240b4e 100644 --- a/crates/storage/provider/src/providers/static_file/manager.rs +++ b/crates/storage/provider/src/providers/static_file/manager.rs @@ -19,14 +19,14 @@ use parking_lot::RwLock; use reth_chainspec::{ChainInfo, ChainSpecProvider}; use reth_db::{ lockfile::StorageLock, - static_file::{iter_static_files, HeaderMask, ReceiptMask, StaticFileCursor, TransactionMask}, + static_file::{ + iter_static_files, BlockHashMask, HeaderMask, HeaderWithHashMask, ReceiptMask, + StaticFileCursor, TDWithHashMask, TransactionMask, + }, tables, }; use reth_db_api::{ - cursor::DbCursorRO, - models::{CompactU256, StoredBlockBodyIndices}, - table::Table, - transaction::DbTx, + cursor::DbCursorRO, models::StoredBlockBodyIndices, table::Table, transaction::DbTx, }; use reth_nippy_jar::{NippyJar, NippyJarChecker, CONFIG_FILE_EXTENSION}; use reth_node_types::NodePrimitives; @@ -1236,7 +1236,7 @@ impl HeaderProvider for StaticFileProvider { self.find_static_file(StaticFileSegment::Headers, |jar_provider| { Ok(jar_provider .cursor()? - .get_two::>(block_hash.into())? + .get_two::>(block_hash.into())? .and_then(|(header, hash)| { if &hash == block_hash { return Some(header) @@ -1262,7 +1262,7 @@ impl HeaderProvider for StaticFileProvider { self.find_static_file(StaticFileSegment::Headers, |jar_provider| { Ok(jar_provider .cursor()? - .get_two::>(block_hash.into())? + .get_two::(block_hash.into())? .and_then(|(td, hash)| (&hash == block_hash).then_some(td.0))) }) } @@ -1310,7 +1310,7 @@ impl HeaderProvider for StaticFileProvider { to_range(range), |cursor, number| { Ok(cursor - .get_two::>(number.into())? + .get_two::>(number.into())? .map(|(header, hash)| SealedHeader::new(header, hash))) }, predicate, @@ -1331,7 +1331,7 @@ impl BlockHashReader for StaticFileProvider { self.fetch_range_with_predicate( StaticFileSegment::Headers, start..end, - |cursor, number| cursor.get_one::>(number.into()), + |cursor, number| cursor.get_one::(number.into()), |_| true, ) } From 0416550c5374b511d49546dea76ea84c53a763a8 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 22 Nov 2024 16:53:42 +0100 Subject: [PATCH 126/156] chore: move traits for safe and finalized block read/write (#12786) --- .../provider/src/traits/finalized_block.rs | 23 ------------------- crates/storage/provider/src/traits/mod.rs | 3 --- crates/storage/storage-api/src/block.rs | 21 +++++++++++++++++ 3 files changed, 21 insertions(+), 26 deletions(-) delete mode 100644 crates/storage/provider/src/traits/finalized_block.rs diff --git a/crates/storage/provider/src/traits/finalized_block.rs b/crates/storage/provider/src/traits/finalized_block.rs deleted file mode 100644 index 98a6d9d0e343..000000000000 --- a/crates/storage/provider/src/traits/finalized_block.rs +++ /dev/null @@ -1,23 +0,0 @@ -use alloy_primitives::BlockNumber; -use reth_errors::ProviderResult; - -/// Functionality to read the last known chain blocks from the database. -pub trait ChainStateBlockReader: Send + Sync { - /// Returns the last finalized block number. - /// - /// If no finalized block has been written yet, this returns `None`. - fn last_finalized_block_number(&self) -> ProviderResult>; - /// Returns the last safe block number. - /// - /// If no safe block has been written yet, this returns `None`. 
- fn last_safe_block_number(&self) -> ProviderResult>; -} - -/// Functionality to write the last known chain blocks to the database. -pub trait ChainStateBlockWriter: Send + Sync { - /// Saves the given finalized block number in the DB. - fn save_finalized_block_number(&self, block_number: BlockNumber) -> ProviderResult<()>; - - /// Saves the given safe block number in the DB. - fn save_safe_block_number(&self, block_number: BlockNumber) -> ProviderResult<()>; -} diff --git a/crates/storage/provider/src/traits/mod.rs b/crates/storage/provider/src/traits/mod.rs index 69f053936bbc..a772204d0c19 100644 --- a/crates/storage/provider/src/traits/mod.rs +++ b/crates/storage/provider/src/traits/mod.rs @@ -25,6 +25,3 @@ pub use full::{FullProvider, FullRpcProvider}; mod tree_viewer; pub use tree_viewer::TreeViewer; - -mod finalized_block; -pub use finalized_block::{ChainStateBlockReader, ChainStateBlockWriter}; diff --git a/crates/storage/storage-api/src/block.rs b/crates/storage/storage-api/src/block.rs index 929f7ecca432..37c7857f1c25 100644 --- a/crates/storage/storage-api/src/block.rs +++ b/crates/storage/storage-api/src/block.rs @@ -267,3 +267,24 @@ pub trait BlockReaderIdExt: BlockReader + ReceiptProviderIdExt { /// Returns `None` if block is not found. fn ommers_by_id(&self, id: BlockId) -> ProviderResult>>; } + +/// Functionality to read the last known chain blocks from the database. +pub trait ChainStateBlockReader: Send + Sync { + /// Returns the last finalized block number. + /// + /// If no finalized block has been written yet, this returns `None`. + fn last_finalized_block_number(&self) -> ProviderResult>; + /// Returns the last safe block number. + /// + /// If no safe block has been written yet, this returns `None`. + fn last_safe_block_number(&self) -> ProviderResult>; +} + +/// Functionality to write the last known chain blocks to the database. +pub trait ChainStateBlockWriter: Send + Sync { + /// Saves the given finalized block number in the DB. + fn save_finalized_block_number(&self, block_number: BlockNumber) -> ProviderResult<()>; + + /// Saves the given safe block number in the DB. + fn save_safe_block_number(&self, block_number: BlockNumber) -> ProviderResult<()>; +} From 5b5e69b64e9934c8d76f2ba959933dec1f2d098f Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 22 Nov 2024 17:24:51 +0100 Subject: [PATCH 127/156] feat: add InMemorySize for more alloy types (#12788) --- crates/primitives-traits/src/size.rs | 41 ++++++++++++++++++++++++++-- 1 file changed, 39 insertions(+), 2 deletions(-) diff --git a/crates/primitives-traits/src/size.rs b/crates/primitives-traits/src/size.rs index 7d83a8af8c47..da3b39888f46 100644 --- a/crates/primitives-traits/src/size.rs +++ b/crates/primitives-traits/src/size.rs @@ -1,3 +1,6 @@ +use alloy_consensus::{Header, TxEip1559, TxEip2930, TxEip4844, TxEip7702, TxLegacy}; +use alloy_primitives::{PrimitiveSignature as Signature, TxHash}; + /// Trait for calculating a heuristic for the in-memory size of a struct. #[auto_impl::auto_impl(&, Arc, Box)] pub trait InMemorySize { @@ -5,8 +8,42 @@ pub trait InMemorySize { fn size(&self) -> usize; } -impl InMemorySize for alloy_consensus::Header { +impl InMemorySize for alloy_consensus::Signed { fn size(&self) -> usize { - self.size() + T::size(self.tx()) + core::mem::size_of::() + core::mem::size_of::() + } +} + +/// Implement `InMemorySize` for a type with a native `size` method. +macro_rules! 
impl_in_mem_size { + ($($ty:ty),*) => { + $( + impl InMemorySize for $ty { + fn size(&self) -> usize { + Self::size(self) + } + } + )* + }; +} + +impl_in_mem_size!(Header, TxLegacy, TxEip2930, TxEip1559, TxEip7702, TxEip4844); + +#[cfg(test)] +mod tests { + use super::*; + + // ensures we don't have any recursion in the `InMemorySize` impls + #[test] + fn no_in_memory_no_recursion() { + fn assert_no_recursion() { + let _ = T::default().size(); + } + assert_no_recursion::
<Header>
(); + assert_no_recursion::(); + assert_no_recursion::(); + assert_no_recursion::(); + assert_no_recursion::(); + assert_no_recursion::(); } } From 55d047fcf608cfcdfdf28c3ca01d1f7d7525516d Mon Sep 17 00:00:00 2001 From: Maks Date: Fri, 22 Nov 2024 19:02:46 +0100 Subject: [PATCH 128/156] Grammar and Typo Fixes in Documentation (#12789) --- crates/storage/libmdbx-rs/mdbx-sys/libmdbx/mdbx.h | 4 ++-- docs/design/metrics.md | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/storage/libmdbx-rs/mdbx-sys/libmdbx/mdbx.h b/crates/storage/libmdbx-rs/mdbx-sys/libmdbx/mdbx.h index 43960abfb4cd..dfcba66063a7 100644 --- a/crates/storage/libmdbx-rs/mdbx-sys/libmdbx/mdbx.h +++ b/crates/storage/libmdbx-rs/mdbx-sys/libmdbx/mdbx.h @@ -136,7 +136,7 @@ are only a few cases of changing data. | _DELETING_||| |Key is absent → Error since no such key |\ref mdbx_del() or \ref mdbx_replace()|Error \ref MDBX_NOTFOUND| |Key exist → Delete by key |\ref mdbx_del() with the parameter `data = NULL`|Deletion| -|Key exist → Delete by key with with data matching check|\ref mdbx_del() with the parameter `data` filled with the value which should be match for deletion|Deletion or \ref MDBX_NOTFOUND if the value does not match| +|Key exist → Delete by key with data matching check|\ref mdbx_del() with the parameter `data` filled with the value which should be match for deletion|Deletion or \ref MDBX_NOTFOUND if the value does not match| |Delete at the current cursor position |\ref mdbx_cursor_del() with \ref MDBX_CURRENT flag|Deletion| |Extract (read & delete) value by the key |\ref mdbx_replace() with zero flag and parameter `new_data = NULL`|Returning a deleted value| @@ -5264,7 +5264,7 @@ LIBMDBX_API int mdbx_dbi_sequence(MDBX_txn *txn, MDBX_dbi dbi, uint64_t *result, * This returns a comparison as if the two data items were keys in the * specified database. * - * \warning There ss a Undefined behavior if one of arguments is invalid. + * \warning There is a Undefined behavior if one of arguments is invalid. * * \param [in] txn A transaction handle returned by \ref mdbx_txn_begin(). * \param [in] dbi A database handle returned by \ref mdbx_dbi_open(). diff --git a/docs/design/metrics.md b/docs/design/metrics.md index 0ac1f71c90da..cc386a112516 100644 --- a/docs/design/metrics.md +++ b/docs/design/metrics.md @@ -42,7 +42,7 @@ There will only ever exist one description per metric `KeyName`; it is not possi The `metrics` crate provides three macros per metric variant: `register_!`, `!`, and `describe_!`. Prefer to use these where possible, since they generate the code necessary to register and update metrics under various conditions. - The `register_!` macro simply creates the metric and returns a handle to it (e.g. a `Counter`). These metric structs are thread-safe and cheap to clone. -- The `!` macro registers the metric if it does not exist, and updates it's value. +- The `!` macro registers the metric if it does not exist, and updates its value. - The `describe_!` macro adds an end-user description for the metric. How the metrics are exposed to the end-user is determined by the CLI. 
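For illustration, a minimal sketch of how these macros are typically used, assuming a recent `metrics` crate in which `counter!` registers the metric on first use and returns a handle, and `describe_counter!` attaches the end-user description; the metric name below is hypothetical:

```rust
use metrics::{counter, describe_counter};

/// Hypothetical helper: bump a counter each time a batch of transactions is processed.
fn record_processed_txs(count: u64) {
    // `counter!` registers the metric if it does not exist yet and returns a cheap,
    // thread-safe handle that can be used to update it.
    counter!("example_processed_transactions_total").increment(count);
}

/// Attach an end-user description once, e.g. during startup.
fn describe_example_metrics() {
    describe_counter!(
        "example_processed_transactions_total",
        "Total number of transactions processed by the example component"
    );
}
```

Gauges and histograms follow the same pattern via `gauge!` and `histogram!`; the exact macro surface depends on the `metrics` crate version in use.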
From 2d5256cb05b4952194ee513d3b7de4c90c883d3a Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 22 Nov 2024 20:14:58 +0100 Subject: [PATCH 129/156] chore: use existing transaction fns (#12793) --- crates/transaction-pool/src/traits.rs | 19 ++----------------- 1 file changed, 2 insertions(+), 17 deletions(-) diff --git a/crates/transaction-pool/src/traits.rs b/crates/transaction-pool/src/traits.rs index c45584c50a76..d0ec36cb07ee 100644 --- a/crates/transaction-pool/src/traits.rs +++ b/crates/transaction-pool/src/traits.rs @@ -1245,15 +1245,7 @@ impl PoolTransaction for EthPooledTransaction { /// /// This is also commonly referred to as the "Gas Fee Cap" (`GasFeeCap`). fn max_fee_per_gas(&self) -> u128 { - #[allow(unreachable_patterns)] - match &self.transaction.transaction { - Transaction::Legacy(tx) => tx.gas_price, - Transaction::Eip2930(tx) => tx.gas_price, - Transaction::Eip1559(tx) => tx.max_fee_per_gas, - Transaction::Eip4844(tx) => tx.max_fee_per_gas, - Transaction::Eip7702(tx) => tx.max_fee_per_gas, - _ => 0, - } + self.transaction.transaction.max_fee_per_gas() } fn access_list(&self) -> Option<&AccessList> { @@ -1264,14 +1256,7 @@ impl PoolTransaction for EthPooledTransaction { /// /// This will return `None` for non-EIP1559 transactions fn max_priority_fee_per_gas(&self) -> Option { - #[allow(unreachable_patterns, clippy::match_same_arms)] - match &self.transaction.transaction { - Transaction::Legacy(_) | Transaction::Eip2930(_) => None, - Transaction::Eip1559(tx) => Some(tx.max_priority_fee_per_gas), - Transaction::Eip4844(tx) => Some(tx.max_priority_fee_per_gas), - Transaction::Eip7702(tx) => Some(tx.max_priority_fee_per_gas), - _ => None, - } + self.transaction.transaction.max_priority_fee_per_gas() } fn max_fee_per_blob_gas(&self) -> Option { From 71fd63d9ac658c19a859cbe30f5ca32628b7b845 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 22 Nov 2024 20:41:11 +0100 Subject: [PATCH 130/156] feat: add try_into_pooled_eip4844 (#12792) --- crates/transaction-pool/src/test_utils/mock.rs | 8 ++++++++ crates/transaction-pool/src/traits.rs | 15 +++++++++++++++ 2 files changed, 23 insertions(+) diff --git a/crates/transaction-pool/src/test_utils/mock.rs b/crates/transaction-pool/src/test_utils/mock.rs index 849bde265482..afa1638c8516 100644 --- a/crates/transaction-pool/src/test_utils/mock.rs +++ b/crates/transaction-pool/src/test_utils/mock.rs @@ -770,6 +770,14 @@ impl EthPoolTransaction for MockTransaction { } } + fn try_into_pooled_eip4844(self, sidecar: Arc) -> Option { + Self::Pooled::try_from_blob_transaction( + self.into_consensus(), + Arc::unwrap_or_clone(sidecar), + ) + .ok() + } + fn validate_blob( &self, _blob: &BlobTransactionSidecar, diff --git a/crates/transaction-pool/src/traits.rs b/crates/transaction-pool/src/traits.rs index d0ec36cb07ee..f4946ed9d16e 100644 --- a/crates/transaction-pool/src/traits.rs +++ b/crates/transaction-pool/src/traits.rs @@ -1093,6 +1093,13 @@ pub trait EthPoolTransaction: /// Returns the number of blobs this transaction has. fn blob_count(&self) -> usize; + /// A specialization for the EIP-4844 transaction type. + /// Tries to reattach the blob sidecar to the transaction. + /// + /// This returns an option, but callers should ensure that the transaction is an EIP-4844 + /// transaction: [`PoolTransaction::is_eip4844`]. + fn try_into_pooled_eip4844(self, sidecar: Arc) -> Option; + /// Validates the blob sidecar of the transaction with the given settings. 
fn validate_blob( &self, @@ -1324,6 +1331,14 @@ impl EthPoolTransaction for EthPooledTransaction { } } + fn try_into_pooled_eip4844(self, sidecar: Arc) -> Option { + PooledTransactionsElementEcRecovered::try_from_blob_transaction( + self.into_consensus(), + Arc::unwrap_or_clone(sidecar), + ) + .ok() + } + fn validate_blob( &self, sidecar: &BlobTransactionSidecar, From 9a7a733a087a01b243a12890e6630b4e0312638d Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 22 Nov 2024 20:41:46 +0100 Subject: [PATCH 131/156] feat: impl InMemorySize for PooledTx (#12791) --- crates/primitives-traits/src/size.rs | 19 ++++++++++++++++++- crates/primitives/src/transaction/pooled.rs | 13 +++++++++++++ crates/primitives/src/transaction/sidecar.rs | 11 +++++++++++ 3 files changed, 42 insertions(+), 1 deletion(-) diff --git a/crates/primitives-traits/src/size.rs b/crates/primitives-traits/src/size.rs index da3b39888f46..4d721dd00b30 100644 --- a/crates/primitives-traits/src/size.rs +++ b/crates/primitives-traits/src/size.rs @@ -10,15 +10,32 @@ pub trait InMemorySize { impl InMemorySize for alloy_consensus::Signed { fn size(&self) -> usize { - T::size(self.tx()) + core::mem::size_of::() + core::mem::size_of::() + T::size(self.tx()) + self.signature().size() + self.hash().size() } } +/// Implement `InMemorySize` for a type with `size_of` +macro_rules! impl_in_mem_size_size_of { + ($($ty:ty),*) => { + $( + impl InMemorySize for $ty { + #[inline] + fn size(&self) -> usize { + core::mem::size_of::() + } + } + )* + }; +} + +impl_in_mem_size_size_of!(Signature, TxHash); + /// Implement `InMemorySize` for a type with a native `size` method. macro_rules! impl_in_mem_size { ($($ty:ty),*) => { $( impl InMemorySize for $ty { + #[inline] fn size(&self) -> usize { Self::size(self) } diff --git a/crates/primitives/src/transaction/pooled.rs b/crates/primitives/src/transaction/pooled.rs index bb8406147034..09111c61a170 100644 --- a/crates/primitives/src/transaction/pooled.rs +++ b/crates/primitives/src/transaction/pooled.rs @@ -20,6 +20,7 @@ use alloy_primitives::{ use alloy_rlp::{Decodable, Encodable, Error as RlpError, Header}; use bytes::Buf; use derive_more::{AsRef, Deref}; +use reth_primitives_traits::InMemorySize; use serde::{Deserialize, Serialize}; /// A response to `GetPooledTransactions`. 
This can include either a blob transaction, or a @@ -559,6 +560,18 @@ impl alloy_consensus::Transaction for PooledTransactionsElement { } } +impl InMemorySize for PooledTransactionsElement { + fn size(&self) -> usize { + match self { + Self::Legacy(tx) => tx.size(), + Self::Eip2930(tx) => tx.size(), + Self::Eip1559(tx) => tx.size(), + Self::Eip7702(tx) => tx.size(), + Self::BlobTransaction(tx) => tx.size(), + } + } +} + impl TryFrom for PooledTransactionsElement { type Error = TransactionConversionError; diff --git a/crates/primitives/src/transaction/sidecar.rs b/crates/primitives/src/transaction/sidecar.rs index c1b1b029afcb..2cf04bc8e741 100644 --- a/crates/primitives/src/transaction/sidecar.rs +++ b/crates/primitives/src/transaction/sidecar.rs @@ -4,6 +4,7 @@ use crate::{Transaction, TransactionSigned}; use alloy_consensus::{transaction::RlpEcdsaTx, Signed, TxEip4844WithSidecar}; use alloy_eips::eip4844::BlobTransactionSidecar; use derive_more::Deref; +use reth_primitives_traits::InMemorySize; use serde::{Deserialize, Serialize}; /// A response to `GetPooledTransactions` that includes blob data, their commitments, and their @@ -73,6 +74,16 @@ impl BlobTransaction { } } +impl InMemorySize for BlobTransaction { + fn size(&self) -> usize { + // TODO(mattsse): replace with next alloy bump + self.0.hash().size() + + self.0.signature().size() + + self.0.tx().tx().size() + + self.0.tx().sidecar.size() + } +} + #[cfg(all(test, feature = "c-kzg"))] mod tests { use super::*; From 6a97a6dfe4c01c94c0e412641c46333aa6d2cf91 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 22 Nov 2024 20:53:46 +0100 Subject: [PATCH 132/156] chore: include payload id in debug msg (#12795) --- crates/ethereum/payload/src/lib.rs | 2 +- crates/optimism/payload/src/builder.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/ethereum/payload/src/lib.rs b/crates/ethereum/payload/src/lib.rs index f38c93613dc8..24312fecbf48 100644 --- a/crates/ethereum/payload/src/lib.rs +++ b/crates/ethereum/payload/src/lib.rs @@ -451,7 +451,7 @@ where }; let sealed_block = Arc::new(block.seal_slow()); - debug!(target: "payload_builder", sealed_block_header = ?sealed_block.header, "sealed built block"); + debug!(target: "payload_builder", id=%attributes.id, sealed_block_header = ?sealed_block.header, "sealed built block"); // create the executed block data let executed = ExecutedBlock { diff --git a/crates/optimism/payload/src/builder.rs b/crates/optimism/payload/src/builder.rs index eaf9e86e7738..ec7668768364 100644 --- a/crates/optimism/payload/src/builder.rs +++ b/crates/optimism/payload/src/builder.rs @@ -421,7 +421,7 @@ where }; let sealed_block = Arc::new(block.seal_slow()); - debug!(target: "payload_builder", sealed_block_header = ?sealed_block.header, "sealed built block"); + debug!(target: "payload_builder", id=%ctx.attributes().payload_id(), sealed_block_header = ?sealed_block.header, "sealed built block"); // create the executed block data let executed = ExecutedBlock { From 36db1c24077f7a500048c545c4c5cefb6a27224a Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 22 Nov 2024 21:13:07 +0100 Subject: [PATCH 133/156] chore: simplify cost calc (#12796) --- crates/transaction-pool/src/traits.rs | 30 +++++++-------------------- 1 file changed, 8 insertions(+), 22 deletions(-) diff --git a/crates/transaction-pool/src/traits.rs b/crates/transaction-pool/src/traits.rs index f4946ed9d16e..27bed950c501 100644 --- a/crates/transaction-pool/src/traits.rs +++ b/crates/transaction-pool/src/traits.rs @@ 
-1142,34 +1142,20 @@ impl EthPooledTransaction { pub fn new(transaction: TransactionSignedEcRecovered, encoded_length: usize) -> Self { let mut blob_sidecar = EthBlobTransactionSidecar::None; - #[allow(unreachable_patterns)] - let gas_cost = match &transaction.transaction { - Transaction::Legacy(t) => { - U256::from(t.gas_price).saturating_mul(U256::from(t.gas_limit)) - } - Transaction::Eip2930(t) => { - U256::from(t.gas_price).saturating_mul(U256::from(t.gas_limit)) - } - Transaction::Eip1559(t) => { - U256::from(t.max_fee_per_gas).saturating_mul(U256::from(t.gas_limit)) - } - Transaction::Eip4844(t) => { - blob_sidecar = EthBlobTransactionSidecar::Missing; - U256::from(t.max_fee_per_gas).saturating_mul(U256::from(t.gas_limit)) - } - Transaction::Eip7702(t) => { - U256::from(t.max_fee_per_gas).saturating_mul(U256::from(t.gas_limit)) - } - _ => U256::ZERO, - }; - let mut cost = transaction.value(); - cost = cost.saturating_add(gas_cost); + let gas_cost = U256::from(transaction.transaction.max_fee_per_gas()) + .saturating_mul(U256::from(transaction.transaction.gas_limit())); + + let mut cost = gas_cost.saturating_add(transaction.value()); if let Some(blob_tx) = transaction.as_eip4844() { // Add max blob cost using saturating math to avoid overflow cost = cost.saturating_add(U256::from( blob_tx.max_fee_per_blob_gas.saturating_mul(blob_tx.blob_gas() as u128), )); + + // because the blob sidecar is not included in this transaction variant, mark it as + // missing + blob_sidecar = EthBlobTransactionSidecar::Missing; } Self { transaction, cost, encoded_length, blob_sidecar } From 5db3ad1a674e0ccef7b3e3d33113df62e1df4d49 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Sat, 23 Nov 2024 03:04:42 +0400 Subject: [PATCH 134/156] feat: add Transaction AT to `TransactionsProvider` (#12794) --- Cargo.lock | 3 + crates/node/types/src/lib.rs | 3 + crates/optimism/cli/src/ovm_file_codec.rs | 1 + crates/optimism/rpc/src/eth/receipt.rs | 3 +- crates/optimism/rpc/src/eth/transaction.rs | 1 + crates/primitives/src/transaction/mod.rs | 56 ++++++++--- crates/prune/prune/Cargo.toml | 3 + .../src/segments/user/transaction_lookup.rs | 3 +- crates/rpc/rpc-builder/src/lib.rs | 8 +- crates/rpc/rpc-eth-api/src/helpers/receipt.rs | 9 +- .../rpc-eth-api/src/helpers/transaction.rs | 20 +++- crates/rpc/rpc-eth-api/src/types.rs | 35 ++++--- crates/rpc/rpc-eth-types/Cargo.toml | 1 + crates/rpc/rpc-eth-types/src/logs_utils.rs | 4 +- crates/rpc/rpc-eth-types/src/transaction.rs | 31 +++--- crates/rpc/rpc-types-compat/src/block.rs | 8 +- .../rpc/rpc-types-compat/src/transaction.rs | 16 +-- crates/rpc/rpc/src/eth/helpers/receipt.rs | 3 +- crates/stages/stages/Cargo.toml | 1 + crates/stages/stages/src/stages/tx_lookup.rs | 9 +- .../src/providers/blockchain_provider.rs | 26 +++-- .../provider/src/providers/consistent.rs | 58 ++++++++--- .../provider/src/providers/database/mod.rs | 20 ++-- .../src/providers/database/provider.rs | 98 ++++++++----------- crates/storage/provider/src/providers/mod.rs | 28 +++--- .../provider/src/providers/static_file/jar.rs | 65 ++++++------ .../src/providers/static_file/manager.rs | 52 +++++----- .../provider/src/providers/static_file/mod.rs | 2 +- .../storage/provider/src/test_utils/mock.rs | 24 +++-- .../storage/provider/src/test_utils/noop.rs | 18 ++-- crates/storage/provider/src/traits/full.rs | 10 +- .../storage/storage-api/src/transactions.rs | 31 +++--- examples/db-access/src/main.rs | 10 +- 33 files changed, 389 insertions(+), 271 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 
e4a6687defba..0f3d34cfba5f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8655,6 +8655,7 @@ dependencies = [ name = "reth-prune" version = "1.1.2" dependencies = [ + "alloy-eips", "alloy-primitives", "assert_matches", "itertools 0.13.0", @@ -8988,6 +8989,7 @@ dependencies = [ "reth-execution-types", "reth-metrics", "reth-primitives", + "reth-primitives-traits", "reth-revm", "reth-rpc-server-types", "reth-rpc-types-compat", @@ -9062,6 +9064,7 @@ name = "reth-stages" version = "1.1.2" dependencies = [ "alloy-consensus", + "alloy-eips", "alloy-primitives", "alloy-rlp", "assert_matches", diff --git a/crates/node/types/src/lib.rs b/crates/node/types/src/lib.rs index 2e5558a33bfc..a23b9bfe4146 100644 --- a/crates/node/types/src/lib.rs +++ b/crates/node/types/src/lib.rs @@ -238,3 +238,6 @@ pub type HeaderTy = <::Primitives as NodePrimitives>::BlockHe /// Helper adapter type for accessing [`NodePrimitives::BlockBody`] on [`NodeTypes`]. pub type BodyTy = <::Primitives as NodePrimitives>::BlockBody; + +/// Helper adapter type for accessing [`NodePrimitives::SignedTx`] on [`NodeTypes`]. +pub type TxTy = <::Primitives as NodePrimitives>::SignedTx; diff --git a/crates/optimism/cli/src/ovm_file_codec.rs b/crates/optimism/cli/src/ovm_file_codec.rs index b29d30093ecc..3d746d6d1e0d 100644 --- a/crates/optimism/cli/src/ovm_file_codec.rs +++ b/crates/optimism/cli/src/ovm_file_codec.rs @@ -250,6 +250,7 @@ impl Encodable2718 for TransactionSigned { Transaction::Deposit(deposit_tx) => deposit_tx.eip2718_encoded_length(), } } + fn encode_2718(&self, out: &mut dyn alloy_rlp::BufMut) { self.transaction.eip2718_encode(&self.signature, out) } diff --git a/crates/optimism/rpc/src/eth/receipt.rs b/crates/optimism/rpc/src/eth/receipt.rs index 2cc771d0e44b..5064c9ed5cfa 100644 --- a/crates/optimism/rpc/src/eth/receipt.rs +++ b/crates/optimism/rpc/src/eth/receipt.rs @@ -11,7 +11,7 @@ use reth_optimism_chainspec::OpChainSpec; use reth_optimism_evm::RethL1BlockInfo; use reth_optimism_forks::OpHardforks; use reth_primitives::{Receipt, TransactionMeta, TransactionSigned, TxType}; -use reth_provider::ChainSpecProvider; +use reth_provider::{ChainSpecProvider, TransactionsProvider}; use reth_rpc_eth_api::{helpers::LoadReceipt, FromEthApiError, RpcReceipt}; use reth_rpc_eth_types::{receipt::build_receipt, EthApiError}; @@ -21,6 +21,7 @@ impl LoadReceipt for OpEthApi where Self: Send + Sync, N: FullNodeComponents>, + Self::Provider: TransactionsProvider, { async fn build_transaction_receipt( &self, diff --git a/crates/optimism/rpc/src/eth/transaction.rs b/crates/optimism/rpc/src/eth/transaction.rs index dad151c41c40..19bcd31daccb 100644 --- a/crates/optimism/rpc/src/eth/transaction.rs +++ b/crates/optimism/rpc/src/eth/transaction.rs @@ -58,6 +58,7 @@ impl LoadTransaction for OpEthApi where Self: SpawnBlocking + FullEthApiTypes, N: RpcNodeCore, + Self::Pool: TransactionPool, { } diff --git a/crates/primitives/src/transaction/mod.rs b/crates/primitives/src/transaction/mod.rs index 2c639c7ffebd..cc966154c09c 100644 --- a/crates/primitives/src/transaction/mod.rs +++ b/crates/primitives/src/transaction/mod.rs @@ -1624,6 +1624,10 @@ impl Encodable2718 for TransactionSigned { fn encode_2718(&self, out: &mut dyn alloy_rlp::BufMut) { self.transaction.eip2718_encode(&self.signature, out) } + + fn trie_hash(&self) -> B256 { + self.hash() + } } impl Decodable2718 for TransactionSigned { @@ -1720,50 +1724,47 @@ impl<'a> arbitrary::Arbitrary<'a> for TransactionSigned { /// Signed transaction with recovered signer. 
#[derive(Debug, Clone, PartialEq, Hash, Eq, AsRef, Deref)] -pub struct TransactionSignedEcRecovered { +pub struct TransactionSignedEcRecovered { /// Signer of the transaction signer: Address, /// Signed transaction #[deref] #[as_ref] - signed_transaction: TransactionSigned, + signed_transaction: T, } // === impl TransactionSignedEcRecovered === -impl TransactionSignedEcRecovered { +impl TransactionSignedEcRecovered { /// Signer of transaction recovered from signature pub const fn signer(&self) -> Address { self.signer } /// Returns a reference to [`TransactionSigned`] - pub const fn as_signed(&self) -> &TransactionSigned { + pub const fn as_signed(&self) -> &T { &self.signed_transaction } /// Transform back to [`TransactionSigned`] - pub fn into_signed(self) -> TransactionSigned { + pub fn into_signed(self) -> T { self.signed_transaction } /// Dissolve Self to its component - pub fn to_components(self) -> (TransactionSigned, Address) { + pub fn to_components(self) -> (T, Address) { (self.signed_transaction, self.signer) } /// Create [`TransactionSignedEcRecovered`] from [`TransactionSigned`] and [`Address`] of the /// signer. #[inline] - pub const fn from_signed_transaction( - signed_transaction: TransactionSigned, - signer: Address, - ) -> Self { + pub const fn from_signed_transaction(signed_transaction: T, signer: Address) -> Self { Self { signed_transaction, signer } } } -impl Encodable for TransactionSignedEcRecovered { +impl Encodable for TransactionSignedEcRecovered { /// This encodes the transaction _with_ the signature, and an rlp header. /// /// Refer to docs for [`TransactionSigned::encode`] for details on the exact format. @@ -1776,9 +1777,9 @@ impl Encodable for TransactionSignedEcRecovered { } } -impl Decodable for TransactionSignedEcRecovered { +impl Decodable for TransactionSignedEcRecovered { fn decode(buf: &mut &[u8]) -> alloy_rlp::Result { - let signed_transaction = TransactionSigned::decode(buf)?; + let signed_transaction = T::decode(buf)?; let signer = signed_transaction .recover_signer() .ok_or(RlpError::Custom("Unable to recover decoded transaction signer."))?; @@ -1786,6 +1787,20 @@ impl Decodable for TransactionSignedEcRecovered { } } +/// Extension trait for [`SignedTransaction`] to convert it into [`TransactionSignedEcRecovered`]. +pub trait SignedTransactionIntoRecoveredExt: SignedTransaction { + /// Consumes the type, recover signer and return [`TransactionSignedEcRecovered`] _without + /// ensuring that the signature has a low `s` value_ (EIP-2). + /// + /// Returns `None` if the transaction's signature is invalid. + fn into_ecrecovered_unchecked(self) -> Option> { + let signer = self.recover_signer_unchecked()?; + Some(TransactionSignedEcRecovered::from_signed_transaction(self, signer)) + } +} + +impl SignedTransactionIntoRecoveredExt for T where T: SignedTransaction {} + /// Bincode-compatible transaction type serde implementations. #[cfg(feature = "serde-bincode-compat")] pub mod serde_bincode_compat { @@ -1991,6 +2006,21 @@ pub mod serde_bincode_compat { } } +/// Recovers a list of signers from a transaction list iterator. 
+/// +/// Returns `None`, if some transaction's signature is invalid +pub fn recover_signers<'a, I, T>(txes: I, num_txes: usize) -> Option> +where + T: SignedTransaction, + I: IntoParallelIterator + IntoIterator + Send, +{ + if num_txes < *PARALLEL_SENDER_RECOVERY_THRESHOLD { + txes.into_iter().map(|tx| tx.recover_signer()).collect() + } else { + txes.into_par_iter().map(|tx| tx.recover_signer()).collect() + } +} + #[cfg(test)] mod tests { use crate::{ diff --git a/crates/prune/prune/Cargo.toml b/crates/prune/prune/Cargo.toml index 4df9ace81338..41156d3e56b8 100644 --- a/crates/prune/prune/Cargo.toml +++ b/crates/prune/prune/Cargo.toml @@ -24,6 +24,9 @@ reth-config.workspace = true reth-prune-types.workspace = true reth-static-file-types.workspace = true +# ethereum +alloy-eips.workspace = true + # metrics reth-metrics.workspace = true metrics.workspace = true diff --git a/crates/prune/prune/src/segments/user/transaction_lookup.rs b/crates/prune/prune/src/segments/user/transaction_lookup.rs index ada4019302ef..ce9d90c291b7 100644 --- a/crates/prune/prune/src/segments/user/transaction_lookup.rs +++ b/crates/prune/prune/src/segments/user/transaction_lookup.rs @@ -3,6 +3,7 @@ use crate::{ segments::{PruneInput, Segment, SegmentOutput}, PrunerError, }; +use alloy_eips::eip2718::Encodable2718; use rayon::prelude::*; use reth_db::{tables, transaction::DbTxMut}; use reth_provider::{BlockReader, DBProvider, TransactionsProvider}; @@ -58,7 +59,7 @@ where let hashes = provider .transactions_by_tx_range(tx_range.clone())? .into_par_iter() - .map(|transaction| transaction.hash()) + .map(|transaction| transaction.trie_hash()) .collect::>(); // Number of transactions retrieved from the database should match the tx range count diff --git a/crates/rpc/rpc-builder/src/lib.rs b/crates/rpc/rpc-builder/src/lib.rs index 207bc9ec5be5..ccf19ed1a0bc 100644 --- a/crates/rpc/rpc-builder/src/lib.rs +++ b/crates/rpc/rpc-builder/src/lib.rs @@ -19,6 +19,7 @@ //! use alloy_consensus::Header; //! use reth_evm::{execute::BlockExecutorProvider, ConfigureEvm}; //! use reth_network_api::{NetworkInfo, Peers}; +//! use reth_primitives::TransactionSigned; //! use reth_provider::{AccountReader, CanonStateSubscriptions, ChangeSetReader, FullRpcProvider}; //! use reth_rpc::EthApi; //! use reth_rpc_builder::{ @@ -36,7 +37,8 @@ //! block_executor: BlockExecutor, //! consensus: Consensus, //! ) where -//! Provider: FullRpcProvider + AccountReader + ChangeSetReader, +//! Provider: +//! FullRpcProvider + AccountReader + ChangeSetReader, //! Pool: TransactionPool + Unpin + 'static, //! Network: NetworkInfo + Peers + Clone + 'static, //! Events: CanonStateSubscriptions + Clone + 'static, @@ -77,6 +79,7 @@ //! use reth_engine_primitives::EngineTypes; //! use reth_evm::{execute::BlockExecutorProvider, ConfigureEvm}; //! use reth_network_api::{NetworkInfo, Peers}; +//! use reth_primitives::TransactionSigned; //! use reth_provider::{AccountReader, CanonStateSubscriptions, ChangeSetReader, FullRpcProvider}; //! use reth_rpc::EthApi; //! use reth_rpc_api::EngineApiServer; @@ -109,7 +112,8 @@ //! block_executor: BlockExecutor, //! consensus: Consensus, //! ) where -//! Provider: FullRpcProvider + AccountReader + ChangeSetReader, +//! Provider: +//! FullRpcProvider + AccountReader + ChangeSetReader, //! Pool: TransactionPool + Unpin + 'static, //! Network: NetworkInfo + Peers + Clone + 'static, //! 
Events: CanonStateSubscriptions + Clone + 'static, diff --git a/crates/rpc/rpc-eth-api/src/helpers/receipt.rs b/crates/rpc/rpc-eth-api/src/helpers/receipt.rs index 48394f1cd6bb..7e1992017d84 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/receipt.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/receipt.rs @@ -2,18 +2,21 @@ //! loads receipt data w.r.t. network. use futures::Future; -use reth_primitives::{Receipt, TransactionMeta, TransactionSigned}; +use reth_primitives::{Receipt, TransactionMeta}; +use reth_provider::TransactionsProvider; use crate::{EthApiTypes, RpcNodeCoreExt, RpcReceipt}; /// Assembles transaction receipt data w.r.t to network. /// /// Behaviour shared by several `eth_` RPC methods, not exclusive to `eth_` receipts RPC methods. -pub trait LoadReceipt: EthApiTypes + RpcNodeCoreExt + Send + Sync { +pub trait LoadReceipt: + EthApiTypes + RpcNodeCoreExt + Send + Sync +{ /// Helper method for `eth_getBlockReceipts` and `eth_getTransactionReceipt`. fn build_transaction_receipt( &self, - tx: TransactionSigned, + tx: ::Transaction, meta: TransactionMeta, receipt: Receipt, ) -> impl Future, Self::Error>> + Send; diff --git a/crates/rpc/rpc-eth-api/src/helpers/transaction.rs b/crates/rpc/rpc-eth-api/src/helpers/transaction.rs index afe1c513b69f..d87a4855b1db 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/transaction.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/transaction.rs @@ -9,7 +9,9 @@ use alloy_primitives::{Address, Bytes, TxHash, B256}; use alloy_rpc_types_eth::{transaction::TransactionRequest, BlockNumberOrTag, TransactionInfo}; use futures::Future; use reth_primitives::{Receipt, SealedBlockWithSenders, TransactionMeta, TransactionSigned}; -use reth_provider::{BlockNumReader, BlockReaderIdExt, ReceiptProvider, TransactionsProvider}; +use reth_provider::{ + BlockNumReader, BlockReaderIdExt, ProviderTx, ReceiptProvider, TransactionsProvider, +}; use reth_rpc_eth_types::{ utils::{binary_search, recover_raw_transaction}, EthApiError, SignError, TransactionSource, @@ -60,10 +62,13 @@ pub trait EthTransactions: LoadTransaction { /// Checks the pool and state. /// /// Returns `Ok(None)` if no matching transaction was found. + #[expect(clippy::complexity)] fn transaction_by_hash( &self, hash: B256, - ) -> impl Future, Self::Error>> + Send { + ) -> impl Future< + Output = Result>>, Self::Error>, + > + Send { LoadTransaction::transaction_by_hash(self, hash) } @@ -148,11 +153,15 @@ pub trait EthTransactions: LoadTransaction { } /// Helper method that loads a transaction and its receipt. + #[expect(clippy::complexity)] fn load_transaction_and_receipt( &self, hash: TxHash, ) -> impl Future< - Output = Result, Self::Error>, + Output = Result< + Option<(ProviderTx, TransactionMeta, Receipt)>, + Self::Error, + >, > + Send where Self: 'static, @@ -477,10 +486,13 @@ pub trait LoadTransaction: /// Checks the pool and state. /// /// Returns `Ok(None)` if no matching transaction was found. 
+ #[expect(clippy::complexity)] fn transaction_by_hash( &self, hash: B256, - ) -> impl Future, Self::Error>> + Send { + ) -> impl Future< + Output = Result>>, Self::Error>, + > + Send { async move { // Try to find the transaction on disk let mut resp = self diff --git a/crates/rpc/rpc-eth-api/src/types.rs b/crates/rpc/rpc-eth-api/src/types.rs index 12ff090d37c1..994f9ac884d0 100644 --- a/crates/rpc/rpc-eth-api/src/types.rs +++ b/crates/rpc/rpc-eth-api/src/types.rs @@ -7,9 +7,11 @@ use std::{ use alloy_network::Network; use alloy_rpc_types_eth::Block; +use reth_primitives::TransactionSigned; +use reth_provider::TransactionsProvider; use reth_rpc_types_compat::TransactionCompat; -use crate::{AsEthApiError, FromEthApiError, FromEvmError}; +use crate::{AsEthApiError, FromEthApiError, FromEvmError, RpcNodeCore}; /// Network specific `eth` API types. pub trait EthApiTypes: Send + Sync + Clone { @@ -43,22 +45,27 @@ pub type RpcReceipt = ::ReceiptResponse; pub type RpcError = ::Error; /// Helper trait holds necessary trait bounds on [`EthApiTypes`] to implement `eth` API. -pub trait FullEthApiTypes: - EthApiTypes< - TransactionCompat: TransactionCompat< - Transaction = RpcTransaction, - Error = RpcError, - >, -> +pub trait FullEthApiTypes +where + Self: RpcNodeCore> + + EthApiTypes< + TransactionCompat: TransactionCompat< + ::Transaction, + Transaction = RpcTransaction, + Error = RpcError, + >, + >, { } impl FullEthApiTypes for T where - T: EthApiTypes< - TransactionCompat: TransactionCompat< - Transaction = RpcTransaction, - Error = RpcError, - >, - > + T: RpcNodeCore> + + EthApiTypes< + TransactionCompat: TransactionCompat< + ::Transaction, + Transaction = RpcTransaction, + Error = RpcError, + >, + > { } diff --git a/crates/rpc/rpc-eth-types/Cargo.toml b/crates/rpc/rpc-eth-types/Cargo.toml index 9b38ed89724a..98b9530d63c2 100644 --- a/crates/rpc/rpc-eth-types/Cargo.toml +++ b/crates/rpc/rpc-eth-types/Cargo.toml @@ -19,6 +19,7 @@ reth-evm.workspace = true reth-execution-types.workspace = true reth-metrics.workspace = true reth-primitives = { workspace = true, features = ["secp256k1"] } +reth-primitives-traits.workspace = true reth-storage-api.workspace = true reth-revm.workspace = true reth-rpc-server-types.workspace = true diff --git a/crates/rpc/rpc-eth-types/src/logs_utils.rs b/crates/rpc/rpc-eth-types/src/logs_utils.rs index 3e7c9db6d68e..5ead11b71156 100644 --- a/crates/rpc/rpc-eth-types/src/logs_utils.rs +++ b/crates/rpc/rpc-eth-types/src/logs_utils.rs @@ -2,7 +2,7 @@ //! //! Log parsing for building filter. 
-use alloy_eips::BlockNumHash; +use alloy_eips::{eip2718::Encodable2718, BlockNumHash}; use alloy_primitives::TxHash; use alloy_rpc_types_eth::{FilteredParams, Log}; use reth_chainspec::ChainInfo; @@ -110,7 +110,7 @@ pub fn append_matching_block_logs( ProviderError::TransactionNotFound(transaction_id.into()) })?; - Some(transaction.hash()) + Some(transaction.trie_hash()) } }; } diff --git a/crates/rpc/rpc-eth-types/src/transaction.rs b/crates/rpc/rpc-eth-types/src/transaction.rs index a4ede0a1a4eb..83ef97807de0 100644 --- a/crates/rpc/rpc-eth-types/src/transaction.rs +++ b/crates/rpc/rpc-eth-types/src/transaction.rs @@ -4,7 +4,8 @@ use alloy_primitives::B256; use alloy_rpc_types_eth::TransactionInfo; -use reth_primitives::TransactionSignedEcRecovered; +use reth_primitives::{TransactionSigned, TransactionSignedEcRecovered}; +use reth_primitives_traits::SignedTransaction; use reth_rpc_types_compat::{ transaction::{from_recovered, from_recovered_with_block_context}, TransactionCompat, @@ -12,15 +13,15 @@ use reth_rpc_types_compat::{ /// Represents from where a transaction was fetched. #[derive(Debug, Clone, Eq, PartialEq)] -pub enum TransactionSource { +pub enum TransactionSource { /// Transaction exists in the pool (Pending) - Pool(TransactionSignedEcRecovered), + Pool(TransactionSignedEcRecovered), /// Transaction already included in a block /// /// This can be a historical block or a pending block (received from the CL) Block { /// Transaction fetched via provider - transaction: TransactionSignedEcRecovered, + transaction: TransactionSignedEcRecovered, /// Index of the transaction in the block index: u64, /// Hash of the block. @@ -34,22 +35,22 @@ pub enum TransactionSource { // === impl TransactionSource === -impl TransactionSource { +impl TransactionSource { /// Consumes the type and returns the wrapped transaction. - pub fn into_recovered(self) -> TransactionSignedEcRecovered { + pub fn into_recovered(self) -> TransactionSignedEcRecovered { self.into() } /// Conversion into network specific transaction type. - pub fn into_transaction( + pub fn into_transaction>( self, - resp_builder: &T, - ) -> Result { + resp_builder: &Builder, + ) -> Result { match self { Self::Pool(tx) => from_recovered(tx, resp_builder), Self::Block { transaction, index, block_hash, block_number, base_fee } => { let tx_info = TransactionInfo { - hash: Some(transaction.hash()), + hash: Some(transaction.trie_hash()), index: Some(index), block_hash: Some(block_hash), block_number: Some(block_number), @@ -62,14 +63,14 @@ impl TransactionSource { } /// Returns the transaction and block related info, if not pending - pub fn split(self) -> (TransactionSignedEcRecovered, TransactionInfo) { + pub fn split(self) -> (TransactionSignedEcRecovered, TransactionInfo) { match self { Self::Pool(tx) => { - let hash = tx.hash(); + let hash = tx.trie_hash(); (tx, TransactionInfo { hash: Some(hash), ..Default::default() }) } Self::Block { transaction, index, block_hash, block_number, base_fee } => { - let hash = transaction.hash(); + let hash = transaction.trie_hash(); ( transaction, TransactionInfo { @@ -85,8 +86,8 @@ impl TransactionSource { } } -impl From for TransactionSignedEcRecovered { - fn from(value: TransactionSource) -> Self { +impl From> for TransactionSignedEcRecovered { + fn from(value: TransactionSource) -> Self { match value { TransactionSource::Pool(tx) => tx, TransactionSource::Block { transaction, .. 
} => transaction, diff --git a/crates/rpc/rpc-types-compat/src/block.rs b/crates/rpc/rpc-types-compat/src/block.rs index 43086b311bd7..f2b1d93be83e 100644 --- a/crates/rpc/rpc-types-compat/src/block.rs +++ b/crates/rpc/rpc-types-compat/src/block.rs @@ -7,7 +7,7 @@ use alloy_rlp::Encodable; use alloy_rpc_types_eth::{ Block, BlockTransactions, BlockTransactionsKind, Header, TransactionInfo, }; -use reth_primitives::{Block as PrimitiveBlock, BlockWithSenders}; +use reth_primitives::{Block as PrimitiveBlock, BlockWithSenders, TransactionSigned}; use crate::{transaction::from_recovered_with_block_context, TransactionCompat}; @@ -87,7 +87,11 @@ pub fn from_block_full( index: Some(idx as u64), }; - from_recovered_with_block_context::(signed_tx_ec_recovered, tx_info, tx_resp_builder) + from_recovered_with_block_context::( + signed_tx_ec_recovered, + tx_info, + tx_resp_builder, + ) }) .collect::, T::Error>>()?; diff --git a/crates/rpc/rpc-types-compat/src/transaction.rs b/crates/rpc/rpc-types-compat/src/transaction.rs index 9e8fae670963..31c9d967cd15 100644 --- a/crates/rpc/rpc-types-compat/src/transaction.rs +++ b/crates/rpc/rpc-types-compat/src/transaction.rs @@ -8,7 +8,7 @@ use alloy_rpc_types_eth::{ request::{TransactionInput, TransactionRequest}, TransactionInfo, }; -use reth_primitives::TransactionSignedEcRecovered; +use reth_primitives::{TransactionSigned, TransactionSignedEcRecovered}; use serde::{Deserialize, Serialize}; /// Create a new rpc transaction result for a mined transaction, using the given block hash, @@ -16,8 +16,8 @@ use serde::{Deserialize, Serialize}; /// /// The block hash, number, and tx index fields should be from the original block where the /// transaction was mined. -pub fn from_recovered_with_block_context( - tx: TransactionSignedEcRecovered, +pub fn from_recovered_with_block_context>( + tx: TransactionSignedEcRecovered, tx_info: TransactionInfo, resp_builder: &T, ) -> Result { @@ -26,15 +26,17 @@ pub fn from_recovered_with_block_context( /// Create a new rpc transaction result for a _pending_ signed transaction, setting block /// environment related fields to `None`. -pub fn from_recovered( - tx: TransactionSignedEcRecovered, +pub fn from_recovered>( + tx: TransactionSignedEcRecovered, resp_builder: &T, ) -> Result { resp_builder.fill(tx, TransactionInfo::default()) } /// Builds RPC transaction w.r.t. network. -pub trait TransactionCompat: Send + Sync + Unpin + Clone + fmt::Debug { +pub trait TransactionCompat: + Send + Sync + Unpin + Clone + fmt::Debug +{ /// RPC transaction response type. type Transaction: Serialize + for<'de> Deserialize<'de> @@ -51,7 +53,7 @@ pub trait TransactionCompat: Send + Sync + Unpin + Clone + fmt::Debug { /// environment related fields to `None`. fn fill( &self, - tx: TransactionSignedEcRecovered, + tx: TransactionSignedEcRecovered, tx_inf: TransactionInfo, ) -> Result; diff --git a/crates/rpc/rpc/src/eth/helpers/receipt.rs b/crates/rpc/rpc/src/eth/helpers/receipt.rs index 594cffd09f22..13b0dab2593d 100644 --- a/crates/rpc/rpc/src/eth/helpers/receipt.rs +++ b/crates/rpc/rpc/src/eth/helpers/receipt.rs @@ -1,6 +1,7 @@ //! Builds an RPC receipt response w.r.t. data layout of network. 
use reth_primitives::{Receipt, TransactionMeta, TransactionSigned}; +use reth_provider::TransactionsProvider; use reth_rpc_eth_api::{helpers::LoadReceipt, FromEthApiError, RpcNodeCoreExt, RpcReceipt}; use reth_rpc_eth_types::{EthApiError, EthReceiptBuilder}; @@ -8,7 +9,7 @@ use crate::EthApi; impl LoadReceipt for EthApi where - Self: RpcNodeCoreExt, + Self: RpcNodeCoreExt>, { async fn build_transaction_receipt( &self, diff --git a/crates/stages/stages/Cargo.toml b/crates/stages/stages/Cargo.toml index eedd5f9ca41e..f97214f46433 100644 --- a/crates/stages/stages/Cargo.toml +++ b/crates/stages/stages/Cargo.toml @@ -39,6 +39,7 @@ reth-trie-db = { workspace = true, features = ["metrics"] } reth-testing-utils = { workspace = true, optional = true } +alloy-eips.workspace = true alloy-primitives.workspace = true alloy-consensus.workspace = true diff --git a/crates/stages/stages/src/stages/tx_lookup.rs b/crates/stages/stages/src/stages/tx_lookup.rs index 5208cc936ce6..fab10b0f9535 100644 --- a/crates/stages/stages/src/stages/tx_lookup.rs +++ b/crates/stages/stages/src/stages/tx_lookup.rs @@ -1,12 +1,15 @@ +use alloy_eips::eip2718::Encodable2718; use alloy_primitives::{TxHash, TxNumber}; use num_traits::Zero; use reth_config::config::{EtlConfig, TransactionLookupConfig}; -use reth_db::{tables, RawKey, RawValue}; +use reth_db::{table::Value, tables, RawKey, RawValue}; use reth_db_api::{ cursor::{DbCursorRO, DbCursorRW}, transaction::{DbTx, DbTxMut}, }; use reth_etl::Collector; +use reth_primitives::NodePrimitives; +use reth_primitives_traits::SignedTransaction; use reth_provider::{ BlockReader, DBProvider, PruneCheckpointReader, PruneCheckpointWriter, StaticFileProviderFactory, StatsReader, TransactionsProvider, TransactionsProviderExt, @@ -60,7 +63,7 @@ where + BlockReader + PruneCheckpointReader + StatsReader - + StaticFileProviderFactory + + StaticFileProviderFactory> + TransactionsProviderExt, { /// Return the id of the stage @@ -206,7 +209,7 @@ where for tx_id in body.tx_num_range() { // First delete the transaction and hash to id mapping if let Some(transaction) = static_file_provider.transaction_by_id(tx_id)? 
{ - if tx_hash_number_cursor.seek_exact(transaction.hash())?.is_some() { + if tx_hash_number_cursor.seek_exact(transaction.trie_hash())?.is_some() { tx_hash_number_cursor.delete_current()?; } } diff --git a/crates/storage/provider/src/providers/blockchain_provider.rs b/crates/storage/provider/src/providers/blockchain_provider.rs index 744120dd0c06..967ac785b47e 100644 --- a/crates/storage/provider/src/providers/blockchain_provider.rs +++ b/crates/storage/provider/src/providers/blockchain_provider.rs @@ -25,7 +25,7 @@ use reth_db::{models::BlockNumberAddress, transaction::DbTx, Database}; use reth_db_api::models::{AccountBeforeTx, StoredBlockBodyIndices}; use reth_evm::ConfigureEvmEnv; use reth_execution_types::ExecutionOutcome; -use reth_node_types::NodeTypesWithDB; +use reth_node_types::{NodeTypesWithDB, TxTy}; use reth_primitives::{ Account, Block, BlockWithSenders, Receipt, SealedBlock, SealedBlockWithSenders, SealedHeader, StorageEntry, TransactionMeta, TransactionSigned, TransactionSignedNoHash, @@ -331,29 +331,31 @@ impl BlockReader for BlockchainProvider2 { } impl TransactionsProvider for BlockchainProvider2 { + type Transaction = TxTy; + fn transaction_id(&self, tx_hash: TxHash) -> ProviderResult> { self.consistent_provider()?.transaction_id(tx_hash) } - fn transaction_by_id(&self, id: TxNumber) -> ProviderResult> { + fn transaction_by_id(&self, id: TxNumber) -> ProviderResult> { self.consistent_provider()?.transaction_by_id(id) } fn transaction_by_id_unhashed( &self, id: TxNumber, - ) -> ProviderResult> { + ) -> ProviderResult> { self.consistent_provider()?.transaction_by_id_unhashed(id) } - fn transaction_by_hash(&self, hash: TxHash) -> ProviderResult> { + fn transaction_by_hash(&self, hash: TxHash) -> ProviderResult> { self.consistent_provider()?.transaction_by_hash(hash) } fn transaction_by_hash_with_meta( &self, tx_hash: TxHash, - ) -> ProviderResult> { + ) -> ProviderResult> { self.consistent_provider()?.transaction_by_hash_with_meta(tx_hash) } @@ -364,21 +366,21 @@ impl TransactionsProvider for BlockchainProvider2 { fn transactions_by_block( &self, id: BlockHashOrNumber, - ) -> ProviderResult>> { + ) -> ProviderResult>> { self.consistent_provider()?.transactions_by_block(id) } fn transactions_by_block_range( &self, range: impl RangeBounds, - ) -> ProviderResult>> { + ) -> ProviderResult>> { self.consistent_provider()?.transactions_by_block_range(range) } fn transactions_by_tx_range( &self, range: impl RangeBounds, - ) -> ProviderResult> { + ) -> ProviderResult> { self.consistent_provider()?.transactions_by_tx_range(range) } @@ -2244,9 +2246,7 @@ mod tests { (transactions_by_tx_range, |block: &SealedBlock, _: &Vec>| block .body .transactions - .iter() - .map(|tx| Into::::into(tx.clone())) - .collect::>()), + .clone()), (receipts_by_tx_range, |block: &SealedBlock, receipts: &Vec>| receipts [block.number as usize] .clone()) @@ -2591,9 +2591,7 @@ mod tests { transaction_by_id_unhashed, |block: &SealedBlock, tx_num: TxNumber, _: B256, _: &Vec>| ( tx_num, - Some(Into::::into( - block.body.transactions[test_tx_index].clone() - )) + Some(block.body.transactions[test_tx_index].clone()) ), u64::MAX ), diff --git a/crates/storage/provider/src/providers/consistent.rs b/crates/storage/provider/src/providers/consistent.rs index 7d52dfcc4bb0..fc9d739b0fea 100644 --- a/crates/storage/provider/src/providers/consistent.rs +++ b/crates/storage/provider/src/providers/consistent.rs @@ -18,9 +18,10 @@ use reth_db::models::BlockNumberAddress; use reth_db_api::models::{AccountBeforeTx, 
StoredBlockBodyIndices}; use reth_evm::ConfigureEvmEnv; use reth_execution_types::{BundleStateInit, ExecutionOutcome, RevertsInit}; +use reth_node_types::TxTy; use reth_primitives::{ Account, Block, BlockWithSenders, Receipt, SealedBlock, SealedBlockWithSenders, SealedHeader, - StorageEntry, TransactionMeta, TransactionSigned, TransactionSignedNoHash, + StorageEntry, TransactionMeta, }; use reth_prune_types::{PruneCheckpoint, PruneSegment}; use reth_stages_types::{StageCheckpoint, StageId}; @@ -927,6 +928,8 @@ impl BlockReader for ConsistentProvider { } impl TransactionsProvider for ConsistentProvider { + type Transaction = TxTy; + fn transaction_id(&self, tx_hash: TxHash) -> ProviderResult> { self.get_in_memory_or_storage_by_tx( tx_hash.into(), @@ -935,12 +938,19 @@ impl TransactionsProvider for ConsistentProvider { ) } - fn transaction_by_id(&self, id: TxNumber) -> ProviderResult> { + fn transaction_by_id(&self, id: TxNumber) -> ProviderResult> { self.get_in_memory_or_storage_by_tx( id.into(), |provider| provider.transaction_by_id(id), |tx_index, _, block_state| { - Ok(block_state.block_ref().block().body.transactions.get(tx_index).cloned()) + Ok(block_state + .block_ref() + .block() + .body + .transactions + .get(tx_index) + .cloned() + .map(Into::into)) }, ) } @@ -948,7 +958,7 @@ impl TransactionsProvider for ConsistentProvider { fn transaction_by_id_unhashed( &self, id: TxNumber, - ) -> ProviderResult> { + ) -> ProviderResult> { self.get_in_memory_or_storage_by_tx( id.into(), |provider| provider.transaction_by_id_unhashed(id), @@ -965,9 +975,9 @@ impl TransactionsProvider for ConsistentProvider { ) } - fn transaction_by_hash(&self, hash: TxHash) -> ProviderResult> { + fn transaction_by_hash(&self, hash: TxHash) -> ProviderResult> { if let Some(tx) = self.head_block.as_ref().and_then(|b| b.transaction_on_chain(hash)) { - return Ok(Some(tx)) + return Ok(Some(tx.into())) } self.storage_provider.transaction_by_hash(hash) @@ -976,11 +986,11 @@ impl TransactionsProvider for ConsistentProvider { fn transaction_by_hash_with_meta( &self, tx_hash: TxHash, - ) -> ProviderResult> { + ) -> ProviderResult> { if let Some((tx, meta)) = self.head_block.as_ref().and_then(|b| b.transaction_meta_on_chain(tx_hash)) { - return Ok(Some((tx, meta))) + return Ok(Some((tx.into(), meta))) } self.storage_provider.transaction_by_hash_with_meta(tx_hash) @@ -997,22 +1007,44 @@ impl TransactionsProvider for ConsistentProvider { fn transactions_by_block( &self, id: BlockHashOrNumber, - ) -> ProviderResult>> { + ) -> ProviderResult>> { self.get_in_memory_or_storage_by_block( id, |provider| provider.transactions_by_block(id), - |block_state| Ok(Some(block_state.block_ref().block().body.transactions.clone())), + |block_state| { + Ok(Some( + block_state + .block_ref() + .block() + .body + .transactions + .iter() + .map(|tx| tx.clone().into()) + .collect(), + )) + }, ) } fn transactions_by_block_range( &self, range: impl RangeBounds, - ) -> ProviderResult>> { + ) -> ProviderResult>> { self.get_in_memory_or_storage_by_block_range_while( range, |db_provider, range, _| db_provider.transactions_by_block_range(range), - |block_state, _| Some(block_state.block_ref().block().body.transactions.clone()), + |block_state, _| { + Some( + block_state + .block_ref() + .block() + .body + .transactions + .iter() + .map(|tx| tx.clone().into()) + .collect(), + ) + }, |_| true, ) } @@ -1020,7 +1052,7 @@ impl TransactionsProvider for ConsistentProvider { fn transactions_by_tx_range( &self, range: impl RangeBounds, - ) -> ProviderResult> { + 
) -> ProviderResult> { self.get_in_memory_or_storage_by_tx_range( range, |db_provider, db_range| db_provider.transactions_by_tx_range(db_range), diff --git a/crates/storage/provider/src/providers/database/mod.rs b/crates/storage/provider/src/providers/database/mod.rs index 491c79d7aa64..57f09e72306f 100644 --- a/crates/storage/provider/src/providers/database/mod.rs +++ b/crates/storage/provider/src/providers/database/mod.rs @@ -19,10 +19,10 @@ use reth_db::{init_db, mdbx::DatabaseArguments, DatabaseEnv}; use reth_db_api::{database::Database, models::StoredBlockBodyIndices}; use reth_errors::{RethError, RethResult}; use reth_evm::ConfigureEvmEnv; -use reth_node_types::NodeTypesWithDB; +use reth_node_types::{NodeTypesWithDB, TxTy}; use reth_primitives::{ Block, BlockWithSenders, Receipt, SealedBlock, SealedBlockWithSenders, SealedHeader, - StaticFileSegment, TransactionMeta, TransactionSigned, TransactionSignedNoHash, + StaticFileSegment, TransactionMeta, }; use reth_prune_types::{PruneCheckpoint, PruneModes, PruneSegment}; use reth_stages_types::{StageCheckpoint, StageId}; @@ -420,11 +420,13 @@ impl BlockReader for ProviderFactory { } impl TransactionsProvider for ProviderFactory { + type Transaction = TxTy; + fn transaction_id(&self, tx_hash: TxHash) -> ProviderResult> { self.provider()?.transaction_id(tx_hash) } - fn transaction_by_id(&self, id: TxNumber) -> ProviderResult> { + fn transaction_by_id(&self, id: TxNumber) -> ProviderResult> { self.static_file_provider.get_with_static_file_or_database( StaticFileSegment::Transactions, id, @@ -436,7 +438,7 @@ impl TransactionsProvider for ProviderFactory { fn transaction_by_id_unhashed( &self, id: TxNumber, - ) -> ProviderResult> { + ) -> ProviderResult> { self.static_file_provider.get_with_static_file_or_database( StaticFileSegment::Transactions, id, @@ -445,14 +447,14 @@ impl TransactionsProvider for ProviderFactory { ) } - fn transaction_by_hash(&self, hash: TxHash) -> ProviderResult> { + fn transaction_by_hash(&self, hash: TxHash) -> ProviderResult> { self.provider()?.transaction_by_hash(hash) } fn transaction_by_hash_with_meta( &self, tx_hash: TxHash, - ) -> ProviderResult> { + ) -> ProviderResult> { self.provider()?.transaction_by_hash_with_meta(tx_hash) } @@ -463,21 +465,21 @@ impl TransactionsProvider for ProviderFactory { fn transactions_by_block( &self, id: BlockHashOrNumber, - ) -> ProviderResult>> { + ) -> ProviderResult>> { self.provider()?.transactions_by_block(id) } fn transactions_by_block_range( &self, range: impl RangeBounds, - ) -> ProviderResult>> { + ) -> ProviderResult>> { self.provider()?.transactions_by_block_range(range) } fn transactions_by_tx_range( &self, range: impl RangeBounds, - ) -> ProviderResult> { + ) -> ProviderResult> { self.provider()?.transactions_by_tx_range(range) } diff --git a/crates/storage/provider/src/providers/database/provider.rs b/crates/storage/provider/src/providers/database/provider.rs index 279637abd84f..bf9762037262 100644 --- a/crates/storage/provider/src/providers/database/provider.rs +++ b/crates/storage/provider/src/providers/database/provider.rs @@ -3,7 +3,7 @@ use crate::{ providers::{ database::{chain::ChainStorage, metrics}, static_file::StaticFileWriter, - ProviderNodeTypes, StaticFileProvider, + NodeTypesForProvider, StaticFileProvider, }, to_range, traits::{ @@ -46,7 +46,7 @@ use reth_db_api::{ use reth_evm::ConfigureEvmEnv; use reth_execution_types::{Chain, ExecutionOutcome}; use reth_network_p2p::headers::downloader::SyncTarget; -use reth_node_types::NodeTypes; +use 
reth_node_types::{NodeTypes, TxTy}; use reth_primitives::{ Account, Block, BlockBody, BlockWithSenders, Bytecode, GotExpected, NodePrimitives, Receipt, SealedBlock, SealedBlockWithSenders, SealedHeader, StaticFileSegment, StorageEntry, @@ -243,7 +243,7 @@ impl AsRef for DatabaseProvider { } } -impl DatabaseProvider { +impl DatabaseProvider { /// Unwinds trie state for the given range. /// /// This includes calculating the resulted state root and comparing it with the parent block @@ -374,7 +374,7 @@ impl TryIntoHistoricalStateProvider for Databa } } -impl DatabaseProvider { +impl DatabaseProvider { // TODO: uncomment below, once `reth debug_cmd` has been feature gated with dev. // #[cfg(any(test, feature = "test-utils"))] /// Inserts an historical block. **Used for setting up test environments** @@ -486,14 +486,16 @@ impl DatabaseProvider { pub fn chain_spec(&self) -> &N::ChainSpec { &self.chain_spec } +} +impl DatabaseProvider { fn transactions_by_tx_range_with_cursor( &self, range: impl RangeBounds, cursor: &mut C, - ) -> ProviderResult> + ) -> ProviderResult>> where - C: DbCursorRO, + C: DbCursorRO>>, { self.static_file_provider.get_range_with_static_file_or_database( StaticFileSegment::Transactions, @@ -507,7 +509,7 @@ impl DatabaseProvider { fn block_with_senders( &self, id: BlockHashOrNumber, - transaction_kind: TransactionVariant, + _transaction_kind: TransactionVariant, header_by_number: HF, construct_block: BF, ) -> ProviderResult> @@ -546,15 +548,7 @@ impl DatabaseProvider { (self.transactions_by_tx_range(tx_range.clone())?, self.senders_by_tx_range(tx_range)?) }; - let body = transactions - .into_iter() - .map(|tx| match transaction_kind { - TransactionVariant::NoHash => { - TransactionSigned::new_unhashed(tx.transaction, tx.signature) - } - TransactionVariant::WithHash => tx.with_hash(), - }) - .collect(); + let body = transactions.into_iter().map(Into::into).collect(); construct_block(header, body, senders, ommers, withdrawals) } @@ -663,7 +657,7 @@ impl DatabaseProvider { Vec
, ) -> ProviderResult, { - let mut tx_cursor = self.tx.cursor_read::()?; + let mut tx_cursor = self.tx.cursor_read::>>()?; let mut senders_cursor = self.tx.cursor_read::()?; self.block_range(range, headers_range, |header, tx_range, ommers, withdrawals| { @@ -1219,9 +1213,7 @@ impl BlockNumReader for DatabaseProvider> BlockReader - for DatabaseProvider -{ +impl BlockReader for DatabaseProvider { fn find_block_by_hash(&self, hash: B256, source: BlockSource) -> ProviderResult> { if source.is_canonical() { self.block(hash.into()) @@ -1245,7 +1237,7 @@ impl> BlockReader // If they exist but are not indexed, we don't have enough // information to return the block anyways, so we return `None`. let transactions = match self.transactions_by_block(number.into())? { - Some(transactions) => transactions, + Some(transactions) => transactions.into_iter().map(Into::into).collect(), None => return Ok(None), }; @@ -1345,7 +1337,7 @@ impl> BlockReader } fn block_range(&self, range: RangeInclusive) -> ProviderResult> { - let mut tx_cursor = self.tx.cursor_read::()?; + let mut tx_cursor = self.tx.cursor_read::>>()?; self.block_range( range, |range| self.headers_range(range), @@ -1396,7 +1388,7 @@ impl> BlockReader } } -impl> TransactionsProviderExt +impl TransactionsProviderExt for DatabaseProvider { /// Recovers transaction hashes by walking through `Transactions` table and @@ -1466,53 +1458,49 @@ impl> Transaction } // Calculates the hash of the given transaction -impl> TransactionsProvider - for DatabaseProvider -{ +impl TransactionsProvider for DatabaseProvider { + type Transaction = TxTy; + fn transaction_id(&self, tx_hash: TxHash) -> ProviderResult> { Ok(self.tx.get::(tx_hash)?) } - fn transaction_by_id(&self, id: TxNumber) -> ProviderResult> { + fn transaction_by_id(&self, id: TxNumber) -> ProviderResult> { self.static_file_provider.get_with_static_file_or_database( StaticFileSegment::Transactions, id, |static_file| static_file.transaction_by_id(id), - || Ok(self.tx.get::(id)?.map(Into::into)), + || Ok(self.tx.get::>(id)?), ) } fn transaction_by_id_unhashed( &self, id: TxNumber, - ) -> ProviderResult> { + ) -> ProviderResult> { self.static_file_provider.get_with_static_file_or_database( StaticFileSegment::Transactions, id, |static_file| static_file.transaction_by_id_unhashed(id), - || Ok(self.tx.get::(id)?), + || Ok(self.tx.get::>(id)?), ) } - fn transaction_by_hash(&self, hash: TxHash) -> ProviderResult> { + fn transaction_by_hash(&self, hash: TxHash) -> ProviderResult> { if let Some(id) = self.transaction_id(hash)? { - Ok(self - .transaction_by_id_unhashed(id)? - .map(|tx| TransactionSigned::new(tx.transaction, tx.signature, hash))) + Ok(self.transaction_by_id_unhashed(id)?) } else { Ok(None) } - .map(|tx| tx.map(Into::into)) } fn transaction_by_hash_with_meta( &self, tx_hash: TxHash, - ) -> ProviderResult> { + ) -> ProviderResult> { let mut transaction_cursor = self.tx.cursor_read::()?; if let Some(transaction_id) = self.transaction_id(tx_hash)? { - if let Some(tx) = self.transaction_by_id_unhashed(transaction_id)? { - let transaction = TransactionSigned::new(tx.transaction, tx.signature, tx_hash); + if let Some(transaction) = self.transaction_by_id_unhashed(transaction_id)? { if let Some(block_number) = transaction_cursor.seek(transaction_id).map(|b| b.map(|(_, bn)| bn))? 
{ @@ -1553,8 +1541,8 @@ impl> Transaction fn transactions_by_block( &self, id: BlockHashOrNumber, - ) -> ProviderResult>> { - let mut tx_cursor = self.tx.cursor_read::()?; + ) -> ProviderResult>> { + let mut tx_cursor = self.tx.cursor_read::>()?; if let Some(block_number) = self.convert_hash_or_number(id)? { if let Some(body) = self.block_body_indices(block_number)? { @@ -1562,12 +1550,7 @@ impl> Transaction return if tx_range.is_empty() { Ok(Some(Vec::new())) } else { - Ok(Some( - self.transactions_by_tx_range_with_cursor(tx_range, &mut tx_cursor)? - .into_iter() - .map(Into::into) - .collect(), - )) + Ok(Some(self.transactions_by_tx_range_with_cursor(tx_range, &mut tx_cursor)?)) } } } @@ -1577,8 +1560,8 @@ impl> Transaction fn transactions_by_block_range( &self, range: impl RangeBounds, - ) -> ProviderResult>> { - let mut tx_cursor = self.tx.cursor_read::()?; + ) -> ProviderResult>> { + let mut tx_cursor = self.tx.cursor_read::>()?; let mut results = Vec::new(); let mut body_cursor = self.tx.cursor_read::()?; for entry in body_cursor.walk_range(range)? { @@ -1590,7 +1573,6 @@ impl> Transaction results.push( self.transactions_by_tx_range_with_cursor(tx_num_range, &mut tx_cursor)? .into_iter() - .map(Into::into) .collect(), ); } @@ -1601,10 +1583,10 @@ impl> Transaction fn transactions_by_tx_range( &self, range: impl RangeBounds, - ) -> ProviderResult> { + ) -> ProviderResult> { self.transactions_by_tx_range_with_cursor( range, - &mut self.tx.cursor_read::()?, + &mut self.tx.cursor_read::>()?, ) } @@ -1620,9 +1602,7 @@ impl> Transaction } } -impl> ReceiptProvider - for DatabaseProvider -{ +impl ReceiptProvider for DatabaseProvider { fn receipt(&self, id: TxNumber) -> ProviderResult> { self.static_file_provider.get_with_static_file_or_database( StaticFileSegment::Receipts, @@ -1887,7 +1867,9 @@ impl StorageReader for DatabaseProvider } } -impl StateChangeWriter for DatabaseProvider { +impl StateChangeWriter + for DatabaseProvider +{ fn write_state_reverts( &self, reverts: PlainStateReverts, @@ -2710,13 +2692,13 @@ impl HistoryWriter for DatabaseProvi } } -impl StateReader for DatabaseProvider { +impl StateReader for DatabaseProvider { fn get_state(&self, block: BlockNumber) -> ProviderResult> { self.get_state(block..=block) } } -impl BlockExecutionWriter +impl BlockExecutionWriter for DatabaseProvider { fn take_block_and_execution_above( @@ -2766,7 +2748,7 @@ impl BlockExecutio } } -impl BlockWriter +impl BlockWriter for DatabaseProvider { type Body = <::Block as reth_primitives_traits::Block>::Body; diff --git a/crates/storage/provider/src/providers/mod.rs b/crates/storage/provider/src/providers/mod.rs index 4d641bb290e3..68d1a168f150 100644 --- a/crates/storage/provider/src/providers/mod.rs +++ b/crates/storage/provider/src/providers/mod.rs @@ -23,10 +23,10 @@ use reth_chainspec::{ChainInfo, EthereumHardforks}; use reth_db::table::Value; use reth_db_api::models::{AccountBeforeTx, StoredBlockBodyIndices}; use reth_evm::ConfigureEvmEnv; -use reth_node_types::{FullNodePrimitives, NodeTypes, NodeTypesWithDB}; +use reth_node_types::{FullNodePrimitives, NodeTypes, NodeTypesWithDB, TxTy}; use reth_primitives::{ Account, Block, BlockWithSenders, Receipt, SealedBlock, SealedBlockWithSenders, SealedHeader, - TransactionMeta, TransactionSigned, TransactionSignedNoHash, + TransactionMeta, TransactionSigned, }; use reth_prune_types::{PruneCheckpoint, PruneSegment}; use reth_stages_types::{StageCheckpoint, StageId}; @@ -76,7 +76,9 @@ where Self: NodeTypes< ChainSpec: EthereumHardforks, Storage: 
ChainStorage, - Primitives: FullNodePrimitives, + Primitives: FullNodePrimitives< + SignedTx: Value + From + Into, + >, >, { } @@ -85,7 +87,9 @@ impl NodeTypesForProvider for T where T: NodeTypes< ChainSpec: EthereumHardforks, Storage: ChainStorage, - Primitives: FullNodePrimitives, + Primitives: FullNodePrimitives< + SignedTx: Value + From + Into, + >, > { } @@ -417,29 +421,31 @@ impl BlockReader for BlockchainProvider { } impl TransactionsProvider for BlockchainProvider { + type Transaction = TxTy; + fn transaction_id(&self, tx_hash: TxHash) -> ProviderResult> { self.database.transaction_id(tx_hash) } - fn transaction_by_id(&self, id: TxNumber) -> ProviderResult> { + fn transaction_by_id(&self, id: TxNumber) -> ProviderResult> { self.database.transaction_by_id(id) } fn transaction_by_id_unhashed( &self, id: TxNumber, - ) -> ProviderResult> { + ) -> ProviderResult> { self.database.transaction_by_id_unhashed(id) } - fn transaction_by_hash(&self, hash: TxHash) -> ProviderResult> { + fn transaction_by_hash(&self, hash: TxHash) -> ProviderResult> { self.database.transaction_by_hash(hash) } fn transaction_by_hash_with_meta( &self, tx_hash: TxHash, - ) -> ProviderResult> { + ) -> ProviderResult> { self.database.transaction_by_hash_with_meta(tx_hash) } @@ -450,21 +456,21 @@ impl TransactionsProvider for BlockchainProvider { fn transactions_by_block( &self, id: BlockHashOrNumber, - ) -> ProviderResult>> { + ) -> ProviderResult>> { self.database.transactions_by_block(id) } fn transactions_by_block_range( &self, range: impl RangeBounds, - ) -> ProviderResult>> { + ) -> ProviderResult>> { self.database.transactions_by_block_range(range) } fn transactions_by_tx_range( &self, range: impl RangeBounds, - ) -> ProviderResult> { + ) -> ProviderResult> { self.database.transactions_by_tx_range(range) } diff --git a/crates/storage/provider/src/providers/static_file/jar.rs b/crates/storage/provider/src/providers/static_file/jar.rs index 9bde4a5f7603..e04d46312f67 100644 --- a/crates/storage/provider/src/providers/static_file/jar.rs +++ b/crates/storage/provider/src/providers/static_file/jar.rs @@ -7,17 +7,19 @@ use crate::{ TransactionsProvider, }; use alloy_consensus::Header; -use alloy_eips::BlockHashOrNumber; +use alloy_eips::{eip2718::Encodable2718, BlockHashOrNumber}; use alloy_primitives::{Address, BlockHash, BlockNumber, TxHash, TxNumber, B256, U256}; use reth_chainspec::ChainInfo; -use reth_db::static_file::{ - BlockHashMask, HeaderMask, HeaderWithHashMask, ReceiptMask, StaticFileCursor, TDWithHashMask, - TotalDifficultyMask, TransactionMask, +use reth_db::{ + static_file::{ + BlockHashMask, HeaderMask, HeaderWithHashMask, ReceiptMask, StaticFileCursor, + TDWithHashMask, TotalDifficultyMask, TransactionMask, + }, + table::Decompress, }; use reth_node_types::NodePrimitives; -use reth_primitives::{ - Receipt, SealedHeader, TransactionMeta, TransactionSigned, TransactionSignedNoHash, -}; +use reth_primitives::{transaction::recover_signers, Receipt, SealedHeader, TransactionMeta}; +use reth_primitives_traits::SignedTransaction; use reth_storage_errors::provider::{ProviderError, ProviderResult}; use std::{ fmt::Debug, @@ -207,40 +209,38 @@ impl BlockNumReader for StaticFileJarProvider<'_, N> { } } -impl TransactionsProvider for StaticFileJarProvider<'_, N> { +impl> TransactionsProvider + for StaticFileJarProvider<'_, N> +{ + type Transaction = N::SignedTx; + fn transaction_id(&self, hash: TxHash) -> ProviderResult> { let mut cursor = self.cursor()?; Ok(cursor - .get_one::>((&hash).into())? 
- .and_then(|res| (res.hash() == hash).then(|| cursor.number()).flatten())) + .get_one::>((&hash).into())? + .and_then(|res| (res.trie_hash() == hash).then(|| cursor.number()).flatten())) } - fn transaction_by_id(&self, num: TxNumber) -> ProviderResult> { - Ok(self - .cursor()? - .get_one::>(num.into())? - .map(|tx| tx.with_hash())) + fn transaction_by_id(&self, num: TxNumber) -> ProviderResult> { + self.cursor()?.get_one::>(num.into()) } fn transaction_by_id_unhashed( &self, num: TxNumber, - ) -> ProviderResult> { - self.cursor()?.get_one::>(num.into()) + ) -> ProviderResult> { + self.cursor()?.get_one::>(num.into()) } - fn transaction_by_hash(&self, hash: TxHash) -> ProviderResult> { - Ok(self - .cursor()? - .get_one::>((&hash).into())? - .map(|tx| tx.with_hash())) + fn transaction_by_hash(&self, hash: TxHash) -> ProviderResult> { + self.cursor()?.get_one::>((&hash).into()) } fn transaction_by_hash_with_meta( &self, _hash: TxHash, - ) -> ProviderResult> { + ) -> ProviderResult> { // Information required on indexing table [`tables::TransactionBlocks`] Err(ProviderError::UnsupportedProvider) } @@ -253,7 +253,7 @@ impl TransactionsProvider for StaticFileJarProvider<'_, N> { fn transactions_by_block( &self, _block_id: BlockHashOrNumber, - ) -> ProviderResult>> { + ) -> ProviderResult>> { // Related to indexing tables. Live database should get the tx_range and call static file // provider with `transactions_by_tx_range` instead. Err(ProviderError::UnsupportedProvider) @@ -262,7 +262,7 @@ impl TransactionsProvider for StaticFileJarProvider<'_, N> { fn transactions_by_block_range( &self, _range: impl RangeBounds, - ) -> ProviderResult>> { + ) -> ProviderResult>> { // Related to indexing tables. Live database should get the tx_range and call static file // provider with `transactions_by_tx_range` instead. Err(ProviderError::UnsupportedProvider) @@ -271,15 +271,13 @@ impl TransactionsProvider for StaticFileJarProvider<'_, N> { fn transactions_by_tx_range( &self, range: impl RangeBounds, - ) -> ProviderResult> { + ) -> ProviderResult> { let range = to_range(range); let mut cursor = self.cursor()?; let mut txes = Vec::with_capacity((range.end - range.start) as usize); for num in range { - if let Some(tx) = - cursor.get_one::>(num.into())? - { + if let Some(tx) = cursor.get_one::>(num.into())? { txes.push(tx) } } @@ -291,19 +289,20 @@ impl TransactionsProvider for StaticFileJarProvider<'_, N> { range: impl RangeBounds, ) -> ProviderResult> { let txs = self.transactions_by_tx_range(range)?; - TransactionSignedNoHash::recover_signers(&txs, txs.len()) - .ok_or(ProviderError::SenderRecoveryError) + recover_signers(&txs, txs.len()).ok_or(ProviderError::SenderRecoveryError) } fn transaction_sender(&self, num: TxNumber) -> ProviderResult> { Ok(self .cursor()? - .get_one::>(num.into())? + .get_one::>(num.into())? 
.and_then(|tx| tx.recover_signer())) } } -impl ReceiptProvider for StaticFileJarProvider<'_, N> { +impl> ReceiptProvider + for StaticFileJarProvider<'_, N> +{ fn receipt(&self, num: TxNumber) -> ProviderResult> { self.cursor()?.get_one::>(num.into()) } diff --git a/crates/storage/provider/src/providers/static_file/manager.rs b/crates/storage/provider/src/providers/static_file/manager.rs index 8ecc33240b4e..14821fde547d 100644 --- a/crates/storage/provider/src/providers/static_file/manager.rs +++ b/crates/storage/provider/src/providers/static_file/manager.rs @@ -9,6 +9,7 @@ use crate::{ }; use alloy_consensus::Header; use alloy_eips::{ + eip2718::Encodable2718, eip4895::{Withdrawal, Withdrawals}, BlockHashOrNumber, }; @@ -23,6 +24,7 @@ use reth_db::{ iter_static_files, BlockHashMask, HeaderMask, HeaderWithHashMask, ReceiptMask, StaticFileCursor, TDWithHashMask, TransactionMask, }, + table::{Decompress, Value}, tables, }; use reth_db_api::{ @@ -35,9 +37,11 @@ use reth_primitives::{ find_fixed_range, HighestStaticFiles, SegmentHeader, SegmentRangeInclusive, DEFAULT_BLOCKS_PER_STATIC_FILE, }, + transaction::recover_signers, Block, BlockWithSenders, Receipt, SealedBlock, SealedBlockWithSenders, SealedHeader, - StaticFileSegment, TransactionMeta, TransactionSigned, TransactionSignedNoHash, + StaticFileSegment, TransactionMeta, TransactionSignedNoHash, }; +use reth_primitives_traits::SignedTransaction; use reth_stages_types::{PipelineTarget, StageId}; use reth_storage_api::DBProvider; use reth_storage_errors::provider::{ProviderError, ProviderResult}; @@ -1337,7 +1341,9 @@ impl BlockHashReader for StaticFileProvider { } } -impl ReceiptProvider for StaticFileProvider { +impl> ReceiptProvider + for StaticFileProvider +{ fn receipt(&self, num: TxNumber) -> ProviderResult> { self.get_segment_provider_from_transaction(StaticFileSegment::Receipts, num, None) .and_then(|provider| provider.receipt(num)) @@ -1374,7 +1380,9 @@ impl ReceiptProvider for StaticFileProvider { } } -impl TransactionsProviderExt for StaticFileProvider { +impl> TransactionsProviderExt + for StaticFileProvider +{ fn transaction_hashes_by_range( &self, tx_range: Range, @@ -1435,13 +1443,17 @@ impl TransactionsProviderExt for StaticFileProvider { } } -impl TransactionsProvider for StaticFileProvider { +impl> TransactionsProvider + for StaticFileProvider +{ + type Transaction = N::SignedTx; + fn transaction_id(&self, tx_hash: TxHash) -> ProviderResult> { self.find_static_file(StaticFileSegment::Transactions, |jar_provider| { let mut cursor = jar_provider.cursor()?; if cursor - .get_one::>((&tx_hash).into())? - .and_then(|tx| (tx.hash() == tx_hash).then_some(tx)) + .get_one::>((&tx_hash).into())? 
+ .and_then(|tx| (tx.trie_hash() == tx_hash).then_some(tx)) .is_some() { Ok(cursor.number()) @@ -1451,7 +1463,7 @@ impl TransactionsProvider for StaticFileProvider { }) } - fn transaction_by_id(&self, num: TxNumber) -> ProviderResult> { + fn transaction_by_id(&self, num: TxNumber) -> ProviderResult> { self.get_segment_provider_from_transaction(StaticFileSegment::Transactions, num, None) .and_then(|provider| provider.transaction_by_id(num)) .or_else(|err| { @@ -1466,7 +1478,7 @@ impl TransactionsProvider for StaticFileProvider { fn transaction_by_id_unhashed( &self, num: TxNumber, - ) -> ProviderResult> { + ) -> ProviderResult> { self.get_segment_provider_from_transaction(StaticFileSegment::Transactions, num, None) .and_then(|provider| provider.transaction_by_id_unhashed(num)) .or_else(|err| { @@ -1478,20 +1490,19 @@ impl TransactionsProvider for StaticFileProvider { }) } - fn transaction_by_hash(&self, hash: TxHash) -> ProviderResult> { + fn transaction_by_hash(&self, hash: TxHash) -> ProviderResult> { self.find_static_file(StaticFileSegment::Transactions, |jar_provider| { Ok(jar_provider .cursor()? - .get_one::>((&hash).into())? - .map(|tx| tx.with_hash()) - .and_then(|tx| (tx.hash_ref() == &hash).then_some(tx))) + .get_one::>((&hash).into())? + .and_then(|tx| (tx.trie_hash() == hash).then_some(tx))) }) } fn transaction_by_hash_with_meta( &self, _hash: TxHash, - ) -> ProviderResult> { + ) -> ProviderResult> { // Required data not present in static_files Err(ProviderError::UnsupportedProvider) } @@ -1504,7 +1515,7 @@ impl TransactionsProvider for StaticFileProvider { fn transactions_by_block( &self, _block_id: BlockHashOrNumber, - ) -> ProviderResult>> { + ) -> ProviderResult>> { // Required data not present in static_files Err(ProviderError::UnsupportedProvider) } @@ -1512,7 +1523,7 @@ impl TransactionsProvider for StaticFileProvider { fn transactions_by_block_range( &self, _range: impl RangeBounds, - ) -> ProviderResult>> { + ) -> ProviderResult>> { // Required data not present in static_files Err(ProviderError::UnsupportedProvider) } @@ -1520,13 +1531,11 @@ impl TransactionsProvider for StaticFileProvider { fn transactions_by_tx_range( &self, range: impl RangeBounds, - ) -> ProviderResult> { + ) -> ProviderResult> { self.fetch_range_with_predicate( StaticFileSegment::Transactions, to_range(range), - |cursor, number| { - cursor.get_one::>(number.into()) - }, + |cursor, number| cursor.get_one::>(number.into()), |_| true, ) } @@ -1536,8 +1545,7 @@ impl TransactionsProvider for StaticFileProvider { range: impl RangeBounds, ) -> ProviderResult> { let txes = self.transactions_by_tx_range(range)?; - TransactionSignedNoHash::recover_signers(&txes, txes.len()) - .ok_or(ProviderError::SenderRecoveryError) + recover_signers(&txes, txes.len()).ok_or(ProviderError::SenderRecoveryError) } fn transaction_sender(&self, id: TxNumber) -> ProviderResult> { @@ -1569,7 +1577,7 @@ impl BlockNumReader for StaticFileProvider { } } -impl BlockReader for StaticFileProvider { +impl> BlockReader for StaticFileProvider { fn find_block_by_hash( &self, _hash: B256, diff --git a/crates/storage/provider/src/providers/static_file/mod.rs b/crates/storage/provider/src/providers/static_file/mod.rs index 58a9e3bb378c..673451de65f1 100644 --- a/crates/storage/provider/src/providers/static_file/mod.rs +++ b/crates/storage/provider/src/providers/static_file/mod.rs @@ -415,7 +415,7 @@ mod tests { #[allow(clippy::too_many_arguments)] fn prune_and_validate( - sf_rw: &StaticFileProvider<()>, + sf_rw: &StaticFileProvider, 
static_dir: impl AsRef, segment: StaticFileSegment, prune_count: u64, diff --git a/crates/storage/provider/src/test_utils/mock.rs b/crates/storage/provider/src/test_utils/mock.rs index 77a4b75a0e2b..a0ecb7256cb8 100644 --- a/crates/storage/provider/src/test_utils/mock.rs +++ b/crates/storage/provider/src/test_utils/mock.rs @@ -25,7 +25,6 @@ use reth_node_types::NodeTypes; use reth_primitives::{ Account, Block, BlockWithSenders, Bytecode, EthPrimitives, GotExpected, Receipt, SealedBlock, SealedBlockWithSenders, SealedHeader, TransactionMeta, TransactionSigned, - TransactionSignedNoHash, }; use reth_stages_types::{StageCheckpoint, StageId}; use reth_storage_api::{ @@ -244,6 +243,8 @@ impl ChainSpecProvider for MockEthProvider { } impl TransactionsProvider for MockEthProvider { + type Transaction = TransactionSigned; + fn transaction_id(&self, tx_hash: TxHash) -> ProviderResult> { let lock = self.blocks.lock(); let tx_number = lock @@ -255,7 +256,7 @@ impl TransactionsProvider for MockEthProvider { Ok(tx_number) } - fn transaction_by_id(&self, id: TxNumber) -> ProviderResult> { + fn transaction_by_id(&self, id: TxNumber) -> ProviderResult> { let lock = self.blocks.lock(); let transaction = lock.values().flat_map(|block| &block.body.transactions).nth(id as usize).cloned(); @@ -266,13 +267,10 @@ impl TransactionsProvider for MockEthProvider { fn transaction_by_id_unhashed( &self, id: TxNumber, - ) -> ProviderResult> { + ) -> ProviderResult> { let lock = self.blocks.lock(); - let transaction = lock - .values() - .flat_map(|block| &block.body.transactions) - .nth(id as usize) - .map(|tx| Into::::into(tx.clone())); + let transaction = + lock.values().flat_map(|block| &block.body.transactions).nth(id as usize).cloned(); Ok(transaction) } @@ -286,7 +284,7 @@ impl TransactionsProvider for MockEthProvider { fn transaction_by_hash_with_meta( &self, hash: TxHash, - ) -> ProviderResult> { + ) -> ProviderResult> { let lock = self.blocks.lock(); for (block_hash, block) in lock.iter() { for (index, tx) in block.body.transactions.iter().enumerate() { @@ -322,14 +320,14 @@ impl TransactionsProvider for MockEthProvider { fn transactions_by_block( &self, id: BlockHashOrNumber, - ) -> ProviderResult>> { + ) -> ProviderResult>> { Ok(self.block(id)?.map(|b| b.body.transactions)) } fn transactions_by_block_range( &self, range: impl RangeBounds, - ) -> ProviderResult>> { + ) -> ProviderResult>> { // init btreemap so we can return in order let mut map = BTreeMap::new(); for (_, block) in self.blocks.lock().iter() { @@ -344,14 +342,14 @@ impl TransactionsProvider for MockEthProvider { fn transactions_by_tx_range( &self, range: impl RangeBounds, - ) -> ProviderResult> { + ) -> ProviderResult> { let lock = self.blocks.lock(); let transactions = lock .values() .flat_map(|block| &block.body.transactions) .enumerate() .filter(|&(tx_number, _)| range.contains(&(tx_number as TxNumber))) - .map(|(_, tx)| tx.clone().into()) + .map(|(_, tx)| tx.clone()) .collect(); Ok(transactions) diff --git a/crates/storage/provider/src/test_utils/noop.rs b/crates/storage/provider/src/test_utils/noop.rs index 966bab5944cc..9a88c8c9ab74 100644 --- a/crates/storage/provider/src/test_utils/noop.rs +++ b/crates/storage/provider/src/test_utils/noop.rs @@ -23,7 +23,7 @@ use reth_errors::ProviderError; use reth_evm::ConfigureEvmEnv; use reth_primitives::{ Account, Block, BlockWithSenders, Bytecode, Receipt, SealedBlock, SealedBlockWithSenders, - SealedHeader, TransactionMeta, TransactionSigned, TransactionSignedNoHash, + SealedHeader, 
TransactionMeta, TransactionSigned, }; use reth_prune_types::{PruneCheckpoint, PruneSegment}; use reth_stages_types::{StageCheckpoint, StageId}; @@ -192,29 +192,31 @@ impl BlockIdReader for NoopProvider { } impl TransactionsProvider for NoopProvider { + type Transaction = TransactionSigned; + fn transaction_id(&self, _tx_hash: TxHash) -> ProviderResult> { Ok(None) } - fn transaction_by_id(&self, _id: TxNumber) -> ProviderResult> { + fn transaction_by_id(&self, _id: TxNumber) -> ProviderResult> { Ok(None) } fn transaction_by_id_unhashed( &self, _id: TxNumber, - ) -> ProviderResult> { + ) -> ProviderResult> { Ok(None) } - fn transaction_by_hash(&self, _hash: TxHash) -> ProviderResult> { + fn transaction_by_hash(&self, _hash: TxHash) -> ProviderResult> { Ok(None) } fn transaction_by_hash_with_meta( &self, _hash: TxHash, - ) -> ProviderResult> { + ) -> ProviderResult> { Ok(None) } @@ -225,21 +227,21 @@ impl TransactionsProvider for NoopProvider { fn transactions_by_block( &self, _block_id: BlockHashOrNumber, - ) -> ProviderResult>> { + ) -> ProviderResult>> { Ok(None) } fn transactions_by_block_range( &self, _range: impl RangeBounds, - ) -> ProviderResult>> { + ) -> ProviderResult>> { Ok(Vec::default()) } fn transactions_by_tx_range( &self, _range: impl RangeBounds, - ) -> ProviderResult> { + ) -> ProviderResult> { Ok(Vec::default()) } diff --git a/crates/storage/provider/src/traits/full.rs b/crates/storage/provider/src/traits/full.rs index 4998e9741656..9bb357e33a3c 100644 --- a/crates/storage/provider/src/traits/full.rs +++ b/crates/storage/provider/src/traits/full.rs @@ -7,13 +7,13 @@ use crate::{ }; use reth_chain_state::{CanonStateSubscriptions, ForkChoiceSubscriptions}; use reth_chainspec::EthereumHardforks; -use reth_node_types::NodeTypesWithDB; +use reth_node_types::{NodeTypesWithDB, TxTy}; /// Helper trait to unify all provider traits for simplicity. pub trait FullProvider: DatabaseProviderFactory - + StaticFileProviderFactory - + BlockReaderIdExt + + StaticFileProviderFactory + + BlockReaderIdExt> + AccountReader + StateProviderFactory + EvmEnvProvider @@ -30,8 +30,8 @@ pub trait FullProvider: impl FullProvider for T where T: DatabaseProviderFactory - + StaticFileProviderFactory - + BlockReaderIdExt + + StaticFileProviderFactory + + BlockReaderIdExt> + AccountReader + StateProviderFactory + EvmEnvProvider diff --git a/crates/storage/storage-api/src/transactions.rs b/crates/storage/storage-api/src/transactions.rs index a639fcedde5a..ca2bcaeb4690 100644 --- a/crates/storage/storage-api/src/transactions.rs +++ b/crates/storage/storage-api/src/transactions.rs @@ -1,7 +1,8 @@ use crate::{BlockNumReader, BlockReader}; use alloy_eips::BlockHashOrNumber; use alloy_primitives::{Address, BlockNumber, TxHash, TxNumber}; -use reth_primitives::{TransactionMeta, TransactionSigned, TransactionSignedNoHash}; +use reth_primitives::TransactionMeta; +use reth_primitives_traits::SignedTransaction; use reth_storage_errors::provider::{ProviderError, ProviderResult}; use std::ops::{Range, RangeBounds, RangeInclusive}; @@ -18,9 +19,12 @@ pub enum TransactionVariant { WithHash, } -/// Client trait for fetching [TransactionSigned] related data. +/// Client trait for fetching transactions related data. #[auto_impl::auto_impl(&, Arc)] pub trait TransactionsProvider: BlockNumReader + Send + Sync { + /// The transaction type this provider reads. + type Transaction: Send + Sync + SignedTransaction; + /// Get internal transaction identifier by transaction hash. 
/// /// This is the inverse of [TransactionsProvider::transaction_by_id]. @@ -28,23 +32,21 @@ pub trait TransactionsProvider: BlockNumReader + Send + Sync { fn transaction_id(&self, tx_hash: TxHash) -> ProviderResult>; /// Get transaction by id, computes hash every time so more expensive. - fn transaction_by_id(&self, id: TxNumber) -> ProviderResult>; + fn transaction_by_id(&self, id: TxNumber) -> ProviderResult>; /// Get transaction by id without computing the hash. - fn transaction_by_id_unhashed( - &self, - id: TxNumber, - ) -> ProviderResult>; + fn transaction_by_id_unhashed(&self, id: TxNumber) + -> ProviderResult>; /// Get transaction by transaction hash. - fn transaction_by_hash(&self, hash: TxHash) -> ProviderResult>; + fn transaction_by_hash(&self, hash: TxHash) -> ProviderResult>; /// Get transaction by transaction hash and additional metadata of the block the transaction was /// mined in fn transaction_by_hash_with_meta( &self, hash: TxHash, - ) -> ProviderResult>; + ) -> ProviderResult>; /// Get transaction block number fn transaction_block(&self, id: TxNumber) -> ProviderResult>; @@ -53,19 +55,19 @@ pub trait TransactionsProvider: BlockNumReader + Send + Sync { fn transactions_by_block( &self, block: BlockHashOrNumber, - ) -> ProviderResult>>; + ) -> ProviderResult>>; /// Get transactions by block range. fn transactions_by_block_range( &self, range: impl RangeBounds, - ) -> ProviderResult>>; + ) -> ProviderResult>>; /// Get transactions by tx range. fn transactions_by_tx_range( &self, range: impl RangeBounds, - ) -> ProviderResult>; + ) -> ProviderResult>; /// Get Senders from a tx range. fn senders_by_tx_range( @@ -79,7 +81,10 @@ pub trait TransactionsProvider: BlockNumReader + Send + Sync { fn transaction_sender(&self, id: TxNumber) -> ProviderResult>; } -/// Client trait for fetching additional [TransactionSigned] related data. +/// A helper type alias to access [`TransactionsProvider::Transaction`]. +pub type ProviderTx
<P> = <P as TransactionsProvider>
::Transaction; + +/// Client trait for fetching additional transactions related data. #[auto_impl::auto_impl(&, Arc)] pub trait TransactionsProviderExt: BlockReader + Send + Sync { /// Get transactions range by block range. diff --git a/examples/db-access/src/main.rs b/examples/db-access/src/main.rs index f3b7fdf58421..179d12160535 100644 --- a/examples/db-access/src/main.rs +++ b/examples/db-access/src/main.rs @@ -4,7 +4,7 @@ use reth_chainspec::ChainSpecBuilder; use reth_db::{open_db_read_only, DatabaseEnv}; use reth_node_ethereum::EthereumNode; use reth_node_types::NodeTypesWithDBAdapter; -use reth_primitives::SealedHeader; +use reth_primitives::{SealedHeader, TransactionSigned}; use reth_provider::{ providers::StaticFileProvider, AccountReader, BlockReader, BlockSource, HeaderProvider, ProviderFactory, ReceiptProvider, StateProvider, TransactionsProvider, @@ -83,7 +83,9 @@ fn header_provider_example(provider: T, number: u64) -> eyre: } /// The `TransactionsProvider` allows querying transaction-related information -fn txs_provider_example(provider: T) -> eyre::Result<()> { +fn txs_provider_example>( + provider: T, +) -> eyre::Result<()> { // Try the 5th tx let txid = 5; @@ -160,7 +162,9 @@ fn block_provider_example(provider: T, number: u64) -> eyre::Res } /// The `ReceiptProvider` allows querying the receipts tables. -fn receipts_provider_example( +fn receipts_provider_example< + T: ReceiptProvider + TransactionsProvider + HeaderProvider, +>( provider: T, ) -> eyre::Result<()> { let txid = 5; From 047bf8630aebe79a5f79a0edfe98f42345194b56 Mon Sep 17 00:00:00 2001 From: Hai | RISE <150876604+hai-rise@users.noreply.github.com> Date: Sat, 23 Nov 2024 12:05:03 +0700 Subject: [PATCH 135/156] perf(rpc-tx-helpers): simplify clones (#12800) --- .../rpc/rpc-eth-api/src/helpers/transaction.rs | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/crates/rpc/rpc-eth-api/src/helpers/transaction.rs b/crates/rpc/rpc-eth-api/src/helpers/transaction.rs index d87a4855b1db..2223ecdc9f71 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/transaction.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/transaction.rs @@ -166,10 +166,9 @@ pub trait EthTransactions: LoadTransaction { where Self: 'static, { - let this = self.clone(); + let provider = self.provider().clone(); self.spawn_blocking_io(move |_| { - let (tx, meta) = match this - .provider() + let (tx, meta) = match provider .transaction_by_hash_with_meta(hash) .map_err(Self::Error::from_eth_err)? { @@ -177,11 +176,10 @@ pub trait EthTransactions: LoadTransaction { None => return Ok(None), }; - let receipt = - match this.provider().receipt_by_hash(hash).map_err(Self::Error::from_eth_err)? { - Some(recpt) => recpt, - None => return Ok(None), - }; + let receipt = match provider.receipt_by_hash(hash).map_err(Self::Error::from_eth_err)? 
{ + Some(recpt) => recpt, + None => return Ok(None), + }; Ok(Some((tx, meta, receipt))) }) @@ -334,7 +332,7 @@ pub trait EthTransactions: LoadTransaction { tx: Bytes, ) -> impl Future> + Send { async move { - let recovered = recover_raw_transaction(tx.clone())?; + let recovered = recover_raw_transaction(tx)?; let pool_transaction = ::Transaction::from_pooled(recovered.into()); From f8a88c50d19d3749df94eb52dd009738552e31d6 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Sat, 23 Nov 2024 06:21:20 +0100 Subject: [PATCH 136/156] chore: add missing from impl (#12801) --- crates/primitives/src/transaction/pooled.rs | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/crates/primitives/src/transaction/pooled.rs b/crates/primitives/src/transaction/pooled.rs index 09111c61a170..e526eb3894fc 100644 --- a/crates/primitives/src/transaction/pooled.rs +++ b/crates/primitives/src/transaction/pooled.rs @@ -572,6 +572,12 @@ impl InMemorySize for PooledTransactionsElement { } } +impl From for PooledTransactionsElement { + fn from(recovered: PooledTransactionsElementEcRecovered) -> Self { + recovered.into_transaction() + } +} + impl TryFrom for PooledTransactionsElement { type Error = TransactionConversionError; From 13786c76d440e9db157d0bf50722ae6ed13e950c Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Sat, 23 Nov 2024 06:21:29 +0100 Subject: [PATCH 137/156] chore: use inmemory size functions for tx impl (#12802) --- crates/primitives/src/transaction/mod.rs | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/crates/primitives/src/transaction/mod.rs b/crates/primitives/src/transaction/mod.rs index cc966154c09c..abbf4d402486 100644 --- a/crates/primitives/src/transaction/mod.rs +++ b/crates/primitives/src/transaction/mod.rs @@ -14,10 +14,7 @@ use alloy_primitives::{ keccak256, Address, Bytes, ChainId, PrimitiveSignature as Signature, TxHash, TxKind, B256, U256, }; use alloy_rlp::{Decodable, Encodable, Error as RlpError, Header}; -use core::{ - hash::{Hash, Hasher}, - mem, -}; +use core::hash::{Hash, Hasher}; use derive_more::{AsRef, Deref}; use once_cell as _; #[cfg(not(feature = "std"))] @@ -1461,7 +1458,7 @@ impl InMemorySize for TransactionSigned { /// Calculate a heuristic for the in-memory size of the [`TransactionSigned`]. 
#[inline] fn size(&self) -> usize { - mem::size_of::() + self.transaction.size() + mem::size_of::() + self.hash().size() + self.transaction.size() + self.signature().size() } } From 0d17f14e3d5ffdc6bfb602a6cb06b24b398fd0d8 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Sat, 23 Nov 2024 08:08:41 +0100 Subject: [PATCH 138/156] perf: inline pooled transactions max (#12805) --- crates/transaction-pool/src/lib.rs | 2 +- crates/transaction-pool/src/pool/mod.rs | 8 ++++++++ 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/crates/transaction-pool/src/lib.rs b/crates/transaction-pool/src/lib.rs index 8d11d7595b14..3194ebba6f8b 100644 --- a/crates/transaction-pool/src/lib.rs +++ b/crates/transaction-pool/src/lib.rs @@ -409,7 +409,7 @@ where &self, max: usize, ) -> Vec>> { - self.pooled_transactions().into_iter().take(max).collect() + self.pool.pooled_transactions_max(max) } fn get_pooled_transaction_elements( diff --git a/crates/transaction-pool/src/pool/mod.rs b/crates/transaction-pool/src/pool/mod.rs index 3f7ecfa7836f..8c17da783acf 100644 --- a/crates/transaction-pool/src/pool/mod.rs +++ b/crates/transaction-pool/src/pool/mod.rs @@ -304,6 +304,14 @@ where self.get_pool_data().all().transactions_iter().filter(|tx| tx.propagate).collect() } + /// Returns only the first `max` transactions in the pool. + pub(crate) fn pooled_transactions_max( + &self, + max: usize, + ) -> Vec>> { + self.get_pool_data().all().transactions_iter().filter(|tx| tx.propagate).take(max).collect() + } + /// Returns the [`BlobTransaction`] for the given transaction if the sidecar exists. /// /// Caution: this assumes the given transaction is eip-4844 From ebb4fc2bb65aaa3ede9155bdd273cb7fcc773e3b Mon Sep 17 00:00:00 2001 From: Z <12710516+zitup@users.noreply.github.com> Date: Sat, 23 Nov 2024 19:43:14 +0800 Subject: [PATCH 139/156] chore(sdk): Add MaybeArbitrary as super trait (#12661) Co-authored-by: Emilia Hane --- Cargo.lock | 242 +++++++++--------- crates/optimism/node/Cargo.toml | 1 + crates/optimism/primitives/Cargo.toml | 14 + crates/optimism/primitives/src/tx_type.rs | 7 +- crates/primitives-traits/src/block/body.rs | 3 +- crates/primitives-traits/src/block/header.rs | 4 +- crates/primitives-traits/src/block/mod.rs | 16 +- crates/primitives-traits/src/header/sealed.rs | 7 +- crates/primitives-traits/src/receipt.rs | 5 +- .../src/transaction/tx_type.rs | 3 +- 10 files changed, 171 insertions(+), 131 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 0f3d34cfba5f..e0cfc8390293 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -418,7 +418,7 @@ checksum = "2b09cae092c27b6f1bde952653a22708691802e57bfef4a2973b80bea21efd3f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -643,7 +643,7 @@ dependencies = [ "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -659,7 +659,7 @@ dependencies = [ "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", "syn-solidity", "tiny-keccak", ] @@ -675,7 +675,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", "syn-solidity", ] @@ -881,7 +881,7 @@ dependencies = [ "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -1104,7 +1104,7 @@ checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -1115,7 +1115,7 @@ checksum = "721cae7de5c34fbb2acd27e21e6d2cf7b886dce0c27388d46c4e6c47ea4318dd" 
dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -1153,7 +1153,7 @@ checksum = "3c87f3f15e7794432337fc718554eaa4dc8f04c9677a950ffe366f20a162ae42" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -1259,7 +1259,7 @@ dependencies = [ "regex", "rustc-hash 1.1.0", "shlex", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -1441,7 +1441,7 @@ checksum = "240f4126219a83519bad05c9a40bfc0303921eeb571fc2d7e44c17ffac99d3f1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", "synstructure", ] @@ -1548,9 +1548,9 @@ checksum = "c3ac9f8b63eca6fd385229b3675f6cc0dc5c8a5c8a54a59d4f52ffd670d87b0c" [[package]] name = "bytemuck" -version = "1.19.0" +version = "1.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8334215b81e418a0a7bdb8ef0849474f40bb10c8b71f1c4ed315cff49f32494d" +checksum = "8b37c88a63ffd85d15b406896cc343916d7cf57838a847b3a6f2ca5d39a5695a" dependencies = [ "bytemuck_derive", ] @@ -1563,7 +1563,7 @@ checksum = "bcfcc3cd946cb52f0bbfdbbcfa2f4e24f75ebb6c0e1002f7c25904fada18b9ec" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -1781,7 +1781,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -2225,7 +2225,7 @@ checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -2249,7 +2249,7 @@ dependencies = [ "proc-macro2", "quote", "strsim", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -2260,7 +2260,7 @@ checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" dependencies = [ "darling_core", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -2382,7 +2382,7 @@ checksum = "30542c1ad912e0e3d22a1935c290e12e8a29d704a420177a31faad4a601a0800" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -2393,7 +2393,7 @@ checksum = "5f33878137e4dafd7fa914ad4e259e18a4e8e532b9617a2d0150262bf53abfce" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -2414,7 +2414,7 @@ dependencies = [ "convert_case", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", "unicode-xid", ] @@ -2528,7 +2528,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -2678,7 +2678,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -2689,7 +2689,7 @@ checksum = "2f9ed6b3789237c8a0c1c505af1c7eb2c560df6186f01b098c3a1064ea532f38" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -2746,7 +2746,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -3306,7 +3306,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -3485,9 +3485,9 @@ dependencies = [ [[package]] name = "h2" -version = "0.4.6" +version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "524e8ac6999421f49a846c2d4411f337e53497d8ec55d67753beffa43c5d9205" +checksum = "ccae279728d634d083c00f6099cb58f01cc99c145b84b8be2f6c74618d79922e" dependencies = [ "atomic-waker", "bytes", @@ -3750,9 +3750,9 @@ dependencies = [ [[package]] name = "hyper" 
-version = "1.5.0" +version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbbff0a806a4728c99295b254c8838933b5b082d75e3cb70c8dab21fdfbcfa9a" +checksum = "97818827ef4f364230e16705d4706e2897df2bb60617d6ca15d598025a3c481f" dependencies = [ "bytes", "futures-channel", @@ -3832,7 +3832,7 @@ dependencies = [ "quote", "serde", "serde_json", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -3982,7 +3982,7 @@ checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -4043,13 +4043,13 @@ dependencies = [ [[package]] name = "impl-trait-for-tuples" -version = "0.2.2" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11d7a9f6330b71fea57921c9b61c47ee6e84f72d394754eff6163ae67e7395eb" +checksum = "a0eb5a3343abf848c0984fe4604b2b105da9539376e24fc0a3b0007411ae4fd9" dependencies = [ "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.89", ] [[package]] @@ -4171,7 +4171,7 @@ dependencies = [ "pretty_assertions", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -4272,9 +4272,9 @@ dependencies = [ [[package]] name = "itoa" -version = "1.0.11" +version = "1.0.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" +checksum = "540654e97a3f4470a492cd30ff187bc95d89557a903a2bbf112e2fae98104ef2" [[package]] name = "jni" @@ -4419,7 +4419,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -4579,9 +4579,9 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.162" +version = "0.2.164" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18d287de67fe55fd7e1581fe933d965a5a9477b38e949cfa9f8574ef01506398" +checksum = "433bfe06b8c75da9b2e3fbea6e5329ff87748f0b144ef75306e674c3f6f7c13f" [[package]] name = "libloading" @@ -4601,9 +4601,9 @@ checksum = "8355be11b20d696c8f18f6cc018c4e372165b1fa8126cef092399c9951984ffa" [[package]] name = "libp2p-identity" -version = "0.2.9" +version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55cca1eb2bc1fd29f099f3daaab7effd01e1a54b7c577d0ed082521034d912e8" +checksum = "257b5621d159b32282eac446bed6670c39c7dc68a200a992d8f056afa0066f6d" dependencies = [ "asn1_der", "bs58", @@ -4837,7 +4837,7 @@ dependencies = [ "proc-macro2", "quote", "regex", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -4964,9 +4964,9 @@ dependencies = [ [[package]] name = "mockall" -version = "0.13.0" +version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4c28b3fb6d753d28c20e826cd46ee611fda1cf3cde03a443a974043247c065a" +checksum = "39a6bfcc6c8c7eed5ee98b9c3e33adc726054389233e201c95dab2d41a3839d2" dependencies = [ "cfg-if", "downcast", @@ -4978,14 +4978,14 @@ dependencies = [ [[package]] name = "mockall_derive" -version = "0.13.0" +version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "341014e7f530314e9a1fdbc7400b244efea7122662c96bfa248c31da5bfb2020" +checksum = "25ca3004c2efe9011bd4e461bd8256445052b9615405b4f7ea43fc8ca5c20898" dependencies = [ "cfg-if", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -5233,7 +5233,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -5286,9 +5286,9 @@ checksum = 
"b410bbe7e14ab526a0e86877eb47c6996a2bd7746f027ba551028c925390e4e9" [[package]] name = "op-alloy-consensus" -version = "0.6.7" +version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72da577a88d35b893fae6467112651f26ef023434c196b2a0b3dc75bc853e0e4" +checksum = "fce158d886815d419222daa67fcdf949a34f7950653a4498ebeb4963331f70ed" dependencies = [ "alloy-consensus", "alloy-eips", @@ -5304,9 +5304,9 @@ dependencies = [ [[package]] name = "op-alloy-genesis" -version = "0.6.7" +version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "818180672dd14ca6642fb57942e1cbd602669f42b6e0222b7ea9bbcae065d67e" +checksum = "2734e9a65efb90fe4520303f984c124766b7d2f2e5dd51cbe54d6269c85a3c91" dependencies = [ "alloy-consensus", "alloy-eips", @@ -5319,9 +5319,9 @@ dependencies = [ [[package]] name = "op-alloy-network" -version = "0.6.7" +version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12f82e805bad171ceae2af45efaecf8d0b50622cff3473e3c998ff1dd340de35" +checksum = "87e4aef8ed017004a176ab1de49df419f59c0fb4a6ce3b693a10fe099fe1afe7" dependencies = [ "alloy-consensus", "alloy-network", @@ -5334,9 +5334,9 @@ dependencies = [ [[package]] name = "op-alloy-protocol" -version = "0.6.7" +version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1803a1ac96203b8f713b1fa9b7509c46c645ca7bc22b582761a7495e999d4301" +checksum = "6c68a3e2770890da3ad2fd20d7fe0c8e15672707577b4168a60e388c8eceaca0" dependencies = [ "alloc-no-stdlib", "alloy-consensus", @@ -5357,9 +5357,9 @@ dependencies = [ [[package]] name = "op-alloy-rpc-types" -version = "0.6.7" +version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a838c125256e02e2f9da88c51e263b02a06cda7e60382fe2551a3385b516f5bb" +checksum = "060ebeaea8c772e396215f69bb86d231ec8b7f36aca0dd6ce367ceaa9a8c33e6" dependencies = [ "alloy-consensus", "alloy-eips", @@ -5376,9 +5376,9 @@ dependencies = [ [[package]] name = "op-alloy-rpc-types-engine" -version = "0.6.7" +version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c227fcc7d81d4023363ba12406e57ebcc1c7cbb1075c38ea471ae32138d4706d" +checksum = "864dbd5511ef4ef00b6c2c980739259b25b24048007b7751ca0069b30b1e3fee" dependencies = [ "alloy-eips", "alloy-primitives", @@ -5470,9 +5470,9 @@ dependencies = [ [[package]] name = "parity-scale-codec" -version = "3.6.12" +version = "3.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "306800abfa29c7f16596b5970a588435e3d5b3149683d00c12b699cc19f895ee" +checksum = "8be4817d39f3272f69c59fe05d0535ae6456c2dc2fa1ba02910296c7e0a5c590" dependencies = [ "arbitrary", "arrayvec", @@ -5481,19 +5481,20 @@ dependencies = [ "bytes", "impl-trait-for-tuples", "parity-scale-codec-derive", + "rustversion", "serde", ] [[package]] name = "parity-scale-codec-derive" -version = "3.6.12" +version = "3.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d830939c76d294956402033aee57a6da7b438f2294eb94864c37b0569053a42c" +checksum = "8781a75c6205af67215f382092b6e0a4ff3734798523e69073d4bcd294ec767b" dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.89", ] [[package]] @@ -5608,7 +5609,7 @@ dependencies = [ "phf_shared", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -5637,7 +5638,7 @@ checksum = "3c0f5fad0874fc7abcd4d750e76917eaebbecaa2c20bde22e1dbeeba8beb758c" dependencies = [ 
"proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -5809,7 +5810,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "64d1ec885c64d0457d564db4ec299b2dae3f9c02808b8ad9c3a089c591b18033" dependencies = [ "proc-macro2", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -5860,14 +5861,14 @@ dependencies = [ "proc-macro-error-attr2", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] name = "proc-macro2" -version = "1.0.89" +version = "1.0.91" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f139b0662de085916d1fb67d2b4169d1addddda1919e696f3252b740b629986e" +checksum = "307e3004becf10f5a6e0d59d20f3cd28231b0e0827a96cd3e0ce6d14bc1e4bb3" dependencies = [ "unicode-ident", ] @@ -5958,7 +5959,7 @@ checksum = "6ff7ff745a347b87471d859a377a9a404361e7efc2a971d73424a6d183c0fc77" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -6315,7 +6316,7 @@ dependencies = [ "serde", "serde_json", "serde_urlencoded", - "sync_wrapper 1.0.1", + "sync_wrapper 1.0.2", "tokio", "tokio-rustls", "tokio-util", @@ -6770,7 +6771,7 @@ dependencies = [ "proc-macro2", "quote", "similar-asserts", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -8388,6 +8389,7 @@ dependencies = [ "alloy-eips", "alloy-primitives", "alloy-rlp", + "arbitrary", "bytes", "derive_more 1.0.0", "op-alloy-consensus", @@ -9682,7 +9684,7 @@ dependencies = [ "regex", "relative-path", "rustc_version 0.4.1", - "syn 2.0.87", + "syn 2.0.89", "unicode-ident", ] @@ -9764,9 +9766,9 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.40" +version = "0.38.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "99e4ea3e1cdc4b559b8e5650f9c8e5998e3e5c1343b4eaf034565f32318d63c0" +checksum = "d7f649912bc1495e167a6edee79151c84b1bad49748cb4f1f1167f459f6224f6" dependencies = [ "bitflags 2.6.0", "errno", @@ -9777,9 +9779,9 @@ dependencies = [ [[package]] name = "rustls" -version = "0.23.16" +version = "0.23.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eee87ff5d9b36712a58574e12e9f0ea80f915a5b0ac518d322b24a465617925e" +checksum = "7f1a745511c54ba6d4465e8d5dfbd81b45791756de28d4981af70d6dca128f1e" dependencies = [ "log", "once_cell", @@ -9922,9 +9924,9 @@ dependencies = [ [[package]] name = "schannel" -version = "0.1.26" +version = "0.1.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01227be5826fa0690321a2ba6c5cd57a19cf3f6a09e76973b58e61de6ab9d1c1" +checksum = "1f29ebaa345f945cec9fbbc532eb307f0fdad8161f281b6369539c8d84876b3d" dependencies = [ "windows-sys 0.59.0", ] @@ -10036,9 +10038,9 @@ dependencies = [ [[package]] name = "semver-parser" -version = "0.10.2" +version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00b0bef5b7f9e0df16536d3961cfb6e84331c065b4066afb39768d0e319411f7" +checksum = "9900206b54a3527fdc7b8a938bffd94a568bac4f4aa8113b209df75a09c0dec2" dependencies = [ "pest", ] @@ -10072,14 +10074,14 @@ checksum = "ad1e866f866923f252f05c889987993144fb74e722403468a4ebd70c3cd756c0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] name = "serde_json" -version = "1.0.132" +version = "1.0.133" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d726bfaff4b320266d395898905d0eba0345aae23b54aee3a737e260fd46db03" +checksum = "c7fceb2473b9166b2294ef05efcb65a3db80803f0b03ef86a5fc88a2b85ee377" dependencies = [ "indexmap 2.6.0", "itoa", @@ 
-10107,7 +10109,7 @@ checksum = "6c64451ba24fc7a6a2d60fc75dd9c83c90903b19028d4eff35e88fc1e86564e9" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -10158,7 +10160,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -10181,7 +10183,7 @@ checksum = "5d69265a08751de7844521fd15003ae0a888e035773ba05695c5c759a6f89eef" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -10464,7 +10466,7 @@ dependencies = [ "proc-macro2", "quote", "rustversion", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -10522,9 +10524,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.87" +version = "2.0.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25aa4ce346d03a6dcd68dd8b4010bcb74e54e62c90c573f394c46eae99aba32d" +checksum = "44d46482f1c1c87acd84dea20c1bf5ebff4c757009ed6bf19cfd36fb10e92c4e" dependencies = [ "proc-macro2", "quote", @@ -10540,7 +10542,7 @@ dependencies = [ "paste", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -10551,9 +10553,9 @@ checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" [[package]] name = "sync_wrapper" -version = "1.0.1" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7065abeca94b6a8a577f9bd45aa0867a2238b74e8eb67cf10d492bc39351394" +checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" dependencies = [ "futures-core", ] @@ -10566,7 +10568,7 @@ checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -10643,7 +10645,7 @@ dependencies = [ "prettyplease", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -10691,7 +10693,7 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -10702,7 +10704,7 @@ checksum = "f077553d607adc1caf65430528a576c757a71ed73944b66ebb58ef2bbd243568" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -10879,7 +10881,7 @@ checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -11007,9 +11009,9 @@ dependencies = [ [[package]] name = "tower-http" -version = "0.6.1" +version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8437150ab6bbc8c5f0f519e3d5ed4aa883a83dd4cdd3d1b21f9482936046cb97" +checksum = "403fa3b783d4b626a8ad51d766ab03cb6d2dbfc46b1c5d4448395e6628dc9697" dependencies = [ "async-compression", "base64 0.22.1", @@ -11080,7 +11082,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -11329,9 +11331,9 @@ checksum = "5ab17db44d7388991a428b2ee655ce0c212e862eff1768a455c58f9aad6e7893" [[package]] name = "unicode-ident" -version = "1.0.13" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e91b56cd4cadaeb79bbf1a5645f6b4f8dc5bde8834ad5894a8db35fda9efa1fe" +checksum = "adb9e6ca4f869e1180728b7950e35922a7fc6397f7b641499e8f3ef06e50dc83" [[package]] name = "unicode-normalization" @@ -11478,7 +11480,7 @@ checksum = "d674d135b4a8c1d7e813e2f8d1c9a58308aee4a680323066025e53132218bd91" 
dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -11549,7 +11551,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", "wasm-bindgen-shared", ] @@ -11583,7 +11585,7 @@ checksum = "26c6ab57572f7a24a4985830b120de1594465e5d500f24afe89e16b4e833ef68" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -11609,9 +11611,9 @@ dependencies = [ [[package]] name = "wasmtimer" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb4f099acbc1043cc752b91615b24b02d7f6fcd975bd781fed9f50b3c3e15bf7" +checksum = "0048ad49a55b9deb3953841fa1fc5858f0efbcb7a18868c899a360269fac1b23" dependencies = [ "futures", "js-sys", @@ -11749,7 +11751,7 @@ checksum = "9107ddc059d5b6fbfbffdfa7a7fe3e22a226def0b2608f72e9d552763d3e1ad7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -11760,7 +11762,7 @@ checksum = "2bbd5b46c938e506ecbce286b6628a02171d56153ba733b6c741fc627ec9579b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -11771,7 +11773,7 @@ checksum = "29bee4b38ea3cde66011baa44dba677c432a78593e202392d1e9070cf2a7fca7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -11782,7 +11784,7 @@ checksum = "053c4c462dc91d3b1504c6fe5a726dd15e216ba718e84a0e46a88fbe5ded3515" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -12057,7 +12059,7 @@ checksum = "28cc31741b18cb6f1d5ff12f5b7523e3d6eb0852bbbad19d73905511d9849b95" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", "synstructure", ] @@ -12079,7 +12081,7 @@ checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -12099,7 +12101,7 @@ checksum = "0ea7b4a3637ea8669cedf0f1fd5c286a17f3de97b8dd5a70a6c167a1730e63a5" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", "synstructure", ] @@ -12120,7 +12122,7 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -12142,7 +12144,7 @@ checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] diff --git a/crates/optimism/node/Cargo.toml b/crates/optimism/node/Cargo.toml index 18ceee8ef8b7..fbc055a82e98 100644 --- a/crates/optimism/node/Cargo.toml +++ b/crates/optimism/node/Cargo.toml @@ -119,6 +119,7 @@ test-utils = [ "reth-trie-db/test-utils", "revm/test-utils", "reth-optimism-node/test-utils", + "reth-optimism-primitives/arbitrary", ] reth-codec = [ "reth-primitives/reth-codec", diff --git a/crates/optimism/primitives/Cargo.toml b/crates/optimism/primitives/Cargo.toml index fc368807736f..33f936b2fd13 100644 --- a/crates/optimism/primitives/Cargo.toml +++ b/crates/optimism/primitives/Cargo.toml @@ -34,9 +34,13 @@ serde = { workspace = true, optional = true } # misc derive_more.workspace = true +# test-utils +arbitrary = { workspace = true, features = ["derive"], optional = true } + [dev-dependencies] reth-codecs = { workspace = true, features = ["test-utils"] } rstest.workspace = true +arbitrary.workspace = true [features] default = ["std", "reth-codec"] @@ -65,3 +69,13 @@ serde = [ 
"reth-codecs/serde", "op-alloy-consensus/serde", ] +arbitrary = [ + "dep:arbitrary", + "reth-primitives-traits/arbitrary", + "reth-primitives/arbitrary", + "reth-codecs?/arbitrary", + "op-alloy-consensus/arbitrary", + "alloy-consensus/arbitrary", + "alloy-eips/arbitrary", + "alloy-primitives/arbitrary", +] \ No newline at end of file diff --git a/crates/optimism/primitives/src/tx_type.rs b/crates/optimism/primitives/src/tx_type.rs index c6e7fcc0a806..9976221b4240 100644 --- a/crates/optimism/primitives/src/tx_type.rs +++ b/crates/optimism/primitives/src/tx_type.rs @@ -2,10 +2,11 @@ //! `OpTxType` implements `reth_primitives_traits::TxType`. //! This type is required because a `Compact` impl is needed on the deposit tx type. +use core::fmt::Debug; + use alloy_primitives::{U64, U8}; use alloy_rlp::{Decodable, Encodable, Error}; use bytes::BufMut; -use core::fmt::Debug; use derive_more::{ derive::{From, Into}, Display, @@ -13,8 +14,10 @@ use derive_more::{ use op_alloy_consensus::OpTxType as AlloyOpTxType; use reth_primitives_traits::{InMemorySize, TxType}; -/// Wrapper type for [`op_alloy_consensus::OpTxType`] to implement [`TxType`] trait. +/// Wrapper type for [`op_alloy_consensus::OpTxType`] to implement +/// [`TxType`] trait. #[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Display, Ord, Hash, From, Into)] +#[cfg_attr(any(test, feature = "arbitrary"), derive(arbitrary::Arbitrary))] #[into(u8)] pub struct OpTxType(AlloyOpTxType); diff --git a/crates/primitives-traits/src/block/body.rs b/crates/primitives-traits/src/block/body.rs index ff41536ba3f0..fd7f7f1c631f 100644 --- a/crates/primitives-traits/src/block/body.rs +++ b/crates/primitives-traits/src/block/body.rs @@ -4,7 +4,7 @@ use alloc::fmt; use alloy_consensus::Transaction; -use crate::{FullSignedTx, InMemorySize, MaybeSerde}; +use crate::{FullSignedTx, InMemorySize, MaybeArbitrary, MaybeSerde}; /// Helper trait that unifies all behaviour required by transaction to support full node operations. pub trait FullBlockBody: BlockBody {} @@ -26,6 +26,7 @@ pub trait BlockBody: + alloy_rlp::Decodable + InMemorySize + MaybeSerde + + MaybeArbitrary { /// Ordered list of signed transactions as committed in block. type Transaction: Transaction; diff --git a/crates/primitives-traits/src/block/header.rs b/crates/primitives-traits/src/block/header.rs index 695e63ed10ee..26806808532b 100644 --- a/crates/primitives-traits/src/block/header.rs +++ b/crates/primitives-traits/src/block/header.rs @@ -4,7 +4,7 @@ use core::fmt; use alloy_primitives::Sealable; -use crate::{InMemorySize, MaybeCompact, MaybeSerde}; +use crate::{InMemorySize, MaybeArbitrary, MaybeCompact, MaybeSerde}; /// Helper trait that unifies all behaviour required by block header to support full node /// operations. 
@@ -28,6 +28,7 @@ pub trait BlockHeader: + Sealable + InMemorySize + MaybeSerde + + MaybeArbitrary { } @@ -46,5 +47,6 @@ impl BlockHeader for T where + Sealable + InMemorySize + MaybeSerde + + MaybeArbitrary { } diff --git a/crates/primitives-traits/src/block/mod.rs b/crates/primitives-traits/src/block/mod.rs index 3f4fbd343eec..c0f5a1ffc63c 100644 --- a/crates/primitives-traits/src/block/mod.rs +++ b/crates/primitives-traits/src/block/mod.rs @@ -5,7 +5,9 @@ pub mod header; use alloc::fmt; -use crate::{BlockHeader, FullBlockBody, FullBlockHeader, InMemorySize, MaybeSerde}; +use crate::{ + BlockHeader, FullBlockBody, FullBlockHeader, InMemorySize, MaybeArbitrary, MaybeSerde, +}; /// Helper trait that unifies all behaviour required by block to support full node operations. pub trait FullBlock: @@ -26,7 +28,17 @@ impl FullBlock for T where // senders #[auto_impl::auto_impl(&, Arc)] pub trait Block: - Send + Sync + Unpin + Clone + Default + fmt::Debug + PartialEq + Eq + InMemorySize + MaybeSerde + Send + + Sync + + Unpin + + Clone + + Default + + fmt::Debug + + PartialEq + + Eq + + InMemorySize + + MaybeSerde + + MaybeArbitrary { /// Header part of the block. type Header: BlockHeader + 'static; diff --git a/crates/primitives-traits/src/header/sealed.rs b/crates/primitives-traits/src/header/sealed.rs index f0a6869ed1e9..08add0ac3c15 100644 --- a/crates/primitives-traits/src/header/sealed.rs +++ b/crates/primitives-traits/src/header/sealed.rs @@ -159,9 +159,12 @@ impl From> for Sealed { } #[cfg(any(test, feature = "arbitrary"))] -impl<'a> arbitrary::Arbitrary<'a> for SealedHeader { +impl<'a, H> arbitrary::Arbitrary<'a> for SealedHeader +where + H: for<'b> arbitrary::Arbitrary<'b> + Sealable, +{ fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result { - let header = Header::arbitrary(u)?; + let header = H::arbitrary(u)?; Ok(Self::seal(header)) } diff --git a/crates/primitives-traits/src/receipt.rs b/crates/primitives-traits/src/receipt.rs index 4370d2ac00f7..e2af40c447ed 100644 --- a/crates/primitives-traits/src/receipt.rs +++ b/crates/primitives-traits/src/receipt.rs @@ -1,12 +1,12 @@ //! Receipt abstraction +use alloc::vec::Vec; use core::fmt; -use alloc::vec::Vec; use alloy_consensus::TxReceipt; use alloy_primitives::B256; -use crate::{InMemorySize, MaybeCompact, MaybeSerde}; +use crate::{InMemorySize, MaybeArbitrary, MaybeCompact, MaybeSerde}; /// Helper trait that unifies all behaviour required by receipt to support full node operations. pub trait FullReceipt: Receipt + MaybeCompact {} @@ -27,6 +27,7 @@ pub trait Receipt: + alloy_rlp::Decodable + MaybeSerde + InMemorySize + + MaybeArbitrary { /// Returns transaction type. fn tx_type(&self) -> u8; diff --git a/crates/primitives-traits/src/transaction/tx_type.rs b/crates/primitives-traits/src/transaction/tx_type.rs index 931fcb773bf4..d2caebe4c9f1 100644 --- a/crates/primitives-traits/src/transaction/tx_type.rs +++ b/crates/primitives-traits/src/transaction/tx_type.rs @@ -4,7 +4,7 @@ use core::fmt; use alloy_primitives::{U64, U8}; -use crate::{InMemorySize, MaybeCompact}; +use crate::{InMemorySize, MaybeArbitrary, MaybeCompact}; /// Helper trait that unifies all behaviour required by transaction type ID to support full node /// operations. @@ -33,6 +33,7 @@ pub trait TxType: + alloy_rlp::Encodable + alloy_rlp::Decodable + InMemorySize + + MaybeArbitrary { /// Returns `true` if this is a legacy transaction. 
fn is_legacy(&self) -> bool; From e3ffb3f43b3717990a94e21ab5c3fc732a4234a4 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Sat, 23 Nov 2024 15:22:49 +0100 Subject: [PATCH 140/156] fix: add arbitrary feature for op cli dev (#12807) --- crates/optimism/cli/Cargo.toml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/crates/optimism/cli/Cargo.toml b/crates/optimism/cli/Cargo.toml index d090075927aa..ba36568efe86 100644 --- a/crates/optimism/cli/Cargo.toml +++ b/crates/optimism/cli/Cargo.toml @@ -111,7 +111,8 @@ jemalloc = [ dev = [ "dep:proptest", - "reth-cli-commands/arbitrary" + "reth-cli-commands/arbitrary", + "reth-optimism-primitives/arbitrary" ] serde = [ "alloy-consensus?/serde", From 69e54da04983bd8b4f5c3c17d8d3bc63ab0fe181 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Sat, 23 Nov 2024 15:24:17 +0100 Subject: [PATCH 141/156] chore: remove duplicated functions (#12804) --- Cargo.lock | 1 + crates/optimism/evm/src/l1.rs | 1 + crates/primitives/src/transaction/mod.rs | 102 ++---------------- crates/primitives/src/transaction/pooled.rs | 1 + .../rpc/rpc-types-compat/src/transaction.rs | 2 +- crates/rpc/rpc/src/eth/bundle.rs | 8 +- crates/rpc/rpc/src/eth/helpers/signer.rs | 1 + .../beacon-api-sidecar-fetcher/Cargo.toml | 1 + .../src/mined_sidecar.rs | 1 + 9 files changed, 20 insertions(+), 98 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index e0cfc8390293..9ae0574414e0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2759,6 +2759,7 @@ checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" name = "example-beacon-api-sidecar-fetcher" version = "0.1.0" dependencies = [ + "alloy-consensus", "alloy-primitives", "alloy-rpc-types-beacon", "clap", diff --git a/crates/optimism/evm/src/l1.rs b/crates/optimism/evm/src/l1.rs index 9d3e76fb442b..ef8c3f3b3dbe 100644 --- a/crates/optimism/evm/src/l1.rs +++ b/crates/optimism/evm/src/l1.rs @@ -2,6 +2,7 @@ use crate::OpBlockExecutionError; use alloc::{string::ToString, sync::Arc}; +use alloy_consensus::Transaction; use alloy_primitives::{address, b256, hex, Address, Bytes, B256, U256}; use reth_chainspec::ChainSpec; use reth_execution_errors::BlockExecutionError; diff --git a/crates/primitives/src/transaction/mod.rs b/crates/primitives/src/transaction/mod.rs index abbf4d402486..af0529132b9d 100644 --- a/crates/primitives/src/transaction/mod.rs +++ b/crates/primitives/src/transaction/mod.rs @@ -219,29 +219,6 @@ impl Transaction { } } - /// Gets the transaction's [`TxKind`], which is the address of the recipient or - /// [`TxKind::Create`] if the transaction is a contract creation. - pub const fn kind(&self) -> TxKind { - match self { - Self::Legacy(TxLegacy { to, .. }) | - Self::Eip2930(TxEip2930 { to, .. }) | - Self::Eip1559(TxEip1559 { to, .. }) => *to, - Self::Eip4844(TxEip4844 { to, .. }) | Self::Eip7702(TxEip7702 { to, .. }) => { - TxKind::Call(*to) - } - #[cfg(feature = "optimism")] - Self::Deposit(TxDeposit { to, .. }) => *to, - } - } - - /// Get the transaction's address of the contract that will be called, or the address that will - /// receive the transfer. - /// - /// Returns `None` if this is a `CREATE` transaction. - pub fn to(&self) -> Option

{ - self.kind().to().copied() - } - /// Get the transaction's type pub const fn tx_type(&self) -> TxType { match self { @@ -255,56 +232,6 @@ impl Transaction { } } - /// Returns the [`AccessList`] of the transaction. - /// - /// Returns `None` for legacy transactions. - pub const fn access_list(&self) -> Option<&AccessList> { - match self { - Self::Legacy(_) => None, - Self::Eip2930(tx) => Some(&tx.access_list), - Self::Eip1559(tx) => Some(&tx.access_list), - Self::Eip4844(tx) => Some(&tx.access_list), - Self::Eip7702(tx) => Some(&tx.access_list), - #[cfg(feature = "optimism")] - Self::Deposit(_) => None, - } - } - - /// Returns the [`SignedAuthorization`] list of the transaction. - /// - /// Returns `None` if this transaction is not EIP-7702. - pub fn authorization_list(&self) -> Option<&[SignedAuthorization]> { - match self { - Self::Eip7702(tx) => Some(&tx.authorization_list), - _ => None, - } - } - - /// Returns true if the tx supports dynamic fees - pub const fn is_dynamic_fee(&self) -> bool { - match self { - Self::Legacy(_) | Self::Eip2930(_) => false, - Self::Eip1559(_) | Self::Eip4844(_) | Self::Eip7702(_) => true, - #[cfg(feature = "optimism")] - Self::Deposit(_) => false, - } - } - - /// Blob versioned hashes for eip4844 transaction, for legacy, eip1559, eip2930 and eip7702 - /// transactions this is `None` - /// - /// This is also commonly referred to as the "blob versioned hashes" (`BlobVersionedHashes`). - pub fn blob_versioned_hashes(&self) -> Option> { - match self { - Self::Legacy(_) | Self::Eip2930(_) | Self::Eip1559(_) | Self::Eip7702(_) => None, - Self::Eip4844(TxEip4844 { blob_versioned_hashes, .. }) => { - Some(blob_versioned_hashes.clone()) - } - #[cfg(feature = "optimism")] - Self::Deposit(_) => None, - } - } - /// Returns the blob gas used for all blobs of the EIP-4844 transaction if it is an EIP-4844 /// transaction. /// @@ -345,19 +272,6 @@ impl Transaction { } } - /// Get the transaction's input field. - pub const fn input(&self) -> &Bytes { - match self { - Self::Legacy(TxLegacy { input, .. }) | - Self::Eip2930(TxEip2930 { input, .. }) | - Self::Eip1559(TxEip1559 { input, .. }) | - Self::Eip4844(TxEip4844 { input, .. }) | - Self::Eip7702(TxEip7702 { input, .. }) => input, - #[cfg(feature = "optimism")] - Self::Deposit(TxDeposit { input, .. }) => input, - } - } - /// This encodes the transaction _without_ the signature, and is only suitable for creating a /// hash intended for signing. pub fn encode_for_signing(&self, out: &mut dyn bytes::BufMut) { @@ -2097,13 +2011,15 @@ mod tests { assert_eq!( tx.blob_versioned_hashes(), - Some(vec![ - b256!("012ec3d6f66766bedb002a190126b3549fce0047de0d4c25cffce0dc1c57921a"), - b256!("0152d8e24762ff22b1cfd9f8c0683786a7ca63ba49973818b3d1e9512cd2cec4"), - b256!("013b98c6c83e066d5b14af2b85199e3d4fc7d1e778dd53130d180f5077e2d1c7"), - b256!("01148b495d6e859114e670ca54fb6e2657f0cbae5b08063605093a4b3dc9f8f1"), - b256!("011ac212f13c5dff2b2c6b600a79635103d6f580a4221079951181b25c7e6549"), - ]) + Some( + &[ + b256!("012ec3d6f66766bedb002a190126b3549fce0047de0d4c25cffce0dc1c57921a"), + b256!("0152d8e24762ff22b1cfd9f8c0683786a7ca63ba49973818b3d1e9512cd2cec4"), + b256!("013b98c6c83e066d5b14af2b85199e3d4fc7d1e778dd53130d180f5077e2d1c7"), + b256!("01148b495d6e859114e670ca54fb6e2657f0cbae5b08063605093a4b3dc9f8f1"), + b256!("011ac212f13c5dff2b2c6b600a79635103d6f580a4221079951181b25c7e6549"), + ][..] 
+ ) ); } diff --git a/crates/primitives/src/transaction/pooled.rs b/crates/primitives/src/transaction/pooled.rs index e526eb3894fc..dff6d0900961 100644 --- a/crates/primitives/src/transaction/pooled.rs +++ b/crates/primitives/src/transaction/pooled.rs @@ -696,6 +696,7 @@ impl TryFrom for PooledTransactionsElementEcRecove #[cfg(test)] mod tests { use super::*; + use alloy_consensus::Transaction as _; use alloy_primitives::{address, hex}; use assert_matches::assert_matches; use bytes::Bytes; diff --git a/crates/rpc/rpc-types-compat/src/transaction.rs b/crates/rpc/rpc-types-compat/src/transaction.rs index 31c9d967cd15..b439b61d44e8 100644 --- a/crates/rpc/rpc-types-compat/src/transaction.rs +++ b/crates/rpc/rpc-types-compat/src/transaction.rs @@ -75,7 +75,7 @@ pub fn transaction_to_call_request(tx: TransactionSignedEcRecovered) -> Transact let access_list = tx.transaction.access_list().cloned(); let max_fee_per_blob_gas = tx.transaction.max_fee_per_blob_gas(); let authorization_list = tx.transaction.authorization_list().map(|l| l.to_vec()); - let blob_versioned_hashes = tx.transaction.blob_versioned_hashes(); + let blob_versioned_hashes = tx.transaction.blob_versioned_hashes().map(Vec::from); let tx_type = tx.transaction.tx_type(); // fees depending on the transaction type diff --git a/crates/rpc/rpc/src/eth/bundle.rs b/crates/rpc/rpc/src/eth/bundle.rs index ee2b3ed5e7cd..10eec4dbf974 100644 --- a/crates/rpc/rpc/src/eth/bundle.rs +++ b/crates/rpc/rpc/src/eth/bundle.rs @@ -1,11 +1,12 @@ //! `Eth` bundle implementation and helpers. +use alloy_consensus::Transaction as _; use alloy_primitives::{Keccak256, U256}; use alloy_rpc_types_mev::{EthCallBundle, EthCallBundleResponse, EthCallBundleTransactionResult}; use jsonrpsee::core::RpcResult; use reth_chainspec::EthChainSpec; use reth_evm::{ConfigureEvm, ConfigureEvmEnv}; -use reth_primitives::PooledTransactionsElement; +use reth_primitives::{PooledTransactionsElement, Transaction}; use reth_provider::{ChainSpecProvider, HeaderProvider}; use reth_revm::database::StateProviderDatabase; use reth_rpc_eth_api::{ @@ -19,7 +20,7 @@ use revm::{ primitives::{ResultAndState, TxEnv}, }; use revm_primitives::{EnvKzgSettings, EnvWithHandlerCfg, SpecId, MAX_BLOB_GAS_PER_BLOCK}; -use std::sync::Arc; +use std::{ops::Deref, sync::Arc}; /// `Eth` bundle implementation. 
pub struct EthBundle { @@ -179,8 +180,7 @@ where let tx = tx.into_transaction(); hasher.update(tx.hash()); - let gas_price = tx - .effective_tip_per_gas(basefee) + let gas_price = Transaction::effective_tip_per_gas(tx.deref(), basefee) .ok_or_else(|| RpcInvalidTransactionError::FeeCapTooLow) .map_err(Eth::Error::from_eth_err)?; eth_api.evm_config().fill_tx_env(evm.tx_mut(), &tx, signer); diff --git a/crates/rpc/rpc/src/eth/helpers/signer.rs b/crates/rpc/rpc/src/eth/helpers/signer.rs index e7e9c64447b1..32645ba08d6c 100644 --- a/crates/rpc/rpc/src/eth/helpers/signer.rs +++ b/crates/rpc/rpc/src/eth/helpers/signer.rs @@ -109,6 +109,7 @@ impl EthSigner for DevSigner { #[cfg(test)] mod tests { + use alloy_consensus::Transaction; use alloy_primitives::{Bytes, U256}; use alloy_rpc_types_eth::TransactionInput; use revm_primitives::TxKind; diff --git a/examples/beacon-api-sidecar-fetcher/Cargo.toml b/examples/beacon-api-sidecar-fetcher/Cargo.toml index 47a2a181f7e5..d9590f87e07a 100644 --- a/examples/beacon-api-sidecar-fetcher/Cargo.toml +++ b/examples/beacon-api-sidecar-fetcher/Cargo.toml @@ -11,6 +11,7 @@ reth-node-ethereum.workspace = true alloy-rpc-types-beacon.workspace = true alloy-primitives.workspace = true +alloy-consensus.workspace = true clap.workspace = true eyre.workspace = true diff --git a/examples/beacon-api-sidecar-fetcher/src/mined_sidecar.rs b/examples/beacon-api-sidecar-fetcher/src/mined_sidecar.rs index 2436ee0210e4..d2077edafff4 100644 --- a/examples/beacon-api-sidecar-fetcher/src/mined_sidecar.rs +++ b/examples/beacon-api-sidecar-fetcher/src/mined_sidecar.rs @@ -1,4 +1,5 @@ use crate::BeaconSidecarConfig; +use alloy_consensus::Transaction as _; use alloy_primitives::B256; use alloy_rpc_types_beacon::sidecar::{BeaconBlobBundle, SidecarIterator}; use eyre::Result; From c96118346a218f2845d9047895cd9e38205a7d50 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Sat, 23 Nov 2024 15:25:18 +0100 Subject: [PATCH 142/156] chore: rm unused variant type (#12798) --- crates/primitives/src/transaction/mod.rs | 2 - crates/primitives/src/transaction/variant.rs | 145 ------------------- 2 files changed, 147 deletions(-) delete mode 100644 crates/primitives/src/transaction/variant.rs diff --git a/crates/primitives/src/transaction/mod.rs b/crates/primitives/src/transaction/mod.rs index af0529132b9d..ca03bfe4f7c5 100644 --- a/crates/primitives/src/transaction/mod.rs +++ b/crates/primitives/src/transaction/mod.rs @@ -41,7 +41,6 @@ pub use reth_primitives_traits::WithEncoded; pub use sidecar::BlobTransaction; pub use signature::{recover_signer, recover_signer_unchecked}; pub use tx_type::TxType; -pub use variant::TransactionSignedVariant; pub(crate) mod access_list; mod compat; @@ -56,7 +55,6 @@ mod tx_type; pub mod signature; pub(crate) mod util; -mod variant; /// Expected number of transactions where we can expect a speed-up by recovering the senders in /// parallel. diff --git a/crates/primitives/src/transaction/variant.rs b/crates/primitives/src/transaction/variant.rs deleted file mode 100644 index dd47df9a8693..000000000000 --- a/crates/primitives/src/transaction/variant.rs +++ /dev/null @@ -1,145 +0,0 @@ -//! Helper enum functions for `Transaction`, `TransactionSigned` and -//! `TransactionSignedEcRecovered` - -use crate::{ - Transaction, TransactionSigned, TransactionSignedEcRecovered, TransactionSignedNoHash, -}; -use alloy_primitives::{Address, B256}; -use core::ops::Deref; - -/// Represents various different transaction formats used in reth. 
-/// -/// All variants are based on a the raw [Transaction] data and can contain additional information -/// extracted (expensive) from that transaction, like the hash and the signer. -#[derive(Debug, Clone, PartialEq, Eq, Hash, derive_more::From)] -pub enum TransactionSignedVariant { - /// A signed transaction without a hash. - SignedNoHash(TransactionSignedNoHash), - /// Contains the plain transaction data its signature and hash. - Signed(TransactionSigned), - /// Contains the plain transaction data its signature and hash and the successfully recovered - /// signer. - SignedEcRecovered(TransactionSignedEcRecovered), -} - -impl TransactionSignedVariant { - /// Returns the raw transaction object - pub const fn as_raw(&self) -> &Transaction { - match self { - Self::SignedNoHash(tx) => &tx.transaction, - Self::Signed(tx) => &tx.transaction, - Self::SignedEcRecovered(tx) => &tx.signed_transaction.transaction, - } - } - - /// Returns the hash of the transaction - pub fn hash(&self) -> B256 { - match self { - Self::SignedNoHash(tx) => tx.hash(), - Self::Signed(tx) => tx.hash(), - Self::SignedEcRecovered(tx) => tx.hash(), - } - } - - /// Returns the signer of the transaction. - /// - /// If the transaction is of not of [`TransactionSignedEcRecovered`] it will be recovered. - pub fn signer(&self) -> Option
{ - match self { - Self::SignedNoHash(tx) => tx.recover_signer(), - Self::Signed(tx) => tx.recover_signer(), - Self::SignedEcRecovered(tx) => Some(tx.signer), - } - } - - /// Returns [`TransactionSigned`] type - /// else None - pub const fn as_signed(&self) -> Option<&TransactionSigned> { - match self { - Self::Signed(tx) => Some(tx), - _ => None, - } - } - - /// Returns `TransactionSignedEcRecovered` type - /// else None - pub const fn as_signed_ec_recovered(&self) -> Option<&TransactionSignedEcRecovered> { - match self { - Self::SignedEcRecovered(tx) => Some(tx), - _ => None, - } - } - - /// Returns true if the transaction is of [`TransactionSigned`] variant - pub const fn is_signed(&self) -> bool { - matches!(self, Self::Signed(_)) - } - - /// Returns true if the transaction is of [`TransactionSignedNoHash`] variant - pub const fn is_signed_no_hash(&self) -> bool { - matches!(self, Self::SignedNoHash(_)) - } - - /// Returns true if the transaction is of [`TransactionSignedEcRecovered`] variant - pub const fn is_signed_ec_recovered(&self) -> bool { - matches!(self, Self::SignedEcRecovered(_)) - } - - /// Consumes the [`TransactionSignedVariant`] and returns the consumed [Transaction] - pub fn into_raw(self) -> Transaction { - match self { - Self::SignedNoHash(tx) => tx.transaction, - Self::Signed(tx) => tx.transaction, - Self::SignedEcRecovered(tx) => tx.signed_transaction.transaction, - } - } - - /// Consumes the [`TransactionSignedVariant`] and returns the consumed [`TransactionSigned`] - pub fn into_signed(self) -> TransactionSigned { - match self { - Self::SignedNoHash(tx) => tx.with_hash(), - Self::Signed(tx) => tx, - Self::SignedEcRecovered(tx) => tx.signed_transaction, - } - } - - /// Consumes the [`TransactionSignedVariant`] and converts it into a - /// [`TransactionSignedEcRecovered`] - /// - /// If the variants is not a [`TransactionSignedEcRecovered`] it will recover the sender. - /// - /// Returns `None` if the transaction's signature is invalid - pub fn into_signed_ec_recovered(self) -> Option { - self.try_into_signed_ec_recovered().ok() - } - - /// Consumes the [`TransactionSignedVariant`] and converts it into a - /// [`TransactionSignedEcRecovered`] - /// - /// If the variants is not a [`TransactionSignedEcRecovered`] it will recover the sender. - /// - /// Returns an error if the transaction's signature is invalid. 
- pub fn try_into_signed_ec_recovered( - self, - ) -> Result { - match self { - Self::SignedEcRecovered(tx) => Ok(tx), - Self::Signed(tx) => tx.try_into_ecrecovered(), - Self::SignedNoHash(tx) => tx.with_hash().try_into_ecrecovered(), - } - } -} - -impl AsRef for TransactionSignedVariant { - fn as_ref(&self) -> &Transaction { - self.as_raw() - } -} - -impl Deref for TransactionSignedVariant { - type Target = Transaction; - - fn deref(&self) -> &Self::Target { - self.as_raw() - } -} From c869c7118365d65251dfd7a0c9fc703ec4485495 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Sat, 23 Nov 2024 15:42:36 +0100 Subject: [PATCH 143/156] Revert "fix: add arbitrary feature for op cli dev" (#12808) --- crates/optimism/cli/Cargo.toml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/crates/optimism/cli/Cargo.toml b/crates/optimism/cli/Cargo.toml index ba36568efe86..d090075927aa 100644 --- a/crates/optimism/cli/Cargo.toml +++ b/crates/optimism/cli/Cargo.toml @@ -111,8 +111,7 @@ jemalloc = [ dev = [ "dep:proptest", - "reth-cli-commands/arbitrary", - "reth-optimism-primitives/arbitrary" + "reth-cli-commands/arbitrary" ] serde = [ "alloy-consensus?/serde", From 795e29cb5b88f674e8d873b86009b3956b3c96f6 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Sat, 23 Nov 2024 15:54:39 +0100 Subject: [PATCH 144/156] Revert "chore(sdk): Add MaybeArbitrary as super trait" (#12809) --- Cargo.lock | 242 +++++++++--------- crates/optimism/node/Cargo.toml | 1 - crates/optimism/primitives/Cargo.toml | 14 - crates/optimism/primitives/src/tx_type.rs | 7 +- crates/primitives-traits/src/block/body.rs | 3 +- crates/primitives-traits/src/block/header.rs | 4 +- crates/primitives-traits/src/block/mod.rs | 16 +- crates/primitives-traits/src/header/sealed.rs | 7 +- crates/primitives-traits/src/receipt.rs | 5 +- .../src/transaction/tx_type.rs | 3 +- 10 files changed, 131 insertions(+), 171 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 9ae0574414e0..8a92aadda193 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -418,7 +418,7 @@ checksum = "2b09cae092c27b6f1bde952653a22708691802e57bfef4a2973b80bea21efd3f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.87", ] [[package]] @@ -643,7 +643,7 @@ dependencies = [ "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.87", ] [[package]] @@ -659,7 +659,7 @@ dependencies = [ "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.87", "syn-solidity", "tiny-keccak", ] @@ -675,7 +675,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.87", "syn-solidity", ] @@ -881,7 +881,7 @@ dependencies = [ "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.87", ] [[package]] @@ -1104,7 +1104,7 @@ checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.87", ] [[package]] @@ -1115,7 +1115,7 @@ checksum = "721cae7de5c34fbb2acd27e21e6d2cf7b886dce0c27388d46c4e6c47ea4318dd" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.87", ] [[package]] @@ -1153,7 +1153,7 @@ checksum = "3c87f3f15e7794432337fc718554eaa4dc8f04c9677a950ffe366f20a162ae42" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.87", ] [[package]] @@ -1259,7 +1259,7 @@ dependencies = [ "regex", "rustc-hash 1.1.0", "shlex", - "syn 2.0.89", + "syn 2.0.87", ] [[package]] @@ -1441,7 +1441,7 @@ checksum = "240f4126219a83519bad05c9a40bfc0303921eeb571fc2d7e44c17ffac99d3f1" dependencies = [ "proc-macro2", 
"quote", - "syn 2.0.89", + "syn 2.0.87", "synstructure", ] @@ -1548,9 +1548,9 @@ checksum = "c3ac9f8b63eca6fd385229b3675f6cc0dc5c8a5c8a54a59d4f52ffd670d87b0c" [[package]] name = "bytemuck" -version = "1.20.0" +version = "1.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b37c88a63ffd85d15b406896cc343916d7cf57838a847b3a6f2ca5d39a5695a" +checksum = "8334215b81e418a0a7bdb8ef0849474f40bb10c8b71f1c4ed315cff49f32494d" dependencies = [ "bytemuck_derive", ] @@ -1563,7 +1563,7 @@ checksum = "bcfcc3cd946cb52f0bbfdbbcfa2f4e24f75ebb6c0e1002f7c25904fada18b9ec" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.87", ] [[package]] @@ -1781,7 +1781,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.87", ] [[package]] @@ -2225,7 +2225,7 @@ checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.87", ] [[package]] @@ -2249,7 +2249,7 @@ dependencies = [ "proc-macro2", "quote", "strsim", - "syn 2.0.89", + "syn 2.0.87", ] [[package]] @@ -2260,7 +2260,7 @@ checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" dependencies = [ "darling_core", "quote", - "syn 2.0.89", + "syn 2.0.87", ] [[package]] @@ -2382,7 +2382,7 @@ checksum = "30542c1ad912e0e3d22a1935c290e12e8a29d704a420177a31faad4a601a0800" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.87", ] [[package]] @@ -2393,7 +2393,7 @@ checksum = "5f33878137e4dafd7fa914ad4e259e18a4e8e532b9617a2d0150262bf53abfce" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.87", ] [[package]] @@ -2414,7 +2414,7 @@ dependencies = [ "convert_case", "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.87", "unicode-xid", ] @@ -2528,7 +2528,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.87", ] [[package]] @@ -2678,7 +2678,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.87", ] [[package]] @@ -2689,7 +2689,7 @@ checksum = "2f9ed6b3789237c8a0c1c505af1c7eb2c560df6186f01b098c3a1064ea532f38" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.87", ] [[package]] @@ -2746,7 +2746,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.87", ] [[package]] @@ -3307,7 +3307,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.87", ] [[package]] @@ -3486,9 +3486,9 @@ dependencies = [ [[package]] name = "h2" -version = "0.4.7" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccae279728d634d083c00f6099cb58f01cc99c145b84b8be2f6c74618d79922e" +checksum = "524e8ac6999421f49a846c2d4411f337e53497d8ec55d67753beffa43c5d9205" dependencies = [ "atomic-waker", "bytes", @@ -3751,9 +3751,9 @@ dependencies = [ [[package]] name = "hyper" -version = "1.5.1" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97818827ef4f364230e16705d4706e2897df2bb60617d6ca15d598025a3c481f" +checksum = "bbbff0a806a4728c99295b254c8838933b5b082d75e3cb70c8dab21fdfbcfa9a" dependencies = [ "bytes", "futures-channel", @@ -3833,7 +3833,7 @@ dependencies = [ "quote", "serde", "serde_json", - "syn 2.0.89", + "syn 2.0.87", ] [[package]] @@ -3983,7 +3983,7 @@ checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" 
dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.87", ] [[package]] @@ -4044,13 +4044,13 @@ dependencies = [ [[package]] name = "impl-trait-for-tuples" -version = "0.2.3" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0eb5a3343abf848c0984fe4604b2b105da9539376e24fc0a3b0007411ae4fd9" +checksum = "11d7a9f6330b71fea57921c9b61c47ee6e84f72d394754eff6163ae67e7395eb" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 1.0.109", ] [[package]] @@ -4172,7 +4172,7 @@ dependencies = [ "pretty_assertions", "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.87", ] [[package]] @@ -4273,9 +4273,9 @@ dependencies = [ [[package]] name = "itoa" -version = "1.0.13" +version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "540654e97a3f4470a492cd30ff187bc95d89557a903a2bbf112e2fae98104ef2" +checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" [[package]] name = "jni" @@ -4420,7 +4420,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.87", ] [[package]] @@ -4580,9 +4580,9 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.164" +version = "0.2.162" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "433bfe06b8c75da9b2e3fbea6e5329ff87748f0b144ef75306e674c3f6f7c13f" +checksum = "18d287de67fe55fd7e1581fe933d965a5a9477b38e949cfa9f8574ef01506398" [[package]] name = "libloading" @@ -4602,9 +4602,9 @@ checksum = "8355be11b20d696c8f18f6cc018c4e372165b1fa8126cef092399c9951984ffa" [[package]] name = "libp2p-identity" -version = "0.2.10" +version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "257b5621d159b32282eac446bed6670c39c7dc68a200a992d8f056afa0066f6d" +checksum = "55cca1eb2bc1fd29f099f3daaab7effd01e1a54b7c577d0ed082521034d912e8" dependencies = [ "asn1_der", "bs58", @@ -4838,7 +4838,7 @@ dependencies = [ "proc-macro2", "quote", "regex", - "syn 2.0.89", + "syn 2.0.87", ] [[package]] @@ -4965,9 +4965,9 @@ dependencies = [ [[package]] name = "mockall" -version = "0.13.1" +version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39a6bfcc6c8c7eed5ee98b9c3e33adc726054389233e201c95dab2d41a3839d2" +checksum = "d4c28b3fb6d753d28c20e826cd46ee611fda1cf3cde03a443a974043247c065a" dependencies = [ "cfg-if", "downcast", @@ -4979,14 +4979,14 @@ dependencies = [ [[package]] name = "mockall_derive" -version = "0.13.1" +version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25ca3004c2efe9011bd4e461bd8256445052b9615405b4f7ea43fc8ca5c20898" +checksum = "341014e7f530314e9a1fdbc7400b244efea7122662c96bfa248c31da5bfb2020" dependencies = [ "cfg-if", "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.87", ] [[package]] @@ -5234,7 +5234,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.87", ] [[package]] @@ -5287,9 +5287,9 @@ checksum = "b410bbe7e14ab526a0e86877eb47c6996a2bd7746f027ba551028c925390e4e9" [[package]] name = "op-alloy-consensus" -version = "0.6.8" +version = "0.6.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fce158d886815d419222daa67fcdf949a34f7950653a4498ebeb4963331f70ed" +checksum = "72da577a88d35b893fae6467112651f26ef023434c196b2a0b3dc75bc853e0e4" dependencies = [ "alloy-consensus", "alloy-eips", @@ -5305,9 +5305,9 @@ dependencies = [ [[package]] name = "op-alloy-genesis" -version = "0.6.8" +version = "0.6.7" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2734e9a65efb90fe4520303f984c124766b7d2f2e5dd51cbe54d6269c85a3c91" +checksum = "818180672dd14ca6642fb57942e1cbd602669f42b6e0222b7ea9bbcae065d67e" dependencies = [ "alloy-consensus", "alloy-eips", @@ -5320,9 +5320,9 @@ dependencies = [ [[package]] name = "op-alloy-network" -version = "0.6.8" +version = "0.6.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87e4aef8ed017004a176ab1de49df419f59c0fb4a6ce3b693a10fe099fe1afe7" +checksum = "12f82e805bad171ceae2af45efaecf8d0b50622cff3473e3c998ff1dd340de35" dependencies = [ "alloy-consensus", "alloy-network", @@ -5335,9 +5335,9 @@ dependencies = [ [[package]] name = "op-alloy-protocol" -version = "0.6.8" +version = "0.6.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c68a3e2770890da3ad2fd20d7fe0c8e15672707577b4168a60e388c8eceaca0" +checksum = "1803a1ac96203b8f713b1fa9b7509c46c645ca7bc22b582761a7495e999d4301" dependencies = [ "alloc-no-stdlib", "alloy-consensus", @@ -5358,9 +5358,9 @@ dependencies = [ [[package]] name = "op-alloy-rpc-types" -version = "0.6.8" +version = "0.6.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "060ebeaea8c772e396215f69bb86d231ec8b7f36aca0dd6ce367ceaa9a8c33e6" +checksum = "a838c125256e02e2f9da88c51e263b02a06cda7e60382fe2551a3385b516f5bb" dependencies = [ "alloy-consensus", "alloy-eips", @@ -5377,9 +5377,9 @@ dependencies = [ [[package]] name = "op-alloy-rpc-types-engine" -version = "0.6.8" +version = "0.6.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "864dbd5511ef4ef00b6c2c980739259b25b24048007b7751ca0069b30b1e3fee" +checksum = "c227fcc7d81d4023363ba12406e57ebcc1c7cbb1075c38ea471ae32138d4706d" dependencies = [ "alloy-eips", "alloy-primitives", @@ -5471,9 +5471,9 @@ dependencies = [ [[package]] name = "parity-scale-codec" -version = "3.7.0" +version = "3.6.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8be4817d39f3272f69c59fe05d0535ae6456c2dc2fa1ba02910296c7e0a5c590" +checksum = "306800abfa29c7f16596b5970a588435e3d5b3149683d00c12b699cc19f895ee" dependencies = [ "arbitrary", "arrayvec", @@ -5482,20 +5482,19 @@ dependencies = [ "bytes", "impl-trait-for-tuples", "parity-scale-codec-derive", - "rustversion", "serde", ] [[package]] name = "parity-scale-codec-derive" -version = "3.7.0" +version = "3.6.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8781a75c6205af67215f382092b6e0a4ff3734798523e69073d4bcd294ec767b" +checksum = "d830939c76d294956402033aee57a6da7b438f2294eb94864c37b0569053a42c" dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.89", + "syn 1.0.109", ] [[package]] @@ -5610,7 +5609,7 @@ dependencies = [ "phf_shared", "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.87", ] [[package]] @@ -5639,7 +5638,7 @@ checksum = "3c0f5fad0874fc7abcd4d750e76917eaebbecaa2c20bde22e1dbeeba8beb758c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.87", ] [[package]] @@ -5811,7 +5810,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "64d1ec885c64d0457d564db4ec299b2dae3f9c02808b8ad9c3a089c591b18033" dependencies = [ "proc-macro2", - "syn 2.0.89", + "syn 2.0.87", ] [[package]] @@ -5862,14 +5861,14 @@ dependencies = [ "proc-macro-error-attr2", "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.87", ] [[package]] name = "proc-macro2" -version = "1.0.91" +version = "1.0.89" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "307e3004becf10f5a6e0d59d20f3cd28231b0e0827a96cd3e0ce6d14bc1e4bb3" +checksum = "f139b0662de085916d1fb67d2b4169d1addddda1919e696f3252b740b629986e" dependencies = [ "unicode-ident", ] @@ -5960,7 +5959,7 @@ checksum = "6ff7ff745a347b87471d859a377a9a404361e7efc2a971d73424a6d183c0fc77" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.87", ] [[package]] @@ -6317,7 +6316,7 @@ dependencies = [ "serde", "serde_json", "serde_urlencoded", - "sync_wrapper 1.0.2", + "sync_wrapper 1.0.1", "tokio", "tokio-rustls", "tokio-util", @@ -6772,7 +6771,7 @@ dependencies = [ "proc-macro2", "quote", "similar-asserts", - "syn 2.0.89", + "syn 2.0.87", ] [[package]] @@ -8390,7 +8389,6 @@ dependencies = [ "alloy-eips", "alloy-primitives", "alloy-rlp", - "arbitrary", "bytes", "derive_more 1.0.0", "op-alloy-consensus", @@ -9685,7 +9683,7 @@ dependencies = [ "regex", "relative-path", "rustc_version 0.4.1", - "syn 2.0.89", + "syn 2.0.87", "unicode-ident", ] @@ -9767,9 +9765,9 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.41" +version = "0.38.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7f649912bc1495e167a6edee79151c84b1bad49748cb4f1f1167f459f6224f6" +checksum = "99e4ea3e1cdc4b559b8e5650f9c8e5998e3e5c1343b4eaf034565f32318d63c0" dependencies = [ "bitflags 2.6.0", "errno", @@ -9780,9 +9778,9 @@ dependencies = [ [[package]] name = "rustls" -version = "0.23.17" +version = "0.23.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f1a745511c54ba6d4465e8d5dfbd81b45791756de28d4981af70d6dca128f1e" +checksum = "eee87ff5d9b36712a58574e12e9f0ea80f915a5b0ac518d322b24a465617925e" dependencies = [ "log", "once_cell", @@ -9925,9 +9923,9 @@ dependencies = [ [[package]] name = "schannel" -version = "0.1.27" +version = "0.1.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f29ebaa345f945cec9fbbc532eb307f0fdad8161f281b6369539c8d84876b3d" +checksum = "01227be5826fa0690321a2ba6c5cd57a19cf3f6a09e76973b58e61de6ab9d1c1" dependencies = [ "windows-sys 0.59.0", ] @@ -10039,9 +10037,9 @@ dependencies = [ [[package]] name = "semver-parser" -version = "0.10.3" +version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9900206b54a3527fdc7b8a938bffd94a568bac4f4aa8113b209df75a09c0dec2" +checksum = "00b0bef5b7f9e0df16536d3961cfb6e84331c065b4066afb39768d0e319411f7" dependencies = [ "pest", ] @@ -10075,14 +10073,14 @@ checksum = "ad1e866f866923f252f05c889987993144fb74e722403468a4ebd70c3cd756c0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.87", ] [[package]] name = "serde_json" -version = "1.0.133" +version = "1.0.132" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7fceb2473b9166b2294ef05efcb65a3db80803f0b03ef86a5fc88a2b85ee377" +checksum = "d726bfaff4b320266d395898905d0eba0345aae23b54aee3a737e260fd46db03" dependencies = [ "indexmap 2.6.0", "itoa", @@ -10110,7 +10108,7 @@ checksum = "6c64451ba24fc7a6a2d60fc75dd9c83c90903b19028d4eff35e88fc1e86564e9" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.87", ] [[package]] @@ -10161,7 +10159,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.87", ] [[package]] @@ -10184,7 +10182,7 @@ checksum = "5d69265a08751de7844521fd15003ae0a888e035773ba05695c5c759a6f89eef" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.87", ] [[package]] @@ -10467,7 +10465,7 @@ 
dependencies = [ "proc-macro2", "quote", "rustversion", - "syn 2.0.89", + "syn 2.0.87", ] [[package]] @@ -10525,9 +10523,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.89" +version = "2.0.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44d46482f1c1c87acd84dea20c1bf5ebff4c757009ed6bf19cfd36fb10e92c4e" +checksum = "25aa4ce346d03a6dcd68dd8b4010bcb74e54e62c90c573f394c46eae99aba32d" dependencies = [ "proc-macro2", "quote", @@ -10543,7 +10541,7 @@ dependencies = [ "paste", "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.87", ] [[package]] @@ -10554,9 +10552,9 @@ checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" [[package]] name = "sync_wrapper" -version = "1.0.2" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" +checksum = "a7065abeca94b6a8a577f9bd45aa0867a2238b74e8eb67cf10d492bc39351394" dependencies = [ "futures-core", ] @@ -10569,7 +10567,7 @@ checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.87", ] [[package]] @@ -10646,7 +10644,7 @@ dependencies = [ "prettyplease", "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.87", ] [[package]] @@ -10694,7 +10692,7 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.87", ] [[package]] @@ -10705,7 +10703,7 @@ checksum = "f077553d607adc1caf65430528a576c757a71ed73944b66ebb58ef2bbd243568" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.87", ] [[package]] @@ -10882,7 +10880,7 @@ checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.87", ] [[package]] @@ -11010,9 +11008,9 @@ dependencies = [ [[package]] name = "tower-http" -version = "0.6.2" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "403fa3b783d4b626a8ad51d766ab03cb6d2dbfc46b1c5d4448395e6628dc9697" +checksum = "8437150ab6bbc8c5f0f519e3d5ed4aa883a83dd4cdd3d1b21f9482936046cb97" dependencies = [ "async-compression", "base64 0.22.1", @@ -11083,7 +11081,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.87", ] [[package]] @@ -11332,9 +11330,9 @@ checksum = "5ab17db44d7388991a428b2ee655ce0c212e862eff1768a455c58f9aad6e7893" [[package]] name = "unicode-ident" -version = "1.0.14" +version = "1.0.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "adb9e6ca4f869e1180728b7950e35922a7fc6397f7b641499e8f3ef06e50dc83" +checksum = "e91b56cd4cadaeb79bbf1a5645f6b4f8dc5bde8834ad5894a8db35fda9efa1fe" [[package]] name = "unicode-normalization" @@ -11481,7 +11479,7 @@ checksum = "d674d135b4a8c1d7e813e2f8d1c9a58308aee4a680323066025e53132218bd91" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.87", ] [[package]] @@ -11552,7 +11550,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.87", "wasm-bindgen-shared", ] @@ -11586,7 +11584,7 @@ checksum = "26c6ab57572f7a24a4985830b120de1594465e5d500f24afe89e16b4e833ef68" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.87", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -11612,9 +11610,9 @@ dependencies = [ [[package]] name = "wasmtimer" 
-version = "0.4.1" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0048ad49a55b9deb3953841fa1fc5858f0efbcb7a18868c899a360269fac1b23" +checksum = "bb4f099acbc1043cc752b91615b24b02d7f6fcd975bd781fed9f50b3c3e15bf7" dependencies = [ "futures", "js-sys", @@ -11752,7 +11750,7 @@ checksum = "9107ddc059d5b6fbfbffdfa7a7fe3e22a226def0b2608f72e9d552763d3e1ad7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.87", ] [[package]] @@ -11763,7 +11761,7 @@ checksum = "2bbd5b46c938e506ecbce286b6628a02171d56153ba733b6c741fc627ec9579b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.87", ] [[package]] @@ -11774,7 +11772,7 @@ checksum = "29bee4b38ea3cde66011baa44dba677c432a78593e202392d1e9070cf2a7fca7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.87", ] [[package]] @@ -11785,7 +11783,7 @@ checksum = "053c4c462dc91d3b1504c6fe5a726dd15e216ba718e84a0e46a88fbe5ded3515" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.87", ] [[package]] @@ -12060,7 +12058,7 @@ checksum = "28cc31741b18cb6f1d5ff12f5b7523e3d6eb0852bbbad19d73905511d9849b95" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.87", "synstructure", ] @@ -12082,7 +12080,7 @@ checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.87", ] [[package]] @@ -12102,7 +12100,7 @@ checksum = "0ea7b4a3637ea8669cedf0f1fd5c286a17f3de97b8dd5a70a6c167a1730e63a5" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.87", "synstructure", ] @@ -12123,7 +12121,7 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.87", ] [[package]] @@ -12145,7 +12143,7 @@ checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.87", ] [[package]] diff --git a/crates/optimism/node/Cargo.toml b/crates/optimism/node/Cargo.toml index fbc055a82e98..18ceee8ef8b7 100644 --- a/crates/optimism/node/Cargo.toml +++ b/crates/optimism/node/Cargo.toml @@ -119,7 +119,6 @@ test-utils = [ "reth-trie-db/test-utils", "revm/test-utils", "reth-optimism-node/test-utils", - "reth-optimism-primitives/arbitrary", ] reth-codec = [ "reth-primitives/reth-codec", diff --git a/crates/optimism/primitives/Cargo.toml b/crates/optimism/primitives/Cargo.toml index 33f936b2fd13..fc368807736f 100644 --- a/crates/optimism/primitives/Cargo.toml +++ b/crates/optimism/primitives/Cargo.toml @@ -34,13 +34,9 @@ serde = { workspace = true, optional = true } # misc derive_more.workspace = true -# test-utils -arbitrary = { workspace = true, features = ["derive"], optional = true } - [dev-dependencies] reth-codecs = { workspace = true, features = ["test-utils"] } rstest.workspace = true -arbitrary.workspace = true [features] default = ["std", "reth-codec"] @@ -69,13 +65,3 @@ serde = [ "reth-codecs/serde", "op-alloy-consensus/serde", ] -arbitrary = [ - "dep:arbitrary", - "reth-primitives-traits/arbitrary", - "reth-primitives/arbitrary", - "reth-codecs?/arbitrary", - "op-alloy-consensus/arbitrary", - "alloy-consensus/arbitrary", - "alloy-eips/arbitrary", - "alloy-primitives/arbitrary", -] \ No newline at end of file diff --git a/crates/optimism/primitives/src/tx_type.rs b/crates/optimism/primitives/src/tx_type.rs index 9976221b4240..c6e7fcc0a806 100644 --- a/crates/optimism/primitives/src/tx_type.rs +++ 
b/crates/optimism/primitives/src/tx_type.rs @@ -2,11 +2,10 @@ //! `OpTxType` implements `reth_primitives_traits::TxType`. //! This type is required because a `Compact` impl is needed on the deposit tx type. -use core::fmt::Debug; - use alloy_primitives::{U64, U8}; use alloy_rlp::{Decodable, Encodable, Error}; use bytes::BufMut; +use core::fmt::Debug; use derive_more::{ derive::{From, Into}, Display, @@ -14,10 +13,8 @@ use derive_more::{ use op_alloy_consensus::OpTxType as AlloyOpTxType; use reth_primitives_traits::{InMemorySize, TxType}; -/// Wrapper type for [`op_alloy_consensus::OpTxType`] to implement -/// [`TxType`] trait. +/// Wrapper type for [`op_alloy_consensus::OpTxType`] to implement [`TxType`] trait. #[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Display, Ord, Hash, From, Into)] -#[cfg_attr(any(test, feature = "arbitrary"), derive(arbitrary::Arbitrary))] #[into(u8)] pub struct OpTxType(AlloyOpTxType); diff --git a/crates/primitives-traits/src/block/body.rs b/crates/primitives-traits/src/block/body.rs index fd7f7f1c631f..ff41536ba3f0 100644 --- a/crates/primitives-traits/src/block/body.rs +++ b/crates/primitives-traits/src/block/body.rs @@ -4,7 +4,7 @@ use alloc::fmt; use alloy_consensus::Transaction; -use crate::{FullSignedTx, InMemorySize, MaybeArbitrary, MaybeSerde}; +use crate::{FullSignedTx, InMemorySize, MaybeSerde}; /// Helper trait that unifies all behaviour required by transaction to support full node operations. pub trait FullBlockBody: BlockBody {} @@ -26,7 +26,6 @@ pub trait BlockBody: + alloy_rlp::Decodable + InMemorySize + MaybeSerde - + MaybeArbitrary { /// Ordered list of signed transactions as committed in block. type Transaction: Transaction; diff --git a/crates/primitives-traits/src/block/header.rs b/crates/primitives-traits/src/block/header.rs index 26806808532b..695e63ed10ee 100644 --- a/crates/primitives-traits/src/block/header.rs +++ b/crates/primitives-traits/src/block/header.rs @@ -4,7 +4,7 @@ use core::fmt; use alloy_primitives::Sealable; -use crate::{InMemorySize, MaybeArbitrary, MaybeCompact, MaybeSerde}; +use crate::{InMemorySize, MaybeCompact, MaybeSerde}; /// Helper trait that unifies all behaviour required by block header to support full node /// operations. @@ -28,7 +28,6 @@ pub trait BlockHeader: + Sealable + InMemorySize + MaybeSerde - + MaybeArbitrary { } @@ -47,6 +46,5 @@ impl BlockHeader for T where + Sealable + InMemorySize + MaybeSerde - + MaybeArbitrary { } diff --git a/crates/primitives-traits/src/block/mod.rs b/crates/primitives-traits/src/block/mod.rs index c0f5a1ffc63c..3f4fbd343eec 100644 --- a/crates/primitives-traits/src/block/mod.rs +++ b/crates/primitives-traits/src/block/mod.rs @@ -5,9 +5,7 @@ pub mod header; use alloc::fmt; -use crate::{ - BlockHeader, FullBlockBody, FullBlockHeader, InMemorySize, MaybeArbitrary, MaybeSerde, -}; +use crate::{BlockHeader, FullBlockBody, FullBlockHeader, InMemorySize, MaybeSerde}; /// Helper trait that unifies all behaviour required by block to support full node operations. pub trait FullBlock: @@ -28,17 +26,7 @@ impl FullBlock for T where // senders #[auto_impl::auto_impl(&, Arc)] pub trait Block: - Send - + Sync - + Unpin - + Clone - + Default - + fmt::Debug - + PartialEq - + Eq - + InMemorySize - + MaybeSerde - + MaybeArbitrary + Send + Sync + Unpin + Clone + Default + fmt::Debug + PartialEq + Eq + InMemorySize + MaybeSerde { /// Header part of the block. 
type Header: BlockHeader + 'static; diff --git a/crates/primitives-traits/src/header/sealed.rs b/crates/primitives-traits/src/header/sealed.rs index 08add0ac3c15..f0a6869ed1e9 100644 --- a/crates/primitives-traits/src/header/sealed.rs +++ b/crates/primitives-traits/src/header/sealed.rs @@ -159,12 +159,9 @@ impl From> for Sealed { } #[cfg(any(test, feature = "arbitrary"))] -impl<'a, H> arbitrary::Arbitrary<'a> for SealedHeader -where - H: for<'b> arbitrary::Arbitrary<'b> + Sealable, -{ +impl<'a> arbitrary::Arbitrary<'a> for SealedHeader { fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result { - let header = H::arbitrary(u)?; + let header = Header::arbitrary(u)?; Ok(Self::seal(header)) } diff --git a/crates/primitives-traits/src/receipt.rs b/crates/primitives-traits/src/receipt.rs index e2af40c447ed..4370d2ac00f7 100644 --- a/crates/primitives-traits/src/receipt.rs +++ b/crates/primitives-traits/src/receipt.rs @@ -1,12 +1,12 @@ //! Receipt abstraction -use alloc::vec::Vec; use core::fmt; +use alloc::vec::Vec; use alloy_consensus::TxReceipt; use alloy_primitives::B256; -use crate::{InMemorySize, MaybeArbitrary, MaybeCompact, MaybeSerde}; +use crate::{InMemorySize, MaybeCompact, MaybeSerde}; /// Helper trait that unifies all behaviour required by receipt to support full node operations. pub trait FullReceipt: Receipt + MaybeCompact {} @@ -27,7 +27,6 @@ pub trait Receipt: + alloy_rlp::Decodable + MaybeSerde + InMemorySize - + MaybeArbitrary { /// Returns transaction type. fn tx_type(&self) -> u8; diff --git a/crates/primitives-traits/src/transaction/tx_type.rs b/crates/primitives-traits/src/transaction/tx_type.rs index d2caebe4c9f1..931fcb773bf4 100644 --- a/crates/primitives-traits/src/transaction/tx_type.rs +++ b/crates/primitives-traits/src/transaction/tx_type.rs @@ -4,7 +4,7 @@ use core::fmt; use alloy_primitives::{U64, U8}; -use crate::{InMemorySize, MaybeArbitrary, MaybeCompact}; +use crate::{InMemorySize, MaybeCompact}; /// Helper trait that unifies all behaviour required by transaction type ID to support full node /// operations. @@ -33,7 +33,6 @@ pub trait TxType: + alloy_rlp::Encodable + alloy_rlp::Decodable + InMemorySize - + MaybeArbitrary { /// Returns `true` if this is a legacy transaction. fn is_legacy(&self) -> bool; From 7c5cb90e9ae22204af332b4d0851d3ede608732a Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Sat, 23 Nov 2024 15:27:51 +0100 Subject: [PATCH 145/156] feat: add signedtx for pooled tx (#12799) --- crates/primitives/src/transaction/pooled.rs | 60 +++++++++++++++++++-- 1 file changed, 56 insertions(+), 4 deletions(-) diff --git a/crates/primitives/src/transaction/pooled.rs b/crates/primitives/src/transaction/pooled.rs index dff6d0900961..cecc995ddba7 100644 --- a/crates/primitives/src/transaction/pooled.rs +++ b/crates/primitives/src/transaction/pooled.rs @@ -1,12 +1,17 @@ //! Defines the types for blob transactions, legacy, and other EIP-2718 transactions included in a //! response to `GetPooledTransactions`. 
-use super::{error::TransactionConversionError, signature::recover_signer, TxEip7702}; -use crate::{BlobTransaction, Transaction, TransactionSigned, TransactionSignedEcRecovered}; +use super::{ + error::TransactionConversionError, recover_signer_unchecked, signature::recover_signer, + TxEip7702, +}; +use crate::{ + BlobTransaction, Transaction, TransactionSigned, TransactionSignedEcRecovered, TxType, +}; use alloy_consensus::{ constants::EIP4844_TX_TYPE_ID, transaction::{TxEip1559, TxEip2930, TxEip4844, TxLegacy}, - Signed, TxEip4844WithSidecar, + SignableTransaction, Signed, TxEip4844WithSidecar, }; use alloy_eips::{ eip2718::{Decodable2718, Eip2718Result, Encodable2718}, @@ -19,8 +24,9 @@ use alloy_primitives::{ }; use alloy_rlp::{Decodable, Encodable, Error as RlpError, Header}; use bytes::Buf; +use core::hash::{Hash, Hasher}; use derive_more::{AsRef, Deref}; -use reth_primitives_traits::InMemorySize; +use reth_primitives_traits::{InMemorySize, SignedTransaction}; use serde::{Deserialize, Serialize}; /// A response to `GetPooledTransactions`. This can include either a blob transaction, or a @@ -220,6 +226,18 @@ impl PooledTransactionsElement { } } +impl Default for PooledTransactionsElement { + fn default() -> Self { + Self::Legacy(TxLegacy::default().into_signed(Signature::test_signature())) + } +} + +impl Hash for PooledTransactionsElement { + fn hash(&self, state: &mut H) { + self.trie_hash().hash(state); + } +} + impl Encodable for PooledTransactionsElement { /// This encodes the transaction _with_ the signature, and an rlp header. /// @@ -560,6 +578,40 @@ impl alloy_consensus::Transaction for PooledTransactionsElement { } } +impl SignedTransaction for PooledTransactionsElement { + type Type = TxType; + + fn tx_hash(&self) -> &TxHash { + match self { + Self::Legacy(tx) => tx.hash(), + Self::Eip2930(tx) => tx.hash(), + Self::Eip1559(tx) => tx.hash(), + Self::Eip7702(tx) => tx.hash(), + Self::BlobTransaction(tx) => tx.hash(), + } + } + + fn signature(&self) -> &Signature { + match self { + Self::Legacy(tx) => tx.signature(), + Self::Eip2930(tx) => tx.signature(), + Self::Eip1559(tx) => tx.signature(), + Self::Eip7702(tx) => tx.signature(), + Self::BlobTransaction(tx) => tx.signature(), + } + } + + fn recover_signer(&self) -> Option
{ + let signature_hash = self.signature_hash(); + recover_signer(self.signature(), signature_hash) + } + + fn recover_signer_unchecked(&self) -> Option
{ + let signature_hash = self.signature_hash(); + recover_signer_unchecked(self.signature(), signature_hash) + } +} + impl InMemorySize for PooledTransactionsElement { fn size(&self) -> usize { match self { From 9b289351b6835774b7398afcbcb66e7c2ef6e7f7 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Sat, 23 Nov 2024 15:33:19 +0100 Subject: [PATCH 146/156] feat: use defined pool type internally (#12803) --- crates/transaction-pool/src/pool/mod.rs | 84 ++++++++++--------------- crates/transaction-pool/src/traits.rs | 11 +++- 2 files changed, 43 insertions(+), 52 deletions(-) diff --git a/crates/transaction-pool/src/pool/mod.rs b/crates/transaction-pool/src/pool/mod.rs index 8c17da783acf..1a23bf3e07ce 100644 --- a/crates/transaction-pool/src/pool/mod.rs +++ b/crates/transaction-pool/src/pool/mod.rs @@ -78,7 +78,8 @@ use crate::{ PoolTransaction, PropagatedTransactions, TransactionOrigin, }, validate::{TransactionValidationOutcome, ValidPoolTransaction}, - CanonicalStateUpdate, PoolConfig, TransactionOrdering, TransactionValidator, + CanonicalStateUpdate, EthPoolTransaction, PoolConfig, TransactionOrdering, + TransactionValidator, }; use alloy_primitives::{Address, TxHash, B256}; use best::BestTransactions; @@ -87,9 +88,7 @@ use reth_eth_wire_types::HandleMempoolData; use reth_execution_types::ChangedAccount; use alloy_eips::eip4844::BlobTransactionSidecar; -use reth_primitives::{ - BlobTransaction, PooledTransactionsElement, TransactionSigned, TransactionSignedEcRecovered, -}; +use reth_primitives::PooledTransactionsElement; use std::{ collections::{HashMap, HashSet}, fmt, @@ -312,18 +311,33 @@ where self.get_pool_data().all().transactions_iter().filter(|tx| tx.propagate).take(max).collect() } - /// Returns the [`BlobTransaction`] for the given transaction if the sidecar exists. + /// Converts the internally tracked transaction to the pooled format. /// - /// Caution: this assumes the given transaction is eip-4844 - fn get_blob_transaction(&self, transaction: TransactionSigned) -> Option { - if let Ok(Some(sidecar)) = self.blob_store.get(transaction.hash()) { - if let Ok(blob) = - BlobTransaction::try_from_signed(transaction, Arc::unwrap_or_clone(sidecar)) - { - return Some(blob) - } + /// If the transaction is an EIP-4844 transaction, the blob sidecar is fetched from the blob + /// store and attached to the transaction. + fn to_pooled_transaction( + &self, + transaction: Arc>, + ) -> Option<<::Transaction as PoolTransaction>::Pooled> + where + ::Transaction: EthPoolTransaction, + { + if transaction.is_eip4844() { + let sidecar = self.blob_store.get(*transaction.hash()).ok()??; + transaction.transaction.clone().try_into_pooled_eip4844(sidecar) + } else { + transaction + .transaction + .clone() + .try_into_pooled() + .inspect_err(|err| { + debug!( + target: "txpool", %err, + "failed to convert transaction to pooled element; skipping", + ); + }) + .ok() } - None } /// Returns converted [`PooledTransactionsElement`] for the given transaction hashes. 
@@ -333,39 +347,19 @@ where limit: GetPooledTransactionLimit, ) -> Vec where - ::Transaction: - PoolTransaction>, + ::Transaction: EthPoolTransaction, { let transactions = self.get_all(tx_hashes); let mut elements = Vec::with_capacity(transactions.len()); let mut size = 0; for transaction in transactions { let encoded_len = transaction.encoded_length(); - let recovered: TransactionSignedEcRecovered = - transaction.transaction.clone().into_consensus().into(); - let tx = recovered.into_signed(); - let pooled = if tx.is_eip4844() { - // for EIP-4844 transactions, we need to fetch the blob sidecar from the blob store - if let Some(blob) = self.get_blob_transaction(tx) { - PooledTransactionsElement::BlobTransaction(blob) - } else { - continue - } - } else { - match PooledTransactionsElement::try_from(tx) { - Ok(element) => element, - Err(err) => { - debug!( - target: "txpool", %err, - "failed to convert transaction to pooled element; skipping", - ); - continue - } - } + let Some(pooled) = self.to_pooled_transaction(transaction) else { + continue; }; size += encoded_len; - elements.push(pooled); + elements.push(pooled.into()); if limit.exceeds(size) { break @@ -381,19 +375,9 @@ where tx_hash: TxHash, ) -> Option where - ::Transaction: - PoolTransaction>, + ::Transaction: EthPoolTransaction, { - self.get(&tx_hash).and_then(|transaction| { - let recovered: TransactionSignedEcRecovered = - transaction.transaction.clone().into_consensus().into(); - let tx = recovered.into_signed(); - if tx.is_eip4844() { - self.get_blob_transaction(tx).map(PooledTransactionsElement::BlobTransaction) - } else { - PooledTransactionsElement::try_from(tx).ok() - } - }) + self.get(&tx_hash).and_then(|tx| self.to_pooled_transaction(tx).map(Into::into)) } /// Updates the entire pool after a new block was executed. diff --git a/crates/transaction-pool/src/traits.rs b/crates/transaction-pool/src/traits.rs index 27bed950c501..9d19105b5dab 100644 --- a/crates/transaction-pool/src/traits.rs +++ b/crates/transaction-pool/src/traits.rs @@ -932,7 +932,7 @@ impl BestTransactionsAttributes { /// a subset of the `Pooled` format. pub trait PoolTransaction: fmt::Debug + Send + Sync + Clone { /// Associated error type for the `try_from_consensus` method. - type TryFromConsensusError; + type TryFromConsensusError: fmt::Display; /// Associated type representing the raw consensus variant of the transaction. type Consensus: From + TryInto; @@ -955,6 +955,11 @@ pub trait PoolTransaction: fmt::Debug + Send + Sync + Clone { pooled.into() } + /// Tries to convert the `Consensus` type into the `Pooled` type. + fn try_into_pooled(self) -> Result { + Self::try_consensus_into_pooled(self.into_consensus()) + } + /// Tries to convert the `Consensus` type into the `Pooled` type. fn try_consensus_into_pooled( tx: Self::Consensus, @@ -1084,7 +1089,9 @@ pub trait EthPoolTransaction: Consensus: From + Into + Into, - Pooled: From + Into, + Pooled: From + + Into + + Into, > { /// Extracts the blob sidecar from the transaction. 
From 6695d07c656e7461efeb8cd5e76d54628b1cce20 Mon Sep 17 00:00:00 2001 From: "0xriazaka.eth" <168359025+0xriazaka@users.noreply.github.com> Date: Sun, 24 Nov 2024 07:53:01 +0100 Subject: [PATCH 147/156] Make PostExectuionInput generic over receipt (#12814) Co-authored-by: Emilia Hane --- crates/consensus/consensus/src/lib.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/crates/consensus/consensus/src/lib.rs b/crates/consensus/consensus/src/lib.rs index da90439af7ff..3ad53456cbdf 100644 --- a/crates/consensus/consensus/src/lib.rs +++ b/crates/consensus/consensus/src/lib.rs @@ -30,16 +30,16 @@ pub mod test_utils; /// Post execution input passed to [`Consensus::validate_block_post_execution`]. #[derive(Debug)] -pub struct PostExecutionInput<'a> { +pub struct PostExecutionInput<'a, R = Receipt> { /// Receipts of the block. - pub receipts: &'a [Receipt], + pub receipts: &'a [R], /// EIP-7685 requests of the block. pub requests: &'a Requests, } -impl<'a> PostExecutionInput<'a> { +impl<'a, R> PostExecutionInput<'a, R> { /// Creates a new instance of `PostExecutionInput`. - pub const fn new(receipts: &'a [Receipt], requests: &'a Requests) -> Self { + pub const fn new(receipts: &'a [R], requests: &'a Requests) -> Self { Self { receipts, requests } } } From 0d6ebec5746838127b0e67d45e3f9faf6b67ee71 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Sun, 24 Nov 2024 08:46:16 +0100 Subject: [PATCH 148/156] Revert "Revert "chore(sdk): Add MaybeArbitrary as super trait"" (#12810) Co-authored-by: Emilia Hane --- Cargo.lock | 242 +++++++++--------- crates/optimism/bin/Cargo.toml | 3 +- crates/optimism/evm/Cargo.toml | 2 +- crates/optimism/node/Cargo.toml | 1 + crates/optimism/primitives/Cargo.toml | 14 + crates/optimism/primitives/src/tx_type.rs | 7 +- crates/primitives-traits/src/block/body.rs | 3 +- crates/primitives-traits/src/block/header.rs | 4 +- crates/primitives-traits/src/block/mod.rs | 16 +- crates/primitives-traits/src/header/sealed.rs | 7 +- crates/primitives-traits/src/receipt.rs | 5 +- .../src/transaction/tx_type.rs | 3 +- crates/storage/provider/Cargo.toml | 1 + 13 files changed, 175 insertions(+), 133 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 8a92aadda193..9ae0574414e0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -418,7 +418,7 @@ checksum = "2b09cae092c27b6f1bde952653a22708691802e57bfef4a2973b80bea21efd3f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -643,7 +643,7 @@ dependencies = [ "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -659,7 +659,7 @@ dependencies = [ "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", "syn-solidity", "tiny-keccak", ] @@ -675,7 +675,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", "syn-solidity", ] @@ -881,7 +881,7 @@ dependencies = [ "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -1104,7 +1104,7 @@ checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -1115,7 +1115,7 @@ checksum = "721cae7de5c34fbb2acd27e21e6d2cf7b886dce0c27388d46c4e6c47ea4318dd" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -1153,7 +1153,7 @@ checksum = "3c87f3f15e7794432337fc718554eaa4dc8f04c9677a950ffe366f20a162ae42" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] 
[[package]] @@ -1259,7 +1259,7 @@ dependencies = [ "regex", "rustc-hash 1.1.0", "shlex", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -1441,7 +1441,7 @@ checksum = "240f4126219a83519bad05c9a40bfc0303921eeb571fc2d7e44c17ffac99d3f1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", "synstructure", ] @@ -1548,9 +1548,9 @@ checksum = "c3ac9f8b63eca6fd385229b3675f6cc0dc5c8a5c8a54a59d4f52ffd670d87b0c" [[package]] name = "bytemuck" -version = "1.19.0" +version = "1.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8334215b81e418a0a7bdb8ef0849474f40bb10c8b71f1c4ed315cff49f32494d" +checksum = "8b37c88a63ffd85d15b406896cc343916d7cf57838a847b3a6f2ca5d39a5695a" dependencies = [ "bytemuck_derive", ] @@ -1563,7 +1563,7 @@ checksum = "bcfcc3cd946cb52f0bbfdbbcfa2f4e24f75ebb6c0e1002f7c25904fada18b9ec" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -1781,7 +1781,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -2225,7 +2225,7 @@ checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -2249,7 +2249,7 @@ dependencies = [ "proc-macro2", "quote", "strsim", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -2260,7 +2260,7 @@ checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" dependencies = [ "darling_core", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -2382,7 +2382,7 @@ checksum = "30542c1ad912e0e3d22a1935c290e12e8a29d704a420177a31faad4a601a0800" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -2393,7 +2393,7 @@ checksum = "5f33878137e4dafd7fa914ad4e259e18a4e8e532b9617a2d0150262bf53abfce" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -2414,7 +2414,7 @@ dependencies = [ "convert_case", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", "unicode-xid", ] @@ -2528,7 +2528,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -2678,7 +2678,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -2689,7 +2689,7 @@ checksum = "2f9ed6b3789237c8a0c1c505af1c7eb2c560df6186f01b098c3a1064ea532f38" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -2746,7 +2746,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -3307,7 +3307,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -3486,9 +3486,9 @@ dependencies = [ [[package]] name = "h2" -version = "0.4.6" +version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "524e8ac6999421f49a846c2d4411f337e53497d8ec55d67753beffa43c5d9205" +checksum = "ccae279728d634d083c00f6099cb58f01cc99c145b84b8be2f6c74618d79922e" dependencies = [ "atomic-waker", "bytes", @@ -3751,9 +3751,9 @@ dependencies = [ [[package]] name = "hyper" -version = "1.5.0" +version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbbff0a806a4728c99295b254c8838933b5b082d75e3cb70c8dab21fdfbcfa9a" +checksum = "97818827ef4f364230e16705d4706e2897df2bb60617d6ca15d598025a3c481f" 
dependencies = [ "bytes", "futures-channel", @@ -3833,7 +3833,7 @@ dependencies = [ "quote", "serde", "serde_json", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -3983,7 +3983,7 @@ checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -4044,13 +4044,13 @@ dependencies = [ [[package]] name = "impl-trait-for-tuples" -version = "0.2.2" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11d7a9f6330b71fea57921c9b61c47ee6e84f72d394754eff6163ae67e7395eb" +checksum = "a0eb5a3343abf848c0984fe4604b2b105da9539376e24fc0a3b0007411ae4fd9" dependencies = [ "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.89", ] [[package]] @@ -4172,7 +4172,7 @@ dependencies = [ "pretty_assertions", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -4273,9 +4273,9 @@ dependencies = [ [[package]] name = "itoa" -version = "1.0.11" +version = "1.0.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" +checksum = "540654e97a3f4470a492cd30ff187bc95d89557a903a2bbf112e2fae98104ef2" [[package]] name = "jni" @@ -4420,7 +4420,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -4580,9 +4580,9 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.162" +version = "0.2.164" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18d287de67fe55fd7e1581fe933d965a5a9477b38e949cfa9f8574ef01506398" +checksum = "433bfe06b8c75da9b2e3fbea6e5329ff87748f0b144ef75306e674c3f6f7c13f" [[package]] name = "libloading" @@ -4602,9 +4602,9 @@ checksum = "8355be11b20d696c8f18f6cc018c4e372165b1fa8126cef092399c9951984ffa" [[package]] name = "libp2p-identity" -version = "0.2.9" +version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55cca1eb2bc1fd29f099f3daaab7effd01e1a54b7c577d0ed082521034d912e8" +checksum = "257b5621d159b32282eac446bed6670c39c7dc68a200a992d8f056afa0066f6d" dependencies = [ "asn1_der", "bs58", @@ -4838,7 +4838,7 @@ dependencies = [ "proc-macro2", "quote", "regex", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -4965,9 +4965,9 @@ dependencies = [ [[package]] name = "mockall" -version = "0.13.0" +version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4c28b3fb6d753d28c20e826cd46ee611fda1cf3cde03a443a974043247c065a" +checksum = "39a6bfcc6c8c7eed5ee98b9c3e33adc726054389233e201c95dab2d41a3839d2" dependencies = [ "cfg-if", "downcast", @@ -4979,14 +4979,14 @@ dependencies = [ [[package]] name = "mockall_derive" -version = "0.13.0" +version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "341014e7f530314e9a1fdbc7400b244efea7122662c96bfa248c31da5bfb2020" +checksum = "25ca3004c2efe9011bd4e461bd8256445052b9615405b4f7ea43fc8ca5c20898" dependencies = [ "cfg-if", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -5234,7 +5234,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -5287,9 +5287,9 @@ checksum = "b410bbe7e14ab526a0e86877eb47c6996a2bd7746f027ba551028c925390e4e9" [[package]] name = "op-alloy-consensus" -version = "0.6.7" +version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"72da577a88d35b893fae6467112651f26ef023434c196b2a0b3dc75bc853e0e4" +checksum = "fce158d886815d419222daa67fcdf949a34f7950653a4498ebeb4963331f70ed" dependencies = [ "alloy-consensus", "alloy-eips", @@ -5305,9 +5305,9 @@ dependencies = [ [[package]] name = "op-alloy-genesis" -version = "0.6.7" +version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "818180672dd14ca6642fb57942e1cbd602669f42b6e0222b7ea9bbcae065d67e" +checksum = "2734e9a65efb90fe4520303f984c124766b7d2f2e5dd51cbe54d6269c85a3c91" dependencies = [ "alloy-consensus", "alloy-eips", @@ -5320,9 +5320,9 @@ dependencies = [ [[package]] name = "op-alloy-network" -version = "0.6.7" +version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12f82e805bad171ceae2af45efaecf8d0b50622cff3473e3c998ff1dd340de35" +checksum = "87e4aef8ed017004a176ab1de49df419f59c0fb4a6ce3b693a10fe099fe1afe7" dependencies = [ "alloy-consensus", "alloy-network", @@ -5335,9 +5335,9 @@ dependencies = [ [[package]] name = "op-alloy-protocol" -version = "0.6.7" +version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1803a1ac96203b8f713b1fa9b7509c46c645ca7bc22b582761a7495e999d4301" +checksum = "6c68a3e2770890da3ad2fd20d7fe0c8e15672707577b4168a60e388c8eceaca0" dependencies = [ "alloc-no-stdlib", "alloy-consensus", @@ -5358,9 +5358,9 @@ dependencies = [ [[package]] name = "op-alloy-rpc-types" -version = "0.6.7" +version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a838c125256e02e2f9da88c51e263b02a06cda7e60382fe2551a3385b516f5bb" +checksum = "060ebeaea8c772e396215f69bb86d231ec8b7f36aca0dd6ce367ceaa9a8c33e6" dependencies = [ "alloy-consensus", "alloy-eips", @@ -5377,9 +5377,9 @@ dependencies = [ [[package]] name = "op-alloy-rpc-types-engine" -version = "0.6.7" +version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c227fcc7d81d4023363ba12406e57ebcc1c7cbb1075c38ea471ae32138d4706d" +checksum = "864dbd5511ef4ef00b6c2c980739259b25b24048007b7751ca0069b30b1e3fee" dependencies = [ "alloy-eips", "alloy-primitives", @@ -5471,9 +5471,9 @@ dependencies = [ [[package]] name = "parity-scale-codec" -version = "3.6.12" +version = "3.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "306800abfa29c7f16596b5970a588435e3d5b3149683d00c12b699cc19f895ee" +checksum = "8be4817d39f3272f69c59fe05d0535ae6456c2dc2fa1ba02910296c7e0a5c590" dependencies = [ "arbitrary", "arrayvec", @@ -5482,19 +5482,20 @@ dependencies = [ "bytes", "impl-trait-for-tuples", "parity-scale-codec-derive", + "rustversion", "serde", ] [[package]] name = "parity-scale-codec-derive" -version = "3.6.12" +version = "3.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d830939c76d294956402033aee57a6da7b438f2294eb94864c37b0569053a42c" +checksum = "8781a75c6205af67215f382092b6e0a4ff3734798523e69073d4bcd294ec767b" dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.89", ] [[package]] @@ -5609,7 +5610,7 @@ dependencies = [ "phf_shared", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -5638,7 +5639,7 @@ checksum = "3c0f5fad0874fc7abcd4d750e76917eaebbecaa2c20bde22e1dbeeba8beb758c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -5810,7 +5811,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"64d1ec885c64d0457d564db4ec299b2dae3f9c02808b8ad9c3a089c591b18033" dependencies = [ "proc-macro2", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -5861,14 +5862,14 @@ dependencies = [ "proc-macro-error-attr2", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] name = "proc-macro2" -version = "1.0.89" +version = "1.0.91" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f139b0662de085916d1fb67d2b4169d1addddda1919e696f3252b740b629986e" +checksum = "307e3004becf10f5a6e0d59d20f3cd28231b0e0827a96cd3e0ce6d14bc1e4bb3" dependencies = [ "unicode-ident", ] @@ -5959,7 +5960,7 @@ checksum = "6ff7ff745a347b87471d859a377a9a404361e7efc2a971d73424a6d183c0fc77" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -6316,7 +6317,7 @@ dependencies = [ "serde", "serde_json", "serde_urlencoded", - "sync_wrapper 1.0.1", + "sync_wrapper 1.0.2", "tokio", "tokio-rustls", "tokio-util", @@ -6771,7 +6772,7 @@ dependencies = [ "proc-macro2", "quote", "similar-asserts", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -8389,6 +8390,7 @@ dependencies = [ "alloy-eips", "alloy-primitives", "alloy-rlp", + "arbitrary", "bytes", "derive_more 1.0.0", "op-alloy-consensus", @@ -9683,7 +9685,7 @@ dependencies = [ "regex", "relative-path", "rustc_version 0.4.1", - "syn 2.0.87", + "syn 2.0.89", "unicode-ident", ] @@ -9765,9 +9767,9 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.40" +version = "0.38.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "99e4ea3e1cdc4b559b8e5650f9c8e5998e3e5c1343b4eaf034565f32318d63c0" +checksum = "d7f649912bc1495e167a6edee79151c84b1bad49748cb4f1f1167f459f6224f6" dependencies = [ "bitflags 2.6.0", "errno", @@ -9778,9 +9780,9 @@ dependencies = [ [[package]] name = "rustls" -version = "0.23.16" +version = "0.23.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eee87ff5d9b36712a58574e12e9f0ea80f915a5b0ac518d322b24a465617925e" +checksum = "7f1a745511c54ba6d4465e8d5dfbd81b45791756de28d4981af70d6dca128f1e" dependencies = [ "log", "once_cell", @@ -9923,9 +9925,9 @@ dependencies = [ [[package]] name = "schannel" -version = "0.1.26" +version = "0.1.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01227be5826fa0690321a2ba6c5cd57a19cf3f6a09e76973b58e61de6ab9d1c1" +checksum = "1f29ebaa345f945cec9fbbc532eb307f0fdad8161f281b6369539c8d84876b3d" dependencies = [ "windows-sys 0.59.0", ] @@ -10037,9 +10039,9 @@ dependencies = [ [[package]] name = "semver-parser" -version = "0.10.2" +version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00b0bef5b7f9e0df16536d3961cfb6e84331c065b4066afb39768d0e319411f7" +checksum = "9900206b54a3527fdc7b8a938bffd94a568bac4f4aa8113b209df75a09c0dec2" dependencies = [ "pest", ] @@ -10073,14 +10075,14 @@ checksum = "ad1e866f866923f252f05c889987993144fb74e722403468a4ebd70c3cd756c0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] name = "serde_json" -version = "1.0.132" +version = "1.0.133" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d726bfaff4b320266d395898905d0eba0345aae23b54aee3a737e260fd46db03" +checksum = "c7fceb2473b9166b2294ef05efcb65a3db80803f0b03ef86a5fc88a2b85ee377" dependencies = [ "indexmap 2.6.0", "itoa", @@ -10108,7 +10110,7 @@ checksum = "6c64451ba24fc7a6a2d60fc75dd9c83c90903b19028d4eff35e88fc1e86564e9" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 
2.0.89", ] [[package]] @@ -10159,7 +10161,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -10182,7 +10184,7 @@ checksum = "5d69265a08751de7844521fd15003ae0a888e035773ba05695c5c759a6f89eef" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -10465,7 +10467,7 @@ dependencies = [ "proc-macro2", "quote", "rustversion", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -10523,9 +10525,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.87" +version = "2.0.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25aa4ce346d03a6dcd68dd8b4010bcb74e54e62c90c573f394c46eae99aba32d" +checksum = "44d46482f1c1c87acd84dea20c1bf5ebff4c757009ed6bf19cfd36fb10e92c4e" dependencies = [ "proc-macro2", "quote", @@ -10541,7 +10543,7 @@ dependencies = [ "paste", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -10552,9 +10554,9 @@ checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" [[package]] name = "sync_wrapper" -version = "1.0.1" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7065abeca94b6a8a577f9bd45aa0867a2238b74e8eb67cf10d492bc39351394" +checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" dependencies = [ "futures-core", ] @@ -10567,7 +10569,7 @@ checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -10644,7 +10646,7 @@ dependencies = [ "prettyplease", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -10692,7 +10694,7 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -10703,7 +10705,7 @@ checksum = "f077553d607adc1caf65430528a576c757a71ed73944b66ebb58ef2bbd243568" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -10880,7 +10882,7 @@ checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -11008,9 +11010,9 @@ dependencies = [ [[package]] name = "tower-http" -version = "0.6.1" +version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8437150ab6bbc8c5f0f519e3d5ed4aa883a83dd4cdd3d1b21f9482936046cb97" +checksum = "403fa3b783d4b626a8ad51d766ab03cb6d2dbfc46b1c5d4448395e6628dc9697" dependencies = [ "async-compression", "base64 0.22.1", @@ -11081,7 +11083,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -11330,9 +11332,9 @@ checksum = "5ab17db44d7388991a428b2ee655ce0c212e862eff1768a455c58f9aad6e7893" [[package]] name = "unicode-ident" -version = "1.0.13" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e91b56cd4cadaeb79bbf1a5645f6b4f8dc5bde8834ad5894a8db35fda9efa1fe" +checksum = "adb9e6ca4f869e1180728b7950e35922a7fc6397f7b641499e8f3ef06e50dc83" [[package]] name = "unicode-normalization" @@ -11479,7 +11481,7 @@ checksum = "d674d135b4a8c1d7e813e2f8d1c9a58308aee4a680323066025e53132218bd91" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -11550,7 +11552,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - 
"syn 2.0.87", + "syn 2.0.89", "wasm-bindgen-shared", ] @@ -11584,7 +11586,7 @@ checksum = "26c6ab57572f7a24a4985830b120de1594465e5d500f24afe89e16b4e833ef68" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -11610,9 +11612,9 @@ dependencies = [ [[package]] name = "wasmtimer" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb4f099acbc1043cc752b91615b24b02d7f6fcd975bd781fed9f50b3c3e15bf7" +checksum = "0048ad49a55b9deb3953841fa1fc5858f0efbcb7a18868c899a360269fac1b23" dependencies = [ "futures", "js-sys", @@ -11750,7 +11752,7 @@ checksum = "9107ddc059d5b6fbfbffdfa7a7fe3e22a226def0b2608f72e9d552763d3e1ad7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -11761,7 +11763,7 @@ checksum = "2bbd5b46c938e506ecbce286b6628a02171d56153ba733b6c741fc627ec9579b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -11772,7 +11774,7 @@ checksum = "29bee4b38ea3cde66011baa44dba677c432a78593e202392d1e9070cf2a7fca7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -11783,7 +11785,7 @@ checksum = "053c4c462dc91d3b1504c6fe5a726dd15e216ba718e84a0e46a88fbe5ded3515" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -12058,7 +12060,7 @@ checksum = "28cc31741b18cb6f1d5ff12f5b7523e3d6eb0852bbbad19d73905511d9849b95" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", "synstructure", ] @@ -12080,7 +12082,7 @@ checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -12100,7 +12102,7 @@ checksum = "0ea7b4a3637ea8669cedf0f1fd5c286a17f3de97b8dd5a70a6c167a1730e63a5" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", "synstructure", ] @@ -12121,7 +12123,7 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -12143,7 +12145,7 @@ checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] diff --git a/crates/optimism/bin/Cargo.toml b/crates/optimism/bin/Cargo.toml index 77166763100a..45f4492e82b6 100644 --- a/crates/optimism/bin/Cargo.toml +++ b/crates/optimism/bin/Cargo.toml @@ -48,7 +48,8 @@ optimism = [ ] dev = [ - "reth-optimism-cli/dev" + "reth-optimism-cli/dev", + "reth-optimism-primitives/arbitrary", ] min-error-logs = ["tracing/release_max_level_error"] diff --git a/crates/optimism/evm/Cargo.toml b/crates/optimism/evm/Cargo.toml index 98496bb26534..807f224ca4b8 100644 --- a/crates/optimism/evm/Cargo.toml +++ b/crates/optimism/evm/Cargo.toml @@ -48,7 +48,7 @@ reth-primitives = { workspace = true, features = ["test-utils"] } reth-optimism-chainspec.workspace = true alloy-genesis.workspace = true alloy-consensus.workspace = true -reth-optimism-primitives.workspace = true +reth-optimism-primitives = { workspace = true, features = ["arbitrary"] } [features] default = ["std"] diff --git a/crates/optimism/node/Cargo.toml b/crates/optimism/node/Cargo.toml index 18ceee8ef8b7..fbc055a82e98 100644 --- a/crates/optimism/node/Cargo.toml +++ b/crates/optimism/node/Cargo.toml @@ -119,6 +119,7 @@ test-utils = [ "reth-trie-db/test-utils", "revm/test-utils", 
"reth-optimism-node/test-utils", + "reth-optimism-primitives/arbitrary", ] reth-codec = [ "reth-primitives/reth-codec", diff --git a/crates/optimism/primitives/Cargo.toml b/crates/optimism/primitives/Cargo.toml index fc368807736f..33f936b2fd13 100644 --- a/crates/optimism/primitives/Cargo.toml +++ b/crates/optimism/primitives/Cargo.toml @@ -34,9 +34,13 @@ serde = { workspace = true, optional = true } # misc derive_more.workspace = true +# test-utils +arbitrary = { workspace = true, features = ["derive"], optional = true } + [dev-dependencies] reth-codecs = { workspace = true, features = ["test-utils"] } rstest.workspace = true +arbitrary.workspace = true [features] default = ["std", "reth-codec"] @@ -65,3 +69,13 @@ serde = [ "reth-codecs/serde", "op-alloy-consensus/serde", ] +arbitrary = [ + "dep:arbitrary", + "reth-primitives-traits/arbitrary", + "reth-primitives/arbitrary", + "reth-codecs?/arbitrary", + "op-alloy-consensus/arbitrary", + "alloy-consensus/arbitrary", + "alloy-eips/arbitrary", + "alloy-primitives/arbitrary", +] \ No newline at end of file diff --git a/crates/optimism/primitives/src/tx_type.rs b/crates/optimism/primitives/src/tx_type.rs index c6e7fcc0a806..9976221b4240 100644 --- a/crates/optimism/primitives/src/tx_type.rs +++ b/crates/optimism/primitives/src/tx_type.rs @@ -2,10 +2,11 @@ //! `OpTxType` implements `reth_primitives_traits::TxType`. //! This type is required because a `Compact` impl is needed on the deposit tx type. +use core::fmt::Debug; + use alloy_primitives::{U64, U8}; use alloy_rlp::{Decodable, Encodable, Error}; use bytes::BufMut; -use core::fmt::Debug; use derive_more::{ derive::{From, Into}, Display, @@ -13,8 +14,10 @@ use derive_more::{ use op_alloy_consensus::OpTxType as AlloyOpTxType; use reth_primitives_traits::{InMemorySize, TxType}; -/// Wrapper type for [`op_alloy_consensus::OpTxType`] to implement [`TxType`] trait. +/// Wrapper type for [`op_alloy_consensus::OpTxType`] to implement +/// [`TxType`] trait. #[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Display, Ord, Hash, From, Into)] +#[cfg_attr(any(test, feature = "arbitrary"), derive(arbitrary::Arbitrary))] #[into(u8)] pub struct OpTxType(AlloyOpTxType); diff --git a/crates/primitives-traits/src/block/body.rs b/crates/primitives-traits/src/block/body.rs index ff41536ba3f0..fd7f7f1c631f 100644 --- a/crates/primitives-traits/src/block/body.rs +++ b/crates/primitives-traits/src/block/body.rs @@ -4,7 +4,7 @@ use alloc::fmt; use alloy_consensus::Transaction; -use crate::{FullSignedTx, InMemorySize, MaybeSerde}; +use crate::{FullSignedTx, InMemorySize, MaybeArbitrary, MaybeSerde}; /// Helper trait that unifies all behaviour required by transaction to support full node operations. pub trait FullBlockBody: BlockBody {} @@ -26,6 +26,7 @@ pub trait BlockBody: + alloy_rlp::Decodable + InMemorySize + MaybeSerde + + MaybeArbitrary { /// Ordered list of signed transactions as committed in block. type Transaction: Transaction; diff --git a/crates/primitives-traits/src/block/header.rs b/crates/primitives-traits/src/block/header.rs index 695e63ed10ee..26806808532b 100644 --- a/crates/primitives-traits/src/block/header.rs +++ b/crates/primitives-traits/src/block/header.rs @@ -4,7 +4,7 @@ use core::fmt; use alloy_primitives::Sealable; -use crate::{InMemorySize, MaybeCompact, MaybeSerde}; +use crate::{InMemorySize, MaybeArbitrary, MaybeCompact, MaybeSerde}; /// Helper trait that unifies all behaviour required by block header to support full node /// operations. 
@@ -28,6 +28,7 @@ pub trait BlockHeader: + Sealable + InMemorySize + MaybeSerde + + MaybeArbitrary { } @@ -46,5 +47,6 @@ impl BlockHeader for T where + Sealable + InMemorySize + MaybeSerde + + MaybeArbitrary { } diff --git a/crates/primitives-traits/src/block/mod.rs b/crates/primitives-traits/src/block/mod.rs index 3f4fbd343eec..c0f5a1ffc63c 100644 --- a/crates/primitives-traits/src/block/mod.rs +++ b/crates/primitives-traits/src/block/mod.rs @@ -5,7 +5,9 @@ pub mod header; use alloc::fmt; -use crate::{BlockHeader, FullBlockBody, FullBlockHeader, InMemorySize, MaybeSerde}; +use crate::{ + BlockHeader, FullBlockBody, FullBlockHeader, InMemorySize, MaybeArbitrary, MaybeSerde, +}; /// Helper trait that unifies all behaviour required by block to support full node operations. pub trait FullBlock: @@ -26,7 +28,17 @@ impl FullBlock for T where // senders #[auto_impl::auto_impl(&, Arc)] pub trait Block: - Send + Sync + Unpin + Clone + Default + fmt::Debug + PartialEq + Eq + InMemorySize + MaybeSerde + Send + + Sync + + Unpin + + Clone + + Default + + fmt::Debug + + PartialEq + + Eq + + InMemorySize + + MaybeSerde + + MaybeArbitrary { /// Header part of the block. type Header: BlockHeader + 'static; diff --git a/crates/primitives-traits/src/header/sealed.rs b/crates/primitives-traits/src/header/sealed.rs index f0a6869ed1e9..08add0ac3c15 100644 --- a/crates/primitives-traits/src/header/sealed.rs +++ b/crates/primitives-traits/src/header/sealed.rs @@ -159,9 +159,12 @@ impl From> for Sealed { } #[cfg(any(test, feature = "arbitrary"))] -impl<'a> arbitrary::Arbitrary<'a> for SealedHeader { +impl<'a, H> arbitrary::Arbitrary<'a> for SealedHeader +where + H: for<'b> arbitrary::Arbitrary<'b> + Sealable, +{ fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result { - let header = Header::arbitrary(u)?; + let header = H::arbitrary(u)?; Ok(Self::seal(header)) } diff --git a/crates/primitives-traits/src/receipt.rs b/crates/primitives-traits/src/receipt.rs index 4370d2ac00f7..e2af40c447ed 100644 --- a/crates/primitives-traits/src/receipt.rs +++ b/crates/primitives-traits/src/receipt.rs @@ -1,12 +1,12 @@ //! Receipt abstraction +use alloc::vec::Vec; use core::fmt; -use alloc::vec::Vec; use alloy_consensus::TxReceipt; use alloy_primitives::B256; -use crate::{InMemorySize, MaybeCompact, MaybeSerde}; +use crate::{InMemorySize, MaybeArbitrary, MaybeCompact, MaybeSerde}; /// Helper trait that unifies all behaviour required by receipt to support full node operations. pub trait FullReceipt: Receipt + MaybeCompact {} @@ -27,6 +27,7 @@ pub trait Receipt: + alloy_rlp::Decodable + MaybeSerde + InMemorySize + + MaybeArbitrary { /// Returns transaction type. fn tx_type(&self) -> u8; diff --git a/crates/primitives-traits/src/transaction/tx_type.rs b/crates/primitives-traits/src/transaction/tx_type.rs index 931fcb773bf4..d2caebe4c9f1 100644 --- a/crates/primitives-traits/src/transaction/tx_type.rs +++ b/crates/primitives-traits/src/transaction/tx_type.rs @@ -4,7 +4,7 @@ use core::fmt; use alloy_primitives::{U64, U8}; -use crate::{InMemorySize, MaybeCompact}; +use crate::{InMemorySize, MaybeArbitrary, MaybeCompact}; /// Helper trait that unifies all behaviour required by transaction type ID to support full node /// operations. @@ -33,6 +33,7 @@ pub trait TxType: + alloy_rlp::Encodable + alloy_rlp::Decodable + InMemorySize + + MaybeArbitrary { /// Returns `true` if this is a legacy transaction. 
fn is_legacy(&self) -> bool; diff --git a/crates/storage/provider/Cargo.toml b/crates/storage/provider/Cargo.toml index 974de01e0045..86f2f3a51b91 100644 --- a/crates/storage/provider/Cargo.toml +++ b/crates/storage/provider/Cargo.toml @@ -131,4 +131,5 @@ test-utils = [ "revm/test-utils", "reth-prune-types/test-utils", "reth-stages-types/test-utils", + "reth-optimism-primitives?/arbitrary", ] From a552b1ffc9a6a40a84d3846c789ef1b46c8ab547 Mon Sep 17 00:00:00 2001 From: Tien Nguyen <116023870+htiennv@users.noreply.github.com> Date: Sun, 24 Nov 2024 15:57:34 +0700 Subject: [PATCH 149/156] chore: make `EngineSyncEvent` generic over data primitives (#12827) --- crates/consensus/beacon/src/engine/sync.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/consensus/beacon/src/engine/sync.rs b/crates/consensus/beacon/src/engine/sync.rs index b6e75f802e30..b140846981e9 100644 --- a/crates/consensus/beacon/src/engine/sync.rs +++ b/crates/consensus/beacon/src/engine/sync.rs @@ -10,7 +10,7 @@ use reth_network_p2p::{ full_block::{FetchFullBlockFuture, FetchFullBlockRangeFuture, FullBlockClient}, EthBlockClient, }; -use reth_primitives::SealedBlock; +use reth_primitives::{EthPrimitives, NodePrimitives, SealedBlock}; use reth_provider::providers::ProviderNodeTypes; use reth_stages_api::{ControlFlow, Pipeline, PipelineError, PipelineTarget, PipelineWithResult}; use reth_tasks::TaskSpawner; @@ -361,9 +361,9 @@ impl Ord for OrderedSealedBlock { /// The event type emitted by the [`EngineSyncController`]. #[derive(Debug)] -pub(crate) enum EngineSyncEvent { +pub(crate) enum EngineSyncEvent<N: NodePrimitives = EthPrimitives> { /// A full block has been downloaded from the network. - FetchedFullBlock(SealedBlock), + FetchedFullBlock(SealedBlock<N::BlockHeader, N::BlockBody>), /// Pipeline started syncing /// /// This is none if the pipeline is triggered without a specific target.
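
The pattern behind [PATCH 149/156] is a type parameter bound by `NodePrimitives` with `EthPrimitives` as the default, so call sites that keep naming the event type without a parameter continue to compile unchanged. Below is a minimal standalone sketch of that pattern, not taken from the patch itself; `Primitives`, `DefaultPrimitives`, and `SyncEvent` are hypothetical stand-ins for the actual reth types.

use std::fmt::Debug;

/// Stand-in for reth's `NodePrimitives`: a trait grouping the concrete data types a node flavour uses.
trait Primitives {
    type Block: Debug;
}

/// Stand-in for `EthPrimitives`, the default type-level configuration.
#[derive(Debug)]
struct DefaultPrimitives;

impl Primitives for DefaultPrimitives {
    // A real implementation would plug in a sealed block type here.
    type Block = String;
}

/// Stand-in for an event enum made generic over the primitives trait,
/// with a default so non-generic call sites keep compiling.
#[derive(Debug)]
enum SyncEvent<P: Primitives = DefaultPrimitives> {
    /// A full block has been downloaded from the network.
    FetchedFullBlock(P::Block),
    /// Sync finished without producing a block.
    Finished,
}

fn main() {
    // Uses the default primitives; no type parameter needs to be spelled out.
    let fetched: SyncEvent = SyncEvent::FetchedFullBlock("block 0xabc".to_owned());
    println!("{fetched:?}");

    // The parameter can still be named explicitly when a different node flavour is wired in.
    let finished: SyncEvent<DefaultPrimitives> = SyncEvent::Finished;
    println!("{finished:?}");
}
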
From 21bc75df39ee823b3ae9f59623a55d6d74dcd99f Mon Sep 17 00:00:00 2001 From: tedison <76473430+edisontim@users.noreply.github.com> Date: Sun, 24 Nov 2024 04:08:36 -0500 Subject: [PATCH 150/156] feat: start implementing OpTransaction (#12529) Co-authored-by: Emilia Hane --- crates/optimism/primitives/Cargo.toml | 8 +- crates/optimism/primitives/src/lib.rs | 4 +- .../primitives/src/transaction/mod.rs | 173 ++++++++++++++++++ .../src/{ => transaction}/tx_type.rs | 0 crates/primitives/src/transaction/mod.rs | 6 + crates/primitives/src/transaction/tx_type.rs | 20 +- 6 files changed, 204 insertions(+), 7 deletions(-) create mode 100644 crates/optimism/primitives/src/transaction/mod.rs rename crates/optimism/primitives/src/{ => transaction}/tx_type.rs (100%) diff --git a/crates/optimism/primitives/Cargo.toml b/crates/optimism/primitives/Cargo.toml index 33f936b2fd13..e7200c40ed80 100644 --- a/crates/optimism/primitives/Cargo.toml +++ b/crates/optimism/primitives/Cargo.toml @@ -16,7 +16,7 @@ workspace = true reth-node-types.workspace = true reth-primitives.workspace = true reth-primitives-traits.workspace = true -reth-codecs = { workspace = true, optional = true } +reth-codecs = { workspace = true, optional = true, features = ["optimism"] } # ethereum alloy-primitives.workspace = true @@ -34,7 +34,7 @@ serde = { workspace = true, optional = true } # misc derive_more.workspace = true -# test-utils +# test arbitrary = { workspace = true, features = ["derive"], optional = true } [dev-dependencies] @@ -66,7 +66,7 @@ serde = [ "alloy-consensus/serde", "alloy-eips/serde", "bytes/serde", - "reth-codecs/serde", + "reth-codecs?/serde", "op-alloy-consensus/serde", ] arbitrary = [ @@ -78,4 +78,4 @@ arbitrary = [ "alloy-consensus/arbitrary", "alloy-eips/arbitrary", "alloy-primitives/arbitrary", -] \ No newline at end of file +] diff --git a/crates/optimism/primitives/src/lib.rs b/crates/optimism/primitives/src/lib.rs index 334440ea1062..0f4608a8ebe8 100644 --- a/crates/optimism/primitives/src/lib.rs +++ b/crates/optimism/primitives/src/lib.rs @@ -9,9 +9,9 @@ #![cfg_attr(not(feature = "std"), no_std)] pub mod bedrock; -pub mod tx_type; +pub mod transaction; -pub use tx_type::OpTxType; +pub use transaction::{tx_type::OpTxType, OpTransaction}; use alloy_consensus::Header; use reth_node_types::NodePrimitives; diff --git a/crates/optimism/primitives/src/transaction/mod.rs b/crates/optimism/primitives/src/transaction/mod.rs new file mode 100644 index 000000000000..070b3d984e0b --- /dev/null +++ b/crates/optimism/primitives/src/transaction/mod.rs @@ -0,0 +1,173 @@ +//! Wrapper of [`OpTypedTransaction`], that implements reth database encoding [`Compact`]. + +pub mod tx_type; + +use alloy_primitives::{bytes, Bytes, TxKind, Uint, B256}; + +use alloy_consensus::{constants::EIP7702_TX_TYPE_ID, TxLegacy}; +use alloy_eips::{eip2930::AccessList, eip7702::SignedAuthorization}; +use derive_more::{Deref, From}; +use op_alloy_consensus::{OpTypedTransaction, DEPOSIT_TX_TYPE_ID}; +use reth_codecs::Compact; +use reth_primitives::transaction::{ + COMPACT_EXTENDED_IDENTIFIER_FLAG, COMPACT_IDENTIFIER_EIP1559, COMPACT_IDENTIFIER_EIP2930, + COMPACT_IDENTIFIER_LEGACY, +}; +use reth_primitives_traits::InMemorySize; + +#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[derive(Debug, Clone, PartialEq, Eq, Deref, Hash, From)] +/// Optimistic transaction. 
+pub struct OpTransaction(OpTypedTransaction); + +impl Default for OpTransaction { + fn default() -> Self { + Self(OpTypedTransaction::Legacy(TxLegacy::default())) + } +} + +impl Compact for OpTransaction { + fn to_compact<B>(&self, out: &mut B) -> usize + where + B: bytes::BufMut + AsMut<[u8]>, + { + match &self.0 { + OpTypedTransaction::Legacy(tx) => tx.to_compact(out), + OpTypedTransaction::Eip2930(tx) => tx.to_compact(out), + OpTypedTransaction::Eip1559(tx) => tx.to_compact(out), + OpTypedTransaction::Eip7702(tx) => tx.to_compact(out), + OpTypedTransaction::Deposit(tx) => tx.to_compact(out), + } + } + + fn from_compact(mut buf: &[u8], identifier: usize) -> (Self, &[u8]) { + use bytes::Buf; + + match identifier { + COMPACT_IDENTIFIER_LEGACY => { + let (tx, buf) = TxLegacy::from_compact(buf, buf.len()); + (Self(OpTypedTransaction::Legacy(tx)), buf) + } + COMPACT_IDENTIFIER_EIP2930 => { + let (tx, buf) = + alloy_consensus::transaction::TxEip2930::from_compact(buf, buf.len()); + (Self(OpTypedTransaction::Eip2930(tx)), buf) + } + COMPACT_IDENTIFIER_EIP1559 => { + let (tx, buf) = + alloy_consensus::transaction::TxEip1559::from_compact(buf, buf.len()); + (Self(OpTypedTransaction::Eip1559(tx)), buf) + } + COMPACT_EXTENDED_IDENTIFIER_FLAG => { + // An identifier of 3 indicates that the transaction type did not fit into + // the backwards compatible 2 bit identifier, their transaction types are + // larger than 2 bits (eg. 4844 and Deposit Transactions). In this case, + // we need to read the concrete transaction type from the buffer by + // reading the full 8 bits (single byte) and match on this transaction type. + let identifier = buf.get_u8(); + match identifier { + EIP7702_TX_TYPE_ID => { + let (tx, buf) = + alloy_consensus::transaction::TxEip7702::from_compact(buf, buf.len()); + (Self(OpTypedTransaction::Eip7702(tx)), buf) + } + DEPOSIT_TX_TYPE_ID => { + let (tx, buf) = op_alloy_consensus::TxDeposit::from_compact(buf, buf.len()); + (Self(OpTypedTransaction::Deposit(tx)), buf) + } + _ => unreachable!( + "Junk data in database: unknown Transaction variant: {identifier}" + ), + } + } + _ => unreachable!("Junk data in database: unknown Transaction variant: {identifier}"), + } + } +} + +impl alloy_consensus::Transaction for OpTransaction { + fn chain_id(&self) -> Option<u64> { + self.0.chain_id() + } + + fn nonce(&self) -> u64 { + self.0.nonce() + } + + fn gas_limit(&self) -> u64 { + self.0.gas_limit() + } + + fn gas_price(&self) -> Option<u128> { + self.0.gas_price() + } + + fn max_fee_per_gas(&self) -> u128 { + self.0.max_fee_per_gas() + } + + fn max_priority_fee_per_gas(&self) -> Option<u128> { + self.0.max_priority_fee_per_gas() + } + + fn max_fee_per_blob_gas(&self) -> Option<u128> { + self.0.max_fee_per_blob_gas() + } + + fn priority_fee_or_price(&self) -> u128 { + self.0.priority_fee_or_price() + } + + fn kind(&self) -> TxKind { + self.0.kind() + } + + fn value(&self) -> Uint<256, 4> { + self.0.value() + } + + fn input(&self) -> &Bytes { + self.0.input() + } + + fn ty(&self) -> u8 { + self.0.ty() + } + + fn access_list(&self) -> Option<&AccessList> { + self.0.access_list() + } + + fn blob_versioned_hashes(&self) -> Option<&[B256]> { + self.0.blob_versioned_hashes() + } + + fn authorization_list(&self) -> Option<&[SignedAuthorization]> { + self.0.authorization_list() + } + + fn is_dynamic_fee(&self) -> bool { + self.0.is_dynamic_fee() + } + + fn effective_gas_price(&self, base_fee: Option<u64>) -> u128 { + self.0.effective_gas_price(base_fee) + } + + fn effective_tip_per_gas(&self, base_fee: u64) -> Option<u128> { + 
self.0.effective_tip_per_gas(base_fee) + } +} + +impl InMemorySize for OpTransaction { + fn size(&self) -> usize { + match &self.0 { + OpTypedTransaction::Legacy(tx) => tx.size(), + OpTypedTransaction::Eip2930(tx) => tx.size(), + OpTypedTransaction::Eip1559(tx) => tx.size(), + OpTypedTransaction::Eip7702(tx) => tx.size(), + OpTypedTransaction::Deposit(tx) => tx.size(), + } + } +} diff --git a/crates/optimism/primitives/src/tx_type.rs b/crates/optimism/primitives/src/transaction/tx_type.rs similarity index 100% rename from crates/optimism/primitives/src/tx_type.rs rename to crates/optimism/primitives/src/transaction/tx_type.rs diff --git a/crates/primitives/src/transaction/mod.rs b/crates/primitives/src/transaction/mod.rs index ca03bfe4f7c5..b8a3f4a719bc 100644 --- a/crates/primitives/src/transaction/mod.rs +++ b/crates/primitives/src/transaction/mod.rs @@ -56,6 +56,12 @@ pub mod signature; pub(crate) mod util; +#[cfg(any(test, feature = "reth-codec"))] +pub use tx_type::{ + COMPACT_EXTENDED_IDENTIFIER_FLAG, COMPACT_IDENTIFIER_EIP1559, COMPACT_IDENTIFIER_EIP2930, + COMPACT_IDENTIFIER_LEGACY, +}; + /// Expected number of transactions where we can expect a speed-up by recovering the senders in /// parallel. pub static PARALLEL_SENDER_RECOVERY_THRESHOLD: LazyLock = diff --git a/crates/primitives/src/transaction/tx_type.rs b/crates/primitives/src/transaction/tx_type.rs index 0e344374d202..784a976ab792 100644 --- a/crates/primitives/src/transaction/tx_type.rs +++ b/crates/primitives/src/transaction/tx_type.rs @@ -8,6 +8,24 @@ use derive_more::Display; use reth_primitives_traits::InMemorySize; use serde::{Deserialize, Serialize}; +/// Identifier parameter for legacy transaction +#[cfg(any(test, feature = "reth-codec"))] +pub const COMPACT_IDENTIFIER_LEGACY: usize = 0; + +/// Identifier parameter for EIP-2930 transaction +#[cfg(any(test, feature = "reth-codec"))] +pub const COMPACT_IDENTIFIER_EIP2930: usize = 1; + +/// Identifier parameter for EIP-1559 transaction +#[cfg(any(test, feature = "reth-codec"))] +pub const COMPACT_IDENTIFIER_EIP1559: usize = 2; + +/// For backwards compatibility purposes only 2 bits of the type are encoded in the identifier +/// parameter. In the case of a [`COMPACT_EXTENDED_IDENTIFIER_FLAG`], the full transaction type is +/// read from the buffer as a single byte. 
+#[cfg(any(test, feature = "reth-codec"))] +pub const COMPACT_EXTENDED_IDENTIFIER_FLAG: usize = 3; + /// Transaction Type /// /// Currently being used as 2-bit type when encoding it to `reth_codecs::Compact` on @@ -256,7 +274,7 @@ impl Decodable for TxType { mod tests { use super::*; use alloy_primitives::hex; - use reth_codecs::{txtype::*, Compact}; + use reth_codecs::Compact; use reth_primitives_traits::TxType as _; use rstest::rstest; From e020eb71bd501e512e49d7d5d8dcf091ec0c42ec Mon Sep 17 00:00:00 2001 From: Federico Gimenez Date: Sun, 24 Nov 2024 11:28:31 +0100 Subject: [PATCH 151/156] chore(payload): fix withdrawals field pre-shanghai in Ethereum payload (#12828) --- crates/ethereum/payload/src/lib.rs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/crates/ethereum/payload/src/lib.rs b/crates/ethereum/payload/src/lib.rs index 24312fecbf48..b2f78da6de97 100644 --- a/crates/ethereum/payload/src/lib.rs +++ b/crates/ethereum/payload/src/lib.rs @@ -440,14 +440,14 @@ where requests_hash, }; + let withdrawals = chain_spec + .is_shanghai_active_at_timestamp(attributes.timestamp) + .then(|| attributes.withdrawals.clone()); + // seal the block let block = Block { header, - body: BlockBody { - transactions: executed_txs, - ommers: vec![], - withdrawals: Some(attributes.withdrawals.clone()), - }, + body: BlockBody { transactions: executed_txs, ommers: vec![], withdrawals }, }; let sealed_block = Arc::new(block.seal_slow()); From 8958e9f4df42cdf2a1ca443efeb168db7ead1dea Mon Sep 17 00:00:00 2001 From: Federico Gimenez Date: Sun, 24 Nov 2024 11:50:02 +0100 Subject: [PATCH 152/156] chore(ci): run hive 2x/day (#12829) --- .github/workflows/hive.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/hive.yml b/.github/workflows/hive.yml index 6c50923d3e6b..b8d3f378fca4 100644 --- a/.github/workflows/hive.yml +++ b/.github/workflows/hive.yml @@ -5,8 +5,8 @@ name: hive on: workflow_dispatch: schedule: - # every day - - cron: "0 0 * * *" + # run every 12 hours + - cron: "0 */12 * * *" env: CARGO_TERM_COLOR: always From 736edf70ad5227141cf43c0320208c86b608bd6b Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Sun, 24 Nov 2024 12:03:56 +0100 Subject: [PATCH 153/156] fix: check withdrawals in op builder (#12832) --- crates/optimism/payload/src/builder.rs | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/crates/optimism/payload/src/builder.rs b/crates/optimism/payload/src/builder.rs index ec7668768364..132c26492721 100644 --- a/crates/optimism/payload/src/builder.rs +++ b/crates/optimism/payload/src/builder.rs @@ -3,13 +3,13 @@ use std::{fmt::Display, sync::Arc}; use alloy_consensus::{Header, Transaction, EMPTY_OMMER_ROOT_HASH}; -use alloy_eips::merge::BEACON_NONCE; +use alloy_eips::{eip4895::Withdrawals, merge::BEACON_NONCE}; use alloy_primitives::{Address, Bytes, B256, U256}; use alloy_rpc_types_debug::ExecutionWitness; use alloy_rpc_types_engine::PayloadId; use reth_basic_payload_builder::*; use reth_chain_state::ExecutedBlock; -use reth_chainspec::ChainSpecProvider; +use reth_chainspec::{ChainSpecProvider, EthereumHardforks}; use reth_evm::{system_calls::SystemCaller, ConfigureEvm, NextBlockEnvAttributes}; use reth_execution_types::ExecutionOutcome; use reth_optimism_chainspec::OpChainSpec; @@ -416,7 +416,7 @@ where body: BlockBody { transactions: info.executed_transactions, ommers: vec![], - withdrawals: Some(ctx.attributes().payload_attributes.withdrawals.clone()), + withdrawals: 
ctx.withdrawals().cloned(), }, }; @@ -560,6 +560,13 @@ impl OpPayloadBuilderCtx { &self.config.attributes } + /// Returns the withdrawals if shanghai is active. + pub fn withdrawals(&self) -> Option<&Withdrawals> { + self.chain_spec + .is_shanghai_active_at_timestamp(self.attributes().timestamp()) + .then(|| &self.attributes().payload_attributes.withdrawals) + } + /// Returns the block gas limit to target. pub fn block_gas_limit(&self) -> u64 { self.attributes() From c8e5b233ef242c4c5eaebe6dc5ae66b98184d8a8 Mon Sep 17 00:00:00 2001 From: morito Date: Sun, 24 Nov 2024 20:08:30 +0900 Subject: [PATCH 154/156] Use `adjust_index_for_rlp` from alloy (#12815) --- crates/trie/common/src/root.rs | 11 ----------- crates/trie/trie/benches/trie_root.rs | 3 ++- 2 files changed, 2 insertions(+), 12 deletions(-) diff --git a/crates/trie/common/src/root.rs b/crates/trie/common/src/root.rs index dbcbf4200d74..982dec98837f 100644 --- a/crates/trie/common/src/root.rs +++ b/crates/trie/common/src/root.rs @@ -7,17 +7,6 @@ use alloy_trie::HashBuilder; use itertools::Itertools; use nybbles::Nibbles; -/// Adjust the index of an item for rlp encoding. -pub const fn adjust_index_for_rlp(i: usize, len: usize) -> usize { - if i > 0x7f { - i - } else if i == 0x7f || i + 1 == len { - 0 - } else { - i + 1 - } -} - /// Hashes and sorts account keys, then proceeds to calculating the root hash of the state /// represented as MPT. /// See [`state_root_unsorted`] for more info. diff --git a/crates/trie/trie/benches/trie_root.rs b/crates/trie/trie/benches/trie_root.rs index ad169936463a..893e6e9e9994 100644 --- a/crates/trie/trie/benches/trie_root.rs +++ b/crates/trie/trie/benches/trie_root.rs @@ -44,7 +44,8 @@ criterion_main!(benches); mod implementations { use super::*; use alloy_rlp::Encodable; - use reth_trie_common::{root::adjust_index_for_rlp, HashBuilder, Nibbles}; + use alloy_trie::root::adjust_index_for_rlp; + use reth_trie_common::{HashBuilder, Nibbles}; pub fn trie_hash_ordered_trie_root(receipts: &[ReceiptWithBloom]) -> B256 { triehash::ordered_trie_root::(receipts.iter().map(|receipt| { From 53839a952d528e116359716f3991823dbcc30f3c Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sun, 24 Nov 2024 11:33:42 +0000 Subject: [PATCH 155/156] chore(deps): weekly `cargo update` (#12813) Co-authored-by: github-merge-queue <118344674+github-merge-queue@users.noreply.github.com> Co-authored-by: Matthias Seitz --- Cargo.lock | 102 ++++++++++++++++++++++++++++++++--------------------- 1 file changed, 62 insertions(+), 40 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 9ae0574414e0..6463fd11a016 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1057,9 +1057,9 @@ dependencies = [ [[package]] name = "async-compression" -version = "0.4.17" +version = "0.4.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0cb8f1d480b0ea3783ab015936d2a55c87e219676f0c0b7dec61494043f21857" +checksum = "df895a515f70646414f4b45c0b79082783b80552b373a68283012928df56f522" dependencies = [ "brotli", "flate2", @@ -1164,9 +1164,9 @@ checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" [[package]] name = "backon" -version = "1.2.0" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e4fa97bb310c33c811334143cf64c5bb2b7b3c06e453db6b095d7061eff8f113" +checksum = "ba5289ec98f68f28dd809fd601059e6aa908bb8f6108620930828283d4ee23d7" dependencies = [ "fastrand 2.2.0", "tokio", @@ -1915,9 +1915,9 @@ 
dependencies = [ [[package]] name = "const-hex" -version = "1.13.1" +version = "1.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0121754e84117e65f9d90648ee6aa4882a6e63110307ab73967a4c5e7e69e586" +checksum = "487981fa1af147182687064d0a2c336586d337a606595ced9ffb0c685c250c73" dependencies = [ "cfg-if", "cpufeatures", @@ -1972,6 +1972,16 @@ dependencies = [ "libc", ] +[[package]] +name = "core-foundation" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b55271e5c8c478ad3f38ad24ef34923091e0548492a266d19b3c0b4d82574c63" +dependencies = [ + "core-foundation-sys", + "libc", +] + [[package]] name = "core-foundation-sys" version = "0.8.7" @@ -1998,9 +2008,9 @@ dependencies = [ [[package]] name = "cpufeatures" -version = "0.2.15" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ca741a962e1b0bff6d724a1a0958b686406e853bb14061f218562e1896f95e6" +checksum = "16b80225097f2e5ae4e7179dd2266824648f3e2f49d9134d584b76389d31c4c3" dependencies = [ "libc", ] @@ -3782,7 +3792,7 @@ dependencies = [ "hyper-util", "log", "rustls", - "rustls-native-certs 0.8.0", + "rustls-native-certs 0.8.1", "rustls-pki-types", "tokio", "tokio-rustls", @@ -4186,9 +4196,9 @@ dependencies = [ [[package]] name = "interprocess" -version = "2.2.1" +version = "2.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2f4e4a06d42fab3e85ab1b419ad32b09eab58b901d40c57935ff92db3287a13" +checksum = "894148491d817cb36b6f778017b8ac46b17408d522dd90f539d677ea938362eb" dependencies = [ "doctest-file", "futures-core", @@ -4713,9 +4723,9 @@ checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" [[package]] name = "litemap" -version = "0.7.3" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "643cb0b8d4fcc284004d5fd0d67ccf61dfffadb7f75e1e71bc420f4688a3a704" +checksum = "4ee93343901ab17bd981295f2cf0026d4ad018c7c31ba84549a4ddbb47a45104" [[package]] name = "lock_api" @@ -5727,9 +5737,9 @@ dependencies = [ [[package]] name = "portable-atomic" -version = "1.9.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc9c68a3f6da06753e9335d63e27f6b9754dd1920d941135b7ea8224f141adb2" +checksum = "280dc24453071f1b63954171985a0b0d30058d287960968b9b2aca264c8d4ee6" [[package]] name = "powerfmt" @@ -5867,9 +5877,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.91" +version = "1.0.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "307e3004becf10f5a6e0d59d20f3cd28231b0e0827a96cd3e0ce6d14bc1e4bb3" +checksum = "37d3544b3f2748c54e147655edb5025752e2303145b5aefb3c3ea2c78b973bb0" dependencies = [ "unicode-ident", ] @@ -6311,7 +6321,7 @@ dependencies = [ "pin-project-lite", "quinn", "rustls", - "rustls-native-certs 0.8.0", + "rustls-native-certs 0.8.1", "rustls-pemfile", "rustls-pki-types", "serde", @@ -9780,9 +9790,9 @@ dependencies = [ [[package]] name = "rustls" -version = "0.23.17" +version = "0.23.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f1a745511c54ba6d4465e8d5dfbd81b45791756de28d4981af70d6dca128f1e" +checksum = "9c9cc1d47e243d655ace55ed38201c19ae02c148ae56412ab8750e8f0166ab7f" dependencies = [ "log", "once_cell", @@ -9803,20 +9813,19 @@ dependencies = [ "rustls-pemfile", "rustls-pki-types", "schannel", - "security-framework", + "security-framework 2.11.1", ] [[package]] name = "rustls-native-certs" 
-version = "0.8.0" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcaf18a4f2be7326cd874a5fa579fae794320a0f388d365dca7e480e55f83f8a" +checksum = "7fcff2dd52b58a8d98a70243663a0d234c4e2b79235637849d15913394a247d3" dependencies = [ "openssl-probe", - "rustls-pemfile", "rustls-pki-types", "schannel", - "security-framework", + "security-framework 3.0.1", ] [[package]] @@ -9843,7 +9852,7 @@ version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "afbb878bdfdf63a336a5e63561b1835e7a8c91524f51621db870169eac84b490" dependencies = [ - "core-foundation", + "core-foundation 0.9.4", "core-foundation-sys", "jni", "log", @@ -9852,7 +9861,7 @@ dependencies = [ "rustls-native-certs 0.7.3", "rustls-platform-verifier-android", "rustls-webpki", - "security-framework", + "security-framework 2.11.1", "security-framework-sys", "webpki-roots", "winapi", @@ -10002,13 +10011,26 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" dependencies = [ "bitflags 2.6.0", - "core-foundation", + "core-foundation 0.9.4", "core-foundation-sys", "libc", "num-bigint", "security-framework-sys", ] +[[package]] +name = "security-framework" +version = "3.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1415a607e92bec364ea2cf9264646dcce0f91e6d65281bd6f2819cca3bf39c8" +dependencies = [ + "bitflags 2.6.0", + "core-foundation 0.10.0", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + [[package]] name = "security-framework-sys" version = "2.12.1" @@ -11404,9 +11426,9 @@ checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" [[package]] name = "url" -version = "2.5.3" +version = "2.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d157f1b96d14500ffdc1f10ba712e780825526c03d9a49b4d0324b0d9113ada" +checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" dependencies = [ "form_urlencoded", "idna 1.0.3", @@ -11646,9 +11668,9 @@ dependencies = [ [[package]] name = "webpki-roots" -version = "0.26.6" +version = "0.26.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "841c67bff177718f1d4dfefde8d8f0e78f9b6589319ba88312f567fc5841a958" +checksum = "5d642ff16b7e79272ae451b7322067cdc17cadf68c23264be9d94a32319efe7e" dependencies = [ "rustls-pki-types", ] @@ -12042,9 +12064,9 @@ checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049" [[package]] name = "yoke" -version = "0.7.4" +version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c5b1314b079b0930c31e3af543d8ee1757b1951ae1e1565ec704403a7240ca5" +checksum = "120e6aef9aa629e3d4f52dc8cc43a015c7724194c97dfaf45180d2daf2b77f40" dependencies = [ "serde", "stable_deref_trait", @@ -12054,9 +12076,9 @@ dependencies = [ [[package]] name = "yoke-derive" -version = "0.7.4" +version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28cc31741b18cb6f1d5ff12f5b7523e3d6eb0852bbbad19d73905511d9849b95" +checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" dependencies = [ "proc-macro2", "quote", @@ -12087,18 +12109,18 @@ dependencies = [ [[package]] name = "zerofrom" -version = "0.1.4" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91ec111ce797d0e0784a1116d0ddcdbea84322cd79e5d5ad173daeba4f93ab55" +checksum = 
"cff3ee08c995dee1859d998dea82f7374f2826091dd9cd47def953cae446cd2e" dependencies = [ "zerofrom-derive", ] [[package]] name = "zerofrom-derive" -version = "0.1.4" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ea7b4a3637ea8669cedf0f1fd5c286a17f3de97b8dd5a70a6c167a1730e63a5" +checksum = "595eed982f7d355beb85837f651fa22e90b3c044842dc7f2c2842c086f295808" dependencies = [ "proc-macro2", "quote", From 02824da4fcd0794d45442adbdbf6b953be04d4f3 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Sun, 24 Nov 2024 12:47:37 +0100 Subject: [PATCH 156/156] chore: remove default bound for txs (#12834) --- crates/primitives-traits/src/node.rs | 11 +---------- crates/primitives-traits/src/transaction/mod.rs | 2 -- crates/primitives-traits/src/transaction/signed.rs | 1 - crates/primitives/src/transaction/pooled.rs | 8 +------- 4 files changed, 2 insertions(+), 20 deletions(-) diff --git a/crates/primitives-traits/src/node.rs b/crates/primitives-traits/src/node.rs index 19f6bd8456a9..904ed7d12f1d 100644 --- a/crates/primitives-traits/src/node.rs +++ b/crates/primitives-traits/src/node.rs @@ -42,16 +42,7 @@ pub trait NodePrimitives: + MaybeSerde + 'static; /// Signed version of the transaction type. - type SignedTx: Send - + Sync - + Unpin - + Clone - + Default - + fmt::Debug - + PartialEq - + Eq - + MaybeSerde - + 'static; + type SignedTx: Send + Sync + Unpin + Clone + fmt::Debug + PartialEq + Eq + MaybeSerde + 'static; /// Transaction envelope type ID. type TxType: Send + Sync + Unpin + Clone + Default + fmt::Debug + PartialEq + Eq + 'static; /// A receipt. diff --git a/crates/primitives-traits/src/transaction/mod.rs b/crates/primitives-traits/src/transaction/mod.rs index f176382146b7..b67e51024bf6 100644 --- a/crates/primitives-traits/src/transaction/mod.rs +++ b/crates/primitives-traits/src/transaction/mod.rs @@ -18,7 +18,6 @@ pub trait Transaction: + Sync + Unpin + Clone - + Default + fmt::Debug + Eq + PartialEq @@ -35,7 +34,6 @@ impl Transaction for T where + Sync + Unpin + Clone - + Default + fmt::Debug + Eq + PartialEq diff --git a/crates/primitives-traits/src/transaction/signed.rs b/crates/primitives-traits/src/transaction/signed.rs index 64acbd3415c0..ae9a8f0d2acf 100644 --- a/crates/primitives-traits/src/transaction/signed.rs +++ b/crates/primitives-traits/src/transaction/signed.rs @@ -18,7 +18,6 @@ pub trait SignedTransaction: + Sync + Unpin + Clone - + Default + fmt::Debug + PartialEq + Eq diff --git a/crates/primitives/src/transaction/pooled.rs b/crates/primitives/src/transaction/pooled.rs index cecc995ddba7..2bd344ea2a17 100644 --- a/crates/primitives/src/transaction/pooled.rs +++ b/crates/primitives/src/transaction/pooled.rs @@ -11,7 +11,7 @@ use crate::{ use alloy_consensus::{ constants::EIP4844_TX_TYPE_ID, transaction::{TxEip1559, TxEip2930, TxEip4844, TxLegacy}, - SignableTransaction, Signed, TxEip4844WithSidecar, + Signed, TxEip4844WithSidecar, }; use alloy_eips::{ eip2718::{Decodable2718, Eip2718Result, Encodable2718}, @@ -226,12 +226,6 @@ impl PooledTransactionsElement { } } -impl Default for PooledTransactionsElement { - fn default() -> Self { - Self::Legacy(TxLegacy::default().into_signed(Signature::test_signature())) - } -} - impl Hash for PooledTransactionsElement { fn hash(&self, state: &mut H) { self.trie_hash().hash(state);