diff --git a/eth/abc.py b/eth/abc.py index a753a26f95..ccd7eebf8e 100644 --- a/eth/abc.py +++ b/eth/abc.py @@ -43,6 +43,7 @@ JournalDBCheckpoint, AccountState, HeaderParams, + VMConfiguration, ) @@ -2269,6 +2270,59 @@ def get_transaction_context(cls, ... +class ConsensusContextAPI(ABC): + """ + A class representing a data context for the :class:`~eth.abc.ConsensusAPI` which is + instantiated once per chain instance and stays in memory across VM runs. + """ + + @abstractmethod + def __init__(self, db: AtomicDatabaseAPI) -> None: + """ + Initialize the context with a database. + """ + ... + + +class ConsensusAPI(ABC): + """ + A class encapsulating the consensus scheme to allow chains to run under different kind of + EVM-compatible consensus mechanisms such as the Clique Proof of Authority scheme. + """ + + @abstractmethod + def __init__(self, context: ConsensusContextAPI) -> None: + """ + Initialize the consensus api. + """ + ... + + @abstractmethod + def validate_seal(self, header: BlockHeaderAPI) -> None: + """ + Validate the seal on the given header, even if its parent is missing. + """ + ... + + @abstractmethod + def validate_seal_extension(self, + header: BlockHeaderAPI, + parents: Iterable[BlockHeaderAPI]) -> None: + """ + Validate the seal on the given header when all parents must be present. Parent headers + that are not yet in the database must be passed as ``parents``. + """ + ... + + @classmethod + @abstractmethod + def get_fee_recipient(cls, header: BlockHeaderAPI) -> Address: + """ + Return the address that should receive rewards for creating the block. + """ + ... 
+ + class VirtualMachineAPI(ConfigurableAPI): """ The :class:`~eth.abc.VirtualMachineAPI` class represents the Chain rules for a @@ -2285,9 +2339,14 @@ class VirtualMachineAPI(ConfigurableAPI): fork: str # noqa: E701 # flake8 bug that's fixed in 3.6.0+ chaindb: ChainDatabaseAPI extra_data_max_bytes: ClassVar[int] + consensus_class: Type[ConsensusAPI] + consensus_context: ConsensusContextAPI @abstractmethod - def __init__(self, header: BlockHeaderAPI, chaindb: ChainDatabaseAPI) -> None: + def __init__(self, + header: BlockHeaderAPI, + chaindb: ChainDatabaseAPI, + consensus_context: ConsensusContextAPI) -> None: """ Initialize the virtual machine. """ @@ -2637,9 +2696,8 @@ def validate_block(self, block: BlockAPI) -> None: """ ... - @classmethod @abstractmethod - def validate_header(cls, + def validate_header(self, header: BlockHeaderAPI, parent_header: BlockHeaderAPI, check_seal: bool = True @@ -2663,14 +2721,23 @@ def validate_transaction_against_header(self, """ ... - @classmethod @abstractmethod - def validate_seal(cls, header: BlockHeaderAPI) -> None: + def validate_seal(self, header: BlockHeaderAPI) -> None: """ Validate the seal on the given header. """ ... + @abstractmethod + def validate_seal_extension(self, + header: BlockHeaderAPI, + parents: Iterable[BlockHeaderAPI]) -> None: + """ + Validate the seal on the given header when all parents must be present. Parent headers + that are not yet in the database must be passed as ``parents``. + """ + ... + @classmethod @abstractmethod def validate_uncle(cls, @@ -2703,6 +2770,20 @@ def state_in_temp_block(self) -> ContextManager[StateAPI]: ... +class VirtualMachineModifierAPI(ABC): + """ + Amend a set of VMs for a chain. This allows modifying a chain for different consensus schemes. + """ + + @abstractmethod + def amend_vm_configuration(self, vm_config: VMConfiguration) -> VMConfiguration: + """ + Amend the ``vm_config`` by configuring the VM classes, and hence returning a modified + set of VM classes. + """ + ... 
+ + class HeaderChainAPI(ABC): """ Like :class:`eth.abc.ChainAPI` but does only support headers, not entire blocks. @@ -2814,6 +2895,7 @@ class ChainAPI(ConfigurableAPI): vm_configuration: Tuple[Tuple[BlockNumber, Type[VirtualMachineAPI]], ...] chain_id: int chaindb: ChainDatabaseAPI + consensus_context_class: Type[ConsensusContextAPI] # # Helpers @@ -3155,10 +3237,9 @@ def validate_uncles(self, block: BlockAPI) -> None: """ ... - @classmethod @abstractmethod def validate_chain( - cls, + self, root: BlockHeaderAPI, descendants: Tuple[BlockHeaderAPI, ...], seal_check_random_sample_rate: int = 1) -> None: @@ -3171,6 +3252,17 @@ def validate_chain( """ ... + @abstractmethod + def validate_chain_extension(self, headers: Tuple[BlockHeaderAPI, ...]) -> None: + """ + Validate a chain of headers under the assumption that the entire chain of headers is + present. Headers that are not already in the database must exist in ``headers``. Calling + this API is not a replacement for calling :meth:`~eth.abc.ChainAPI.validate_chain`, it is + an additional API to call at a different stage of header processing to enable consensus + schemes where the consensus can not be verified out of order. + """ + ... + class MiningChainAPI(ChainAPI): """ diff --git a/eth/chains/base.py b/eth/chains/base.py index 228d105dd6..6a7cf9b45c 100644 --- a/eth/chains/base.py +++ b/eth/chains/base.py @@ -44,6 +44,7 @@ BlockHeaderAPI, ChainAPI, ChainDatabaseAPI, + ConsensusContextAPI, VirtualMachineAPI, ReceiptAPI, ComputationAPI, @@ -51,6 +52,9 @@ SignedTransactionAPI, UnsignedTransactionAPI, ) +from eth.consensus import ( + ConsensusContext, +) from eth.constants import ( EMPTY_UNCLE_HASH, MAX_UNCLE_DEPTH, @@ -111,6 +115,7 @@ class BaseChain(Configurable, ChainAPI): """ chaindb: ChainDatabaseAPI = None chaindb_class: Type[ChainDatabaseAPI] = None + consensus_context_class: Type[ConsensusContextAPI] = None vm_configuration: Tuple[Tuple[BlockNumber, Type[VirtualMachineAPI]], ...] 
= None chain_id: int = None @@ -133,9 +138,8 @@ def get_vm_class(cls, header: BlockHeaderAPI) -> Type[VirtualMachineAPI]: # # Validation API # - @classmethod def validate_chain( - cls, + self, root: BlockHeaderAPI, descendants: Tuple[BlockHeaderAPI, ...], seal_check_random_sample_rate: int = 1) -> None: @@ -158,20 +162,29 @@ def validate_chain( f" but expected {encode_hex(parent.hash)}" ) should_check_seal = index in indices_to_check_seal - vm_class = cls.get_vm_class_for_block_number(child.block_number) + vm = self.get_vm(child) try: - vm_class.validate_header(child, parent, check_seal=should_check_seal) + vm.validate_header(child, parent, check_seal=should_check_seal) except ValidationError as exc: raise ValidationError( f"{child} is not a valid child of {parent}: {exc}" ) from exc + def validate_chain_extension(self, headers: Tuple[BlockHeaderAPI, ...]) -> None: + for index, header in enumerate(headers): + vm = self.get_vm(header) + + # pass in any parents that are not already in the database + parents = headers[:index] + vm.validate_seal_extension(header, parents) + class Chain(BaseChain): logger = logging.getLogger("eth.chain.chain.Chain") gas_estimator: StaticMethod[Callable[[StateAPI, SignedTransactionAPI], int]] = None chaindb_class: Type[ChainDatabaseAPI] = ChainDB + consensus_context_class: Type[ConsensusContextAPI] = ConsensusContext def __init__(self, base_db: AtomicDatabaseAPI) -> None: if not self.vm_configuration: @@ -182,6 +195,7 @@ def __init__(self, base_db: AtomicDatabaseAPI) -> None: validate_vm_configuration(self.vm_configuration) self.chaindb = self.get_chaindb_class()(base_db) + self.consensus_context = self.consensus_context_class(self.chaindb.db) self.headerdb = HeaderDB(base_db) if self.gas_estimator is None: self.gas_estimator = get_gas_estimator() @@ -247,7 +261,13 @@ def get_vm(self, at_header: BlockHeaderAPI = None) -> VirtualMachineAPI: header = self.ensure_header(at_header) vm_class = 
self.get_vm_class_for_block_number(header.block_number) chain_context = ChainContext(self.chain_id) - return vm_class(header=header, chaindb=self.chaindb, chain_context=chain_context) + + return vm_class( + header=header, + chaindb=self.chaindb, + chain_context=chain_context, + consensus_context=self.consensus_context + ) # # Header API @@ -484,15 +504,16 @@ def validate_receipt(self, receipt: ReceiptAPI, at_header: BlockHeaderAPI) -> No def validate_block(self, block: BlockAPI) -> None: if block.is_genesis: raise ValidationError("Cannot validate genesis block this way") - VM_class = self.get_vm_class_for_block_number(BlockNumber(block.number)) + vm = self.get_vm(block.header) parent_header = self.get_block_header_by_hash(block.header.parent_hash) - VM_class.validate_header(block.header, parent_header, check_seal=True) + vm.validate_header(block.header, parent_header, check_seal=True) + vm.validate_seal_extension(block.header, ()) self.validate_uncles(block) self.validate_gaslimit(block.header) def validate_seal(self, header: BlockHeaderAPI) -> None: - VM_class = self.get_vm_class_for_block_number(BlockNumber(header.block_number)) - VM_class.validate_seal(header) + vm = self.get_vm(header) + vm.validate_seal(header) def validate_gaslimit(self, header: BlockHeaderAPI) -> None: parent_header = self.get_block_header_by_hash(header.parent_hash) diff --git a/eth/chains/mainnet/__init__.py b/eth/chains/mainnet/__init__.py index 7fe74265dc..e303740923 100644 --- a/eth/chains/mainnet/__init__.py +++ b/eth/chains/mainnet/__init__.py @@ -47,8 +47,7 @@ class MainnetDAOValidatorVM(HomesteadVM): """Only on mainnet, TheDAO fork is accompanied by special extra data. 
Validate those headers""" - @classmethod - def validate_header(cls, + def validate_header(self, header: BlockHeaderAPI, previous_header: BlockHeaderAPI, check_seal: bool=True) -> None: @@ -56,17 +55,17 @@ def validate_header(cls, super().validate_header(header, previous_header, check_seal) # The special extra_data is set on the ten headers starting at the fork - dao_fork_at = cls.get_dao_fork_block_number() + dao_fork_at = self.get_dao_fork_block_number() extra_data_block_nums = range(dao_fork_at, dao_fork_at + 10) if header.block_number in extra_data_block_nums: - if cls.support_dao_fork and header.extra_data != DAO_FORK_MAINNET_EXTRA_DATA: + if self.support_dao_fork and header.extra_data != DAO_FORK_MAINNET_EXTRA_DATA: raise ValidationError( f"Block {header!r} must have extra data " f"{encode_hex(DAO_FORK_MAINNET_EXTRA_DATA)} not " f"{encode_hex(header.extra_data)} when supporting DAO fork" ) - elif not cls.support_dao_fork and header.extra_data == DAO_FORK_MAINNET_EXTRA_DATA: + elif not self.support_dao_fork and header.extra_data == DAO_FORK_MAINNET_EXTRA_DATA: raise ValidationError( f"Block {header!r} must not have extra data " f"{encode_hex(DAO_FORK_MAINNET_EXTRA_DATA)} when declining the DAO fork" diff --git a/eth/chains/tester/__init__.py b/eth/chains/tester/__init__.py index f29be459c6..5740bcf53f 100644 --- a/eth/chains/tester/__init__.py +++ b/eth/chains/tester/__init__.py @@ -146,8 +146,8 @@ class MainnetTesterChain(BaseMainnetTesterChain): It exposes one additional API `configure_forks` to allow for in-flight configuration of fork rules. """ - @classmethod - def validate_seal(cls, block: BlockAPI) -> None: + + def validate_seal(self, block: BlockAPI) -> None: """ We don't validate the proof of work seal on the tester chain. 
""" diff --git a/eth/consensus/__init__.py b/eth/consensus/__init__.py index e69de29bb2..d24349836e 100644 --- a/eth/consensus/__init__.py +++ b/eth/consensus/__init__.py @@ -0,0 +1,9 @@ +from .applier import ConsensusApplier # noqa: F401 +from .clique.clique import ( # noqa: F401 + CliqueApplier, + CliqueConsensus, + CliqueConsensusContext, +) +from .context import ConsensusContext # noqa: F401 +from .noproof import NoProofConsensus # noqa: F401 +from .pow import PowConsensus # noqa: F401 diff --git a/eth/consensus/applier.py b/eth/consensus/applier.py new file mode 100644 index 0000000000..670127ad48 --- /dev/null +++ b/eth/consensus/applier.py @@ -0,0 +1,37 @@ +from typing import ( + Iterable, + Type, +) + +from eth_utils import ( + to_tuple, +) + +from eth.abc import ( + ConsensusAPI, + VirtualMachineModifierAPI, + VMConfiguration, +) +from eth.typing import ( + VMFork, +) + + +class ConsensusApplier(VirtualMachineModifierAPI): + """ + This class is used to apply simple types of consensus engines to a series of virtual machines. 
+ + Note that this *must not* be used for Clique, which has its own modifier + """ + + def __init__(self, consensus_class: Type[ConsensusAPI]) -> None: + self._consensus_class = consensus_class + + @to_tuple + def amend_vm_configuration(self, config: VMConfiguration) -> Iterable[VMFork]: + """ + Amend the given ``VMConfiguration`` to operate under the rules of the pre-defined consensus + """ + for pair in config: + block_number, vm = pair + yield block_number, vm.configure(consensus_class=self._consensus_class) diff --git a/eth/consensus/clique/__init__.py b/eth/consensus/clique/__init__.py index eeb351b418..d86b110075 100644 --- a/eth/consensus/clique/__init__.py +++ b/eth/consensus/clique/__init__.py @@ -1,5 +1,7 @@ from .clique import ( # noqa: F401 + CliqueApplier, CliqueConsensus, + CliqueConsensusContext, ) from .constants import ( # noqa: F401 NONCE_AUTH, diff --git a/eth/consensus/clique/clique.py b/eth/consensus/clique/clique.py index 64c3123b6b..5c6e640386 100644 --- a/eth/consensus/clique/clique.py +++ b/eth/consensus/clique/clique.py @@ -1,16 +1,19 @@ import logging -from typing import Sequence, Iterable +from typing import ( + Iterable, + Sequence, +) from eth.abc import ( AtomicDatabaseAPI, BlockHeaderAPI, VirtualMachineAPI, + VirtualMachineModifierAPI, ) from eth.db.chain import ChainDB from eth_typing import ( Address, - Hash32, ) from eth_utils import ( encode_hex, @@ -18,15 +21,15 @@ ValidationError, ) +from eth.abc import ( + ConsensusAPI, + ConsensusContextAPI, +) from eth.typing import ( HeaderParams, VMConfiguration, VMFork, ) -from eth.vm.chain_context import ChainContext -from eth.vm.execution_context import ( - ExecutionContext, -) from .constants import ( EPOCH_LENGTH, @@ -34,7 +37,6 @@ from .datatypes import ( Snapshot, ) -from .header_cache import HeaderCache from .snapshot_manager import SnapshotManager from ._utils import ( get_block_signer, @@ -65,7 +67,16 @@ def _construct_turn_error_message(expected_difficulty: int, ) -class 
CliqueConsensus: +class CliqueConsensusContext(ConsensusContextAPI): + + epoch_length = EPOCH_LENGTH + + def __init__(self, db: AtomicDatabaseAPI): + self.db = db + self.snapshot_manager = SnapshotManager(ChainDB(db), self.epoch_length) + + +class CliqueConsensus(ConsensusAPI): """ This class is the entry point to operate a chain under the rules of Clique consensus which is defined in EIP-225: https://eips.ethereum.org/EIPS/eip-225 @@ -73,55 +84,22 @@ class CliqueConsensus: logger = logging.getLogger('eth.consensus.clique.CliqueConsensus') - def __init__(self, base_db: AtomicDatabaseAPI, epoch_length: int = EPOCH_LENGTH) -> None: - if base_db is None: - raise ValueError("Can not instantiate without `base_db`") - self._epoch_length = epoch_length - self._chain_db = ChainDB(base_db) - self._header_cache = HeaderCache(self._chain_db) - self._snapshot_manager = SnapshotManager( - self._chain_db, - self._header_cache, - self._epoch_length, - ) + def __init__(self, context: CliqueConsensusContext) -> None: + if context is None: + raise ValueError("Can not instantiate without `context`") + self._epoch_length = context.epoch_length + self._snapshot_manager = context.snapshot_manager - @to_tuple - def amend_vm_configuration(self, config: VMConfiguration) -> Iterable[VMFork]: + @classmethod + def get_fee_recipient(cls, header: BlockHeaderAPI) -> Address: """ - Amend the given ``VMConfiguration`` to operate under the rules of Clique consensus. + If the ``header`` has a signer, return the signer, otherwise return the ``coinbase`` + of the passed header. 
""" - for pair in config: - block_number, vm = pair - vm_class = vm.configure( - extra_data_max_bytes=65535, - validate_seal=staticmethod(self.validate_seal), - create_execution_context=staticmethod(self.create_execution_context), - configure_header=configure_header, - _assign_block_rewards=lambda _, __: None, - ) - - yield block_number, vm_class - - @staticmethod - def create_execution_context(header: BlockHeaderAPI, - prev_hashes: Iterable[Hash32], - chain_context: ChainContext) -> ExecutionContext: - - # In Clique consensus, the tx fee goes to the signer try: - coinbase = get_block_signer(header) + return get_block_signer(header) except ValueError: - coinbase = header.coinbase - - return ExecutionContext( - coinbase=coinbase, - timestamp=header.timestamp, - block_number=header.block_number, - difficulty=header.difficulty, - gas_limit=header.gas_limit, - prev_hashes=prev_hashes, - chain_id=chain_context.chain_id, - ) + return header.coinbase def get_snapshot(self, header: BlockHeaderAPI) -> Snapshot: """ @@ -131,18 +109,26 @@ def get_snapshot(self, header: BlockHeaderAPI) -> Snapshot: def validate_seal(self, header: BlockHeaderAPI) -> None: """ + Only validate the integrity of the header, use `validate_seal_extension` to validate + the consensus relevant seal of the header. + """ + validate_header_integrity(header, self._epoch_length) + + def validate_seal_extension(self, + header: BlockHeaderAPI, + parents: Iterable[BlockHeaderAPI]) -> None: + """ Validate the seal of the given ``header`` according to the Clique consensus rules. 
""" + if header.block_number == 0: return validate_header_integrity(header, self._epoch_length) - self._header_cache[header.hash] = header - signer = get_block_signer(header) snapshot = self._snapshot_manager.get_or_create_snapshot( - header.block_number - 1, header.parent_hash) + header.block_number - 1, header.parent_hash, parents) in_turn = is_in_turn(signer, snapshot, header) authorized_signers = snapshot.get_sorted_signers() @@ -164,4 +150,25 @@ def validate_seal(self, header: BlockHeaderAPI) -> None: f"Signer {encode_hex(signer)} not in {authorized_signers}" ) - self._header_cache.evict() + +class CliqueApplier(VirtualMachineModifierAPI): + """ + This class is used to apply a clique consensus engine to a series of virtual machines + """ + + @to_tuple + def amend_vm_configuration(self, config: VMConfiguration) -> Iterable[VMFork]: + """ + Amend the given ``VMConfiguration`` to operate under the rules of Clique consensus. + """ + for pair in config: + block_number, vm = pair + vm_class = vm.configure( + extra_data_max_bytes=65535, + consensus_class=CliqueConsensus, + configure_header=configure_header, + get_block_reward=staticmethod(int), + get_uncle_reward=staticmethod(int), + ) + + yield block_number, vm_class diff --git a/eth/consensus/clique/header_cache.py b/eth/consensus/clique/header_cache.py deleted file mode 100644 index dad12c56d2..0000000000 --- a/eth/consensus/clique/header_cache.py +++ /dev/null @@ -1,51 +0,0 @@ -from typing import Dict - -from eth_typing import Hash32 -from eth_utils import get_extended_debug_logger - -from eth.abc import ChainDatabaseAPI -from eth.rlp.headers import BlockHeader - - -class HeaderCache: - """ - The ``HeaderCache`` is responsible for holding on to all headers during validation until - they are persisted in the database. This is necessary because validation in Clique depends - on looking up past headers which may not be persisted at the time when they are needed. 
- """ - - logger = get_extended_debug_logger('eth.consensus.clique.header_cache.HeaderCache') - - def __init__(self, chaindb: ChainDatabaseAPI) -> None: - self._chaindb = chaindb - self._cache: Dict[Hash32, BlockHeader] = {} - self._gc_threshold = 1000 - - def __getitem__(self, key: Hash32) -> BlockHeader: - return self._cache[key] - - def __setitem__(self, key: Hash32, value: BlockHeader) -> None: - self._cache[key] = value - - def __contains__(self, key: bytes) -> bool: - return key in self._cache - - def __delitem__(self, key: Hash32) -> None: - del self._cache[key] - - def __len__(self) -> int: - return len(self._cache) - - def evict(self) -> None: - """ - Evict all headers from the cache that have a block number lower than the oldest - block number that is considered needed. - """ - head = self._chaindb.get_canonical_head() - oldest_needed_header = head.block_number - self._gc_threshold - - for header in list(self._cache.values()): - if header.block_number < oldest_needed_header: - self._cache.pop(header.hash) - - self.logger.debug2("Finished cache cleanup. 
Cache length: %s", len(self)) diff --git a/eth/consensus/clique/snapshot_manager.py b/eth/consensus/clique/snapshot_manager.py index d8b46c166c..cc0904133c 100644 --- a/eth/consensus/clique/snapshot_manager.py +++ b/eth/consensus/clique/snapshot_manager.py @@ -1,18 +1,20 @@ import lru +from typing import Iterable from eth.abc import ( + BlockHeaderAPI, ChainDatabaseAPI, ) from eth.exceptions import ( HeaderNotFound, ) -from eth.rlp.headers import BlockHeader from eth_typing import ( Address, Hash32, ) from eth_utils import ( + encode_hex, get_extended_debug_logger, ValidationError, ) @@ -36,9 +38,6 @@ Vote, VoteAction, ) -from .header_cache import ( - HeaderCache, -) from ._utils import ( get_signers_at_checkpoint, @@ -62,24 +61,24 @@ class SnapshotManager: def __init__(self, chain_db: ChainDatabaseAPI, - header_cache: HeaderCache, epoch_length: int) -> None: - self._snapshots = lru.LRU(IN_MEMORY_SNAPSHOTS) - self._header_cache = header_cache self._chain_db = chain_db self._epoch_length = epoch_length + self._snapshots = lru.LRU(IN_MEMORY_SNAPSHOTS) - def _lookup_header(self, block_hash: Hash32) -> BlockHeader: - - if block_hash in self._header_cache: - return self._header_cache[block_hash] + def _lookup_header(self, + block_hash: Hash32, + parents: Iterable[BlockHeaderAPI]) -> BlockHeaderAPI: + for parent in parents: + if parent.hash == block_hash: + return parent try: return self._chain_db.get_block_header_by_hash(block_hash) except HeaderNotFound: - raise ValidationError("Unknown ancestor %s", block_hash) + raise ValidationError("Unknown ancestor %s", encode_hex(block_hash)) - def _create_snapshot_from_checkpoint_header(self, header: BlockHeader) -> Snapshot: + def _create_snapshot_from_checkpoint_header(self, header: BlockHeaderAPI) -> Snapshot: signers = get_signers_at_checkpoint(header) self.logger.debug2("Created snapshot from checkpoint at %s", header) @@ -91,7 +90,7 @@ def _create_snapshot_from_checkpoint_header(self, header: BlockHeader) -> Snapsh ) 
return self.add_snapshot(snapshot) - def apply(self, current_snapshot: Snapshot, header: BlockHeader) -> Snapshot: + def apply(self, current_snapshot: Snapshot, header: BlockHeaderAPI) -> Snapshot: """ Apply the given header on top of the current snapshot to create a new snapshot. """ @@ -148,7 +147,10 @@ def apply(self, current_snapshot: Snapshot, header: BlockHeader) -> Snapshot: return snapshot.get_immutable_clone() - def get_or_create_snapshot(self, block_number: int, block_hash: Hash32) -> Snapshot: + def get_or_create_snapshot(self, + block_number: int, + block_hash: Hash32, + parents: Iterable[BlockHeaderAPI] = ()) -> Snapshot: """ Return a snapshot either by creating or retrieving it or raise a ``ValidationError`` if the header does not have a known ancestor. @@ -156,11 +158,13 @@ def get_or_create_snapshot(self, block_number: int, block_hash: Hash32) -> Snaps try: snapshot = self.get_snapshot(block_number, block_hash) except SnapshotNotFound: - return self.create_snapshot_for(block_hash) + return self.create_snapshot_for(block_hash, parents) else: return snapshot - def create_snapshot_for(self, block_hash: Hash32) -> Snapshot: + def create_snapshot_for(self, + block_hash: Hash32, + cached_parents: Iterable[BlockHeaderAPI]) -> Snapshot: """ Create a ``Snapshot`` for the given ``block_hash``. This involves traversing backwards through the chain of headers to find a suitable base snapshot either @@ -168,7 +172,7 @@ def create_snapshot_for(self, block_hash: Hash32) -> Snapshot: After we've found a suitable base snapshot, apply all headers from after the base snapshot up to the header of ``block_hash`` to create the requested snapshot. 
""" - current_header = header = self._lookup_header(block_hash) + current_header = header = self._lookup_header(block_hash, cached_parents) if is_checkpoint(header.block_number, self._epoch_length): return self._create_snapshot_from_checkpoint_header(current_header) @@ -179,7 +183,7 @@ def create_snapshot_for(self, block_hash: Hash32) -> Snapshot: new_snapshot = self.get_snapshot( current_header.block_number, current_header.parent_hash) except SnapshotNotFound as e: - current_header = self._lookup_header(current_header.parent_hash) + current_header = self._lookup_header(current_header.parent_hash, cached_parents) if is_checkpoint(current_header.block_number, self._epoch_length): new_snapshot = self._create_snapshot_from_checkpoint_header(current_header) @@ -205,6 +209,7 @@ def get_snapshot(self, block_number: int, block_hash: Hash32) -> Snapshot: Return a ``Snapshot`` if it exists in memory, on-disk or can be computed directly from a header that serves as a checkpoint. Otherwise raise a ``SnapshotNotFound`` error. """ + # We first try to find the snapshot in memory if block_hash in self._snapshots: return self._snapshots[block_hash] diff --git a/eth/consensus/context.py b/eth/consensus/context.py new file mode 100644 index 0000000000..fe0d34ec47 --- /dev/null +++ b/eth/consensus/context.py @@ -0,0 +1,10 @@ +from eth.abc import ( + AtomicDatabaseAPI, + ConsensusContextAPI, +) + + +class ConsensusContext(ConsensusContextAPI): + + def __init__(self, db: AtomicDatabaseAPI): + self.db = db diff --git a/eth/consensus/noproof.py b/eth/consensus/noproof.py new file mode 100644 index 0000000000..06fb9e3870 --- /dev/null +++ b/eth/consensus/noproof.py @@ -0,0 +1,32 @@ +from typing import ( + Iterable, +) +from eth.abc import ( + AtomicDatabaseAPI, + BlockHeaderAPI, + ConsensusAPI, +) +from eth.typing import ( + Address, +) + + +class NoProofConsensus(ConsensusAPI): + """ + Modify a set of VMs to accept blocks without any validation. 
+ """ + + def __init__(self, base_db: AtomicDatabaseAPI) -> None: + pass + + def validate_seal(self, header: BlockHeaderAPI) -> None: + pass + + def validate_seal_extension(self, + header: BlockHeaderAPI, + parents: Iterable[BlockHeaderAPI]) -> None: + pass + + @classmethod + def get_fee_recipient(cls, header: BlockHeaderAPI) -> Address: + return header.coinbase diff --git a/eth/consensus/pow.py b/eth/consensus/pow.py index a2436dd053..bf5228f6f0 100644 --- a/eth/consensus/pow.py +++ b/eth/consensus/pow.py @@ -1,10 +1,12 @@ from collections import OrderedDict from typing import ( - Tuple + Iterable, + Tuple, ) from eth_typing import ( - Hash32 + Address, + Hash32, ) from eth_utils import ( @@ -22,6 +24,11 @@ ) +from eth.abc import ( + AtomicDatabaseAPI, + BlockHeaderAPI, + ConsensusAPI, +) from eth.validation import ( validate_length, validate_lte, @@ -92,3 +99,33 @@ def mine_pow_nonce(block_number: int, mining_hash: Hash32, difficulty: int) -> T return nonce.to_bytes(8, 'big'), mining_output[b'mix digest'] raise Exception("Too many attempts at POW mining, giving up") + + +class PowConsensus(ConsensusAPI): + """ + Modify a set of VMs to validate blocks via Proof of Work (POW) + """ + + def __init__(self, base_db: AtomicDatabaseAPI) -> None: + pass + + def validate_seal(self, header: BlockHeaderAPI) -> None: + """ + Validate the seal on the given header by checking the proof of work. + """ + check_pow( + header.block_number, header.mining_hash, + header.mix_hash, header.nonce, header.difficulty) + + def validate_seal_extension(self, + header: BlockHeaderAPI, + parents: Iterable[BlockHeaderAPI]) -> None: + pass + + @classmethod + def get_fee_recipient(cls, header: BlockHeaderAPI) -> Address: + """ + Return the ``coinbase`` of the passed ``header`` as the receipient for any + rewards for the block. 
+ """ + return header.coinbase diff --git a/eth/tools/builder/chain/builders.py b/eth/tools/builder/chain/builders.py index dd330c44a1..ee33adeeb8 100644 --- a/eth/tools/builder/chain/builders.py +++ b/eth/tools/builder/chain/builders.py @@ -33,11 +33,12 @@ from eth.abc import ( AtomicDatabaseAPI, BlockAPI, - BlockHeaderAPI, ChainAPI, MiningChainAPI, VirtualMachineAPI, ) +from eth.consensus.applier import ConsensusApplier +from eth.consensus.noproof import NoProofConsensus from eth.db.atomic import AtomicDB from eth.db.backends.memory import ( MemoryDB, @@ -289,33 +290,6 @@ def enable_pow_mining(chain_class: Type[ChainAPI]) -> Type[ChainAPI]: return chain_class.configure(vm_configuration=vm_configuration) -class NoChainSealValidationMixin: - @classmethod - def validate_seal(cls, block: BlockAPI) -> None: - pass - - -class NoVMSealValidationMixin: - @classmethod - def validate_seal(cls, header: BlockHeaderAPI) -> None: - pass - - -@to_tuple -def _mix_in_disable_seal_validation(vm_configuration: VMConfiguration) -> Iterable[VMFork]: - for fork_block, vm_class in vm_configuration: - if issubclass(vm_class, NoVMSealValidationMixin): - # Seal validation already disabled, hence nothing to change - vm_class_without_seal_validation = vm_class - else: - vm_class_without_seal_validation = type( - vm_class.__name__, - (NoVMSealValidationMixin, vm_class), - {}, - ) - yield fork_block, vm_class_without_seal_validation - - @curry def disable_pow_check(chain_class: Type[ChainAPI]) -> Type[ChainAPI]: """ @@ -327,23 +301,10 @@ def disable_pow_check(chain_class: Type[ChainAPI]) -> Type[ChainAPI]: blocks mined this way will not be importable on any chain that does not have proof of work disabled. 
""" - if not chain_class.vm_configuration: - raise ValidationError("Chain class has no vm_configuration") + original_vms = chain_class.vm_configuration + no_pow_vms = ConsensusApplier(NoProofConsensus).amend_vm_configuration(original_vms) - if issubclass(chain_class, NoChainSealValidationMixin): - # Seal validation already disabled, hence nothing to change - chain_class_without_seal_validation = chain_class - else: - chain_class_without_seal_validation = type( - chain_class.__name__, - (chain_class, NoChainSealValidationMixin), - {}, - ) - return chain_class_without_seal_validation.configure( # type: ignore - vm_configuration=_mix_in_disable_seal_validation( - chain_class_without_seal_validation.vm_configuration # type: ignore - ), - ) + return chain_class.configure(vm_configuration=no_pow_vms) # diff --git a/eth/vm/base.py b/eth/vm/base.py index 2e15b4bc1c..1e9266905c 100644 --- a/eth/vm/base.py +++ b/eth/vm/base.py @@ -8,13 +8,13 @@ Iterator, Optional, Sequence, + Set, Tuple, Type, Union, ) -from typing import Set - +from cached_property import cached_property from eth_hash.auto import keccak from eth_typing import ( Address, @@ -32,6 +32,8 @@ ChainContextAPI, ChainDatabaseAPI, ComputationAPI, + ConsensusAPI, + ConsensusContextAPI, ExecutionContextAPI, ReceiptAPI, SignedTransactionAPI, @@ -40,7 +42,7 @@ VirtualMachineAPI, ) from eth.consensus.pow import ( - check_pow, + PowConsensus, ) from eth.constants import ( GENESIS_PARENT_HASH, @@ -84,6 +86,7 @@ class VM(Configurable, VirtualMachineAPI): block_class: Type[BlockAPI] = None + consensus_class: Type[ConsensusAPI] = PowConsensus extra_data_max_bytes: ClassVar[int] = 32 fork: str = None # noqa: E701 # flake8 bug that's fixed in 3.6.0+ chaindb: ChainDatabaseAPI = None @@ -97,9 +100,11 @@ class VM(Configurable, VirtualMachineAPI): def __init__(self, header: BlockHeaderAPI, chaindb: ChainDatabaseAPI, - chain_context: ChainContextAPI) -> None: + chain_context: ChainContextAPI, + consensus_context: 
ConsensusContextAPI) -> None: self.chaindb = chaindb self.chain_context = chain_context + self.consensus_context = consensus_context self._initial_header = header def get_header(self) -> BlockHeaderAPI: @@ -133,6 +138,10 @@ def build_state(cls, execution_context = cls.create_execution_context(header, previous_hashes, chain_context) return cls.get_state_class()(db, execution_context, header.state_root) + @cached_property + def _consensus(self) -> ConsensusAPI: + return self.consensus_class(self.consensus_context) + # # Logging # @@ -158,12 +167,14 @@ def apply_transaction(self, return receipt, computation - @staticmethod - def create_execution_context(header: BlockHeaderAPI, + @classmethod + def create_execution_context(cls, + header: BlockHeaderAPI, prev_hashes: Iterable[Hash32], chain_context: ChainContextAPI) -> ExecutionContextAPI: + fee_recipient = cls.consensus_class.get_fee_recipient(header) return ExecutionContext( - coinbase=header.coinbase, + coinbase=fee_recipient, timestamp=header.timestamp, block_number=header.block_number, difficulty=header.difficulty, @@ -328,21 +339,28 @@ def _assign_block_rewards(self, block: BlockAPI) -> None: len(block.uncles) * self.get_nephew_reward() ) - self.state.delta_balance(block.header.coinbase, block_reward) - self.logger.debug( - "BLOCK REWARD: %s -> %s", - block_reward, - block.header.coinbase, - ) + if block_reward != 0: + self.state.delta_balance(block.header.coinbase, block_reward) + self.logger.debug( + "BLOCK REWARD: %s -> %s", + block_reward, + block.header.coinbase, + ) + else: + self.logger.debug("No block reward given to %s", block.header.coinbase) for uncle in block.uncles: uncle_reward = self.get_uncle_reward(block.number, uncle) - self.state.delta_balance(uncle.coinbase, uncle_reward) - self.logger.debug( - "UNCLE REWARD REWARD: %s -> %s", - uncle_reward, - uncle.coinbase, - ) + + if uncle_reward != 0: + self.state.delta_balance(uncle.coinbase, uncle_reward) + self.logger.debug( + "UNCLE REWARD REWARD: %s 
-> %s", + uncle_reward, + uncle.coinbase, + ) + else: + self.logger.debug("No uncle reward given to %s", uncle.coinbase) def finalize_block(self, block: BlockAPI) -> BlockAPI: if block.number > 0: @@ -530,8 +548,7 @@ def validate_block(self, block: BlockAPI) -> None: f" - header uncle_hash: {block.header.uncles_hash}" ) - @classmethod - def validate_header(cls, + def validate_header(self, header: BlockHeaderAPI, parent_header: BlockHeaderAPI, check_seal: bool = True) -> None: @@ -540,7 +557,7 @@ def validate_header(cls, raise ValidationError("Must have access to parent header to validate current header") else: validate_length_lte( - header.extra_data, cls.extra_data_max_bytes, title="BlockHeader.extra_data") + header.extra_data, self.extra_data_max_bytes, title="BlockHeader.extra_data") validate_gas_limit(header.gas_limit, parent_header.gas_limit) @@ -562,19 +579,21 @@ def validate_header(cls, if check_seal: try: - cls.validate_seal(header) + self.validate_seal(header) except ValidationError: - cls.cls_logger.warning( + self.cls_logger.warning( "Failed to validate header proof of work on header: %r", header.as_dict() ) raise - @classmethod - def validate_seal(cls, header: BlockHeaderAPI) -> None: - check_pow( - header.block_number, header.mining_hash, - header.mix_hash, header.nonce, header.difficulty) + def validate_seal(self, header: BlockHeaderAPI) -> None: + self._consensus.validate_seal(header) + + def validate_seal_extension(self, + header: BlockHeaderAPI, + parents: Iterable[BlockHeaderAPI]) -> None: + self._consensus.validate_seal_extension(header, parents) @classmethod def validate_uncle(cls, block: BlockAPI, uncle: BlockAPI, uncle_parent: BlockAPI) -> None: diff --git a/newsfragments/1899.feature.rst b/newsfragments/1899.feature.rst new file mode 100644 index 0000000000..dcc1929856 --- /dev/null +++ b/newsfragments/1899.feature.rst @@ -0,0 +1,14 @@ +Make handling of different consensus mechanisms more flexible and sound. + +1. 
``validate_seal`` and ``validate_header`` are now instance methods. The only reason they can +be classmethods today is because our Pow implementation relies on a globally shared cache +which should be refactored to use the ``ConsensusContextAPI``. + +2. There are two new methods: ``chain.validate_chain_extension(header, parents)`` and +``vm.validate_seal_extension``. They perform extension seal checks to support consensus schemes +where headers can not be checked if parents are missing. + +3. The consensus mechanism is now abstracted via ``ConsensusAPI`` and ``ConsensusContextAPI``. +VMs instantiate a consensus api based on the set ``consensus_class`` and pass it a context which +they receive from the chain upon instantiation. The chain instantiates the consensus context api +based on the ``consensus_context_class``. diff --git a/tests/conftest.py b/tests/conftest.py index 6f07d70a9e..31c9da17a8 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -18,6 +18,7 @@ Chain, MiningChain, ) +from eth.consensus.noproof import NoProofConsensus from eth.db.atomic import AtomicDB from eth.rlp.headers import BlockHeader from eth.vm.forks import ( @@ -146,7 +147,7 @@ def _chain_with_block_validation(VM, base_db, genesis_state, chain_cls=Chain): klass = chain_cls.configure( __name__='TestChain', vm_configuration=( - (constants.GENESIS_BLOCK_NUMBER, VM), + (constants.GENESIS_BLOCK_NUMBER, VM.configure(consensus_class=NoProofConsensus)), ), chain_id=1337, ) @@ -207,12 +208,11 @@ def _chain_without_block_validation(request, VM, base_db, genesis_state): 'import_block': import_block_without_validation, 'validate_block': lambda self, block: None, } - VMForTesting = VM.configure(validate_seal=lambda block: None) chain_class = request.param klass = chain_class.configure( __name__='TestChainWithoutBlockValidation', vm_configuration=( - (constants.GENESIS_BLOCK_NUMBER, VMForTesting), + (constants.GENESIS_BLOCK_NUMBER, VM.configure(consensus_class=NoProofConsensus)), ), chain_id=1337, 
**overrides, diff --git a/tests/core/builder-tools/test_chain_builder.py b/tests/core/builder-tools/test_chain_builder.py index 6f1e6e8b4c..7596425853 100644 --- a/tests/core/builder-tools/test_chain_builder.py +++ b/tests/core/builder-tools/test_chain_builder.py @@ -2,11 +2,6 @@ from eth_utils import ValidationError -from eth.chains import ( - MainnetChain, - MainnetTesterChain, - RopstenChain, -) from eth.chains.base import ( Chain, MiningChain, @@ -24,9 +19,6 @@ mine_block, mine_blocks, ) -from eth.tools.builder.chain.builders import ( - NoChainSealValidationMixin, -) MINING_CHAIN_PARAMS = ( @@ -237,18 +229,3 @@ def test_chain_builder_chain_split(mining_chain): head_b = chain_b.get_canonical_head() assert head_b.block_number == 3 - - -@pytest.mark.parametrize( - "chain", - ( - MainnetChain, - MainnetTesterChain, - RopstenChain, - ) -) -def test_disabling_pow_for_already_pow_disabled_chain(chain): - pow_disabled_chain = disable_pow_check(chain) - assert issubclass(pow_disabled_chain, NoChainSealValidationMixin) - again_pow_disabled_chain = disable_pow_check(pow_disabled_chain) - assert issubclass(again_pow_disabled_chain, NoChainSealValidationMixin) diff --git a/tests/core/consensus/test_clique_consensus.py b/tests/core/consensus/test_clique_consensus.py index 758a632388..a5ea22fc19 100644 --- a/tests/core/consensus/test_clique_consensus.py +++ b/tests/core/consensus/test_clique_consensus.py @@ -14,7 +14,9 @@ GOERLI_GENESIS_HEADER, ) from eth.consensus.clique import ( + CliqueApplier, CliqueConsensus, + CliqueConsensusContext, NONCE_AUTH, NONCE_DROP, VoteAction, @@ -24,6 +26,7 @@ SIGNATURE_LENGTH, ) from eth.consensus.clique._utils import ( + get_block_signer, sign_block_header, ) from eth.constants import ( @@ -32,6 +35,7 @@ from eth.rlp.headers import BlockHeader from eth.tools.factories.keys import PublicKeyFactory from eth.tools.factories.transaction import new_transaction +from eth.vm.forks.istanbul import IstanbulVM from eth.vm.forks.petersburg import 
PetersburgVM @@ -178,53 +182,60 @@ def alice_nominates_bob_and_ron_then_they_kick_her(): def validate_seal_and_get_snapshot(clique, header): - clique.validate_seal(header) + clique.validate_seal_extension(header, ()) return clique.get_snapshot(header) @pytest.fixture -def clique(paragon_chain_with_clique): - _, clique = paragon_chain_with_clique - return clique +def paragon_chain(base_db): + vms = ( + (0, PetersburgVM,), + (2, IstanbulVM,) + ) + clique_vms = CliqueApplier().amend_vm_configuration(vms) -@pytest.fixture -def paragon_chain(paragon_chain_with_clique): - chain, _ = paragon_chain_with_clique + chain = MiningChain.configure( + vm_configuration=clique_vms, + consensus_context_class=CliqueConsensusContext, + chain_id=5, + ).from_genesis(base_db, PARAGON_GENESIS_PARAMS, PARAGON_GENESIS_STATE) return chain -@pytest.fixture -def paragon_chain_with_clique(base_db): - - vms = ((0, PetersburgVM,),) - - clique = CliqueConsensus(base_db) - - vms = clique.amend_vm_configuration(vms) +def get_clique(chain, header=None): + if header: + vm = chain.get_vm(header) + else: + vm = chain.get_vm() - chain = MiningChain.configure( - vm_configuration=vms, - chain_id=5, - ).from_genesis(base_db, PARAGON_GENESIS_PARAMS, PARAGON_GENESIS_STATE) - return chain, clique + clique = vm._consensus + assert isinstance(clique, CliqueConsensus) + return clique -def test_can_retrieve_root_snapshot(paragon_chain, clique): +def test_can_retrieve_root_snapshot(paragon_chain): head = paragon_chain.get_canonical_head() - snapshot = clique.get_snapshot(head) + snapshot = get_clique(paragon_chain, head).get_snapshot(head) assert snapshot.get_sorted_signers() == [ALICE] -def test_raises_unknown_ancestor_error(paragon_chain, clique): +def test_raises_unknown_ancestor_error(paragon_chain): head = paragon_chain.get_canonical_head() next_header = make_next_header(head, ALICE_PK, RON, NONCE_AUTH) + clique = get_clique(paragon_chain, head) with pytest.raises(ValidationError, match='Unknown ancestor'): 
clique.get_snapshot(next_header) -def test_import_block(paragon_chain, clique): +def test_validate_chain_works_across_forks(paragon_chain): + voting_chain = alice_nominates_bob_and_ron_then_they_kick_her() + + paragon_chain.validate_chain_extension((PARAGON_GENESIS_HEADER,) + voting_chain) + + +def test_import_block(paragon_chain): vm = paragon_chain.get_vm() tx = new_transaction(vm, ALICE, BOB, 10, ALICE_PK) @@ -247,6 +258,8 @@ def test_import_block(paragon_chain, clique): transactions=[tx] ) + assert get_block_signer(block.header) == ALICE + paragon_chain.import_block(block) # Alice new balance is old balance - 10 + 21000 tx fee (she's the signer) @@ -256,20 +269,22 @@ def test_import_block(paragon_chain, clique): assert paragon_chain.get_vm().state.get_balance(vm.get_block().header.coinbase) == 0 -def test_reapplies_headers_without_snapshots(clique): +def test_reapplies_headers_without_snapshots(paragon_chain): voting_chain = alice_nominates_bob_and_ron_then_they_kick_her() # We save the headers but we do not create intermediate snapshots # to proof that the SnapshotManager re-applies all needed headers # on its own. 
for i in range(5): - clique._chain_db.persist_header(voting_chain[i]) + paragon_chain.chaindb.persist_header(voting_chain[i]) + clique = get_clique(paragon_chain) snapshot = validate_seal_and_get_snapshot(clique, voting_chain[5]) assert snapshot.signers == {BOB, RON} -def test_can_persist_and_restore_snapshot_from_db(clique): +def test_can_persist_and_restore_snapshot_from_db(paragon_chain): + clique = get_clique(paragon_chain) snapshot = validate_seal_and_get_snapshot(clique, PARAGON_GENESIS_HEADER) clique._snapshot_manager.persist_snapshot(snapshot) @@ -277,8 +292,9 @@ def test_can_persist_and_restore_snapshot_from_db(clique): assert snapshot == revived -def test_revert_previous_nominate(paragon_chain, clique): +def test_revert_previous_nominate(paragon_chain): head = paragon_chain.get_canonical_head() + clique = get_clique(paragon_chain) snapshot = validate_seal_and_get_snapshot(clique, head) assert len(snapshot.tallies) == 0 alice_votes_bob = make_next_header(head, ALICE_PK, coinbase=BOB, nonce=NONCE_AUTH) @@ -298,8 +314,9 @@ def test_revert_previous_nominate(paragon_chain, clique): assert RON not in snapshot.tallies -def test_revert_previous_kick(paragon_chain, clique): +def test_revert_previous_kick(paragon_chain): head = paragon_chain.get_canonical_head() + clique = get_clique(paragon_chain) snapshot = validate_seal_and_get_snapshot(clique, head) assert len(snapshot.tallies) == 0 alice_votes_bob = make_next_header(head, ALICE_PK, coinbase=BOB, nonce=NONCE_AUTH) @@ -320,8 +337,9 @@ def test_revert_previous_kick(paragon_chain, clique): assert BOB not in snapshot.tallies -def test_does_not_count_multiple_kicks(paragon_chain, clique): +def test_does_not_count_multiple_kicks(paragon_chain): head = paragon_chain.get_canonical_head() + clique = get_clique(paragon_chain) snapshot = validate_seal_and_get_snapshot(clique, head) assert len(snapshot.tallies) == 0 alice_votes_bob = make_next_header(head, ALICE_PK, coinbase=BOB, nonce=NONCE_AUTH) @@ -340,8 +358,9 @@ def 
test_does_not_count_multiple_kicks(paragon_chain, clique): assert snapshot.tallies[BOB].votes == 1 -def test_does_not_count_multiple_nominates(paragon_chain, clique): +def test_does_not_count_multiple_nominates(paragon_chain): head = paragon_chain.get_canonical_head() + clique = get_clique(paragon_chain) snapshot = validate_seal_and_get_snapshot(clique, head) assert len(snapshot.tallies) == 0 alice_votes_bob = make_next_header(head, ALICE_PK, coinbase=BOB, nonce=NONCE_AUTH) @@ -360,7 +379,8 @@ def test_does_not_count_multiple_nominates(paragon_chain, clique): assert snapshot.tallies[RON].votes == 1 -def test_alice_votes_in_bob_and_ron_then_gets_kicked(clique): +def test_alice_votes_in_bob_and_ron_then_gets_kicked(paragon_chain): + clique = get_clique(paragon_chain) voting_chain = alice_nominates_bob_and_ron_then_they_kick_her() @@ -389,7 +409,8 @@ def test_alice_votes_in_bob_and_ron_then_gets_kicked(clique): assert ALICE not in snapshot.tallies -def test_removes_all_pending_votes_after_nomination(clique): +def test_removes_all_pending_votes_after_nomination(paragon_chain): + clique = get_clique(paragon_chain) voting_chain = alice_nominates_bob_and_ron_then_they_kick_her() @@ -414,7 +435,8 @@ def test_removes_all_pending_votes_after_nomination(clique): assert not has_vote_from(ALICE, snapshot.votes) -def test_removes_all_pending_votes_after_kick(clique): +def test_removes_all_pending_votes_after_kick(paragon_chain): + clique = get_clique(paragon_chain) ALICE_FRIEND = PublicKeyFactory().to_canonical_address() diff --git a/tests/core/consensus/test_consensus_engine.py b/tests/core/consensus/test_consensus_engine.py new file mode 100644 index 0000000000..8980066079 --- /dev/null +++ b/tests/core/consensus/test_consensus_engine.py @@ -0,0 +1,91 @@ +import pytest + +from eth.abc import ( + ConsensusAPI, +) +from eth.consensus import ( + ConsensusContext, +) + +from eth.chains.base import MiningChain +from eth.tools.builder.chain import ( + genesis, +) +from 
eth.vm.forks.istanbul import IstanbulVM + +from eth_utils import ( + ValidationError, +) + + +CONSENSUS_DATA_LENGH = 9 + +WHITELISTED_ROOT = b"root" + +ZERO_BYTE = b'\x00' + + +class WhitelistConsensus(ConsensusAPI): + """ + A pseudo consensus engine for testing. Each accepted block puts another block on a whitelist. + """ + + def __init__(self, context: ConsensusContext) -> None: + self.base_db = context.db + + def _get_consensus_data(self, consensus_data): + if len(consensus_data) != CONSENSUS_DATA_LENGH: + raise ValidationError( + f"The `extra_data` field must be of length {CONSENSUS_DATA_LENGH}" + f"but was {len(consensus_data)}" + ) + + return consensus_data[:4], consensus_data[5:] + + def validate_seal(self, header): + + current, following = self._get_consensus_data(header.extra_data) + + if current == WHITELISTED_ROOT or current in self.base_db: + self.base_db[following] = ZERO_BYTE + else: + raise ValidationError(f"Block isn't on whitelist: {current}") + + def validate_seal_extension(self, header, parents): + pass + + @classmethod + def get_fee_recipient(cls, header): + return header.coinbase + + +def test_stateful_consensus_isnt_shared_across_chain_instances(): + + class ChainClass(MiningChain): + vm_configuration = ( + (0, IstanbulVM.configure(consensus_class=WhitelistConsensus)), + ) + + chain = genesis(ChainClass) + + chain.mine_block(extra_data=b"root-1000") + chain.mine_block(extra_data=b"1000-1001") + # we could even mine the same block twice + chain.mine_block(extra_data=b"1000-1001") + + # But we can not jump ahead + with pytest.raises(ValidationError, match="Block isn't on whitelist"): + chain.mine_block(extra_data=b"1002-1003") + + # A different chain but same consensus engine class + second_chain = genesis(ChainClass) + + # Should maintain its independent whitelist + with pytest.raises(ValidationError, match="Block isn't on whitelist"): + second_chain.mine_block(extra_data=b"1000-1001") + + second_chain.mine_block(extra_data=b"root-2000") + + # 
And the second chain's whitelist should also not interfere with the first one's + with pytest.raises(ValidationError, match="Block isn't on whitelist"): + chain.mine_block(extra_data=b"2000-2001") diff --git a/tests/core/opcodes/test_opcodes.py b/tests/core/opcodes/test_opcodes.py index 93087008f9..267954a7a5 100644 --- a/tests/core/opcodes/test_opcodes.py +++ b/tests/core/opcodes/test_opcodes.py @@ -15,6 +15,7 @@ from eth._utils.address import ( force_bytes_to_address, ) +from eth.consensus import ConsensusContext from eth.db.atomic import ( AtomicDB ) @@ -98,7 +99,8 @@ def setup_computation( origin=CANONICAL_ADDRESS_B, ) - vm = vm_class(GENESIS_HEADER, ChainDB(AtomicDB()), chain_context) + db = AtomicDB() + vm = vm_class(GENESIS_HEADER, ChainDB(db), chain_context, ConsensusContext(db)) computation = vm_class._state_class.computation_class( state=vm.state, diff --git a/tests/core/vm/test_clique_validation.py b/tests/core/vm/test_clique_validation.py index e7c52c5f39..e989e8da89 100644 --- a/tests/core/vm/test_clique_validation.py +++ b/tests/core/vm/test_clique_validation.py @@ -2,12 +2,17 @@ from eth_utils import ( decode_hex, + ValidationError, ) +from eth.chains.base import MiningChain from eth.chains.goerli import ( GOERLI_GENESIS_HEADER, ) -from eth.consensus.clique import CliqueConsensus +from eth.consensus.clique import ( + CliqueApplier, + CliqueConsensusContext, +) from eth.rlp.headers import BlockHeader @@ -34,23 +39,52 @@ nonce=decode_hex('0x0000000000000000'), ) +GOERLI_HEADER_TWO = BlockHeader( + difficulty=2, + block_number=2, + gas_limit=10465292, + timestamp=1548947468, + coinbase=decode_hex('0x0000000000000000000000000000000000000000'), + parent_hash=decode_hex('0x8f5bab218b6bb34476f51ca588e9f4553a3a7ce5e13a66c660a5283e97e9a85a'), + uncles_hash=decode_hex('0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347'), + state_root=decode_hex('0x5d6cded585e73c4e322c30c2f782a336316f17dd85a4863b9d838d2d4b8b3008'), + 
transaction_root=decode_hex('0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421'), # noqa: E501 + receipt_root=decode_hex('0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421'), + bloom=0, + gas_used=0, + extra_data=decode_hex('0x506172697479205465636820417574686f726974790000000000000000000000fdd66d441eff7d4116fe987f0f10812fc68b06cc500ff71c492234b9a7b8b2f45597190d97cd85f6daa45ac9518bef9f715f4bd414504b1a21d8c681654055df00'), # noqa: E501 + mix_hash=decode_hex('0x0000000000000000000000000000000000000000000000000000000000000000'), + nonce=decode_hex('0x0000000000000000'), +) + @pytest.fixture -def clique(base_db): - clique = CliqueConsensus(base_db) - clique._chain_db.persist_header(GOERLI_GENESIS_HEADER) - return clique +def goerli_chain(base_db): + + vms = ( + (0, PetersburgVM,), + ) + clique_vms = CliqueApplier().amend_vm_configuration(vms) + + chain = MiningChain.configure( + vm_configuration=clique_vms, + consensus_context_class=CliqueConsensusContext, + chain_id=5, + ).from_genesis_header(base_db, GOERLI_GENESIS_HEADER) + return chain @pytest.mark.parametrize( - 'VM, header, previous_header, valid', + 'headers, valid', ( - (PetersburgVM, GOERLI_HEADER_ONE, GOERLI_GENESIS_HEADER, True), + ((GOERLI_GENESIS_HEADER, GOERLI_HEADER_ONE), True), + ((GOERLI_GENESIS_HEADER, GOERLI_HEADER_TWO), False), ), ) -def test_can_validate_header(clique, VM, header, previous_header, valid): - CliqueVM = VM.configure( - extra_data_max_bytes=128, - validate_seal=lambda header: clique.validate_seal(header), - ) - CliqueVM.validate_header(header, previous_header, check_seal=True) +def test_can_validate_header(goerli_chain, headers, valid): + + if valid: + goerli_chain.validate_chain_extension(headers) + else: + with pytest.raises(ValidationError): + goerli_chain.validate_chain_extension(headers) diff --git a/tests/core/vm/test_mainnet_dao_fork.py b/tests/core/vm/test_mainnet_dao_fork.py index 335b05aeb9..d693a71fd0 100644 --- 
a/tests/core/vm/test_mainnet_dao_fork.py +++ b/tests/core/vm/test_mainnet_dao_fork.py @@ -10,7 +10,9 @@ from eth.chains.mainnet import ( MainnetHomesteadVM, ) +from eth.consensus import ConsensusContext from eth.rlp.headers import BlockHeader +from eth.vm.chain_context import ChainContext class ETC_VM(MainnetHomesteadVM): @@ -267,6 +269,11 @@ def header_pairs(VM, headers, valid): yield VM, pair[1], pair[0], valid +class FakeChainDB: + def __init__(self, db): + self.db = db + + @pytest.mark.parametrize( 'VM, header, previous_header, valid', header_pairs(MainnetHomesteadVM, ETH_HEADERS_NEAR_FORK, valid=True) + ( @@ -280,11 +287,19 @@ def header_pairs(VM, headers, valid): ), ) def test_mainnet_dao_fork_header_validation(VM, header, previous_header, valid): + chain_db = FakeChainDB({}) + consensus_context = ConsensusContext(chain_db.db) + vm = VM( + header=previous_header, + chaindb=chain_db, + chain_context=ChainContext(1), + consensus_context=consensus_context + ) if valid: - VM.validate_header(header, previous_header, check_seal=True) + vm.validate_header(header, previous_header, check_seal=True) else: try: - VM.validate_header(header, previous_header, check_seal=True) + vm.validate_header(header, previous_header, check_seal=True) except ValidationError: pass else: diff --git a/tests/json-fixtures/test_virtual_machine.py b/tests/json-fixtures/test_virtual_machine.py index 779a2e9b45..da095abc06 100644 --- a/tests/json-fixtures/test_virtual_machine.py +++ b/tests/json-fixtures/test_virtual_machine.py @@ -6,6 +6,7 @@ to_bytes, ) +from eth.consensus import ConsensusContext from eth.db import ( get_db_backend, ) @@ -180,7 +181,9 @@ def fixture_to_bytecode_computation(fixture, code, vm): ), ) def test_vm_fixtures(fixture, vm_class, computation_getter): - chaindb = ChainDB(get_db_backend()) + db = get_db_backend() + chaindb = ChainDB(db) + consensus_context = ConsensusContext(db) header = BlockHeader( coinbase=fixture['env']['currentCoinbase'], 
difficulty=fixture['env']['currentDifficulty'], @@ -194,7 +197,12 @@ def test_vm_fixtures(fixture, vm_class, computation_getter): # For now, just hard-code it to something not used in practice: chain_context = ChainContext(chain_id=0) - vm = vm_class(header=header, chaindb=chaindb, chain_context=chain_context) + vm = vm_class( + header=header, + chaindb=chaindb, + chain_context=chain_context, + consensus_context=consensus_context + ) state = vm.state setup_state(fixture['pre'], state) code = state.get_code(fixture['exec']['address'])