From 2a6d4d06406e29ca852eee5203901d6650e44ab7 Mon Sep 17 00:00:00 2001 From: Gabriel Levcovitz Date: Tue, 23 Jan 2024 00:03:49 -0300 Subject: [PATCH] refactor(verification): externalize verification dependencies [part 1/2] --- hathor/builder/builder.py | 7 +- hathor/builder/cli_builder.py | 6 +- hathor/cli/mining.py | 2 +- hathor/transaction/resources/create_tx.py | 8 +- .../storage/simple_memory_storage.py | 21 ++-- hathor/verification/block_verifier.py | 23 ++--- hathor/verification/transaction_verifier.py | 11 +-- .../verification/verification_dependencies.py | 68 +++++++++++++ hathor/verification/verification_service.py | 98 +++++++++++++------ hathor/verification/vertex_verifier.py | 11 +-- tests/tx/test_block.py | 7 +- tests/tx/test_genesis.py | 2 +- tests/tx/test_tx.py | 52 +++++++--- tests/tx/test_tx_deserialization.py | 2 +- 14 files changed, 228 insertions(+), 90 deletions(-) create mode 100644 hathor/verification/verification_dependencies.py diff --git a/hathor/builder/builder.py b/hathor/builder/builder.py index 6f2fa58e0..f77dd3b08 100644 --- a/hathor/builder/builder.py +++ b/hathor/builder/builder.py @@ -473,8 +473,13 @@ def _get_or_create_bit_signaling_service(self) -> BitSignalingService: def _get_or_create_verification_service(self) -> VerificationService: if self._verification_service is None: verifiers = self._get_or_create_vertex_verifiers() + daa = self._get_or_create_daa() feature_service = self._get_or_create_feature_service() - self._verification_service = VerificationService(verifiers=verifiers, feature_service=feature_service) + self._verification_service = VerificationService( + verifiers=verifiers, + daa=daa, + feature_service=feature_service + ) return self._verification_service diff --git a/hathor/builder/cli_builder.py b/hathor/builder/cli_builder.py index 0138c24bb..d3d59683f 100644 --- a/hathor/builder/cli_builder.py +++ b/hathor/builder/cli_builder.py @@ -223,7 +223,11 @@ def create_manager(self, reactor: Reactor) -> HathorManager: daa = DifficultyAdjustmentAlgorithm(settings=settings, test_mode=test_mode) vertex_verifiers = VertexVerifiers.create_defaults(settings=settings, daa=daa) - verification_service = VerificationService(verifiers=vertex_verifiers, feature_service=self.feature_service) + verification_service = VerificationService( + verifiers=vertex_verifiers, + daa=daa, + feature_service=self.feature_service + ) cpu_mining_service = CpuMiningService() diff --git a/hathor/cli/mining.py b/hathor/cli/mining.py index 35a131640..597593825 100644 --- a/hathor/cli/mining.py +++ b/hathor/cli/mining.py @@ -141,7 +141,7 @@ def execute(args: Namespace) -> None: settings = get_global_settings() daa = DifficultyAdjustmentAlgorithm(settings=settings) verifiers = VertexVerifiers.create_defaults(settings=settings, daa=daa) - verification_service = VerificationService(verifiers=verifiers) + verification_service = VerificationService(verifiers=verifiers, daa=daa) verification_service.verify_without_storage(block) except HathorError: print('[{}] ERROR: Block has not been pushed because it is not valid.'.format(datetime.datetime.now())) diff --git a/hathor/transaction/resources/create_tx.py b/hathor/transaction/resources/create_tx.py index 897bd0ead..9db505391 100644 --- a/hathor/transaction/resources/create_tx.py +++ b/hathor/transaction/resources/create_tx.py @@ -22,6 +22,7 @@ from hathor.transaction import Transaction, TxInput, TxOutput from hathor.transaction.scripts import create_output_script from hathor.util import api_catch_exceptions, json_dumpb, json_loadb 
+from hathor.verification.verification_dependencies import TransactionDependencies def from_raw_output(raw_output: dict, tokens: list[bytes]) -> TxOutput: @@ -109,15 +110,16 @@ def _verify_unsigned_skip_pow(self, tx: Transaction) -> None: """ Same as .verify but skipping pow and signature verification.""" assert type(tx) is Transaction verifiers = self.manager.verification_service.verifiers + deps = TransactionDependencies.create(tx) verifiers.tx.verify_number_of_inputs(tx) verifiers.vertex.verify_number_of_outputs(tx) verifiers.vertex.verify_outputs(tx) verifiers.tx.verify_output_token_indexes(tx) verifiers.vertex.verify_sigops_output(tx) - verifiers.tx.verify_sigops_input(tx) + verifiers.tx.verify_sigops_input(tx, deps) # need to run verify_inputs first to check if all inputs exist - verifiers.tx.verify_inputs(tx, skip_script=True) - verifiers.vertex.verify_parents(tx) + verifiers.tx.verify_inputs(tx, deps, skip_script=True) + verifiers.vertex.verify_parents(tx, deps) verifiers.tx.verify_sum(tx.get_complete_token_info()) diff --git a/hathor/transaction/storage/simple_memory_storage.py b/hathor/transaction/storage/simple_memory_storage.py index 6e521f052..3baa6deef 100644 --- a/hathor/transaction/storage/simple_memory_storage.py +++ b/hathor/transaction/storage/simple_memory_storage.py @@ -17,6 +17,7 @@ from hathor.transaction.storage import TransactionStorage from hathor.transaction.storage.exceptions import TransactionDoesNotExist from hathor.types import VertexId +from hathor.util import not_none class SimpleMemoryStorage: @@ -47,6 +48,10 @@ def get_transaction(self, tx_id: VertexId) -> Transaction: assert isinstance(tx, Transaction) return tx + def get_vertex(self, vertex_id: VertexId) -> BaseTransaction: + """Return a vertex from the storage, raise if it's not found.""" + return self._get_vertex(self._vertices, vertex_id) + @staticmethod def _get_vertex(storage: dict[VertexId, BaseTransaction], vertex_id: VertexId) -> BaseTransaction: """Return a vertex from a storage, throw if it's not found.""" @@ -71,13 +76,19 @@ def add_vertices_from_storage(self, storage: TransactionStorage, ids: list[Verte def add_vertex_from_storage(self, storage: TransactionStorage, vertex_id: VertexId) -> None: """ - Add a vertex to this storage. It automatically fetches data from the provided TransactionStorage and a list - of ids. + Add a vertex to this storage. It automatically fetches data from the provided TransactionStorage and vertex_id. """ + vertex = storage.get_transaction(vertex_id) + + self.add_vertex(vertex) + + def add_vertex(self, vertex: BaseTransaction) -> None: + """Add a vertex to this storage.""" + vertex_id = not_none(vertex.hash) + if vertex_id in self._vertices: return - vertex = storage.get_transaction(vertex_id) clone = vertex.clone(include_metadata=True, include_storage=False) if isinstance(vertex, Block): @@ -90,10 +101,6 @@ def add_vertex_from_storage(self, storage: TransactionStorage, vertex_id: Vertex raise NotImplementedError - def get_vertex(self, vertex_id: VertexId) -> BaseTransaction: - # TODO: Currently unused, will be implemented in a next PR. - raise NotImplementedError - def get_best_block_tips(self) -> list[VertexId]: # TODO: Currently unused, will be implemented in a next PR. 
raise NotImplementedError diff --git a/hathor/verification/block_verifier.py b/hathor/verification/block_verifier.py index 2fd0f33bf..cd8ec423d 100644 --- a/hathor/verification/block_verifier.py +++ b/hathor/verification/block_verifier.py @@ -16,7 +16,7 @@ from hathor.conf.settings import HathorSettings from hathor.daa import DifficultyAdjustmentAlgorithm -from hathor.feature_activation.feature_service import BlockIsMissingSignal, BlockIsSignaling, BlockSignalingState +from hathor.feature_activation.feature_service import BlockIsMissingSignal, BlockIsSignaling from hathor.transaction import Block from hathor.transaction.exceptions import ( BlockMustSignalError, @@ -27,8 +27,7 @@ TransactionDataError, WeightError, ) -from hathor.transaction.storage.simple_memory_storage import SimpleMemoryStorage -from hathor.util import not_none +from hathor.verification.verification_dependencies import BlockDependencies class BlockVerifier: @@ -51,20 +50,16 @@ def verify_height(self, block: Block) -> None: if meta.height < meta.min_height: raise RewardLocked(f'Block needs {meta.min_height} height but has {meta.height}') - def verify_weight(self, block: Block) -> None: + def verify_weight(self, block: Block, block_deps: BlockDependencies) -> None: """Validate minimum block difficulty.""" - memory_storage = SimpleMemoryStorage() - dependencies = self._daa.get_block_dependencies(block) - memory_storage.add_vertices_from_storage(not_none(block.storage), dependencies) - - min_block_weight = self._daa.calculate_block_difficulty(block, memory_storage) + min_block_weight = self._daa.calculate_block_difficulty(block, block_deps.storage) if block.weight < min_block_weight - self._settings.WEIGHT_TOL: raise WeightError(f'Invalid new block {block.hash_hex}: weight ({block.weight}) is ' f'smaller than the minimum weight ({min_block_weight})') - def verify_reward(self, block: Block) -> None: + def verify_reward(self, block: Block, block_deps: BlockDependencies) -> None: """Validate reward amount.""" - parent_block = block.get_block_parent() + parent_block = block_deps.storage.get_parent_block(block) tokens_issued_per_block = self._daa.get_tokens_issued_per_block(parent_block.get_height() + 1) if block.sum_outputs != tokens_issued_per_block: raise InvalidBlockReward( @@ -86,9 +81,9 @@ def verify_data(self, block: Block) -> None: if len(block.data) > self._settings.BLOCK_DATA_MAX_SIZE: raise TransactionDataError('block data has {} bytes'.format(len(block.data))) - def verify_mandatory_signaling(self, signaling_state: BlockSignalingState) -> None: + def verify_mandatory_signaling(self, block_deps: BlockDependencies) -> None: """Verify whether this block is missing mandatory signaling for any feature.""" - match signaling_state: + match block_deps.signaling_state: case BlockIsSignaling(): return case BlockIsMissingSignal(feature): @@ -96,4 +91,4 @@ def verify_mandatory_signaling(self, signaling_state: BlockSignalingState) -> No f"Block must signal support for feature '{feature.value}' during MUST_SIGNAL phase." 
) case _: - assert_never(signaling_state) + assert_never(block_deps.signaling_state) diff --git a/hathor/verification/transaction_verifier.py b/hathor/verification/transaction_verifier.py index 2d86883c2..68ce3a699 100644 --- a/hathor/verification/transaction_verifier.py +++ b/hathor/verification/transaction_verifier.py @@ -38,6 +38,7 @@ from hathor.transaction.util import get_deposit_amount, get_withdraw_amount from hathor.types import TokenUid, VertexId from hathor.util import not_none +from hathor.verification.verification_dependencies import TransactionDependencies cpu = get_cpu_profiler() @@ -51,8 +52,6 @@ def __init__(self, *, settings: HathorSettings, daa: DifficultyAdjustmentAlgorit def verify_parents_basic(self, tx: Transaction) -> None: """Verify number and non-duplicity of parents.""" - assert tx.storage is not None - # check if parents are duplicated parents_set = set(tx.parents) if len(tx.parents) > len(parents_set): @@ -72,7 +71,7 @@ def verify_weight(self, tx: Transaction) -> None: raise WeightError(f'Invalid new tx {tx.hash_hex}: weight ({tx.weight}) is ' f'greater than the maximum allowed ({max_tx_weight})') - def verify_sigops_input(self, tx: Transaction) -> None: + def verify_sigops_input(self, tx: Transaction, tx_deps: TransactionDependencies) -> None: """ Count sig operations on all inputs and verify that the total sum is below the limit """ from hathor.transaction.scripts import get_sigops_count @@ -80,7 +79,7 @@ def verify_sigops_input(self, tx: Transaction) -> None: n_txops = 0 for tx_input in tx.inputs: try: - spent_tx = tx.get_spent_tx(tx_input) + spent_tx = tx_deps.storage.get_vertex(tx_input.tx_id) except TransactionDoesNotExist: raise InexistentInput('Input tx does not exist: {}'.format(tx_input.tx_id.hex())) assert spent_tx.hash is not None @@ -93,7 +92,7 @@ def verify_sigops_input(self, tx: Transaction) -> None: raise TooManySigOps( 'TX[{}]: Max number of sigops for inputs exceeded ({})'.format(tx.hash_hex, n_txops)) - def verify_inputs(self, tx: Transaction, *, skip_script: bool = False) -> None: + def verify_inputs(self, tx: Transaction, tx_deps: TransactionDependencies, *, skip_script: bool = False) -> None: """Verify inputs signatures and ownership and all inputs actually exist""" from hathor.transaction.storage.exceptions import TransactionDoesNotExist @@ -105,7 +104,7 @@ def verify_inputs(self, tx: Transaction, *, skip_script: bool = False) -> None: )) try: - spent_tx = tx.get_spent_tx(input_tx) + spent_tx = tx_deps.storage.get_vertex(input_tx.tx_id) assert spent_tx.hash is not None if input_tx.index >= len(spent_tx.outputs): raise InexistentInput('Output spent by this input does not exist: {} index {}'.format( diff --git a/hathor/verification/verification_dependencies.py b/hathor/verification/verification_dependencies.py new file mode 100644 index 000000000..ac399c4a8 --- /dev/null +++ b/hathor/verification/verification_dependencies.py @@ -0,0 +1,68 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from dataclasses import dataclass + +from typing_extensions import Self + +from hathor.daa import DifficultyAdjustmentAlgorithm +from hathor.feature_activation.feature_service import BlockSignalingState, FeatureService +from hathor.transaction import Block +from hathor.transaction.storage.simple_memory_storage import SimpleMemoryStorage +from hathor.transaction.transaction import Transaction + + +@dataclass(frozen=True, slots=True) +class VertexDependencies: + """A dataclass of dependencies necessary for vertex verification.""" + storage: SimpleMemoryStorage + + +@dataclass(frozen=True, slots=True) +class BlockDependencies(VertexDependencies): + """A dataclass of dependencies necessary for block verification.""" + signaling_state: BlockSignalingState + + @classmethod + def create(cls, block: Block, daa: DifficultyAdjustmentAlgorithm, feature_service: FeatureService) -> Self: + """Create a block dependencies instance.""" + assert block.storage is not None + signaling_state = feature_service.is_signaling_mandatory_features(block) + simple_storage = SimpleMemoryStorage() + daa_deps = daa.get_block_dependencies(block) + deps = block.parents + daa_deps + + simple_storage.add_vertices_from_storage(block.storage, deps) + simple_storage.add_vertex(block) # we add the block itself so its metadata can be used as a dependency. + + return cls( + storage=simple_storage, + signaling_state=signaling_state + ) + + +class TransactionDependencies(VertexDependencies): + """A dataclass of dependencies necessary for transaction verification.""" + + @classmethod + def create(cls, tx: Transaction) -> Self: + """Create a transaction dependencies instance.""" + assert tx.storage is not None + simple_storage = SimpleMemoryStorage() + spent_txs = [tx_input.tx_id for tx_input in tx.inputs] + deps = tx.parents + spent_txs + + simple_storage.add_vertices_from_storage(tx.storage, deps) + + return cls(storage=simple_storage) diff --git a/hathor/verification/verification_service.py b/hathor/verification/verification_service.py index 0c9876cc4..32d31ef1f 100644 --- a/hathor/verification/verification_service.py +++ b/hathor/verification/verification_service.py @@ -14,23 +14,32 @@ from typing_extensions import assert_never -from hathor.feature_activation.feature_service import BlockSignalingState, FeatureService +from hathor.daa import DifficultyAdjustmentAlgorithm +from hathor.feature_activation.feature_service import FeatureService from hathor.profiler import get_cpu_profiler from hathor.transaction import BaseTransaction, Block, MergeMinedBlock, Transaction, TxVersion from hathor.transaction.token_creation_tx import TokenCreationTransaction from hathor.transaction.transaction import TokenInfo from hathor.transaction.validation_state import ValidationState from hathor.types import TokenUid +from hathor.verification.verification_dependencies import BlockDependencies, TransactionDependencies from hathor.verification.vertex_verifiers import VertexVerifiers cpu = get_cpu_profiler() class VerificationService: - __slots__ = ('verifiers', '_feature_service') + __slots__ = ('verifiers', '_daa', '_feature_service') - def __init__(self, *, verifiers: VertexVerifiers, feature_service: FeatureService | None = None) -> None: + def __init__( + self, + *, + verifiers: VertexVerifiers, + daa: DifficultyAdjustmentAlgorithm, + feature_service: FeatureService | None = None + ) -> None: self.verifiers = verifiers + self._daa = daa self._feature_service = feature_service def validate_basic(self, vertex: BaseTransaction, *, 
skip_block_weight_verification: bool = False) -> bool: @@ -84,14 +93,20 @@ def verify_basic(self, vertex: BaseTransaction, *, skip_block_weight_verificatio """Basic verifications (the ones without access to dependencies: parents+inputs). Raises on error. Used by `self.validate_basic`. Should not modify the validation state.""" + assert self._feature_service is not None + # We assert with type() instead of isinstance() because each subclass has a specific branch. match vertex.version: case TxVersion.REGULAR_BLOCK: assert type(vertex) is Block - self._verify_basic_block(vertex, skip_weight_verification=skip_block_weight_verification) + block_deps = BlockDependencies.create(vertex, self._daa, self._feature_service) + self._verify_basic_block(vertex, block_deps, skip_weight_verification=skip_block_weight_verification) case TxVersion.MERGE_MINED_BLOCK: assert type(vertex) is MergeMinedBlock - self._verify_basic_merge_mined_block(vertex, skip_weight_verification=skip_block_weight_verification) + block_deps = BlockDependencies.create(vertex, self._daa, self._feature_service) + self._verify_basic_merge_mined_block( + vertex, block_deps, skip_weight_verification=skip_block_weight_verification + ) case TxVersion.REGULAR_TRANSACTION: assert type(vertex) is Transaction self._verify_basic_tx(vertex) @@ -101,14 +116,26 @@ def verify_basic(self, vertex: BaseTransaction, *, skip_block_weight_verificatio case _: assert_never(vertex.version) - def _verify_basic_block(self, block: Block, *, skip_weight_verification: bool) -> None: + def _verify_basic_block( + self, + block: Block, + block_deps: BlockDependencies, + *, + skip_weight_verification: bool + ) -> None: """Partially run validations, the ones that need parents/inputs are skipped.""" if not skip_weight_verification: - self.verifiers.block.verify_weight(block) - self.verifiers.block.verify_reward(block) + self.verifiers.block.verify_weight(block, block_deps) + self.verifiers.block.verify_reward(block, block_deps) - def _verify_basic_merge_mined_block(self, block: MergeMinedBlock, *, skip_weight_verification: bool) -> None: - self._verify_basic_block(block, skip_weight_verification=skip_weight_verification) + def _verify_basic_merge_mined_block( + self, + block: MergeMinedBlock, + block_deps: BlockDependencies, + *, + skip_weight_verification: bool + ) -> None: + self._verify_basic_block(block, block_deps, skip_weight_verification=skip_weight_verification) def _verify_basic_tx(self, tx: Transaction) -> None: """Partially run validations, the ones that need parents/inputs are skipped.""" @@ -126,28 +153,34 @@ def verify(self, vertex: BaseTransaction, *, reject_locked_reward: bool = True) """Run all verifications. Raises on error. Used by `self.validate_full`. Should not modify the validation state.""" + if vertex.is_genesis: + # TODO do genesis validation + return + assert self._feature_service is not None # We assert with type() instead of isinstance() because each subclass has a specific branch. 
match vertex.version: case TxVersion.REGULAR_BLOCK: assert type(vertex) is Block - signaling_state = self._feature_service.is_signaling_mandatory_features(vertex) - self._verify_block(vertex, signaling_state) + block_deps = BlockDependencies.create(vertex, self._daa, self._feature_service) + self._verify_block(vertex, block_deps) case TxVersion.MERGE_MINED_BLOCK: assert type(vertex) is MergeMinedBlock - signaling_state = self._feature_service.is_signaling_mandatory_features(vertex) - self._verify_merge_mined_block(vertex, signaling_state) + block_deps = BlockDependencies.create(vertex, self._daa, self._feature_service) + self._verify_merge_mined_block(vertex, block_deps) case TxVersion.REGULAR_TRANSACTION: assert type(vertex) is Transaction - self._verify_tx(vertex, reject_locked_reward=reject_locked_reward) + tx_deps = TransactionDependencies.create(vertex) + self._verify_tx(vertex, tx_deps, reject_locked_reward=reject_locked_reward) case TxVersion.TOKEN_CREATION_TRANSACTION: assert type(vertex) is TokenCreationTransaction - self._verify_token_creation_tx(vertex, reject_locked_reward=reject_locked_reward) + tx_deps = TransactionDependencies.create(vertex) + self._verify_token_creation_tx(vertex, tx_deps, reject_locked_reward=reject_locked_reward) case _: assert_never(vertex.version) @cpu.profiler(key=lambda _, block: 'block-verify!{}'.format(block.hash.hex())) - def _verify_block(self, block: Block, signaling_state: BlockSignalingState) -> None: + def _verify_block(self, block: Block, block_deps: BlockDependencies) -> None: """ (1) confirms at least two pending transactions and references last block (2) solves the pow with the correct weight (done in HathorManager) @@ -157,26 +190,24 @@ def _verify_block(self, block: Block, signaling_state: BlockSignalingState) -> N (6) whether this block must signal feature support """ # TODO Should we validate a limit of outputs? 
- if block.is_genesis: - # TODO do genesis validation - return self.verify_without_storage(block) # (1) and (4) - self.verifiers.vertex.verify_parents(block) + self.verifiers.vertex.verify_parents(block, block_deps) self.verifiers.block.verify_height(block) - self.verifiers.block.verify_mandatory_signaling(signaling_state) + self.verifiers.block.verify_mandatory_signaling(block_deps) - def _verify_merge_mined_block(self, block: MergeMinedBlock, signaling_state: BlockSignalingState) -> None: - self._verify_block(block, signaling_state) + def _verify_merge_mined_block(self, block: MergeMinedBlock, block_deps: BlockDependencies) -> None: + self._verify_block(block, block_deps) @cpu.profiler(key=lambda _, tx: 'tx-verify!{}'.format(tx.hash.hex())) def _verify_tx( self, tx: Transaction, + tx_deps: TransactionDependencies, *, reject_locked_reward: bool, token_dict: dict[TokenUid, TokenInfo] | None = None @@ -192,24 +223,27 @@ def _verify_tx( (viii) validate input's timestamps (ix) validate inputs and outputs sum """ - if tx.is_genesis: - # TODO do genesis validation - return self.verify_without_storage(tx) - self.verifiers.tx.verify_sigops_input(tx) - self.verifiers.tx.verify_inputs(tx) # need to run verify_inputs first to check if all inputs exist - self.verifiers.vertex.verify_parents(tx) + self.verifiers.tx.verify_sigops_input(tx, tx_deps) + self.verifiers.tx.verify_inputs(tx, tx_deps) # need to run verify_inputs first to check if all inputs exist + self.verifiers.vertex.verify_parents(tx, tx_deps) self.verifiers.tx.verify_sum(token_dict or tx.get_complete_token_info()) if reject_locked_reward: self.verifiers.tx.verify_reward_locked(tx) - def _verify_token_creation_tx(self, tx: TokenCreationTransaction, *, reject_locked_reward: bool) -> None: + def _verify_token_creation_tx( + self, + tx: TokenCreationTransaction, + tx_deps: TransactionDependencies, + *, + reject_locked_reward: bool + ) -> None: """ Run all validations as regular transactions plus validation on token info. We also overload verify_sum to make some different checks """ token_dict = tx.get_complete_token_info() - self._verify_tx(tx, reject_locked_reward=reject_locked_reward, token_dict=token_dict) + self._verify_tx(tx, tx_deps, reject_locked_reward=reject_locked_reward, token_dict=token_dict) self.verifiers.token_creation_tx.verify_minted_tokens(tx, token_dict) self.verifiers.token_creation_tx.verify_token_info(tx) diff --git a/hathor/verification/vertex_verifier.py b/hathor/verification/vertex_verifier.py index 80a621502..1a0c0a047 100644 --- a/hathor/verification/vertex_verifier.py +++ b/hathor/verification/vertex_verifier.py @@ -29,6 +29,7 @@ TooManyOutputs, TooManySigOps, ) +from hathor.verification.verification_dependencies import VertexDependencies # tx should have 2 parents, both other transactions _TX_PARENTS_TXS = 2 @@ -46,7 +47,7 @@ def __init__(self, *, settings: HathorSettings, daa: DifficultyAdjustmentAlgorit self._settings = settings self._daa = daa - def verify_parents(self, vertex: BaseTransaction) -> None: + def verify_parents(self, vertex: BaseTransaction, vertex_deps: VertexDependencies) -> None: """All parents must exist and their timestamps must be smaller than ours. Also, txs should have 2 other txs as parents, while blocks should have 2 txs + 1 block. 
@@ -59,8 +60,6 @@ def verify_parents(self, vertex: BaseTransaction) -> None: """ from hathor.transaction.storage.exceptions import TransactionDoesNotExist - assert vertex.storage is not None - # check if parents are duplicated parents_set = set(vertex.parents) if len(vertex.parents) > len(parents_set): @@ -72,7 +71,7 @@ def verify_parents(self, vertex: BaseTransaction) -> None: for parent_hash in vertex.parents: try: - parent = vertex.storage.get_transaction(parent_hash) + parent = vertex_deps.storage.get_vertex(parent_hash) assert parent.hash is not None if vertex.timestamp <= parent.timestamp: raise TimestampError('tx={} timestamp={}, parent={} timestamp={}'.format( @@ -90,7 +89,7 @@ def verify_parents(self, vertex: BaseTransaction) -> None: if my_parents_txs > 0: raise IncorrectParents('Parents which are blocks must come before transactions') for pi_hash in parent.parents: - pi = vertex.storage.get_transaction(parent_hash) + pi = vertex_deps.storage.get_vertex(parent_hash) if not pi.is_block: min_timestamp = ( min(min_timestamp, pi.timestamp) if min_timestamp is not None @@ -160,7 +159,7 @@ def verify_outputs(self, vertex: BaseTransaction) -> None: )) def verify_number_of_outputs(self, vertex: BaseTransaction) -> None: - """Verify number of outputs does not exceeds the limit""" + """Verify number of outputs does not exceed the limit""" if len(vertex.outputs) > self._settings.MAX_NUM_OUTPUTS: raise TooManyOutputs('Maximum number of outputs exceeded') diff --git a/tests/tx/test_block.py b/tests/tx/test_block.py index 2c7abc425..94ced1612 100644 --- a/tests/tx/test_block.py +++ b/tests/tx/test_block.py @@ -24,6 +24,7 @@ from hathor.transaction.exceptions import BlockMustSignalError from hathor.transaction.storage import TransactionMemoryStorage, TransactionStorage from hathor.verification.block_verifier import BlockVerifier +from hathor.verification.verification_dependencies import BlockDependencies def test_calculate_feature_activation_bit_counts_genesis(): @@ -141,9 +142,10 @@ def test_get_feature_activation_bit_value() -> None: def test_verify_must_signal() -> None: settings = Mock(spec_set=HathorSettings) verifier = BlockVerifier(settings=settings, daa=Mock()) + deps = BlockDependencies(storage=Mock(), signaling_state=BlockIsMissingSignal(feature=Feature.NOP_FEATURE_1)) with pytest.raises(BlockMustSignalError) as e: - verifier.verify_mandatory_signaling(BlockIsMissingSignal(feature=Feature.NOP_FEATURE_1)) + verifier.verify_mandatory_signaling(deps) assert str(e.value) == "Block must signal support for feature 'NOP_FEATURE_1' during MUST_SIGNAL phase." 
@@ -151,5 +153,6 @@ def test_verify_must_signal() -> None: def test_verify_must_not_signal() -> None: settings = Mock(spec_set=HathorSettings) verifier = BlockVerifier(settings=settings, daa=Mock()) + deps = BlockDependencies(storage=Mock(), signaling_state=BlockIsSignaling()) - verifier.verify_mandatory_signaling(BlockIsSignaling()) + verifier.verify_mandatory_signaling(deps) diff --git a/tests/tx/test_genesis.py b/tests/tx/test_genesis.py index 37f2343eb..3a54a7c34 100644 --- a/tests/tx/test_genesis.py +++ b/tests/tx/test_genesis.py @@ -33,7 +33,7 @@ def setUp(self): super().setUp() self._daa = DifficultyAdjustmentAlgorithm(settings=self._settings) verifiers = VertexVerifiers.create_defaults(settings=self._settings, daa=self._daa) - self._verification_service = VerificationService(verifiers=verifiers) + self._verification_service = VerificationService(verifiers=verifiers, daa=self._daa) self.storage = TransactionMemoryStorage() def test_pow(self): diff --git a/tests/tx/test_tx.py b/tests/tx/test_tx.py index c216b2f3e..252170035 100644 --- a/tests/tx/test_tx.py +++ b/tests/tx/test_tx.py @@ -28,8 +28,11 @@ WeightError, ) from hathor.transaction.scripts import P2PKH, parse_address_script +from hathor.transaction.storage.exceptions import TransactionDoesNotExist +from hathor.transaction.storage.simple_memory_storage import SimpleMemoryStorage from hathor.transaction.util import int_to_bytes from hathor.transaction.validation_state import ValidationState +from hathor.verification.verification_dependencies import TransactionDependencies, VertexDependencies from hathor.wallet import Wallet from tests import unittest from tests.utils import add_blocks_unlock_reward, add_new_transactions, create_script_with_sigops, get_genesis_key @@ -114,8 +117,9 @@ def test_script(self): data_wrong = P2PKH.create_input_data(public_bytes, signature) _input.data = data_wrong + deps = TransactionDependencies.create(tx) with self.assertRaises(InvalidInputData): - self._verifiers.tx.verify_inputs(tx) + self._verifiers.tx.verify_inputs(tx, deps) def test_too_many_inputs(self): random_bytes = bytes.fromhex('0000184e64683b966b4268f387c269915cc61f6af5329823a93e3696cb0fe902') @@ -193,8 +197,9 @@ def test_children_update(self): def test_block_inputs(self): # a block with inputs should be invalid - parents = [tx.hash for tx in self.genesis] genesis_block = self.genesis_blocks[0] + parents = [genesis_block, *self.genesis_txs] + parents = [tx.hash for tx in parents] tx_inputs = [TxInput(genesis_block.hash, 0, b'')] @@ -415,8 +420,12 @@ def test_block_unknown_parent(self): storage=self.tx_storage) self.manager.cpu_mining_service.resolve(block) + + with self.assertRaises(TransactionDoesNotExist): + self.manager.verification_service.verify(block) + with self.assertRaises(ParentDoesNotExist): - self._verifiers.vertex.verify_parents(block) + self._verifiers.vertex.verify_parents(block, VertexDependencies(SimpleMemoryStorage())) def test_block_number_parents(self): address = get_address_from_public_key(self.genesis_public_key) @@ -433,8 +442,10 @@ def test_block_number_parents(self): storage=self.tx_storage) self.manager.cpu_mining_service.resolve(block) + storage = SimpleMemoryStorage() + storage.add_vertices_from_storage(self.tx_storage, parents) with self.assertRaises(IncorrectParents): - self._verifiers.vertex.verify_parents(block) + self._verifiers.vertex.verify_parents(block, VertexDependencies(storage)) def test_tx_inputs_out_of_range(self): # we'll try to spend output 3 from genesis transaction, which does not exist @@ 
-472,9 +483,13 @@ def test_tx_inputs_out_of_range(self): _input = [TxInput(random_bytes, 3, data)] tx.inputs = _input self.manager.cpu_mining_service.resolve(tx) - with self.assertRaises(InexistentInput): + + with self.assertRaises(TransactionDoesNotExist): self.manager.verification_service.verify(tx) + with self.assertRaises(InexistentInput): + self._verifiers.tx.verify_inputs(tx, TransactionDependencies(SimpleMemoryStorage())) + def test_tx_inputs_conflict(self): # the new tx inputs will try to spend the same output parents = [tx.hash for tx in self.genesis_txs] @@ -683,28 +698,32 @@ def test_tx_methods(self): self._verifiers.vertex.verify_pow(tx2) # Verify parent timestamps - self._verifiers.vertex.verify_parents(tx2) + deps = TransactionDependencies.create(tx2) + self._verifiers.vertex.verify_parents(tx2, deps) tx2_timestamp = tx2.timestamp tx2.timestamp = 2 with self.assertRaises(TimestampError): - self._verifiers.vertex.verify_parents(tx2) + self._verifiers.vertex.verify_parents(tx2, deps) tx2.timestamp = tx2_timestamp # Verify inputs timestamps - self._verifiers.tx.verify_inputs(tx2) + self._verifiers.tx.verify_inputs(tx2, deps) tx2.timestamp = 2 with self.assertRaises(TimestampError): - self._verifiers.tx.verify_inputs(tx2) + self._verifiers.tx.verify_inputs(tx2, deps) tx2.timestamp = tx2_timestamp # Validate maximum distance between blocks block = blocks[0] block2 = blocks[1] block2.timestamp = block.timestamp + self._settings.MAX_DISTANCE_BETWEEN_BLOCKS - self._verifiers.vertex.verify_parents(block2) + storage = SimpleMemoryStorage() + storage.add_vertices_from_storage(self.tx_storage, block2.parents) + deps = VertexDependencies(storage) + self._verifiers.vertex.verify_parents(block2, deps) block2.timestamp += 1 with self.assertRaises(TimestampError): - self._verifiers.vertex.verify_parents(block2) + self._verifiers.vertex.verify_parents(block2, deps) def test_block_big_nonce(self): block = self.genesis_blocks[0] @@ -802,7 +821,7 @@ def test_output_value(self): # 'Manually resolving', to validate verify method tx.hash = bytes.fromhex('012cba011be3c29f1c406f9015e42698b97169dbc6652d1f5e4d5c5e83138858') with self.assertRaises(InvalidOutputValue): - self.manager.verification_service.verify(tx) + self._verifiers.vertex.verify_outputs(tx) # Invalid output value invalid_output = bytes.fromhex('ffffffff') @@ -906,7 +925,8 @@ def _test_txin_data_limit(self, offset): outputs=[_output], storage=self.tx_storage ) - self._verifiers.tx.verify_inputs(tx, skip_script=True) + deps = TransactionDependencies.create(tx) + self._verifiers.tx.verify_inputs(tx, deps, skip_script=True) def test_txin_data_limit_exceeded(self): with self.assertRaises(InvalidInputDataSize): @@ -1113,7 +1133,8 @@ def test_sigops_input_single_below_limit(self) -> None: input3 = TxInput(genesis_block.hash, 0, hscript) tx = Transaction(inputs=[input3], outputs=[_output], storage=self.tx_storage) tx.update_hash() - self._verifiers.tx.verify_sigops_input(tx) + deps = TransactionDependencies.create(tx) + self._verifiers.tx.verify_sigops_input(tx, deps) def test_sigops_input_multi_below_limit(self) -> None: genesis_block = self.genesis_blocks[0] @@ -1127,7 +1148,8 @@ def test_sigops_input_multi_below_limit(self) -> None: input4 = TxInput(genesis_block.hash, 0, hscript) tx = Transaction(inputs=[input4]*num_inputs, outputs=[_output], storage=self.tx_storage) tx.update_hash() - self._verifiers.tx.verify_sigops_input(tx) + deps = TransactionDependencies.create(tx) + self._verifiers.tx.verify_sigops_input(tx, deps) def 
test_compare_bytes_equal(self) -> None: # create some block diff --git a/tests/tx/test_tx_deserialization.py b/tests/tx/test_tx_deserialization.py index 4e878c802..08435d683 100644 --- a/tests/tx/test_tx_deserialization.py +++ b/tests/tx/test_tx_deserialization.py @@ -12,7 +12,7 @@ def setUp(self) -> None: super().setUp() daa = DifficultyAdjustmentAlgorithm(settings=self._settings) verifiers = VertexVerifiers.create_defaults(settings=self._settings, daa=daa) - self._verification_service = VerificationService(verifiers=verifiers) + self._verification_service = VerificationService(verifiers=verifiers, daa=daa) def test_deserialize(self): cls = self.get_tx_class()
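
Usage sketch (illustrative, not part of the patch): the snippet below mirrors the call sites changed above (hathor/cli/mining.py and hathor/transaction/resources/create_tx.py) to show how the externalized dependencies are wired after this refactor. The `settings`, `block`, and `tx` objects are assumed to already exist, and the import paths for VerificationService and VertexVerifiers are assumed from their module locations.

# Hypothetical wiring sketch based on the call sites touched by this patch.
# Assumptions: `settings` is a HathorSettings instance, `block` is a Block,
# and `tx` is a Transaction whose storage is set.
from hathor.daa import DifficultyAdjustmentAlgorithm
from hathor.verification.verification_dependencies import TransactionDependencies
from hathor.verification.verification_service import VerificationService
from hathor.verification.vertex_verifiers import VertexVerifiers

# The service now receives the DAA explicitly (see builder.py, cli_builder.py, mining.py).
daa = DifficultyAdjustmentAlgorithm(settings=settings)
verifiers = VertexVerifiers.create_defaults(settings=settings, daa=daa)
verification_service = VerificationService(verifiers=verifiers, daa=daa)
verification_service.verify_without_storage(block)

# Verifier methods that previously read from tx.storage now take an explicit
# dependencies object (see create_tx.py and transaction_verifier.py).
deps = TransactionDependencies.create(tx)
verifiers.tx.verify_sigops_input(tx, deps)
# verify_inputs must run before verify_parents so missing inputs are detected first.
verifiers.tx.verify_inputs(tx, deps, skip_script=True)
verifiers.vertex.verify_parents(tx, deps)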