diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 63c98bd26..714e775ef 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -101,5 +101,7 @@ jobs:
     - name: Run tests
       run: poetry run make tests
     - name: Upload coverage
-      uses: codecov/codecov-action@v3
+      uses: codecov/codecov-action@v4
       if: matrix.python == 3.11 && startsWith(matrix.os, 'ubuntu')
+      env:
+        CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
diff --git a/hathor/builder/__init__.py b/hathor/builder/__init__.py
index 40145b2bc..4e492162e 100644
--- a/hathor/builder/__init__.py
+++ b/hathor/builder/__init__.py
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from hathor.builder.builder import BuildArtifacts, Builder
+from hathor.builder.builder import BuildArtifacts, Builder, SyncSupportLevel
 from hathor.builder.cli_builder import CliBuilder
 from hathor.builder.resources_builder import ResourcesBuilder
 
@@ -21,4 +21,5 @@
     'Builder',
     'CliBuilder',
     'ResourcesBuilder',
+    'SyncSupportLevel',
 ]
diff --git a/hathor/builder/builder.py b/hathor/builder/builder.py
index 815ed352f..ec79c4af1 100644
--- a/hathor/builder/builder.py
+++ b/hathor/builder/builder.py
@@ -12,10 +12,11 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from enum import Enum
+from enum import Enum, IntEnum
 from typing import Any, Callable, NamedTuple, Optional, TypeAlias
 
 from structlog import get_logger
+from typing_extensions import assert_never
 
 from hathor.checkpoint import Checkpoint
 from hathor.conf.get_settings import get_global_settings
@@ -25,9 +26,11 @@
 from hathor.event import EventManager
 from hathor.event.storage import EventMemoryStorage, EventRocksDBStorage, EventStorage
 from hathor.event.websocket import EventWebsocketFactory
+from hathor.execution_manager import ExecutionManager
 from hathor.feature_activation.bit_signaling_service import BitSignalingService
 from hathor.feature_activation.feature import Feature
 from hathor.feature_activation.feature_service import FeatureService
+from hathor.feature_activation.storage.feature_activation_storage import FeatureActivationStorage
 from hathor.indexes import IndexesManager, MemoryIndexesManager, RocksDBIndexesManager
 from hathor.manager import HathorManager
 from hathor.mining.cpu_mining_service import CpuMiningService
@@ -44,12 +47,42 @@
     TransactionStorage,
 )
 from hathor.util import Random, get_environment_info, not_none
-from hathor.verification.verification_service import VerificationService, VertexVerifiers
+from hathor.verification.verification_service import VerificationService
+from hathor.verification.vertex_verifiers import VertexVerifiers
+from hathor.vertex_handler import VertexHandler
 from hathor.wallet import BaseWallet, Wallet
 
 logger = get_logger()
 
 
+class SyncSupportLevel(IntEnum):
+    UNAVAILABLE = 0  # not possible to enable at runtime
+    DISABLED = 1  # available but disabled by default, possible to enable at runtime
+    ENABLED = 2  # available and enabled by default, possible to disable at runtime
+
+    @classmethod
+    def add_factories(cls,
+                      p2p_manager: ConnectionsManager,
+                      sync_v1_support: 'SyncSupportLevel',
+                      sync_v2_support: 'SyncSupportLevel',
+                      ) -> None:
+        """Adds the sync factories to the manager according to their support levels."""
+        from hathor.p2p.sync_v1.factory import SyncV11Factory
+        from hathor.p2p.sync_v2.factory import SyncV2Factory
+        from hathor.p2p.sync_version import SyncVersion
+
+        # sync-v1 support:
+        if sync_v1_support > cls.UNAVAILABLE:
+            p2p_manager.add_sync_factory(SyncVersion.V1_1, SyncV11Factory(p2p_manager))
+        if sync_v1_support is cls.ENABLED:
+            p2p_manager.enable_sync_version(SyncVersion.V1_1)
+        # sync-v2 support:
+        if sync_v2_support > cls.UNAVAILABLE:
+            p2p_manager.add_sync_factory(SyncVersion.V2, SyncV2Factory(p2p_manager))
+        if sync_v2_support is cls.ENABLED:
+            p2p_manager.enable_sync_version(SyncVersion.V2)
+
+
 class StorageType(Enum):
     MEMORY = 'memory'
     ROCKSDB = 'rocksdb'
@@ -67,6 +100,7 @@ class BuildArtifacts(NamedTuple):
     consensus: ConsensusAlgorithm
     tx_storage: TransactionStorage
     feature_service: FeatureService
+    bit_signaling_service: BitSignalingService
     indexes: Optional[IndexesManager]
     wallet: Optional[BaseWallet]
     rocksdb_storage: Optional[RocksDBStorage]
@@ -141,8 +175,8 @@ def __init__(self) -> None:
         self._enable_tokens_index: bool = False
         self._enable_utxo_index: bool = False
 
-        self._enable_sync_v1: bool = True
-        self._enable_sync_v2: bool = False
+        self._sync_v1_support: SyncSupportLevel = SyncSupportLevel.UNAVAILABLE
+        self._sync_v2_support: SyncSupportLevel = SyncSupportLevel.UNAVAILABLE
 
         self._enable_stratum_server: Optional[bool] = None
 
@@ -150,6 +184,11 @@ def __init__(self) -> None:
 
         self._soft_voided_tx_ids: Optional[set[bytes]] = None
 
+        self._execution_manager: ExecutionManager | None = None
+        self._vertex_handler: VertexHandler | None = None
+        self._consensus: ConsensusAlgorithm | None = None
+        self._p2p_manager: ConnectionsManager | None = None
+
     def build(self) -> BuildArtifacts:
         if self.artifacts is not None:
             raise ValueError('cannot call build twice')
@@ -157,16 +196,19 @@ def build(self) -> BuildArtifacts:
         if self._network is None:
             raise TypeError('you must set a network')
 
+        if SyncSupportLevel.ENABLED not in {self._sync_v1_support, self._sync_v2_support}:
+            raise TypeError('you must enable at least one sync version')
+
         settings = self._get_or_create_settings()
         reactor = self._get_reactor()
         pubsub = self._get_or_create_pubsub()
         peer_id = self._get_peer_id()
-        soft_voided_tx_ids = self._get_soft_voided_tx_ids()
-        consensus_algorithm = ConsensusAlgorithm(soft_voided_tx_ids, pubsub)
+        execution_manager = self._get_or_create_execution_manager()
+        consensus_algorithm = self._get_or_create_consensus()
 
-        p2p_manager = self._get_p2p_manager()
+        p2p_manager = self._get_or_create_p2p_manager()
 
         wallet = self._get_or_create_wallet()
         event_manager = self._get_or_create_event_manager()
@@ -177,6 +219,7 @@ def build(self) -> BuildArtifacts:
         verification_service = self._get_or_create_verification_service()
         daa = self._get_or_create_daa()
         cpu_mining_service = self._get_or_create_cpu_mining_service()
+        vertex_handler = self._get_or_create_vertex_handler()
 
         if self._enable_address_index:
             indexes.enable_address_index(pubsub)
@@ -211,10 +254,11 @@ def build(self) -> BuildArtifacts:
             checkpoints=self._checkpoints,
             capabilities=self._capabilities,
             environment_info=get_environment_info(self._cmdline, peer_id.id),
-            feature_service=feature_service,
             bit_signaling_service=bit_signaling_service,
             verification_service=verification_service,
             cpu_mining_service=cpu_mining_service,
+            execution_manager=execution_manager,
+            vertex_handler=vertex_handler,
             **kwargs
         )
 
@@ -239,6 +283,7 @@ def build(self) -> BuildArtifacts:
             rocksdb_storage=self._rocksdb_storage,
             stratum_factory=stratum_factory,
             feature_service=feature_service,
+            bit_signaling_service=bit_signaling_service
         )
 
         return self.artifacts
@@ -306,6 +351,22 @@ def _get_peer_id(self) -> PeerId:
             return self._peer_id
         raise ValueError('peer_id not set')
 
+    def _get_or_create_execution_manager(self) -> ExecutionManager:
+        if self._execution_manager is None:
+            reactor = self._get_reactor()
+            self._execution_manager = ExecutionManager(reactor)
+
+        return self._execution_manager
+
+    def _get_or_create_consensus(self) -> ConsensusAlgorithm:
+        if self._consensus is None:
+            soft_voided_tx_ids = self._get_soft_voided_tx_ids()
+            pubsub = self._get_or_create_pubsub()
+            execution_manager = self._get_or_create_execution_manager()
+            self._consensus = ConsensusAlgorithm(soft_voided_tx_ids, pubsub, execution_manager=execution_manager)
+
+        return self._consensus
+
     def _get_or_create_pubsub(self) -> PubSubManager:
         if self._pubsub is None:
             self._pubsub = PubSubManager(self._get_reactor())
@@ -334,10 +395,9 @@ def _get_or_create_rocksdb_storage(self) -> RocksDBStorage:
 
         return self._rocksdb_storage
 
-    def _get_p2p_manager(self) -> ConnectionsManager:
-        from hathor.p2p.sync_v1.factory import SyncV11Factory
-        from hathor.p2p.sync_v2.factory import SyncV2Factory
-        from hathor.p2p.sync_version import SyncVersion
+    def _get_or_create_p2p_manager(self) -> ConnectionsManager:
+        if self._p2p_manager:
+            return self._p2p_manager
 
         enable_ssl = True
         reactor = self._get_reactor()
@@ -345,7 +405,7 @@
 
         assert self._network is not None
 
-        p2p_manager = ConnectionsManager(
+        self._p2p_manager = ConnectionsManager(
             reactor,
             network=self._network,
             my_peer=my_peer,
@@ -354,13 +414,8 @@
             whitelist_only=False,
             rng=self._rng,
         )
-        p2p_manager.add_sync_factory(SyncVersion.V1_1, SyncV11Factory(p2p_manager))
-        p2p_manager.add_sync_factory(SyncVersion.V2, SyncV2Factory(p2p_manager))
-        if self._enable_sync_v1:
-            p2p_manager.enable_sync_version(SyncVersion.V1_1)
-        if self._enable_sync_v2:
-            p2p_manager.enable_sync_version(SyncVersion.V2)
-        return p2p_manager
+        SyncSupportLevel.add_factories(self._p2p_manager, self._sync_v1_support, self._sync_v2_support)
+        return self._p2p_manager
 
     def _get_or_create_indexes_manager(self) -> IndexesManager:
         if self._indexes_manager is not None:
@@ -438,7 +493,8 @@ def _get_or_create_event_manager(self) -> EventManager:
                 reactor=reactor,
                 pubsub=self._get_or_create_pubsub(),
                 event_storage=storage,
-                event_ws_factory=factory
+                event_ws_factory=factory,
+                execution_manager=self._get_or_create_execution_manager()
             )
 
         return self._event_manager
@@ -460,12 +516,14 @@ def _get_or_create_bit_signaling_service(self) -> BitSignalingService:
             settings = self._get_or_create_settings()
             tx_storage = self._get_or_create_tx_storage()
             feature_service = self._get_or_create_feature_service()
+            feature_storage = self._get_or_create_feature_storage()
             self._bit_signaling_service = BitSignalingService(
                 feature_settings=settings.FEATURE_ACTIVATION,
                 feature_service=feature_service,
                 tx_storage=tx_storage,
                 support_features=self._support_features,
                 not_support_features=self._not_support_features,
+                feature_storage=feature_storage,
             )
 
         return self._bit_signaling_service
@@ -477,6 +535,15 @@ def _get_or_create_verification_service(self) -> VerificationService:
 
         return self._verification_service
 
+    def _get_or_create_feature_storage(self) -> FeatureActivationStorage | None:
+        match self._storage_type:
+            case StorageType.MEMORY: return None
+            case StorageType.ROCKSDB: return FeatureActivationStorage(
+                settings=self._get_or_create_settings(),
+                rocksdb_storage=self._get_or_create_rocksdb_storage()
+            )
+            case _: assert_never(self._storage_type)
+
     def _get_or_create_vertex_verifiers(self) -> VertexVerifiers:
         if self._vertex_verifiers is None:
             settings = self._get_or_create_settings()
@@ -507,6 +574,22 @@ def _get_or_create_cpu_mining_service(self) -> CpuMiningService:
 
         return self._cpu_mining_service
 
+    def _get_or_create_vertex_handler(self) -> VertexHandler:
+        if self._vertex_handler is None:
+            self._vertex_handler = VertexHandler(
+                reactor=self._get_reactor(),
+                settings=self._get_or_create_settings(),
+                tx_storage=self._get_or_create_tx_storage(),
+                verification_service=self._get_or_create_verification_service(),
+                consensus=self._get_or_create_consensus(),
+                p2p_manager=self._get_or_create_p2p_manager(),
+                feature_service=self._get_or_create_feature_service(),
+                pubsub=self._get_or_create_pubsub(),
+                wallet=self._get_or_create_wallet(),
+            )
+
+        return self._vertex_handler
+
     def use_memory(self) -> 'Builder':
         self.check_if_can_modify()
         self._storage_type = StorageType.MEMORY
@@ -536,16 +619,14 @@ def force_memory_index(self) -> 'Builder':
 
     def _get_or_create_wallet(self) -> Optional[BaseWallet]:
         if self._wallet is not None:
-            assert self._wallet_directory is None
-            assert self._wallet_unlock is None
             return self._wallet
 
         if self._wallet_directory is None:
             return None
-        wallet = Wallet(directory=self._wallet_directory)
+        self._wallet = Wallet(directory=self._wallet_directory)
         if self._wallet_unlock is not None:
-            wallet.unlock(self._wallet_unlock)
-        return wallet
+            self._wallet.unlock(self._wallet_unlock)
+        return self._wallet
 
     def set_wallet(self, wallet: BaseWallet) -> 'Builder':
         self.check_if_can_modify()
@@ -639,34 +720,34 @@ def set_network(self, network: str) -> 'Builder':
         self._network = network
         return self
 
-    def set_enable_sync_v1(self, enable_sync_v1: bool) -> 'Builder':
+    def set_sync_v1_support(self, support_level: SyncSupportLevel) -> 'Builder':
         self.check_if_can_modify()
-        self._enable_sync_v1 = enable_sync_v1
+        self._sync_v1_support = support_level
         return self
 
-    def set_enable_sync_v2(self, enable_sync_v2: bool) -> 'Builder':
+    def set_sync_v2_support(self, support_level: SyncSupportLevel) -> 'Builder':
        self.check_if_can_modify()
-        self._enable_sync_v2 = enable_sync_v2
+        self._sync_v2_support = support_level
         return self
 
     def enable_sync_v1(self) -> 'Builder':
         self.check_if_can_modify()
-        self._enable_sync_v1 = True
+        self._sync_v1_support = SyncSupportLevel.ENABLED
         return self
 
     def disable_sync_v1(self) -> 'Builder':
         self.check_if_can_modify()
-        self._enable_sync_v1 = False
+        self._sync_v1_support = SyncSupportLevel.DISABLED
         return self
 
     def enable_sync_v2(self) -> 'Builder':
         self.check_if_can_modify()
-        self._enable_sync_v2 = True
+        self._sync_v2_support = SyncSupportLevel.ENABLED
         return self
 
     def disable_sync_v2(self) -> 'Builder':
         self.check_if_can_modify()
-        self._enable_sync_v2 = False
+        self._sync_v2_support = SyncSupportLevel.DISABLED
         return self
 
     def set_full_verification(self, full_verification: bool) -> 'Builder':
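The `SyncSupportLevel` refactor above replaces the old boolean toggles on `Builder`. The following is a minimal sketch of how a caller might configure sync support under the new API; it is based only on the methods visible in this diff, the network name is a hypothetical placeholder, and a real `build()` also requires the remaining setup (reactor, peer id, etc.) that is omitted here:

```python
from hathor.builder import Builder, SyncSupportLevel

# Both support levels default to UNAVAILABLE, and build() now raises
# TypeError unless at least one sync version is ENABLED.
builder = (
    Builder()
    .set_network('testnet')  # hypothetical network name
    .use_memory()
    .set_sync_v2_support(SyncSupportLevel.ENABLED)   # on by default, can be disabled at runtime
    .set_sync_v1_support(SyncSupportLevel.DISABLED)  # registered, but off until enabled at runtime
)
# artifacts = builder.build()  # needs a reactor, peer id, etc., omitted in this sketch
```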
diff --git a/hathor/builder/cli_builder.py b/hathor/builder/cli_builder.py
index 8c1f41fff..9581be9fd 100644
--- a/hathor/builder/cli_builder.py
+++ b/hathor/builder/cli_builder.py
@@ -13,7 +13,6 @@
 # limitations under the License.
 
 import getpass
-import json
 import os
 import platform
 import sys
@@ -27,8 +26,10 @@
 from hathor.daa import DifficultyAdjustmentAlgorithm
 from hathor.event import EventManager
 from hathor.exception import BuilderError
+from hathor.execution_manager import ExecutionManager
 from hathor.feature_activation.bit_signaling_service import BitSignalingService
 from hathor.feature_activation.feature_service import FeatureService
+from hathor.feature_activation.storage.feature_activation_storage import FeatureActivationStorage
 from hathor.indexes import IndexesManager, MemoryIndexesManager, RocksDBIndexesManager
 from hathor.manager import HathorManager
 from hathor.mining.cpu_mining_service import CpuMiningService
@@ -41,15 +42,17 @@
 from hathor.util import Random, not_none
 from hathor.verification.verification_service import VerificationService
 from hathor.verification.vertex_verifiers import VertexVerifiers
+from hathor.vertex_handler import VertexHandler
 from hathor.wallet import BaseWallet, HDWallet, Wallet
 
 logger = get_logger()
 
 
 class SyncChoice(Enum):
-    V1_ONLY = auto()
-    V2_ONLY = auto()
-    BRIDGE = auto()
+    V1_DEFAULT = auto()  # v1 enabled, v2 disabled but can be enabled at runtime
+    V2_DEFAULT = auto()  # v2 enabled, v1 disabled but can be enabled at runtime
+    BRIDGE_DEFAULT = auto()  # both enabled, either can be disabled at runtime
+    V2_ONLY = auto()  # v1 is unavailable, it cannot be enabled at runtime
 
 
 class CliBuilder:
@@ -68,15 +71,13 @@ def check_or_raise(self, condition: bool, message: str) -> None:
 
     def create_manager(self, reactor: Reactor) -> HathorManager:
         import hathor
+        from hathor.builder import SyncSupportLevel
         from hathor.conf.get_settings import get_global_settings, get_settings_source
         from hathor.daa import TestMode
         from hathor.event.storage import EventMemoryStorage, EventRocksDBStorage, EventStorage
         from hathor.event.websocket.factory import EventWebsocketFactory
         from hathor.p2p.netfilter.utils import add_peer_id_blacklist
         from hathor.p2p.peer_discovery import BootstrapPeerDiscovery, DNSPeerDiscovery
-        from hathor.p2p.sync_v1.factory import SyncV11Factory
-        from hathor.p2p.sync_v2.factory import SyncV2Factory
-        from hathor.p2p.sync_version import SyncVersion
         from hathor.storage import RocksDBStorage
         from hathor.transaction.storage import (
             TransactionCacheStorage,
@@ -94,8 +95,7 @@ def create_manager(self, reactor: Reactor) -> HathorManager:
         self.log = logger.new()
         self.reactor = reactor
 
-        peer_id = self.create_peer_id()
-
+        peer_id = PeerId.create_from_json_path(self._args.peer) if self._args.peer else PeerId()
         python = f'{platform.python_version()}-{platform.python_implementation()}'
 
         self.log.info(
@@ -119,6 +119,7 @@
         tx_storage: TransactionStorage
         event_storage: EventStorage
         indexes: IndexesManager
+        feature_storage: FeatureActivationStorage | None = None
         self.rocksdb_storage: Optional[RocksDBStorage] = None
         self.event_ws_factory: Optional[EventWebsocketFactory] = None
 
@@ -151,6 +152,7 @@
                 kwargs['indexes'] = indexes
             tx_storage = TransactionRocksDBStorage(self.rocksdb_storage, **kwargs)
             event_storage = EventRocksDBStorage(self.rocksdb_storage)
+            feature_storage = FeatureActivationStorage(settings=settings, rocksdb_storage=self.rocksdb_storage)
 
         self.log.info('with storage', storage_class=type(tx_storage).__name__, path=self._args.data)
         if self._args.cache:
@@ -174,33 +176,39 @@
 
         sync_choice: SyncChoice
         if self._args.sync_bridge:
-            self.log.warn('--sync-bridge is the default, this parameter has no effect')
-            sync_choice = SyncChoice.BRIDGE
+            sync_choice = SyncChoice.BRIDGE_DEFAULT
         elif self._args.sync_v1_only:
-            sync_choice = SyncChoice.V1_ONLY
+            sync_choice = SyncChoice.V1_DEFAULT
         elif self._args.sync_v2_only:
+            self.log.warn('--sync-v2-only is the default, this parameter has no effect')
+            sync_choice = SyncChoice.V2_DEFAULT
+        elif self._args.x_remove_sync_v1:
             sync_choice = SyncChoice.V2_ONLY
         elif self._args.x_sync_bridge:
             self.log.warn('--x-sync-bridge is deprecated and will be removed, use --sync-bridge instead')
-            sync_choice = SyncChoice.BRIDGE
+            sync_choice = SyncChoice.BRIDGE_DEFAULT
         elif self._args.x_sync_v2_only:
             self.log.warn('--x-sync-v2-only is deprecated and will be removed, use --sync-v2-only instead')
-            sync_choice = SyncChoice.V2_ONLY
+            sync_choice = SyncChoice.V2_DEFAULT
         else:
-            sync_choice = SyncChoice.BRIDGE
+            # XXX: this is the default behavior when no parameter is given
+            sync_choice = SyncChoice.V2_DEFAULT
 
-        enable_sync_v1: bool
-        enable_sync_v2: bool
+        sync_v1_support: SyncSupportLevel
+        sync_v2_support: SyncSupportLevel
         match sync_choice:
-            case SyncChoice.V1_ONLY:
-                enable_sync_v1 = True
-                enable_sync_v2 = False
+            case SyncChoice.V1_DEFAULT:
+                sync_v1_support = SyncSupportLevel.ENABLED
+                sync_v2_support = SyncSupportLevel.DISABLED
+            case SyncChoice.V2_DEFAULT:
+                sync_v1_support = SyncSupportLevel.DISABLED
+                sync_v2_support = SyncSupportLevel.ENABLED
+            case SyncChoice.BRIDGE_DEFAULT:
+                sync_v1_support = SyncSupportLevel.ENABLED
+                sync_v2_support = SyncSupportLevel.ENABLED
             case SyncChoice.V2_ONLY:
-                enable_sync_v1 = False
-                enable_sync_v2 = True
-            case SyncChoice.BRIDGE:
-                enable_sync_v1 = True
-                enable_sync_v2 = True
+                sync_v1_support = SyncSupportLevel.UNAVAILABLE
+                sync_v2_support = SyncSupportLevel.ENABLED
 
         pubsub = PubSubManager(reactor)
 
@@ -212,11 +220,14 @@
                 event_storage=event_storage
             )
 
+        execution_manager = ExecutionManager(reactor)
+
         event_manager = EventManager(
             event_storage=event_storage,
             event_ws_factory=self.event_ws_factory,
             pubsub=pubsub,
-            reactor=reactor
+            reactor=reactor,
+            execution_manager=execution_manager,
         )
 
         if self._args.wallet_index and tx_storage.indexes is not None:
@@ -236,7 +247,11 @@
             full_verification = True
 
         soft_voided_tx_ids = set(settings.SOFT_VOIDED_TX_IDS)
-        consensus_algorithm = ConsensusAlgorithm(soft_voided_tx_ids, pubsub=pubsub)
+        consensus_algorithm = ConsensusAlgorithm(
+            soft_voided_tx_ids,
+            pubsub=pubsub,
+            execution_manager=execution_manager
+        )
 
         if self._args.x_enable_event_queue:
             self.log.info('--x-enable-event-queue flag provided. '
@@ -252,7 +267,8 @@
             feature_service=self.feature_service,
             tx_storage=tx_storage,
             support_features=self._args.signal_support,
-            not_support_features=self._args.signal_not_support
+            not_support_features=self._args.signal_not_support,
+            feature_storage=feature_storage,
         )
 
         test_mode = TestMode.DISABLED
@@ -281,12 +297,19 @@
             whitelist_only=False,
             rng=Random(),
         )
-        p2p_manager.add_sync_factory(SyncVersion.V1_1, SyncV11Factory(p2p_manager))
-        p2p_manager.add_sync_factory(SyncVersion.V2, SyncV2Factory(p2p_manager))
-        if enable_sync_v1:
-            p2p_manager.enable_sync_version(SyncVersion.V1_1)
-        if enable_sync_v2:
-            p2p_manager.enable_sync_version(SyncVersion.V2)
+        SyncSupportLevel.add_factories(p2p_manager, sync_v1_support, sync_v2_support)
+
+        vertex_handler = VertexHandler(
+            reactor=reactor,
+            settings=settings,
+            tx_storage=tx_storage,
+            verification_service=verification_service,
+            consensus=consensus_algorithm,
+            p2p_manager=p2p_manager,
+            feature_service=self.feature_service,
+            pubsub=pubsub,
+            wallet=self.wallet,
+        )
 
         self.manager = HathorManager(
             reactor,
@@ -305,10 +328,11 @@
             environment_info=get_environment_info(args=str(self._args), peer_id=peer_id.id),
             full_verification=full_verification,
             enable_event_queue=self._args.x_enable_event_queue,
-            feature_service=self.feature_service,
             bit_signaling_service=bit_signaling_service,
             verification_service=verification_service,
-            cpu_mining_service=cpu_mining_service
+            cpu_mining_service=cpu_mining_service,
+            execution_manager=execution_manager,
+            vertex_handler=vertex_handler,
         )
 
         if self._args.x_ipython_kernel:
@@ -354,7 +378,7 @@
             self.log.warn('--memory-indexes is implied for memory storage or JSON storage')
 
         for description in self._args.listen:
-            p2p_manager.add_listen_address(description)
+            p2p_manager.add_listen_address_description(description)
 
         if self._args.peer_id_blacklist:
             self.log.info('with peer id blacklist', blacklist=self._args.peer_id_blacklist)
@@ -384,14 +408,6 @@ def get_hostname(self) -> Optional[str]:
             print('Hostname discovered and set to {}'.format(hostname))
         return hostname
 
-    def create_peer_id(self) -> PeerId:
-        if not self._args.peer:
-            peer_id = PeerId()
-        else:
-            data = json.load(open(self._args.peer, 'r'))
-            peer_id = PeerId.create_from_json(data)
-        return peer_id
-
     def create_wallet(self) -> BaseWallet:
         if self._args.wallet == 'hd':
             kwargs: dict[str, Any] = {
diff --git a/hathor/builder/sysctl_builder.py b/hathor/builder/sysctl_builder.py
index e34cd4879..0b2131ad8 100644
--- a/hathor/builder/sysctl_builder.py
+++ b/hathor/builder/sysctl_builder.py
@@ -13,7 +13,13 @@
 # limitations under the License.
 
 from hathor.builder import BuildArtifacts
-from hathor.sysctl import ConnectionsManagerSysctl, HathorManagerSysctl, Sysctl, WebsocketManagerSysctl
+from hathor.sysctl import (
+    ConnectionsManagerSysctl,
+    FeatureActivationSysctl,
+    HathorManagerSysctl,
+    Sysctl,
+    WebsocketManagerSysctl,
+)
 
 
 class SysctlBuilder:
@@ -25,7 +31,11 @@ def __init__(self, artifacts: BuildArtifacts) -> None:
     def build(self) -> Sysctl:
         """Build the sysctl tree."""
         root = Sysctl()
-        root.put_child('core', HathorManagerSysctl(self.artifacts.manager))
+
+        core = HathorManagerSysctl(self.artifacts.manager)
+        core.put_child('features', FeatureActivationSysctl(self.artifacts.bit_signaling_service))
+
+        root.put_child('core', core)
         root.put_child('p2p', ConnectionsManagerSysctl(self.artifacts.p2p_manager))
 
         ws_factory = self.artifacts.manager.metrics.websocket_factory
diff --git a/hathor/cli/db_export.py b/hathor/cli/db_export.py
index 62187e021..b00d9596d 100644
--- a/hathor/cli/db_export.py
+++ b/hathor/cli/db_export.py
@@ -85,7 +85,6 @@ def iter_tx(self) -> Iterator['BaseTransaction']:
         soft_voided_ids = set(settings.SOFT_VOIDED_TX_IDS)
 
         for tx in self._iter_tx:
-            assert tx.hash is not None
             # XXX: if we're skipping voided transactions, we have to be careful not to skip soft-voided ones
             if self.skip_voided:
                 voided_by = tx.get_metadata().voided_by or set()
@@ -110,7 +109,6 @@ def run(self) -> None:
         # nothing, and it's probably better to finish sooner than expected, rather than later than expected
         total = self.tx_storage.get_vertices_count()
         for tx in tx_progress(self.iter_tx(), log=self.log, total=total):
-            assert tx.hash is not None
             tx_meta = tx.get_metadata()
             if tx.is_block:
                 assert isinstance(tx, Block)
diff --git a/hathor/cli/db_import.py b/hathor/cli/db_import.py
index a43132c40..6369890e4 100644
--- a/hathor/cli/db_import.py
+++ b/hathor/cli/db_import.py
@@ -86,9 +86,8 @@ def _import_txs(self) -> Iterator['BaseTransaction']:
                 sys.exit(2)
             tx = tx_or_block_from_bytes(tx_bytes)
             assert tx is not None
-            assert tx.hash is not None
             tx.storage = self.tx_storage
-            self.manager.on_new_tx(tx, quiet=True, fails_silently=False, skip_block_weight_verification=True)
+            self.manager.on_new_tx(tx, quiet=True, fails_silently=False)
             yield tx
diff --git a/hathor/cli/events_simulator/scenario.py b/hathor/cli/events_simulator/scenario.py
index 25723697a..8a2d20251 100644
--- a/hathor/cli/events_simulator/scenario.py
+++ b/hathor/cli/events_simulator/scenario.py
@@ -99,7 +99,6 @@ def simulate_reorg(simulator: 'Simulator', manager: 'HathorManager') -> None:
 def simulate_unvoided_transaction(simulator: 'Simulator', manager: 'HathorManager') -> None:
     from hathor.conf.get_settings import get_global_settings
     from hathor.simulator.utils import add_new_block, add_new_blocks, gen_new_tx
-    from hathor.util import not_none
 
     settings = get_global_settings()
     assert manager.wallet is not None
@@ -132,7 +131,7 @@ def simulate_unvoided_transaction(simulator: 'Simulator', manager: 'HathorManage
     block.parents = [
         block.parents[0],
         settings.GENESIS_TX1_HASH,
-        not_none(tx2.hash),
+        tx2.hash,
     ]
     block.update_hash()
     assert manager.propagate_tx(block, fails_silently=False)
diff --git a/hathor/cli/main.py b/hathor/cli/main.py
index a9c287cbf..a1ab960d2 100644
--- a/hathor/cli/main.py
+++ b/hathor/cli/main.py
@@ -49,6 +49,7 @@ def __init__(self) -> None:
             quick_test,
             replay_logs,
             reset_event_queue,
+            reset_feature_settings,
             run_node,
             shell,
             stratum_mining,
@@ -81,6 +82,8 @@ def __init__(self) -> None:
         self.add_cmd('oracle', 'oracle-encode-data', oracle_encode_data, 'Encode data and sign it with a private key')
         self.add_cmd('events', 'reset-event-queue', reset_event_queue, 'Delete all events and related data from the '
                      'database')
+        self.add_cmd('features', 'reset-feature-settings', reset_feature_settings, 'Delete existing Feature '
+                     'Activation settings from the database')
         self.add_cmd('dev', 'shell', shell, 'Run a Python shell')
         self.add_cmd('dev', 'quick_test', quick_test, 'Similar to run_node but will quit after receiving a tx')
         self.add_cmd('dev', 'generate_nginx_config', nginx_config, 'Generate nginx config from OpenAPI json')
diff --git a/hathor/cli/mining.py b/hathor/cli/mining.py
index 491aff1e4..a769ed3a0 100644
--- a/hathor/cli/mining.py
+++ b/hathor/cli/mining.py
@@ -119,7 +119,6 @@ def execute(args: Namespace) -> None:
 
     block_bytes = base64.b64decode(data['block_bytes'])
     block = Block.create_from_struct(block_bytes)
-    assert block.hash is not None
     assert isinstance(block, Block)
 
     print('Mining block with weight {}'.format(block.weight))
@@ -130,7 +129,6 @@ def execute(args: Namespace) -> None:
 
     block = q_out.get()
     block.update_hash()
-    assert block.hash is not None
 
     print('[{}] New block found: {} (nonce={}, weight={})'.format(datetime.datetime.now(), block.hash.hex(),
                                                                   block.nonce, block.weight))
@@ -139,7 +137,8 @@ def execute(args: Namespace) -> None:
 
     from hathor.conf.get_settings import get_global_settings
     from hathor.daa import DifficultyAdjustmentAlgorithm
-    from hathor.verification.verification_service import VerificationService, VertexVerifiers
+    from hathor.verification.verification_service import VerificationService
+    from hathor.verification.vertex_verifiers import VertexVerifiers
 
     settings = get_global_settings()
     daa = DifficultyAdjustmentAlgorithm(settings=settings)
     verifiers = VertexVerifiers.create_defaults(settings=settings, daa=daa, feature_service=Mock())
diff --git a/hathor/cli/openapi_files/openapi_base.json b/hathor/cli/openapi_files/openapi_base.json
index a3401d9a1..ea10d9442 100644
--- a/hathor/cli/openapi_files/openapi_base.json
+++ b/hathor/cli/openapi_files/openapi_base.json
@@ -7,7 +7,7 @@
     ],
     "info": {
         "title": "Hathor API",
-        "version": "0.59.0"
+        "version": "0.60.0"
     },
     "consumes": [
         "application/json"
diff --git a/hathor/cli/reset_feature_settings.py b/hathor/cli/reset_feature_settings.py
new file mode 100644
index 000000000..a4c8ea9e0
--- /dev/null
+++ b/hathor/cli/reset_feature_settings.py
@@ -0,0 +1,49 @@
+# Copyright 2023 Hathor Labs
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from argparse import ArgumentParser, Namespace
+
+from structlog import get_logger
+
+logger = get_logger()
+
+
+def create_parser() -> ArgumentParser:
+    from hathor.cli.util import create_parser
+
+    parser = create_parser()
+    parser.add_argument('--data', help='Data directory')
+
+    return parser
+
+
+def execute(args: Namespace) -> None:
+    from hathor.conf.get_settings import get_global_settings
+    from hathor.feature_activation.storage.feature_activation_storage import FeatureActivationStorage
+    from hathor.storage import RocksDBStorage
+
+    assert args.data is not None, '--data is required'
+
+    rocksdb_storage = RocksDBStorage(path=args.data)
+    feature_storage = FeatureActivationStorage(settings=get_global_settings(), rocksdb_storage=rocksdb_storage)
+
+    logger.info('removing feature activation settings...')
+    feature_storage.reset_settings()
+    logger.info('reset complete')
+
+
+def main():
+    parser = create_parser()
+    args = parser.parse_args()
+    execute(args)
diff --git a/hathor/cli/run_node.py b/hathor/cli/run_node.py
index 89f4e37de..166c7cef6 100644
--- a/hathor/cli/run_node.py
+++ b/hathor/cli/run_node.py
@@ -131,6 +131,8 @@ def create_parser(cls) -> ArgumentParser:
                                help='Enable running both sync protocols.')
         sync_args.add_argument('--sync-v1-only', action='store_true', help='Disable support for running sync-v2.')
         sync_args.add_argument('--sync-v2-only', action='store_true', help='Disable support for running sync-v1.')
+        sync_args.add_argument('--x-remove-sync-v1', action='store_true', help='Make sync-v1 unavailable, thus '
+                               'impossible to enable at runtime.')
         sync_args.add_argument('--x-sync-v2-only', action='store_true', help=SUPPRESS)  # old argument
         sync_args.add_argument('--x-sync-bridge', action='store_true', help=SUPPRESS)  # old argument
         parser.add_argument('--x-localhost-only', action='store_true', help='Only connect to peers on localhost')
@@ -221,7 +223,8 @@ def prepare(self, *, register_resources: bool = True) -> None:
             wallet=self.manager.wallet,
             rocksdb_storage=getattr(builder, 'rocksdb_storage', None),
             stratum_factory=self.manager.stratum_factory,
-            feature_service=self.manager._feature_service
+            feature_service=self.manager.vertex_handler._feature_service,
+            bit_signaling_service=self.manager._bit_signaling_service,
         )
 
     def start_sentry_if_possible(self) -> None:
@@ -264,9 +267,8 @@ def register_signal_handlers(self) -> None:
     def signal_usr1_handler(self, sig: int, frame: Any) -> None:
         """Called when USR1 signal is received."""
         try:
-            self.log.warn('USR1 received. Killing all connections...')
-            if self.manager and self.manager.connections:
-                self.manager.connections.disconnect_all_peers(force=True)
+            self.log.warn('USR1 received.')
+            self.manager.connections.reload_entrypoints_and_connections()
         except Exception:
             # see: https://docs.python.org/3/library/signal.html#note-on-signal-handlers-and-exceptions
             self.log.error('prevented exception from escaping the signal handler', exc_info=True)
diff --git a/hathor/cli/run_node_args.py b/hathor/cli/run_node_args.py
index 4df46f009..1d161cecf 100644
--- a/hathor/cli/run_node_args.py
+++ b/hathor/cli/run_node_args.py
@@ -65,6 +65,7 @@ class RunNodeArgs(BaseModel, extra=Extra.allow):
     enable_crash_api: bool
     x_sync_bridge: bool
     x_sync_v2_only: bool
+    x_remove_sync_v1: bool
     sync_bridge: bool
     sync_v1_only: bool
     sync_v2_only: bool
diff --git a/hathor/client.py b/hathor/client.py
index 4f7ab4475..1c68f6787 100644
--- a/hathor/client.py
+++ b/hathor/client.py
@@ -338,7 +338,6 @@ def _on_new_tx(self, key: HathorEvents, args: EventArguments) -> None:
 
     async def submit(self, block: Block) -> Optional[BlockTemplate]:
         if await self.submit(block):
-            assert block.hash is not None
             return self.manager.make_block_template(block.hash)
         else:
             return None
@@ -397,7 +396,6 @@ def create_tx_from_dict(data: dict[str, Any], update_hash: bool = False,
     tx = cls(**data)
     if update_hash:
         tx.update_hash()
-        assert tx.hash is not None
     if hash_bytes:
         assert tx.hash == hash_bytes, f'Hashes differ: {tx.hash!r} != {hash_bytes!r}'
     if metadata:
diff --git a/hathor/conf/mainnet.py b/hathor/conf/mainnet.py
index e88ac4cf3..a0cc0372d 100644
--- a/hathor/conf/mainnet.py
+++ b/hathor/conf/mainnet.py
@@ -237,6 +237,18 @@
             version='0.59.0',
             signal_support_by_default=False,
         ),
+        Feature.INCREASE_MAX_MERKLE_PATH_LENGTH: Criteria(
+            bit=0,
+            # N = 4_475_520
+            # Expected to be reached around Monday, 2024-05-13 17:31:03 GMT
+            # Right now the best block is 4_463_619 at Thursday, 2024-05-09 14:20:33 GMT
+            start_height=4_475_520,
+            timeout_height=4_798_080,  # N + 16 * 20160 (16 weeks after the start)
+            minimum_activation_height=4_636_800,  # N + 8 * 20160 (8 weeks after the start)
+            lock_in_on_timeout=False,
+            version='0.60.0',
+            signal_support_by_default=True,
+        ),
     }
 )
 )
diff --git a/hathor/conf/mainnet.yml b/hathor/conf/mainnet.yml
index 9c51cde42..dea15cf56 100644
--- a/hathor/conf/mainnet.yml
+++ b/hathor/conf/mainnet.yml
@@ -221,3 +221,17 @@
       lock_in_on_timeout: false
       version: 0.59.0
       signal_support_by_default: false
+
+    #### Actual features ####
+
+    INCREASE_MAX_MERKLE_PATH_LENGTH:
+      bit: 0
+      # N = 4_475_520
+      # Expected to be reached around Monday, 2024-05-13 17:31:03 GMT
+      # Right now the best block is 4_463_619 at Thursday, 2024-05-09 14:20:33 GMT
+      start_height: 4_475_520
+      timeout_height: 4_798_080  # N + 16 * 20160 (16 weeks after the start)
+      minimum_activation_height: 4_636_800  # N + 8 * 20160 (8 weeks after the start)
+      lock_in_on_timeout: false
+      version: 0.60.0
+      signal_support_by_default: true
diff --git a/hathor/conf/settings.py b/hathor/conf/settings.py
index 62718bf2a..682279f6c 100644
--- a/hathor/conf/settings.py
+++ b/hathor/conf/settings.py
@@ -423,6 +423,10 @@ def GENESIS_TX2_TIMESTAMP(self) -> int:
     OLD_MAX_MERKLE_PATH_LENGTH: int = 12
     NEW_MAX_MERKLE_PATH_LENGTH: int = 20
 
+    # Maximum number of tx tips to accept in the initial phase of the mempool sync. 1000 is arbitrary, but it
+    # should be more than enough for the foreseeable future.
+    MAX_MEMPOOL_RECEIVING_TIPS: int = 1000
+
     # Used to enable nano contracts.
     #
     # This should NEVER be enabled for mainnet and testnet, since both networks will
diff --git a/hathor/consensus/block_consensus.py b/hathor/consensus/block_consensus.py
index 515b96a07..7ce12f458 100644
--- a/hathor/consensus/block_consensus.py
+++ b/hathor/consensus/block_consensus.py
@@ -13,14 +13,14 @@
 # limitations under the License.
 
 from itertools import chain
-from typing import TYPE_CHECKING, Iterable, Optional, cast
+from typing import TYPE_CHECKING, Any, Iterable, Optional, cast
 
 from structlog import get_logger
 
 from hathor.conf.get_settings import get_global_settings
 from hathor.profiler import get_cpu_profiler
 from hathor.transaction import BaseTransaction, Block, Transaction, sum_weights
-from hathor.util import classproperty, not_none
+from hathor.util import classproperty
 
 if TYPE_CHECKING:
     from hathor.consensus.context import ConsensusAlgorithmContext
@@ -39,7 +39,7 @@ def __init__(self, context: 'ConsensusAlgorithmContext') -> None:
         self.context = context
 
     @classproperty
-    def log(cls):
+    def log(cls) -> Any:
         """ This is a workaround because of a bug on structlog (or abc).
 
         See: https://github.com/hynek/structlog/issues/229
@@ -107,7 +107,6 @@ def update_voided_info(self, block: Block) -> None:
             return
 
         assert block.storage is not None
-        assert block.hash is not None
 
         storage = block.storage
         assert storage.indexes is not None
@@ -216,8 +215,8 @@ def update_voided_info(self, block: Block) -> None:
             if common_block not in heads:
                 self.context.mark_as_reorg(common_block)
         else:
-            best_block_tips = [not_none(blk.hash) for blk in heads]
-            best_block_tips.append(not_none(block.hash))
+            best_block_tips = [blk.hash for blk in heads]
+            best_block_tips.append(block.hash)
             storage.update_best_block_tips_cache(best_block_tips)
             if not meta.voided_by:
                 self.context.mark_as_reorg(common_block)
@@ -231,7 +230,6 @@ def union_voided_by_from_parents(self, block: Block) -> set[bytes]:
         """
         voided_by: set[bytes] = set()
         for parent in block.get_parents():
-            assert parent.hash is not None
             parent_meta = parent.get_metadata()
             voided_by2 = parent_meta.voided_by
             if voided_by2:
@@ -370,7 +368,6 @@ def add_voided_by(self, block: Block, voided_hash: Optional[bytes] = None) -> bo
         the block's own hash.
         """
         assert block.storage is not None
-        assert block.hash is not None
 
         storage = block.storage
 
@@ -401,7 +398,6 @@ def remove_voided_by(self, block: Block, voided_hash: Optional[bytes] = None) ->
         the block's own hash.
         """
         assert block.storage is not None
-        assert block.hash is not None
 
         storage = block.storage
 
@@ -454,7 +450,6 @@ def _score_block_dfs(self, block: BaseTransaction, used: set[bytes],
         """ Internal method to run a DFS. It is used by `calculate_score()`.
         """
         assert block.storage is not None
-        assert block.hash is not None
         assert block.is_block
 
         storage = block.storage
@@ -475,7 +470,6 @@ def _score_block_dfs(self, block: BaseTransaction, used: set[bytes],
                 from hathor.transaction.storage.traversal import BFSTimestampWalk
                 bfs = BFSTimestampWalk(storage, is_dag_verifications=True, is_left_to_right=False)
                 for tx in bfs.run(parent, skip_root=False):
-                    assert tx.hash is not None
                     assert not tx.is_block
 
                     if tx.hash in used:
diff --git a/hathor/consensus/consensus.py b/hathor/consensus/consensus.py
index 34167d973..0317c2fab 100644
--- a/hathor/consensus/consensus.py
+++ b/hathor/consensus/consensus.py
@@ -18,6 +18,7 @@
 from hathor.consensus.block_consensus import BlockConsensusAlgorithmFactory
 from hathor.consensus.context import ConsensusAlgorithmContext
 from hathor.consensus.transaction_consensus import TransactionConsensusAlgorithmFactory
+from hathor.execution_manager import ExecutionManager
 from hathor.profiler import get_cpu_profiler
 from hathor.pubsub import HathorEvents, PubSubManager
 from hathor.transaction import BaseTransaction
@@ -55,13 +56,20 @@ class ConsensusAlgorithm:
     b0 will not be propagated to the voided_by of b1, b2, and b3.
     """
 
-    def __init__(self, soft_voided_tx_ids: set[bytes], pubsub: PubSubManager) -> None:
+    def __init__(
+        self,
+        soft_voided_tx_ids: set[bytes],
+        pubsub: PubSubManager,
+        *,
+        execution_manager: ExecutionManager
+    ) -> None:
         self._settings = get_global_settings()
         self.log = logger.new()
         self._pubsub = pubsub
         self.soft_voided_tx_ids = frozenset(soft_voided_tx_ids)
         self.block_algorithm_factory = BlockConsensusAlgorithmFactory()
         self.transaction_algorithm_factory = TransactionConsensusAlgorithmFactory()
+        self._execution_manager = execution_manager
 
     def create_context(self) -> ConsensusAlgorithmContext:
         """Handy method to create a context that can be used to access block and transaction algorithms."""
@@ -75,11 +83,11 @@ def update(self, base: BaseTransaction) -> None:
         assert meta.validation.is_valid()
         try:
             self._unsafe_update(base)
-        except Exception:
+        except BaseException:
             meta.add_voided_by(self._settings.CONSENSUS_FAIL_ID)
             assert base.storage is not None
             base.storage.save_transaction(base, only_metadata=True)
-            raise
+            self._execution_manager.crash_and_exit(reason=f'Consensus update failed for tx {base.hash_hex}')
 
     def _unsafe_update(self, base: BaseTransaction) -> None:
         """Run a consensus update with its own context, indexes will be updated accordingly."""
diff --git a/hathor/consensus/transaction_consensus.py b/hathor/consensus/transaction_consensus.py
index 17a32202d..358503c78 100644
--- a/hathor/consensus/transaction_consensus.py
+++ b/hathor/consensus/transaction_consensus.py
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from typing import TYPE_CHECKING, Iterable, cast
+from typing import TYPE_CHECKING, Any, Iterable, cast
 
 from structlog import get_logger
 
@@ -38,7 +38,7 @@ def __init__(self, context: 'ConsensusAlgorithmContext') -> None:
         self.context = context
 
     @classproperty
-    def log(cls):
+    def log(cls) -> Any:
         """ This is a workaround because of a bug on structlog (or abc).
 
         See: https://github.com/hynek/structlog/issues/229
@@ -59,7 +59,6 @@ def mark_inputs_as_used(self, tx: Transaction) -> None:
 
     def mark_input_as_used(self, tx: Transaction, txin: TxInput) -> None:
         """ Mark a given input as used
         """
-        assert tx.hash is not None
         assert tx.storage is not None
 
         spent_tx = tx.storage.get_transaction(txin.tx_id)
@@ -117,7 +116,6 @@ def check_twins(self, tx: Transaction, transactions: Iterable[BaseTransaction])
             :param transactions: list of transactions to be checked if they are twins with self
         """
-        assert tx.hash is not None
         assert tx.storage is not None
 
         # Getting tx metadata to save the new twins
@@ -128,7 +126,6 @@ def check_twins(self, tx: Transaction, transactions: Iterable[BaseTransaction])
         sorted_outputs = sorted(tx.outputs, key=lambda x: (x.script, x.value))
 
         for candidate in transactions:
-            assert candidate.hash is not None
 
             # If quantity of inputs is different, it's not a twin.
             if len(candidate.inputs) != len(tx.inputs):
@@ -170,7 +167,6 @@ def check_twins(self, tx: Transaction, transactions: Iterable[BaseTransaction])
     def update_voided_info(self, tx: Transaction) -> None:
         """ This method should be called only once when the transactions is added to the DAG.
         """
-        assert tx.hash is not None
         assert tx.storage is not None
 
         voided_by: set[bytes] = set()
@@ -267,7 +263,6 @@ def check_conflicts(self, tx: Transaction) -> None:
         The verification is made for each input, and `self` is only marked as winner if it wins in all its inputs.
         """
-        assert tx.hash is not None
         assert tx.storage is not None
         self.log.debug('tx.check_conflicts', tx=tx.hash_hex)
 
@@ -315,7 +310,7 @@ def check_conflicts(self, tx: Transaction) -> None:
 
         # If we got here, either it was a tie or we won.
         # So, let's void the conflict txs.
-        for conflict_tx in conflict_list:
+        for conflict_tx in sorted(conflict_list, key=lambda x: x.timestamp, reverse=True):
             self.mark_as_voided(conflict_tx)
 
         if not tie_list:
@@ -326,7 +321,6 @@ def mark_as_winner(self, tx: Transaction) -> None:
         """ Mark a transaction as winner when it has a conflict and its aggregated weight is the greatest one.
         """
-        assert tx.hash is not None
         self.log.debug('tx.mark_as_winner', tx=tx.hash_hex)
         meta = tx.get_metadata()
         assert bool(meta.conflict_with)  # FIXME: this looks like a runtime guarantee, MUST NOT be an assert
@@ -341,7 +335,6 @@ def remove_voided_by(self, tx: Transaction, voided_hash: bytes) -> bool:
         """
         from hathor.transaction.storage.traversal import BFSTimestampWalk
 
-        assert tx.hash is not None
         assert tx.storage is not None
 
         meta = tx.get_metadata()
@@ -382,7 +375,6 @@ def mark_as_voided(self, tx: Transaction) -> None:
         """ Mark a transaction as voided when it has a conflict and its aggregated weight is NOT the greatest one.
         """
-        assert tx.hash is not None
         self.log.debug('tx.mark_as_voided', tx=tx.hash_hex)
         meta = tx.get_metadata()
         assert bool(meta.conflict_with)
@@ -395,7 +387,6 @@ def add_voided_by(self, tx: Transaction, voided_hash: bytes) -> bool:
         """ Add a hash from `meta.voided_by` and its descendants (both from verification DAG and funds tree).
         """
-        assert tx.hash is not None
         assert tx.storage is not None
 
         meta = tx.get_metadata()
@@ -415,7 +406,6 @@ def add_voided_by(self, tx: Transaction, voided_hash: bytes) -> bool:
         check_list: list[Transaction] = []
         for tx2 in bfs.run(tx, skip_root=False):
             assert tx2.storage is not None
-            assert tx2.hash is not None
 
             meta2 = tx2.get_metadata()
 
             if tx2.is_block:
diff --git a/hathor/daa.py b/hathor/daa.py
index 680ef4dfc..afd309726 100644
--- a/hathor/daa.py
+++ b/hathor/daa.py
@@ -21,17 +21,17 @@
 from enum import IntFlag
 from math import log
-from typing import TYPE_CHECKING, ClassVar, Optional
+from typing import TYPE_CHECKING, Callable, ClassVar, Optional
 
 from structlog import get_logger
 
 from hathor.conf.settings import HathorSettings
 from hathor.profiler import get_cpu_profiler
-from hathor.util import iwindows, not_none
+from hathor.types import VertexId
+from hathor.util import iwindows
 
 if TYPE_CHECKING:
     from hathor.transaction import Block, Transaction
-    from hathor.transaction.storage.vertex_storage_protocol import VertexStorageProtocol
 
 logger = get_logger()
 cpu = get_cpu_profiler()
@@ -58,17 +58,43 @@ def __init__(self, *, settings: HathorSettings, test_mode: TestMode = TestMode.D
         DifficultyAdjustmentAlgorithm.singleton = self
 
     @cpu.profiler(key=lambda _, block: 'calculate_block_difficulty!{}'.format(block.hash.hex()))
-    def calculate_block_difficulty(self, block: 'Block') -> float:
-        """ Calculate block weight according to the ascendents of `block`, using calculate_next_weight."""
+    def calculate_block_difficulty(self, block: 'Block', parent_block_getter: Callable[['Block'], 'Block']) -> float:
+        """ Calculate block weight according to the ascendants of `block`, using calculate_next_weight."""
         if self.TEST_MODE & TestMode.TEST_BLOCK_WEIGHT:
             return 1.0
 
         if block.is_genesis:
             return self.MIN_BLOCK_WEIGHT
 
-        return self.calculate_next_weight(block.get_block_parent(), block.timestamp, not_none(block.storage))
-
-    def calculate_next_weight(self, parent_block: 'Block', timestamp: int, storage: 'VertexStorageProtocol') -> float:
+        parent_block = parent_block_getter(block)
+        return self.calculate_next_weight(parent_block, block.timestamp, parent_block_getter)
+
+    def _calculate_N(self, parent_block: 'Block') -> int:
+        """Calculate the N value for the `calculate_next_weight` algorithm."""
+        return min(2 * self._settings.BLOCK_DIFFICULTY_N_BLOCKS, parent_block.get_height() - 1)
+
+    def get_block_dependencies(
+        self,
+        block: 'Block',
+        parent_block_getter: Callable[['Block'], 'Block'],
+    ) -> list[VertexId]:
+        """Return the ids of the required blocks to call `calculate_block_difficulty` for the provided block."""
+        parent_block = parent_block_getter(block)
+        N = self._calculate_N(parent_block)
+        ids: list[VertexId] = [parent_block.hash]
+
+        while len(ids) <= N + 1:
+            parent_block = parent_block_getter(parent_block)
+            ids.append(parent_block.hash)
+
+        return ids
+
+    def calculate_next_weight(
+        self,
+        parent_block: 'Block',
+        timestamp: int,
+        parent_block_getter: Callable[['Block'], 'Block'],
+    ) -> float:
         """ Calculate the next block weight, aka DAA/difficulty adjustment algorithm.
 
         The algorithm used is described in [RFC 22](https://gitlab.com/HathorNetwork/rfcs/merge_requests/22).
@@ -81,7 +107,7 @@
         from hathor.transaction import sum_weights
 
         root = parent_block
-        N = min(2 * self._settings.BLOCK_DIFFICULTY_N_BLOCKS, parent_block.get_height() - 1)
+        N = self._calculate_N(parent_block)
         K = N // 2
         T = self.AVG_TIME_BETWEEN_BLOCKS
         S = 5
@@ -91,8 +117,7 @@
         blocks: list['Block'] = []
         while len(blocks) < N + 1:
             blocks.append(root)
-            root = storage.get_parent_block(root)
-            assert root is not None
+            root = parent_block_getter(root)
 
         # TODO: revise if this assertion can be safely removed
         assert blocks == sorted(blocks, key=lambda tx: -tx.timestamp)
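The DAA refactor above threads a `parent_block_getter` callable through the weight calculation instead of a storage object. A sketch of how a call site can adapt, assuming a storage that still offers the `get_parent_block()` helper used by the removed code (`tx_storage` and `block` are placeholders from the calling context):

```python
from hathor.conf.get_settings import get_global_settings
from hathor.daa import DifficultyAdjustmentAlgorithm

settings = get_global_settings()
daa = DifficultyAdjustmentAlgorithm(settings=settings)

# 'tx_storage' and 'block' are assumed to exist in the calling context.
def parent_block_getter(b):
    return tx_storage.get_parent_block(b)

weight = daa.calculate_block_difficulty(block, parent_block_getter)

# get_block_dependencies() returns the N+1 ancestor ids the calculation walks,
# which a caller can use to prefetch exactly those blocks.
dependency_ids = daa.get_block_dependencies(block, parent_block_getter)
```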
diff --git a/hathor/event/event_manager.py b/hathor/event/event_manager.py
index 6306707c6..748abe90a 100644
--- a/hathor/event/event_manager.py
+++ b/hathor/event/event_manager.py
@@ -22,6 +22,7 @@
 from hathor.event.model.node_state import NodeState
 from hathor.event.storage import EventStorage
 from hathor.event.websocket import EventWebsocketFactory
+from hathor.execution_manager import ExecutionManager
 from hathor.pubsub import EventArguments, HathorEvents, PubSubManager
 from hathor.reactor import ReactorProtocol as Reactor
 from hathor.transaction import BaseTransaction
@@ -70,6 +71,7 @@ def __init__(
         event_storage: EventStorage,
         pubsub: PubSubManager,
         reactor: Reactor,
+        execution_manager: ExecutionManager,
         event_ws_factory: Optional[EventWebsocketFactory] = None,
     ) -> None:
         self.log = logger.new()
@@ -78,6 +80,7 @@ def __init__(
         self._event_storage = event_storage
         self._event_ws_factory = event_ws_factory
         self._pubsub = pubsub
+        self._execution_manager = execution_manager
 
     def start(self, peer_id: str) -> None:
         """Starts the EventManager."""
@@ -85,6 +88,7 @@ def start(self, peer_id: str) -> None:
         assert self._event_ws_factory is not None, 'Cannot start, EventWebsocketFactory is not set'
         assert self.get_event_queue_state() is True, 'Cannot start, event queue feature is disabled'
 
+        self._execution_manager.register_on_crash_callback(self.on_full_node_crash)
         self._previous_node_state = self._event_storage.get_node_state()
 
         if self._should_reload_events():
@@ -133,7 +137,7 @@ def _subscribe_events(self) -> None:
         for event in _SUBSCRIBE_EVENTS:
             self._pubsub.subscribe(event, self._handle_hathor_event)
 
-    def load_started(self):
+    def load_started(self) -> None:
         if not self._is_running:
             return
 
@@ -143,7 +147,7 @@ def load_started(self):
         )
         self._event_storage.save_node_state(NodeState.LOAD)
 
-    def load_finished(self):
+    def load_finished(self) -> None:
         if not self._is_running:
             return
 
@@ -153,6 +157,15 @@ def load_finished(self):
         )
         self._event_storage.save_node_state(NodeState.SYNC)
 
+    def on_full_node_crash(self) -> None:
+        if not self._is_running:
+            return
+
+        self._handle_event(
+            event_type=EventType.FULL_NODE_CRASHED,
+            event_args=EventArguments(),
+        )
+
     def _handle_hathor_event(self, hathor_event: HathorEvents, event_args: EventArguments) -> None:
         """Handles a PubSub 'HathorEvents' event."""
         event_type = EventType.from_hathor_event(hathor_event)
diff --git a/hathor/event/model/base_event.py b/hathor/event/model/base_event.py
index 8f15fca88..e59db1f7c 100644
--- a/hathor/event/model/base_event.py
+++ b/hathor/event/model/base_event.py
@@ -12,11 +12,11 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from typing import Optional
+from typing import Any, Optional
 
 from pydantic import NonNegativeInt, validator
 
-from hathor.event.model.event_data import EventData
+from hathor.event.model.event_data import BaseEventData, EventData
 from hathor.event.model.event_type import EventType
 from hathor.pubsub import EventArguments
 from hathor.utils.pydantic import BaseModel
@@ -58,7 +58,7 @@ def from_event_arguments(
         )
 
     @validator('data')
-    def data_type_must_match_event_type(cls, v, values):
+    def data_type_must_match_event_type(cls, v: BaseEventData, values: dict[str, Any]) -> BaseEventData:
         event_type = EventType(values['type'])
         expected_data_type = event_type.data_type()
diff --git a/hathor/event/model/event_data.py b/hathor/event/model/event_data.py
index f3003d0cd..cf22fa424 100644
--- a/hathor/event/model/event_data.py
+++ b/hathor/event/model/event_data.py
@@ -101,6 +101,7 @@ class TxData(BaseEventData, extra=Extra.ignore):
     hash: str
     nonce: Optional[int] = None
     timestamp: int
+    signal_bits: int
     version: int
     weight: float
     inputs: list['TxInput']
diff --git a/hathor/event/model/event_type.py b/hathor/event/model/event_type.py
index 7c697fbc8..617ea74d8 100644
--- a/hathor/event/model/event_type.py
+++ b/hathor/event/model/event_type.py
@@ -25,6 +25,7 @@ class EventType(Enum):
     REORG_STARTED = 'REORG_STARTED'
     REORG_FINISHED = 'REORG_FINISHED'
     VERTEX_METADATA_CHANGED = 'VERTEX_METADATA_CHANGED'
+    FULL_NODE_CRASHED = 'FULL_NODE_CRASHED'
 
     @classmethod
     def from_hathor_event(cls, hathor_event: HathorEvents) -> 'EventType':
@@ -53,4 +54,5 @@ def data_type(self) -> type[BaseEventData]:
             EventType.REORG_STARTED: ReorgData,
             EventType.REORG_FINISHED: EmptyData,
             EventType.VERTEX_METADATA_CHANGED: TxData,
+            EventType.FULL_NODE_CRASHED: EmptyData,
         }
diff --git a/hathor/event/resources/event.py b/hathor/event/resources/event.py
index febc2bb62..87e7ada9b 100644
--- a/hathor/event/resources/event.py
+++ b/hathor/event/resources/event.py
@@ -16,6 +16,7 @@
 from typing import Optional
 
 from pydantic import Field, NonNegativeInt
+from twisted.web.http import Request
 
 from hathor.api_util import Resource, set_cors
 from hathor.cli.openapi_files.register import register_resource
@@ -35,7 +36,7 @@ def __init__(self, event_manager: Optional[EventManager]):
         super().__init__()
         self.event_manager = event_manager
 
-    def render_GET(self, request):
+    def render_GET(self, request: Request) -> bytes:
         request.setHeader(b'content-type', b'application/json; charset=utf-8')
         set_cors(request, 'GET')
diff --git a/hathor/event/websocket/protocol.py b/hathor/event/websocket/protocol.py
index 102617546..c8da7e1f6 100644
--- a/hathor/event/websocket/protocol.py
+++ b/hathor/event/websocket/protocol.py
@@ -12,13 +12,14 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from typing import TYPE_CHECKING, Callable, Optional
+from typing import TYPE_CHECKING, Optional
 
 from autobahn.exception import Disconnected
 from autobahn.twisted.websocket import WebSocketServerProtocol
 from autobahn.websocket import ConnectionRequest
 from pydantic import ValidationError
 from structlog import get_logger
+from typing_extensions import assert_never
 
 from hathor.event.websocket.request import AckRequest, Request, RequestWrapper, StartStreamRequest, StopStreamRequest
 from hathor.event.websocket.response import EventResponse, InvalidRequestResponse, InvalidRequestType, Response
@@ -50,7 +51,7 @@ class EventWebsocketProtocol(WebSocketServerProtocol):
     # Whether the stream is enabled or not.
     _stream_is_active: bool = False
 
-    def __init__(self):
+    def __init__(self) -> None:
         super().__init__()
         self.log = logger.new()
@@ -102,18 +103,11 @@ def onMessage(self, payload: bytes, isBinary: bool) -> None:
 
     def _handle_request(self, request: Request) -> None:
         """Handles a request message according to its type."""
-        # This could be a pattern match in Python 3.10
-        request_type = type(request)
-        handlers: dict[type, Callable] = {
-            StartStreamRequest: self._handle_start_stream_request,
-            AckRequest: self._handle_ack_request,
-            StopStreamRequest: lambda _: self._handle_stop_stream_request()
-        }
-        handle_fn = handlers.get(request_type)
-
-        assert handle_fn is not None, f'cannot handle request of unknown type "{request_type}"'
-
-        handle_fn(request)
+        match request:
+            case StartStreamRequest(): self._handle_start_stream_request(request)
+            case AckRequest(): self._handle_ack_request(request)
+            case StopStreamRequest(): self._handle_stop_stream_request()
+            case _: assert_never(request)
 
     def _handle_start_stream_request(self, request: StartStreamRequest) -> None:
         """
diff --git a/hathor/event/websocket/request.py b/hathor/event/websocket/request.py
index 446c62840..64446887d 100644
--- a/hathor/event/websocket/request.py
+++ b/hathor/event/websocket/request.py
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from typing import Annotated, Literal, Optional, Union
+from typing import Annotated, Literal, Optional
 
 from pydantic import Field, NonNegativeInt
 
@@ -54,7 +54,7 @@ class StopStreamRequest(BaseModel):
     type: Literal['STOP_STREAM']
 
 
-Request = Annotated[Union[StartStreamRequest, AckRequest, StopStreamRequest], Field(discriminator='type')]
+Request = Annotated[StartStreamRequest | AckRequest | StopStreamRequest, Field(discriminator='type')]
 
 
 class RequestWrapper(BaseModel):
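The protocol change above swaps a handler dict for structural pattern matching over the `Request` union, with `assert_never()` providing a static exhaustiveness check: if a new request type is added to the union without a matching `case`, mypy reports the wildcard arm. A self-contained sketch of the same pattern with hypothetical message types:

```python
from dataclasses import dataclass

from typing_extensions import assert_never


@dataclass
class Ping:
    nonce: int


@dataclass
class Quit:
    pass


# A closed union, mirroring the discriminated Request alias in the diff.
Message = Ping | Quit


def handle(msg: Message) -> str:
    match msg:
        case Ping(): return f'pong {msg.nonce}'
        case Quit(): return 'bye'
        case _: assert_never(msg)  # mypy errors here if Message gains a member


assert handle(Ping(nonce=7)) == 'pong 7'
```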
+ +import sys +from typing import Callable, NoReturn + +from structlog import get_logger + +from hathor.reactor import ReactorProtocol + +logger = get_logger() + + +class ExecutionManager: + """Class to manage actions related to full node execution.""" + __slots__ = ('_log', '_reactor', '_on_crash_callbacks') + + def __init__(self, reactor: ReactorProtocol) -> None: + self._log = logger.new() + self._reactor = reactor + self._on_crash_callbacks: list[tuple[int, Callable[[], None]]] = [] + + def register_on_crash_callback(self, callback: Callable[[], None], *, priority: int = 0) -> None: + """Register a callback to be executed before the full node exits.""" + self._on_crash_callbacks.append((priority, callback)) + + def _run_on_crash_callbacks(self) -> None: + """Run all registered on crash callbacks.""" + callbacks = sorted(self._on_crash_callbacks, reverse=True, key=lambda item: item[0]) + + for _, callback in callbacks: + try: + callback() + except BaseException as e: + self._log.critical(f'Failed execution of on_crash callback "{callback}". Exception: {repr(e)}') + + def crash_and_exit(self, *, reason: str) -> NoReturn: + """ + Calling this function is a very extreme thing to do, so be careful. It should only be called when a + critical, unrecoverable failure happens. It crashes and exits the full node, maybe rendering the database + corrupted, and requiring manual intervention. In other words, a restart with a clean database (from scratch + or a snapshot) may be required. + """ + self._run_on_crash_callbacks() + self._log.critical( + 'Critical failure occurred, causing the full node to halt execution. Manual intervention is required.', + reason=reason, + exc_info=True + ) + # We sequentially call more extreme exit methods, so the full node exits as gracefully as possible, while + # guaranteeing that it will indeed exit. + self._reactor.stop() + self._reactor.crash() + sys.exit(-1) diff --git a/hathor/feature_activation/bit_signaling_service.py b/hathor/feature_activation/bit_signaling_service.py index a8f7f09a4..639eb1a5c 100644 --- a/hathor/feature_activation/bit_signaling_service.py +++ b/hathor/feature_activation/bit_signaling_service.py @@ -19,6 +19,7 @@ from hathor.feature_activation.model.criteria import Criteria from hathor.feature_activation.model.feature_state import FeatureState from hathor.feature_activation.settings import Settings as FeatureSettings +from hathor.feature_activation.storage.feature_activation_storage import FeatureActivationStorage from hathor.transaction import Block from hathor.transaction.storage import TransactionStorage @@ -32,7 +33,8 @@ class BitSignalingService: '_feature_service', '_tx_storage', '_support_features', - '_not_support_features' + '_not_support_features', + '_feature_storage', ) def __init__( @@ -42,7 +44,8 @@ def __init__( feature_service: FeatureService, tx_storage: TransactionStorage, support_features: set[Feature], - not_support_features: set[Feature] + not_support_features: set[Feature], + feature_storage: FeatureActivationStorage | None, ) -> None: self._log = logger.new() self._feature_settings = feature_settings @@ -50,14 +53,19 @@ def __init__( self._tx_storage = tx_storage self._support_features = support_features self._not_support_features = not_support_features + self._feature_storage = feature_storage self._validate_support_intersection() + self._feature_service.bit_signaling_service = self def start(self) -> None: """ Log information related to bit signaling. 
Must be called after the storage is ready and migrations have been applied. """ + if self._feature_storage: + self._feature_storage.validate_settings() + best_block = self._tx_storage.get_best_block() self._warn_non_signaling_features(best_block) @@ -74,21 +82,66 @@ def generate_signal_bits(self, *, block: Block, log: bool = False) -> int: Returns: a number that represents the signal bits in binary. """ - signaling_features = self._get_signaling_features(block) + feature_signals = self._calculate_feature_signals(block=block, log=log) signal_bits = 0 + for feature, (criteria, enable_bit) in feature_signals.items(): + signal_bits |= int(enable_bit) << criteria.bit + + return signal_bits + + def _calculate_feature_signals(self, *, block: Block, log: bool = False) -> dict[Feature, tuple[Criteria, bool]]: + """ + Calculate the signal value for each signaling feature. + + Args: + block: the block that is used to determine signaling features. + log: whether to log the signal for each feature. + + Returns: a dict with each feature paired with its criteria and its signal value. + """ + signaling_features = self._get_signaling_features(block) + signals: dict[Feature, tuple[Criteria, bool]] = {} + for feature, criteria in signaling_features.items(): default_enable_bit = criteria.signal_support_by_default support = feature in self._support_features not_support = feature in self._not_support_features enable_bit = (default_enable_bit or support) and not not_support + signals[feature] = (criteria, enable_bit) if log: self._log_signal_bits(feature, enable_bit, support, not_support) - signal_bits |= int(enable_bit) << criteria.bit + return signals - return signal_bits + def get_support_features(self) -> list[Feature]: + """Get a list of features with enabled support.""" + best_block = self._tx_storage.get_best_block() + feature_signals = self._calculate_feature_signals(block=best_block) + return [feature for feature, (_, enable_bit) in feature_signals.items() if enable_bit] + + def get_not_support_features(self) -> list[Feature]: + """Get a list of features with disabled support.""" + best_block = self._tx_storage.get_best_block() + feature_signals = self._calculate_feature_signals(block=best_block) + return [feature for feature, (_, enable_bit) in feature_signals.items() if not enable_bit] + + def add_feature_support(self, feature: Feature) -> None: + """Add explicit support for a feature by enabling its signaling bit.""" + self._not_support_features.discard(feature) + self._support_features.add(feature) + + def remove_feature_support(self, feature: Feature) -> None: + """Remove explicit support for a feature by disabling its signaling bit.""" + self._support_features.discard(feature) + self._not_support_features.add(feature) + + def on_must_signal(self, feature: Feature) -> None: + """ + When the MUST_SIGNAL phase is reached, feature support is automatically enabled. 
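To make the bit arithmetic concrete, here is a small self-contained sketch (editorial, not part of the patch) of how each feature's enable_bit folds into the final value, mirroring signal_bits |= int(enable_bit) << criteria.bit from generate_signal_bits above:

# Editorial sketch: composing signal bits from per-feature decisions.
def compose_signal_bits(enable_bit_by_position: dict[int, bool]) -> int:
    """Map each criteria bit position to its enable_bit and fold them together."""
    signal_bits = 0
    for bit, enable_bit in enable_bit_by_position.items():
        signal_bits |= int(enable_bit) << bit
    return signal_bits

# Features signaling on bits 0 and 3, with bit 1 explicitly not supported:
assert compose_signal_bits({0: True, 1: False, 3: True}) == 0b1001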
+ """ + self.add_feature_support(feature) def _log_signal_bits(self, feature: Feature, enable_bit: bool, support: bool, not_support: bool) -> None: """Generate info log for a feature's signal.""" @@ -123,6 +176,11 @@ def _get_signaling_features(self, block: Block) -> dict[Feature, Criteria]: return signaling_features + def get_best_block_signaling_features(self) -> dict[Feature, Criteria]: + """Given the current best block, return all features that are in a signaling state.""" + best_block = self._tx_storage.get_best_block() + return self._get_signaling_features(best_block) + def _validate_support_intersection(self) -> None: """Validate that the provided support and not-support arguments do not conflict.""" if intersection := self._support_features.intersection(self._not_support_features): diff --git a/hathor/feature_activation/feature.py b/hathor/feature_activation/feature.py index 56082def8..05b08226e 100644 --- a/hathor/feature_activation/feature.py +++ b/hathor/feature_activation/feature.py @@ -16,7 +16,7 @@ @unique -class Feature(Enum): +class Feature(str, Enum): """ An enum containing all features that participate in the feature activation process, past or future, activated or not, for all networks. Features should NOT be removed from this enum, to preserve history. Their values diff --git a/hathor/feature_activation/feature_service.py b/hathor/feature_activation/feature_service.py index f02195cec..caadb62fb 100644 --- a/hathor/feature_activation/feature_service.py +++ b/hathor/feature_activation/feature_service.py @@ -13,7 +13,7 @@ # limitations under the License. from dataclasses import dataclass -from typing import TYPE_CHECKING, TypeAlias +from typing import TYPE_CHECKING, Optional, TypeAlias from hathor.feature_activation.feature import Feature from hathor.feature_activation.model.feature_description import FeatureDescription @@ -21,6 +21,7 @@ from hathor.feature_activation.settings import Settings as FeatureSettings if TYPE_CHECKING: + from hathor.feature_activation.bit_signaling_service import BitSignalingService from hathor.transaction import Block from hathor.transaction.storage import TransactionStorage @@ -41,11 +42,12 @@ class BlockIsMissingSignal: class FeatureService: - __slots__ = ('_feature_settings', '_tx_storage') + __slots__ = ('_feature_settings', '_tx_storage', 'bit_signaling_service') def __init__(self, *, feature_settings: FeatureSettings, tx_storage: 'TransactionStorage') -> None: self._feature_settings = feature_settings self._tx_storage = tx_storage + self.bit_signaling_service: Optional['BitSignalingService'] = None def is_feature_active(self, *, block: 'Block', feature: Feature) -> bool: """Returns whether a Feature is active at a certain block.""" @@ -113,6 +115,10 @@ def get_state(self, *, block: 'Block', feature: Feature) -> FeatureState: previous_state=previous_boundary_state ) + if new_state == FeatureState.MUST_SIGNAL: + assert self.bit_signaling_service is not None + self.bit_signaling_service.on_must_signal(feature) + # We cache the just calculated state of the current block _without saving it_, as it may still be unverified, # so we cannot persist its metadata. That's why we cache and save the previous boundary block above. 
block.set_feature_state(feature=feature, state=new_state) diff --git a/hathor/feature_activation/settings.py b/hathor/feature_activation/settings.py index aa4c119b4..3d36e052b 100644 --- a/hathor/feature_activation/settings.py +++ b/hathor/feature_activation/settings.py @@ -83,7 +83,7 @@ def _validate_conflicting_bits(cls, features: dict[Feature, Criteria]) -> dict[F first, second = overlap raise ValueError( f'At least one pair of Features have the same bit configured for an overlapping interval: ' - f'{first.feature} and {second.feature}' + f'{first.feature.value} and {second.feature.value}' ) return features diff --git a/hathor/feature_activation/storage/__init__.py b/hathor/feature_activation/storage/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/hathor/feature_activation/storage/feature_activation_storage.py b/hathor/feature_activation/storage/feature_activation_storage.py new file mode 100644 index 000000000..101f213dd --- /dev/null +++ b/hathor/feature_activation/storage/feature_activation_storage.py @@ -0,0 +1,101 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from structlog import get_logger + +from hathor.conf.settings import HathorSettings +from hathor.exception import InitializationError +from hathor.feature_activation.feature import Feature +from hathor.feature_activation.model.criteria import Criteria +from hathor.feature_activation.settings import Settings as FeatureActivationSettings +from hathor.storage import RocksDBStorage + +_CF_NAME_META = b'feature-activation-metadata' +_KEY_SETTINGS = b'feature-activation-settings' + +logger = get_logger() + + +class FeatureActivationStorage: + __slots__ = ('_log', '_settings', '_db', '_cf_meta') + + def __init__(self, *, settings: HathorSettings, rocksdb_storage: RocksDBStorage) -> None: + self._log = logger.new() + self._settings = settings + self._db = rocksdb_storage.get_db() + self._cf_meta = rocksdb_storage.get_or_create_column_family(_CF_NAME_META) + + def reset_settings(self) -> None: + """Reset feature settings from the database.""" + self._db.delete((self._cf_meta, _KEY_SETTINGS)) + + def validate_settings(self) -> None: + """Validate new feature settings against the previous configuration from the database.""" + new_settings = self._settings.FEATURE_ACTIVATION + db_settings_bytes: bytes | None = self._db.get((self._cf_meta, _KEY_SETTINGS)) + + if not db_settings_bytes: + self._save_settings(new_settings) + return + + db_settings: FeatureActivationSettings = FeatureActivationSettings.parse_raw(db_settings_bytes) + db_basic_settings = db_settings.copy(deep=True, exclude={'features'}) + new_basic_settings = new_settings.copy(deep=True, exclude={'features'}) + + self._validate_basic_settings(db_basic_settings=db_basic_settings, new_basic_settings=new_basic_settings) + self._validate_features(db_features=db_settings.features, new_features=new_settings.features) + self._save_settings(new_settings) + + def _validate_basic_settings( + self, + *, + db_basic_settings: 
FeatureActivationSettings, + new_basic_settings: FeatureActivationSettings + ) -> None: + """Validate that the basic feature settings are the same.""" + if new_basic_settings != db_basic_settings: + self._log.error( + 'Feature Activation basic settings are incompatible with previous settings.', + previous_settings=db_basic_settings, new_settings=new_basic_settings + ) + raise InitializationError() + + def _validate_features( + self, + *, + db_features: dict[Feature, Criteria], + new_features: dict[Feature, Criteria] + ) -> None: + """Validate that all previous features exist and are the same.""" + for db_feature, db_criteria in db_features.items(): + new_criteria = new_features.get(db_feature) + + if not new_criteria: + self._log.error( + 'Configuration for existing feature missing in new settings.', + feature=db_feature, previous_features=db_features, new_features=new_features + ) + raise InitializationError() + + if new_criteria != db_criteria: + self._log.error( + 'Criteria for feature is different than previous settings.', + feature=db_feature, previous_criteria=db_criteria, new_criteria=new_criteria + ) + raise InitializationError() + + def _save_settings(self, settings: FeatureActivationSettings) -> None: + """Save feature settings to the database.""" + settings_bytes = settings.json_dumpb() + + self._db.put((self._cf_meta, _KEY_SETTINGS), settings_bytes) diff --git a/hathor/graphviz.py b/hathor/graphviz.py index c75074576..978df0edf 100644 --- a/hathor/graphviz.py +++ b/hathor/graphviz.py @@ -63,7 +63,6 @@ def __init__(self, storage: TransactionStorage, include_funds: bool = False, def get_node_label(self, tx: BaseTransaction) -> str: """ Return the node's label for tx. """ - assert tx.hash is not None if tx.hash in self.labels: parts = [self.labels[tx.hash]] else: @@ -79,7 +78,6 @@ def get_node_label(self, tx: BaseTransaction) -> str: def get_node_attrs(self, tx: BaseTransaction) -> dict[str, str]: """ Return node's attributes. """ - assert tx.hash is not None node_attrs = {'label': self.get_node_label(tx)} if tx.is_block: @@ -151,7 +149,6 @@ def dot(self, format: str = 'pdf') -> Digraph: if self.only_blocks and not tx.is_block: continue - assert tx.hash is not None name = tx.hash.hex() node_attrs = self.get_node_attrs(tx) @@ -204,7 +201,6 @@ def tx_neighborhood(self, tx: BaseTransaction, format: str = 'pdf', while to_visit: level, tx = to_visit.pop() - assert tx.hash is not None assert tx.storage is not None name = tx.hash.hex() node_attrs = self.get_node_attrs(tx) diff --git a/hathor/indexes/address_index.py b/hathor/indexes/address_index.py index 9711e985f..2d08e4751 100644 --- a/hathor/indexes/address_index.py +++ b/hathor/indexes/address_index.py @@ -92,8 +92,11 @@ def get_from_address(self, address: str) -> list[bytes]: raise NotImplementedError @abstractmethod - def get_sorted_from_address(self, address: str) -> list[bytes]: + def get_sorted_from_address(self, address: str, tx_start: Optional[BaseTransaction] = None) -> Iterable[bytes]: """ Get a sorted list of transaction hashes of an address + + `tx_start` serves as a pagination marker, indicating the starting position for the iteration. + When tx_start is None, the iteration begins from the initial element. 
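As an illustrative sketch of the marker semantics described above (not part of the patch): in the concrete implementations iteration resumes at tx_start itself, i.e. the marker is inclusive, so paginating callers should skip the first yielded element on subsequent pages. The sketch is simplified to bare hashes, whereas the real indexes sort by (timestamp, hash):

from typing import Iterable, Optional

def sorted_from_key(hashes: set[bytes], tx_start: Optional[bytes] = None) -> Iterable[bytes]:
    """Yield hashes in sorted order, resuming at tx_start (inclusive) when given."""
    found = False
    for h in sorted(hashes):
        if tx_start is not None and h == tx_start:
            found = True
        if found or tx_start is None:
            yield h

hashes = {bytes([1]), bytes([2]), bytes([3])}
assert list(sorted_from_key(hashes)) == [bytes([1]), bytes([2]), bytes([3])]
assert list(sorted_from_key(hashes, tx_start=bytes([2]))) == [bytes([2]), bytes([3])]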
""" raise NotImplementedError diff --git a/hathor/indexes/height_index.py b/hathor/indexes/height_index.py index 7bf91e181..abebfdc55 100644 --- a/hathor/indexes/height_index.py +++ b/hathor/indexes/height_index.py @@ -19,7 +19,6 @@ from hathor.indexes.scope import Scope from hathor.transaction import BaseTransaction, Block from hathor.types import VertexId -from hathor.util import not_none SCOPE = Scope( include_blocks=True, @@ -66,7 +65,6 @@ def init_loop_step(self, tx: BaseTransaction) -> None: if tx.is_genesis: return assert isinstance(tx, Block) - assert tx.hash is not None if tx.get_metadata().voided_by: return self.add_new(tx.get_height(), tx.hash, tx.timestamp) @@ -118,7 +116,7 @@ def update_new_chain(self, height: int, block: Block) -> None: add_to_index: list[_AddToIndexItem] = [] while self.get(block_height) != side_chain_block.hash: add_to_index.append( - _AddToIndexItem(block_height, not_none(side_chain_block.hash), side_chain_block.timestamp) + _AddToIndexItem(block_height, side_chain_block.hash, side_chain_block.timestamp) ) side_chain_block = side_chain_block.get_block_parent() diff --git a/hathor/indexes/manager.py b/hathor/indexes/manager.py index 967ba7225..e681f716b 100644 --- a/hathor/indexes/manager.py +++ b/hathor/indexes/manager.py @@ -229,7 +229,6 @@ def del_tx(self, tx: BaseTransaction, *, remove_all: bool = False, relax_assert: :param tx: Transaction to be deleted """ assert tx.storage is not None - assert tx.hash is not None if remove_all: # We delete from indexes in two cases: (i) mark tx as voided, and (ii) remove tx. diff --git a/hathor/indexes/memory_address_index.py b/hathor/indexes/memory_address_index.py index 25588e594..4360bda21 100644 --- a/hathor/indexes/memory_address_index.py +++ b/hathor/indexes/memory_address_index.py @@ -49,8 +49,8 @@ def add_tx(self, tx: BaseTransaction) -> None: def get_from_address(self, address: str) -> list[bytes]: return list(self._get_from_key(address)) - def get_sorted_from_address(self, address: str) -> list[bytes]: - return list(self._get_sorted_from_key(address)) + def get_sorted_from_address(self, address: str, tx_start: Optional[BaseTransaction] = None) -> Iterable[bytes]: + return self._get_sorted_from_key(address, tx_start) def is_address_empty(self, address: str) -> bool: return self._is_key_empty(address) diff --git a/hathor/indexes/memory_timestamp_index.py b/hathor/indexes/memory_timestamp_index.py index f041f6296..a6c1c06a0 100644 --- a/hathor/indexes/memory_timestamp_index.py +++ b/hathor/indexes/memory_timestamp_index.py @@ -47,7 +47,6 @@ def force_clear(self) -> None: self._index = SortedKeyList(key=lambda x: (x.timestamp, x.hash)) def add_tx(self, tx: BaseTransaction) -> bool: - assert tx.hash is not None # It is safe to use the in operator because it is O(log(n)). 
# http://www.grantjenks.com/docs/sortedcontainers/sortedlist.html#sortedcontainers.SortedList.__contains__ element = TransactionIndexElement(tx.timestamp, tx.hash) diff --git a/hathor/indexes/memory_tips_index.py b/hathor/indexes/memory_tips_index.py index 47d8c0eca..58c9f447a 100644 --- a/hathor/indexes/memory_tips_index.py +++ b/hathor/indexes/memory_tips_index.py @@ -78,7 +78,6 @@ def add_tx(self, tx: BaseTransaction) -> bool: :param tx: Transaction to be added """ - assert tx.hash is not None assert tx.storage is not None if tx.hash in self.tx_last_interval: return False @@ -110,7 +109,6 @@ def add_tx(self, tx: BaseTransaction) -> bool: def del_tx(self, tx: BaseTransaction, *, relax_assert: bool = False) -> None: """ Remove a transaction from the index. """ - assert tx.hash is not None assert tx.storage is not None interval = self.tx_last_interval.pop(tx.hash, None) @@ -134,7 +132,6 @@ def update_tx(self, tx: BaseTransaction, *, relax_assert: bool = False) -> None: """ Update a tx according to its children. """ assert tx.storage is not None - assert tx.hash is not None meta = tx.get_metadata() if meta.voided_by: diff --git a/hathor/indexes/memory_tokens_index.py b/hathor/indexes/memory_tokens_index.py index 8b001dc51..26223feaa 100644 --- a/hathor/indexes/memory_tokens_index.py +++ b/hathor/indexes/memory_tokens_index.py @@ -80,7 +80,6 @@ def force_clear(self) -> None: def _add_to_index(self, tx: BaseTransaction, index: int) -> None: """ Add tx to mint/melt indexes and total amount """ - assert tx.hash is not None tx_output = tx.outputs[index] token_uid = tx.get_token_uid(tx_output.get_token_index()) @@ -98,7 +97,6 @@ def _add_to_index(self, tx: BaseTransaction, index: int) -> None: def _remove_from_index(self, tx: BaseTransaction, index: int) -> None: """ Remove tx from mint/melt indexes and total amount """ - assert tx.hash is not None tx_output = tx.outputs[index] token_uid = tx.get_token_uid(tx_output.get_token_index()) @@ -125,7 +123,6 @@ def add_tx(self, tx: BaseTransaction) -> None: if tx.version == TxVersion.TOKEN_CREATION_TRANSACTION: from hathor.transaction.token_creation_tx import TokenCreationTransaction tx = cast(TokenCreationTransaction, tx) - assert tx.hash is not None status = self._tokens[tx.hash] status._name = tx.token_name status._symbol = tx.token_symbol @@ -137,7 +134,6 @@ def add_tx(self, tx: BaseTransaction) -> None: transactions = self._tokens[token_uid]._transactions # It is safe to use the in operator because it is O(log(n)). 
# http://www.grantjenks.com/docs/sortedcontainers/sortedlist.html#sortedcontainers.SortedList.__contains__ - assert tx.hash is not None element = TransactionIndexElement(tx.timestamp, tx.hash) if element in transactions: return @@ -162,7 +158,6 @@ def del_tx(self, tx: BaseTransaction) -> None: # if it's a TokenCreationTransaction, remove it from index if tx.version == TxVersion.TOKEN_CREATION_TRANSACTION: - assert tx.hash is not None del self._tokens[tx.hash] def iter_all_tokens(self) -> Iterator[tuple[bytes, TokenIndexInfo]]: diff --git a/hathor/indexes/memory_tx_group_index.py b/hathor/indexes/memory_tx_group_index.py index 5b8415905..99a679f21 100644 --- a/hathor/indexes/memory_tx_group_index.py +++ b/hathor/indexes/memory_tx_group_index.py @@ -14,13 +14,12 @@ from abc import abstractmethod from collections import defaultdict -from typing import Iterable, Sized, TypeVar +from typing import Iterable, Optional, Sized, TypeVar from structlog import get_logger from hathor.indexes.tx_group_index import TxGroupIndex from hathor.transaction import BaseTransaction -from hathor.util import not_none logger = get_logger() @@ -31,7 +30,7 @@ class MemoryTxGroupIndex(TxGroupIndex[KT]): """Memory implementation of the TxGroupIndex. This class is abstract and cannot be used directly. """ - index: defaultdict[KT, set[bytes]] + index: defaultdict[KT, set[tuple[int, bytes]]] def __init__(self) -> None: self.force_clear() @@ -40,7 +39,7 @@ def force_clear(self) -> None: self.index = defaultdict(set) def _add_tx(self, key: KT, tx: BaseTransaction) -> None: - self.index[key].add(not_none(tx.hash)) + self.index[key].add((tx.timestamp, tx.hash)) @abstractmethod def _extract_keys(self, tx: BaseTransaction) -> Iterable[KT]: @@ -48,22 +47,28 @@ def _extract_keys(self, tx: BaseTransaction) -> Iterable[KT]: raise NotImplementedError def add_tx(self, tx: BaseTransaction) -> None: - assert tx.hash is not None for key in self._extract_keys(tx): self._add_tx(key, tx) def remove_tx(self, tx: BaseTransaction) -> None: - assert tx.hash is not None for key in self._extract_keys(tx): - self.index[key].discard(tx.hash) + self.index[key].discard((tx.timestamp, tx.hash)) def _get_from_key(self, key: KT) -> Iterable[bytes]: - yield from self.index[key] - - def _get_sorted_from_key(self, key: KT) -> Iterable[bytes]: - return sorted(self.index[key]) + for _, h in self.index[key]: + yield h + + def _get_sorted_from_key(self, key: KT, tx_start: Optional[BaseTransaction] = None) -> Iterable[bytes]: + sorted_elements = sorted(self.index[key]) + found = False + for _, h in sorted_elements: + if tx_start and h == tx_start.hash: + found = True + + if found or not tx_start: + yield h def _is_key_empty(self, key: KT) -> bool: return not bool(self.index[key]) diff --git a/hathor/indexes/mempool_tips_index.py b/hathor/indexes/mempool_tips_index.py index 460764239..222cc8140 100644 --- a/hathor/indexes/mempool_tips_index.py +++ b/hathor/indexes/mempool_tips_index.py @@ -104,14 +104,12 @@ def _add_many(self, txs: Iterable[bytes]) -> None: # PROVIDES: def update(self, tx: BaseTransaction, *, remove: Optional[bool] = None) -> None: - assert tx.hash is not None assert tx.storage is not None tx_meta = tx.get_metadata() to_remove: set[bytes] = set() to_remove_parents: set[bytes] = set() tx_storage = tx.storage for tip_tx in self.iter(tx_storage): - assert tip_tx.hash is not None meta = tip_tx.get_metadata() # a new tx/block added might cause a tx in the tips to become voided. 
For instance, there might be a tx1 a # double spending tx2, where tx1 is valid and tx2 voided. A new block confirming tx2 will make it valid @@ -175,7 +173,6 @@ def update(self, tx: BaseTransaction, *, remove: Optional[bool] = None) -> None: self._discard_many(set(tx.parents)) if tx.is_transaction and tx_meta.first_block is None: - assert tx.hash is not None self._add(tx.hash) def iter(self, tx_storage: 'TransactionStorage', max_timestamp: Optional[float] = None) -> Iterator[Transaction]: diff --git a/hathor/indexes/rocksdb_address_index.py b/hathor/indexes/rocksdb_address_index.py index f9f1c0322..cd7f78096 100644 --- a/hathor/indexes/rocksdb_address_index.py +++ b/hathor/indexes/rocksdb_address_index.py @@ -66,8 +66,8 @@ def add_tx(self, tx: BaseTransaction) -> None: def get_from_address(self, address: str) -> list[bytes]: return list(self._get_from_key(address)) - def get_sorted_from_address(self, address: str) -> list[bytes]: - return list(self._get_sorted_from_key(address)) + def get_sorted_from_address(self, address: str, tx_start: Optional[BaseTransaction] = None) -> Iterable[bytes]: + return self._get_sorted_from_key(address, tx_start) def is_address_empty(self, address: str) -> bool: return self._is_key_empty(address) diff --git a/hathor/indexes/rocksdb_tokens_index.py b/hathor/indexes/rocksdb_tokens_index.py index b978d9c38..a00d39812 100644 --- a/hathor/indexes/rocksdb_tokens_index.py +++ b/hathor/indexes/rocksdb_tokens_index.py @@ -252,8 +252,6 @@ def _subtract_from_total(self, token_uid: bytes, amount: int) -> None: def _add_utxo(self, tx: BaseTransaction, index: int) -> None: """ Add tx to mint/melt indexes and total amount """ - assert tx.hash is not None - tx_output = tx.outputs[index] token_uid = tx.get_token_uid(tx_output.get_token_index()) @@ -270,7 +268,6 @@ def _add_utxo(self, tx: BaseTransaction, index: int) -> None: def _remove_utxo(self, tx: BaseTransaction, index: int) -> None: """ Remove tx from mint/melt indexes and total amount """ - assert tx.hash is not None tx_output = tx.outputs[index] token_uid = tx.get_token_uid(tx_output.get_token_index()) @@ -291,7 +288,6 @@ def add_tx(self, tx: BaseTransaction) -> None: if tx.version == TxVersion.TOKEN_CREATION_TRANSACTION: from hathor.transaction.token_creation_tx import TokenCreationTransaction tx = cast(TokenCreationTransaction, tx) - assert tx.hash is not None self.log.debug('create_token_info', tx=tx.hash_hex, name=tx.token_name, symb=tx.token_symbol) self._create_token_info(tx.hash, tx.token_name, tx.token_symbol) @@ -299,7 +295,6 @@ def add_tx(self, tx: BaseTransaction) -> None: # Adding this tx to the transactions key list assert isinstance(tx, Transaction) for token_uid in tx.tokens: - assert tx.hash is not None self._add_transaction(token_uid, tx.timestamp, tx.hash) for tx_input in tx.inputs: @@ -322,12 +317,10 @@ def del_tx(self, tx: BaseTransaction) -> None: # Removing this tx from the transactions key list assert isinstance(tx, Transaction) for token_uid in tx.tokens: - assert tx.hash is not None self._remove_transaction(token_uid, tx.timestamp, tx.hash) # if it's a TokenCreationTransaction, remove it from index if tx.version == TxVersion.TOKEN_CREATION_TRANSACTION: - assert tx.hash is not None self._destroy_token(tx.hash) def iter_all_tokens(self) -> Iterator[tuple[bytes, TokenIndexInfo]]: diff --git a/hathor/indexes/rocksdb_tx_group_index.py b/hathor/indexes/rocksdb_tx_group_index.py index bbbe19790..f640fbafa 100644 --- a/hathor/indexes/rocksdb_tx_group_index.py +++ 
b/hathor/indexes/rocksdb_tx_group_index.py @@ -75,7 +75,6 @@ def _to_rocksdb_key(self, key: KT, tx: Optional[BaseTransaction] = None) -> byte rocksdb_key = self._serialize_key(key) assert len(rocksdb_key) == self._KEY_SIZE if tx: - assert tx.hash is not None assert len(tx.hash) == 32 rocksdb_key += struct.pack('>I', tx.timestamp) + tx.hash assert len(rocksdb_key) == self._KEY_SIZE + 4 + 32 @@ -94,23 +93,25 @@ def _from_rocksdb_key(self, rocksdb_key: bytes) -> tuple[KT, int, bytes]: return key, timestamp, tx_hash def add_tx(self, tx: BaseTransaction) -> None: - assert tx.hash is not None - for key in self._extract_keys(tx): self.log.debug('put key', key=key) self._db.put((self._cf, self._to_rocksdb_key(key, tx)), b'') def remove_tx(self, tx: BaseTransaction) -> None: - assert tx.hash is not None - for key in self._extract_keys(tx): self.log.debug('delete key', key=key) self._db.delete((self._cf, self._to_rocksdb_key(key, tx))) def _get_from_key(self, key: KT) -> Iterable[bytes]: + return self._util_get_from_key(key) + + def _get_sorted_from_key(self, key: KT, tx_start: Optional[BaseTransaction] = None) -> Iterable[bytes]: + return self._util_get_from_key(key, tx_start) + + def _util_get_from_key(self, key: KT, tx: Optional[BaseTransaction] = None) -> Iterable[bytes]: self.log.debug('seek to', key=key) it = self._db.iterkeys(self._cf) - it.seek(self._to_rocksdb_key(key)) + it.seek(self._to_rocksdb_key(key, tx)) for _cf, rocksdb_key in it: key2, _, tx_hash = self._from_rocksdb_key(rocksdb_key) if key2 != key: @@ -119,9 +120,6 @@ def _get_from_key(self, key: KT) -> Iterable[bytes]: yield tx_hash self.log.debug('seek end') - def _get_sorted_from_key(self, key: KT) -> Iterable[bytes]: - return self._get_from_key(key) - def _is_key_empty(self, key: KT) -> bool: self.log.debug('seek to', key=key) it = self._db.iterkeys(self._cf) diff --git a/hathor/indexes/tx_group_index.py b/hathor/indexes/tx_group_index.py index 4041917f5..139245fe9 100644 --- a/hathor/indexes/tx_group_index.py +++ b/hathor/indexes/tx_group_index.py @@ -13,7 +13,7 @@ # limitations under the License. from abc import abstractmethod -from typing import Generic, Iterable, Sized, TypeVar +from typing import Generic, Iterable, Optional, Sized, TypeVar from structlog import get_logger @@ -49,8 +49,12 @@ def _get_from_key(self, key: KT) -> Iterable[bytes]: raise NotImplementedError @abstractmethod - def _get_sorted_from_key(self, key: KT) -> Iterable[bytes]: - """Get all transactions that have a given key, sorted by timestamp.""" + def _get_sorted_from_key(self, key: KT, tx_start: Optional[BaseTransaction] = None) -> Iterable[bytes]: + """Get all transactions that have a given key, sorted by timestamp. + + `tx_start` serves as a pagination marker, indicating the starting position for the iteration. + When tx_start is None, the iteration begins from the initial element. 
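The RocksDB implementation earlier in this patch supports the marker by seeking to a composite key built from tx_start. A sketch of that key layout follows (editorial; the 34-byte group key size is a placeholder, since _KEY_SIZE varies per index):

import struct

_KEY_SIZE = 34                 # hypothetical size of the serialized group key
group_key = b'\x00' * _KEY_SIZE
timestamp = 1_600_000_000
tx_hash = b'\x11' * 32

# group key + big-endian uint32 timestamp + 32-byte hash: big-endian packing
# makes lexicographic key order equal (group, timestamp, hash) order, so
# it.seek() on the key built from tx_start resumes the scan at that vertex
# (inclusive).
rocksdb_key = group_key + struct.pack('>I', timestamp) + tx_hash
assert len(rocksdb_key) == _KEY_SIZE + 4 + 32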
+ """ raise NotImplementedError @abstractmethod diff --git a/hathor/indexes/utxo_index.py b/hathor/indexes/utxo_index.py index 5ccbf07e4..bfdc0df78 100644 --- a/hathor/indexes/utxo_index.py +++ b/hathor/indexes/utxo_index.py @@ -59,7 +59,6 @@ def __repr__(self): @classmethod def from_tx_output(cls, tx: BaseTransaction, index: int, tx_output: TxOutput) -> 'UtxoIndexItem': - assert tx.hash is not None settings = get_global_settings() if tx_output.is_token_authority(): @@ -136,7 +135,6 @@ def _update_executed(self, tx: BaseTransaction) -> None: - inputs are removed from the index """ tx_meta = tx.get_metadata() - assert tx.hash is not None assert not tx_meta.voided_by log = self.log.new(tx=tx.hash_hex) log.debug('update executed') @@ -170,7 +168,6 @@ def _update_voided(self, tx: BaseTransaction) -> None: - outpus are removed from the index """ tx_meta = tx.get_metadata() - assert tx.hash is not None assert tx_meta.voided_by log = self.log.new(tx=tx.hash_hex) log.debug('update voided') diff --git a/hathor/manager.py b/hathor/manager.py index 566a7c936..354f7aaf1 100644 --- a/hathor/manager.py +++ b/hathor/manager.py @@ -34,21 +34,18 @@ from hathor.exception import ( BlockTemplateTimestampError, DoubleSpendingError, - HathorError, InitializationError, InvalidNewTransaction, NonStandardTxError, RewardLockedError, SpendingVoidedError, ) +from hathor.execution_manager import ExecutionManager from hathor.feature_activation.bit_signaling_service import BitSignalingService -from hathor.feature_activation.feature import Feature -from hathor.feature_activation.feature_service import FeatureService from hathor.mining import BlockTemplate, BlockTemplates from hathor.mining.cpu_mining_service import CpuMiningService from hathor.p2p.manager import ConnectionsManager from hathor.p2p.peer_id import PeerId -from hathor.p2p.protocol import HathorProtocol from hathor.profiler import get_cpu_profiler from hathor.pubsub import HathorEvents, PubSubManager from hathor.reactor import ReactorProtocol as Reactor @@ -56,12 +53,13 @@ from hathor.stratum import StratumFactory from hathor.transaction import BaseTransaction, Block, MergeMinedBlock, Transaction, TxVersion, sum_weights from hathor.transaction.exceptions import TxValidationError -from hathor.transaction.storage import TransactionStorage from hathor.transaction.storage.exceptions import TransactionDoesNotExist +from hathor.transaction.storage.transaction_storage import TransactionStorage from hathor.transaction.storage.tx_allow_scope import TxAllowScope from hathor.types import Address, VertexId from hathor.util import EnvironmentInfo, LogDuration, Random, calculate_min_significant_weight, not_none from hathor.verification.verification_service import VerificationService +from hathor.vertex_handler import VertexHandler from hathor.wallet import BaseWallet logger = get_logger() @@ -88,30 +86,33 @@ class UnhealthinessReason(str, Enum): # This is the interval to be used by the task to check if the node is synced CHECK_SYNC_STATE_INTERVAL = 30 # seconds - def __init__(self, - reactor: Reactor, - *, - settings: HathorSettings, - pubsub: PubSubManager, - consensus_algorithm: ConsensusAlgorithm, - daa: DifficultyAdjustmentAlgorithm, - peer_id: PeerId, - tx_storage: TransactionStorage, - p2p_manager: ConnectionsManager, - event_manager: EventManager, - feature_service: FeatureService, - bit_signaling_service: BitSignalingService, - verification_service: VerificationService, - cpu_mining_service: CpuMiningService, - network: str, - hostname: Optional[str] = None, - 
wallet: Optional[BaseWallet] = None, - capabilities: Optional[list[str]] = None, - checkpoints: Optional[list[Checkpoint]] = None, - rng: Optional[Random] = None, - environment_info: Optional[EnvironmentInfo] = None, - full_verification: bool = False, - enable_event_queue: bool = False): + def __init__( + self, + reactor: Reactor, + *, + settings: HathorSettings, + pubsub: PubSubManager, + consensus_algorithm: ConsensusAlgorithm, + daa: DifficultyAdjustmentAlgorithm, + peer_id: PeerId, + tx_storage: TransactionStorage, + p2p_manager: ConnectionsManager, + event_manager: EventManager, + bit_signaling_service: BitSignalingService, + verification_service: VerificationService, + cpu_mining_service: CpuMiningService, + network: str, + execution_manager: ExecutionManager, + vertex_handler: VertexHandler, + hostname: Optional[str] = None, + wallet: Optional[BaseWallet] = None, + capabilities: Optional[list[str]] = None, + checkpoints: Optional[list[Checkpoint]] = None, + rng: Optional[Random] = None, + environment_info: Optional[EnvironmentInfo] = None, + full_verification: bool = False, + enable_event_queue: bool = False, + ) -> None: """ :param reactor: Twisted reactor which handles the mainloop and the events. :param peer_id: Id of this node. @@ -129,6 +130,7 @@ def __init__(self, 'Either enable it, or use the reset-event-queue CLI command to remove all event-related data' ) + self._execution_manager = execution_manager self._settings = settings self.daa = daa self._cmd_path: Optional[str] = None @@ -182,7 +184,6 @@ def __init__(self, self._event_manager.save_event_queue_state(enable_event_queue) self._enable_event_queue = enable_event_queue - self._feature_service = feature_service self._bit_signaling_service = bit_signaling_service self.verification_service = verification_service self.cpu_mining_service = cpu_mining_service @@ -190,6 +191,7 @@ def __init__(self, self.consensus_algorithm = consensus_algorithm self.connections = p2p_manager + self.vertex_handler = vertex_handler self.metrics = Metrics( pubsub=self.pubsub, @@ -250,6 +252,15 @@ def start(self) -> None: self.is_started = True self.log.info('start manager', network=self.network) + + if self.tx_storage.is_full_node_crashed(): + self.log.error( + 'Error initializing node. The last time you executed your full node it wasn\'t stopped correctly. ' + 'The storage is not reliable anymore and, because of that, you must remove your storage and do a ' + 'full sync (either from scratch or from a snapshot).' 
+ ) + sys.exit(-1) + # If it's a full verification, we save on the storage that we are starting it # this is required because if we stop the initilization in the middle, the metadata # saved on the storage is not reliable anymore, only if we finish it @@ -319,7 +330,7 @@ def start(self) -> None: self.stratum_factory.start() # Start running - self.tx_storage.start_running_manager() + self.tx_storage.start_running_manager(self._execution_manager) def stop(self) -> Deferred: if not self.is_started: @@ -418,8 +429,6 @@ def _initialize_components_full_verification(self) -> None: self.log.debug('load blocks and transactions') for tx in self.tx_storage._topological_sort_dfs(): - assert tx.hash is not None - tx_meta = tx.get_metadata() t2 = time.time() @@ -479,7 +488,6 @@ def _initialize_components_full_verification(self) -> None: block_count += 1 # this works because blocks on the best chain are iterated from lower to higher height - assert tx.hash is not None assert tx_meta.validation.is_at_least_basic() assert isinstance(tx, Block) blk_height = tx.get_height() @@ -642,7 +650,6 @@ def _verify_checkpoints(self) -> None: tx = self.tx_storage.get_transaction(checkpoint.hash) except TransactionDoesNotExist as e: raise InitializationError(f'Expected checkpoint does not exist in database: {checkpoint}') from e - assert tx.hash is not None tx_meta = tx.get_metadata() if tx_meta.height != checkpoint.height: raise InitializationError( @@ -771,7 +778,6 @@ def _make_block_template(self, parent_block: Block, parent_txs: 'ParentTxs', cur with_weight_decay: bool = False) -> BlockTemplate: """ Further implementation of making block template, used by make_block_template and make_custom_block_template """ - assert parent_block.hash is not None # the absolute minimum would be the previous timestamp + 1 timestamp_abs_min = parent_block.timestamp + 1 # and absolute maximum limited by max time between blocks @@ -810,7 +816,10 @@ def _make_block_template(self, parent_block: Block, parent_txs: 'ParentTxs', cur parent_block_metadata.score, 2 * self._settings.WEIGHT_TOL ) - weight = max(self.daa.calculate_next_weight(parent_block, timestamp, self.tx_storage), min_significant_weight) + weight = max( + self.daa.calculate_next_weight(parent_block, timestamp, self.tx_storage.get_parent_block), + min_significant_weight + ) height = parent_block.get_height() + 1 parents = [parent_block.hash] + parent_txs.must_include parents_any = parent_txs.can_include @@ -908,12 +917,6 @@ def push_tx(self, tx: Transaction, allow_non_standard_script: bool = False, if not tx_from_lib.is_standard(max_output_script_size, not allow_non_standard_script): raise NonStandardTxError('Transaction is non standard.') - # Validate tx. 
- try: - self.verification_service.verify(tx) - except TxValidationError as e: - raise InvalidNewTransaction(str(e)) - self.propagate_tx(tx, fails_silently=False) def propagate_tx(self, tx: BaseTransaction, fails_silently: bool = True) -> bool: @@ -930,173 +933,29 @@ def propagate_tx(self, tx: BaseTransaction, fails_silently: bool = True) -> bool return self.on_new_tx(tx, fails_silently=fails_silently, propagate_to_peers=True) @cpu.profiler('on_new_tx') - def on_new_tx(self, tx: BaseTransaction, *, conn: Optional[HathorProtocol] = None, - quiet: bool = False, fails_silently: bool = True, propagate_to_peers: bool = True, - skip_block_weight_verification: bool = False, reject_locked_reward: bool = True) -> bool: + def on_new_tx( + self, + tx: BaseTransaction, + *, + quiet: bool = False, + fails_silently: bool = True, + propagate_to_peers: bool = True, + reject_locked_reward: bool = True + ) -> bool: """ New method for adding transactions or blocks that steps the validation state machine. :param tx: transaction to be added - :param conn: optionally specify the protocol instance where this tx was received from :param quiet: if True will not log when a new tx is accepted :param fails_silently: if False will raise an exception when tx cannot be added :param propagate_to_peers: if True will relay the tx to other peers if it is accepted - :param skip_block_weight_verification: if True will not check the tx PoW """ - assert self.tx_storage.is_only_valid_allowed() - assert tx.hash is not None - - already_exists = False - if self.tx_storage.transaction_exists(tx.hash): - self.tx_storage.compare_bytes_with_local_tx(tx) - already_exists = True - - if tx.timestamp - self.reactor.seconds() > self._settings.MAX_FUTURE_TIMESTAMP_ALLOWED: - if not fails_silently: - raise InvalidNewTransaction('Ignoring transaction in the future {} (timestamp={})'.format( - tx.hash_hex, tx.timestamp)) - self.log.warn('on_new_tx(): Ignoring transaction in the future', tx=tx.hash_hex, - future_timestamp=tx.timestamp) - return False - - assert self.tx_storage.indexes is not None - tx.storage = self.tx_storage - - try: - metadata = tx.get_metadata() - except TransactionDoesNotExist: - if not fails_silently: - raise InvalidNewTransaction('cannot get metadata') - self.log.warn('on_new_tx(): cannot get metadata', tx=tx.hash_hex) - return False - - if already_exists and metadata.validation.is_fully_connected(): - if not fails_silently: - raise InvalidNewTransaction('Transaction already exists {}'.format(tx.hash_hex)) - self.log.warn('on_new_tx(): Transaction already exists', tx=tx.hash_hex) - return False - - if metadata.validation.is_invalid(): - if not fails_silently: - raise InvalidNewTransaction('previously marked as invalid') - self.log.warn('on_new_tx(): previously marked as invalid', tx=tx.hash_hex) - return False - - if not metadata.validation.is_fully_connected(): - try: - self.verification_service.validate_full(tx, reject_locked_reward=reject_locked_reward) - except HathorError as e: - if not fails_silently: - raise InvalidNewTransaction('full validation failed') from e - self.log.warn('on_new_tx(): full validation failed', tx=tx.hash_hex, exc_info=True) - return False - - # The method below adds the tx as a child of the parents - # This needs to be called right before the save because we were adding the children - # in the tx parents even if the tx was invalid (failing the verifications above) - # then I would have a children that was not in the storage - tx.update_initial_metadata(save=False) - 
self.tx_storage.save_transaction(tx) - self.tx_storage.add_to_indexes(tx) - try: - self.consensus_algorithm.update(tx) - except HathorError as e: - if not fails_silently: - raise InvalidNewTransaction('consensus update failed') from e - self.log.warn('on_new_tx(): consensus update failed', tx=tx.hash_hex, exc_info=True) - return False - - assert self.verification_service.validate_full( + return self.vertex_handler.on_new_vertex( tx, - skip_block_weight_verification=True, - reject_locked_reward=reject_locked_reward + quiet=quiet, + fails_silently=fails_silently, + propagate_to_peers=propagate_to_peers, + reject_locked_reward=reject_locked_reward, ) - self.tx_storage.indexes.update(tx) - if self.tx_storage.indexes.mempool_tips: - self.tx_storage.indexes.mempool_tips.update(tx) # XXX: move to indexes.update - self.tx_fully_validated(tx, quiet=quiet) - - if propagate_to_peers: - # Propagate to our peers. - self.connections.send_tx_to_peers(tx) - - return True - - def log_new_object(self, tx: BaseTransaction, message_fmt: str, *, quiet: bool) -> None: - """ A shortcut for logging additional information for block/txs. - """ - metadata = tx.get_metadata() - now = datetime.datetime.fromtimestamp(self.reactor.seconds()) - kwargs = { - 'tx': tx, - 'ts_date': datetime.datetime.fromtimestamp(tx.timestamp), - 'time_from_now': tx.get_time_from_now(now), - 'validation': metadata.validation.name, - } - if tx.is_block: - message = message_fmt.format('block') - if isinstance(tx, Block): - kwargs['height'] = tx.get_height() - else: - message = message_fmt.format('tx') - if not quiet: - log_func = self.log.info - else: - log_func = self.log.debug - log_func(message, **kwargs) - - def tx_fully_validated(self, tx: BaseTransaction, *, quiet: bool) -> None: - """ Handle operations that need to happen once the tx becomes fully validated. - - This might happen immediately after we receive the tx, if we have all dependencies - already. Or it might happen later. - """ - assert tx.hash is not None - assert self.tx_storage.indexes is not None - - # Publish to pubsub manager the new tx accepted, now that it's full validated - self.pubsub.publish(HathorEvents.NETWORK_NEW_TX_ACCEPTED, tx=tx) - - if self.tx_storage.indexes.mempool_tips: - self.tx_storage.indexes.mempool_tips.update(tx) - - if self.wallet: - # TODO Remove it and use pubsub instead. - self.wallet.on_new_tx(tx) - - self.log_new_object(tx, 'new {}', quiet=quiet) - self._log_feature_states(tx) - - def _log_feature_states(self, vertex: BaseTransaction) -> None: - """Log features states for a block. Used as part of the Feature Activation Phased Testing.""" - if not isinstance(vertex, Block): - return - - feature_descriptions = self._feature_service.get_bits_description(block=vertex) - state_by_feature = { - feature.value: description.state.value - for feature, description in feature_descriptions.items() - } - - self.log.info( - 'New block accepted with feature activation states', - block_hash=vertex.hash_hex, - block_height=vertex.get_height(), - features_states=state_by_feature - ) - - features = [Feature.NOP_FEATURE_1, Feature.NOP_FEATURE_2] - for feature in features: - self._log_if_feature_is_active(vertex, feature) - - def _log_if_feature_is_active(self, block: Block, feature: Feature) -> None: - """Log if a feature is ACTIVE for a block. 
Used as part of the Feature Activation Phased Testing.""" - if self._feature_service.is_feature_active(block=block, feature=feature): - self.log.info( - 'Feature is ACTIVE for block', - feature=feature.value, - block_hash=block.hash_hex, - block_height=block.get_height() - ) def has_sync_version_capability(self) -> bool: return self._settings.CAPABILITY_SYNC_VERSION in self.capabilities @@ -1167,6 +1026,13 @@ def get_cmd_path(self) -> Optional[str]: """Return the cmd path. If no cmd path is set, returns None.""" return self._cmd_path + def set_hostname_and_reset_connections(self, new_hostname: str) -> None: + """Set the hostname and reset all connections.""" + old_hostname = self.hostname + self.hostname = new_hostname + self.connections.update_hostname_entrypoints(old_hostname=old_hostname, new_hostname=self.hostname) + self.connections.disconnect_all_peers(force=True) + class ParentTxs(NamedTuple): """ Tuple where the `must_include` hash, when present (at most 1), must be included in a pair, and a list of hashes diff --git a/hathor/merged_mining/coordinator.py b/hathor/merged_mining/coordinator.py index 1a9ac39ff..9a191a47b 100644 --- a/hathor/merged_mining/coordinator.py +++ b/hathor/merged_mining/coordinator.py @@ -664,12 +664,10 @@ async def submit_to_hathor(self, job: SingleMinerJob, aux_pow: BitcoinAuxPow) -> block = job.hathor_block block.aux_pow = aux_pow block.update_hash() - assert block.hash is not None block_hash = Hash(block.hash) if block_hash.to_weight() < block.weight: self.log.debug('high hash for Hathor, keep mining') return - assert block.hash is not None if job.hathor_height is not None: if self.coordinator.should_skip_hathor_submit(job.hathor_height): self.log.debug('share is too late, skip Hathor submit') diff --git a/hathor/p2p/manager.py b/hathor/p2p/manager.py index d180af7c8..d7e7f422b 100644 --- a/hathor/p2p/manager.py +++ b/hathor/p2p/manager.py @@ -16,8 +16,9 @@ from structlog import get_logger from twisted.internet import endpoints +from twisted.internet.address import IPv4Address, IPv6Address from twisted.internet.defer import Deferred -from twisted.internet.interfaces import IProtocolFactory, IStreamClientEndpoint, IStreamServerEndpoint +from twisted.internet.interfaces import IListeningPort, IProtocolFactory, IStreamClientEndpoint from twisted.internet.task import LoopingCall from twisted.protocols.tls import TLSMemoryBIOFactory, TLSMemoryBIOProtocol from twisted.python.failure import Failure @@ -108,8 +109,11 @@ def __init__(self, self.network = network - # List of addresses to listen for new connections (eg: [tcp:8000]) - self.listen_addresses: list[str] = [] + # List of address descriptions to listen for new connections (eg: [tcp:8000]) + self.listen_address_descriptions: list[str] = [] + + # List of actual IP address instances to listen for new connections + self._listen_addresses: list[IPv4Address | IPv6Address] = [] # List of peer discovery methods. 
self.peer_discoveries: list[PeerDiscovery] = [] @@ -239,9 +243,9 @@ def set_manager(self, manager: 'HathorManager') -> None: self.log.debug('enable sync-v2 indexes') indexes.enable_mempool_index() - def add_listen_address(self, addr: str) -> None: + def add_listen_address_description(self, addr: str) -> None: """Add address to listen for incoming connections.""" - self.listen_addresses.append(addr) + self.listen_address_descriptions.append(addr) def add_peer_discovery(self, peer_discovery: PeerDiscovery) -> None: """Add a peer discovery method.""" @@ -279,7 +283,7 @@ def start(self) -> None: if self._settings.ENABLE_PEER_WHITELIST: self._start_whitelist_reconnect() - for description in self.listen_addresses: + for description in self.listen_address_descriptions: self.listen(description) self.do_discovery() @@ -635,7 +639,7 @@ def connect_to(self, description: str, peer: Optional[PeerId] = None, use_ssl: O peers_count=self._get_peers_count() ) - def listen(self, description: str, use_ssl: Optional[bool] = None) -> IStreamServerEndpoint: + def listen(self, description: str, use_ssl: Optional[bool] = None) -> None: """ Start to listen for new connection according to the description. If `ssl` is True, then the connection will be wraped by a TLS. @@ -661,20 +665,43 @@ def listen(self, description: str, use_ssl: Optional[bool] = None) -> IStreamSer factory = NetfilterFactory(self, factory) - self.log.info('listen on', endpoint=description) - endpoint.listen(factory) + self.log.info('trying to listen on', endpoint=description) + deferred: Deferred[IListeningPort] = endpoint.listen(factory) + deferred.addCallback(self._on_listen_success, description) + + def _on_listen_success(self, listening_port: IListeningPort, description: str) -> None: + """Callback to be called when listening to an endpoint succeeds.""" + self.log.info('success listening on', endpoint=description) + address = listening_port.getHost() + + if not isinstance(address, (IPv4Address, IPv6Address)): + self.log.error(f'unhandled address type for endpoint "{description}": {str(type(address))}') + return + + self._listen_addresses.append(address) - # XXX: endpoint: IStreamServerEndpoint does not intrinsically have a port, but in practice all concrete cases - # that we have will have a _port attribute - port = getattr(endpoint, '_port', None) assert self.manager is not None - if self.manager.hostname and port is not None: - proto, _, _ = description.partition(':') - address = '{}://{}:{}'.format(proto, self.manager.hostname, port) - assert self.manager.my_peer is not None - self.manager.my_peer.entrypoints.append(address) + if self.manager.hostname: + self._add_hostname_entrypoint(self.manager.hostname, address) - return endpoint + def update_hostname_entrypoints(self, *, old_hostname: str | None, new_hostname: str) -> None: + """Add new hostname entrypoints according to the listen addresses, and remove any old entrypoint.""" + assert self.manager is not None + for address in self._listen_addresses: + if old_hostname is not None: + old_address_str = self._get_hostname_address_str(old_hostname, address) + if old_address_str in self.my_peer.entrypoints: + self.my_peer.entrypoints.remove(old_address_str) + + self._add_hostname_entrypoint(new_hostname, address) + + def _add_hostname_entrypoint(self, hostname: str, address: IPv4Address | IPv6Address) -> None: + hostname_address_str = self._get_hostname_address_str(hostname, address) + self.my_peer.entrypoints.append(hostname_address_str) + + @staticmethod + def 
_get_hostname_address_str(hostname: str, address: IPv4Address | IPv6Address) -> str: + return '{}://{}:{}'.format(address.type, hostname, address.port).lower() def get_connection_to_drop(self, protocol: HathorProtocol) -> HathorProtocol: """ When there are duplicate connections, determine which one should be dropped. @@ -796,3 +823,9 @@ def _sync_rotate_if_needed(self, *, force: bool = False) -> None: for peer_id in info.to_enable: self.connected_peers[peer_id].enable_sync() + + def reload_entrypoints_and_connections(self) -> None: + """Kill all connections and reload entrypoints from the original peer config file.""" + self.log.warn('Killing all connections and resetting entrypoints...') + self.disconnect_all_peers(force=True) + self.my_peer.reload_entrypoints_from_source_file() diff --git a/hathor/p2p/peer_id.py b/hathor/p2p/peer_id.py index 678111f1c..1ab1ae58e 100644 --- a/hathor/p2p/peer_id.py +++ b/hathor/p2p/peer_id.py @@ -14,6 +14,7 @@ import base64 import hashlib +import json from enum import Enum from math import inf from typing import TYPE_CHECKING, Any, Optional, cast @@ -24,6 +25,7 @@ from cryptography.hazmat.primitives import hashes, serialization from cryptography.hazmat.primitives.asymmetric import padding, rsa from OpenSSL.crypto import X509, PKey +from structlog import get_logger from twisted.internet.interfaces import ISSLTransport from twisted.internet.ssl import Certificate, CertificateOptions, TLSVersion, trustRootFromCertificates @@ -35,6 +37,8 @@ if TYPE_CHECKING: from hathor.p2p.protocol import HathorProtocol # noqa: F401 +logger = get_logger() + class InvalidPeerIdException(Exception): pass @@ -64,8 +68,10 @@ class PeerId: retry_attempts: int # how many retries were made last_seen: float # last time this peer was seen flags: set[str] + source_file: str | None def __init__(self, auto_generate_keys: bool = True) -> None: + self._log = logger.new() self._settings = get_global_settings() self.id = None self.private_key = None @@ -78,6 +84,7 @@ def __init__(self, auto_generate_keys: bool = True) -> None: self.last_seen = inf self.flags = set() self._certificate_options: Optional[CertificateOptions] = None + self.source_file = None if auto_generate_keys: self.generate_keys() @@ -159,9 +166,17 @@ def verify_signature(self, signature: bytes, data: bytes) -> bool: else: return True + @classmethod + def create_from_json_path(cls, path: str) -> 'PeerId': + """Create a new PeerId from a JSON file.""" + data = json.load(open(path, 'r')) + peer = PeerId.create_from_json(data) + peer.source_file = path + return peer + @classmethod def create_from_json(cls, data: dict[str, Any]) -> 'PeerId': - """ Create a new PeerId from a JSON. + """ Create a new PeerId from JSON data. It is used both to load a PeerId from disk and to create a PeerId from a peer connection. 
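For reference, an illustrative expansion of the _get_hostname_address_str helper added above, showing the entrypoint string built for a listening address (the hostname is a placeholder):

from twisted.internet.address import IPv4Address

address = IPv4Address('TCP', '0.0.0.0', 40403)
entrypoint = '{}://{}:{}'.format(address.type, 'node.example.com', address.port).lower()
assert entrypoint == 'tcp://node.example.com:40403'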
@@ -408,3 +423,20 @@ def validate_certificate(self, protocol: 'HathorProtocol') -> bool: return False return True + + def reload_entrypoints_from_source_file(self) -> None: + """Update this PeerId's entrypoints from the json file.""" + if not self.source_file: + raise Exception('Trying to reload entrypoints but no peer config file was provided.') + + new_peer_id = PeerId.create_from_json_path(self.source_file) + + if new_peer_id.id != self.id: + self._log.error( + 'Ignoring peer id file update because the peer_id does not match.', + current_peer_id=self.id, + new_peer_id=new_peer_id.id, + ) + return + + self.entrypoints = new_peer_id.entrypoints diff --git a/hathor/p2p/sync_v1/agent.py b/hathor/p2p/sync_v1/agent.py index 110514a83..cf395e4e3 100644 --- a/hathor/p2p/sync_v1/agent.py +++ b/hathor/p2p/sync_v1/agent.py @@ -19,7 +19,7 @@ from weakref import WeakSet from structlog import get_logger -from twisted.internet.defer import Deferred, inlineCallbacks +from twisted.internet.defer import CancelledError, Deferred, inlineCallbacks from twisted.internet.interfaces import IDelayedCall from hathor.conf.get_settings import get_global_settings @@ -603,7 +603,6 @@ def handle_data(self, payload: str) -> None: return assert tx is not None - assert tx.hash is not None self.log.debug('tx received from peer', tx=tx.hash_hex, peer=self.protocol.get_peer_id()) @@ -612,7 +611,6 @@ def handle_data(self, payload: str) -> None: # Will it reduce peer reputation score? return tx.storage = self.protocol.node.tx_storage - assert tx.hash is not None key = self.get_data_key(tx.hash) deferred = self.deferred_by_key.pop(key, None) @@ -630,7 +628,7 @@ def handle_data(self, payload: str) -> None: self.log.info('tx received in real time from peer', tx=tx.hash_hex, peer=self.protocol.get_peer_id()) # If we have not requested the data, it is a new transaction being propagated # in the network, thus, we propagate it as well. - result = self.manager.on_new_tx(tx, conn=self.protocol, propagate_to_peers=True) + result = self.manager.on_new_tx(tx, propagate_to_peers=True) self.update_received_stats(tx, result) def update_received_stats(self, tx: 'BaseTransaction', result: bool) -> None: @@ -674,7 +672,6 @@ def on_tx_success(self, tx: 'BaseTransaction') -> 'BaseTransaction': # the parameter of the second callback is the return of the first # so I need to return the same tx to guarantee that all peers will receive it if tx: - assert tx.hash is not None if self.manager.tx_storage.transaction_exists(tx.hash): self.manager.tx_storage.compare_bytes_with_local_tx(tx) success = True @@ -685,12 +682,13 @@ def on_tx_success(self, tx: 'BaseTransaction') -> 'BaseTransaction': self.update_received_stats(tx, success) return tx - def on_get_data_failed(self, reason: 'Failure', hash_bytes: bytes) -> None: + def on_get_data_failed(self, failure: 'Failure', hash_bytes: bytes) -> None: """ Method called when get_data deferred fails. We need this errback because otherwise the sync crashes when the deferred is canceled. We should just log a warning because it will continue the sync and will try to get this tx again. 
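The hunk that follows downgrades cancellation errors to debug logging in this errback. A runnable sketch of the same pattern (editorial; plain print stands in for the structlog logger):

from twisted.internet.defer import CancelledError, Deferred
from twisted.python.failure import Failure

def on_get_data_failed(failure: Failure) -> None:
    # Cancellations are expected (e.g. on retries), so they only warrant debug logging.
    level = 'debug' if isinstance(failure.value, CancelledError) else 'warn'
    print(level, 'failed to download tx', failure.value)

d: Deferred = Deferred()
d.addErrback(on_get_data_failed)
d.cancel()  # fires the errback with a CancelledError wrapped in a Failure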
""" - self.log.warn('failed to download tx', tx=hash_bytes.hex(), reason=reason) + log_func = self.log.debug if isinstance(failure.value, CancelledError) else self.log.warn + log_func('failed to download tx', tx=hash_bytes.hex(), reason=failure) def is_sync_enabled(self) -> bool: """Return True if sync is enabled for this connection.""" diff --git a/hathor/p2p/sync_v1/downloader.py b/hathor/p2p/sync_v1/downloader.py index 670b1133a..d8b3c12cf 100644 --- a/hathor/p2p/sync_v1/downloader.py +++ b/hathor/p2p/sync_v1/downloader.py @@ -19,6 +19,7 @@ from structlog import get_logger from twisted.internet import defer from twisted.internet.defer import Deferred +from twisted.python.failure import Failure from hathor.conf.get_settings import get_global_settings from hathor.transaction.storage.exceptions import TransactionDoesNotExist @@ -238,15 +239,14 @@ def on_deferred_timeout(self, result: Any, timeout: int, *, tx_id: bytes) -> Non """ self.retry(tx_id) - def on_error(self, result: Any) -> None: + def on_error(self, failure: Failure) -> None: """ Errback for downloading deferred. """ - self.log.error('failed to download tx', err=result) + self.log.error('failed to download tx', err=failure, traceback=failure.getTraceback()) def on_new_tx(self, tx: 'BaseTransaction') -> None: """ This is called when a new transaction arrives. """ - assert tx.hash is not None self.log.debug('new tx/block', tx=tx.hash_hex) details = self.pending_transactions.get(tx.hash, None) diff --git a/hathor/p2p/sync_v2/agent.py b/hathor/p2p/sync_v2/agent.py index 8382cdefc..780e84f41 100644 --- a/hathor/p2p/sync_v2/agent.py +++ b/hathor/p2p/sync_v2/agent.py @@ -142,7 +142,8 @@ def __init__(self, protocol: 'HathorProtocol', reactor: Reactor) -> None: # Saves if I am in the middle of a mempool sync # we don't execute any sync while in the middle of it self.mempool_manager = SyncMempoolManager(self) - self._receiving_tips: Optional[list[bytes]] = None + self._receiving_tips: Optional[list[VertexId]] = None + self.max_receiving_tips: int = self._settings.MAX_MEMPOOL_RECEIVING_TIPS # Cache for get_tx calls self._get_tx_cache: OrderedDict[bytes, BaseTransaction] = OrderedDict() @@ -336,7 +337,6 @@ def run_sync_mempool(self) -> Generator[Any, Any, None]: def get_my_best_block(self) -> _HeightInfo: """Return my best block info.""" bestblock = self.tx_storage.get_best_block() - assert bestblock.hash is not None meta = bestblock.get_metadata() assert meta.validation.is_fully_connected() return _HeightInfo(height=bestblock.get_height(), id=bestblock.hash) @@ -476,7 +476,13 @@ def handle_tips(self, payload: str) -> None: data = json.loads(payload) data = [bytes.fromhex(x) for x in data] # filter-out txs we already have - self._receiving_tips.extend(tx_id for tx_id in data if not self.partial_vertex_exists(tx_id)) + try: + self._receiving_tips.extend(VertexId(tx_id) for tx_id in data if not self.partial_vertex_exists(tx_id)) + except ValueError: + self.protocol.send_error_and_close_connection('Invalid trasaction ID received') + # XXX: it's OK to do this *after* the extend because the payload is limited by the line protocol + if len(self._receiving_tips) > self.max_receiving_tips: + self.protocol.send_error_and_close_connection(f'Too many tips: {len(self._receiving_tips)}') def handle_tips_end(self, _payload: str) -> None: """ Handle a TIPS-END message. 
@@ -602,11 +608,11 @@ def on_block_complete(self, blk: Block, vertex_list: list[BaseTransaction]) -> G """This method is called when a block and its transactions are downloaded.""" # Note: Any vertex and block could have already been added by another concurrent syncing peer. for tx in vertex_list: - if not self.tx_storage.transaction_exists(not_none(tx.hash)): + if not self.tx_storage.transaction_exists(tx.hash): self.manager.on_new_tx(tx, propagate_to_peers=False, fails_silently=False) yield deferLater(self.reactor, 0, lambda: None) - if not self.tx_storage.transaction_exists(not_none(blk.hash)): + if not self.tx_storage.transaction_exists(blk.hash): self.manager.on_new_tx(blk, propagate_to_peers=False, fails_silently=False) def get_peer_block_hashes(self, heights: list[int]) -> Deferred[list[_HeightInfo]]: @@ -767,7 +773,6 @@ def handle_blocks(self, payload: str) -> None: # Not a block. Punish peer? return blk.storage = self.tx_storage - assert blk.hash is not None assert self._blk_streaming_client is not None self._blk_streaming_client.handle_blocks(blk) @@ -832,7 +837,7 @@ def handle_get_best_block(self, _payload: str) -> None: meta = best_block.get_metadata() assert meta.validation.is_fully_connected() payload = BestBlockPayload( - block=not_none(best_block.hash), + block=best_block.hash, height=not_none(meta.height), ) self.send_message(ProtocolMessages.BEST_BLOCK, payload.json()) @@ -855,8 +860,8 @@ def start_transactions_streaming(self, partial_blocks: list[Block]) -> Deferred[ limit=self.DEFAULT_STREAMING_LIMIT) start_from: list[bytes] = [] - first_block_hash = not_none(partial_blocks[0].hash) - last_block_hash = not_none(partial_blocks[-1].hash) + first_block_hash = partial_blocks[0].hash + last_block_hash = partial_blocks[-1].hash self.log.info('requesting transactions streaming', start_from=[x.hex() for x in start_from], first_block=first_block_hash.hex(), @@ -871,8 +876,8 @@ def resume_transactions_streaming(self) -> Deferred[StreamEnd]: partial_blocks = self._tx_streaming_client.partial_blocks[idx:] assert partial_blocks start_from = list(self._tx_streaming_client._waiting_for) - first_block_hash = not_none(partial_blocks[0].hash) - last_block_hash = not_none(partial_blocks[-1].hash) + first_block_hash = partial_blocks[0].hash + last_block_hash = partial_blocks[-1].hash self.log.info('requesting transactions streaming', start_from=[x.hex() for x in start_from], first_block=first_block_hash.hex(), @@ -947,8 +952,6 @@ def handle_get_transactions_bfs(self, payload: str) -> None: self.log.debug('requested start_from_hash not found', start_from_hash=start_from_hash.hex()) self.send_message(ProtocolMessages.NOT_FOUND, start_from_hash.hex()) return - assert tx.hash is not None - assert first_block.hash is not None meta = tx.get_metadata() if meta.first_block != first_block.hash: self.log.debug('requested start_from not confirmed by first_block', @@ -1016,7 +1019,6 @@ def handle_transaction(self, payload: str) -> None: # tx_bytes = bytes.fromhex(payload) tx_bytes = base64.b64decode(payload) tx = tx_or_block_from_bytes(tx_bytes) - assert tx.hash is not None if not isinstance(tx, Transaction): self.log.warn('not a transaction', hash=tx.hash_hex) # Not a transaction. Punish peer? @@ -1063,7 +1065,6 @@ def get_data(self, tx_id: bytes, origin: str) -> Deferred[BaseTransaction]: def _on_get_data(self, tx: BaseTransaction, origin: str) -> None: """ Called when a requested tx is received. 
""" - assert tx.hash is not None deferred = self._deferred_txs.pop(tx.hash, None) if deferred is None: # Peer sent the wrong transaction?! @@ -1141,14 +1142,12 @@ def handle_data(self, payload: str) -> None: return assert tx is not None - assert tx.hash is not None if self.protocol.node.tx_storage.get_genesis(tx.hash): # We just got the data of a genesis tx/block. What should we do? # Will it reduce peer reputation score? return tx.storage = self.protocol.node.tx_storage - assert tx.hash is not None if self.partial_vertex_exists(tx.hash): # transaction already added to the storage, ignore it diff --git a/hathor/p2p/sync_v2/blockchain_streaming_client.py b/hathor/p2p/sync_v2/blockchain_streaming_client.py index 3635396b9..a08b305de 100644 --- a/hathor/p2p/sync_v2/blockchain_streaming_client.py +++ b/hathor/p2p/sync_v2/blockchain_streaming_client.py @@ -104,7 +104,6 @@ def handle_blocks(self, blk: Block) -> None: # return # Check for repeated blocks. - assert blk.hash is not None is_duplicated = False if self.partial_vertex_exists(blk.hash): # We reached a block we already have. Skip it. diff --git a/hathor/p2p/sync_v2/mempool.py b/hathor/p2p/sync_v2/mempool.py index b914804e9..d4eb7bfe6 100644 --- a/hathor/p2p/sync_v2/mempool.py +++ b/hathor/p2p/sync_v2/mempool.py @@ -133,6 +133,5 @@ def _next_missing_dep(self, tx: BaseTransaction) -> Optional[bytes]: def _add_tx(self, tx: BaseTransaction) -> None: """Add tx to the DAG.""" - assert tx.hash is not None self.missing_tips.discard(tx.hash) self.manager.on_new_tx(tx) diff --git a/hathor/p2p/sync_v2/streamers.py b/hathor/p2p/sync_v2/streamers.py index 22dbd8360..df11131ba 100644 --- a/hathor/p2p/sync_v2/streamers.py +++ b/hathor/p2p/sync_v2/streamers.py @@ -170,7 +170,6 @@ def send_next(self) -> None: cur = self.current_block assert cur is not None - assert cur.hash is not None meta = cur.get_metadata() if meta.voided_by: @@ -251,7 +250,7 @@ def get_iter(self) -> Iterator[BaseTransaction]: root = self.start_from skip_root = False self.log.debug('iterating over transactions from block', - block=not_none(self.current_block.hash).hex(), + block=self.current_block.hash.hex(), height=self.current_block.get_height(), start_from=self.start_from, skip_root=skip_root) @@ -287,7 +286,6 @@ def send_next(self) -> None: return assert isinstance(cur, Transaction) - assert cur.hash is not None cur_metadata = cur.get_metadata() if cur_metadata.first_block is None: diff --git a/hathor/p2p/sync_v2/transaction_streaming_client.py b/hathor/p2p/sync_v2/transaction_streaming_client.py index b46ea546b..d1b068222 100644 --- a/hathor/p2p/sync_v2/transaction_streaming_client.py +++ b/hathor/p2p/sync_v2/transaction_streaming_client.py @@ -28,7 +28,6 @@ from hathor.transaction import BaseTransaction from hathor.transaction.exceptions import HathorError, TxValidationError from hathor.types import VertexId -from hathor.util import not_none if TYPE_CHECKING: from hathor.p2p.sync_v2.agent import NodeBlockSync @@ -117,7 +116,6 @@ def handle_transaction(self, tx: BaseTransaction) -> None: self.fails(TooManyVerticesReceivedError()) return - assert tx.hash is not None self.log.debug('tx received', tx_id=tx.hash.hex()) self._queue.append(tx) assert len(self._queue) <= self._tx_max_quantity @@ -141,7 +139,7 @@ def process_queue(self) -> Generator[Any, Any, None]: self._is_processing = True try: tx = self._queue.popleft() - self.log.debug('processing tx', tx_id=not_none(tx.hash).hex()) + self.log.debug('processing tx', tx_id=tx.hash.hex()) yield self._process_transaction(tx) finally: 
self._is_processing = False @@ -151,7 +149,6 @@ def process_queue(self) -> Generator[Any, Any, None]: @inlineCallbacks def _process_transaction(self, tx: BaseTransaction) -> Generator[Any, Any, None]: """Process transaction.""" - assert tx.hash is not None # Run basic verification. if not tx.is_genesis: diff --git a/hathor/p2p/utils.py b/hathor/p2p/utils.py index 66f1bda37..4e2935a2e 100644 --- a/hathor/p2p/utils.py +++ b/hathor/p2p/utils.py @@ -33,18 +33,18 @@ from hathor.transaction.genesis import get_representation_for_all_genesis -def discover_hostname() -> Optional[str]: - """ Try to discover your hostname. It is a synchonous operation and +def discover_hostname(timeout: float | None = None) -> Optional[str]: + """ Try to discover your hostname. It is a synchronous operation and should not be called from twisted main loop. """ - return discover_ip_ipify() + return discover_ip_ipify(timeout) -def discover_ip_ipify() -> Optional[str]: +def discover_ip_ipify(timeout: float | None = None) -> Optional[str]: """ Try to discover your IP address using ipify's api. - It is a synchonous operation and should not be called from twisted main loop. + It is a synchronous operation and should not be called from twisted main loop. """ - response = requests.get('https://api.ipify.org') + response = requests.get('https://api.ipify.org', timeout=timeout) if response.ok: # It may be either an ipv4 or ipv6 in string format. ip = response.text diff --git a/hathor/profiler/resources/cpu_profiler.py b/hathor/profiler/resources/cpu_profiler.py index 88e87329b..f07b2bd8a 100644 --- a/hathor/profiler/resources/cpu_profiler.py +++ b/hathor/profiler/resources/cpu_profiler.py @@ -102,7 +102,7 @@ def render_OPTIONS(self, request: Request) -> int: CPUProfilerResource.openapi = { - '/profiler': { + '/top': { 'x-visibility': 'private', 'get': { 'operationId': 'cpu-profiler', diff --git a/hathor/reward_lock/reward_lock.py b/hathor/reward_lock/reward_lock.py index 45f252d08..446afb855 100644 --- a/hathor/reward_lock/reward_lock.py +++ b/hathor/reward_lock/reward_lock.py @@ -42,16 +42,16 @@ def get_spent_reward_locked_info(tx: 'Transaction', storage: 'VertexStorageProto """Check if any input block reward is locked, returning the locked information if any, or None if they are all unlocked.""" from hathor.transaction.transaction import RewardLockedInfo + best_height = get_minimum_best_height(storage) for blk in iter_spent_rewards(tx, storage): - assert blk.hash is not None - needed_height = _spent_reward_needed_height(blk, storage) + needed_height = _spent_reward_needed_height(blk, best_height) if needed_height > 0: return RewardLockedInfo(blk.hash, needed_height) return None -def _spent_reward_needed_height(block: Block, storage: 'VertexStorageProtocol') -> int: - """ Returns height still needed to unlock this `block` reward: 0 means it's unlocked.""" +def get_minimum_best_height(storage: 'VertexStorageProtocol') -> int: + """Return the height of the current best block that shall be used for `min_height` verification.""" import math # omitting timestamp to get the current best block, this will usually hit the cache instead of being slow @@ -62,6 +62,11 @@ def _spent_reward_needed_height(block: Block, storage: 'VertexStorageProtocol') blk = storage.get_block(tip) best_height = min(best_height, blk.get_height()) assert isinstance(best_height, int) + return best_height + + +def _spent_reward_needed_height(block: Block, best_height: int) -> int: + """ Returns height still needed to unlock this `block` reward: 0 means it's 
unlocked.""" spent_height = block.get_height() spend_blocks = best_height - spent_height settings = get_global_settings() diff --git a/hathor/simulator/patches.py b/hathor/simulator/patches.py index 3c056249e..95e9d4ebf 100644 --- a/hathor/simulator/patches.py +++ b/hathor/simulator/patches.py @@ -26,7 +26,6 @@ class SimulatorVertexVerifier(VertexVerifier): @classmethod def verify_pow(cls, vertex: BaseTransaction, *, override_weight: Optional[float] = None) -> None: - assert vertex.hash is not None logger.new().debug('Skipping VertexVerifier.verify_pow() for simulator') diff --git a/hathor/simulator/simulator.py b/hathor/simulator/simulator.py index 6155df3b8..5eb4e20e0 100644 --- a/hathor/simulator/simulator.py +++ b/hathor/simulator/simulator.py @@ -253,7 +253,7 @@ def _build_vertex_verifiers( """ return VertexVerifiers.create( settings=settings, - vertex_verifier=SimulatorVertexVerifier(settings=settings, daa=daa), + vertex_verifier=SimulatorVertexVerifier(settings=settings), daa=daa, feature_service=feature_service, ) diff --git a/hathor/simulator/tx_generator.py b/hathor/simulator/tx_generator.py index 8c977c870..ead648da5 100644 --- a/hathor/simulator/tx_generator.py +++ b/hathor/simulator/tx_generator.py @@ -13,22 +13,25 @@ # limitations under the License. from collections import deque -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Callable, TypeAlias from structlog import get_logger from hathor.conf.get_settings import get_global_settings from hathor.simulator.utils import NoCandidatesError, gen_new_double_spending, gen_new_tx +from hathor.transaction import Transaction from hathor.transaction.exceptions import RewardLocked +from hathor.types import VertexId from hathor.util import Random from hathor.wallet.exceptions import InsufficientFunds if TYPE_CHECKING: from hathor.manager import HathorManager - from hathor.transaction import Transaction logger = get_logger() +GenTxFunction: TypeAlias = Callable[['HathorManager', str, int], Transaction] + class RandomTransactionGenerator: """ Generates random transactions without mining. It is supposed to be used @@ -38,7 +41,8 @@ class RandomTransactionGenerator: MAX_LATEST_TRANSACTIONS_LEN = 10 def __init__(self, manager: 'HathorManager', rng: Random, *, - rate: float, hashpower: float, ignore_no_funds: bool = False): + rate: float, hashpower: float, ignore_no_funds: bool = False, + custom_gen_new_tx: GenTxFunction | None = None): """ :param: rate: Number of transactions per second :param: hashpower: Number of hashes per second @@ -58,11 +62,16 @@ def __init__(self, manager: 'HathorManager', rng: Random, *, self.delayedcall = None self.log = logger.new() self.rng = rng + self.gen_new_tx: GenTxFunction + if custom_gen_new_tx is not None: + self.gen_new_tx = custom_gen_new_tx + else: + self.gen_new_tx = gen_new_tx # Most recent transactions generated here. # The lowest index has the most recent transaction. 
self.transactions_found: int = 0 - self.latest_transactions: deque[Transaction] = deque() + self.latest_transactions: deque[VertexId] = deque() self.double_spending_only = False @@ -115,7 +124,7 @@ def new_tx_step1(self): if not self.double_spending_only: try: - tx = gen_new_tx(self.manager, address, value) + tx = self.gen_new_tx(self.manager, address, value) except (InsufficientFunds, RewardLocked): self.delayedcall = self.clock.callLater(0, self.schedule_next_transaction) return diff --git a/hathor/simulator/utils.py b/hathor/simulator/utils.py index 863bbfbdb..792380ae2 100644 --- a/hathor/simulator/utils.py +++ b/hathor/simulator/utils.py @@ -20,7 +20,7 @@ from hathor.types import Address, VertexId -def gen_new_tx(manager: HathorManager, address: str, value: int, verify: bool = True) -> Transaction: +def gen_new_tx(manager: HathorManager, address: str, value: int) -> Transaction: """ Generate and return a new transaction. @@ -28,7 +28,6 @@ def gen_new_tx(manager: HathorManager, address: str, value: int, verify: bool = manager: the HathorManager to generate the transaction for address: an address for the transaction's output value: a value for the transaction's output - verify: whether to verify the generated transaction Returns: the generated transaction. """ @@ -48,8 +47,6 @@ def gen_new_tx(manager: HathorManager, address: str, value: int, verify: bool = tx.weight = 1 tx.parents = manager.get_new_tx_parents(tx.timestamp) manager.cpu_mining_service.resolve(tx) - if verify: - manager.verification_service.verify(tx) return tx @@ -111,7 +108,6 @@ def add_new_block( if signal_bits is not None: block.signal_bits = signal_bits manager.cpu_mining_service.resolve(block) - manager.verification_service.validate_full(block) if propagate: manager.propagate_tx(block, fails_silently=False) if advance_clock: diff --git a/hathor/stratum/stratum.py b/hathor/stratum/stratum.py index 2b9dd8322..78a9f29ae 100644 --- a/hathor/stratum/stratum.py +++ b/hathor/stratum/stratum.py @@ -523,11 +523,10 @@ def handle_submit(self, params: dict, msgid: Optional[str]) -> None: else: tx.nonce = int(params['nonce'], 16) tx.update_hash() - assert tx.hash is not None self.log.debug('share received', block=tx, block_base=block_base.hex(), block_base_hash=block_base_hash.hex()) - verifier = VertexVerifier(settings=self._settings, daa=self.manager.daa) + verifier = VertexVerifier(settings=self._settings) try: verifier.verify_pow(tx, override_weight=job.weight) diff --git a/hathor/sysctl/__init__.py b/hathor/sysctl/__init__.py index af9d30e17..a73637650 100644 --- a/hathor/sysctl/__init__.py +++ b/hathor/sysctl/__init__.py @@ -13,6 +13,7 @@ # limitations under the License. 
from hathor.sysctl.core.manager import HathorManagerSysctl +from hathor.sysctl.feature_activation.manager import FeatureActivationSysctl from hathor.sysctl.p2p.manager import ConnectionsManagerSysctl from hathor.sysctl.sysctl import Sysctl from hathor.sysctl.websocket.manager import WebsocketManagerSysctl @@ -22,4 +23,5 @@ 'ConnectionsManagerSysctl', 'HathorManagerSysctl', 'WebsocketManagerSysctl', + 'FeatureActivationSysctl', ] diff --git a/hathor/sysctl/feature_activation/__init__.py b/hathor/sysctl/feature_activation/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/hathor/sysctl/feature_activation/manager.py b/hathor/sysctl/feature_activation/manager.py new file mode 100644 index 000000000..2649d26b8 --- /dev/null +++ b/hathor/sysctl/feature_activation/manager.py @@ -0,0 +1,72 @@ +# Copyright 2024 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from hathor.feature_activation.bit_signaling_service import BitSignalingService +from hathor.feature_activation.feature import Feature +from hathor.sysctl.sysctl import Sysctl + + +class FeatureActivationSysctl(Sysctl): + def __init__(self, bit_signaling_service: BitSignalingService) -> None: + super().__init__() + self._bit_signaling_service = bit_signaling_service + + self.register( + path='supported_features', + getter=self.get_support_features, + setter=None, + ) + self.register( + path='not_supported_features', + getter=self.get_not_support_features, + setter=None, + ) + self.register( + path='signaling_features', + getter=self.get_signaling_features, + setter=None, + ) + self.register( + path='add_support', + getter=None, + setter=self.add_feature_support, + ) + self.register( + path='remove_support', + getter=None, + setter=self.remove_feature_support, + ) + + def get_support_features(self) -> list[str]: + """Get a list of feature names with enabled support.""" + return [feature.value for feature in self._bit_signaling_service.get_support_features()] + + def get_not_support_features(self) -> list[str]: + """Get a list of feature names with disabled support.""" + return [feature.value for feature in self._bit_signaling_service.get_not_support_features()] + + def add_feature_support(self, *features: str) -> None: + """Explicitly add support for a feature by enabling its signaling bit.""" + for feature in features: + self._bit_signaling_service.add_feature_support(Feature[feature]) + + def remove_feature_support(self, *features: str) -> None: + """Explicitly remove support for a feature by disabling its signaling bit.""" + for feature in features: + self._bit_signaling_service.remove_feature_support(Feature[feature]) + + def get_signaling_features(self) -> list[str]: + """Get a list of feature names that are currently in a signaling state.""" + features = self._bit_signaling_service.get_best_block_signaling_features().keys() + return [feature.value for feature in features] diff --git a/hathor/sysctl/p2p/manager.py b/hathor/sysctl/p2p/manager.py index ab6ef5902..e821039bd 100644 --- 
a/hathor/sysctl/p2p/manager.py +++ b/hathor/sysctl/p2p/manager.py @@ -16,9 +16,12 @@ from hathor.p2p.manager import ConnectionsManager from hathor.p2p.sync_version import SyncVersion +from hathor.p2p.utils import discover_hostname from hathor.sysctl.exception import SysctlException from hathor.sysctl.sysctl import Sysctl, signal_handler_safe +AUTO_HOSTNAME_TIMEOUT_SECONDS: float = 5 + def parse_text(text: str) -> list[str]: """Parse text per line skipping empty lines and comments.""" @@ -103,6 +106,21 @@ def __init__(self, connections: ConnectionsManager) -> None: None, self.set_kill_connection, ) + self.register( + 'hostname', + self.get_hostname, + self.set_hostname, + ) + self.register( + 'refresh_auto_hostname', + None, + self.refresh_auto_hostname, + ) + self.register( + 'reload_entrypoints_and_connections', + None, + self.reload_entrypoints_and_connections, + ) def set_force_sync_rotate(self) -> None: """Force a sync rotate.""" @@ -217,3 +235,32 @@ def set_kill_connection(self, peer_id: str, force: bool = False) -> None: self.log.warn('Killing connection', peer_id=peer_id) raise SysctlException('peer-id is not connected') conn.disconnect(force=force) + + def get_hostname(self) -> str | None: + """Return the configured hostname.""" + assert self.connections.manager is not None + return self.connections.manager.hostname + + def set_hostname(self, hostname: str) -> None: + """Set the hostname and reset all connections.""" + assert self.connections.manager is not None + self.connections.manager.set_hostname_and_reset_connections(hostname) + + def refresh_auto_hostname(self) -> None: + """ + Automatically discover the hostname and set it, if it's found. This operation blocks the event loop. + Then, reset all connections. + """ + assert self.connections.manager is not None + try: + hostname = discover_hostname(timeout=AUTO_HOSTNAME_TIMEOUT_SECONDS) + except Exception as e: + self.log.error(f'Could not refresh hostname. Error: {str(e)}') + return + + if hostname: + self.connections.manager.set_hostname_and_reset_connections(hostname) + + def reload_entrypoints_and_connections(self) -> None: + """Kill all connections and reload entrypoints from the peer config file.""" + self.connections.reload_entrypoints_and_connections() diff --git a/hathor/sysctl/sysctl.py b/hathor/sysctl/sysctl.py index 79bf3c5b0..28339365d 100644 --- a/hathor/sysctl/sysctl.py +++ b/hathor/sysctl/sysctl.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import Any, Callable, Iterator, NamedTuple, Optional +from typing import Any, Callable, Iterator, NamedTuple, Optional, ParamSpec, TypeVar from pydantic import validate_arguments from structlog import get_logger @@ -21,16 +21,18 @@ Getter = Callable[[], Any] Setter = Callable[..., None] +P = ParamSpec('P') +T = TypeVar('T') logger = get_logger() -def signal_handler_safe(f): +def signal_handler_safe(f: Callable[P, T]) -> Callable[P, T]: """Decorator to mark methods as signal handler safe. It should only be used if that method can be executed during a signal handling. 
Notice that a signal handling can pause the code execution at any point and the execution will resume after.""" - f._signal_handler_safe = True + f._signal_handler_safe = True # type: ignore[attr-defined] return f diff --git a/hathor/transaction/aux_pow.py b/hathor/transaction/aux_pow.py index c6772ac88..103f6997c 100644 --- a/hathor/transaction/aux_pow.py +++ b/hathor/transaction/aux_pow.py @@ -18,6 +18,8 @@ logger = get_logger() +MAX_MERKLE_PATH_COUNT = 100 + class BitcoinAuxPow(NamedTuple): header_head: bytes # 36 bytes @@ -96,8 +98,11 @@ def from_bytes(cls, b: bytes) -> 'BitcoinAuxPow': coinbase_head = read_bytes(a) coinbase_tail = read_bytes(a) c = read_varint(a) + if c > MAX_MERKLE_PATH_COUNT: + raise ValueError(f'invalid merkle path count: {c} > {MAX_MERKLE_PATH_COUNT}') merkle_path = [] for _ in range(c): + assert len(a) >= 32 merkle_path.append(bytes(a[:32])) del a[:32] header_tail = read_nbytes(a, 12) diff --git a/hathor/transaction/base_transaction.py b/hathor/transaction/base_transaction.py index 958c59c05..56898d6f9 100644 --- a/hathor/transaction/base_transaction.py +++ b/hathor/transaction/base_transaction.py @@ -168,7 +168,7 @@ def __init__(self, self.outputs = outputs or [] self.parents = parents or [] self.storage = storage - self.hash = hash # Stored as bytes. + self._hash: VertexId | None = hash # Stored as bytes. @classproperty def log(cls): @@ -253,7 +253,7 @@ def __eq__(self, other: object) -> bool: """ if not isinstance(other, BaseTransaction): return NotImplemented - if self.hash and other.hash: + if self._hash and other._hash: return self.hash == other.hash return False @@ -265,7 +265,6 @@ def __bytes__(self) -> bytes: return self.get_struct() def __hash__(self) -> int: - assert self.hash is not None return hash(self.hash) @abstractmethod @@ -276,10 +275,19 @@ def calculate_height(self) -> int: def calculate_min_height(self) -> int: raise NotImplementedError + @property + def hash(self) -> VertexId: + assert self._hash is not None, 'Vertex hash must be initialized.' + return self._hash + + @hash.setter + def hash(self, value: VertexId) -> None: + self._hash = value + @property def hash_hex(self) -> str: """Return the current stored hash in hex string format""" - if self.hash is not None: + if self._hash is not None: return self.hash.hex() else: return '' @@ -332,7 +340,7 @@ def is_genesis(self) -> bool: :rtype: bool """ - if self.hash is None: + if self._hash is None: return False from hathor.transaction.genesis import is_genesis return is_genesis(self.hash, settings=self._settings) @@ -451,7 +459,7 @@ def can_validate_full(self) -> bool: """ Check if this transaction is ready to be fully validated, either all deps are full-valid or one is invalid. 
""" assert self.storage is not None - assert self.hash is not None + assert self._hash is not None if self.is_genesis: return True deps = self.get_all_dependencies() @@ -608,7 +616,6 @@ def get_metadata(self, *, force_reload: bool = False, use_storage: bool = True) else: metadata = getattr(self, '_metadata', None) if not metadata and use_storage and self.storage: - assert self.hash is not None metadata = self.storage.get_metadata(self.hash) self._metadata = metadata if not metadata: @@ -617,17 +624,18 @@ def get_metadata(self, *, force_reload: bool = False, use_storage: bool = True) # happens include generating new mining blocks and some tests height = self.calculate_height() if self.storage else None score = self.weight if self.is_genesis else 0 + min_height = 0 if self.is_genesis else None metadata = TransactionMetadata( - hash=self.hash, + hash=self._hash, accumulated_weight=self.weight, height=height, score=score, - min_height=0, + min_height=min_height ) self._metadata = metadata if not metadata.hash: - metadata.hash = self.hash + metadata.hash = self._hash metadata._tx_ref = weakref.ref(self) return metadata @@ -638,7 +646,7 @@ def reset_metadata(self) -> None: from hathor.transaction.transaction_metadata import ValidationState assert self.storage is not None score = self.weight if self.is_genesis else 0 - self._metadata = TransactionMetadata(hash=self.hash, + self._metadata = TransactionMetadata(hash=self._hash, score=score, accumulated_weight=self.weight) if self.is_genesis: @@ -706,8 +714,9 @@ def update_initial_metadata(self, *, save: bool = True) -> None: """ self._update_height_metadata() self._update_parents_children_metadata() - self._update_reward_lock_metadata() + self.update_reward_lock_metadata() self._update_feature_activation_bit_counts() + self._update_initial_accumulated_weight() if save: assert self.storage is not None self.storage.save_transaction(self, only_metadata=True) @@ -717,14 +726,17 @@ def _update_height_metadata(self) -> None: meta = self.get_metadata() meta.height = self.calculate_height() - def _update_reward_lock_metadata(self) -> None: + def update_reward_lock_metadata(self) -> None: """Update the txs/block min_height metadata.""" metadata = self.get_metadata() - metadata.min_height = self.calculate_min_height() + min_height = self.calculate_min_height() + if metadata.min_height is not None: + assert metadata.min_height == min_height + metadata.min_height = min_height def _update_parents_children_metadata(self) -> None: """Update the txs/block parent's children metadata.""" - assert self.hash is not None + assert self._hash is not None assert self.storage is not None for parent in self.get_parents(existing_only=True): @@ -742,6 +754,11 @@ def _update_feature_activation_bit_counts(self) -> None: # This method lazily calculates and stores the value in metadata self.get_feature_activation_bit_counts() + def _update_initial_accumulated_weight(self) -> None: + """Update the vertex initial accumulated_weight.""" + metadata = self.get_metadata() + metadata.accumulated_weight = self.weight + def update_timestamp(self, now: int) -> None: """Update this tx's timestamp @@ -792,7 +809,6 @@ def to_json(self, decode_script: bool = False, include_metadata: bool = False) - return data def to_json_extended(self) -> dict[str, Any]: - assert self.hash is not None assert self.storage is not None def serialize_output(tx: BaseTransaction, tx_out: TxOutput) -> dict[str, Any]: @@ -814,6 +830,13 @@ def serialize_output(tx: BaseTransaction, tx_out: TxOutput) -> dict[str, Any]: 
'parents': [], } + # A nano contract tx must be confirmed by one block at least + # to be considered "executed" + if meta.first_block is not None: + ret['first_block'] = meta.first_block.hex() + else: + ret['first_block'] = None + for parent in self.parents: ret['parents'].append(parent.hex()) @@ -824,7 +847,6 @@ def serialize_output(tx: BaseTransaction, tx_out: TxOutput) -> dict[str, Any]: tx2 = self.storage.get_transaction(tx_in.tx_id) tx2_out = tx2.outputs[tx_in.index] output = serialize_output(tx2, tx2_out) - assert tx2.hash is not None output['tx_id'] = tx2.hash_hex output['index'] = tx_in.index ret['inputs'].append(output) @@ -837,7 +859,7 @@ def serialize_output(tx: BaseTransaction, tx_out: TxOutput) -> dict[str, Any]: return ret - def clone(self, *, include_metadata: bool = True) -> 'BaseTransaction': + def clone(self, *, include_metadata: bool = True, include_storage: bool = True) -> 'BaseTransaction': """Return exact copy without sharing memory, including metadata if loaded. :return: Transaction or Block copy @@ -846,7 +868,8 @@ def clone(self, *, include_metadata: bool = True) -> 'BaseTransaction': if hasattr(self, '_metadata') and include_metadata: assert self._metadata is not None # FIXME: is this actually true or do we have to check if not None new_tx._metadata = self._metadata.clone() - new_tx.storage = self.storage + if include_storage: + new_tx.storage = self.storage return new_tx @abstractmethod diff --git a/hathor/transaction/resources/dashboard.py b/hathor/transaction/resources/dashboard.py index 47f366a8e..07add5d21 100644 --- a/hathor/transaction/resources/dashboard.py +++ b/hathor/transaction/resources/dashboard.py @@ -152,7 +152,8 @@ def render_GET(self, request): ], 'inputs': [], 'outputs': [], - 'tokens': [] + 'tokens': [], + 'first_block': None, }, { 'tx_id': ('00002b3be4e3876e67b5e090d76dcd71' @@ -166,7 +167,9 @@ def render_GET(self, request): ], 'inputs': [], 'outputs': [], - 'tokens': [] + 'tokens': [], + 'first_block': ('000005af290a55b079014a0be3246479' + 'e84eeb635f02010dbf3e5f3414a85bbb') } ], 'blocks': [ @@ -189,7 +192,9 @@ def render_GET(self, request): 'script': 'dqkUixvdsajkV6vO+9Jjgjbaheqn016IrA==' } ], - 'tokens': [] + 'tokens': [], + 'first_block': ('000005af290a55b079014a0be3246479' + 'e84eeb635f02010dbf3e5f3414a85bbb') }, { 'tx_id': ('00035e46a20d0ecbda0dc6fdcaa243e9' @@ -210,7 +215,9 @@ def render_GET(self, request): 'script': 'dqkUdNQbj29Md1xsAYinK+RsDJCCB7eIrA==' } ], - 'tokens': [] + 'tokens': [], + 'first_block': ('000005af290a55b079014a0be3246479' + 'e84eeb635f02010dbf3e5f3414a85bbb') }, { 'tx_id': ('000133cc80b625b1babbd454edc3474e' @@ -231,7 +238,8 @@ def render_GET(self, request): 'script': 'dqkU0AoLEAX+1b36s+VyaMc9bkj/5byIrA==' } ], - 'tokens': [] + 'tokens': [], + 'first_block': None } ] } diff --git a/hathor/transaction/resources/transaction.py b/hathor/transaction/resources/transaction.py index 8b755ae02..816185438 100644 --- a/hathor/transaction/resources/transaction.py +++ b/hathor/transaction/resources/transaction.py @@ -86,7 +86,6 @@ def get_tx_extra_data(tx: BaseTransaction, *, detail_tokens: bool = True) -> dic tx2 = tx.storage.get_transaction(tx_in.tx_id) tx2_out = tx2.outputs[tx_in.index] output = tx2_out.to_json(decode_script=True) - assert tx2.hash is not None output['tx_id'] = tx2.hash_hex output['index'] = tx_in.index @@ -466,6 +465,7 @@ def get_list_tx(self, request): ], 'tokens': [], 'height': 12345, + 'first_block': None }, { 'tx_id': ('00000b8792cb13e8adb51cc7d866541f' @@ -497,7 +497,9 @@ def get_list_tx(self, 
request): 'script': 'dqkUjjPg+zwG6JDe901I0ybQxcAPrAuIrA==' } ], - 'tokens': [] + 'tokens': [], + 'first_block': ('000005af290a55b079014a0be3246479' + 'e84eeb635f02010dbf3e5f3414a85bbb') } ], 'has_more': True diff --git a/hathor/transaction/storage/cache_storage.py b/hathor/transaction/storage/cache_storage.py index 8a1937a03..f8f058e5f 100644 --- a/hathor/transaction/storage/cache_storage.py +++ b/hathor/transaction/storage/cache_storage.py @@ -141,7 +141,6 @@ def _flush_to_storage(self, dirty_txs_copy: set[bytes]) -> None: self.store._save_transaction(tx) def remove_transaction(self, tx: BaseTransaction) -> None: - assert tx.hash is not None super().remove_transaction(tx) self.cache.pop(tx.hash, None) self.dirty_txs.discard(tx.hash) @@ -160,7 +159,6 @@ def get_all_genesis(self) -> set[BaseTransaction]: def _save_transaction(self, tx: BaseTransaction, *, only_metadata: bool = False) -> None: """Saves the transaction without modifying TimestampIndex entries (in superclass).""" - assert tx.hash is not None self._update_cache(tx) self.dirty_txs.add(tx.hash) @@ -179,7 +177,6 @@ def _update_cache(self, tx: BaseTransaction) -> None: If we need to evict a tx from cache and it's dirty, write it to disk immediately. """ - assert tx.hash is not None _tx = self.cache.get(tx.hash, None) if not _tx: if len(self.cache) >= self.capacity: diff --git a/hathor/transaction/storage/memory_storage.py b/hathor/transaction/storage/memory_storage.py index f7f897b73..25dba96ee 100644 --- a/hathor/transaction/storage/memory_storage.py +++ b/hathor/transaction/storage/memory_storage.py @@ -61,7 +61,6 @@ def set_migration_state(self, migration_name: str, state: MigrationState) -> Non pass def remove_transaction(self, tx: BaseTransaction) -> None: - assert tx.hash is not None super().remove_transaction(tx) self.transactions.pop(tx.hash, None) self.metadata.pop(tx.hash, None) @@ -71,7 +70,6 @@ def save_transaction(self, tx: 'BaseTransaction', *, only_metadata: bool = False self._save_transaction(tx, only_metadata=only_metadata) def _save_transaction(self, tx: BaseTransaction, *, only_metadata: bool = False) -> None: - assert tx.hash is not None if not only_metadata: self.transactions[tx.hash] = self._clone(tx) meta = getattr(tx, '_metadata', None) diff --git a/hathor/transaction/storage/transaction_storage.py b/hathor/transaction/storage/transaction_storage.py index 5b56431cb..9b90af63f 100644 --- a/hathor/transaction/storage/transaction_storage.py +++ b/hathor/transaction/storage/transaction_storage.py @@ -24,12 +24,14 @@ from structlog import get_logger from hathor.conf.get_settings import get_global_settings +from hathor.execution_manager import ExecutionManager from hathor.indexes import IndexesManager from hathor.indexes.height_index import HeightInfo from hathor.profiler import get_cpu_profiler from hathor.pubsub import PubSubManager from hathor.transaction.base_transaction import BaseTransaction, TxOutput from hathor.transaction.block import Block +from hathor.transaction.exceptions import RewardLocked from hathor.transaction.storage.exceptions import ( TransactionDoesNotExist, TransactionIsNotABlock, @@ -48,7 +50,7 @@ from hathor.transaction.transaction import Transaction from hathor.transaction.transaction_metadata import TransactionMetadata from hathor.types import VertexId -from hathor.util import not_none +from hathor.verification.transaction_verifier import TransactionVerifier cpu = get_cpu_profiler() @@ -72,6 +74,7 @@ class TransactionStorage(ABC): pubsub: Optional[PubSubManager] indexes: 
Optional[IndexesManager] + _latest_n_height_tips: list[HeightInfo] log = get_logger() @@ -84,6 +87,9 @@ class TransactionStorage(ABC): # Key storage attribute to save if the manager is running _manager_running_attribute: str = 'manager_running' + # Key storage attribute to save if the full node crashed + _full_node_crashed_attribute: str = 'full_node_crashed' + # Ket storage attribute to save the last time the node started _last_start_attribute: str = 'last_start' @@ -326,14 +332,12 @@ def _save_or_verify_genesis(self) -> None: for tx in genesis_txs: try: - assert tx.hash is not None tx2 = self.get_transaction(tx.hash) assert tx == tx2 except TransactionDoesNotExist: self.save_transaction(tx) self.add_to_indexes(tx) tx2 = tx - assert tx2.hash is not None self._genesis_cache[tx2.hash] = tx2 self._saving_genesis = False @@ -342,7 +346,6 @@ def _save_to_weakref(self, tx: BaseTransaction) -> None: """ if self._tx_weakref_disabled: return - assert tx.hash is not None tx2 = self._tx_weakref.get(tx.hash, None) if tx2 is None: self._tx_weakref[tx.hash] = tx @@ -354,7 +357,6 @@ def _remove_from_weakref(self, tx: BaseTransaction) -> None: """ if self._tx_weakref_disabled: return - assert tx.hash is not None self._tx_weakref.pop(tx.hash, None) def get_transaction_from_weakref(self, hash_bytes: bytes) -> Optional[BaseTransaction]: @@ -420,7 +422,6 @@ def save_transaction(self: 'TransactionStorage', tx: BaseTransaction, *, only_me :param tx: Transaction to save :param only_metadata: Don't save the transaction, only the metadata of this transaction """ - assert tx.hash is not None meta = tx.get_metadata() self.pre_save_validation(tx, meta) @@ -433,7 +434,6 @@ def pre_save_validation(self, tx: BaseTransaction, tx_meta: TransactionMetadata) This method receives the transaction AND the metadata in order to avoid calling ".get_metadata()" which could potentially create a fresh metadata. 
""" - assert tx.hash is not None assert tx_meta.hash is not None assert tx.hash == tx_meta.hash, f'{tx.hash.hex()} != {tx_meta.hash.hex()}' self._validate_partial_marker_consistency(tx_meta) @@ -493,9 +493,8 @@ def remove_transactions(self, txs: list[BaseTransaction]) -> None: """ parents_to_update: dict[bytes, list[bytes]] = defaultdict(list) dangling_children: set[bytes] = set() - txset = {not_none(tx.hash) for tx in txs} + txset = {tx.hash for tx in txs} for tx in txs: - assert tx.hash is not None tx_meta = tx.get_metadata() assert not tx_meta.validation.is_checkpoint() for parent in set(tx.parents) - txset: @@ -530,7 +529,6 @@ def transaction_exists(self, hash_bytes: bytes) -> bool: def compare_bytes_with_local_tx(self, tx: BaseTransaction) -> bool: """Compare byte-per-byte `tx` with the local transaction.""" - assert tx.hash is not None # XXX: we have to accept any scope because we only want to know what bytes we have stored with tx_allow_context(self, allow_scope=TxAllowScope.ALL): local_tx = self.get_transaction(tx.hash) @@ -968,9 +966,10 @@ def is_running_full_verification(self) -> bool: """ return self.get_value(self._running_full_verification_attribute) == '1' - def start_running_manager(self) -> None: + def start_running_manager(self, execution_manager: ExecutionManager) -> None: """ Save on storage that manager is running """ + execution_manager.register_on_crash_callback(self.on_full_node_crash) self.add_value(self._manager_running_attribute, '1') def stop_running_manager(self) -> None: @@ -983,6 +982,14 @@ def is_running_manager(self) -> bool: """ return self.get_value(self._manager_running_attribute) == '1' + def on_full_node_crash(self) -> None: + """Save on storage that the full node crashed and cannot be recovered.""" + self.add_value(self._full_node_crashed_attribute, '1') + + def is_full_node_crashed(self) -> bool: + """Return whether the full node was crashed.""" + return self.get_value(self._full_node_crashed_attribute) == '1' + def get_last_started_at(self) -> int: """ Return the timestamp when the database was last started. """ @@ -1092,10 +1099,11 @@ def compute_transactions_that_became_invalid(self, new_best_height: int) -> list from hathor.transaction.validation_state import ValidationState to_remove: list[BaseTransaction] = [] for tx in self.iter_mempool_from_best_index(): - tx_min_height = tx.get_metadata().min_height - assert tx_min_height is not None - # We use +1 here because a tx is valid if it can be confirmed by the next block - if new_best_height + 1 < tx_min_height: + try: + TransactionVerifier.verify_reward_locked_for_height( + tx, new_best_height, assert_min_height_verification=False + ) + except RewardLocked: tx.set_validation(ValidationState.INVALID) to_remove.append(tx) return to_remove @@ -1356,7 +1364,6 @@ def __init__(self, tx: BaseTransaction): heapq.heapify(to_visit) while to_visit: item = heapq.heappop(to_visit) - assert item.tx.hash is not None yield item.tx # XXX: We can safely discard because no other tx will try to visit this one, since timestamps are strictly # higher in children, meaning we cannot possibly have item.tx as a descendant of any tx in to_visit. 
@@ -1390,7 +1397,6 @@ def _run_topological_sort_dfs(self, root: BaseTransaction, visited: dict[bytes, stack = [root] while stack: tx = stack[-1] - assert tx.hash is not None if tx.hash in visited: if visited[tx.hash] == 0: visited[tx.hash] = 1 # 1 = Visited diff --git a/hathor/transaction/storage/traversal.py b/hathor/transaction/storage/traversal.py index fc6bbc110..d88b47b9d 100644 --- a/hathor/transaction/storage/traversal.py +++ b/hathor/transaction/storage/traversal.py @@ -134,7 +134,6 @@ def run(self, root: Union['BaseTransaction', Iterable['BaseTransaction']], *, roots = root if isinstance(root, Iterable) else [root] for root in roots: - assert root.hash is not None self.seen.add(root.hash) if not skip_root: self._push_visit(root) @@ -143,7 +142,6 @@ def run(self, root: Union['BaseTransaction', Iterable['BaseTransaction']], *, while not self._is_empty(): tx = self._pop_visit() - assert tx.hash is not None yield tx if not self._ignore_neighbors: self.add_neighbors(tx) @@ -172,7 +170,6 @@ def _pop_visit(self) -> 'BaseTransaction': tx = item.tx # We can safely remove it because we are walking in topological order # and it won't appear again in the future because this would be a cycle. - assert tx.hash is not None self.seen.remove(tx.hash) return tx diff --git a/hathor/transaction/token_creation_tx.py b/hathor/transaction/token_creation_tx.py index 08156ce90..61a676b2a 100644 --- a/hathor/transaction/token_creation_tx.py +++ b/hathor/transaction/token_creation_tx.py @@ -65,7 +65,6 @@ def update_hash(self) -> None: """ When we update the hash, we also have to update the tokens uid list """ super().update_hash() - assert self.hash is not None self.tokens = [self.hash] def get_funds_fields_from_struct(self, buf: bytes, *, verbose: VerboseCallback = None) -> bytes: @@ -221,7 +220,6 @@ def _get_token_info_from_inputs(self) -> dict[TokenUid, TokenInfo]: token_dict = super()._get_token_info_from_inputs() # we add the created token's info to token_dict, as the creation tx allows for mint/melt - assert self.hash is not None token_dict[self.hash] = TokenInfo(0, True, True) return token_dict diff --git a/hathor/transaction/transaction.py b/hathor/transaction/transaction.py index 54189693d..a9d9fec5a 100644 --- a/hathor/transaction/transaction.py +++ b/hathor/transaction/transaction.py @@ -360,7 +360,7 @@ def is_double_spending(self) -> bool: tx = self.storage.get_transaction(tx_in.tx_id) meta = tx.get_metadata() spent_by = meta.get_output_spent_by(tx_in.index) - if spent_by and spent_by != self.hash: + if spent_by and spent_by != self._hash: return True return False diff --git a/hathor/types.py b/hathor/types.py index f035a8c80..40ad6dead 100644 --- a/hathor/types.py +++ b/hathor/types.py @@ -12,13 +12,15 @@ # See the License for the specific language governing permissions and # limitations under the License. +from typing import TypeAlias + # XXX There is a lot of refactor to be done before we can use `NewType`. # So, let's skip using NewType until everything is refactored. 
-VertexId = bytes # NewType('TxId', bytes) -Address = bytes # NewType('Address', bytes) -AddressB58 = str -TxOutputScript = bytes # NewType('TxOutputScript', bytes) -Timestamp = int # NewType('Timestamp', int) -TokenUid = VertexId # NewType('TokenUid', VertexId) -Amount = int # NewType('Amount', int) +VertexId: TypeAlias = bytes # NewType('TxId', bytes) +Address: TypeAlias = bytes # NewType('Address', bytes) +AddressB58: TypeAlias = str +TxOutputScript: TypeAlias = bytes # NewType('TxOutputScript', bytes) +Timestamp: TypeAlias = int # NewType('Timestamp', int) +TokenUid: TypeAlias = VertexId # NewType('TokenUid', VertexId) +Amount: TypeAlias = int # NewType('Amount', int) diff --git a/hathor/util.py b/hathor/util.py index 1f409d0f1..cd1f0b090 100644 --- a/hathor/util.py +++ b/hathor/util.py @@ -30,7 +30,7 @@ import hathor from hathor.conf.get_settings import get_global_settings -from hathor.types import TokenUid +from hathor.types import TokenUid, VertexId if TYPE_CHECKING: import structlog @@ -477,7 +477,6 @@ def _tx_progress(iter_tx: Iterator['BaseTransaction'], *, log: 'structlog.stdlib if dt_next > _DT_ITER_NEXT_WARN: log.warn('iterator was slow to yield', took_sec=dt_next) - assert tx.hash is not None # XXX: this is only informative and made to work with either partially/fully validated blocks/transactions meta = tx.get_metadata() if meta.height: @@ -495,7 +494,11 @@ def _tx_progress(iter_tx: Iterator['BaseTransaction'], *, log: 'structlog.stdlib if total: progress_ = count / total elapsed_time = t_log - t_start - remaining_time = LogDuration(elapsed_time / progress_ - elapsed_time) + remaining_time: str | LogDuration + if progress_ == 0: + remaining_time = '?' + else: + remaining_time = LogDuration(elapsed_time / progress_ - elapsed_time) log.info( f'loading... {math.floor(progress_ * 100):2.0f}%', progress=progress_, @@ -806,3 +809,23 @@ def calculate_min_significant_weight(score: float, tol: float) -> float: """ This function will return the min significant weight to increase score by tol. """ return score + math.log2(2 ** tol - 1) + + +def bytes_to_vertexid(data: bytes) -> VertexId: + # XXX: using raw string for the docstring so we can more easily write byte literals + r""" Function to validate bytes and return a VertexId, raises ValueError if not valid. + + >>> bytes_to_vertexid(b'\0' * 32) + b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + >>> bytes_to_vertexid(b'\0' * 31) + Traceback (most recent call last): + ... + ValueError: length must be exactly 32 bytes + >>> bytes_to_vertexid(b'\0' * 33) + Traceback (most recent call last): + ... 
+ ValueError: length must be exactly 32 bytes + """ + if len(data) != 32: + raise ValueError('length must be exactly 32 bytes') + return VertexId(data) diff --git a/hathor/verification/block_verifier.py b/hathor/verification/block_verifier.py index b1184aea5..2110bbd91 100644 --- a/hathor/verification/block_verifier.py +++ b/hathor/verification/block_verifier.py @@ -51,7 +51,8 @@ def verify_height(self, block: Block) -> None: def verify_weight(self, block: Block) -> None: """Validate minimum block difficulty.""" - min_block_weight = self._daa.calculate_block_difficulty(block) + assert block.storage is not None + min_block_weight = self._daa.calculate_block_difficulty(block, block.storage.get_parent_block) if block.weight < min_block_weight - self._settings.WEIGHT_TOL: raise WeightError(f'Invalid new block {block.hash_hex}: weight ({block.weight}) is ' f'smaller than the minimum weight ({min_block_weight})') diff --git a/hathor/verification/token_creation_transaction_verifier.py b/hathor/verification/token_creation_transaction_verifier.py index 66d96f111..4d0ac543c 100644 --- a/hathor/verification/token_creation_transaction_verifier.py +++ b/hathor/verification/token_creation_transaction_verifier.py @@ -18,7 +18,6 @@ from hathor.transaction.transaction import TokenInfo from hathor.transaction.util import clean_token_string from hathor.types import TokenUid -from hathor.util import not_none class TokenCreationTransactionVerifier: @@ -36,7 +35,7 @@ def verify_minted_tokens(self, tx: TokenCreationTransaction, token_dict: dict[To :raises InputOutputMismatch: if sum of inputs is not equal to outputs and there's no mint/melt """ # make sure tokens are being minted - token_info = token_dict[not_none(tx.hash)] + token_info = token_dict[tx.hash] if token_info.amount <= 0: raise InvalidToken('Token creation transaction must mint new tokens') diff --git a/hathor/verification/transaction_verifier.py b/hathor/verification/transaction_verifier.py index 2d86883c2..f55e0239c 100644 --- a/hathor/verification/transaction_verifier.py +++ b/hathor/verification/transaction_verifier.py @@ -16,6 +16,7 @@ from hathor.daa import DifficultyAdjustmentAlgorithm from hathor.profiler import get_cpu_profiler from hathor.reward_lock import get_spent_reward_locked_info +from hathor.reward_lock.reward_lock import get_minimum_best_height from hathor.transaction import BaseTransaction, Transaction, TxInput from hathor.transaction.exceptions import ( ConflictingInputs, @@ -37,7 +38,6 @@ from hathor.transaction.transaction import TokenInfo from hathor.transaction.util import get_deposit_amount, get_withdraw_amount from hathor.types import TokenUid, VertexId -from hathor.util import not_none cpu = get_cpu_profiler() @@ -83,7 +83,6 @@ def verify_sigops_input(self, tx: Transaction) -> None: spent_tx = tx.get_spent_tx(tx_input) except TransactionDoesNotExist: raise InexistentInput('Input tx does not exist: {}'.format(tx_input.tx_id.hex())) - assert spent_tx.hash is not None if tx_input.index >= len(spent_tx.outputs): raise InexistentInput('Output spent by this input does not exist: {} index {}'.format( tx_input.tx_id.hex(), tx_input.index)) @@ -106,7 +105,6 @@ def verify_inputs(self, tx: Transaction, *, skip_script: bool = False) -> None: try: spent_tx = tx.get_spent_tx(input_tx) - assert spent_tx.hash is not None if input_tx.index >= len(spent_tx.outputs): raise InexistentInput('Output spent by this input does not exist: {} index {}'.format( input_tx.tx_id.hex(), input_tx.index)) @@ -144,12 +142,51 @@ def verify_script(self, *, 
tx: Transaction, input_tx: TxInput, spent_tx: BaseTra raise InvalidInputData(e) from e def verify_reward_locked(self, tx: Transaction) -> None: - """Will raise `RewardLocked` if any reward is spent before the best block height is enough, considering only - the block rewards spent by this tx itself, and not the inherited `min_height`.""" - info = get_spent_reward_locked_info(tx, not_none(tx.storage)) + """Will raise `RewardLocked` if any reward is spent before the best block height is enough, considering both + the block rewards spent by this tx itself, and the inherited `min_height`.""" + assert tx.storage is not None + best_height = get_minimum_best_height(tx.storage) + self.verify_reward_locked_for_height(tx, best_height) + + @staticmethod + def verify_reward_locked_for_height( + tx: Transaction, + best_height: int, + *, + assert_min_height_verification: bool = True + ) -> None: + """ + Will raise `RewardLocked` if any reward is spent before the best block height is enough, considering both + the block rewards spent by this tx itself, and the inherited `min_height`. + + Args: + tx: the transaction to be verified. + best_height: the height of the best chain to be used for verification. + assert_min_height_verification: whether the inherited `min_height` verification must pass. + + Note: for verification of new transactions, `assert_min_height_verification` must be `True`. This + verification is always expected to pass for new txs, as a failure would mean one of its dependencies would + have failed too. So an `AssertionError` is raised if it fails. + + However, when txs are being re-verified for Reward Lock during a reorg, it's possible that txs may fail + their inherited `min_height` verification. So in that case `assert_min_height_verification` is `False`, + and a normal `RewardLocked` exception is raised instead. + """ + assert tx.storage is not None + info = get_spent_reward_locked_info(tx, tx.storage) if info is not None: raise RewardLocked(f'Reward {info.block_hash.hex()} still needs {info.blocks_needed} to be unlocked.') + meta = tx.get_metadata() + assert meta.min_height is not None + # We use +1 here because a tx is valid if it can be confirmed by the next block + if best_height + 1 < meta.min_height: + if assert_min_height_verification: + raise AssertionError('a new tx should never be invalid by its inherited min_height.') + raise RewardLocked( + f'Tx {tx.hash_hex} has min_height={meta.min_height}, but the best_height={best_height}.' 
+ ) + def verify_number_of_inputs(self, tx: Transaction) -> None: """Verify number of inputs is in a valid range""" if len(tx.inputs) > self._settings.MAX_NUM_INPUTS: diff --git a/hathor/verification/vertex_verifier.py b/hathor/verification/vertex_verifier.py index 80a621502..d3ef72046 100644 --- a/hathor/verification/vertex_verifier.py +++ b/hathor/verification/vertex_verifier.py @@ -15,7 +15,6 @@ from typing import Optional from hathor.conf.settings import HathorSettings -from hathor.daa import DifficultyAdjustmentAlgorithm from hathor.transaction import BaseTransaction from hathor.transaction.exceptions import ( DuplicatedParents, @@ -40,11 +39,10 @@ class VertexVerifier: - __slots__ = ('_settings', '_daa') + __slots__ = ('_settings',) - def __init__(self, *, settings: HathorSettings, daa: DifficultyAdjustmentAlgorithm): + def __init__(self, *, settings: HathorSettings) -> None: self._settings = settings - self._daa = daa def verify_parents(self, vertex: BaseTransaction) -> None: """All parents must exist and their timestamps must be smaller than ours. @@ -73,7 +71,6 @@ def verify_parents(self, vertex: BaseTransaction) -> None: for parent_hash in vertex.parents: try: parent = vertex.storage.get_transaction(parent_hash) - assert parent.hash is not None if vertex.timestamp <= parent.timestamp: raise TimestampError('tx={} timestamp={}, parent={} timestamp={}'.format( vertex.hash_hex, @@ -129,7 +126,6 @@ def verify_pow(self, vertex: BaseTransaction, *, override_weight: Optional[float :raises PowError: when the hash is equal or greater than the target """ - assert vertex.hash is not None numeric_hash = int(vertex.hash_hex, vertex.HEX_BASE) minimum_target = vertex.get_target(override_weight) if numeric_hash >= minimum_target: @@ -160,7 +156,7 @@ def verify_outputs(self, vertex: BaseTransaction) -> None: )) def verify_number_of_outputs(self, vertex: BaseTransaction) -> None: - """Verify number of outputs does not exceeds the limit""" + """Verify number of outputs does not exceed the limit""" if len(vertex.outputs) > self._settings.MAX_NUM_OUTPUTS: raise TooManyOutputs('Maximum number of outputs exceeded') diff --git a/hathor/verification/vertex_verifiers.py b/hathor/verification/vertex_verifiers.py index 98477c397..31e3fe190 100644 --- a/hathor/verification/vertex_verifiers.py +++ b/hathor/verification/vertex_verifiers.py @@ -44,7 +44,7 @@ def create_defaults( Create a VertexVerifiers instance using the default verifier for each vertex type, from all required dependencies. """ - vertex_verifier = VertexVerifier(settings=settings, daa=daa) + vertex_verifier = VertexVerifier(settings=settings) return cls.create( settings=settings, diff --git a/hathor/version.py b/hathor/version.py index 895f8bdd7..b1afb04ca 100644 --- a/hathor/version.py +++ b/hathor/version.py @@ -19,7 +19,7 @@ from structlog import get_logger -BASE_VERSION = '0.59.0' +BASE_VERSION = '0.60.0' DEFAULT_VERSION_SUFFIX = "local" BUILD_VERSION_FILE_PATH = "./BUILD_VERSION" diff --git a/hathor/vertex_handler/__init__.py b/hathor/vertex_handler/__init__.py new file mode 100644 index 000000000..8ac87643a --- /dev/null +++ b/hathor/vertex_handler/__init__.py @@ -0,0 +1,19 @@ +# Copyright 2024 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from hathor.vertex_handler.vertex_handler import VertexHandler + +__all__ = [ + 'VertexHandler' +] diff --git a/hathor/vertex_handler/vertex_handler.py b/hathor/vertex_handler/vertex_handler.py new file mode 100644 index 000000000..5bcbc1369 --- /dev/null +++ b/hathor/vertex_handler/vertex_handler.py @@ -0,0 +1,268 @@ +# Copyright 2024 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import datetime + +from structlog import get_logger + +from hathor.conf.settings import HathorSettings +from hathor.consensus import ConsensusAlgorithm +from hathor.exception import HathorError, InvalidNewTransaction +from hathor.feature_activation.feature import Feature +from hathor.feature_activation.feature_service import FeatureService +from hathor.p2p.manager import ConnectionsManager +from hathor.pubsub import HathorEvents, PubSubManager +from hathor.reactor import ReactorProtocol +from hathor.transaction import BaseTransaction, Block +from hathor.transaction.storage import TransactionStorage +from hathor.transaction.storage.exceptions import TransactionDoesNotExist +from hathor.verification.verification_service import VerificationService +from hathor.wallet import BaseWallet + +logger = get_logger() + + +class VertexHandler: + __slots__ = ( + '_log', + '_reactor', + '_settings', + '_tx_storage', + '_verification_service', + '_consensus', + '_p2p_manager', + '_feature_service', + '_pubsub', + '_wallet', + ) + + def __init__( + self, + *, + reactor: ReactorProtocol, + settings: HathorSettings, + tx_storage: TransactionStorage, + verification_service: VerificationService, + consensus: ConsensusAlgorithm, + p2p_manager: ConnectionsManager, + feature_service: FeatureService, + pubsub: PubSubManager, + wallet: BaseWallet | None, + ) -> None: + self._log = logger.new() + self._reactor = reactor + self._settings = settings + self._tx_storage = tx_storage + self._verification_service = verification_service + self._consensus = consensus + self._p2p_manager = p2p_manager + self._feature_service = feature_service + self._pubsub = pubsub + self._wallet = wallet + + def on_new_vertex( + self, + vertex: BaseTransaction, + *, + quiet: bool = False, + fails_silently: bool = True, + propagate_to_peers: bool = True, + reject_locked_reward: bool = True, + ) -> bool: + """ New method for adding transactions or blocks that steps the validation state machine. 
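        In short, the flow below is: _validate_vertex (timestamp, metadata and full
        validation checks), then _save_and_run_consensus, then _post_consensus,
        which updates indexes, publishes the accepted vertex to pubsub and
        optionally relays it to peers.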
+ + :param vertex: transaction to be added + :param quiet: if True will not log when a new tx is accepted + :param fails_silently: if False will raise an exception when tx cannot be added + :param propagate_to_peers: if True will relay the tx to other peers if it is accepted + """ + is_valid = self._validate_vertex( + vertex, + fails_silently=fails_silently, + reject_locked_reward=reject_locked_reward + ) + + if not is_valid: + return False + + self._save_and_run_consensus(vertex) + self._post_consensus( + vertex, + quiet=quiet, + propagate_to_peers=propagate_to_peers, + reject_locked_reward=reject_locked_reward + ) + + return True + + def _validate_vertex( + self, + vertex: BaseTransaction, + *, + fails_silently: bool, + reject_locked_reward: bool, + ) -> bool: + assert self._tx_storage.is_only_valid_allowed() + already_exists = False + if self._tx_storage.transaction_exists(vertex.hash): + self._tx_storage.compare_bytes_with_local_tx(vertex) + already_exists = True + + if vertex.timestamp - self._reactor.seconds() > self._settings.MAX_FUTURE_TIMESTAMP_ALLOWED: + if not fails_silently: + raise InvalidNewTransaction('Ignoring transaction in the future {} (timestamp={})'.format( + vertex.hash_hex, vertex.timestamp)) + self._log.warn('on_new_tx(): Ignoring transaction in the future', tx=vertex.hash_hex, + future_timestamp=vertex.timestamp) + return False + + vertex.storage = self._tx_storage + + try: + metadata = vertex.get_metadata() + except TransactionDoesNotExist: + if not fails_silently: + raise InvalidNewTransaction('cannot get metadata') + self._log.warn('on_new_tx(): cannot get metadata', tx=vertex.hash_hex) + return False + + if already_exists and metadata.validation.is_fully_connected(): + if not fails_silently: + raise InvalidNewTransaction('Transaction already exists {}'.format(vertex.hash_hex)) + self._log.warn('on_new_tx(): Transaction already exists', tx=vertex.hash_hex) + return False + + if metadata.validation.is_invalid(): + if not fails_silently: + raise InvalidNewTransaction('previously marked as invalid') + self._log.warn('on_new_tx(): previously marked as invalid', tx=vertex.hash_hex) + return False + + if not metadata.validation.is_fully_connected(): + try: + # TODO: Remove this from here after a refactor in metadata initialization + vertex.update_reward_lock_metadata() + self._verification_service.validate_full(vertex, reject_locked_reward=reject_locked_reward) + except HathorError as e: + if not fails_silently: + raise InvalidNewTransaction(f'full validation failed: {str(e)}') from e + self._log.warn('on_new_tx(): full validation failed', tx=vertex.hash_hex, exc_info=True) + return False + + return True + + def _save_and_run_consensus(self, vertex: BaseTransaction) -> None: + # The method below adds the tx as a child of the parents + # This needs to be called right before the save because we were adding the children + # in the tx parents even if the tx was invalid (failing the verifications above) + # then I would have a children that was not in the storage + vertex.update_initial_metadata(save=False) + self._tx_storage.save_transaction(vertex) + self._tx_storage.add_to_indexes(vertex) + self._consensus.update(vertex) + + def _post_consensus( + self, + vertex: BaseTransaction, + *, + quiet: bool, + propagate_to_peers: bool, + reject_locked_reward: bool, + ) -> None: + """ Handle operations that need to happen once the tx becomes fully validated. + + This might happen immediately after we receive the tx, if we have all dependencies + already. Or it might happen later. 
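        For instance, a vertex received during sync may only become fully validated
        later, once its missing dependencies have arrived.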
+ """ + assert self._tx_storage.indexes is not None + assert self._verification_service.validate_full( + vertex, + skip_block_weight_verification=True, + reject_locked_reward=reject_locked_reward + ) + self._tx_storage.indexes.update(vertex) + if self._tx_storage.indexes.mempool_tips: + self._tx_storage.indexes.mempool_tips.update(vertex) # XXX: move to indexes.update + + # Publish to pubsub manager the new tx accepted, now that it's full validated + self._pubsub.publish(HathorEvents.NETWORK_NEW_TX_ACCEPTED, tx=vertex) + + if self._tx_storage.indexes.mempool_tips: + self._tx_storage.indexes.mempool_tips.update(vertex) + + if self._wallet: + # TODO Remove it and use pubsub instead. + self._wallet.on_new_tx(vertex) + + self._log_new_object(vertex, 'new {}', quiet=quiet) + self._log_feature_states(vertex) + + if propagate_to_peers: + # Propagate to our peers. + self._p2p_manager.send_tx_to_peers(vertex) + + def _log_new_object(self, tx: BaseTransaction, message_fmt: str, *, quiet: bool) -> None: + """ A shortcut for logging additional information for block/txs. + """ + metadata = tx.get_metadata() + now = datetime.datetime.fromtimestamp(self._reactor.seconds()) + kwargs = { + 'tx': tx, + 'ts_date': datetime.datetime.fromtimestamp(tx.timestamp), + 'time_from_now': tx.get_time_from_now(now), + 'validation': metadata.validation.name, + } + if tx.is_block: + message = message_fmt.format('block') + if isinstance(tx, Block): + kwargs['height'] = tx.get_height() + else: + message = message_fmt.format('tx') + if not quiet: + log_func = self._log.info + else: + log_func = self._log.debug + log_func(message, **kwargs) + + def _log_feature_states(self, vertex: BaseTransaction) -> None: + """Log features states for a block. Used as part of the Feature Activation Phased Testing.""" + if not isinstance(vertex, Block): + return + + feature_descriptions = self._feature_service.get_bits_description(block=vertex) + state_by_feature = { + feature.value: description.state.value + for feature, description in feature_descriptions.items() + } + + self._log.info( + 'New block accepted with feature activation states', + block_hash=vertex.hash_hex, + block_height=vertex.get_height(), + features_states=state_by_feature + ) + + features = [Feature.NOP_FEATURE_1, Feature.NOP_FEATURE_2] + for feature in features: + self._log_if_feature_is_active(vertex, feature) + + def _log_if_feature_is_active(self, block: Block, feature: Feature) -> None: + """Log if a feature is ACTIVE for a block. Used as part of the Feature Activation Phased Testing.""" + if self._feature_service.is_feature_active(block=block, feature=feature): + self._log.info( + 'Feature is ACTIVE for block', + feature=feature.value, + block_hash=block.hash_hex, + block_height=block.get_height() + ) diff --git a/hathor/wallet/base_wallet.py b/hathor/wallet/base_wallet.py index ec5e2dc5e..7b38dfa12 100644 --- a/hathor/wallet/base_wallet.py +++ b/hathor/wallet/base_wallet.py @@ -517,7 +517,6 @@ def on_new_tx(self, tx: BaseTransaction) -> None: If an output matches, will add it to the unspent_txs dict. If an input matches, removes from unspent_txs dict and adds to spent_txs dict. 
""" - assert tx.hash is not None meta = tx.get_metadata() if meta.voided_by is not None: @@ -614,7 +613,6 @@ def on_tx_voided(self, tx: Transaction) -> None: :param tx: Transaction that was voided :type tx: :py:class:`hathor.transaction.Transaction` """ - assert tx.hash is not None assert tx.storage is not None should_update = False @@ -736,7 +734,6 @@ def on_tx_winner(self, tx: Transaction) -> None: :param tx: Transaction that was voided :type tx: :py:class:`hathor.transaction.Transaction` """ - assert tx.hash is not None assert tx.storage is not None should_update = False diff --git a/hathor/wallet/resources/send_tokens.py b/hathor/wallet/resources/send_tokens.py index 16bd97355..936faa2e9 100644 --- a/hathor/wallet/resources/send_tokens.py +++ b/hathor/wallet/resources/send_tokens.py @@ -127,6 +127,7 @@ def _render_POST_thread(self, values: dict[str, Any], request: Request) -> Union weight = self.manager.daa.minimum_tx_weight(tx) tx.weight = weight self.manager.cpu_mining_service.resolve(tx) + tx.update_reward_lock_metadata() self.manager.verification_service.verify(tx) return tx diff --git a/hathor/wallet/resources/thin_wallet/address_history.py b/hathor/wallet/resources/thin_wallet/address_history.py index 8e0ffc0ca..e7e231e71 100644 --- a/hathor/wallet/resources/thin_wallet/address_history.py +++ b/hathor/wallet/resources/thin_wallet/address_history.py @@ -21,6 +21,7 @@ from hathor.cli.openapi_files.register import register_resource from hathor.conf.get_settings import get_global_settings from hathor.crypto.util import decode_address +from hathor.transaction.storage.exceptions import TransactionDoesNotExist from hathor.util import json_dumpb, json_loadb from hathor.wallet.exceptions import InvalidAddress @@ -69,10 +70,6 @@ def render_POST(self, request: Request) -> bytes: def render_GET(self, request: Request) -> bytes: """ GET request for /thin_wallet/address_history/ - If 'paginate' parameter exists, it calls the new resource method - otherwise, it will call the old and deprecated one because it's - a request from a wallet still in an older version - Expects 'addresses[]' as request args, and 'hash' as optional args to be used in pagination @@ -124,24 +121,18 @@ def render_GET(self, request: Request) -> bytes: return json_dumpb({'success': False}) raw_args = get_args(request) - paginate = b'paginate' in raw_args and raw_args[b'paginate'][0].decode('utf-8') == 'true' - if paginate: - # New resource - if b'addresses[]' not in raw_args: - return get_missing_params_msg('addresses[]') + if b'addresses[]' not in raw_args: + return get_missing_params_msg('addresses[]') - addresses = raw_args[b'addresses[]'] + addresses = raw_args[b'addresses[]'] - ref_hash = None - if b'hash' in raw_args: - # If hash parameter is in the request, it must be a valid hex - ref_hash = raw_args[b'hash'][0].decode('utf-8') + ref_hash = None + if b'hash' in raw_args: + # If hash parameter is in the request, it must be a valid hex + ref_hash = raw_args[b'hash'][0].decode('utf-8') - return self.get_address_history([address.decode('utf-8') for address in addresses], ref_hash) - else: - # Old and deprecated resource - return self.deprecated_resource(request) + return self.get_address_history([address.decode('utf-8') for address in addresses], ref_hash) def get_address_history(self, addresses: list[str], ref_hash: Optional[str]) -> bytes: ref_hash_bytes = None @@ -166,12 +157,6 @@ def get_address_history(self, addresses: list[str], ref_hash: Optional[str]) -> history = [] seen: set[bytes] = set() - # XXX In this 
algorithm we need to sort all transactions of an address - # and find one specific (in case of a pagination request) - # so if this address has many txs, this could become slow - # I've done some tests with 10k txs in one address and the request - # returned in less than 50ms, so we will move forward with it for now - # but this could be improved in the future for idx, address in enumerate(addresses): try: decode_address(address) @@ -181,31 +166,28 @@ def get_address_history(self, addresses: list[str], ref_hash: Optional[str]) -> 'message': 'The address {} is invalid'.format(address) }) - hashes = addresses_index.get_sorted_from_address(address) - start_index = 0 - if ref_hash_bytes and idx == 0: - # It's not the first request, so we must continue from the hash - # but we do it only for the first address + tx = None + if ref_hash_bytes: try: - # Find index where the hash is - start_index = hashes.index(ref_hash_bytes) - except ValueError: - # ref_hash is not in the list + tx = self.manager.tx_storage.get_transaction(ref_hash_bytes) + except TransactionDoesNotExist: return json_dumpb({ 'success': False, - 'message': 'Hash {} is not a transaction from the address {}.'.format(ref_hash, address) + 'message': 'Hash {} is not a transaction hash.'.format(ref_hash) }) - # Slice the hashes array from the start_index - to_iterate = hashes[start_index:] + # The address index returns an iterable that starts at `tx`. + hashes = addresses_index.get_sorted_from_address(address, tx) did_break = False - for index, tx_hash in enumerate(to_iterate): + for tx_hash in hashes: if total_added == self._settings.MAX_TX_ADDRESSES_HISTORY: # If already added the max number of elements possible, then break # I need to add this if at the beginning of the loop to handle the case # when the first tx of the address exceeds the limit, so we must return # that the next request should start in the first tx of this address did_break = True + # Saving the first tx hash for the next request + first_hash = tx_hash.hex() break if tx_hash not in seen: @@ -216,6 +198,8 @@ def get_address_history(self, addresses: list[str], ref_hash: Optional[str]) -> # It's important to validate also the maximum number of inputs and outputs because some txs # are really big and the response payload becomes too big did_break = True + # Saving the first tx hash for the next request + first_hash = tx_hash.hex() break seen.add(tx_hash) @@ -226,10 +210,8 @@ def get_address_history(self, addresses: list[str], ref_hash: Optional[str]) -> if did_break: # We stopped in the middle of the txs of this address # So we return that we still have more data to send - break_index = start_index + index has_more = True # The hash to start the search and which address this hash belongs - first_hash = hashes[break_index].hex() first_address = address break @@ -242,38 +224,6 @@ def get_address_history(self, addresses: list[str], ref_hash: Optional[str]) -> } return json_dumpb(data) - def deprecated_resource(self, request: Request) -> bytes: - """ This resource is deprecated. 
It's here only to keep - compatibility with old wallet versions - """ - raw_args = get_args(request) - if b'addresses[]' not in raw_args: - return get_missing_params_msg('addresses[]') - - addresses_index = self.manager.tx_storage.indexes.addresses - - addresses = raw_args[b'addresses[]'] - history = [] - seen: set[bytes] = set() - for address_to_decode in addresses: - address = address_to_decode.decode('utf-8') - try: - decode_address(address) - except InvalidAddress: - return json_dumpb({ - 'success': False, - 'message': 'The address {} is invalid'.format(address) - }) - - for tx_hash in addresses_index.get_from_address(address): - tx = self.manager.tx_storage.get_transaction(tx_hash) - if tx_hash not in seen: - seen.add(tx_hash) - history.append(tx.to_json_extended()) - - data = {'history': history} - return json_dumpb(data) - AddressHistoryResource.openapi = { '/thin_wallet/address_history': { @@ -371,62 +321,10 @@ def deprecated_resource(self, request: Request) -> bytes: }, "token": "00" } - ] - } - ] - } - }, - 'deprecated_success': { - 'summary': 'Deprecated success', - 'value': { - 'history': [ - { - "hash": "00000299670db5814f69cede8b347f83" - "0f73985eaa4cd1ce87c9a7c793771336", - "timestamp": 1552422415, - "is_voided": False, - 'parents': [ - '00000b8792cb13e8adb51cc7d866541fc29b532e8dec95ae4661cf3da4d42cb5', - '00001417652b9d7bd53eb14267834eab08f27e5cbfaca45a24370e79e0348bb1' - ], - "inputs": [ - { - "value": 42500000044, - "script": "dqkURJPA8tDMJHU8tqv3SiO18ZCLEPaIrA==", - "decoded": { - "type": "P2PKH", - "address": "17Fbx9ouRUD1sd32bp4ptGkmgNzg7p2Krj", - "timelock": None - }, - "token": "00", - "tx": "000002d28696f94f89d639022ae81a1d" - "870d55d189c27b7161d9cb214ad1c90c", - "index": 0 - } ], - "outputs": [ - { - "value": 42499999255, - "script": "dqkU/B6Jbf5OnslsQrvHXQ4WKDTSEGKIrA==", - "decoded": { - "type": "P2PKH", - "address": "1Pz5s5WVL52MK4EwBy9XVQUzWjF2LWWKiS", - "timelock": None - }, - "token": "00" - }, - { - "value": 789, - "script": "dqkUrWoWhiP+qPeI/qwfwb5fgnmtd4CIrA==", - "decoded": { - "type": "P2PKH", - "address": "1GovzJvbzLw6x4H2a1hHb529cpEWzh3YRd", - "timelock": None - }, - "token": "00" - } - ] - } + "first_block": ("000005af290a55b079014a0be3246479" + "e84eeb635f02010dbf3e5f3414a85bbb") + } ] } }, diff --git a/hathor/wallet/resources/thin_wallet/send_tokens.py b/hathor/wallet/resources/thin_wallet/send_tokens.py index 6cd6badaf..2ffb10dfc 100644 --- a/hathor/wallet/resources/thin_wallet/send_tokens.py +++ b/hathor/wallet/resources/thin_wallet/send_tokens.py @@ -270,6 +270,7 @@ def _should_stop(): if context.should_stop_mining_thread: raise CancelledError() context.tx.update_hash() + context.tx.update_reward_lock_metadata() self.manager.verification_service.verify(context.tx) return context diff --git a/hathor/wallet/resources/thin_wallet/token_history.py b/hathor/wallet/resources/thin_wallet/token_history.py index a68cf4077..603a6c7f6 100644 --- a/hathor/wallet/resources/thin_wallet/token_history.py +++ b/hathor/wallet/resources/thin_wallet/token_history.py @@ -243,7 +243,8 @@ def render_GET(self, request: Request) -> bytes: 'script': 'dqkUjjPg+zwG6JDe901I0ybQxcAPrAuIrA==' } ], - 'tokens': [] + 'tokens': [], + 'first_block': None }, { 'tx_id': ('00000b8792cb13e8adb51cc7d866541f' @@ -276,7 +277,9 @@ def render_GET(self, request: Request) -> bytes: 'script': 'dqkUjjPg+zwG6JDe901I0ybQxcAPrAuIrA==' } ], - 'tokens': [] + 'tokens': [], + 'first_block': ('000005af290a55b079014a0be3246479' + 'e84eeb635f02010dbf3e5f3414a85bbb') } ], 'has_more': True diff --git 
a/poetry.lock b/poetry.lock index 475743517..163ae443e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "aiohttp" @@ -1061,38 +1061,38 @@ files = [ [[package]] name = "mypy" -version = "1.8.0" +version = "1.9.0" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:485a8942f671120f76afffff70f259e1cd0f0cfe08f81c05d8816d958d4577d3"}, - {file = "mypy-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:df9824ac11deaf007443e7ed2a4a26bebff98d2bc43c6da21b2b64185da011c4"}, - {file = "mypy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2afecd6354bbfb6e0160f4e4ad9ba6e4e003b767dd80d85516e71f2e955ab50d"}, - {file = "mypy-1.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8963b83d53ee733a6e4196954502b33567ad07dfd74851f32be18eb932fb1cb9"}, - {file = "mypy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:e46f44b54ebddbeedbd3d5b289a893219065ef805d95094d16a0af6630f5d410"}, - {file = "mypy-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:855fe27b80375e5c5878492f0729540db47b186509c98dae341254c8f45f42ae"}, - {file = "mypy-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c886c6cce2d070bd7df4ec4a05a13ee20c0aa60cb587e8d1265b6c03cf91da3"}, - {file = "mypy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d19c413b3c07cbecf1f991e2221746b0d2a9410b59cb3f4fb9557f0365a1a817"}, - {file = "mypy-1.8.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9261ed810972061388918c83c3f5cd46079d875026ba97380f3e3978a72f503d"}, - {file = "mypy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:51720c776d148bad2372ca21ca29256ed483aa9a4cdefefcef49006dff2a6835"}, - {file = "mypy-1.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:52825b01f5c4c1c4eb0db253ec09c7aa17e1a7304d247c48b6f3599ef40db8bd"}, - {file = "mypy-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f5ac9a4eeb1ec0f1ccdc6f326bcdb464de5f80eb07fb38b5ddd7b0de6bc61e55"}, - {file = "mypy-1.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afe3fe972c645b4632c563d3f3eff1cdca2fa058f730df2b93a35e3b0c538218"}, - {file = "mypy-1.8.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:42c6680d256ab35637ef88891c6bd02514ccb7e1122133ac96055ff458f93fc3"}, - {file = "mypy-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:720a5ca70e136b675af3af63db533c1c8c9181314d207568bbe79051f122669e"}, - {file = "mypy-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:028cf9f2cae89e202d7b6593cd98db6759379f17a319b5faf4f9978d7084cdc6"}, - {file = "mypy-1.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4e6d97288757e1ddba10dd9549ac27982e3e74a49d8d0179fc14d4365c7add66"}, - {file = "mypy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f1478736fcebb90f97e40aff11a5f253af890c845ee0c850fe80aa060a267c6"}, - {file = "mypy-1.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42419861b43e6962a649068a61f4a4839205a3ef525b858377a960b9e2de6e0d"}, - {file = "mypy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:2b5b6c721bd4aabaadead3a5e6fa85c11c6c795e0c81a7215776ef8afc66de02"}, - {file = "mypy-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:5c1538c38584029352878a0466f03a8ee7547d7bd9f641f57a0f3017a7c905b8"}, - {file = "mypy-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ef4be7baf08a203170f29e89d79064463b7fc7a0908b9d0d5114e8009c3a259"}, - {file = "mypy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178def594014aa6c35a8ff411cf37d682f428b3b5617ca79029d8ae72f5402b"}, - {file = "mypy-1.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ab3c84fa13c04aeeeabb2a7f67a25ef5d77ac9d6486ff33ded762ef353aa5592"}, - {file = "mypy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:99b00bc72855812a60d253420d8a2eae839b0afa4938f09f4d2aa9bb4654263a"}, - {file = "mypy-1.8.0-py3-none-any.whl", hash = "sha256:538fd81bb5e430cc1381a443971c0475582ff9f434c16cd46d2c66763ce85d9d"}, - {file = "mypy-1.8.0.tar.gz", hash = "sha256:6ff8b244d7085a0b425b56d327b480c3b29cafbd2eff27316a004f9a7391ae07"}, + {file = "mypy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f8a67616990062232ee4c3952f41c779afac41405806042a8126fe96e098419f"}, + {file = "mypy-1.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d357423fa57a489e8c47b7c85dfb96698caba13d66e086b412298a1a0ea3b0ed"}, + {file = "mypy-1.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49c87c15aed320de9b438ae7b00c1ac91cd393c1b854c2ce538e2a72d55df150"}, + {file = "mypy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:48533cdd345c3c2e5ef48ba3b0d3880b257b423e7995dada04248725c6f77374"}, + {file = "mypy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:4d3dbd346cfec7cb98e6cbb6e0f3c23618af826316188d587d1c1bc34f0ede03"}, + {file = "mypy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:653265f9a2784db65bfca694d1edd23093ce49740b2244cde583aeb134c008f3"}, + {file = "mypy-1.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a3c007ff3ee90f69cf0a15cbcdf0995749569b86b6d2f327af01fd1b8aee9dc"}, + {file = "mypy-1.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2418488264eb41f69cc64a69a745fad4a8f86649af4b1041a4c64ee61fc61129"}, + {file = "mypy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:68edad3dc7d70f2f17ae4c6c1b9471a56138ca22722487eebacfd1eb5321d612"}, + {file = "mypy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:85ca5fcc24f0b4aeedc1d02f93707bccc04733f21d41c88334c5482219b1ccb3"}, + {file = "mypy-1.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aceb1db093b04db5cd390821464504111b8ec3e351eb85afd1433490163d60cd"}, + {file = "mypy-1.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0235391f1c6f6ce487b23b9dbd1327b4ec33bb93934aa986efe8a9563d9349e6"}, + {file = "mypy-1.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4d5ddc13421ba3e2e082a6c2d74c2ddb3979c39b582dacd53dd5d9431237185"}, + {file = "mypy-1.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:190da1ee69b427d7efa8aa0d5e5ccd67a4fb04038c380237a0d96829cb157913"}, + {file = "mypy-1.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:fe28657de3bfec596bbeef01cb219833ad9d38dd5393fc649f4b366840baefe6"}, + {file = "mypy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e54396d70be04b34f31d2edf3362c1edd023246c82f1730bbf8768c28db5361b"}, + {file = "mypy-1.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5e6061f44f2313b94f920e91b204ec600982961e07a17e0f6cd83371cb23f5c2"}, + {file = "mypy-1.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a10926e5473c5fc3da8abb04119a1f5811a236dc3a38d92015cb1e6ba4cb9e"}, + {file = 
"mypy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b685154e22e4e9199fc95f298661deea28aaede5ae16ccc8cbb1045e716b3e04"}, + {file = "mypy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:5d741d3fc7c4da608764073089e5f58ef6352bedc223ff58f2f038c2c4698a89"}, + {file = "mypy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:587ce887f75dd9700252a3abbc9c97bbe165a4a630597845c61279cf32dfbf02"}, + {file = "mypy-1.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f88566144752999351725ac623471661c9d1cd8caa0134ff98cceeea181789f4"}, + {file = "mypy-1.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61758fabd58ce4b0720ae1e2fea5cfd4431591d6d590b197775329264f86311d"}, + {file = "mypy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e49499be624dead83927e70c756970a0bc8240e9f769389cdf5714b0784ca6bf"}, + {file = "mypy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:571741dc4194b4f82d344b15e8837e8c5fcc462d66d076748142327626a1b6e9"}, + {file = "mypy-1.9.0-py3-none-any.whl", hash = "sha256:a260627a570559181a9ea5de61ac6297aa5af202f06fd7ab093ce74e7181e43e"}, + {file = "mypy-1.9.0.tar.gz", hash = "sha256:3cc5da0127e6a478cddd906068496a97a7618a21ce9b54bde5bf7e539c7af974"}, ] [package.dependencies] @@ -1119,17 +1119,17 @@ files = [ [[package]] name = "mypy-zope" -version = "1.0.3" +version = "1.0.4" description = "Plugin for mypy to support zope interfaces" optional = false python-versions = "*" files = [ - {file = "mypy-zope-1.0.3.tar.gz", hash = "sha256:149081bd2754d947747baefac569bb1c2bc127b4a2cc1fa505492336946bb3b4"}, - {file = "mypy_zope-1.0.3-py3-none-any.whl", hash = "sha256:7a30ce1a2589173f0be66662c9a9179f75737afc40e4104df4c76fb5a8421c14"}, + {file = "mypy-zope-1.0.4.tar.gz", hash = "sha256:a9569e73ae85a65247787d98590fa6d4290e76f26aabe035d1c3e94a0b9ab6ee"}, + {file = "mypy_zope-1.0.4-py3-none-any.whl", hash = "sha256:c7298f93963a84f2b145c2b5cc98709fc2a5be4adf54bfe23fa7fdd8fd19c975"}, ] [package.dependencies] -mypy = ">=1.0.0,<1.9.0" +mypy = ">=1.0.0,<1.10.0" "zope.interface" = "*" "zope.schema" = "*" @@ -2490,4 +2490,4 @@ sentry = ["sentry-sdk", "structlog-sentry"] [metadata] lock-version = "2.0" python-versions = ">=3.10,<4" -content-hash = "cce7b9832ae2d13cc56fb572af82face7a824307ddd6953387737a27d6e7088a" +content-hash = "1eed0fc6c02c4ddb7b4a6634d6c5ba4873ce5a82c6b3d4197ca88b4644474c53" diff --git a/pyproject.toml b/pyproject.toml index e27cfb609..f5ac12f1c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,7 +14,7 @@ [tool.poetry] name = "hathor" -version = "0.59.0" +version = "0.60.0" description = "Hathor Network full-node" authors = ["Hathor Team "] license = "Apache-2.0" @@ -38,8 +38,8 @@ hathor-cli = 'hathor.cli.main:main' [tool.poetry.dev-dependencies] flake8 = "~6.1.0" isort = {version = "~5.12.0", extras = ["colors"]} -mypy = {version = "^1.5.1", markers = "implementation_name == 'cpython'"} -mypy-zope = {version = "^1.0.1", markers = "implementation_name == 'cpython'"} +mypy = {version = "^1.9.0", markers = "implementation_name == 'cpython'"} +mypy-zope = {version = "^1.0.4", markers = "implementation_name == 'cpython'"} pytest = "~7.4.3" pytest-cov = "~4.1.0" flaky = "~3.7.0" @@ -97,6 +97,8 @@ multi_line_output = 3 pretty = true disallow_incomplete_defs = true no_implicit_optional = true +extra_checks = true +disallow_untyped_decorators = true warn_redundant_casts = true warn_unused_configs = true warn_unused_ignores = true @@ -131,6 +133,37 @@ module = [ ] ignore_missing_imports = true +# This override enables stricter rules for some 
specific modules. +# Currently, we have only two options from strict-mode that are disabled, but we have to opt-in instead of opt-out +# because setting strict=true doesn't work for module-level settings. +# Reference: https://mypy.readthedocs.io/en/stable/existing_code.html#introduce-stricter-options +[[tool.mypy.overrides]] +module = [ + "hathor.consensus.*", + "hathor.feature_activation.*", + "hathor.event.*", + "hathor.verification.*", + "tests.consensus.*", + "tests.crypto.*", + "tests.event.*", + "tests.execution_manager.*", + "tests.feature_activation.*", + "tests.p2p.*", + "tests.pubsub.*", + "tests.simulation.*", + "tests.unittest", + "tests.utils", +] +strict_equality = true +strict_concatenate = true +check_untyped_defs = true +disallow_any_generics = true +disallow_untyped_defs = true +no_implicit_reexport = true +warn_return_any = true +# disallow_subclassing_any = true +# disallow_untyped_calls = true + [tool.pydantic-mypy] init_typed = true init_forbid_extra = true diff --git a/slow_tests/test_simulator.py b/slow_tests/test_simulator.py index 59c4c1967..f5477ad59 100644 --- a/slow_tests/test_simulator.py +++ b/slow_tests/test_simulator.py @@ -26,8 +26,8 @@ def setUp(self): print('Simulation seed config:', self.random_seed) print('-'*30) - def verify_pow(self) -> None: - assert self.hash is not None + def verify_pow(_) -> None: + pass self.old_verify_pow = BaseTransaction.verify_pow BaseTransaction.verify_pow = verify_pow diff --git a/tests/consensus/test_consensus.py b/tests/consensus/test_consensus.py index caa455a54..797b88a1f 100644 --- a/tests/consensus/test_consensus.py +++ b/tests/consensus/test_consensus.py @@ -1,7 +1,9 @@ -from unittest.mock import MagicMock +from unittest.mock import MagicMock, Mock +from hathor.execution_manager import ExecutionManager from hathor.simulator.utils import add_new_block, add_new_blocks, gen_new_tx from hathor.transaction.storage import TransactionMemoryStorage +from hathor.util import not_none from tests import unittest from tests.utils import add_blocks_unlock_reward, add_new_double_spending, add_new_transactions @@ -9,14 +11,14 @@ class BaseConsensusTestCase(unittest.TestCase): __test__ = False - def setUp(self): + def setUp(self) -> None: super().setUp() self.tx_storage = TransactionMemoryStorage() self.genesis = self.tx_storage.get_all_genesis() self.genesis_blocks = [tx for tx in self.genesis if tx.is_block] self.genesis_txs = [tx for tx in self.genesis if not tx.is_block] - def test_unhandled_exception(self): + def test_unhandled_exception(self) -> None: manager = self.create_peer('testnet', tx_storage=self.tx_storage) # Mine a few blocks in a row with no transaction but the genesis @@ -30,16 +32,21 @@ def test_unhandled_exception(self): class MyError(Exception): pass + execution_manager_mock = Mock(spec_set=ExecutionManager) + manager.consensus_algorithm._execution_manager = execution_manager_mock manager.consensus_algorithm._unsafe_update = MagicMock(side_effect=MyError) - with self.assertRaises(MyError): - manager.propagate_tx(tx, fails_silently=False) + manager.propagate_tx(tx, fails_silently=False) + + execution_manager_mock.crash_and_exit.assert_called_once_with( + reason=f"Consensus update failed for tx {tx.hash_hex}" + ) tx2 = manager.tx_storage.get_transaction(tx.hash) meta2 = tx2.get_metadata() self.assertEqual({self._settings.CONSENSUS_FAIL_ID}, meta2.voided_by) - def test_revert_block_high_weight(self): + def test_revert_block_high_weight(self) -> None: """ A conflict transaction will be propagated. 
At first, it will be voided. But, a new block with high weight will verify it, which will flip it to executed. """ @@ -74,7 +81,6 @@ def test_revert_block_high_weight(self): b0 = tb0.generate_mining_block(manager.rng, storage=manager.tx_storage) b0.weight = 10 manager.cpu_mining_service.resolve(b0) - manager.verification_service.verify(b0) manager.propagate_tx(b0, fails_silently=False) b1 = add_new_block(manager, advance_clock=15) @@ -102,7 +108,7 @@ def test_revert_block_high_weight(self): self.assertConsensusValid(manager) - def test_dont_revert_block_low_weight(self): + def test_dont_revert_block_low_weight(self) -> None: """ A conflict transaction will be propagated and voided. A new block with low weight will verify it, which won't be enough to flip to executed. So, it will remain voided. @@ -136,7 +142,6 @@ def test_dont_revert_block_low_weight(self): b0 = manager.generate_mining_block() b0.parents = [blocks[-1].hash, conflicting_tx.hash, conflicting_tx.parents[0]] manager.cpu_mining_service.resolve(b0) - manager.verification_service.verify(b0) manager.propagate_tx(b0, fails_silently=False) b1 = add_new_block(manager, advance_clock=15) @@ -156,7 +161,7 @@ def test_dont_revert_block_low_weight(self): self.assertConsensusValid(manager) - def test_dont_revert_block_high_weight_transaction_verify_other(self): + def test_dont_revert_block_high_weight_transaction_verify_other(self) -> None: """ A conflict transaction will be propagated and voided. But this transaction verifies its conflicting transaction. So, its accumulated weight will always be smaller than the others and it will never be executed. @@ -174,8 +179,8 @@ def test_dont_revert_block_high_weight_transaction_verify_other(self): # Create a double spending transaction. conflicting_tx = add_new_double_spending(manager, tx=txs[-1]) meta = conflicting_tx.get_metadata() - self.assertEqual(len(meta.conflict_with), 1) - self.assertIn(list(meta.conflict_with)[0], conflicting_tx.parents) + self.assertEqual(len(not_none(meta.conflict_with)), 1) + self.assertIn(not_none(meta.conflict_with)[0], conflicting_tx.parents) # Add a few transactions. add_new_transactions(manager, 10, advance_clock=15) @@ -192,7 +197,6 @@ def test_dont_revert_block_high_weight_transaction_verify_other(self): b0 = tb0.generate_mining_block(manager.rng, storage=manager.tx_storage) b0.weight = 10 manager.cpu_mining_service.resolve(b0) - manager.verification_service.verify(b0) manager.propagate_tx(b0, fails_silently=False) b1 = add_new_block(manager, advance_clock=15) @@ -213,7 +217,7 @@ def test_dont_revert_block_high_weight_transaction_verify_other(self): self.assertConsensusValid(manager) - def test_dont_revert_block_high_weight_verify_both(self): + def test_dont_revert_block_high_weight_verify_both(self) -> None: """ A conflicting transaction will be propagated and voided. But the block with high weight verifies both the conflicting transactions, so this block will always be voided. 
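        That is, because its weight counts for both sides of the conflict at once, it
        can never tip the resolution towards either transaction.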
""" @@ -246,7 +250,6 @@ def test_dont_revert_block_high_weight_verify_both(self): b0.parents = [b0.parents[0], conflicting_tx.hash, conflicting_tx.parents[0]] b0.weight = 10 manager.cpu_mining_service.resolve(b0) - manager.verification_service.verify(b0) manager.propagate_tx(b0, fails_silently=False) b1 = add_new_block(manager, advance_clock=15) diff --git a/tests/consensus/test_consensus2.py b/tests/consensus/test_consensus2.py index d8993c69e..da40a5703 100644 --- a/tests/consensus/test_consensus2.py +++ b/tests/consensus/test_consensus2.py @@ -1,12 +1,15 @@ from hathor.graphviz import GraphvizVisualizer +from hathor.manager import HathorManager from hathor.simulator.utils import gen_new_tx +from hathor.transaction import Transaction +from hathor.util import not_none from tests import unittest from tests.simulation.base import SimulatorTestCase from tests.utils import add_custom_tx class BaseConsensusSimulatorTestCase(SimulatorTestCase): - def checkConflict(self, tx1, tx2): + def checkConflict(self, tx1: Transaction, tx2: Transaction) -> None: meta1 = tx1.get_metadata() meta2 = tx2.get_metadata() self.assertIn(tx1.hash, meta2.conflict_with) @@ -19,7 +22,7 @@ def checkConflict(self, tx1, tx2): cnt += 1 self.assertLessEqual(cnt, 1) - def do_step(self, i, manager1, tx_base): + def do_step(self, i: int, manager1: HathorManager, tx_base: Transaction) -> Transaction: txA = add_custom_tx(manager1, [(tx_base, 0)], n_outputs=2) self.graphviz.labels[txA.hash] = f'txA-{i}' @@ -52,7 +55,7 @@ def do_step(self, i, manager1, tx_base): return txH - def test_two_conflicts_intertwined_once(self): + def test_two_conflicts_intertwined_once(self) -> None: manager1 = self.create_peer() manager1.allow_mining_without_peers() @@ -72,6 +75,7 @@ def test_two_conflicts_intertwined_once(self): self.graphviz = GraphvizVisualizer(manager1.tx_storage, include_verifications=True, include_funds=True) + assert manager1.wallet is not None address = manager1.wallet.get_unused_address(mark_as_used=False) value = 10 initial = gen_new_tx(manager1, address, value) @@ -87,7 +91,7 @@ def test_two_conflicts_intertwined_once(self): # dot = self.graphviz.dot() # dot.render('dot0') - def test_two_conflicts_intertwined_multiple_times(self): + def test_two_conflicts_intertwined_multiple_times(self) -> None: manager1 = self.create_peer() manager1.allow_mining_without_peers() @@ -107,13 +111,14 @@ def test_two_conflicts_intertwined_multiple_times(self): self.graphviz = GraphvizVisualizer(manager1.tx_storage, include_verifications=True, include_funds=True) + assert manager1.wallet is not None address = manager1.wallet.get_unused_address(mark_as_used=False) value = 10 initial = gen_new_tx(manager1, address, value) initial.weight = 25 initial.update_hash() manager1.propagate_tx(initial, fails_silently=False) - self.graphviz.labels[initial.hash] = 'initial' + self.graphviz.labels[not_none(initial.hash)] = 'initial' x = initial x = self.do_step(0, manager1, x) diff --git a/tests/consensus/test_consensus3.py b/tests/consensus/test_consensus3.py index 70099975c..dad7ca70b 100644 --- a/tests/consensus/test_consensus3.py +++ b/tests/consensus/test_consensus3.py @@ -6,14 +6,14 @@ class DoubleSpendingTestCase(unittest.TestCase): - def setUp(self): + def setUp(self) -> None: super().setUp() self.network = 'testnet' self.manager1 = self.create_peer(self.network, unlock_wallet=True, enable_sync_v1=True, enable_sync_v2=False) @pytest.mark.xfail(strict=True) - def test_double_spending_attempt_1(self): + def test_double_spending_attempt_1(self) -> None: 
manager = self.manager1 add_new_blocks(manager, 5, advance_clock=15) @@ -38,7 +38,7 @@ def test_double_spending_attempt_1(self): manager.cpu_mining_service.resolve(tx_fund0) self.assertTrue(manager.propagate_tx(tx_fund0)) - def do_step(tx_fund): + def do_step(tx_fund: Transaction) -> Transaction: inputs = [WalletInputInfo(tx_fund.hash, 0, manager.wallet.get_private_key(addr))] outputs = [WalletOutputInfo(decode_address(addr), 1, None)] tx1 = manager.wallet.prepare_transaction(Transaction, inputs, outputs, tx_fund.timestamp+1) @@ -79,7 +79,7 @@ def do_step(tx_fund): outputs = [] outputs.append(WalletOutputInfo(decode_address(addr), 1, None)) outputs.append(WalletOutputInfo(decode_address(addr), 2*tx_fund.outputs[1].value, None)) - tx5 = manager.wallet.prepare_transaction(Transaction, inputs, outputs, tx2.timestamp+1) + tx5: Transaction = manager.wallet.prepare_transaction(Transaction, inputs, outputs, tx2.timestamp+1) tx5.weight = tx3.weight - tx1.weight + 0.1 tx5.parents = [tx2.hash, tx4.hash] manager.cpu_mining_service.resolve(tx5) @@ -105,7 +105,7 @@ def do_step(tx_fund): self.assertConsensusValid(manager) @pytest.mark.xfail(strict=True) - def test_double_spending_attempt_2(self): + def test_double_spending_attempt_2(self) -> None: manager = self.manager1 add_new_blocks(manager, 5, advance_clock=15) @@ -128,7 +128,7 @@ def test_double_spending_attempt_2(self): manager.cpu_mining_service.resolve(tx_fund0) self.assertTrue(manager.propagate_tx(tx_fund0)) - def do_step(tx_fund): + def do_step(tx_fund: Transaction) -> Transaction: inputs = [WalletInputInfo(tx_fund.hash, 0, manager.wallet.get_private_key(addr))] outputs = [WalletOutputInfo(decode_address(addr), 1, None)] tx1 = manager.wallet.prepare_transaction(Transaction, inputs, outputs, tx_fund.timestamp+1) @@ -174,7 +174,7 @@ def do_step(tx_fund): outputs.append(WalletOutputInfo(decode_address(addr), 1, None)) outputs.append(WalletOutputInfo(decode_address(addr), 1, None)) outputs.append(WalletOutputInfo(decode_address(addr), 2*tx_fund.outputs[2].value, None)) - tx5 = manager.wallet.prepare_transaction(Transaction, inputs, outputs, tx4.timestamp+1) + tx5: Transaction = manager.wallet.prepare_transaction(Transaction, inputs, outputs, tx4.timestamp+1) tx5.weight = 1 tx5.parents = manager.get_new_tx_parents(tx5.timestamp) manager.cpu_mining_service.resolve(tx5) diff --git a/tests/consensus/test_consensus4.py b/tests/consensus/test_consensus4.py index bdc3f2047..906a82b10 100644 --- a/tests/consensus/test_consensus4.py +++ b/tests/consensus/test_consensus4.py @@ -1,4 +1,7 @@ from hathor.graphviz import GraphvizVisualizer +from hathor.manager import HathorManager +from hathor.transaction import Block +from hathor.types import VertexId from tests import unittest from tests.simulation.base import SimulatorTestCase from tests.utils import gen_custom_tx @@ -6,7 +9,14 @@ class BaseConsensusSimulatorTestCase(SimulatorTestCase): - def create_chain(self, manager, first_parent_block_hash, length, prefix, tx_parents=None): + def create_chain( + self, + manager: HathorManager, + first_parent_block_hash: VertexId, + length: int, + prefix: str, + tx_parents: list[VertexId] | None = None + ) -> list[Block]: current = first_parent_block_hash v = [] for i in range(length): @@ -23,7 +33,7 @@ def create_chain(self, manager, first_parent_block_hash, length, prefix, tx_pare current = blk.hash return v - def test_conflict_with_parent_tx(self): + def test_conflict_with_parent_tx(self) -> None: manager1 = self.create_peer() manager1.allow_mining_without_peers() diff 
--git a/tests/consensus/test_consensus5.py b/tests/consensus/test_consensus5.py index c6e4bebc7..19feaded3 100644 --- a/tests/consensus/test_consensus5.py +++ b/tests/consensus/test_consensus5.py @@ -1,4 +1,7 @@ from hathor.graphviz import GraphvizVisualizer +from hathor.manager import HathorManager +from hathor.transaction import Block +from hathor.types import VertexId from tests import unittest from tests.simulation.base import SimulatorTestCase from tests.utils import gen_custom_tx @@ -6,7 +9,14 @@ class BaseConsensusSimulatorTestCase(SimulatorTestCase): - def create_chain(self, manager, first_parent_block_hash, length, prefix, tx_parents=None): + def create_chain( + self, + manager: HathorManager, + first_parent_block_hash: VertexId, + length: int, + prefix: str, + tx_parents: list[VertexId] | None = None + ) -> list[Block]: current = first_parent_block_hash v = [] for i in range(length): @@ -23,7 +33,7 @@ def create_chain(self, manager, first_parent_block_hash, length, prefix, tx_pare current = blk.hash return v - def test_conflict_with_parent_tx(self): + def test_conflict_with_parent_tx(self) -> None: manager1 = self.create_peer() manager1.allow_mining_without_peers() diff --git a/tests/consensus/test_soft_voided.py b/tests/consensus/test_soft_voided.py index 97bb0d562..fd43b0f7e 100644 --- a/tests/consensus/test_soft_voided.py +++ b/tests/consensus/test_soft_voided.py @@ -1,7 +1,11 @@ +from typing import Iterator + from hathor.graphviz import GraphvizVisualizer -from hathor.simulator import FakeConnection, Simulator +from hathor.simulator import FakeConnection, RandomTransactionGenerator, Simulator from hathor.simulator.trigger import StopAfterNTransactions from hathor.simulator.utils import gen_new_tx +from hathor.transaction import Block +from hathor.types import VertexId from tests import unittest from tests.simulation.base import SimulatorTestCase from tests.utils import add_custom_tx @@ -10,14 +14,19 @@ class BaseSoftVoidedTestCase(SimulatorTestCase): seed_config = 5988775361793628170 - def assertNoParentsAreSoftVoided(self, tx): + def assertNoParentsAreSoftVoided(self, tx: Block) -> None: + assert tx.storage is not None for h in tx.parents: tx2 = tx.storage.get_transaction(h) tx2_meta = tx2.get_metadata() tx2_voided_by = tx2_meta.voided_by or set() self.assertNotIn(self._settings.SOFT_VOIDED_ID, tx2_voided_by) - def _run_test(self, simulator, soft_voided_tx_ids): + def _run_test( + self, + simulator: Simulator, + soft_voided_tx_ids: set[VertexId] + ) -> Iterator[RandomTransactionGenerator]: manager1 = self.create_peer(soft_voided_tx_ids=soft_voided_tx_ids, simulator=simulator) manager1.allow_mining_without_peers() @@ -30,7 +39,6 @@ def _run_test(self, simulator, soft_voided_tx_ids): simulator.run(300) manager2 = self.create_peer(soft_voided_tx_ids=soft_voided_tx_ids, simulator=simulator) - manager2.soft_voided_tx_ids = soft_voided_tx_ids graphviz = GraphvizVisualizer(manager2.tx_storage, include_verifications=True, include_funds=True) @@ -74,6 +82,7 @@ def _run_test(self, simulator, soft_voided_tx_ids): metaD1 = txD1.get_metadata() self.assertEqual({txA.hash, txD1.hash}, metaD1.voided_by) + assert manager2.wallet is not None address = manager2.wallet.get_unused_address(mark_as_used=False) value = 1 txC = gen_new_tx(manager2, address, value) @@ -127,7 +136,7 @@ def _run_test(self, simulator, soft_voided_tx_ids): # dot = graphviz.dot() # dot.render('dot0') - def _get_txA_hash(self): + def _get_txA_hash(self) -> VertexId: simulator = Simulator(seed=self.simulator.seed) 
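        # Re-running a fresh Simulator with the same seed reproduces the run
        # deterministically, so the txA hash captured here matches the tx that the
        # actual test will then soft-void.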
simulator.start() @@ -140,7 +149,7 @@ def _get_txA_hash(self): return txA_hash - def test_soft_voided(self): + def test_soft_voided(self) -> None: txA_hash = self._get_txA_hash() soft_voided_tx_ids = set([ txA_hash, diff --git a/tests/consensus/test_soft_voided2.py b/tests/consensus/test_soft_voided2.py index 3e03de57a..116761235 100644 --- a/tests/consensus/test_soft_voided2.py +++ b/tests/consensus/test_soft_voided2.py @@ -1,6 +1,12 @@ +from typing import Iterator + from hathor.graphviz import GraphvizVisualizer +from hathor.manager import HathorManager from hathor.simulator import Simulator from hathor.simulator.utils import gen_new_tx +from hathor.transaction import Block, Transaction +from hathor.types import VertexId +from hathor.wallet import HDWallet from tests import unittest from tests.simulation.base import SimulatorTestCase from tests.utils import BURN_ADDRESS, add_custom_tx @@ -9,7 +15,7 @@ class BaseConsensusSimulatorTestCase(SimulatorTestCase): seed_config = 5988775361793628169 - def assertValidConflictResolution(self, tx1, tx2): + def assertValidConflictResolution(self, tx1: Transaction, tx2: Transaction) -> None: meta1 = tx1.get_metadata() meta2 = tx2.get_metadata() @@ -28,8 +34,9 @@ def assertValidConflictResolution(self, tx1, tx2): cnt += 1 self.assertLessEqual(cnt, 1) - def do_step(self, i, manager1, tx_base): + def do_step(self, i: int, manager1: HathorManager, tx_base: Transaction) -> Transaction: wallet = manager1.wallet + assert isinstance(wallet, HDWallet) address = wallet.get_address(wallet.get_key_at_index(0)) txA = add_custom_tx(manager1, [(tx_base, 0)], n_outputs=2, address=address) @@ -86,7 +93,7 @@ def do_step(self, i, manager1, tx_base): return txH - def gen_block(self, manager1, tx, parent_block=None): + def gen_block(self, manager1: HathorManager, tx: Transaction, parent_block: Block | None = None) -> Block: parent_block_hash = parent_block.hash if parent_block else None block = manager1.generate_mining_block(parent_block_hash=parent_block_hash, address=BURN_ADDRESS) block.parents[1] = tx.hash @@ -96,8 +103,8 @@ def gen_block(self, manager1, tx, parent_block=None): self.assertTrue(manager1.propagate_tx(block, fails_silently=False)) return block - def _run_test(self, simulator, soft_voided_tx_ids): - self.txF_hashes = [] + def _run_test(self, simulator: Simulator, soft_voided_tx_ids: set[VertexId]) -> Iterator[None]: + self.txF_hashes: list[VertexId] = [] manager1 = self.create_peer(soft_voided_tx_ids=soft_voided_tx_ids, simulator=simulator) manager1.allow_mining_without_peers() @@ -112,6 +119,7 @@ def _run_test(self, simulator, soft_voided_tx_ids): self.graphviz = GraphvizVisualizer(manager1.tx_storage, include_verifications=True, include_funds=True) + assert manager1.wallet is not None address = manager1.wallet.get_unused_address(mark_as_used=False) value = 10 initial = gen_new_tx(manager1, address, value) @@ -180,7 +188,7 @@ def _run_test(self, simulator, soft_voided_tx_ids): # dot = self.graphviz.dot() # dot.render('dot0') - def _get_txF_hashes(self): + def _get_txF_hashes(self) -> list[VertexId]: self.skip_asserts = True simulator = Simulator(seed=self.simulator.seed) simulator.start() @@ -194,7 +202,7 @@ def _get_txF_hashes(self): return list(self.txF_hashes) - def test_soft_voided(self): + def test_soft_voided(self) -> None: txF_hashes = self._get_txF_hashes() self.assertEqual(10, len(txF_hashes)) soft_voided_tx_ids = set(txF_hashes) diff --git a/tests/consensus/test_soft_voided3.py b/tests/consensus/test_soft_voided3.py index 92af7c201..807d6fe8c 
100644 --- a/tests/consensus/test_soft_voided3.py +++ b/tests/consensus/test_soft_voided3.py @@ -1,7 +1,11 @@ +from typing import Iterator + from hathor.graphviz import GraphvizVisualizer -from hathor.simulator import FakeConnection, Simulator +from hathor.simulator import FakeConnection, RandomTransactionGenerator, Simulator from hathor.simulator.trigger import StopAfterNTransactions from hathor.simulator.utils import gen_new_tx +from hathor.transaction import BaseTransaction +from hathor.types import VertexId from tests import unittest from tests.simulation.base import SimulatorTestCase from tests.utils import add_custom_tx, gen_custom_tx @@ -10,14 +14,19 @@ class BaseSoftVoidedTestCase(SimulatorTestCase): seed_config = 5988775361793628169 - def assertNoParentsAreSoftVoided(self, tx): + def assertNoParentsAreSoftVoided(self, tx: BaseTransaction) -> None: + assert tx.storage is not None for h in tx.parents: tx2 = tx.storage.get_transaction(h) tx2_meta = tx2.get_metadata() tx2_voided_by = tx2_meta.voided_by or set() self.assertNotIn(self._settings.SOFT_VOIDED_ID, tx2_voided_by) - def _run_test(self, simulator, soft_voided_tx_ids): + def _run_test( + self, + simulator: Simulator, + soft_voided_tx_ids: set[VertexId] + ) -> Iterator[RandomTransactionGenerator]: manager1 = self.create_peer(soft_voided_tx_ids=soft_voided_tx_ids, simulator=simulator) manager1.allow_mining_without_peers() @@ -30,7 +39,6 @@ def _run_test(self, simulator, soft_voided_tx_ids): simulator.run(300) manager2 = self.create_peer(soft_voided_tx_ids=soft_voided_tx_ids, simulator=simulator) - manager2.soft_voided_tx_ids = soft_voided_tx_ids graphviz = GraphvizVisualizer(manager2.tx_storage, include_verifications=True, include_funds=True) @@ -83,6 +91,7 @@ def _run_test(self, simulator, soft_voided_tx_ids): graphviz.labels[blk1.hash] = 'blk1' simulator.run(10) + assert manager2.wallet is not None address = manager2.wallet.get_unused_address(mark_as_used=True) txC = gen_new_tx(manager2, address, 6400) if txD1.hash not in txC.parents: @@ -109,7 +118,7 @@ def _run_test(self, simulator, soft_voided_tx_ids): # dot = graphviz.dot() # dot.render('test_soft_voided3') - def _get_txA_hash(self): + def _get_txA_hash(self) -> VertexId: simulator = Simulator(seed=self.simulator.seed) simulator.start() @@ -122,7 +131,7 @@ def _get_txA_hash(self): return txA_hash - def test_soft_voided(self): + def test_soft_voided(self) -> None: txA_hash = self._get_txA_hash() soft_voided_tx_ids = set([ txA_hash, diff --git a/tests/consensus/test_soft_voided4.py b/tests/consensus/test_soft_voided4.py index bd914b341..cb9e1db7d 100644 --- a/tests/consensus/test_soft_voided4.py +++ b/tests/consensus/test_soft_voided4.py @@ -1,7 +1,11 @@ +from typing import Iterator + from hathor.graphviz import GraphvizVisualizer -from hathor.simulator import FakeConnection, Simulator +from hathor.simulator import FakeConnection, RandomTransactionGenerator, Simulator from hathor.simulator.trigger import StopAfterNTransactions from hathor.simulator.utils import gen_new_double_spending +from hathor.transaction import Transaction +from hathor.types import VertexId from tests import unittest from tests.simulation.base import SimulatorTestCase from tests.utils import add_custom_tx @@ -10,7 +14,11 @@ class BaseSoftVoidedTestCase(SimulatorTestCase): seed_config = 5988775361793628169 - def _run_test(self, simulator, soft_voided_tx_ids): + def _run_test( + self, + simulator: Simulator, + soft_voided_tx_ids: list[VertexId] + ) -> Iterator[RandomTransactionGenerator]: manager1 = 
self.create_peer(soft_voided_tx_ids=set(soft_voided_tx_ids), simulator=simulator) manager1.allow_mining_without_peers() @@ -24,7 +32,6 @@ def _run_test(self, simulator, soft_voided_tx_ids): gen_tx1.stop() manager2 = self.create_peer(soft_voided_tx_ids=set(soft_voided_tx_ids), simulator=simulator) - manager2.soft_voided_tx_ids = set(soft_voided_tx_ids) self.graphviz = GraphvizVisualizer(manager2.tx_storage, include_verifications=True, include_funds=True) @@ -54,6 +61,7 @@ def _run_test(self, simulator, soft_voided_tx_ids): gen_tx2.stop() + assert isinstance(soft_voided_tx_ids, list) self.assertEqual(2, len(soft_voided_tx_ids)) txA_hash = soft_voided_tx_ids[0] txB_hash = soft_voided_tx_ids[1] @@ -61,9 +69,11 @@ def _run_test(self, simulator, soft_voided_tx_ids): self.graphviz.labels[txB_hash] = 'txB' txB = manager2.tx_storage.get_transaction(txB_hash) + assert isinstance(txB, Transaction) # Get the tx confirmed by the soft voided that will be voided tx_base = manager2.tx_storage.get_transaction(txB.parents[0]) + assert isinstance(tx_base, Transaction) txC = gen_new_double_spending(manager2, use_same_parents=False, tx=tx_base) self.graphviz.labels[tx_base.hash] = 'tx_base' txC.weight = 30 @@ -125,12 +135,12 @@ def _run_test(self, simulator, soft_voided_tx_ids): metaD = txD.get_metadata() self.assertEqual(metaD.voided_by, {tx_base.hash}) - def _get_txA_hash(self): + def _get_txA_hash(self) -> VertexId: simulator = Simulator(seed=self.simulator.seed) simulator.start() try: - it = self._run_test(simulator, set()) + it = self._run_test(simulator, []) gen_tx = next(it) txA_hash = gen_tx.latest_transactions[0] finally: @@ -138,12 +148,12 @@ def _get_txA_hash(self): return txA_hash - def _get_txB_hash(self, txA_hash): + def _get_txB_hash(self, txA_hash: VertexId) -> VertexId: simulator = Simulator(seed=self.simulator.seed) simulator.start() try: - it = self._run_test(simulator, set([txA_hash])) + it = self._run_test(simulator, [txA_hash]) _ = next(it) _ = next(it) gen_tx = next(it) @@ -153,7 +163,7 @@ def _get_txB_hash(self, txA_hash): return txB_hash - def test_soft_voided(self): + def test_soft_voided(self) -> None: txA_hash = self._get_txA_hash() txB_hash = self._get_txB_hash(txA_hash) self.assertNotEqual(txA_hash, txB_hash) diff --git a/tests/crypto/test_util.py b/tests/crypto/test_util.py index e8ba0353b..17e611037 100644 --- a/tests/crypto/test_util.py +++ b/tests/crypto/test_util.py @@ -13,22 +13,24 @@ class CryptoUtilTestCase(unittest.TestCase): - def setUp(self): + def setUp(self) -> None: super().setUp() - self.private_key = ec.generate_private_key(ec.SECP256K1(), default_backend()) + key = ec.generate_private_key(ec.SECP256K1(), default_backend()) + assert isinstance(key, ec.EllipticCurvePrivateKeyWithSerialization) + self.private_key = key self.public_key = self.private_key.public_key() - def test_privkey_serialization(self): + def test_privkey_serialization(self) -> None: private_key_bytes = get_private_key_bytes(self.private_key) self.assertEqual(self.private_key.private_numbers(), get_private_key_from_bytes(private_key_bytes).private_numbers()) - def test_address(self): + def test_address(self) -> None: address = get_address_from_public_key(self.public_key) address_b58 = get_address_b58_from_public_key(self.public_key) self.assertEqual(address, decode_address(address_b58)) - def test_invalid_address(self): + def test_invalid_address(self) -> None: from hathor.wallet.exceptions import InvalidAddress address_b58 = get_address_b58_from_public_key(self.public_key) address_b58 += '0' # 0 
diff --git a/tests/event/event_simulation_tester.py b/tests/event/event_simulation_tester.py
index 338a90241..3e2bc4659 100644
--- a/tests/event/event_simulation_tester.py
+++ b/tests/event/event_simulation_tester.py
@@ -47,6 +47,7 @@ def _create_artifacts(self) -> None:
         self.settings = artifacts.settings

         event_ws_factory = self.manager._event_manager._event_ws_factory
+        assert event_ws_factory is not None
         event_ws_factory.openHandshakeTimeout = 0

         self.protocol = event_ws_factory.buildProtocol(addr=Mock())
diff --git a/tests/event/test_base_event.py b/tests/event/test_base_event.py
index 5751ae988..300157944 100644
--- a/tests/event/test_base_event.py
+++ b/tests/event/test_base_event.py
@@ -23,7 +23,7 @@
 @pytest.mark.parametrize('event_id', [0, 1, 1000])
 @pytest.mark.parametrize('group_id', [None, 0, 1, 1000])
-def test_create_base_event(event_id, group_id):
+def test_create_base_event(event_id: int, group_id: int | None) -> None:
     event = BaseEvent(
         id=event_id,
         timestamp=123.3,
@@ -40,6 +40,7 @@ def test_create_base_event(event_id, group_id):
         hash='abc',
         nonce=123,
         timestamp=456,
+        signal_bits=0,
         version=1,
         weight=10.0,
         inputs=[],
@@ -70,8 +71,8 @@
     assert event.dict() == expected


-@pytest.mark.parametrize('event_id', [None, -1, -1000])
-def test_create_base_event_fail_id(event_id):
+@pytest.mark.parametrize('event_id', [-1, -1000])
+def test_create_base_event_fail_id(event_id: int) -> None:
     with pytest.raises(ValidationError):
         BaseEvent(
             id=event_id,
@@ -82,7 +83,7 @@
 @pytest.mark.parametrize('group_id', [-1, -1000])
-def test_create_base_event_fail_group_id(group_id):
+def test_create_base_event_fail_group_id(group_id: int) -> None:
     with pytest.raises(ValidationError):
         BaseEvent(
             id=0,
@@ -93,7 +94,7 @@
-def test_create_base_event_fail_data_type():
+def test_create_base_event_fail_data_type() -> None:
     with pytest.raises(ValidationError):
         BaseEvent(
             id=0,
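Dropping `None` from the failure parametrization above is forced by the new `event_id: int` annotation: a case that exercises a wrong type contradicts the parameter's type and belongs in its own test with an explicit ignore. (The new `signal_bits=0` field mirrors the field added to `TxData` across the scenario fixtures further down.) A small sketch of that split, using a hypothetical `make_event` in place of `BaseEvent`:

import pytest


def make_event(event_id: int) -> dict:
    # Hypothetical stand-in for BaseEvent's validation.
    if not isinstance(event_id, int) or event_id < 0:
        raise ValueError('id must be a non-negative int')
    return {'id': event_id}


@pytest.mark.parametrize('event_id', [-1, -1000])
def test_fail_negative_id(event_id: int) -> None:
    with pytest.raises(ValueError):
        make_event(event_id)


def test_fail_none_id() -> None:
    # The wrong-type case lives apart from the typed parametrization.
    with pytest.raises(ValueError):
        make_event(None)  # type: ignore[arg-type]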
diff --git a/tests/event/test_event_manager.py b/tests/event/test_event_manager.py
index e8a09c83b..289d5b0f5 100644
--- a/tests/event/test_event_manager.py
+++ b/tests/event/test_event_manager.py
@@ -1,13 +1,14 @@
 from hathor.event.model.event_type import EventType
 from hathor.event.storage.memory_storage import EventMemoryStorage
 from hathor.pubsub import HathorEvents
+from hathor.util import not_none
 from tests import unittest


 class BaseEventManagerTest(unittest.TestCase):
     __test__ = False

-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         self.network = 'testnet'
         self.event_storage = EventMemoryStorage()
@@ -18,37 +19,37 @@ def setUp(self):
             event_storage=self.event_storage
         )

-    def test_if_event_is_persisted(self):
+    def test_if_event_is_persisted(self) -> None:
         block = self.manager.tx_storage.get_best_block()
         self.manager.pubsub.publish(HathorEvents.NETWORK_NEW_TX_ACCEPTED, tx=block)
         self.run_to_completion()
         self.assertIsNotNone(self.event_storage.get_event(0))

-    def _fake_reorg_started(self):
+    def _fake_reorg_started(self) -> None:
         block = self.manager.tx_storage.get_best_block()
         # XXX: since we're faking these events, they don't need to be consistent
         self.manager.pubsub.publish(HathorEvents.REORG_STARTED, old_best_height=1, old_best_block=block, new_best_height=1, new_best_block=block, reorg_size=1, common_block=block)

-    def _fake_reorg_finished(self):
+    def _fake_reorg_finished(self) -> None:
         self.manager.pubsub.publish(HathorEvents.REORG_FINISHED)

-    def test_event_group(self):
+    def test_event_group(self) -> None:
         self._fake_reorg_started()
         self._fake_reorg_finished()
         self._fake_reorg_started()
         self._fake_reorg_finished()
         self.run_to_completion()

-        event0 = self.event_storage.get_event(0)
-        event1 = self.event_storage.get_event(1)
-        event2 = self.event_storage.get_event(2)
-        event3 = self.event_storage.get_event(3)
-        event4 = self.event_storage.get_event(4)
-        event5 = self.event_storage.get_event(5)
-        event6 = self.event_storage.get_event(6)
-        event7 = self.event_storage.get_event(7)
-        event8 = self.event_storage.get_event(8)
+        event0 = not_none(self.event_storage.get_event(0))
+        event1 = not_none(self.event_storage.get_event(1))
+        event2 = not_none(self.event_storage.get_event(2))
+        event3 = not_none(self.event_storage.get_event(3))
+        event4 = not_none(self.event_storage.get_event(4))
+        event5 = not_none(self.event_storage.get_event(5))
+        event6 = not_none(self.event_storage.get_event(6))
+        event7 = not_none(self.event_storage.get_event(7))
+        event8 = not_none(self.event_storage.get_event(8))

         self.assertEqual(EventType(event0.type), EventType.LOAD_STARTED)
         self.assertEqual(EventType(event1.type), EventType.NEW_VERTEX_ACCEPTED)
@@ -66,19 +67,19 @@ def test_event_group(self):
         self.assertIsNotNone(event7.group_id)
         self.assertEqual(event7.group_id, event8.group_id)

-    def test_cannot_start_group_twice(self):
+    def test_cannot_start_group_twice(self) -> None:
         self._fake_reorg_started()
         self.run_to_completion()
         with self.assertRaises(AssertionError):
             self._fake_reorg_started()
             self.run_to_completion()

-    def test_cannot_finish_group_that_was_not_started(self):
+    def test_cannot_finish_group_that_was_not_started(self) -> None:
         with self.assertRaises(AssertionError):
             self._fake_reorg_finished()
             self.run_to_completion()

-    def test_cannot_finish_group_twice(self):
+    def test_cannot_finish_group_twice(self) -> None:
         self._fake_reorg_started()
         self._fake_reorg_finished()
         self.run_to_completion()
diff --git a/tests/event/test_event_reorg.py b/tests/event/test_event_reorg.py
index 81648f456..5c4a64b8c 100644
--- a/tests/event/test_event_reorg.py
+++ b/tests/event/test_event_reorg.py
@@ -8,7 +8,7 @@ class BaseEventReorgTest(unittest.TestCase):
     __test__ = False

-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         self.network = 'testnet'
         self.event_storage = EventMemoryStorage()
@@ -23,7 +23,7 @@ def setUp(self):
         self.genesis_private_key = get_genesis_key()
         self.genesis_public_key = self.genesis_private_key.public_key()

-    def test_reorg_events(self):
+    def test_reorg_events(self) -> None:
         assert self._settings.REWARD_SPEND_MIN_BLOCKS == 10, 'this test was made with this hardcoded value in mind'

         # add some blocks
@@ -36,7 +36,6 @@ def test_reorg_events(self):
         b0 = tb0.generate_mining_block(self.manager.rng, storage=self.manager.tx_storage, address=BURN_ADDRESS)
         b0.weight = 10
         self.manager.cpu_mining_service.resolve(b0)
-        self.manager.verification_service.verify(b0)
         self.manager.propagate_tx(b0, fails_silently=False)
         self.log.debug('reorg block propagated')
         self.run_to_completion()
@@ -44,7 +43,7 @@ def test_reorg_events(self):
         # check events
         actual_events = list(self.event_storage.iter_from_event(0))

-        expected_events = [
+        expected_events: list[tuple[EventType, dict[str, str | int]]] = [
             (EventType.LOAD_STARTED, {}),
             (EventType.NEW_VERTEX_ACCEPTED, {'hash': self._settings.GENESIS_BLOCK_HASH.hex()}),
             (EventType.NEW_VERTEX_ACCEPTED, {'hash': self._settings.GENESIS_TX1_HASH.hex()}),
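The `test_event_group` hunk above replaces bare `get_event` calls with `not_none(...)` from `hathor.util`, so each `Optional[BaseEvent]` is unwrapped once instead of being asserted line by line. A helper of that shape typically looks like the following (a sketch; the project's actual definition lives in hathor.util and may differ):

from typing import Optional, TypeVar

T = TypeVar('T')


def not_none(value: Optional[T]) -> T:
    # Unwrap an Optional, failing fast if the value is missing; mypy sees
    # the result as T, not Optional[T].
    assert value is not None
    return value


# Usage: the unwrapped value keeps its precise type for later assertions.
events = {0: 'LOAD_STARTED'}
event0 = not_none(events.get(0))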
diff --git a/tests/event/test_event_simulation_responses.py b/tests/event/test_event_simulation_responses.py
index 2bc628088..50557296c 100644
--- a/tests/event/test_event_simulation_responses.py
+++ b/tests/event/test_event_simulation_responses.py
@@ -282,7 +282,7 @@ def test_restart_with_ack_too_small(self) -> None:
         # get response
         response = self._get_error_response()

-        assert response.type == InvalidRequestType.ACK_TOO_SMALL.value
+        assert str(response.type) == InvalidRequestType.ACK_TOO_SMALL.value

     def test_multiple_interactions(self) -> None:
         miner = self.simulator.create_miner(self.manager, hashpower=1e6)
@@ -333,7 +333,8 @@ def test_multiple_interactions(self) -> None:
         # get response
         response = self._get_error_response()

-        assert response.type == InvalidRequestType.ACK_TOO_SMALL.value  # ACK too small because we've already sent it
+        # ACK too small because we've already sent it
+        assert str(response.type) == InvalidRequestType.ACK_TOO_SMALL.value

         # new ack
         ack = AckRequest(type='ACK', window_size=4, ack_event_id=5)
diff --git a/tests/event/test_event_simulation_scenarios.py b/tests/event/test_event_simulation_scenarios.py
index 65847491d..d3189093c 100644
--- a/tests/event/test_event_simulation_scenarios.py
+++ b/tests/event/test_event_simulation_scenarios.py
@@ -44,6 +44,7 @@ class BaseEventSimulationScenariosTest(BaseEventSimulationTester):

     def test_only_load(self) -> None:
         stream_id = self.manager._event_manager._stream_id
+        assert stream_id is not None
         Scenario.ONLY_LOAD.simulate(self.simulator, self.manager)
         self._start_stream()
@@ -53,9 +54,9 @@ def test_only_load(self) -> None:
             # LOAD_STATED
             EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=0, timestamp=1578878880.0, type=EventType.LOAD_STARTED, data=EmptyData(), group_id=None), latest_event_id=4, stream_id=stream_id),  # noqa: E501
             # One NEW_VERTEX_ACCEPTED for each genesis (1 block and 2 txs)
-            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=1, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HVayMofEDh4XGsaQJeRJKhutYxYodYNop6', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=4, stream_id=stream_id),  # noqa: E501
-            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=2, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=4, stream_id=stream_id),  # noqa: E501
-            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=3, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=4, stream_id=stream_id),  # noqa: E501
+            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=1, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HVayMofEDh4XGsaQJeRJKhutYxYodYNop6', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=4, stream_id=stream_id),  # noqa: E501
+            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=2, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=4, stream_id=stream_id),  # noqa: E501
+            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=3, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=4, stream_id=stream_id),  # noqa: E501
             # LOAD_FINISHED
             EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=4, timestamp=1578878880.0, type=EventType.LOAD_FINISHED, data=EmptyData(), group_id=None), latest_event_id=4, stream_id=stream_id)  # noqa: E501
         ]
@@ -64,8 +65,9 @@ def test_only_load(self) -> None:
         responses = _remove_timestamp(responses)
         expected = _remove_timestamp(expected)
         assert responses == expected, f'expected: {expected}\n\nactual: {responses}'

-    def
test_single_chain_one_block(self): + def test_single_chain_one_block(self) -> None: stream_id = self.manager._event_manager._stream_id + assert stream_id is not None Scenario.SINGLE_CHAIN_ONE_BLOCK.simulate(self.simulator, self.manager) self._start_stream() @@ -75,25 +77,26 @@ def test_single_chain_one_block(self): # LOAD_STATED EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=0, timestamp=1578878880.0, type=EventType.LOAD_STARTED, data=EmptyData(), group_id=None), latest_event_id=8, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for each genesis (1 block and 2 txs) - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=1, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HVayMofEDh4XGsaQJeRJKhutYxYodYNop6', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=8, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=2, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=8, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=3, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=8, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=1, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HVayMofEDh4XGsaQJeRJKhutYxYodYNop6', timelock=None))], parents=[], tokens=[], 
token_name=None, token_symbol=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=8, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=2, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=8, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=3, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=8, stream_id=stream_id), # noqa: E501 # LOAD_FINISHED EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=4, timestamp=1578878880.0, type=EventType.LOAD_FINISHED, data=EmptyData(), group_id=None), latest_event_id=8, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED for each genesis tx (2), adding the new block as their child # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=5, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=8, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=6, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, 
data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf'], twins=[], accumulated_weight=2.0, score=2.0, first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=8, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=7, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf'], twins=[], accumulated_weight=2.0, score=2.0, first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=8, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=5, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=8, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=6, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf'], twins=[], accumulated_weight=2.0, score=2.0, first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=8, stream_id=stream_id), # 
noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=7, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf'], twins=[], accumulated_weight=2.0, score=2.0, first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=8, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new block - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=8, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=8, stream_id=stream_id) # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=8, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=8, stream_id=stream_id) # noqa: E501 ] responses = _remove_timestamp(responses) expected = _remove_timestamp(expected) assert responses == expected, f'expected: {expected}\n\nactual: {responses}' - def test_single_chain_blocks_and_transactions(self): + def test_single_chain_blocks_and_transactions(self) -> None: stream_id = self.manager._event_manager._stream_id + assert stream_id is not None 
Scenario.SINGLE_CHAIN_BLOCKS_AND_TRANSACTIONS.simulate(self.simulator, self.manager) self._start_stream() @@ -103,62 +106,63 @@ def test_single_chain_blocks_and_transactions(self): # LOAD_STATED EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=0, timestamp=1578878880.0, type=EventType.LOAD_STARTED, data=EmptyData(), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for each genesis (1 block and 2 txs) - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=1, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HVayMofEDh4XGsaQJeRJKhutYxYodYNop6', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=2, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=3, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=1, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HVayMofEDh4XGsaQJeRJKhutYxYodYNop6', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], 
conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=2, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=3, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 # LOAD_FINISHED EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=4, timestamp=1578878880.0, type=EventType.LOAD_FINISHED, data=EmptyData(), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED for each genesis tx (2), adding the new block as their child # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=5, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=6, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, 
data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=2.0, first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=7, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=2.0, first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=5, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', 
'16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=6, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=2.0, first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=7, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=2.0, 
first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new block - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=8, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=8, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED and one NEW_VERTEX_ACCEPTED for 10 new blocks - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=9, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', nonce=0, timestamp=1578878911, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', 
spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8'], twins=[], accumulated_weight=2.0, score=4.321928094887363, first_block=None, height=2, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=10, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', nonce=0, timestamp=1578878911, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8'], twins=[], accumulated_weight=2.0, score=4.321928094887363, first_block=None, height=2, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=11, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', nonce=0, timestamp=1578878912, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], parents=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393'], twins=[], accumulated_weight=2.0, score=4.584962500721156, first_block=None, height=3, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=12, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', nonce=0, timestamp=1578878912, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], parents=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, 
metadata=TxMetadata(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393'], twins=[], accumulated_weight=2.0, score=4.584962500721156, first_block=None, height=3, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=13, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', nonce=0, timestamp=1578878913, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49'], twins=[], accumulated_weight=2.0, score=4.807354922057604, first_block=None, height=4, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=14, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', nonce=0, timestamp=1578878913, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49'], twins=[], accumulated_weight=2.0, score=4.807354922057604, first_block=None, height=4, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=15, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', nonce=0, timestamp=1578878914, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], 
token_name=None, token_symbol=None, metadata=TxMetadata(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3'], twins=[], accumulated_weight=2.0, score=5.0, first_block=None, height=5, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=16, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', nonce=0, timestamp=1578878914, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3'], twins=[], accumulated_weight=2.0, score=5.0, first_block=None, height=5, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=17, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', nonce=0, timestamp=1578878915, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e'], twins=[], accumulated_weight=2.0, score=5.169925001442312, first_block=None, height=6, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=18, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', nonce=0, timestamp=1578878915, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], 
token_name=None, token_symbol=None, metadata=TxMetadata(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e'], twins=[], accumulated_weight=2.0, score=5.169925001442312, first_block=None, height=6, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=19, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', nonce=0, timestamp=1578878916, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d'], twins=[], accumulated_weight=2.0, score=5.321928094887363, first_block=None, height=7, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=20, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', nonce=0, timestamp=1578878916, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d'], twins=[], accumulated_weight=2.0, score=5.321928094887363, first_block=None, height=7, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=21, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', nonce=0, timestamp=1578878917, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', 
'33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6'], twins=[], accumulated_weight=2.0, score=5.459431618637297, first_block=None, height=8, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=22, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', nonce=0, timestamp=1578878917, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6'], twins=[], accumulated_weight=2.0, score=5.459431618637297, first_block=None, height=8, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=23, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', nonce=0, timestamp=1578878918, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7'], twins=[], accumulated_weight=2.0, score=5.584962500721156, first_block=None, height=9, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=24, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', nonce=0, timestamp=1578878918, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 
'16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7'], twins=[], accumulated_weight=2.0, score=5.584962500721156, first_block=None, height=9, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=25, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', nonce=0, timestamp=1578878919, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=5.700439718141092, first_block=None, height=10, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=26, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', nonce=0, timestamp=1578878919, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=5.700439718141092, first_block=None, height=10, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=27, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', nonce=0, timestamp=1578878920, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], 
parents=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, first_block=None, height=11, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=28, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', nonce=0, timestamp=1578878920, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, first_block=None, height=11, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=9, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', nonce=0, timestamp=1578878911, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8'], twins=[], accumulated_weight=2.0, score=4.321928094887363, first_block=None, height=2, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=10, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', nonce=0, timestamp=1578878911, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], 
parents=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8'], twins=[], accumulated_weight=2.0, score=4.321928094887363, first_block=None, height=2, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=11, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', nonce=0, timestamp=1578878912, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], parents=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393'], twins=[], accumulated_weight=2.0, score=4.584962500721156, first_block=None, height=3, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=12, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', nonce=0, timestamp=1578878912, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], parents=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393'], twins=[], accumulated_weight=2.0, score=4.584962500721156, first_block=None, height=3, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=13, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', nonce=0, timestamp=1578878913, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', 
decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49'], twins=[], accumulated_weight=2.0, score=4.807354922057604, first_block=None, height=4, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=14, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', nonce=0, timestamp=1578878913, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49'], twins=[], accumulated_weight=2.0, score=4.807354922057604, first_block=None, height=4, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=15, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', nonce=0, timestamp=1578878914, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3'], twins=[], accumulated_weight=2.0, score=5.0, first_block=None, height=5, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=16, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', nonce=0, timestamp=1578878914, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, 
token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3'], twins=[], accumulated_weight=2.0, score=5.0, first_block=None, height=5, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=17, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', nonce=0, timestamp=1578878915, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e'], twins=[], accumulated_weight=2.0, score=5.169925001442312, first_block=None, height=6, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=18, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', nonce=0, timestamp=1578878915, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e'], twins=[], accumulated_weight=2.0, score=5.169925001442312, first_block=None, height=6, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=19, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', nonce=0, timestamp=1578878916, signal_bits=0, 
version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d'], twins=[], accumulated_weight=2.0, score=5.321928094887363, first_block=None, height=7, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=20, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', nonce=0, timestamp=1578878916, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d'], twins=[], accumulated_weight=2.0, score=5.321928094887363, first_block=None, height=7, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=21, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', nonce=0, timestamp=1578878917, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6'], twins=[], accumulated_weight=2.0, score=5.459431618637297, first_block=None, height=8, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=22, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, 
data=TxData(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', nonce=0, timestamp=1578878917, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6'], twins=[], accumulated_weight=2.0, score=5.459431618637297, first_block=None, height=8, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=23, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', nonce=0, timestamp=1578878918, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7'], twins=[], accumulated_weight=2.0, score=5.584962500721156, first_block=None, height=9, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=24, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', nonce=0, timestamp=1578878918, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7'], twins=[], accumulated_weight=2.0, score=5.584962500721156, first_block=None, height=9, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', 
event=BaseEvent(id=25, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', nonce=0, timestamp=1578878919, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=5.700439718141092, first_block=None, height=10, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=26, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', nonce=0, timestamp=1578878919, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=5.700439718141092, first_block=None, height=10, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=27, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', nonce=0, timestamp=1578878920, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, first_block=None, height=11, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, 
network='unittests', event=BaseEvent(id=28, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', nonce=0, timestamp=1578878920, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, first_block=None, height=11, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new tx (below), and one VERTEX_METADATA_CHANGED for a block, adding the new tx as spending their output # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=29, timestamp=1578878970.5, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', nonce=0, timestamp=1578878970, version=1, weight=18.656776158409354, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.656776158409354, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=30, timestamp=1578878970.5, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, 
metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[SpentOutput(index=0, tx_ids=['5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650'])], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=29, timestamp=1578878970.5, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.656776158409354, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.656776158409354, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=30, timestamp=1578878970.5, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[SpentOutput(index=0, tx_ids=['5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650'])], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new tx - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=31, timestamp=1578878970.5, 
type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', nonce=0, timestamp=1578878970, version=1, weight=18.656776158409354, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.656776158409354, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=31, timestamp=1578878970.5, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.656776158409354, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.656776158409354, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new tx (below), and one VERTEX_METADATA_CHANGED for a tx, adding the new tx as spending their output and children # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=32, timestamp=1578879030.75, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', nonce=0, timestamp=1578879030, version=1, weight=18.4904519466213, inputs=[TxInput(tx_id='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', index=0, 
spent_output=TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)))], outputs=[TxOutput(value=3400, token_data=0, script='dqkUmkey79Rbhjq4BtHYCm2mT8hDprWIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLatLcoaATFMqECb5fD5rdW2nF9WGyw9os', timelock=None)), TxOutput(value=2000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.4904519466213, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=33, timestamp=1578879030.75, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', nonce=0, timestamp=1578878970, version=1, weight=18.656776158409354, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', spent_outputs=[SpentOutput(index=0, tx_ids=['d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6'])], conflict_with=[], voided_by=[], received_by=[], children=['d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6'], twins=[], accumulated_weight=18.656776158409354, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=32, timestamp=1578879030.75, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=18.4904519466213, inputs=[TxInput(tx_id='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', index=0, spent_output=TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)))], outputs=[TxOutput(value=3400, token_data=0, 
script='dqkUmkey79Rbhjq4BtHYCm2mT8hDprWIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLatLcoaATFMqECb5fD5rdW2nF9WGyw9os', timelock=None)), TxOutput(value=2000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.4904519466213, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=33, timestamp=1578879030.75, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.656776158409354, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', spent_outputs=[SpentOutput(index=0, tx_ids=['d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6'])], conflict_with=[], voided_by=[], received_by=[], children=['d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6'], twins=[], accumulated_weight=18.656776158409354, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new tx - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=34, timestamp=1578879030.75, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', nonce=0, timestamp=1578879030, version=1, weight=18.4904519466213, inputs=[TxInput(tx_id='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', index=0, spent_output=TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)))], outputs=[TxOutput(value=3400, token_data=0, script='dqkUmkey79Rbhjq4BtHYCm2mT8hDprWIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLatLcoaATFMqECb5fD5rdW2nF9WGyw9os', timelock=None)), TxOutput(value=2000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', 
decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.4904519466213, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=34, timestamp=1578879030.75, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=18.4904519466213, inputs=[TxInput(tx_id='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', index=0, spent_output=TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)))], outputs=[TxOutput(value=3400, token_data=0, script='dqkUmkey79Rbhjq4BtHYCm2mT8hDprWIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLatLcoaATFMqECb5fD5rdW2nF9WGyw9os', timelock=None)), TxOutput(value=2000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.4904519466213, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED for each confirmed transaction (first block changed) # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=35, timestamp=1578879091.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9', nonce=0, timestamp=1578879090, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUTisHvpM4sDeINzxF5auK/8bP6UaIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HDeSe6qKqjSLwtnjLBV84NddtZQyNb9HUU', timelock=None))], parents=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', 'd2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', '5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=19.576585413276128, first_block=None, height=12, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, 
network='unittests', event=BaseEvent(id=36, timestamp=1578879091.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', nonce=0, timestamp=1578879030, version=1, weight=18.4904519466213, inputs=[TxInput(tx_id='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', index=0, spent_output=TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)))], outputs=[TxOutput(value=3400, token_data=0, script='dqkUmkey79Rbhjq4BtHYCm2mT8hDprWIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLatLcoaATFMqECb5fD5rdW2nF9WGyw9os', timelock=None)), TxOutput(value=2000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9'], twins=[], accumulated_weight=18.4904519466213, score=0.0, first_block='7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501
-            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=37, timestamp=1578879091.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', nonce=0, timestamp=1578878970, version=1, weight=18.656776158409354, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', spent_outputs=[SpentOutput(index=0, tx_ids=['d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6'])], conflict_with=[], voided_by=[], received_by=[], children=['d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', '7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9'], twins=[], accumulated_weight=18.656776158409354, score=0.0, first_block='7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501
+            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=35, timestamp=1578879091.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9', nonce=0, timestamp=1578879090, signal_bits=0, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUTisHvpM4sDeINzxF5auK/8bP6UaIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HDeSe6qKqjSLwtnjLBV84NddtZQyNb9HUU', timelock=None))], parents=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', 'd2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', '5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=19.576585413276128, first_block=None, height=12, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501
+            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=36, timestamp=1578879091.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=18.4904519466213, inputs=[TxInput(tx_id='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', index=0, spent_output=TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)))], outputs=[TxOutput(value=3400, token_data=0, script='dqkUmkey79Rbhjq4BtHYCm2mT8hDprWIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLatLcoaATFMqECb5fD5rdW2nF9WGyw9os', timelock=None)), TxOutput(value=2000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9'], twins=[], accumulated_weight=18.4904519466213, score=0.0, first_block='7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501
+            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=37, timestamp=1578879091.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.656776158409354, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', spent_outputs=[SpentOutput(index=0, tx_ids=['d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6'])], conflict_with=[], voided_by=[], received_by=[], children=['d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', '7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9'], twins=[], accumulated_weight=18.656776158409354, score=0.0, first_block='7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501
             # One NEW_VERTEX_ACCEPTED for a new block
-            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=38, timestamp=1578879091.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9', nonce=0, timestamp=1578879090, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUTisHvpM4sDeINzxF5auK/8bP6UaIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HDeSe6qKqjSLwtnjLBV84NddtZQyNb9HUU', timelock=None))], parents=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', 'd2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', '5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=19.576585413276128, first_block=None, height=12, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id) # noqa: E501
+            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=38, timestamp=1578879091.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9', nonce=0, timestamp=1578879090, signal_bits=0, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUTisHvpM4sDeINzxF5auK/8bP6UaIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HDeSe6qKqjSLwtnjLBV84NddtZQyNb9HUU', timelock=None))], parents=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', 'd2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', '5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=19.576585413276128, first_block=None, height=12, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id) # noqa: E501
         ]
         responses = _remove_timestamp(responses)
         expected = _remove_timestamp(expected)
         assert responses == expected, f'expected: {expected}\n\nactual: {responses}'
-    def test_reorg(self):
+    def test_reorg(self) -> None:
         stream_id = self.manager._event_manager._stream_id
+        assert stream_id is not None
         Scenario.REORG.simulate(self.simulator, self.manager)
         self._start_stream()
@@ -168,45 +172,46 @@ def test_reorg(self):
             # LOAD_STARTED
             EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=0, timestamp=1578878880.0, type=EventType.LOAD_STARTED, data=EmptyData(), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501
             # One NEW_VERTEX_ACCEPTED for each genesis (1 block and 2 txs)
-            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=1, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HVayMofEDh4XGsaQJeRJKhutYxYodYNop6', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501
-            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=2, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501
-            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=3, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501
+            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=1, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HVayMofEDh4XGsaQJeRJKhutYxYodYNop6', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501
+            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=2, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501
+            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=3, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501
             # LOAD_FINISHED
             EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=4, timestamp=1578878880.0, type=EventType.LOAD_FINISHED, data=EmptyData(), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501
             # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED for each genesis tx (2), adding the new block as their child # noqa: E501
-            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=5, timestamp=1578878940.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', nonce=0, timestamp=1578878940, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501
-            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=6, timestamp=1578878940.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a'], twins=[], accumulated_weight=2.0, score=2.0, first_block='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501
-            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=7, timestamp=1578878940.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a'], twins=[], accumulated_weight=2.0, score=2.0, first_block='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501
+            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=5, timestamp=1578878940.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', nonce=0, timestamp=1578878940, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501
+            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=6, timestamp=1578878940.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a'], twins=[], accumulated_weight=2.0, score=2.0, first_block='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501
+            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=7, timestamp=1578878940.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a'], twins=[], accumulated_weight=2.0, score=2.0, first_block='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501
             # One NEW_VERTEX_ACCEPTED for a new block from manager1
-            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=8, timestamp=1578878940.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', nonce=0, timestamp=1578878940, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501
+            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=8, timestamp=1578878940.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', nonce=0, timestamp=1578878940, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501
             # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED for each genesis tx (2), adding the new block as their child # noqa: E501
             # Also one VERTEX_METADATA_CHANGED for the previous block, voiding it
-            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=9, timestamp=1578879064.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', nonce=0, timestamp=1578879000, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUfBo1MGBHkHtXDktO+BxtBdh5T5GIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HHqKa5Y6viZ8fkH2bd1qQBdsZnrtsmruqS', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', spent_outputs=[], conflict_with=[], voided_by=['1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533'], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501
-            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=10, timestamp=1578879064.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', nonce=0, timestamp=1578878940, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', spent_outputs=[], conflict_with=[], voided_by=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a'], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501
-            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=11, timestamp=1578879064.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', '1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533'], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501
-            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=12, timestamp=1578879064.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', '1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533'], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501
+            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=9, timestamp=1578879064.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', nonce=0, timestamp=1578879000, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUfBo1MGBHkHtXDktO+BxtBdh5T5GIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HHqKa5Y6viZ8fkH2bd1qQBdsZnrtsmruqS', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', spent_outputs=[], conflict_with=[], voided_by=['1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533'], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501
+            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=10, timestamp=1578879064.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', nonce=0, timestamp=1578878940, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', spent_outputs=[], conflict_with=[], voided_by=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a'], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501
+            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=11, timestamp=1578879064.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', '1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533'], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501
+            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=12, timestamp=1578879064.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', '1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533'], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501
             # One NEW_VERTEX_ACCEPTED for a new block from manager2
-            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=13, timestamp=1578879064.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', nonce=0, timestamp=1578879000, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUfBo1MGBHkHtXDktO+BxtBdh5T5GIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HHqKa5Y6viZ8fkH2bd1qQBdsZnrtsmruqS', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', spent_outputs=[], conflict_with=[], voided_by=['1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533'], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501
+            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=13, timestamp=1578879064.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', nonce=0, timestamp=1578879000, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUfBo1MGBHkHtXDktO+BxtBdh5T5GIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HHqKa5Y6viZ8fkH2bd1qQBdsZnrtsmruqS', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', spent_outputs=[], conflict_with=[], voided_by=['1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533'], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501
             # REORG_STARTED caused by a new block from manager2 (below)
             EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=14, timestamp=1578879064.25, type=EventType.REORG_STARTED, data=ReorgData(reorg_size=1, previous_best_block='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', new_best_block='38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1', common_block='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792'), group_id=0), latest_event_id=20, stream_id=stream_id), # noqa: E501
             # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED for each genesis tx (2), adding the new block as their child # noqa: E501
             # Also one VERTEX_METADATA_CHANGED for the previous block, un-voiding it as it's now part of the best blockchain # noqa: E501
-            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=15, timestamp=1578879064.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1', nonce=0, timestamp=1578879001, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUgQrqLefPfPVpkXlfvvAp943epyOIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HJHSdTickduA1MF9PTbzBQi6Z7stNAzwAu', timelock=None))], parents=['1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.321928094887363, first_block=None, height=2, validation='full'), aux_pow=None), group_id=0), latest_event_id=20, stream_id=stream_id), # noqa: E501
-            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=16, timestamp=1578879064.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', nonce=0, timestamp=1578879000, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUfBo1MGBHkHtXDktO+BxtBdh5T5GIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HHqKa5Y6viZ8fkH2bd1qQBdsZnrtsmruqS', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=0), latest_event_id=20, stream_id=stream_id), # noqa: E501
-            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=17, timestamp=1578879064.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', '1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', '38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1'], twins=[], accumulated_weight=2.0, score=2.0, first_block='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', height=0, validation='full'), aux_pow=None), group_id=0), latest_event_id=20, stream_id=stream_id), # noqa: E501
-            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=18, timestamp=1578879064.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', '1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', '38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1'], twins=[], accumulated_weight=2.0, score=2.0, first_block='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', height=0, validation='full'), aux_pow=None), group_id=0), latest_event_id=20, stream_id=stream_id), # noqa: E501
+            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=15, timestamp=1578879064.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1', nonce=0, timestamp=1578879001, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUgQrqLefPfPVpkXlfvvAp943epyOIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HJHSdTickduA1MF9PTbzBQi6Z7stNAzwAu', timelock=None))], parents=['1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.321928094887363, first_block=None, height=2, validation='full'), aux_pow=None), group_id=0), latest_event_id=20, stream_id=stream_id), # noqa: E501
+            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=16, timestamp=1578879064.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', nonce=0, timestamp=1578879000, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUfBo1MGBHkHtXDktO+BxtBdh5T5GIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HHqKa5Y6viZ8fkH2bd1qQBdsZnrtsmruqS', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=0), latest_event_id=20, stream_id=stream_id), # noqa: E501
+            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=17, timestamp=1578879064.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', '1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', '38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1'], twins=[], accumulated_weight=2.0, score=2.0, first_block='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', height=0, validation='full'), aux_pow=None), group_id=0), latest_event_id=20, stream_id=stream_id), # noqa: E501
+            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=18, timestamp=1578879064.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', '1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', '38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1'], twins=[], accumulated_weight=2.0, score=2.0, first_block='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', height=0, validation='full'), aux_pow=None), group_id=0), latest_event_id=20, stream_id=stream_id), # noqa: E501
             # REORG_FINISHED
             EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=19, timestamp=1578879064.25, type=EventType.REORG_FINISHED, data=EmptyData(), group_id=0), latest_event_id=20, stream_id=stream_id), # noqa: E501
             # One NEW_VERTEX_ACCEPTED for a new block from manager2
-            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=20, timestamp=1578879064.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1', nonce=0, timestamp=1578879001, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUgQrqLefPfPVpkXlfvvAp943epyOIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HJHSdTickduA1MF9PTbzBQi6Z7stNAzwAu', timelock=None))], parents=['1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.321928094887363, first_block=None, height=2, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id) # noqa: E501
+            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=20, timestamp=1578879064.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1', nonce=0, timestamp=1578879001, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUgQrqLefPfPVpkXlfvvAp943epyOIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HJHSdTickduA1MF9PTbzBQi6Z7stNAzwAu', timelock=None))], parents=['1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.321928094887363, first_block=None, height=2, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id) # noqa: E501
         ]
         responses = _remove_timestamp(responses)
         expected = _remove_timestamp(expected)
         assert responses == expected, f'expected: {expected}\n\nactual: {responses}'
-    def test_unvoided_transaction(self):
+    def test_unvoided_transaction(self) -> None:
         stream_id = self.manager._event_manager._stream_id
+        assert stream_id is not None
         Scenario.UNVOIDED_TRANSACTION.simulate(self.simulator, self.manager)
         self._start_stream()
@@ -216,56 +221,56 @@ def test_unvoided_transaction(self):
             # LOAD_STARTED
             EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=0, type=EventType.LOAD_STARTED, timestamp=0, data=EmptyData(), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501
             # One NEW_VERTEX_ACCEPTED for each genesis (1 block and 2 txs)
-            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=1, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HVayMofEDh4XGsaQJeRJKhutYxYodYNop6', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501
-            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=2, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501
-            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=3, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501
+            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=1, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HVayMofEDh4XGsaQJeRJKhutYxYodYNop6', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501
+            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=2, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501
+            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=3, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501
             # LOAD_FINISHED
             EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=4, type=EventType.LOAD_FINISHED, timestamp=0, data=EmptyData(), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501
             # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED for each genesis tx (2), adding the new block as their child # noqa: E501
-            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=5, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501
-            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=6, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=2.0, first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501
-            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=7, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=2.0, first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501
+            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=5, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501
+            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=6, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=2.0, first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501
+            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=7, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=2.0, first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501
             # One NEW_VERTEX_ACCEPTED for a new block
-            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=8, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501
+            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=8, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501
             # One VERTEX_METADATA_CHANGED and one NEW_VERTEX_ACCEPTED for 10 new blocks
-            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=9, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', nonce=0, timestamp=1578878911, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8'], twins=[], accumulated_weight=2.0, score=4.321928094887363, first_block=None, height=2, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501
            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=10, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', nonce=0, timestamp=1578878911, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8'], twins=[], accumulated_weight=2.0, score=4.321928094887363, first_block=None, height=2, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501
-            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=11, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', nonce=0, timestamp=1578878912, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], parents=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393'], twins=[], accumulated_weight=2.0, score=4.584962500721156, first_block=None, height=3, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501
-            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=12, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', nonce=0, timestamp=1578878912, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], parents=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393'], twins=[], accumulated_weight=2.0, score=4.584962500721156, first_block=None, height=3, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501
-            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=13, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', nonce=0, timestamp=1578878913, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49'], twins=[], accumulated_weight=2.0, score=4.807354922057604, first_block=None, height=4, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501
-            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=14, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', nonce=0, timestamp=1578878913, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49'], twins=[], accumulated_weight=2.0, score=4.807354922057604, first_block=None, height=4, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501
-            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=15, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', nonce=0, timestamp=1578878914, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3'], twins=[], accumulated_weight=2.0, score=5.0, first_block=None, height=5, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501
-            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=16, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', nonce=0, timestamp=1578878914, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3'], twins=[], accumulated_weight=2.0, score=5.0, first_block=None, height=5, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501
-            EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=17, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', nonce=0, timestamp=1578878915, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))],
parents=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e'], twins=[], accumulated_weight=2.0, score=5.169925001442312, first_block=None, height=6, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=18, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', nonce=0, timestamp=1578878915, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e'], twins=[], accumulated_weight=2.0, score=5.169925001442312, first_block=None, height=6, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=19, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', nonce=0, timestamp=1578878916, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d'], twins=[], accumulated_weight=2.0, score=5.321928094887363, first_block=None, height=7, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=20, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', nonce=0, timestamp=1578878916, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], 
parents=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d'], twins=[], accumulated_weight=2.0, score=5.321928094887363, first_block=None, height=7, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=21, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', nonce=0, timestamp=1578878917, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6'], twins=[], accumulated_weight=2.0, score=5.459431618637297, first_block=None, height=8, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=22, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', nonce=0, timestamp=1578878917, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6'], twins=[], accumulated_weight=2.0, score=5.459431618637297, first_block=None, height=8, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=23, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', nonce=0, timestamp=1578878918, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], 
parents=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7'], twins=[], accumulated_weight=2.0, score=5.584962500721156, first_block=None, height=9, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=24, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', nonce=0, timestamp=1578878918, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7'], twins=[], accumulated_weight=2.0, score=5.584962500721156, first_block=None, height=9, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=25, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', nonce=0, timestamp=1578878919, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=5.700439718141092, first_block=None, height=10, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=26, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', nonce=0, timestamp=1578878919, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], 
parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=5.700439718141092, first_block=None, height=10, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=27, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', nonce=0, timestamp=1578878920, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, first_block=None, height=11, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=28, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', nonce=0, timestamp=1578878920, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, first_block=None, height=11, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=9, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', nonce=0, timestamp=1578878911, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', 
'16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8'], twins=[], accumulated_weight=2.0, score=4.321928094887363, first_block=None, height=2, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=10, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', nonce=0, timestamp=1578878911, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8'], twins=[], accumulated_weight=2.0, score=4.321928094887363, first_block=None, height=2, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=11, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', nonce=0, timestamp=1578878912, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], parents=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393'], twins=[], accumulated_weight=2.0, score=4.584962500721156, first_block=None, height=3, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=12, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', nonce=0, timestamp=1578878912, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], 
parents=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393'], twins=[], accumulated_weight=2.0, score=4.584962500721156, first_block=None, height=3, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=13, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', nonce=0, timestamp=1578878913, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49'], twins=[], accumulated_weight=2.0, score=4.807354922057604, first_block=None, height=4, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=14, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', nonce=0, timestamp=1578878913, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49'], twins=[], accumulated_weight=2.0, score=4.807354922057604, first_block=None, height=4, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=15, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', nonce=0, timestamp=1578878914, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', 
address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3'], twins=[], accumulated_weight=2.0, score=5.0, first_block=None, height=5, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=16, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', nonce=0, timestamp=1578878914, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3'], twins=[], accumulated_weight=2.0, score=5.0, first_block=None, height=5, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=17, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', nonce=0, timestamp=1578878915, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e'], twins=[], accumulated_weight=2.0, score=5.169925001442312, first_block=None, height=6, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=18, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', nonce=0, timestamp=1578878915, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', 
decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e'], twins=[], accumulated_weight=2.0, score=5.169925001442312, first_block=None, height=6, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=19, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', nonce=0, timestamp=1578878916, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d'], twins=[], accumulated_weight=2.0, score=5.321928094887363, first_block=None, height=7, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=20, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', nonce=0, timestamp=1578878916, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d'], twins=[], accumulated_weight=2.0, score=5.321928094887363, first_block=None, height=7, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=21, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', nonce=0, timestamp=1578878917, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, 
script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6'], twins=[], accumulated_weight=2.0, score=5.459431618637297, first_block=None, height=8, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=22, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', nonce=0, timestamp=1578878917, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6'], twins=[], accumulated_weight=2.0, score=5.459431618637297, first_block=None, height=8, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=23, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', nonce=0, timestamp=1578878918, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7'], twins=[], accumulated_weight=2.0, score=5.584962500721156, first_block=None, height=9, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=24, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', nonce=0, timestamp=1578878918, signal_bits=0, version=0, weight=2.0, inputs=[], 
outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7'], twins=[], accumulated_weight=2.0, score=5.584962500721156, first_block=None, height=9, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=25, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', nonce=0, timestamp=1578878919, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=5.700439718141092, first_block=None, height=10, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=26, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', nonce=0, timestamp=1578878919, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=5.700439718141092, first_block=None, height=10, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=27, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', nonce=0, timestamp=1578878920, 
signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, first_block=None, height=11, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=28, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', nonce=0, timestamp=1578878920, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, first_block=None, height=11, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new tx (below), and one VERTEX_METADATA_CHANGED for a block, adding the new tx as spending its output # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=29, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', nonce=0, timestamp=1578878970, version=1, weight=19.0005, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=19.0005, score=0.0, first_block=None, 
height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=30, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[SpentOutput(index=0, tx_ids=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'])], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=29, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=19.0005, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=19.0005, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=30, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', 
'16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[SpentOutput(index=0, tx_ids=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'])], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new tx - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=31, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', nonce=0, timestamp=1578878970, version=1, weight=19.0005, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=19.0005, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=31, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=19.0005, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', spent_outputs=[], conflict_with=[], 
voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=19.0005, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new tx (below), one VERTEX_METADATA_CHANGED for a block, adding the new tx as spending its output, and one VERTEX_METADATA_CHANGED adding the new tx as twin/conflict of the previous tx # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=32, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88', nonce=0, timestamp=1578879030, version=1, weight=19.0, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88', spent_outputs=[], conflict_with=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'], voided_by=['0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], received_by=[], children=[], twins=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'], accumulated_weight=19.0, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=33, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', nonce=0, timestamp=1578878970, version=1, weight=19.0005, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', spent_outputs=[], conflict_with=['0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], voided_by=[], received_by=[], 
children=[], twins=['0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], accumulated_weight=19.0005, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=34, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[SpentOutput(index=0, tx_ids=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', '0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'])], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=32, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=19.0, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88', spent_outputs=[], conflict_with=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'], voided_by=['0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], received_by=[], children=[], twins=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'], accumulated_weight=19.0, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=33, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', 
nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=19.0005, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', spent_outputs=[], conflict_with=['0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], voided_by=[], received_by=[], children=[], twins=['0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], accumulated_weight=19.0005, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=34, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[SpentOutput(index=0, tx_ids=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', '0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'])], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new tx that is a twin of the previous one. It's voided. 
- EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=35, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88', nonce=0, timestamp=1578879030, version=1, weight=19.0, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88', spent_outputs=[], conflict_with=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'], voided_by=['0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], received_by=[], children=[], twins=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'], accumulated_weight=19.0, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=35, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=19.0, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88', spent_outputs=[], conflict_with=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'], voided_by=['0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], received_by=[], children=[], twins=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'], accumulated_weight=19.0, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED for each twin tx, inverting the voided state of 
them. # noqa: E501 # The order of events is important: we receive the voided txs first, then the rest in reverse topological ordering. - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=36, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', nonce=0, timestamp=1578878970, version=1, weight=19.0005, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', spent_outputs=[], conflict_with=['0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], voided_by=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'], received_by=[], children=[], twins=['0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], accumulated_weight=19.0005, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=37, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='24707288e7c72c5e74c68241ee32d64239902533e64946de6e6cddb66ef3432a', nonce=0, timestamp=1578879090, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUFgE9a6rVMusN303z18sYfjdpYGqIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H8XUjiUx24WLXUN63da34hX6bEs29GJjSs', timelock=None))], parents=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='24707288e7c72c5e74c68241ee32d64239902533e64946de6e6cddb66ef3432a', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=19.000858282039708, first_block=None, height=12, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=38, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88', nonce=0, timestamp=1578879030, version=1, weight=19.0, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))],
outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88', spent_outputs=[], conflict_with=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'], voided_by=[], received_by=[], children=['24707288e7c72c5e74c68241ee32d64239902533e64946de6e6cddb66ef3432a'], twins=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'], accumulated_weight=19.000704269011248, score=0.0, first_block='24707288e7c72c5e74c68241ee32d64239902533e64946de6e6cddb66ef3432a', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=36, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=19.0005, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', spent_outputs=[], conflict_with=['0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], voided_by=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'], received_by=[], children=[], twins=['0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], accumulated_weight=19.0005, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=37, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='24707288e7c72c5e74c68241ee32d64239902533e64946de6e6cddb66ef3432a', nonce=0, timestamp=1578879090, signal_bits=0, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUFgE9a6rVMusN303z18sYfjdpYGqIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H8XUjiUx24WLXUN63da34hX6bEs29GJjSs', timelock=None))], parents=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', 
'16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='24707288e7c72c5e74c68241ee32d64239902533e64946de6e6cddb66ef3432a', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=19.000858282039708, first_block=None, height=12, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=38, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=19.0, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88', spent_outputs=[], conflict_with=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'], voided_by=[], received_by=[], children=['24707288e7c72c5e74c68241ee32d64239902533e64946de6e6cddb66ef3432a'], twins=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'], accumulated_weight=19.000704269011248, score=0.0, first_block='24707288e7c72c5e74c68241ee32d64239902533e64946de6e6cddb66ef3432a', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new block - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=39, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='24707288e7c72c5e74c68241ee32d64239902533e64946de6e6cddb66ef3432a', nonce=0, timestamp=1578879090, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUFgE9a6rVMusN303z18sYfjdpYGqIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H8XUjiUx24WLXUN63da34hX6bEs29GJjSs', timelock=None))], parents=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='24707288e7c72c5e74c68241ee32d64239902533e64946de6e6cddb66ef3432a', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=19.000858282039708, first_block=None, height=12, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', 
peer_id=self.peer_id, network='unittests', event=BaseEvent(id=39, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='24707288e7c72c5e74c68241ee32d64239902533e64946de6e6cddb66ef3432a', nonce=0, timestamp=1578879090, signal_bits=0, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUFgE9a6rVMusN303z18sYfjdpYGqIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H8XUjiUx24WLXUN63da34hX6bEs29GJjSs', timelock=None))], parents=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='24707288e7c72c5e74c68241ee32d64239902533e64946de6e6cddb66ef3432a', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=19.000858282039708, first_block=None, height=12, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 ] responses = _remove_timestamp(responses) diff --git a/tests/event/test_event_storage.py b/tests/event/test_event_storage.py index 1013fb25f..f9b2651a4 100644 --- a/tests/event/test_event_storage.py +++ b/tests/event/test_event_storage.py @@ -2,6 +2,7 @@ import pytest +from hathor.event.model.base_event import BaseEvent from hathor.event.model.node_state import NodeState from hathor.event.storage import EventStorage from hathor.event.storage.memory_storage import EventMemoryStorage @@ -16,18 +17,18 @@ class EventStorageBaseTest(unittest.TestCase): event_storage: EventStorage - def setUp(self): + def setUp(self) -> None: super().setUp() self.event_mocker = EventMocker(self.rng) - def test_save_event_and_retrieve(self): + def test_save_event_and_retrieve(self) -> None: event = self.event_mocker.generate_mocked_event() self.event_storage.save_event(event) event_retrieved = self.event_storage.get_event(event.id) assert event_retrieved == event - def test_save_events_and_retrieve(self): + def test_save_events_and_retrieve(self) -> None: event1 = self.event_mocker.generate_mocked_event() event2 = self.event_mocker.generate_mocked_event() self.event_storage.save_events([event1, event2]) @@ -37,7 +38,7 @@ def test_save_events_and_retrieve(self): assert event1_retrieved == event1 assert event2_retrieved == event2 - def test_get_negative_key(self): + def test_get_negative_key(self) -> None: with self.assertRaises(ValueError) as cm: self.event_storage.get_event(-1) @@ -46,20 +47,22 @@ def test_get_negative_key(self): str(cm.exception) ) - def test_get_nonexistent_event(self): + def test_get_nonexistent_event(self) -> None: assert self.event_storage.get_event(0) is None assert self.event_storage.get_event(9999) is None - def test_save_events_and_retrieve_the_last(self): - last_event = None + def test_save_events_and_retrieve_the_last(self) -> None: + last_event: BaseEvent | None = None for i in range(10): last_event = self.event_mocker.generate_mocked_event(i) self.event_storage.save_event(last_event) event_retrieved = self.event_storage.get_last_event() + assert event_retrieved is not None + assert last_event is not None assert event_retrieved.id == last_event.id - def test_save_non_sequential(self): + def test_save_non_sequential(self) -> None: for i in range(10): event = self.event_mocker.generate_mocked_event(i) self.event_storage.save_event(event) @@ -74,16 +77,16 @@ def test_save_non_sequential(self): 
str(cm.exception) ) - def test_iter_from_event_empty(self): + def test_iter_from_event_empty(self) -> None: self._test_iter_from_event(0) - def test_iter_from_event_single(self): + def test_iter_from_event_single(self) -> None: self._test_iter_from_event(1) - def test_iter_from_event_multiple(self): + def test_iter_from_event_multiple(self) -> None: self._test_iter_from_event(20) - def _test_iter_from_event(self, n_events): + def _test_iter_from_event(self, n_events: int) -> None: expected_events = [] for i in range(n_events): event = self.event_mocker.generate_mocked_event(i) @@ -94,7 +97,7 @@ def _test_iter_from_event(self, n_events): self.assertEqual(expected_events, actual_events) - def test_iter_from_event_negative_key(self): + def test_iter_from_event_negative_key(self) -> None: with self.assertRaises(ValueError) as cm: events = self.event_storage.iter_from_event(-10) list(events) @@ -104,7 +107,7 @@ def test_iter_from_event_negative_key(self): str(cm.exception) ) - def test_save_events_and_retrieve_last_group_id(self): + def test_save_events_and_retrieve_last_group_id(self) -> None: expected_group_id = 4 self._populate_events_and_last_group_id(n_events=10, last_group_id=expected_group_id) @@ -119,38 +122,38 @@ def _populate_events_and_last_group_id(self, n_events: int, last_group_id: int) event = self.event_mocker.generate_mocked_event(i, group_id) self.event_storage.save_event(event) - def test_get_empty_node_state(self): + def test_get_empty_node_state(self) -> None: node_state = self.event_storage.get_node_state() assert node_state is None - def test_save_node_state_and_retrieve(self): + def test_save_node_state_and_retrieve(self) -> None: self.event_storage.save_node_state(NodeState.SYNC) node_state = self.event_storage.get_node_state() assert node_state == NodeState.SYNC - def test_get_empty_event_queue_state(self): + def test_get_empty_event_queue_state(self) -> None: enabled = self.event_storage.get_event_queue_state() assert enabled is False - def test_save_event_queue_enabled_and_retrieve(self): + def test_save_event_queue_enabled_and_retrieve(self) -> None: self.event_storage.save_event_queue_state(True) enabled = self.event_storage.get_event_queue_state() assert enabled is True - def test_save_event_queue_disabled_and_retrieve(self): + def test_save_event_queue_disabled_and_retrieve(self) -> None: self.event_storage.save_event_queue_state(False) enabled = self.event_storage.get_event_queue_state() assert enabled is False - def test_reset_events_empty_database(self): + def test_reset_events_empty_database(self) -> None: self._test_reset_events() - def test_reset_all_empty_database(self): + def test_reset_all_empty_database(self) -> None: self._test_reset_events() def _test_reset_events(self) -> None: @@ -179,7 +182,7 @@ def _test_reset_all(self) -> None: assert node_state is None assert event_queue_state is False - def test_reset_events_full_database(self): + def test_reset_events_full_database(self) -> None: n_events = 10 expected_last_group_id = 4 expected_node_state = NodeState.SYNC @@ -206,7 +209,7 @@ def test_reset_events_full_database(self): assert node_state == expected_node_state assert event_queue_state is True - def test_reset_all_full_database(self): + def test_reset_all_full_database(self) -> None: n_events = 10 expected_last_group_id = 4 expected_node_state = NodeState.SYNC @@ -238,7 +241,7 @@ def test_reset_all_full_database(self): class EventStorageRocksDBTest(EventStorageBaseTest): __test__ = True - def setUp(self): + def setUp(self) -> None: 
super().setUp() self.directory = tempfile.mkdtemp() self.tmpdirs.append(self.directory) @@ -249,6 +252,6 @@ def setUp(self): class EventStorageMemoryTest(EventStorageBaseTest): __test__ = True - def setUp(self): + def setUp(self) -> None: super().setUp() self.event_storage = EventMemoryStorage() diff --git a/tests/event/websocket/test_factory.py b/tests/event/websocket/test_factory.py index b100fc6d5..6c703a609 100644 --- a/tests/event/websocket/test_factory.py +++ b/tests/event/websocket/test_factory.py @@ -24,7 +24,7 @@ from tests.utils import EventMocker -def test_started_register(): +def test_started_register() -> None: factory = _get_factory() connection = Mock(spec_set=EventWebsocketProtocol) connection.send_invalid_request_response = Mock() @@ -35,7 +35,7 @@ def test_started_register(): connection.send_invalid_request_response.assert_not_called() -def test_non_started_register(): +def test_non_started_register() -> None: factory = _get_factory() connection = Mock(spec_set=EventWebsocketProtocol) connection.send_invalid_request_response = Mock() @@ -45,7 +45,7 @@ def test_non_started_register(): connection.send_invalid_request_response.assert_called_once_with(InvalidRequestType.EVENT_WS_NOT_RUNNING) -def test_stopped_register(): +def test_stopped_register() -> None: factory = _get_factory() connection = Mock(spec_set=EventWebsocketProtocol) connection.send_invalid_request_response = Mock() @@ -84,7 +84,7 @@ def test_broadcast_event(can_receive_event: bool) -> None: connection.send_event_response.assert_called_once_with(response) -def test_broadcast_multiple_events_multiple_connections(): +def test_broadcast_multiple_events_multiple_connections() -> None: stream_id = 'stream_id' factory = _get_factory(10) connection1 = Mock(spec_set=EventWebsocketProtocol) diff --git a/tests/event/websocket/test_protocol.py b/tests/event/websocket/test_protocol.py index 2f3a4dcfe..426d74778 100644 --- a/tests/event/websocket/test_protocol.py +++ b/tests/event/websocket/test_protocol.py @@ -13,7 +13,7 @@ # limitations under the License. 
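The three register tests in the websocket factory hunk above pin down a small lifecycle contract: a connection can only be registered while the factory is running, and otherwise receives an EVENT_WS_NOT_RUNNING invalid-request response. A minimal sketch of dispatch logic consistent with those assertions follows; the _is_running and _connections attribute names are assumptions for illustration, not the actual EventWebsocketFactory fields.

# Hypothetical sketch inferred from test_started_register,
# test_non_started_register and test_stopped_register; attribute
# names are assumed, not taken from the real implementation.
def register(self, connection: EventWebsocketProtocol) -> None:
    if not self._is_running:
        # Factory not started (or already stopped): reject the connection.
        connection.send_invalid_request_response(InvalidRequestType.EVENT_WS_NOT_RUNNING)
        return
    self._connections.add(connection)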
from typing import Optional -from unittest.mock import ANY, Mock +from unittest.mock import ANY, Mock, patch import pytest from autobahn.websocket import ConnectionRequest @@ -27,11 +27,11 @@ @pytest.fixture -def factory(): +def factory() -> Mock: return Mock(spec_set=EventWebsocketFactory) -def test_init(): +def test_init() -> None: protocol = EventWebsocketProtocol() assert protocol.client_peer is None @@ -41,7 +41,7 @@ def test_init(): assert not protocol._stream_is_active -def test_next_expected_event_id(): +def test_next_expected_event_id() -> None: protocol = EventWebsocketProtocol() assert protocol.next_expected_event_id() == 0 @@ -51,7 +51,7 @@ def test_next_expected_event_id(): assert protocol.next_expected_event_id() == 6 -def test_on_connect(): +def test_on_connect() -> None: protocol = EventWebsocketProtocol() request = Mock(spec_set=ConnectionRequest) request.peer = 'some_peer' @@ -61,7 +61,7 @@ def test_on_connect(): assert protocol.client_peer == 'some_peer' -def test_on_open(factory): +def test_on_open(factory: Mock) -> None: protocol = EventWebsocketProtocol() protocol.factory = factory @@ -70,7 +70,7 @@ def test_on_open(factory): factory.register.assert_called_once_with(protocol) -def test_on_close(factory): +def test_on_close(factory: Mock) -> None: protocol = EventWebsocketProtocol() protocol.factory = factory @@ -79,7 +79,7 @@ def test_on_close(factory): factory.unregister.assert_called_once_with(protocol) -def test_send_event_response(): +def test_send_event_response() -> None: protocol = EventWebsocketProtocol() protocol.sendMessage = Mock() response = EventResponse( @@ -99,7 +99,8 @@ def test_send_event_response(): expected_payload = (b'{"type":"EVENT","peer_id":"my_peer_id","network":"my_network","event":{"id":10,' b'"timestamp":123.0,"type":"VERTEX_METADATA_CHANGED","data":{"hash":"abc","nonce":123,' - b'"timestamp":456,"version":1,"weight":10.0,"inputs":[],"outputs":[],"parents":[],' + b'"timestamp":456,"signal_bits":0,"version":1,"weight":10.0,"inputs":[],"outputs":[],' + b'"parents":[],' b'"tokens":[],"token_name":null,"token_symbol":null,"metadata":{"hash":"abc",' b'"spent_outputs":[],"conflict_with":[],"voided_by":[],"received_by":[],"children":[],' b'"twins":[],"accumulated_weight":10.0,"score":20.0,"first_block":null,"height":100,' @@ -112,7 +113,11 @@ def test_send_event_response(): @pytest.mark.parametrize('_type', [InvalidRequestType.VALIDATION_ERROR, InvalidRequestType.STREAM_IS_INACTIVE]) @pytest.mark.parametrize('invalid_payload', [None, b'some_payload']) @pytest.mark.parametrize('error_message', [None, 'some error']) -def test_send_invalid_request_response(_type, invalid_payload, error_message): +def test_send_invalid_request_response( + _type: InvalidRequestType, + invalid_payload: bytes | None, + error_message: str | None +) -> None: protocol = EventWebsocketProtocol() protocol.sendMessage = Mock() @@ -173,7 +178,7 @@ def test_can_receive_event( assert result == expected_result -def test_on_valid_stop_message(): +def test_on_valid_stop_message() -> None: protocol = EventWebsocketProtocol() protocol._stream_is_active = True @@ -182,7 +187,7 @@ def test_on_valid_stop_message(): assert not protocol._stream_is_active -def test_stop_message_on_inactive(): +def test_stop_message_on_inactive() -> None: protocol = EventWebsocketProtocol() protocol.sendMessage = Mock() protocol._stream_is_active = False @@ -206,7 +211,7 @@ def test_stop_message_on_inactive(): (10, 0, 10), ] ) -def test_on_valid_ack_message(ack_event_id, window_size, last_sent_event_id): 
+def test_on_valid_ack_message(ack_event_id: int, window_size: int, last_sent_event_id: int) -> None: protocol = EventWebsocketProtocol() protocol._last_sent_event_id = last_sent_event_id protocol.factory = Mock() @@ -235,7 +240,7 @@ def test_on_valid_ack_message(ack_event_id, window_size, last_sent_event_id): (10, 0, 10), ] ) -def test_on_valid_start_message(ack_event_id, window_size, last_sent_event_id): +def test_on_valid_start_message(ack_event_id: int, window_size: int, last_sent_event_id: int | None) -> None: protocol = EventWebsocketProtocol() protocol._last_sent_event_id = last_sent_event_id protocol.factory = Mock() @@ -251,7 +256,7 @@ def test_on_valid_start_message(ack_event_id, window_size, last_sent_event_id): protocol.factory.send_next_event_to_connection.assert_called_once() -def test_ack_message_on_inactive(): +def test_ack_message_on_inactive() -> None: protocol = EventWebsocketProtocol() protocol.sendMessage = Mock() protocol._stream_is_active = False @@ -264,7 +269,7 @@ def test_ack_message_on_inactive(): protocol.sendMessage.assert_called_once_with(response) -def test_start_message_on_active(): +def test_start_message_on_active() -> None: protocol = EventWebsocketProtocol() protocol.sendMessage = Mock() protocol._stream_is_active = True @@ -294,17 +299,21 @@ def test_start_message_on_active(): (5, 1, 10, InvalidRequestType.ACK_TOO_LARGE), ] ) -def test_on_invalid_ack_message(_ack_event_id, last_sent_event_id, ack_event_id, _type): +def test_on_invalid_ack_message( + _ack_event_id: int, + last_sent_event_id: int | None, + ack_event_id: int, + _type: InvalidRequestType, +) -> None: protocol = EventWebsocketProtocol() protocol._ack_event_id = _ack_event_id protocol._last_sent_event_id = last_sent_event_id - protocol.send_invalid_request_response = Mock() protocol._stream_is_active = True payload = f'{{"type": "ACK", "ack_event_id": {ack_event_id}, "window_size": 0}}'.encode('utf8') - protocol.onMessage(payload, False) - - protocol.send_invalid_request_response.assert_called_once_with(_type, payload) + with patch.object(protocol, 'send_invalid_request_response') as mock: + protocol.onMessage(payload, False) + mock.assert_called_once_with(_type, payload) @pytest.mark.parametrize( @@ -318,16 +327,15 @@ def test_on_invalid_ack_message(_ack_event_id, last_sent_event_id, ack_event_id, (10, 5), ] ) -def test_on_invalid_start_message(_ack_event_id, ack_event_id): +def test_on_invalid_start_message(_ack_event_id: int, ack_event_id: int | None) -> None: protocol = EventWebsocketProtocol() protocol._ack_event_id = _ack_event_id - protocol.send_invalid_request_response = Mock() - ack_event_id = 'null' if ack_event_id is None else ack_event_id - payload = f'{{"type": "START_STREAM", "last_ack_event_id": {ack_event_id}, "window_size": 0}}'.encode('utf8') + ack_event_id_str: str = 'null' if ack_event_id is None else f'{ack_event_id}' + payload = f'{{"type": "START_STREAM", "last_ack_event_id": {ack_event_id_str}, "window_size": 0}}'.encode('utf8') - protocol.onMessage(payload, False) - - protocol.send_invalid_request_response.assert_called_once_with(InvalidRequestType.ACK_TOO_SMALL, payload) + with patch.object(protocol, 'send_invalid_request_response') as mock: + protocol.onMessage(payload, False) + mock.assert_called_once_with(InvalidRequestType.ACK_TOO_SMALL, payload) @pytest.mark.parametrize( @@ -343,11 +351,10 @@ def test_on_invalid_start_message(_ack_event_id, ack_event_id): b'{"type": "ACK", "ack_event_id": -10, "window_size": 0}', ] ) -def 
test_validation_error_on_message(payload): +def test_validation_error_on_message(payload: bytes) -> None: protocol = EventWebsocketProtocol() - protocol.send_invalid_request_response = Mock() protocol._stream_is_active = False - protocol.onMessage(payload, False) - - protocol.send_invalid_request_response.assert_called_once_with(InvalidRequestType.VALIDATION_ERROR, payload, ANY) + with patch.object(protocol, 'send_invalid_request_response') as mock: + protocol.onMessage(payload, False) + mock.assert_called_once_with(InvalidRequestType.VALIDATION_ERROR, payload, ANY) diff --git a/tests/execution_manager/__init__.py b/tests/execution_manager/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/execution_manager/test_execution_manager.py b/tests/execution_manager/test_execution_manager.py new file mode 100644 index 000000000..9093c64fc --- /dev/null +++ b/tests/execution_manager/test_execution_manager.py @@ -0,0 +1,47 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import sys +from unittest.mock import Mock, patch + +from hathor.execution_manager import ExecutionManager +from hathor.reactor import ReactorProtocol + + +def test_crash_and_exit() -> None: + def callback() -> None: + pass + + callback_wrapped = Mock(wraps=callback) + log_mock = Mock() + reactor_mock = Mock(spec_set=ReactorProtocol) + manager = ExecutionManager(reactor_mock) + manager._log = log_mock + reason = 'some critical failure' + + manager.register_on_crash_callback(callback_wrapped) + + with patch.object(sys, 'exit') as exit_mock: + manager.crash_and_exit(reason=reason) + + callback_wrapped.assert_called_once() + log_mock.critical.assert_called_once_with( + 'Critical failure occurred, causing the full node to halt execution. 
Manual intervention is required.', + reason=reason, + exc_info=True + ) + + reactor_mock.stop.assert_called_once() + reactor_mock.crash.assert_called_once() + exit_mock.assert_called_once_with(-1) diff --git a/tests/feature_activation/test_bit_signaling_service.py b/tests/feature_activation/test_bit_signaling_service.py index f3b24e140..930ca39f2 100644 --- a/tests/feature_activation/test_bit_signaling_service.py +++ b/tests/feature_activation/test_bit_signaling_service.py @@ -173,7 +173,8 @@ def _test_generate_signal_bits( feature_service=feature_service, tx_storage=Mock(), support_features=support_features, - not_support_features=not_support_features + not_support_features=not_support_features, + feature_storage=Mock(), ) return service.generate_signal_bits(block=Mock()) @@ -216,6 +217,7 @@ def test_support_intersection_validation( tx_storage=Mock(), support_features=support_features, not_support_features=not_support_features, + feature_storage=Mock(), ) message = str(e.value) @@ -256,7 +258,7 @@ def test_non_signaling_features_warning( tx_storage = Mock(spec_set=TransactionStorage) tx_storage.get_best_block = lambda: best_block - def get_bits_description_mock(block): + def get_bits_description_mock(block: Block) -> dict[Feature, FeatureDescription]: if block == best_block: return {} raise NotImplementedError @@ -270,6 +272,7 @@ def get_bits_description_mock(block): tx_storage=tx_storage, support_features=support_features, not_support_features=not_support_features, + feature_storage=Mock(), ) logger_mock = Mock() service._log = logger_mock @@ -283,3 +286,35 @@ def get_bits_description_mock(block): best_block_hash='abc', non_signaling_features=non_signaling_features, ) + + +def test_on_must_signal_not_supported() -> None: + service = BitSignalingService( + feature_settings=Mock(), + feature_service=Mock(), + tx_storage=Mock(), + support_features=set(), + not_support_features={Feature.NOP_FEATURE_1}, + feature_storage=Mock(), + ) + + service.on_must_signal(feature=Feature.NOP_FEATURE_1) + + assert service._support_features == {Feature.NOP_FEATURE_1} + assert service._not_support_features == set() + + +def test_on_must_signal_supported() -> None: + service = BitSignalingService( + feature_settings=Mock(), + feature_service=Mock(), + tx_storage=Mock(), + support_features=set(), + not_support_features=set(), + feature_storage=Mock(), + ) + + service.on_must_signal(feature=Feature.NOP_FEATURE_1) + + assert service._support_features == {Feature.NOP_FEATURE_1} + assert service._not_support_features == set() diff --git a/tests/feature_activation/test_criteria.py b/tests/feature_activation/test_criteria.py index 2d8e5774a..b8ef70d2e 100644 --- a/tests/feature_activation/test_criteria.py +++ b/tests/feature_activation/test_criteria.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
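The new test_execution_manager.py above fixes the expected shutdown sequence of ExecutionManager.crash_and_exit: run the registered on-crash callbacks, log a critical message, stop and crash the reactor, then exit with status -1. A sketch of logic consistent with those assertions is shown below; the _on_crash_callbacks attribute name is an assumption, and the real implementation may order or guard these steps differently.

# Hypothetical sketch matching the assertions in test_crash_and_exit;
# not the actual hathor-core implementation.
def crash_and_exit(self, *, reason: str) -> None:
    # Give registered components a chance to react before halting.
    for callback in self._on_crash_callbacks:
        callback()
    self._log.critical(
        'Critical failure occurred, causing the full node to halt execution. Manual intervention is required.',
        reason=reason,
        exc_info=True,
    )
    # Halt the reactor and terminate the process with a failure status.
    self._reactor.stop()
    self._reactor.crash()
    sys.exit(-1)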
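Likewise, the two new on_must_signal tests in the bit-signaling hunk above define what happens when feature activation enters the MUST_SIGNAL phase: the feature is moved out of the not-support set and into the support set, so the node starts signaling the bit regardless of the operator's earlier choice. A minimal sketch consistent with those assertions, not the actual BitSignalingService code, which likely also persists the choice via the new feature_storage dependency:

# Hypothetical sketch inferred from test_on_must_signal_not_supported
# and test_on_must_signal_supported.
def on_must_signal(self, feature: Feature) -> None:
    # Override any explicit "not support" choice and force signaling.
    self._not_support_features.discard(feature)
    self._support_features.add(feature)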
+from typing import Any + import pytest from pydantic import ValidationError @@ -43,7 +45,7 @@ ) ] ) -def test_valid_criteria(criteria): +def test_valid_criteria(criteria: dict[str, Any]) -> None: Criteria(**criteria).to_validated(evaluation_interval=1000, max_signal_bits=2) @@ -56,10 +58,10 @@ def test_valid_criteria(criteria): (10, 'bit must be lower than max_signal_bits: 10 >= 2') ] ) -def test_bit(bit, error): +def test_bit(bit: int, error: str) -> None: criteria = VALID_CRITERIA | dict(bit=bit) with pytest.raises(ValidationError) as e: - Criteria(**criteria).to_validated(evaluation_interval=1000, max_signal_bits=2) + Criteria(**criteria).to_validated(evaluation_interval=1000, max_signal_bits=2) # type: ignore[arg-type] errors = e.value.errors() assert errors[0]['msg'] == error @@ -75,10 +77,10 @@ def test_bit(bit, error): (100, 'Should be a multiple of evaluation_interval: 100 % 1000 != 0') ] ) -def test_start_height(start_height, error): +def test_start_height(start_height: int, error: str) -> None: criteria = VALID_CRITERIA | dict(start_height=start_height) with pytest.raises(ValidationError) as e: - Criteria(**criteria).to_validated(evaluation_interval=1000, max_signal_bits=2) + Criteria(**criteria).to_validated(evaluation_interval=1000, max_signal_bits=2) # type: ignore[arg-type] errors = e.value.errors() assert errors[0]['msg'] == error @@ -95,10 +97,10 @@ def test_start_height(start_height, error): (3111, 'Should be a multiple of evaluation_interval: 3111 % 1000 != 0') ] ) -def test_timeout_height(timeout_height, error): +def test_timeout_height(timeout_height: int, error: str) -> None: criteria = VALID_CRITERIA | dict(timeout_height=timeout_height) with pytest.raises(ValidationError) as e: - Criteria(**criteria).to_validated(evaluation_interval=1000, max_signal_bits=2) + Criteria(**criteria).to_validated(evaluation_interval=1000, max_signal_bits=2) # type: ignore[arg-type] errors = e.value.errors() assert errors[0]['msg'] == error @@ -113,10 +115,10 @@ def test_timeout_height(timeout_height, error): (100000, 'threshold must not be greater than evaluation_interval: 100000 > 1000') ] ) -def test_threshold(threshold, error): +def test_threshold(threshold: int, error: str) -> None: criteria = VALID_CRITERIA | dict(threshold=threshold) with pytest.raises(ValidationError) as e: - Criteria(**criteria).to_validated(evaluation_interval=1000, max_signal_bits=2) + Criteria(**criteria).to_validated(evaluation_interval=1000, max_signal_bits=2) # type: ignore[arg-type] errors = e.value.errors() assert errors[0]['msg'] == error @@ -132,10 +134,10 @@ def test_threshold(threshold, error): (100, 'Should be a multiple of evaluation_interval: 100 % 1000 != 0'), ] ) -def test_minimum_activation_height(minimum_activation_height, error): +def test_minimum_activation_height(minimum_activation_height: int, error: str) -> None: criteria = VALID_CRITERIA | dict(minimum_activation_height=minimum_activation_height) with pytest.raises(ValidationError) as e: - Criteria(**criteria).to_validated(evaluation_interval=1000, max_signal_bits=2) + Criteria(**criteria).to_validated(evaluation_interval=1000, max_signal_bits=2) # type: ignore[arg-type] errors = e.value.errors() assert errors[0]['msg'] == error @@ -149,10 +151,10 @@ def test_minimum_activation_height(minimum_activation_height, error): ('0.0', 'string does not match regex "^(\\d+\\.\\d+\\.\\d+(-rc\\.\\d+)?|nightly-[a-f0-9]{7,8})$"') ] ) -def test_version(version, error): +def test_version(version: str, error: str) -> None: criteria = VALID_CRITERIA | 
dict(version=version) with pytest.raises(ValidationError) as e: - Criteria(**criteria).to_validated(evaluation_interval=1000, max_signal_bits=2) + Criteria(**criteria).to_validated(evaluation_interval=1000, max_signal_bits=2) # type: ignore[arg-type] errors = e.value.errors() assert errors[0]['msg'] == error diff --git a/tests/feature_activation/test_feature_service.py b/tests/feature_activation/test_feature_service.py index a66af95dc..60c76d8bc 100644 --- a/tests/feature_activation/test_feature_service.py +++ b/tests/feature_activation/test_feature_service.py @@ -119,6 +119,7 @@ def service(feature_settings: FeatureSettings, tx_storage: TransactionStorage) - feature_settings=feature_settings, tx_storage=tx_storage ) + service.bit_signaling_service = Mock() return service @@ -169,6 +170,7 @@ def test_get_state_from_defined( feature_settings=feature_settings, tx_storage=tx_storage ) + service.bit_signaling_service = Mock() block = block_mocks[block_height] result = service.get_state(block=block, feature=Feature.NOP_FEATURE_1) @@ -200,6 +202,7 @@ def test_get_state_from_started_to_failed( feature_settings=feature_settings, tx_storage=tx_storage ) + service.bit_signaling_service = Mock() block = block_mocks[block_height] result = service.get_state(block=block, feature=Feature.NOP_FEATURE_1) @@ -231,11 +234,13 @@ def test_get_state_from_started_to_must_signal_on_timeout( feature_settings=feature_settings, tx_storage=tx_storage ) + service.bit_signaling_service = Mock() block = block_mocks[block_height] result = service.get_state(block=block, feature=Feature.NOP_FEATURE_1) assert result == FeatureState.MUST_SIGNAL + service.bit_signaling_service.on_must_signal.assert_called_once_with(Feature.NOP_FEATURE_1) @pytest.mark.parametrize('block_height', [8, 9, 10, 11]) @@ -263,6 +268,7 @@ def test_get_state_from_started_to_locked_in_on_default_threshold( feature_settings=feature_settings, tx_storage=tx_storage ) + service.bit_signaling_service = Mock() block = block_mocks[block_height] result = service.get_state(block=block, feature=Feature.NOP_FEATURE_1) @@ -294,6 +300,7 @@ def test_get_state_from_started_to_locked_in_on_custom_threshold( feature_settings=feature_settings, tx_storage=tx_storage ) + service.bit_signaling_service = Mock() block = block_mocks[block_height] result = service.get_state(block=block, feature=Feature.NOP_FEATURE_1) @@ -333,6 +340,7 @@ def test_get_state_from_started_to_started( feature_settings=feature_settings, tx_storage=tx_storage ) + service.bit_signaling_service = Mock() block = block_mocks[block_height] result = service.get_state(block=block, feature=Feature.NOP_FEATURE_1) @@ -362,6 +370,7 @@ def test_get_state_from_must_signal_to_locked_in( feature_settings=feature_settings, tx_storage=tx_storage ) + service.bit_signaling_service = Mock() block = block_mocks[block_height] result = service.get_state(block=block, feature=Feature.NOP_FEATURE_1) @@ -394,6 +403,7 @@ def test_get_state_from_locked_in_to_active( feature_settings=feature_settings, tx_storage=tx_storage ) + service.bit_signaling_service = Mock() block = block_mocks[block_height] result = service.get_state(block=block, feature=Feature.NOP_FEATURE_1) @@ -426,6 +436,7 @@ def test_get_state_from_locked_in_to_locked_in( feature_settings=feature_settings, tx_storage=tx_storage ) + service.bit_signaling_service = Mock() block = block_mocks[block_height] result = service.get_state(block=block, feature=Feature.NOP_FEATURE_1) @@ -451,6 +462,7 @@ def test_get_state_from_active(block_mocks: list[Block], tx_storage: 
Transaction feature_settings=feature_settings, tx_storage=tx_storage ) + service.bit_signaling_service = Mock() block = block_mocks[block_height] result = service.get_state(block=block, feature=Feature.NOP_FEATURE_1) @@ -473,6 +485,7 @@ def test_caching_mechanism(block_mocks: list[Block], tx_storage: TransactionStor } ) service = FeatureService(feature_settings=feature_settings, tx_storage=tx_storage) + service.bit_signaling_service = Mock() block = block_mocks[block_height] calculate_new_state_mock = Mock(wraps=service._calculate_new_state) @@ -507,6 +520,7 @@ def test_is_feature_active(block_mocks: list[Block], tx_storage: TransactionStor feature_settings=feature_settings, tx_storage=tx_storage ) + service.bit_signaling_service = Mock() block = block_mocks[block_height] result = service.is_feature_active(block=block, feature=Feature.NOP_FEATURE_1) @@ -531,6 +545,7 @@ def test_get_state_from_failed(block_mocks: list[Block], tx_storage: Transaction feature_settings=feature_settings, tx_storage=tx_storage ) + service.bit_signaling_service = Mock() block = block_mocks[block_height] result = service.get_state(block=block, feature=Feature.NOP_FEATURE_1) @@ -559,6 +574,7 @@ def test_get_bits_description(tx_storage: TransactionStorage) -> None: feature_settings=feature_settings, tx_storage=tx_storage ) + service.bit_signaling_service = Mock() def get_state(self: FeatureService, *, block: Block, feature: Feature) -> FeatureState: states = { @@ -596,6 +612,7 @@ def test_get_ancestor_at_height_invalid( ancestor_height: int ) -> None: service = FeatureService(feature_settings=feature_settings, tx_storage=tx_storage) + service.bit_signaling_service = Mock() block = block_mocks[block_height] with pytest.raises(AssertionError) as e: @@ -625,6 +642,7 @@ def test_get_ancestor_at_height( ancestor_height: int ) -> None: service = FeatureService(feature_settings=feature_settings, tx_storage=tx_storage) + service.bit_signaling_service = Mock() block = block_mocks[block_height] result = service._get_ancestor_at_height(block=block, ancestor_height=ancestor_height) @@ -653,6 +671,7 @@ def test_get_ancestor_at_height_voided( ancestor_height: int ) -> None: service = FeatureService(feature_settings=feature_settings, tx_storage=tx_storage) + service.bit_signaling_service = Mock() block = block_mocks[block_height] parent_block = block_mocks[block_height - 1] parent_block.get_metadata().voided_by = {b'some'} @@ -711,6 +730,7 @@ def test_check_must_signal( } ) service = FeatureService(feature_settings=feature_settings, tx_storage=tx_storage) + service.bit_signaling_service = Mock() block = block_mocks[block_height] result = service.is_signaling_mandatory_features(block) diff --git a/tests/feature_activation/test_feature_simulation.py b/tests/feature_activation/test_feature_simulation.py index c7e8cf253..91b077711 100644 --- a/tests/feature_activation/test_feature_simulation.py +++ b/tests/feature_activation/test_feature_simulation.py @@ -25,11 +25,13 @@ from hathor.feature_activation.resources.feature import FeatureResource from hathor.feature_activation.settings import Settings as FeatureSettings from hathor.simulator import FakeConnection +from hathor.simulator.utils import add_new_blocks from hathor.transaction.exceptions import BlockMustSignalError +from hathor.util import not_none from tests import unittest from tests.resources.base_resource import StubSite from tests.simulation.base import SimulatorTestCase -from tests.utils import HAS_ROCKSDB, add_new_blocks +from tests.utils import HAS_ROCKSDB class 
BaseFeatureSimulationTest(SimulatorTestCase): @@ -41,7 +43,7 @@ def get_simulator_builder(self) -> Builder: def _get_result(web_client: StubSite) -> dict[str, Any]: """Returns the feature activation api response.""" response = web_client.get('feature') - result = response.result.json_value() + result: dict[str, Any] = response.result.json_value() del result['block_hash'] # we don't assert the block hash because it's not always the same @@ -226,6 +228,7 @@ def test_feature(self) -> None: non_signaling_block = manager.generate_mining_block() manager.cpu_mining_service.resolve(non_signaling_block) non_signaling_block.signal_bits = 0b10 + non_signaling_block.update_reward_lock_metadata() with pytest.raises(BlockMustSignalError): manager.verification_service.verify(non_signaling_block) @@ -615,7 +618,7 @@ def test_feature_from_existing_storage(self) -> None: calculate_new_state_mock.reset_mock() manager1.stop() - artifacts1.rocksdb_storage.close() + not_none(artifacts1.rocksdb_storage).close() # new builder is created with the same storage from the previous manager builder2 = self.get_simulator_builder_from_dir(rocksdb_dir).set_settings(settings) diff --git a/tests/feature_activation/test_mining_simulation.py b/tests/feature_activation/test_mining_simulation.py index cb306a693..f65056ff1 100644 --- a/tests/feature_activation/test_mining_simulation.py +++ b/tests/feature_activation/test_mining_simulation.py @@ -143,7 +143,8 @@ def test_signal_bits_in_mining(self) -> None: def _get_signal_bits_from_get_block_template(self, web_client: StubSite) -> int: result = self._get_result(web_client) - return result['signal_bits'] + signal_bits: int = result['signal_bits'] + return signal_bits def _get_signal_bits_from_mining(self, web_client: StubSite) -> int: result = self._get_result(web_client) @@ -153,13 +154,14 @@ def _get_signal_bits_from_mining(self, web_client: StubSite) -> int: @staticmethod def _get_result(web_client: StubSite) -> dict[str, Any]: response = web_client.get('') - return response.result.json_value() + result: dict[str, Any] = response.result.json_value() + return result def _get_last_ws_signal_bits(self, transport: StringTransport) -> int: messages = self._get_transport_messages(transport) assert len(messages) > 0 last_message = messages[-1] - signal_bits = last_message['params'][0]['signal_bits'] + signal_bits: int = last_message['params'][0]['signal_bits'] return signal_bits diff --git a/tests/feature_activation/test_settings.py b/tests/feature_activation/test_settings.py index 04af34229..b2c7eac9a 100644 --- a/tests/feature_activation/test_settings.py +++ b/tests/feature_activation/test_settings.py @@ -11,6 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
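A thread running through the feature-service hunks above: every FeatureService under test now gets a bit_signaling_service attached, and test_get_state_from_started_to_must_signal_on_timeout asserts that reaching MUST_SIGNAL triggers on_must_signal for the feature. The following lines are a hedged sketch of where that hook plausibly lives inside the state computation; _calculate_new_state exists (it is wrapped by test_caching_mechanism), but its parameter names and the exact call site of the hook are assumptions.

# Hypothetical sketch, not the actual FeatureService implementation:
# notify the bit signaling service when a boundary block lands on
# MUST_SIGNAL, before the state is cached and returned.
new_state = self._calculate_new_state(boundary_block=block, feature=feature)
if new_state is FeatureState.MUST_SIGNAL:
    self.bit_signaling_service.on_must_signal(feature)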
+from typing import Any import pytest from pydantic import ValidationError @@ -56,9 +57,9 @@ ) ] ) -def test_valid_settings(features): +def test_valid_settings(features: dict[str, Any]) -> None: data = dict(features=features) - FeatureSettings(**data) + FeatureSettings(**data) # type: ignore[arg-type] @pytest.mark.parametrize( @@ -114,14 +115,14 @@ def test_valid_settings(features): ) ] ) -def test_conflicting_bits(features): +def test_conflicting_bits(features: list[dict[str, Any]]) -> None: with pytest.raises(ValidationError) as e: data = dict(features=features) - FeatureSettings(**data) + FeatureSettings(**data) # type: ignore[arg-type] errors = e.value.errors() assert errors[0]['msg'] == 'At least one pair of Features have the same bit configured for an overlapping ' \ - 'interval: Feature.NOP_FEATURE_1 and Feature.NOP_FEATURE_2' + 'interval: NOP_FEATURE_1 and NOP_FEATURE_2' @pytest.mark.parametrize( @@ -131,10 +132,10 @@ def test_conflicting_bits(features): (100, 101, 'default_threshold must not be greater than evaluation_interval: 101 > 100') ] ) -def test_default_threshold(evaluation_interval, default_threshold, error): +def test_default_threshold(evaluation_interval: int, default_threshold: int, error: str) -> None: with pytest.raises(ValidationError) as e: data = dict(evaluation_interval=evaluation_interval, default_threshold=default_threshold) - FeatureSettings(**data) + FeatureSettings(**data) # type: ignore[arg-type] errors = e.value.errors() assert errors[0]['msg'] == error @@ -160,5 +161,5 @@ def test_default_threshold(evaluation_interval, default_threshold, error): ) ] ) -def test_find_overlap(intervals, expected): +def test_find_overlap(intervals: list[FeatureInterval], expected: tuple[FeatureInterval, FeatureInterval]) -> None: assert expected == _find_overlap(intervals) diff --git a/tests/others/test_cli_builder.py b/tests/others/test_cli_builder.py index 64e95e208..96a4aaeca 100644 --- a/tests/others/test_cli_builder.py +++ b/tests/others/test_cli_builder.py @@ -57,7 +57,7 @@ def test_all_default(self): self.assertIsInstance(manager.tx_storage.indexes, RocksDBIndexesManager) self.assertIsNone(manager.wallet) self.assertEqual('unittests', manager.network) - self.assertTrue(manager.connections.is_sync_version_enabled(SyncVersion.V1_1)) + self.assertFalse(manager.connections.is_sync_version_enabled(SyncVersion.V1_1)) self.assertTrue(manager.connections.is_sync_version_enabled(SyncVersion.V2)) self.assertFalse(self.resources_builder._built_prometheus) self.assertFalse(self.resources_builder._built_status) @@ -103,7 +103,7 @@ def test_memory_storage_with_rocksdb_indexes(self): def test_sync_default(self): manager = self._build(['--memory-storage']) - self.assertTrue(manager.connections.is_sync_version_enabled(SyncVersion.V1_1)) + self.assertFalse(manager.connections.is_sync_version_enabled(SyncVersion.V1_1)) self.assertTrue(manager.connections.is_sync_version_enabled(SyncVersion.V2)) def test_sync_bridge(self): diff --git a/tests/others/test_metrics.py b/tests/others/test_metrics.py index 41c4ddb25..f799fc961 100644 --- a/tests/others/test_metrics.py +++ b/tests/others/test_metrics.py @@ -109,6 +109,7 @@ def _init_manager(): b'migrations': 0.0, b'event': 0.0, b'event-metadata': 0.0, + b'feature-activation-metadata': 0.0, }) manager.tx_storage.pre_init() @@ -161,6 +162,7 @@ def _init_manager(): b'migrations': 0.0, b'event': 0.0, b'event-metadata': 0.0, + b'feature-activation-metadata': 0.0, }) manager.tx_storage.pre_init() diff --git a/tests/p2p/netfilter/test_factory.py 
b/tests/p2p/netfilter/test_factory.py index 2dc4d5cde..53ca409c8 100644 --- a/tests/p2p/netfilter/test_factory.py +++ b/tests/p2p/netfilter/test_factory.py @@ -1,3 +1,5 @@ +from unittest.mock import Mock + from twisted.internet.address import IPv4Address from hathor.p2p.netfilter import get_table @@ -10,7 +12,7 @@ class NetfilterFactoryTest(unittest.TestCase): - def test_factory(self): + def test_factory(self) -> None: pre_conn = get_table('filter').get_chain('pre_conn') match = NetfilterMatchIPAddress('192.168.0.1/32') @@ -20,7 +22,7 @@ def test_factory(self): builder = TestBuilder() artifacts = builder.build() wrapped_factory = artifacts.p2p_manager.server_factory - factory = NetfilterFactory(connections=None, wrappedFactory=wrapped_factory) + factory = NetfilterFactory(connections=Mock(), wrappedFactory=wrapped_factory) ret = factory.buildProtocol(IPv4Address('TCP', '192.168.0.1', 1234)) self.assertIsNone(ret) diff --git a/tests/p2p/netfilter/test_match.py b/tests/p2p/netfilter/test_match.py index ce59c28dd..39bb844fe 100644 --- a/tests/p2p/netfilter/test_match.py +++ b/tests/p2p/netfilter/test_match.py @@ -22,7 +22,7 @@ def match(self, context: 'NetfilterContext') -> bool: class NetfilterMatchTest(unittest.TestCase): - def test_match_all(self): + def test_match_all(self) -> None: matcher = NetfilterMatchAll() context = NetfilterContext() self.assertTrue(matcher.match(context)) @@ -31,7 +31,7 @@ def test_match_all(self): json = matcher.to_json() self.assertEqual(json['type'], 'NetfilterMatchAll') - def test_never_match(self): + def test_never_match(self) -> None: matcher = NetfilterNeverMatch() context = NetfilterContext() self.assertFalse(matcher.match(context)) @@ -40,14 +40,14 @@ def test_never_match(self): json = matcher.to_json() self.assertEqual(json['type'], 'NetfilterNeverMatch') - def test_match_and_success(self): + def test_match_and_success(self) -> None: m1 = NetfilterMatchAll() m2 = NetfilterMatchAll() matcher = NetfilterMatchAnd(m1, m2) context = NetfilterContext() self.assertTrue(matcher.match(context)) - def test_match_and_fail_01(self): + def test_match_and_fail_01(self) -> None: m1 = NetfilterNeverMatch() m2 = NetfilterMatchAll() matcher = NetfilterMatchAnd(m1, m2) @@ -60,28 +60,28 @@ def test_match_and_fail_01(self): self.assertEqual(json['match_params']['a']['type'], 'NetfilterNeverMatch') self.assertEqual(json['match_params']['b']['type'], 'NetfilterMatchAll') - def test_match_and_fail_10(self): + def test_match_and_fail_10(self) -> None: m1 = NetfilterMatchAll() m2 = NetfilterNeverMatch() matcher = NetfilterMatchAnd(m1, m2) context = NetfilterContext() self.assertFalse(matcher.match(context)) - def test_match_and_fail_00(self): + def test_match_and_fail_00(self) -> None: m1 = NetfilterNeverMatch() m2 = NetfilterNeverMatch() matcher = NetfilterMatchAnd(m1, m2) context = NetfilterContext() self.assertFalse(matcher.match(context)) - def test_match_or_success_11(self): + def test_match_or_success_11(self) -> None: m1 = NetfilterMatchAll() m2 = NetfilterMatchAll() matcher = NetfilterMatchOr(m1, m2) context = NetfilterContext() self.assertTrue(matcher.match(context)) - def test_match_or_success_10(self): + def test_match_or_success_10(self) -> None: m1 = NetfilterMatchAll() m2 = NetfilterNeverMatch() matcher = NetfilterMatchOr(m1, m2) @@ -94,21 +94,21 @@ def test_match_or_success_10(self): self.assertEqual(json['match_params']['a']['type'], 'NetfilterMatchAll') self.assertEqual(json['match_params']['b']['type'], 'NetfilterNeverMatch') - def 
test_match_or_success_01(self): + def test_match_or_success_01(self) -> None: m1 = NetfilterNeverMatch() m2 = NetfilterMatchAll() matcher = NetfilterMatchOr(m1, m2) context = NetfilterContext() self.assertTrue(matcher.match(context)) - def test_match_or_fail_00(self): + def test_match_or_fail_00(self) -> None: m1 = NetfilterNeverMatch() m2 = NetfilterNeverMatch() matcher = NetfilterMatchOr(m1, m2) context = NetfilterContext() self.assertFalse(matcher.match(context)) - def test_match_ip_address_empty_context(self): + def test_match_ip_address_empty_context(self) -> None: matcher = NetfilterMatchIPAddress('192.168.0.0/24') context = NetfilterContext() self.assertFalse(matcher.match(context)) @@ -118,7 +118,7 @@ def test_match_ip_address_empty_context(self): self.assertEqual(json['type'], 'NetfilterMatchIPAddress') self.assertEqual(json['match_params']['host'], '192.168.0.0/24') - def test_match_ip_address_ipv4_net(self): + def test_match_ip_address_ipv4_net(self) -> None: matcher = NetfilterMatchIPAddress('192.168.0.0/24') context = NetfilterContext(addr=IPv4Address('TCP', '192.168.0.10', 1234)) self.assertTrue(matcher.match(context)) @@ -129,7 +129,7 @@ def test_match_ip_address_ipv4_net(self): context = NetfilterContext(addr=IPv4Address('TCP', '', 1234)) self.assertFalse(matcher.match(context)) - def test_match_ip_address_ipv4_ip(self): + def test_match_ip_address_ipv4_ip(self) -> None: matcher = NetfilterMatchIPAddress('192.168.0.1/32') context = NetfilterContext(addr=IPv4Address('TCP', '192.168.0.1', 1234)) self.assertTrue(matcher.match(context)) @@ -138,24 +138,24 @@ def test_match_ip_address_ipv4_ip(self): context = NetfilterContext(addr=IPv4Address('TCP', '', 1234)) self.assertFalse(matcher.match(context)) - def test_match_ip_address_ipv4_hostname(self): + def test_match_ip_address_ipv4_hostname(self) -> None: matcher = NetfilterMatchIPAddress('192.168.0.1/32') - context = NetfilterContext(addr=HostnameAddress('hathor.network', 80)) + context = NetfilterContext(addr=HostnameAddress(b'hathor.network', 80)) self.assertFalse(matcher.match(context)) - def test_match_ip_address_ipv4_unix(self): + def test_match_ip_address_ipv4_unix(self) -> None: matcher = NetfilterMatchIPAddress('192.168.0.1/32') context = NetfilterContext(addr=UNIXAddress('/unix.sock')) self.assertFalse(matcher.match(context)) - def test_match_ip_address_ipv4_ipv6(self): + def test_match_ip_address_ipv4_ipv6(self) -> None: matcher = NetfilterMatchIPAddress('192.168.0.1/32') context = NetfilterContext(addr=IPv6Address('TCP', '2001:db8::', 80)) self.assertFalse(matcher.match(context)) context = NetfilterContext(addr=IPv6Address('TCP', '', 80)) self.assertFalse(matcher.match(context)) - def test_match_ip_address_ipv6_net(self): + def test_match_ip_address_ipv6_net(self) -> None: matcher = NetfilterMatchIPAddress('2001:0db8:0:f101::/64') context = NetfilterContext(addr=IPv6Address('TCP', '2001:db8::8a2e:370:7334', 1234)) self.assertFalse(matcher.match(context)) @@ -167,7 +167,7 @@ def test_match_ip_address_ipv6_net(self): self.assertEqual(json['type'], 'NetfilterMatchIPAddress') self.assertEqual(json['match_params']['host'], str(ip_network('2001:0db8:0:f101::/64'))) - def test_match_ip_address_ipv6_ip(self): + def test_match_ip_address_ipv6_ip(self) -> None: matcher = NetfilterMatchIPAddress('2001:0db8:0:f101::1/128') context = NetfilterContext(addr=IPv6Address('TCP', '2001:db8:0:f101::1', 1234)) self.assertTrue(matcher.match(context)) @@ -176,22 +176,22 @@ def test_match_ip_address_ipv6_ip(self): context = 
NetfilterContext(addr=IPv6Address('TCP', '2001:db8:0:f101:2::7334', 1234)) self.assertFalse(matcher.match(context)) - def test_match_ip_address_ipv6_hostname(self): + def test_match_ip_address_ipv6_hostname(self) -> None: matcher = NetfilterMatchIPAddress('2001:0db8:0:f101::1/128') - context = NetfilterContext(addr=HostnameAddress('hathor.network', 80)) + context = NetfilterContext(addr=HostnameAddress(b'hathor.network', 80)) self.assertFalse(matcher.match(context)) - def test_match_ip_address_ipv6_unix(self): + def test_match_ip_address_ipv6_unix(self) -> None: matcher = NetfilterMatchIPAddress('2001:0db8:0:f101::1/128') context = NetfilterContext(addr=UNIXAddress('/unix.sock')) self.assertFalse(matcher.match(context)) - def test_match_ip_address_ipv6_ipv4(self): + def test_match_ip_address_ipv6_ipv4(self) -> None: matcher = NetfilterMatchIPAddress('2001:0db8:0:f101::1/128') context = NetfilterContext(addr=IPv4Address('TCP', '192.168.0.1', 1234)) self.assertFalse(matcher.match(context)) - def test_match_peer_id_empty_context(self): + def test_match_peer_id_empty_context(self) -> None: matcher = NetfilterMatchPeerId('123') context = NetfilterContext() self.assertFalse(matcher.match(context)) @@ -200,7 +200,7 @@ def test_match_peer_id_empty_context(self): class BaseNetfilterMatchTest(unittest.TestCase): __test__ = False - def test_match_peer_id(self): + def test_match_peer_id(self) -> None: network = 'testnet' peer_id1 = PeerId() peer_id2 = PeerId() diff --git a/tests/p2p/netfilter/test_match_remote.py b/tests/p2p/netfilter/test_match_remote.py index 89df4acf7..1947f39be 100644 --- a/tests/p2p/netfilter/test_match_remote.py +++ b/tests/p2p/netfilter/test_match_remote.py @@ -6,7 +6,7 @@ class NetfilterMatchRemoteTest(unittest.TestCase): - def test_match_ip(self): + def test_match_ip(self) -> None: matcher = NetfilterMatchIPAddressRemoteURL('test', self.clock, 'http://localhost:8080') context = NetfilterContext(addr=IPv4Address('TCP', '192.168.0.1', 1234)) self.assertFalse(matcher.match(context)) diff --git a/tests/p2p/netfilter/test_tables.py b/tests/p2p/netfilter/test_tables.py index 6c845ec5e..a3505aa01 100644 --- a/tests/p2p/netfilter/test_tables.py +++ b/tests/p2p/netfilter/test_tables.py @@ -6,17 +6,17 @@ class NetfilterTableTest(unittest.TestCase): - def test_default_table_filter(self): + def test_default_table_filter(self) -> None: tb_filter = get_table('filter') tb_filter.get_chain('pre_conn') tb_filter.get_chain('post_hello') tb_filter.get_chain('post_peerid') - def test_default_table_not_exists(self): + def test_default_table_not_exists(self) -> None: with self.assertRaises(KeyError): get_table('do-not-exists') - def test_add_get_chain(self): + def test_add_get_chain(self) -> None: mytable = NetfilterTable('mytable') mychain = NetfilterChain('mychain', NetfilterAccept()) mytable.add_chain(mychain) diff --git a/tests/p2p/netfilter/test_utils.py b/tests/p2p/netfilter/test_utils.py index cde078af0..127cf9b0d 100644 --- a/tests/p2p/netfilter/test_utils.py +++ b/tests/p2p/netfilter/test_utils.py @@ -4,7 +4,7 @@ class NetfilterUtilsTest(unittest.TestCase): - def test_peer_id_blacklist(self): + def test_peer_id_blacklist(self) -> None: post_peerid = get_table('filter').get_chain('post_peerid') # Chain starts empty diff --git a/tests/p2p/test_capabilities.py b/tests/p2p/test_capabilities.py index 0380abaf4..022fb1fc6 100644 --- a/tests/p2p/test_capabilities.py +++ b/tests/p2p/test_capabilities.py @@ -1,3 +1,4 @@ +from hathor.p2p.states import ReadyState from hathor.p2p.sync_v1.agent import 
NodeSyncTimestamp from hathor.p2p.sync_v2.agent import NodeBlockSync from hathor.simulator import FakeConnection @@ -5,7 +6,7 @@ class SyncV1HathorCapabilitiesTestCase(unittest.SyncV1Params, unittest.TestCase): - def test_capabilities(self): + def test_capabilities(self) -> None: network = 'testnet' manager1 = self.create_peer(network, capabilities=[self._settings.CAPABILITY_WHITELIST]) manager2 = self.create_peer(network, capabilities=[]) @@ -18,6 +19,8 @@ def test_capabilities(self): self.clock.advance(0.1) # Even if we don't have the capability we must connect because the whitelist url conf is None + assert isinstance(conn._proto1.state, ReadyState) + assert isinstance(conn._proto2.state, ReadyState) self.assertEqual(conn._proto1.state.state_name, 'READY') self.assertEqual(conn._proto2.state.state_name, 'READY') self.assertIsInstance(conn._proto1.state.sync_agent, NodeSyncTimestamp) @@ -33,6 +36,8 @@ def test_capabilities(self): conn2.run_one_step(debug=True) self.clock.advance(0.1) + assert isinstance(conn2._proto1.state, ReadyState) + assert isinstance(conn2._proto2.state, ReadyState) self.assertEqual(conn2._proto1.state.state_name, 'READY') self.assertEqual(conn2._proto2.state.state_name, 'READY') self.assertIsInstance(conn2._proto1.state.sync_agent, NodeSyncTimestamp) @@ -40,7 +45,7 @@ def test_capabilities(self): class SyncV2HathorCapabilitiesTestCase(unittest.SyncV2Params, unittest.TestCase): - def test_capabilities(self): + def test_capabilities(self) -> None: network = 'testnet' manager1 = self.create_peer(network, capabilities=[self._settings.CAPABILITY_WHITELIST, self._settings.CAPABILITY_SYNC_VERSION]) @@ -54,6 +59,8 @@ def test_capabilities(self): self.clock.advance(0.1) # Even if we don't have the capability we must connect because the whitelist url conf is None + assert isinstance(conn._proto1.state, ReadyState) + assert isinstance(conn._proto2.state, ReadyState) self.assertEqual(conn._proto1.state.state_name, 'READY') self.assertEqual(conn._proto2.state.state_name, 'READY') self.assertIsInstance(conn._proto1.state.sync_agent, NodeBlockSync) @@ -71,6 +78,8 @@ def test_capabilities(self): conn2.run_one_step(debug=True) self.clock.advance(0.1) + assert isinstance(conn2._proto1.state, ReadyState) + assert isinstance(conn2._proto2.state, ReadyState) self.assertEqual(conn2._proto1.state.state_name, 'READY') self.assertEqual(conn2._proto2.state.state_name, 'READY') self.assertIsInstance(conn2._proto1.state.sync_agent, NodeBlockSync) diff --git a/tests/p2p/test_connections.py b/tests/p2p/test_connections.py index 03f56358f..c75abea7e 100644 --- a/tests/p2p/test_connections.py +++ b/tests/p2p/test_connections.py @@ -8,7 +8,7 @@ class ConnectionsTest(unittest.TestCase): @pytest.mark.skipif(sys.platform == 'win32', reason='run_server is very finicky on Windows') - def test_connections(self): + def test_connections(self) -> None: process = run_server() process2 = run_server(listen=8006, status=8086, bootstrap='tcp://127.0.0.1:8005') process3 = run_server(listen=8007, status=8087, bootstrap='tcp://127.0.0.1:8005') @@ -17,7 +17,7 @@ def test_connections(self): process2.terminate() process3.terminate() - def test_manager_connections(self): + def test_manager_connections(self) -> None: manager = self.create_peer('testnet', enable_sync_v1=True, enable_sync_v2=False) endpoint = 'tcp://127.0.0.1:8005' diff --git a/tests/p2p/test_double_spending.py b/tests/p2p/test_double_spending.py index 9eb408ee2..21b74d620 100644 --- a/tests/p2p/test_double_spending.py +++ 
b/tests/p2p/test_double_spending.py @@ -1,5 +1,10 @@ +from unittest.mock import Mock + from hathor.crypto.util import decode_address +from hathor.manager import HathorManager from hathor.simulator.utils import add_new_blocks +from hathor.transaction import Transaction +from hathor.util import not_none from tests import unittest from tests.utils import add_blocks_unlock_reward, add_new_tx @@ -7,7 +12,7 @@ class BaseHathorSyncMethodsTestCase(unittest.TestCase): __test__ = False - def setUp(self): + def setUp(self) -> None: super().setUp() self.network = 'testnet' @@ -16,16 +21,16 @@ def setUp(self): self.genesis = self.manager1.tx_storage.get_all_genesis() self.genesis_blocks = [tx for tx in self.genesis if tx.is_block] - def _add_new_transactions(self, manager, num_txs): + def _add_new_transactions(self, manager: HathorManager, num_txs: int) -> list[Transaction]: txs = [] for _ in range(num_txs): - address = self.get_address(0) + address = not_none(self.get_address(0)) value = self.rng.choice([5, 10, 15, 20]) tx = add_new_tx(manager, address, value) txs.append(tx) return txs - def test_simple_double_spending(self): + def test_simple_double_spending(self) -> None: add_new_blocks(self.manager1, 5, advance_clock=15) add_blocks_unlock_reward(self.manager1) @@ -33,6 +38,7 @@ def test_simple_double_spending(self): from hathor.wallet.base_wallet import WalletOutputInfo address = self.get_address(0) + assert address is not None value = 500 outputs = [] @@ -125,7 +131,7 @@ def test_simple_double_spending(self): self.assertConsensusValid(self.manager1) - def test_double_spending_propagation(self): + def test_double_spending_propagation(self) -> None: blocks = add_new_blocks(self.manager1, 4, advance_clock=15) add_blocks_unlock_reward(self.manager1) @@ -165,7 +171,7 @@ def test_double_spending_propagation(self): outputs = [WalletOutputInfo(address=address, value=value, timelock=None), WalletOutputInfo(address=address, value=tx_total_value - 500, timelock=None)] self.clock.advance(1) - inputs = [WalletInputInfo(i.tx_id, i.index, b'') for i in tx1.inputs] + inputs = [WalletInputInfo(i.tx_id, i.index, Mock()) for i in tx1.inputs] tx4 = self.manager1.wallet.prepare_transaction_incomplete_inputs(Transaction, inputs, outputs, self.manager1.tx_storage) tx4.weight = 5 @@ -186,7 +192,7 @@ def test_double_spending_propagation(self): address = self.manager1.wallet.get_unused_address_bytes() value = 100 - inputs = [WalletInputInfo(tx_id=tx1.hash, index=1, private_key=None)] + inputs = [WalletInputInfo(tx_id=tx1.hash, index=1, private_key=Mock())] outputs = [WalletOutputInfo(address=address, value=int(value), timelock=None)] self.clock.advance(1) tx2 = self.manager1.wallet.prepare_transaction_incomplete_inputs(Transaction, inputs, outputs, @@ -236,7 +242,7 @@ def test_double_spending_propagation(self): address = self.manager1.wallet.get_unused_address_bytes() value = 500 - inputs = [WalletInputInfo(tx_id=tx4.hash, index=0, private_key=None)] + inputs = [WalletInputInfo(tx_id=tx4.hash, index=0, private_key=Mock())] outputs = [WalletOutputInfo(address=address, value=int(value), timelock=None)] self.clock.advance(1) tx5 = self.manager1.wallet.prepare_transaction_incomplete_inputs(Transaction, inputs, outputs, force=True, @@ -273,7 +279,7 @@ def test_double_spending_propagation(self): address = self.manager1.wallet.get_unused_address_bytes() value = blocks[3].outputs[0].value - inputs = [WalletInputInfo(tx_id=blocks[3].hash, index=0, private_key=None)] + inputs = [WalletInputInfo(tx_id=blocks[3].hash, index=0, 
private_key=Mock())] outputs = [WalletOutputInfo(address=address, value=value, timelock=None)] self.clock.advance(1) tx7 = self.manager1.wallet.prepare_transaction_incomplete_inputs(Transaction, inputs, outputs, diff --git a/tests/p2p/test_get_best_blockchain.py b/tests/p2p/test_get_best_blockchain.py index 4d00ea55b..ff0d95149 100644 --- a/tests/p2p/test_get_best_blockchain.py +++ b/tests/p2p/test_get_best_blockchain.py @@ -1,4 +1,4 @@ -from twisted.internet.defer import inlineCallbacks +from twisted.internet.protocol import Protocol from hathor.indexes.height_index import HeightInfo from hathor.p2p.messages import ProtocolMessages @@ -17,18 +17,15 @@ class BaseGetBestBlockchainTestCase(SimulatorTestCase): seed_config = 6 - def _send_cmd(self, proto, cmd, payload=None): + def _send_cmd(self, proto: Protocol, cmd: str, payload: str | None = None) -> None: if not payload: line = '{}\r\n'.format(cmd) else: line = '{} {}\r\n'.format(cmd, payload) - if isinstance(line, str): - line = line.encode('utf-8') + proto.dataReceived(line.encode('utf-8')) - return proto.dataReceived(line) - - def test_get_best_blockchain(self): + def test_get_best_blockchain(self) -> None: manager1 = self.create_peer() manager2 = self.create_peer() conn12 = FakeConnection(manager1, manager2, latency=0.05) @@ -54,8 +51,8 @@ def test_get_best_blockchain(self): # assert the protocol is in ReadyState state1 = protocol1.state state2 = protocol2.state - self.assertIsInstance(state1, ReadyState) - self.assertIsInstance(state2, ReadyState) + assert isinstance(state1, ReadyState) + assert isinstance(state2, ReadyState) # assert ReadyState commands self.assertIn(ProtocolMessages.GET_BEST_BLOCKCHAIN, state1.cmd_map) @@ -81,10 +78,10 @@ def test_get_best_blockchain(self): self.assertEqual(self._settings.DEFAULT_BEST_BLOCKCHAIN_BLOCKS, len(state1.peer_best_blockchain)) self.assertEqual(self._settings.DEFAULT_BEST_BLOCKCHAIN_BLOCKS, len(state2.peer_best_blockchain)) - self.assertIsInstance(state1.peer_best_blockchain[0], HeightInfo) - self.assertIsInstance(state2.peer_best_blockchain[0], HeightInfo) + assert isinstance(state1.peer_best_blockchain[0], HeightInfo) + assert isinstance(state2.peer_best_blockchain[0], HeightInfo) - def test_handle_get_best_blockchain(self): + def test_handle_get_best_blockchain(self) -> None: manager1 = self.create_peer() manager2 = self.create_peer() conn12 = FakeConnection(manager1, manager2, latency=0.05) @@ -101,13 +98,13 @@ def test_handle_get_best_blockchain(self): self.assertEqual(1, len(connected_peers1)) protocol2 = connected_peers1[0] state2 = protocol2.state - self.assertIsInstance(state2, ReadyState) + assert isinstance(state2, ReadyState) connected_peers2 = list(manager2.connections.connected_peers.values()) self.assertEqual(1, len(connected_peers2)) protocol1 = connected_peers2[0] state1 = protocol1.state - self.assertIsInstance(state1, ReadyState) + assert isinstance(state1, ReadyState) # assert compliance with N blocks inside the boundaries state1.send_get_best_blockchain(n_blocks=1) @@ -141,7 +138,7 @@ def test_handle_get_best_blockchain(self): self.assertEqual(1, len(connected_peers2)) protocol1 = connected_peers2[0] state1 = protocol1.state - self.assertIsInstance(state1, ReadyState) + assert isinstance(state1, ReadyState) # assert param validation exception closes connection state1.handle_get_best_blockchain('invalid single value') @@ -149,7 +146,7 @@ def test_handle_get_best_blockchain(self): # state1 is managed by manager2 self.assertTrue(conn12.tr2.disconnecting) - def 
test_handle_best_blockchain(self): + def test_handle_best_blockchain(self) -> None: manager1 = self.create_peer() manager2 = self.create_peer() conn12 = FakeConnection(manager1, manager2, latency=0.05) @@ -160,19 +157,19 @@ def test_handle_best_blockchain(self): self.assertEqual(1, len(connected_peers1)) protocol2 = connected_peers1[0] state2 = protocol2.state - self.assertIsInstance(state2, ReadyState) + assert isinstance(state2, ReadyState) connected_peers2 = list(manager2.connections.connected_peers.values()) self.assertEqual(1, len(connected_peers2)) protocol1 = connected_peers2[0] state1 = protocol1.state - self.assertIsInstance(state1, ReadyState) + assert isinstance(state1, ReadyState) self.assertFalse(conn12.tr1.disconnecting) self.simulator.run(60) # assert a valid blockchain keeps connections open - fake_blockchain = [ + fake_blockchain: list[tuple[float, str]] = [ (1, '0000000000000002eccfbca9bc06c449c01f37afb3cb49c04ee62921d9bcf9dc'), (2, '00000000000000006c846e182462a2cc437070288a486dfa21aa64bb373b8507'), ] @@ -203,7 +200,7 @@ def test_handle_best_blockchain(self): self.simulator.run(60) self.assertTrue(conn12.tr2.disconnecting) - def test_node_without_get_best_blockchain_capability(self): + def test_node_without_get_best_blockchain_capability(self) -> None: manager1 = self.create_peer() manager2 = self.create_peer() @@ -232,10 +229,10 @@ def test_node_without_get_best_blockchain_capability(self): # assert the peers don't engage in get_best_blockchain messages state2 = protocol2.state - self.assertIsInstance(state2, ReadyState) + assert isinstance(state2, ReadyState) self.assertIsNone(state2.lc_get_best_blockchain) state1 = protocol1.state - self.assertIsInstance(state1, ReadyState) + assert isinstance(state1, ReadyState) self.assertIsNone(state1.lc_get_best_blockchain) # assert the connections remains open @@ -261,7 +258,7 @@ def test_node_without_get_best_blockchain_capability(self): self.simulator.run(60) self.assertTrue(conn12.tr2.disconnecting) - def test_best_blockchain_from_storage(self): + def test_best_blockchain_from_storage(self) -> None: manager1 = self.create_peer() manager2 = self.create_peer() conn12 = FakeConnection(manager1, manager2, latency=0.05) @@ -281,8 +278,8 @@ def test_best_blockchain_from_storage(self): self.assertTrue(block is memo_block) # cache miss if best block doesn't match - fake_block = HeightInfo(1, 'fake hash') - manager1._latest_n_height_tips = [fake_block] + fake_block = HeightInfo(1, b'fake hash') + # manager1._latest_n_height_tips = [fake_block] # FIXME: This property is not defined. Fix this test. 
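# For reference (not part of the diff): once the real cache attribute is
# known, the cache miss could be forced with `patch.object` instead of a
# plain assignment. With the default create=False, patching a name that does
# not exist raises AttributeError, so the situation in the FIXME above would
# fail loudly instead of silently setting a dead attribute, and the original
# value is restored on exit. `_latest_n_height_tips` here is the hypothetical
# name from the FIXME, not a confirmed API.
from unittest.mock import patch

with patch.object(manager1, '_latest_n_height_tips', [fake_block]):
    manager1.tx_storage.get_n_height_tips(1)  # query while the fake cache entry is in place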
best_blockchain = manager1.tx_storage.get_n_height_tips(1) # there is only the genesis block block = best_blockchain[0] # the memoized best_blockchain is skiped @@ -309,7 +306,7 @@ def test_best_blockchain_from_storage(self): block = best_blockchain[0] self.assertTrue(block is memo_block) - def test_stop_looping_on_exit(self): + def test_stop_looping_on_exit(self) -> None: manager1 = self.create_peer() manager2 = self.create_peer() conn12 = FakeConnection(manager1, manager2, latency=0.05) @@ -320,18 +317,18 @@ def test_stop_looping_on_exit(self): self.assertEqual(1, len(connected_peers1)) protocol2 = connected_peers1[0] state2 = protocol2.state - self.assertIsInstance(state2, ReadyState) + assert isinstance(state2, ReadyState) connected_peers2 = list(manager2.connections.connected_peers.values()) self.assertEqual(1, len(connected_peers2)) protocol1 = connected_peers2[0] state1 = protocol1.state - self.assertIsInstance(state1, ReadyState) + assert isinstance(state1, ReadyState) - self.assertIsNotNone(state1.lc_get_best_blockchain) + assert state1.lc_get_best_blockchain is not None self.assertTrue(state1.lc_get_best_blockchain.running) - self.assertIsNotNone(state2.lc_get_best_blockchain) + assert state2.lc_get_best_blockchain is not None self.assertTrue(state2.lc_get_best_blockchain.running) state1.on_exit() @@ -343,8 +340,7 @@ def test_stop_looping_on_exit(self): self.assertIsNotNone(state2.lc_get_best_blockchain) self.assertFalse(state2.lc_get_best_blockchain.running) - @inlineCallbacks - def test_best_blockchain_from_status_resource(self): + async def test_best_blockchain_from_status_resource(self) -> None: manager1 = self.create_peer() manager2 = self.create_peer() conn12 = FakeConnection(manager1, manager2, latency=0.05) @@ -353,7 +349,7 @@ def test_best_blockchain_from_status_resource(self): # check /status before generate blocks self.web = StubSite(StatusResource(manager1)) - response = yield self.web.get("status") + response = await self.web.get("status") data = response.json_value() connections = data.get('connections') self.assertEqual(len(connections['connected_peers']), 1) @@ -385,7 +381,7 @@ def test_best_blockchain_from_status_resource(self): self.simulator.run(60) # check /status after mine blocks - response = yield self.web.get("status") + response = await self.web.get("status") data = response.json_value() connections = data.get('connections') self.assertEqual(len(connections['connected_peers']), 1) diff --git a/tests/p2p/test_peer_id.py b/tests/p2p/test_peer_id.py index c3e8be202..bccb9bcb2 100644 --- a/tests/p2p/test_peer_id.py +++ b/tests/p2p/test_peer_id.py @@ -1,37 +1,42 @@ import os import shutil import tempfile +from typing import cast +from unittest.mock import Mock + +from twisted.internet.interfaces import ITransport from hathor.p2p.peer_id import InvalidPeerIdException, PeerId from hathor.p2p.peer_storage import PeerStorage +from hathor.util import not_none from tests import unittest from tests.unittest import TestBuilder class PeerIdTest(unittest.TestCase): - def test_invalid_id(self): + def test_invalid_id(self) -> None: p1 = PeerId() - p1.id = p1.id[::-1] + p1.id = not_none(p1.id)[::-1] self.assertRaises(InvalidPeerIdException, p1.validate) - def test_invalid_public_key(self): + def test_invalid_public_key(self) -> None: p1 = PeerId() p2 = PeerId() p1.public_key = p2.public_key self.assertRaises(InvalidPeerIdException, p1.validate) - def test_invalid_private_key(self): + def test_invalid_private_key(self) -> None: p1 = PeerId() p2 = PeerId() p1.private_key = 
p2.private_key self.assertRaises(InvalidPeerIdException, p1.validate) - def test_no_private_key(self): + def test_no_private_key(self) -> None: p1 = PeerId() p1.private_key = None p1.validate() - def test_create_from_json(self): + def test_create_from_json(self) -> None: p1 = PeerId() data1 = p1.to_json(include_private_key=True) p2 = PeerId.create_from_json(data1) @@ -39,7 +44,7 @@ def test_create_from_json(self): self.assertEqual(data1, data2) p2.validate() - def test_create_from_json_without_private_key(self): + def test_create_from_json_without_private_key(self) -> None: p1 = PeerId() data1 = p1.to_json() # Just to test a part of the code @@ -51,20 +56,20 @@ def test_create_from_json_without_private_key(self): self.assertEqual(data1, data2) p2.validate() - def test_sign_verify(self): + def test_sign_verify(self) -> None: data = b'abacate' p1 = PeerId() signature = p1.sign(data) self.assertTrue(p1.verify_signature(signature, data)) - def test_sign_verify_fail(self): + def test_sign_verify_fail(self) -> None: data = b'abacate' p1 = PeerId() signature = p1.sign(data) signature = signature[::-1] self.assertFalse(p1.verify_signature(signature, data)) - def test_merge_peer(self): + def test_merge_peer(self) -> None: # Testing peer storage with merge of peers peer_storage = PeerStorage() @@ -72,14 +77,14 @@ def test_merge_peer(self): p2 = PeerId() p2.id = p1.id p2.public_key = p1.public_key - p1.public_key = '' + p1.public_key = None peer_storage.add_or_merge(p1) self.assertEqual(len(peer_storage), 1) peer_storage.add_or_merge(p2) - peer = peer_storage[p1.id] + peer = peer_storage[not_none(p1.id)] self.assertEqual(peer.id, p1.id) self.assertEqual(peer.private_key, p1.private_key) self.assertEqual(peer.public_key, p1.public_key) @@ -88,11 +93,11 @@ def test_merge_peer(self): p3 = PeerId() p3.entrypoints.append('1') p3.entrypoints.append('3') - p3.public_key = '' + p3.public_key = None p4 = PeerId() - p4.public_key = '' - p4.private_key = '' + p4.public_key = None + p4.private_key = None p4.id = p3.id p4.entrypoints.append('2') p4.entrypoints.append('3') @@ -103,7 +108,7 @@ def test_merge_peer(self): peer_storage.add_or_merge(p3) self.assertEqual(len(peer_storage), 2) - peer = peer_storage[p3.id] + peer = peer_storage[not_none(p3.id)] self.assertEqual(peer.id, p3.id) self.assertEqual(peer.private_key, p3.private_key) self.assertEqual(peer.entrypoints, ['2', '3', '1']) @@ -111,7 +116,7 @@ def test_merge_peer(self): with self.assertRaises(ValueError): peer_storage.add(p1) - def test_save_peer_file(self): + def test_save_peer_file(self) -> None: import json p = PeerId() @@ -127,7 +132,7 @@ def test_save_peer_file(self): # Removing tmpdir shutil.rmtree(tmpdir) - def test_retry_connection(self): + def test_retry_connection(self) -> None: p = PeerId() interval = p.retry_interval p.increment_retry_attempt(0) @@ -144,26 +149,27 @@ def test_retry_connection(self): self.assertEqual(p.retry_interval, 5) self.assertEqual(p.retry_timestamp, 0) - def test_validate_certificate(self): + def test_validate_certificate(self) -> None: builder = TestBuilder() artifacts = builder.build() - protocol = artifacts.p2p_manager.server_factory.buildProtocol('127.0.0.1') + protocol = artifacts.p2p_manager.server_factory.buildProtocol(Mock()) + + peer = PeerId() - peer = PeerId('testnet') + from OpenSSL import crypto class FakeTransport: - def getPeerCertificate(self): - from OpenSSL import crypto + def getPeerCertificate(self) -> crypto.X509: # we use a new peer here just to save the trouble of manually creating a 
certificate - random_peer = PeerId('testnet') + random_peer = PeerId() return crypto.X509.from_cryptography(random_peer.get_certificate()) - protocol.transport = FakeTransport() + protocol.transport = cast(ITransport, FakeTransport()) result = peer.validate_certificate(protocol) self.assertFalse(result) - def test_retry_logic(self): - peer = PeerId('testnet') + def test_retry_logic(self) -> None: + peer = PeerId() self.assertTrue(peer.can_retry(0)) retry_interval = peer.retry_interval @@ -207,7 +213,7 @@ def test_retry_logic(self): class BasePeerIdTest(unittest.TestCase): __test__ = False - async def test_validate_entrypoint(self): + async def test_validate_entrypoint(self) -> None: manager = self.create_peer('testnet', unlock_wallet=False) peer_id = manager.my_peer peer_id.entrypoints = ['tcp://127.0.0.1:40403'] @@ -230,10 +236,11 @@ async def test_validate_entrypoint(self): protocol.connection_string = None peer_id.entrypoints = ['tcp://127.0.0.1:40403'] + from collections import namedtuple + Peer = namedtuple('Peer', 'host') + class FakeTransport: - def getPeer(self): - from collections import namedtuple - Peer = namedtuple('Peer', 'host') + def getPeer(self) -> Peer: return Peer(host='127.0.0.1') protocol.transport = FakeTransport() result = await peer_id.validate_entrypoint(protocol) diff --git a/tests/p2p/test_protocol.py b/tests/p2p/test_protocol.py index 1aadea540..a834f9e20 100644 --- a/tests/p2p/test_protocol.py +++ b/tests/p2p/test_protocol.py @@ -2,7 +2,7 @@ from typing import Optional from unittest.mock import Mock, patch -from twisted.internet.defer import inlineCallbacks +from twisted.internet.protocol import Protocol from twisted.python.failure import Failure from hathor.p2p.peer_id import PeerId @@ -15,7 +15,7 @@ class BaseHathorProtocolTestCase(unittest.TestCase): __test__ = False - def setUp(self): + def setUp(self) -> None: super().setUp() self.network = 'testnet' self.peer_id1 = PeerId() @@ -32,52 +32,49 @@ def assertAndStepConn(self, conn: FakeConnection, regex1: bytes, regex2: Optiona self.assertRegex(conn.peek_tr2_value(), regex2) conn.run_one_step() - def assertIsConnected(self, conn=None): + def assertIsConnected(self, conn: FakeConnection | None = None) -> None: if conn is None: conn = self.conn self.assertFalse(conn.tr1.disconnecting) self.assertFalse(conn.tr2.disconnecting) - def assertIsNotConnected(self, conn=None): + def assertIsNotConnected(self, conn: FakeConnection | None = None) -> None: if conn is None: conn = self.conn self.assertTrue(conn.tr1.disconnecting) self.assertTrue(conn.tr2.disconnecting) - def _send_cmd(self, proto, cmd, payload=None): + def _send_cmd(self, proto: Protocol, cmd: str, payload: str | None = None) -> None: if not payload: line = '{}\r\n'.format(cmd) else: line = '{} {}\r\n'.format(cmd, payload) - if isinstance(line, str): - line = line.encode('utf-8') + proto.dataReceived(line.encode('utf-8')) - return proto.dataReceived(line) - - def _check_result_only_cmd(self, result, expected_cmd): + def _check_result_only_cmd(self, result: bytes, expected_cmd: bytes) -> None: cmd_list = [] for line in result.split(b'\r\n'): cmd, _, _ = line.partition(b' ') cmd_list.append(cmd) self.assertIn(expected_cmd, cmd_list) - def _check_cmd_and_value(self, result, expected): + def _check_cmd_and_value(self, result: bytes, expected: tuple[bytes, bytes]) -> None: result_list = [] for line in result.split(b'\r\n'): cmd, _, data = line.partition(b' ') result_list.append((cmd, data)) self.assertIn(expected, result_list) - def test_on_connect(self): + def 
test_on_connect(self) -> None: self._check_result_only_cmd(self.conn.peek_tr1_value(), b'HELLO') - def test_invalid_command(self): + def test_invalid_command(self) -> None: self._send_cmd(self.conn.proto1, 'INVALID-CMD') self.conn.proto1.state.handle_error('') self.assertTrue(self.conn.tr1.disconnecting) - def test_rate_limit(self): + def test_rate_limit(self) -> None: hits = 1 window = 60 @@ -99,7 +96,7 @@ def test_rate_limit(self): self.conn.proto1.connections = None self.conn.proto1.on_disconnect(Failure(Exception())) - def test_invalid_size(self): + def test_invalid_size(self) -> None: self.conn.tr1.clear() cmd = b'HELLO ' max_payload_bytes = HathorLineReceiver.MAX_LENGTH - len(cmd) @@ -123,32 +120,32 @@ def test_invalid_size(self): line_length_exceeded_wrapped.assert_called_once() self.assertTrue(self.conn.tr1.disconnecting) - def test_invalid_payload(self): + def test_invalid_payload(self) -> None: self.conn.run_one_step() # HELLO self.conn.run_one_step() # PEER-ID self.conn.run_one_step() # READY with self.assertRaises(JSONDecodeError): self._send_cmd(self.conn.proto1, 'PEERS', 'abc') - def test_invalid_hello1(self): + def test_invalid_hello1(self) -> None: self.conn.tr1.clear() self._send_cmd(self.conn.proto1, 'HELLO') self._check_result_only_cmd(self.conn.peek_tr1_value(), b'ERROR') self.assertTrue(self.conn.tr1.disconnecting) - def test_invalid_hello2(self): + def test_invalid_hello2(self) -> None: self.conn.tr1.clear() self._send_cmd(self.conn.proto1, 'HELLO', 'invalid_payload') self._check_result_only_cmd(self.conn.peek_tr1_value(), b'ERROR') self.assertTrue(self.conn.tr1.disconnecting) - def test_invalid_hello3(self): + def test_invalid_hello3(self) -> None: self.conn.tr1.clear() self._send_cmd(self.conn.proto1, 'HELLO', '{}') self._check_result_only_cmd(self.conn.peek_tr1_value(), b'ERROR') self.assertTrue(self.conn.tr1.disconnecting) - def test_invalid_hello4(self): + def test_invalid_hello4(self) -> None: self.conn.tr1.clear() self._send_cmd( self.conn.proto1, @@ -158,7 +155,7 @@ def test_invalid_hello4(self): self._check_result_only_cmd(self.conn.peek_tr1_value(), b'ERROR') self.assertTrue(self.conn.tr1.disconnecting) - def test_invalid_hello5(self): + def test_invalid_hello5(self) -> None: # hello with clocks too far apart self.conn.tr1.clear() data = self.conn.proto2.state._get_hello_data() @@ -171,14 +168,14 @@ def test_invalid_hello5(self): self._check_result_only_cmd(self.conn.peek_tr1_value(), b'ERROR') self.assertTrue(self.conn.tr1.disconnecting) - def test_valid_hello(self): + def test_valid_hello(self) -> None: self.conn.run_one_step() # HELLO self._check_result_only_cmd(self.conn.peek_tr1_value(), b'PEER-ID') self._check_result_only_cmd(self.conn.peek_tr2_value(), b'PEER-ID') self.assertFalse(self.conn.tr1.disconnecting) self.assertFalse(self.conn.tr2.disconnecting) - def test_invalid_same_peer_id(self): + def test_invalid_same_peer_id(self) -> None: manager3 = self.create_peer(self.network, peer_id=self.peer_id1) conn = FakeConnection(self.manager1, manager3) conn.run_one_step() # HELLO @@ -186,7 +183,7 @@ def test_invalid_same_peer_id(self): self._check_result_only_cmd(conn.peek_tr1_value(), b'ERROR') self.assertTrue(conn.tr1.disconnecting) - def test_invalid_same_peer_id2(self): + def test_invalid_same_peer_id2(self) -> None: """ We connect nodes 1-2 and 1-3. Nodes 2 and 3 have the same peer_id. 
The connections are established simultaneously, so we do not detect a peer id duplication in PEER_ID @@ -246,7 +243,7 @@ def test_invalid_same_peer_id2(self): # connection is still up self.assertIsConnected(conn_alive) - def test_invalid_different_network(self): + def test_invalid_different_network(self) -> None: manager3 = self.create_peer(network='mainnet') conn = FakeConnection(self.manager1, manager3) conn.run_one_step() # HELLO @@ -254,23 +251,23 @@ def test_invalid_different_network(self): self.assertTrue(conn.tr1.disconnecting) conn.run_one_step() # ERROR - def test_send_invalid_unicode(self): + def test_send_invalid_unicode(self) -> None: # \xff is an invalid unicode. self.conn.proto1.dataReceived(b'\xff\r\n') self.assertTrue(self.conn.tr1.disconnecting) - def test_on_disconnect(self): + def test_on_disconnect(self) -> None: self.assertIn(self.conn.proto1, self.manager1.connections.handshaking_peers) self.conn.disconnect(Failure(Exception('testing'))) self.assertNotIn(self.conn.proto1, self.manager1.connections.handshaking_peers) - def test_on_disconnect_after_hello(self): + def test_on_disconnect_after_hello(self) -> None: self.conn.run_one_step() # HELLO self.assertIn(self.conn.proto1, self.manager1.connections.handshaking_peers) self.conn.disconnect(Failure(Exception('testing'))) self.assertNotIn(self.conn.proto1, self.manager1.connections.handshaking_peers) - def test_on_disconnect_after_peer_id(self): + def test_on_disconnect_after_peer_id(self) -> None: self.conn.run_one_step() # HELLO self.assertIn(self.conn.proto1, self.manager1.connections.handshaking_peers) # No peer id in the peer_storage (known_peers) @@ -291,7 +288,7 @@ def test_on_disconnect_after_peer_id(self): # Peer id 2 removed from peer_storage (known_peers) after disconnection and after looping call self.assertNotIn(self.peer_id2.id, self.manager1.connections.peer_storage) - def test_idle_connection(self): + def test_idle_connection(self) -> None: self.clock.advance(self._settings.PEER_IDLE_TIMEOUT - 10) self.assertIsConnected(self.conn) self.clock.advance(15) @@ -301,7 +298,7 @@ def test_idle_connection(self): class SyncV1HathorProtocolTestCase(unittest.SyncV1Params, BaseHathorProtocolTestCase): __test__ = True - def test_two_connections(self): + def test_two_connections(self) -> None: self.conn.run_one_step() # HELLO self.conn.run_one_step() # PEER-ID self.conn.run_one_step() # READY @@ -318,8 +315,7 @@ def test_two_connections(self): self._check_result_only_cmd(self.conn.peek_tr1_value(), b'PEERS') self.conn.run_one_step() - @inlineCallbacks - def test_get_data(self): + def test_get_data(self) -> None: self.conn.run_one_step() # HELLO self.conn.run_one_step() # PEER-ID self.conn.run_one_step() # READY @@ -329,11 +325,11 @@ def test_get_data(self): self.conn.run_one_step() # TIPS self.assertIsConnected() missing_tx = '00000000228dfcd5dec1c9c6263f6430a5b4316bb9e3decb9441a6414bfd8697' - yield self._send_cmd(self.conn.proto1, 'GET-DATA', missing_tx) + self._send_cmd(self.conn.proto1, 'GET-DATA', missing_tx) self._check_result_only_cmd(self.conn.peek_tr1_value(), b'NOT-FOUND') self.conn.run_one_step() - def test_valid_hello_and_peer_id(self): + def test_valid_hello_and_peer_id(self) -> None: self._check_result_only_cmd(self.conn.peek_tr1_value(), b'HELLO') self._check_result_only_cmd(self.conn.peek_tr2_value(), b'HELLO') self.conn.run_one_step() # HELLO @@ -358,7 +354,7 @@ def test_valid_hello_and_peer_id(self): self.conn.run_one_step() # TIPS self.assertIsConnected() - def test_send_ping(self): + def 
test_send_ping(self) -> None: self.conn.run_one_step() # HELLO self.conn.run_one_step() # PEER-ID self.conn.run_one_step() # READY @@ -379,8 +375,7 @@ def test_send_ping(self): self.conn.run_one_step() self.assertEqual(self.clock.seconds(), self.conn.proto1.last_message) - @inlineCallbacks - def test_invalid_peer_id(self): + def test_invalid_peer_id(self) -> None: self.conn.run_one_step() # HELLO self.conn.run_one_step() # PEER-ID self.conn.run_one_step() # READY @@ -389,7 +384,7 @@ def test_invalid_peer_id(self): self.conn.run_one_step() # PEERS self.conn.run_one_step() # TIPS invalid_payload = {'id': '123', 'entrypoints': ['tcp://localhost:1234']} - yield self._send_cmd(self.conn.proto1, 'PEER-ID', json_dumps(invalid_payload)) + self._send_cmd(self.conn.proto1, 'PEER-ID', json_dumps(invalid_payload)) self._check_result_only_cmd(self.conn.peek_tr1_value(), b'ERROR') self.assertTrue(self.conn.tr1.disconnecting) @@ -397,7 +392,7 @@ def test_invalid_peer_id(self): class SyncV2HathorProtocolTestCase(unittest.SyncV2Params, BaseHathorProtocolTestCase): __test__ = True - def test_two_connections(self): + def test_two_connections(self) -> None: self.assertAndStepConn(self.conn, b'^HELLO') self.assertAndStepConn(self.conn, b'^PEER-ID') self.assertAndStepConn(self.conn, b'^READY') @@ -425,8 +420,7 @@ def test_two_connections(self): self.assertIsConnected() - @inlineCallbacks - def test_get_data(self): + def test_get_data(self) -> None: self.assertAndStepConn(self.conn, b'^HELLO') self.assertAndStepConn(self.conn, b'^PEER-ID') self.assertAndStepConn(self.conn, b'^READY') @@ -442,11 +436,11 @@ def test_get_data(self): 'last_block_hash': missing_tx, 'start_from': [self._settings.GENESIS_BLOCK_HASH.hex()] } - yield self._send_cmd(self.conn.proto1, 'GET-TRANSACTIONS-BFS', json_dumps(payload)) + self._send_cmd(self.conn.proto1, 'GET-TRANSACTIONS-BFS', json_dumps(payload)) self._check_result_only_cmd(self.conn.peek_tr1_value(), b'NOT-FOUND') self.conn.run_one_step() - def test_valid_hello_and_peer_id(self): + def test_valid_hello_and_peer_id(self) -> None: self.assertAndStepConn(self.conn, b'^HELLO') self.assertAndStepConn(self.conn, b'^PEER-ID') self.assertAndStepConn(self.conn, b'^READY') @@ -477,7 +471,7 @@ def test_valid_hello_and_peer_id(self): self.assertAndStepConn(self.conn, b'^BEST-BLOCK') self.assertIsConnected() - def test_send_ping(self): + def test_send_ping(self) -> None: self.assertAndStepConn(self.conn, b'^HELLO') self.assertAndStepConn(self.conn, b'^PEER-ID') self.assertAndStepConn(self.conn, b'^READY') diff --git a/tests/p2p/test_rate_limiter.py b/tests/p2p/test_rate_limiter.py index 83e7b6e56..3eace5471 100644 --- a/tests/p2p/test_rate_limiter.py +++ b/tests/p2p/test_rate_limiter.py @@ -1,13 +1,14 @@ from hathor.p2p.rate_limiter import RateLimiter +from hathor.util import not_none from tests import unittest class RateLimiterTestCase(unittest.TestCase): - def setUp(self): + def setUp(self) -> None: super().setUp() self.rate_limiter = RateLimiter(reactor=self.clock) - def test_limiter(self): + def test_limiter(self) -> None: key = 'test' self.rate_limiter.set_limit(key, 2, 2) @@ -31,7 +32,7 @@ def test_limiter(self): self.assertTrue(self.rate_limiter.add_hit(key)) # Get limit - self.assertEqual(self.rate_limiter.get_limit(key).max_hits, 2) + self.assertEqual(not_none(self.rate_limiter.get_limit(key)).max_hits, 2) # Unset limit self.rate_limiter.unset_limit(key) diff --git a/tests/p2p/test_split_brain.py b/tests/p2p/test_split_brain.py index 68ee24609..3a2352853 100644 --- 
a/tests/p2p/test_split_brain.py +++ b/tests/p2p/test_split_brain.py @@ -3,8 +3,10 @@ from hathor.daa import TestMode from hathor.graphviz import GraphvizVisualizer +from hathor.manager import HathorManager from hathor.simulator import FakeConnection from hathor.simulator.utils import add_new_block +from hathor.util import not_none from hathor.wallet import HDWallet from tests import unittest from tests.utils import add_blocks_unlock_reward, add_new_double_spending, add_new_transactions @@ -13,7 +15,7 @@ class BaseHathorSyncMethodsTestCase(unittest.TestCase): __test__ = False - def setUp(self): + def setUp(self) -> None: super().setUp() first_timestamp = self._settings.GENESIS_BLOCK_TIMESTAMP @@ -21,13 +23,13 @@ def setUp(self): self.network = 'testnet' - def create_peer(self, network, unlock_wallet=True): + def create_peer(self, network: str, unlock_wallet: bool = True) -> HathorManager: # type: ignore[override] wallet = HDWallet(gap_limit=2) wallet._manually_initialize() - manager = super().create_peer(network, wallet=wallet) + manager: HathorManager = super().create_peer(network, wallet=wallet) manager.daa.TEST_MODE = TestMode.TEST_ALL_WEIGHT - manager.avg_time_between_blocks = 64 + # manager.avg_time_between_blocks = 64 # FIXME: This property is not defined. Fix this test. # Don't use it anywhere else. It is unsafe to generate mnemonic words like this. # It should be used only for testing purposes. @@ -37,14 +39,14 @@ def create_peer(self, network, unlock_wallet=True): return manager @pytest.mark.slow - def test_split_brain_plain(self): + def test_split_brain_plain(self) -> None: debug_pdf = False manager1 = self.create_peer(self.network, unlock_wallet=True) - manager1.avg_time_between_blocks = 3 + # manager1.avg_time_between_blocks = 3 # FIXME: This property is not defined. Fix this test. manager2 = self.create_peer(self.network, unlock_wallet=True) - manager2.avg_time_between_blocks = 3 + # manager2.avg_time_between_blocks = 3 # FIXME: This property is not defined. Fix this test. for _ in range(10): add_new_block(manager1, advance_clock=1) @@ -100,12 +102,12 @@ def test_split_brain_plain(self): self.assertConsensusValid(manager2) @pytest.mark.slow - def test_split_brain_only_blocks_different_height(self): + def test_split_brain_only_blocks_different_height(self) -> None: manager1 = self.create_peer(self.network, unlock_wallet=True) - manager1.avg_time_between_blocks = 3 + # manager1.avg_time_between_blocks = 3 # FIXME: This property is not defined. Fix this test. manager2 = self.create_peer(self.network, unlock_wallet=True) - manager2.avg_time_between_blocks = 3 + # manager2.avg_time_between_blocks = 3 # FIXME: This property is not defined. Fix this test. 
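# For reference (not part of the diff): the `# type: ignore[override]` on
# create_peer earlier in this hunk is needed because the override narrows the
# base class signature, which violates the substitutability rule mypy
# enforces; the commented-out avg_time_between_blocks lines fail strict mypy
# for a related reason, since HathorManager declares no such attribute. A
# standalone sketch of the override error (Base is hypothetical, not hathor
# code):
class Base:
    def create_peer(self, network: str, **kwargs: object) -> object:
        return object()

class Narrowed(Base):
    # Without the ignore, mypy reports: Signature of "create_peer"
    # incompatible with supertype "Base", because the **kwargs were dropped.
    def create_peer(self, network: str, unlock_wallet: bool = True) -> object:  # type: ignore[override]
        return super().create_peer(network)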
for _ in range(10): add_new_block(manager1, advance_clock=1) @@ -117,7 +119,7 @@ def test_split_brain_only_blocks_different_height(self): # Add one more block to manager1, so it's the winner chain add_new_block(manager1, advance_clock=1) - block_tip1 = manager1.tx_storage.indexes.height.get_tip() + block_tip1 = not_none(manager1.tx_storage.indexes).height.get_tip() self.assertConsensusValid(manager1) self.assertConsensusValid(manager2) @@ -140,17 +142,17 @@ def test_split_brain_only_blocks_different_height(self): self.assertConsensusValid(manager2) self.assertConsensusEqual(manager1, manager2) - self.assertEqual(block_tip1, manager1.tx_storage.indexes.height.get_tip()) - self.assertEqual(block_tip1, manager2.tx_storage.indexes.height.get_tip()) + self.assertEqual(block_tip1, not_none(manager1.tx_storage.indexes).height.get_tip()) + self.assertEqual(block_tip1, not_none(manager2.tx_storage.indexes).height.get_tip()) # XXX We must decide what to do when different chains have the same score # For now we are voiding everyone until the first common block - def test_split_brain_only_blocks_same_height(self): + def test_split_brain_only_blocks_same_height(self) -> None: manager1 = self.create_peer(self.network, unlock_wallet=True) - manager1.avg_time_between_blocks = 3 + # manager1.avg_time_between_blocks = 3 # FIXME: This property is not defined. Fix this test. manager2 = self.create_peer(self.network, unlock_wallet=True) - manager2.avg_time_between_blocks = 3 + # manager2.avg_time_between_blocks = 3 # FIXME: This property is not defined. Fix this test. for _ in range(10): add_new_block(manager1, advance_clock=1) @@ -268,12 +270,12 @@ def test_split_brain_only_blocks_same_height(self): self.assertEqual(len(manager2.tx_storage.get_best_block_tips()), 1) self.assertCountEqual(manager2.tx_storage.get_best_block_tips(), {new_block.hash}) - def test_split_brain_only_blocks_bigger_score(self): + def test_split_brain_only_blocks_bigger_score(self) -> None: manager1 = self.create_peer(self.network, unlock_wallet=True) - manager1.avg_time_between_blocks = 3 + # manager1.avg_time_between_blocks = 3 # FIXME: This property is not defined. Fix this test. manager2 = self.create_peer(self.network, unlock_wallet=True) - manager2.avg_time_between_blocks = 3 + # manager2.avg_time_between_blocks = 3 # FIXME: This property is not defined. Fix this test. # Start with 1 because of the genesis block manager2_blocks = 1 @@ -328,13 +330,13 @@ def test_split_brain_only_blocks_bigger_score(self): # Assert that the consensus had the manager2 chain self.assertEqual(winners2_blocks, manager2_blocks) - def test_split_brain_no_double_spending(self): + def test_split_brain_no_double_spending(self) -> None: manager1 = self.create_peer(self.network, unlock_wallet=True) - manager1.avg_time_between_blocks = 3 + # manager1.avg_time_between_blocks = 3 # FIXME: This property is not defined. Fix this test. manager1.connections.disable_rate_limiter() manager2 = self.create_peer(self.network, unlock_wallet=True) - manager2.avg_time_between_blocks = 3 + # manager2.avg_time_between_blocks = 3 # FIXME: This property is not defined. Fix this test. 
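# For reference (not part of the diff): `not_none(...)`, used earlier in this
# file's hunks for `manager1.tx_storage.indexes` and the height-index tips,
# is the narrow-or-fail idiom for Optional values. A minimal sketch of the
# pattern; hathor.util.not_none may differ in detail:
from typing import Optional, TypeVar

T = TypeVar('T')

def not_none(value: Optional[T]) -> T:
    # Fail fast with a clear assertion instead of letting None flow onward;
    # mypy then treats the result as T, so chained attribute access checks.
    assert value is not None, 'value is unexpectedly None'
    return value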
manager2.connections.disable_rate_limiter() winner_blocks = 1 diff --git a/tests/p2p/test_split_brain2.py b/tests/p2p/test_split_brain2.py index fc4601898..e1622fb8a 100644 --- a/tests/p2p/test_split_brain2.py +++ b/tests/p2p/test_split_brain2.py @@ -10,7 +10,7 @@ class BaseHathorSyncMethodsTestCase(SimulatorTestCase): __test__ = False @pytest.mark.flaky(max_runs=3, min_passes=1) - def test_split_brain(self): + def test_split_brain(self) -> None: debug_pdf = False manager1 = self.create_peer() diff --git a/tests/p2p/test_sync.py b/tests/p2p/test_sync.py index e387bba89..0b23a23e3 100644 --- a/tests/p2p/test_sync.py +++ b/tests/p2p/test_sync.py @@ -5,7 +5,9 @@ from hathor.p2p.protocol import PeerIdState from hathor.p2p.sync_version import SyncVersion from hathor.simulator import FakeConnection +from hathor.transaction import Block, Transaction from hathor.transaction.storage.exceptions import TransactionIsNotABlock +from hathor.util import not_none from tests import unittest from tests.utils import add_blocks_unlock_reward @@ -13,7 +15,7 @@ class BaseHathorSyncMethodsTestCase(unittest.TestCase): __test__ = False - def setUp(self): + def setUp(self) -> None: super().setUp() # import sys @@ -27,49 +29,48 @@ def setUp(self): self.genesis = self.manager1.tx_storage.get_all_genesis() self.genesis_blocks = [tx for tx in self.genesis if tx.is_block] - def _add_new_tx(self, address, value): - from hathor.transaction import Transaction + def _add_new_tx(self, address: str, value: int) -> Transaction: from hathor.wallet.base_wallet import WalletOutputInfo outputs = [] outputs.append( WalletOutputInfo(address=decode_address(address), value=int(value), timelock=None)) - tx = self.manager1.wallet.prepare_transaction_compute_inputs(Transaction, outputs, self.manager1.tx_storage) + tx: Transaction = self.manager1.wallet.prepare_transaction_compute_inputs( + Transaction, outputs, self.manager1.tx_storage + ) tx.timestamp = int(self.clock.seconds()) tx.storage = self.manager1.tx_storage tx.weight = 10 tx.parents = self.manager1.get_new_tx_parents() self.manager1.cpu_mining_service.resolve(tx) - self.manager1.verification_service.verify(tx) self.manager1.propagate_tx(tx) self.clock.advance(10) return tx - def _add_new_transactions(self, num_txs): + def _add_new_transactions(self, num_txs: int) -> list[Transaction]: txs = [] for _ in range(num_txs): - address = self.get_address(0) + address = not_none(self.get_address(0)) value = self.rng.choice([5, 10, 50, 100, 120]) tx = self._add_new_tx(address, value) txs.append(tx) return txs - def _add_new_block(self, propagate=True): - block = self.manager1.generate_mining_block() + def _add_new_block(self, propagate: bool = True) -> Block: + block: Block = self.manager1.generate_mining_block() self.assertTrue(self.manager1.cpu_mining_service.resolve(block)) - self.manager1.verification_service.verify(block) self.manager1.on_new_tx(block, propagate_to_peers=propagate) self.clock.advance(10) return block - def _add_new_blocks(self, num_blocks, propagate=True): + def _add_new_blocks(self, num_blocks: int, propagate: bool = True) -> list[Block]: blocks = [] for _ in range(num_blocks): blocks.append(self._add_new_block(propagate=propagate)) return blocks - def test_get_blocks_before(self): + def test_get_blocks_before(self) -> None: genesis_block = self.genesis_blocks[0] result = self.manager1.tx_storage.get_blocks_before(genesis_block.hash) self.assertEqual(0, len(result)) @@ -88,7 +89,7 @@ def test_get_blocks_before(self): expected_result = expected_result[::-1] 
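# For reference (not part of the diff): with the annotations added to the
# helpers above, their results stop being Any, so mypy checks what the tests
# do with them; the explicit verification_service.verify(...) calls were also
# dropped before propagation, presumably because propagation now performs
# verification itself. A hypothetical usage showing the typing payoff:
blocks = self._add_new_blocks(2)  # now inferred as list[Block], not Any
latest = blocks[-1].timestamp     # attribute access is type-checked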
self.assertEqual(result, expected_result) - def test_block_sync_only_genesis(self): + def test_block_sync_only_genesis(self) -> None: manager2 = self.create_peer(self.network) self.assertEqual(manager2.state, manager2.NodeState.READY) @@ -102,7 +103,7 @@ def test_block_sync_only_genesis(self): self.assertEqual(node_sync.synced_timestamp, node_sync.peer_timestamp) self.assertTipsEqual(self.manager1, manager2) - def test_block_sync_new_blocks(self): + def test_block_sync_new_blocks(self) -> None: self._add_new_blocks(15) manager2 = self.create_peer(self.network) @@ -123,7 +124,7 @@ def test_block_sync_new_blocks(self): self.assertConsensusValid(self.manager1) self.assertConsensusValid(manager2) - def test_block_sync_many_new_blocks(self): + def test_block_sync_many_new_blocks(self) -> None: self._add_new_blocks(150) manager2 = self.create_peer(self.network) @@ -143,7 +144,7 @@ def test_block_sync_many_new_blocks(self): self.assertConsensusValid(self.manager1) self.assertConsensusValid(manager2) - def test_block_sync_new_blocks_and_txs(self): + def test_block_sync_new_blocks_and_txs(self) -> None: self._add_new_blocks(25) self._add_new_transactions(3) self._add_new_blocks(4) @@ -172,7 +173,7 @@ def test_block_sync_new_blocks_and_txs(self): self.assertConsensusValid(self.manager1) self.assertConsensusValid(manager2) - def test_tx_propagation_nat_peers(self): + def test_tx_propagation_nat_peers(self) -> None: """ manager1 <- manager2 <- manager3 """ self._add_new_blocks(25) @@ -229,7 +230,7 @@ def test_tx_propagation_nat_peers(self): self.assertConsensusValid(self.manager2) self.assertConsensusValid(self.manager3) - def test_check_sync_state(self): + def test_check_sync_state(self) -> None: """Tests if the LoopingCall to check the sync state works""" # Initially it should do nothing, since there is no recent activity self.manager1.check_sync_state() @@ -249,7 +250,7 @@ def test_check_sync_state(self): class SyncV1HathorSyncMethodsTestCase(unittest.SyncV1Params, BaseHathorSyncMethodsTestCase): __test__ = True - def test_downloader(self): + def test_downloader(self) -> None: from hathor.p2p.sync_v1.agent import NodeSyncTimestamp blocks = self._add_new_blocks(3) @@ -326,7 +327,7 @@ def test_downloader(self): downloader.check_downloading_queue() self.assertEqual(len(downloader.downloading_deque), 0) - def _downloader_bug_setup(self): + def _downloader_bug_setup(self) -> None: """ This is an auxiliary method to setup a bug scenario.""" from hathor.p2p.sync_version import SyncVersion @@ -390,7 +391,7 @@ def _downloader_bug_setup(self): # by this point everything should be set to so we can trigger the bug, any issues that happen before this # comment are an issue in setting up the scenario, not related to the problem itself - def test_downloader_retry_reorder(self): + def test_downloader_retry_reorder(self) -> None: """ Reproduce the bug that causes a reorder in the downloader queue. The tracking issue for this bug is #465 @@ -454,7 +455,7 @@ def test_downloader_retry_reorder(self): # if the fix is applied, we would see tx_A in storage by this point self.assertTrue(self.manager_bug.tx_storage.transaction_exists(self.tx_A.hash)) - def test_downloader_disconnect(self): + def test_downloader_disconnect(self) -> None: """ This is related to test_downloader_retry_reorder, but it basically tests the change in behavior instead. When a peer disconnects it should be immediately removed from the tx-detail's connections list. 
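A note on the recurring assertIsInstance-to-assert-isinstance swaps in these hunks: unittest's assertIsInstance checks the type at runtime, but mypy does not narrow the variable from it, so later accesses such as state1.sync_agent or state1.peer_best_blockchain would still be flagged. A bare assert isinstance is understood by the type checker. A minimal sketch, with ReadyState standing in for whichever class is being narrowed to:

from hathor.p2p.states import ReadyState

def use_sync_agent(state: object) -> None:
    assert isinstance(state, ReadyState)  # mypy narrows `state` from here on
    agent = state.sync_agent              # attribute access now type-checks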
@@ -474,7 +475,7 @@ def test_downloader_disconnect(self): class SyncV2HathorSyncMethodsTestCase(unittest.SyncV2Params, BaseHathorSyncMethodsTestCase): __test__ = True - def test_sync_metadata(self): + def test_sync_metadata(self) -> None: # test if the synced peer will build all tx metadata correctly height = 0 @@ -519,7 +520,7 @@ def test_sync_metadata(self): self.assertCountEqual(meta1.conflict_with or [], meta2.conflict_with or []) self.assertCountEqual(meta1.twins or [], meta2.twins or []) - def test_tx_propagation_nat_peers(self): + def test_tx_propagation_nat_peers(self) -> None: super().test_tx_propagation_nat_peers() node_sync1 = self.conn1.proto1.state.sync_agent @@ -534,7 +535,7 @@ def test_tx_propagation_nat_peers(self): self.assertEqual(node_sync2.peer_best_block.height, self.manager2.tx_storage.get_height_best_block()) self.assertConsensusEqual(self.manager2, self.manager3) - def test_block_sync_new_blocks_and_txs(self): + def test_block_sync_new_blocks_and_txs(self) -> None: self._add_new_blocks(25) self._add_new_transactions(3) self._add_new_blocks(4) @@ -563,7 +564,7 @@ def test_block_sync_new_blocks_and_txs(self): self.assertConsensusValid(self.manager1) self.assertConsensusValid(manager2) - def test_block_sync_many_new_blocks(self): + def test_block_sync_many_new_blocks(self) -> None: self._add_new_blocks(150) manager2 = self.create_peer(self.network) @@ -584,7 +585,7 @@ def test_block_sync_many_new_blocks(self): self.assertConsensusValid(self.manager1) self.assertConsensusValid(manager2) - def test_block_sync_new_blocks(self): + def test_block_sync_new_blocks(self) -> None: self._add_new_blocks(15) manager2 = self.create_peer(self.network) @@ -605,7 +606,7 @@ def test_block_sync_new_blocks(self): self.assertConsensusValid(self.manager1) self.assertConsensusValid(manager2) - def test_full_sync(self): + def test_full_sync(self) -> None: # 10 blocks blocks = self._add_new_blocks(10) # N blocks to unlock the reward @@ -677,7 +678,7 @@ def test_full_sync(self): self.assertEqual(len(manager2.tx_storage.indexes.mempool_tips.get()), 1) self.assertEqual(len(self.manager1.tx_storage.indexes.mempool_tips.get()), 1) - def test_block_sync_checkpoints(self): + def test_block_sync_checkpoints(self) -> None: TOTAL_BLOCKS = 30 LAST_CHECKPOINT = 15 FIRST_CHECKPOINT = LAST_CHECKPOINT // 2 @@ -718,7 +719,7 @@ def test_block_sync_checkpoints(self): self.assertConsensusValid(self.manager1) self.assertConsensusValid(manager2) - def test_block_sync_only_genesis(self): + def test_block_sync_only_genesis(self) -> None: manager2 = self.create_peer(self.network) self.assertEqual(manager2.state, manager2.NodeState.READY) diff --git a/tests/p2p/test_sync_bridge.py b/tests/p2p/test_sync_bridge.py index cdf000627..9c9024be0 100644 --- a/tests/p2p/test_sync_bridge.py +++ b/tests/p2p/test_sync_bridge.py @@ -5,7 +5,7 @@ class MixedSyncRandomSimulatorTestCase(SimulatorTestCase): __test__ = True - def test_the_three_transacting_miners(self): + def test_the_three_transacting_miners(self) -> None: manager1 = self.create_peer(enable_sync_v1=True, enable_sync_v2=False) manager2 = self.create_peer(enable_sync_v1=True, enable_sync_v2=True) manager3 = self.create_peer(enable_sync_v1=False, enable_sync_v2=True) @@ -44,7 +44,7 @@ def test_the_three_transacting_miners(self): # sync-v2 consensus test is more lenient (if sync-v1 assert passes sync-v2 assert will pass too) self.assertConsensusEqualSyncV2(manager_a, manager_b, strict_sync_v2_indexes=False) - def test_bridge_with_late_v2(self): + def 
test_bridge_with_late_v2(self) -> None: manager1 = self.create_peer(enable_sync_v1=True, enable_sync_v2=False) manager2 = self.create_peer(enable_sync_v1=True, enable_sync_v2=True) manager3 = self.create_peer(enable_sync_v1=False, enable_sync_v2=True) diff --git a/tests/p2p/test_sync_enabled.py b/tests/p2p/test_sync_enabled.py index a352c08a0..f681f90a0 100644 --- a/tests/p2p/test_sync_enabled.py +++ b/tests/p2p/test_sync_enabled.py @@ -5,7 +5,7 @@ class BaseRandomSimulatorTestCase(SimulatorTestCase): - def test_new_node_disabled(self): + def test_new_node_disabled(self) -> None: manager1 = self.create_peer() manager1.allow_mining_without_peers() @@ -39,7 +39,7 @@ def test_new_node_disabled(self): v2 = list(manager2.tx_storage.get_all_transactions()) self.assertEqual(3, len(v2)) - def test_sync_rotate(self): + def test_sync_rotate(self) -> None: manager1 = self.create_peer() manager1.connections.MAX_ENABLED_SYNC = 3 other_managers = [self.create_peer() for _ in range(15)] diff --git a/tests/p2p/test_sync_mempool.py b/tests/p2p/test_sync_mempool.py index f2a0219b3..27c518552 100644 --- a/tests/p2p/test_sync_mempool.py +++ b/tests/p2p/test_sync_mempool.py @@ -1,6 +1,8 @@ from hathor.crypto.util import decode_address from hathor.graphviz import GraphvizVisualizer from hathor.simulator import FakeConnection +from hathor.transaction import Block, Transaction +from hathor.util import not_none from tests import unittest from tests.utils import add_blocks_unlock_reward @@ -8,7 +10,7 @@ class BaseHathorSyncMempoolTestCase(unittest.TestCase): __test__ = False - def setUp(self): + def setUp(self) -> None: super().setUp() self.network = 'testnet' @@ -18,7 +20,7 @@ def setUp(self): self.genesis = self.manager1.tx_storage.get_all_genesis() self.genesis_blocks = [tx for tx in self.genesis if tx.is_block] - def _add_new_tx(self, address, value): + def _add_new_tx(self, address: str, value: int) -> Transaction: from hathor.transaction import Transaction from hathor.wallet.base_wallet import WalletOutputInfo @@ -26,41 +28,41 @@ def _add_new_tx(self, address, value): outputs.append( WalletOutputInfo(address=decode_address(address), value=int(value), timelock=None)) - tx = self.manager1.wallet.prepare_transaction_compute_inputs(Transaction, outputs, self.manager1.tx_storage) + tx: Transaction = self.manager1.wallet.prepare_transaction_compute_inputs( + Transaction, outputs, self.manager1.tx_storage + ) tx.timestamp = int(self.clock.seconds()) tx.storage = self.manager1.tx_storage tx.weight = 10 tx.parents = self.manager1.get_new_tx_parents() self.manager1.cpu_mining_service.resolve(tx) - self.manager1.verification_service.verify(tx) self.manager1.propagate_tx(tx) self.clock.advance(10) return tx - def _add_new_transactions(self, num_txs): + def _add_new_transactions(self, num_txs: int) -> list[Transaction]: txs = [] for _ in range(num_txs): - address = self.get_address(0) + address = not_none(self.get_address(0)) value = self.rng.choice([5, 10, 50, 100, 120]) tx = self._add_new_tx(address, value) txs.append(tx) return txs - def _add_new_block(self, propagate=True): - block = self.manager1.generate_mining_block() + def _add_new_block(self, propagate: bool = True) -> Block: + block: Block = self.manager1.generate_mining_block() self.assertTrue(self.manager1.cpu_mining_service.resolve(block)) - self.manager1.verification_service.verify(block) self.manager1.on_new_tx(block, propagate_to_peers=propagate) self.clock.advance(10) return block - def _add_new_blocks(self, num_blocks, propagate=True): + def 
_add_new_blocks(self, num_blocks: int, propagate: bool = True) -> list[Block]: blocks = [] for _ in range(num_blocks): blocks.append(self._add_new_block(propagate=propagate)) return blocks - def test_mempool_basic(self): + def test_mempool_basic(self) -> None: # 2 blocks self._add_new_blocks(2) # N blocks to unlock the reward @@ -100,7 +102,7 @@ class SyncV1HathorSyncMempoolTestCase(unittest.SyncV1Params, BaseHathorSyncMempo class SyncV2HathorSyncMempoolTestCase(unittest.SyncV2Params, BaseHathorSyncMempoolTestCase): __test__ = True - def test_mempool_basic(self): + def test_mempool_basic(self) -> None: super().test_mempool_basic() # 3 genesis diff --git a/tests/p2p/test_sync_rate_limiter.py b/tests/p2p/test_sync_rate_limiter.py index 9433c7ade..04d091c27 100644 --- a/tests/p2p/test_sync_rate_limiter.py +++ b/tests/p2p/test_sync_rate_limiter.py @@ -1,7 +1,9 @@ -from unittest.mock import MagicMock, Mock +from unittest.mock import Mock, patch from twisted.python.failure import Failure +from hathor.p2p.states import ReadyState +from hathor.p2p.sync_v1.agent import NodeSyncTimestamp from hathor.simulator import FakeConnection from hathor.simulator.trigger import StopAfterNMinedBlocks from tests import unittest @@ -11,7 +13,7 @@ class SyncV1RandomSimulatorTestCase(unittest.SyncV1Params, SimulatorTestCase): __test__ = True - def test_sync_rate_limiter(self): + def test_sync_rate_limiter(self) -> None: manager1 = self.create_peer() miner1 = self.simulator.create_miner(manager1, hashpower=10e6) @@ -32,21 +34,23 @@ def test_sync_rate_limiter(self): connected_peers2 = list(manager2.connections.connected_peers.values()) self.assertEqual(1, len(connected_peers2)) protocol1 = connected_peers2[0] + assert isinstance(protocol1.state, ReadyState) sync2 = protocol1.state.sync_agent - sync2._send_tips = MagicMock() + assert isinstance(sync2, NodeSyncTimestamp) - for i in range(100): - sync2.send_tips() - self.assertEqual(sync2._send_tips.call_count, min(i + 1, 8)) - self.assertEqual(sync2._send_tips.call_count, 8) + with patch.object(sync2, '_send_tips') as mock: + for i in range(100): + sync2.send_tips() + self.assertEqual(mock.call_count, min(i + 1, 8)) + self.assertEqual(mock.call_count, 8) - sync2.send_tips() - self.assertEqual(sync2._send_tips.call_count, 8) + sync2.send_tips() + self.assertEqual(mock.call_count, 8) - self.simulator._clock.advance(2000) - self.assertTrue(sync2._send_tips.call_count, 16) + self.simulator._clock.advance(2000) + self.assertTrue(mock.call_count, 16) - def test_sync_rate_limiter_disconnect(self): + def test_sync_rate_limiter_disconnect(self) -> None: # Test send_tips delayed calls cancellation with disconnection manager1 = self.create_peer() manager2 = self.create_peer() @@ -64,36 +68,39 @@ def test_sync_rate_limiter_disconnect(self): self.assertEqual(1, len(connected_peers2)) protocol1 = connected_peers2[0] + assert isinstance(protocol1.state, ReadyState) sync1 = protocol1.state.sync_agent - sync1._send_tips = Mock(wraps=sync1._send_tips) + assert isinstance(sync1, NodeSyncTimestamp) + mock = Mock(wraps=sync1._send_tips) - sync1.send_tips() - self.assertEqual(sync1._send_tips.call_count, 1) - self.assertEqual(len(sync1._send_tips_call_later), 0) + with patch.object(sync1, '_send_tips', new=mock): + sync1.send_tips() + self.assertEqual(mock.call_count, 1) + self.assertEqual(len(sync1._send_tips_call_later), 0) - sync1.send_tips() - self.assertEqual(sync1._send_tips.call_count, 1) - self.assertEqual(len(sync1._send_tips_call_later), 1) + sync1.send_tips() + 
self.assertEqual(mock.call_count, 1) + self.assertEqual(len(sync1._send_tips_call_later), 1) - sync1.send_tips() - self.assertEqual(sync1._send_tips.call_count, 1) - self.assertEqual(len(sync1._send_tips_call_later), 2) + sync1.send_tips() + self.assertEqual(mock.call_count, 1) + self.assertEqual(len(sync1._send_tips_call_later), 2) - # Close the connection. - conn12.disconnect(Failure(Exception('testing'))) - self.simulator.remove_connection(conn12) + # Close the connection. + conn12.disconnect(Failure(Exception('testing'))) + self.simulator.remove_connection(conn12) - self.simulator.run(30) + self.simulator.run(30) - # Send tips should not be called any further since the connection has already been closed. - self.assertEqual(sync1._send_tips.call_count, 1) - # Residual delayed calls - self.assertEqual(len(sync1._send_tips_call_later), 2) - # The residual delayed calls should have been canceled - for call_later in sync1._send_tips_call_later: - self.assertFalse(call_later.active()) + # Send tips should not be called any further since the connection has already been closed. + self.assertEqual(mock.call_count, 1) + # Residual delayed calls + self.assertEqual(len(sync1._send_tips_call_later), 2) + # The residual delayed calls should have been canceled + for call_later in sync1._send_tips_call_later: + self.assertFalse(call_later.active()) - def test_sync_rate_limiter_delayed_calls_draining(self): + def test_sync_rate_limiter_delayed_calls_draining(self) -> None: # Test the draining of delayed calls from _send_tips_call_later list manager1 = self.create_peer() manager2 = self.create_peer() @@ -111,7 +118,9 @@ def test_sync_rate_limiter_delayed_calls_draining(self): self.assertEqual(1, len(connected_peers2)) protocol1 = connected_peers2[0] + assert isinstance(protocol1.state, ReadyState) sync1 = protocol1.state.sync_agent + assert isinstance(sync1, NodeSyncTimestamp) sync1.send_tips() self.assertEqual(len(sync1._send_tips_call_later), 0) @@ -131,7 +140,7 @@ def test_sync_rate_limiter_delayed_calls_draining(self): # should have been executed self.assertEqual(len(sync1._send_tips_call_later), 0) - def test_sync_rate_limiter_delayed_calls_stop(self): + def test_sync_rate_limiter_delayed_calls_stop(self) -> None: # Test that stopping the sync agent cancels the pending delayed calls in the _send_tips_call_later list manager1 = self.create_peer() manager2 = self.create_peer() @@ -149,7 +158,9 @@ def test_sync_rate_limiter_delayed_calls_stop(self): self.assertEqual(1, len(connected_peers2)) protocol1 = connected_peers2[0] + assert isinstance(protocol1.state, ReadyState) sync1 = protocol1.state.sync_agent + assert isinstance(sync1, NodeSyncTimestamp) sync1.send_tips() self.assertEqual(len(sync1._send_tips_call_later), 0) diff --git a/tests/p2p/test_sync_v2.py b/tests/p2p/test_sync_v2.py index 68be619de..06e35ebdf 100644 --- a/tests/p2p/test_sync_v2.py +++ b/tests/p2p/test_sync_v2.py @@ -1,13 +1,15 @@ import base64 import re +from unittest.mock import patch import pytest -from twisted.internet.defer import inlineCallbacks, succeed +from twisted.internet.defer import Deferred, succeed from twisted.python.failure import Failure from hathor.p2p.messages import ProtocolMessages from hathor.p2p.peer_id import PeerId -from hathor.p2p.sync_v2.agent import _HeightInfo +from hathor.p2p.states import ReadyState +from hathor.p2p.sync_v2.agent import NodeBlockSync, _HeightInfo from hathor.simulator import FakeConnection from hathor.simulator.trigger import ( StopAfterNMinedBlocks, @@ -16,7 +18,11 @@ StopWhenTrue, Trigger, ) +from 
hathor.transaction.storage import TransactionRocksDBStorage +from hathor.transaction.storage.transaction_storage import TransactionStorage from hathor.transaction.storage.traversal import DFSWalk +from hathor.types import VertexId +from hathor.util import not_none from tests.simulation.base import SimulatorTestCase from tests.utils import HAS_ROCKSDB @@ -26,7 +32,7 @@ class BaseRandomSimulatorTestCase(SimulatorTestCase): seed_config = 2 - def _get_partial_blocks(self, tx_storage): + def _get_partial_blocks(self, tx_storage: TransactionStorage) -> set[VertexId]: with tx_storage.allow_partially_validated_context(): partial_blocks = set() for tx in tx_storage.get_all_transactions(): @@ -89,6 +95,7 @@ def _run_restart_test(self, *, full_verification: bool, use_tx_storage_cache: bo conn12.disconnect(Failure(Exception('testing'))) self.simulator.remove_connection(conn12) manager2.stop() + assert isinstance(manager2.tx_storage, TransactionRocksDBStorage) manager2.tx_storage._rocksdb_storage.close() del manager2 @@ -146,19 +153,19 @@ def _run_restart_test(self, *, full_verification: bool, use_tx_storage_cache: bo self.assertConsensusEqualSyncV2(manager1, manager3) @pytest.mark.skipif(not HAS_ROCKSDB, reason='requires python-rocksdb') - def test_restart_fullnode_full_verification(self): + def test_restart_fullnode_full_verification(self) -> None: self._run_restart_test(full_verification=True, use_tx_storage_cache=False) @pytest.mark.skipif(not HAS_ROCKSDB, reason='requires python-rocksdb') - def test_restart_fullnode_quick(self): + def test_restart_fullnode_quick(self) -> None: self._run_restart_test(full_verification=False, use_tx_storage_cache=False) @pytest.mark.skipif(not HAS_ROCKSDB, reason='requires python-rocksdb') - def test_restart_fullnode_quick_with_cache(self): + def test_restart_fullnode_quick_with_cache(self) -> None: self._run_restart_test(full_verification=False, use_tx_storage_cache=True) @pytest.mark.skipif(not HAS_ROCKSDB, reason='requires python-rocksdb') - def test_restart_fullnode_full_verification_with_cache(self): + def test_restart_fullnode_full_verification_with_cache(self) -> None: self._run_restart_test(full_verification=True, use_tx_storage_cache=True) def test_exceeds_streaming_and_mempool_limits(self) -> None: @@ -250,7 +257,93 @@ def test_exceeds_streaming_and_mempool_limits(self) -> None: self.assertEqual(manager1.tx_storage.get_vertices_count(), manager2.tx_storage.get_vertices_count()) self.assertConsensusEqualSyncV2(manager1, manager2) - def _prepare_sync_v2_find_best_common_block_reorg(self): + def test_receiving_tips_limit(self) -> None: + from hathor.manager import HathorManager + from hathor.transaction import Transaction + from hathor.wallet.base_wallet import WalletOutputInfo + from tests.utils import BURN_ADDRESS + + manager1 = self.create_peer(enable_sync_v1=False, enable_sync_v2=True) + manager1.allow_mining_without_peers() + + # Find 100 blocks. 
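+ # (a simulated high-hashpower miner runs on manager1 until the StopAfterNMinedBlocks trigger below has seen 100 blocks)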
+ miner1 = self.simulator.create_miner(manager1, hashpower=10e6) + miner1.start() + trigger: Trigger = StopAfterNMinedBlocks(miner1, quantity=100) + self.assertTrue(self.simulator.run(3 * 3600, trigger=trigger)) + miner1.stop() + + # Custom tx generator that generates tips + parents = manager1.get_new_tx_parents(manager1.tx_storage.latest_timestamp) + + def custom_gen_new_tx(manager: HathorManager, _address: str, value: int) -> Transaction: + outputs = [] + # XXX: burn address guarantees that this output will not be used as input for any following transactions + # XXX: reduce the value to make sure we can generate more transactions; otherwise it would spend a uniformly + # random percentage (from 1 to 100) of the available balance, while this way it spends from 0.1% to 10% + outputs.append(WalletOutputInfo(address=BURN_ADDRESS, value=max(1, int(value / 10)), timelock=None)) + + assert manager.wallet is not None + tx = manager.wallet.prepare_transaction_compute_inputs(Transaction, outputs, manager.tx_storage) + tx.storage = manager.tx_storage + + max_ts_spent_tx = max(tx.get_spent_tx(txin).timestamp for txin in tx.inputs) + tx.timestamp = max(max_ts_spent_tx + 1, int(manager.reactor.seconds())) + + tx.weight = 1 + # XXX: fixed parents are the final requirement to make all the generated transactions new tips + tx.parents = parents + manager.cpu_mining_service.resolve(tx) + return tx + + # Generate 100 tx-tips in mempool. + gen_tx1 = self.simulator.create_tx_generator(manager1, rate=3., hashpower=10e9, ignore_no_funds=True) + gen_tx1.gen_new_tx = custom_gen_new_tx + gen_tx1.start() + trigger = StopAfterNTransactions(gen_tx1, quantity=100) + self.simulator.run(3600, trigger=trigger) + self.assertGreater(manager1.tx_storage.get_vertices_count(), 100) + gen_tx1.stop() + assert manager1.tx_storage.indexes is not None + assert manager1.tx_storage.indexes.mempool_tips is not None + mempool_tips_count = len(manager1.tx_storage.indexes.mempool_tips.get()) + # we should expect at the very least 30 tips + self.assertGreater(mempool_tips_count, 30) + + # Create a new peer and run sync for a while (but stop before getting synced). + peer_id = PeerId() + builder2 = self.simulator.get_default_builder() \ + .set_peer_id(peer_id) \ + .disable_sync_v1() \ + .enable_sync_v2() + + manager2 = self.simulator.create_peer(builder2) + conn12 = FakeConnection(manager1, manager2, latency=0.05) + self.simulator.add_connection(conn12) + + # Let the connection start to sync. 
+ self.simulator.run(1) + + # Run until blocks are synced + sync2 = conn12.proto2.state.sync_agent + trigger = StopWhenTrue(sync2.is_synced) + self.assertTrue(self.simulator.run(300, trigger=trigger)) + + # Change manager2's max_receiving_tips to check if it correctly closes the connection + # 10 < 30, so this should be strict enough that it will fail + sync2.max_receiving_tips = 10 + self.assertIsNone(sync2._blk_streaming_server) + self.assertIsNone(sync2._tx_streaming_server) + + # This should fail because the tips received exceed the limit, so they are rejected + self.simulator.run(300) + # we should expect only the tips to be missing from the second node + self.assertEqual(manager1.tx_storage.get_vertices_count(), + manager2.tx_storage.get_vertices_count() + mempool_tips_count) + # and also the second node should have aborted the connection + self.assertTrue(conn12.proto2.aborting) + + def _prepare_sync_v2_find_best_common_block_reorg(self) -> FakeConnection: manager1 = self.create_peer(enable_sync_v1=False, enable_sync_v2=True) manager1.allow_mining_without_peers() miner1 = self.simulator.create_miner(manager1, hashpower=10e6) @@ -265,50 +358,53 @@ def _prepare_sync_v2_find_best_common_block_reorg(self): self.assertTrue(self.simulator.run(3600)) return conn12 - @inlineCallbacks - def test_sync_v2_find_best_common_block_reorg_1(self): + async def test_sync_v2_find_best_common_block_reorg_1(self) -> None: conn12 = self._prepare_sync_v2_find_best_common_block_reorg() + assert isinstance(conn12._proto1.state, ReadyState) sync_agent = conn12._proto1.state.sync_agent + assert isinstance(sync_agent, NodeBlockSync) rng = conn12.manager2.rng my_best_block = sync_agent.get_my_best_block() - peer_best_block = sync_agent.peer_best_block + peer_best_block = not_none(sync_agent.peer_best_block) fake_peer_best_block = _HeightInfo(my_best_block.height + 3, rng.randbytes(32)) reorg_height = peer_best_block.height - 50 - def fake_get_peer_block_hashes(heights): + def fake_get_peer_block_hashes(heights: list[int]) -> Deferred[list[_HeightInfo]]: # return empty as soon as the lowest height searched is not the genesis if heights[0] != 0: - return [] + return succeed([]) # simulate a reorg response = [] for h in heights: if h < reorg_height: - vertex_id = conn12.manager2.tx_storage.indexes.height.get(h) + index_manager = not_none(conn12.manager2.tx_storage.indexes) + vertex_id = not_none(index_manager.height.get(h)) else: vertex_id = rng.randbytes(32) response.append(_HeightInfo(height=h, id=vertex_id)) return succeed(response) - sync_agent.get_peer_block_hashes = fake_get_peer_block_hashes - common_block_info = yield sync_agent.find_best_common_block(my_best_block, fake_peer_best_block) - self.assertIsNone(common_block_info) + with patch.object(sync_agent, 'get_peer_block_hashes', new=fake_get_peer_block_hashes): + common_block_info = await sync_agent.find_best_common_block(my_best_block, fake_peer_best_block) + self.assertIsNone(common_block_info) - @inlineCallbacks - def test_sync_v2_find_best_common_block_reorg_2(self): + async def test_sync_v2_find_best_common_block_reorg_2(self) -> None: conn12 = self._prepare_sync_v2_find_best_common_block_reorg() + assert isinstance(conn12._proto1.state, ReadyState) sync_agent = conn12._proto1.state.sync_agent + assert isinstance(sync_agent, NodeBlockSync) rng = conn12.manager2.rng my_best_block = sync_agent.get_my_best_block() - peer_best_block = sync_agent.peer_best_block + peer_best_block = not_none(sync_agent.peer_best_block) fake_peer_best_block = 
_HeightInfo(my_best_block.height + 3, rng.randbytes(32)) reorg_height = peer_best_block.height - 50 - def fake_get_peer_block_hashes(heights): + def fake_get_peer_block_hashes(heights: list[int]) -> Deferred[list[_HeightInfo]]: if heights[0] != 0: return succeed([ _HeightInfo(height=h, id=rng.randbytes(32)) @@ -319,15 +415,16 @@ def fake_get_peer_block_hashes(heights): response = [] for h in heights: if h < reorg_height: - vertex_id = conn12.manager2.tx_storage.indexes.height.get(h) + index_manager = not_none(conn12.manager2.tx_storage.indexes) + vertex_id = not_none(index_manager.height.get(h)) else: vertex_id = rng.randbytes(32) response.append(_HeightInfo(height=h, id=vertex_id)) return succeed(response) - sync_agent.get_peer_block_hashes = fake_get_peer_block_hashes - common_block_info = yield sync_agent.find_best_common_block(my_best_block, fake_peer_best_block) - self.assertIsNone(common_block_info) + with patch.object(sync_agent, 'get_peer_block_hashes', new=fake_get_peer_block_hashes): + common_block_info = await sync_agent.find_best_common_block(my_best_block, fake_peer_best_block) + self.assertIsNone(common_block_info) def test_multiple_unexpected_txs(self) -> None: manager1 = self.create_peer(enable_sync_v1=False, enable_sync_v2=True) diff --git a/tests/p2p/test_twin_tx.py b/tests/p2p/test_twin_tx.py index 9e5e8857a..ae2339cb5 100644 --- a/tests/p2p/test_twin_tx.py +++ b/tests/p2p/test_twin_tx.py @@ -1,6 +1,7 @@ from hathor.crypto.util import decode_address from hathor.simulator.utils import add_new_blocks from hathor.transaction import Transaction +from hathor.util import not_none from hathor.wallet.base_wallet import WalletOutputInfo from tests import unittest from tests.utils import add_blocks_unlock_reward, add_new_double_spending @@ -9,16 +10,16 @@ class BaseTwinTransactionTestCase(unittest.TestCase): __test__ = False - def setUp(self): + def setUp(self) -> None: super().setUp() self.network = 'testnet' self.manager = self.create_peer(self.network, unlock_wallet=True) - def test_twin_tx(self): + def test_twin_tx(self) -> None: add_new_blocks(self.manager, 5, advance_clock=15) add_blocks_unlock_reward(self.manager) - address = self.get_address(0) + address = not_none(self.get_address(0)) value1 = 100 value2 = 101 value3 = 102 diff --git a/tests/p2p/test_whitelist.py b/tests/p2p/test_whitelist.py index e7b83fc18..5cbc7e4ae 100644 --- a/tests/p2p/test_whitelist.py +++ b/tests/p2p/test_whitelist.py @@ -17,7 +17,7 @@ class WhitelistTestCase(unittest.SyncV1Params, unittest.TestCase): @patch('hathor.p2p.states.peer_id.settings', new=settings._replace(ENABLE_PEER_WHITELIST=True)) - def test_sync_v11_whitelist_no_no(self): + def test_sync_v11_whitelist_no_no(self) -> None: network = 'testnet' manager1 = self.create_peer(network) @@ -39,7 +39,7 @@ def test_sync_v11_whitelist_no_no(self): self.assertTrue(conn.tr2.disconnecting) @patch('hathor.p2p.states.peer_id.settings', new=settings._replace(ENABLE_PEER_WHITELIST=True)) - def test_sync_v11_whitelist_yes_no(self): + def test_sync_v11_whitelist_yes_no(self) -> None: network = 'testnet' manager1 = self.create_peer(network) @@ -63,7 +63,7 @@ def test_sync_v11_whitelist_yes_no(self): self.assertTrue(conn.tr2.disconnecting) @patch('hathor.p2p.states.peer_id.settings', new=settings._replace(ENABLE_PEER_WHITELIST=True)) - def test_sync_v11_whitelist_yes_yes(self): + def test_sync_v11_whitelist_yes_yes(self) -> None: network = 'testnet' manager1 = self.create_peer(network) diff --git a/tests/pubsub/test_pubsub.py 
b/tests/pubsub/test_pubsub.py index 2d3d1ef62..b2e76e646 100644 --- a/tests/pubsub/test_pubsub.py +++ b/tests/pubsub/test_pubsub.py @@ -1,10 +1,10 @@ -from hathor.pubsub import HathorEvents, PubSubManager +from hathor.pubsub import EventArguments, HathorEvents, PubSubManager from tests.unittest import TestCase class PubSubTestCase(TestCase): - def test_duplicate_subscribe(self): - def noop(): + def test_duplicate_subscribe(self) -> None: + def noop(event: HathorEvents, args: EventArguments) -> None: pass pubsub = PubSubManager(self.clock) pubsub.subscribe(HathorEvents.NETWORK_NEW_TX_ACCEPTED, noop) diff --git a/tests/pubsub/test_pubsub2.py b/tests/pubsub/test_pubsub2.py index faaf9c758..d0ede02ac 100644 --- a/tests/pubsub/test_pubsub2.py +++ b/tests/pubsub/test_pubsub2.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import Callable +from typing import Any, Callable from unittest.mock import Mock, patch import pytest @@ -78,7 +78,7 @@ def test_memory_reactor_clock_running_with_threading() -> None: pubsub = PubSubManager(reactor) handler = Mock() - def fake_call_from_thread(f: Callable) -> None: + def fake_call_from_thread(f: Callable[..., Any]) -> None: reactor.callLater(0, f) call_from_thread_mock = Mock(side_effect=fake_call_from_thread) diff --git a/tests/resources/healthcheck/test_healthcheck.py b/tests/resources/healthcheck/test_healthcheck.py index c616d3a03..5beff1f24 100644 --- a/tests/resources/healthcheck/test_healthcheck.py +++ b/tests/resources/healthcheck/test_healthcheck.py @@ -6,9 +6,9 @@ from hathor.healthcheck.resources.healthcheck import HealthcheckResource from hathor.manager import HathorManager from hathor.simulator import FakeConnection +from hathor.simulator.utils import add_new_blocks from tests import unittest from tests.resources.base_resource import StubSite, _BaseResourceTest -from tests.utils import add_new_blocks class BaseHealthcheckReadinessTest(_BaseResourceTest._ResourceTest): diff --git a/tests/resources/transaction/test_mining.py b/tests/resources/transaction/test_mining.py index af97d8682..caae54ee2 100644 --- a/tests/resources/transaction/test_mining.py +++ b/tests/resources/transaction/test_mining.py @@ -38,7 +38,7 @@ def test_get_block_template_with_address(self): 'accumulated_weight': 1.0, 'score': 0, 'height': 1, - 'min_height': 0, + 'min_height': None, 'first_block': None, 'feature_activation_bit_counts': None }, @@ -71,7 +71,7 @@ def test_get_block_template_without_address(self): 'accumulated_weight': 1.0, 'score': 0, 'height': 1, - 'min_height': 0, + 'min_height': None, 'first_block': None, 'feature_activation_bit_counts': None }, diff --git a/tests/resources/transaction/test_pushtx.py b/tests/resources/transaction/test_pushtx.py index e861283df..7ed5b3e36 100644 --- a/tests/resources/transaction/test_pushtx.py +++ b/tests/resources/transaction/test_pushtx.py @@ -7,7 +7,6 @@ from hathor.transaction import Transaction, TxInput from hathor.transaction.resources import PushTxResource from hathor.transaction.scripts import P2PKH, parse_address_script -from hathor.util import not_none from hathor.wallet.base_wallet import WalletInputInfo, WalletOutputInfo from hathor.wallet.resources import SendTokensResource from tests import unittest @@ -100,7 +99,7 @@ def test_push_tx(self) -> Generator: # invalid transaction, without forcing tx.timestamp = 5 - tx.inputs = [TxInput(not_none(blocks[1].hash), 0, b'')] + tx.inputs = [TxInput(blocks[1].hash, 0, b'')] 
script_type_out = parse_address_script(blocks[1].outputs[0].script) assert script_type_out is not None private_key = self.manager.wallet.get_private_key(script_type_out.address) @@ -226,7 +225,6 @@ def test_spending_voided(self) -> Generator: p2pkh = parse_address_script(txout.script) assert p2pkh is not None private_key = wallet.get_private_key(p2pkh.address) - assert tx.hash is not None inputs = [WalletInputInfo(tx_id=tx.hash, index=0, private_key=private_key)] outputs = [WalletOutputInfo(address=decode_address(p2pkh.address), value=txout.value, timelock=None), ] tx2 = self.get_tx(inputs, outputs) @@ -237,7 +235,6 @@ def test_spending_voided(self) -> Generator: # Now we set this tx2 as voided and try to push a tx3 that spends tx2 tx_meta = tx2.get_metadata() - assert tx2.hash is not None tx_meta.voided_by = {tx2.hash} self.manager.tx_storage.save_transaction(tx2, only_metadata=True) diff --git a/tests/resources/wallet/test_thin_wallet.py b/tests/resources/wallet/test_thin_wallet.py index ed1710c7e..4f01a739d 100644 --- a/tests/resources/wallet/test_thin_wallet.py +++ b/tests/resources/wallet/test_thin_wallet.py @@ -197,7 +197,6 @@ def test_history_paginate(self): response_history = yield self.web_address_history.get( 'thin_wallet/address_history', { b'addresses[]': address.encode(), - b'paginate': b'true' } ) @@ -217,7 +216,6 @@ def test_history_paginate(self): response_history = yield self.web_address_history.get( 'thin_wallet/address_history', { b'addresses[]': address.encode(), - b'paginate': b'true' } ) @@ -248,7 +246,6 @@ def test_history_paginate(self): response_history = yield self.web_address_history.get( 'thin_wallet/address_history', { b'addresses[]': random_address.encode(), - b'paginate': b'true' } ) @@ -261,7 +258,6 @@ def test_history_paginate(self): 'thin_wallet/address_history', { b'addresses[]': random_address.encode(), b'hash': response_data['first_hash'].encode(), - b'paginate': b'true' } ) diff --git a/tests/simulation/base.py b/tests/simulation/base.py index 8acb087ca..1811dd873 100644 --- a/tests/simulation/base.py +++ b/tests/simulation/base.py @@ -1,6 +1,9 @@ from typing import Optional +from hathor.builder import SyncSupportLevel +from hathor.manager import HathorManager from hathor.simulator import Simulator +from hathor.types import VertexId from tests import unittest @@ -9,7 +12,7 @@ class SimulatorTestCase(unittest.TestCase): seed_config: Optional[int] = None - def setUp(self): + def setUp(self) -> None: super().setUp() self.simulator = Simulator(self.seed_config) @@ -19,11 +22,17 @@ def setUp(self): print('Simulation seed config:', self.simulator.seed) print('-'*30) - def tearDown(self): + def tearDown(self) -> None: self.simulator.stop() super().tearDown() - def create_peer(self, enable_sync_v1=None, enable_sync_v2=None, soft_voided_tx_ids=None, simulator=None): + def create_peer( # type: ignore[override] + self, + enable_sync_v1: bool | None = None, + enable_sync_v2: bool | None = None, + soft_voided_tx_ids: set[VertexId] = set(), + simulator: Simulator | None = None + ) -> HathorManager: if enable_sync_v1 is None: assert hasattr(self, '_enable_sync_v1'), ('`_enable_sync_v1` has no default by design, either set one on ' 'the test class or pass `enable_sync_v1` by argument') @@ -33,13 +42,15 @@ def create_peer(self, enable_sync_v1=None, enable_sync_v2=None, soft_voided_tx_i 'the test class or pass `enable_sync_v2` by argument') enable_sync_v2 = self._enable_sync_v2 assert enable_sync_v1 or enable_sync_v2, 'enable at least one sync version' + sync_v1_support 
= SyncSupportLevel.ENABLED if enable_sync_v1 else SyncSupportLevel.DISABLED + sync_v2_support = SyncSupportLevel.ENABLED if enable_sync_v2 else SyncSupportLevel.DISABLED if simulator is None: simulator = self.simulator builder = simulator.get_default_builder() \ .set_peer_id(self.get_random_peer_id_from_pool(rng=simulator.rng)) \ .set_soft_voided_tx_ids(soft_voided_tx_ids) \ - .set_enable_sync_v1(enable_sync_v1) \ - .set_enable_sync_v2(enable_sync_v2) + .set_sync_v1_support(sync_v1_support) \ + .set_sync_v2_support(sync_v2_support) return simulator.create_peer(builder) diff --git a/tests/simulation/test_simulator.py b/tests/simulation/test_simulator.py index aac7edd66..b2f8083ce 100644 --- a/tests/simulation/test_simulator.py +++ b/tests/simulation/test_simulator.py @@ -1,21 +1,22 @@ import pytest +from hathor.manager import HathorManager from hathor.simulator import FakeConnection -from hathor.simulator.trigger import All as AllTriggers, StopWhenSynced +from hathor.simulator.trigger import All as AllTriggers, StopWhenSynced, Trigger from hathor.verification.vertex_verifier import VertexVerifier from tests import unittest from tests.simulation.base import SimulatorTestCase class BaseRandomSimulatorTestCase(SimulatorTestCase): - def test_verify_pow(self): + def test_verify_pow(self) -> None: manager1 = self.create_peer() # just get one of the genesis, we don't really need to create any transaction tx = next(iter(manager1.tx_storage.get_all_genesis())) # optional argument must be valid, it just has to not raise any exception, there's no assert for that - VertexVerifier(settings=self._settings, daa=manager1.daa).verify_pow(tx, override_weight=0.) + VertexVerifier(settings=self._settings).verify_pow(tx, override_weight=0.) - def test_one_node(self): + def test_one_node(self) -> None: manager1 = self.create_peer() miner1 = self.simulator.create_miner(manager1, hashpower=100e6) @@ -29,7 +30,7 @@ def test_one_node(self): # FIXME: the setup above produces 0 new blocks and transactions # self.assertGreater(manager1.tx_storage.get_vertices_count(), 3) - def test_two_nodes(self): + def test_two_nodes(self) -> None: manager1 = self.create_peer() manager2 = self.create_peer() @@ -63,10 +64,10 @@ def test_two_nodes(self): self.assertTrue(conn12.is_connected) self.assertTipsEqual(manager1, manager2) - def test_many_miners_since_beginning(self): - nodes = [] + def test_many_miners_since_beginning(self) -> None: + nodes: list[HathorManager] = [] miners = [] - stop_triggers = [] + stop_triggers: list[Trigger] = [] for hashpower in [10e6, 5e6, 1e6, 1e6, 1e6]: manager = self.create_peer() @@ -96,11 +97,11 @@ def test_many_miners_since_beginning(self): self.assertTipsEqual(nodes[0], node) @pytest.mark.flaky(max_runs=5, min_passes=1) - def test_new_syncing_peer(self): + def test_new_syncing_peer(self) -> None: nodes = [] miners = [] tx_generators = [] - stop_triggers = [] + stop_triggers: list[Trigger] = [] manager = self.create_peer() nodes.append(manager) @@ -162,7 +163,7 @@ class SyncV2RandomSimulatorTestCase(unittest.SyncV2Params, BaseRandomSimulatorTe class SyncBridgeRandomSimulatorTestCase(unittest.SyncBridgeParams, SyncV2RandomSimulatorTestCase): __test__ = True - def test_compare_mempool_implementations(self): + def test_compare_mempool_implementations(self) -> None: manager1 = self.create_peer() manager2 = self.create_peer() @@ -170,7 +171,7 @@ def test_compare_mempool_implementations(self): tx_storage = manager1.tx_storage assert tx_storage.indexes is not None assert tx_storage.indexes.mempool_tips is 
not None - assert manager1.tx_storage.indexes.tx_tips is not None + assert manager1.tx_storage.indexes and manager1.tx_storage.indexes.tx_tips is not None mempool_tips = tx_storage.indexes.mempool_tips miner1 = self.simulator.create_miner(manager1, hashpower=10e6) diff --git a/tests/simulation/test_simulator_itself.py b/tests/simulation/test_simulator_itself.py index 6683a37b4..22b1c311b 100644 --- a/tests/simulation/test_simulator_itself.py +++ b/tests/simulation/test_simulator_itself.py @@ -1,5 +1,8 @@ import pytest +from hathor.builder import SyncSupportLevel +from hathor.manager import HathorManager +from hathor.p2p.peer_id import PeerId from hathor.simulator import FakeConnection, Simulator from tests import unittest @@ -11,7 +14,7 @@ class BaseSimulatorSelfTestCase(unittest.TestCase): __test__ = False - def setUp(self): + def setUp(self) -> None: super().setUp() seed = None @@ -29,14 +32,20 @@ def setUp(self): print('Simulation seed config:', self.simulator1.seed) print('-' * 30) - def tearDown(self): + def tearDown(self) -> None: super().tearDown() self.simulator1.stop() self.simulator2.stop() self.simulator3.stop() - def create_simulator_peer(self, simulator, peer_id_pool, enable_sync_v1=None, enable_sync_v2=None): + def create_simulator_peer( + self, + simulator: Simulator, + peer_id_pool: list[PeerId], + enable_sync_v1: bool | None = None, + enable_sync_v2: bool | None = None + ) -> HathorManager: if enable_sync_v1 is None: assert hasattr(self, '_enable_sync_v1'), ('`_enable_sync_v1` has no default by design, either set one on ' 'the test class or pass `enable_sync_v1` by argument') @@ -46,15 +55,17 @@ def create_simulator_peer(self, simulator, peer_id_pool, enable_sync_v1=None, en 'the test class or pass `enable_sync_v2` by argument') enable_sync_v2 = self._enable_sync_v2 assert enable_sync_v1 or enable_sync_v2, 'enable at least one sync version' + sync_v1_support = SyncSupportLevel.ENABLED if enable_sync_v1 else SyncSupportLevel.DISABLED + sync_v2_support = SyncSupportLevel.ENABLED if enable_sync_v2 else SyncSupportLevel.DISABLED builder = simulator.get_default_builder() \ .set_peer_id(self.get_random_peer_id_from_pool()) \ - .set_enable_sync_v1(enable_sync_v1) \ - .set_enable_sync_v2(enable_sync_v2) + .set_sync_v1_support(sync_v1_support) \ + .set_sync_v2_support(sync_v2_support) return simulator.create_peer(builder) - def _simulate_run(self, run_i, simulator): + def _simulate_run(self, run_i: int, simulator: Simulator) -> list[HathorManager]: # XXX: the following was adapted from test_new_syncing_peer, it doesn't matter too much, but has good coverage # of different behaviors that can be affected by non-determinism on the fullnode implementation @@ -110,7 +121,7 @@ def _simulate_run(self, run_i, simulator): # XXX: marked as flaky because of a known random issue @pytest.mark.flaky(max_runs=3, min_passes=1) - def test_determinism_full_runs(self): + def test_determinism_full_runs(self) -> None: # sanity assert so we don't mess it up during setup self.assertEqual(self.simulator1.seed, self.simulator2.seed) self.assertEqual(self.simulator1.seed, self.simulator3.seed) @@ -128,7 +139,7 @@ def test_determinism_full_runs(self): # XXX: marked as flaky because of a known random issue @pytest.mark.flaky(max_runs=3, min_passes=1) - def test_determinism_interleaved(self): + def test_determinism_interleaved(self) -> None: # sanity assert so we don't mess it up during setup self.assertEqual(self.simulator1.seed, self.simulator2.seed) diff --git a/tests/simulation/test_trigger.py 
b/tests/simulation/test_trigger.py index b91e4e293..678902d47 100644 --- a/tests/simulation/test_trigger.py +++ b/tests/simulation/test_trigger.py @@ -3,11 +3,12 @@ from hathor.p2p.messages import ProtocolMessages from hathor.simulator import FakeConnection, Simulator from hathor.simulator.trigger import StopAfterMinimumBalance, StopAfterNMinedBlocks, StopWhenSendLineMatch +from hathor.util import not_none from tests import unittest class TriggerTestCase(unittest.TestCase): - def setUp(self): + def setUp(self) -> None: super().setUp() self.simulator = Simulator() @@ -20,11 +21,11 @@ def setUp(self): print('Simulation seed config:', self.simulator.seed) print('-' * 30) - def tearDown(self): + def tearDown(self) -> None: super().tearDown() self.simulator.stop() - def test_stop_after_n_mined_blocks(self): + def test_stop_after_n_mined_blocks(self) -> None: miner1 = self.simulator.create_miner(self.manager1, hashpower=1e6) miner1.start() @@ -47,11 +48,11 @@ def test_stop_after_n_mined_blocks(self): self.assertEqual(miner1.get_blocks_found(), 16) self.assertLess(reactor.seconds(), t0 + 3600) - def test_stop_after_minimum_balance(self): + def test_stop_after_minimum_balance(self) -> None: miner1 = self.simulator.create_miner(self.manager1, hashpower=1e6) miner1.start() - wallet = self.manager1.wallet + wallet = not_none(self.manager1.wallet) settings = self.simulator.settings minimum_balance = 1000_00 # 16 blocks @@ -62,7 +63,7 @@ def test_stop_after_minimum_balance(self): self.assertTrue(self.simulator.run(3600, trigger=trigger)) self.assertGreaterEqual(wallet.balance[token_uid].available, minimum_balance) - def test_stop_after_sendline(self): + def test_stop_after_sendline(self) -> None: manager2 = self.simulator.create_peer() conn12 = FakeConnection(self.manager1, manager2, latency=0.05) self.simulator.add_connection(conn12) diff --git a/tests/sysctl/test_feature_activation.py b/tests/sysctl/test_feature_activation.py new file mode 100644 index 000000000..48aeb2713 --- /dev/null +++ b/tests/sysctl/test_feature_activation.py @@ -0,0 +1,38 @@ +# Copyright 2024 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
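+# Tests for the new FeatureActivationSysctl commands, exercised against a mocked BitSignalingService.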
+ +from unittest.mock import Mock + +from hathor.feature_activation.bit_signaling_service import BitSignalingService +from hathor.feature_activation.feature import Feature +from hathor.sysctl import FeatureActivationSysctl + + +def test_feature_activation_sysctl() -> None: + bit_signaling_service_mock = Mock(spec_set=BitSignalingService) + sysctl = FeatureActivationSysctl(bit_signaling_service_mock) + + bit_signaling_service_mock.get_support_features = Mock(return_value=[Feature.NOP_FEATURE_1, Feature.NOP_FEATURE_2]) + bit_signaling_service_mock.get_not_support_features = Mock(return_value=[Feature.NOP_FEATURE_3]) + bit_signaling_service_mock.get_best_block_signaling_features = Mock(return_value={Feature.NOP_FEATURE_1: Mock()}) + + assert sysctl.get('supported_features') == ['NOP_FEATURE_1', 'NOP_FEATURE_2'] + assert sysctl.get('not_supported_features') == ['NOP_FEATURE_3'] + assert sysctl.get('signaling_features') == ['NOP_FEATURE_1'] + + sysctl.unsafe_set('add_support', 'NOP_FEATURE_3') + bit_signaling_service_mock.add_feature_support.assert_called_once_with(Feature.NOP_FEATURE_3) + + sysctl.unsafe_set('remove_support', 'NOP_FEATURE_1') + bit_signaling_service_mock.remove_feature_support.assert_called_once_with(Feature.NOP_FEATURE_1) diff --git a/tests/sysctl/test_sysctl.py b/tests/sysctl/test_sysctl.py index d629d4230..01d8b46cc 100644 --- a/tests/sysctl/test_sysctl.py +++ b/tests/sysctl/test_sysctl.py @@ -13,7 +13,7 @@ class SysctlTest(unittest.TestCase): # We need this patch because pydantic.validate_arguments fails when it gets a mock function. - @patch('hathor.sysctl.sysctl.validate_arguments', new=lambda x: x) + @patch('hathor.sysctl.sysctl.validate_arguments', new=lambda x: x) # type: ignore def setUp(self) -> None: super().setUp() diff --git a/tests/tx/test_blockchain.py b/tests/tx/test_blockchain.py index 2d03794ff..3e30adb28 100644 --- a/tests/tx/test_blockchain.py +++ b/tests/tx/test_blockchain.py @@ -114,7 +114,6 @@ def test_single_fork_not_best(self): fork_block1 = manager.generate_mining_block() fork_block1.parents = [fork_block1.parents[0]] + fork_block1.parents[:0:-1] manager.cpu_mining_service.resolve(fork_block1) - manager.verification_service.verify(fork_block1) # Mine 8 blocks in a row blocks = add_new_blocks(manager, 8, advance_clock=15) @@ -166,7 +165,6 @@ def test_single_fork_not_best(self): # This block belongs to case (iv). fork_block3 = manager.generate_mining_block(parent_block_hash=fork_block1.hash) manager.cpu_mining_service.resolve(fork_block3) - manager.verification_service.verify(fork_block3) self.assertTrue(manager.propagate_tx(fork_block3)) fork_meta3 = fork_block3.get_metadata() self.assertEqual(fork_meta3.voided_by, {fork_block3.hash}) @@ -236,7 +234,6 @@ def test_multiple_forks(self): # Propagate a block connected to the voided chain, case (iii). fork_block2 = manager.generate_mining_block(parent_block_hash=sidechain[-1].hash) manager.cpu_mining_service.resolve(fork_block2) - manager.verification_service.verify(fork_block2) self.assertTrue(manager.propagate_tx(fork_block2)) sidechain.append(fork_block2) @@ -284,7 +281,6 @@ def test_multiple_forks(self): # Propagate a block connected to the side chain, case (v). 
fork_block3 = manager.generate_mining_block(parent_block_hash=fork_block2.hash) manager.cpu_mining_service.resolve(fork_block3) - manager.verification_service.verify(fork_block3) self.assertTrue(manager.propagate_tx(fork_block3)) sidechain.append(fork_block3) @@ -310,7 +306,6 @@ def test_multiple_forks(self): fork_block4 = manager.generate_mining_block(parent_block_hash=sidechain3[-1].hash) fork_block4.weight = 10 manager.cpu_mining_service.resolve(fork_block4) - manager.verification_service.verify(fork_block4) self.assertTrue(manager.propagate_tx(fork_block4)) sidechain3.append(fork_block4) diff --git a/tests/tx/test_genesis.py b/tests/tx/test_genesis.py index a41021f8b..fe08117bf 100644 --- a/tests/tx/test_genesis.py +++ b/tests/tx/test_genesis.py @@ -37,7 +37,7 @@ def setUp(self): self.storage = TransactionMemoryStorage() def test_pow(self): - verifier = VertexVerifier(settings=self._settings, daa=self._daa) + verifier = VertexVerifier(settings=self._settings) genesis = self.storage.get_all_genesis() for g in genesis: self.assertEqual(g.calculate_hash(), g.hash) @@ -74,9 +74,9 @@ def test_genesis_weight(self): # Validate the block and tx weight # in test mode weight is always 1 self._daa.TEST_MODE = TestMode.TEST_ALL_WEIGHT - self.assertEqual(self._daa.calculate_block_difficulty(genesis_block), 1) + self.assertEqual(self._daa.calculate_block_difficulty(genesis_block, Mock()), 1) self.assertEqual(self._daa.minimum_tx_weight(genesis_tx), 1) self._daa.TEST_MODE = TestMode.DISABLED - self.assertEqual(self._daa.calculate_block_difficulty(genesis_block), genesis_block.weight) + self.assertEqual(self._daa.calculate_block_difficulty(genesis_block, Mock()), genesis_block.weight) self.assertEqual(self._daa.minimum_tx_weight(genesis_tx), genesis_tx.weight) diff --git a/tests/tx/test_indexes.py b/tests/tx/test_indexes.py index b28a7cfc4..cfdc607be 100644 --- a/tests/tx/test_indexes.py +++ b/tests/tx/test_indexes.py @@ -265,7 +265,6 @@ def check_utxos(*args): block2.timestamp = block1.timestamp block2.weight = 1.2 self.manager.cpu_mining_service.resolve(block2) - self.manager.verification_service.validate_full(block2) self.manager.propagate_tx(block2, fails_silently=False) self.graphviz.labels[block2.hash] = 'block2' @@ -631,7 +630,7 @@ def test_addresses_index_empty(self): address = self.get_address(10) assert address is not None self.assertTrue(addresses_indexes.is_address_empty(address)) - self.assertEqual(addresses_indexes.get_sorted_from_address(address), []) + self.assertEqual(list(addresses_indexes.get_sorted_from_address(address)), []) def test_addresses_index_last(self): """ @@ -653,7 +652,7 @@ def test_addresses_index_last(self): # XXX: this artificial address should be greater (byte-wise) than any possible "natural" address address = '\x7f' * 34 self.assertTrue(addresses_indexes.is_address_empty(address)) - self.assertEqual(addresses_indexes.get_sorted_from_address(address), []) + self.assertEqual(list(addresses_indexes.get_sorted_from_address(address)), []) # XXX: since we didn't add any multisig address, this is guaranteed to reach the tail end of the index assert self._settings.P2PKH_VERSION_BYTE[0] < self._settings.MULTISIG_VERSION_BYTE[0] @@ -666,7 +665,7 @@ def test_addresses_index_last(self): assert address is not None self.assertTrue(addresses_indexes.is_address_empty(address)) - self.assertEqual(addresses_indexes.get_sorted_from_address(address), []) + self.assertEqual(list(addresses_indexes.get_sorted_from_address(address)), []) def test_height_index(self): from 
hathor.indexes.height_index import HeightInfo diff --git a/tests/tx/test_indexes2.py b/tests/tx/test_indexes2.py index b8df4d9eb..970903cc6 100644 --- a/tests/tx/test_indexes2.py +++ b/tests/tx/test_indexes2.py @@ -64,7 +64,7 @@ def test_timestamp_index(self): # XXX: we verified they're the same, doesn't matter which we pick: idx = idx_memory hashes = hashes_memory - self.log.debug('indexes match', idx=idx, hashes=unittest.shorten_hash(hashes)) + self.log.debug('indexes match', idx=idx, hashes=unittest.short_hashes(hashes)) if idx is None: break offset_variety.add(idx[1]) diff --git a/tests/tx/test_indexes3.py b/tests/tx/test_indexes3.py index 8a1f27ea6..c7a513acf 100644 --- a/tests/tx/test_indexes3.py +++ b/tests/tx/test_indexes3.py @@ -92,7 +92,6 @@ def test_topological_iterators(self): # XXX: sanity check that the children metadata is properly set (this is needed for one of the iterators) for tx in tx_storage.get_all_transactions(): - assert tx.hash is not None for parent_tx in map(tx_storage.get_transaction, tx.parents): self.assertIn(tx.hash, parent_tx.get_metadata().children) diff --git a/tests/tx/test_indexes4.py b/tests/tx/test_indexes4.py index cc0e726a3..7777d69e1 100644 --- a/tests/tx/test_indexes4.py +++ b/tests/tx/test_indexes4.py @@ -123,7 +123,6 @@ def test_topological_iterators(self): # XXX: sanity check that the children metadata is properly set (this is needed for one of the iterators) for tx in tx_storage.get_all_transactions(): - assert tx.hash is not None for parent_tx in map(tx_storage.get_transaction, tx.parents): self.assertIn(tx.hash, parent_tx.get_metadata().children) diff --git a/tests/tx/test_reward_lock.py b/tests/tx/test_reward_lock.py index c321b5beb..80f6f6e18 100644 --- a/tests/tx/test_reward_lock.py +++ b/tests/tx/test_reward_lock.py @@ -1,6 +1,7 @@ import pytest from hathor.crypto.util import get_address_from_public_key +from hathor.exception import InvalidNewTransaction from hathor.simulator.utils import add_new_blocks from hathor.transaction import Transaction, TxInput, TxOutput from hathor.transaction.exceptions import RewardLocked @@ -87,16 +88,22 @@ def test_block_with_not_enough_height(self): add_new_blocks(self.manager, self._settings.REWARD_SPEND_MIN_BLOCKS - 1, advance_clock=1) # add tx bypassing reward-lock verification - # XXX: this situation is impossible in practice, but we force it to test that when a block tries to confirms a + # XXX: this situation is impossible in practice, but we force it to test that when a block tries to confirm a # transaction before it can, the RewardLocked exception is raised tx = self._spend_reward_tx(self.manager, reward_block) self.assertEqual(tx.get_metadata().min_height, unlock_height) self.assertTrue(self.manager.on_new_tx(tx, fails_silently=False, reject_locked_reward=False)) # new block will try to confirm it and fail - with self.assertRaises(RewardLocked): + with pytest.raises(InvalidNewTransaction) as e: add_new_blocks(self.manager, 1, advance_clock=1) + assert isinstance(e.value.__cause__, RewardLocked) + + # check that the last block was not added to the storage + all_blocks = [vertex for vertex in self.manager.tx_storage.get_all_transactions() if vertex.is_block] + assert len(all_blocks) == 2 * self._settings.REWARD_SPEND_MIN_BLOCKS + 1 + def test_block_with_enough_height(self): # add block with a reward we can spend reward_block, unlock_height = self._add_reward_block() @@ -159,7 +166,6 @@ def test_mempool_tx_invalid_after_reorg(self): b0 = tb0.generate_mining_block(self.manager.rng, 
storage=self.manager.tx_storage) b0.weight = 10 self.manager.cpu_mining_service.resolve(b0) - self.manager.verification_service.verify(b0) self.manager.propagate_tx(b0, fails_silently=False) # now the new tx should not pass verification considering the reward lock diff --git a/tests/tx/test_tips.py b/tests/tx/test_tips.py index 9fbc0af46..c1ae8bfad 100644 --- a/tests/tx/test_tips.py +++ b/tests/tx/test_tips.py @@ -65,7 +65,6 @@ def test_tips_winner(self): new_block = add_new_block(self.manager, propagate=False) new_block.parents = [new_block.parents[0], tx1.hash, tx3.hash] self.manager.cpu_mining_service.resolve(new_block) - self.manager.verification_service.verify(new_block) self.manager.propagate_tx(new_block, fails_silently=False) self.manager.reactor.advance(10) @@ -167,8 +166,7 @@ class SyncV1TipsTestCase(unittest.SyncV1Params, BaseTipsTestCase): __test__ = True def get_tips(self): - from hathor.util import not_none - return {not_none(tx.hash) for tx in self.manager.tx_storage.iter_mempool_tips_from_tx_tips()} + return {tx.hash for tx in self.manager.tx_storage.iter_mempool_tips_from_tx_tips()} class SyncV2TipsTestCase(unittest.SyncV2Params, BaseTipsTestCase): diff --git a/tests/tx/test_tokens.py b/tests/tx/test_tokens.py index f84158e24..0906477e1 100644 --- a/tests/tx/test_tokens.py +++ b/tests/tx/test_tokens.py @@ -3,6 +3,7 @@ import pytest from hathor.crypto.util import decode_address +from hathor.exception import InvalidNewTransaction from hathor.indexes.tokens_index import TokenUtxoInfo from hathor.transaction import Block, Transaction, TxInput, TxOutput from hathor.transaction.exceptions import BlockWithTokensError, InputOutputMismatch, InvalidToken, TransactionDataError @@ -111,6 +112,7 @@ def test_token_transfer(self): public_bytes, signature = wallet.get_input_aux_data(data_to_sign, wallet.get_private_key(self.address_b58)) tx2.inputs[0].data = P2PKH.create_input_data(public_bytes, signature) self.manager.cpu_mining_service.resolve(tx2) + tx2.update_reward_lock_metadata() self.manager.verification_service.verify(tx2) # missing tokens @@ -154,7 +156,6 @@ def test_token_mint(self): tx2.inputs[0].data = data tx2.inputs[1].data = data self.manager.cpu_mining_service.resolve(tx2) - self.manager.verification_service.verify(tx2) self.manager.propagate_tx(tx2) self.run_to_completion() @@ -261,7 +262,6 @@ def test_token_melt(self): tx2.inputs[0].data = data tx2.inputs[1].data = data self.manager.cpu_mining_service.resolve(tx2) - self.manager.verification_service.verify(tx2) self.manager.propagate_tx(tx2) self.run_to_completion() @@ -400,7 +400,6 @@ def test_token_index_with_conflict(self, mint_amount=0): tx2.inputs[1].data = data tx2.inputs[2].data = data self.manager.cpu_mining_service.resolve(tx2) - self.manager.verification_service.verify(tx2) self.manager.propagate_tx(tx2) self.run_to_completion() @@ -504,9 +503,11 @@ def update_tx(tx): def test_token_mint_zero(self): # try to mint 0 tokens - with self.assertRaises(InvalidToken): + with pytest.raises(InvalidNewTransaction) as e: create_tokens(self.manager, self.address_b58, mint_amount=0) + assert isinstance(e.value.__cause__, InvalidToken) + def test_token_struct(self): tx = create_tokens(self.manager, self.address_b58, mint_amount=500) tx2 = TokenCreationTransaction.create_from_struct(tx.get_struct()) diff --git a/tests/tx/test_tx.py b/tests/tx/test_tx.py index 9ebf999bd..349731ffd 100644 --- a/tests/tx/test_tx.py +++ b/tests/tx/test_tx.py @@ -3,8 +3,11 @@ from math import isinf, isnan from unittest.mock import patch +import 
pytest + from hathor.crypto.util import decode_address, get_address_from_public_key, get_private_key_from_bytes from hathor.daa import TestMode +from hathor.exception import InvalidNewTransaction from hathor.feature_activation.feature import Feature from hathor.feature_activation.feature_service import FeatureService from hathor.simulator.utils import add_new_blocks @@ -565,6 +568,7 @@ def test_regular_tx(self): _input.data = P2PKH.create_input_data(public_bytes, signature) self.manager.cpu_mining_service.resolve(tx) + tx.update_reward_lock_metadata() self.manager.verification_service.verify(tx) def test_tx_weight_too_high(self): @@ -776,9 +780,12 @@ def add_block_with_data(data: bytes = b'') -> None: add_block_with_data() add_block_with_data(b'Testing, testing 1, 2, 3...') add_block_with_data(100*b'a') - with self.assertRaises(TransactionDataError): + + with pytest.raises(InvalidNewTransaction) as e: add_block_with_data(101*b'a') + assert isinstance(e.value.__cause__, TransactionDataError) + def test_output_serialization(self): from hathor.transaction.base_transaction import ( _MAX_OUTPUT_VALUE_32, diff --git a/tests/tx/test_tx_storage.py b/tests/tx/test_tx_storage.py index e12880cd8..de377cb9b 100644 --- a/tests/tx/test_tx_storage.py +++ b/tests/tx/test_tx_storage.py @@ -62,6 +62,7 @@ def setUp(self): self.block = Block(timestamp=previous_timestamp + 1, weight=12, outputs=[output], parents=block_parents, nonce=100781, storage=self.tx_storage) self.manager.cpu_mining_service.resolve(self.block) + self.block.update_reward_lock_metadata() self.manager.verification_service.verify(self.block) self.block.get_metadata().validation = ValidationState.FULL @@ -514,7 +515,6 @@ def _add_new_block(self, parents=None): block.parents = parents block.weight = 10 self.assertTrue(self.manager.cpu_mining_service.resolve(block)) - self.manager.verification_service.verify(block) self.manager.propagate_tx(block, fails_silently=False) self.reactor.advance(5) return block diff --git a/tests/tx/test_verification.py b/tests/tx/test_verification.py index 336d54510..b3414d0b6 100644 --- a/tests/tx/test_verification.py +++ b/tests/tx/test_verification.py @@ -43,7 +43,7 @@ def setUp(self) -> None: self.verifiers = self.manager.verification_service.verifiers def _get_valid_block(self) -> Block: - return Block( + block = Block( hash=b'some_hash', storage=self.manager.tx_storage, weight=1, @@ -54,9 +54,11 @@ def _get_valid_block(self) -> Block: self._settings.GENESIS_TX2_HASH ] ) + block.update_reward_lock_metadata() + return block def _get_valid_merge_mined_block(self) -> MergeMinedBlock: - return MergeMinedBlock( + block = MergeMinedBlock( hash=b'some_hash', storage=self.manager.tx_storage, weight=1, @@ -68,6 +70,8 @@ def _get_valid_merge_mined_block(self) -> MergeMinedBlock: self._settings.GENESIS_TX2_HASH ], ) + block.update_reward_lock_metadata() + return block def _get_valid_tx(self) -> Transaction: genesis_private_key = get_genesis_key() @@ -91,6 +95,7 @@ def _get_valid_tx(self) -> Transaction: self._settings.GENESIS_TX2_HASH, ] ) + tx.update_reward_lock_metadata() data_to_sign = tx.get_sighash_all() assert self.manager.wallet @@ -102,7 +107,9 @@ def _get_valid_tx(self) -> Transaction: def _get_valid_token_creation_tx(self) -> TokenCreationTransaction: add_blocks_unlock_reward(self.manager) assert self.manager.wallet - return create_tokens(self.manager, self.manager.wallet.get_unused_address()) + tx = create_tokens(self.manager, self.manager.wallet.get_unused_address()) + tx.update_reward_lock_metadata() + return 
diff --git a/tests/unittest.py b/tests/unittest.py
index 019437e26..f92bc0f50 100644
--- a/tests/unittest.py
+++ b/tests/unittest.py
@@ -3,23 +3,32 @@
 import shutil
 import tempfile
 import time
-from typing import Iterator, Optional
+from typing import Any, Callable, Collection, Iterable, Iterator, Optional
 from unittest import main as ut_main
 
 from structlog import get_logger
 from twisted.trial import unittest
 
 from hathor.builder import BuildArtifacts, Builder
+from hathor.checkpoint import Checkpoint
 from hathor.conf import HathorSettings
 from hathor.conf.get_settings import get_global_settings
 from hathor.daa import DifficultyAdjustmentAlgorithm, TestMode
+from hathor.event import EventManager
+from hathor.event.storage import EventStorage
+from hathor.manager import HathorManager
 from hathor.p2p.peer_id import PeerId
+from hathor.p2p.sync_v1.agent import NodeSyncTimestamp
+from hathor.p2p.sync_v2.agent import NodeBlockSync
 from hathor.p2p.sync_version import SyncVersion
+from hathor.pubsub import PubSubManager
 from hathor.reactor import ReactorProtocol as Reactor, get_global_reactor
 from hathor.simulator.clock import MemoryReactorHeapClock
-from hathor.transaction import BaseTransaction
-from hathor.util import Random
-from hathor.wallet import HDWallet, Wallet
+from hathor.transaction import BaseTransaction, Block, Transaction
+from hathor.transaction.storage.transaction_storage import TransactionStorage
+from hathor.types import VertexId
+from hathor.util import Random, not_none
+from hathor.wallet import BaseWallet, HDWallet, Wallet
 from tests.test_memory_reactor_clock import TestMemoryReactorClock
 
 logger = get_logger()
@@ -28,9 +37,8 @@
 USE_MEMORY_STORAGE = os.environ.get('HATHOR_TEST_MEMORY_STORAGE', 'false').lower() == 'true'
 
 
-def shorten_hash(container):
-    container_type = type(container)
-    return container_type(h[-2:].hex() for h in container)
+def short_hashes(container: Collection[bytes]) -> Iterable[str]:
+    return map(lambda hash_bytes: hash_bytes[-2:].hex(), container)
 
 
 def _load_peer_id_pool(file_path: Optional[str] = None) -> Iterator[PeerId]:
@@ -45,7 +53,7 @@ def _load_peer_id_pool(file_path: Optional[str] = None) -> Iterator[PeerId]:
         yield PeerId.create_from_json(peer_id_dict)
 
 
-def _get_default_peer_id_pool_filepath():
+def _get_default_peer_id_pool_filepath() -> str:
     this_file_path = os.path.dirname(__file__)
     file_name = 'peer_id_pool.json'
     file_path = os.path.join(this_file_path, file_name)
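
The `short_hashes` rewrite drops the old attempt to rebuild the input container's type and instead returns a lazy iterable of hex-encoded two-byte suffixes. A quick illustration with invented values:

```python
hashes = [bytes.fromhex('00112233aabb'), bytes.fromhex('44556677ccdd')]
print(list(short_hashes(hashes)))  # ['aabb', 'ccdd'] -- last two bytes of each hash, hex-encoded
```
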
@@ -79,6 +87,8 @@ class TestBuilder(Builder):
     def __init__(self) -> None:
         super().__init__()
         self.set_network('testnet')
+        # default builder has sync-v2 enabled for tests
+        self.enable_sync_v2()
 
     def build(self) -> BuildArtifacts:
         artifacts = super().build()
@@ -93,9 +103,9 @@ def _get_peer_id(self) -> PeerId:
         return PeerId()
 
     def _get_reactor(self) -> Reactor:
-        if self._reactor:
-            return self._reactor
-        return MemoryReactorHeapClock()
+        if self._reactor is None:
+            self._reactor = MemoryReactorHeapClock()
+        return self._reactor
 
 
 class TestCase(unittest.TestCase):
@@ -104,8 +114,8 @@ class TestCase(unittest.TestCase):
     use_memory_storage: bool = USE_MEMORY_STORAGE
     seed_config: Optional[int] = None
 
-    def setUp(self):
-        self.tmpdirs = []
+    def setUp(self) -> None:
+        self.tmpdirs: list[str] = []
         self.clock = TestMemoryReactorClock()
         self.clock.advance(time.time())
         self.log = logger.new()
@@ -113,10 +123,10 @@ def setUp(self):
         self.seed = secrets.randbits(64) if self.seed_config is None else self.seed_config
         self.log.info('set seed', seed=self.seed)
         self.rng = Random(self.seed)
-        self._pending_cleanups = []
+        self._pending_cleanups: list[Callable[..., Any]] = []
         self._settings = get_global_settings()
 
-    def tearDown(self):
+    def tearDown(self) -> None:
         self.clean_tmpdirs()
         for fn in self._pending_cleanups:
             fn()
@@ -139,12 +149,12 @@ def get_random_peer_id_from_pool(self, pool: Optional[list[PeerId]] = None,
             pool.remove(peer_id)
         return peer_id
 
-    def mkdtemp(self):
+    def mkdtemp(self) -> str:
         tmpdir = tempfile.mkdtemp()
         self.tmpdirs.append(tmpdir)
         return tmpdir
 
-    def _create_test_wallet(self, unlocked=False):
+    def _create_test_wallet(self, unlocked: bool = False) -> Wallet:
         """ Generate a Wallet with a number of keypairs for testing
         :rtype: Wallet
         """
@@ -164,14 +174,14 @@ def get_builder(self, network: str) -> TestBuilder:
             .set_network(network)
         return builder
 
-    def create_peer_from_builder(self, builder, start_manager=True):
+    def create_peer_from_builder(self, builder: Builder, start_manager: bool = True) -> HathorManager:
         artifacts = builder.build()
         manager = artifacts.manager
 
         if artifacts.rocksdb_storage:
            self._pending_cleanups.append(artifacts.rocksdb_storage.close)
 
-        manager.avg_time_between_blocks = 0.0001
+        # manager.avg_time_between_blocks = 0.0001  # FIXME: This property is not defined. Fix this.
 
         if start_manager:
             manager.start()
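
For context, a sketch of how these pieces combine in a test; the test class and body below are invented for illustration:

```python
class MyTest(TestCase):
    def test_peer_boots_with_sync_v2(self) -> None:
        # TestBuilder now enables sync-v2 by default, so this peer is sync-v2 ready.
        builder = self.get_builder('testnet')
        manager = self.create_peer_from_builder(builder)
        self.assertIsNotNone(manager)
```
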
@@ -180,11 +190,28 @@
 
         return manager
 
-    def create_peer(self, network, peer_id=None, wallet=None, tx_storage=None, unlock_wallet=True, wallet_index=False,
-                    capabilities=None, full_verification=True, enable_sync_v1=None, enable_sync_v2=None,
-                    checkpoints=None, utxo_index=False, event_manager=None, use_memory_index=None, start_manager=True,
-                    pubsub=None, event_storage=None, enable_event_queue=None, use_memory_storage=None):
-
+    def create_peer(  # type: ignore[no-untyped-def]
+        self,
+        network: str,
+        peer_id: PeerId | None = None,
+        wallet: BaseWallet | None = None,
+        tx_storage: TransactionStorage | None = None,
+        unlock_wallet: bool = True,
+        wallet_index: bool = False,
+        capabilities: list[str] | None = None,
+        full_verification: bool = True,
+        enable_sync_v1: bool | None = None,
+        enable_sync_v2: bool | None = None,
+        checkpoints: list[Checkpoint] | None = None,
+        utxo_index: bool = False,
+        event_manager: EventManager | None = None,
+        use_memory_index: bool | None = None,
+        start_manager: bool = True,
+        pubsub: PubSubManager | None = None,
+        event_storage: EventStorage | None = None,
+        enable_event_queue: bool | None = None,
+        use_memory_storage: bool | None = None
+    ):  # TODO: Add -> HathorManager here. It breaks the lint in a lot of places.
         enable_sync_v1, enable_sync_v2 = self._syncVersionFlags(enable_sync_v1, enable_sync_v2)
 
         builder = self.get_builder(network) \
@@ -203,8 +230,9 @@ def create_peer(self, network, peer_id=None, wallet=None, tx_storage=None, unloc
         if not wallet:
             wallet = self._create_test_wallet()
             if unlock_wallet:
+                assert isinstance(wallet, Wallet)
                 wallet.unlock(b'MYPASS')
-        builder.set_wallet(wallet)
+        builder.set_wallet(not_none(wallet))
 
         if event_storage:
             builder.set_event_storage(event_storage)
@@ -254,7 +282,7 @@ def create_peer(self, network, peer_id=None, wallet=None, tx_storage=None, unloc
 
         return manager
 
-    def run_to_completion(self):
+    def run_to_completion(self) -> None:
         """ This will advance the test's clock until all calls scheduled are done.
         """
         for call in self.clock.getDelayedCalls():
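
The `not_none(...)` calls used here and below come from `hathor.util`. A minimal sketch of what such a narrowing helper looks like, as an illustration rather than the actual source:

```python
from typing import Optional, TypeVar

T = TypeVar('T')

def not_none(value: Optional[T]) -> T:
    # Narrows Optional[T] to T for mypy, failing loudly on None.
    assert value is not None
    return value
```
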
""" for call in self.clock.getDelayedCalls(): @@ -272,12 +300,15 @@ def assertIsTopological(self, tx_sequence: Iterator[BaseTransaction], message: O valid_deps = set(get_all_genesis_hashes(self._settings) if initial is None else initial) for tx in tx_sequence: - assert tx.hash is not None for dep in tx.get_all_dependencies(): self.assertIn(dep, valid_deps, message) valid_deps.add(tx.hash) - def _syncVersionFlags(self, enable_sync_v1=None, enable_sync_v2=None): + def _syncVersionFlags( + self, + enable_sync_v1: bool | None = None, + enable_sync_v2: bool | None = None + ) -> tuple[bool, bool]: """Internal: use this to check and get the flags and optionally provide override values.""" if enable_sync_v1 is None: assert hasattr(self, '_enable_sync_v1'), ('`_enable_sync_v1` has no default by design, either set one on ' @@ -290,19 +321,19 @@ def _syncVersionFlags(self, enable_sync_v1=None, enable_sync_v2=None): assert enable_sync_v1 or enable_sync_v2, 'enable at least one sync version' return enable_sync_v1, enable_sync_v2 - def assertTipsEqual(self, manager1, manager2): + def assertTipsEqual(self, manager1: HathorManager, manager2: HathorManager) -> None: _, enable_sync_v2 = self._syncVersionFlags() if enable_sync_v2: self.assertTipsEqualSyncV2(manager1, manager2) else: self.assertTipsEqualSyncV1(manager1, manager2) - def assertTipsNotEqual(self, manager1, manager2): + def assertTipsNotEqual(self, manager1: HathorManager, manager2: HathorManager) -> None: s1 = set(manager1.tx_storage.get_all_tips()) s2 = set(manager2.tx_storage.get_all_tips()) self.assertNotEqual(s1, s2) - def assertTipsEqualSyncV1(self, manager1, manager2): + def assertTipsEqualSyncV1(self, manager1: HathorManager, manager2: HathorManager) -> None: # XXX: this is the original implementation of assertTipsEqual s1 = set(manager1.tx_storage.get_all_tips()) s2 = set(manager2.tx_storage.get_all_tips()) @@ -312,39 +343,45 @@ def assertTipsEqualSyncV1(self, manager1, manager2): s2 = set(manager2.tx_storage.get_tx_tips()) self.assertEqual(s1, s2) - def assertTipsEqualSyncV2(self, manager1, manager2, *, strict_sync_v2_indexes=True): + def assertTipsEqualSyncV2( + self, + manager1: HathorManager, + manager2: HathorManager, + *, + strict_sync_v2_indexes: bool = True + ) -> None: # tx tips if strict_sync_v2_indexes: - tips1 = manager1.tx_storage.indexes.mempool_tips.get() - tips2 = manager2.tx_storage.indexes.mempool_tips.get() + tips1 = not_none(not_none(manager1.tx_storage.indexes).mempool_tips).get() + tips2 = not_none(not_none(manager2.tx_storage.indexes).mempool_tips).get() else: tips1 = {tx.hash for tx in manager1.tx_storage.iter_mempool_tips_from_best_index()} tips2 = {tx.hash for tx in manager2.tx_storage.iter_mempool_tips_from_best_index()} - self.log.debug('tx tips1', len=len(tips1), list=shorten_hash(tips1)) - self.log.debug('tx tips2', len=len(tips2), list=shorten_hash(tips2)) + self.log.debug('tx tips1', len=len(tips1), list=short_hashes(tips1)) + self.log.debug('tx tips2', len=len(tips2), list=short_hashes(tips2)) self.assertEqual(tips1, tips2) # best block s1 = set(manager1.tx_storage.get_best_block_tips()) s2 = set(manager2.tx_storage.get_best_block_tips()) - self.log.debug('block tips1', len=len(s1), list=shorten_hash(s1)) - self.log.debug('block tips2', len=len(s2), list=shorten_hash(s2)) + self.log.debug('block tips1', len=len(s1), list=short_hashes(s1)) + self.log.debug('block tips2', len=len(s2), list=short_hashes(s2)) self.assertEqual(s1, s2) # best block (from height index) - b1 = 
@@ -358,12 +395,20 @@ def assertConsensusEqualSyncV1(self, manager1, manager2):
             self.assertIsNone(tx2_meta.voided_by)
         else:
             # If tx1 is voided, then tx2 must be voided.
+            assert tx1_meta.voided_by is not None
+            assert tx2_meta.voided_by is not None
             self.assertGreaterEqual(len(tx1_meta.voided_by), 1)
             self.assertGreaterEqual(len(tx2_meta.voided_by), 1)
             # Hard verification
             # self.assertEqual(tx1_meta.voided_by, tx2_meta.voided_by)
 
-    def assertConsensusEqualSyncV2(self, manager1, manager2, *, strict_sync_v2_indexes=True):
+    def assertConsensusEqualSyncV2(
+        self,
+        manager1: HathorManager,
+        manager2: HathorManager,
+        *,
+        strict_sync_v2_indexes: bool = True
+    ) -> None:
         # The current sync algorithm does not propagate voided blocks/txs
         # so the count might be different even though the consensus is equal
         # One peer might have voided txs that the other does not have
@@ -374,13 +419,14 @@ def assertConsensusEqualSyncV2(self, manager1, manager2, *, strict_sync_v2_index
 
         # the following is specific to sync-v2
 
         # helper function:
-        def get_all_executed_or_voided(tx_storage):
+        def get_all_executed_or_voided(
+            tx_storage: TransactionStorage
+        ) -> tuple[set[VertexId], set[VertexId], set[VertexId]]:
             """Get all txs separated into three sets: executed, voided, partial"""
             tx_executed = set()
             tx_voided = set()
             tx_partial = set()
             for tx in tx_storage.get_all_transactions():
-                assert tx.hash is not None
                 tx_meta = tx.get_metadata()
                 if not tx_meta.validation.is_fully_connected():
                     tx_partial.add(tx.hash)
@@ -401,14 +447,16 @@ def get_all_executed_or_voided(tx_storage):
         self.log.debug('node1 rest', len_voided=len(tx_voided1), len_partial=len(tx_partial1))
         self.log.debug('node2 rest', len_voided=len(tx_voided2), len_partial=len(tx_partial2))
 
-    def assertConsensusValid(self, manager):
+    def assertConsensusValid(self, manager: HathorManager) -> None:
         for tx in manager.tx_storage.get_all_transactions():
             if tx.is_block:
+                assert isinstance(tx, Block)
                 self.assertBlockConsensusValid(tx)
             else:
+                assert isinstance(tx, Transaction)
                 self.assertTransactionConsensusValid(tx)
 
-    def assertBlockConsensusValid(self, block):
+    def assertBlockConsensusValid(self, block: Block) -> None:
         self.assertTrue(block.is_block)
         if not block.parents:
             # Genesis
@@ -419,7 +467,8 @@ def assertBlockConsensusValid(self, block):
             parent_meta = parent.get_metadata()
             self.assertIsNone(parent_meta.voided_by)
 
-    def assertTransactionConsensusValid(self, tx):
+    def assertTransactionConsensusValid(self, tx: Transaction) -> None:
+        assert tx.storage is not None
         self.assertFalse(tx.is_block)
         meta = tx.get_metadata()
         if meta.voided_by and tx.hash in meta.voided_by:
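
The subset assertions that follow encode the consensus invariant these helpers check: voidance propagates forward through the DAG, so whatever voids a dependency must also void its spenders. In miniature, with invented hash values:

```python
voided_by_tx_a = {b'conflict-x'}                        # tx_a lost a conflict
voided_by_tx_b = {b'conflict-x', b'tx_b-own-conflict'}  # tx_b spends tx_a, so it inherits 'conflict-x'
assert voided_by_tx_a.issubset(voided_by_tx_b)
```
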
@@ -439,7 +488,7 @@ def assertTransactionConsensusValid(self, tx):
             spent_meta = spent_tx.get_metadata()
 
             if spent_meta.voided_by is not None:
-                self.assertIsNotNone(meta.voided_by)
+                assert meta.voided_by is not None
                 self.assertTrue(spent_meta.voided_by)
                 self.assertTrue(meta.voided_by)
                 self.assertTrue(spent_meta.voided_by.issubset(meta.voided_by))
@@ -447,30 +496,32 @@ def assertTransactionConsensusValid(self, tx):
         for parent in tx.get_parents():
             parent_meta = parent.get_metadata()
             if parent_meta.voided_by is not None:
-                self.assertIsNotNone(meta.voided_by)
+                assert meta.voided_by is not None
                 self.assertTrue(parent_meta.voided_by)
                 self.assertTrue(meta.voided_by)
                 self.assertTrue(parent_meta.voided_by.issubset(meta.voided_by))
 
-    def assertSyncedProgress(self, node_sync):
+    def assertSyncedProgress(self, node_sync: NodeSyncTimestamp | NodeBlockSync) -> None:
         """Check "synced" status of p2p-manager, uses self._enable_sync_vX to choose which check to run."""
         enable_sync_v1, enable_sync_v2 = self._syncVersionFlags()
         if enable_sync_v2:
+            assert isinstance(node_sync, NodeBlockSync)
             self.assertV2SyncedProgress(node_sync)
         elif enable_sync_v1:
+            assert isinstance(node_sync, NodeSyncTimestamp)
             self.assertV1SyncedProgress(node_sync)
 
-    def assertV1SyncedProgress(self, node_sync):
+    def assertV1SyncedProgress(self, node_sync: NodeSyncTimestamp) -> None:
         self.assertEqual(node_sync.synced_timestamp, node_sync.peer_timestamp)
 
-    def assertV2SyncedProgress(self, node_sync):
+    def assertV2SyncedProgress(self, node_sync: NodeBlockSync) -> None:
         self.assertEqual(node_sync.synced_block, node_sync.peer_best_block)
 
-    def clean_tmpdirs(self):
+    def clean_tmpdirs(self) -> None:
         for tmpdir in self.tmpdirs:
             shutil.rmtree(tmpdir)
 
-    def clean_pending(self, required_to_quiesce=True):
+    def clean_pending(self, required_to_quiesce: bool = True) -> None:
         """ This handy method cleans all pending tasks from the reactor.
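
Before the tests/utils.py hunks, note the pattern behind the removed `verification_service.verify(...)` calls throughout this changeset: `propagate_tx(..., fails_silently=False)` raises on an invalid vertex, so tests resolve proof-of-work and propagate directly. Roughly, as a sketch with a hypothetical helper name:

```python
def resolve_and_propagate(manager, tx) -> None:
    manager.cpu_mining_service.resolve(tx)
    manager.propagate_tx(tx, fails_silently=False)  # verifies internally; raises if invalid
```
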
diff --git a/tests/utils.py b/tests/utils.py
index cdcbd7bb2..c72682c4d 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -5,9 +5,10 @@
 import time
 import urllib.parse
 from dataclasses import dataclass
-from typing import Optional, cast
+from typing import Any, Optional
 
 import requests
+from cryptography.hazmat.primitives.asymmetric import ec
 from hathorlib.scripts import DataScript
 from twisted.internet.task import Clock
 
@@ -19,11 +20,11 @@
 from hathor.manager import HathorManager
 from hathor.mining.cpu_mining_service import CpuMiningService
 from hathor.simulator.utils import add_new_block, add_new_blocks, gen_new_double_spending, gen_new_tx
-from hathor.transaction import BaseTransaction, Transaction, TxInput, TxOutput
+from hathor.transaction import BaseTransaction, Block, Transaction, TxInput, TxOutput
 from hathor.transaction.scripts import P2PKH, HathorScript, Opcode, parse_address_script
 from hathor.transaction.token_creation_tx import TokenCreationTransaction
 from hathor.transaction.util import get_deposit_amount
-from hathor.util import Random, not_none
+from hathor.util import Random
 
 try:
     import rocksdb  # noqa: F401
@@ -72,7 +73,6 @@ def gen_custom_tx(manager: HathorManager, tx_inputs: list[tuple[BaseTransaction,
     value = 0
     parents = []
     for tx_base, txout_index in tx_inputs:
-        assert tx_base.hash is not None
         spent_tx = tx_base
         spent_txout = spent_tx.outputs[txout_index]
         p2pkh = parse_address_script(spent_txout.script)
@@ -80,7 +80,6 @@ def gen_custom_tx(manager: HathorManager, tx_inputs: list[tuple[BaseTransaction,
         from hathor.wallet.base_wallet import WalletInputInfo, WalletOutputInfo
 
         value += spent_txout.value
-        assert spent_tx.hash is not None
         private_key = wallet.get_private_key(p2pkh.address)
         inputs.append(WalletInputInfo(tx_id=spent_tx.hash, index=txout_index, private_key=private_key))
         if not tx_base.is_block:
@@ -109,7 +108,6 @@ def gen_custom_tx(manager: HathorManager, tx_inputs: list[tuple[BaseTransaction,
     tx2.parents = parents[:2]
     if len(tx2.parents) < 2:
         if base_parent:
-            assert base_parent.hash is not None
             tx2.parents.append(base_parent.hash)
         elif not tx_base.is_block:
             tx2.parents.append(tx_base.parents[0])
@@ -134,7 +132,13 @@ def add_new_double_spending(manager: HathorManager, *, use_same_parents: bool =
     return tx
 
 
-def add_new_tx(manager, address, value, advance_clock=None, propagate=True):
+def add_new_tx(
+    manager: HathorManager,
+    address: str,
+    value: int,
+    advance_clock: int | None = None,
+    propagate: bool = True
+) -> Transaction:
     """ Create, resolve and propagate a new tx
 
     :param manager: Manager object to handle the creation
@@ -153,11 +157,16 @@ def add_new_tx(manager, address, value, advance_clock=None, propagate=True):
     if propagate:
         manager.propagate_tx(tx, fails_silently=False)
     if advance_clock:
-        manager.reactor.advance(advance_clock)
+        manager.reactor.advance(advance_clock)  # type: ignore[attr-defined]
     return tx
 
 
-def add_new_transactions(manager, num_txs, advance_clock=None, propagate=True):
+def add_new_transactions(
+    manager: HathorManager,
+    num_txs: int,
+    advance_clock: int | None = None,
+    propagate: bool = True
+) -> list[Transaction]:
     """ Create, resolve and propagate some transactions
 
     :param manager: Manager object to handle the creation
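
A hedged usage sketch of the now-typed helpers above; the address literal is a placeholder, not a real address:

```python
tx = add_new_tx(manager, address='<base58-address>', value=100, advance_clock=1)
txs = add_new_transactions(manager, num_txs=5, advance_clock=1)
```
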
@@ -178,7 +187,7 @@ def add_new_transactions(manager, num_txs, advance_clock=None, propagate=True):
     return txs
 
 
-def add_blocks_unlock_reward(manager):
+def add_blocks_unlock_reward(manager: HathorManager) -> list[Block]:
     """This method adds new blocks to a 'burn address' to make sure the existing block
     rewards can be spent. It uses a 'burn address' so the manager's wallet is not impacted.
@@ -186,7 +195,14 @@
     return add_new_blocks(manager, settings.REWARD_SPEND_MIN_BLOCKS, advance_clock=1, address=BURN_ADDRESS)
 
 
-def run_server(hostname='localhost', listen=8005, status=8085, bootstrap=None, tries=100, alive_for_at_least_sec=3):
+def run_server(
+    hostname: str = 'localhost',
+    listen: int = 8005,
+    status: int = 8085,
+    bootstrap: str | None = None,
+    tries: int = 100,
+    alive_for_at_least_sec: int = 3
+) -> subprocess.Popen[bytes]:
     """ Starts a full node in a subprocess running the cli command
 
     :param hostname: Hostname used to be accessed by other peers
@@ -249,7 +265,14 @@ def run_server(hostname='localhost', listen=8005, status=8085, bootstrap=None, t
     return process
 
 
-def request_server(path, method, host='http://localhost', port=8085, data=None, prefix=settings.API_VERSION_PREFIX):
+def request_server(
+    path: str,
+    method: str,
+    host: str = 'http://localhost',
+    port: int = 8085,
+    data: dict[str, Any] | None = None,
+    prefix: str = settings.API_VERSION_PREFIX
+) -> dict[str, Any]:
     """ Execute a request for status server
 
     :param path: Url path of the request
@@ -280,11 +303,18 @@ def request_server(path, method, host='http://localhost', port=8085, data=None,
         response = requests.put(url, json=data)
     else:
         raise ValueError('Unsuported method')
-    return response.json()
-
-
-def execute_mining(path='mining', *, count, host='http://localhost', port=8085, data=None,
-                   prefix=settings.API_VERSION_PREFIX):
+    json_response: dict[str, Any] = response.json()
+    return json_response
+
+
+def execute_mining(
+    path: str = 'mining',
+    *,
+    count: int,
+    host: str = 'http://localhost',
+    port: int = 8085,
+    prefix: str = settings.API_VERSION_PREFIX
+) -> None:
     """Execute a mining on a given server"""
     from hathor.cli.mining import create_parser, execute
     partial_url = '{}:{}/{}/'.format(host, port, prefix)
@@ -294,8 +324,16 @@ def execute_mining(path='mining', *, count, host='http://localhost', port=8085,
     execute(args)
 
 
-def execute_tx_gen(*, count, address=None, value=None, timestamp=None, host='http://localhost', port=8085, data=None,
-                   prefix=settings.API_VERSION_PREFIX):
+def execute_tx_gen(
+    *,
+    count: int,
+    address: str | None = None,
+    value: int | None = None,
+    timestamp: str | None = None,
+    host: str = 'http://localhost',
+    port: int = 8085,
+    prefix: str = settings.API_VERSION_PREFIX
+) -> None:
     """Execute a tx generator on a given server"""
     from hathor.cli.tx_generator import create_parser, execute
     url = '{}:{}/{}/'.format(host, port, prefix)
@@ -311,7 +349,7 @@ def execute_tx_gen(*, count, address=None, value=None, timestamp=None, host='htt
     execute(args)
 
 
-def get_genesis_key():
+def get_genesis_key() -> ec.EllipticCurvePrivateKeyWithSerialization:
     private_key_bytes = base64.b64decode(
         'MIGEAgEAMBAGByqGSM49AgEGBSuBBAAKBG0wawIBAQQgOCgCddzDZsfKgiMJLOt97eov9RLwHeePyBIK2WPF8MChRA'
         'NCAAQ/XSOK+qniIY0F3X+lDrb55VQx5jWeBLhhzZnH6IzGVTtlAj9Ki73DVBm5+VXK400Idd6ddzS7FahBYYC7IaTl'
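
A hedged usage sketch for the server helpers above; the 'version' endpoint is assumed for illustration only:

```python
process = run_server(listen=8005, status=8085)
try:
    info = request_server('version', 'GET', port=8085)  # endpoint path is an assumption
    print(info)
finally:
    process.terminate()
```
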
@@ -378,14 +416,14 @@ def create_tokens(manager: 'HathorManager', address_b58: Optional[str] = None, m
         assert genesis_hash is not None
         deposit_input = [TxInput(genesis_hash, 0, b'')]
         change_output = TxOutput(genesis_block.outputs[0].value - deposit_amount, script, 0)
-        parents = [cast(bytes, tx.hash) for tx in genesis_txs]
+        parents = [tx.hash for tx in genesis_txs]
         timestamp = int(manager.reactor.seconds())
     else:
         total_reward = 0
         deposit_input = []
         while total_reward < deposit_amount:
             block = add_new_block(manager, advance_clock=1, address=address)
-            deposit_input.append(TxInput(not_none(block.hash), 0, b''))
+            deposit_input.append(TxInput(block.hash, 0, b''))
             total_reward += block.outputs[0].value
 
     if total_reward > deposit_amount:
@@ -434,7 +472,6 @@ def create_tokens(manager: 'HathorManager', address_b58: Optional[str] = None, m
     manager.cpu_mining_service.resolve(tx)
     if propagate:
-        manager.verification_service.verify(tx)
         manager.propagate_tx(tx, fails_silently=False)
         assert isinstance(manager.reactor, Clock)
         manager.reactor.advance(8)
@@ -475,7 +512,7 @@ def add_tx_with_data_script(manager: 'HathorManager', data: list[str], propagate
     burn_input = []
     while total_reward < burn_amount:
         block = add_new_block(manager, advance_clock=1, address=address)
-        burn_input.append(TxInput(not_none(block.hash), 0, b''))
+        burn_input.append(TxInput(block.hash, 0, b''))
         total_reward += block.outputs[0].value
 
     # Create the change output, if needed
@@ -539,6 +576,7 @@ class EventMocker:
         hash='abc',
         nonce=123,
         timestamp=456,
+        signal_bits=0,
         version=1,
         weight=10,
         inputs=[],
diff --git a/tests/wallet/test_balance_update.py b/tests/wallet/test_balance_update.py
index 01bc0e337..c4e5981e0 100644
--- a/tests/wallet/test_balance_update.py
+++ b/tests/wallet/test_balance_update.py
@@ -426,7 +426,6 @@ def test_tokens_balance(self):
         )
         tx2.inputs[0].data = P2PKH.create_input_data(public_bytes, signature)
         self.manager.cpu_mining_service.resolve(tx2)
-        self.manager.verification_service.verify(tx2)
         self.manager.propagate_tx(tx2)
         self.run_to_completion()
 
         # verify balance
diff --git a/tests/wallet/test_wallet.py b/tests/wallet/test_wallet.py
index f962b731f..ab87d299e 100644
--- a/tests/wallet/test_wallet.py
+++ b/tests/wallet/test_wallet.py
@@ -206,6 +206,7 @@ def test_create_token_transaction(self):
         tx2.timestamp = tx.timestamp + 1
         tx2.parents = self.manager.get_new_tx_parents()
         self.manager.cpu_mining_service.resolve(tx2)
+        tx2.update_reward_lock_metadata()
         self.manager.verification_service.verify(tx2)
 
         self.assertNotEqual(len(tx2.inputs), 0)
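
One closing note on the exception-handling change in tests/tx/test_tx.py at the top of this range: the test now expects `InvalidNewTransaction` and checks that the original `TransactionDataError` survives as its `__cause__`. The general pytest pattern, as a self-contained sketch with invented exception types:

```python
import pytest

class InnerError(Exception):
    pass

class OuterError(Exception):
    pass

def propagate() -> None:
    try:
        raise InnerError('original failure')
    except InnerError as e:
        raise OuterError('wrapped for the caller') from e

def test_cause_is_preserved() -> None:
    with pytest.raises(OuterError) as e:
        propagate()
    assert isinstance(e.value.__cause__, InnerError)  # `raise ... from ...` sets __cause__
```
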