From 1a1d5c86c7a4ad34dfb773709a2d254714fd51d3 Mon Sep 17 00:00:00 2001 From: Gabriel Levcovitz Date: Thu, 14 Mar 2024 13:58:46 -0300 Subject: [PATCH] refactor(mypy): add stricter rules to p2p tests --- hathor/builder/builder.py | 7 +- hathor/manager.py | 4 +- pyproject.toml | 2 +- tests/p2p/netfilter/test_factory.py | 6 +- tests/p2p/netfilter/test_match.py | 50 +++++++------- tests/p2p/netfilter/test_match_remote.py | 2 +- tests/p2p/netfilter/test_tables.py | 6 +- tests/p2p/netfilter/test_utils.py | 2 +- tests/p2p/test_capabilities.py | 13 +++- tests/p2p/test_connections.py | 4 +- tests/p2p/test_double_spending.py | 21 +++--- tests/p2p/test_get_best_blockchain.py | 63 ++++++++---------- tests/p2p/test_peer_id.py | 69 ++++++++++--------- tests/p2p/test_protocol.py | 84 +++++++++++------------- tests/p2p/test_rate_limiter.py | 7 +- tests/p2p/test_split_brain.py | 33 ++++------ tests/p2p/test_split_brain2.py | 2 +- tests/p2p/test_sync.py | 53 +++++++-------- tests/p2p/test_sync_bridge.py | 4 +- tests/p2p/test_sync_enabled.py | 4 +- tests/p2p/test_sync_mempool.py | 18 ++--- tests/p2p/test_sync_rate_limiter.py | 81 +++++++++++++---------- tests/p2p/test_sync_v2.py | 59 ++++++++++------- tests/p2p/test_twin_tx.py | 7 +- tests/p2p/test_whitelist.py | 6 +- tests/unittest.py | 54 ++++++++------- 26 files changed, 348 insertions(+), 313 deletions(-) diff --git a/hathor/builder/builder.py b/hathor/builder/builder.py index 7636aa2b3..422373b2d 100644 --- a/hathor/builder/builder.py +++ b/hathor/builder/builder.py @@ -46,6 +46,7 @@ TransactionRocksDBStorage, TransactionStorage, ) +from hathor.transaction.storage.transaction_storage import BaseTransactionStorage from hathor.util import Random, get_environment_info, not_none from hathor.verification.verification_service import VerificationService from hathor.verification.vertex_verifiers import VertexVerifiers @@ -131,7 +132,7 @@ def __init__(self) -> None: self._tx_storage_cache_capacity: Optional[int] = None 
self._indexes_manager: Optional[IndexesManager] = None - self._tx_storage: Optional[TransactionStorage] = None + self._tx_storage: Optional[BaseTransactionStorage] = None self._event_storage: Optional[EventStorage] = None self._reactor: Optional[Reactor] = None @@ -393,7 +394,7 @@ def _get_or_create_indexes_manager(self) -> IndexesManager: return self._indexes_manager - def _get_or_create_tx_storage(self) -> TransactionStorage: + def _get_or_create_tx_storage(self) -> BaseTransactionStorage: indexes = self._get_or_create_indexes_manager() if self._tx_storage is not None: @@ -616,7 +617,7 @@ def enable_event_queue(self) -> 'Builder': self._enable_event_queue = True return self - def set_tx_storage(self, tx_storage: TransactionStorage) -> 'Builder': + def set_tx_storage(self, tx_storage: BaseTransactionStorage) -> 'Builder': self.check_if_can_modify() self._tx_storage = tx_storage return self diff --git a/hathor/manager.py b/hathor/manager.py index da1bdb42a..ec92cbf6b 100644 --- a/hathor/manager.py +++ b/hathor/manager.py @@ -57,8 +57,8 @@ from hathor.stratum import StratumFactory from hathor.transaction import BaseTransaction, Block, MergeMinedBlock, Transaction, TxVersion, sum_weights from hathor.transaction.exceptions import TxValidationError -from hathor.transaction.storage import TransactionStorage from hathor.transaction.storage.exceptions import TransactionDoesNotExist +from hathor.transaction.storage.transaction_storage import BaseTransactionStorage from hathor.transaction.storage.tx_allow_scope import TxAllowScope from hathor.types import Address, VertexId from hathor.util import EnvironmentInfo, LogDuration, Random, calculate_min_significant_weight, not_none @@ -97,7 +97,7 @@ def __init__(self, consensus_algorithm: ConsensusAlgorithm, daa: DifficultyAdjustmentAlgorithm, peer_id: PeerId, - tx_storage: TransactionStorage, + tx_storage: BaseTransactionStorage, p2p_manager: ConnectionsManager, event_manager: EventManager, feature_service: FeatureService, diff 
--git a/pyproject.toml b/pyproject.toml index df829fd9f..371317680 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -162,7 +162,7 @@ module = [ "tests.event.*", "tests.execution_manager.*", "tests.feature_activation.*", -# "tests.p2p.*", + "tests.p2p.*", "tests.pubsub.*", "tests.simulation.*", ] diff --git a/tests/p2p/netfilter/test_factory.py b/tests/p2p/netfilter/test_factory.py index 2dc4d5cde..53ca409c8 100644 --- a/tests/p2p/netfilter/test_factory.py +++ b/tests/p2p/netfilter/test_factory.py @@ -1,3 +1,5 @@ +from unittest.mock import Mock + from twisted.internet.address import IPv4Address from hathor.p2p.netfilter import get_table @@ -10,7 +12,7 @@ class NetfilterFactoryTest(unittest.TestCase): - def test_factory(self): + def test_factory(self) -> None: pre_conn = get_table('filter').get_chain('pre_conn') match = NetfilterMatchIPAddress('192.168.0.1/32') @@ -20,7 +22,7 @@ def test_factory(self): builder = TestBuilder() artifacts = builder.build() wrapped_factory = artifacts.p2p_manager.server_factory - factory = NetfilterFactory(connections=None, wrappedFactory=wrapped_factory) + factory = NetfilterFactory(connections=Mock(), wrappedFactory=wrapped_factory) ret = factory.buildProtocol(IPv4Address('TCP', '192.168.0.1', 1234)) self.assertIsNone(ret) diff --git a/tests/p2p/netfilter/test_match.py b/tests/p2p/netfilter/test_match.py index ce59c28dd..39bb844fe 100644 --- a/tests/p2p/netfilter/test_match.py +++ b/tests/p2p/netfilter/test_match.py @@ -22,7 +22,7 @@ def match(self, context: 'NetfilterContext') -> bool: class NetfilterMatchTest(unittest.TestCase): - def test_match_all(self): + def test_match_all(self) -> None: matcher = NetfilterMatchAll() context = NetfilterContext() self.assertTrue(matcher.match(context)) @@ -31,7 +31,7 @@ def test_match_all(self): json = matcher.to_json() self.assertEqual(json['type'], 'NetfilterMatchAll') - def test_never_match(self): + def test_never_match(self) -> None: matcher = NetfilterNeverMatch() context = 
NetfilterContext() self.assertFalse(matcher.match(context)) @@ -40,14 +40,14 @@ def test_never_match(self): json = matcher.to_json() self.assertEqual(json['type'], 'NetfilterNeverMatch') - def test_match_and_success(self): + def test_match_and_success(self) -> None: m1 = NetfilterMatchAll() m2 = NetfilterMatchAll() matcher = NetfilterMatchAnd(m1, m2) context = NetfilterContext() self.assertTrue(matcher.match(context)) - def test_match_and_fail_01(self): + def test_match_and_fail_01(self) -> None: m1 = NetfilterNeverMatch() m2 = NetfilterMatchAll() matcher = NetfilterMatchAnd(m1, m2) @@ -60,28 +60,28 @@ def test_match_and_fail_01(self): self.assertEqual(json['match_params']['a']['type'], 'NetfilterNeverMatch') self.assertEqual(json['match_params']['b']['type'], 'NetfilterMatchAll') - def test_match_and_fail_10(self): + def test_match_and_fail_10(self) -> None: m1 = NetfilterMatchAll() m2 = NetfilterNeverMatch() matcher = NetfilterMatchAnd(m1, m2) context = NetfilterContext() self.assertFalse(matcher.match(context)) - def test_match_and_fail_00(self): + def test_match_and_fail_00(self) -> None: m1 = NetfilterNeverMatch() m2 = NetfilterNeverMatch() matcher = NetfilterMatchAnd(m1, m2) context = NetfilterContext() self.assertFalse(matcher.match(context)) - def test_match_or_success_11(self): + def test_match_or_success_11(self) -> None: m1 = NetfilterMatchAll() m2 = NetfilterMatchAll() matcher = NetfilterMatchOr(m1, m2) context = NetfilterContext() self.assertTrue(matcher.match(context)) - def test_match_or_success_10(self): + def test_match_or_success_10(self) -> None: m1 = NetfilterMatchAll() m2 = NetfilterNeverMatch() matcher = NetfilterMatchOr(m1, m2) @@ -94,21 +94,21 @@ def test_match_or_success_10(self): self.assertEqual(json['match_params']['a']['type'], 'NetfilterMatchAll') self.assertEqual(json['match_params']['b']['type'], 'NetfilterNeverMatch') - def test_match_or_success_01(self): + def test_match_or_success_01(self) -> None: m1 = NetfilterNeverMatch() m2 = 
NetfilterMatchAll() matcher = NetfilterMatchOr(m1, m2) context = NetfilterContext() self.assertTrue(matcher.match(context)) - def test_match_or_fail_00(self): + def test_match_or_fail_00(self) -> None: m1 = NetfilterNeverMatch() m2 = NetfilterNeverMatch() matcher = NetfilterMatchOr(m1, m2) context = NetfilterContext() self.assertFalse(matcher.match(context)) - def test_match_ip_address_empty_context(self): + def test_match_ip_address_empty_context(self) -> None: matcher = NetfilterMatchIPAddress('192.168.0.0/24') context = NetfilterContext() self.assertFalse(matcher.match(context)) @@ -118,7 +118,7 @@ def test_match_ip_address_empty_context(self): self.assertEqual(json['type'], 'NetfilterMatchIPAddress') self.assertEqual(json['match_params']['host'], '192.168.0.0/24') - def test_match_ip_address_ipv4_net(self): + def test_match_ip_address_ipv4_net(self) -> None: matcher = NetfilterMatchIPAddress('192.168.0.0/24') context = NetfilterContext(addr=IPv4Address('TCP', '192.168.0.10', 1234)) self.assertTrue(matcher.match(context)) @@ -129,7 +129,7 @@ def test_match_ip_address_ipv4_net(self): context = NetfilterContext(addr=IPv4Address('TCP', '', 1234)) self.assertFalse(matcher.match(context)) - def test_match_ip_address_ipv4_ip(self): + def test_match_ip_address_ipv4_ip(self) -> None: matcher = NetfilterMatchIPAddress('192.168.0.1/32') context = NetfilterContext(addr=IPv4Address('TCP', '192.168.0.1', 1234)) self.assertTrue(matcher.match(context)) @@ -138,24 +138,24 @@ def test_match_ip_address_ipv4_ip(self): context = NetfilterContext(addr=IPv4Address('TCP', '', 1234)) self.assertFalse(matcher.match(context)) - def test_match_ip_address_ipv4_hostname(self): + def test_match_ip_address_ipv4_hostname(self) -> None: matcher = NetfilterMatchIPAddress('192.168.0.1/32') - context = NetfilterContext(addr=HostnameAddress('hathor.network', 80)) + context = NetfilterContext(addr=HostnameAddress(b'hathor.network', 80)) self.assertFalse(matcher.match(context)) - def 
test_match_ip_address_ipv4_unix(self): + def test_match_ip_address_ipv4_unix(self) -> None: matcher = NetfilterMatchIPAddress('192.168.0.1/32') context = NetfilterContext(addr=UNIXAddress('/unix.sock')) self.assertFalse(matcher.match(context)) - def test_match_ip_address_ipv4_ipv6(self): + def test_match_ip_address_ipv4_ipv6(self) -> None: matcher = NetfilterMatchIPAddress('192.168.0.1/32') context = NetfilterContext(addr=IPv6Address('TCP', '2001:db8::', 80)) self.assertFalse(matcher.match(context)) context = NetfilterContext(addr=IPv6Address('TCP', '', 80)) self.assertFalse(matcher.match(context)) - def test_match_ip_address_ipv6_net(self): + def test_match_ip_address_ipv6_net(self) -> None: matcher = NetfilterMatchIPAddress('2001:0db8:0:f101::/64') context = NetfilterContext(addr=IPv6Address('TCP', '2001:db8::8a2e:370:7334', 1234)) self.assertFalse(matcher.match(context)) @@ -167,7 +167,7 @@ def test_match_ip_address_ipv6_net(self): self.assertEqual(json['type'], 'NetfilterMatchIPAddress') self.assertEqual(json['match_params']['host'], str(ip_network('2001:0db8:0:f101::/64'))) - def test_match_ip_address_ipv6_ip(self): + def test_match_ip_address_ipv6_ip(self) -> None: matcher = NetfilterMatchIPAddress('2001:0db8:0:f101::1/128') context = NetfilterContext(addr=IPv6Address('TCP', '2001:db8:0:f101::1', 1234)) self.assertTrue(matcher.match(context)) @@ -176,22 +176,22 @@ def test_match_ip_address_ipv6_ip(self): context = NetfilterContext(addr=IPv6Address('TCP', '2001:db8:0:f101:2::7334', 1234)) self.assertFalse(matcher.match(context)) - def test_match_ip_address_ipv6_hostname(self): + def test_match_ip_address_ipv6_hostname(self) -> None: matcher = NetfilterMatchIPAddress('2001:0db8:0:f101::1/128') - context = NetfilterContext(addr=HostnameAddress('hathor.network', 80)) + context = NetfilterContext(addr=HostnameAddress(b'hathor.network', 80)) self.assertFalse(matcher.match(context)) - def test_match_ip_address_ipv6_unix(self): + def 
test_match_ip_address_ipv6_unix(self) -> None: matcher = NetfilterMatchIPAddress('2001:0db8:0:f101::1/128') context = NetfilterContext(addr=UNIXAddress('/unix.sock')) self.assertFalse(matcher.match(context)) - def test_match_ip_address_ipv6_ipv4(self): + def test_match_ip_address_ipv6_ipv4(self) -> None: matcher = NetfilterMatchIPAddress('2001:0db8:0:f101::1/128') context = NetfilterContext(addr=IPv4Address('TCP', '192.168.0.1', 1234)) self.assertFalse(matcher.match(context)) - def test_match_peer_id_empty_context(self): + def test_match_peer_id_empty_context(self) -> None: matcher = NetfilterMatchPeerId('123') context = NetfilterContext() self.assertFalse(matcher.match(context)) @@ -200,7 +200,7 @@ def test_match_peer_id_empty_context(self): class BaseNetfilterMatchTest(unittest.TestCase): __test__ = False - def test_match_peer_id(self): + def test_match_peer_id(self) -> None: network = 'testnet' peer_id1 = PeerId() peer_id2 = PeerId() diff --git a/tests/p2p/netfilter/test_match_remote.py b/tests/p2p/netfilter/test_match_remote.py index 89df4acf7..1947f39be 100644 --- a/tests/p2p/netfilter/test_match_remote.py +++ b/tests/p2p/netfilter/test_match_remote.py @@ -6,7 +6,7 @@ class NetfilterMatchRemoteTest(unittest.TestCase): - def test_match_ip(self): + def test_match_ip(self) -> None: matcher = NetfilterMatchIPAddressRemoteURL('test', self.clock, 'http://localhost:8080') context = NetfilterContext(addr=IPv4Address('TCP', '192.168.0.1', 1234)) self.assertFalse(matcher.match(context)) diff --git a/tests/p2p/netfilter/test_tables.py b/tests/p2p/netfilter/test_tables.py index 6c845ec5e..a3505aa01 100644 --- a/tests/p2p/netfilter/test_tables.py +++ b/tests/p2p/netfilter/test_tables.py @@ -6,17 +6,17 @@ class NetfilterTableTest(unittest.TestCase): - def test_default_table_filter(self): + def test_default_table_filter(self) -> None: tb_filter = get_table('filter') tb_filter.get_chain('pre_conn') tb_filter.get_chain('post_hello') tb_filter.get_chain('post_peerid') - def 
test_default_table_not_exists(self): + def test_default_table_not_exists(self) -> None: with self.assertRaises(KeyError): get_table('do-not-exists') - def test_add_get_chain(self): + def test_add_get_chain(self) -> None: mytable = NetfilterTable('mytable') mychain = NetfilterChain('mychain', NetfilterAccept()) mytable.add_chain(mychain) diff --git a/tests/p2p/netfilter/test_utils.py b/tests/p2p/netfilter/test_utils.py index cde078af0..127cf9b0d 100644 --- a/tests/p2p/netfilter/test_utils.py +++ b/tests/p2p/netfilter/test_utils.py @@ -4,7 +4,7 @@ class NetfilterUtilsTest(unittest.TestCase): - def test_peer_id_blacklist(self): + def test_peer_id_blacklist(self) -> None: post_peerid = get_table('filter').get_chain('post_peerid') # Chain starts empty diff --git a/tests/p2p/test_capabilities.py b/tests/p2p/test_capabilities.py index 0380abaf4..022fb1fc6 100644 --- a/tests/p2p/test_capabilities.py +++ b/tests/p2p/test_capabilities.py @@ -1,3 +1,4 @@ +from hathor.p2p.states import ReadyState from hathor.p2p.sync_v1.agent import NodeSyncTimestamp from hathor.p2p.sync_v2.agent import NodeBlockSync from hathor.simulator import FakeConnection @@ -5,7 +6,7 @@ class SyncV1HathorCapabilitiesTestCase(unittest.SyncV1Params, unittest.TestCase): - def test_capabilities(self): + def test_capabilities(self) -> None: network = 'testnet' manager1 = self.create_peer(network, capabilities=[self._settings.CAPABILITY_WHITELIST]) manager2 = self.create_peer(network, capabilities=[]) @@ -18,6 +19,8 @@ def test_capabilities(self): self.clock.advance(0.1) # Even if we don't have the capability we must connect because the whitelist url conf is None + assert isinstance(conn._proto1.state, ReadyState) + assert isinstance(conn._proto2.state, ReadyState) self.assertEqual(conn._proto1.state.state_name, 'READY') self.assertEqual(conn._proto2.state.state_name, 'READY') self.assertIsInstance(conn._proto1.state.sync_agent, NodeSyncTimestamp) @@ -33,6 +36,8 @@ def test_capabilities(self): 
conn2.run_one_step(debug=True) self.clock.advance(0.1) + assert isinstance(conn2._proto1.state, ReadyState) + assert isinstance(conn2._proto2.state, ReadyState) self.assertEqual(conn2._proto1.state.state_name, 'READY') self.assertEqual(conn2._proto2.state.state_name, 'READY') self.assertIsInstance(conn2._proto1.state.sync_agent, NodeSyncTimestamp) @@ -40,7 +45,7 @@ def test_capabilities(self): class SyncV2HathorCapabilitiesTestCase(unittest.SyncV2Params, unittest.TestCase): - def test_capabilities(self): + def test_capabilities(self) -> None: network = 'testnet' manager1 = self.create_peer(network, capabilities=[self._settings.CAPABILITY_WHITELIST, self._settings.CAPABILITY_SYNC_VERSION]) @@ -54,6 +59,8 @@ def test_capabilities(self): self.clock.advance(0.1) # Even if we don't have the capability we must connect because the whitelist url conf is None + assert isinstance(conn._proto1.state, ReadyState) + assert isinstance(conn._proto2.state, ReadyState) self.assertEqual(conn._proto1.state.state_name, 'READY') self.assertEqual(conn._proto2.state.state_name, 'READY') self.assertIsInstance(conn._proto1.state.sync_agent, NodeBlockSync) @@ -71,6 +78,8 @@ def test_capabilities(self): conn2.run_one_step(debug=True) self.clock.advance(0.1) + assert isinstance(conn2._proto1.state, ReadyState) + assert isinstance(conn2._proto2.state, ReadyState) self.assertEqual(conn2._proto1.state.state_name, 'READY') self.assertEqual(conn2._proto2.state.state_name, 'READY') self.assertIsInstance(conn2._proto1.state.sync_agent, NodeBlockSync) diff --git a/tests/p2p/test_connections.py b/tests/p2p/test_connections.py index 03f56358f..c75abea7e 100644 --- a/tests/p2p/test_connections.py +++ b/tests/p2p/test_connections.py @@ -8,7 +8,7 @@ class ConnectionsTest(unittest.TestCase): @pytest.mark.skipif(sys.platform == 'win32', reason='run_server is very finicky on Windows') - def test_connections(self): + def test_connections(self) -> None: process = run_server() process2 = run_server(listen=8006, 
status=8086, bootstrap='tcp://127.0.0.1:8005') process3 = run_server(listen=8007, status=8087, bootstrap='tcp://127.0.0.1:8005') @@ -17,7 +17,7 @@ def test_connections(self): process2.terminate() process3.terminate() - def test_manager_connections(self): + def test_manager_connections(self) -> None: manager = self.create_peer('testnet', enable_sync_v1=True, enable_sync_v2=False) endpoint = 'tcp://127.0.0.1:8005' diff --git a/tests/p2p/test_double_spending.py b/tests/p2p/test_double_spending.py index 9eb408ee2..f3f908a68 100644 --- a/tests/p2p/test_double_spending.py +++ b/tests/p2p/test_double_spending.py @@ -1,5 +1,9 @@ +from unittest.mock import Mock + from hathor.crypto.util import decode_address +from hathor.manager import HathorManager from hathor.simulator.utils import add_new_blocks +from hathor.transaction import Transaction from tests import unittest from tests.utils import add_blocks_unlock_reward, add_new_tx @@ -7,7 +11,7 @@ class BaseHathorSyncMethodsTestCase(unittest.TestCase): __test__ = False - def setUp(self): + def setUp(self) -> None: super().setUp() self.network = 'testnet' @@ -16,7 +20,7 @@ def setUp(self): self.genesis = self.manager1.tx_storage.get_all_genesis() self.genesis_blocks = [tx for tx in self.genesis if tx.is_block] - def _add_new_transactions(self, manager, num_txs): + def _add_new_transactions(self, manager: HathorManager, num_txs: int) -> list[Transaction]: txs = [] for _ in range(num_txs): address = self.get_address(0) @@ -25,7 +29,7 @@ def _add_new_transactions(self, manager, num_txs): txs.append(tx) return txs - def test_simple_double_spending(self): + def test_simple_double_spending(self) -> None: add_new_blocks(self.manager1, 5, advance_clock=15) add_blocks_unlock_reward(self.manager1) @@ -33,6 +37,7 @@ def test_simple_double_spending(self): from hathor.wallet.base_wallet import WalletOutputInfo address = self.get_address(0) + assert address is not None value = 500 outputs = [] @@ -125,7 +130,7 @@ def 
test_simple_double_spending(self): self.assertConsensusValid(self.manager1) - def test_double_spending_propagation(self): + def test_double_spending_propagation(self) -> None: blocks = add_new_blocks(self.manager1, 4, advance_clock=15) add_blocks_unlock_reward(self.manager1) @@ -165,7 +170,7 @@ def test_double_spending_propagation(self): outputs = [WalletOutputInfo(address=address, value=value, timelock=None), WalletOutputInfo(address=address, value=tx_total_value - 500, timelock=None)] self.clock.advance(1) - inputs = [WalletInputInfo(i.tx_id, i.index, b'') for i in tx1.inputs] + inputs = [WalletInputInfo(i.tx_id, i.index, Mock()) for i in tx1.inputs] tx4 = self.manager1.wallet.prepare_transaction_incomplete_inputs(Transaction, inputs, outputs, self.manager1.tx_storage) tx4.weight = 5 @@ -186,7 +191,7 @@ def test_double_spending_propagation(self): address = self.manager1.wallet.get_unused_address_bytes() value = 100 - inputs = [WalletInputInfo(tx_id=tx1.hash, index=1, private_key=None)] + inputs = [WalletInputInfo(tx_id=tx1.hash, index=1, private_key=Mock())] outputs = [WalletOutputInfo(address=address, value=int(value), timelock=None)] self.clock.advance(1) tx2 = self.manager1.wallet.prepare_transaction_incomplete_inputs(Transaction, inputs, outputs, @@ -236,7 +241,7 @@ def test_double_spending_propagation(self): address = self.manager1.wallet.get_unused_address_bytes() value = 500 - inputs = [WalletInputInfo(tx_id=tx4.hash, index=0, private_key=None)] + inputs = [WalletInputInfo(tx_id=tx4.hash, index=0, private_key=Mock())] outputs = [WalletOutputInfo(address=address, value=int(value), timelock=None)] self.clock.advance(1) tx5 = self.manager1.wallet.prepare_transaction_incomplete_inputs(Transaction, inputs, outputs, force=True, @@ -273,7 +278,7 @@ def test_double_spending_propagation(self): address = self.manager1.wallet.get_unused_address_bytes() value = blocks[3].outputs[0].value - inputs = [WalletInputInfo(tx_id=blocks[3].hash, index=0, private_key=None)] + 
inputs = [WalletInputInfo(tx_id=blocks[3].hash, index=0, private_key=Mock())] outputs = [WalletOutputInfo(address=address, value=value, timelock=None)] self.clock.advance(1) tx7 = self.manager1.wallet.prepare_transaction_incomplete_inputs(Transaction, inputs, outputs, diff --git a/tests/p2p/test_get_best_blockchain.py b/tests/p2p/test_get_best_blockchain.py index 4d00ea55b..5e91cf138 100644 --- a/tests/p2p/test_get_best_blockchain.py +++ b/tests/p2p/test_get_best_blockchain.py @@ -1,4 +1,4 @@ -from twisted.internet.defer import inlineCallbacks +from twisted.internet.protocol import Protocol from hathor.indexes.height_index import HeightInfo from hathor.p2p.messages import ProtocolMessages @@ -17,18 +17,15 @@ class BaseGetBestBlockchainTestCase(SimulatorTestCase): seed_config = 6 - def _send_cmd(self, proto, cmd, payload=None): + def _send_cmd(self, proto: Protocol, cmd: str, payload: str | None = None) -> None: if not payload: line = '{}\r\n'.format(cmd) else: line = '{} {}\r\n'.format(cmd, payload) - if isinstance(line, str): - line = line.encode('utf-8') + return proto.dataReceived(line.encode('utf-8')) - return proto.dataReceived(line) - - def test_get_best_blockchain(self): + def test_get_best_blockchain(self) -> None: manager1 = self.create_peer() manager2 = self.create_peer() conn12 = FakeConnection(manager1, manager2, latency=0.05) @@ -54,8 +51,8 @@ def test_get_best_blockchain(self): # assert the protocol is in ReadyState state1 = protocol1.state state2 = protocol2.state - self.assertIsInstance(state1, ReadyState) - self.assertIsInstance(state2, ReadyState) + assert isinstance(state1, ReadyState) + assert isinstance(state2, ReadyState) # assert ReadyState commands self.assertIn(ProtocolMessages.GET_BEST_BLOCKCHAIN, state1.cmd_map) @@ -81,10 +78,10 @@ def test_get_best_blockchain(self): self.assertEqual(self._settings.DEFAULT_BEST_BLOCKCHAIN_BLOCKS, len(state1.peer_best_blockchain)) self.assertEqual(self._settings.DEFAULT_BEST_BLOCKCHAIN_BLOCKS, 
len(state2.peer_best_blockchain)) - self.assertIsInstance(state1.peer_best_blockchain[0], HeightInfo) - self.assertIsInstance(state2.peer_best_blockchain[0], HeightInfo) + assert isinstance(state1.peer_best_blockchain[0], HeightInfo) + assert isinstance(state2.peer_best_blockchain[0], HeightInfo) - def test_handle_get_best_blockchain(self): + def test_handle_get_best_blockchain(self) -> None: manager1 = self.create_peer() manager2 = self.create_peer() conn12 = FakeConnection(manager1, manager2, latency=0.05) @@ -101,13 +98,13 @@ def test_handle_get_best_blockchain(self): self.assertEqual(1, len(connected_peers1)) protocol2 = connected_peers1[0] state2 = protocol2.state - self.assertIsInstance(state2, ReadyState) + assert isinstance(state2, ReadyState) connected_peers2 = list(manager2.connections.connected_peers.values()) self.assertEqual(1, len(connected_peers2)) protocol1 = connected_peers2[0] state1 = protocol1.state - self.assertIsInstance(state1, ReadyState) + assert isinstance(state1, ReadyState) # assert compliance with N blocks inside the boundaries state1.send_get_best_blockchain(n_blocks=1) @@ -141,7 +138,7 @@ def test_handle_get_best_blockchain(self): self.assertEqual(1, len(connected_peers2)) protocol1 = connected_peers2[0] state1 = protocol1.state - self.assertIsInstance(state1, ReadyState) + assert isinstance(state1, ReadyState) # assert param validation exception closes connection state1.handle_get_best_blockchain('invalid single value') @@ -149,7 +146,7 @@ def test_handle_get_best_blockchain(self): # state1 is managed by manager2 self.assertTrue(conn12.tr2.disconnecting) - def test_handle_best_blockchain(self): + def test_handle_best_blockchain(self) -> None: manager1 = self.create_peer() manager2 = self.create_peer() conn12 = FakeConnection(manager1, manager2, latency=0.05) @@ -160,19 +157,19 @@ def test_handle_best_blockchain(self): self.assertEqual(1, len(connected_peers1)) protocol2 = connected_peers1[0] state2 = protocol2.state - 
self.assertIsInstance(state2, ReadyState) + assert isinstance(state2, ReadyState) connected_peers2 = list(manager2.connections.connected_peers.values()) self.assertEqual(1, len(connected_peers2)) protocol1 = connected_peers2[0] state1 = protocol1.state - self.assertIsInstance(state1, ReadyState) + assert isinstance(state1, ReadyState) self.assertFalse(conn12.tr1.disconnecting) self.simulator.run(60) # assert a valid blockchain keeps connections open - fake_blockchain = [ + fake_blockchain: list[tuple[float, str]] = [ (1, '0000000000000002eccfbca9bc06c449c01f37afb3cb49c04ee62921d9bcf9dc'), (2, '00000000000000006c846e182462a2cc437070288a486dfa21aa64bb373b8507'), ] @@ -203,7 +200,7 @@ def test_handle_best_blockchain(self): self.simulator.run(60) self.assertTrue(conn12.tr2.disconnecting) - def test_node_without_get_best_blockchain_capability(self): + def test_node_without_get_best_blockchain_capability(self) -> None: manager1 = self.create_peer() manager2 = self.create_peer() @@ -232,10 +229,10 @@ def test_node_without_get_best_blockchain_capability(self): # assert the peers don't engage in get_best_blockchain messages state2 = protocol2.state - self.assertIsInstance(state2, ReadyState) + assert isinstance(state2, ReadyState) self.assertIsNone(state2.lc_get_best_blockchain) state1 = protocol1.state - self.assertIsInstance(state1, ReadyState) + assert isinstance(state1, ReadyState) self.assertIsNone(state1.lc_get_best_blockchain) # assert the connections remains open @@ -261,7 +258,7 @@ def test_node_without_get_best_blockchain_capability(self): self.simulator.run(60) self.assertTrue(conn12.tr2.disconnecting) - def test_best_blockchain_from_storage(self): + def test_best_blockchain_from_storage(self) -> None: manager1 = self.create_peer() manager2 = self.create_peer() conn12 = FakeConnection(manager1, manager2, latency=0.05) @@ -281,8 +278,7 @@ def test_best_blockchain_from_storage(self): self.assertTrue(block is memo_block) # cache miss if best block doesn't match - 
fake_block = HeightInfo(1, 'fake hash') - manager1._latest_n_height_tips = [fake_block] + fake_block = HeightInfo(1, b'fake hash') best_blockchain = manager1.tx_storage.get_n_height_tips(1) # there is only the genesis block block = best_blockchain[0] # the memoized best_blockchain is skiped @@ -309,7 +305,7 @@ def test_best_blockchain_from_storage(self): block = best_blockchain[0] self.assertTrue(block is memo_block) - def test_stop_looping_on_exit(self): + def test_stop_looping_on_exit(self) -> None: manager1 = self.create_peer() manager2 = self.create_peer() conn12 = FakeConnection(manager1, manager2, latency=0.05) @@ -320,18 +316,18 @@ def test_stop_looping_on_exit(self): self.assertEqual(1, len(connected_peers1)) protocol2 = connected_peers1[0] state2 = protocol2.state - self.assertIsInstance(state2, ReadyState) + assert isinstance(state2, ReadyState) connected_peers2 = list(manager2.connections.connected_peers.values()) self.assertEqual(1, len(connected_peers2)) protocol1 = connected_peers2[0] state1 = protocol1.state - self.assertIsInstance(state1, ReadyState) + assert isinstance(state1, ReadyState) - self.assertIsNotNone(state1.lc_get_best_blockchain) + assert state1.lc_get_best_blockchain is not None self.assertTrue(state1.lc_get_best_blockchain.running) - self.assertIsNotNone(state2.lc_get_best_blockchain) + assert state2.lc_get_best_blockchain is not None self.assertTrue(state2.lc_get_best_blockchain.running) state1.on_exit() @@ -343,8 +339,7 @@ def test_stop_looping_on_exit(self): self.assertIsNotNone(state2.lc_get_best_blockchain) self.assertFalse(state2.lc_get_best_blockchain.running) - @inlineCallbacks - def test_best_blockchain_from_status_resource(self): + async def test_best_blockchain_from_status_resource(self) -> None: manager1 = self.create_peer() manager2 = self.create_peer() conn12 = FakeConnection(manager1, manager2, latency=0.05) @@ -353,7 +348,7 @@ def test_best_blockchain_from_status_resource(self): # check /status before generate blocks 
self.web = StubSite(StatusResource(manager1)) - response = yield self.web.get("status") + response = await self.web.get("status") data = response.json_value() connections = data.get('connections') self.assertEqual(len(connections['connected_peers']), 1) @@ -385,7 +380,7 @@ def test_best_blockchain_from_status_resource(self): self.simulator.run(60) # check /status after mine blocks - response = yield self.web.get("status") + response = await self.web.get("status") data = response.json_value() connections = data.get('connections') self.assertEqual(len(connections['connected_peers']), 1) diff --git a/tests/p2p/test_peer_id.py b/tests/p2p/test_peer_id.py index c3e8be202..bccb9bcb2 100644 --- a/tests/p2p/test_peer_id.py +++ b/tests/p2p/test_peer_id.py @@ -1,37 +1,42 @@ import os import shutil import tempfile +from typing import cast +from unittest.mock import Mock + +from twisted.internet.interfaces import ITransport from hathor.p2p.peer_id import InvalidPeerIdException, PeerId from hathor.p2p.peer_storage import PeerStorage +from hathor.util import not_none from tests import unittest from tests.unittest import TestBuilder class PeerIdTest(unittest.TestCase): - def test_invalid_id(self): + def test_invalid_id(self) -> None: p1 = PeerId() - p1.id = p1.id[::-1] + p1.id = not_none(p1.id)[::-1] self.assertRaises(InvalidPeerIdException, p1.validate) - def test_invalid_public_key(self): + def test_invalid_public_key(self) -> None: p1 = PeerId() p2 = PeerId() p1.public_key = p2.public_key self.assertRaises(InvalidPeerIdException, p1.validate) - def test_invalid_private_key(self): + def test_invalid_private_key(self) -> None: p1 = PeerId() p2 = PeerId() p1.private_key = p2.private_key self.assertRaises(InvalidPeerIdException, p1.validate) - def test_no_private_key(self): + def test_no_private_key(self) -> None: p1 = PeerId() p1.private_key = None p1.validate() - def test_create_from_json(self): + def test_create_from_json(self) -> None: p1 = PeerId() data1 = 
p1.to_json(include_private_key=True) p2 = PeerId.create_from_json(data1) @@ -39,7 +44,7 @@ def test_create_from_json(self): self.assertEqual(data1, data2) p2.validate() - def test_create_from_json_without_private_key(self): + def test_create_from_json_without_private_key(self) -> None: p1 = PeerId() data1 = p1.to_json() # Just to test a part of the code @@ -51,20 +56,20 @@ def test_create_from_json_without_private_key(self): self.assertEqual(data1, data2) p2.validate() - def test_sign_verify(self): + def test_sign_verify(self) -> None: data = b'abacate' p1 = PeerId() signature = p1.sign(data) self.assertTrue(p1.verify_signature(signature, data)) - def test_sign_verify_fail(self): + def test_sign_verify_fail(self) -> None: data = b'abacate' p1 = PeerId() signature = p1.sign(data) signature = signature[::-1] self.assertFalse(p1.verify_signature(signature, data)) - def test_merge_peer(self): + def test_merge_peer(self) -> None: # Testing peer storage with merge of peers peer_storage = PeerStorage() @@ -72,14 +77,14 @@ def test_merge_peer(self): p2 = PeerId() p2.id = p1.id p2.public_key = p1.public_key - p1.public_key = '' + p1.public_key = None peer_storage.add_or_merge(p1) self.assertEqual(len(peer_storage), 1) peer_storage.add_or_merge(p2) - peer = peer_storage[p1.id] + peer = peer_storage[not_none(p1.id)] self.assertEqual(peer.id, p1.id) self.assertEqual(peer.private_key, p1.private_key) self.assertEqual(peer.public_key, p1.public_key) @@ -88,11 +93,11 @@ def test_merge_peer(self): p3 = PeerId() p3.entrypoints.append('1') p3.entrypoints.append('3') - p3.public_key = '' + p3.public_key = None p4 = PeerId() - p4.public_key = '' - p4.private_key = '' + p4.public_key = None + p4.private_key = None p4.id = p3.id p4.entrypoints.append('2') p4.entrypoints.append('3') @@ -103,7 +108,7 @@ def test_merge_peer(self): peer_storage.add_or_merge(p3) self.assertEqual(len(peer_storage), 2) - peer = peer_storage[p3.id] + peer = peer_storage[not_none(p3.id)] 
self.assertEqual(peer.id, p3.id) self.assertEqual(peer.private_key, p3.private_key) self.assertEqual(peer.entrypoints, ['2', '3', '1']) @@ -111,7 +116,7 @@ def test_merge_peer(self): with self.assertRaises(ValueError): peer_storage.add(p1) - def test_save_peer_file(self): + def test_save_peer_file(self) -> None: import json p = PeerId() @@ -127,7 +132,7 @@ def test_save_peer_file(self): # Removing tmpdir shutil.rmtree(tmpdir) - def test_retry_connection(self): + def test_retry_connection(self) -> None: p = PeerId() interval = p.retry_interval p.increment_retry_attempt(0) @@ -144,26 +149,27 @@ def test_retry_connection(self): self.assertEqual(p.retry_interval, 5) self.assertEqual(p.retry_timestamp, 0) - def test_validate_certificate(self): + def test_validate_certificate(self) -> None: builder = TestBuilder() artifacts = builder.build() - protocol = artifacts.p2p_manager.server_factory.buildProtocol('127.0.0.1') + protocol = artifacts.p2p_manager.server_factory.buildProtocol(Mock()) + + peer = PeerId() - peer = PeerId('testnet') + from OpenSSL import crypto class FakeTransport: - def getPeerCertificate(self): - from OpenSSL import crypto + def getPeerCertificate(self) -> crypto.X509: # we use a new peer here just to save the trouble of manually creating a certificate - random_peer = PeerId('testnet') + random_peer = PeerId() return crypto.X509.from_cryptography(random_peer.get_certificate()) - protocol.transport = FakeTransport() + protocol.transport = cast(ITransport, FakeTransport()) result = peer.validate_certificate(protocol) self.assertFalse(result) - def test_retry_logic(self): - peer = PeerId('testnet') + def test_retry_logic(self) -> None: + peer = PeerId() self.assertTrue(peer.can_retry(0)) retry_interval = peer.retry_interval @@ -207,7 +213,7 @@ def test_retry_logic(self): class BasePeerIdTest(unittest.TestCase): __test__ = False - async def test_validate_entrypoint(self): + async def test_validate_entrypoint(self) -> None: manager = 
self.create_peer('testnet', unlock_wallet=False) peer_id = manager.my_peer peer_id.entrypoints = ['tcp://127.0.0.1:40403'] @@ -230,10 +236,11 @@ async def test_validate_entrypoint(self): protocol.connection_string = None peer_id.entrypoints = ['tcp://127.0.0.1:40403'] + from collections import namedtuple + Peer = namedtuple('Peer', 'host') + class FakeTransport: - def getPeer(self): - from collections import namedtuple - Peer = namedtuple('Peer', 'host') + def getPeer(self) -> Peer: return Peer(host='127.0.0.1') protocol.transport = FakeTransport() result = await peer_id.validate_entrypoint(protocol) diff --git a/tests/p2p/test_protocol.py b/tests/p2p/test_protocol.py index 1aadea540..f0b0e95e7 100644 --- a/tests/p2p/test_protocol.py +++ b/tests/p2p/test_protocol.py @@ -2,7 +2,7 @@ from typing import Optional from unittest.mock import Mock, patch -from twisted.internet.defer import inlineCallbacks +from twisted.internet.protocol import Protocol from twisted.python.failure import Failure from hathor.p2p.peer_id import PeerId @@ -15,7 +15,7 @@ class BaseHathorProtocolTestCase(unittest.TestCase): __test__ = False - def setUp(self): + def setUp(self) -> None: super().setUp() self.network = 'testnet' self.peer_id1 = PeerId() @@ -32,52 +32,49 @@ def assertAndStepConn(self, conn: FakeConnection, regex1: bytes, regex2: Optiona self.assertRegex(conn.peek_tr2_value(), regex2) conn.run_one_step() - def assertIsConnected(self, conn=None): + def assertIsConnected(self, conn: FakeConnection | None = None) -> None: if conn is None: conn = self.conn self.assertFalse(conn.tr1.disconnecting) self.assertFalse(conn.tr2.disconnecting) - def assertIsNotConnected(self, conn=None): + def assertIsNotConnected(self, conn: FakeConnection | None = None) -> None: if conn is None: conn = self.conn self.assertTrue(conn.tr1.disconnecting) self.assertTrue(conn.tr2.disconnecting) - def _send_cmd(self, proto, cmd, payload=None): + def _send_cmd(self, proto: Protocol, cmd: str, payload: str | None = 
None) -> None: if not payload: line = '{}\r\n'.format(cmd) else: line = '{} {}\r\n'.format(cmd, payload) - if isinstance(line, str): - line = line.encode('utf-8') + return proto.dataReceived(line.encode('utf-8')) - return proto.dataReceived(line) - - def _check_result_only_cmd(self, result, expected_cmd): + def _check_result_only_cmd(self, result: bytes, expected_cmd: bytes) -> None: cmd_list = [] for line in result.split(b'\r\n'): cmd, _, _ = line.partition(b' ') cmd_list.append(cmd) self.assertIn(expected_cmd, cmd_list) - def _check_cmd_and_value(self, result, expected): + def _check_cmd_and_value(self, result: bytes, expected: tuple[bytes, bytes]) -> None: result_list = [] for line in result.split(b'\r\n'): cmd, _, data = line.partition(b' ') result_list.append((cmd, data)) self.assertIn(expected, result_list) - def test_on_connect(self): + def test_on_connect(self) -> None: self._check_result_only_cmd(self.conn.peek_tr1_value(), b'HELLO') - def test_invalid_command(self): + def test_invalid_command(self) -> None: self._send_cmd(self.conn.proto1, 'INVALID-CMD') self.conn.proto1.state.handle_error('') self.assertTrue(self.conn.tr1.disconnecting) - def test_rate_limit(self): + def test_rate_limit(self) -> None: hits = 1 window = 60 @@ -99,7 +96,7 @@ def test_rate_limit(self): self.conn.proto1.connections = None self.conn.proto1.on_disconnect(Failure(Exception())) - def test_invalid_size(self): + def test_invalid_size(self) -> None: self.conn.tr1.clear() cmd = b'HELLO ' max_payload_bytes = HathorLineReceiver.MAX_LENGTH - len(cmd) @@ -123,32 +120,32 @@ def test_invalid_size(self): line_length_exceeded_wrapped.assert_called_once() self.assertTrue(self.conn.tr1.disconnecting) - def test_invalid_payload(self): + def test_invalid_payload(self) -> None: self.conn.run_one_step() # HELLO self.conn.run_one_step() # PEER-ID self.conn.run_one_step() # READY with self.assertRaises(JSONDecodeError): self._send_cmd(self.conn.proto1, 'PEERS', 'abc') - def 
test_invalid_hello1(self): + def test_invalid_hello1(self) -> None: self.conn.tr1.clear() self._send_cmd(self.conn.proto1, 'HELLO') self._check_result_only_cmd(self.conn.peek_tr1_value(), b'ERROR') self.assertTrue(self.conn.tr1.disconnecting) - def test_invalid_hello2(self): + def test_invalid_hello2(self) -> None: self.conn.tr1.clear() self._send_cmd(self.conn.proto1, 'HELLO', 'invalid_payload') self._check_result_only_cmd(self.conn.peek_tr1_value(), b'ERROR') self.assertTrue(self.conn.tr1.disconnecting) - def test_invalid_hello3(self): + def test_invalid_hello3(self) -> None: self.conn.tr1.clear() self._send_cmd(self.conn.proto1, 'HELLO', '{}') self._check_result_only_cmd(self.conn.peek_tr1_value(), b'ERROR') self.assertTrue(self.conn.tr1.disconnecting) - def test_invalid_hello4(self): + def test_invalid_hello4(self) -> None: self.conn.tr1.clear() self._send_cmd( self.conn.proto1, @@ -158,7 +155,7 @@ def test_invalid_hello4(self): self._check_result_only_cmd(self.conn.peek_tr1_value(), b'ERROR') self.assertTrue(self.conn.tr1.disconnecting) - def test_invalid_hello5(self): + def test_invalid_hello5(self) -> None: # hello with clocks too far apart self.conn.tr1.clear() data = self.conn.proto2.state._get_hello_data() @@ -171,14 +168,14 @@ def test_invalid_hello5(self): self._check_result_only_cmd(self.conn.peek_tr1_value(), b'ERROR') self.assertTrue(self.conn.tr1.disconnecting) - def test_valid_hello(self): + def test_valid_hello(self) -> None: self.conn.run_one_step() # HELLO self._check_result_only_cmd(self.conn.peek_tr1_value(), b'PEER-ID') self._check_result_only_cmd(self.conn.peek_tr2_value(), b'PEER-ID') self.assertFalse(self.conn.tr1.disconnecting) self.assertFalse(self.conn.tr2.disconnecting) - def test_invalid_same_peer_id(self): + def test_invalid_same_peer_id(self) -> None: manager3 = self.create_peer(self.network, peer_id=self.peer_id1) conn = FakeConnection(self.manager1, manager3) conn.run_one_step() # HELLO @@ -186,7 +183,7 @@ def 
test_invalid_same_peer_id(self): self._check_result_only_cmd(conn.peek_tr1_value(), b'ERROR') self.assertTrue(conn.tr1.disconnecting) - def test_invalid_same_peer_id2(self): + def test_invalid_same_peer_id2(self) -> None: """ We connect nodes 1-2 and 1-3. Nodes 2 and 3 have the same peer_id. The connections are established simultaneously, so we do not detect a peer id duplication in PEER_ID @@ -246,7 +243,7 @@ def test_invalid_same_peer_id2(self): # connection is still up self.assertIsConnected(conn_alive) - def test_invalid_different_network(self): + def test_invalid_different_network(self) -> None: manager3 = self.create_peer(network='mainnet') conn = FakeConnection(self.manager1, manager3) conn.run_one_step() # HELLO @@ -254,23 +251,23 @@ def test_invalid_different_network(self): self.assertTrue(conn.tr1.disconnecting) conn.run_one_step() # ERROR - def test_send_invalid_unicode(self): + def test_send_invalid_unicode(self) -> None: # \xff is an invalid unicode. self.conn.proto1.dataReceived(b'\xff\r\n') self.assertTrue(self.conn.tr1.disconnecting) - def test_on_disconnect(self): + def test_on_disconnect(self) -> None: self.assertIn(self.conn.proto1, self.manager1.connections.handshaking_peers) self.conn.disconnect(Failure(Exception('testing'))) self.assertNotIn(self.conn.proto1, self.manager1.connections.handshaking_peers) - def test_on_disconnect_after_hello(self): + def test_on_disconnect_after_hello(self) -> None: self.conn.run_one_step() # HELLO self.assertIn(self.conn.proto1, self.manager1.connections.handshaking_peers) self.conn.disconnect(Failure(Exception('testing'))) self.assertNotIn(self.conn.proto1, self.manager1.connections.handshaking_peers) - def test_on_disconnect_after_peer_id(self): + def test_on_disconnect_after_peer_id(self) -> None: self.conn.run_one_step() # HELLO self.assertIn(self.conn.proto1, self.manager1.connections.handshaking_peers) # No peer id in the peer_storage (known_peers) @@ -291,7 +288,7 @@ def 
test_on_disconnect_after_peer_id(self): # Peer id 2 removed from peer_storage (known_peers) after disconnection and after looping call self.assertNotIn(self.peer_id2.id, self.manager1.connections.peer_storage) - def test_idle_connection(self): + def test_idle_connection(self) -> None: self.clock.advance(self._settings.PEER_IDLE_TIMEOUT - 10) self.assertIsConnected(self.conn) self.clock.advance(15) @@ -301,7 +298,7 @@ def test_idle_connection(self): class SyncV1HathorProtocolTestCase(unittest.SyncV1Params, BaseHathorProtocolTestCase): __test__ = True - def test_two_connections(self): + def test_two_connections(self) -> None: self.conn.run_one_step() # HELLO self.conn.run_one_step() # PEER-ID self.conn.run_one_step() # READY @@ -318,8 +315,7 @@ def test_two_connections(self): self._check_result_only_cmd(self.conn.peek_tr1_value(), b'PEERS') self.conn.run_one_step() - @inlineCallbacks - def test_get_data(self): + def test_get_data(self) -> None: self.conn.run_one_step() # HELLO self.conn.run_one_step() # PEER-ID self.conn.run_one_step() # READY @@ -329,11 +325,11 @@ def test_get_data(self): self.conn.run_one_step() # TIPS self.assertIsConnected() missing_tx = '00000000228dfcd5dec1c9c6263f6430a5b4316bb9e3decb9441a6414bfd8697' - yield self._send_cmd(self.conn.proto1, 'GET-DATA', missing_tx) + self._send_cmd(self.conn.proto1, 'GET-DATA', missing_tx) self._check_result_only_cmd(self.conn.peek_tr1_value(), b'NOT-FOUND') self.conn.run_one_step() - def test_valid_hello_and_peer_id(self): + def test_valid_hello_and_peer_id(self) -> None: self._check_result_only_cmd(self.conn.peek_tr1_value(), b'HELLO') self._check_result_only_cmd(self.conn.peek_tr2_value(), b'HELLO') self.conn.run_one_step() # HELLO @@ -358,7 +354,7 @@ def test_valid_hello_and_peer_id(self): self.conn.run_one_step() # TIPS self.assertIsConnected() - def test_send_ping(self): + def test_send_ping(self) -> None: self.conn.run_one_step() # HELLO self.conn.run_one_step() # PEER-ID self.conn.run_one_step() # READY 
@@ -379,8 +375,7 @@ def test_send_ping(self): self.conn.run_one_step() self.assertEqual(self.clock.seconds(), self.conn.proto1.last_message) - @inlineCallbacks - def test_invalid_peer_id(self): + def test_invalid_peer_id(self) -> None: self.conn.run_one_step() # HELLO self.conn.run_one_step() # PEER-ID self.conn.run_one_step() # READY @@ -389,7 +384,7 @@ def test_invalid_peer_id(self): self.conn.run_one_step() # PEERS self.conn.run_one_step() # TIPS invalid_payload = {'id': '123', 'entrypoints': ['tcp://localhost:1234']} - yield self._send_cmd(self.conn.proto1, 'PEER-ID', json_dumps(invalid_payload)) + self._send_cmd(self.conn.proto1, 'PEER-ID', json_dumps(invalid_payload)) self._check_result_only_cmd(self.conn.peek_tr1_value(), b'ERROR') self.assertTrue(self.conn.tr1.disconnecting) @@ -397,7 +392,7 @@ def test_invalid_peer_id(self): class SyncV2HathorProtocolTestCase(unittest.SyncV2Params, BaseHathorProtocolTestCase): __test__ = True - def test_two_connections(self): + def test_two_connections(self) -> None: self.assertAndStepConn(self.conn, b'^HELLO') self.assertAndStepConn(self.conn, b'^PEER-ID') self.assertAndStepConn(self.conn, b'^READY') @@ -425,8 +420,7 @@ def test_two_connections(self): self.assertIsConnected() - @inlineCallbacks - def test_get_data(self): + def test_get_data(self) -> None: self.assertAndStepConn(self.conn, b'^HELLO') self.assertAndStepConn(self.conn, b'^PEER-ID') self.assertAndStepConn(self.conn, b'^READY') @@ -442,11 +436,11 @@ def test_get_data(self): 'last_block_hash': missing_tx, 'start_from': [self._settings.GENESIS_BLOCK_HASH.hex()] } - yield self._send_cmd(self.conn.proto1, 'GET-TRANSACTIONS-BFS', json_dumps(payload)) + self._send_cmd(self.conn.proto1, 'GET-TRANSACTIONS-BFS', json_dumps(payload)) self._check_result_only_cmd(self.conn.peek_tr1_value(), b'NOT-FOUND') self.conn.run_one_step() - def test_valid_hello_and_peer_id(self): + def test_valid_hello_and_peer_id(self) -> None: self.assertAndStepConn(self.conn, b'^HELLO') 
self.assertAndStepConn(self.conn, b'^PEER-ID') self.assertAndStepConn(self.conn, b'^READY') @@ -477,7 +471,7 @@ def test_valid_hello_and_peer_id(self): self.assertAndStepConn(self.conn, b'^BEST-BLOCK') self.assertIsConnected() - def test_send_ping(self): + def test_send_ping(self) -> None: self.assertAndStepConn(self.conn, b'^HELLO') self.assertAndStepConn(self.conn, b'^PEER-ID') self.assertAndStepConn(self.conn, b'^READY') diff --git a/tests/p2p/test_rate_limiter.py b/tests/p2p/test_rate_limiter.py index 83e7b6e56..3eace5471 100644 --- a/tests/p2p/test_rate_limiter.py +++ b/tests/p2p/test_rate_limiter.py @@ -1,13 +1,14 @@ from hathor.p2p.rate_limiter import RateLimiter +from hathor.util import not_none from tests import unittest class RateLimiterTestCase(unittest.TestCase): - def setUp(self): + def setUp(self) -> None: super().setUp() self.rate_limiter = RateLimiter(reactor=self.clock) - def test_limiter(self): + def test_limiter(self) -> None: key = 'test' self.rate_limiter.set_limit(key, 2, 2) @@ -31,7 +32,7 @@ def test_limiter(self): self.assertTrue(self.rate_limiter.add_hit(key)) # Get limit - self.assertEqual(self.rate_limiter.get_limit(key).max_hits, 2) + self.assertEqual(not_none(self.rate_limiter.get_limit(key)).max_hits, 2) # Unset limit self.rate_limiter.unset_limit(key) diff --git a/tests/p2p/test_split_brain.py b/tests/p2p/test_split_brain.py index 68ee24609..c66dd8ba1 100644 --- a/tests/p2p/test_split_brain.py +++ b/tests/p2p/test_split_brain.py @@ -3,8 +3,10 @@ from hathor.daa import TestMode from hathor.graphviz import GraphvizVisualizer +from hathor.manager import HathorManager from hathor.simulator import FakeConnection from hathor.simulator.utils import add_new_block +from hathor.util import not_none from hathor.wallet import HDWallet from tests import unittest from tests.utils import add_blocks_unlock_reward, add_new_double_spending, add_new_transactions @@ -13,7 +15,7 @@ class BaseHathorSyncMethodsTestCase(unittest.TestCase): __test__ = False - 
def setUp(self): + def setUp(self) -> None: super().setUp() first_timestamp = self._settings.GENESIS_BLOCK_TIMESTAMP @@ -21,13 +23,12 @@ def setUp(self): self.network = 'testnet' - def create_peer(self, network, unlock_wallet=True): + def create_peer(self, network: str, unlock_wallet: bool = True) -> HathorManager: # type: ignore[override] wallet = HDWallet(gap_limit=2) wallet._manually_initialize() manager = super().create_peer(network, wallet=wallet) manager.daa.TEST_MODE = TestMode.TEST_ALL_WEIGHT - manager.avg_time_between_blocks = 64 # Don't use it anywhere else. It is unsafe to generate mnemonic words like this. # It should be used only for testing purposes. @@ -37,14 +38,12 @@ def create_peer(self, network, unlock_wallet=True): return manager @pytest.mark.slow - def test_split_brain_plain(self): + def test_split_brain_plain(self) -> None: debug_pdf = False manager1 = self.create_peer(self.network, unlock_wallet=True) - manager1.avg_time_between_blocks = 3 manager2 = self.create_peer(self.network, unlock_wallet=True) - manager2.avg_time_between_blocks = 3 for _ in range(10): add_new_block(manager1, advance_clock=1) @@ -100,12 +99,10 @@ def test_split_brain_plain(self): self.assertConsensusValid(manager2) @pytest.mark.slow - def test_split_brain_only_blocks_different_height(self): + def test_split_brain_only_blocks_different_height(self) -> None: manager1 = self.create_peer(self.network, unlock_wallet=True) - manager1.avg_time_between_blocks = 3 manager2 = self.create_peer(self.network, unlock_wallet=True) - manager2.avg_time_between_blocks = 3 for _ in range(10): add_new_block(manager1, advance_clock=1) @@ -117,7 +114,7 @@ def test_split_brain_only_blocks_different_height(self): # Add one more block to manager1, so it's the winner chain add_new_block(manager1, advance_clock=1) - block_tip1 = manager1.tx_storage.indexes.height.get_tip() + block_tip1 = not_none(manager1.tx_storage.indexes).height.get_tip() self.assertConsensusValid(manager1) 
self.assertConsensusValid(manager2) @@ -140,17 +137,15 @@ def test_split_brain_only_blocks_different_height(self): self.assertConsensusValid(manager2) self.assertConsensusEqual(manager1, manager2) - self.assertEqual(block_tip1, manager1.tx_storage.indexes.height.get_tip()) - self.assertEqual(block_tip1, manager2.tx_storage.indexes.height.get_tip()) + self.assertEqual(block_tip1, not_none(manager1.tx_storage.indexes).height.get_tip()) + self.assertEqual(block_tip1, not_none(manager2.tx_storage.indexes).height.get_tip()) # XXX We must decide what to do when different chains have the same score # For now we are voiding everyone until the first common block - def test_split_brain_only_blocks_same_height(self): + def test_split_brain_only_blocks_same_height(self) -> None: manager1 = self.create_peer(self.network, unlock_wallet=True) - manager1.avg_time_between_blocks = 3 manager2 = self.create_peer(self.network, unlock_wallet=True) - manager2.avg_time_between_blocks = 3 for _ in range(10): add_new_block(manager1, advance_clock=1) @@ -268,12 +263,10 @@ def test_split_brain_only_blocks_same_height(self): self.assertEqual(len(manager2.tx_storage.get_best_block_tips()), 1) self.assertCountEqual(manager2.tx_storage.get_best_block_tips(), {new_block.hash}) - def test_split_brain_only_blocks_bigger_score(self): + def test_split_brain_only_blocks_bigger_score(self) -> None: manager1 = self.create_peer(self.network, unlock_wallet=True) - manager1.avg_time_between_blocks = 3 manager2 = self.create_peer(self.network, unlock_wallet=True) - manager2.avg_time_between_blocks = 3 # Start with 1 because of the genesis block manager2_blocks = 1 @@ -328,13 +321,11 @@ def test_split_brain_only_blocks_bigger_score(self): # Assert that the consensus had the manager2 chain self.assertEqual(winners2_blocks, manager2_blocks) - def test_split_brain_no_double_spending(self): + def test_split_brain_no_double_spending(self) -> None: manager1 = self.create_peer(self.network, unlock_wallet=True) - 
manager1.avg_time_between_blocks = 3 manager1.connections.disable_rate_limiter() manager2 = self.create_peer(self.network, unlock_wallet=True) - manager2.avg_time_between_blocks = 3 manager2.connections.disable_rate_limiter() winner_blocks = 1 diff --git a/tests/p2p/test_split_brain2.py b/tests/p2p/test_split_brain2.py index fc4601898..e1622fb8a 100644 --- a/tests/p2p/test_split_brain2.py +++ b/tests/p2p/test_split_brain2.py @@ -10,7 +10,7 @@ class BaseHathorSyncMethodsTestCase(SimulatorTestCase): __test__ = False @pytest.mark.flaky(max_runs=3, min_passes=1) - def test_split_brain(self): + def test_split_brain(self) -> None: debug_pdf = False manager1 = self.create_peer() diff --git a/tests/p2p/test_sync.py b/tests/p2p/test_sync.py index e387bba89..bd75e52a9 100644 --- a/tests/p2p/test_sync.py +++ b/tests/p2p/test_sync.py @@ -5,7 +5,9 @@ from hathor.p2p.protocol import PeerIdState from hathor.p2p.sync_version import SyncVersion from hathor.simulator import FakeConnection +from hathor.transaction import Block, Transaction from hathor.transaction.storage.exceptions import TransactionIsNotABlock +from hathor.util import not_none from tests import unittest from tests.utils import add_blocks_unlock_reward @@ -13,7 +15,7 @@ class BaseHathorSyncMethodsTestCase(unittest.TestCase): __test__ = False - def setUp(self): + def setUp(self) -> None: super().setUp() # import sys @@ -27,8 +29,7 @@ def setUp(self): self.genesis = self.manager1.tx_storage.get_all_genesis() self.genesis_blocks = [tx for tx in self.genesis if tx.is_block] - def _add_new_tx(self, address, value): - from hathor.transaction import Transaction + def _add_new_tx(self, address: str, value: int) -> Transaction: from hathor.wallet.base_wallet import WalletOutputInfo outputs = [] @@ -46,16 +47,16 @@ def _add_new_tx(self, address, value): self.clock.advance(10) return tx - def _add_new_transactions(self, num_txs): + def _add_new_transactions(self, num_txs: int) -> list[Transaction]: txs = [] for _ in 
range(num_txs): - address = self.get_address(0) + address = not_none(self.get_address(0)) value = self.rng.choice([5, 10, 50, 100, 120]) tx = self._add_new_tx(address, value) txs.append(tx) return txs - def _add_new_block(self, propagate=True): + def _add_new_block(self, propagate: bool = True) -> Block: block = self.manager1.generate_mining_block() self.assertTrue(self.manager1.cpu_mining_service.resolve(block)) self.manager1.verification_service.verify(block) @@ -63,13 +64,13 @@ def _add_new_block(self, propagate=True): self.clock.advance(10) return block - def _add_new_blocks(self, num_blocks, propagate=True): + def _add_new_blocks(self, num_blocks: int, propagate: bool = True) -> list[Block]: blocks = [] for _ in range(num_blocks): blocks.append(self._add_new_block(propagate=propagate)) return blocks - def test_get_blocks_before(self): + def test_get_blocks_before(self) -> None: genesis_block = self.genesis_blocks[0] result = self.manager1.tx_storage.get_blocks_before(genesis_block.hash) self.assertEqual(0, len(result)) @@ -88,7 +89,7 @@ def test_get_blocks_before(self): expected_result = expected_result[::-1] self.assertEqual(result, expected_result) - def test_block_sync_only_genesis(self): + def test_block_sync_only_genesis(self) -> None: manager2 = self.create_peer(self.network) self.assertEqual(manager2.state, manager2.NodeState.READY) @@ -102,7 +103,7 @@ def test_block_sync_only_genesis(self): self.assertEqual(node_sync.synced_timestamp, node_sync.peer_timestamp) self.assertTipsEqual(self.manager1, manager2) - def test_block_sync_new_blocks(self): + def test_block_sync_new_blocks(self) -> None: self._add_new_blocks(15) manager2 = self.create_peer(self.network) @@ -123,7 +124,7 @@ def test_block_sync_new_blocks(self): self.assertConsensusValid(self.manager1) self.assertConsensusValid(manager2) - def test_block_sync_many_new_blocks(self): + def test_block_sync_many_new_blocks(self) -> None: self._add_new_blocks(150) manager2 = self.create_peer(self.network) 
@@ -143,7 +144,7 @@ def test_block_sync_many_new_blocks(self): self.assertConsensusValid(self.manager1) self.assertConsensusValid(manager2) - def test_block_sync_new_blocks_and_txs(self): + def test_block_sync_new_blocks_and_txs(self) -> None: self._add_new_blocks(25) self._add_new_transactions(3) self._add_new_blocks(4) @@ -172,7 +173,7 @@ def test_block_sync_new_blocks_and_txs(self): self.assertConsensusValid(self.manager1) self.assertConsensusValid(manager2) - def test_tx_propagation_nat_peers(self): + def test_tx_propagation_nat_peers(self) -> None: """ manager1 <- manager2 <- manager3 """ self._add_new_blocks(25) @@ -229,7 +230,7 @@ def test_tx_propagation_nat_peers(self): self.assertConsensusValid(self.manager2) self.assertConsensusValid(self.manager3) - def test_check_sync_state(self): + def test_check_sync_state(self) -> None: """Tests if the LoopingCall to check the sync state works""" # Initially it should do nothing, since there is no recent activity self.manager1.check_sync_state() @@ -249,7 +250,7 @@ def test_check_sync_state(self): class SyncV1HathorSyncMethodsTestCase(unittest.SyncV1Params, BaseHathorSyncMethodsTestCase): __test__ = True - def test_downloader(self): + def test_downloader(self) -> None: from hathor.p2p.sync_v1.agent import NodeSyncTimestamp blocks = self._add_new_blocks(3) @@ -326,7 +327,7 @@ def test_downloader(self): downloader.check_downloading_queue() self.assertEqual(len(downloader.downloading_deque), 0) - def _downloader_bug_setup(self): + def _downloader_bug_setup(self) -> None: """ This is an auxiliary method to setup a bug scenario.""" from hathor.p2p.sync_version import SyncVersion @@ -390,7 +391,7 @@ def _downloader_bug_setup(self): # by this point everything should be set to so we can trigger the bug, any issues that happen before this # comment are an issue in setting up the scenario, not related to the problem itself - def test_downloader_retry_reorder(self): + def test_downloader_retry_reorder(self) -> None: """ 
Reproduce the bug that causes a reorder in the downloader queue. The tracking issue for this bug is #465 @@ -454,7 +455,7 @@ def test_downloader_retry_reorder(self): # if the fix is applied, we would see tx_A in storage by this point self.assertTrue(self.manager_bug.tx_storage.transaction_exists(self.tx_A.hash)) - def test_downloader_disconnect(self): + def test_downloader_disconnect(self) -> None: """ This is related to test_downloader_retry_reorder, but it basically tests the change in behavior instead. When a peer disconnects it should be immediately removed from the tx-detail's connections list. @@ -474,7 +475,7 @@ def test_downloader_disconnect(self): class SyncV2HathorSyncMethodsTestCase(unittest.SyncV2Params, BaseHathorSyncMethodsTestCase): __test__ = True - def test_sync_metadata(self): + def test_sync_metadata(self) -> None: # test if the synced peer will build all tx metadata correctly height = 0 @@ -519,7 +520,7 @@ def test_sync_metadata(self): self.assertCountEqual(meta1.conflict_with or [], meta2.conflict_with or []) self.assertCountEqual(meta1.twins or [], meta2.twins or []) - def test_tx_propagation_nat_peers(self): + def test_tx_propagation_nat_peers(self) -> None: super().test_tx_propagation_nat_peers() node_sync1 = self.conn1.proto1.state.sync_agent @@ -534,7 +535,7 @@ def test_tx_propagation_nat_peers(self): self.assertEqual(node_sync2.peer_best_block.height, self.manager2.tx_storage.get_height_best_block()) self.assertConsensusEqual(self.manager2, self.manager3) - def test_block_sync_new_blocks_and_txs(self): + def test_block_sync_new_blocks_and_txs(self) -> None: self._add_new_blocks(25) self._add_new_transactions(3) self._add_new_blocks(4) @@ -563,7 +564,7 @@ def test_block_sync_new_blocks_and_txs(self): self.assertConsensusValid(self.manager1) self.assertConsensusValid(manager2) - def test_block_sync_many_new_blocks(self): + def test_block_sync_many_new_blocks(self) -> None: self._add_new_blocks(150) manager2 = self.create_peer(self.network) 
@@ -584,7 +585,7 @@ def test_block_sync_many_new_blocks(self): self.assertConsensusValid(self.manager1) self.assertConsensusValid(manager2) - def test_block_sync_new_blocks(self): + def test_block_sync_new_blocks(self) -> None: self._add_new_blocks(15) manager2 = self.create_peer(self.network) @@ -605,7 +606,7 @@ def test_block_sync_new_blocks(self): self.assertConsensusValid(self.manager1) self.assertConsensusValid(manager2) - def test_full_sync(self): + def test_full_sync(self) -> None: # 10 blocks blocks = self._add_new_blocks(10) # N blocks to unlock the reward @@ -677,7 +678,7 @@ def test_full_sync(self): self.assertEqual(len(manager2.tx_storage.indexes.mempool_tips.get()), 1) self.assertEqual(len(self.manager1.tx_storage.indexes.mempool_tips.get()), 1) - def test_block_sync_checkpoints(self): + def test_block_sync_checkpoints(self) -> None: TOTAL_BLOCKS = 30 LAST_CHECKPOINT = 15 FIRST_CHECKPOINT = LAST_CHECKPOINT // 2 @@ -718,7 +719,7 @@ def test_block_sync_checkpoints(self): self.assertConsensusValid(self.manager1) self.assertConsensusValid(manager2) - def test_block_sync_only_genesis(self): + def test_block_sync_only_genesis(self) -> None: manager2 = self.create_peer(self.network) self.assertEqual(manager2.state, manager2.NodeState.READY) diff --git a/tests/p2p/test_sync_bridge.py b/tests/p2p/test_sync_bridge.py index cdf000627..9c9024be0 100644 --- a/tests/p2p/test_sync_bridge.py +++ b/tests/p2p/test_sync_bridge.py @@ -5,7 +5,7 @@ class MixedSyncRandomSimulatorTestCase(SimulatorTestCase): __test__ = True - def test_the_three_transacting_miners(self): + def test_the_three_transacting_miners(self) -> None: manager1 = self.create_peer(enable_sync_v1=True, enable_sync_v2=False) manager2 = self.create_peer(enable_sync_v1=True, enable_sync_v2=True) manager3 = self.create_peer(enable_sync_v1=False, enable_sync_v2=True) @@ -44,7 +44,7 @@ def test_the_three_transacting_miners(self): # sync-v2 consensus test is more lenient (if sync-v1 assert passes sync-v2 assert 
will pass too) self.assertConsensusEqualSyncV2(manager_a, manager_b, strict_sync_v2_indexes=False) - def test_bridge_with_late_v2(self): + def test_bridge_with_late_v2(self) -> None: manager1 = self.create_peer(enable_sync_v1=True, enable_sync_v2=False) manager2 = self.create_peer(enable_sync_v1=True, enable_sync_v2=True) manager3 = self.create_peer(enable_sync_v1=False, enable_sync_v2=True) diff --git a/tests/p2p/test_sync_enabled.py b/tests/p2p/test_sync_enabled.py index a352c08a0..f681f90a0 100644 --- a/tests/p2p/test_sync_enabled.py +++ b/tests/p2p/test_sync_enabled.py @@ -5,7 +5,7 @@ class BaseRandomSimulatorTestCase(SimulatorTestCase): - def test_new_node_disabled(self): + def test_new_node_disabled(self) -> None: manager1 = self.create_peer() manager1.allow_mining_without_peers() @@ -39,7 +39,7 @@ def test_new_node_disabled(self): v2 = list(manager2.tx_storage.get_all_transactions()) self.assertEqual(3, len(v2)) - def test_sync_rotate(self): + def test_sync_rotate(self) -> None: manager1 = self.create_peer() manager1.connections.MAX_ENABLED_SYNC = 3 other_managers = [self.create_peer() for _ in range(15)] diff --git a/tests/p2p/test_sync_mempool.py b/tests/p2p/test_sync_mempool.py index f2a0219b3..d094a1af7 100644 --- a/tests/p2p/test_sync_mempool.py +++ b/tests/p2p/test_sync_mempool.py @@ -1,6 +1,8 @@ from hathor.crypto.util import decode_address from hathor.graphviz import GraphvizVisualizer from hathor.simulator import FakeConnection +from hathor.transaction import Block, Transaction +from hathor.util import not_none from tests import unittest from tests.utils import add_blocks_unlock_reward @@ -8,7 +10,7 @@ class BaseHathorSyncMempoolTestCase(unittest.TestCase): __test__ = False - def setUp(self): + def setUp(self) -> None: super().setUp() self.network = 'testnet' @@ -18,7 +20,7 @@ def setUp(self): self.genesis = self.manager1.tx_storage.get_all_genesis() self.genesis_blocks = [tx for tx in self.genesis if tx.is_block] - def _add_new_tx(self, address, 
value): + def _add_new_tx(self, address: str, value: int) -> Transaction: from hathor.transaction import Transaction from hathor.wallet.base_wallet import WalletOutputInfo @@ -37,16 +39,16 @@ def _add_new_tx(self, address, value): self.clock.advance(10) return tx - def _add_new_transactions(self, num_txs): + def _add_new_transactions(self, num_txs: int) -> list[Transaction]: txs = [] for _ in range(num_txs): - address = self.get_address(0) + address = not_none(self.get_address(0)) value = self.rng.choice([5, 10, 50, 100, 120]) tx = self._add_new_tx(address, value) txs.append(tx) return txs - def _add_new_block(self, propagate=True): + def _add_new_block(self, propagate: bool = True) -> Block: block = self.manager1.generate_mining_block() self.assertTrue(self.manager1.cpu_mining_service.resolve(block)) self.manager1.verification_service.verify(block) @@ -54,13 +56,13 @@ def _add_new_block(self, propagate=True): self.clock.advance(10) return block - def _add_new_blocks(self, num_blocks, propagate=True): + def _add_new_blocks(self, num_blocks: int, propagate: bool = True) -> list[Block]: blocks = [] for _ in range(num_blocks): blocks.append(self._add_new_block(propagate=propagate)) return blocks - def test_mempool_basic(self): + def test_mempool_basic(self) -> None: # 10 blocks self._add_new_blocks(2) # N blocks to unlock the reward @@ -100,7 +102,7 @@ class SyncV1HathorSyncMempoolTestCase(unittest.SyncV1Params, BaseHathorSyncMempo class SyncV2HathorSyncMempoolTestCase(unittest.SyncV2Params, BaseHathorSyncMempoolTestCase): __test__ = True - def test_mempool_basic(self): + def test_mempool_basic(self) -> None: super().test_mempool_basic() # 3 genesis diff --git a/tests/p2p/test_sync_rate_limiter.py b/tests/p2p/test_sync_rate_limiter.py index 9433c7ade..04d091c27 100644 --- a/tests/p2p/test_sync_rate_limiter.py +++ b/tests/p2p/test_sync_rate_limiter.py @@ -1,7 +1,9 @@ -from unittest.mock import MagicMock, Mock +from unittest.mock import Mock, patch from 
twisted.python.failure import Failure

+from hathor.p2p.states import ReadyState
+from hathor.p2p.sync_v1.agent import NodeSyncTimestamp
 from hathor.simulator import FakeConnection
 from hathor.simulator.trigger import StopAfterNMinedBlocks
 from tests import unittest
@@ -11,7 +13,7 @@ class SyncV1RandomSimulatorTestCase(unittest.SyncV1Params, SimulatorTestCase):
     __test__ = True

-    def test_sync_rate_limiter(self):
+    def test_sync_rate_limiter(self) -> None:
         manager1 = self.create_peer()

         miner1 = self.simulator.create_miner(manager1, hashpower=10e6)
@@ -32,21 +34,23 @@ def test_sync_rate_limiter(self):
         connected_peers2 = list(manager2.connections.connected_peers.values())
         self.assertEqual(1, len(connected_peers2))
         protocol1 = connected_peers2[0]
+        assert isinstance(protocol1.state, ReadyState)
         sync2 = protocol1.state.sync_agent
-        sync2._send_tips = MagicMock()
+        assert isinstance(sync2, NodeSyncTimestamp)

-        for i in range(100):
-            sync2.send_tips()
-            self.assertEqual(sync2._send_tips.call_count, min(i + 1, 8))
-        self.assertEqual(sync2._send_tips.call_count, 8)
+        with patch.object(sync2, '_send_tips') as mock:
+            for i in range(100):
+                sync2.send_tips()
+                self.assertEqual(mock.call_count, min(i + 1, 8))
+            self.assertEqual(mock.call_count, 8)

-        sync2.send_tips()
-        self.assertEqual(sync2._send_tips.call_count, 8)
+            sync2.send_tips()
+            self.assertEqual(mock.call_count, 8)

-        self.simulator._clock.advance(2000)
-        self.assertTrue(sync2._send_tips.call_count, 16)
+            self.simulator._clock.advance(2000)
+            self.assertTrue(mock.call_count, 16)  # FIXME: assertTrue(x, 16) treats 16 as msg and always passes; likely meant assertEqual

-    def test_sync_rate_limiter_disconnect(self):
+    def test_sync_rate_limiter_disconnect(self) -> None:
         # Test send_tips delayed calls cancelation with disconnection
         manager1 = self.create_peer()
         manager2 = self.create_peer()
@@ -64,36 +68,39 @@ def test_sync_rate_limiter_disconnect(self):
         self.assertEqual(1, len(connected_peers2))
         protocol1 = connected_peers2[0]
+        assert isinstance(protocol1.state, ReadyState)
         sync1 = protocol1.state.sync_agent
- sync1._send_tips = Mock(wraps=sync1._send_tips) + assert isinstance(sync1, NodeSyncTimestamp) + mock = Mock(wraps=sync1._send_tips) - sync1.send_tips() - self.assertEqual(sync1._send_tips.call_count, 1) - self.assertEqual(len(sync1._send_tips_call_later), 0) + with patch.object(sync1, '_send_tips', new=mock): + sync1.send_tips() + self.assertEqual(mock.call_count, 1) + self.assertEqual(len(sync1._send_tips_call_later), 0) - sync1.send_tips() - self.assertEqual(sync1._send_tips.call_count, 1) - self.assertEqual(len(sync1._send_tips_call_later), 1) + sync1.send_tips() + self.assertEqual(mock.call_count, 1) + self.assertEqual(len(sync1._send_tips_call_later), 1) - sync1.send_tips() - self.assertEqual(sync1._send_tips.call_count, 1) - self.assertEqual(len(sync1._send_tips_call_later), 2) + sync1.send_tips() + self.assertEqual(mock.call_count, 1) + self.assertEqual(len(sync1._send_tips_call_later), 2) - # Close the connection. - conn12.disconnect(Failure(Exception('testing'))) - self.simulator.remove_connection(conn12) + # Close the connection. + conn12.disconnect(Failure(Exception('testing'))) + self.simulator.remove_connection(conn12) - self.simulator.run(30) + self.simulator.run(30) - # Send tips should not be called any further since the connection has already been closed. - self.assertEqual(sync1._send_tips.call_count, 1) - # Residual delayed calls - self.assertEqual(len(sync1._send_tips_call_later), 2) - # The residual delayed calls should have been canceled - for call_later in sync1._send_tips_call_later: - self.assertFalse(call_later.active()) + # Send tips should not be called any further since the connection has already been closed. 
+ self.assertEqual(mock.call_count, 1) + # Residual delayed calls + self.assertEqual(len(sync1._send_tips_call_later), 2) + # The residual delayed calls should have been canceled + for call_later in sync1._send_tips_call_later: + self.assertFalse(call_later.active()) - def test_sync_rate_limiter_delayed_calls_draining(self): + def test_sync_rate_limiter_delayed_calls_draining(self) -> None: # Test the draining of delayed calls from _send_tips_call_later list manager1 = self.create_peer() manager2 = self.create_peer() @@ -111,7 +118,9 @@ def test_sync_rate_limiter_delayed_calls_draining(self): self.assertEqual(1, len(connected_peers2)) protocol1 = connected_peers2[0] + assert isinstance(protocol1.state, ReadyState) sync1 = protocol1.state.sync_agent + assert isinstance(sync1, NodeSyncTimestamp) sync1.send_tips() self.assertEqual(len(sync1._send_tips_call_later), 0) @@ -131,7 +140,7 @@ def test_sync_rate_limiter_delayed_calls_draining(self): # should have been executed self.assertEqual(len(sync1._send_tips_call_later), 0) - def test_sync_rate_limiter_delayed_calls_stop(self): + def test_sync_rate_limiter_delayed_calls_stop(self) -> None: # Test the draining of delayed calls from _send_tips_call_later list manager1 = self.create_peer() manager2 = self.create_peer() @@ -149,7 +158,9 @@ def test_sync_rate_limiter_delayed_calls_stop(self): self.assertEqual(1, len(connected_peers2)) protocol1 = connected_peers2[0] + assert isinstance(protocol1.state, ReadyState) sync1 = protocol1.state.sync_agent + assert isinstance(sync1, NodeSyncTimestamp) sync1.send_tips() self.assertEqual(len(sync1._send_tips_call_later), 0) diff --git a/tests/p2p/test_sync_v2.py b/tests/p2p/test_sync_v2.py index 55ac0c2a6..72312c477 100644 --- a/tests/p2p/test_sync_v2.py +++ b/tests/p2p/test_sync_v2.py @@ -1,13 +1,15 @@ import base64 import re +from unittest.mock import patch import pytest -from twisted.internet.defer import inlineCallbacks, succeed +from twisted.internet.defer import Deferred, 
succeed from twisted.python.failure import Failure from hathor.p2p.messages import ProtocolMessages from hathor.p2p.peer_id import PeerId -from hathor.p2p.sync_v2.agent import _HeightInfo +from hathor.p2p.states import ReadyState +from hathor.p2p.sync_v2.agent import NodeBlockSync, _HeightInfo from hathor.simulator import FakeConnection from hathor.simulator.trigger import ( StopAfterNMinedBlocks, @@ -17,7 +19,10 @@ Trigger, ) from hathor.transaction.storage import TransactionRocksDBStorage +from hathor.transaction.storage.transaction_storage import BaseTransactionStorage from hathor.transaction.storage.traversal import DFSWalk +from hathor.types import VertexId +from hathor.util import not_none from tests.simulation.base import SimulatorTestCase from tests.utils import HAS_ROCKSDB @@ -27,7 +32,7 @@ class BaseRandomSimulatorTestCase(SimulatorTestCase): seed_config = 2 - def _get_partial_blocks(self, tx_storage): + def _get_partial_blocks(self, tx_storage: BaseTransactionStorage) -> set[VertexId]: with tx_storage.allow_partially_validated_context(): partial_blocks = set() for tx in tx_storage.get_all_transactions(): @@ -148,19 +153,19 @@ def _run_restart_test(self, *, full_verification: bool, use_tx_storage_cache: bo self.assertConsensusEqualSyncV2(manager1, manager3) @pytest.mark.skipif(not HAS_ROCKSDB, reason='requires python-rocksdb') - def test_restart_fullnode_full_verification(self): + def test_restart_fullnode_full_verification(self) -> None: self._run_restart_test(full_verification=True, use_tx_storage_cache=False) @pytest.mark.skipif(not HAS_ROCKSDB, reason='requires python-rocksdb') - def test_restart_fullnode_quick(self): + def test_restart_fullnode_quick(self) -> None: self._run_restart_test(full_verification=False, use_tx_storage_cache=False) @pytest.mark.skipif(not HAS_ROCKSDB, reason='requires python-rocksdb') - def test_restart_fullnode_quick_with_cache(self): + def test_restart_fullnode_quick_with_cache(self) -> None: 
self._run_restart_test(full_verification=False, use_tx_storage_cache=True) @pytest.mark.skipif(not HAS_ROCKSDB, reason='requires python-rocksdb') - def test_restart_fullnode_full_verification_with_cache(self): + def test_restart_fullnode_full_verification_with_cache(self) -> None: self._run_restart_test(full_verification=True, use_tx_storage_cache=True) def test_exceeds_streaming_and_mempool_limits(self) -> None: @@ -252,7 +257,7 @@ def test_exceeds_streaming_and_mempool_limits(self) -> None: self.assertEqual(manager1.tx_storage.get_vertices_count(), manager2.tx_storage.get_vertices_count()) self.assertConsensusEqualSyncV2(manager1, manager2) - def _prepare_sync_v2_find_best_common_block_reorg(self): + def _prepare_sync_v2_find_best_common_block_reorg(self) -> FakeConnection: manager1 = self.create_peer(enable_sync_v1=False, enable_sync_v2=True) manager1.allow_mining_without_peers() miner1 = self.simulator.create_miner(manager1, hashpower=10e6) @@ -267,50 +272,53 @@ def _prepare_sync_v2_find_best_common_block_reorg(self): self.assertTrue(self.simulator.run(3600)) return conn12 - @inlineCallbacks - def test_sync_v2_find_best_common_block_reorg_1(self): + async def test_sync_v2_find_best_common_block_reorg_1(self) -> None: conn12 = self._prepare_sync_v2_find_best_common_block_reorg() + assert isinstance(conn12._proto1.state, ReadyState) sync_agent = conn12._proto1.state.sync_agent + assert isinstance(sync_agent, NodeBlockSync) rng = conn12.manager2.rng my_best_block = sync_agent.get_my_best_block() - peer_best_block = sync_agent.peer_best_block + peer_best_block = not_none(sync_agent.peer_best_block) fake_peer_best_block = _HeightInfo(my_best_block.height + 3, rng.randbytes(32)) reorg_height = peer_best_block.height - 50 - def fake_get_peer_block_hashes(heights): + def fake_get_peer_block_hashes(heights: list[int]) -> Deferred[list[_HeightInfo]]: # return empty as soon as the search lowest height is not the genesis if heights[0] != 0: - return [] + return succeed([]) 
# simulate a reorg response = [] for h in heights: if h < reorg_height: - vertex_id = conn12.manager2.tx_storage.indexes.height.get(h) + index_manager = not_none(conn12.manager2.tx_storage.indexes) + vertex_id = not_none(index_manager.height.get(h)) else: vertex_id = rng.randbytes(32) response.append(_HeightInfo(height=h, id=vertex_id)) return succeed(response) - sync_agent.get_peer_block_hashes = fake_get_peer_block_hashes - common_block_info = yield sync_agent.find_best_common_block(my_best_block, fake_peer_best_block) - self.assertIsNone(common_block_info) + with patch.object(sync_agent, 'get_peer_block_hashes', new=fake_get_peer_block_hashes): + common_block_info = await sync_agent.find_best_common_block(my_best_block, fake_peer_best_block) + self.assertIsNone(common_block_info) - @inlineCallbacks - def test_sync_v2_find_best_common_block_reorg_2(self): + async def test_sync_v2_find_best_common_block_reorg_2(self) -> None: conn12 = self._prepare_sync_v2_find_best_common_block_reorg() + assert isinstance(conn12._proto1.state, ReadyState) sync_agent = conn12._proto1.state.sync_agent + assert isinstance(sync_agent, NodeBlockSync) rng = conn12.manager2.rng my_best_block = sync_agent.get_my_best_block() - peer_best_block = sync_agent.peer_best_block + peer_best_block = not_none(sync_agent.peer_best_block) fake_peer_best_block = _HeightInfo(my_best_block.height + 3, rng.randbytes(32)) reorg_height = peer_best_block.height - 50 - def fake_get_peer_block_hashes(heights): + def fake_get_peer_block_hashes(heights: list[int]) -> Deferred[list[_HeightInfo]]: if heights[0] != 0: return succeed([ _HeightInfo(height=h, id=rng.randbytes(32)) @@ -321,15 +329,16 @@ def fake_get_peer_block_hashes(heights): response = [] for h in heights: if h < reorg_height: - vertex_id = conn12.manager2.tx_storage.indexes.height.get(h) + index_manager = not_none(conn12.manager2.tx_storage.indexes) + vertex_id = not_none(index_manager.height.get(h)) else: vertex_id = rng.randbytes(32) 
response.append(_HeightInfo(height=h, id=vertex_id)) return succeed(response) - sync_agent.get_peer_block_hashes = fake_get_peer_block_hashes - common_block_info = yield sync_agent.find_best_common_block(my_best_block, fake_peer_best_block) - self.assertIsNone(common_block_info) + with patch.object(sync_agent, 'get_peer_block_hashes', new=fake_get_peer_block_hashes): + common_block_info = await sync_agent.find_best_common_block(my_best_block, fake_peer_best_block) + self.assertIsNone(common_block_info) def test_multiple_unexpected_txs(self) -> None: manager1 = self.create_peer(enable_sync_v1=False, enable_sync_v2=True) diff --git a/tests/p2p/test_twin_tx.py b/tests/p2p/test_twin_tx.py index 9e5e8857a..ae2339cb5 100644 --- a/tests/p2p/test_twin_tx.py +++ b/tests/p2p/test_twin_tx.py @@ -1,6 +1,7 @@ from hathor.crypto.util import decode_address from hathor.simulator.utils import add_new_blocks from hathor.transaction import Transaction +from hathor.util import not_none from hathor.wallet.base_wallet import WalletOutputInfo from tests import unittest from tests.utils import add_blocks_unlock_reward, add_new_double_spending @@ -9,16 +10,16 @@ class BaseTwinTransactionTestCase(unittest.TestCase): __test__ = False - def setUp(self): + def setUp(self) -> None: super().setUp() self.network = 'testnet' self.manager = self.create_peer(self.network, unlock_wallet=True) - def test_twin_tx(self): + def test_twin_tx(self) -> None: add_new_blocks(self.manager, 5, advance_clock=15) add_blocks_unlock_reward(self.manager) - address = self.get_address(0) + address = not_none(self.get_address(0)) value1 = 100 value2 = 101 value3 = 102 diff --git a/tests/p2p/test_whitelist.py b/tests/p2p/test_whitelist.py index e7b83fc18..5cbc7e4ae 100644 --- a/tests/p2p/test_whitelist.py +++ b/tests/p2p/test_whitelist.py @@ -17,7 +17,7 @@ class WhitelistTestCase(unittest.SyncV1Params, unittest.TestCase): @patch('hathor.p2p.states.peer_id.settings', new=settings._replace(ENABLE_PEER_WHITELIST=True)) - 
def test_sync_v11_whitelist_no_no(self): + def test_sync_v11_whitelist_no_no(self) -> None: network = 'testnet' manager1 = self.create_peer(network) @@ -39,7 +39,7 @@ def test_sync_v11_whitelist_no_no(self): self.assertTrue(conn.tr2.disconnecting) @patch('hathor.p2p.states.peer_id.settings', new=settings._replace(ENABLE_PEER_WHITELIST=True)) - def test_sync_v11_whitelist_yes_no(self): + def test_sync_v11_whitelist_yes_no(self) -> None: network = 'testnet' manager1 = self.create_peer(network) @@ -63,7 +63,7 @@ def test_sync_v11_whitelist_yes_no(self): self.assertTrue(conn.tr2.disconnecting) @patch('hathor.p2p.states.peer_id.settings', new=settings._replace(ENABLE_PEER_WHITELIST=True)) - def test_sync_v11_whitelist_yes_yes(self): + def test_sync_v11_whitelist_yes_yes(self) -> None: network = 'testnet' manager1 = self.create_peer(network) diff --git a/tests/unittest.py b/tests/unittest.py index ed8d0c876..316a3f674 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -10,16 +10,21 @@ from twisted.trial import unittest from hathor.builder import BuildArtifacts, Builder +from hathor.checkpoint import Checkpoint from hathor.conf import HathorSettings from hathor.conf.get_settings import get_global_settings from hathor.daa import DifficultyAdjustmentAlgorithm, TestMode +from hathor.event import EventManager +from hathor.event.storage import EventStorage from hathor.p2p.peer_id import PeerId from hathor.p2p.sync_version import SyncVersion +from hathor.pubsub import PubSubManager from hathor.reactor import ReactorProtocol as Reactor, get_global_reactor from hathor.simulator.clock import MemoryReactorHeapClock from hathor.transaction import BaseTransaction -from hathor.util import Random -from hathor.wallet import HDWallet, Wallet +from hathor.transaction.storage.transaction_storage import BaseTransactionStorage +from hathor.util import Random, not_none +from hathor.wallet import BaseWallet, HDWallet, Wallet from tests.test_memory_reactor_clock import 
TestMemoryReactorClock logger = get_logger() @@ -180,28 +185,28 @@ def create_peer_from_builder(self, builder, start_manager=True): return manager - def create_peer( + def create_peer( # type: ignore[no-untyped-def] self, - network, - peer_id=None, - wallet=None, - tx_storage=None, - unlock_wallet=True, - wallet_index=False, - capabilities=None, - full_verification=True, - enable_sync_v1=None, - enable_sync_v2=None, - checkpoints=None, - utxo_index=False, - event_manager=None, - use_memory_index=None, - start_manager=True, - pubsub=None, - event_storage=None, - enable_event_queue=None, - use_memory_storage=None - ): + network: str, + peer_id: PeerId | None = None, + wallet: BaseWallet | None = None, + tx_storage: BaseTransactionStorage | None = None, + unlock_wallet: bool = True, + wallet_index: bool = False, + capabilities: list[str] | None = None, + full_verification: bool = True, + enable_sync_v1: bool | None = None, + enable_sync_v2: bool | None = None, + checkpoints: list[Checkpoint] | None = None, + utxo_index: bool = False, + event_manager: EventManager | None = None, + use_memory_index: bool | None = None, + start_manager: bool = True, + pubsub: PubSubManager | None = None, + event_storage: EventStorage | None = None, + enable_event_queue: bool | None = None, + use_memory_storage: bool | None = None + ): # TODO: Add -> HathorManager here. It breaks the lint in a lot of places. enable_sync_v1, enable_sync_v2 = self._syncVersionFlags(enable_sync_v1, enable_sync_v2) builder = self.get_builder(network) \ @@ -220,8 +225,9 @@ def create_peer( if not wallet: wallet = self._create_test_wallet() if unlock_wallet: + assert isinstance(wallet, Wallet) wallet.unlock(b'MYPASS') - builder.set_wallet(wallet) + builder.set_wallet(not_none(wallet)) if event_storage: builder.set_event_storage(event_storage)