From 2b1dd717a3ea67d742d0378827a290479c706596 Mon Sep 17 00:00:00 2001 From: Quinten Stokkink Date: Tue, 23 Apr 2024 14:19:25 +0200 Subject: [PATCH] Fixed most ruff violations in the libtorrent folder --- .../download_manager/dht_health_manager.py | 31 +- .../libtorrent/download_manager/download.py | 559 +++++++++++------ .../download_manager/download_config.py | 184 ++++-- .../download_manager/download_manager.py | 503 +++++++++------ .../download_manager/download_state.py | 77 ++- .../libtorrent/download_manager/stream.py | 137 +++-- .../restapi/create_torrent_endpoint.py | 105 ++-- .../libtorrent/restapi/downloads_endpoint.py | 574 ++++++++++-------- .../libtorrent/restapi/libtorrent_endpoint.py | 75 ++- .../restapi/torrentinfo_endpoint.py | 86 ++- .../core/libtorrent/torrent_file_tree.py | 32 +- src/tribler/core/libtorrent/torrentdef.py | 235 +++---- src/tribler/core/libtorrent/torrents.py | 94 +-- src/tribler/core/libtorrent/trackers.py | 94 +-- src/tribler/core/libtorrent/uris.py | 16 +- .../download_manager/test_download.py | 8 +- .../download_manager/test_download_manager.py | 4 +- 17 files changed, 1692 insertions(+), 1122 deletions(-) diff --git a/src/tribler/core/libtorrent/download_manager/dht_health_manager.py b/src/tribler/core/libtorrent/download_manager/dht_health_manager.py index 6713299136..a3cb805e07 100644 --- a/src/tribler/core/libtorrent/download_manager/dht_health_manager.py +++ b/src/tribler/core/libtorrent/download_manager/dht_health_manager.py @@ -1,10 +1,11 @@ -from binascii import hexlify +from __future__ import annotations -import libtorrent as lt import math from asyncio import Future +from binascii import hexlify from typing import Awaitable +import libtorrent as lt from ipv8.taskmanager import TaskManager from tribler.core.torrent_checker.dataclasses import HealthInfo @@ -15,9 +16,10 @@ class DHTHealthManager(TaskManager): This class manages BEP33 health requests to the libtorrent DHT. """ - def __init__(self, lt_session): + def __init__(self, lt_session: lt.session) -> None: """ Initialize the DHT health manager. + :param lt_session: The session used to perform health lookups. """ TaskManager.__init__(self) @@ -27,9 +29,10 @@ def __init__(self, lt_session): self.outstanding = {} # Map from transaction_id to infohash self.lt_session = lt_session - def get_health(self, infohash, timeout=15) -> Awaitable[HealthInfo]: + def get_health(self, infohash: bytes, timeout: float = 15) -> Awaitable[HealthInfo]: """ Lookup the health of a given infohash. + :param infohash: The 20-byte infohash to lookup. :param timeout: The timeout of the lookup. """ @@ -48,9 +51,10 @@ def get_health(self, infohash, timeout=15) -> Awaitable[HealthInfo]: return lookup_future - def finalize_lookup(self, infohash): + def finalize_lookup(self, infohash: bytes) -> None: """ Finalize the lookup of the provided infohash and invoke the appropriate deferred. + :param infohash: The infohash of the lookup we finalize. """ for transaction_id in [key for key, value in self.outstanding.items() if value == infohash]: @@ -71,9 +75,10 @@ def finalize_lookup(self, infohash): self.lookup_futures.pop(infohash, None) @staticmethod - def combine_bloomfilters(bf1, bf2): + def combine_bloomfilters(bf1: bytearray, bf2: bytearray) -> bytearray: """ Combine two given bloom filters by ORing the bits. + :param bf1: The first bloom filter to combine. :param bf2: The second bloom filter to combine. :return: A bytearray with the combined bloomfilter. 
@@ -85,19 +90,20 @@ def combine_bloomfilters(bf1, bf2): return final_bf @staticmethod - def get_size_from_bloomfilter(bf): + def get_size_from_bloomfilter(bf: bytearray) -> int: """ Return the estimated number of items in the bloom filter. + :param bf: The bloom filter of which we estimate the size. :return: A rounded integer, approximating the number of items in the filter. """ - def tobits(s): + def tobits(s: bytes) -> list[int]: result = [] for c in s: num = ord(c) if isinstance(c, str) else c bits = bin(num)[2:] - bits = '00000000'[len(bits):] + bits + bits = "00000000"[len(bits):] + bits result.extend([int(b) for b in bits]) return result @@ -114,10 +120,11 @@ def tobits(s): c = min(m - 1, total_zeros) return int(math.log(c / float(m)) / (2 * math.log(1 - 1 / float(m)))) - def requesting_bloomfilters(self, transaction_id, infohash): + def requesting_bloomfilters(self, transaction_id: str, infohash: bytes) -> None: """ The libtorrent DHT has sent a get_peers query for an infohash we may be interested in. If so, keep track of the transaction and node IDs. + :param transaction_id: The ID of the query. :param infohash: The infohash for which the query was sent. """ @@ -127,9 +134,11 @@ def requesting_bloomfilters(self, transaction_id, infohash): # Libtorrent is reusing the transaction_id, and is now using it for an infohash that we're not interested in. self.outstanding.pop(transaction_id, None) - def received_bloomfilters(self, transaction_id, bf_seeds=bytearray(256), bf_peers=bytearray(256)): + def received_bloomfilters(self, transaction_id: str, bf_seeds: bytearray = bytearray(256), # noqa: B008 + bf_peers: bytearray = bytearray(256)) -> None: # noqa: B008 """ We have received bloom filters from the libtorrent DHT. Register the bloom filters and process them. + :param transaction_id: The ID of the query for which we are receiving the bloom filter. :param bf_seeds: The bloom filter indicating the IP addresses of the seeders. :param bf_peers: The bloom filter indicating the IP addresses of the peers (leechers). 
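Aside, not part of the patch: the estimate that get_size_from_bloomfilter implements above is the standard BEP33 formula. For an m-bit filter with z zero bits, the item count is approximately log(c/m) / (2 * log(1 - 1/m)), where c = min(m - 1, z). A minimal self-contained sketch of that math follows; the max(zeros, 1) guard against a fully saturated filter is this sketch's assumption, not necessarily how the class handles that case.

    import math

    def estimate_bep33_size(bf: bytearray, num_bits: int = 256 * 8) -> int:
        # Count the zero bits of the 256-byte (2048-bit) filter.
        zeros = sum(8 - bin(byte).count("1") for byte in bf)
        c = min(num_bits - 1, max(zeros, 1))  # assumption: clamp to keep log() defined
        return int(math.log(c / float(num_bits)) / (2 * math.log(1 - 1 / float(num_bits))))

    # Seed and peer filters are combined by ORing bytes, as combine_bloomfilters does:
    bf_a, bf_b = bytearray(256), bytearray(256)
    combined = bytearray(a | b for a, b in zip(bf_a, bf_b))
    print(estimate_bep33_size(combined))  # 0: two empty filters hold no items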
diff --git a/src/tribler/core/libtorrent/download_manager/download.py b/src/tribler/core/libtorrent/download_manager/download.py index 31edb53c45..9ecf374ad3 100644 --- a/src/tribler/core/libtorrent/download_manager/download.py +++ b/src/tribler/core/libtorrent/download_manager/download.py @@ -15,20 +15,23 @@ from contextlib import suppress from enum import Enum from pathlib import Path -from typing import Any, Awaitable, Callable, Dict, List, Optional, Tuple +from typing import TYPE_CHECKING, Any, Awaitable, Callable, Dict, List, Tuple, TypedDict +import libtorrent as lt from bitarray import bitarray from ipv8.taskmanager import TaskManager, task from ipv8.util import succeed -import libtorrent as lt from tribler.core.libtorrent.download_manager.download_config import DownloadConfig -from tribler.core.libtorrent.download_manager.download_state import DownloadState, DOWNLOAD, DownloadStatus +from tribler.core.libtorrent.download_manager.download_state import DownloadState, DownloadStatus from tribler.core.libtorrent.download_manager.stream import Stream from tribler.core.libtorrent.torrent_file_tree import TorrentFileTree from tribler.core.libtorrent.torrentdef import TorrentDef, TorrentDefNoMetainfo -from tribler.core.libtorrent.torrents import check_handle, require_handle, get_info_from_handle -from tribler.core.notifier import Notifier, Notification +from tribler.core.libtorrent.torrents import check_handle, get_info_from_handle, require_handle +from tribler.core.notifier import Notification, Notifier + +if TYPE_CHECKING: + from tribler.core.libtorrent.download_manager.download_manager import DownloadManager Getter = Callable[[Any], Any] @@ -41,63 +44,104 @@ class IllegalFileIndex(Enum): """ Error codes for Download.get_file_index(). These are used by the GUI to render directories. """ + collapsed_dir = -1 expanded_dir = -2 unloaded = -3 +class PeerDict(TypedDict): + """ + Information of another peer, connected through libtorrent. + """ + + id: str # PeerID or 'http seed' + extended_version: str # Peer client version, as received during the extend handshake message + ip: str # IP address as string or URL of httpseed + port: int + pex_received: bool + optimistic: bool + direction: str # 'L'/'R' (outgoing/incoming) + uprate: float # Upload rate in KB/s + uinterested: bool # Upload Interested: True/False + uchoked: bool # Upload Choked: True/False + uhasqueries: bool # Upload has requests in buffer and not choked + uflushed: bool # Upload is not flushed + downrate: float # Download rate in KB/s + dinterested: bool # Download interested: True/False + dchoked: bool # Download choked: True/False + snubbed: bool # Download snubbed: True/False + utotal: float # Total uploaded from peer in KB + dtotal: float # Total downloaded from peer in KB + completed: float # Fraction of download completed by peer (0-1.0) + speed: float # The peer's current total download speed (estimated) + + +class PeerDictHave(PeerDict): + """ + Extended peer info that includes the "have" field. + """ + + have: Any # Bitfield object for this peer if not completed + + class Download(TaskManager): - """ Download subclass that represents a libtorrent download.""" + """ + Download subclass that represents a libtorrent download. 
+ """ - def __init__(self, + def __init__(self, # noqa: PLR0913 tdef: TorrentDef, - config: DownloadConfig = None, - notifier: Notifier = None, - state_dir: Path = None, - download_manager=None, - checkpoint_disabled=False, - hidden=False): + config: DownloadConfig | None = None, + notifier: Notifier | None = None, + state_dir: Path | None = None, + download_manager: DownloadManager | None =None, + checkpoint_disabled: bool = False, + hidden: bool = False) -> None: + """ + Create a new download. + """ super().__init__() self._logger = logging.getLogger(self.__class__.__name__) self.tdef = tdef - self.handle: Optional[lt.torrent_handle] = None + self.handle: lt.torrent_handle | None = None self.state_dir = state_dir self.download_manager = download_manager self.notifier = notifier # Libtorrent status - self.lt_status: Optional[lt.torrent_status] = None + self.lt_status: lt.torrent_status | None = None self.error = None self.pause_after_next_hashcheck = False self.checkpoint_after_next_hashcheck = False self.tracker_status = {} # {url: [num_peers, status_str]} - self.futures: Dict[str, list[tuple[Future, Callable, Optional[Getter]]]] = defaultdict(list) + self.futures: Dict[str, list[tuple[Future, Callable, Getter | None]]] = defaultdict(list) self.alert_handlers = defaultdict(list) - self.future_added = self.wait_for_alert('add_torrent_alert', lambda a: a.handle) - self.future_removed = self.wait_for_alert('torrent_removed_alert') - self.future_finished = self.wait_for_alert('torrent_finished_alert') - self.future_metainfo = self.wait_for_alert('metadata_received_alert', lambda a: self.tdef.get_metainfo()) - - alert_handlers = {'tracker_reply_alert': self.on_tracker_reply_alert, - 'tracker_error_alert': self.on_tracker_error_alert, - 'tracker_warning_alert': self.on_tracker_warning_alert, - 'metadata_received_alert': self.on_metadata_received_alert, - 'performance_alert': self.on_performance_alert, - 'torrent_checked_alert': self.on_torrent_checked_alert, - 'torrent_finished_alert': self.on_torrent_finished_alert, - 'save_resume_data_alert': self.on_save_resume_data_alert, - 'state_changed_alert': self.on_state_changed_alert, - 'torrent_error_alert': self.on_torrent_error_alert, - 'add_torrent_alert': self.on_add_torrent_alert, - 'torrent_removed_alert': self.on_torrent_removed_alert} + self.future_added = self.wait_for_alert("add_torrent_alert", lambda a: a.handle) + self.future_removed = self.wait_for_alert("torrent_removed_alert") + self.future_finished = self.wait_for_alert("torrent_finished_alert") + self.future_metainfo = self.wait_for_alert("metadata_received_alert", lambda a: self.tdef.get_metainfo()) + + alert_handlers = {"tracker_reply_alert": self.on_tracker_reply_alert, + "tracker_error_alert": self.on_tracker_error_alert, + "tracker_warning_alert": self.on_tracker_warning_alert, + "metadata_received_alert": self.on_metadata_received_alert, + "performance_alert": self.on_performance_alert, + "torrent_checked_alert": self.on_torrent_checked_alert, + "torrent_finished_alert": self.on_torrent_finished_alert, + "save_resume_data_alert": self.on_save_resume_data_alert, + "state_changed_alert": self.on_state_changed_alert, + "torrent_error_alert": self.on_torrent_error_alert, + "add_torrent_alert": self.on_add_torrent_alert, + "torrent_removed_alert": self.on_torrent_removed_alert} for alert_type, alert_handler in alert_handlers.items(): self.register_alert_handler(alert_type, alert_handler) - self.stream: Optional[Stream] = None + self.stream: Stream | None = None # With hidden True 
download will not be in GET/downloads set, as a result will not be shown in GUI self.hidden = hidden @@ -108,18 +152,27 @@ def __init__(self, self.checkpoint() - def __str__(self): + def __str__(self) -> str: + """ + Convert this download to a human-readable string. + """ return "Download(name=%s, hops=%d, checkpoint_disabled=%d)" % \ (self.tdef.get_name(), self.config.get_hops(), self.checkpoint_disabled) - def __repr__(self): + def __repr__(self) -> str: + """ + Convert this download to a print-safe human-readable string. + """ return self.__str__() - def add_stream(self): + def add_stream(self) -> None: + """ + Initialize a stream for this download. + """ assert self.stream is None self.stream = Stream(self) - def get_torrent_data(self) -> Optional[object]: + def get_torrent_data(self) -> bytes | None: """ Return torrent data, if the handle is valid and metadata is available. """ @@ -130,11 +183,17 @@ def get_torrent_data(self) -> Optional[object]: t = lt.create_torrent(torrent_info) return t.generate() - def register_alert_handler(self, alert_type: str, handler: lt.torrent_handle): + def register_alert_handler(self, alert_type: str, handler: Callable[[lt.torrent_alert], None]) -> None: + """ + Add (not replace) a callback for a given alert type. + """ self.alert_handlers[alert_type].append(handler) - def wait_for_alert(self, success_type: str, success_getter: Optional[Getter] = None, - fail_type: str = None, fail_getter: Optional[Getter] = None) -> Future: + def wait_for_alert(self, success_type: str, success_getter: Getter | None = None, + fail_type: str | None = None, fail_getter: Getter | None = None) -> Future: + """ + Create a future that fires when a certain alert is received. + """ future = Future() if success_type: self.futures[success_type].append((future, future.set_result, success_getter)) @@ -142,12 +201,18 @@ def wait_for_alert(self, success_type: str, success_getter: Optional[Getter] = N self.futures[fail_type].append((future, future.set_exception, fail_getter)) return future - async def wait_for_status(self, *status): + async def wait_for_status(self, *status: DownloadStatus) -> None: + """ + Wait for a given download status to occur. + """ while self.get_state().get_status() not in status: await sleep(0) - await self.wait_for_alert('state_changed_alert') + await self.wait_for_alert("state_changed_alert") def get_def(self) -> TorrentDef: + """ + Get the torrent def belonging to this download. + """ return self.tdef def get_handle(self) -> Awaitable[lt.torrent_handle]: @@ -161,7 +226,10 @@ def get_handle(self) -> Awaitable[lt.torrent_handle]: return self.future_added - def get_atp(self) -> Dict: + def get_atp(self) -> dict: + """ + Get the libtorrent "add torrent parameters" instantiation dictionary. + """ save_path = self.config.get_dest_dir() atp = {"save_path": str(save_path), "storage_mode": lt.storage_mode_t.storage_mode_sparse, @@ -193,22 +261,25 @@ def get_atp(self) -> Dict: return atp - def on_add_torrent_alert(self, alert: lt.add_torrent_alert): - self._logger.info(f'On add torrent alert: {alert!r}') + def on_add_torrent_alert(self, alert: lt.add_torrent_alert) -> None: + """ + Handle an add torrent alert. 
+ """ + self._logger.info("On add torrent alert: %s", repr(alert)) - if hasattr(alert, 'error') and alert.error.value(): + if hasattr(alert, "error") and alert.error.value(): self._logger.error("Failed to add torrent (%s)", self.tdef.get_name_as_unicode()) raise RuntimeError(alert.error.message()) - elif not alert.handle.is_valid(): + if not alert.handle.is_valid(): self._logger.error("Received invalid torrent handle") return self.handle = alert.handle self._logger.debug("Added torrent %s", str(self.handle.info_hash())) # In LibTorrent auto_managed flag is now on by default, and as a result - # any torrent's state can change from Stopped to Downloading at any time. + # any torrent"s state can change from Stopped to Downloading at any time. # Here we unset this flag to prevent auto-resuming of stopped torrents. - if hasattr(self.handle, 'unset_flags'): + if hasattr(self.handle, "unset_flags"): self.handle.unset_flags(lt.add_torrent_params_flags_t.flag_auto_managed) self.set_selected_files() @@ -231,6 +302,9 @@ def on_add_torrent_alert(self, alert: lt.add_torrent_alert): self.checkpoint() def get_anon_mode(self) -> bool: + """ + Get whether this torrent is anonymized. + """ return self.config.get_hops() > 0 @check_handle(b'') @@ -242,62 +316,74 @@ def get_pieces_base64(self) -> bytes: bits = bitarray(binary_gen) return base64.b64encode(bits.tobytes()) - def post_alert(self, alert_type: str, alert_dict: Optional[Dict] = None): + def post_alert(self, alert_type: str, alert_dict: dict | None = None) -> None: + """ + Manually post an alert. + """ alert_dict = alert_dict or {} - alert_dict['category'] = lambda _: None - alert = type('anonymous_alert', (object,), alert_dict)() + alert_dict["category"] = lambda _: None + alert = type("anonymous_alert", (object,), alert_dict)() self.process_alert(alert, alert_type) - def process_alert(self, alert: lt.torrent_alert, alert_type: str): + def process_alert(self, alert: lt.torrent_alert, alert_type: str) -> None: + """ + Dispatch an alert to the appriopriate registered handlers. + """ try: if alert.category() in [lt.alert.category_t.error_notification, lt.alert.category_t.performance_warning]: - self._logger.debug(f"Got alert: {alert!r}") + self._logger.debug("Got alert: %s", repr(alert)) for handler in self.alert_handlers.get(alert_type, []): try: handler(alert) except UnicodeDecodeError as e: - self._logger.warning(f"UnicodeDecodeError in {handler.__name__}: {e}") + self._logger.warning("UnicodeDecodeError in %s: %s", handler.__name__, str(e)) for future, future_setter, getter in self.futures.pop(alert_type, []): if not future.done(): future_setter(getter(alert) if getter else alert) except Exception as e: - self._logger.exception(f'process_alert failed with {e.__class__.__name__}: {e} ' - f'for alert {alert!r}') + self._logger.exception("process_alert failed with %s: %s for alert %s", + e.__class__.__name__, str(e), repr(alert)) - def on_torrent_error_alert(self, alert: lt.torrent_error_alert): - self._logger.error(f'On torrent error alert: {alert!r}') + def on_torrent_error_alert(self, alert: lt.torrent_error_alert) -> None: + """ + Handle a torrent error alert. + """ + self._logger.error("On torrent error alert: %s", repr(alert)) - def on_state_changed_alert(self, alert: lt.state_changed_alert): - self._logger.info(f'On state changed alert: {alert!r}') + def on_state_changed_alert(self, alert: lt.state_changed_alert) -> None: + """ + Handle a state change alert. 
+ """ + self._logger.info("On state changed alert: %s", repr(alert)) if not self.handle: return self.update_lt_status(self.handle.status()) enable = alert.state == lt.torrent_status.seeding and self.config.get_hops() > 0 - self._logger.debug('Setting IP filter for %s to %s', hexlify(self.tdef.get_infohash()), enable) + self._logger.debug("Setting IP filter for %s to %s", hexlify(self.tdef.get_infohash()), enable) self.apply_ip_filter(enable) # On a rare occasion we don't get a metadata_received_alert. If this is the case, post an alert manually. if alert.state == lt.torrent_status.downloading and isinstance(self.tdef, TorrentDefNoMetainfo): - self.post_alert('metadata_received_alert') + self.post_alert("metadata_received_alert") - def on_save_resume_data_alert(self, alert: lt.save_resume_data_alert): + def on_save_resume_data_alert(self, alert: lt.save_resume_data_alert) -> None: """ Callback for the alert that contains the resume data of a specific download. This resume data will be written to a file on disk. """ - self._logger.debug(f'On save resume data alert: {alert!r}') + self._logger.debug("On save resume data alert: %s", repr(alert)) if self.checkpoint_disabled: return resume_data = alert.resume_data # Make save_path relative if the torrent is saved in the Tribler state directory - if self.state_dir and b'save_path' in resume_data: - save_path = Path(resume_data[b'save_path'].decode()).absolute() - resume_data[b'save_path'] = str(save_path) + if self.state_dir and b"save_path" in resume_data: + save_path = Path(resume_data[b"save_path"].decode()).absolute() + resume_data[b"save_path"] = str(save_path) if not isinstance(self.tdef, TorrentDefNoMetainfo): self.config.set_metainfo(self.tdef.get_metainfo()) @@ -310,62 +396,71 @@ def on_save_resume_data_alert(self, alert: lt.save_resume_data_alert): self.config.set_engineresumedata(resume_data) # Save it to file - basename = hexlify(resume_data[b'info-hash']).decode() + '.conf' + basename = hexlify(resume_data[b"info-hash"]).decode() + ".conf" Path(self.download_manager.get_checkpoint_dir()).mkdir(parents=True, exist_ok=True) filename = self.download_manager.get_checkpoint_dir() / basename - self.config.config['download_defaults']['name'] = self.tdef.get_name_as_unicode() # store name (for debugging) + self.config.config["download_defaults"]["name"] = self.tdef.get_name_as_unicode() # store name (for debugging) try: self.config.write(filename) except OSError as e: - self._logger.warning(f'{e.__class__.__name__}: {e}') + self._logger.warning("%s: %s", e.__class__.__name__, str(e)) else: - self._logger.debug(f'Resume data has been saved to: {filename}') + self._logger.debug("Resume data has been saved to: %s", filename) - def on_tracker_reply_alert(self, alert: lt.tracker_reply_alert): - self._logger.info(f'On tracker reply alert: {alert!r}') + def on_tracker_reply_alert(self, alert: lt.tracker_reply_alert) -> None: + """ + Handle a tracker reply alert. + """ + self._logger.info("On tracker reply alert: %s", repr(alert)) self.tracker_status[alert.url] = [alert.num_peers, 'Working'] - def on_tracker_error_alert(self, alert: lt.tracker_error_alert): + def on_tracker_error_alert(self, alert: lt.tracker_error_alert) -> None: """ This alert is generated on tracker timeouts, premature disconnects, invalid response or an HTTP response other than "200 OK". - From Libtorrent documentation. """ # The try-except block is added as a workaround to suppress UnicodeDecodeError in `repr(alert)`, # `alert.url` and `alert.msg`. 
See https://github.com/arvidn/libtorrent/issues/143 - self._logger.error(f'On tracker error alert: {alert!r}') + self._logger.error("On tracker error alert: %s", repr(alert)) url = alert.url if alert.msg: - status = 'Error: ' + alert.msg + status = "Error: " + alert.msg elif alert.status_code > 0: - status = 'HTTP status code %d' % alert.status_code + status = "HTTP status code %d" % alert.status_code elif alert.status_code == 0: - status = 'Timeout' + status = "Timeout" else: - status = 'Not working' + status = "Not working" peers = 0 # If there is a tracker error, alert.num_peers is not available. So resetting peer count to zero. self.tracker_status[url] = [peers, status] - def on_tracker_warning_alert(self, alert: lt.tracker_warning_alert): - self._logger.warning(f'On tracker warning alert: {alert!r}') + def on_tracker_warning_alert(self, alert: lt.tracker_warning_alert) -> None: + """ + Handle a tracker warning alert. + """ + self._logger.warning("On tracker warning alert: %s", repr(alert)) peers = self.tracker_status[alert.url][0] if alert.url in self.tracker_status else 0 - status = 'Warning: ' + str(alert.message()) + status = "Warning: " + str(alert.message()) self.tracker_status[alert.url] = [peers, status] @check_handle() - def on_metadata_received_alert(self, alert: lt.metadata_received_alert): - self._logger.info(f'On metadata received alert: {alert!r}') + def on_metadata_received_alert(self, alert: lt.metadata_received_alert) -> None: # noqa: C901, PLR0912 + """ + Handle a metadata received alert. + """ + self._logger.info("On metadata received alert: %s", repr(alert)) torrent_info = get_info_from_handle(self.handle) if not torrent_info: return try: - metadata = {b'info': lt.bdecode(torrent_info.metadata()), b'leechers': 0, b'seeders': 0} + metadata = {b"info": lt.bdecode(torrent_info.metadata()), b"leechers": 0, b"seeders": 0} except (RuntimeError, ValueError) as e: self._logger.warning(e) return @@ -377,7 +472,7 @@ def on_metadata_received_alert(self, alert: lt.metadata_received_alert): except UnicodeDecodeError as e: self._logger.warning(e) for tracker in trackers: - url = tracker['url'] + url = tracker["url"] try: tracker_urls.append(url.encode()) except UnicodeEncodeError as e: @@ -406,8 +501,11 @@ def on_metadata_received_alert(self, alert: lt.metadata_received_alert): self.set_selected_files() self.checkpoint() - def on_performance_alert(self, alert: lt.performance_alert): - self._logger.info(f'On performance alert: {alert!r}') + def on_performance_alert(self, alert: lt.performance_alert) -> None: + """ + Handle a performance alert. 
+ """ + self._logger.info("On performance alert: %s", repr(alert)) if self.get_anon_mode() or self.download_manager.ltsessions is None: return @@ -417,26 +515,32 @@ def on_performance_alert(self, alert: lt.performance_alert): lt_session = self.download_manager.get_session(self.config.get_hops()) settings = self.download_manager.get_session_settings(lt_session) if alert.message().endswith("send buffer watermark too low (upload rate will suffer)"): - if settings['send_buffer_watermark'] <= 26214400: - self._logger.info("Setting send_buffer_watermark to %s", 2 * settings['send_buffer_watermark']) - settings['send_buffer_watermark'] *= 2 + if settings["send_buffer_watermark"] <= 26214400: + self._logger.info("Setting send_buffer_watermark to %s", 2 * settings["send_buffer_watermark"]) + settings["send_buffer_watermark"] *= 2 self.download_manager.set_session_settings(self.download_manager.get_session(), settings) # When the write cache is too small, double the buffer size to a maximum # of 64MiB. Again, this is the same mechanism as Deluge uses. - elif alert.message().endswith("max outstanding disk writes reached"): - if settings['max_queued_disk_bytes'] <= 33554432: - self._logger.info("Setting max_queued_disk_bytes to %s", 2 * settings['max_queued_disk_bytes']) - settings['max_queued_disk_bytes'] *= 2 - self.download_manager.set_session_settings(self.download_manager.get_session(), settings) + elif (alert.message().endswith("max outstanding disk writes reached") + and settings["max_queued_disk_bytes"] <= 33554432): + self._logger.info("Setting max_queued_disk_bytes to %s", 2 * settings["max_queued_disk_bytes"]) + settings["max_queued_disk_bytes"] *= 2 + self.download_manager.set_session_settings(self.download_manager.get_session(), settings) - def on_torrent_removed_alert(self, alert: lt.torrent_removed_alert): - self._logger.info(f'On torrent remove alert: {alert!r}') + def on_torrent_removed_alert(self, alert: lt.torrent_removed_alert) -> None: + """ + Handle a torrent removed alert. + """ + self._logger.info("On torrent remove alert: %s", repr(alert)) self._logger.debug("Removing %s", self.tdef.get_name()) self.handle = None - def on_torrent_checked_alert(self, alert: lt.torrent_checked_alert): - self._logger.info(f'On torrent checked alert: {alert!r}') + def on_torrent_checked_alert(self, alert: lt.torrent_checked_alert) -> None: + """ + Handle a torrent checked alert. + """ + self._logger.info("On torrent checked alert: %s", repr(alert)) if self.pause_after_next_hashcheck: self.pause_after_next_hashcheck = False @@ -446,8 +550,11 @@ def on_torrent_checked_alert(self, alert: lt.torrent_checked_alert): self.checkpoint() @check_handle() - def on_torrent_finished_alert(self, alert: lt.torrent_finished_alert): - self._logger.info(f'On torrent finished alert: {alert!r}') + def on_torrent_finished_alert(self, alert: lt.torrent_finished_alert) -> None: + """ + Handle a torrent finished alert. 
+ """ + self._logger.info("On torrent finished alert: %s", repr(alert)) self.update_lt_status(self.handle.status()) self.checkpoint() downloaded = self.get_state().total_download @@ -456,24 +563,28 @@ def on_torrent_finished_alert(self, alert: lt.torrent_finished_alert): infohash = self.tdef.get_infohash().hex() self.notifier.notify(Notification.torrent_finished, infohash=infohash, name=name, hidden=self.hidden) - def update_lt_status(self, lt_status: lt.torrent_status): - """ Update libtorrent stats and check if the download should be stopped.""" + def update_lt_status(self, lt_status: lt.torrent_status) -> None: + """ + Update libtorrent stats and check if the download should be stopped. + """ self.lt_status = lt_status - self._stop_if_finished() - def _stop_if_finished(self): state = self.get_state() if state.get_status() == DownloadStatus.SEEDING: mode = self.download_manager.config.get("libtorrent/download_defaults/seeding_mode") seeding_ratio = self.download_manager.config.get("libtorrent/download_defaults/seeding_ratio") seeding_time = self.download_manager.config.get("libtorrent/download_defaults/seeding_time") - if (mode == 'never' or - (mode == 'ratio' and state.get_all_time_ratio() >= seeding_ratio) or - (mode == 'time' and state.get_seeding_time() >= seeding_time)): + if (mode == "never" or + (mode == "ratio" and state.get_all_time_ratio() >= seeding_ratio) or + (mode == "time" and state.get_seeding_time() >= seeding_time)): self.stop() @check_handle() - def set_selected_files(self, selected_files=None, prio: int = 4, force: bool = False): + def set_selected_files(self, selected_files: list[int] | None = None, prio: int = 4, + force: bool = False) -> int | None: + """ + Set the selected files. If the selected files is None or empty, all files will be selected. + """ if not force and self.stream is not None: return if not isinstance(self.tdef, TorrentDefNoMetainfo) and not self.get_share_mode(): @@ -500,13 +611,19 @@ def map_selected(index: int) -> int: @check_handle(False) def move_storage(self, new_dir: Path) -> bool: + """ + Move the output files to a different location. + """ if not isinstance(self.tdef, TorrentDefNoMetainfo): self.handle.move_storage(str(new_dir)) self.config.set_dest_dir(new_dir) return True @check_handle() - def force_recheck(self): + def force_recheck(self) -> None: + """ + Force libtorrent to validate the files. + """ if not isinstance(self.tdef, TorrentDefNoMetainfo): if self.get_state().get_status() == DownloadStatus.STOPPED: self.pause_after_next_hashcheck = True @@ -521,50 +638,26 @@ def get_state(self) -> DownloadState: return DownloadState(self, self.lt_status, self.error) @task - async def save_resume_data(self, timeout: int = 10): + async def save_resume_data(self, timeout: int = 10) -> None: """ Save the resume data of a download. This method returns when the resume data is available. Note that this method only calls save_resume_data once on subsequent calls. 
""" - if 'save_resume_data' not in self.futures: + if "save_resume_data" not in self.futures: handle = await self.get_handle() handle.save_resume_data() try: - await wait_for(self.wait_for_alert('save_resume_data_alert', None, - 'save_resume_data_failed_alert', + await wait_for(self.wait_for_alert("save_resume_data_alert", None, + "save_resume_data_failed_alert", lambda a: SaveResumeDataError(a.error.message())), timeout=timeout) except (CancelledError, SaveResumeDataError, TimeoutError, asyncio.exceptions.TimeoutError) as e: - self._logger.error("Resume data failed to save: %s", e) - - def get_peer_list(self, include_have: bool = True) -> List[Dict[Any, Any]]: - """ Returns a list of dictionaries, one for each connected peer - containing the statistics for that peer. In particular, the - dictionary contains the keys: -
-        'id' = PeerID or 'http seed'
-        'extended_version' = Peer client version, as received during the extend handshake message
-        'ip' = IP address as string or URL of httpseed
-        'port' = Port
-        'pex_received' = True/False
-        'optimistic' = True/False
-        'direction' = 'L'/'R' (outgoing/incoming)
-        'uprate' = Upload rate in KB/s
-        'uinterested' = Upload Interested: True/False
-        'uchoked' = Upload Choked: True/False
-        'uhasqueries' = Upload has requests in buffer and not choked
-        'uflushed' = Upload is not flushed
-        'downrate' = Download rate in KB/s
-        'dinterested' = Download interested: True/Flase
-        'dchoked' = Download choked: True/False
-        'snubbed' = Download snubbed: True/False
-        'utotal' = Total uploaded from peer in KB
-        'dtotal' = Total downloaded from peer in KB
-        'completed' = Fraction of download completed by peer (0-1.0)
-        -- QUESTION(lipu): swift and Bitfield are gone. Does this 'have' thing has anything to do with swift?
-        'have' = Bitfield object for this peer if not complete
-        'speed' = The peer's current total download speed (estimated)
-        
+ self._logger.exception("Resume data failed to save: %s", e) + + def get_peer_list(self, include_have: bool = True) -> List[PeerDict | PeerDictHave]: + """ + Returns a list of dictionaries, one for each connected peer containing the statistics for that peer. + In particular, the dictionary contains the keys. """ peers = [] peer_infos = self.handle.get_peer_info() if self.handle and self.handle.is_valid() else [] @@ -604,7 +697,9 @@ def get_peer_list(self, include_have: bool = True) -> List[Dict[Any, Any]]: return peers def get_num_connected_seeds_peers(self) -> Tuple[int, int]: - """ Returns number of connected seeders and leechers """ + """ + Return the number of connected seeders and leechers. + """ num_seeds = num_peers = 0 if not self.handle or not self.handle.is_valid(): return 0, 0 @@ -617,7 +712,10 @@ def get_num_connected_seeds_peers(self) -> Tuple[int, int]: return num_seeds, num_peers - def get_torrent(self) -> object: + def get_torrent(self) -> bytes | None: + """ + Create the raw torrent data from this download. + """ if not self.handle or not self.handle.is_valid() or not self.handle.has_metadata(): return None @@ -626,15 +724,18 @@ def get_torrent(self) -> object: return t.generate() @check_handle(default={}) - def get_tracker_status(self): + def get_tracker_status(self) -> dict[str, tuple[int, str]]: + """ + Retrieve an overview of the trackers and their statuses. + """ # Make sure all trackers are in the tracker_status dict try: for announce_entry in self.handle.trackers(): - url = announce_entry['url'] + url = announce_entry["url"] if url not in self.tracker_status: - self.tracker_status[url] = [0, 'Not contacted yet'] + self.tracker_status[url] = [0, "Not contacted yet"] except UnicodeDecodeError: - self._logger.warning('UnicodeDecodeError in get_tracker_status') + self._logger.warning("UnicodeDecodeError in get_tracker_status") # Count DHT and PeX peers dht_peers = pex_peers = 0 @@ -655,24 +756,30 @@ def get_tracker_status(self): public = self.tdef and not self.tdef.is_private() result = self.tracker_status.copy() - result['[DHT]'] = [dht_peers, 'Working' if ltsession.is_dht_running() and public else 'Disabled'] - result['[PeX]'] = [pex_peers, 'Working'] + result["[DHT]"] = (dht_peers, "Working" if ltsession.is_dht_running() and public else "Disabled") + result["[PeX]"] = (pex_peers, "Working") return result - def set_state_callback(self, usercallback): - async def state_callback_loop(): + def set_state_callback(self, usercallback: Callable[[DownloadState], float | Awaitable[float]]) -> Future: + """ + Fire a callback after a second and subsequently whenever the callback returns a value larger than zero. + """ + async def state_callback_loop() -> None: if usercallback: when = 1 - while when and not self.future_removed.done() and not self.download_manager._shutdown: + while when and not self.future_removed.done() and not self.download_manager.is_shutting_down(): result = usercallback(self.get_state()) when = (await result) if iscoroutine(result) else result - if when > 0.0 and not self.download_manager._shutdown: + if when > 0.0 and not self.download_manager.is_shutting_down(): await sleep(when) return self.register_anonymous_task("downloads_cb", state_callback_loop) - async def shutdown(self): - self._logger.info('Shutting down...') + async def shutdown(self) -> None: + """ + Shut down the download. 
+ """ + self._logger.info("Shutting down...") self.alert_handlers.clear() if self.stream is not None: self.stream.close() @@ -685,7 +792,10 @@ async def shutdown(self): self.futures.clear() await self.shutdown_task_manager() - def stop(self, user_stopped=None): + def stop(self, user_stopped: bool | None = None) -> Future[None]: + """ + Stop downloading the download. + """ self._logger.debug("Stopping %s", self.tdef.get_name()) if self.stream is not None: self.stream.disable() @@ -696,7 +806,10 @@ def stop(self, user_stopped=None): return self.checkpoint() return succeed(None) - def resume(self): + def resume(self) -> None: + """ + Resume downloading the download. + """ self._logger.debug("Resuming %s", self.tdef.get_name()) self.config.set_user_stopped(False) @@ -711,7 +824,7 @@ def get_content_dest(self) -> Path: """ return self.config.get_dest_dir() / self.tdef.get_name_as_unicode() - def checkpoint(self): + def checkpoint(self) -> Awaitable[None]: """ Checkpoint this download. Returns when the checkpointing is completed. """ @@ -726,105 +839,167 @@ def checkpoint(self): if not self.handle or not self.handle.is_valid(): # Libtorrent hasn't received or initialized this download yet # 1. Check if we have data for this infohash already (don't overwrite it if we do!) - basename = hexlify(self.tdef.get_infohash()).decode() + '.conf' + basename = hexlify(self.tdef.get_infohash()).decode() + ".conf" filename = Path(self.download_manager.get_checkpoint_dir() / basename) if not filename.is_file(): # 2. If there is no saved data for this infohash, checkpoint it without data so we do not # lose it when we crash or restart before the download becomes known. resume_data = self.config.get_engineresumedata() or { - b'file-format': b"libtorrent resume file", - b'file-version': 1, - b'info-hash': self.tdef.get_infohash() + b"file-format": b"libtorrent resume file", + b"file-version": 1, + b"info-hash": self.tdef.get_infohash() } - self.post_alert('save_resume_data_alert', dict(resume_data=resume_data)) + self.post_alert("save_resume_data_alert", {"resume_data": resume_data}) return succeed(None) return self.save_resume_data() - def set_def(self, tdef: TorrentDef): + def set_def(self, tdef: TorrentDef) -> None: + """ + Set the torrent definition for this download. + """ self.tdef = tdef @check_handle() - def add_trackers(self, trackers: List[str]): - if hasattr(self.handle, 'add_tracker'): + def add_trackers(self, trackers: list[str]) -> None: + """ + Add the given trackers to the handle. + """ + if hasattr(self.handle, "add_tracker"): for tracker in trackers: - self.handle.add_tracker({'url': tracker, 'verified': False}) + self.handle.add_tracker({"url": tracker, "verified": False}) @check_handle() def get_magnet_link(self) -> str: + """ + Generate a magnet link for our download. + """ return lt.make_magnet_uri(self.handle) @require_handle - def add_peer(self, addr): - """ Add a peer address from 3rd source (not tracker, not DHT) to this download. - @param (hostname_ip,port) tuple + def add_peer(self, addr: tuple[str, int]) -> None: + """ + Add a peer address from 3rd source (not tracker, not DHT) to this download. + + :param addr: The (hostname_ip,port) tuple to connect to """ self.handle.connect_peer(addr, 0) @require_handle - def set_priority(self, priority: int): + def set_priority(self, priority: int) -> None: + """ + Set the priority of this download. 
+ """ self.handle.set_priority(priority) @require_handle - def set_max_upload_rate(self, value: int): + def set_max_upload_rate(self, value: int) -> None: + """ + Set the maximum upload rate of this download. + """ self.handle.set_upload_limit(value * 1024) @require_handle - def set_max_download_rate(self, value: int): + def set_max_download_rate(self, value: int) -> None: + """ + Set the maximum download rate of this download. + """ self.handle.set_download_limit(value * 1024) @require_handle - def apply_ip_filter(self, enable: bool): + def apply_ip_filter(self, enable: bool) -> None: + """ + Enable the IP filter on this download. + """ self.handle.apply_ip_filter(enable) - def get_share_mode(self): + def get_share_mode(self) -> bool: + """ + Get whether this download is in sharing mode. + """ return self.config.get_share_mode() @require_handle - def set_share_mode(self, share_mode): + def set_share_mode(self, share_mode: bool) -> None: + """ + Set whether this download is in sharing mode. + """ self.config.set_share_mode(share_mode) self.handle.set_share_mode(share_mode) - def get_upload_mode(self): + def get_upload_mode(self) -> bool: + """ + Get whether this download is in upload mode. + """ return self.config.get_upload_mode() @require_handle - def set_upload_mode(self, upload_mode): + def set_upload_mode(self, upload_mode: bool) -> None: + """ + Set whether this download is in upload mode. + """ self.config.set_upload_mode(upload_mode) self.handle.set_upload_mode(upload_mode) @require_handle - def force_dht_announce(self): + def force_dht_announce(self) -> None: + """ + Force announce thid download on the DHT. + """ self.handle.force_dht_announce() @require_handle - def set_sequential_download(self, enable): + def set_sequential_download(self, enable: bool) -> None: + """ + Set this download to sequential download mode. + """ self.handle.set_sequential_download(enable) @check_handle(None) - def set_piece_priorities(self, piece_priorities): + def set_piece_priorities(self, piece_priorities: list[int]) -> None: + """ + Set the priority for all pieces in the download. + """ self.handle.prioritize_pieces(piece_priorities) @check_handle([]) - def get_piece_priorities(self): + def get_piece_priorities(self) -> list[int]: + """ + Get the priorities of all pieces in the download. + """ return self.handle.piece_priorities() @check_handle(None) - def set_file_priorities(self, file_priorities): + def set_file_priorities(self, file_priorities: list[int]) -> None: + """ + Set the priority for all files in the download. + """ self.handle.prioritize_files(file_priorities) def set_file_priority(self, file_index: int, prio: int = 4) -> None: + """ + Set the priority for a particular file in the download. + """ self.handle.file_priority(file_index, prio) @check_handle(None) - def reset_piece_deadline(self, piece): + def reset_piece_deadline(self, piece: int) -> None: + """ + Reset the deadline for the given piece. + """ self.handle.reset_piece_deadline(piece) @check_handle(None) - def set_piece_deadline(self, piece, deadline, flags=0): + def set_piece_deadline(self, piece: int, deadline: int, flags: int = 0) -> None: + """ + Set the deadline for a given piece. + """ self.handle.set_piece_deadline(piece, deadline, flags) @check_handle([]) - def get_file_priorities(self): + def get_file_priorities(self) -> list[int]: + """ + Get the priorities of all files in the download. 
+ """ return self.handle.file_priorities() def file_piece_range(self, file_path: Path) -> list[int]: diff --git a/src/tribler/core/libtorrent/download_manager/download_config.py b/src/tribler/core/libtorrent/download_manager/download_config.py index ca587998d7..bf3feddf5a 100644 --- a/src/tribler/core/libtorrent/download_manager/download_config.py +++ b/src/tribler/core/libtorrent/download_manager/download_config.py @@ -3,7 +3,7 @@ import base64 from io import StringIO from pathlib import Path -from typing import TYPE_CHECKING, Dict, Optional +from typing import TYPE_CHECKING, Dict import libtorrent as lt from configobj import ConfigObj @@ -36,26 +36,39 @@ def _from_dict(value: Dict) -> str: binary = lt.bencode(value) base64_bytes = base64.b64encode(binary) - return base64_bytes.decode('utf-8') + return base64_bytes.decode() -def _to_dict(value: str) -> Optional[Dict]: - binary = value.encode('utf-8') +def _to_dict(value: str) -> dict | None: + binary = value.encode() # b'==' is added to avoid incorrect padding - base64_bytes = base64.b64decode(binary + b'==') + base64_bytes = base64.b64decode(binary + b"==") return lt.bdecode(base64_bytes) class DownloadConfig: - def __init__(self, config: ConfigObj | None = None): + """ + A configuration belonging to a specific download. + """ + + def __init__(self, config: ConfigObj | None = None) -> None: + """ + Create a download config from the given ConfigObj. + """ self.config = config @staticmethod - def get_spec_file_name(settings: TriblerConfigManager): + def get_spec_file_name(settings: TriblerConfigManager) -> str: + """ + Get the file name of the download spec. + """ return str(Path(settings.get("state_dir")) / SPEC_FILENAME) @staticmethod - def from_defaults(settings: TriblerConfigManager): + def from_defaults(settings: TriblerConfigManager) -> DownloadConfig: + """ + Create a new download config from the given Tribler configuration. + """ spec_file_name = DownloadConfig.get_spec_file_name(settings) defaults = ConfigObj(StringIO(SPEC_CONTENT)) defaults["filename"] = spec_file_name @@ -71,63 +84,105 @@ def from_defaults(settings: TriblerConfigManager): return config - def copy(self): + def copy(self) -> DownloadConfig: + """ + Create a copy of this config. + """ return DownloadConfig(ConfigObj(self.config)) - def write(self, filename: Path): + def write(self, filename: Path) -> None: + """ + Write the contents of this config to a file. + """ self.config.filename = Path(filename) self.config.write() - def set_dest_dir(self, path: Path | str): + def set_dest_dir(self, path: Path | str) -> None: """ Sets the directory where to save this Download. :param path: A path of a directory. """ - self.config['download_defaults']['saveas'] = str(path) + self.config["download_defaults"]["saveas"] = str(path) def get_dest_dir(self) -> Path: """ Gets the directory where to save this Download. """ - dest_dir = self.config['download_defaults']['saveas'] + dest_dir = self.config["download_defaults"]["saveas"] return Path(dest_dir) - def set_hops(self, hops): - self.config['download_defaults']['hops'] = hops + def set_hops(self, hops: int) -> None: + """ + Set the number of hops for the download. + """ + self.config["download_defaults"]["hops"] = hops - def get_hops(self): - return self.config['download_defaults']['hops'] + def get_hops(self) -> int: + """ + Get the set number of hops for the download. 
+ """ + return self.config["download_defaults"]["hops"] - def set_safe_seeding(self, value): - self.config['download_defaults']['safe_seeding'] = value + def set_safe_seeding(self, value: bool) -> None: + """ + Set the safe seeding mode of the download. + """ + self.config["download_defaults"]["safe_seeding"] = value - def get_safe_seeding(self): - return self.config['download_defaults']['safe_seeding'] + def get_safe_seeding(self) -> bool: + """ + Get the safe seeding mode of the download. + """ + return self.config["download_defaults"]["safe_seeding"] - def set_user_stopped(self, value): - self.config['download_defaults']['user_stopped'] = value + def set_user_stopped(self, value: bool) -> None: + """ + Set whether the download has been stopped by the user. + """ + self.config["download_defaults"]["user_stopped"] = value - def get_user_stopped(self): - return self.config['download_defaults']['user_stopped'] + def get_user_stopped(self) -> bool: + """ + Get whether the download has been stopped by the user. + """ + return self.config["download_defaults"]["user_stopped"] - def set_share_mode(self, value): - self.config['download_defaults']['share_mode'] = value + def set_share_mode(self, value: bool) -> None: + """ + Set whether the download is in sharing mode. + """ + self.config["download_defaults"]["share_mode"] = value - def get_share_mode(self): - return self.config['download_defaults']['share_mode'] + def get_share_mode(self) -> bool: + """ + Get whether the download is in sharing mode. + """ + return self.config["download_defaults"]["share_mode"] - def set_upload_mode(self, value): - self.config['download_defaults']['upload_mode'] = value + def set_upload_mode(self, value: bool) -> None: + """ + Set whether the download is in upload-only mode. + """ + self.config["download_defaults"]["upload_mode"] = value - def get_upload_mode(self): - return self.config['download_defaults']['upload_mode'] + def get_upload_mode(self) -> bool: + """ + Get whether the download is in upload-only mode. + """ + return self.config["download_defaults"]["upload_mode"] - def set_time_added(self, value): - self.config['download_defaults']['time_added'] = value + def set_time_added(self, value: int) -> None: + """ + Set the UNIX timestamp for when this download was added. + """ + self.config["download_defaults"]["time_added"] = value - def get_time_added(self): - return self.config['download_defaults']['time_added'] + def get_time_added(self) -> int: + """ + Get the UNIX timestamp for when this download was added. + """ + return self.config["download_defaults"]["time_added"] def set_selected_files(self, file_indexes: list[int]) -> None: """ @@ -135,27 +190,48 @@ def set_selected_files(self, file_indexes: list[int]) -> None: :param file_indexes: List of file indexes as ordered in the torrent (e.g. [0,1]) """ - self.config['download_defaults']['selected_file_indexes'] = file_indexes + self.config["download_defaults"]["selected_file_indexes"] = file_indexes - def get_selected_files(self): - """ Returns the list of files selected for download. - @return A list of file indexes. """ - return self.config['download_defaults']['selected_file_indexes'] + def get_selected_files(self) -> list[int]: + """ + Returns the list of files selected for download. - def set_bootstrap_download(self, value): - self.config['download_defaults']['bootstrap_download'] = value + :return: A list of file indexes. 
+ """ + return self.config["download_defaults"]["selected_file_indexes"] - def get_bootstrap_download(self): - return self.config['download_defaults']['bootstrap_download'] + def set_bootstrap_download(self, value: bool) -> None: + """ + Mark this download as a bootstrap download. + """ + self.config["download_defaults"]["bootstrap_download"] = value + + def get_bootstrap_download(self) -> bool: + """ + Get whether this download is a bootstrap download. + """ + return self.config["download_defaults"]["bootstrap_download"] - def set_metainfo(self, metainfo: Dict): - self.config['state']['metainfo'] = _from_dict(metainfo) + def set_metainfo(self, metainfo: dict) -> None: + """ + Set the metainfo dict for this download. + """ + self.config["state"]["metainfo"] = _from_dict(metainfo) - def get_metainfo(self) -> Optional[Dict]: - return _to_dict(self.config['state']['metainfo']) + def get_metainfo(self) -> dict | None: + """ + Get the metainfo dict for this download or None if it cannot be decoded. + """ + return _to_dict(self.config["state"]["metainfo"]) - def set_engineresumedata(self, engineresumedata: Dict): - self.config['state']['engineresumedata'] = _from_dict(engineresumedata) + def set_engineresumedata(self, engineresumedata: dict) -> None: + """ + Set the engine resume data dict for this download. + """ + self.config["state"]["engineresumedata"] = _from_dict(engineresumedata) - def get_engineresumedata(self) -> Optional[Dict]: - return _to_dict(self.config['state']['engineresumedata']) + def get_engineresumedata(self) -> dict | None: + """ + Get the engine resume data dict for this download or None if it cannot be decoded. + """ + return _to_dict(self.config["state"]["engineresumedata"]) diff --git a/src/tribler/core/libtorrent/download_manager/download_manager.py b/src/tribler/core/libtorrent/download_manager/download_manager.py index 370bcc5be7..d94a1548c7 100644 --- a/src/tribler/core/libtorrent/download_manager/download_manager.py +++ b/src/tribler/core/libtorrent/download_manager/download_manager.py @@ -1,38 +1,40 @@ """ -A wrapper around libtorrent +A wrapper around libtorrent. 
Author(s): Egbert Bouman """ from __future__ import annotations import asyncio -from collections import defaultdict -from pathlib import Path -from tempfile import TemporaryDirectory - -import libtorrent as lt import logging import os import time as timemod from asyncio import CancelledError, gather, iscoroutine, shield, sleep, wait_for -from binascii import unhexlify, hexlify +from binascii import hexlify, unhexlify +from collections import defaultdict from copy import deepcopy -from typing import Callable, Dict, List, Optional, Union +from pathlib import Path +from tempfile import TemporaryDirectory +from typing import TYPE_CHECKING, Any, Callable, Dict, Iterable, List +import libtorrent as lt from configobj import ConfigObj from ipv8.taskmanager import TaskManager from validate import Validator from yarl import URL from tribler.core.libtorrent import torrents -from tribler.core.libtorrent.download_manager.dht_health_manager import DHTHealthManager from tribler.core.libtorrent.download_manager.download import Download from tribler.core.libtorrent.download_manager.download_config import DownloadConfig -from tribler.core.libtorrent.download_manager.download_state import DownloadStatus -from tribler.core.libtorrent.torrentdef import TorrentDefNoMetainfo, TorrentDef -from tribler.core.libtorrent.uris import url_to_path, unshorten -from tribler.core.notifier import Notifier, Notification -from tribler.tribler_config import TriblerConfigManager +from tribler.core.libtorrent.download_manager.download_state import DownloadState, DownloadStatus +from tribler.core.libtorrent.torrentdef import TorrentDef, TorrentDefNoMetainfo +from tribler.core.libtorrent.uris import unshorten, url_to_path +from tribler.core.notifier import Notification, Notifier + +if TYPE_CHECKING: + from tribler.core.libtorrent.download_manager.dht_health_manager import DHTHealthManager + from tribler.core.libtorrent.torrents import TorrentFileResult + from tribler.tribler_config import TriblerConfigManager SOCKS5_PROXY_DEF = 2 @@ -53,19 +55,28 @@ logger = logging.getLogger(__name__) -def encode_atp(atp): +def encode_atp(atp: dict) -> dict: + """ + Encode the "Add Torrent Params" dictionary: strings are encoded to bytes and Paths are converted to strings. + """ for k, v in atp.items(): if isinstance(v, str): - atp[k] = v.encode('utf-8') + atp[k] = v.encode() elif isinstance(v, Path): atp[k] = str(v) return atp class DownloadManager(TaskManager): + """ + The manager of all downloads. + """ def __init__(self, config: TriblerConfigManager, notifier: Notifier, metadata_tmpdir: TemporaryDirectory | None = None) -> None: + """ + Create a new download manager. + """ super().__init__() self.config = config @@ -89,7 +100,7 @@ def __init__(self, config: TriblerConfigManager, notifier: Notifier, self.checkpoints_loaded = 0 self.all_checkpoints_are_loaded = False - self.metadata_tmpdir = metadata_tmpdir or TemporaryDirectory(suffix='tribler_metainfo_tmpdir') + self.metadata_tmpdir = metadata_tmpdir or TemporaryDirectory(suffix="tribler_metainfo_tmpdir") # Dictionary that maps infohashes to download instances. These include only downloads that have # been made specifically for fetching metainfo, and will be removed afterwards. 
self.metainfo_requests = {} @@ -98,7 +109,7 @@ def __init__(self, config: TriblerConfigManager, notifier: Notifier, self.default_alert_mask = lt.alert.category_t.error_notification | lt.alert.category_t.status_notification | \ lt.alert.category_t.storage_notification | lt.alert.category_t.performance_warning | \ lt.alert.category_t.tracker_notification | lt.alert.category_t.debug_notification - self.session_stats_callback: Optional[Callable] = None + self.session_stats_callback: Callable | None = None self.state_cb_count = 0 # Status of libtorrent session to indicate if it can safely close and no pending writes to disk exists. @@ -107,11 +118,17 @@ def __init__(self, config: TriblerConfigManager, notifier: Notifier, self.dht_readiness_timeout = config.get("libtorrent/dht_readiness_timeout") self._last_states_list = [] + def is_shutting_down(self) -> bool: + """ + Whether the download manager is currently shutting down. + """ + return self._shutdown + @staticmethod def convert_rate(rate: int) -> int: """ Rate conversion due to the fact that we had a different system with Swift - and the old python BitTorrent core: unlimited == 0, stop == -1, else rate in kbytes + and the old python BitTorrent core: unlimited == 0, stop == -1, else rate in kbytes. """ if rate == 0: return -1 @@ -123,7 +140,7 @@ def convert_rate(rate: int) -> int: def reverse_convert_rate(rate: int) -> int: """ Rate conversion due to the fact that we had a different system with Swift - and the old python BitTorrent core: unlimited == 0, stop == -1, else rate in kbytes + and the old python BitTorrent core: unlimited == 0, stop == -1, else rate in kbytes. """ if rate == -1: return 0 @@ -131,18 +148,22 @@ def reverse_convert_rate(rate: int) -> int: return -1 return rate // 1024 - async def _check_dht_ready(self, min_dht_peers=60): + async def _check_dht_ready(self, min_dht_peers: int = 60) -> None: """ Checks whether we got enough DHT peers. If the number of DHT peers is low, checking for a bunch of torrents in a short period of time may result in several consecutive requests sent to the same peers. This can trigger those peers' flood protection mechanism, which results in DHT checks stuck for hours. + See https://github.com/Tribler/tribler/issues/5319 """ while not (self.get_session() and self.get_session().status().dht_nodes > min_dht_peers): await asyncio.sleep(1) - def initialize(self): + def initialize(self) -> None: + """ + Initialize the directory structure, launch the periodic tasks and start libtorrent background processes. + """ # Create the checkpoints directory self.checkpoint_directory.mkdir(exist_ok=True) @@ -155,23 +176,32 @@ def initialize(self): if self.dht_readiness_timeout > 0 and self.config.get("libtorrent/dht"): self.dht_ready_task = self.register_task("check_dht_ready", self._check_dht_ready) self.register_task("request_torrent_updates", self._request_torrent_updates, interval=1) - self.register_task('task_cleanup_metacache', self._task_cleanup_metainfo_cache, interval=60, delay=0) + self.register_task("task_cleanup_metacache", self._task_cleanup_metainfo_cache, interval=60, delay=0) self.set_download_states_callback(self.sesscb_states_callback) - def start(self): + def start(self) -> None: + """ + Start loading the checkpoints from disk. + """ self.register_task("start", self.load_checkpoints) - def notify_shutdown_state(self, state): - logger.info(f'Notify shutdown state: {state}') + def notify_shutdown_state(self, state: str) -> None: + """ + Call the notifier to signal a shutdown state update. 
+ """ + logger.info("Notify shutdown state: %s", state) self.notifier.notify(Notification.tribler_shutdown_state, state=state) - async def shutdown(self, timeout=30): - logger.info('Shutting down...') + async def shutdown(self, timeout: int = 30) -> None: + """ + Shut down all pending tasks and background tasks. + """ + logger.info("Shutting down...") self.cancel_pending_task("start") self.cancel_pending_task("download_states_lc") if self.downloads: - logger.info('Stopping downloads...') + logger.info("Stopping downloads...") self.notify_shutdown_state("Checkpointing Downloads...") await gather(*[download.stop() for download in self.downloads.values()], return_exceptions=True) @@ -187,7 +217,7 @@ async def shutdown(self, timeout=30): timeout -= 1 await asyncio.sleep(1) - logger.info('Awaiting shutdown task manager...') + logger.info("Awaiting shutdown task manager...") await self.shutdown_task_manager() if self.dht_health_manager: @@ -195,62 +225,68 @@ async def shutdown(self, timeout=30): # Save libtorrent state if self.has_session(): - logger.info('Saving state...') - with open(self.state_dir / LTSTATE_FILENAME, 'wb') as ltstate_file: + logger.info("Saving state...") + with open(self.state_dir / LTSTATE_FILENAME, "wb") as ltstate_file: # noqa: ASYNC101 ltstate_file.write(lt.bencode(self.get_session().save_state())) if self.has_session() and self.config.get("libtorrent/upnp"): - logger.info('Stopping upnp...') + logger.info("Stopping upnp...") self.get_session().stop_upnp() # Remove metadata temporary directory if self.metadata_tmpdir: - logger.info('Removing temp directory...') + logger.info("Removing temp directory...") self.metadata_tmpdir.cleanup() self.metadata_tmpdir = None - logger.info('Shutdown completed') + logger.info("Shutdown completed") - def is_shutdown_ready(self): + def is_shutdown_ready(self) -> bool: + """ + Check if the libtorrent shutdown is complete. + """ return all(self.lt_session_shutdown_ready.values()) - def create_session(self, hops=0): + def create_session(self, hops: int = 0) -> lt.session: # noqa: PLR0912, PLR0915 + """ + Construct a libtorrent session for the given number of anonymization hops. + """ # Due to a bug in Libtorrent 0.16.18, the outgoing_port and num_outgoing_ports value should be set in # the settings dictionary - logger.info('Creating a session') - settings = {'outgoing_port': 0, - 'num_outgoing_ports': 1, - 'allow_multiple_connections_per_ip': 0, - 'enable_upnp': int(self.config.get("libtorrent/upnp")), - 'enable_dht': int(self.config.get("libtorrent/dht")), - 'enable_lsd': int(self.config.get("libtorrent/lsd")), - 'enable_natpmp': int(self.config.get("libtorrent/natpmp"))} + logger.info("Creating a session") + settings = {"outgoing_port": 0, + "num_outgoing_ports": 1, + "allow_multiple_connections_per_ip": 0, + "enable_upnp": int(self.config.get("libtorrent/upnp")), + "enable_dht": int(self.config.get("libtorrent/dht")), + "enable_lsd": int(self.config.get("libtorrent/lsd")), + "enable_natpmp": int(self.config.get("libtorrent/natpmp"))} # Copy construct so we don't modify the default list extensions = list(DEFAULT_LT_EXTENSIONS) - logger.info(f'Hops: {hops}.') + logger.info("Hops: %d.", hops) # Elric: Strip out the -rcX, -beta, -whatever tail on the version string. 
- fingerprint = ['TL', 0, 0, 0, 0] + fingerprint = ["TL", 0, 0, 0, 0] ltsession = lt.session(lt.fingerprint(*fingerprint), flags=0) if hops == 0 else lt.session(flags=0) libtorrent_port = self.config.get("libtorrent/port") - logger.info(f'Libtorrent port: {libtorrent_port}') + logger.info("Libtorrent port: %d", libtorrent_port) if hops == 0: - settings['user_agent'] = 'Tribler/Experimental' + settings["user_agent"] = "Tribler/Experimental" enable_utp = self.config.get("libtorrent/utp") - settings['enable_outgoing_utp'] = enable_utp - settings['enable_incoming_utp'] = enable_utp - settings['prefer_rc4'] = True + settings["enable_outgoing_utp"] = enable_utp + settings["enable_incoming_utp"] = enable_utp + settings["prefer_rc4"] = True settings["listen_interfaces"] = f"0.0.0.0:{libtorrent_port or 6881}" - settings['handshake_client_version'] = f"Tribler/Experimental" + settings["handshake_client_version"] = "Tribler/Experimental" else: - settings['enable_outgoing_utp'] = True - settings['enable_incoming_utp'] = True - settings['enable_outgoing_tcp'] = False - settings['enable_incoming_tcp'] = False - settings['anonymous_mode'] = True - settings['force_proxy'] = True + settings["enable_outgoing_utp"] = True + settings["enable_incoming_utp"] = True + settings["enable_outgoing_tcp"] = False + settings["enable_incoming_tcp"] = False + settings["anonymous_mode"] = True + settings["force_proxy"] = True self.set_session_settings(ltsession, settings) ltsession.set_alert_mask(self.default_alert_mask) @@ -267,19 +303,19 @@ def create_session(self, hops=0): if hops == 0: ltsession.listen_on(libtorrent_port, libtorrent_port + 10) try: - with open(self.state_dir / LTSTATE_FILENAME, 'rb') as fp: + with open(self.state_dir / LTSTATE_FILENAME, "rb") as fp: lt_state = lt.bdecode(fp.read()) if lt_state is not None: ltsession.load_state(lt_state) else: logger.warning("the lt.state appears to be corrupt, writing new data on shutdown") except Exception as exc: - logger.info(f"could not load libtorrent state, got exception: {exc!r}. starting from scratch") + logger.info("could not load libtorrent state, got exception: %s. starting from scratch", repr(exc)) else: rate = DownloadManager.get_libtorrent_max_upload_rate(self.config) download_rate = DownloadManager.get_libtorrent_max_download_rate(self.config) - settings = {'upload_rate_limit': rate, - 'download_rate_limit': download_rate} + settings = {"upload_rate_limit": rate, + "download_rate_limit": download_rate} self.set_session_settings(ltsession, settings) if self.config.get("libtorrent/dht"): @@ -288,21 +324,28 @@ def create_session(self, hops=0): ltsession.add_dht_router(*router) ltsession.start_lsd() - logger.info(f"Started libtorrent session for {hops} hops on port {ltsession.listen_port()}") + logger.info("Started libtorrent session for %d hops on port %d", hops, ltsession.listen_port()) self.lt_session_shutdown_ready[hops] = False return ltsession - def has_session(self, hops=0): + def has_session(self, hops: int = 0) -> bool: + """ + Check if we have a session for the given number of anonymization hops. + """ return hops in self.ltsessions - def get_session(self, hops=0): + def get_session(self, hops: int = 0) -> lt.session: + """ + Get the session for the given number of anonymization hops. 
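+
+        The session is created lazily on the first request for a given hop count and cached afterwards.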
+ """ if hops not in self.ltsessions: self.ltsessions[hops] = self.create_session(hops) return self.ltsessions[hops] - def set_proxy_settings(self, ltsession, ptype, server=None, auth=None): + def set_proxy_settings(self, ltsession: lt.session, ptype: int, server: tuple[str, str | int] | None = None, + auth: tuple[str, str] | None = None) -> None: """ Apply the proxy settings to a libtorrent session. This mechanism changed significantly in libtorrent 1.1.0. """ @@ -318,43 +361,61 @@ def set_proxy_settings(self, ltsession, ptype, server=None, auth=None): settings["proxy_password"] = auth[1] self.set_session_settings(ltsession, settings) - def set_max_connections(self, conns, hops=None): - self._map_call_on_ltsessions(hops, 'set_max_connections', conns) + def set_max_connections(self, conns: int, hops: int | None = None) -> None: + """ + Set the maximum number of connections for the given hop count. + """ + self._map_call_on_ltsessions(hops, "set_max_connections", conns) - def set_upload_rate_limit(self, rate, hops=None): + def set_upload_rate_limit(self, rate: int) -> None: + """ + Set the upload rate limit for the given session. + """ # Rate conversion due to the fact that we had a different system with Swift # and the old python BitTorrent core: unlimited == 0, stop == -1, else rate in kbytes libtorrent_rate = self.convert_rate(rate=rate) # Pass outgoing_port and num_outgoing_ports to dict due to bug in libtorrent 0.16.18 - settings_dict = {'upload_rate_limit': libtorrent_rate, 'outgoing_port': 0, 'num_outgoing_ports': 1} + settings_dict = {"upload_rate_limit": libtorrent_rate, "outgoing_port": 0, "num_outgoing_ports": 1} for session in self.ltsessions.values(): self.set_session_settings(session, settings_dict) - def get_upload_rate_limit(self, hops=0): + def get_upload_rate_limit(self, hops: int = 0) -> int: + """ + Get the upload rate limit for the session with the given hop count. + """ # Rate conversion due to the fact that we had a different system with Swift # and the old python BitTorrent core: unlimited == 0, stop == -1, else rate in kbytes libtorrent_rate = self.get_session(hops).upload_rate_limit() return self.reverse_convert_rate(rate=libtorrent_rate) - def set_download_rate_limit(self, rate, hops=None): + def set_download_rate_limit(self, rate: int) -> None: + """ + Set the download rate limit for the given session. + """ libtorrent_rate = self.convert_rate(rate=rate) # Pass outgoing_port and num_outgoing_ports to dict due to bug in libtorrent 0.16.18 - settings_dict = {'download_rate_limit': libtorrent_rate} + settings_dict = {"download_rate_limit": libtorrent_rate} for session in self.ltsessions.values(): self.set_session_settings(session, settings_dict) - def get_download_rate_limit(self, hops=0): + def get_download_rate_limit(self, hops: int = 0) -> int: + """ + Get the download rate limit for the session with the given hop count. + """ libtorrent_rate = self.get_session(hops=hops).download_rate_limit() return self.reverse_convert_rate(rate=libtorrent_rate) - def process_alert(self, alert, hops=0): + def process_alert(self, alert, hops: int = 0) -> None: # noqa: C901, PLR0912 + """ + Process a libtorrent alert. + """ alert_type = alert.__class__.__name__ # Periodically, libtorrent will send us a state_update_alert, which contains the torrent status of # all torrents changed since the last time we received this alert. 
- if alert_type == 'state_update_alert': + if alert_type == "state_update_alert": for status in alert.status: infohash = status.info_hash.to_bytes() if infohash not in self.downloads: @@ -362,20 +423,20 @@ def process_alert(self, alert, hops=0): continue self.downloads[infohash].update_lt_status(status) - if alert_type == 'state_changed_alert': + if alert_type == "state_changed_alert": infohash = alert.handle.info_hash().to_bytes() if infohash not in self.downloads: logger.debug("Got state_change for unknown torrent %s", hexlify(infohash)) else: self.downloads[infohash].update_lt_status(alert.handle.status()) - infohash = (alert.handle.info_hash().to_bytes() if hasattr(alert, 'handle') and alert.handle.is_valid() - else getattr(alert, 'info_hash', b'')) + infohash = (alert.handle.info_hash().to_bytes() if hasattr(alert, "handle") and alert.handle.is_valid() + else getattr(alert, "info_hash", b"")) download = self.downloads.get(infohash) if download: is_process_alert = (download.handle and download.handle.is_valid()) \ - or (not download.handle and alert_type == 'add_torrent_alert') \ - or (download.handle and alert_type == 'torrent_removed_alert') + or (not download.handle and alert_type == "add_torrent_alert") \ + or (download.handle and alert_type == "torrent_removed_alert") if is_process_alert: download.process_alert(alert, alert_type) else: @@ -383,16 +444,16 @@ def process_alert(self, alert, hops=0): elif infohash: logger.debug("Got alert for unknown download %s: %s", infohash, alert) - if alert_type == 'listen_succeeded_alert': + if alert_type == "listen_succeeded_alert": self.listen_ports[hops][alert.address] = alert.port - elif alert_type == 'peer_disconnected_alert': + elif alert_type == "peer_disconnected_alert": self.notifier.notify(Notification.peer_disconnected, peer_id=alert.pid.to_bytes()) - elif alert_type == 'session_stats_alert': - queued_disk_jobs = alert.values['disk.queued_disk_jobs'] - queued_write_bytes = alert.values['disk.queued_write_bytes'] - num_write_jobs = alert.values['disk.num_write_jobs'] + elif alert_type == "session_stats_alert": + queued_disk_jobs = alert.values["disk.queued_disk_jobs"] + queued_write_bytes = alert.values["disk.queued_write_bytes"] + num_write_jobs = alert.values["disk.num_write_jobs"] if queued_disk_jobs == queued_write_bytes == num_write_jobs == 0: self.lt_session_shutdown_ready[hops] = True @@ -402,33 +463,36 @@ def process_alert(self, alert, hops=0): elif alert_type == "dht_pkt_alert": # Unfortunately, the Python bindings don't have a direction attribute. # So, we'll have to resort to using the string representation of the alert instead. - incoming = str(alert).startswith('<==') + incoming = str(alert).startswith("<==") decoded = lt.bdecode(alert.pkt_buf) if not decoded: return # We are sending a raw DHT message - notify the DHTHealthManager of the outstanding request. - if not incoming and decoded.get(b'y') == b'q' \ - and decoded.get(b'q') == b'get_peers' and decoded[b'a'].get(b'scrape') == 1: - self.dht_health_manager.requesting_bloomfilters(decoded[b't'], - decoded[b'a'][b'info_hash']) + if not incoming and decoded.get(b"y") == b"q" \ + and decoded.get(b"q") == b"get_peers" and decoded[b"a"].get(b"scrape") == 1: + self.dht_health_manager.requesting_bloomfilters(decoded[b"t"], + decoded[b"a"][b"info_hash"]) # We received a raw DHT message - decode it and check whether it is a BEP33 message. 
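             # A BEP33 reply carries two 256-byte bloom filters under b"BFsd" (seeders) and
             # b"BFpe" (peers) inside the b"r" response dictionary; an illustrative
             # (hypothetical) reply shape:
             #     {b"t": ..., b"y": b"r", b"r": {b"BFsd": <256 bytes>, b"BFpe": <256 bytes>}}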
-        if incoming and b'r' in decoded and b'BFsd' in decoded[b'r'] and b'BFpe' in decoded[b'r']:
-            self.dht_health_manager.received_bloomfilters(decoded[b't'],
-                                                          bytearray(decoded[b'r'][b'BFsd']),
-                                                          bytearray(decoded[b'r'][b'BFpe']))
+        if incoming and b"r" in decoded and b"BFsd" in decoded[b"r"] and b"BFpe" in decoded[b"r"]:
+            self.dht_health_manager.received_bloomfilters(decoded[b"t"],
+                                                          bytearray(decoded[b"r"][b"BFsd"]),
+                                                          bytearray(decoded[b"r"][b"BFpe"]))
 
-    def update_ip_filter(self, lt_session, ip_addresses):
-        logger.debug('Updating IP filter %s', ip_addresses)
+    def update_ip_filter(self, lt_session: lt.session, ip_addresses: Iterable[str]) -> None:
+        """
+        Apply an IP filter to the session that blocks all addresses except the given IPs.
+        """
+        logger.debug("Updating IP filter %s", ip_addresses)
         ip_filter = lt.ip_filter()
-        ip_filter.add_rule('0.0.0.0', '255.255.255.255', 1)
+        ip_filter.add_rule("0.0.0.0", "255.255.255.255", 1)
         for ip in ip_addresses:
             ip_filter.add_rule(ip, ip, 0)
         lt_session.set_ip_filter(ip_filter)
 
-    async def get_metainfo(self, infohash: bytes, timeout: float = 7, hops: Optional[int] = None,
-                           url: str | None = None, raise_errors: bool = False) -> Optional[Dict]:
+    async def get_metainfo(self, infohash: bytes, timeout: float = 7, hops: int | None = None,
+                           url: str | None = None, raise_errors: bool = False) -> dict | None:
         """
         Lookup metainfo for a given infohash. The mechanism works by joining the swarm for the infohash,
         connecting to a few peers, and downloading the metadata for the torrent.
@@ -441,17 +505,17 @@ async def get_metainfo(self, infohash: bytes, timeout: float = 7, hops: Optional
         """
         infohash_hex = hexlify(infohash)
         if infohash in self.metainfo_cache:
-            logger.info('Returning metainfo from cache for %s', infohash_hex)
-            return self.metainfo_cache[infohash]['meta_info']
+            logger.info("Returning metainfo from cache for %s", infohash_hex)
+            return self.metainfo_cache[infohash]["meta_info"]
 
-        logger.info('Trying to fetch metainfo for %s', infohash_hex)
+        logger.info("Trying to fetch metainfo for %s", infohash_hex)
         if infohash in self.metainfo_requests:
             download = self.metainfo_requests[infohash][0]
             self.metainfo_requests[infohash][1] += 1
         elif infohash in self.downloads:
             download = self.downloads[infohash]
         else:
-            tdef = TorrentDefNoMetainfo(infohash, b'metainfo request', url=url)
+            tdef = TorrentDefNoMetainfo(infohash, b"metainfo request", url=url)
             dcfg = DownloadConfig.from_defaults(self.config)
             dcfg.set_hops(hops or self.config.get("libtorrent/download_defaults/number_hops"))
             dcfg.set_upload_mode(True)  # Upload mode should prevent libtorrent from creating files
@@ -461,21 +525,21 @@ async def get_metainfo(self, infohash: bytes, timeout: float = 7, hops: Optional
             except TypeError as e:
                 logger.warning(e)
                 if raise_errors:
-                    raise e
+                    raise
                 return None
             self.metainfo_requests[infohash] = [download, 1]
 
         try:
             metainfo = download.tdef.get_metainfo() or await wait_for(shield(download.future_metainfo), timeout)
         except (CancelledError, asyncio.TimeoutError) as e:
-            logger.warning(f'{type(e).__name__}: {e} (timeout={timeout})')
-            logger.info('Failed to retrieve metainfo for %s', infohash_hex)
+            logger.warning("%s: %s (timeout=%f)", type(e).__name__, str(e), timeout)
+            logger.info("Failed to retrieve metainfo for %s", infohash_hex)
             if raise_errors:
-                raise e
+                raise
             return None
 
-        logger.info('Successfully retrieved metainfo for %s', infohash_hex)
-        self.metainfo_cache[infohash] = {'time': timemod.time(), 'meta_info': metainfo}
+        logger.info("Successfully retrieved metainfo for %s", infohash_hex)
+        self.metainfo_cache[infohash] = {"time": timemod.time(), "meta_info": metainfo}
 
         if infohash in self.metainfo_requests:
             self.metainfo_requests[infohash][1] -= 1
@@ -485,44 +549,47 @@ async def get_metainfo(self, infohash: bytes, timeout: float = 7, hops: Optional
 
         return metainfo
 
-    def _task_cleanup_metainfo_cache(self):
+    def _task_cleanup_metainfo_cache(self) -> None:
         oldest_time = timemod.time() - METAINFO_CACHE_PERIOD
         for info_hash, cache_entry in list(self.metainfo_cache.items()):
-            last_time = cache_entry['time']
+            last_time = cache_entry["time"]
             if last_time < oldest_time:
                 del self.metainfo_cache[info_hash]
 
-    def _request_torrent_updates(self):
+    def _request_torrent_updates(self) -> None:
         for ltsession in self.ltsessions.values():
             if ltsession:
                 ltsession.post_torrent_updates(0xffffffff)
 
-    def _task_process_alerts(self):
+    def _task_process_alerts(self) -> None:
         for hops, ltsession in list(self.ltsessions.items()):
             if ltsession:
                 for alert in ltsession.pop_alerts():
                     self.process_alert(alert, hops=hops)
 
-    def _map_call_on_ltsessions(self, hops, funcname, *args, **kwargs):
+    def _map_call_on_ltsessions(self, hops: int | None, funcname: str, *args: Any, **kwargs) -> None:  # noqa: ANN401
         if hops is None:
             for session in self.ltsessions.values():
                 getattr(session, funcname)(*args, **kwargs)
         else:
             getattr(self.get_session(hops), funcname)(*args, **kwargs)
 
-    async def start_download_from_uri(self, uri, config=None):
-        logger.info(f'Start download from URI: {uri}')
+    async def start_download_from_uri(self, uri: str, config: DownloadConfig | None = None) -> Download:
+        """
+        Start a download from the given uri.
+        """
+        logger.info("Start download from URI: %s", uri)
         uri = await unshorten(uri)
         scheme = URL(uri).scheme
         if scheme in ("http", "https"):
-            logger.info('Http(s) scheme detected')
+            logger.info("Http(s) scheme detected")
             tdef = await TorrentDef.load_from_url(uri)
             return await self.start_download(tdef=tdef, config=config)
         if scheme == "magnet":
-            logger.info('Magnet scheme detected')
+            logger.info("Magnet scheme detected")
             params = lt.parse_magnet_uri(uri)
             try:
                 # libtorrent 1.2.19
@@ -531,33 +598,39 @@ async def start_download_from_uri(self, uri, config=None):
                 # libtorrent 2.0.9
                 name = params.name.encode()
                 infohash = unhexlify(str(params.info_hash))
-            logger.info(f'Name: {name}. Infohash: {infohash}')
+            logger.info("Name: %s. Infohash: %s", name, infohash)
             if infohash in self.metainfo_cache:
-                logger.info('Metainfo found in cache')
-                tdef = TorrentDef.load_from_dict(self.metainfo_cache[infohash]['meta_info'])
+                logger.info("Metainfo found in cache")
+                tdef = TorrentDef.load_from_dict(self.metainfo_cache[infohash]["meta_info"])
             else:
                 logger.info("Metainfo not found in cache")
-                tdef = TorrentDefNoMetainfo(infohash, b"Unknown name" if not name else name, url=uri)
+                tdef = TorrentDefNoMetainfo(infohash, name if name else b"Unknown name", url=uri)
             return await self.start_download(tdef=tdef, config=config)
         if scheme == "file":
-            logger.info('File scheme detected')
+            logger.info("File scheme detected")
             file = url_to_path(uri)
             return await self.start_download(torrent_file=file, config=config)
-        raise Exception("invalid uri")
+        msg = "invalid uri"
+        raise Exception(msg)
 
-    async def start_download(self, torrent_file=None, tdef=None, config: DownloadConfig = None,
-                             checkpoint_disabled=False, hidden=False) -> Download:
-        logger.info(f'Starting download: filename: {torrent_file}, torrent def: {tdef}')
+    async def start_download(self, torrent_file: str | None = None, tdef: TorrentDef | None = None,
+                             config: DownloadConfig | None = None,
+                             checkpoint_disabled: bool = False, hidden: bool = False) -> Download:
+        """
+        Start a download from the given information.
+        """
+        logger.info("Starting download: filename: %s, torrent def: %s", str(torrent_file), str(tdef))
         if config is None:
             config = DownloadConfig.from_defaults(self.config)
-            logger.info('Use a default config.')
+            logger.info("Use a default config.")
 
         # the priority of the parameters is: (1) tdef, (2) torrent_file.
         # so if we have tdef, then torrent_file will be ignored, and so on.
         if tdef is None:
-            logger.info('Torrent def is None. Trying to load it from torrent file.')
+            logger.info("Torrent def is None. Trying to load it from torrent file.")
             if torrent_file is None:
-                raise ValueError("Torrent file must be provided if tdef is not given")
+                msg = "Torrent file must be provided if tdef is not given"
+                raise ValueError(msg)
             # try to get the torrent from the given torrent file
             tdef = await TorrentDef.load(torrent_file)
@@ -567,10 +640,10 @@ async def start_download(self, torrent_file=None, tdef=None, config: DownloadCon
         download = self.get_download(infohash)
 
         if download and infohash not in self.metainfo_requests:
-            logger.info('Download exists and metainfo is not requested.')
+            logger.info("Download exists and metainfo is not requested.")
             new_trackers = list(tdef.get_trackers() - download.get_def().get_trackers())
             if new_trackers:
-                logger.info(f'New trackers: {new_trackers}')
+                logger.info("New trackers: %s", str(new_trackers))
                 self.update_trackers(tdef.get_infohash(), new_trackers)
             return download
@@ -578,7 +651,7 @@ async def start_download(self, torrent_file=None, tdef=None, config: DownloadCon
         try:
             destination_directory = config.get_dest_dir()
             if not destination_directory.is_dir():
-                logger.info(f'Destination directory does not exist. Creating it: {destination_directory}')
+                logger.info("Destination directory does not exist. Creating it: %s", str(destination_directory))
                 os.makedirs(destination_directory)
         except OSError:
             logger.exception("Unable to create the download destination directory.")
@@ -594,26 +667,29 @@ async def start_download(self, torrent_file=None, tdef=None, config: DownloadCon
                                 notifier=self.notifier,
                                 state_dir=self.state_dir,
                                 download_manager=self)
-        logger.info(f'Download created: {download}')
+        logger.info("Download created: %s", str(download))
 
         atp = download.get_atp()
         logger.info("ATP: %s", str({k: v for k, v in atp.items() if k not in ["resume_data"]}))
 
         # Keep metainfo downloads in self.downloads for now because we will need to remove it later,
         # and removing the download at this point will stop us from receiving any further alerts.
         if infohash not in self.metainfo_requests or self.metainfo_requests[infohash][0] == download:
-            logger.info('Metainfo is not requested or download is the first in the queue.')
+            logger.info("Metainfo is not requested or download is the first in the queue.")
            self.downloads[infohash] = download
-        logger.info('Starting handle.')
+        logger.info("Starting handle.")
         await self.start_handle(download, atp)
         return download
 
-    async def start_handle(self, download, atp):
+    async def start_handle(self, download: Download, atp: dict) -> None:
+        """
+        Create and start the libtorrent handle for the given download.
+        """
         atp_resume_data_skipped = atp.copy()
-        resume_data = atp.get('resume_data')
+        resume_data = atp.get("resume_data")
         if resume_data:
-            atp_resume_data_skipped['resume_data'] = ''
-        logger.info(f"Start handle. Download: {download}. Atp: {atp_resume_data_skipped}")
+            atp_resume_data_skipped["resume_data"] = ""
+        logger.info("Start handle. Download: %s. Atp: %s", str(download), str(atp_resume_data_skipped))
         if resume_data:
-            logger.debug(f"Download resume data: {atp['resume_data']}")
+            logger.debug("Download resume data: %s", str(atp["resume_data"]))
 
         ltsession = self.get_session(download.config.get_hops())
         infohash = download.get_def().get_infohash()
@@ -630,13 +706,13 @@ async def start_handle(self, download, atp):
         if existing_handle:
             # Reuse existing handle
             logger.debug("Reusing handle %s", hexlify(infohash))
-            download.post_alert('add_torrent_alert', {"handle": existing_handle})
+            download.post_alert("add_torrent_alert", {"handle": existing_handle})
         else:
             # Otherwise, add it anew
             _ = self.replace_task(f"AddTorrent{infohash}", self._async_add_torrent, ltsession, infohash, atp,
                                   ignore=(Exception,))
 
-    async def _async_add_torrent(self, ltsession, infohash, atp):
+    async def _async_add_torrent(self, ltsession: lt.session, infohash: bytes, atp: dict) -> None:
         self._logger.debug("Adding handle %s", hexlify(infohash))
         # To prevent flooding the DHT with a short burst of queries and triggering
         # flood protection, we postpone adding torrents until we get enough DHT peers.
@@ -652,20 +728,22 @@ async def _async_add_torrent(self, ltsession, infohash, atp):
             self._logger.warning("Timeout waiting for libtorrent DHT getting enough peers")
         ltsession.async_add_torrent(encode_atp(atp))
 
-    def get_libtorrent_version(self):
+    def get_libtorrent_version(self) -> str:
+        """
+        Get the libtorrent version.
+        """
         try:
             return lt.__version__
         except AttributeError:
             return lt.version
 
-    def set_session_settings(self, lt_session, new_settings):
+    def set_session_settings(self, lt_session: lt.session, new_settings: dict) -> None:
         """
         Apply/set new settings in a libtorrent session.
 
         :param lt_session: The libtorrent session to apply the settings to.
         :param new_settings: The new settings to apply.
""" - # Keeping a copy of the settings because subsequent calls to get_settings are likely to fail # when libtorrent will try to decode peer_fingerprint to unicode. if lt_session not in self.ltsettings: @@ -677,13 +755,17 @@ def set_session_settings(self, lt_session, new_settings): lt_session.apply_settings(new_settings) else: lt_session.set_settings(new_settings) - except OverflowError: - raise OverflowError(f"Overflow error when setting libtorrent sessions with settings: {new_settings}") + except OverflowError as e: + msg = f"Overflow error when setting libtorrent sessions with settings: {new_settings}" + raise OverflowError(msg) from e - def get_session_settings(self, lt_session): + def get_session_settings(self, lt_session: lt.session) -> dict: + """ + Get a copy of the libtorrent settings for the given session. + """ return deepcopy(self.ltsettings.get(lt_session, {})) - def update_max_rates_from_config(self): + def update_max_rates_from_config(self) -> None: """ Set the maximum download and maximum upload rate limits with the value in the config. @@ -693,16 +775,23 @@ def update_max_rates_from_config(self): for lt_session in self.ltsessions.values(): rate = DownloadManager.get_libtorrent_max_upload_rate(self.config) download_rate = DownloadManager.get_libtorrent_max_download_rate(self.config) - settings = {'download_rate_limit': download_rate, - 'upload_rate_limit': rate} + settings = {"download_rate_limit": download_rate, + "upload_rate_limit": rate} self.set_session_settings(lt_session, settings) - def post_session_stats(self): - logger.info('Post session stats') + def post_session_stats(self) -> None: + """ + Gather statistics and cause a ``session_stats_alert``. + """ + logger.info("Post session stats") for session in self.ltsessions.values(): session.post_session_stats() - async def remove_download(self, download, remove_content=False, remove_checkpoint=True): + async def remove_download(self, download: Download, remove_content: bool = False, + remove_checkpoint: bool = True) -> None: + """ + Remove a download and optionally also remove the downloaded file(s) and checkpoint. + """ infohash = download.get_def().get_infohash() handle = download.handle @@ -727,16 +816,25 @@ async def remove_download(self, download, remove_content=False, remove_checkpoin else: logger.debug("Cannot remove unknown download") - def get_download(self, infohash: bytes) -> Download: + def get_download(self, infohash: bytes) -> Download | None: + """ + Get the download belonging to a given infohash. + """ return self.downloads.get(infohash, None) def get_downloads(self) -> List[Download]: + """ + Get a list of all known downloads. + """ return list(self.downloads.values()) - def download_exists(self, infohash): + def download_exists(self, infohash: bytes) -> bool: + """ + Check if there is a download with a given infohash. + """ return infohash in self.downloads - async def update_hops(self, download, new_hops): + async def update_hops(self, download: Download, new_hops: int) -> None: """ Update the amount of hops for a specified download. This can be done on runtime. """ @@ -753,7 +851,7 @@ async def update_hops(self, download, new_hops): await self.start_download(tdef=download.tdef, config=config) - def update_trackers(self, infohash, trackers): + def update_trackers(self, infohash: bytes, trackers: list[str]) -> None: """ Update the trackers for a download. 
@@ -788,7 +886,7 @@ def update_trackers(self, infohash, trackers): download.set_def(new_def) download.checkpoint() - def set_download_states_callback(self, user_callback, interval=1.0): + def set_download_states_callback(self, user_callback, interval: float = 1.0) -> None: """ Set the download state callback. Remove any old callback if it's present. Calls user_callback with a list of @@ -804,7 +902,7 @@ def set_download_states_callback(self, user_callback, interval=1.0): logger.debug("Starting the download state callback with interval %f", interval) self.replace_task("download_states_lc", self._invoke_states_cb, user_callback, interval=interval) - async def _invoke_states_cb(self, callback): + async def _invoke_states_cb(self, callback) -> None: """ Invoke the download states callback with a list of the download states. """ @@ -812,7 +910,7 @@ async def _invoke_states_cb(self, callback): if iscoroutine(result): await result - async def sesscb_states_callback(self, states_list): + async def sesscb_states_callback(self, states_list: list[DownloadState]) -> None: """ This method is periodically (every second) called with a list of the download states of the active downloads. """ @@ -822,29 +920,35 @@ async def sesscb_states_callback(self, states_list): download = ds.get_download() infohash = download.get_def().get_infohash() - if ds.get_status() == DownloadStatus.SEEDING: - if download.config.get_hops() == 0 and download.config.get_safe_seeding(): - # Re-add the download with anonymity enabled - hops = self.config.get("libtorrent/download_defaults/number_hops") - await self.update_hops(download, hops) + if (ds.get_status() == DownloadStatus.SEEDING and download.config.get_hops() == 0 + and download.config.get_safe_seeding()): + # Re-add the download with anonymity enabled + hops = self.config.get("libtorrent/download_defaults/number_hops") + await self.update_hops(download, hops) # Check the peers of this download every five seconds and add them to the payout manager when # this peer runs a Tribler instance if self.state_cb_count % 5 == 0 and download.config.get_hops() == 0 and self.notifier: for peer in download.get_peer_list(): - if str(peer["extended_version"]).startswith('Tribler'): + if str(peer["extended_version"]).startswith("Tribler"): self.notifier.notify(Notification.tribler_torrent_peer_update, peer_id=unhexlify(peer["id"]), infohash=infohash, balance=peer["dtotal"]) if self.state_cb_count % 4 == 0: self._last_states_list = states_list - def get_last_download_states(self): + def get_last_download_states(self) -> list[DownloadState]: + """ + Get the last download states. + """ return self._last_states_list - async def load_checkpoints(self): + async def load_checkpoints(self) -> None: + """ + Load the checkpoint files in the checkpoint directory. + """ self._logger.info("Load checkpoints...") - checkpoint_filenames = list(self.get_checkpoint_dir().glob('*.conf')) + checkpoint_filenames = list(self.get_checkpoint_dir().glob("*.conf")) self.checkpoints_count = len(checkpoint_filenames) for i, filename in enumerate(checkpoint_filenames, start=1): await self.load_checkpoint(filename) @@ -853,7 +957,10 @@ async def load_checkpoints(self): self.all_checkpoints_are_loaded = True self._logger.info("Checkpoints are loaded") - async def load_checkpoint(self, filename) -> bool: + async def load_checkpoint(self, filename: str) -> bool: + """ + Load a checkpoint from a given file name. 
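+
+        :return: Whether the checkpoint was loaded and resumed successfully.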
+ """ try: conf_obj = ConfigObj(str(filename), configspec=DownloadConfig.get_spec_file_name(self.config)) conf_obj.validate(Validator()) @@ -872,16 +979,16 @@ async def load_checkpoint(self, filename) -> bool: return False try: - url = metainfo.get(b'url') + url = metainfo.get(b"url") url = url.decode() if url is not None else url - tdef = (TorrentDefNoMetainfo(metainfo[b'infohash'], metainfo[b'name'], url) - if b'infohash' in metainfo else TorrentDef.load_from_dict(metainfo)) + tdef = (TorrentDefNoMetainfo(metainfo[b"infohash"], metainfo[b"name"], url) + if b"infohash" in metainfo else TorrentDef.load_from_dict(metainfo)) except (KeyError, ValueError) as e: self._logger.exception("Could not restore tdef from metainfo dict: %s %s ", e, metainfo) return False config.state_dir = self.state_dir - if config.get_dest_dir() == '': # removed torrent ignoring + if config.get_dest_dir() == "": # removed torrent ignoring self._logger.info("Removing checkpoint %s destdir is %s", filename, config.get_dest_dir()) os.remove(filename) return False @@ -896,10 +1003,13 @@ async def load_checkpoint(self, filename) -> bool: self._logger.exception("Not resuming checkpoint due to exception while adding download") return False - def remove_config(self, infohash): + def remove_config(self, infohash: bytes) -> None: + """ + Remove the configuration for the download belonging to the given infohash. + """ if infohash not in self.downloads: try: - basename = hexlify(infohash).decode() + '.conf' + basename = hexlify(infohash).decode() + ".conf" filename = self.get_checkpoint_dir() / basename self._logger.debug("Removing download checkpoint %s", filename) if os.access(filename, os.F_OK): @@ -910,30 +1020,34 @@ def remove_config(self, infohash): else: self._logger.warning("Download is back, restarted? Cancelling removal! %s", hexlify(infohash)) - def get_checkpoint_dir(self): + def get_checkpoint_dir(self) -> Path: """ Returns the directory in which to checkpoint the Downloads in this Session. """ return self.state_dir / "dlcheckpoints" @staticmethod - async def create_torrent_file(file_path_list, params=None): + async def create_torrent_file(file_path_list: list[str], params: dict | None = None) -> TorrentFileResult: """ Creates a torrent file. :param file_path_list: files to add in torrent file :param params: optional parameters for torrent file - :return: a Deferred that fires when the torrent file has been created """ return await asyncio.get_event_loop().run_in_executor(None, torrents.create_torrent_file, file_path_list, params or {}) - def get_downloads_by_name(self, torrent_name): + def get_downloads_by_name(self, torrent_name: str) -> list[Download]: + """ + Get all downloads for which the UTF-8 name equals the given string. + """ downloads = self.get_downloads() return [d for d in downloads if d.get_def().get_name_utf8() == torrent_name] @staticmethod - def set_libtorrent_proxy_settings(config: TriblerConfigManager, proxy_type, server=None, auth=None): + def set_libtorrent_proxy_settings(config: TriblerConfigManager, proxy_type: int, + server: tuple[str, int] | None = None, + auth: tuple[str, str] | None = None) -> None: """ Set which proxy LibTorrent should use (default = 0). 
@@ -948,20 +1062,23 @@ def set_libtorrent_proxy_settings(config: TriblerConfigManager, proxy_type, serv :param auth: (username, password) tuple or None """ config.set("libtorrent/proxy_type", proxy_type) - config.set("libtorrent/proxy_server", server if proxy_type else ':') - config.set("libtorrent/proxy_auth", auth if proxy_type in [3, 5] else ':') + config.set("libtorrent/proxy_server", server if proxy_type else ":") + config.set("libtorrent/proxy_auth", auth if proxy_type in [3, 5] else ":") - def get_libtorrent_proxy_settings(self): + def get_libtorrent_proxy_settings(self) -> tuple[int, tuple[str, str] | None, tuple[str, str] | None]: + """ + Get the settings for the libtorrent proxy. + """ proxy_server = str(self.config.get("libtorrent/proxy_server")) - proxy_server = proxy_server.split(':') if proxy_server else None + proxy_server = proxy_server.split(":") if proxy_server else None proxy_auth = str(self.config.get("libtorrent/proxy_auth")) - proxy_auth = proxy_auth.split(':') if proxy_auth else None + proxy_auth = proxy_auth.split(":") if proxy_auth else None return self.config.get("libtorrent/proxy_type"), proxy_server, proxy_auth @staticmethod - def get_libtorrent_max_upload_rate(config: TriblerConfigManager): + def get_libtorrent_max_upload_rate(config: TriblerConfigManager) -> float: """ Gets the maximum upload rate (kB / s). @@ -970,7 +1087,7 @@ def get_libtorrent_max_upload_rate(config: TriblerConfigManager): return min(config.get("libtorrent/max_upload_rate"), 2147483647) @staticmethod - def get_libtorrent_max_download_rate(config: TriblerConfigManager): + def get_libtorrent_max_download_rate(config: TriblerConfigManager) -> float: """ Gets the maximum download rate (kB / s). diff --git a/src/tribler/core/libtorrent/download_manager/download_state.py b/src/tribler/core/libtorrent/download_manager/download_state.py index 937b3d6659..5ac4c96014 100644 --- a/src/tribler/core/libtorrent/download_manager/download_state.py +++ b/src/tribler/core/libtorrent/download_manager/download_state.py @@ -10,13 +10,19 @@ from enum import Enum from typing import TYPE_CHECKING -import libtorrent - if TYPE_CHECKING: - from tribler.core.libtorrent.download_manager.download import Download + from pathlib import Path + + import libtorrent + + from tribler.core.libtorrent.download_manager.download import Download, PeerDict, PeerDictHave class DownloadStatus(Enum): + """ + The libtorrent status for a download. + """ + ALLOCATING_DISKSPACE = 0 WAITING_FOR_HASHCHECK = 1 HASHCHECKING = 2 @@ -54,7 +60,7 @@ class DownloadState: cf. libtorrent torrent_status """ - def __init__(self, download: Download, lt_status: libtorrent.torrent_status, error) -> None: + def __init__(self, download: Download, lt_status: libtorrent.torrent_status, error: str | None) -> None: """ Internal constructor. @@ -71,8 +77,7 @@ def __str__(self) -> str: """ Create a pretty printed string. """ - return "DownloadState(infohash=%s, lt_status=%s, error=%s)" % \ - (self.download.tdef.infohash, str(self.lt_status), str(self.error)) + return f"DownloadState(infohash={self.download.tdef.infohash}, lt_status={self.lt_status}, error={self.error})" def get_download(self) -> Download: """ @@ -103,16 +108,19 @@ def get_status(self) -> DownloadStatus: return DownloadStatus.STOPPED_ON_ERROR return DownloadStatus.STOPPED - def get_error(self): - """ Returns the Exception that caused the download to be moved to STOPPED_ON_ERROR status. 
-        @return An error message
+    def get_error(self) -> str | None:
+        """
+        Returns the Exception that caused the download to be moved to STOPPED_ON_ERROR status.
+
+        :return: An error message
         """
         return self.error or (self.lt_status.error if self.lt_status and self.lt_status.error else None)
 
-    def get_current_speed(self, direct):
+    def get_current_speed(self, direct: str) -> int:
         """
         Returns the current up or download speed.
-        @return The speed in bytes/s.
+
+        :return: The speed in bytes/s.
         """
         if not self.lt_status or self.get_status() not in [DownloadStatus.DOWNLOADING, DownloadStatus.SEEDING]:
             return 0
@@ -120,10 +128,11 @@ def get_current_speed(self, direct):
             return self.lt_status.upload_rate
         return self.lt_status.download_rate
 
-    def get_current_payload_speed(self, direct):
+    def get_current_payload_speed(self, direct: str) -> int:
         """
         Returns the current up or download payload speed.
-        @return The speed in bytes/s.
+
+        :return: The speed in bytes/s.
         """
         if not self.lt_status or self.get_status() not in [DownloadStatus.DOWNLOADING, DownloadStatus.SEEDING]:
             return 0
@@ -209,22 +218,27 @@ def get_all_time_ratio(self) -> float:
 
         return self.all_time_upload / self.all_time_download
 
-    def get_seeding_time(self):
+    def get_seeding_time(self) -> int:
+        """
+        The active time (not paused), while finished and while being a seed, in seconds.
+        """
         return self.lt_status.finished_time if self.lt_status else 0
 
-    def get_eta(self):
+    def get_eta(self) -> float:
         """
         Returns the estimated time to finish of download.
-        @return The time in ?, as ?.
+
+        :return: The estimated remaining download time, in seconds.
         """
         return (1.0 - self.get_progress()) * (float(self.download.get_def().get_length()) /
                                               max(0.000001, self.lt_status.download_rate)) \
             if self.lt_status else 0.0
 
-    def get_num_seeds_peers(self):
+    def get_num_seeds_peers(self) -> tuple[int, int]:
         """
         Returns the sum of the number of seeds and peers.
-        @return A tuple (num seeds, num peers)
+
+        :return: A tuple (num seeds, num peers)
         """
         if not self.lt_status or self.get_status() not in [DownloadStatus.DOWNLOADING, DownloadStatus.SEEDING]:
             return 0, 0
@@ -244,16 +258,19 @@ def get_pieces_complete(self) -> list[bool]:
         """
         return self.lt_status.pieces if self.lt_status else []
 
-    def get_pieces_total_complete(self):
-        """ Returns the number of total and completed pieces
-        @return A tuple containing two integers, total and completed nr of pieces
+    def get_pieces_total_complete(self) -> tuple[int, int]:
+        """
+        Returns the number of total and completed pieces.
+
+        :return: A tuple containing two integers, total and completed nr of pieces
         """
         return (len(self.lt_status.pieces), sum(self.lt_status.pieces)) if self.lt_status else (0, 0)
 
-    def get_files_completion(self):
-        """ Returns a list of filename, progress tuples indicating the progress
+    def get_files_completion(self) -> list[tuple[Path, float]]:
+        """
+        Returns a list of filename, progress tuples indicating the progress
         for every file selected using set_selected_files. Progress is a float
-        between 0 and 1
+        between 0 and 1.
         """
         completion = []
@@ -272,12 +289,14 @@ def get_files_completion(self):
 
         return completion
 
-    def get_selected_files(self):
+    def get_selected_files(self) -> list[int] | None:
+        """
+        Get the selection status of the download's files, or None if it is not available.
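+        An empty selection is reported as None.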
+ """ selected_files = self.download.config.get_selected_files() - if len(selected_files) > 0: - return selected_files + return selected_files if len(selected_files) > 0 else None - def get_availability(self): + def get_availability(self) -> float: """ Return the overall availability of all pieces, using connected peers. @@ -315,7 +334,7 @@ def get_availability(self): return nr_seeders_complete + nr_leechers_complete + fraction_additonal return nr_seeders_complete - def get_peer_list(self, include_have: bool = True): + def get_peer_list(self, include_have: bool = True) -> list[PeerDict | PeerDictHave]: """ Returns a list of dictionaries, one for each connected peer, containing the statistics for that peer. """ diff --git a/src/tribler/core/libtorrent/download_manager/stream.py b/src/tribler/core/libtorrent/download_manager/stream.py index 1256bcf424..a4d850ba34 100644 --- a/src/tribler/core/libtorrent/download_manager/stream.py +++ b/src/tribler/core/libtorrent/download_manager/stream.py @@ -20,11 +20,17 @@ import logging from asyncio import sleep -from types import TracebackType -from typing import Generator +from typing import TYPE_CHECKING, Generator + +from typing_extensions import Self from tribler.core.libtorrent.download_manager.download_state import DownloadStatus -from tribler.core.libtorrent.torrents import check_vod, get_info_from_handle +from tribler.core.libtorrent.torrents import check_vod + +if TYPE_CHECKING: + from types import TracebackType + + from tribler.core.libtorrent.download_manager.download import Download # Header and footer sizes are necessary for video client to detect file codecs and muxer metadata. # Without below pieces are ready, streamer should not start @@ -54,11 +60,15 @@ class NotStreamingError(Exception): - pass + """ + An attempt was made to create a chunk for no stream. + """ class NoAvailableStreamError(Exception): - pass + """ + An attempt was made to create a stream for no files. + """ class Stream: @@ -66,7 +76,10 @@ class Stream: Holds the streaming status of a specific download. """ - def __init__(self, download): + def __init__(self, download: Download) -> None: + """ + Create a stream for the given download. + """ self._logger = logging.getLogger(self.__class__.__name__) self.infohash = None self.filename = None @@ -106,7 +119,7 @@ def __init__(self, download): self.__resetdeadline = download.reset_piece_deadline self.__resumedownload = download.resume - async def __prepare(self, download): + async def __prepare(self, download: Download) -> None: # wait for an handle first await download.get_handle() self.destdir = download.get_content_dest() @@ -123,7 +136,7 @@ async def __prepare(self, download): self.infohash = tdef.get_infohash() self.mapfile = tdef.torrent_info.map_file - async def enable(self, fileindex=0, prebufpos=None): + async def enable(self, fileindex: int = 0, prebufpos: int | None = None) -> None: """ Enable streaming mode for a given fileindex. """ @@ -133,7 +146,7 @@ async def enable(self, fileindex=0, prebufpos=None): # if fileindex not available for torrent raise exception if fileindex >= len(self.files): - raise NoAvailableStreamError() + raise NoAvailableStreamError # if download is stopped for some reason, resume it. 
         self.__resumedownload()
@@ -157,7 +170,7 @@ async def enable(self, fileindex: int = 0, prebufpos: int | None = None) -> None:
             # if the prebuffposition is updated, update the static prebuff pieces
             currrent_prebuf = list(self.prebuffpieces)
             currrent_prebuf.extend(self.bytestopieces(prebufpos, self.prebuffsize))
-            self.prebuffpieces = sorted(list(set(currrent_prebuf)))
+            self.prebuffpieces = sorted(set(currrent_prebuf))
             return
 
         # update the file name and size with the file index
@@ -183,7 +196,7 @@
     @property
     def enabled(self) -> bool:
         """
-        Check if stream is enabled
+        Check if stream is enabled.
         """
         return self.infohash is not None and self.fileindex is not None
 
@@ -191,7 +204,7 @@ def enabled(self) -> bool:
     @check_vod(0)
     def headerprogress(self) -> float:
         """
-        Get current progress of downloaded header pieces of the enabled stream, if not enabled returns 0
+        Get current progress of downloaded header pieces of the enabled stream, if not enabled returns 0.
         """
         return self.calculateprogress(self.headerpieces, False)
 
@@ -199,7 +212,7 @@
     def footerprogress(self) -> float:
         """
-        Get current progress of downloaded footer pieces of the enabled stream, if not enabled returns 0
+        Get current progress of downloaded footer pieces of the enabled stream, if not enabled returns 0.
         """
         return self.calculateprogress(self.footerpieces, False)
 
@@ -207,7 +220,7 @@
     def prebuffprogress(self) -> float:
         """
-        Get current progress of downloaded prebuff pieces of the enabled stream, if not enabled returns 0
+        Get current progress of downloaded prebuff pieces of the enabled stream, if not enabled returns 0.
         """
         return self.calculateprogress(self.prebuffpieces, False)
 
@@ -215,7 +228,7 @@
     def prebuffprogress_consec(self) -> float:
         """
-        Get current progress of cosequently downloaded prebuff pieces of the enabled stream, if not enabled returns 0
+        Get current progress of consecutively downloaded prebuff pieces of the enabled stream, if not enabled returns 0.
         """
         return self.calculateprogress(self.prebuffpieces, True)
 
@@ -223,14 +236,14 @@
     @check_vod([])
     def pieceshave(self) -> list[int]:
         """
-        Get a list of Booleans indicating that individual pieces of the selected fileindex has been downloaded or not
+        Get a list of Booleans indicating whether individual pieces of the selected fileindex have been downloaded.
         """
         return self.__lt_state().get_pieces_complete()
 
     @check_vod(True)
     def disable(self) -> None:
         """
-        Stop Streaming
+        Stop streaming.
         """
         self.fileindex = None
         self.headerpieces = []
@@ -242,7 +255,7 @@ def disable(self) -> None:
     def close(self) -> None:
         """
-        Close this class gracefully
+        Close this class gracefully.
         """
         # Close the coroutine. Unnecessary calls should be harmless.
         self.__prepare_coro.close()
 
@@ -251,7 +264,7 @@
     @check_vod([])
     def bytestopieces(self, bytes_begin: int, bytes_end: int) -> list[int]:
         """
-        Returns the pieces that represents the given byte range
+        Returns the pieces that represent the given byte range.
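+        Negative offsets are interpreted relative to the end of the file.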
""" bytes_begin = min(self.filesize, bytes_begin) if bytes_begin >= 0 else self.filesize + bytes_begin bytes_end = min(self.filesize, bytes_end) if bytes_end > 0 else self.filesize + bytes_end @@ -265,7 +278,7 @@ def bytestopieces(self, bytes_begin: int, bytes_end: int) -> list[int]: @check_vod(-1) def bytetopiece(self, byte_begin: int) -> int: """ - Finds the piece position that begin_bytes is mapped to + Finds the piece position that begin_bytes is mapped to. """ return self.mapfile(self.fileindex, byte_begin, 0).piece @@ -273,7 +286,7 @@ def bytetopiece(self, byte_begin: int) -> int: def calculateprogress(self, pieces: list[int], consec: bool) -> float: """ Claculates the download progress of a given piece list. - if consec is True, calcaulation is based only the pieces downloaded sequentially + if consec is True, calcaulation is based only the pieces downloaded sequentially. """ if not pieces: return 1.0 @@ -289,7 +302,7 @@ def calculateprogress(self, pieces: list[int], consec: bool) -> float: def iterpieces(self, have: bool | None = None, consec: bool = False, startfrom: int | None = None) -> Generator[int, None, None]: """ - Generator function that yield the pieces for the active fileindex + Generator function that yield the pieces for the active fileindex. :param have: None: nofilter, True: only pieces we have, False: only pieces we dont have :param consec: True: sequentially, False: all pieces @@ -300,29 +313,25 @@ def iterpieces(self, have: bool | None = None, consec: bool = False, for piece in range(self.firstpiece, self.lastpiece + 1): if startfrom is not None and piece < startfrom: continue - if have is None: - yield piece - elif have and pieces_have[piece]: - yield piece - elif not have and not pieces_have[piece]: + if have is None or have and pieces_have[piece] or not have and not pieces_have[piece]: yield piece elif consec: break - async def updateprios(self): + async def updateprios(self) -> None: # noqa: C901, PLR0912, PLR0915 """ This async function controls how the individual piece priority and deadline is configured. This method is called when a stream in enabled, and when a chunk reads the stream each time. - The performance of this method is crucical since it gets called quite frequently + The performance of this method is crucical since it gets called quite frequently. """ if not self.enabled: return - def _updateprio(piece, prio, deadline=None): + def _updateprio(piece: int, prio: int, deadline: int | None = None) -> None: """ - Utility function to update piece priorities + Utility function to update piece priorities. """ - if not curr_prio == prio: + if curr_prio != prio: piecepriorities[piece] = prio if deadline is not None: # it is cool to step deadlines with 10ms interval but in realty there is no need. @@ -332,10 +341,10 @@ def _updateprio(piece, prio, deadline=None): self.__resetdeadline(piece) diffmap[piece] = f"{piece}:-:{curr_prio}->{prio}" - def _find_deadline(piece): + def _find_deadline(piece: int) -> tuple[int, int]: """ - Find the cursor which has this piece closest to its start - Returns the deadline for the piece and the cursor startbyte + Find the cursor which has this piece closest to its start. + Returns the deadline for the piece and the cursor startbyte. 
""" # if piece is not in piecemaps, then there is no deadline # if piece in piecemaps, then the deadline is the index of the related piecemap @@ -370,23 +379,22 @@ def _find_deadline(piece): elif piece in self.prebuffpieces: _updateprio(piece, 7, 2) staticbuff = True + elif staticbuff: + _updateprio(piece, 0) else: - if staticbuff: - _updateprio(piece, 0) - else: - # dynamic buffering - deadline, cursor = _find_deadline(piece) - if cursor is not None: - if deadline < len(DEADLINE_PRIO_MAP): - # get prio according to deadline - _updateprio(piece, DEADLINE_PRIO_MAP[deadline], deadline) - else: - # the deadline is outside of map, set piece prio 1 with the deadline - # buffer size is bigger then prio_map - _updateprio(piece, 1, deadline) + # dynamic buffering + deadline, cursor = _find_deadline(piece) + if cursor is not None: + if deadline < len(DEADLINE_PRIO_MAP): + # get prio according to deadline + _updateprio(piece, DEADLINE_PRIO_MAP[deadline], deadline) else: - # the piece is not in buffer zone, set to min prio without deadline - _updateprio(piece, MIN_PIECE_PRIO) + # the deadline is outside of map, set piece prio 1 with the deadline + # buffer size is bigger then prio_map + _updateprio(piece, 1, deadline) + else: + # the piece is not in buffer zone, set to min prio without deadline + _updateprio(piece, MIN_PIECE_PRIO) if diffmap: # log stuff self._logger.info("Piece Piority changed: %s", repr(diffmap)) @@ -399,10 +407,10 @@ def _find_deadline(piece): self._logger.debug("Current Prios: %s", [(x, piecepriorities[x]) for x in self.iterpieces(have=False)]) self.__setpieceprios(piecepriorities) - def resetprios(self, pieces=None, prio=None): + def resetprios(self, pieces: list[int] | None = None, prio: int | None = None) -> None: """ Resets the prios and deadline of the pieces of the active fileindex, - If no pieces are provided, resets every piece for the fileindex + If no pieces are provided, resets every piece for the fileindex. """ prio = prio if prio is not None else 4 piecepriorities = self.__getpieceprios() @@ -428,7 +436,7 @@ def __init__(self, stream: Stream, startpos: int = 0) -> None: """ self._logger = logging.getLogger(self.__class__.__name__) if not stream.enabled: - raise NotStreamingError() + raise NotStreamingError self.stream = stream self.file = None self.startpos = startpos @@ -441,7 +449,7 @@ def seekpos(self) -> int: """ return self.__seekpos - async def __aenter__(self) -> StreamChunk: + async def __aenter__(self) -> Self: """ Open the chunk. """ @@ -458,28 +466,31 @@ async def __aexit__(self, exc_type: type[BaseException] | None, exc_value: BaseE async def open(self) -> None: """ - Opens the file in the filesystem until its ready and seeks to the seekpos position + Opens the file in the filesystem until its ready and seeks to the seekpos position. """ while not self.stream.filename.exists(): await sleep(1) - self.file = open(self.stream.filename, 'rb') + self.file = open(self.stream.filename, 'rb') # noqa: ASYNC101, SIM115 self.file.seek(self.seekpos) @property def isclosed(self) -> bool: + """ + Check if the file (if it exists) belonging to this chunk is closed. + """ return self.file is None or self.file.closed @property def isstarted(self) -> bool: """ - Checks if the this chunk has already registered itself to stream instance + Checks if the this chunk has already registered itself to stream instance. 
""" return self.startpos in self.stream.cursorpiecemap @property def ispaused(self) -> bool: """ - Checks if the chunk is in paused state + Checks if the chunk is in paused state. """ if self.isstarted and self.stream.cursorpiecemap[self.startpos][0]: return True @@ -489,7 +500,7 @@ def ispaused(self) -> bool: def shouldpause(self) -> bool: """ Checks if this chunk should pause, based on the desicion that - any other chunks also is streaming the same torrent or not + any other chunks also is streaming the same torrent or not. """ for spos in self.stream.cursorpiecemap: if spos == self.startpos: @@ -501,7 +512,7 @@ def shouldpause(self) -> bool: def pause(self, force: bool = False) -> bool: """ - Sets the chunk pieces to pause, if not forced, chunk is only paused if other chunks are not paused + Sets the chunk pieces to pause, if not forced, chunk is only paused if other chunks are not paused. """ if not self.ispaused and (self.shouldpause or force): self.stream.cursorpiecemap[self.startpos][0] = True @@ -510,7 +521,7 @@ def pause(self, force: bool = False) -> bool: def resume(self, force: bool = False) -> bool: """ - Sets the chunk pieces to resume, if not forced, chunk is only resume if other chunks are paused + Sets the chunk pieces to resume, if not forced, chunk is only resume if other chunks are paused. """ if self.ispaused and (not self.shouldpause or force): self.stream.cursorpiecemap[self.startpos][0] = False @@ -519,8 +530,8 @@ def resume(self, force: bool = False) -> bool: async def seek(self, positionbyte: int) -> list[int]: """ - Seeks the stream to the related picece that represents the position byte - Also updates the dynamic buffer accordingly + Seeks the stream to the related picece that represents the position byte. + Also updates the dynamic buffer accordingly. """ buffersize = 0 pospiece = self.stream.bytetopiece(positionbyte) diff --git a/src/tribler/core/libtorrent/restapi/create_torrent_endpoint.py b/src/tribler/core/libtorrent/restapi/create_torrent_endpoint.py index 8750aed9c5..6d87621148 100644 --- a/src/tribler/core/libtorrent/restapi/create_torrent_endpoint.py +++ b/src/tribler/core/libtorrent/restapi/create_torrent_endpoint.py @@ -4,6 +4,7 @@ import libtorrent as lt from aiohttp import web +from aiohttp.abc import Request from aiohttp_apispec import docs, json_schema from ipv8.REST.schema import schema from marshmallow.fields import String @@ -12,8 +13,13 @@ from tribler.core.libtorrent.download_manager.download_config import DownloadConfig from tribler.core.libtorrent.download_manager.download_manager import DownloadManager from tribler.core.libtorrent.torrentdef import TorrentDef -from tribler.core.restapi.rest_endpoint import HTTP_BAD_REQUEST, RESTEndpoint, RESTResponse, MAX_REQUEST_SIZE, \ - return_handled_exception +from tribler.core.restapi.rest_endpoint import ( + HTTP_BAD_REQUEST, + MAX_REQUEST_SIZE, + RESTEndpoint, + RESTResponse, + return_handled_exception, +) def recursive_bytes(obj): @@ -25,9 +31,9 @@ def recursive_bytes(obj): """ if isinstance(obj, dict): return {recursive_bytes(k): recursive_bytes(v) for k, v in obj.items()} - elif isinstance(obj, list): + if isinstance(obj, list): return [recursive_bytes(i) for i in obj] - elif isinstance(obj, str): + if isinstance(obj, str): return obj.encode('utf8') return obj @@ -35,29 +41,34 @@ def recursive_bytes(obj): class CreateTorrentEndpoint(RESTEndpoint): """ Create a torrent file from local files. 
+ See: http://www.bittorrent.org/beps/bep_0012.html """ - path = '/createtorrent' - def __init__(self, download_manager: DownloadManager, client_max_size: int = MAX_REQUEST_SIZE): + path = "/createtorrent" + + def __init__(self, download_manager: DownloadManager, client_max_size: int = MAX_REQUEST_SIZE) -> None: + """ + Create a new endpoint to create torrents. + """ super().__init__(client_max_size=client_max_size) self.download_manager = download_manager - self.app.add_routes([web.post('', self.create_torrent)]) + self.app.add_routes([web.post("", self.create_torrent)]) @docs( tags=["Libtorrent"], summary="Create a torrent from local files and return it in base64 encoding.", parameters=[{ - 'in': 'query', - 'name': 'download', - 'description': 'Flag indicating whether or not to start downloading', - 'type': 'boolean', - 'required': False + "in": "query", + "name": "download", + "description": "Flag indicating whether or not to start downloading", + "type": "boolean", + "required": False }], responses={ 200: { - "schema": schema(CreateTorrentResponse={'torrent': 'base64 encoded torrent file'}), - "examples": {'Success': {"success": True}} + "schema": schema(CreateTorrentResponse={"torrent": "base64 encoded torrent file"}), + "examples": {"Success": {"success": True}} }, HTTP_BAD_REQUEST: { "schema": HandledErrorSchema, @@ -66,44 +77,46 @@ def __init__(self, download_manager: DownloadManager, client_max_size: int = MAX } ) @json_schema(schema(CreateTorrentRequest={ - 'files': [String], - 'name': String, - 'description': String, - 'trackers': [String], - 'export_dir': String + "files": [String], + "name": String, + "description": String, + "trackers": [String], + "export_dir": String })) - async def create_torrent(self, request): + async def create_torrent(self, request: Request) -> RESTResponse: + """ + Create a torrent from local files and return it in base64 encoding. 
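
For reference, a hedged client-side sketch of calling this endpoint; the REST address, file path, and tracker URL are assumptions:

import asyncio

from aiohttp import ClientSession


async def create_torrent_example() -> str:
    async with ClientSession() as session, session.post(
        "http://localhost:8085/createtorrent",                     # assumed REST address
        params={"download": "1"},                                  # also start downloading it
        json={"files": ["/tmp/data/file.txt"],                     # hypothetical input file
              "name": "my_torrent",
              "trackers": ["http://tracker.example.org/announce"]},
    ) as response:
        # The endpoint returns the new .torrent file as base64 text.
        return (await response.json())["torrent"]

asyncio.run(create_torrent_example())
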
+ """ parameters = await request.json() params = {} - if 'files' in parameters and parameters['files']: - file_path_list = parameters['files'] + if parameters.get("files"): + file_path_list = parameters["files"] else: return RESTResponse({"error": "files parameter missing"}, status=HTTP_BAD_REQUEST) - if 'description' in parameters and parameters['description']: - params['comment'] = parameters['description'] + if parameters.get("description"): + params["comment"] = parameters["description"] - if 'trackers' in parameters and parameters['trackers']: - tracker_url_list = parameters['trackers'] - params['announce'] = tracker_url_list[0] - params['announce-list'] = tracker_url_list + if parameters.get("trackers"): + tracker_url_list = parameters["trackers"] + params["announce"] = tracker_url_list[0] + params["announce-list"] = tracker_url_list - name = 'unknown' - if 'name' in parameters and parameters['name']: - name = parameters['name'] - params['name'] = name + name = "unknown" + if parameters.get("name"): + name = parameters["name"] + params["name"] = name export_dir = None - if 'export_dir' in parameters and parameters['export_dir']: - export_dir = Path(parameters['export_dir']) - - params['created by'] = f"Tribler version: Tribler Experimental" + if parameters.get("export_dir"): + export_dir = Path(parameters["export_dir"]) - params['nodes'] = False - params['httpseeds'] = False - params['encoding'] = False - params['piece length'] = 0 # auto + params["created by"] = "Tribler version: Tribler Experimental" + params["nodes"] = False + params["httpseeds"] = False + params["encoding"] = False + params["piece length"] = 0 # auto try: result = await self.download_manager.create_torrent_file(file_path_list, recursive_bytes(params)) @@ -111,18 +124,18 @@ async def create_torrent(self, request): self._logger.exception(e) return return_handled_exception(e) - metainfo_dict = lt.bdecode(result['metainfo']) + metainfo_dict = lt.bdecode(result["metainfo"]) if export_dir and export_dir.exists(): save_path = export_dir / (f"{name}.torrent") - with open(save_path, "wb") as fd: - fd.write(result['metainfo']) + with open(save_path, "wb") as fd: # noqa: ASYNC101 + fd.write(result["metainfo"]) # Download this torrent if specified - if 'download' in request.query and request.query['download'] and request.query['download'] == "1": + if "download" in request.query and request.query["download"] and request.query["download"] == "1": download_config = DownloadConfig.from_defaults(self.download_manager.config) - download_config.set_dest_dir(result['base_dir']) + download_config.set_dest_dir(result["base_dir"]) download_config.set_hops(self.download_manager.config.get("libtorrent/download_defaults/number_hops")) await self.download_manager.start_download(tdef=TorrentDef(metainfo_dict), config=download_config) - return RESTResponse(json.dumps({"torrent": base64.b64encode(result['metainfo']).decode('utf-8')})) + return RESTResponse(json.dumps({"torrent": base64.b64encode(result["metainfo"]).decode()})) diff --git a/src/tribler/core/libtorrent/restapi/downloads_endpoint.py b/src/tribler/core/libtorrent/restapi/downloads_endpoint.py index 5c27d26780..7a478bbed3 100644 --- a/src/tribler/core/libtorrent/restapi/downloads_endpoint.py +++ b/src/tribler/core/libtorrent/restapi/downloads_endpoint.py @@ -1,19 +1,23 @@ -from asyncio import CancelledError, TimeoutError as AsyncTimeoutError, wait_for -from binascii import unhexlify, hexlify +from __future__ import annotations + +from asyncio import CancelledError, wait_for +from 
asyncio import TimeoutError as AsyncTimeoutError +from binascii import hexlify, unhexlify from contextlib import suppress -from pathlib import PurePosixPath, Path +from pathlib import Path, PurePosixPath +from typing import TYPE_CHECKING, TypedDict import libtorrent as lt from aiohttp import web from aiohttp_apispec import docs, json_schema -from ipv8.REST.schema import schema from ipv8.messaging.anonymization.tunnel import PEER_FLAG_EXIT_BT +from ipv8.REST.schema import schema from marshmallow.fields import Boolean, Float, Integer, List, String from tribler.core.libtorrent.download_manager.download import Download, IllegalFileIndex from tribler.core.libtorrent.download_manager.download_config import DownloadConfig from tribler.core.libtorrent.download_manager.download_manager import DownloadManager -from tribler.core.libtorrent.download_manager.download_state import DownloadStatus, DOWNLOAD, UPLOAD +from tribler.core.libtorrent.download_manager.download_state import DOWNLOAD, UPLOAD, DownloadStatus from tribler.core.libtorrent.download_manager.stream import STREAM_PAUSE_TIME, StreamChunk from tribler.core.restapi.rest_endpoint import ( HTTP_BAD_REQUEST, @@ -24,45 +28,68 @@ return_handled_exception, ) +if TYPE_CHECKING: + from aiohttp.abc import Request + + from tribler.core.database.store import MetadataStore + from tribler.core.tunnel.community import TriblerTunnelCommunity + TOTAL = "total" LOADED = "loaded" ALL_LOADED = "all_loaded" +class JSONFilesInfo(TypedDict): + """ + A JSON dict to describe file info. + """ + + index: int + name: str + size: int + included: bool + progress: float + + class DownloadsEndpoint(RESTEndpoint): """ This endpoint is responsible for all requests regarding downloads. Examples include getting all downloads, starting, pausing and stopping downloads. """ - path = '/downloads' - def __init__(self, download_manager: DownloadManager, metadata_store=None, tunnel_community=None): + path = "/downloads" + + def __init__(self, download_manager: DownloadManager, metadata_store: MetadataStore | None = None, + tunnel_community: TriblerTunnelCommunity | None = None) -> None: + """ + Create a new endpoint to query the status of downloads. 
+ """ super().__init__() self.download_manager = download_manager self.mds = metadata_store self.tunnel_community = tunnel_community self.app.add_routes([ - web.get('', self.get_downloads), - web.put('', self.add_download), - web.delete('/{infohash}', self.delete_download), - web.patch('/{infohash}', self.update_download), - web.get('/{infohash}/torrent', self.get_torrent), - web.get('/{infohash}/files', self.get_files), - web.get('/{infohash}/files/expand', self.expand_tree_directory), - web.get('/{infohash}/files/collapse', self.collapse_tree_directory), - web.get('/{infohash}/files/select', self.select_tree_path), - web.get('/{infohash}/files/deselect', self.deselect_tree_path), - web.get('/{infohash}/stream/{fileindex}', self.stream, allow_head=False) + web.get("", self.get_downloads), + web.put("", self.add_download), + web.delete("/{infohash}", self.delete_download), + web.patch("/{infohash}", self.update_download), + web.get("/{infohash}/torrent", self.get_torrent), + web.get("/{infohash}/files", self.get_files), + web.get("/{infohash}/files/expand", self.expand_tree_directory), + web.get("/{infohash}/files/collapse", self.collapse_tree_directory), + web.get("/{infohash}/files/select", self.select_tree_path), + web.get("/{infohash}/files/deselect", self.deselect_tree_path), + web.get("/{infohash}/stream/{fileindex}", self.stream, allow_head=False) ]) @staticmethod - def return_404(request, message="this download does not exist"): + def return_404(request: Request, message: str = "this download does not exist") -> RESTResponse: """ Returns a 404 response code if your channel has not been created. """ return RESTResponse({"error": message}, status=HTTP_NOT_FOUND) - def create_dconfig_from_params(self, parameters): + def create_dconfig_from_params(self, parameters: dict) -> tuple[DownloadConfig, None] | tuple[None, str]: """ Create a download configuration based on some given parameters. @@ -94,15 +121,14 @@ def create_dconfig_from_params(self, parameters): return download_config, None @staticmethod - def get_files_info_json(download): + def get_files_info_json(download: Download) -> list[JSONFilesInfo]: """ Return file information as JSON from a specified download. """ files_json = [] - files_completion = {name: progress for name, progress in download.get_state().get_files_completion()} + files_completion = dict(download.get_state().get_files_completion()) selected_files = download.config.get_selected_files() - file_index = 0 - for fn, size in download.get_def().get_files_with_length(): + for file_index, (fn, size) in enumerate(download.get_def().get_files_with_length()): files_json.append({ "index": file_index, # We always return files in Posix format to make GUI independent of Core and simplify testing @@ -111,11 +137,10 @@ def get_files_info_json(download): "included": (file_index in selected_files or not selected_files), "progress": files_completion.get(fn, 0.0) }) - file_index += 1 return files_json @staticmethod - def get_files_info_json_paged(download: Download, view_start: Path, view_size: int): + def get_files_info_json_paged(download: Download, view_start: Path, view_size: int) -> list[JSONFilesInfo]: """ Return file info, similar to get_files_info_json() but paged (based on view_start and view_size). 
@@ -130,7 +155,7 @@ def get_files_info_json_paged(download: Download, view_start: Path, view_size: i "index": IllegalFileIndex.unloaded.value, "name": "loading...", "size": 0, - "included": 0, + "included": False, "progress": 0.0 }] return [ @@ -149,77 +174,77 @@ def get_files_info_json_paged(download: Download, view_start: Path, view_size: i tags=["Libtorrent"], summary="Return all downloads, both active and inactive", parameters=[{ - 'in': 'query', - 'name': 'get_peers', - 'description': 'Flag indicating whether or not to include peers', - 'type': 'boolean', - 'required': False + "in": "query", + "name": "get_peers", + "description": "Flag indicating whether or not to include peers", + "type": "boolean", + "required": False }, { - 'in': 'query', - 'name': 'get_pieces', - 'description': 'Flag indicating whether or not to include pieces', - 'type': 'boolean', - 'required': False + "in": "query", + "name": "get_pieces", + "description": "Flag indicating whether or not to include pieces", + "type": "boolean", + "required": False }, { - 'in': 'query', - 'name': 'get_availability', - 'description': 'Flag indicating whether or not to include availability', - 'type': 'boolean', - 'required': False + "in": "query", + "name": "get_availability", + "description": "Flag indicating whether or not to include availability", + "type": "boolean", + "required": False }, { - 'in': 'query', - 'name': 'infohash', - 'description': 'Limit fetching of files, peers, and pieces to a specific infohash', - 'type': 'str', - 'required': False + "in": "query", + "name": "infohash", + "description": "Limit fetching of files, peers, and pieces to a specific infohash", + "type": "str", + "required": False }, { - 'in': 'query', - 'name': 'excluded', - 'description': 'If specified, only return downloads excluding this one', - 'type': 'str', - 'required': False + "in": "query", + "name": "excluded", + "description": "If specified, only return downloads excluding this one", + "type": "str", + "required": False } ], responses={ 200: { "schema": schema(DownloadsResponse={ - 'downloads': schema(Download={ - 'name': String, - 'progress': Float, - 'infohash': String, - 'speed_down': Float, - 'speed_up': Float, - 'status': String, - 'status_code': Integer, - 'size': Integer, - 'eta': Integer, - 'num_peers': Integer, - 'num_seeds': Integer, - 'all_time_upload': Integer, - 'all_time_download': Integer, - 'all_time_ratio': Float, - 'files': String, - 'trackers': String, - 'hops': Integer, - 'anon_download': Boolean, - 'safe_seeding': Boolean, - 'max_upload_speed': Integer, - 'max_download_speed': Integer, - 'destination': String, - 'availability': Float, - 'peers': String, - 'total_pieces': Integer, - 'vod_mode': Boolean, - 'vod_prebuffering_progress': Float, - 'vod_prebuffering_progress_consec': Float, - 'error': String, - 'time_added': Integer + "downloads": schema(Download={ + "name": String, + "progress": Float, + "infohash": String, + "speed_down": Float, + "speed_up": Float, + "status": String, + "status_code": Integer, + "size": Integer, + "eta": Integer, + "num_peers": Integer, + "num_seeds": Integer, + "all_time_upload": Integer, + "all_time_download": Integer, + "all_time_ratio": Float, + "files": String, + "trackers": String, + "hops": Integer, + "anon_download": Boolean, + "safe_seeding": Boolean, + "max_upload_speed": Integer, + "max_download_speed": Integer, + "destination": String, + "availability": Float, + "peers": String, + "total_pieces": Integer, + "vod_mode": Boolean, + "vod_prebuffering_progress": Float, + 
"vod_prebuffering_progress_consec": Float, + "error": String, + "time_added": Integer }), - 'checkpoints': schema(Checkpoints={ + "checkpoints": schema(Checkpoints={ TOTAL: Integer, LOADED: Integer, ALL_LOADED: Boolean, @@ -235,7 +260,10 @@ def get_files_info_json_paged(download: Download, view_start: Path, view_size: i "get_pieces flag is set. Note that setting this flag has a negative impact on performance " "and should only be used in situations where this data is required. " ) - async def get_downloads(self, request): + async def get_downloads(self, request: Request) -> RESTResponse: # noqa: C901 + """ + Return all downloads, both active and inactive. + """ params = request.query get_peers = params.get('get_peers', '0') == '1' get_pieces = params.get('get_pieces', '0') == '1' @@ -309,13 +337,13 @@ async def get_downloads(self, request): }) - if unfiltered or params.get('infohash') == info["infohash"]: + if unfiltered or params.get("infohash") == info["infohash"]: # Add peers information if requested if get_peers: peer_list = state.get_peer_list(include_have=False) for peer_info in peer_list: - if 'extended_version' in peer_info: - peer_info['extended_version'] = self._safe_extended_peer_info(peer_info['extended_version']) + if "extended_version" in peer_info: + peer_info["extended_version"] = self._safe_extended_peer_info(peer_info["extended_version"]) info["peers"] = peer_list @@ -334,43 +362,46 @@ async def get_downloads(self, request): tags=["Libtorrent"], summary="Start a download from a provided URI.", parameters=[{ - 'in': 'query', - 'name': 'get_peers', - 'description': 'Flag indicating whether or not to include peers', - 'type': 'boolean', - 'required': False + "in": "query", + "name": "get_peers", + "description": "Flag indicating whether or not to include peers", + "type": "boolean", + "required": False }, { - 'in': 'query', - 'name': 'get_pieces', - 'description': 'Flag indicating whether or not to include pieces', - 'type': 'boolean', - 'required': False + "in": "query", + "name": "get_pieces", + "description": "Flag indicating whether or not to include pieces", + "type": "boolean", + "required": False }, { - 'in': 'query', - 'name': 'get_files', - 'description': 'Flag indicating whether or not to include files', - 'type': 'boolean', - 'required': False + "in": "query", + "name": "get_files", + "description": "Flag indicating whether or not to include files", + "type": "boolean", + "required": False }], responses={ 200: { "schema": schema(AddDownloadResponse={"started": Boolean, "infohash": String}), - 'examples': {"started": True, "infohash": "4344503b7e797ebf31582327a5baae35b11bda01"} + "examples": {"started": True, "infohash": "4344503b7e797ebf31582327a5baae35b11bda01"} } }, ) @json_schema(schema(AddDownloadRequest={ - 'anon_hops': (Integer, 'Number of hops for the anonymous download. No hops is equivalent to a plain download'), - 'safe_seeding': (Boolean, 'Whether the seeding of the download should be anonymous or not'), - 'destination': (String, 'the download destination path of the torrent'), - 'uri*': (String, 'The URI of the torrent file that should be downloaded. This URI can either represent a file ' - 'location, a magnet link or a HTTP(S) url.'), + "anon_hops": (Integer, "Number of hops for the anonymous download. 
No hops is equivalent to a plain download"), + "safe_seeding": (Boolean, "Whether the seeding of the download should be anonymous or not"), + "destination": (String, "the download destination path of the torrent"), + "uri*": (String, "The URI of the torrent file that should be downloaded. This URI can either represent a file " + "location, a magnet link or a HTTP(S) url."), })) - async def add_download(self, request): + async def add_download(self, request: Request) -> RESTResponse: + """ + Start a download from a provided URI. + """ params = await request.json() - uri = params.get('uri') + uri = params.get("uri") if not uri: return RESTResponse({"error": "uri parameter missing"}, status=HTTP_BAD_REQUEST) @@ -389,41 +420,48 @@ async def add_download(self, request): tags=["Libtorrent"], summary="Remove a specific download.", parameters=[{ - 'in': 'path', - 'name': 'infohash', - 'description': 'Infohash of the download to remove', - 'type': 'string', - 'required': True + "in": "path", + "name": "infohash", + "description": "Infohash of the download to remove", + "type": "string", + "required": True }], responses={ 200: { "schema": schema(DeleteDownloadResponse={"removed": Boolean, "infohash": String}), - 'examples': {"removed": True, "infohash": "4344503b7e797ebf31582327a5baae35b11bda01"} + "examples": {"removed": True, "infohash": "4344503b7e797ebf31582327a5baae35b11bda01"} } }, ) @json_schema(schema(RemoveDownloadRequest={ 'remove_data': (Boolean, 'Whether or not to remove the associated data'), })) - async def delete_download(self, request): + async def delete_download(self, request: Request) -> RESTResponse: + """ + Remove a specific download. + """ parameters = await request.json() - if 'remove_data' not in parameters: + if "remove_data" not in parameters: return RESTResponse({"error": "remove_data parameter missing"}, status=HTTP_BAD_REQUEST) - infohash = unhexlify(request.match_info['infohash']) + infohash = unhexlify(request.match_info["infohash"]) download = self.download_manager.get_download(infohash) if not download: return DownloadsEndpoint.return_404(request) try: - await self.download_manager.remove_download(download, remove_content=parameters['remove_data']) + await self.download_manager.remove_download(download, remove_content=parameters["remove_data"]) except Exception as e: self._logger.exception(e) return return_handled_exception(e) return RESTResponse({"removed": True, "infohash": hexlify(download.get_def().get_infohash()).decode()}) - async def vod_response(self, download, parameters, request, vod_mode): + async def vod_response(self, download: Download, parameters: dict, request: Request, + vod_mode: bool) -> RESTResponse: + """ + Return a response for the VOD status of a download. 
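
Putting add_download and delete_download together gives the usual client round trip; a hedged sketch, where the REST address is an assumption and the magnet link simply reuses the example infohash from the docs:

import asyncio

from aiohttp import ClientSession

API = "http://localhost:8085"  # assumed REST address


async def add_then_remove(uri: str) -> None:
    async with ClientSession() as session:
        # Start the download; anon_hops, safe_seeding and destination are optional.
        async with session.put(f"{API}/downloads", json={"uri": uri}) as response:
            infohash = (await response.json())["infohash"]
        # remove_data is mandatory; True also deletes the downloaded files.
        await session.delete(f"{API}/downloads/{infohash}", json={"remove_data": True})

asyncio.run(add_then_remove("magnet:?xt=urn:btih:4344503b7e797ebf31582327a5baae35b11bda01"))
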
+ """ modified = False if vod_mode: file_index = parameters.get("fileindex") @@ -453,27 +491,30 @@ async def vod_response(self, download, parameters, request, vod_mode): tags=["Libtorrent"], summary="Update a specific download.", parameters=[{ - 'in': 'path', - 'name': 'infohash', - 'description': 'Infohash of the download to update', - 'type': 'string', - 'required': True + "in": "path", + "name": "infohash", + "description": "Infohash of the download to update", + "type": "string", + "required": True }], responses={ 200: { "schema": schema(UpdateDownloadResponse={"modified": Boolean, "infohash": String}), - 'examples': {"modified": True, "infohash": "4344503b7e797ebf31582327a5baae35b11bda01"} + "examples": {"modified": True, "infohash": "4344503b7e797ebf31582327a5baae35b11bda01"} } }, ) @json_schema(schema(UpdateDownloadRequest={ - 'state': (String, 'State parameter to be passed to modify the state of the download (resume/stop/recheck)'), - 'selected_files': (List(Integer), 'File indexes to be included in the download'), - 'anon_hops': (Integer, 'The anonymity of a download can be changed at runtime by passing the anon_hops ' - 'parameter, however, this must be the only parameter in this request.') + "state": (String, "State parameter to be passed to modify the state of the download (resume/stop/recheck)"), + "selected_files": (List(Integer), "File indexes to be included in the download"), + "anon_hops": (Integer, "The anonymity of a download can be changed at runtime by passing the anon_hops " + "parameter, however, this must be the only parameter in this request.") })) - async def update_download(self, request): - infohash = unhexlify(request.match_info['infohash']) + async def update_download(self, request: Request) -> RESTResponse: # noqa: C901, PLR0911, PLR0912 + """ + Update a specific download. 
+ """ + infohash = unhexlify(request.match_info["infohash"]) download = self.download_manager.get_download(infohash) if not download: return DownloadsEndpoint.return_404(request) @@ -486,10 +527,10 @@ async def update_download(self, request): status=HTTP_BAD_REQUEST) return await self.vod_response(download, parameters, request, vod_mode) - if len(parameters) > 1 and 'anon_hops' in parameters: + if len(parameters) > 1 and "anon_hops" in parameters: return RESTResponse({"error": "anon_hops must be the only parameter in this request"}, status=HTTP_BAD_REQUEST) - elif 'anon_hops' in parameters: + if 'anon_hops' in parameters: anon_hops = int(parameters['anon_hops']) try: await self.download_manager.update_hops(download, anon_hops) @@ -498,14 +539,14 @@ async def update_download(self, request): return return_handled_exception(e) return RESTResponse({"modified": True, "infohash": hexlify(download.get_def().get_infohash()).decode()}) - if 'selected_files' in parameters: - selected_files_list = parameters['selected_files'] + if "selected_files" in parameters: + selected_files_list = parameters["selected_files"] num_files = len(download.tdef.get_files()) - if not all([0 <= index < num_files for index in selected_files_list]): + if not all(0 <= index < num_files for index in selected_files_list): return RESTResponse({"error": "index out of range"}, status=HTTP_BAD_REQUEST) download.set_selected_files(selected_files_list) - if state := parameters.get('state'): + if state := parameters.get("state"): if state == "resume": download.resume() elif state == "stop": @@ -513,7 +554,7 @@ async def update_download(self, request): elif state == "recheck": download.force_recheck() elif state == "move_storage": - dest_dir = Path(parameters['dest_dir']) + dest_dir = Path(parameters["dest_dir"]) if not dest_dir.exists(): return RESTResponse({"error": f"Target directory ({dest_dir}) does not exist"}, status=HTTP_BAD_REQUEST) @@ -528,18 +569,21 @@ async def update_download(self, request): tags=["Libtorrent"], summary="Return the .torrent file associated with the specified download.", parameters=[{ - 'in': 'path', - 'name': 'infohash', - 'description': 'Infohash of the download from which to get the .torrent file', - 'type': 'string', - 'required': True + "in": "path", + "name": "infohash", + "description": "Infohash of the download from which to get the .torrent file", + "type": "string", + "required": True }], responses={ 200: {'description': 'The torrent'} } ) - async def get_torrent(self, request): - infohash = unhexlify(request.match_info['infohash']) + async def get_torrent(self, request: Request) -> RESTResponse: + """ + Return the .torrent file associated with the specified download. 
+ """ + infohash = unhexlify(request.match_info["infohash"]) download = self.download_manager.get_download(infohash) if not download: return DownloadsEndpoint.return_404(request) @@ -548,61 +592,64 @@ async def get_torrent(self, request): if not torrent: return DownloadsEndpoint.return_404(request) - return RESTResponse(lt.bencode(torrent), headers={'content-type': 'application/x-bittorrent', - 'Content-Disposition': 'attachment; filename=%s.torrent' + return RESTResponse(lt.bencode(torrent), headers={"content-type": "application/x-bittorrent", + "Content-Disposition": "attachment; filename=%s.torrent" % hexlify(infohash)}) @docs( tags=["Libtorrent"], summary="Return file information of a specific download.", parameters=[{ - 'in': 'path', - 'name': 'infohash', - 'description': 'Infohash of the download to from which to get file information', - 'type': 'string', - 'required': True + "in": "path", + "name": "infohash", + "description": "Infohash of the download to from which to get file information", + "type": "string", + "required": True }, { - 'in': 'query', - 'name': 'view_start_path', - 'description': 'Path of the file or directory to form a view for', - 'type': 'string', - 'required': False + "in": "query", + "name": "view_start_path", + "description": "Path of the file or directory to form a view for", + "type": "string", + "required": False }, { - 'in': 'query', - 'name': 'view_size', - 'description': 'Number of files to include in the view', - 'type': 'number', - 'required': False + "in": "query", + "name": "view_size", + "description": "Number of files to include in the view", + "type": "number", + "required": False }], responses={ 200: { - "schema": schema(GetFilesResponse={"files": [schema(File={'index': Integer, - 'name': String, - 'size': Integer, - 'included': Boolean, - 'progress': Float})]}) + "schema": schema(GetFilesResponse={"files": [schema(File={"index": Integer, + "name": String, + "size": Integer, + "included": Boolean, + "progress": Float})]}) } } ) - async def get_files(self, request): - infohash = unhexlify(request.match_info['infohash']) + async def get_files(self, request: Request) -> RESTResponse: + """ + Return file information of a specific download. 
+ """ + infohash = unhexlify(request.match_info["infohash"]) download = self.download_manager.get_download(infohash) if not download: return DownloadsEndpoint.return_404(request) params = request.query - view_start_path = params.get('view_start_path') + view_start_path = params.get("view_start_path") if view_start_path is None: return RESTResponse({ - "infohash": request.match_info['infohash'], + "infohash": request.match_info["infohash"], "files": self.get_files_info_json(download) }) - view_size = int(params.get('view_size', '100')) + view_size = int(params.get("view_size", "100")) return RESTResponse({ - "infohash": request.match_info['infohash'], + "infohash": request.match_info["infohash"], "query": view_start_path, "files": self.get_files_info_json_paged(download, Path(view_start_path), view_size) }) @@ -611,101 +658,110 @@ async def get_files(self, request): tags=["Libtorrent"], summary="Collapse a tree directory.", parameters=[{ - 'in': 'path', - 'name': 'infohash', - 'description': 'Infohash of the download', - 'type': 'string', - 'required': True + "in": "path", + "name": "infohash", + "description": "Infohash of the download", + "type": "string", + "required": True }, { - 'in': 'query', - 'name': 'path', - 'description': 'Path of the directory to collapse', - 'type': 'string', - 'required': True + "in": "query", + "name": "path", + "description": "Path of the directory to collapse", + "type": "string", + "required": True }], responses={ 200: { - "schema": schema(File={'path': path}) + "schema": schema(File={"path": path}) } } ) - async def collapse_tree_directory(self, request): - infohash = unhexlify(request.match_info['infohash']) + async def collapse_tree_directory(self, request: Request) -> RESTResponse: + """ + Collapse a tree directory. + """ + infohash = unhexlify(request.match_info["infohash"]) download = self.download_manager.get_download(infohash) if not download: return DownloadsEndpoint.return_404(request) params = request.query - path = params.get('path') + path = params.get("path") download.tdef.torrent_file_tree.collapse(Path(path)) - return RESTResponse({'path': path}) + return RESTResponse({"path": path}) @docs( tags=["Libtorrent"], summary="Expand a tree directory.", parameters=[{ - 'in': 'path', - 'name': 'infohash', - 'description': 'Infohash of the download', - 'type': 'string', - 'required': True + "in": "path", + "name": "infohash", + "description": "Infohash of the download", + "type": "string", + "required": True }, { - 'in': 'query', - 'name': 'path', - 'description': 'Path of the directory to expand', - 'type': 'string', - 'required': True + "in": "query", + "name": "path", + "description": "Path of the directory to expand", + "type": "string", + "required": True }], responses={ 200: { - "schema": schema(File={'path': String}) + "schema": schema(File={"path": String}) } } ) - async def expand_tree_directory(self, request): - infohash = unhexlify(request.match_info['infohash']) + async def expand_tree_directory(self, request: Request) -> RESTResponse: + """ + Expand a tree directory. 
+ """ + infohash = unhexlify(request.match_info["infohash"]) download = self.download_manager.get_download(infohash) if not download: return DownloadsEndpoint.return_404(request) params = request.query - path = params.get('path') + path = params.get("path") download.tdef.torrent_file_tree.expand(Path(path)) - return RESTResponse({'path': path}) + return RESTResponse({"path": path}) @docs( tags=["Libtorrent"], summary="Select a tree path.", parameters=[{ - 'in': 'path', - 'name': 'infohash', - 'description': 'Infohash of the download', - 'type': 'string', - 'required': True + "in": "path", + "name": "infohash", + "description": "Infohash of the download", + "type": "string", + "required": True }, { - 'in': 'query', - 'name': 'path', - 'description': 'Path of the directory to select', - 'type': 'string', - 'required': True + "in": "query", + "name": "path", + "description": "Path of the directory to select", + "type": "string", + "required": True }], responses={ 200: {} } ) - async def select_tree_path(self, request): - infohash = unhexlify(request.match_info['infohash']) + async def select_tree_path(self, request: Request) -> RESTResponse: + """ + Select a tree path. + """ + infohash = unhexlify(request.match_info["infohash"]) download = self.download_manager.get_download(infohash) if not download: return DownloadsEndpoint.return_404(request) params = request.query - path = params.get('path') + path = params.get("path") download.set_selected_file_or_dir(Path(path), True) return RESTResponse({}) @@ -714,31 +770,34 @@ async def select_tree_path(self, request): tags=["Libtorrent"], summary="Deselect a tree path.", parameters=[{ - 'in': 'path', - 'name': 'infohash', - 'description': 'Infohash of the download', - 'type': 'string', - 'required': True + "in": "path", + "name": "infohash", + "description": "Infohash of the download", + "type": "string", + "required": True }, { - 'in': 'query', - 'name': 'path', - 'description': 'Path of the directory to deselect', - 'type': 'string', - 'required': True + "in": "query", + "name": "path", + "description": "Path of the directory to deselect", + "type": "string", + "required": True }], responses={ 200: {} } ) - async def deselect_tree_path(self, request): - infohash = unhexlify(request.match_info['infohash']) + async def deselect_tree_path(self, request: Request) -> RESTResponse: + """ + Deselect a tree path. + """ + infohash = unhexlify(request.match_info["infohash"]) download = self.download_manager.get_download(infohash) if not download: return DownloadsEndpoint.return_404(request) params = request.query - path = params.get('path') + path = params.get("path") download.set_selected_file_or_dir(Path(path), False) return RESTResponse({}) @@ -765,9 +824,10 @@ def _get_extended_status(self, download: Download) -> DownloadStatus: return status - def _safe_extended_peer_info(self, ext_peer_info): + def _safe_extended_peer_info(self, ext_peer_info: bytes) -> str: """ Given a string describing peer info, return a json.dumps() safe representation. + :param ext_peer_info: the string to convert to a dumpable format :return: the safe string """ @@ -779,37 +839,41 @@ def _safe_extended_peer_info(self, ext_peer_info): return ext_peer_info.decode() except UnicodeDecodeError as e: # We might have some special unicode characters in here - self._logger.warning(f"Error while decoding peer info: {ext_peer_info}. {e.__class__.__name__}: {e}") + self._logger.warning("Error while decoding peer info: %s. 
%s: %s", + str(ext_peer_info), e.__class__.__name__, str(e)) return ''.join(map(chr, ext_peer_info)) @docs( tags=["Libtorrent"], summary="Stream the contents of a file that is being downloaded.", parameters=[{ - 'in': 'path', - 'name': 'infohash', - 'description': 'Infohash of the download to stream', - 'type': 'string', - 'required': True + "in": "path", + "name": "infohash", + "description": "Infohash of the download to stream", + "type": "string", + "required": True }, { - 'in': 'path', - 'name': 'fileindex', - 'description': 'The fileindex to stream', - 'type': 'string', - 'required': True + "in": "path", + "name": "fileindex", + "description": "The fileindex to stream", + "type": "string", + "required": True }], responses={ - 206: {'description': 'Contents of the stream'} + 206: {"description": "Contents of the stream"} } ) - async def stream(self, request): - infohash = unhexlify(request.match_info['infohash']) + async def stream(self, request: Request) -> web.StreamResponse: # noqa: C901 + """ + Stream the contents of a file that is being downloaded. + """ + infohash = unhexlify(request.match_info["infohash"]) download = self.download_manager.get_download(infohash) if not download: return DownloadsEndpoint.return_404(request) - file_index = int(request.match_info['fileindex']) + file_index = int(request.match_info["fileindex"]) http_range = request.http_range start = http_range.start or 0 @@ -821,21 +885,21 @@ async def stream(self, request): stop = download.stream.filesize if http_range.stop is None else min(http_range.stop, download.stream.filesize) if not start < stop or not 0 <= start < download.stream.filesize or not 0 < stop <= download.stream.filesize: - return RESTResponse('Requested Range Not Satisfiable', status=416) + return RESTResponse("Requested Range Not Satisfiable", status=416) response = web.StreamResponse(status=206, - reason='OK', - headers={'Accept-Ranges': 'bytes', - 'Content-Type': 'application/octet-stream', - 'Content-Length': f'{stop - start}', - 'Content-Range': f'{start}-{stop}/{download.stream.filesize}'}) + reason="OK", + headers={"Accept-Ranges": "bytes", + "Content-Type": "application/octet-stream", + "Content-Length": f"{stop - start}", + "Content-Range": f"{start}-{stop}/{download.stream.filesize}"}) response.force_close() with suppress(CancelledError, ConnectionResetError): async with StreamChunk(download.stream, start) as chunk: await response.prepare(request) bytes_todo = stop - start bytes_done = 0 - self._logger.info('Got range request for %s-%s (%s bytes)', start, stop, bytes_todo) + self._logger.info("Got range request for %s-%s (%s bytes)", start, stop, bytes_todo) while not request.transport.is_closing(): if chunk.seekpos >= download.stream.filesize: break diff --git a/src/tribler/core/libtorrent/restapi/libtorrent_endpoint.py b/src/tribler/core/libtorrent/restapi/libtorrent_endpoint.py index b4581ecabf..d433150eee 100644 --- a/src/tribler/core/libtorrent/restapi/libtorrent_endpoint.py +++ b/src/tribler/core/libtorrent/restapi/libtorrent_endpoint.py @@ -2,6 +2,7 @@ from binascii import hexlify from aiohttp import web +from aiohttp.abc import Request from aiohttp_apispec import docs from ipv8.REST.schema import schema from marshmallow.fields import Integer @@ -14,85 +15,95 @@ class LibTorrentEndpoint(RESTEndpoint): """ Endpoint for getting information about libtorrent sessions and settings. 
""" - path = '/libtorrent' - def __init__(self, download_manager: DownloadManager): + path = "/libtorrent" + + def __init__(self, download_manager: DownloadManager) -> None: + """ + Create a new libtorrent endpoint. + """ super().__init__() self.download_manager = download_manager - self.app.add_routes([web.get('/settings', self.get_libtorrent_settings), - web.get('/session', self.get_libtorrent_session_info)]) + self.app.add_routes([web.get("/settings", self.get_libtorrent_settings), + web.get("/session", self.get_libtorrent_session_info)]) @docs( tags=["Libtorrent"], summary="Return Libtorrent session settings.", parameters=[{ - 'in': 'query', - 'name': 'hop', - 'description': 'The hop count of the session for which to return settings', - 'type': 'string', - 'required': False + "in": "query", + "name": "hop", + "description": "The hop count of the session for which to return settings", + "type": "string", + "required": False }], responses={ 200: { - 'description': 'Return a dictonary with key-value pairs from the Libtorrent session settings', - "schema": schema(LibtorrentSessionResponse={'hop': Integer, - 'settings': schema(LibtorrentSettings={})}) + "description": "Return a dictonary with key-value pairs from the Libtorrent session settings", + "schema": schema(LibtorrentSessionResponse={"hop": Integer, + "settings": schema(LibtorrentSettings={})}) } } ) - async def get_libtorrent_settings(self, request): + async def get_libtorrent_settings(self, request: Request) -> RESTResponse: + """ + Return Libtorrent session settings. + """ args = request.query hop = 0 - if 'hop' in args and args['hop']: - hop = int(args['hop']) + if args.get("hop"): + hop = int(args["hop"]) if hop not in self.download_manager.ltsessions: - return RESTResponse({'hop': hop, "settings": {}}) + return RESTResponse({"hop": hop, "settings": {}}) lt_session = self.download_manager.ltsessions[hop] if hop == 0: lt_settings = self.download_manager.get_session_settings(lt_session) - lt_settings['peer_fingerprint'] = hexlify(lt_settings['peer_fingerprint'].encode()).decode() + lt_settings["peer_fingerprint"] = hexlify(lt_settings["peer_fingerprint"].encode()).decode() else: lt_settings = lt_session.get_settings() - return RESTResponse({'hop': hop, "settings": lt_settings}) + return RESTResponse({"hop": hop, "settings": lt_settings}) @docs( tags=["Libtorrent"], summary="Return Libtorrent session information.", parameters=[{ - 'in': 'query', - 'name': 'hop', - 'description': 'The hop count of the session for which to return information', - 'type': 'string', - 'required': False + "in": "query", + "name": "hop", + "description": "The hop count of the session for which to return information", + "type": "string", + "required": False }], responses={ 200: { - 'description': 'Return a dictonary with key-value pairs from the Libtorrent session information', - "schema": schema(LibtorrentinfoResponse={'hop': Integer, - 'settings': schema(LibtorrentInfo={})}) + "description": "Return a dictonary with key-value pairs from the Libtorrent session information", + "schema": schema(LibtorrentinfoResponse={"hop": Integer, + "settings": schema(LibtorrentInfo={})}) } } ) - async def get_libtorrent_session_info(self, request): + async def get_libtorrent_session_info(self, request: Request) -> RESTResponse: + """ + Return Libtorrent session information. 
+ """ session_stats = Future() - def on_session_stats_alert_received(alert): + def on_session_stats_alert_received(alert) -> None: if not session_stats.done(): session_stats.set_result(alert.values) args = request.query hop = 0 - if 'hop' in args and args['hop']: - hop = int(args['hop']) + if args.get("hop"): + hop = int(args["hop"]) if hop not in self.download_manager.ltsessions or \ not hasattr(self.download_manager.ltsessions[hop], "post_session_stats"): - return RESTResponse({'hop': hop, 'session': {}}) + return RESTResponse({"hop": hop, "session": {}}) self.download_manager.session_stats_callback = on_session_stats_alert_received self.download_manager.ltsessions[hop].post_session_stats() stats = await session_stats - return RESTResponse({'hop': hop, 'session': stats}) + return RESTResponse({"hop": hop, "session": stats}) diff --git a/src/tribler/core/libtorrent/restapi/torrentinfo_endpoint.py b/src/tribler/core/libtorrent/restapi/torrentinfo_endpoint.py index fe82143ec0..ccad2d069a 100644 --- a/src/tribler/core/libtorrent/restapi/torrentinfo_endpoint.py +++ b/src/tribler/core/libtorrent/restapi/torrentinfo_endpoint.py @@ -6,18 +6,24 @@ from binascii import hexlify, unhexlify from copy import deepcopy from ssl import SSLError +from typing import TYPE_CHECKING, Iterable import libtorrent as lt -from aiohttp import (BaseConnector, ClientConnectorError, ClientResponseError, ClientSession, ClientTimeout, - ServerConnectionError, web) -from aiohttp.typedefs import LooseHeaders +from aiohttp import ( + BaseConnector, + ClientConnectorError, + ClientResponseError, + ClientSession, + ClientTimeout, + ServerConnectionError, + web, +) from aiohttp_apispec import docs from ipv8.REST.schema import schema from marshmallow.fields import String from yarl import URL from tribler.core.database.orm_bindings.torrent_metadata import tdef_to_metadata_dict -from tribler.core.libtorrent.download_manager.download_manager import DownloadManager from tribler.core.libtorrent.torrentdef import TorrentDef from tribler.core.libtorrent.uris import unshorten, url_to_path from tribler.core.notifier import Notification @@ -28,10 +34,16 @@ RESTResponse, ) +if TYPE_CHECKING: + from aiohttp.abc import Request + from aiohttp.typedefs import LooseHeaders + + from tribler.core.libtorrent.download_manager.download_manager import DownloadManager + logger = logging.getLogger(__name__) -def recursive_unicode(obj, ignore_errors=False): +def recursive_unicode(obj: Iterable, ignore_errors: bool = False) -> Iterable: """ Converts any bytes within a data structure to unicode strings. Bytes are assumed to be UTF-8 encoded text. @@ -40,9 +52,9 @@ def recursive_unicode(obj, ignore_errors=False): """ if isinstance(obj, dict): return {recursive_unicode(k, ignore_errors): recursive_unicode(v, ignore_errors) for k, v in obj.items()} - elif isinstance(obj, list): + if isinstance(obj, list): return [recursive_unicode(i, ignore_errors) for i in obj] - elif isinstance(obj, bytes): + if isinstance(obj, bytes): try: return obj.decode() except UnicodeDecodeError: @@ -54,53 +66,63 @@ def recursive_unicode(obj, ignore_errors=False): async def query_uri(uri: str, connector: BaseConnector | None = None, headers: LooseHeaders | None = None, timeout: ClientTimeout | None = None, return_json: bool = False, ) -> bytes | dict: - kwargs = {'headers': headers} + """ + Retrieve the response for the given aiohttp context. + """ + kwargs = {"headers": headers} if timeout: # ClientSession uses a sentinel object for the default timeout. 
Therefore, it should only be specified if an # actual value has been passed to this function. - kwargs['timeout'] = timeout + kwargs["timeout"] = timeout - async with ClientSession(connector=connector, raise_for_status=True) as session: - async with await session.get(uri, **kwargs) as response: - if return_json: - return await response.json(content_type=None) - return await response.read() + async with ClientSession(connector=connector, raise_for_status=True) as session, \ + await session.get(uri, **kwargs) as response: + if return_json: + return await response.json(content_type=None) + return await response.read() class TorrentInfoEndpoint(RESTEndpoint): """ This endpoint is responsible for handing all requests regarding torrent info in Tribler. """ - path = '/torrentinfo' - def __init__(self, download_manager: DownloadManager): + path = "/torrentinfo" + + def __init__(self, download_manager: DownloadManager) -> None: + """ + Create a new torrent info endpoint. + """ super().__init__() self.download_manager = download_manager - self.app.add_routes([web.get('', self.get_torrent_info)]) + self.app.add_routes([web.get("", self.get_torrent_info)]) @docs( tags=["Libtorrent"], summary="Return metainfo from a torrent found at a provided URI.", parameters=[{ - 'in': 'query', - 'name': 'torrent', - 'description': 'URI for which to return torrent information. This URI can either represent ' - 'a file location, a magnet link or a HTTP(S) url.', - 'type': 'string', - 'required': True + "in": "query", + "name": "torrent", + "description": "URI for which to return torrent information. This URI can either represent " + "a file location, a magnet link or a HTTP(S) url.", + "type": "string", + "required": True }], responses={ 200: { - 'description': 'Return a hex-encoded json-encoded string with torrent metainfo', - "schema": schema(GetMetainfoResponse={'metainfo': String}) + "description": "Return a hex-encoded json-encoded string with torrent metainfo", + "schema": schema(GetMetainfoResponse={"metainfo": String}) } } ) - async def get_torrent_info(self, request): + async def get_torrent_info(self, request: Request) -> RESTResponse: # noqa: C901, PLR0911, PLR0912, PLR0915 + """ + Return metainfo from a torrent found at a provided URI. 
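
A hedged sketch of query_uri as defined above; the URLs are hypothetical:

from aiohttp import ClientTimeout


async def fetch_examples() -> None:
    # Raw bytes (e.g. a .torrent file), with an explicit timeout.
    raw = await query_uri("http://example.org/file.torrent", timeout=ClientTimeout(total=10))
    # Parsed JSON, regardless of the response's Content-Type header.
    data = await query_uri("http://example.org/info.json", return_json=True)
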
+ """ params = request.query - hops = params.get('hops') - uri = params.get('uri') - self._logger.info(f'URI: {uri}') + hops = params.get("hops") + uri = params.get("uri") + self._logger.info("URI: %s", uri) if hops: try: hops = int(hops) @@ -126,7 +148,7 @@ async def get_torrent_info(self, request): response = await query_uri(uri) except (ServerConnectionError, ClientResponseError, SSLError, ClientConnectorError, AsyncTimeoutError, ValueError) as e: - self._logger.warning(f'Error while querying http uri: {e}') + self._logger.warning("Error while querying http uri: %s", str(e)) return RESTResponse({"error": str(e)}, status=HTTP_INTERNAL_SERVER_ERROR) if response.startswith(b'magnet'): @@ -148,7 +170,7 @@ async def get_torrent_info(self, request): else: metainfo = lt.bdecode(response) elif scheme == "magnet": - self._logger.info(f'magnet scheme detected') + self._logger.info("magnet scheme detected") try: try: @@ -169,7 +191,7 @@ async def get_torrent_info(self, request): if not metainfo: return RESTResponse({"error": "metainfo error"}, status=HTTP_INTERNAL_SERVER_ERROR) - if not isinstance(metainfo, dict) or b'info' not in metainfo: + if not isinstance(metainfo, dict) or b"info" not in metainfo: self._logger.warning("Received metainfo is not a valid dictionary") return RESTResponse({"error": "invalid response"}, status=HTTP_INTERNAL_SERVER_ERROR) diff --git a/src/tribler/core/libtorrent/torrent_file_tree.py b/src/tribler/core/libtorrent/torrent_file_tree.py index 024c2341bc..abf9f33b5b 100644 --- a/src/tribler/core/libtorrent/torrent_file_tree.py +++ b/src/tribler/core/libtorrent/torrent_file_tree.py @@ -3,10 +3,14 @@ import os import re from bisect import bisect -from collections import defaultdict from dataclasses import dataclass, field from pathlib import Path -from typing import Generator, Sequence, cast, ItemsView, Dict +from typing import TYPE_CHECKING, Dict, Generator, ItemsView, Sequence, cast + +if TYPE_CHECKING: + from collections import defaultdict + + import libtorrent class TorrentFileTree: @@ -25,11 +29,11 @@ class Directory: collapsed: bool = True size: int = 0 - def calc_size(self): + def calc_size(self) -> None: """ Calculate the size of this Directory, assuming all subdirectories already have their size calculated. """ - self.size = sum(dir.size for dir in self.directories.values()) + sum(f.size for f in self.files) + self.size = sum(d.size for d in self.directories.values()) + sum(f.size for f in self.files) def iter_dirs(self) -> Generator[TorrentFileTree.Directory, None, None]: """ @@ -38,8 +42,7 @@ def iter_dirs(self) -> Generator[TorrentFileTree.Directory, None, None]: We do it this way so that calc_size() can be easily/efficiently executed! """ for directory in self.directories.values(): - for entry in directory.iter_dirs(): - yield entry + yield from directory.iter_dirs() yield self def tostr(self, depth: int = 0, name: str = "") -> str: @@ -67,6 +70,7 @@ class File: """ A File object that has a name (relative to its parent directory) and a file index in the torrent's file list. """ + name: str index: int size: int = 0 @@ -86,43 +90,43 @@ def sort_key(self) -> Sequence[int | str]: """ return tuple(int(part) if part.isdigit() else part for part in self._sort_pattern.split(self.name)) - def __lt__(self, other) -> bool: + def __lt__(self, other: TorrentFileTree.File) -> bool: """ Python 3.8 quirk/shortcoming is that File needs to be a SupportsRichComparisonT (instead of using a key). 
""" return self.sort_key() < other.sort_key() - def __le__(self, other) -> bool: + def __le__(self, other: TorrentFileTree.File) -> bool: """ Python 3.8 quirk/shortcoming is that File needs to be a SupportsRichComparisonT (instead of using a key). """ return self.sort_key() <= other.sort_key() - def __gt__(self, other) -> bool: + def __gt__(self, other: TorrentFileTree.File) -> bool: """ Python 3.8 quirk/shortcoming is that File needs to be a SupportsRichComparisonT (instead of using a key). """ return self.sort_key() > other.sort_key() - def __ge__(self, other) -> bool: + def __ge__(self, other: TorrentFileTree.File) -> bool: """ Python 3.8 quirk/shortcoming is that File needs to be a SupportsRichComparisonT (instead of using a key). """ return self.sort_key() >= other.sort_key() - def __eq__(self, other) -> bool: + def __eq__(self, other: TorrentFileTree.File) -> bool: """ Python 3.8 quirk/shortcoming is that File needs to be a SupportsRichComparisonT (instead of using a key). """ return self.sort_key() == other.sort_key() - def __ne__(self, other) -> bool: + def __ne__(self, other: TorrentFileTree.File) -> bool: """ Python 3.8 quirk/shortcoming is that File needs to be a SupportsRichComparisonT (instead of using a key). """ return self.sort_key() != other.sort_key() - def __init__(self, file_storage) -> None: + def __init__(self, file_storage: libtorrent.file_storage) -> None: """ Construct an empty tree data structure belonging to the given file storage. @@ -140,7 +144,7 @@ def __str__(self) -> str: return f"TorrentFileTree({self.root.tostr()}\n)" @classmethod - def from_lt_file_storage(cls, file_storage): + def from_lt_file_storage(cls: type[TorrentFileTree], file_storage: libtorrent.file_storage) -> TorrentFileTree: """ Load in the tree contents from the given file storage, sorting the files in each directory. """ diff --git a/src/tribler/core/libtorrent/torrentdef.py b/src/tribler/core/libtorrent/torrentdef.py index d5b9ed09fa..a65a2c3915 100644 --- a/src/tribler/core/libtorrent/torrentdef.py +++ b/src/tribler/core/libtorrent/torrentdef.py @@ -1,5 +1,5 @@ """ -Author(s): Arno Bakker +Author(s): Arno Bakker. """ from __future__ import annotations @@ -9,58 +9,61 @@ from contextlib import suppress from functools import cached_property from hashlib import sha1 -from os import PathLike from pathlib import Path -from typing import Dict, Iterator, List, Optional, Set, Tuple, Union, Any +from typing import TYPE_CHECKING, Any, Dict, Iterable, Iterator, List import aiohttp - import libtorrent as lt from tribler.core.libtorrent.torrent_file_tree import TorrentFileTree from tribler.core.libtorrent.torrents import create_torrent_file from tribler.core.libtorrent.trackers import is_valid_url +if TYPE_CHECKING: + from os import PathLike + -def escape_as_utf8(string: bytes, encoding='utf8') -> str: +def escape_as_utf8(string: bytes, encoding: str = "utf8") -> str: """ Make a string UTF-8 compliant, destroying characters if necessary. :param string: the string to convert - :type string: str + :param encoding: the string encoding to use :return: the utf-8 string derivative - :rtype: str """ try: - # Try seeing if the delivered encoding is correct and we - # can convert to utf8 without any issues. + # Check if the delivered encoding is correct and we can convert to utf8 without any issues. return string.decode(encoding).encode('utf8').decode('utf8') except (LookupError, TypeError, ValueError): try: - # The delivered encoding is incorrect, cast it to - # latin1 and hope for the best (minor corruption). 
+ # The delivered encoding is incorrect, cast it to latin1 and hope for the best (minor corruption). return string.decode('latin1').encode('utf8', 'ignore').decode('utf8') except (TypeError, ValueError): # This is a very nasty string (e.g. '\u266b'), remove the illegal entries. return string.decode('utf8', 'ignore') -def pathlist2filename(pathlist) -> Path: - """ Convert a multi-file torrent file 'path' entry to a filename. """ +def pathlist2filename(pathlist: Iterable[bytes]) -> Path: + """ + Convert a multi-file torrent file 'path' entry to a filename. + """ return Path(*(x.decode() for x in pathlist)) -def get_length_from_metainfo(metainfo: Dict, selectedfiles: Set[Path]): - if b'files' not in metainfo[b'info']: +def get_length_from_metainfo(metainfo: dict, selectedfiles: set[Path]) -> int: + """ + Loop through all files in a torrent and calculate the total size. + """ + if b"files" not in metainfo[b"info"]: # single-file torrent - return metainfo[b'info'][b'length'] + return metainfo[b"info"][b"length"] # multi-file torrent - files = metainfo[b'info'][b'files'] + files = metainfo[b"info"][b"files"] total = 0 for i in range(len(files)): - path = files[i][b'path'] - length = files[i][b'length'] + path = files[i][b"path"] + length = files[i][b"length"] if length > 0 and (not selectedfiles or pathlist2filename(path) in selectedfiles): total += length return total @@ -72,8 +75,8 @@ class TorrentDef: It can be used to create new torrents, or analyze existing ones. """ - def __init__(self, metainfo: Optional[Dict] = None, torrent_parameters: Optional[Dict[bytes, Any]] = None, - ignore_validation: bool = True): + def __init__(self, metainfo: dict | None = None, torrent_parameters: dict[bytes, Any] | None = None, + ignore_validation: bool = True) -> None: """ Create a new TorrentDef object, possibly based on existing data. @@ -99,7 +102,8 @@ def __init__(self, metainfo: Optional[Dict] = None, torrent_parameters: Optional else: try: if not self.metainfo[b'info']: - raise ValueError("Empty metainfo!") + msg = "Empty metainfo!" + raise ValueError(msg) self.infohash = sha1(lt.bencode(self.metainfo[b'info'])).digest() except (KeyError, RuntimeError) as exc: raise ValueError from exc @@ -125,10 +129,10 @@ def copy_metainfo_to_torrent_parameters(self) -> None: if self.metainfo and key in self.metainfo: self.torrent_parameters[key] = self.metainfo[key] - infokeys = [b'name', b'piece length'] + infokeys = [b"name", b"piece length"] for key in infokeys: - if self.metainfo and key in self.metainfo[b'info']: - self.torrent_parameters[key] = self.metainfo[b'info'][key] + if self.metainfo and key in self.metainfo[b"info"]: + self.torrent_parameters[key] = self.metainfo[b"info"][key] @property def torrent_info(self) -> lt.torrent_info | None: @@ -165,7 +169,7 @@ def torrent_file_tree(self) -> TorrentFileTree: return TorrentFileTree.from_lt_file_storage(self.torrent_info.files()) @staticmethod - def _threaded_load_job(filepath: Union[str, bytes, PathLike]) -> TorrentDef: + def _threaded_load_job(filepath: str | bytes | PathLike) -> TorrentDef: """ Perform the actual loading of the torrent. @@ -176,9 +180,10 @@ def _threaded_load_job(filepath: Union[str, bytes, PathLike]) -> TorrentDef: return TorrentDef.load_from_memory(file_content) @staticmethod - async def load(filepath: Union[str, bytes, PathLike]) -> TorrentDef: + async def load(filepath: str | bytes | PathLike) -> TorrentDef: """ - Create a TorrentDef object from a .torrent file + Create a TorrentDef object from a .torrent file. 
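
A small worked example for the two helpers above, with an invented two-file metainfo dictionary:

from pathlib import Path

metainfo = {b"info": {b"files": [
    {b"path": [b"videos", b"a.mp4"], b"length": 7},
    {b"path": [b"readme.txt"], b"length": 3},
]}}
assert pathlist2filename([b"videos", b"a.mp4"]) == Path("videos") / "a.mp4"
assert get_length_from_metainfo(metainfo, set()) == 10  # no selection: all files count
assert get_length_from_metainfo(metainfo, {Path("readme.txt")}) == 3
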
+ :param filepath: The path to the .torrent file """ return await get_running_loop().run_in_executor(None, TorrentDef._threaded_load_job, filepath) @@ -187,19 +192,22 @@ async def load(filepath: Union[str, bytes, PathLike]) -> TorrentDef: def load_from_memory(bencoded_data: bytes) -> TorrentDef: """ Load some bencoded data into a TorrentDef. + :param bencoded_data: The bencoded data to decode and use as metainfo """ metainfo = lt.bdecode(bencoded_data) # Some versions of libtorrent will not raise an exception when providing invalid data. # This issue is present in 1.0.8 (included with Tribler 7.3.0), but has been fixed since at least 1.2.1. if metainfo is None: - raise ValueError("Data is not a bencoded string") + msg = "Data is not a bencoded string" + raise ValueError(msg) return TorrentDef.load_from_dict(metainfo) @staticmethod def load_from_dict(metainfo: Dict) -> TorrentDef: """ Load a metainfo dictionary into a TorrentDef object. + :param metainfo: The metainfo dictionary """ return TorrentDef(metainfo=metainfo) @@ -225,9 +233,7 @@ def _filter_characters(self, name: bytes) -> str: character. :param name: the name to sanitize - :type name: bytes :return: the sanitized string - :rtype: str """ def filter_character(char: int) -> str: @@ -248,16 +254,17 @@ def add_content(self, file_path: Path | str) -> None: def set_encoding(self, enc: bytes) -> None: """ - Set the character encoding for e.g. the 'name' field + Set the character encoding for e.g. the 'name' field. + :param enc: The new encoding of the file. """ - self.torrent_parameters[b'encoding'] = enc + self.torrent_parameters[b"encoding"] = enc def get_encoding(self) -> str: """ Returns the used encoding of the TorrentDef. """ - return self.torrent_parameters.get(b'encoding', b'utf-8').decode() + return self.torrent_parameters.get(b"encoding", b"utf-8").decode() def set_tracker(self, url: str) -> None: """ @@ -265,30 +272,30 @@ def set_tracker(self, url: str) -> None: :param url: The tracker url. """ if not is_valid_url(url): - raise ValueError("Invalid URL") + msg = "Invalid URL" + raise ValueError(msg) - if url.endswith('/'): # Some tracker code can't deal with / at end + if url.endswith("/"): # Some tracker code can't deal with / at end url = url[:-1] - self.torrent_parameters[b'announce'] = url + self.torrent_parameters[b"announce"] = url - def get_tracker(self) -> Optional[str]: + def get_tracker(self) -> str | None: """ Returns the torrent announce URL. """ - return self.torrent_parameters.get(b'announce', None) + return self.torrent_parameters.get(b"announce", None) - def get_tracker_hierarchy(self) -> List[List[str]]: + def get_tracker_hierarchy(self) -> list[list[str]]: """ Returns the hierarchy of trackers. """ - return self.torrent_parameters.get(b'announce-list', []) + return self.torrent_parameters.get(b"announce-list", []) - def get_trackers(self) -> Set[str]: + def get_trackers(self) -> set[str]: """ Returns a flat set of all known trackers. :return: all known trackers - :rtype: set """ if self.get_tracker_hierarchy(): trackers = itertools.chain.from_iterable(self.get_tracker_hierarchy()) @@ -307,15 +314,16 @@ def set_piece_length(self, piece_length: int) -> None: :param piece_length: The piece length. 
""" if not isinstance(piece_length, int): - raise ValueError("Piece length not an int/long") + msg = "Piece length not an int/long" + raise ValueError(msg) # noqa: TRY004 - self.torrent_parameters[b'piece length'] = piece_length + self.torrent_parameters[b"piece length"] = piece_length def get_piece_length(self) -> int: """ Returns the piece size. """ - return self.torrent_parameters.get(b'piece length', 0) + return self.torrent_parameters.get(b"piece length", 0) def get_nr_pieces(self) -> int: """ @@ -323,7 +331,7 @@ def get_nr_pieces(self) -> int: """ if not self.metainfo: return 0 - return len(self.metainfo[b'info'][b'pieces']) // 20 + return len(self.metainfo[b"info"][b"pieces"]) // 20 def get_pieces(self) -> List: """ @@ -331,7 +339,7 @@ def get_pieces(self) -> List: """ if not self.metainfo: return [] - return self.metainfo[b'info'][b'pieces'][:] + return self.metainfo[b"info"][b"pieces"][:] def get_infohash(self) -> bytes | None: """ @@ -339,7 +347,7 @@ def get_infohash(self) -> bytes | None: """ return self.infohash - def get_metainfo(self) -> Dict: + def get_metainfo(self) -> dict: """ Returns the metainfo of the torrent. Might be None if no metainfo is provided. """ @@ -349,7 +357,7 @@ def get_name(self) -> bytes: """ Returns the name as raw string of bytes. """ - return self.torrent_parameters[b'name'] + return self.torrent_parameters[b"name"] def get_name_utf8(self) -> str: """ @@ -363,7 +371,7 @@ def set_name(self, name: bytes) -> None: :param name: The new name of the torrent """ - self.torrent_parameters[b'name'] = name + self.torrent_parameters[b"name"] = name def get_name_as_unicode(self) -> str: """ @@ -409,7 +417,7 @@ def get_name_as_unicode(self) -> str: # We failed. Returning an empty string return "" - def save(self, torrent_filepath: Optional[str] = None) -> None: + def save(self, torrent_filepath: str | None = None) -> None: """ Generate the metainfo and save the torrent file. @@ -423,11 +431,12 @@ def save(self, torrent_filepath: Optional[str] = None) -> None: self.copy_metainfo_to_torrent_parameters() self.infohash = torrent_dict['infohash'] - def _get_all_files_as_unicode_with_length(self) -> Iterator[Path, int]: - """ Get a generator for files in the torrent def. No filtering - is possible and all tricks are allowed to obtain a unicode - list of filenames. - @return A unicode filename generator. + def _get_all_files_as_unicode_with_length(self) -> Iterator[Path, int]: # noqa: C901, PLR0912 + """ + Get a generator for files in the torrent def. No filtering is possible and all tricks are allowed to obtain + a unicode list of filenames. + + :return: A unicode filename generator. """ if self.metainfo and b"files" in self.metainfo[b"info"]: # Multi-file torrent @@ -436,8 +445,7 @@ def _get_all_files_as_unicode_with_length(self) -> Iterator[Path, int]: for file_dict in files: if b"path.utf-8" in file_dict: # This file has an utf-8 encoded list of elements. - # We assume that it is correctly encoded and use - # it normally + # We assume that it is correctly encoded and use it normally. try: yield (Path(*(element.decode() for element in file_dict[b"path.utf-8"])), file_dict[b"length"]) @@ -446,8 +454,7 @@ def _get_all_files_as_unicode_with_length(self) -> Iterator[Path, int]: pass if b"path" in file_dict: - # Try to use the 'encoding' field. If it exists, - # it should contain something like 'utf-8' + # Try to use the 'encoding' field. If it exists, it should contain something like 'utf-8'. 
if b"encoding" in self.metainfo: encoding = self.metainfo[b"encoding"].decode() try: @@ -457,14 +464,11 @@ def _get_all_files_as_unicode_with_length(self) -> Iterator[Path, int]: except UnicodeError: pass except LookupError: - # Some encodings are not supported by - # python. For instance, the MBCS codec - # which is used by Windows is not - # supported (Jan 2010) + # Some encodings are not supported by Python. For instance, the MBCS codec which is used + # by Windows is not supported (Jan 2010). pass - # Try to convert the names in path to unicode, - # assuming that it was encoded as utf-8 + # Try to convert the names in path to unicode, assuming that it was encoded as utf-8. try: yield (Path(*(element.decode() for element in file_dict[b"path"])), file_dict[b"length"]) @@ -472,11 +476,10 @@ def _get_all_files_as_unicode_with_length(self) -> Iterator[Path, int]: except UnicodeError: pass - # Convert the names in path to unicode by - # replacing out all characters that may -even - # remotely- cause problems with the '?' character + # Convert the names in path to unicode by replacing out all characters that may - even remotely - + # cause problems with the '?' character. try: - yield (Path(*map(self._filter_characters, file_dict[b"path"])), file_dict[b"length"]) + yield Path(*map(self._filter_characters, file_dict[b"path"])), file_dict[b"length"] continue except UnicodeError: pass @@ -485,11 +488,13 @@ def _get_all_files_as_unicode_with_length(self) -> Iterator[Path, int]: # Single-file torrent yield self.get_name_as_unicode(), self.metainfo[b"info"][b"length"] - def get_files_with_length(self, exts: Optional[str] = None) -> List[Tuple[Path, int]]: - """ The list of files in the torrent def. - @param exts (Optional) list of filename extensions (without leading .) + def get_files_with_length(self, exts: str | None = None) -> list[tuple[Path, int]]: + """ + The list of files in the torrent def. + + :param exts: (Optional) list of filename extensions (without leading .) to search for. - @return A list of filenames. + :return: A list of filenames. """ videofiles = [] for filename, length in self._get_all_files_as_unicode_with_length(): @@ -500,14 +505,18 @@ def get_files_with_length(self, exts: Optional[str] = None) -> List[Tuple[Path, videofiles.append((filename, length)) return videofiles - def get_files(self, exts: Optional[Set[str]] = None) -> List[Path]: + def get_files(self, exts: set[str] | None = None) -> list[Path]: + """ + Return the list of file paths in this torrent. + """ return [filename for filename, _ in self.get_files_with_length(exts)] - def get_length(self, selectedfiles: Optional[Set[Path]] = None) -> int: - """ Returns the total size of the content in the torrent. If the - optional selectedfiles argument is specified, the method returns - the total size of only those files. - @return A length (long) + def get_length(self, selectedfiles: set[Path] | None = None) -> int: + """ + Returns the total size of the content in the torrent. If the optional selectedfiles argument is specified, the + method returns the total size of only those files. + + :return: A length (long) """ if self.metainfo: return get_length_from_metainfo(self.metainfo, selectedfiles) @@ -524,7 +533,7 @@ def is_multifile_torrent(self) -> bool: Returns whether this TorrentDef is a multi-file torrent. 
""" if self.metainfo: - return b'files' in self.metainfo[b'info'] + return b"files" in self.metainfo[b"info"] return False def is_private(self) -> bool: @@ -532,31 +541,37 @@ def is_private(self) -> bool: Returns whether this TorrentDef is a private torrent (and is not announced in the DHT). """ try: - private = int(self.metainfo[b'info'].get(b'private', 0)) if self.metainfo else 0 + private = int(self.metainfo[b"info"].get(b"private", 0)) if self.metainfo else 0 except (ValueError, KeyError) as e: - self._logger.warning(f'{e.__class__.__name__}: {e}') + self._logger.warning("%s: %s", e.__class__.__name__, str(e)) private = 0 return private == 1 - def get_index_of_file_in_files(self, file: Optional[str]) -> int: + def get_index_of_file_in_files(self, file: str | None) -> int: + """ + Get the index of the given file path in the torrent. + + Raises a ValueError if the path is not found. + """ if not self.metainfo: - raise ValueError("TorrentDef does not have metainfo") - info = self.metainfo[b'info'] + msg = "TorrentDef does not have metainfo" + raise ValueError(msg) + info = self.metainfo[b"info"] - if file is not None and b'files' in info: - for i in range(len(info[b'files'])): - file_dict = info[b'files'][i] + if file is not None and b"files" in info: + for i in range(len(info[b"files"])): + file_dict = info[b"files"][i] - if b'path.utf-8' in file_dict: - intorrentpath = pathlist2filename(file_dict[b'path.utf-8']) - else: - intorrentpath = pathlist2filename(file_dict[b'path']) + intorrentpath = (pathlist2filename(file_dict[b"path.utf-8"]) if b"path.utf-8" in file_dict + else pathlist2filename(file_dict[b"path"])) if intorrentpath == Path(file): return i - raise ValueError("File not found in torrent") - else: - raise ValueError("File not found in single-file torrent") + msg = "File not found in torrent" + raise ValueError(msg) + + msg = "File not found in single-file torrent" + raise ValueError(msg) class TorrentDefNoMetainfo(TorrentDef): @@ -567,26 +582,40 @@ class TorrentDefNoMetainfo(TorrentDef): implemented. """ - def __init__(self, infohash: bytes, name: bytes, url: bytes | str | None = None): + def __init__(self, infohash: bytes, name: bytes, url: bytes | str | None = None) -> None: + """ + Create a new valid torrent def without metainfo. + """ torrent_parameters = { - b'name': name + b"name": name } if url is not None: - torrent_parameters[b'urllist'] = [url] + torrent_parameters[b"urllist"] = [url] super().__init__(torrent_parameters=torrent_parameters) self.infohash = infohash def get_url(self) -> bytes | str | None: - if urllist := self.torrent_parameters.get(b'urllist'): + """ + Get the URL belonging to this torrent. + """ + if urllist := self.torrent_parameters.get(b"urllist"): return urllist[0] return None @property def torrent_info(self) -> lt.torrent_info | None: + """ + A torrent def without metinfo has no libtorrent torrent_info. + """ return None def load_torrent_info(self) -> None: - pass + """ + If there cannot be torrent info, we don't need to try and load it. + """ - def get_name_as_unicode(self): + def get_name_as_unicode(self) -> str: + """ + Get the name of this torrent. 
diff --git a/src/tribler/core/libtorrent/torrents.py b/src/tribler/core/libtorrent/torrents.py
index 94898ea80b..f7dce22581 100644
--- a/src/tribler/core/libtorrent/torrents.py
+++ b/src/tribler/core/libtorrent/torrents.py
@@ -1,13 +1,16 @@
+from __future__ import annotations
+
 import logging
 from asyncio import CancelledError, Future
 from contextlib import suppress
 from hashlib import sha1
 from os.path import getsize
-from pathlib import Path
-from typing import Any, Dict, Iterable, List, Optional
+from typing import TYPE_CHECKING, Any, Dict, Iterable, TypedDict

 import libtorrent as lt

+if TYPE_CHECKING:
+    from pathlib import Path

 logger = logging.getLogger(__name__)

@@ -15,6 +18,7 @@ def check_handle(default=None):
     """
     Return the libtorrent handle if it's available, else return the default value.
+
     Author(s): Egbert Bouman
     """

@@ -33,6 +37,7 @@ def invoke_func(*args, **kwargs):
 def require_handle(func):
     """
     Invoke the function once the handle is available. Returns a future that will fire once the function has completed.
+
     Author(s): Egbert Bouman
     """

@@ -70,7 +75,7 @@ def done_cb(fut):

 def check_vod(default=None):
     """
-    Check if torrent is vod mode, else return default
+    Check if torrent is vod mode, else return default.
     """

     def wrap(f):
@@ -84,7 +89,7 @@ def invoke_func(self, *args, **kwargs):
     return wrap


-def common_prefix(paths_list: List[Path]) -> Path:
+def common_prefix(paths_list: list[Path]) -> Path:
     """
     Get the path prefixes component-wise.
     """
@@ -95,15 +100,34 @@ def common_prefix(paths_list: List[Path]) -> Path:
     return sorted(base_set, reverse=True)[0]


-def _existing_files(path_list: List[Path]) -> Iterable[Path]:
+def _existing_files(path_list: list[Path]) -> Iterable[Path]:
     for path in path_list:
         if not path.exists():
-            raise OSError(f'Path does not exist: {path}')
+            msg = f"Path does not exist: {path}"
+            raise OSError(msg)
         elif path.is_file():
             yield path


+class TorrentFileResult(TypedDict):
+    """
+    A dictionary to describe a newly-created torrent.
+    """
+
+    success: bool
+    base_dir: Path
+    torrent_file_path: str | None
+    metainfo: dict
+    infohash: bytes
+
+
+def create_torrent_file(file_path_list: list[Path], params: Dict[bytes, Any],  # noqa: C901
+                        torrent_filepath: str | None = None) -> TorrentFileResult:
+    """
+    Create a torrent file from the given paths and parameters.
+
+    If an output file path is omitted, no file will be written to disk.
+    """
     fs = lt.file_storage()

     # filter all non-files
@@ -117,48 +141,38 @@ def create_torrent_file(file_path_list: List[Path], params: Dict[bytes, Any], to
         relative = path.relative_to(base_dir)
         fs.add_file(str(relative), getsize(str(path)))

-    if params.get(b'piece length'):
-        piece_size = params[b'piece length']
-    else:
-        piece_size = 0
-
+    piece_size = params[b"piece length"] if params.get(b"piece length") else 0
     flags = lt.create_torrent_flags_t.optimize
-
-    # This flag doesn't exist anymore in libtorrent V1.1.0
-    if hasattr(lt.create_torrent_flags_t, 'calculate_file_hashes'):
-        flags |= lt.create_torrent_flags_t.calculate_file_hashes
-
-    params = {k: (v.decode('utf-8') if isinstance(v, bytes) else v) for k, v in params.items()}
+    params = {k: (v.decode() if isinstance(v, bytes) else v) for k, v in params.items()}

     torrent = lt.create_torrent(fs, piece_size=piece_size, flags=flags)
-    if params.get(b'comment'):
-        torrent.set_comment(params[b'comment'])
-    if params.get(b'created by'):
-        torrent.set_creator(params[b'created by'])
+    if params.get(b"comment"):
+        torrent.set_comment(params[b"comment"])
+    if params.get(b"created by"):
+        torrent.set_creator(params[b"created by"])
     # main tracker
-    if params.get(b'announce'):
-        torrent.add_tracker(params[b'announce'])
+    if params.get(b"announce"):
+        torrent.add_tracker(params[b"announce"])
     # tracker list
-    if params.get(b'announce-list'):
+    if params.get(b"announce-list"):
         tier = 1
-        for tracker in params[b'announce-list']:
+        for tracker in params[b"announce-list"]:
             torrent.add_tracker(tracker[0], tier=tier)
             tier += 1
     # DHT nodes
     # http://www.bittorrent.org/beps/bep_0005.html
-    if params.get(b'nodes'):
-        for node in params[b'nodes']:
+    if params.get(b"nodes"):
+        for node in params[b"nodes"]:
             torrent.add_node(*node)
     # HTTP seeding
     # http://www.bittorrent.org/beps/bep_0017.html
-    if params.get(b'httpseeds'):
-        torrent.add_http_seed(params[b'httpseeds'])
+    if params.get(b"httpseeds"):
+        torrent.add_http_seed(params[b"httpseeds"])

     # Web seeding
     # http://www.bittorrent.org/beps/bep_0019.html
-    if len(file_path_list) == 1:
-        if params.get(b'urllist', False):
-            torrent.add_url_seed(params[b'urllist'])
+    if len(file_path_list) == 1 and params.get(b"urllist", False):
+        torrent.add_url_seed(params[b"urllist"])

     # read the files and calculate the hashes
     lt.set_piece_hashes(torrent, str(base_dir))
@@ -167,19 +181,19 @@ def create_torrent_file(file_path_list: List[Path], params: Dict[bytes, Any], to
     torrent = lt.bencode(t1)

     if torrent_filepath:
-        with open(torrent_filepath, 'wb') as f:
+        with open(torrent_filepath, "wb") as f:
             f.write(torrent)

     return {
-        'success': True,
-        'base_dir': base_dir,
-        'torrent_file_path': torrent_filepath,
-        'metainfo': torrent,
-        'infohash': sha1(lt.bencode(t1[b'info'])).digest()
+        "success": True,
+        "base_dir": base_dir,
+        "torrent_file_path": torrent_filepath,
+        "metainfo": torrent,
+        "infohash": sha1(lt.bencode(t1[b"info"])).digest()
     }


-def get_info_from_handle(handle: lt.torrent_handle) -> Optional[lt.torrent_info]:
+def get_info_from_handle(handle: lt.torrent_handle) -> lt.torrent_info | None:
     """
     Call handle.torrent_file() and handle RuntimeErrors.
     """
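For context, a sketch of how create_torrent_file() is called after this change (not part of the patch;
the input file, tracker URL and output path are made up). Parameter keys stay bytes, while bytes values
are decoded before use:

    from pathlib import Path

    from tribler.core.libtorrent.torrents import create_torrent_file

    result = create_torrent_file(
        [Path("/data/video.mp4")],  # hypothetical input file, must exist on disk
        {
            b"announce": "http://tracker.example.org/announce",  # hypothetical tracker
            b"created by": "Tribler",
        },
        torrent_filepath="/tmp/video.torrent",  # omit to skip writing the .torrent to disk
    )
    if result["success"]:
        print(result["infohash"].hex(), result["torrent_file_path"])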
""" diff --git a/src/tribler/core/libtorrent/trackers.py b/src/tribler/core/libtorrent/trackers.py index 770ba91b55..09d43e4163 100644 --- a/src/tribler/core/libtorrent/trackers.py +++ b/src/tribler/core/libtorrent/trackers.py @@ -1,34 +1,38 @@ +from __future__ import annotations + import re from http.client import HTTP_PORT, HTTPS_PORT from json import dumps from urllib.parse import ParseResult, parse_qsl, unquote, urlencode, urlparse, urlsplit -UDP = 'udp' -HTTP = 'http' -HTTPS = 'https' +UDP = "udp" +HTTP = "http" +HTTPS = "https" SUPPORTED_SCHEMES = {UDP, HTTP, HTTPS} DEFAULT_PORTS = {HTTP: HTTP_PORT, HTTPS: HTTPS_PORT} class MalformedTrackerURLException(Exception): - pass + """ + The tracker URL is not valid. + """ delimiters_regex = re.compile(r'[\r\n\x00\s\t;]+(%20)*') url_regex = re.compile( - r'^(?:http|udp|wss)s?://' # http:// or https:// - r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' # domain... - r'localhost|' # localhost... - r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip - r'(?::\d+)?' # optional port - r'(?:/?|[/?]\S+)$', re.IGNORECASE) + r"^(?:http|udp|wss)s?://" # http:// or https:// + r"(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|" # domain... + r"localhost|" # localhost... + r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})" # ...or ip + r"(?::\d+)?" # optional port + r"(?:/?|[/?]\S+)$", re.IGNORECASE) -remove_trailing_junk = re.compile(r'[,*.:]+\Z') -truncated_url_detector = re.compile(r'\.\.\.') +remove_trailing_junk = re.compile(r"[,*.:]+\Z") +truncated_url_detector = re.compile(r"\.\.\.") -def get_uniformed_tracker_url(tracker_url: str): +def get_uniformed_tracker_url(tracker_url: str) -> str | None: """ Parses the given tracker URL and returns in a uniform URL format. It uses regex to sanitize the URL. @@ -39,33 +43,31 @@ def get_uniformed_tracker_url(tracker_url: str): assert isinstance(tracker_url, str), f"tracker_url is not a str: {type(tracker_url)}" # Search the string for delimiters and try to get the first correct URL - for tracker_url in re.split(delimiters_regex, tracker_url): + for next_tracker_url in re.split(delimiters_regex, tracker_url): # Rule out the case where the regex returns None - if not tracker_url: + if not next_tracker_url: continue # Rule out truncated URLs - if re.search(truncated_url_detector, tracker_url): + if re.search(truncated_url_detector, next_tracker_url): continue # Try to match it against a simple regexp - if not re.match(url_regex, tracker_url): + if not re.match(url_regex, next_tracker_url): continue - tracker_url = re.sub(remove_trailing_junk, '', tracker_url) - try: - scheme, (host, port), path = _parse_tracker_url(tracker_url) + scheme, (host, port), path = _parse_tracker_url(re.sub(remove_trailing_junk, "", next_tracker_url)) if scheme == UDP: return f"{scheme}://{host}:{port}" if scheme in {HTTP, HTTPS}: # HTTP(S) trackers must have a path - path = path.rstrip('/') + path = path.rstrip("/") if not path: continue - uniformed_port = '' if port == DEFAULT_PORTS[scheme] else f':{port}' + uniformed_port = "" if port == DEFAULT_PORTS[scheme] else f":{port}" return f"{scheme}://{host}{uniformed_port}{path}" except MalformedTrackerURLException: @@ -73,7 +75,7 @@ def get_uniformed_tracker_url(tracker_url: str): return None -def parse_tracker_url(tracker_url): +def parse_tracker_url(tracker_url: str) -> tuple[str, tuple[str, int], str]: """ Parses the tracker URL and checks whether it satisfies tracker URL constraints. 
     Additionally, it also checks if the tracker URL is a uniform and valid URL.
@@ -81,28 +83,29 @@ def parse_tracker_url(tracker_url):
     :param tracker_url the URL of the tracker
     :returns: Tuple (scheme, (host, port), announce_path)
     """
-    http_prefix = f'{HTTP}://'
-    http_port_suffix = f':{HTTP_PORT}/'
-    https_prefix = f'{HTTPS}://'
-    https_port_suffix = f':{HTTPS_PORT}/'
+    http_prefix = f"{HTTP}://"
+    http_port_suffix = f":{HTTP_PORT}/"
+    https_prefix = f"{HTTPS}://"
+    https_port_suffix = f":{HTTPS_PORT}/"

     url = tracker_url.lower()

     if url.startswith(http_prefix) and http_port_suffix in url:
-        tracker_url = tracker_url.replace(http_port_suffix, '/', 1)
+        tracker_url = tracker_url.replace(http_port_suffix, "/", 1)

     if url.startswith(https_prefix) and https_port_suffix in url:
-        tracker_url = tracker_url.replace(https_port_suffix, '/', 1)
+        tracker_url = tracker_url.replace(https_port_suffix, "/", 1)

     if tracker_url != get_uniformed_tracker_url(tracker_url):
-        raise MalformedTrackerURLException(f'Tracker URL is not sanitized ({tracker_url}).')
+        msg = f"Tracker URL is not sanitized ({tracker_url})."
+        raise MalformedTrackerURLException(msg)

     return _parse_tracker_url(tracker_url)


-def _parse_tracker_url(tracker_url):
+def _parse_tracker_url(tracker_url: str) -> tuple[str, tuple[str, int], str]:
     """
-    Parses the tracker URL and check whether it satisfies certain constraints:
+    Parses the tracker URL and checks whether it satisfies certain constraints.

     - The tracker type must be one of the supported types (udp, http, https).
     - UDP trackers requires a port.
@@ -119,21 +122,24 @@ def _parse_tracker_url(tracker_url):
     port = parsed_url.port

     if scheme not in SUPPORTED_SCHEMES:
-        raise MalformedTrackerURLException(f'Unsupported tracker type ({scheme}).')
+        msg = f"Unsupported tracker type ({scheme})."
+        raise MalformedTrackerURLException(msg)

     if scheme == UDP and not port:
-        raise MalformedTrackerURLException(f'Missing port for UDP tracker URL ({tracker_url}).')
+        msg = f"Missing port for UDP tracker URL ({tracker_url})."
+        raise MalformedTrackerURLException(msg)

     if scheme in {HTTP, HTTPS}:
         if not path:
-            raise MalformedTrackerURLException(f'Missing announce path for HTTP(S) tracker URL ({tracker_url}).')
+            msg = f"Missing announce path for HTTP(S) tracker URL ({tracker_url})."
+            raise MalformedTrackerURLException(msg)
         if not port:
             port = DEFAULT_PORTS[scheme]

     return scheme, (host, port), path


-def add_url_params(url, params):
+def add_url_params(url: str, params: dict) -> str:
     """
     Add GET params to provided URL being aware of existing.

@@ -168,15 +174,13 @@ def add_url_params(url, params):
     encoded_get_args = urlencode(parsed_get_args, doseq=True)
     # Creating new parsed result object based on provided with new
     # URL arguments. Same thing happens inside of urlparse.
-    new_url = ParseResult(
+    return ParseResult(
         parsed_url.scheme, parsed_url.netloc, parsed_url.path,
         parsed_url.params, encoded_get_args, parsed_url.fragment
     ).geturl()

-    return new_url
-

-def is_valid_url(url):
+def is_valid_url(url: str) -> bool | None:
     """
     Checks whether the given URL is a valid URL.
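For context, the behaviour these tracker helpers provide, sketched against the code above (not part of
the patch; the tracker URLs are made up):

    from tribler.core.libtorrent.trackers import get_uniformed_tracker_url, parse_tracker_url

    # Sanitization drops the default port and keeps the announce path.
    print(get_uniformed_tracker_url("http://tracker.example.org:80/announce"))
    # -> "http://tracker.example.org/announce"

    # UDP trackers must carry an explicit port; the announce path stays empty.
    print(parse_tracker_url("udp://tracker.example.org:6969"))
    # -> ("udp", ("tracker.example.org", 6969), "")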
@@ -185,10 +189,10 @@ def is_valid_url(url):
     :param url: an object representing the URL
     :return: Boolean specifying whether the URL is valid
     """
-    if ' ' in url.strip():
-        return
-    if url.lower().startswith('udp'):
-        url = url.lower().replace('udp', 'http', 1)
+    if " " in url.strip():
+        return None
+    if url.lower().startswith("udp"):
+        url = url.lower().replace("udp", "http", 1)
     split_url = urlsplit(url)

-    return not (split_url[0] == '' or split_url[1] == '')
+    return not (split_url[0] == "" or split_url[1] == "")
diff --git a/src/tribler/core/libtorrent/uris.py b/src/tribler/core/libtorrent/uris.py
index e77dd87503..e2a4bd09e6 100644
--- a/src/tribler/core/libtorrent/uris.py
+++ b/src/tribler/core/libtorrent/uris.py
@@ -14,17 +14,19 @@ def url_to_path(file_url: str) -> str:
     Convert a URL to a path.

     Example:
+    -------
     'file:///path/to/file' -> '/path/to/file'
+
     """
     url = URL(file_url)

-    if os.name == 'nt' and url.host:
+    if os.name == "nt" and url.host:
         # UNC file path, \\server\share\path...
         # ref: https://docs.microsoft.com/en-us/dotnet/standard/io/file-path-formats
         _, share, *segments = url.parts
-        path = (rf'\\{url.host}\{share}', *segments)
-    elif os.name == 'nt':
-        path = (url.path.lstrip('/'), )
+        path = (rf"\\{url.host}\{share}", *segments)
+    elif os.name == "nt":
+        path = (url.path.lstrip("/"), )
     else:
         path = (url.path, )

@@ -42,7 +44,7 @@ async def unshorten(uri: str) -> str:
     if scheme not in ("http", "https"):
         return uri

-    logger.info(f'Unshortening URI: {uri}')
+    logger.info("Unshortening URI: %s", uri)

     async with ClientSession() as session:
         try:
@@ -50,7 +52,7 @@ async def unshorten(uri: str) -> str:
             if response.status in (301, 302, 303, 307, 308):
                 uri = response.headers.get(LOCATION, uri)
         except Exception as e:
-            logger.warning(f'Error while unshortening a URI: {e.__class__.__name__}: {e}', exc_info=e)
+            logger.warning("Error while unshortening a URI: %s: %s", e.__class__.__name__, str(e), exc_info=e)

-    logger.info(f'Unshorted URI: {uri}')
+    logger.info("Unshortened URI: %s", uri)
     return uri
diff --git a/src/tribler/test_unit/core/libtorrent/download_manager/test_download.py b/src/tribler/test_unit/core/libtorrent/download_manager/test_download.py
index 3a32784579..96ba9cbe96 100644
--- a/src/tribler/test_unit/core/libtorrent/download_manager/test_download.py
+++ b/src/tribler/test_unit/core/libtorrent/download_manager/test_download.py
@@ -688,8 +688,8 @@ def test_get_tracker_status_unicode_decode_error(self) -> None:

         result = download.get_tracker_status()

-        self.assertEqual([42, "Disabled"], result["[DHT]"])
-        self.assertEqual([7, "Working"], result["[PeX]"])
+        self.assertEqual((42, "Disabled"), result["[DHT]"])
+        self.assertEqual((7, "Working"), result["[PeX]"])

     def test_get_tracker_status_get_peer_info_error(self) -> None:
         """
@@ -703,8 +703,8 @@ def test_get_tracker_status_get_peer_info_error(self) -> None:

         result = download.get_tracker_status()

-        self.assertEqual([0, "Working"], result["[DHT]"])
-        self.assertEqual([0, "Working"], result["[PeX]"])
+        self.assertEqual((0, "Working"), result["[DHT]"])
+        self.assertEqual((0, "Working"), result["[PeX]"])

     async def test_shutdown(self) -> None:
         """
diff --git a/src/tribler/test_unit/core/libtorrent/download_manager/test_download_manager.py b/src/tribler/test_unit/core/libtorrent/download_manager/test_download_manager.py
index 1442018cb1..6a4535b284 100644
--- a/src/tribler/test_unit/core/libtorrent/download_manager/test_download_manager.py
+++ b/src/tribler/test_unit/core/libtorrent/download_manager/test_download_manager.py
@@ 
-479,7 +479,7 @@ def test_get_download_rate_limit(self) -> None: self.manager.ltsessions[0].get_settings = Mock(return_value=settings) self.manager.ltsessions[0].download_rate_limit = functools.partial(settings.get, "download_rate_limit") - self.manager.set_download_rate_limit(42, 0) + self.manager.set_download_rate_limit(42) self.assertEqual(42, self.manager.get_download_rate_limit()) @@ -491,6 +491,6 @@ def test_get_upload_rate_limit(self) -> None: self.manager.ltsessions[0].get_settings = Mock(return_value=settings) self.manager.ltsessions[0].upload_rate_limit = functools.partial(settings.get, "upload_rate_limit") - self.manager.set_upload_rate_limit(42, 0) + self.manager.set_upload_rate_limit(42) self.assertEqual(42, self.manager.get_upload_rate_limit())
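The two call-site fixes above track a signature change in DownloadManager: the session-wide rate-limit
setters now take only the limit itself. A usage sketch (not part of the patch; `manager` stands for a
hypothetical running DownloadManager instance, with limits in bytes per second as libtorrent expects):

    manager.set_download_rate_limit(42)
    assert manager.get_download_rate_limit() == 42

    manager.set_upload_rate_limit(42)
    assert manager.get_upload_rate_limit() == 42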