diff --git a/README.rst b/README.rst
index 8e80e34def3..a022a322304 100644
--- a/README.rst
+++ b/README.rst
@@ -7,7 +7,7 @@ Tribler
 
 |downloads_7_0| |downloads_7_1| |downloads_7_2| |downloads_7_3| |downloads_7_4|
 |downloads_7_5| |downloads_7_6| |downloads_7_7| |downloads_7_8| |downloads_7_9|
-|downloads_7_10| |downloads_7_11| |downloads_7_12|
+|downloads_7_10| |downloads_7_11| |downloads_7_12| |downloads_7_13|
 
 |doi| |openhub| |discord|
 
@@ -187,6 +187,10 @@ We like to hear your feedback and suggestions. To reach out to us, you can join
    :target: https://github.com/Tribler/tribler/releases
    :alt: Downloads(7.12.1)
 
+.. |downloads_7_13| image:: https://img.shields.io/github/downloads/tribler/tribler/v7.13.0/total.svg?style=flat
+   :target: https://github.com/Tribler/tribler/releases
+   :alt: Downloads(7.13.0)
+
 .. |contributors| image:: https://img.shields.io/github/contributors/tribler/tribler.svg?style=flat
    :target: https://github.com/Tribler/tribler/graphs/contributors
    :alt: Contributors
diff --git a/build/mac/makedist_macos.sh b/build/mac/makedist_macos.sh
index a722dc6ad57..6862d595d30 100755
--- a/build/mac/makedist_macos.sh
+++ b/build/mac/makedist_macos.sh
@@ -21,7 +21,7 @@ export RESOURCES=build/mac/resources
 python3 -m venv build-env
 . ./build-env/bin/activate
 
-python3 -m pip install --upgrade pip
+python3 -m pip install pip==23.0.1 # pin pip version to avoid "--no-use-pep517" issues with the latest version
 python3 -m pip install PyInstaller==4.2 --no-use-pep517
 python3 -m pip install --upgrade -r requirements-build.txt
 
diff --git a/doc/development/development_on_osx.rst b/doc/development/development_on_osx.rst
index c1884cd000d..4808824988c 100644
--- a/doc/development/development_on_osx.rst
+++ b/doc/development/development_on_osx.rst
@@ -19,6 +19,12 @@ If you wish to run the Tribler Graphical User Interface, PyQt5 should be availab
 
     qmake --version # test whether qt is installed correctly
 
+Add `qt@5/bin` to the PATH environment variable, e.g.:
+
+..
code-block:: bash + + export PATH="/usr/local/opt/qt@5/bin:$PATH" + Other Packages ~~~~~~~~~~~~~~ diff --git a/src/tribler/core/components/component.py b/src/tribler/core/components/component.py index 394839e32bd..7a10dcee4f3 100644 --- a/src/tribler/core/components/component.py +++ b/src/tribler/core/components/component.py @@ -2,48 +2,24 @@ import logging import sys +import time from asyncio import Event from typing import Optional, Set, TYPE_CHECKING, Type, Union +from tribler.core.components.exceptions import ComponentStartupException, MissedDependency, NoneComponent +from tribler.core.components.reporter.exception_handler import default_core_exception_handler +from tribler.core.sentry_reporter.sentry_reporter import SentryReporter + if TYPE_CHECKING: from tribler.core.components.session import Session, T -class ComponentError(Exception): - pass - - -class ComponentStartupException(ComponentError): - def __init__(self, component: Component, cause: Exception): - super().__init__(component.__class__.__name__) - self.component = component - self.__cause__ = cause - - -class MissedDependency(ComponentError): - def __init__(self, component: Component, dependency: Type[Component]): - msg = f'Missed dependency: {component.__class__.__name__} requires {dependency.__name__} to be active' - super().__init__(msg) - self.component = component - self.dependency = dependency - - -class MultipleComponentsFound(ComponentError): - def __init__(self, comp_cls: Type[Component], candidates: Set[Component]): - msg = f'Found multiple subclasses for the class {comp_cls}. Candidates are: {candidates}.' - super().__init__(msg) - - -class NoneComponent: - def __getattr__(self, item): - return NoneComponent() - - class Component: tribler_should_stop_on_component_error = True - def __init__(self): - self.logger = logging.getLogger(self.__class__.__name__) + def __init__(self, reporter: Optional[SentryReporter] = None): + self.name = self.__class__.__name__ + self.logger = logging.getLogger(self.name) self.logger.info('__init__') self.session: Optional[Session] = None self.dependencies: Set[Component] = set() @@ -54,20 +30,21 @@ def __init__(self): self.stopped = False # Every component starts unused, so it does not lock the whole system on shutdown self.unused_event.set() + self.reporter = reporter or default_core_exception_handler.sentry_reporter async def start(self): - self.logger.info(f'Start: {self.__class__.__name__}') + start_time = time.time() + self._set_component_status('starting...') try: await self.run() + self._set_component_status(f'started in {time.time() - start_time:.4f}s') except Exception as e: # pylint: disable=broad-except # Writing to stderr is for the case when logger is not configured properly (as my happen in local tests, # for example) to avoid silent suppression of the important exceptions - sys.stderr.write(f'\nException in {self.__class__.__name__}.start(): {type(e).__name__}:{e}\n') - if isinstance(e, MissedDependency): - # Use logger.error instead of logger.exception here to not spam log with multiple error tracebacks - self.logger.error(e) - else: - self.logger.exception(f'Exception in {self.__class__.__name__}.start(): {type(e).__name__}:{e}') + sys.stderr.write(f'\nException in {self.name}.start(): {type(e).__name__}:{e}\n') + msg = f'exception in {self.name}.start(): {type(e).__name__}:{e}' + exc_info = not isinstance(e, MissedDependency) + self._set_component_status(msg, logging.ERROR, exc_info=exc_info) self.failed = True self.started_event.set() if self.session.failfast: @@ 
-76,15 +53,17 @@ async def start(self): self.started_event.set() async def stop(self): - component_name = self.__class__.__name__ dependants = sorted(component.__class__.__name__ for component in self.reverse_dependencies) - self.logger.info(f'Stopping {component_name}: waiting for {dependants} to release it') + msg = f'Stopping {self.name}: waiting for {dependants} to release it' + self._set_component_status(msg) await self.unused_event.wait() - self.logger.info(f"Component {component_name} free, shutting down") + self._set_component_status('shutting down') try: await self.shutdown() + self._set_component_status('shut down') except Exception as e: # pylint: disable=broad-except - self.logger.exception(f"Exception in {self.__class__.__name__}.shutdown(): {type(e).__name__}:{e}") + msg = f"exception in {self.name}.shutdown(): {type(e).__name__}:{e}" + self._set_component_status(msg, logging.ERROR, exc_info=True) raise finally: self.stopped = True @@ -92,7 +71,7 @@ async def stop(self): self._release_instance(dep) remaining_components = sorted( c.__class__.__name__ for c in self.session.components.values() if not c.stopped) - self.logger.info(f"Component {component_name}, stopped. Remaining components: {remaining_components}") + self.logger.info(f"Component {self.name}, stopped. Remaining components: {remaining_components}") async def run(self): pass @@ -123,9 +102,11 @@ async def get_component(self, dependency: Type[T]) -> Optional[T]: if not dep: return None + self._set_component_status(f'waiting for {dep.name}') await dep.started_event.wait() + if dep.failed: - self.logger.warning(f'Component {self.__class__.__name__} has failed dependency {dependency.__name__}') + self.logger.warning(f'Component {self.name} has failed dependency {dependency.__name__}') return None if dep not in self.dependencies and dep is not self: @@ -166,3 +147,7 @@ def _unuse_by(self, component: Component): self.reverse_dependencies.remove(component) if not self.reverse_dependencies: self.unused_event.set() + + def _set_component_status(self, status: str, log_level: int = logging.INFO, **kwargs): + self.reporter.additional_information['components_status'][self.name] = status + self.logger.log(log_level, f'{self.name}: {status}', **kwargs) diff --git a/src/tribler/core/components/exceptions.py b/src/tribler/core/components/exceptions.py new file mode 100644 index 00000000000..b74d768180d --- /dev/null +++ b/src/tribler/core/components/exceptions.py @@ -0,0 +1,36 @@ +from __future__ import annotations + +from typing import Set, TYPE_CHECKING, Type + +if TYPE_CHECKING: + from tribler.core.components.component import Component + + +class ComponentError(Exception): + pass + + +class ComponentStartupException(ComponentError): + def __init__(self, component: Component, cause: Exception): + super().__init__(component.__class__.__name__) + self.component = component + self.__cause__ = cause + + +class MissedDependency(ComponentError): + def __init__(self, component: Component, dependency: Type[Component]): + msg = f'Missed dependency: {component.__class__.__name__} requires {dependency.__name__} to be active' + super().__init__(msg) + self.component = component + self.dependency = dependency + + +class MultipleComponentsFound(ComponentError): + def __init__(self, comp_cls: Type[Component], candidates: Set[Component]): + msg = f'Found multiple subclasses for the class {comp_cls}. Candidates are: {candidates}.' 
+ super().__init__(msg) + + +class NoneComponent: + def __getattr__(self, item): + return NoneComponent() diff --git a/src/tribler/core/components/gigachannel_manager/gigachannel_manager.py b/src/tribler/core/components/gigachannel_manager/gigachannel_manager.py index 27fb306eff0..fa007d59d76 100644 --- a/src/tribler/core/components/gigachannel_manager/gigachannel_manager.py +++ b/src/tribler/core/components/gigachannel_manager/gigachannel_manager.py @@ -191,7 +191,6 @@ def check_channels_updates(self): infohash = bytes(channel.infohash) if self.download_manager.metainfo_requests.get(infohash): continue - status = self.download_manager.get_download(infohash).get_state().get_status() if not self.download_manager.download_exists(infohash): self._logger.info( "Downloading new channel version %s ver %i->%i", @@ -200,7 +199,10 @@ def check_channels_updates(self): channel.timestamp, ) self.download_channel(channel) - elif status == DownloadStatus.SEEDING: + continue + + channel_download = self.download_manager.get_download(infohash) + if channel_download and channel_download.get_state().get_status() == DownloadStatus.SEEDING: self._logger.info( "Processing previously downloaded, but unprocessed channel torrent %s ver %i->%i", channel.dirname, @@ -208,10 +210,9 @@ def check_channels_updates(self): channel.timestamp, ) self.channels_processing_queue[channel.infohash] = (PROCESS_CHANNEL_DIR, channel) - except Exception: - self._logger.exception( - "Error when tried to download a newer version of channel %s", hexlify(channel.public_key) - ) + except Exception as e: + self._logger.exception("Error when tried to download a newer version of channel " + f"{hexlify(channel.public_key)}: {type(e).__name__}: {e}") async def remove_channel_download(self, to_remove): """ diff --git a/src/tribler/core/components/gigachannel_manager/tests/test_gigachannel_manager.py b/src/tribler/core/components/gigachannel_manager/tests/test_gigachannel_manager.py index 11c2b0d00f8..863dd2541d9 100644 --- a/src/tribler/core/components/gigachannel_manager/tests/test_gigachannel_manager.py +++ b/src/tribler/core/components/gigachannel_manager/tests/test_gigachannel_manager.py @@ -1,4 +1,6 @@ import asyncio +import os +import random from asyncio import Future from datetime import datetime from pathlib import Path @@ -38,8 +40,8 @@ def personal_channel(metadata_store): return chan -@pytest.fixture -async def gigachannel_manager(metadata_store): +@pytest.fixture(name="gigachannel_manager") +async def gigachannel_manager_fixture(metadata_store): chanman = GigaChannelManager( state_dir=metadata_store.channels_dir.parent, metadata_store=metadata_store, @@ -225,6 +227,60 @@ def mock_process_channel_dir(c, _): assert not gigachannel_manager.channels_processing_queue +@db_session +def test_check_channel_updates_for_different_states(gigachannel_manager, metadata_store): + def random_subscribed_channel(): + return metadata_store.ChannelMetadata( + title=f"Channel {random.randint(0, 100)}", + public_key=os.urandom(32), + signature=os.urandom(32), + skip_key_check=True, + timestamp=123, + local_version=122, + subscribed=True, + infohash=random_infohash(), + ) + + # Three channels in different states based on the setup + channel_with_metainfo = random_subscribed_channel() + already_downloaded_channel = random_subscribed_channel() + non_downloaded_channel = random_subscribed_channel() + + # Setup 1: metainfo is already available for channel torrent. 
+ def mock_get_metainfo(infohash): + return MagicMock() if infohash == channel_with_metainfo.infohash else None + + gigachannel_manager.download_manager.metainfo_requests = MagicMock(get=mock_get_metainfo) + + # Setup 2: Only one specific channel torrent is already downloaded. + def mock_download_exists(infohash): + return infohash == already_downloaded_channel.infohash + + gigachannel_manager.download_manager.download_exists = mock_download_exists + + # Setup 2 (contd): We expect non-downloaded channel to be downloaded + # so mocking download_channel() method. + gigachannel_manager.download_channel = MagicMock() + + # Setup 3: Downloaded channel torrent is set on Seeding state. + def mock_get_download(infohash): + if infohash != already_downloaded_channel.infohash: + return None + + seeding_state = MagicMock(get_status=lambda: DownloadStatus.SEEDING) + return MagicMock(get_state=lambda: seeding_state) + + gigachannel_manager.download_manager.get_download = mock_get_download + + # Act + gigachannel_manager.check_channels_updates() + + # Assert + gigachannel_manager.download_channel.assert_called_once_with(non_downloaded_channel) + assert len(gigachannel_manager.channels_processing_queue) == 1 + assert already_downloaded_channel.infohash in gigachannel_manager.channels_processing_queue + + async def test_remove_cruft_channels(torrent_template, personal_channel, gigachannel_manager, metadata_store): remove_list = [] with db_session: diff --git a/src/tribler/core/components/knowledge/db/knowledge_db.py b/src/tribler/core/components/knowledge/db/knowledge_db.py index ae45ef48f03..b7adbeced68 100644 --- a/src/tribler/core/components/knowledge/db/knowledge_db.py +++ b/src/tribler/core/components/knowledge/db/knowledge_db.py @@ -2,7 +2,7 @@ import logging from dataclasses import dataclass from enum import IntEnum -from typing import Callable, Iterator, List, Optional, Set +from typing import Any, Callable, Iterator, List, Optional, Set from pony import orm from pony.orm import raw_sql @@ -139,6 +139,10 @@ class StatementOp(db.Entity): orm.composite_key(statement, peer) + class Misc(db.Entity): # pylint: disable=unused-variable + name = orm.PrimaryKey(str) + value = orm.Optional(str) + def add_operation(self, operation: StatementOperation, signature: bytes, is_local_peer: bool = False, is_auto_generated: bool = False, counter_increment: int = 1) -> bool: """ Add the operation that will be applied to a statement. 
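The `Misc` entity added above gives the knowledge database a small key-value table; the `get_misc`/`set_misc` helpers added further down in this file read and write it. Below is a minimal standalone sketch of the same pattern, assuming Pony ORM; the module-level `db`, the sqlite in-memory binding, and the free `set_misc`/`get_misc` functions are illustrative stand-ins (the real code lives on the KnowledgeDatabase instance and uses its `get_or_create` helper, whose behaviour is inlined here).

from pony import orm

db = orm.Database()


class Misc(db.Entity):
    # Simple key-value table, as added to the knowledge database above
    name = orm.PrimaryKey(str)
    value = orm.Optional(str)


db.bind(provider='sqlite', filename=':memory:')
db.generate_mapping(create_tables=True)


@orm.db_session
def set_misc(key: str, value) -> None:
    # Create the row on first use, then update it (the get_or_create pattern)
    entry = Misc.get(name=key) or Misc(name=key)
    entry.value = str(value)


@orm.db_session
def get_misc(key: str, default=None):
    entry = Misc.get(name=key)
    return entry.value if entry else default


set_misc('rules_processor_version', 4)
assert get_misc('rules_processor_version') == '4'
assert get_misc('missing', default='0') == '0'

Storing values as strings keeps the table trivial; callers such as the rules processor convert to `int` on read, as `get_rules_processor_version` does later in this patch.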
@@ -441,3 +445,11 @@ def _get_random_operations_by_condition(self, condition: Callable[[Entity], bool operations.add(operation) return operations + + def get_misc(self, key: str, default: Optional[str] = None) -> Optional[str]: + data = self.instance.Misc.get(name=key) + return data.value if data else default + + def set_misc(self, key: str, value: Any): + key_value = get_or_create(self.instance.Misc, name=key) + key_value.value = str(value) diff --git a/src/tribler/core/components/knowledge/db/tests/test_knowledge_db.py b/src/tribler/core/components/knowledge/db/tests/test_knowledge_db.py index df4cca59363..471ce15d215 100644 --- a/src/tribler/core/components/knowledge/db/tests/test_knowledge_db.py +++ b/src/tribler/core/components/knowledge/db/tests/test_knowledge_db.py @@ -633,3 +633,18 @@ def _subjects(subject_type=None, obj='', predicate=None): assert _subjects(obj='linux') == {'infohash1', 'infohash2', 'infohash3'} assert _subjects(predicate=ResourceType.TAG, obj='linux') == {'infohash3'} assert _subjects(predicate=ResourceType.TITLE) == {'infohash1', 'infohash2'} + + @db_session + def test_non_existent_misc(self): + """Test that get_misc returns proper values""" + # None if the key does not exist + assert not self.db.get_misc(key='non existent') + + # A value if the key does exist + assert self.db.get_misc(key='non existent', default=42) == 42 + + @db_session + def test_set_misc(self): + """Test that set_misc works as expected""" + self.db.set_misc(key='key', value='value') + assert self.db.get_misc(key='key') == 'value' diff --git a/src/tribler/core/components/knowledge/knowledge_component.py b/src/tribler/core/components/knowledge/knowledge_component.py index 5c343c8a01d..d5c8d9745ec 100644 --- a/src/tribler/core/components/knowledge/knowledge_component.py +++ b/src/tribler/core/components/knowledge/knowledge_component.py @@ -4,7 +4,7 @@ from tribler.core.components.key.key_component import KeyComponent from tribler.core.components.knowledge.community.knowledge_community import KnowledgeCommunity from tribler.core.components.knowledge.db.knowledge_db import KnowledgeDatabase -from tribler.core.components.knowledge.rules.tag_rules_processor import KnowledgeRulesProcessor +from tribler.core.components.knowledge.rules.knowledge_rules_processor import KnowledgeRulesProcessor from tribler.core.components.metadata_store.utils import generate_test_channels from tribler.core.utilities.simpledefs import STATEDIR_DB_DIR diff --git a/src/tribler/core/components/knowledge/rules/tag_rules_processor.py b/src/tribler/core/components/knowledge/rules/knowledge_rules_processor.py similarity index 79% rename from src/tribler/core/components/knowledge/rules/tag_rules_processor.py rename to src/tribler/core/components/knowledge/rules/knowledge_rules_processor.py index 5ba8642d5c7..7b2eb84675a 100644 --- a/src/tribler/core/components/knowledge/rules/tag_rules_processor.py +++ b/src/tribler/core/components/knowledge/rules/knowledge_rules_processor.py @@ -18,11 +18,12 @@ DEFAULT_BATCH_SIZE = 1000 LAST_PROCESSED_TORRENT_ID = 'last_processed_torrent_id' +RULES_PROCESSOR_VERSION = 'rules_processor_version' class KnowledgeRulesProcessor(TaskManager): # this value must be incremented in the case of new rules set has been applied - version: int = 2 + version: int = 4 def __init__(self, notifier: Notifier, db: KnowledgeDatabase, mds: MetadataStore, batch_size: int = DEFAULT_BATCH_SIZE, interval: float = DEFAULT_INTERVAL): @@ -45,19 +46,26 @@ def __init__(self, notifier: Notifier, db: KnowledgeDatabase, mds: 
MetadataStore def start(self): self.logger.info('Start') + rules_processor_version = self.get_rules_processor_version() + if rules_processor_version < self.version: + # the database was processed by the previous version of the rules processor + self.logger.info('New version of rules processor is available. Starting knowledge generation from scratch.') + self.set_last_processed_torrent_id(0) + self.set_rules_processor_version(self.version) + max_row_id = self.mds.get_max_rowid() is_finished = self.get_last_processed_torrent_id() >= max_row_id if not is_finished: self.logger.info(f'Register process_batch task with interval: {self.interval} sec') - self.register_task(name=self.process_batch.__name__, - interval=self.interval, - task=self.process_batch) + self.register_task(name=self.process_batch.__name__, interval=self.interval, task=self.process_batch) + else: + self.logger.info(f'Database processing is finished. Last processed torrent id: {max_row_id}') async def shutdown(self): await self.shutdown_task_manager() - @db_session + @db_session(serializable=True) def process_batch(self) -> int: def query(_start, _end): return lambda t: _start < t.rowid and t.rowid <= _end and \ @@ -77,7 +85,7 @@ def query(_start, _end): torrent.tag_processor_version = self.version processed += 1 - self.mds.set_value(LAST_PROCESSED_TORRENT_ID, str(end)) + self.set_last_processed_torrent_id(end) self.logger.info(f'Processed: {processed} titles. Added {added} tags.') is_finished = end >= max_row_id @@ -107,4 +115,13 @@ def save_statements(self, subject_type: ResourceType, subject: str, predicate: R self.db.add_auto_generated(subject_type=subject_type, subject=subject, predicate=predicate, obj=obj) def get_last_processed_torrent_id(self) -> int: - return int(self.mds.get_value(LAST_PROCESSED_TORRENT_ID, default='0')) + return int(self.db.get_misc(LAST_PROCESSED_TORRENT_ID, default='0')) + + def set_last_processed_torrent_id(self, value: int): + self.db.set_misc(LAST_PROCESSED_TORRENT_ID, str(value)) + + def get_rules_processor_version(self) -> int: + return int(self.db.get_misc(RULES_PROCESSOR_VERSION, default='0')) + + def set_rules_processor_version(self, version: int): + self.db.set_misc(RULES_PROCESSOR_VERSION, str(version)) diff --git a/src/tribler/core/components/knowledge/rules/tests/test_knowledge_rules_processor.py b/src/tribler/core/components/knowledge/rules/tests/test_knowledge_rules_processor.py new file mode 100644 index 00000000000..46ccadd7a26 --- /dev/null +++ b/src/tribler/core/components/knowledge/rules/tests/test_knowledge_rules_processor.py @@ -0,0 +1,151 @@ +import os +from unittest.mock import MagicMock, Mock, patch + +import pytest +from ipv8.keyvault.private.libnaclkey import LibNaCLSK +from pony.orm import db_session + +from tribler.core import notifications +from tribler.core.components.knowledge.db.knowledge_db import KnowledgeDatabase, ResourceType +from tribler.core.components.knowledge.rules.knowledge_rules_processor import KnowledgeRulesProcessor +from tribler.core.components.metadata_store.db.serialization import REGULAR_TORRENT +from tribler.core.components.metadata_store.db.store import MetadataStore +from tribler.core.utilities.path_util import Path +from tribler.core.utilities.utilities import MEMORY_DB + +TEST_BATCH_SIZE = 100 +TEST_INTERVAL = 0.1 + + +# pylint: disable=redefined-outer-name, protected-access +@pytest.fixture +async def tag_rules_processor(tmp_path: Path): + mds = MetadataStore(db_filename=MEMORY_DB, channels_dir=tmp_path, my_key=LibNaCLSK()) + knowledge_db = 
KnowledgeDatabase(filename=':memory:') + processor = KnowledgeRulesProcessor(notifier=MagicMock(), db=knowledge_db, mds=mds, + batch_size=TEST_BATCH_SIZE, + interval=TEST_INTERVAL) + yield processor + await processor.shutdown() + + +def test_constructor(tag_rules_processor: KnowledgeRulesProcessor): + # test that constructor of TagRulesProcessor works as expected + assert tag_rules_processor.batch_size == TEST_BATCH_SIZE + assert tag_rules_processor.interval == TEST_INTERVAL + + m: MagicMock = tag_rules_processor.notifier.add_observer + m.assert_called_with(notifications.new_torrent_metadata_created, tag_rules_processor.process_torrent_title, + synchronous=True) + + +@patch.object(KnowledgeRulesProcessor, 'save_statements') +def test_process_torrent_file(mocked_save_tags: MagicMock, tag_rules_processor: KnowledgeRulesProcessor): + # test on None + assert not tag_rules_processor.process_torrent_title(infohash=None, title='title') + assert not tag_rules_processor.process_torrent_title(infohash=b'infohash', title=None) + + # test that process_torrent_title doesn't find any tags in the title + assert not tag_rules_processor.process_torrent_title(infohash=b'infohash', title='title') + mocked_save_tags.assert_not_called() + + # test that process_torrent_title does find tags in the title + assert tag_rules_processor.process_torrent_title(infohash=b'infohash', title='title [tag]') == 1 + mocked_save_tags.assert_called_with(subject_type=ResourceType.TORRENT, subject='696e666f68617368', objects={'tag'}, + predicate=ResourceType.TAG) + + +def test_save_tags(tag_rules_processor: KnowledgeRulesProcessor): + # test that tag_rules_processor calls TagDatabase with correct args + expected_calls = [ + {'obj': 'tag2', 'predicate': ResourceType.TAG, 'subject': 'infohash', 'subject_type': ResourceType.TORRENT}, + {'obj': 'tag1', 'predicate': ResourceType.TAG, 'subject': 'infohash', 'subject_type': ResourceType.TORRENT} + ] + tag_rules_processor.db.add_auto_generated = Mock() + tag_rules_processor.save_statements(subject_type=ResourceType.TORRENT, subject='infohash', + predicate=ResourceType.TAG, + objects={'tag1', 'tag2'}) + actual_calls = [c.kwargs for c in tag_rules_processor.db.add_auto_generated.mock_calls] + + # compare two lists of dict + assert [c for c in actual_calls if c not in expected_calls] == [] + + +@db_session +@patch.object(KnowledgeRulesProcessor, 'process_torrent_title', new=MagicMock(return_value=1)) +@patch.object(KnowledgeRulesProcessor, 'cancel_pending_task') +def test_process_batch(mocked_cancel_pending_task: Mock, tag_rules_processor: KnowledgeRulesProcessor): + # test the correctness of the inner logic of process_batch. 
+ + # fill the db with 50 torrents + for _ in range(50): + tag_rules_processor.mds.TorrentMetadata(infohash=os.urandom(20), metadata_type=REGULAR_TORRENT) + + tag_rules_processor.set_last_processed_torrent_id(10) # batch should start from 11 + tag_rules_processor.batch_size = 30 # and process 30 entities + + # first iteration + assert tag_rules_processor.process_batch() == 30 + assert tag_rules_processor.get_last_processed_torrent_id() == 40 + assert not mocked_cancel_pending_task.called # it should not be the last batch in the db + + # second iteration + assert tag_rules_processor.process_batch() == 10 + assert tag_rules_processor.get_last_processed_torrent_id() == 50 + assert mocked_cancel_pending_task.called # it should be the last batch in the db + + +@db_session +@patch.object(KnowledgeRulesProcessor, 'register_task', new=MagicMock()) +def test_start_no_previous_version(tag_rules_processor: KnowledgeRulesProcessor): + # test that if there is no previous version of the rules processor, it will be created + assert tag_rules_processor.get_rules_processor_version() == 0 + assert tag_rules_processor.get_rules_processor_version() != tag_rules_processor.version + + tag_rules_processor.start() + + # version should be set to the current version + assert tag_rules_processor.get_rules_processor_version() == tag_rules_processor.version + # last processed torrent id should be set to 0 + assert tag_rules_processor.get_last_processed_torrent_id() == 0 + + +@db_session +@patch.object(KnowledgeRulesProcessor, 'register_task', new=MagicMock()) +def test_start_previous_version(tag_rules_processor: KnowledgeRulesProcessor): + # test that if there is a previous version of the rules processor, it will be updated to the current + tag_rules_processor.set_rules_processor_version(tag_rules_processor.version - 1) + tag_rules_processor.set_last_processed_torrent_id(100) + + tag_rules_processor.start() + + # version should be set to the current version + assert tag_rules_processor.get_rules_processor_version() == tag_rules_processor.version + # last processed torrent id should be set to 0 + assert tag_rules_processor.get_last_processed_torrent_id() == 0 + + +@db_session +@patch.object(KnowledgeRulesProcessor, 'register_task', new=MagicMock()) +def test_start_current_version(tag_rules_processor: KnowledgeRulesProcessor): + # test that if there is a current version of the rules processor, it will process the database from + # the last processed torrent id + tag_rules_processor.set_rules_processor_version(tag_rules_processor.version) + tag_rules_processor.set_last_processed_torrent_id(100) + + tag_rules_processor.start() + + # version should be the same + assert tag_rules_processor.get_rules_processor_version() == tag_rules_processor.version + # last processed torrent id should be the same + assert tag_rules_processor.get_last_processed_torrent_id() == 100 + + +@db_session +@patch.object(KnowledgeRulesProcessor, 'register_task') +def test_start_batch_processing(mocked_register_task: Mock, tag_rules_processor: KnowledgeRulesProcessor): + # test that if there are torrents in the database, the batch processing will be started + tag_rules_processor.mds.TorrentMetadata(infohash=os.urandom(20), metadata_type=REGULAR_TORRENT) + tag_rules_processor.start() + + assert mocked_register_task.called diff --git a/src/tribler/core/components/knowledge/rules/tests/test_tag_rules_processor.py b/src/tribler/core/components/knowledge/rules/tests/test_tag_rules_processor.py deleted file mode 100644 index ff6c7bd2a72..00000000000 --- 
a/src/tribler/core/components/knowledge/rules/tests/test_tag_rules_processor.py +++ /dev/null @@ -1,105 +0,0 @@ -from types import SimpleNamespace -from unittest.mock import MagicMock, patch - -import pytest - -from tribler.core import notifications -from tribler.core.components.knowledge.db.knowledge_db import ResourceType -from tribler.core.components.knowledge.rules.tag_rules_processor import KnowledgeRulesProcessor, \ - LAST_PROCESSED_TORRENT_ID - -TEST_BATCH_SIZE = 100 -TEST_INTERVAL = 0.1 - - -# pylint: disable=redefined-outer-name, protected-access -@pytest.fixture -async def tag_rules_processor(): - processor = KnowledgeRulesProcessor(notifier=MagicMock(), db=MagicMock(), mds=MagicMock(), - batch_size=TEST_BATCH_SIZE, - interval=TEST_INTERVAL) - yield processor - await processor.shutdown() - - -def test_constructor(tag_rules_processor: KnowledgeRulesProcessor): - # test that constructor of TagRulesProcessor works as expected - assert tag_rules_processor.batch_size == TEST_BATCH_SIZE - assert tag_rules_processor.interval == TEST_INTERVAL - - m: MagicMock = tag_rules_processor.notifier.add_observer - m.assert_called_with(notifications.new_torrent_metadata_created, tag_rules_processor.process_torrent_title, - synchronous=True) - - -@patch.object(KnowledgeRulesProcessor, 'save_statements') -def test_process_torrent_file(mocked_save_tags: MagicMock, tag_rules_processor: KnowledgeRulesProcessor): - # test on None - assert not tag_rules_processor.process_torrent_title(infohash=None, title='title') - assert not tag_rules_processor.process_torrent_title(infohash=b'infohash', title=None) - - # test that process_torrent_title doesn't find any tags in the title - assert not tag_rules_processor.process_torrent_title(infohash=b'infohash', title='title') - mocked_save_tags.assert_not_called() - - # test that process_torrent_title does find tags in the title - assert tag_rules_processor.process_torrent_title(infohash=b'infohash', title='title [tag]') == 1 - mocked_save_tags.assert_called_with(subject_type=ResourceType.TORRENT, subject='696e666f68617368', objects={'tag'}, - predicate=ResourceType.TAG) - - -def test_save_tags(tag_rules_processor: KnowledgeRulesProcessor): - # test that tag_rules_processor calls TagDatabase with correct args - expected_calls = [ - {'obj': 'tag2', 'predicate': ResourceType.TAG, 'subject': 'infohash', 'subject_type': ResourceType.TORRENT}, - {'obj': 'tag1', 'predicate': ResourceType.TAG, 'subject': 'infohash', 'subject_type': ResourceType.TORRENT} - ] - tag_rules_processor.save_statements(subject_type=ResourceType.TORRENT, subject='infohash', - predicate=ResourceType.TAG, - objects={'tag1', 'tag2'}) - actual_calls = [c.kwargs for c in tag_rules_processor.db.add_auto_generated.mock_calls] - - # compare two lists of dict - assert [c for c in actual_calls if c not in expected_calls] == [] - - -@patch.object(KnowledgeRulesProcessor, 'process_torrent_title', new=MagicMock(return_value=1)) -def test_process_batch_within_the_boundary(tag_rules_processor: KnowledgeRulesProcessor): - # test inner logic of `process_batch` in case this batch located within the boundary - returned_batch_size = TEST_BATCH_SIZE // 2 # let's return a half of requested items - - def select(_): - return [SimpleNamespace(infohash=i, title=i) for i in range(returned_batch_size)] - - tag_rules_processor.mds.TorrentMetadata.select = select - tag_rules_processor.mds.get_value = lambda *_, **__: 0 # let's start from 0 for LAST_PROCESSED_TORRENT_ID - - # let's specify `max_rowid` in such a way that it is far 
more than end of the current batch - tag_rules_processor.mds.get_max_rowid = lambda: TEST_BATCH_SIZE * 10 - - # assert that actually returned count of processed items is equal to `returned_batch_size` - assert tag_rules_processor.process_batch() == returned_batch_size - - # assert that actually stored last_processed_torrent_id is equal to `TEST_BATCH_SIZE` - tag_rules_processor.mds.set_value.assert_called_with(LAST_PROCESSED_TORRENT_ID, str(TEST_BATCH_SIZE)) - - -@patch.object(KnowledgeRulesProcessor, 'process_torrent_title', new=MagicMock(return_value=1)) -def test_process_batch_beyond_the_boundary(tag_rules_processor: KnowledgeRulesProcessor): - # test inner logic of `process_batch` in case this batch located on a border - returned_batch_size = TEST_BATCH_SIZE // 2 # let's return a half of requested items - - # let's specify `max_rowid` in such a way that it is less than end of the current batch - max_rowid = returned_batch_size // 2 - - def select(_): - return [SimpleNamespace(infohash=i, title=i) for i in range(returned_batch_size)] - - tag_rules_processor.mds.get_value = lambda *_, **__: 0 # let's start from 0 for LAST_PROCESSED_TORRENT_ID - tag_rules_processor.mds.TorrentMetadata.select = select - - tag_rules_processor.mds.get_max_rowid = lambda: max_rowid - - # assert that actually returned count of processed items is equal to `max_rowid` - assert tag_rules_processor.process_batch() == returned_batch_size - tag_rules_processor.mds.set_value.assert_called_with(LAST_PROCESSED_TORRENT_ID, str(max_rowid)) diff --git a/src/tribler/core/components/libtorrent/download_manager/download_config.py b/src/tribler/core/components/libtorrent/download_manager/download_config.py index 8e1f931c77c..d66c53926b0 100644 --- a/src/tribler/core/components/libtorrent/download_manager/download_config.py +++ b/src/tribler/core/components/libtorrent/download_manager/download_config.py @@ -4,11 +4,10 @@ from configobj import ConfigObj from validate import Validator -from tribler.core.components.libtorrent.settings import DownloadDefaultsSettings +from tribler.core.components.libtorrent.settings import DownloadDefaultsSettings, get_default_download_dir from tribler.core.components.libtorrent.utils.libtorrent_helper import libtorrent as lt from tribler.core.exceptions import InvalidConfigException from tribler.core.utilities.install_dir import get_lib_path -from tribler.core.utilities.osutils import get_home_dir from tribler.core.utilities.path_util import Path from tribler.core.utilities.utilities import bdecode_compat @@ -61,12 +60,7 @@ def from_defaults(settings: DownloadDefaultsSettings, state_dir=None): config.set_hops(settings.number_hops) config.set_safe_seeding(settings.safeseeding_enabled) - - destination_directory = settings.saveas - if destination_directory is None: - destination_directory = get_default_dest_dir() - - config.set_dest_dir(destination_directory) + config.set_dest_dir(settings.saveas) return config @@ -91,7 +85,7 @@ def get_dest_dir(self): """ dest_dir = self.config['download_defaults']['saveas'] if not dest_dir: - dest_dir = get_default_dest_dir() + dest_dir = get_default_download_dir() self.set_dest_dir(dest_dir) # This is required to support relative paths @@ -176,19 +170,3 @@ def set_engineresumedata(self, engineresumedata: Dict): def get_engineresumedata(self) -> Optional[Dict]: return _to_dict(self.config['state']['engineresumedata']) - - -def get_default_dest_dir(): - """ - Returns the default dir to save content to. 
- """ - tribler_downloads = Path("TriblerDownloads") - if tribler_downloads.is_dir(): - return tribler_downloads.resolve() - - home = get_home_dir() - downloads = home / "Downloads" - if downloads.is_dir(): - return (downloads / tribler_downloads).resolve() - - return (home / tribler_downloads).resolve() diff --git a/src/tribler/core/components/libtorrent/restapi/downloads_endpoint.py b/src/tribler/core/components/libtorrent/restapi/downloads_endpoint.py index 862c8c7c9ce..2fb61f35440 100644 --- a/src/tribler/core/components/libtorrent/restapi/downloads_endpoint.py +++ b/src/tribler/core/components/libtorrent/restapi/downloads_endpoint.py @@ -517,8 +517,7 @@ async def update_download(self, request): return RESTResponse({"error": "index out of range"}, status=HTTP_BAD_REQUEST) download.set_selected_files(selected_files_list) - if parameters.get('state'): - state = parameters['state'] + if state := parameters.get('state'): if state == "resume": download.resume() elif state == "stop": diff --git a/src/tribler/core/components/libtorrent/settings.py b/src/tribler/core/components/libtorrent/settings.py index ba86d929782..9dad6a1337a 100644 --- a/src/tribler/core/components/libtorrent/settings.py +++ b/src/tribler/core/components/libtorrent/settings.py @@ -5,6 +5,10 @@ from tribler.core.config.tribler_config_section import TriblerConfigSection from tribler.core.utilities.network_utils import NetworkUtils +from tribler.core.utilities.osutils import get_home_dir +from tribler.core.utilities.path_util import Path + +TRIBLER_DOWNLOADS_DEFAULT = "TriblerDownloads" # pylint: disable=no-self-argument @@ -47,11 +51,31 @@ class SeedingMode(str, Enum): time = 'time' +def get_default_download_dir(home: Optional[Path] = None, tribler_downloads_name=TRIBLER_DOWNLOADS_DEFAULT) -> Path: + """ + Returns the default dir to save content to. 
+ Could be one of: + - TriblerDownloads + - $HOME/Downloads/TriblerDownloads + - $HOME/TriblerDownloads + """ + path = Path(tribler_downloads_name) + if path.is_dir(): + return path.resolve() + + home = home or get_home_dir() + downloads = home / "Downloads" + if downloads.is_dir(): + return downloads.resolve() / tribler_downloads_name + + return home.resolve() / tribler_downloads_name + + class DownloadDefaultsSettings(TriblerConfigSection): anonymity_enabled: bool = True number_hops: int = 1 safeseeding_enabled: bool = True - saveas: Optional[str] = None + saveas: str = str(get_default_download_dir()) seeding_mode: SeedingMode = SeedingMode.forever seeding_ratio: float = 2.0 seeding_time: float = 60 diff --git a/src/tribler/core/components/libtorrent/tests/test_download.py b/src/tribler/core/components/libtorrent/tests/test_download.py index b79adedac6c..5e48413dd6d 100644 --- a/src/tribler/core/components/libtorrent/tests/test_download.py +++ b/src/tribler/core/components/libtorrent/tests/test_download.py @@ -9,6 +9,7 @@ from tribler.core.components.libtorrent.download_manager.download import Download from tribler.core.components.libtorrent.download_manager.download_config import DownloadConfig +from tribler.core.components.libtorrent.torrentdef import TorrentDefNoMetainfo from tribler.core.components.libtorrent.utils.torrent_utils import get_info_from_handle from tribler.core.components.reporter.exception_handler import NoCrashException from tribler.core.exceptions import SaveResumeDataError @@ -409,6 +410,21 @@ def test_on_state_changed(mock_handle, test_download): test_download.apply_ip_filter.assert_called_with(False) +async def test_apply_ip_filter(test_download, mock_handle): # pylint: disable=unused-argument + test_download.handle.status = lambda: Mock(error=None) + test_download.tdef.get_infohash = lambda: b'a' * 20 + test_download.config.set_hops(1) + + assert not isinstance(test_download.tdef, TorrentDefNoMetainfo) + await test_download.apply_ip_filter(True) + test_download.handle.apply_ip_filter.assert_called_with(True) + + test_download.tdef = TorrentDefNoMetainfo(b'a' * 20, 'metainfo request') + test_download.handle.reset_mock() + test_download.apply_ip_filter(False) + test_download.handle.apply_ip_filter.assert_not_called() + + async def test_checkpoint_timeout(test_download): """ Testing whether making a checkpoint times out when we receive no alert from libtorrent diff --git a/src/tribler/core/components/libtorrent/tests/test_download_config.py b/src/tribler/core/components/libtorrent/tests/test_download_config.py index f98bf78fa9c..34458ea6cb5 100644 --- a/src/tribler/core/components/libtorrent/tests/test_download_config.py +++ b/src/tribler/core/components/libtorrent/tests/test_download_config.py @@ -3,8 +3,7 @@ import pytest from configobj import ConfigObjError -from tribler.core.components.libtorrent.download_manager.download_config import DownloadConfig, _from_dict, _to_dict, \ - get_default_dest_dir +from tribler.core.components.libtorrent.download_manager.download_config import DownloadConfig, _from_dict, _to_dict from tribler.core.tests.tools.common import TESTS_DATA_DIR CONFIG_FILES_DIR = TESTS_DATA_DIR / "config_files" @@ -52,10 +51,6 @@ def test_download_load_corrupt(download_config): download_config.load(CONFIG_FILES_DIR / "corrupt_download_config.conf") -def test_get_default_dest_dir(): - assert isinstance(get_default_dest_dir(), Path) - - def test_default_download_config_load(tmpdir): with open(tmpdir / "dlconfig.conf", 'wb') as conf_file: 
conf_file.write(b"[Tribler]\nabc=def") diff --git a/src/tribler/core/components/libtorrent/tests/test_settings.py b/src/tribler/core/components/libtorrent/tests/test_settings.py new file mode 100644 index 00000000000..075acbeec6f --- /dev/null +++ b/src/tribler/core/components/libtorrent/tests/test_settings.py @@ -0,0 +1,38 @@ +from tribler.core.components.libtorrent.settings import TRIBLER_DOWNLOADS_DEFAULT, get_default_download_dir +from tribler.core.utilities.path_util import Path + + +def test_get_default_download_dir_exists(tmp_path, monkeypatch): + # Test the case when the default download dir exists. Then it should be returned as is. + # Historically, the default download dir was 'TriblerDownloads' + monkeypatch.chdir(tmp_path) + + downloads = Path(TRIBLER_DOWNLOADS_DEFAULT) + downloads.mkdir() + + actual = get_default_download_dir(home=Path("home")) + assert actual == downloads.resolve() + + +def test_get_default_home_download_dir_exists(tmp_path, monkeypatch): + # Test the case when the `$HOME/Downloads` dir exists. Then it should return default dir + # as `$HOME/Downloads/TriblerDownloads` + monkeypatch.chdir(tmp_path) + + home = Path("home") + downloads = home / "Downloads" + downloads.mkdir(parents=True) + + download_dir = get_default_download_dir(home) + assert download_dir == (downloads / TRIBLER_DOWNLOADS_DEFAULT).resolve() + + +def test_get_default_home_nothing_exists(tmp_path, monkeypatch): + # Test the case when neither `$HOME/Downloads` nor `TriblerDownloads` dir exists. + # Then it should return default dir as `$HOME/TriblerDownloads` + monkeypatch.chdir(tmp_path) + + home = Path("home") + + download_dir = get_default_download_dir(home) + assert download_dir == (home / TRIBLER_DOWNLOADS_DEFAULT).resolve() diff --git a/src/tribler/core/components/libtorrent/torrentdef.py b/src/tribler/core/components/libtorrent/torrentdef.py index d4b68a3c3e5..34ccbb2371b 100644 --- a/src/tribler/core/components/libtorrent/torrentdef.py +++ b/src/tribler/core/components/libtorrent/torrentdef.py @@ -7,7 +7,7 @@ import aiohttp from tribler.core.components.libtorrent.utils.libtorrent_helper import libtorrent as lt -from tribler.core.components.libtorrent.utils.torrent_utils import create_torrent_file +from tribler.core.components.libtorrent.utils import torrent_utils from tribler.core.utilities import maketorrent, path_util from tribler.core.utilities.path_util import Path from tribler.core.utilities.simpledefs import INFOHASH_LENGTH @@ -317,7 +317,8 @@ def save(self, torrent_filepath=None): Generate the metainfo and save the torrent file. :param torrent_filepath: An optional absolute path to where to save the generated .torrent file. 
""" - torrent_dict = create_torrent_file(self.files_list, self.torrent_parameters, torrent_filepath=torrent_filepath) + torrent_dict = torrent_utils.create_torrent_file(self.files_list, self.torrent_parameters, + torrent_filepath=torrent_filepath) self.metainfo = bdecode_compat(torrent_dict['metainfo']) self.copy_metainfo_to_torrent_parameters() self.infohash = torrent_dict['infohash'] diff --git a/src/tribler/core/components/libtorrent/utils/torrent_utils.py b/src/tribler/core/components/libtorrent/utils/torrent_utils.py index ec9634750e2..b969709c0f8 100644 --- a/src/tribler/core/components/libtorrent/utils/torrent_utils.py +++ b/src/tribler/core/components/libtorrent/utils/torrent_utils.py @@ -4,6 +4,7 @@ from hashlib import sha1 from typing import Any, Dict, Iterable, List, Optional +from tribler.core.components.libtorrent import torrentdef from tribler.core.components.libtorrent.utils.libtorrent_helper import libtorrent as lt from tribler.core.utilities.path_util import Path @@ -33,14 +34,18 @@ def require_handle(func): Invoke the function once the handle is available. Returns a future that will fire once the function has completed. Author(s): Egbert Bouman """ - def invoke_func(*args, **kwargs): result_future = Future() def done_cb(fut): with suppress(CancelledError): handle = fut.result() - if not fut.cancelled() and not result_future.done() and handle == download.handle and handle.is_valid(): + + if not fut.cancelled() \ + and not result_future.done() \ + and handle == download.handle \ + and handle.is_valid() \ + and not isinstance(download.tdef, torrentdef.TorrentDefNoMetainfo): result_future.set_result(func(*args, **kwargs)) download = args[0] diff --git a/src/tribler/core/components/metadata_store/db/orm_bindings/torrent_metadata.py b/src/tribler/core/components/metadata_store/db/orm_bindings/torrent_metadata.py index f30bcd5a393..fe76814cd4f 100644 --- a/src/tribler/core/components/metadata_store/db/orm_bindings/torrent_metadata.py +++ b/src/tribler/core/components/metadata_store/db/orm_bindings/torrent_metadata.py @@ -143,7 +143,7 @@ def to_simple_dict(self): "num_seeders": self.health.seeders, "num_leechers": self.health.leechers, "last_tracker_check": self.health.last_check, - "updated": int((self.torrent_date - epoch).total_seconds()), + "created": int((self.torrent_date - epoch).total_seconds()), "tag_processor_version": self.tag_processor_version, } ) diff --git a/src/tribler/core/components/metadata_store/metadata_store_component.py b/src/tribler/core/components/metadata_store/metadata_store_component.py index 477d682480b..4c16e328c6e 100644 --- a/src/tribler/core/components/metadata_store/metadata_store_component.py +++ b/src/tribler/core/components/metadata_store/metadata_store_component.py @@ -1,7 +1,7 @@ from tribler.core import notifications from tribler.core.components.component import Component from tribler.core.components.key.key_component import KeyComponent -from tribler.core.components.knowledge.rules.tag_rules_processor import KnowledgeRulesProcessor +from tribler.core.components.knowledge.rules.knowledge_rules_processor import KnowledgeRulesProcessor from tribler.core.components.metadata_store.db.store import MetadataStore from tribler.core.utilities.simpledefs import STATEDIR_DB_DIR diff --git a/src/tribler/core/components/metadata_store/restapi/metadata_endpoint_base.py b/src/tribler/core/components/metadata_store/restapi/metadata_endpoint_base.py index 446a7e20a0f..77f0b6e18e3 100644 --- 
a/src/tribler/core/components/metadata_store/restapi/metadata_endpoint_base.py +++ b/src/tribler/core/components/metadata_store/restapi/metadata_endpoint_base.py @@ -4,7 +4,7 @@ from pony.orm import db_session from tribler.core.components.knowledge.db.knowledge_db import KnowledgeDatabase, ResourceType -from tribler.core.components.knowledge.rules.tag_rules_processor import KnowledgeRulesProcessor +from tribler.core.components.knowledge.rules.knowledge_rules_processor import KnowledgeRulesProcessor from tribler.core.components.metadata_store.category_filter.family_filter import default_xxx_filter from tribler.core.components.metadata_store.db.serialization import CHANNEL_TORRENT, COLLECTION_NODE, REGULAR_TORRENT from tribler.core.components.metadata_store.db.store import MetadataStore @@ -19,7 +19,7 @@ 'size': "size", 'infohash': "infohash", 'date': "torrent_date", - 'updated': "torrent_date", + 'created': "torrent_date", 'status': 'status', 'torrents': 'num_entries', 'votes': 'votes', diff --git a/src/tribler/core/components/reporter/exception_handler.py b/src/tribler/core/components/reporter/exception_handler.py index 1c6dcf91f28..e65824f8e5c 100644 --- a/src/tribler/core/components/reporter/exception_handler.py +++ b/src/tribler/core/components/reporter/exception_handler.py @@ -7,7 +7,7 @@ from traceback import print_exception from typing import Callable, Optional -from tribler.core.components.component import ComponentStartupException +from tribler.core.components.exceptions import ComponentStartupException from tribler.core.components.reporter.reported_error import ReportedError from tribler.core.sentry_reporter.sentry_reporter import SentryReporter from tribler.core.utilities.process_manager import get_global_process_manager @@ -110,7 +110,10 @@ def unhandled_error_observer(self, _, context): long_text=long_text, context=str(context), event=self.sentry_reporter.event_from_exception(exception) or {}, - should_stop=should_stop + should_stop=should_stop, + # `additional_information` should be converted to dict + # see: https://github.com/python/cpython/pull/32056 + additional_information=dict(self.sentry_reporter.additional_information) ) self.logger.error(f"Unhandled exception occurred! 
{reported_error}\n{reported_error.long_text}") if process_manager: diff --git a/src/tribler/core/components/reporter/reported_error.py b/src/tribler/core/components/reporter/reported_error.py index f508a8f8f40..3968f8c0aff 100644 --- a/src/tribler/core/components/reporter/reported_error.py +++ b/src/tribler/core/components/reporter/reported_error.py @@ -7,6 +7,7 @@ class ReportedError: type: str text: str event: dict = field(repr=False) + additional_information: dict = field(default_factory=lambda: {}, repr=False) long_text: str = field(default='', repr=False) context: str = field(default='', repr=False) diff --git a/src/tribler/core/components/reporter/tests/test_exception_handler.py b/src/tribler/core/components/reporter/tests/test_exception_handler.py index 1835d0040b9..0f936535520 100644 --- a/src/tribler/core/components/reporter/tests/test_exception_handler.py +++ b/src/tribler/core/components/reporter/tests/test_exception_handler.py @@ -1,3 +1,4 @@ +from dataclasses import asdict from socket import gaierror from unittest.mock import MagicMock, Mock, patch @@ -91,6 +92,7 @@ def test_unhandled_error_observer_only_message(exception_handler): assert not reported_error.event assert reported_error.context == '{}' assert reported_error.should_stop + assert reported_error.additional_information == {} def test_unhandled_error_observer_store_unreported_error(exception_handler): @@ -106,6 +108,18 @@ def test_unhandled_error_observer_false_should_stop(exception_handler): assert exception_handler.unreported_error.should_stop is False +def test_unhandled_error_observer_additional_information(exception_handler): + # test that additional information is passed to the `report_callback` + exception_handler.report_callback = MagicMock() + exception_handler.sentry_reporter.additional_information['a'] = 1 + exception_handler.unhandled_error_observer(None, {}) + + reported_error = exception_handler.report_callback.call_args_list[-1][0][0] + + assert reported_error.additional_information == {'a': 1} + assert asdict(reported_error) # default dict could produce TypeError: first argument must be callable or None + + def test_unhandled_error_observer_ignored(exception_handler): # test that exception from list IGNORED_ERRORS_BY_CODE never sends to the GUI context = {'exception': OSError(113, '')} diff --git a/src/tribler/core/components/restapi/rest/aiohttp_patch.py b/src/tribler/core/components/restapi/rest/aiohttp_patch.py new file mode 100644 index 00000000000..e63709059f7 --- /dev/null +++ b/src/tribler/core/components/restapi/rest/aiohttp_patch.py @@ -0,0 +1,64 @@ +from asyncio import CancelledError, Task +from threading import Lock +from typing import Type + +from aiohttp import StreamReader +from aiohttp.abc import AbstractStreamWriter +from aiohttp.http_parser import RawRequestMessage +from aiohttp.web_app import Application +from aiohttp.web_protocol import RequestHandler +from aiohttp.web_request import Request + + +transport_is_none_counter = 0 +counter_lock = Lock() + + +def increment_transport_is_none_counter(): + global transport_is_none_counter # pylint: disable=global-statement + with counter_lock: + transport_is_none_counter += 1 + + +def get_transport_is_none_counter() -> int: + with counter_lock: + return transport_is_none_counter + + +def patch_make_request(cls: Type[Application]) -> bool: + # This function monkey-patches a bug in the aiohttp library, see #7344 and aio-libs/aiohttp#7258. 
+ # The essence of the bug is that the `aiohttp.web_protocol.RequestHandler.start()` coroutine erroneously continues + # to run after a connection was closed from the client side, the transport was closed, and None was assigned + # to `self.transport`. Then the `start` coroutine calls `self._make_request(...)`, which in turn creates + # an `aiohttp.web_request.Request` instance, and it has `assert transport is not None` in its constructor. + # + # To fix the bug, the monkey-patched `_make_request` method first checks if the `self.transport is None`, and if so, + # it raises the `CancelledError` exception to cancel the erroneously working `RequestHandler.start` coroutine. + # + # Additionally, the new `_make_request` method increases the counter of cases when the transport was None + # to allow gathering some statistics on how often this situation happens + + original_make_request = cls._make_request # pylint: disable=protected-access + if getattr(original_make_request, 'patched', False): + return False + + def new_make_request( + self, + message: RawRequestMessage, + payload: StreamReader, + protocol: RequestHandler, + writer: AbstractStreamWriter, + task: Task, + _cls: Type[Request] = Request, + ) -> Request: + if protocol.transport is None: + increment_transport_is_none_counter() + raise CancelledError + + return original_make_request( + self, message=message, payload=payload, protocol=protocol, writer=writer, task=task, _cls=_cls + ) + + new_make_request.patched = True + cls._make_request = new_make_request # pylint: disable=protected-access + return True diff --git a/src/tribler/core/components/restapi/rest/events_endpoint.py b/src/tribler/core/components/restapi/rest/events_endpoint.py index 48513b7a3bc..431435933c7 100644 --- a/src/tribler/core/components/restapi/rest/events_endpoint.py +++ b/src/tribler/core/components/restapi/rest/events_endpoint.py @@ -1,15 +1,15 @@ import asyncio import json import time -from asyncio import CancelledError +from asyncio import CancelledError, Queue from dataclasses import asdict -from typing import List, Optional +from typing import Any, Dict, List, Optional +import marshmallow.fields from aiohttp import web from aiohttp_apispec import docs from ipv8.REST.schema import schema from ipv8.messaging.anonymization.tunnel import Circuit -from marshmallow.fields import Dict, String from tribler.core import notifications from tribler.core.components.reporter.reported_error import ReportedError @@ -37,6 +37,8 @@ def passthrough(x): notifications.report_config_error, ] +MessageDict = Dict[str, Any] + @froze_it class EventsEndpoint(RESTEndpoint): @@ -49,16 +51,17 @@ class EventsEndpoint(RESTEndpoint): def __init__(self, notifier: Notifier, public_key: str = None): super().__init__() self.events_responses: List[RESTStreamResponse] = [] - self.undelivered_error: Optional[dict] = None + self.undelivered_error: Optional[MessageDict] = None self.public_key = public_key self.notifier = notifier + self.queue = Queue() + self.async_group.add_task(self.process_queue()) notifier.add_observer(notifications.circuit_removed, self.on_circuit_removed) notifier.add_generic_observer(self.on_notification) def on_notification(self, topic, *args, **kwargs): if topic in topics_to_send_to_gui: - data = {"topic": topic.__name__, "args": args, "kwargs": kwargs} - self.async_group.add_task(self.write_data(data)) + self.send_event({"topic": topic.__name__, "args": args, "kwargs": kwargs}) def on_circuit_removed(self, circuit: Circuit, additional_info: str): # The original 
notification contains non-JSON-serializable argument, so we send another one to GUI @@ -75,19 +78,19 @@ async def shutdown(self): def setup_routes(self): self.app.add_routes([web.get('', self.get_events)]) - def initial_message(self) -> dict: + def initial_message(self) -> MessageDict: return { "topic": notifications.events_start.__name__, "kwargs": {"public_key": self.public_key, "version": version_id} } - def error_message(self, reported_error: ReportedError) -> dict: + def error_message(self, reported_error: ReportedError) -> MessageDict: return { "topic": notifications.tribler_exception.__name__, "kwargs": {"error": asdict(reported_error)}, } - def encode_message(self, message: dict) -> bytes: + def encode_message(self, message: MessageDict) -> bytes: try: message = json.dumps(message) except UnicodeDecodeError: @@ -96,32 +99,71 @@ def encode_message(self, message: dict) -> bytes: message = json.dumps(fix_unicode_dict(message)) return b'data: ' + message.encode('utf-8') + b'\n\n' - def has_connection_to_gui(self): + def has_connection_to_gui(self) -> bool: return bool(self.events_responses) - async def write_data(self, message): + def should_skip_message(self, message: MessageDict) -> bool: """ - Write data over the event socket if it's open. + Returns True if EventsEndpoint should skip sending message to GUI due to a shutdown or no connection to GUI. + Issue an appropriate warning if the message cannot be sent. """ + if self._shutdown: + self._logger.warning(f"Shutdown is in progress, skip message: {message}") + return True + if not self.has_connection_to_gui(): - return + self._logger.warning(f"No connections to GUI, skip message: {message}") + return True + + return False + + def send_event(self, message: MessageDict): + """ + Put event message to a queue to be sent to GUI + """ + if not self.should_skip_message(message): + self.queue.put_nowait(message) + + async def process_queue(self): + while True: + message = await self.queue.get() + if not self.should_skip_message(message): + await self._write_data(message) + + async def _write_data(self, message: MessageDict): + """ + Write data over the event socket if it's open. + """ + self._logger.debug(f'Write message: {message}') try: - self._logger.debug(f'Write message: {message}') message_bytes = self.encode_message(message) except Exception as e: # pylint: disable=broad-except # if a notification arguments contains non-JSON-serializable data, the exception should be logged self._logger.exception(e) return + processed_responses = [] for response in self.events_responses: - await response.write(message_bytes) + try: + await response.write(message_bytes) + # by creating the list with processed responses we want to remove responses with + # ConnectionResetError from `self.events_responses`: + processed_responses.append(response) + except ConnectionResetError as e: + # The connection was closed by GUI + self._logger.warning(e, exc_info=True) + self.events_responses = processed_responses # An exception has occurred in Tribler. The event includes a readable # string of the error and a Sentry event. 
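The `send_event`/`process_queue` pair introduced above replaces the old per-message `self.async_group.add_task(self.write_data(...))` calls with a single consumer draining an `asyncio.Queue`, so events are written to the stream one at a time and in order. A rough sketch of that producer/consumer shape follows; `EventSender`, `write`, and `main` are stand-in names, not the real endpoint code.

import asyncio
import contextlib


class EventSender:
    """Toy producer/consumer mirroring the queue-based EventsEndpoint flow."""

    def __init__(self):
        self.queue: asyncio.Queue = asyncio.Queue()

    def send_event(self, message: dict) -> None:
        # Producer side: synchronous callers only enqueue the message
        self.queue.put_nowait(message)

    async def process_queue(self) -> None:
        # Single consumer: messages are written one at a time, in order
        while True:
            message = await self.queue.get()
            await self.write(message)

    async def write(self, message: dict) -> None:
        # Stand-in for writing to the SSE stream responses
        print('writing', message)


async def main():
    sender = EventSender()
    consumer = asyncio.create_task(sender.process_queue())
    sender.send_event({'topic': 'demo', 'kwargs': {'n': 1}})
    await asyncio.sleep(0.1)  # let the consumer drain the queue
    consumer.cancel()
    with contextlib.suppress(asyncio.CancelledError):
        await consumer


asyncio.run(main())

Because `put_nowait` is synchronous, notification observers can enqueue events without being coroutines themselves.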
def on_tribler_exception(self, reported_error: ReportedError): + if self._shutdown: + self._logger.warning('Ignoring tribler exception, because the endpoint is shutting down.') + return + message = self.error_message(reported_error) if self.has_connection_to_gui(): - self.async_group.add_task(self.write_data(message)) + self.send_event(message) elif not self.undelivered_error: # If there are several undelivered errors, we store the first error as more important and skip other self.undelivered_error = message @@ -131,8 +173,7 @@ def on_tribler_exception(self, reported_error: ReportedError): summary="Open an EventStream for receiving Tribler events.", responses={ 200: { - "schema": schema(EventsResponse={'type': String, - 'event': Dict}) + "schema": schema(EventsResponse={'type': marshmallow.fields.String, 'event': marshmallow.fields.Dict}) } } ) diff --git a/src/tribler/core/components/restapi/rest/rest_endpoint.py b/src/tribler/core/components/restapi/rest/rest_endpoint.py index 7cdd653e2bd..fc0be1fcd08 100644 --- a/src/tribler/core/components/restapi/rest/rest_endpoint.py +++ b/src/tribler/core/components/restapi/rest/rest_endpoint.py @@ -6,12 +6,17 @@ from aiohttp import web +from tribler.core.components.restapi.rest.aiohttp_patch import patch_make_request from tribler.core.utilities.async_group.async_group import AsyncGroup if TYPE_CHECKING: from tribler.core.components.restapi.rest.events_endpoint import EventsEndpoint from ipv8.REST.root_endpoint import RootEndpoint as IPV8RootEndpoint + +patch_make_request(web.Application) + + HTTP_BAD_REQUEST = 400 HTTP_UNAUTHORIZED = 401 HTTP_NOT_FOUND = 404 diff --git a/src/tribler/core/components/restapi/rest/tests/test_events_endpoint.py b/src/tribler/core/components/restapi/rest/tests/test_events_endpoint.py index 8086b93b220..ec1cbdb6517 100644 --- a/src/tribler/core/components/restapi/rest/tests/test_events_endpoint.py +++ b/src/tribler/core/components/restapi/rest/tests/test_events_endpoint.py @@ -1,9 +1,12 @@ +import asyncio import json +import logging from asyncio import CancelledError, Event, create_task from contextlib import suppress -from unittest.mock import AsyncMock, MagicMock, patch +from unittest.mock import AsyncMock, MagicMock, Mock, patch import pytest +from _pytest.logging import LogCaptureFixture from aiohttp import ClientSession from tribler.core import notifications @@ -19,7 +22,7 @@ messages_to_wait_for = set() -# pylint: disable=redefined-outer-name +# pylint: disable=redefined-outer-name, protected-access @pytest.fixture(name='api_port') def fixture_api_port(free_port): @@ -31,8 +34,8 @@ def fixture_notifier(event_loop): return Notifier(loop=event_loop) -@pytest.fixture -async def endpoint(notifier): +@pytest.fixture(name='events_endpoint') +async def events_endpoint_fixture(notifier): events_endpoint = EventsEndpoint(notifier) yield events_endpoint @@ -45,12 +48,12 @@ def fixture_reported_error(): @pytest.fixture(name="rest_manager") -async def fixture_rest_manager(api_port, tmp_path, endpoint): +async def fixture_rest_manager(api_port, tmp_path, events_endpoint): config = TriblerConfig() config.api.http_enabled = True config.api.http_port = api_port root_endpoint = RootEndpoint(middlewares=[ApiKeyMiddleware(config.api.key), error_middleware]) - root_endpoint.add_endpoint('/events', endpoint) + root_endpoint.add_endpoint('/events', events_endpoint) rest_manager = RESTManager(config=config.api, root_endpoint=root_endpoint, state_dir=tmp_path) await rest_manager.start() @@ -114,52 +117,98 @@ async def 
test_events(rest_manager, notifier: Notifier): await event_socket_task -@patch.object(EventsEndpoint, 'write_data') +@patch.object(EventsEndpoint, '_write_data') @patch.object(EventsEndpoint, 'has_connection_to_gui', new=MagicMock(return_value=True)) -async def test_on_tribler_exception_has_connection_to_gui(mocked_write_data, endpoint, reported_error): +async def test_on_tribler_exception_has_connection_to_gui(mocked_write_data, events_endpoint, reported_error): # test that in case of established connection to GUI, `on_tribler_exception` will work - # as a normal endpoint function, that is call `write_data` - endpoint.on_tribler_exception(reported_error) + # as a normal events_endpoint function, that is call `_write_data` + events_endpoint.on_tribler_exception(reported_error) + await asyncio.sleep(0.01) mocked_write_data.assert_called_once() - assert not endpoint.undelivered_error + assert not events_endpoint.undelivered_error -@patch.object(EventsEndpoint, 'write_data') +@patch.object(EventsEndpoint, '_write_data') @patch.object(EventsEndpoint, 'has_connection_to_gui', new=MagicMock(return_value=False)) -async def test_on_tribler_exception_no_connection_to_gui(mocked_write_data, endpoint, reported_error): +async def test_on_tribler_exception_no_connection_to_gui(mocked_write_data, events_endpoint, reported_error): # test that if no connection to GUI, then `on_tribler_exception` will store # reported_error in `self.undelivered_error` - endpoint.on_tribler_exception(reported_error) + events_endpoint.on_tribler_exception(reported_error) mocked_write_data.assert_not_called() - assert endpoint.undelivered_error == endpoint.error_message(reported_error) + assert events_endpoint.undelivered_error == events_endpoint.error_message(reported_error) -@patch.object(EventsEndpoint, 'write_data', new=MagicMock()) +@patch.object(EventsEndpoint, '_write_data', new=MagicMock()) @patch.object(EventsEndpoint, 'has_connection_to_gui', new=MagicMock(return_value=False)) -async def test_on_tribler_exception_stores_only_first_error(endpoint, reported_error): +async def test_on_tribler_exception_stores_only_first_error(events_endpoint, reported_error): # test that if no connection to GUI, then `on_tribler_exception` will store # only the very first `reported_error` first_reported_error = reported_error - endpoint.on_tribler_exception(first_reported_error) + events_endpoint.on_tribler_exception(first_reported_error) second_reported_error = ReportedError('second_type', 'second_text', {}) - endpoint.on_tribler_exception(second_reported_error) + events_endpoint.on_tribler_exception(second_reported_error) - assert endpoint.undelivered_error == endpoint.error_message(first_reported_error) + assert events_endpoint.undelivered_error == events_endpoint.error_message(first_reported_error) @patch('asyncio.sleep', new=AsyncMock(side_effect=CancelledError)) @patch.object(RESTStreamResponse, 'prepare', new=AsyncMock()) @patch.object(RESTStreamResponse, 'write', new_callable=AsyncMock) @patch.object(EventsEndpoint, 'encode_message') -async def test_get_events_has_undelivered_error(mocked_encode_message, mocked_write, endpoint): +async def test_get_events_has_undelivered_error(mocked_encode_message, mocked_write, events_endpoint): # test that in case `self.undelivered_error` is not None, then it will be sent - endpoint.undelivered_error = {'undelivered': 'error'} + events_endpoint.undelivered_error = {'undelivered': 'error'} - await endpoint.get_events(MagicMock()) + await events_endpoint.get_events(MagicMock()) 
mocked_write.assert_called() mocked_encode_message.assert_called_with({'undelivered': 'error'}) - assert not endpoint.undelivered_error + assert not events_endpoint.undelivered_error + + +async def test_on_tribler_exception_shutdown(): + # test that `on_tribler_exception` will not send any error message if events_endpoint is shutting down + events_endpoint = EventsEndpoint(Mock()) + events_endpoint.error_message = Mock() + + await events_endpoint.shutdown() + + events_endpoint.on_tribler_exception(ReportedError('', '', {})) + + assert not events_endpoint.error_message.called + + +async def test_should_skip_message(events_endpoint): + assert not events_endpoint._shutdown and not events_endpoint.events_responses # pylint: disable=protected-access + message = Mock() + + # Initially the events endpoint is not in shutdown state, but it does not have any connection, + # so it should skip message as nobody is listen to it + assert events_endpoint.should_skip_message(message) + + with patch.object(events_endpoint, 'events_responses', new=[Mock()]): + # We add a mocked connection to GUI, and now the events endpoint should not skip a message + assert not events_endpoint.should_skip_message(message) + + with patch.object(events_endpoint, '_shutdown', new=True): + # But, if it is in shutdown state, it should always skip a message + assert events_endpoint.should_skip_message(message) + + +async def test_write_data(events_endpoint: EventsEndpoint, caplog: LogCaptureFixture): + # Test that write_data will call write methods for all responses, even if some of them could raise + # a ConnectionResetError exception. + + bad_response = AsyncMock(write=AsyncMock(side_effect=ConnectionResetError)) + good_response = AsyncMock() + + events_endpoint.events_responses = [bad_response, good_response] + await events_endpoint._write_data({'any': 'data'}) + + assert bad_response.write.called + assert good_response.write.called + last_log_record = caplog.records[-1] + assert last_log_record.levelno == logging.WARNING diff --git a/src/tribler/core/components/restapi/rest/tests/test_rest_manager.py b/src/tribler/core/components/restapi/rest/tests/test_rest_manager.py index 6c06b9d8917..a82c4867ad5 100644 --- a/src/tribler/core/components/restapi/rest/tests/test_rest_manager.py +++ b/src/tribler/core/components/restapi/rest/tests/test_rest_manager.py @@ -1,8 +1,12 @@ import shutil -from unittest.mock import patch +from asyncio import CancelledError +from unittest.mock import Mock, patch import pytest +from aiohttp import ServerDisconnectedError +from aiohttp.web_protocol import RequestHandler +from tribler.core.components.restapi.rest.aiohttp_patch import get_transport_is_none_counter, patch_make_request from tribler.core.components.restapi.rest.base_api_test import do_real_request from tribler.core.components.restapi.rest.rest_endpoint import HTTP_UNAUTHORIZED from tribler.core.components.restapi.rest.rest_manager import ApiKeyMiddleware, RESTManager, error_middleware @@ -12,18 +16,22 @@ from tribler.core.tests.tools.common import TESTS_DIR -@pytest.fixture() -def tribler_config(): +# pylint: disable=unused-argument # because the `rest_manager` argument is syntactically unused in tests +# pylint: disable=protected-access + + +@pytest.fixture(name='tribler_config') +def tribler_config_fixture(): return TriblerConfig() -@pytest.fixture() -def api_port(free_port): +@pytest.fixture(name='api_port') +def api_port_fixture(free_port): return free_port -@pytest.fixture -async def rest_manager(request, tribler_config, api_port, 
tmp_path): +@pytest.fixture(name='rest_manager') +async def rest_manager_fixture(request, tribler_config, api_port, tmp_path): config = tribler_config api_key_marker = request.node.get_closest_marker("api_key") if api_key_marker is not None: @@ -91,3 +99,55 @@ async def test_unhandled_exception(rest_manager, api_port): assert response_dict assert not response_dict['error']['handled'] assert response_dict['error']['code'] == "TypeError" + + +async def test_patch_make_request(): + app = Mock() + + app._make_request.patched = None # to avoid returning a mock when the patched attribute is accessed + + # The first call returns True as the patch is successful + assert patch_make_request(app) + make_request = app._make_request + assert make_request.patched + + # The second call is unsuccessful, as the Application class is already patched + assert not patch_make_request(app) + assert app._make_request is make_request + + +async def test_aiohttp_assertion_patched(rest_manager, api_port): + # The test checks that when the request handler is forced to close transport before calling + # `self._make_request(..)`, the monkey-patched version of `_make_request` handles this situation + # and increments the number of cases when the transport was None at that stage of the request processing. + original_start = RequestHandler.start + + async def new_start(self: RequestHandler) -> None: + original_request_factory = self._request_factory + + # it should be the monkey-patched version of `_make_request` + assert getattr(original_request_factory, 'patched', False) # should be True + + def new_request_factory(*args): + self.force_close() + return original_request_factory(*args) + + # monkey-patch it the second time and add the force closing of the transport + with patch.object(self, '_request_factory', new=new_request_factory): + + # the start method should correctly raise `CancelledError` in case the transport is force closed + with pytest.raises(CancelledError): + await original_start(self) + + raise CancelledError # re-raise it to propagate the exception further + + # the main monkey-patch should handle the closed transport and increment the counter during the request handling + counter_before = get_transport_is_none_counter() + + with pytest.raises(ServerDisconnectedError): + # The server is disconnected, because the transport is closed and the start() coroutine is canceled + with patch('aiohttp.web_protocol.RequestHandler.start', new=new_start): + await do_real_request(api_port, 'settings') + + counter_after = get_transport_is_none_counter() + assert counter_before + 1 == counter_after diff --git a/src/tribler/core/components/restapi/restapi_component.py b/src/tribler/core/components/restapi/restapi_component.py index 3aadb622e9a..b5b5335de4a 100644 --- a/src/tribler/core/components/restapi/restapi_component.py +++ b/src/tribler/core/components/restapi/restapi_component.py @@ -5,7 +5,8 @@ from tribler.core.components.bandwidth_accounting.bandwidth_accounting_component import BandwidthAccountingComponent from tribler.core.components.bandwidth_accounting.restapi.bandwidth_endpoint import BandwidthEndpoint -from tribler.core.components.component import Component, NoneComponent +from tribler.core.components.component import Component +from tribler.core.components.exceptions import NoneComponent from tribler.core.components.gigachannel.gigachannel_component import GigaChannelComponent from tribler.core.components.gigachannel_manager.gigachannel_manager_component import GigachannelManagerComponent from 
tribler.core.components.ipv8.ipv8_component import Ipv8Component diff --git a/src/tribler/core/components/restapi/tests/test_restapi_component.py b/src/tribler/core/components/restapi/tests/test_restapi_component.py index 32f08f49984..177e22f85c5 100644 --- a/src/tribler/core/components/restapi/tests/test_restapi_component.py +++ b/src/tribler/core/components/restapi/tests/test_restapi_component.py @@ -3,7 +3,7 @@ import pytest from tribler.core.components.bandwidth_accounting.bandwidth_accounting_component import BandwidthAccountingComponent -from tribler.core.components.component import NoneComponent +from tribler.core.components.exceptions import NoneComponent from tribler.core.components.gigachannel.gigachannel_component import GigaChannelComponent from tribler.core.components.ipv8.ipv8_component import Ipv8Component from tribler.core.components.key.key_component import KeyComponent diff --git a/src/tribler/core/components/session.py b/src/tribler/core/components/session.py index f7cefce5763..2d2e3a1c5f6 100644 --- a/src/tribler/core/components/session.py +++ b/src/tribler/core/components/session.py @@ -7,9 +7,11 @@ from pathlib import Path from typing import Dict, List, Optional, Type, TypeVar -from tribler.core.components.component import Component, ComponentError, ComponentStartupException, \ - MultipleComponentsFound +from tribler.core.components.component import Component +from tribler.core.components.exceptions import ComponentError, ComponentStartupException, MultipleComponentsFound +from tribler.core.components.reporter.exception_handler import default_core_exception_handler from tribler.core.config.tribler_config import TriblerConfig +from tribler.core.sentry_reporter.sentry_reporter import SentryReporter from tribler.core.utilities.async_group.async_group import AsyncGroup from tribler.core.utilities.crypto_patcher import patch_crypto_be_discovery from tribler.core.utilities.install_dir import get_lib_path @@ -25,8 +27,8 @@ class SessionError(Exception): class Session: _startup_exception: Optional[Exception] = None - def __init__(self, config: TriblerConfig = None, components: List[Component] = (), - shutdown_event: Event = None, notifier: Notifier = None, failfast: bool = True): + def __init__(self, config: TriblerConfig = None, components: List[Component] = (), shutdown_event: Event = None, + notifier: Notifier = None, failfast: bool = True, reporter: Optional[SentryReporter] = None): # deepcode ignore unguarded~next~call: not necessary to catch StopIteration on infinite iterator self.exit_code = None self.failfast = failfast @@ -36,6 +38,7 @@ def __init__(self, config: TriblerConfig = None, components: List[Component] = ( self.notifier: Notifier = notifier or Notifier(loop=get_event_loop()) self.async_group = AsyncGroup() self.components: Dict[Type[Component], Component] = {} + self.reporter = reporter or default_core_exception_handler.sentry_reporter for component in components: self.register(component.__class__, component) diff --git a/src/tribler/core/components/tests/test_base_component.py b/src/tribler/core/components/tests/test_base_component.py index a16576329bb..49cfd7a0740 100644 --- a/src/tribler/core/components/tests/test_base_component.py +++ b/src/tribler/core/components/tests/test_base_component.py @@ -1,6 +1,7 @@ import pytest -from tribler.core.components.component import Component, MissedDependency, MultipleComponentsFound, NoneComponent +from tribler.core.components.component import Component +from tribler.core.components.exceptions import 
MissedDependency, MultipleComponentsFound, NoneComponent from tribler.core.components.session import Session from tribler.core.config.tribler_config import TriblerConfig diff --git a/src/tribler/core/config/tests/test_tribler_config.py b/src/tribler/core/config/tests/test_tribler_config.py index 82a47cccd1a..d8fb6bcba80 100644 --- a/src/tribler/core/config/tests/test_tribler_config.py +++ b/src/tribler/core/config/tests/test_tribler_config.py @@ -75,6 +75,11 @@ def test_load_write_nonascii(tmpdir): assert config.file == tmpdir / filename +def test_load_default_saveas(tmpdir): + config = TriblerConfig(state_dir=tmpdir) + assert config.download_defaults.saveas + + def test_copy(tmpdir): config = TriblerConfig(state_dir=tmpdir, file=tmpdir / '1.txt') config.api.http_port = 42 diff --git a/src/tribler/core/conftest.py b/src/tribler/core/conftest.py index 1af3e7f4cb7..e924ba63737 100644 --- a/src/tribler/core/conftest.py +++ b/src/tribler/core/conftest.py @@ -1,4 +1,5 @@ import asyncio +import logging import os import platform import sys @@ -32,6 +33,11 @@ sys.set_coroutine_origin_tracking_depth(10) +def pytest_configure(config): # pylint: disable=unused-argument + # Disable logging from faker for all tests + logging.getLogger('faker.factory').propagate = False + + @pytest.fixture(name="tribler_root_dir") def _tribler_root_dir(tmp_path): return Path(tmp_path) diff --git a/src/tribler/core/sentry_reporter/sentry_reporter.py b/src/tribler/core/sentry_reporter/sentry_reporter.py index 319ad9aa882..1a15670ad26 100644 --- a/src/tribler/core/sentry_reporter/sentry_reporter.py +++ b/src/tribler/core/sentry_reporter/sentry_reporter.py @@ -1,11 +1,12 @@ import logging import os import sys +from collections import defaultdict from contextlib import contextmanager from contextvars import ContextVar from enum import Enum, auto from hashlib import md5 -from typing import Dict, List, Optional +from typing import Any, Dict, List, Optional import sentry_sdk from faker import Faker @@ -15,17 +16,28 @@ from tribler.core import version from tribler.core.sentry_reporter.sentry_tools import ( delete_item, - extract_dict, get_first_item, get_last_item, get_value, parse_last_core_output, parse_os_environ, parse_stacktrace, ) -# fmt: off - +VALUE = 'value' +TYPE = 'type' +LAST_CORE_OUTPUT = 'last_core_output' +LAST_PROCESSES = 'last_processes' +PLATFORM = 'platform' +OS = 'os' +MACHINE = 'machine' +COMMENTS = 'comments' +TRIBLER = 'Tribler' +NAME = 'name' +VERSION = 'version' +BROWSER = 'browser' PLATFORM_DETAILS = 'platform.details' STACKTRACE = '_stacktrace' +STACKTRACE_EXTRA = f'{STACKTRACE}_extra' +STACKTRACE_CONTEXT = f'{STACKTRACE}_context' SYSINFO = 'sysinfo' OS_ENVIRON = 'os.environ' SYS_ARGV = 'sys.argv' @@ -38,6 +50,7 @@ VALUES = 'values' RELEASE = 'release' EXCEPTION = 'exception' +ADDITIONAL_INFORMATION = 'additional_information' class SentryStrategy(Enum): @@ -77,6 +90,7 @@ def __init__(self): # SentryReporter.get_actual_strategy() self.global_strategy = SentryStrategy.SEND_ALLOWED_WITH_CONFIRMATION self.thread_strategy = ContextVar('context_strategy', default=None) + self.additional_information = defaultdict(dict) # dict that will be added to a Sentry event self._sentry_logger_name = 'SentryReporter' self._logger = logging.getLogger(self._sentry_logger_name) @@ -147,7 +161,7 @@ def add_breadcrumb(self, message='', category='', level='info', **kwargs): return sentry_sdk.add_breadcrumb(crumb, **kwargs) def send_event(self, event: Dict = None, post_data: Dict = None, sys_info: Dict = None, - 
additional_tags: List[str] = None, last_core_output: Optional[str] = None, + additional_tags: Dict[str, Any] = None, last_core_output: Optional[str] = None, last_processes: List[str] = None): """Send the event to the Sentry server @@ -193,47 +207,47 @@ def send_event(self, event: Dict = None, post_data: Dict = None, sys_info: Dict # tags tags = event[TAGS] - tags['version'] = get_value(post_data, 'version') - tags['machine'] = get_value(post_data, 'machine') - tags['os'] = get_value(post_data, 'os') - tags['platform'] = get_first_item(get_value(sys_info, 'platform')) - tags[f'{PLATFORM_DETAILS}'] = get_first_item(get_value(sys_info, PLATFORM_DETAILS)) + tags[VERSION] = get_value(post_data, VERSION) + tags[MACHINE] = get_value(post_data, MACHINE) + tags[OS] = get_value(post_data, OS) + tags[PLATFORM] = get_first_item(get_value(sys_info, PLATFORM)) + tags[PLATFORM_DETAILS] = get_first_item(get_value(sys_info, PLATFORM_DETAILS)) tags.update(additional_tags) # context context = event[CONTEXTS] reporter = context[REPORTER] - version = get_value(post_data, 'version') + tribler_version = get_value(post_data, VERSION) - context['browser'] = {'version': version, 'name': 'Tribler'} + context[BROWSER] = {VERSION: tribler_version, NAME: TRIBLER} stacktrace_parts = parse_stacktrace(get_value(post_data, 'stack')) reporter[STACKTRACE] = next(stacktrace_parts, []) stacktrace_extra = next(stacktrace_parts, []) - reporter[f'{STACKTRACE}_extra'] = stacktrace_extra - reporter[f'{STACKTRACE}_context'] = next(stacktrace_parts, []) + reporter[STACKTRACE_EXTRA] = stacktrace_extra + reporter[STACKTRACE_CONTEXT] = next(stacktrace_parts, []) - reporter['comments'] = get_value(post_data, 'comments') + reporter[COMMENTS] = get_value(post_data, COMMENTS) reporter[OS_ENVIRON] = parse_os_environ(get_value(sys_info, OS_ENVIRON)) delete_item(sys_info, OS_ENVIRON) - reporter['events'] = extract_dict(sys_info, r'^(event|request)') - reporter[SYSINFO] = {key: sys_info[key] for key in sys_info if key not in reporter['events']} - + reporter[SYSINFO] = sys_info if last_processes: - reporter['last_processes'] = last_processes + reporter[LAST_PROCESSES] = last_processes + + reporter[ADDITIONAL_INFORMATION] = self.additional_information # try to retrieve an error from the last_core_output if last_core_output: # split for better representation in the web view - reporter['last_core_output'] = last_core_output.split('\n') + reporter[LAST_CORE_OUTPUT] = last_core_output.split('\n') if last_core_exception := parse_last_core_output(last_core_output): exceptions = event.get(EXCEPTION, {}) gui_exception = get_last_item(exceptions.get(VALUES, []), {}) # create a core exception extracted from the last core output - core_exception = {'type': last_core_exception.type, 'value': last_core_exception.message} + core_exception = {TYPE: last_core_exception.type, VALUE: last_core_exception.message} # remove the stacktrace field as it doesn't give any useful information for the further investigation delete_item(gui_exception, 'stacktrace') diff --git a/src/tribler/core/sentry_reporter/tests/test_sentry_reporter.py b/src/tribler/core/sentry_reporter/tests/test_sentry_reporter.py index 6c364262d2d..9a2e6c5aae3 100644 --- a/src/tribler/core/sentry_reporter/tests/test_sentry_reporter.py +++ b/src/tribler/core/sentry_reporter/tests/test_sentry_reporter.py @@ -1,17 +1,34 @@ +from copy import deepcopy from unittest.mock import MagicMock, Mock, patch import pytest from tribler.core.sentry_reporter.sentry_reporter import ( - OS_ENVIRON, - PLATFORM_DETAILS, - 
SentryReporter, + ADDITIONAL_INFORMATION, BROWSER, COMMENTS, CONTEXTS, LAST_CORE_OUTPUT, MACHINE, NAME, OS, OS_ENVIRON, + PLATFORM, PLATFORM_DETAILS, + REPORTER, STACKTRACE, STACKTRACE_CONTEXT, STACKTRACE_EXTRA, SYSINFO, SentryReporter, SentryStrategy, - this_sentry_strategy, + TAGS, TRIBLER, TYPE, VALUE, VERSION, this_sentry_strategy, ) from tribler.core.sentry_reporter.sentry_scrubber import SentryScrubber from tribler.core.utilities.patch_import import patch_import +DEFAULT_EVENT = { + CONTEXTS: { + BROWSER: {NAME: TRIBLER, VERSION: None}, + REPORTER: { + STACKTRACE: [], + STACKTRACE_CONTEXT: [], + STACKTRACE_EXTRA: [], + COMMENTS: None, + OS_ENVIRON: {}, + SYSINFO: {}, + ADDITIONAL_INFORMATION: {}, + }, + }, + TAGS: {MACHINE: None, OS: None, PLATFORM: None, PLATFORM_DETAILS: None, VERSION: None}, +} + # pylint: disable=redefined-outer-name, protected-access @@ -119,7 +136,7 @@ def test_get_actual_strategy(sentry_reporter): assert sentry_reporter.get_actual_strategy() == SentryStrategy.SEND_ALLOWED_WITH_CONFIRMATION -@patch('os.environ', {}) +@patch(OS_ENVIRON, {}) def test_get_sentry_url_not_specified(): assert not SentryReporter.get_sentry_url() @@ -129,18 +146,18 @@ def test_get_sentry_url_from_version_file(): assert SentryReporter.get_sentry_url() == 'sentry_url' -@patch('os.environ', {'TRIBLER_SENTRY_URL': 'env_url'}) +@patch(OS_ENVIRON, {'TRIBLER_SENTRY_URL': 'env_url'}) def test_get_sentry_url_from_env(): assert SentryReporter.get_sentry_url() == 'env_url' -@patch('os.environ', {}) +@patch(OS_ENVIRON, {}) def test_is_not_in_test_mode(): assert SentryReporter.get_test_sentry_url() is None assert not SentryReporter.is_in_test_mode() -@patch('os.environ', {'TRIBLER_TEST_SENTRY_URL': 'url'}) +@patch(OS_ENVIRON, {'TRIBLER_TEST_SENTRY_URL': 'url'}) def test_is_in_test_mode(): assert SentryReporter.get_test_sentry_url() == 'url' assert SentryReporter.is_in_test_mode() @@ -197,97 +214,63 @@ def test_before_send_scrubber_doesnt_exists(sentry_reporter: SentryReporter): def test_send_defaults(sentry_reporter): assert sentry_reporter.send_event(None, None, None) is None + assert sentry_reporter.send_event(event={}) == DEFAULT_EVENT - assert sentry_reporter.send_event(event={}) == { - 'contexts': { - 'browser': {'name': 'Tribler', 'version': None}, - 'reporter': { - '_stacktrace': [], - '_stacktrace_context': [], - '_stacktrace_extra': [], - 'comments': None, - OS_ENVIRON: {}, - 'sysinfo': {}, - 'events': {}, - }, - }, - 'tags': {'machine': None, 'os': None, 'platform': None, PLATFORM_DETAILS: None, 'version': None}, - } + +def test_send_additional_information(sentry_reporter): + # test that additional information is added to the event + sentry_reporter.additional_information = {'some': 'information'} + + actual = sentry_reporter.send_event(event={}) + expected = deepcopy(DEFAULT_EVENT) + expected[CONTEXTS][REPORTER][ADDITIONAL_INFORMATION] = {'some': 'information'} + assert actual == expected def test_send_post_data(sentry_reporter): - actual = sentry_reporter.send_event(event={'a': 'b'}, - post_data={"version": '0.0.0', "machine": 'x86_64', "os": 'posix', - "timestamp": 42, "sysinfo": '', "comments": 'comment', - "stack": 'l1\nl2--LONG TEXT--l3\nl4', }, ) - expected = { - 'a': 'b', - 'contexts': { - 'browser': {'name': 'Tribler', 'version': '0.0.0'}, - 'reporter': { - '_stacktrace': ['l1', 'l2'], - '_stacktrace_context': [], - '_stacktrace_extra': ['l3', 'l4'], - 'comments': 'comment', - 'os.environ': {}, - 'sysinfo': {}, - 'events': {}, - }, - }, - 'tags': {'machine': 'x86_64', 'os': 
'posix', 'platform': None, PLATFORM_DETAILS: None, - 'version': '0.0.0'}, + # test that post data is added to the event + event = {'a': 'b'} + post_data = { + "version": '0.0.0', "machine": 'x86_64', "os": 'posix', + "timestamp": 42, "sysinfo": '', "comments": 'comment', + "stack": 'l1\nl2--LONG TEXT--l3\nl4', } + actual = sentry_reporter.send_event(event=event, post_data=post_data) + expected = deepcopy(DEFAULT_EVENT) + expected['a'] = 'b' + expected[CONTEXTS][BROWSER][VERSION] = '0.0.0' + expected[CONTEXTS][REPORTER][STACKTRACE] = ['l1', 'l2'] + expected[CONTEXTS][REPORTER][STACKTRACE_EXTRA] = ['l3', 'l4'] + expected[CONTEXTS][REPORTER][COMMENTS] = 'comment' + expected[TAGS] = {MACHINE: 'x86_64', OS: 'posix', PLATFORM: None, PLATFORM_DETAILS: None, + VERSION: '0.0.0'} + assert actual == expected def test_send_sys_info(sentry_reporter): - actual = sentry_reporter.send_event(event={}, sys_info={'platform': ['darwin'], PLATFORM_DETAILS: ['details'], - OS_ENVIRON: ['KEY:VALUE', 'KEY1:VALUE1'], - 'event_1': [{'type': ''}], 'request_1': [{}], 'event_2': [], - 'request_2': [], }, ) - expected = { - 'contexts': { - 'browser': {'name': 'Tribler', 'version': None}, - 'reporter': { - '_stacktrace': [], - '_stacktrace_context': [], - '_stacktrace_extra': [], - 'comments': None, - OS_ENVIRON: {'KEY': 'VALUE', 'KEY1': 'VALUE1'}, - 'sysinfo': {'platform': ['darwin'], PLATFORM_DETAILS: ['details']}, - 'events': {'event_1': [{'type': ''}], 'request_1': [{}], 'event_2': [], 'request_2': []}, - }, - }, - 'tags': {'machine': None, 'os': None, 'platform': 'darwin', 'platform.details': 'details', - 'version': None}, + # test that sys_info is added to the event + sys_info = { + PLATFORM: ['darwin'], + PLATFORM_DETAILS: ['details'], + OS_ENVIRON: ['KEY:VALUE', 'KEY1:VALUE1'], } + actual = sentry_reporter.send_event(event={}, sys_info=sys_info) + expected = deepcopy(DEFAULT_EVENT) + expected[CONTEXTS][REPORTER][OS_ENVIRON] = {'KEY': 'VALUE', 'KEY1': 'VALUE1'} + expected[CONTEXTS][REPORTER][SYSINFO] = {PLATFORM: ['darwin'], PLATFORM_DETAILS: ['details']} + expected[TAGS][PLATFORM] = 'darwin' + expected[TAGS]['platform.details'] = 'details' + assert actual == expected def test_send_additional_tags(sentry_reporter): - actual = sentry_reporter.send_event(event={}, additional_tags={'tag_key': 'tag_value'}) - expected = { - 'contexts': { - 'browser': {'name': 'Tribler', 'version': None}, - 'reporter': { - '_stacktrace': [], - '_stacktrace_context': [], - '_stacktrace_extra': [], - 'comments': None, - OS_ENVIRON: {}, - 'sysinfo': {}, - 'events': {}, - }, - }, - 'tags': { - 'machine': None, - 'os': None, - 'platform': None, - 'platform.details': None, - 'version': None, - 'tag_key': 'tag_value', - }, - } + # test that additional tags are added to the event + tags = {'tag_key': 'tag_value', 'numeric_tag_key': 1} + actual = sentry_reporter.send_event(event={}, additional_tags=tags) + expected = deepcopy(DEFAULT_EVENT) + expected[TAGS].update(tags) assert actual == expected @@ -318,8 +301,8 @@ def test_before_send(sentry_reporter): assert sentry_reporter._before_send({'a': 'b'}, {'exc_info': [KeyboardInterrupt]}) is None # check information has been scrubbed - assert sentry_reporter._before_send({'contexts': {'reporter': {'_stacktrace': ['/Users/username/']}}}, None) == { - 'contexts': {'reporter': {'_stacktrace': ['/Users//']}} + assert sentry_reporter._before_send({CONTEXTS: {REPORTER: {STACKTRACE: ['/Users/username/']}}}, None) == { + CONTEXTS: {REPORTER: {STACKTRACE: ['/Users//']}} } # check release @@ -357,14 +340,14 
@@ def test_send_last_core_output(sentry_reporter): 'values': [ { 'module': 'tribler.gui.utilities', - 'type': 'CreationTraceback', - 'value': '\n File "/Users//Projects/github.com/Tribler/tribler/src/run_tribler.py", ', + TYPE: 'CreationTraceback', + VALUE: '\n File "/Users//Projects/github.com/Tribler/tribler/src/run_tribler.py", ', 'mechanism': None }, { 'module': 'tribler.gui.exceptions', - 'type': 'CoreCrashedError', - 'value': 'The Tribler core has unexpectedly finished with exit code 1 and status: 0.', + TYPE: 'CoreCrashedError', + VALUE: 'The Tribler core has unexpectedly finished with exit code 1 and status: 0.', 'mechanism': None, 'stacktrace': { 'frames': [] @@ -381,35 +364,22 @@ def test_send_last_core_output(sentry_reporter): Press Ctrl-C to quit ''' actual = sentry_reporter.send_event(event=event, last_core_output=last_core_output) - expected = { - 'exception': { - 'values': [ - { - 'module': 'tribler.gui.exceptions', - 'type': 'CoreCrashedError', - 'value': 'The Tribler core has unexpectedly finished with exit code 1 and status: 0.', - 'mechanism': None - }, - { - 'type': 'OverflowError', - 'value': 'bind(): port must be 0-65535.' - } - ] - }, - 'contexts': { - 'browser': {'name': 'Tribler', 'version': None}, - 'reporter': { - 'last_core_output': last_core_output.split('\n'), - '_stacktrace': [], - '_stacktrace_context': [], - '_stacktrace_extra': [], - 'comments': None, - OS_ENVIRON: {}, - 'sysinfo': {}, - 'events': {}, + expected = deepcopy(DEFAULT_EVENT) + + expected['exception'] = { + 'values': [ + { + 'module': 'tribler.gui.exceptions', + TYPE: 'CoreCrashedError', + VALUE: 'The Tribler core has unexpectedly finished with exit code 1 and status: 0.', + 'mechanism': None }, - }, - 'tags': {'machine': None, 'os': None, 'platform': None, PLATFORM_DETAILS: None, 'version': None}, + { + TYPE: 'OverflowError', + VALUE: 'bind(): port must be 0-65535.' + } + ] } + expected[CONTEXTS][REPORTER][LAST_CORE_OUTPUT] = last_core_output.split('\n') assert actual == expected diff --git a/src/tribler/core/tests/test_search_utils.py b/src/tribler/core/tests/test_search_utils.py index 454782bd968..a2db3d6ac50 100644 --- a/src/tribler/core/tests/test_search_utils.py +++ b/src/tribler/core/tests/test_search_utils.py @@ -148,10 +148,10 @@ def test_title_rank(): def test_item_rank(): - item = dict(name="abc", num_seeders=10, num_leechers=20, updated=time.time() - 10 * DAY) + item = dict(name="abc", num_seeders=10, num_leechers=20, created=time.time() - 10 * DAY) assert item_rank("abc", item) == pytest.approx(0.88794642) # Torrent created ten days ago - item = dict(name="abc", num_seeders=10, num_leechers=20, updated=0) + item = dict(name="abc", num_seeders=10, num_leechers=20, created=0) assert item_rank("abc", item) == pytest.approx(0.81964285) # Torrent creation date is unknown item = dict(name="abc", num_seeders=10, num_leechers=20) diff --git a/src/tribler/core/upgrade/config_converter.py b/src/tribler/core/upgrade/config_converter.py index 4408b3bd451..93b6267486f 100644 --- a/src/tribler/core/upgrade/config_converter.py +++ b/src/tribler/core/upgrade/config_converter.py @@ -9,6 +9,7 @@ def convert_config_to_tribler76(state_dir): """ Convert the download config files from Tribler 7.5 to 7.6 format. 
""" + logger.info('Upgrade config to 7.6') config = ConfigObj(infile=(str(state_dir / 'triblerd.conf')), default_encoding='utf-8') if 'http_api' in config: logger.info('Convert config') diff --git a/src/tribler/core/upgrade/tests/test_triblerversion.py b/src/tribler/core/upgrade/tests/test_triblerversion.py new file mode 100644 index 00000000000..f5f058802ae --- /dev/null +++ b/src/tribler/core/upgrade/tests/test_triblerversion.py @@ -0,0 +1,45 @@ +from tribler.core.upgrade.version_manager import TriblerVersion + + +def test_create_from_version(tmp_path): + # Test that we can create a TriblerVersion object from a version string + v = TriblerVersion(tmp_path, '7.13.1') + assert v.version.version == [7, 13, 1] + + +def test_equal(tmp_path): + # Test correctness of equal comparison + def v(s): + return TriblerVersion(tmp_path, s).version + + assert v('7.13.1') == v('7.13.1') + assert v('7.13.1') != v('7.13.2') + + +def test_greater(tmp_path): + # Test correctness of greater than comparison + def v(s): + return TriblerVersion(tmp_path, s).version + + assert v('7.13.1') >= v('7.13.1') + assert v('7.13.1') > v('7.13') + assert v('7.13.1') > v('7.12') + + +def test_less(tmp_path): + # Test correctness of less than comparison + def v(s): + return TriblerVersion(tmp_path, s).version + + assert v('7.13.1') <= v('7.13.1') + assert v('7.13') < v('7.13.1') + assert v('7.12') < v('7.13.1') + + +def test_is_ancient(tmp_path): + # Test that we can correctly determine whether a version is ancient + last_supported = '7.5' + assert not TriblerVersion(tmp_path, '7.13').is_ancient(last_supported) + assert not TriblerVersion(tmp_path, '7.5').is_ancient(last_supported) + + assert TriblerVersion(tmp_path, '7.4').is_ancient(last_supported) diff --git a/src/tribler/core/upgrade/upgrade.py b/src/tribler/core/upgrade/upgrade.py index 81314f4b738..3d490e1d537 100644 --- a/src/tribler/core/upgrade/upgrade.py +++ b/src/tribler/core/upgrade/upgrade.py @@ -127,10 +127,12 @@ def remove_old_logs(self) -> Tuple[List[Path], List[Path]]: return removed_files, left_files def upgrade_tags_to_knowledge(self): + self._logger.info('Upgrade tags to knowledge') migration = MigrationTagsToKnowledge(self.state_dir, self.secondary_key) migration.run() def upgrade_pony_db_14to15(self): + self._logger.info('Upgrade Pony DB from version 14 to version 15') mds_path = self.state_dir / STATEDIR_DB_DIR / 'metadata.db' mds = MetadataStore(mds_path, self.channels_dir, self.primary_key, disable_sync=True, @@ -141,6 +143,7 @@ def upgrade_pony_db_14to15(self): mds.shutdown() def upgrade_pony_db_13to14(self): + self._logger.info('Upgrade Pony DB from version 13 to version 14') mds_path = self.state_dir / STATEDIR_DB_DIR / 'metadata.db' tagdb_path = self.state_dir / STATEDIR_DB_DIR / 'tags.db' @@ -160,6 +163,7 @@ def upgrade_pony_db_12to13(self): Upgrade GigaChannel DB from version 12 (7.9.x) to version 13 (7.11.x). Version 12 adds index for TorrentState.last_check attribute. """ + self._logger.info('Upgrade Pony DB 12 to 13') # We have to create the Metadata Store object because Session-managed Store has not been started yet database_path = self.state_dir / STATEDIR_DB_DIR / 'metadata.db' if database_path.exists(): @@ -174,6 +178,7 @@ def upgrade_pony_db_11to12(self): Version 12 adds a `json_text`, `binary_data` and `data_type` fields to TorrentState table if it already does not exist. 
""" + self._logger.info('Upgrade Pony DB 11 to 12') # We have to create the Metadata Store object because Session-managed Store has not been started yet database_path = self.state_dir / STATEDIR_DB_DIR / 'metadata.db' if not database_path.exists(): @@ -189,10 +194,12 @@ def upgrade_pony_db_10to11(self): Version 11 adds a `self_checked` field to TorrentState table if it already does not exist. """ + self._logger.info('Upgrade Pony DB 10 to 11') # We have to create the Metadata Store object because Session-managed Store has not been started yet database_path = self.state_dir / STATEDIR_DB_DIR / 'metadata.db' if not database_path.exists(): return + # code of the migration mds = MetadataStore(database_path, self.channels_dir, self.primary_key, disable_sync=True, check_tables=False, db_version=10) self.do_upgrade_pony_db_10to11(mds) @@ -204,6 +211,7 @@ def upgrade_bw_accounting_db_8to9(self): Specifically, this upgrade wipes all transactions and addresses an issue where payouts with the wrong amount were made. Also see https://github.com/Tribler/tribler/issues/5789. """ + self._logger.info('Upgrade bandwidth accounting DB 8 to 9') to_version = 9 database_path = self.state_dir / STATEDIR_DB_DIR / 'bandwidth.db' @@ -367,6 +375,7 @@ def upgrade_pony_db_8to10(self): Upgrade GigaChannel DB from version 8 (7.5.x) to version 10 (7.6.x). This will recreate the database anew, which can take quite some time. """ + self._logger.info('Upgrading GigaChannel DB from version 8 to 10') database_path = self.state_dir / STATEDIR_DB_DIR / 'metadata.db' if not database_path.exists() or get_db_version(database_path) >= 10: # Either no old db exists, or the old db version is up to date - nothing to do diff --git a/src/tribler/core/upgrade/version_manager.py b/src/tribler/core/upgrade/version_manager.py index 6a5f29957c4..4f1222a7400 100644 --- a/src/tribler/core/upgrade/version_manager.py +++ b/src/tribler/core/upgrade/version_manager.py @@ -72,14 +72,14 @@ class TriblerVersion: should_recreate_directory: bool deleted: bool - def __init__(self, root_state_dir: Path, version_str: str, files_to_copy: List[str], + def __init__(self, root_state_dir: Path, version_str: str, files_to_copy: List[str] = None, last_launched_at: Optional[float] = None): if last_launched_at is None: last_launched_at = time.time() self.logger = logging.getLogger(self.__class__.__name__) self.version_str = version_str - self.version_tuple = tuple(LooseVersion(version_str).version) - self.major_minor = self.version_tuple[:2] + self.version = LooseVersion(version_str) + self.major_minor = tuple(self.version.version[:2]) self.last_launched_at = last_launched_at self.root_state_dir = root_state_dir self.directory = self.get_directory() @@ -90,7 +90,7 @@ def __init__(self, root_state_dir: Path, version_str: str, files_to_copy: List[s self.should_be_copied = False self.should_recreate_directory = False self.deleted = False - self.files_to_copy = files_to_copy + self.files_to_copy = files_to_copy or [] def __repr__(self): return f'<{self.__class__.__name__}{{{self.version_str}}}>' @@ -187,6 +187,9 @@ def rename_directory(self, prefix='unused_v'): self.logger.info(f"Rename state directory for version {self.version_str} to {dirname}") return self.directory.rename(self.root_state_dir / dirname) + def is_ancient(self, last_supported_version: str): + return self.version < LooseVersion(last_supported_version) + class VersionHistory: """ @@ -201,7 +204,7 @@ class VersionHistory: versions_by_number: List[TriblerVersion] versions_by_time: List[TriblerVersion] 
last_run_version: Optional[TriblerVersion] - code_version: TriblerVersion + code_version: TriblerVersion # current Tribler's version # pylint: disable=too-many-branches def __init__(self, root_state_dir: Path, code_version_id: Optional[str] = None): @@ -226,15 +229,14 @@ def __init__(self, root_state_dir: Path, code_version_id: Optional[str] = None): versions_by_time[i].prev_version_by_time = versions_by_time[i + 1] code_version = TriblerVersion(root_state_dir, code_version_id, self.files_to_copy) + self.logger.info(f"Current Tribler version is {code_version.version_str}") if not last_run_version: - # No previous versions found - self.logger.info(f"No previous version found, current Tribler version is {code_version.version_str}") + self.logger.info("No previous version found") elif last_run_version.version_str == code_version.version_str: # Previously we started the same version, nothing to upgrade code_version = last_run_version - self.logger.info( - f"The previously started version is the same as the current one: {code_version.version_str}") + self.logger.info("The previously started version is the same as the current one") elif last_run_version.major_minor == code_version.major_minor: # Previously we started version from the same directory and can continue use this directory self.logger.info(f"The previous version {last_run_version.version_str} " @@ -333,9 +335,9 @@ def save(self): def fork_state_directory_if_necessary(self) -> Optional[TriblerVersion]: """Returns version string from which the state directory was forked""" - self.logger.info('Fork state directory') code_version = self.code_version if code_version.should_recreate_directory: + self.logger.info('State directory should be recreated') code_version.rename_directory() if code_version.should_be_copied: @@ -344,6 +346,7 @@ def fork_state_directory_if_necessary(self) -> Optional[TriblerVersion]: if prev_version: # should always be True here code_version.copy_state_from(prev_version) return prev_version + self.logger.info('State directory should not be copied') return None def get_installed_versions(self, with_code_version=True) -> List[TriblerVersion]: diff --git a/src/tribler/core/utilities/path_util.py b/src/tribler/core/utilities/path_util.py index eccd3bc4e94..23983f953cc 100644 --- a/src/tribler/core/utilities/path_util.py +++ b/src/tribler/core/utilities/path_util.py @@ -1,5 +1,7 @@ from __future__ import annotations +import itertools +import os import pathlib import sys import tempfile @@ -36,8 +38,26 @@ def normalize_to(self, base: str = None) -> Path: return self - def size(self) -> int: - return self.stat().st_size + def size(self, include_dir_sizes: bool = True) -> int: + """ Return the size of this file or directory (recursively). + + Args: + include_dir_sizes: If True, return the size of files and directories, not the size of files only. + + Returns: The size of this file or directory. 
+ """ + if not self.exists(): + return 0 + + if self.is_file(): + return self.stat().st_size + + size = os.path.getsize(self.absolute()) if include_dir_sizes else 0 # get root dir size + for root, dir_names, file_names in os.walk(self): + names = itertools.chain(dir_names, file_names) if include_dir_sizes else file_names + paths = (os.path.join(root, name) for name in names) + size += sum(os.path.getsize(p) for p in paths if os.path.exists(p)) + return size def startswith(self, text: str) -> bool: return self.match(f"{text}*") diff --git a/src/tribler/core/utilities/process_manager/process.py b/src/tribler/core/utilities/process_manager/process.py index f60c4e3e2c3..12c85f71f4a 100644 --- a/src/tribler/core/utilities/process_manager/process.py +++ b/src/tribler/core/utilities/process_manager/process.py @@ -4,9 +4,9 @@ import os import sqlite3 import time -from datetime import datetime +from datetime import datetime, timedelta from enum import Enum -from typing import Optional, TYPE_CHECKING, Union +from typing import List, Optional, TYPE_CHECKING, Union import psutil @@ -86,23 +86,45 @@ def from_row(cls, manager: ProcessManager, row: tuple) -> TriblerProcess: def __str__(self) -> str: kind = self.kind.value.capitalize() - flags = f"{'primary, ' if self.primary else ''}{'canceled, ' if self.canceled else ''}" - result = [f'{kind}Process({flags}pid={self.pid}'] + elements: List[str] = [] + append = elements.append + append('finished' if self.finished_at or self.exit_code is not None else 'running') + + if self.is_current_process(): + append('current process') + + if self.primary: + append('primary') + + if self.canceled: + append('canceled') + + append(f'pid={self.pid}') + if self.creator_pid is not None: - result.append(f', gui_pid={self.creator_pid}') + append(f'gui_pid={self.creator_pid}') + started = datetime.utcfromtimestamp(self.started_at) - result.append(f", version='{self.app_version}', started='{started.strftime('%Y-%m-%d %H:%M:%S')}'") + append(f"version='{self.app_version}'") + append(f"started='{started.strftime('%Y-%m-%d %H:%M:%S')}'") + if self.api_port is not None: - result.append(f', api_port={self.api_port}') + append(f'api_port={self.api_port}') + if self.finished_at: finished = datetime.utcfromtimestamp(self.finished_at) duration = finished - started - result.append(f", duration='{duration}'") + else: + duration = timedelta(seconds=int(time.time()) - self.started_at) + append(f"duration='{duration}'") + if self.exit_code is not None: - result.append(f', exit_code={self.exit_code}') + append(f'exit_code={self.exit_code}') + if self.error_msg: - result.append(f', error={repr(self.error_msg)}') - result.append(')') + append(f'error={repr(self.error_msg)}') + + result = f'{kind}Process({", ".join(elements)})' return ''.join(result) @classmethod diff --git a/src/tribler/core/utilities/process_manager/tests/test_process.py b/src/tribler/core/utilities/process_manager/tests/test_process.py index 385e396f844..a055068d061 100644 --- a/src/tribler/core/utilities/process_manager/tests/test_process.py +++ b/src/tribler/core/utilities/process_manager/tests/test_process.py @@ -14,7 +14,17 @@ def test_tribler_process(): assert p.is_current_process() assert p.is_running() - pattern = r"^CoreProcess\(pid=\d+, gui_pid=123, version='[^']+', started='\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}'\)$" + pattern = r"^CoreProcess\(running, current process, pid=\d+, gui_pid=123, version='[^']+', " \ + r"started='\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}'\, duration='0:00:\d{2}'\)$" + assert re.match(pattern, 
str(p)) + + p.canceled = True + p.api_port = 123 + p.exit_code = 1 + + pattern = r"^CoreProcess\(finished, current process, canceled, pid=\d+, gui_pid=123, version='[^']+', " \ + r"started='\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}'\, api_port=123, duration='0:00:\d{2}', " \ + r"exit_code=1\)$" assert re.match(pattern, str(p)) @@ -93,7 +103,8 @@ def test_tribler_process_set_error(current_process): assert current_process.error_msg == 'ValueError: exception text' # The error text is included in ProcessInfo.__str__() output - pattern = r"^CoreProcess\(primary, pid=\d+, version='[^']+', started='[^']+', error='ValueError: exception text'\)$" + pattern = r"^CoreProcess\(running, current process, primary, pid=\d+, version='[^']+', " \ + r"started='[^']+', duration='0:00:\d{2}', error='ValueError: exception text'\)$" assert re.match(pattern, str(current_process)) diff --git a/src/tribler/core/utilities/search_utils.py b/src/tribler/core/utilities/search_utils.py index 95ea39cb472..b9875e03218 100644 --- a/src/tribler/core/utilities/search_utils.py +++ b/src/tribler/core/utilities/search_utils.py @@ -40,15 +40,15 @@ def item_rank(query: str, item: dict) -> float: :param query: a user-defined query string :param item: a dict with torrent info. - Should include key `name`, can include `num_seeders`, `num_leechers`, `updated` + Should include key `name`, can include `num_seeders`, `num_leechers`, `created` :return: the torrent rank value in range [0, 1] """ title = item['name'] seeders = item.get('num_seeders', 0) leechers = item.get('num_leechers', 0) - updated = item.get('updated', 0) - freshness = None if updated <= 0 else time.time() - updated + created = item.get('created', 0) + freshness = None if created <= 0 else time.time() - created return torrent_rank(query, title, seeders, leechers, freshness) diff --git a/src/tribler/core/utilities/tests/test_path_utils.py b/src/tribler/core/utilities/tests/test_path_utils.py index d60ff9c32ad..093b61b26a8 100644 --- a/src/tribler/core/utilities/tests/test_path_utils.py +++ b/src/tribler/core/utilities/tests/test_path_utils.py @@ -3,6 +3,12 @@ from tribler.core.utilities.path_util import Path, tail +# pylint: disable=redefined-outer-name +@pytest.fixture +def tribler_tmp_path(tmp_path): + return Path(tmp_path) + + def test_put_path_relative(tmpdir): assert Path(tmpdir).normalize_to(None) == Path(tmpdir) assert Path(tmpdir).normalize_to('') == Path(tmpdir) @@ -23,16 +29,16 @@ def test_tail_no_file(): tail('missed_file.txt') -def test_tail_small_file(tmpdir: Path): +def test_tail_small_file(tribler_tmp_path: Path): """Test that tail works correct with a small file """ - log_file = tmpdir / 'log.txt' + log_file = tribler_tmp_path / 'log.txt' log_file.write_text('text', 'utf-8') assert tail(log_file) == 'text' -def test_tail_count(tmpdir: Path): +def test_tail_count(tribler_tmp_path: Path): """Test that tail returns desired count of lines""" - log_file = tmpdir / 'log.txt' + log_file = tribler_tmp_path / 'log.txt' # add 100 lines content = '\n'.join(f'{i}' for i in range(100)) @@ -47,7 +53,7 @@ def test_tail_count(tmpdir: Path): tail(log_file, -1) -def test_tail_encodings(tmpdir: Path): +def test_tail_encodings(tribler_tmp_path: Path): """Test that the `tail` function can read logs with "utf-8", "ascii", "latin-1" encodings """ encodings = ["utf-8", "ascii", "latin-1"] log_files = [] @@ -56,10 +62,42 @@ def test_tail_encodings(tmpdir: Path): # create files for all available encodings for encoding in encodings: - path = tmpdir / encoding + path = tribler_tmp_path / 
encoding path.write_text(content, encoding) log_files.append(path) # make sure they were read all encoding correctly for log in log_files: assert tail(log, 100) == content + + +def test_size_file(tribler_tmp_path: Path): + # test that size returns correct size for a file + path = tribler_tmp_path / '10bytes.file' + path.write_bytes(b'0' * 10) + assert path.size() == 10 + + +def test_size_missed_file(tribler_tmp_path: Path): + # test that size returns 0 for missed file + path = tribler_tmp_path / '10bytes.file' + assert path.size() == 0 + + +def test_size_folder(tribler_tmp_path: Path): + # test that size can calculate size of files and folders recursively + # create a structure like: + # + # tribler_tmp_path + # ├ file.100bytes + # └ folder1 + # ├ file.100bytes + # └ file1.100bytes + + (tribler_tmp_path / 'file.100bytes').write_bytes(b'0' * 100) + (tribler_tmp_path / 'folder1').mkdir() + (tribler_tmp_path / 'folder1' / 'file.100bytes').write_bytes(b'0' * 100) + (tribler_tmp_path / 'folder1' / 'file1.100bytes').write_bytes(b'0' * 100) + + assert tribler_tmp_path.size(include_dir_sizes=False) == 300 + assert tribler_tmp_path.size() >= 300 diff --git a/src/tribler/gui/core_manager.py b/src/tribler/gui/core_manager.py index cd60e54db98..96a3ae582b0 100644 --- a/src/tribler/gui/core_manager.py +++ b/src/tribler/gui/core_manager.py @@ -15,7 +15,7 @@ from tribler.gui.app_manager import AppManager from tribler.gui.event_request_manager import EventRequestManager from tribler.gui.exceptions import CoreConnectTimeoutError, CoreCrashedError -from tribler.gui.network.request_manager import request_manager +from tribler.gui.network.request_manager import SHUTDOWN_ENDPOINT, request_manager from tribler.gui.utilities import connect API_PORT_CHECK_INTERVAL = 100 # 0.1 seconds between attempts to retrieve Core API port @@ -229,17 +229,21 @@ def stop(self, quit_app_on_core_finished=True): self.events_manager.shutting_down = True def shutdown_request_processed(response): - self._logger.info(f"Shutdown request was processed by Core. Response: {response}") + self._logger.info(f"{SHUTDOWN_ENDPOINT} request was processed by Core. 
Response: {response}") def send_shutdown_request(initial=False): if initial: - self._logger.info("Sending shutdown request to Tribler Core") + self._logger.info(f"Sending {SHUTDOWN_ENDPOINT} request to Tribler Core") else: - self._logger.warning("Re-sending shutdown request to Tribler Core") - - request = request_manager.put("shutdown", shutdown_request_processed, - priority=QNetworkRequest.HighPriority) - request.cancellable = False + self._logger.warning(f"Re-sending {SHUTDOWN_ENDPOINT} request to Tribler Core") + + request = request_manager.put( + endpoint=SHUTDOWN_ENDPOINT, + on_success=shutdown_request_processed, + priority=QNetworkRequest.HighPriority + ) + if request: + request.cancellable = False send_shutdown_request(initial=True) diff --git a/src/tribler/gui/debug_window.py b/src/tribler/gui/debug_window.py index 123d8a52063..1356c2ff7b6 100644 --- a/src/tribler/gui/debug_window.py +++ b/src/tribler/gui/debug_window.py @@ -324,11 +324,10 @@ def load_requests_tab(self): method = request.method data = request.data timestamp = request.time - status_code = request.status_code item = QTreeWidgetItem(self.window().requests_tree_widget) item.setText(0, f"{method} {repr(endpoint)} {repr(data)}") - item.setText(1, str(status_code or "unknown")) + item.setText(1, request.status_text) item.setText(2, f"{strftime('%H:%M:%S', localtime(timestamp))}") self.window().requests_tree_widget.addTopLevelItem(item) @@ -905,7 +904,7 @@ def load_libtorrent_data(self, export=False): def load_libtorrent_settings_tab(self, hop, export=False): request_manager.get(endpoint=f"libtorrent/settings?hop={hop}", - on_finish=lambda data: self.on_libtorrent_settings_received(data, export=export)) + on_success=lambda data: self.on_libtorrent_settings_received(data, export=export)) self.window().libtorrent_settings_tree_widget.clear() def on_libtorrent_settings_received(self, data, export=False): @@ -921,7 +920,7 @@ def on_libtorrent_settings_received(self, data, export=False): def load_libtorrent_sessions_tab(self, hop, export=False): request_manager.get(endpoint=f"libtorrent/session?hop={hop}", - on_finish=lambda data: self.on_libtorrent_session_received(data, export=export)) + on_success=lambda data: self.on_libtorrent_session_received(data, export=export)) self.window().libtorrent_session_tree_widget.clear() def on_libtorrent_session_received(self, data, export=False): diff --git a/src/tribler/gui/dialogs/addtopersonalchanneldialog.py b/src/tribler/gui/dialogs/addtopersonalchanneldialog.py index e55a62d9db4..737a78ed277 100644 --- a/src/tribler/gui/dialogs/addtopersonalchanneldialog.py +++ b/src/tribler/gui/dialogs/addtopersonalchanneldialog.py @@ -91,15 +91,18 @@ def on_item_expanded(self, item): self.load_channel(channel_id) def load_channel(self, channel_id): - request = request_manager.get(f"channels/mychannel/{channel_id}", - on_finish=lambda result: self.on_channel_contents(result, channel_id), - url_params={ - "metadata_type": [CHANNEL_TORRENT, COLLECTION_NODE], - "first": 1, - "last": 1000, - "exclude_deleted": True, - }) - self.root_requests_list.append(request) + request = request_manager.get( + f"channels/mychannel/{channel_id}", + on_success=lambda result: self.on_channel_contents(result, channel_id), + url_params={ + "metadata_type": [CHANNEL_TORRENT, COLLECTION_NODE], + "first": 1, + "last": 1000, + "exclude_deleted": True, + } + ) + if request: + self.root_requests_list.append(request) def get_selected_channel_id(self): selected = self.dialog_widget.channels_tree_wt.selectedItems() diff --git 
a/src/tribler/gui/dialogs/createtorrentdialog.py b/src/tribler/gui/dialogs/createtorrentdialog.py index f8855536873..67e5a4385dd 100644 --- a/src/tribler/gui/dialogs/createtorrentdialog.py +++ b/src/tribler/gui/dialogs/createtorrentdialog.py @@ -115,7 +115,7 @@ def on_create_clicked(self, checked): is_seed = self.dialog_widget.seed_after_adding_checkbox.isChecked() self.rest_request1 = request_manager.post( endpoint='createtorrent', - on_finish=self.on_torrent_created, + on_success=self.on_torrent_created, url_params={'download': 1} if is_seed else None, data={"name": self.name, "description": description, "files": files_list, "export_dir": export_dir}, ) diff --git a/src/tribler/gui/dialogs/editmetadatadialog.py b/src/tribler/gui/dialogs/editmetadatadialog.py index 50622a97180..b809606c251 100644 --- a/src/tribler/gui/dialogs/editmetadatadialog.py +++ b/src/tribler/gui/dialogs/editmetadatadialog.py @@ -7,6 +7,7 @@ from tribler.core.components.knowledge.db.knowledge_db import ResourceType from tribler.core.components.knowledge.knowledge_constants import MAX_RESOURCE_LENGTH, MIN_RESOURCE_LENGTH +from tribler.core.utilities.path_util import Path from tribler.gui.defs import TAG_HORIZONTAL_MARGIN from tribler.gui.dialogs.dialogcontainer import DialogContainer from tribler.gui.network.request_manager import request_manager @@ -45,8 +46,9 @@ def __init__(self, parent: QWidget, index: QModelIndex) -> None: connect(self.dialog_widget.edit_metadata_table.doubleClicked, self.on_edit_metadata_table_item_clicked) # Load the languages - with open(get_ui_file_path("languages.json"), "r") as languages_file: - self.languages = json.loads(languages_file.read()) + languages_path = get_ui_file_path("languages.json") + content = Path(languages_path).read_text(encoding='utf-8') + self.languages = json.loads(content) # Fill in the metadata table and make the items in the 2nd column editable for ind in range(self.dialog_widget.edit_metadata_table.topLevelItemCount()): @@ -70,7 +72,7 @@ def __init__(self, parent: QWidget, index: QModelIndex) -> None: self.dialog_widget.content_name_label.setText(self.data_item["name"]) # Fetch suggestions - request_manager.get(f"knowledge/{self.infohash}/tag_suggestions", on_finish=self.on_received_tag_suggestions) + request_manager.get(f"knowledge/{self.infohash}/tag_suggestions", on_success=self.on_received_tag_suggestions) self.update_window() @@ -135,6 +137,9 @@ def on_save_metadata_button_clicked(self, _) -> None: self.save_button_clicked.emit(self.index, statements) def on_received_tag_suggestions(self, data: Dict) -> None: + if self.closed: # The dialog was closed before the request finished + return + self.suggestions_loaded.emit() if data["suggestions"]: self.dialog_widget.suggestions_container.show() diff --git a/src/tribler/gui/dialogs/feedbackdialog.py b/src/tribler/gui/dialogs/feedbackdialog.py index 412533e9c4d..2b379bb9981 100644 --- a/src/tribler/gui/dialogs/feedbackdialog.py +++ b/src/tribler/gui/dialogs/feedbackdialog.py @@ -1,6 +1,5 @@ from __future__ import annotations -import json import os import platform import sys @@ -15,8 +14,6 @@ from tribler.core.sentry_reporter.sentry_reporter import SentryReporter from tribler.core.sentry_reporter.sentry_scrubber import SentryScrubber from tribler.core.sentry_reporter.sentry_tools import CONTEXT_DELIMITER, LONG_TEXT_DELIMITER -from tribler.gui.event_request_manager import received_events -from tribler.gui.network.request_manager import request_manager from tribler.gui.sentry_mixin import AddBreadcrumbOnShowMixin from 
tribler.gui.tribler_action_menu import TriblerActionMenu from tribler.gui.utilities import connect, get_ui_file_path, tr @@ -99,23 +96,6 @@ def add_item_to_info_widget(key, value): for key in os.environ.keys(): add_item_to_info_widget('os.environ', f'{key}: {os.environ[key]}') - # Add recent requests to feedback dialog - request_ind = 1 - - for request in request_manager.performed_requests: - add_item_to_info_widget( - 'request_%d' % request_ind, - '%s %s %s (time: %s, code: %s)' - % (request.endpoint, request.method, request.data, request.time, request.status_code), - ) - request_ind += 1 - - # Add recent events to feedback dialog - events_ind = 1 - for event, event_time in received_events[:30][::-1]: - add_item_to_info_widget('event_%d' % events_ind, f'{json.dumps(event)} (time: {event_time})') - events_ind += 1 - # Users can remove specific lines in the report connect(self.env_variables_list.customContextMenuRequested, self.on_right_click_item) diff --git a/src/tribler/gui/dialogs/startdownloaddialog.py b/src/tribler/gui/dialogs/startdownloaddialog.py index 3c9aceb3844..6a8190284a5 100644 --- a/src/tribler/gui/dialogs/startdownloaddialog.py +++ b/src/tribler/gui/dialogs/startdownloaddialog.py @@ -144,8 +144,12 @@ def perform_files_request(self): params = {'uri': self.download_uri} if direct: params['hops'] = 0 - self.rest_request = request_manager.get('torrentinfo', on_finish=self.on_received_metainfo, - url_params=params, capture_errors=False) + self.rest_request = request_manager.get( + 'torrentinfo', + on_success=self.on_received_metainfo, + url_params=params, + capture_errors=False + ) if self.metainfo_retries <= METAINFO_MAX_RETRIES: fetch_mode = tr("directly") if direct else tr("anonymously") diff --git a/src/tribler/gui/error_handler.py b/src/tribler/gui/error_handler.py index f6271eda5d6..410eb0b9ecb 100644 --- a/src/tribler/gui/error_handler.py +++ b/src/tribler/gui/error_handler.py @@ -72,6 +72,11 @@ def gui_error(self, exc_type, exc, tb): if self.app_manager.quitting_app: return + additional_tags = { + 'source': 'gui', + 'tribler_stopped': self._tribler_stopped + } + FeedbackDialog( parent=self.tribler_window, sentry_reporter=gui_sentry_reporter, @@ -79,7 +84,7 @@ def gui_error(self, exc_type, exc, tb): tribler_version=self.tribler_window.tribler_version, start_time=self.tribler_window.start_time, stop_application_on_close=self._tribler_stopped, - additional_tags={'source': 'gui'}, + additional_tags=additional_tags, ).show() def core_error(self, reported_error: ReportedError): @@ -98,6 +103,12 @@ def core_error(self, reported_error: ReportedError): self._stop_tribler(error_text) SentryScrubber.remove_breadcrumbs(reported_error.event) + gui_sentry_reporter.additional_information.update(reported_error.additional_information) + + additional_tags = { + 'source': 'core', + 'tribler_stopped': self._tribler_stopped + } FeedbackDialog( parent=self.tribler_window, @@ -106,7 +117,7 @@ def core_error(self, reported_error: ReportedError): tribler_version=self.tribler_window.tribler_version, start_time=self.tribler_window.start_time, stop_application_on_close=self._tribler_stopped, - additional_tags={'source': 'core'} + additional_tags=additional_tags, ).show() def _stop_tribler(self, text): @@ -119,7 +130,7 @@ def _stop_tribler(self, text): self.tribler_window.delete_tray_icon() # Stop the download loop - self.tribler_window.downloads_page.stop_loading_downloads() + self.tribler_window.downloads_page.stop_refreshing_downloads() # Add info about whether we are stopping Tribler or not 
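        # The new `tribler_stopped` entry in `additional_tags` records whether Tribler had already
        # been stopped when the error was reported, and stopping the Core below with
        # `quit_app_on_core_finished=False` avoids quitting the application once the Core finishes,
        # so the FeedbackDialog can still be shown to the user.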
self.tribler_window.core_manager.stop(quit_app_on_core_finished=False) diff --git a/src/tribler/gui/i18n/es_ES.qm b/src/tribler/gui/i18n/es_ES.qm index 1db7458f1b1..781f196d490 100644 Binary files a/src/tribler/gui/i18n/es_ES.qm and b/src/tribler/gui/i18n/es_ES.qm differ diff --git a/src/tribler/gui/i18n/es_ES.ts b/src/tribler/gui/i18n/es_ES.ts index accb0fc83ed..cd2557da46c 100644 --- a/src/tribler/gui/i18n/es_ES.ts +++ b/src/tribler/gui/i18n/es_ES.ts @@ -473,6 +473,11 @@ Tenga en cuenta que los valores decimales están truncados. Health Enlaces + + + Created + Creado + Updated @@ -1118,32 +1123,32 @@ Si no está seguro, pulse "No". Más adelante podrá eliminar esas car %(years)iy %(weeks)iw - %(años)iy %(semanas)iw + %(years)ia %(weeks)is %(weeks)iw %(days)id - %(semanas)iw %(días)id + %(weeks)is %(days)id %(days)id %(hours)ih - %(días)id %(horas)ih + %(days)id %(hours)ih %(hours)ih %(minutes)im - %(horas)ih %(minutos)im + %(hours)ih %(minutes)im %(minutes)im %(seconds)is - %(minutos)im %(segundos)is + %(minutes)im %(seconds)is %(seconds)is - %(segundos)is + %(seconds)is diff --git a/src/tribler/gui/i18n/pt_BR.qm b/src/tribler/gui/i18n/pt_BR.qm index f431624b3b6..809bc5990be 100644 Binary files a/src/tribler/gui/i18n/pt_BR.qm and b/src/tribler/gui/i18n/pt_BR.qm differ diff --git a/src/tribler/gui/i18n/pt_BR.ts b/src/tribler/gui/i18n/pt_BR.ts index 714215aacf7..baaeacc12ca 100644 --- a/src/tribler/gui/i18n/pt_BR.ts +++ b/src/tribler/gui/i18n/pt_BR.ts @@ -17,6 +17,11 @@ Torrents Torrents + + + Created + Criado + Updated diff --git a/src/tribler/gui/i18n/ru_RU.qm b/src/tribler/gui/i18n/ru_RU.qm index 6207baba332..310bd7515f9 100644 Binary files a/src/tribler/gui/i18n/ru_RU.qm and b/src/tribler/gui/i18n/ru_RU.qm differ diff --git a/src/tribler/gui/i18n/ru_RU.ts b/src/tribler/gui/i18n/ru_RU.ts index 2ac7e9baee4..229182aac8a 100644 --- a/src/tribler/gui/i18n/ru_RU.ts +++ b/src/tribler/gui/i18n/ru_RU.ts @@ -16,6 +16,11 @@ Health Состояние + + + + Created + Создан diff --git a/src/tribler/gui/i18n/zh_CN.qm b/src/tribler/gui/i18n/zh_CN.qm index a4361fb59ad..157edc7dca9 100644 Binary files a/src/tribler/gui/i18n/zh_CN.qm and b/src/tribler/gui/i18n/zh_CN.qm differ diff --git a/src/tribler/gui/i18n/zh_CN.ts b/src/tribler/gui/i18n/zh_CN.ts index 09f6636accb..b3fac71b039 100644 --- a/src/tribler/gui/i18n/zh_CN.ts +++ b/src/tribler/gui/i18n/zh_CN.ts @@ -17,6 +17,11 @@ Health 健康度 + + + Created + 创建时间 + Updated diff --git a/src/tribler/gui/network/request.py b/src/tribler/gui/network/request.py index 8d4ea66f395..6ed90d841c4 100644 --- a/src/tribler/gui/network/request.py +++ b/src/tribler/gui/network/request.py @@ -11,12 +11,27 @@ from tribler.gui.utilities import connect +REQUEST_ID = '_request_id' + if TYPE_CHECKING: from tribler.gui.network.request_manager import RequestManager DATA_TYPE = Optional[Union[bytes, str, Dict, List]] +def make_reply_errors_map() -> Dict[int, str]: + errors_map = {} + for attr_name in dir(QNetworkReply): + if attr_name[0].isupper() and attr_name.endswith('Error'): # SomeError, but not the `setError` method + error_code = getattr(QNetworkReply, attr_name) + if isinstance(error_code, int): # an additional safety check, just for case + errors_map[error_code] = attr_name + return errors_map + + +reply_errors = make_reply_errors_map() + + class Request(QObject): GET = 'GET' POST = 'POST' @@ -34,7 +49,7 @@ class Request(QObject): def __init__( self, endpoint: str, - on_finish: Callable = lambda _: None, + on_success: Callable = lambda _: None, url_params: Optional[Dict] = None, 
data: DATA_TYPE = None, method: str = GET, @@ -61,7 +76,7 @@ def __init__( raw_data = data self.raw_data: Optional[bytes] = raw_data - connect(self.on_finished_signal, on_finish) + connect(self.on_finished_signal, on_success) self.reply: Optional[QNetworkReply] = None # to hold the associated QNetworkReply object self.manager: Optional[RequestManager] = None @@ -69,7 +84,9 @@ def __init__( self.time = time() self.status_code = 0 + self.status_text = "unknown" self.cancellable = True + self.id = 0 def set_manager(self, manager: RequestManager): self.manager = manager @@ -86,6 +103,10 @@ def _set_url(self, base_url: str): def update_status(self, status_code: int): self.logger.debug(f'Update {self}: {status_code}') self.status_code = status_code + if status_code > 0: # positive codes are HTTP response codes + self.status_text = str(status_code) + else: # negative codes represent QNetworkReply error codes + self.status_text = f'{status_code}: {reply_errors.get(-status_code, "")}' def on_finished(self): if not self.reply or not self.manager: @@ -93,6 +114,13 @@ def on_finished(self): self.logger.info(f'Finished: {self}') try: + error_code = self.reply.error() + if error_code != QNetworkReply.NoError: + error_name = reply_errors.get(error_code, '') + self.logger.warning(f'Request {self} finished with error: {error_code} ({error_name})') + self.update_status(-error_code) + return + if status_code := self.reply.attribute(QNetworkRequest.HttpStatusCodeAttribute): self.update_status(status_code) @@ -104,11 +132,13 @@ def on_finished(self): return if not data: - self.on_finished_signal.emit({}) + self.logger.error(f'No data received in the reply for {self}') return self.logger.debug('Create a json response') result = json.loads(data) + if isinstance(result, dict): + result[REQUEST_ID] = self.id is_error = 'error' in result if is_error and self.capture_errors: text = self.manager.show_error(self, result) diff --git a/src/tribler/gui/network/request_manager.py b/src/tribler/gui/network/request_manager.py index 15ad044279e..4ab55182acd 100644 --- a/src/tribler/gui/network/request_manager.py +++ b/src/tribler/gui/network/request_manager.py @@ -14,6 +14,8 @@ from tribler.gui.network.request import DATA_TYPE, Request from tribler.gui.utilities import connect +SHUTDOWN_ENDPOINT = "shutdown" + class RequestManager(QNetworkAccessManager): """ @@ -34,35 +36,35 @@ def __init__(self, limit: int = 50, timeout_interval: int = 15): self.protocol = DEFAULT_API_PROTOCOL self.host = DEFAULT_API_HOST self.port = DEFAULT_API_PORT - self.key = b"" + self.key = '' self.limit = limit self.timeout_interval = timeout_interval + self.last_request_id = 0 def get(self, endpoint: str, - on_finish: Callable = lambda _: None, + on_success: Callable = lambda _: None, url_params: Optional[Dict] = None, data: DATA_TYPE = None, capture_errors: bool = True, priority: int = QNetworkRequest.NormalPriority, - raw_response: bool = False) -> Request: + raw_response: bool = False) -> Optional[Request]: - request = Request(endpoint=endpoint, on_finish=on_finish, url_params=url_params, data=data, + request = Request(endpoint=endpoint, on_success=on_success, url_params=url_params, data=data, capture_errors=capture_errors, priority=priority, raw_response=raw_response, method=Request.GET) - self.add(request) - return request + return self.add(request) def post(self, endpoint: str, - on_finish: Callable = lambda _: None, + on_success: Callable = lambda _: None, url_params: Optional[Dict] = None, data: DATA_TYPE = None, capture_errors: bool = True, 
priority: int = QNetworkRequest.NormalPriority, - raw_response: bool = False) -> Request: + raw_response: bool = False) -> Optional[Request]: - request = Request(endpoint=endpoint, on_finish=on_finish, url_params=url_params, data=data, + request = Request(endpoint=endpoint, on_success=on_success, url_params=url_params, data=data, capture_errors=capture_errors, priority=priority, raw_response=raw_response, method=Request.POST) self.add(request) @@ -70,50 +72,55 @@ def post(self, def put(self, endpoint: str, - on_finish: Callable = lambda _: None, + on_success: Callable = lambda _: None, url_params: Optional[Dict] = None, data: DATA_TYPE = None, capture_errors: bool = True, priority: int = QNetworkRequest.NormalPriority, - raw_response: bool = False) -> Request: + raw_response: bool = False) -> Optional[Request]: - request = Request(endpoint=endpoint, on_finish=on_finish, url_params=url_params, data=data, + request = Request(endpoint=endpoint, on_success=on_success, url_params=url_params, data=data, capture_errors=capture_errors, priority=priority, raw_response=raw_response, method=Request.PUT) - self.add(request) - return request + return self.add(request) def patch(self, endpoint: str, - on_finish: Callable = lambda _: None, + on_success: Callable = lambda _: None, url_params: Optional[Dict] = None, data: DATA_TYPE = None, capture_errors: bool = True, priority: int = QNetworkRequest.NormalPriority, - raw_response: bool = False) -> Request: + raw_response: bool = False) -> Optional[Request]: - request = Request(endpoint=endpoint, on_finish=on_finish, url_params=url_params, data=data, + request = Request(endpoint=endpoint, on_success=on_success, url_params=url_params, data=data, capture_errors=capture_errors, priority=priority, raw_response=raw_response, method=Request.PATCH) - self.add(request) - return request + return self.add(request) def delete(self, endpoint: str, - on_finish: Callable = lambda _: None, + on_success: Callable = lambda _: None, url_params: Optional[Dict] = None, data: DATA_TYPE = None, capture_errors: bool = True, priority: int = QNetworkRequest.NormalPriority, - raw_response: bool = False) -> Request: + raw_response: bool = False) -> Optional[Request]: - request = Request(endpoint=endpoint, on_finish=on_finish, url_params=url_params, data=data, + request = Request(endpoint=endpoint, on_success=on_success, url_params=url_params, data=data, capture_errors=capture_errors, priority=priority, raw_response=raw_response, method=Request.DELETE) - self.add(request) - return request + return self.add(request) + + def add(self, request: Request) -> Optional[Request]: + if self._is_in_shutting_down(request): + # Do not send requests when Tribler is shutting down + return None + + # Set last request id + self.last_request_id += 1 + request.id = self.last_request_id - def add(self, request: Request): if len(self.active_requests) > self.limit: self._drop_timed_out_requests() @@ -139,13 +146,14 @@ def add(self, request: Request): buf.setParent(request.reply) connect(request.reply.finished, request.on_finished) + return request def remove(self, request: Request): self.active_requests.discard(request) def show_error(self, request: Request, data: Dict) -> str: text = self.get_message_from_error(data) - if self.window.core_manager.shutting_down: + if self._is_in_shutting_down(request): return '' text = f'An error occurred during the request "{request}":\n\n{text}' @@ -177,6 +185,16 @@ def clear(self): if request.cancellable: request.cancel() + def _is_in_shutting_down(self, request: 
Request) -> bool: + """ Check if the Tribler is in shutting down state.""" + if request.endpoint == SHUTDOWN_ENDPOINT: + return False + + if not self.window or not self.window.core_manager: + return False + + return self.window.core_manager.shutting_down + def _drop_timed_out_requests(self): for req in list(self.active_requests): is_time_to_cancel = time() - req.time > self.timeout_interval diff --git a/src/tribler/gui/network/tests/test_request.py b/src/tribler/gui/network/tests/test_request.py index 48656cee90a..fdc281c7279 100644 --- a/src/tribler/gui/network/tests/test_request.py +++ b/src/tribler/gui/network/tests/test_request.py @@ -1,6 +1,9 @@ -from unittest.mock import MagicMock +from typing import Dict +from unittest.mock import MagicMock, Mock, patch -from tribler.gui.network.request import Request +from PyQt5.QtNetwork import QNetworkReply + +from tribler.gui.network.request import REQUEST_ID, Request def test_default_constructor(): @@ -39,13 +42,34 @@ def test_str_data_constructor(): def test_on_finished(): - # Test that if 'request.reply' is empty, the `on_finish` method is called with an empty dict. - # see: https://github.com/Tribler/tribler/issues/7297 - on_finish = MagicMock() - request = Request(endpoint='endpoint', on_finish=on_finish) + # Test that if 'request.reply' is empty, the `on_success` callback is not called + # see: https://github.com/Tribler/tribler/issues/7333 + on_success = MagicMock() + request = Request(endpoint='endpoint', on_success=on_success) request.manager = MagicMock() request.reply = MagicMock(readAll=MagicMock(return_value=b'')) request.on_finished() - on_finish.assert_called_once_with({}) + on_success.assert_not_called() + + +@patch.object(Request, 'update_status', Mock()) +def test_set_id(): + # Test that the id is set correctly during `on_finished` callback call + actual_id = None + + def on_success(json: Dict): + nonlocal actual_id + actual_id = json[REQUEST_ID] + + request = Request('endpoint', on_success=on_success) + request.manager = Mock() + request.reply = Mock( + error=Mock(return_value=QNetworkReply.NoError), + readAll=Mock(return_value=b'{"a": "b"}') + ) + request.id = 10 + request.on_finished() + + assert actual_id == 10 diff --git a/src/tribler/gui/network/tests/test_request_manager.py b/src/tribler/gui/network/tests/test_request_manager.py index f065162b0ab..f123ab6346f 100644 --- a/src/tribler/gui/network/tests/test_request_manager.py +++ b/src/tribler/gui/network/tests/test_request_manager.py @@ -1,3 +1,5 @@ +from unittest.mock import Mock, patch + import pytest from tribler.gui.network.request_manager import RequestManager @@ -42,3 +44,13 @@ def test_get_message_from_error_any_dict(request_manager: RequestManager): } ) assert message == '{"key": "value"}' + + +@patch('tribler.gui.network.request_manager.QBuffer', Mock()) +@patch.object(RequestManager, 'sendCustomRequest', Mock()) +def test_request_id(request_manager: RequestManager): + request = request_manager.get('endpoint') + assert request.id == 1 + + request = request_manager.delete('endpoint') + assert request.id == 2 diff --git a/src/tribler/gui/start_gui.py b/src/tribler/gui/start_gui.py index 0fb3d222e4b..55d9668de27 100644 --- a/src/tribler/gui/start_gui.py +++ b/src/tribler/gui/start_gui.py @@ -63,16 +63,10 @@ def run_gui(api_port, api_key, root_state_dir, parsed_args): translator = get_translator(settings.value('translation', None)) app.installTranslator(translator) - if not current_process_is_primary and app.connected_to_previous_instance: - # if an application is 
already running, then send the command line - # argument to it and close the current instance - logger.info('GUI Application is already running. Passing a torrent file path to it.') - for arg in sys.argv[1:]: - if os.path.exists(arg) and arg.endswith(".torrent"): - app.send_message(path_to_url(arg)) - elif arg.startswith('magnet'): - app.send_message(arg) - logger.info('Close the current application.') + if not current_process_is_primary: + logger.info('GUI Application is already running.') + app.send_torrent_file_path_to_primary_process() + logger.info('Close the current GUI application.') process_manager.sys_exit(1, 'Tribler GUI application is already running') logger.info('Start Tribler Window') diff --git a/src/tribler/gui/tests/conftest.py b/src/tribler/gui/tests/conftest.py index bbf201e0095..1d566d4181a 100644 --- a/src/tribler/gui/tests/conftest.py +++ b/src/tribler/gui/tests/conftest.py @@ -1,6 +1,15 @@ +import logging + import pytest +def pytest_configure(config): # pylint: disable=unused-argument + # Disable logging from faker for all tests + logging.getLogger('faker.factory').propagate = False + # Disable logging from PyQt5.uic for all tests + logging.getLogger('PyQt5.uic').propagate = False + + def pytest_addoption(parser): parser.addoption('--guitests', action='store_true', dest="guitests", default=False, help="enable longrundecorated tests") diff --git a/src/tribler/gui/tests/test_downloadspage.py b/src/tribler/gui/tests/test_downloadspage.py new file mode 100644 index 00000000000..418de3b6617 --- /dev/null +++ b/src/tribler/gui/tests/test_downloadspage.py @@ -0,0 +1,50 @@ +from unittest.mock import MagicMock, Mock, patch + +from PyQt5.QtWidgets import QWidget + +from tribler.gui.network.request import REQUEST_ID +from tribler.gui.widgets.downloadspage import DownloadsPage + + +def downloads_page() -> DownloadsPage: + window = MagicMock() + window.downloads_list.indexOfTopLevelItem = Mock(return_value=-1) + + page = DownloadsPage() + page.window = Mock(return_value=window) + page.received_downloads = Mock() + return page + + +@patch.object(QWidget, '__init__', Mock()) +def test_accept_requests(): + # Test that the page accepts requests with the greater request id + page = downloads_page() + + page.on_received_downloads(result={REQUEST_ID: 1, 'downloads': MagicMock()}) + assert page.received_downloads.emit.called + + page.received_downloads.emit.reset_mock() + page.on_received_downloads(result={REQUEST_ID: 2, 'downloads': MagicMock()}) + assert page.received_downloads.emit.called + + page.received_downloads.emit.reset_mock() + page.on_received_downloads(result={REQUEST_ID: 10, 'downloads': MagicMock()}) + assert page.received_downloads.emit.called + + +@patch.object(QWidget, '__init__', Mock()) +def test_ignore_request(): + # Test that the page ignores requests with a lower or equal request id + page = downloads_page() + + page.on_received_downloads(result={REQUEST_ID: 10, 'downloads': MagicMock()}) + assert page.received_downloads.emit.called + + page.received_downloads.emit.reset_mock() + page.on_received_downloads(result={REQUEST_ID: 10, 'downloads': MagicMock()}) + assert not page.received_downloads.emit.called + + page.received_downloads.emit.reset_mock() + page.on_received_downloads(result={REQUEST_ID: 9, 'downloads': MagicMock()}) + assert not page.received_downloads.emit.called diff --git a/src/tribler/gui/tests/test_util.py b/src/tribler/gui/tests/test_utilities.py similarity index 58% rename from src/tribler/gui/tests/test_util.py rename to 
src/tribler/gui/tests/test_utilities.py index 1dfa0d82b12..9c37f5279c0 100644 --- a/src/tribler/gui/tests/test_util.py +++ b/src/tribler/gui/tests/test_utilities.py @@ -1,9 +1,11 @@ -from unittest.mock import MagicMock +from unittest.mock import MagicMock, Mock, patch from urllib.parse import unquote_plus import pytest -from tribler.gui.utilities import compose_magnetlink, create_api_key, dict_item_is_any_of, format_api_key, \ +from tribler.gui.utilities import TranslatedString, compose_magnetlink, create_api_key, dict_item_is_any_of, \ + duration_to_string, \ + format_api_key, \ quote_plus_unicode, set_api_key, unicode_quoter @@ -154,3 +156,62 @@ def test_set_api_key(): gui_settings = MagicMock() set_api_key(gui_settings, "abcdef") gui_settings.setValue.assert_called_once_with("api_key", b"abcdef") + + +TRANSLATIONS = [ + (0, '0s'), + (61, '1m 1s'), + (3800, '1h 3m'), + (110000, '1d 6h'), + (1110000, '1w 5d'), + (91110000, '2y 46w'), + (11191110000, 'Forever'), +] + + +@pytest.mark.parametrize('seconds, translation', TRANSLATIONS) +@patch('tribler.gui.utilities.tr', new=Mock(side_effect=lambda x: x)) +def test_duration_to_string(seconds, translation): + # test if the duration_to_string function returns the correct translation for all possible formats + assert duration_to_string(seconds) == translation + + +def test_correct_translation(): + original_string = 'original %(key1)s' + translated_string = 'translated %(key1)s' + s = TranslatedString(translated_string, original_string) + assert s % {'key1': '123'} == 'translated 123' + + +@patch('tribler.gui.utilities.logger.warning') +def test_missed_key_in_translated_string(warning: Mock): + original_string = 'original %(key1)s' + translated_string = 'translated %(key2)s' + s = TranslatedString(translated_string, original_string) + + # In this test, we pass the correct param 'key1' presented in the original string but missed in the translation. + # The KeyError is intercepted, the original string is used instead of the translation, and the error is logged + # as a warning. + assert s % {'key1': '123'} == 'original 123' + + warning.assert_called_once_with('KeyError: No value provided for \'key2\' in translation "translated %(key2)s", ' + 'original string: "original %(key1)s"') + + +@patch('tribler.gui.utilities.logger.warning') +def test_missed_key_in_both_translated_and_original_strings(warning: Mock): + original_string = 'original %(key1)s' + translated_string = 'translated %(key2)s' + s = TranslatedString(translated_string, original_string) + + with pytest.raises(KeyError, match=r"^'key1'$"): + # In this test, we pass an incorrect param 'key3' for interpolation, and also, the translation + # string (with param 'key2') differs from the original string (with param 'key1'). First, + # translated string tries to interpolate params and issues a warning that 'key2' is missed. + # Then, the original string tries to interpolate params and again gets a KeyError because 'key1' + # is also missed. This second exception is propagated because the main reason for the error is + # in the outside code that passes an incorrect parameter. 
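+        # For comparison, providing values for both 'key1' and 'key2' would interpolate the
+        # translated string directly, so no fallback to the original string (and no warning)
+        # would be needed.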
+ _ = s % {'key3': '123'} + + warning.assert_called_once_with('KeyError: No value provided for \'key2\' in translation "translated %(key2)s", ' + 'original string: "original %(key1)s"') diff --git a/src/tribler/gui/tribler_app.py b/src/tribler/gui/tribler_app.py index bf889a747f9..1ba80c0b976 100644 --- a/src/tribler/gui/tribler_app.py +++ b/src/tribler/gui/tribler_app.py @@ -1,6 +1,8 @@ +import logging import os import os.path import sys +from typing import List from PyQt5.QtCore import QCoreApplication, QEvent, Qt @@ -25,6 +27,7 @@ class TriblerApplication(QtSingleApplication): def __init__(self, app_name: str, args: list, start_local_server: bool = False): QtSingleApplication.__init__(self, app_name, start_local_server, args) + self._logger = logging.getLogger(self.__class__.__name__) self.code_executor = None connect(self.message_received, self.on_app_message) @@ -62,6 +65,30 @@ def parse_sys_args(self, args): if '--tunnel-testnet' in sys.argv[1:]: os.environ['TUNNEL_TESTNET'] = "YES" + @staticmethod + def get_urls_from_sys_args() -> List[str]: + urls = [] + for arg in sys.argv[1:]: + if os.path.exists(arg) and arg.endswith(".torrent"): + urls.append((path_to_url(arg))) + elif arg.startswith('magnet'): + urls.append(arg) + return urls + + def send_torrent_file_path_to_primary_process(self): + urls_to_send = self.get_urls_from_sys_args() + if not urls_to_send: + return + + if not self.connected_to_previous_instance: + self._logger.warning("Can't send torrent url: do not have a connection to a primary process") + return + + count = len(urls_to_send) + self._logger.info(f'Sending {count} torrent file{"s" if count > 1 else ""} to a primary process') + for url in urls_to_send: + self.send_message(url) + def event(self, event): if event.type() == QEvent.FileOpen and event.file().endswith(".torrent"): uri = path_to_url(event.file()) diff --git a/src/tribler/gui/tribler_window.py b/src/tribler/gui/tribler_window.py index 04260f97a71..6c312246f91 100644 --- a/src/tribler/gui/tribler_window.py +++ b/src/tribler/gui/tribler_window.py @@ -487,7 +487,7 @@ def close_tribler_gui(): # So, we quit the GUI separately here. 
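            # The renamed stop_refreshing_downloads() stops the background refresh timer, so the
            # periodic "downloads" request is no longer scheduled during shutdown; once
            # core_manager.shutting_down is set, RequestManager.add() also drops every new request
            # except the "shutdown" endpoint.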
self.app_manager.quit_application() - self.downloads_page.stop_loading_downloads() + self.downloads_page.stop_refreshing_downloads() self.core_manager.stop(quit_app_on_core_finished=False) close_dialog = ConfirmationDialog( self.window(), @@ -546,19 +546,23 @@ def on_core_connected(self, version): return self._logger.info("Core connected") - self.tribler_started = True self.tribler_version = version + request_manager.get("settings", self.on_receive_settings, capture_errors=False) + + def on_receive_settings(self, settings): + self.tribler_settings = settings['settings'] + self.start_ui() + + def start_ui(self): self.top_menu_button.setHidden(False) self.left_menu.setHidden(False) # self.token_balance_widget.setHidden(False) # restore it after the token balance calculation is fixed self.settings_button.setHidden(False) self.add_torrent_button.setHidden(False) self.top_search_bar.setHidden(False) - - self.fetch_settings() - + self.process_uri_request() self.downloads_page.start_loading_downloads() self.setAcceptDrops(True) @@ -675,7 +679,7 @@ def perform_start_download_request( anon_hops = int(self.tribler_settings['download_defaults']['number_hops']) if anon_download else 0 safe_seeding = 1 if safe_seeding else 0 request_manager.put("downloads", - on_finish=callback if callback else self.on_download_added, + on_success=callback if callback else self.on_download_added, data={ "uri": uri, "anon_hops": anon_hops, @@ -702,8 +706,8 @@ def on_add_button_pressed(channel_id): if post_data: request_manager.put(f"channels/mychannel/{channel_id}/torrents", - on_finish=lambda _: self.tray_show_message(tr("Channel update"), - tr("Torrent(s) added to your channel")), + on_success=lambda _: self.tray_show_message(tr("Channel update"), + tr("Torrent(s) added to your channel")), data=post_data) self.window().add_to_channel_dialog.show_dialog(on_add_button_pressed, confirm_button_text="Add torrent") @@ -714,8 +718,8 @@ def on_add_button_pressed(channel_id): if post_data: request_manager.put(f"channels/mychannel/{channel_id}/torrents", - on_finish=lambda _: self.tray_show_message(tr("Channel update"), - tr("Torrent(s) added to your channel")), + on_success=lambda _: self.tray_show_message(tr("Channel update"), + tr("Torrent(s) added to your channel")), data=post_data) self.window().add_to_channel_dialog.show_dialog(on_add_button_pressed, confirm_button_text="Add torrent") @@ -739,33 +743,6 @@ def on_received_search_completions(self, completions): if completions_list: self.search_completion_model.setStringList(completions_list) - def fetch_settings(self): - request_manager.get("settings", self.received_settings, capture_errors=False) - - def received_settings(self, settings): - if not settings: - return - # If we cannot receive the settings, stop Tribler with an option to send the crash report. - if 'error' in settings: - raise RuntimeError(RequestManager.get_message_from_error(settings)) - - # If there is any pending dialog (likely download dialog or error dialog of setting not available), - # close the dialog - if self.dialog: - self.dialog.close_dialog() - self.dialog = None - - self.tribler_settings = settings['settings'] - - self.downloads_all_button.click() - - # process pending file requests (i.e. someone clicked a torrent file when Tribler was closed) - # We do this after receiving the settings so we have the default download location. 
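        # (In the reworked flow, this call now lives in start_ui(), which runs only after
        # on_receive_settings() has stored tribler_settings, so pending URI requests are still
        # processed once the default download location is available.)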
- self.process_uri_request() - - if self.token_balance_widget.isVisible(): - self.enable_token_balance_refresh() - def on_settings_button_click(self): self.deselect_all_menu_buttons() self.stackedWidget.setCurrentIndex(PAGE_SETTINGS) @@ -879,20 +856,8 @@ def on_add_torrent_browse_file(self, *_): def start_download_from_uri(self, uri): uri = uri.decode('utf-8') if isinstance(uri, bytes) else uri - if get_gui_setting(self.gui_settings, "ask_download_settings", True, is_bool=True): - # FIXME: instead of using this workaround, make sure the settings are _available_ by this moment - # If tribler settings is not available, fetch the settings and inform the user to try again. - if not self.tribler_settings: - self.fetch_settings() - self.dialog = ConfirmationDialog.show_error( - self, - tr("Download Error"), - tr("Tribler settings is not available yet. Fetching it now. Please try again later."), - ) - # By re-adding the download uri to the pending list, the request is re-processed - # when the settings is received - self.pending_uri_requests.append(uri) - return + ask_download_settings = get_gui_setting(self.gui_settings, "ask_download_settings", True, is_bool=True) + if ask_download_settings: # Clear any previous dialog if exists if self.dialog: self.dialog.close_dialog() @@ -903,15 +868,6 @@ def start_download_from_uri(self, uri): self.dialog.show() self.start_download_dialog_active = True else: - # FIXME: instead of using this workaround, make sure the settings are _available_ by this moment - # In the unlikely scenario that tribler settings are not available yet, try to fetch settings again and - # add the download uri back to self.pending_uri_requests to process again. - if not self.tribler_settings: - self.fetch_settings() - if uri not in self.pending_uri_requests: - self.pending_uri_requests.append(uri) - return - self.window().perform_start_download_request( uri, self.window().tribler_settings['download_defaults']['anonymity_enabled'], @@ -979,7 +935,7 @@ def on_add_button_pressed(channel_id): request_manager.put( endpoint=f"collections/mychannel/{channel_id}/torrents", - on_finish=lambda _: self.tray_show_message( + on_success=lambda _: self.tray_show_message( tr("Channels update"), tr("%s added to your channel") % self.chosen_dir ), data={"torrents_dir": self.chosen_dir} @@ -1114,7 +1070,7 @@ def clicked_menu_button_downloads(self): self.stackedWidget.setCurrentIndex(PAGE_DOWNLOADS) def clicked_debug_panel_button(self, *_): - if not self.tribler_settings or not self.gui_settings: + if not self.gui_settings: self._logger.info("Tribler settings (Core and/or GUI) is not available yet.") return if not self.debug_window: @@ -1153,7 +1109,7 @@ def show_force_shutdown(): self.app_manager.quit_application() self.core_manager.stop() - self.downloads_page.stop_loading_downloads() + self.downloads_page.stop_refreshing_downloads() request_manager.clear() # Stop the token balance timer diff --git a/src/tribler/gui/upgrade_manager.py b/src/tribler/gui/upgrade_manager.py index b746855c162..1ddb8a1b9e6 100644 --- a/src/tribler/gui/upgrade_manager.py +++ b/src/tribler/gui/upgrade_manager.py @@ -5,7 +5,7 @@ from typing import List, Optional, TYPE_CHECKING from PyQt5.QtCore import QObject, QThread, pyqtSignal -from PyQt5.QtWidgets import QMessageBox +from PyQt5.QtWidgets import QApplication, QMessageBox from tribler.core.components.key.key_component import KeyComponent from tribler.core.config.tribler_config import TriblerConfig @@ -27,6 +27,7 @@ class StateDirUpgradeWorker(QObject): def __init__(self, 
version_history: VersionHistory): super().__init__() + self.logger = logging.getLogger(self.__class__.__name__) self.version_history = version_history self._upgrade_interrupted = False connect(self.stop_upgrade, self._stop_upgrade) @@ -42,21 +43,22 @@ def _update_status_callback(self, text): def run(self): try: + self.logger.info('Run') self.upgrade_state_dir( self.version_history, update_status_callback=self._update_status_callback, interrupt_upgrade_event=self.upgrade_interrupted, ) except Exception as exc: # pylint: disable=broad-except + self.logger.exception(exc) self.finished.emit(exc) else: + self.logger.info('Finished') self.finished.emit(None) - @staticmethod - def upgrade_state_dir(version_history: VersionHistory, - update_status_callback=None, + def upgrade_state_dir(self, version_history: VersionHistory, update_status_callback=None, interrupt_upgrade_event=None): - logging.info('Upgrade state dir') + self.logger.info(f'Upgrade state dir for {version_history}') # Before any upgrade, prepare a separate state directory for the update version so it does not # affect the older version state directory. This allows for safe rollback. version_history.fork_state_directory_if_necessary() @@ -88,11 +90,12 @@ class UpgradeManager(QObject): upgrader_tick = pyqtSignal(str) upgrader_finished = pyqtSignal() - def __init__(self, version_history: VersionHistory): + def __init__(self, version_history: VersionHistory, last_supported_version: str = '7.5'): QObject.__init__(self, None) self._logger = logging.getLogger(self.__class__.__name__) + self.last_supported_version = last_supported_version self.version_history = version_history self.new_version_dialog_postponed: bool = False self.dialog: Optional[ConfirmationDialog] = None @@ -130,25 +133,27 @@ def on_button_clicked(click_result: int): connect(self.dialog.button_clicked, on_button_clicked) self.dialog.show() - def _show_question_box(self, title, body, additional_text, default_button=None): + @staticmethod + def _show_message_box(title, body, icon, standard_buttons, default_button, additional_text=''): message_box = QMessageBox() - message_box.setIcon(QMessageBox.Question) + message_box.setIcon(icon) message_box.setWindowTitle(title) message_box.setText(body) message_box.setInformativeText(additional_text) - message_box.setStandardButtons(QMessageBox.No | QMessageBox.Yes) - if default_button: - message_box.setDefaultButton(default_button) + message_box.setStandardButtons(standard_buttons) + message_box.setDefaultButton(default_button) return message_box.exec_() def should_cleanup_old_versions(self) -> List[TriblerVersion]: - self._logger.info('Getting old versions...') + self._logger.info('Should cleanup old versions') if self.version_history.last_run_version == self.version_history.code_version: + self._logger.info('Last run version is the same as the current version. Exit cleanup procedure.') return [] disposable_versions = self.version_history.get_disposable_versions(skip_versions=2) if not disposable_versions: + self._logger.info('No disposable versions. Exit cleanup procedure.') return [] storage_info = "" @@ -157,9 +162,9 @@ def should_cleanup_old_versions(self) -> List[TriblerVersion]: state_size = version.calc_state_size() claimable_storage += state_size storage_info += f"{version.version_str} \t {format_size(state_size)}\n" - + self._logger.info(f'Storage info: {storage_info}') # Show a question to the user asking if the user wants to remove the old data. - title = "Delete state directories for old versions?" 
+ title = tr("Delete state directories for old versions?") message_body = tr( "Press 'Yes' to remove state directories for older versions of Tribler " "and reclaim %s of storage space. " @@ -169,13 +174,37 @@ def should_cleanup_old_versions(self) -> List[TriblerVersion]: "You will be able to remove those directories from the Settings->Data page later." ) % format_size(claimable_storage) - user_choice = self._show_question_box(title, message_body, storage_info, default_button=QMessageBox.Yes) + user_choice = self._show_message_box( + title, + message_body, + additional_text=storage_info, + icon=QMessageBox.Question, + standard_buttons=QMessageBox.No | QMessageBox.Yes, + default_button=QMessageBox.Yes + ) if user_choice == QMessageBox.Yes: + self._logger.info('User decided to delete old versions. Start cleanup procedure.') return disposable_versions return [] def start(self): self._logger.info('Start upgrade process') + last_version = self.version_history.last_run_version + if last_version and last_version.is_ancient(self.last_supported_version): + self._logger.info('Ancient version detected. Quitting Tribler.') + self._show_message_box( + tr("Ancient version detected"), + body=tr("You are running an old version of Tribler. " + "It is not possible to upgrade from this version to the most recent one." + "Please do upgrade incrementally (download Tribler 7.10, upgrade, " + "then download the most recent one, upgrade)."), + icon=QMessageBox.Warning, + standard_buttons=QMessageBox.Yes, + default_button=QMessageBox.Yes + ) + QApplication.quit() + return + versions_to_delete = self.should_cleanup_old_versions() if versions_to_delete: for version in versions_to_delete: diff --git a/src/tribler/gui/utilities.py b/src/tribler/gui/utilities.py index 0cb5f34fed8..e9c2e1d7af7 100644 --- a/src/tribler/gui/utilities.py +++ b/src/tribler/gui/utilities.py @@ -36,8 +36,30 @@ NUM_VOTES_BARS = 8 +class TranslatedString(str): + """ This class is used to wrap translated strings to be able to log untranslated strings in case of errors. + Thanks to this class no `KeyError` exceptions are raised when a translation is missing. 
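+    For example, `TranslatedString('translated %(key2)s', 'original %(key1)s') % {'key1': 1}`
+    logs a warning about the missing 'key2' and falls back to the original string, returning
+    'original 1'.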
+ """ + + def __new__(cls, translation, original_string): # pylint: disable=unused-argument + return super().__new__(cls, translation) + + def __init__(self, translation: str, original_string: str): # pylint: disable=unused-argument + super().__init__() + self.original_string = original_string + + def __mod__(self, other): + try: + return str.__mod__(self, other) + except KeyError as e: + msg = f'No value provided for {e} in translation "{self}", original string: "{self.original_string}"' + logger.warning(f'{type(e).__name__}: {msg}') + return self.original_string % other + + def tr(key): - return f"{QCoreApplication.translate('@default', key)}" + translated_string = QCoreApplication.translate('@default', key) + return TranslatedString(translated_string, original_string=key) VOTES_RATING_DESCRIPTIONS = ( diff --git a/src/tribler/gui/widgets/channelcontentswidget.py b/src/tribler/gui/widgets/channelcontentswidget.py index 398f7926cb2..273ae539de0 100644 --- a/src/tribler/gui/widgets/channelcontentswidget.py +++ b/src/tribler/gui/widgets/channelcontentswidget.py @@ -130,7 +130,7 @@ def on_channel_committed(self, response): self.update_labels() def commit_channels(self, checked=False): # pylint: disable=W0613 - request_manager.post("channels/mychannel/0/commit", on_finish=self.on_channel_committed) + request_manager.post("channels/mychannel/0/commit", on_success=self.on_channel_committed) def initialize_content_page( self, @@ -574,8 +574,8 @@ def _on_torrent_to_channel_added(self, result): def _add_torrent_request(self, data): channel_id = self.model.channel_info["id"] - request_manager.put(f'collections/mychannel/{channel_id}/torrents', on_finish=self._on_torrent_to_channel_added, - data=data) + request_manager.put(f'collections/mychannel/{channel_id}/torrents', + on_success=self._on_torrent_to_channel_added, data=data) def add_torrent_to_channel(self, filename): with open(filename, "rb") as torrent_file: diff --git a/src/tribler/gui/widgets/channeldescriptionwidget.py b/src/tribler/gui/widgets/channeldescriptionwidget.py index 66ea8376ba4..1121c6cc8f6 100644 --- a/src/tribler/gui/widgets/channeldescriptionwidget.py +++ b/src/tribler/gui/widgets/channeldescriptionwidget.py @@ -144,7 +144,7 @@ def on_save_button_clicked(self): if self.description_text is not None: descr_changed = True request_manager.put(f'channels/{self.channel_pk}/{self.channel_id}/description', - on_finish=self._on_description_received, + on_success=self._on_description_received, data={"description_text": self.description_text}) if self.channel_thumbnail_bytes is not None: @@ -154,7 +154,7 @@ def _on_thumbnail_updated(_): pass request_manager.put(f'channels/{self.channel_pk}/{self.channel_id}/thumbnail', - on_finish=_on_thumbnail_updated, + on_success=_on_thumbnail_updated, data=self.channel_thumbnail_bytes, raw_response=True) @@ -232,7 +232,7 @@ def _on_description_received(self, result): self.description_text_preview.setMarkdown("") request_manager.get(f'channels/{self.channel_pk}/{self.channel_id}/thumbnail', - on_finish=self._on_thumbnail_received, + on_success=self._on_thumbnail_received, raw_response=True) def set_widget_visible(self, show): diff --git a/src/tribler/gui/widgets/downloadsdetailstabwidget.py b/src/tribler/gui/widgets/downloadsdetailstabwidget.py index 02c9e9fc698..702fd2b4362 100644 --- a/src/tribler/gui/widgets/downloadsdetailstabwidget.py +++ b/src/tribler/gui/widgets/downloadsdetailstabwidget.py @@ -1,4 +1,6 @@ +from enum import IntEnum from pathlib import PurePosixPath +from typing import Dict, 
Optional from PyQt5.QtCore import QTimer, Qt from PyQt5.QtWidgets import QTabWidget, QTreeWidgetItem @@ -15,6 +17,13 @@ PROGRESS_BAR_DRAW_LIMIT = 0 # Don't draw progress bars for files in torrents that have more than this many files +class DownloadDetailsTabs(IntEnum): + DETAILS = 0 + FILES = 1 + TRACKERS = 2 + PEERS = 3 + + def convert_to_files_tree_format(download_info): files = download_info['files'] out = [] @@ -45,7 +54,7 @@ class DownloadsDetailsTabWidget(QTabWidget): def __init__(self, parent): QTabWidget.__init__(self, parent) - self.current_download = None + self.current_download: Optional[Dict] = None self.selected_files_info = [] # This timer is used to apply files selection changes in batches, to avoid multiple requests to the Core diff --git a/src/tribler/gui/widgets/downloadspage.py b/src/tribler/gui/widgets/downloadspage.py index 3acb1cd1a29..0a10f891ec1 100644 --- a/src/tribler/gui/widgets/downloadspage.py +++ b/src/tribler/gui/widgets/downloadspage.py @@ -1,13 +1,13 @@ import logging import os -import time -from typing import Optional, Tuple +from typing import List, Optional, Tuple from PyQt5.QtCore import QTimer, QUrl, Qt, pyqtSignal from PyQt5.QtGui import QDesktopServices from PyQt5.QtNetwork import QNetworkRequest from PyQt5.QtWidgets import QAbstractItemView, QAction, QFileDialog, QWidget +from tribler.core.sentry_reporter.sentry_tools import get_first_item from tribler.core.utilities.simpledefs import DownloadStatus from tribler.gui.defs import ( BUTTON_TYPE_CONFIRM, @@ -20,13 +20,19 @@ DOWNLOADS_FILTER_DOWNLOADING, DOWNLOADS_FILTER_INACTIVE, ) from tribler.gui.dialogs.confirmationdialog import ConfirmationDialog +from tribler.gui.network.request import REQUEST_ID from tribler.gui.network.request_manager import request_manager from tribler.gui.sentry_mixin import AddBreadcrumbOnShowMixin from tribler.gui.tribler_action_menu import TriblerActionMenu from tribler.gui.utilities import compose_magnetlink, connect, format_speed, tr +from tribler.gui.widgets.downloadsdetailstabwidget import DownloadDetailsTabs from tribler.gui.widgets.downloadwidgetitem import DownloadWidgetItem, LoadingDownloadWidgetItem from tribler.gui.widgets.loading_list_item import LoadingListItem +REFRESH_DOWNLOADS_SOON_INTERVAL_MSEC = 10 # 0.01s +REFRESH_DOWNLOADS_UI_CHANGE_INTERVAL_MSEC = 2000 # 2s +REFRESH_DOWNLOADS_BACKGROUND_INTERVAL_MSEC = 5000 # 5s + button_name2filter = { "downloads_all_button": DOWNLOADS_FILTER_ALL, "downloads_downloading_button": DOWNLOADS_FILTER_DOWNLOADING, @@ -47,31 +53,34 @@ class DownloadsPage(AddBreadcrumbOnShowMixin, QWidget): received_downloads = pyqtSignal(object) def __init__(self): - QWidget.__init__(self) + super().__init__() self._logger = logging.getLogger(self.__class__.__name__) self.export_dir = None self.filter = DOWNLOADS_FILTER_ALL self.download_widgets = {} # key: infohash, value: QTreeWidgetItem self.downloads = None - self.downloads_timer = QTimer() - self.downloads_timeout_timer = QTimer() + self.background_refresh_downloads_timer = QTimer() self.downloads_last_update = 0 - self.selected_items = [] + self.selected_items: List[DownloadWidgetItem] = [] self.dialog = None self.loading_message_widget: Optional[LoadingDownloadWidgetItem] = None self.loading_list_item: Optional[LoadingListItem] = None self.total_download = 0 self.total_upload = 0 - self.rest_request = None + # Used to keep track of the last processed request with a purpose of ignoring old requests + self.last_processed_request_id = 0 def showEvent(self, QShowEvent): """ When the 
downloads tab is clicked, we want to update the downloads list immediately. """ super().showEvent(QShowEvent) - self.stop_loading_downloads() - self.schedule_downloads_timer(True) + self.schedule_downloads_refresh(REFRESH_DOWNLOADS_SOON_INTERVAL_MSEC) + + def hideEvent(self, QHideEvent): + super().hideEvent(QHideEvent) + self.stop_refreshing_downloads() def initialize_downloads_page(self): self.window().downloads_tab.initialize() @@ -81,7 +90,7 @@ def initialize_downloads_page(self): connect(self.window().stop_download_button.clicked, self.on_stop_download_clicked) connect(self.window().remove_download_button.clicked, self.on_remove_download_clicked) - connect(self.window().downloads_list.itemSelectionChanged, self.update_downloads) + connect(self.window().downloads_list.itemSelectionChanged, self.on_selection_change) connect(self.window().downloads_list.customContextMenuRequested, self.on_right_click_item) @@ -93,10 +102,8 @@ def initialize_downloads_page(self): self.window().downloads_list.header().setSortIndicator(12, Qt.AscendingOrder) self.window().downloads_list.header().resizeSection(12, 146) - self.downloads_timeout_timer.setSingleShot(True) - self.downloads_timer.setSingleShot(True) - connect(self.downloads_timer.timeout, self.load_downloads) - connect(self.downloads_timeout_timer.timeout, self.on_downloads_request_timeout) + self.background_refresh_downloads_timer.setSingleShot(True) + connect(self.background_refresh_downloads_timer.timeout, self.on_background_refresh_downloads_timer) def on_filter_text_changed(self, text): self.window().downloads_list.clearSelection() @@ -109,43 +116,49 @@ def start_loading_downloads(self): self.loading_list_item = LoadingListItem(self.window().downloads_list) self.window().downloads_list.addTopLevelItem(self.loading_message_widget) self.window().downloads_list.setItemWidget(self.loading_message_widget, 2, self.loading_list_item) - self.schedule_downloads_timer(now=True) - - def schedule_downloads_timer(self, now=False): - self.downloads_timer.start(0 if now else 1000) - self.downloads_timeout_timer.start(16000) - - def on_downloads_request_timeout(self): - if self.rest_request: - self.rest_request.cancel() - self.schedule_downloads_timer() - - def stop_loading_downloads(self): - self.downloads_timer.stop() - self.downloads_timeout_timer.stop() - - def load_downloads(self): - url = "downloads?get_pieces=1" - if self.window().download_details_widget.currentIndex() == 3: - url += "&get_peers=1" - elif self.window().download_details_widget.currentIndex() == 1: - url += "&get_files=1" - - isactive = not self.isHidden() - - if isactive or (time.time() - self.downloads_last_update > 30): - # Update if the downloads page is visible or if we haven't updated for longer than 30 seconds - self.downloads_last_update = time.time() - priority = QNetworkRequest.LowPriority if not isactive else QNetworkRequest.HighPriority - if self.rest_request: - self.rest_request.cancel() - request_manager.get(url, self.on_received_downloads, priority=priority) - - def on_received_downloads(self, downloads): - if not downloads or "downloads" not in downloads: + + def schedule_downloads_refresh(self, interval_msec=REFRESH_DOWNLOADS_BACKGROUND_INTERVAL_MSEC): + timer = self.background_refresh_downloads_timer + remaining = timer.remainingTime() + if timer.isActive(): + interval_msec = min(remaining, interval_msec) + timer.start(interval_msec) + + def on_background_refresh_downloads_timer(self): + self.refresh_downloads() + self.schedule_downloads_refresh() + + def 
stop_refreshing_downloads(self):
+        self.background_refresh_downloads_timer.stop()
+
+    def refresh_downloads(self):
+        index = self.window().download_details_widget.currentIndex()
+
+        url_params = {'get_pieces': 1}
+        if index == DownloadDetailsTabs.PEERS:
+            url_params['get_peers'] = 1
+        elif index == DownloadDetailsTabs.FILES:
+            url_params['get_files'] = 1
+
+        request_manager.get(
+            endpoint="downloads",
+            url_params=url_params,
+            on_success=self.on_received_downloads,
+        )
+
+    def on_received_downloads(self, result):
+        if not result or "downloads" not in result: return  # This might happen when closing Tribler
+        request_id = result[REQUEST_ID]
+        if self.last_processed_request_id >= request_id:
+            # This is an old request, ignore it.
+            # It could happen because some of requests processed a bit longer than others
+            msg = f'Ignoring old request {request_id} (last processed request id: {self.last_processed_request_id})'
+            self._logger.warning(msg)
+            return
+        self.last_processed_request_id = request_id
 
-        checkpoints = downloads.get('checkpoints', {})
+        checkpoints = result.get('checkpoints', {})
         if checkpoints and self.loading_message_widget:
             # If not all checkpoints are loaded, display the number of the loaded checkpoints
             total = checkpoints['total']
@@ -155,7 +168,7 @@ def on_received_downloads(self, downloads):
             message = f'{loaded}/{total} checkpoints'
             self._logger.info(f'Loading checkpoints: {message}')
             self.loading_list_item.textlabel.setText(message)
-            self.schedule_downloads_timer()
+            self.schedule_downloads_refresh()
             return
 
         loading_widget_index = self.window().downloads_list.indexOfTopLevelItem(self.loading_message_widget)
@@ -163,7 +176,7 @@ def on_received_downloads(self, downloads):
             self.window().downloads_list.takeTopLevelItem(loading_widget_index)
             self.window().downloads_list.setSelectionMode(QAbstractItemView.ExtendedSelection)
 
-        self.downloads = downloads
+        self.downloads = result
 
         self.total_download = 0
         self.total_upload = 0
@@ -171,7 +184,7 @@ def on_received_downloads(self, downloads):
         download_infohashes = set()
         items = []
-        for download in downloads["downloads"]:
+        for download in result["downloads"]:
             # Update download progress information for torrents in the Channels GUI.
             # We skip updating progress information for ChannelTorrents because otherwise it interferes
             # with channel processing progress updates
@@ -215,13 +228,9 @@ def on_received_downloads(self, downloads):
             f"Down: {format_speed(self.total_download)}, Up: {format_speed(self.total_upload)}"
         )
         self.update_download_visibility()
-        self.schedule_downloads_timer()
+        self.refresh_top_panel()
 
-        # Update the top download management button if we have a row selected
-        if len(self.window().downloads_list.selectedItems()) > 0:
-            self.update_downloads()
-
-        self.received_downloads.emit(downloads)
+        self.received_downloads.emit(result)
 
     def update_download_visibility(self):
         for i in range(self.window().downloads_list.topLevelItemCount()):
@@ -260,58 +269,65 @@ def force_recheck_download_enabled(download_widgets):
         recheck = {DownloadStatus.METADATA, DownloadStatus.HASHCHECKING, DownloadStatus.WAITING_FOR_HASHCHECK}
         return any(dw.get_status() not in recheck for dw in download_widgets)
 
-    def update_downloads(self):
-        selected = self.window().downloads_list.selectedItems()
-        selected_count = len(selected)
-        if selected_count == 0:
+    def on_selection_change(self):
+        self.selected_items = self.window().downloads_list.selectedItems()
+
+        # refresh bottom detailed info panel
+        if len(self.selected_items) == 1:
+            self.window().download_details_widget.update_with_download(self.selected_items[0].download_info)
+            self.window().download_details_widget.show()
+        else:
+            self.window().download_details_widget.hide()
+
+        self.refresh_top_panel()
+
+    def refresh_top_panel(self):
+        if len(self.selected_items) == 0:
             self.window().remove_download_button.setEnabled(False)
             self.window().start_download_button.setEnabled(False)
             self.window().stop_download_button.setEnabled(False)
-            self.window().download_details_widget.hide()
             return
 
-        self.selected_items = selected
         self.window().remove_download_button.setEnabled(True)
         self.window().start_download_button.setEnabled(DownloadsPage.start_download_enabled(self.selected_items))
         self.window().stop_download_button.setEnabled(DownloadsPage.stop_download_enabled(self.selected_items))
 
-        if selected_count == 1:
-            self.window().download_details_widget.update_with_download(selected[0].download_info)
-            self.window().download_details_widget.show()
-        else:
-            self.window().download_details_widget.hide()
-
     def on_start_download_clicked(self, checked):
-        for selected_item in self.selected_items:
-            infohash = selected_item.download_info["infohash"]
-            request_manager.patch(f"downloads/{infohash}", self.on_download_resumed, data={"state": "resume"})
+        for item in self.selected_items:
+            request_manager.patch(
+                f"downloads/{item.infohash}",
+                on_success=self.on_download_resumed,
+                data={"state": "resume"}
+            )
+
+    def find_item_in_selected(self, infohash) -> Optional[DownloadWidgetItem]:
+        return next((it for it in self.selected_items if it.infohash == infohash), None)
 
     def on_download_resumed(self, json_result):
         if not json_result or 'modified' not in json_result:
             return
-        for selected_item in self.selected_items:
-            if selected_item.download_info["infohash"] != json_result["infohash"]:
-                continue
+        self.schedule_downloads_refresh(REFRESH_DOWNLOADS_UI_CHANGE_INTERVAL_MSEC)
 
-            selected_item.update_item()
-            self.update_downloads()
+        if item := self.find_item_in_selected(json_result["infohash"]):
+            item.update_item()
 
     def on_stop_download_clicked(self, checked):
-        for selected_item in self.selected_items:
-            infohash = selected_item.download_info["infohash"]
-            request_manager.patch(f"downloads/{infohash}", self.on_download_stopped, data={"state": "stop"})
+        for item in self.selected_items:
+            request_manager.patch(
+                f"downloads/{item.infohash}",
+                on_success=self.on_download_stopped,
+                data={"state": "stop"}
+            )
 
     def on_download_stopped(self, json_result):
         if not json_result or "modified" not in json_result:
             return
-        for selected_item in self.selected_items:
-            if selected_item.download_info["infohash"] != json_result["infohash"]:
-                continue
+        self.schedule_downloads_refresh(REFRESH_DOWNLOADS_UI_CHANGE_INTERVAL_MSEC)
 
-            selected_item.update_item()
-            self.update_downloads()
+        if item := self.find_item_in_selected(json_result["infohash"]):
+            item.update_item()
 
     def on_remove_download_clicked(self, checked):
         self.dialog = ConfirmationDialog(
@@ -329,46 +345,56 @@ def on_remove_download_clicked(self, checked):
 
     def on_remove_download_dialog(self, action):
         if action != 2:
-            for selected_item in self.selected_items:
-                infohash = selected_item.download_info["infohash"]
+            for item in self.selected_items:
                 current_download = self.window().download_details_widget.current_download
-                if current_download and current_download.get(infohash) == infohash:
+                if current_download and current_download.get('infohash') == item.infohash:
                     self.window().download_details_widget.current_download = None
-                request_manager.delete(f"downloads/{infohash}", self.on_download_removed,
-                                       data={"remove_data": bool(action)})
+                request_manager.delete(
+                    f"downloads/{item.infohash}",
+                    on_success=self.on_download_removed,
+                    data={"remove_data": bool(action)}
+                )
+
         if self.dialog:
             self.dialog.close_dialog()
             self.dialog = None
 
     def on_download_removed(self, json_result):
         if json_result and "removed" in json_result:
-            self.load_downloads()
+            self.schedule_downloads_refresh(REFRESH_DOWNLOADS_SOON_INTERVAL_MSEC)
             self.window().download_details_widget.hide()
             self.window().core_manager.events_manager.node_info_updated.emit(
                 {"infohash": json_result["infohash"], "progress": None}
             )
 
     def on_force_recheck_download(self, checked):
-        for selected_item in self.selected_items:
-            infohash = selected_item.download_info["infohash"]
-            request_manager.patch(f"downloads/{infohash}", self.on_forced_recheck, data={"state": "recheck"})
+        for item in self.selected_items:
+            request_manager.patch(
+                f"downloads/{item.infohash}",
+                on_success=self.on_forced_recheck,
+                data={"state": "recheck"}
+            )
 
     def on_forced_recheck(self, result):
-        if result and "modified" in result:
-            for selected_item in self.selected_items:
-                if selected_item.download_info["infohash"] == result["infohash"]:
-                    selected_item.download_info['status'] = DownloadStatus.HASHCHECKING.name
-                    selected_item.update_item()
-                    self.update_downloads()
+        if not result or "modified" not in result:
+            return
+
+        self.schedule_downloads_refresh(REFRESH_DOWNLOADS_UI_CHANGE_INTERVAL_MSEC)
+
+        if item := self.find_item_in_selected(result["infohash"]):
+            item.update_item()
 
     def on_change_anonymity(self, result):
         pass
 
     def change_anonymity(self, hops):
-        for selected_item in self.selected_items:
-            infohash = selected_item.download_info["infohash"]
-            request_manager.patch(f"downloads/{infohash}", self.on_change_anonymity, data={"anon_hops": hops})
+        for item in self.selected_items:
+            request_manager.patch(
+                f"downloads/{item.infohash}",
+                on_success=self.on_change_anonymity,
+                data={"anon_hops": hops}
+            )
 
     def on_explore_files(self, checked):
         # ACHTUNG! To whomever might stumble upon here intending to debug the case
@@ -394,12 +420,14 @@ def on_move_files(self, checked):
         if not dest_dir:
             return
 
-        _infohash = self.selected_items[0].download_info["infohash"]
+        _infohash = self.selected_items[0].infohash
         _name = self.selected_items[0].download_info["name"]
 
-        request_manager.patch(f"downloads/{_infohash}",
-                              on_finish=lambda res: self.on_files_moved(res, _name, dest_dir),
-                              data={"state": "move_storage", "dest_dir": dest_dir})
+        request_manager.patch(
+            f"downloads/{_infohash}",
+            on_success=lambda res: self.on_files_moved(res, _name, dest_dir),
+            data={"state": "move_storage", "dest_dir": dest_dir}
+        )
 
     def on_files_moved(self, response, name, dest_dir):
         if "modified" in response and response["modified"]:
@@ -428,15 +456,19 @@ def on_export_download(self, checked):
         self.dialog.show()
 
     def on_export_download_dialog_done(self, action):
-        def on_finish(result: Tuple):
+        def on_success(result: Tuple):
            data, _ = result
            self.on_export_download_request_done(filename, data)
 
-        selected_item = self.selected_items[:1]
-        if action == 0 and selected_item:
+        item = get_first_item(self.selected_items)
+        if action == 0 and item:
             filename = self.dialog.dialog_widget.dialog_input.text()
-            request_manager.get(f"downloads/{selected_item[0].download_info['infohash']}/torrent",
-                                on_finish, priority=QNetworkRequest.LowPriority, raw_response=True)
+            request_manager.get(
+                f"downloads/{item.infohash}/torrent",
+                on_success=on_success,
+                priority=QNetworkRequest.LowPriority,
+                raw_response=True
+            )
 
         self.dialog.close_dialog()
         self.dialog = None
@@ -460,14 +492,17 @@ def on_export_download_request_done(self, filename, data):
 
     def on_add_to_channel(self, checked):
         def on_add_button_pressed(channel_id):
-            for selected_item in self.selected_items:
-                infohash = selected_item.download_info["infohash"]
-                name = selected_item.download_info["name"]
-                request_manager.put(f"channels/mychannel/{channel_id}/torrents",
-                                    on_finish=lambda _: self.window().tray_show_message(
-                                        tr("Channel update"), tr("Torrent(s) added to your channel")
-                                    ),
-                                    data={"uri": compose_magnetlink(infohash, name=name)})
+            for item in self.selected_items:
+                infohash = item.infohash
+                name = item.download_info["name"]
+                request_manager.put(
+                    f"channels/mychannel/{channel_id}/torrents",
+                    on_success=lambda _: self.window().tray_show_message(
+                        tr("Channel update"),
+                        tr("Torrent(s) added to your channel")
+                    ),
+                    data={"uri": compose_magnetlink(infohash, name=name)}
+                )
 
         self.window().add_to_channel_dialog.show_dialog(on_add_button_pressed, confirm_button_text=tr("Add torrent(s)"))
diff --git a/src/tribler/gui/widgets/downloadwidgetitem.py b/src/tribler/gui/widgets/downloadwidgetitem.py
index e05eae9ddd1..eb44ee12fec 100644
--- a/src/tribler/gui/widgets/downloadwidgetitem.py
+++ b/src/tribler/gui/widgets/downloadwidgetitem.py
@@ -61,11 +61,13 @@ class DownloadWidgetItem(QTreeWidgetItem):
     def __init__(self):
         QTreeWidgetItem.__init__(self)
         self.download_info: Optional[Dict] = None
+        self.infohash: Optional[str] = None
         self._logger = logging.getLogger('TriblerGUI')
         self.bar_container, self.progress_slider = create_progress_bar_widget()
 
     def update_with_download(self, download: Dict):
         self.download_info = download
+        self.infohash = download["infohash"]
         self.update_item()
 
     def get_status(self) -> DownloadStatus:
diff --git a/src/tribler/gui/widgets/lazytableview.py b/src/tribler/gui/widgets/lazytableview.py
index 9b3167c76e1..15ca7e49024 100644
--- a/src/tribler/gui/widgets/lazytableview.py
+++ b/src/tribler/gui/widgets/lazytableview.py
@@ -279,7 +279,9 @@ def on_metadata_edited(self, index, statements: List[Dict]):
         self.edited_metadata.emit(data_item)
 
     def save_edited_metadata(self, index: QModelIndex, statements: List[Dict]):
+        def on_success(_):
+            self.on_metadata_edited(index, statements)
+
         data_item = self.model().data_items[index.row()]
-        request_manager.patch(f"knowledge/{data_item['infohash']}",
-                              on_finish=lambda _, ind=index, stmts=statements: self.on_metadata_edited(ind, statements),
+        request_manager.patch(f"knowledge/{data_item['infohash']}", on_success=on_success,
                               data=json.dumps({"statements": statements}))
diff --git a/src/tribler/gui/widgets/settingspage.py b/src/tribler/gui/widgets/settingspage.py
index 45cabe9f854..1cc9792b682 100644
--- a/src/tribler/gui/widgets/settingspage.py
+++ b/src/tribler/gui/widgets/settingspage.py
@@ -5,6 +5,7 @@
 from PyQt5.QtWidgets import QCheckBox, QFileDialog, QMessageBox, QSizePolicy, QWidget
 
 from tribler.core.utilities.osutils import get_root_state_directory
+from tribler.core.utilities.path_util import Path
 from tribler.core.utilities.simpledefs import MAX_LIBTORRENT_RATE_LIMIT
 from tribler.gui.defs import (
     DARWIN,
@@ -245,8 +246,8 @@ def initialize_with_settings(self, settings):
         self.window().settings_stacked_widget.setCurrentIndex(0)
 
     def _version_dir_checkbox(self, state_dir, enabled=True):
-        dir_size = sum(f.stat().st_size for f in state_dir.glob('**/*'))
-        text = f"{state_dir} {format_size(dir_size)}"
+        path = Path(state_dir)
+        text = f"{state_dir} {format_size(path.size())}"
         checkbox = QCheckBox(text)
         checkbox.setEnabled(enabled)
         return checkbox
@@ -556,6 +557,9 @@ def on_settings_saved(self, data):
         self.save_language_selection()
         self.window().tray_show_message(tr("Tribler settings"), tr("Settings saved"))
 
-        self.window().fetch_settings()
+        def on_receive_settings(response):
+            self.window().tribler_settings = response['settings']
+
+        request_manager.get("settings", on_receive_settings, capture_errors=False)
 
         self.settings_edited.emit()
diff --git a/src/tribler/gui/widgets/tablecontentmodel.py b/src/tribler/gui/widgets/tablecontentmodel.py
index a9ad05513a6..e91c48b6f37 100644
--- a/src/tribler/gui/widgets/tablecontentmodel.py
+++ b/src/tribler/gui/widgets/tablecontentmodel.py
@@ -31,7 +31,7 @@ class Column(Enum):
     NAME = auto()
     SIZE = auto()
     HEALTH = auto()
-    UPDATED = auto()
+    CREATED = auto()
     VOTES = auto()
     STATUS = auto()
     STATE = auto()
@@ -62,7 +62,7 @@ def define_columns():
                            display_filter=lambda data: (format_size(float(data)) if data != "" else "")),
         Column.HEALTH: d('health', tr("Health"), width=120, tooltip_filter=lambda data: f"{data}" + (
             '' if data == HEALTH_CHECKING else '\n(Click to recheck)'), ),
-        Column.UPDATED: d('updated', tr("Updated"), width=120, display_filter=lambda timestamp: pretty_date(
+        Column.CREATED: d('created', tr("Created"), width=120, display_filter=lambda timestamp: pretty_date(
             timestamp) if timestamp and timestamp > BITTORRENT_BIRTHDAY else "", ),
         Column.VOTES: d('votes', tr("Popularity"), width=120, display_filter=format_votes,
                         tooltip_filter=lambda data: get_votes_rating_description(data) if data is not None else None, ),
@@ -407,7 +407,7 @@ def on_query_results(self, response, remote=False, on_top=False):
 
 
 class ChannelContentModel(RemoteTableModel):
-    columns_shown = (Column.ACTIONS, Column.CATEGORY, Column.NAME, Column.SIZE, Column.HEALTH, Column.UPDATED)
+    columns_shown = (Column.ACTIONS, Column.CATEGORY, Column.NAME, Column.SIZE, Column.HEALTH, Column.CREATED)
 
     def __init__(
         self,
@@ -460,7 +460,8 @@ def endpoint_url(self):
 
     def headerData(self, num, orientation, role=None):
         if orientation == Qt.Horizontal and role == Qt.DisplayRole:
-            return self.columns[num].header
+            header_text = self.columns[num].header
+            return str(header_text)  # convert TranslatedString to str as Qt can't handle str subclasses here
         if role == Qt.InitialSortOrderRole and num != self.column_position.get(Column.NAME):
             return Qt.DescendingOrder
         if role == Qt.TextAlignmentRole:
@@ -705,7 +706,7 @@ def show_remote_results(self):
 
 
 class PopularTorrentsModel(ChannelContentModel):
-    columns_shown = (Column.CATEGORY, Column.NAME, Column.SIZE, Column.UPDATED)
+    columns_shown = (Column.CATEGORY, Column.NAME, Column.SIZE, Column.CREATED)
 
     def __init__(self, *args, **kwargs):
         kwargs["endpoint_url"] = 'channels/popular_torrents'
@@ -713,7 +714,7 @@ def __init__(self, *args, **kwargs):
 
 
 class DiscoveredChannelsModel(ChannelContentModel):
-    columns_shown = (Column.SUBSCRIBED, Column.NAME, Column.STATE, Column.TORRENTS, Column.VOTES, Column.UPDATED)
+    columns_shown = (Column.SUBSCRIBED, Column.NAME, Column.STATE, Column.TORRENTS, Column.VOTES, Column.CREATED)
 
     @property
     def default_sort_column(self):
@@ -732,7 +733,7 @@ class PersonalChannelsModel(ChannelContentModel):
         Column.NAME,
         Column.SIZE,
         Column.HEALTH,
-        Column.UPDATED,
+        Column.CREATED,
         Column.STATUS,
     )
@@ -791,7 +792,7 @@ def edit_enabled(self):
 
 
 class SimplifiedPersonalChannelsModel(PersonalChannelsModel):
-    columns_shown = (Column.ACTIONS, Column.CATEGORY, Column.NAME, Column.SIZE, Column.HEALTH, Column.UPDATED)
+    columns_shown = (Column.ACTIONS, Column.CATEGORY, Column.NAME, Column.SIZE, Column.HEALTH, Column.CREATED)
 
     def __init__(self, *args, **kwargs):
         kwargs["exclude_deleted"] = kwargs.get("exclude_deleted", True)
diff --git a/src/tribler/gui/widgets/triblertablecontrollers.py b/src/tribler/gui/widgets/triblertablecontrollers.py
index 41219a18316..475c4ec1604 100644
--- a/src/tribler/gui/widgets/triblertablecontrollers.py
+++ b/src/tribler/gui/widgets/triblertablecontrollers.py
@@ -241,7 +241,7 @@ def _show_context_menu(self, pos):
         def on_add_to_channel(_):
             def on_confirm_clicked(channel_id):
                 request_manager.post(f"collections/mychannel/{channel_id}/copy",
-                                     on_finish=lambda _: self.table_view.window().tray_show_message(
+                                     on_success=lambda _: self.table_view.window().tray_show_message(
                                         tr("Channel update"), tr("Torrent(s) added to your channel")
                                     ),
                                     data=json.dumps(entries))
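
The downloads-page hunks above rely on schedule_downloads_refresh() and stop_refreshing_downloads(), but the timer behind them sits outside the excerpted context. The sketch below only illustrates how such a debounced, single-shot refresh can be wired with a PyQt5 QTimer; the class name, attribute names, and interval values are assumptions rather than the patch's actual implementation.

    from PyQt5.QtCore import QTimer

    # Interval names mirror those used in the diff; the values here are assumptions.
    REFRESH_DOWNLOADS_BACKGROUND_INTERVAL_MSEC = 1000
    REFRESH_DOWNLOADS_UI_CHANGE_INTERVAL_MSEC = 100


    class DebouncedRefresher:
        """Hypothetical helper: coalesces refresh requests into one single-shot QTimer."""

        def __init__(self, refresh_callback):
            self.refresh_callback = refresh_callback
            self.timer = QTimer()
            self.timer.setSingleShot(True)
            self.timer.timeout.connect(self.refresh_callback)

        def schedule(self, interval_msec=REFRESH_DOWNLOADS_BACKGROUND_INTERVAL_MSEC):
            # Restart the timer only if it would fire later than the requested interval,
            # so an already-imminent refresh is never postponed.
            if not self.timer.isActive() or self.timer.remainingTime() > interval_msec:
                self.timer.start(interval_msec)

        def stop(self):
            self.timer.stop()

Under these assumptions, a handler such as on_download_resumed() would call schedule(REFRESH_DOWNLOADS_UI_CHANGE_INTERVAL_MSEC) instead of rebuilding the list per response, which is the behaviour the diff's handlers converge on.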