id
int64
0
458k
file_name
stringlengths
4
119
file_path
stringlengths
14
227
content
stringlengths
24
9.96M
size
int64
24
9.96M
language
stringclasses
1 value
extension
stringclasses
14 values
total_lines
int64
1
219k
avg_line_length
float64
2.52
4.63M
max_line_length
int64
5
9.91M
alphanum_fraction
float64
0
1
repo_name
stringlengths
7
101
repo_stars
int64
100
139k
repo_forks
int64
0
26.4k
repo_open_issues
int64
0
2.27k
repo_license
stringclasses
12 values
repo_extraction_date
stringclasses
433 values
22,900
test_rest_endpoint.py
Tribler_tribler/src/tribler/test_unit/core/restapi/test_rest_endpoint.py
from ipv8.test.base import TestBase from tribler.core.restapi.rest_endpoint import ( HTTP_INTERNAL_SERVER_ERROR, RESTEndpoint, RESTResponse, RootEndpoint, return_handled_exception, ) from tribler.core.restapi.rest_manager import error_middleware from tribler.test_unit.base_restapi import response_to_bytes, response_to_json class TestRESTEndpoint(TestBase): """ Tests for the RESTEndpoint related functionality. """ def test_initialize_app(self) -> None: """ Test if base RESTEndpoints are initialized with an app. """ middlewares = (error_middleware, ) endpoint = RESTEndpoint(middlewares) self.assertEqual(list(middlewares), endpoint.app.middlewares) def test_root_attach_endpoint(self) -> None: """ Test if RootEndpoints can attach other endpoints. """ middlewares = (error_middleware, ) endpoint = RESTEndpoint(middlewares) endpoint.path = "" root = RootEndpoint(middlewares) root.add_endpoint("/test", endpoint) root.app.freeze() router_info = next(iter(root.app.router.resources())).get_info() self.assertEqual(list(middlewares), root.app.middlewares) self.assertEqual(endpoint, root.endpoints["/test"]) self.assertEqual(endpoint.app, router_info["app"]) # Route to endpoint's sub-app, not the main app self.assertEqual("/test", router_info["prefix"]) async def test_response_convert_dict_to_json(self) -> None: """ Test if base RESTResponse automatically switch to json when fed a dict. """ response = RESTResponse({"a": 1}) body = await response_to_json(response) self.assertEqual(1, body["a"]) self.assertEqual("application/json", response.content_type) async def test_response_convert_list_to_json(self) -> None: """ Test if base RESTResponse automatically switch to json when fed a list. 
""" response = RESTResponse(["a", "b"]) body = await response_to_json(response) self.assertEqual(["a", "b"], body) self.assertEqual("application/json", response.content_type) async def test_response_no_convert_other(self) -> None: """ Test if base RESTResponse automatically switch to json when fed a list. """ response = RESTResponse(b"PNG\x01\x02", content_type="image/png") body = await response_to_bytes(response) self.assertEqual(b"PNG\x01\x02", body) self.assertEqual("image/png", response.content_type) async def test_return_handled_exception(self) -> None: """ Test if a standard response can be constructed from an exception. """ response = return_handled_exception(ValueError("test message")) response_body_json = await response_to_json(response) self.assertEqual(HTTP_INTERNAL_SERVER_ERROR, response.status) self.assertTrue(response_body_json["error"]["handled"]) self.assertEqual("ValueError", response_body_json["error"]["code"]) self.assertEqual("test message", response_body_json["error"]["message"])
3,192
Python
.py
70
37.814286
107
0.668708
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,901
test_ipv8_endpoint.py
Tribler_tribler/src/tribler/test_unit/core/restapi/test_ipv8_endpoint.py
from ipv8.test.base import TestBase from tribler.core.restapi.ipv8_endpoint import IPv8RootEndpoint class TestIPv8RootEndpoint(TestBase): """ Tests for the IPv8RootEndpoint class. """ def test_binding(self) -> None: """ Test if all the IPv8 endpoint paths exist. """ endpoint = IPv8RootEndpoint() endpoint.setup_routes() self.assertEqual("/api/ipv8", endpoint.path) self.assertIn("/asyncio", endpoint.endpoints) self.assertIn("/attestation", endpoint.endpoints) self.assertIn("/dht", endpoint.endpoints) self.assertIn("/identity", endpoint.endpoints) self.assertIn("/isolation", endpoint.endpoints) self.assertIn("/network", endpoint.endpoints) self.assertIn("/noblockdht", endpoint.endpoints) self.assertIn("/overlays", endpoint.endpoints) self.assertIn("/tunnel", endpoint.endpoints)
927
Python
.py
22
34.727273
63
0.682222
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,902
test_shutdown_endpoint.py
Tribler_tribler/src/tribler/test_unit/core/restapi/test_shutdown_endpoint.py
from ipv8.test.base import TestBase from tribler.core.restapi.shutdown_endpoint import ShutdownEndpoint from tribler.test_unit.base_restapi import MockRequest, response_to_json class ShutdownRequest(MockRequest): """ A MockRequest that mimics ShutdownRequests. """ def __init__(self) -> None: """ Create a new ShutdownRequest. """ super().__init__({}, "PUT", "/shutdown") class TestShutdownEndpoint(TestBase): """ Tests for the ShutdownEndpoint class. """ async def test_shutdown(self) -> None: """ Test if a call to the shutdown endpoint calls the shutdown callback. """ value = [2, 1] endpoint = ShutdownEndpoint(value.reverse) response = endpoint.shutdown_request(ShutdownRequest()) response_body_json = await response_to_json(response) self.assertTrue(response_body_json["shutdown"]) self.assertEqual([1, 2], value)
965
Python
.py
26
30.538462
76
0.667742
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,903
test_settings_endpoint.py
Tribler_tribler/src/tribler/test_unit/core/restapi/test_settings_endpoint.py
from unittest.mock import Mock, call from ipv8.test.base import TestBase from tribler.core.restapi.settings_endpoint import SettingsEndpoint from tribler.test_unit.base_restapi import MockRequest, response_to_json from tribler.tribler_config import TriblerConfigManager class GetSettingsRequest(MockRequest): """ A MockRequest that mimics GetSettingsRequests. """ def __init__(self) -> None: """ Create a new GetSettingsRequest. """ super().__init__({}, "GET", "/settings") class UpdateSettingsRequest(MockRequest): """ A MockRequest that mimics UpdateSettingsRequests. """ def __init__(self, raw_json_content: bytes) -> None: """ Create a new UpdateSettingsRequest. """ super().__init__({}, "POST", "/settings") self.raw_json_content = raw_json_content async def read(self) -> bytes: """ Get the json contents of this request. """ return self.raw_json_content class MockTriblerConfigManager(TriblerConfigManager): """ A memory-based TriblerConfigManager. """ def write(self) -> None: """ Don't actually write to any file. """ class TestSettingsEndpoint(TestBase): """ Tests for the SettingsEndpoint class. """ async def test_get_settings(self) -> None: """ Test if settings can be retrieved. """ config = MockTriblerConfigManager() config.set("api/http_port", 1337) endpoint = SettingsEndpoint(config) response = await endpoint.get_settings(GetSettingsRequest()) response_body_json = await response_to_json(response) self.assertEqual(1337, response_body_json["settings"]["api"]["http_port"]) async def test_update_settings(self) -> None: """ Test if settings can be changed. 
""" config = MockTriblerConfigManager() endpoint = SettingsEndpoint(config) request = UpdateSettingsRequest(b'{"api":{"http_port":1337}}') response = await endpoint.update_settings(request) response_body_json = await response_to_json(response) self.assertTrue(response_body_json["modified"]) self.assertEqual(1337, config.get("api/http_port")) async def test_update_settings_download_manager(self) -> None: """ Test if a registered download manager is notified of a settings update. """ config = MockTriblerConfigManager() endpoint = SettingsEndpoint(config) endpoint.download_manager = Mock(update_max_rates_from_config=Mock(return_value=None)) request = UpdateSettingsRequest(b'{"api":{"http_port":1337}}') response = await endpoint.update_settings(request) response_body_json = await response_to_json(response) self.assertTrue(response_body_json["modified"]) self.assertEqual(1337, config.get("api/http_port")) self.assertEqual(call(), endpoint.download_manager.update_max_rates_from_config.call_args)
3,056
Python
.py
75
33.413333
98
0.664638
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,904
test_events_endpoint.py
Tribler_tribler/src/tribler/test_unit/core/restapi/test_events_endpoint.py
from asyncio import Future, ensure_future, sleep from aiohttp.abc import AbstractStreamWriter from ipv8.test.base import TestBase from multidict import CIMultiDict from tribler.core.notifier import Notification, Notifier from tribler.core.restapi.events_endpoint import EventsEndpoint from tribler.test_unit.base_restapi import MockRequest class GetEventsRequest(MockRequest): """ A MockRequest that mimics GetEventsRequests. """ def __init__(self, endpoint: EventsEndpoint, count: int = 1) -> None: """ Create a new GetEventsRequest. """ self.payload_writer = MockStreamWriter(endpoint, count=count) self._handler_waiter = Future() super().__init__({}, "GET", "/api/events", payload_writer=self.payload_writer) def shutdown(self) -> None: """ Mimic a shutdown. """ self._handler_waiter.cancel() def finish_handler(self) -> None: """ Mimic finishing a handler. """ self._handler_waiter.set_result(None) class MockStreamWriter(AbstractStreamWriter): """ A streamwriter that closes the endpoint after a given number of writes. """ def __init__(self, endpoint: EventsEndpoint, count: int = 1) -> None: """ Create a new MockStreamWriter. """ super().__init__() self.endpoint = endpoint self.count = count self.captured = [] async def write(self, chunk: bytes) -> None: """ Write a chunk and check if we should shut down the endpoint. """ self.captured.append(chunk) self.count -= 1 if self.count == 0: self.endpoint.shutdown_event.set() async def write_eof(self, chunk: bytes = b"") -> None: """ Ignore EOFs. """ async def drain(self) -> None: """ Ignore drains. """ def enable_compression(self, encoding: str = "deflate") -> None: """ Ignore compression. """ def enable_chunking(self) -> None: """ Ignore chunking settings. """ async def write_headers(self, status_line: str, headers: CIMultiDict[str]) -> None: """ Ignore headers. """ class TestEventsEndpoint(TestBase): """ Tests for the EventsEndpoint class. """ def setUp(self) -> None: """ Create an inert EventsEndpoint. 
""" super().setUp() self.notifier = Notifier() self.endpoint = EventsEndpoint(self.notifier) async def tearDown(self) -> None: """ Shut down the endpoint. """ await self.endpoint.shutdown_task_manager() await super().tearDown() async def test_establish_connection(self) -> None: """ Test if opening an events connection leads to an event start. """ request = GetEventsRequest(self.endpoint, count=1) response = await self.endpoint.get_events(request) self.assertEqual(200, response.status) self.assertEqual((b'event: events_start\n' b'data: {"public_key": "", "version": "git"}' b'\n\n'), request.payload_writer.captured[0]) async def test_establish_connection_with_error(self) -> None: """ Test if opening an events connection with an error leads to an event start followed by the error. """ self.endpoint.undelivered_error = ValueError("test message") request = GetEventsRequest(self.endpoint, count=2) response = await self.endpoint.get_events(request) self.assertEqual(200, response.status) self.assertEqual((b'event: tribler_exception\n' b'data: {"error": "test message", "traceback": "ValueError: test message\\n"}' b'\n\n'), request.payload_writer.captured[1]) async def test_forward_error(self) -> None: """ Test if errors are forwarded over the stream. """ request = GetEventsRequest(self.endpoint, count=2) response_future = ensure_future(self.endpoint.get_events(request)) await sleep(0) self.endpoint.on_tribler_exception(ValueError("test message")) response = await response_future self.assertEqual(200, response.status) self.assertIsNone(self.endpoint.undelivered_error) self.assertEqual((b'event: tribler_exception\n' b'data: {"error": "test message", "traceback": "ValueError: test message\\n"}' b'\n\n'), request.payload_writer.captured[1]) async def test_error_before_connection(self) -> None: """ Test if errors are stored as undelivered errors when no connection is available. 
""" exception = ValueError("test message") self.endpoint.on_tribler_exception(exception) self.assertEqual(exception, self.endpoint.undelivered_error) async def test_send_event(self) -> None: """ Test if event dicts can be sent. """ request = GetEventsRequest(self.endpoint, count=2) response_future = ensure_future(self.endpoint.get_events(request)) await sleep(0) self.endpoint.send_event({"topic": "message", "kwargs": {"key": "value"}}) response = await response_future self.assertEqual(200, response.status) self.assertIsNone(self.endpoint.undelivered_error) self.assertEqual((b'event: message\n' b'data: {"key": "value"}' b'\n\n'), request.payload_writer.captured[1]) async def test_send_event_illegal_chars(self) -> None: """ Test if event dicts error out if they contain illegal characters. """ request = GetEventsRequest(self.endpoint, count=2) response_future = ensure_future(self.endpoint.get_events(request)) await sleep(0) self.endpoint.send_event({"topic": "message", "kwargs": {"something": b"\x80"}}) response = await response_future self.assertEqual(200, response.status) self.assertIsNone(self.endpoint.undelivered_error) self.assertTrue(request.payload_writer.captured[1].startswith( b'event: tribler_exception\n' b'data: {"error": "Object of type bytes is not JSON serializable", "traceback": "' )) async def test_forward_notification(self) -> None: """ Test if notifications are forwarded. 
""" request = GetEventsRequest(self.endpoint, count=2) response_future = ensure_future(self.endpoint.get_events(request)) await sleep(0) self.endpoint.on_notification(Notification.tribler_new_version, version="super cool version") response = await response_future self.assertEqual(200, response.status) self.assertIsNone(self.endpoint.undelivered_error) self.assertEqual((b'event: tribler_new_version\n' b'data: {"version": "super cool version"}' b'\n\n'), request.payload_writer.captured[1]) async def test_no_forward_illegal_notification(self) -> None: """ Test if notifications are only forwarded if they are in the topics_to_send_to_gui whitelist. """ request = GetEventsRequest(self.endpoint, count=2) response_future = ensure_future(self.endpoint.get_events(request)) await sleep(0) self.endpoint.on_notification(Notification.peer_disconnected, peer_id=b"\x00\x01") # Should be dropped self.endpoint.on_notification(Notification.tribler_new_version, version="super cool version") # Delivered response = await response_future self.assertEqual(200, response.status) self.assertIsNone(self.endpoint.undelivered_error) self.assertEqual((b'event: tribler_new_version\n' b'data: {"version": "super cool version"}' b'\n\n'), request.payload_writer.captured[1]) async def test_shutdown_parent_before_event(self) -> None: """ Test if a parent shutdown does not cause errors after handling a child. """ request = GetEventsRequest(self.endpoint, count=3) # Blocks until shutdown response_future = ensure_future(self.endpoint.get_events(request)) request.shutdown() # 1. The parent protocol is shut down self.endpoint.shutdown_event.set() # 2. Tribler signals shutdown to the events endpoint response = await response_future request.finish_handler() # 3. aiohttp behavior: finish the request handling self.assertEqual(200, response.status)
8,764
Python
.py
196
35.331633
114
0.629621
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,905
test_manager.py
Tribler_tribler/src/tribler/test_unit/core/versioning/test_manager.py
from importlib.metadata import PackageNotFoundError from unittest.mock import AsyncMock, Mock, patch from ipv8.taskmanager import TaskManager from ipv8.test.base import TestBase import tribler from tribler.core.versioning.manager import VersioningManager from tribler.tribler_config import TriblerConfigManager from tribler.upgrade_script import FROM, TO class MockTriblerConfigManager(TriblerConfigManager): """ A memory-based TriblerConfigManager. """ def write(self) -> None: """ Don't actually write to any file. """ class TestVersioningManager(TestBase): """ Tests for the Notifier class. """ def setUp(self) -> None: """ Create a new versioning manager. """ super().setUp() self.task_manager = TaskManager() self.manager = VersioningManager(self.task_manager, MockTriblerConfigManager()) async def tearDown(self) -> None: """ Shut down our task manager. """ await self.task_manager.shutdown_task_manager() await super().tearDown() def test_get_current_version(self) -> None: """ Check if a normal version can be correctly returned. """ with patch.dict(tribler.core.versioning.manager.__dict__, {"version": lambda _: "1.2.3"}): self.assertEqual("1.2.3", self.manager.get_current_version()) def test_get_current_version_not_found(self) -> None: """ Check if a value of None is returned as the version, when it cannot be found. """ with patch.dict(tribler.core.versioning.manager.__dict__, {"version": Mock(side_effect=PackageNotFoundError)}): self.assertIsNone(self.manager.get_current_version()) def test_get_versions(self) -> None: """ Check if we can find all three versions in our test directory. """ with patch("os.listdir", lambda _: ["1.2.3", "1.3.0", "1.2.4"]), patch("os.path.isdir", lambda _: True): self.assertEqual({"1.2.3", "1.2.4", "1.3.0"}, set(self.manager.get_versions())) def test_get_versions_empty(self) -> None: """ Check if an empty list is returned if no versions exist. 
""" with patch("os.listdir", lambda _: []): self.assertEqual(set(), set(self.manager.get_versions())) async def test_check_version_no_version(self) -> None: """ Check if the bleeding edge source does not think it needs to be updated. """ self.assertIsNone(await self.manager.check_version()) async def test_check_version_no_responses(self) -> None: """ Check if None is returned when no responses are received. """ self.manager.get_current_version = Mock(return_value="1.0.0") with patch.dict(tribler.core.versioning.manager.__dict__, {"ClientSession": Mock(side_effect=RuntimeError)}): self.assertIsNone(await self.manager.check_version()) async def test_check_version_latest(self) -> None: """ Check if None is returned when we are already at the latest version. """ self.manager.get_current_version = Mock(return_value="1.0.0") with patch.dict(tribler.core.versioning.manager.__dict__, {"ClientSession": Mock(return_value=Mock( __aexit__=AsyncMock(), __aenter__=AsyncMock(return_value=AsyncMock( get=AsyncMock(return_value=Mock(json=AsyncMock(return_value={"name": "1.0.0"}))) ))))}): self.assertIsNone(await self.manager.check_version()) async def test_check_version_latest_old(self) -> None: """ Check if None is returned when we are already at the latest version, in old format. """ self.manager.get_current_version = Mock(return_value="1.0.0") with patch.dict(tribler.core.versioning.manager.__dict__, {"ClientSession": Mock(return_value=Mock( __aexit__=AsyncMock(), __aenter__=AsyncMock(return_value=AsyncMock( get=AsyncMock(return_value=Mock(json=AsyncMock(return_value={"name": "v1.0.0"}))) ))))}): self.assertIsNone(await self.manager.check_version()) async def test_check_version_newer(self) -> None: """ Check if a newer version is returned when available. 
""" self.manager.get_current_version = Mock(return_value="1.0.0") with patch.dict(tribler.core.versioning.manager.__dict__, {"ClientSession": Mock(return_value=Mock( __aexit__=AsyncMock(), __aenter__=AsyncMock(return_value=AsyncMock( get=AsyncMock(return_value=Mock(json=AsyncMock(return_value={"name": "1.0.1"}))) ))))}): self.assertEqual("1.0.1", await self.manager.check_version()) async def test_check_version_newer_retry(self) -> None: """ Check if a newer version is returned when available from the backup url. """ self.manager.get_current_version = Mock(return_value="1.0.0") with patch.dict(tribler.core.versioning.manager.__dict__, {"ClientSession": Mock(side_effect=[ RuntimeError, Mock( __aexit__=AsyncMock(), __aenter__=AsyncMock(return_value=AsyncMock( get=AsyncMock(return_value=Mock(json=AsyncMock(return_value={"name": "1.0.1"}))) )))])}): self.assertEqual("1.0.1", await self.manager.check_version()) def test_can_upgrade_upgraded(self) -> None: """ Check if we cannot upgrade an already upgraded version. """ with patch("os.path.isfile", lambda _: True): self.assertFalse(self.manager.can_upgrade()) def test_can_upgrade_unsupported(self) -> None: """ Check if we cannot upgrade from an unsupported version. """ self.manager.get_versions = Mock(return_value=["0.0.0"]) with patch("os.path.isfile", lambda _: False): self.assertFalse(self.manager.can_upgrade()) def test_can_upgrade_to_unsupported(self) -> None: """ Check if we cannot upgrade to an unsupported version. """ self.manager.get_versions = Mock(return_value=[FROM]) self.manager.get_current_version = Mock(return_value="0.0.0") with patch("os.path.isfile", lambda _: False): self.assertFalse(self.manager.can_upgrade()) def test_can_upgrade_to_current(self) -> None: """ Check if we can upgrade to the currently supported version. 
""" self.manager.get_versions = Mock(return_value=[FROM]) self.manager.get_current_version = Mock(return_value=TO) with patch("os.path.isfile", lambda _: False): self.assertEqual(FROM, self.manager.can_upgrade()) def test_can_upgrade_to_git(self) -> None: """ Check if we can upgrade to the git version. """ self.manager.get_versions = Mock(return_value=[FROM]) self.manager.get_current_version = Mock(return_value=None) with patch("os.path.isfile", lambda _: False): self.assertEqual(FROM, self.manager.can_upgrade())
7,279
Python
.py
152
38.467105
119
0.622128
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,906
test_versioning_endpoint.py
Tribler_tribler/src/tribler/test_unit/core/versioning/restapi/test_versioning_endpoint.py
from __future__ import annotations from typing import TYPE_CHECKING from unittest.mock import AsyncMock, Mock, call from ipv8.test.base import TestBase from tribler.core.restapi.rest_endpoint import HTTP_BAD_REQUEST from tribler.core.versioning.restapi.versioning_endpoint import VersioningEndpoint from tribler.test_unit.base_restapi import MockRequest, response_to_json from tribler.tribler_config import VERSION_SUBDIR if TYPE_CHECKING: from tribler.core.versioning.manager import VersioningManager class GenericRequest(MockRequest): """ A MockRequest that mimics generic GET requests for the versioning endpoint. """ def __init__(self, vman: VersioningManager, route: str) -> None: """ Create a new request. """ super().__init__({}, "GET", f"/versioning/{route}") self.context = (vman,) class PerformUpgradeRequest(MockRequest): """ A MockRequest that mimics PerformUpgrade requests for the versioning endpoint. """ def __init__(self, vman: VersioningManager) -> None: """ Create a new request. """ super().__init__({}, "POST", "/versioning/upgrade") self.context = (vman,) class RemoveVersionRequest(MockRequest): """ A MockRequest that mimics RemoveVersion requests for the versioning endpoint. """ def __init__(self, vman: VersioningManager, version: str) -> None: """ Create a new request. """ super().__init__({}, "DELETE", f"/versioning/versions/{version}") self.context = (vman,) self.version_str = version @property def match_info(self) -> dict[str, str]: """ Return our version info. """ return {"version": self.version_str} class TestVersioningEndpoint(TestBase): """ Tests for the VersioningEndpoint class. """ def setUp(self) -> None: """ Create a new VersioningEndpoint. """ super().setUp() self.vman = Mock() self.rest_ep = VersioningEndpoint() self.rest_ep.versioning_manager = self.vman async def test_current_version(self) -> None: """ Check if the current version is correctly returned. 
""" self.vman.get_current_version = Mock(return_value="1.2.3") response = await self.rest_ep.get_current_version(GenericRequest(self.vman, "versions/current")) response_body_json = await response_to_json(response) self.assertEqual("1.2.3", response_body_json["version"]) async def test_versions(self) -> None: """ Check if the known versions are correctly returned. """ self.vman.get_versions = Mock(return_value=["1.2.3", "4.5.6"]) response = await self.rest_ep.get_versions(GenericRequest(self.vman, "versions")) response_body_json = await response_to_json(response) self.assertEqual({"1.2.3", "4.5.6"}, set(response_body_json["versions"])) self.assertEqual(VERSION_SUBDIR, response_body_json["current"]) async def test_check_version_available(self) -> None: """ Check if the checked version is correctly returned when a version is available. """ self.vman.check_version = AsyncMock(return_value="1.2.3") response = await self.rest_ep.check_version(GenericRequest(self.vman, "versions/check")) response_body_json = await response_to_json(response) self.assertTrue(response_body_json["has_version"]) self.assertEqual("1.2.3", response_body_json["new_version"]) async def test_check_version_unavailable(self) -> None: """ Check if the checked version is correctly returned when a version is not available. """ self.vman.check_version = AsyncMock(return_value=None) response = await self.rest_ep.check_version(GenericRequest(self.vman, "versions/check")) response_body_json = await response_to_json(response) self.assertFalse(response_body_json["has_version"]) self.assertEqual("", response_body_json["new_version"]) async def test_can_upgrade_no(self) -> None: """ Check if the inability to upgrade is correctly returned. 
""" self.vman.can_upgrade = Mock(return_value=False) response = await self.rest_ep.can_upgrade(GenericRequest(self.vman, "upgrade/available")) response_body_json = await response_to_json(response) self.assertFalse(response_body_json["can_upgrade"]) async def test_can_upgrade(self) -> None: """ Check if the ability to upgrade is correctly returned. """ self.vman.can_upgrade = Mock(return_value="1.2.3") response = await self.rest_ep.can_upgrade(GenericRequest(self.vman, "upgrade/available")) response_body_json = await response_to_json(response) self.assertEqual("1.2.3", response_body_json["can_upgrade"]) async def test_is_upgrading(self) -> None: """ Check if the upgrading status is correctly returned. """ self.vman.task_manager.get_task = Mock(return_value=True) response = await self.rest_ep.is_upgrading(GenericRequest(self.vman, "upgrade/working")) response_body_json = await response_to_json(response) self.assertTrue(response_body_json["running"]) async def test_is_upgrading_no(self) -> None: """ Check if the non-upgrading status is correctly returned. """ self.vman.task_manager.get_task = Mock(return_value=None) response = await self.rest_ep.is_upgrading(GenericRequest(self.vman, "upgrade/working")) response_body_json = await response_to_json(response) self.assertFalse(response_body_json["running"]) async def test_perform_upgrade(self) -> None: """ Check if a request to perform an upgrade launches an upgrade task. """ response = await self.rest_ep.perform_upgrade(PerformUpgradeRequest(self.vman)) response_body_json = await response_to_json(response) self.assertTrue(response_body_json["success"]) self.assertEqual(call(), self.vman.perform_upgrade.call_args) async def test_remove_version_illegal(self) -> None: """ Check if a request without a version returns a BAD REQUEST status. 
""" response = await self.rest_ep.remove_version(RemoveVersionRequest(self.vman, "")) self.assertEqual(HTTP_BAD_REQUEST, response.status) async def test_remove_version(self) -> None: """ Check if a request to remove a given version is forwarded. """ response = await self.rest_ep.remove_version(RemoveVersionRequest(self.vman, "1.2.3")) response_body_json = await response_to_json(response) self.assertTrue(response_body_json["success"]) self.assertEqual(call("1.2.3"), self.vman.remove_version.call_args)
6,954
Python
.py
148
39.182432
104
0.664743
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,907
test_discovery.py
Tribler_tribler/src/tribler/test_unit/core/tunnel/test_discovery.py
from __future__ import annotations from typing import TYPE_CHECKING from unittest.mock import Mock from ipv8.community import Community, CommunitySettings from ipv8.messaging.anonymization.tunnel import PEER_FLAG_EXIT_BT from ipv8.test.base import TestBase from tribler.core.tunnel.discovery import GoldenRatioStrategy if TYPE_CHECKING: from ipv8.peer import Peer class MockTriblerTunnelCommunity(Community): """ A mocked TriblerTunnelCommunity. """ settings_class = CommunitySettings community_id = b"\x00" * 20 def __init__(self, settings: CommunitySettings) -> None: """ Create a new MockTriblerTunnelCommunity. """ super().__init__(settings) self.exit_candidates = [] self.candidates = {} self.send_introduction_request = Mock() def get_candidates(self, flag: int) -> list: """ Get exit candidates. """ return self.exit_candidates if flag == PEER_FLAG_EXIT_BT else [] def get_peers(self) -> set[Peer]: """ Get all known peers. """ return self.network.verified_peers class TestGoldenRatioStrategy(TestBase[MockTriblerTunnelCommunity]): """ Tests for the GoldenRatioStrategy class. """ def setUp(self) -> None: """ Create two peers. """ self.initialize(MockTriblerTunnelCommunity, 3) self.overlay(0).exit_candidates.append(self.peer(2)) def test_invariant(self) -> None: """ If we are not at our target peer count, don't do anything. """ strategy = GoldenRatioStrategy(self.overlay(0), 0.0, 3) strategy.take_step() strategy.golden_ratio = 1.0 strategy.take_step() self.assertEqual(2, len(self.network(0).verified_peers)) self.assertIn(self.peer(1), self.network(0).verified_peers) self.assertIn(self.peer(2), self.network(0).verified_peers) def test_remove_normal(self) -> None: """ If we have a normal node and an exit node, check if enforcing a ratio of 0.0 removes the normal node. 
""" strategy = GoldenRatioStrategy(self.overlay(0), 0.0, 1) strategy.take_step() # The normal peer should be removed self.assertEqual(1, len(self.network(0).verified_peers)) self.assertIn(self.peer(2), self.network(0).verified_peers) def test_remove_exit(self) -> None: """ If we have a normal node and an exit node, check if enforcing a ratio of 1.0 removes the exit node. """ strategy = GoldenRatioStrategy(self.overlay(0), 1.0, 1) strategy.take_step() # The normal peer should be removed self.assertEqual(1, len(self.network(0).verified_peers)) self.assertIn(self.peer(1), self.network(0).verified_peers) def test_send_introduction_request(self) -> None: """ If a node has sent us its peer_flag, check if an introduction_request is sent. """ self.overlay(0).candidates[self.peer(2)] = [] strategy = GoldenRatioStrategy(self.overlay(0), 1.0, 1) strategy.take_step() self.overlay(0).send_introduction_request.assert_called_once_with(self.peer(1))
3,249
Python
.py
80
33.1875
109
0.654909
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,908
test_community.py
Tribler_tribler/src/tribler/test_unit/core/tunnel/test_community.py
from __future__ import annotations from asyncio import TimeoutError as AsyncTimeoutError from asyncio import gather, sleep, wait_for from collections import defaultdict from io import BytesIO from typing import TYPE_CHECKING from unittest.mock import AsyncMock, Mock, call, patch from ipv8.keyvault.public.libnaclkey import LibNaCLPK from ipv8.messaging.anonymization.tunnel import ( CIRCUIT_STATE_CLOSING, CIRCUIT_TYPE_IP_SEEDER, CIRCUIT_TYPE_RP_DOWNLOADER, CIRCUIT_TYPE_RP_SEEDER, PEER_FLAG_EXIT_BT, Circuit, ) from ipv8.messaging.serialization import ADDRESS_TYPE_IPV4 from ipv8.peer import Peer from ipv8.test.base import TestBase from ipv8.test.messaging.anonymization import mock as dht_mocks from ipv8.test.messaging.anonymization.mock import MockDHTProvider from ipv8.test.mocking.endpoint import AutoMockEndpoint from ipv8.test.mocking.exit_socket import MockTunnelExitSocket from ipv8.test.mocking.ipv8 import MockIPv8 from ipv8.util import succeed import tribler from tribler.core.libtorrent.download_manager.download_state import DownloadStatus from tribler.core.notifier import Notifier from tribler.core.tunnel.community import PEER_FLAG_EXIT_HTTP, TriblerTunnelCommunity, TriblerTunnelSettings if TYPE_CHECKING: from ipv8.community import CommunitySettings class TestTriblerTunnelCommunity(TestBase[TriblerTunnelCommunity]): """ Tests for the TriblerTunnelCommunity class. """ def setUp(self) -> None: """ Create a new TriblerTunnelCommunity. """ super().setUp() self.fake_cache_file = BytesIO() self.fake_cache_file.read = self.fake_cache_file.getvalue self.initialize(TriblerTunnelCommunity, 1) async def tearDown(self) -> None: """ Reset the global_dht_services variable. """ dht_mocks.global_dht_services = defaultdict(list) await super().tearDown() def create_node(self, settings: CommunitySettings | None = None, create_dht: bool = False, enable_statistics: bool = False) -> MockIPv8: """ Set up a fake dht provider. 
""" exit_cache = Mock(open=Mock(return_value=Mock(__enter__=Mock(return_value=self.fake_cache_file), __exit__=Mock())), write_bytes=self.fake_cache_file.write) config = TriblerTunnelSettings(remove_tunnel_delay=0, max_circuits=1, socks_servers=[], notifier=Notifier(), exitnode_cache=exit_cache, download_manager=None) mock_ipv8 = MockIPv8("curve25519", TriblerTunnelCommunity, config) config.dht_provider = MockDHTProvider(Peer(mock_ipv8.overlay.my_peer.key, mock_ipv8.overlay.my_estimated_wan)) mock_ipv8.overlay.cancel_all_pending_tasks() return mock_ipv8 async def create_intro(self, i: int, service: bytes) -> None: """ Create an 1 hop introduction point for some node for some service. """ self.overlay(i).join_swarm(service, 1, seeding=True) self.overlay(i).create_introduction_point(service) await self.deliver_messages() for node in self.nodes: exit_sockets = node.overlay.exit_sockets for circuit_id in exit_sockets: exit_sockets[circuit_id] = MockTunnelExitSocket(exit_sockets[circuit_id]) async def assign_exit_node(self, i: int) -> None: """ Give a node a dedicated exit node to play with. """ exit_node = self.create_node() self.nodes.append(exit_node) # So it could be properly removed on exit exit_node.overlay.settings.peer_flags = {PEER_FLAG_EXIT_BT} public_peer = Peer(exit_node.my_peer.public_key, exit_node.my_peer.address) self.network(i).add_verified_peer(public_peer) self.network(i).discover_services(public_peer, exit_node.overlay.community_id) self.overlay(i).candidates[public_peer] = exit_node.overlay.settings.peer_flags self.overlay(i).build_tunnels(1) await self.deliver_messages() exit_sockets = exit_node.overlay.exit_sockets for circuit_id in exit_sockets: exit_sockets[circuit_id] = MockTunnelExitSocket(exit_sockets[circuit_id]) async def test_backup_exitnodes(self) -> None: """ Check if exitnodes are serialized and deserialized to and from disk properly. """ # 1. 
Add and exit node exit_node = self.create_node() exit_node.overlay.settings.peer_flags.add(PEER_FLAG_EXIT_BT) self.add_node_to_experiment(exit_node) self.overlay(0).candidates[exit_node.my_peer] = exit_node.overlay.settings.peer_flags self.assertGreaterEqual(len(self.nodes[0].overlay.get_candidates(PEER_FLAG_EXIT_BT)), 1) # 2. Unload self.overlay(0).cache_exitnodes_to_disk() self.network(0).verified_peers = set() self.overlay(0).candidates.clear() # 3. Load again self.overlay(0).restore_exitnodes_from_disk() # 4. Check if exit node was contacted await self.deliver_messages(timeout=0.1) self.assertGreaterEqual(len(self.overlay(0).get_candidates(PEER_FLAG_EXIT_BT)), 1) def test_readd_bittorrent_peers(self) -> None: """ Test the readd bittorrent peers method. """ mock_torrent = Mock(add_peer=Mock(return_value=succeed(None)), tdef=Mock(get_infohash=Mock(return_value=b'a' * 20))) self.overlay(0).bittorrent_peers = {mock_torrent: [None]} self.overlay(0).readd_bittorrent_peers() self.assertNotIn(mock_torrent, self.overlay(0).bittorrent_peers) async def test_remove_non_existing_circuit(self) -> None: """ Test removing a non-existing circuit. """ await self.overlay(0).remove_circuit(3, remove_now=True) self.assertNotIn(3, self.overlay(0).circuits) async def test_remove_existing_circuit(self) -> None: """ Test removing an existing circuit. """ self.overlay(0).circuits[3] = Circuit(3) await self.overlay(0).remove_circuit(3, remove_now=True) self.assertNotIn(3, self.overlay(0).circuits) async def test_remove_existing_circuit_later(self) -> None: """ Test removing an existing circuit. 
""" circuit = Circuit(3) circuit.add_hop(Mock()) self.overlay(0).circuits[3] = circuit self.overlay(0).settings.remove_tunnel_delay = 5.0 _ = self.overlay(0).remove_circuit(3, remove_now=False) self.assertIn(3, self.overlay(0).circuits) self.assertEqual(CIRCUIT_STATE_CLOSING, circuit.state) def test_monitor_downloads_ignore_hidden(self) -> None: """ Test if hidden downloads get ignored by monitor_downloads. """ mock_state = Mock() mock_download = Mock(hidden=True, get_state=Mock(return_value=mock_state)) mock_state.get_download = lambda: mock_download self.overlay(0).monitor_downloads([mock_state]) self.assertFalse(self.overlay(0).download_states) async def test_monitor_downloads_stop_ip(self) -> None: """ Test if we stop building IPs when a download doesn't exist anymore. """ self.overlay(0).settings.download_manager = Mock(get_last_download_states=Mock(return_value=[]), get_downloads=Mock(return_value=[])) circuit = Circuit(0, 1, CIRCUIT_TYPE_IP_SEEDER, info_hash=b'a') remote_endpoint = AutoMockEndpoint() circuit.add_hop(Mock(address=remote_endpoint.get_address())) circuit.last_activity = 0 self.overlay(0).circuits[0] = circuit self.overlay(0).join_swarm(b'a', 1) self.overlay(0).download_states[b'a'] = 3 self.overlay(0).monitor_downloads([]) await gather(*self.overlay(0).get_anonymous_tasks("remove_circuit")) self.assertNotIn(0, self.overlay(0).circuits) def test_monitor_downloads_recreate_ip(self) -> None: """ Test if an old introduction point is recreated. 
""" mock_state = Mock(get_status=Mock(return_value=DownloadStatus.SEEDING)) mock_tdef = Mock(get_infohash=Mock(return_value=b'a')) mock_download = Mock(get_def=Mock(return_value=mock_tdef), add_peer=Mock(return_value=succeed(None)), get_state=Mock(return_value=mock_state), config=Mock(get_hops=Mock(return_value=1)), apply_ip_filter=Mock(return_value=None)) mock_state.get_download = Mock(return_value=mock_download) self.overlay(0).create_introduction_point = Mock() self.overlay(0).download_states[b'a'] = DownloadStatus.DOWNLOADING self.overlay(0).monitor_downloads([mock_state]) self.assertEqual(call(self.overlay(0).get_lookup_info_hash(b'a')), self.overlay(0).create_introduction_point.call_args) def test_monitor_downloads_leave_swarm(self) -> None: """ Test if we leave the swarm when a download is stopped. """ self.overlay(0).swarms[b'a'] = None self.overlay(0).download_states[b'a'] = 3 self.overlay(0).monitor_downloads([]) self.assertNotIn(b'a', self.overlay(0).swarms) async def test_monitor_downloads_intro(self) -> None: """ Test if rendezvous points are removed when a download is stopped. """ self.overlay(0).settings.download_manager = Mock(get_last_download_states=Mock(return_value=[]), get_downloads=Mock(return_value=[])) circuit = Circuit(0, 1, CIRCUIT_TYPE_RP_DOWNLOADER, info_hash=b"a") remote_endpoint = AutoMockEndpoint() circuit.add_hop(Mock(address=remote_endpoint.get_address())) circuit.last_activity = 0 self.overlay(0).circuits[0] = circuit self.overlay(0).join_swarm(b'a', 1) self.overlay(0).swarms[b'a'].add_connection(circuit, None) self.overlay(0).download_states[b'a'] = 3 self.overlay(0).monitor_downloads([]) await gather(*self.overlay(0).get_anonymous_tasks("remove_circuit")) self.assertNotIn(0, self.overlay(0).circuits) async def test_monitor_downloads_stop_all(self) -> None: """ Test if circuits are removed when all downloads are stopped. 
""" self.overlay(0).settings.download_manager = Mock(get_last_download_states=Mock(return_value=[]), get_downloads=Mock(return_value=[])) circuit = Circuit(0, 1, CIRCUIT_TYPE_RP_DOWNLOADER, info_hash=b"a") remote_endpoint = AutoMockEndpoint() circuit.add_hop(Mock(address=remote_endpoint.get_address())) self.overlay(0).circuits[0] = circuit self.overlay(0).join_swarm(b"a", 1) self.overlay(0).download_states[b"a"] = 3 self.overlay(0).monitor_downloads([]) await gather(*self.overlay(0).get_anonymous_tasks("remove_circuit")) self.assertNotIn(0, self.overlay(0).circuits) async def test_update_ip_filter(self) -> None: """ Test if the ip filter is updated properly. """ circuit = Circuit(123, 1, CIRCUIT_TYPE_RP_DOWNLOADER) remote_endpoint = AutoMockEndpoint() circuit.add_hop(Mock(address=remote_endpoint.get_address())) circuit.bytes_down = 0 circuit.last_activity = 0 self.overlay(0).circuits[circuit.circuit_id] = circuit download = Mock(handle=None, config=Mock(get_hops=Mock(return_value=1))) self.overlay(0).get_download = Mock(return_value=download) lt_session = Mock() self.overlay(0).settings.download_manager = Mock(get_session=Mock(return_value=lt_session), update_ip_filter=Mock(), get_downloads=Mock(return_value=[download])) self.overlay(0).update_ip_filter(0) self.overlay(0).settings.download_manager.update_ip_filter.assert_called_with(lt_session, []) circuit.ctype = CIRCUIT_TYPE_RP_SEEDER self.overlay(0).update_ip_filter(0) ips = [self.overlay(0).circuit_id_to_ip(circuit.circuit_id)] self.overlay(0).settings.download_manager.update_ip_filter.assert_called_with(lt_session, ips) def test_update_torrent(self) -> None: """ Test updating a torrent when a circuit breaks. 
""" self.overlay(0).find_circuits = Mock(return_value=True) self.overlay(0).readd_bittorrent_peers = Mock(return_value=None) download = Mock(handle=Mock(get_peer_info=Mock(return_value={Mock(ip=('2.2.2.2', 2)), Mock(ip=('3.3.3.3', 3))}), is_valid=Mock(return_value=True))) peers = {('1.1.1.1', 1), ('2.2.2.2', 2)} self.overlay(0).update_torrent(peers, download) self.assertIn(download, self.nodes[0].overlay.bittorrent_peers) # Test adding peers self.overlay(0).bittorrent_peers[download] = {('4.4.4.4', 4)} self.overlay(0).update_torrent(peers, download) async def test_circuit_reject_too_many(self) -> None: """ Test if a circuit is rejected by an exit node if it already joined the max number of circuits. """ self.add_node_to_experiment(self.create_node()) self.overlay(1).settings.peer_flags = {PEER_FLAG_EXIT_BT} self.overlay(1).settings.max_joined_circuits = 0 await self.introduce_nodes() self.overlay(0).build_tunnels(1) await self.deliver_messages() self.assertEqual(self.overlay(0).tunnels_ready(1), 0.0) async def test_perform_http_request(self) -> None: """ Test if we can make a http request through a circuit. 
""" remote_endpoint = AutoMockEndpoint() writer = Mock() reader = Mock(readline=AsyncMock(side_effect=[b"HTTP/1.1 200 OK\r\n", b"\r\n"]), read=AsyncMock(return_value=b"i11e\r\n")) open_connection = AsyncMock(return_value=(reader, writer)) self.add_node_to_experiment(self.create_node()) self.overlay(1).settings.peer_flags = {PEER_FLAG_EXIT_HTTP} await self.introduce_nodes() self.overlay(0).create_circuit(1, exit_flags=[PEER_FLAG_EXIT_HTTP]) await sleep(0) with patch.dict(tribler.core.tunnel.community.__dict__, {"open_connection": open_connection}): response = await self.overlay(0).perform_http_request(remote_endpoint.get_address(), b'GET /scrape?info_hash=0 HTTP/1.1\r\n\r\n') self.assertEqual(response.split(b'\r\n')[0], b'HTTP/1.1 200 OK') async def test_perform_http_request_not_allowed(self) -> None: """ Test if we can't make HTTP requests that don't have a bencoded response. """ remote_endpoint = AutoMockEndpoint() writer = Mock() reader = Mock(readline=AsyncMock(side_effect=[b"HTTP/1.1 200 OK\r\n", b"\r\n"]), read=AsyncMock(return_value=b"\r\n")) open_connection = AsyncMock(return_value=(reader, writer)) self.add_node_to_experiment(self.create_node()) self.overlay(1).settings.peer_flags = {PEER_FLAG_EXIT_HTTP} await self.introduce_nodes() self.overlay(0).create_circuit(1, exit_flags=[PEER_FLAG_EXIT_HTTP]) await sleep(0) await gather(*self.overlay(1).get_anonymous_tasks("on_packet_from_circuit")) with patch.dict(tribler.core.tunnel.community.__dict__, {"open_connection": open_connection}),\ self.assertRaises(AsyncTimeoutError): await wait_for(self.overlay(0).perform_http_request(remote_endpoint.get_address(), b'GET /scrape?info_hash=0 HTTP/1.1\r\n\r\n'), timeout=.05) async def test_perform_http_request_no_http_exits(self) -> None: """ Test if we can't make HTTP requests when we have no exits. 
""" remote_endpoint = AutoMockEndpoint() self.add_node_to_experiment(self.create_node()) self.overlay(1).settings.peer_flags = set() await self.introduce_nodes() with self.assertRaises(RuntimeError), self.overlay(0).request_cache.passthrough(): await self.overlay(0).perform_http_request(remote_endpoint.get_address(), b'GET /scrape?info_hash=0 HTTP/1.1\r\n\r\n') async def test_perform_http_request_failed(self) -> None: """ Test if a HTTP request raises a timeout error when the request times out. """ remote_endpoint = AutoMockEndpoint() self.add_node_to_experiment(self.create_node()) self.overlay(1).settings.peer_flags = {PEER_FLAG_EXIT_HTTP} await self.introduce_nodes() with self.assertRaises(AsyncTimeoutError): await wait_for(self.overlay(0).perform_http_request(remote_endpoint.get_address(), b'GET /scrape?info_hash=0 HTTP/1.1\r\n\r\n'), timeout=0) def test_cache_exitnodes_to_disk(self) -> None: """ Test if we can cache exit nodes to disk. """ self.overlay(0).candidates = {Peer(LibNaCLPK(b'\x00' * 64), ("0.1.2.3", 1029)): {PEER_FLAG_EXIT_BT}} self.overlay(0).cache_exitnodes_to_disk() self.assertEqual(bytes([ADDRESS_TYPE_IPV4]) + bytes(range(6)), self.fake_cache_file.read()) def test_cache_exitnodes_to_disk_os_error(self) -> None: """ Test if we can handle an OSError when caching exit nodes to disk and raise no errors. """ self.overlay(0).candidates = {Peer(LibNaCLPK(b'\x00' * 64), ("0.1.2.3", 1029)): {PEER_FLAG_EXIT_BT}} self.overlay(0).settings.exitnode_cache = Mock(write_bytes=Mock(side_effect=FileNotFoundError)) self.overlay(0).cache_exitnodes_to_disk() self.assertTrue(self.overlay(0).settings.exitnode_cache.write_bytes.called) async def test_should_join_circuit(self) -> None: """ Test if we can join a circuit. """ self.assertTrue(self.overlay(0).should_join_circuit(create_payload=Mock(), previous_node_address=Mock()))
18,630
Python
.py
350
42.745714
120
0.637313
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,909
test_dispatcher.py
Tribler_tribler/src/tribler/test_unit/core/tunnel/test_dispatcher.py
import asyncio from asyncio import Future from unittest.mock import Mock, call from ipv8.messaging.anonymization.tunnel import CIRCUIT_STATE_READY, Circuit from ipv8.test.base import TestBase from ipv8.util import succeed from tribler.core.tunnel.dispatcher import TunnelDispatcher class TestTunnelDispatcher(TestBase): """ Tests for the TunnelDispatcher class. """ def setUp(self) -> None: """ Create a mocked dispatcher. """ self.dispatcher = TunnelDispatcher(Mock()) async def tearDown(self) -> None: """ Destroy the dispatcher. """ await self.dispatcher.shutdown_task_manager() def test_on_tunnel_in_too_many_hops(self) -> None: """ Test if no data is sent to the SOCKS5 server over a circuit with too many hops. """ self.assertFalse(self.dispatcher.on_incoming_from_tunnel(self.dispatcher.tunnels, Circuit(123, 300), ("0.0.0.0", 1024), b'a')) def test_on_tunnel_in_no_sessions(self) -> None: """ Test if no data is sent to the SOCKS5 server when no sessions are available. """ self.dispatcher.set_socks_servers([Mock(sessions=[])]) self.assertFalse(self.dispatcher.on_incoming_from_tunnel(self.dispatcher.tunnels, Circuit(123, 1), ("0.0.0.0", 1024), b'a')) def test_on_tunnel_in_no_unmapped_connection(self) -> None: """ Test if no data is sent to the SOCKS5 server when no connections are available. """ self.dispatcher.set_socks_servers([Mock(sessions=[Mock(udp_connection=None)])]) self.assertFalse(self.dispatcher.on_incoming_from_tunnel(self.dispatcher.tunnels, Circuit(123, 1), ("0.0.0.0", 1024), b'a')) def test_on_tunnel_in_no_mapped_connection(self) -> None: """ Test if no data is sent to the SOCKS5 server when the mapped connection is not available. 
""" self.dispatcher.cid_to_con[b'a'] = Mock(udp_connection=None) self.dispatcher.set_socks_servers([Mock(sessions=[self.dispatcher.cid_to_con[b'a']])]) self.assertFalse(self.dispatcher.on_incoming_from_tunnel(self.dispatcher.tunnels, Circuit(123, 1), ("0.0.0.0", 1024), b'a')) def test_on_tunnel_in_available(self) -> None: """ Test if data is sent to the SOCKS5 server when the mapped connection is available. """ self.dispatcher.cid_to_con[b'a'] = Mock(udp_connection=Mock(send_datagram=Mock(return_value=True))) self.dispatcher.set_socks_servers([Mock(sessions=[self.dispatcher.cid_to_con[b'a']])]) self.assertTrue(self.dispatcher.on_incoming_from_tunnel(self.dispatcher.tunnels, Circuit(123, 1), ("0.0.0.0", 1024), b'a')) def test_on_socks_in_udp_no_circuit(self) -> None: """ Test if data cannot be dispatched without a circuit. """ mock_request = Mock(destination=("0.0.0.0", 1024), data=b'a') connection = Mock() self.dispatcher.set_socks_servers([connection.socksconnection.socksserver]) self.dispatcher.tunnels.circuits = {} self.assertFalse(self.dispatcher.on_socks5_udp_data(connection, mock_request)) def test_on_socks_in_udp_no_ready_circuit(self) -> None: """ Test if data cannot be dispatched with a circuit that is not ready. """ circuit = Circuit(3, 1) mock_request = Mock(destination=("0.0.0.0", 1024), data=b'a') connection = Mock() self.dispatcher.set_socks_servers([connection.socksconnection.socksserver]) self.dispatcher.tunnels.circuits = {circuit.circuit_id: circuit} self.assertFalse(self.dispatcher.on_socks5_udp_data(connection, mock_request)) def test_on_socks_in_udp_ready_circuit(self) -> None: """ Test if data is dispatched with a circuit that is ready. 
""" circuit = Circuit(3, 1) circuit.add_hop(Mock()) mock_request = Mock(destination=("0.0.0.0", 1024), data=b'a') connection = Mock() self.dispatcher.set_socks_servers([connection.socksconnection.socksserver]) self.dispatcher.tunnels.circuits = {circuit.circuit_id: circuit} self.assertEqual(CIRCUIT_STATE_READY, circuit.state) self.assertTrue(self.dispatcher.on_socks5_udp_data(connection, mock_request)) async def test_on_socks_in_tcp_no_success(self) -> None: """ Test if a TCP connect request is not dispatched to the TunnelCommunity when the request is not successful. """ tcp_connection = Mock() self.dispatcher.set_socks_servers([tcp_connection.socksserver]) self.dispatcher.tunnels.perform_http_request = Mock(return_value=succeed(None)) await self.dispatcher.on_socks5_tcp_data(tcp_connection, ("0.0.0.0", 1024), b'') self.assertIsNone(tcp_connection.transport.write.call_args) async def test_on_socks_in_tcp(self) -> None: """ Test if a TCP connect request is correctly dispatched to the TunnelCommunity. """ tcp_connection = Mock() self.dispatcher.set_socks_servers([tcp_connection.socksserver]) self.dispatcher.tunnels.perform_http_request = Mock(return_value=succeed(b'test')) await self.dispatcher.on_socks5_tcp_data(tcp_connection, ("0.0.0.0", 1024), b'') self.assertEqual(call(b"test"), tcp_connection.transport.write.call_args) async def test_on_socks5_tcp_data_with_transport_none(self) -> None: """ Test if connection without a transport does not lead to a successful dispatch. """ tcp_connection = Mock(transport=None) self.dispatcher.set_socks_servers([tcp_connection.socksserver]) self.dispatcher.tunnels.perform_http_request = Mock(return_value=succeed(b'test')) result = await self.dispatcher.on_socks5_tcp_data(tcp_connection, ("0.0.0.0", 1024), b'') self.assertFalse(result) def test_circuit_dead(self) -> None: """ Test if the correct peers are removed when a circuit breaks. 
""" connection = Mock() circuit = Circuit(3, 1) self.dispatcher.con_to_cir = {connection: {(i + 1): circuit for i in range(3)}} self.dispatcher.cid_to_con = {circuit.circuit_id: connection} res = self.dispatcher.circuit_dead(circuit) self.assertEqual(3, len(res)) self.assertEqual(0, len(self.dispatcher.con_to_cir[connection])) self.assertNotIn(circuit.circuit_id, self.dispatcher.cid_to_con) def test_check_connections(self) -> None: """ Test if closed connections are cleaned up properly. """ connection = Mock(udp_connection=None) circuit = Circuit(3, 1) self.dispatcher.con_to_cir = {connection: {(i + 1): circuit for i in range(3)}} self.dispatcher.cid_to_con = {circuit.circuit_id: connection, 2: Mock()} self.dispatcher.check_connections() self.assertNotIn(connection, self.dispatcher.con_to_cir) self.assertNotIn(circuit.circuit_id, self.dispatcher.cid_to_con) self.assertIn(2, self.dispatcher.cid_to_con) async def test_on_data_after_select(self) -> None: """ Test if data is forwarded after a circuit select. """ mock_connection = Mock(udp_connection=None) circuit = Circuit(3, 1) mock_request = Mock(destination=("0.0.0.0", 1024), data=b'a') self.dispatcher.set_socks_servers([mock_connection.socksserver]) self.dispatcher.tunnels.circuits = {} self.dispatcher.tunnels.create_circuit = Mock(return_value=circuit) self.dispatcher.on_socks5_udp_data = Mock(return_value=None) circuit.ready = Future() circuit.ready.set_result(True) self.dispatcher.select_circuit(mock_connection, mock_request) await asyncio.sleep(0) self.assertEqual(call(None, mock_request), self.dispatcher.on_socks5_udp_data.call_args) async def test_on_data_after_select_no_result(self) -> None: """ Test if no data is forwarded after a failed circuit select. 
""" mock_connection = Mock(udp_connection=None) circuit = Circuit(3, 1) mock_request = Mock(destination=("0.0.0.0", 1024), data=b'a') self.dispatcher.set_socks_servers([mock_connection.socksserver]) self.dispatcher.tunnels.circuits = {} self.dispatcher.tunnels.create_circuit = Mock(return_value=circuit) self.dispatcher.on_socks5_udp_data = Mock(return_value=None) circuit.ready = Future() circuit.ready.set_result(None) self.dispatcher.select_circuit(mock_connection, mock_request) await asyncio.sleep(0) self.assertIsNone(self.dispatcher.on_socks5_udp_data.call_args)
9,117
Python
.py
171
43.263158
114
0.642689
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,910
test_caches.py
Tribler_tribler/src/tribler/test_unit/core/tunnel/test_caches.py
from asyncio import sleep from unittest.mock import Mock from ipv8.requestcache import RequestCache from ipv8.test.base import TestBase from tribler.core.tunnel.caches import HTTPRequestCache from tribler.core.tunnel.payload import HTTPResponsePayload class TestHTTPRequestCache(TestBase): """ Tests for the HTTPRequestCache cache. """ def setUp(self) -> None: """ Create a new request cache. """ self.request_cache = RequestCache() async def tearDown(self) -> None: """ Destroy the cache. """ await self.request_cache.shutdown() await super().tearDown() async def test_timeout_response_future(self) -> None: """ Test if a HTTPRequestCache sets its response future to a value of None on timeout. """ with self.request_cache.passthrough(): cache = self.request_cache.add(HTTPRequestCache(Mock(request_cache=self.request_cache), 0)) response_future = cache.response_future await sleep(0) self.assertIsNone(await response_future) self.assertFalse(self.request_cache.has(cache.prefix, cache.number)) async def test_callback_response_future_one(self) -> None: """ Test if a HTTPRequestCache completed with one response sets the value of its response future. """ response = HTTPResponsePayload(0, 0, 0, 1, b"request") cache = self.request_cache.add(HTTPRequestCache(Mock(request_cache=self.request_cache), 0)) added = cache.add_response(response) await sleep(0) self.assertTrue(added) self.assertEqual(response.response, await cache.response_future) async def test_callback_response_future_two(self) -> None: """ Test if a HTTPRequestCache completed with two responses sets the value of its response future. 
""" response1 = HTTPResponsePayload(0, 0, 0, 2, b"[first half]") response2 = HTTPResponsePayload(0, 1, 1, 2, b"[second half]") cache = self.request_cache.add(HTTPRequestCache(Mock(request_cache=self.request_cache), 0)) completed = cache.add_response(response1) added = cache.add_response(response2) await sleep(0) self.assertFalse(completed) self.assertTrue(added) self.assertEqual(response1.response + response2.response, await cache.response_future)
2,424
Python
.py
54
37.037037
103
0.680815
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,911
test_payload.py
Tribler_tribler/src/tribler/test_unit/core/tunnel/test_payload.py
from __future__ import annotations from ipv8.test.base import TestBase from tribler.core.tunnel.payload import HTTPRequestPayload, HTTPResponsePayload class TestHTTPPayloads(TestBase): """ Tests for the various payloads of the TriblerTunnelCommunity. """ def test_http_request_payload(self) -> None: """ Test if HTTPRequestPayload initializes correctly. """ hrqp = HTTPRequestPayload(42, 7, ("1.2.3.4", 5), b"test") self.assertEqual(28, hrqp.msg_id) self.assertEqual(42, hrqp.circuit_id) self.assertEqual(7, hrqp.identifier) self.assertEqual(("1.2.3.4", 5), hrqp.target) self.assertEqual(b"test", hrqp.request) def test_http_response_payload(self) -> None: """ Test if HTTPResponsePayload initializes correctly. """ hrsp = HTTPResponsePayload(42, 7, 3, 5, b"test") self.assertEqual(29, hrsp.msg_id) self.assertEqual(42, hrsp.circuit_id) self.assertEqual(7, hrsp.identifier) self.assertEqual(3, hrsp.part) self.assertEqual(5, hrsp.total) self.assertEqual(b"test", hrsp.response)
1,158
Python
.py
28
33.928571
79
0.666667
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,912
test_cache.py
Tribler_tribler/src/tribler/test_unit/core/content_discovery/test_cache.py
from asyncio import sleep from ipv8.keyvault.private.libnaclkey import LibNaCLSK from ipv8.peer import Peer from ipv8.requestcache import RequestCache from ipv8.test.base import TestBase from tribler.core.content_discovery.cache import SelectRequest class TestSelectRequest(TestBase): """ Tests for the SelectRequest cache. """ FAKE_PEER = Peer(LibNaCLSK(b"")) async def test_timeout_no_cb(self) -> None: """ Test if a SelectRequest can time out without a callback set. """ request_cache = RequestCache() with request_cache.passthrough(): cache = request_cache.add(SelectRequest(request_cache, {}, TestSelectRequest.FAKE_PEER)) await sleep(0) self.assertFalse(request_cache.has(cache.prefix, cache.number)) async def test_timeout_with_cb(self) -> None: """ Test if a SelectRequest can time out with a callback set. """ request_cache = RequestCache() callback_values = [] with request_cache.passthrough(): cache = request_cache.add(SelectRequest(request_cache, {}, TestSelectRequest.FAKE_PEER, timeout_callback=callback_values.append)) await sleep(0) self.assertFalse(request_cache.has(cache.prefix, cache.number)) self.assertIn(cache, callback_values)
1,399
Python
.py
32
35
100
0.665929
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,913
test_community.py
Tribler_tribler/src/tribler/test_unit/core/content_discovery/test_community.py
from __future__ import annotations import os import sys from binascii import hexlify from typing import TYPE_CHECKING, cast from unittest import skipIf from unittest.mock import AsyncMock, Mock, patch from ipv8.messaging.payload import IntroductionRequestPayload, NewIntroductionRequestPayload from ipv8.test.base import TestBase from ipv8.test.mocking.endpoint import MockEndpointListener import tribler from tribler.core.content_discovery.community import ContentDiscoveryCommunity, ContentDiscoverySettings from tribler.core.content_discovery.payload import ( PopularTorrentsRequest, SelectResponsePayload, TorrentsHealthPayload, VersionRequest, VersionResponse, ) from tribler.core.database.layers.knowledge import ResourceType from tribler.core.database.orm_bindings.torrent_metadata import LZ4_EMPTY_ARCHIVE from tribler.core.database.serialization import REGULAR_TORRENT from tribler.core.notifier import Notification, Notifier from tribler.core.torrent_checker.torrent_checker import TorrentChecker from tribler.core.torrent_checker.torrentchecker_session import HealthInfo if TYPE_CHECKING: from ipv8.community import CommunitySettings from ipv8.test.mocking.ipv8 import MockIPv8 class MockTorrentChecker(TorrentChecker): """ A mocked TorrentChecker. """ infohash = b"\x01" * 20 def __init__(self) -> None: """ Create a new mocked TorrentChecker. """ super().__init__(None, None, None, None, None) self._torrents_checked = {self.infohash: HealthInfo(self.infohash, 7, 42, 1337)} def set_torrents_checked(self, value: dict[bytes, HealthInfo]) -> None: """ Overwrite the default test value for torrents_checked. """ self._torrents_checked = value class TestContentDiscoveryCommunity(TestBase[ContentDiscoveryCommunity]): """ Tests for the ContentDiscoveryCommunity. """ def setUp(self) -> None: """ Create a new pair of ContentDiscoveryCommunities. 
""" super().setUp() self.initialize(ContentDiscoveryCommunity, 2) def create_node(self, settings: CommunitySettings | None = None, create_dht: bool = False, enable_statistics: bool = False) -> MockIPv8: """ Create an inert version of the ContentDiscoveryCommunity. """ overwrite_settings = ContentDiscoverySettings( torrent_checker=MockTorrentChecker(), metadata_store=Mock(get_entries_threaded=AsyncMock(), process_compressed_mdblob_threaded=AsyncMock()) ) out = super().create_node(overwrite_settings, create_dht, enable_statistics) out.overlay.cancel_all_pending_tasks() return out def torrent_checker(self, i: int) -> MockTorrentChecker: """ Get the torrent checker of node i. """ return cast(MockTorrentChecker, self.overlay(i).composition.torrent_checker) async def test_torrents_health_gossip(self) -> None: """ Test whether torrent health information is periodically gossiped around. """ with self.assertReceivedBy(1, [TorrentsHealthPayload], message_filter=[TorrentsHealthPayload]): self.overlay(0).gossip_random_torrents_health() await self.deliver_messages() async def test_torrents_health_gossip_no_checker(self) -> None: """ Test whether no torrent health information is spread without a torrent checker. """ self.overlay(0).composition.torrent_checker = None with self.assertReceivedBy(1, []): self.overlay(0).gossip_random_torrents_health() await self.deliver_messages() async def test_torrents_health_gossip_no_live(self) -> None: """ Test whether torrent health information is spread when no live torrents are known. """ with self.assertReceivedBy(1, [TorrentsHealthPayload], message_filter=[TorrentsHealthPayload]) as received: self.overlay(0).gossip_random_torrents_health() await self.deliver_messages() message, = received self.assertEqual(1, message.random_torrents_length) self.assertEqual(0, message.torrents_checked_length) def test_get_alive_torrents(self) -> None: """ Test if get_alive_checked_torrents returns a known alive torrent. 
""" self.assertEqual([HealthInfo(MockTorrentChecker.infohash, 7, 42, 1337)], self.overlay(0).get_alive_checked_torrents()) def test_get_alive_torrents_no_seeders(self) -> None: """ Test if get_alive_checked_torrents does not return torrents without seeders. """ self.torrent_checker(0).set_torrents_checked({ MockTorrentChecker.infohash: HealthInfo(MockTorrentChecker.infohash, 0, 42, 1337) }) self.assertEqual([], self.overlay(0).get_alive_checked_torrents()) def test_get_alive_torrents_no_leechers(self) -> None: """ Test if get_alive_checked_torrents does return torrents without leechers. """ health_info = HealthInfo(MockTorrentChecker.infohash, 7, 0, 1337) self.torrent_checker(0).set_torrents_checked({MockTorrentChecker.infohash: health_info}) self.assertEqual([health_info], self.overlay(0).get_alive_checked_torrents()) def test_get_alive_torrents_no_checker(self) -> None: """ Test if get_alive_checked_torrents returns nothing without a torrent checker. """ self.overlay(0).composition.torrent_checker = None self.assertEqual([], self.overlay(0).get_alive_checked_torrents()) async def test_popularity_search(self) -> None: """ Test searching several nodes for metadata entries based on title text. """ notifications = {} self.overlay(0).composition.notifier = Notifier() self.overlay(0).composition.notifier.add(Notification.remote_query_results, notifications.update) uuid, peers = self.overlay(0).send_search_request(txt_filter="ubuntu*") await self.deliver_messages() self.assertEqual(str(uuid), notifications["uuid"]) self.assertEqual([], notifications["results"]) self.assertEqual(hexlify(peers[0].mid).decode(), notifications["peer"]) async def test_popularity_search_deprecated(self) -> None: """ Test searching several nodes for metadata entries with a deprecated parameter. 
""" notifications = {} self.overlay(0).composition.notifier = Notifier() self.overlay(0).composition.notifier.add(Notification.remote_query_results, notifications.update) uuid, peers = self.overlay(0).send_search_request(txt_filter="ubuntu*", hide_xxx="1", metadata_type=REGULAR_TORRENT, exclude_deleted="1") await self.deliver_messages() self.assertEqual(str(uuid), notifications["uuid"]) self.assertEqual([], notifications["results"]) self.assertEqual(hexlify(peers[0].mid).decode(), notifications["peer"]) async def test_popularity_search_unparsed_metadata_type(self) -> None: """ Test searching several nodes for metadata entries with the metadata type passed as a string. """ notifications = {} self.overlay(0).composition.notifier = Notifier() self.overlay(0).composition.notifier.add(Notification.remote_query_results, notifications.update) uuid, peers = self.overlay(0).send_search_request(txt_filter="ubuntu*", hide_xxx="1", metadata_type=str(REGULAR_TORRENT), exclude_deleted="1") await self.deliver_messages() self.assertEqual(str(uuid), notifications["uuid"]) self.assertEqual([], notifications["results"]) self.assertEqual(hexlify(peers[0].mid).decode(), notifications["peer"]) async def test_request_for_version(self) -> None: """ Test if a version request is responded to. """ with self.assertReceivedBy(0, [VersionResponse]) as received: self.overlay(0).ez_send(self.peer(1), VersionRequest()) await self.deliver_messages() message, = received self.assertEqual(sys.platform, message.platform) self.assertEqual("Tribler git", message.version) async def test_request_for_version_build(self) -> None: """ Test if a build version request is responded to. 
""" with patch.dict(tribler.core.content_discovery.community.__dict__, {"version": lambda _: "1.2.3"}), \ self.assertReceivedBy(0, [VersionResponse]) as received: self.overlay(0).ez_send(self.peer(1), VersionRequest()) await self.deliver_messages() message, = received self.assertEqual(sys.platform, message.platform) self.assertEqual("Tribler 1.2.3", message.version) def test_search_for_tags_no_db(self) -> None: """ Test if search_for_tags returns None without tribler_db. """ self.overlay(0).composition.tribler_db = None self.assertIsNone(self.overlay(0).search_for_tags(tags=['tag'])) def test_search_for_tags_no_tags(self) -> None: """ Test if search_for_tags returns None without tags. """ self.overlay(0).composition.tribler_db = None self.assertIsNone(self.overlay(0).search_for_tags(tags=[])) def test_search_for_tags_only_valid_tags(self) -> None: """ Test if search_for_tags filters valid tags. """ args = {} self.overlay(0).composition.tribler_db = Mock(knowledge=Mock(get_subjects_intersection=args.update)) self.overlay(0).search_for_tags(tags=['invalid_tag' * 50, 'valid_tag']) self.assertEqual(ResourceType.TORRENT, args["subjects_type"]) self.assertEqual({'valid_tag'}, args["objects"]) self.assertEqual(ResourceType.TAG, args["predicate"]) self.assertEqual(False, args["case_sensitive"]) async def test_process_rpc_query(self) -> None: """ Test if process_rpc_query searches the TriblerDB and MetadataStore. 
""" async_mock = AsyncMock() self.overlay(0).composition.tribler_db = Mock(instance=Mock(return_value={"01" * 20})) self.overlay(0).composition.metadata_store.get_entries_threaded = async_mock await self.overlay(0).process_rpc_query({'first': 0, 'infohash_set': None, 'last': 100}) self.assertEqual(0, async_mock.call_args.kwargs["first"]) self.assertEqual({b"\x01" * 20}, async_mock.call_args.kwargs["infohash_set"]) self.assertEqual(100, async_mock.call_args.kwargs["last"]) async def test_remote_select(self) -> None: """ Test querying metadata entries from a remote machine. """ kwargs_dict = {"txt_filter": "ubuntu*", "metadata_type": REGULAR_TORRENT} mock_callback = Mock() self.overlay(1).send_remote_select(self.peer(0), **kwargs_dict, processing_callback=mock_callback) with self.assertReceivedBy(1, [SelectResponsePayload]): await self.deliver_messages() select_request = mock_callback.call_args[0][0] self.assertTrue(select_request.peer_responded) async def test_remote_select_deprecated(self) -> None: """ Test deprecated search keys receiving an empty archive response. """ with self.assertReceivedBy(0, [SelectResponsePayload]) as responses: self.overlay(0).send_remote_select(self.peer(1), subscribed=1) await self.deliver_messages() response, = responses assert response.raw_blob == LZ4_EMPTY_ARCHIVE def test_sanitize_query(self) -> None: """ Test if queries are properly sanitized. """ req_response_list = [ ({"first": None, "last": None}, {"first": 0, "last": 100}), ({"first": 123, "last": None}, {"first": 123, "last": 223}), ({"first": None, "last": 1000}, {"first": 0, "last": 100}), ({"first": 100, "last": None}, {"first": 100, "last": 200}), ({"first": 123}, {"first": 123, "last": 223}), ({"last": 123}, {"first": 0, "last": 100}), ({}, {"first": 0, "last": 100}), ] for req, resp in req_response_list: self.assertEqual(resp, self.overlay(0).sanitize_query(req)) def test_sanitize_query_binary_fields(self) -> None: """ Test if binary fields are properly sanitized. 
""" for field in ("infohash", "channel_pk"): field_in_b = b'0' * 20 field_in_hex = hexlify(field_in_b).decode() self.assertEqual(field_in_b, self.overlay(0).sanitize_query({field: field_in_hex})[field]) async def test_process_rpc_query_match_none(self) -> None: """ Check if a correct query with no match in our database returns no result. """ results = await self.overlay(0).process_rpc_query({}) self.assertEqual(0, len(results)) @skipIf(int(os.environ.get("TEST_IPV8_WITH_IPV6", "0")), "IPv4-only test") async def test_ping(self) -> None: """ Test if the keep-alive message works. Note: assertReceivedBy is illegal here because a ping is a GlobalTimeDistribution + IntroductionRequest! """ ep_listener = MockEndpointListener(self.endpoint(1)) self.overlay(0).send_ping(self.peer(1)) await self.deliver_messages() self.assertEqual(1, len(ep_listener.received_packets)) self.assertEqual(IntroductionRequestPayload.msg_id, ep_listener.received_packets[0][1][22]) @skipIf(not int(os.environ.get("TEST_IPV8_WITH_IPV6", "0")), "IPv6-only test") async def test_ping_ipv6(self) -> None: """ Test if the keep-alive message works when dealing with IPv6 addresses. Note: assertReceivedBy is illegal here because a ping is a GlobalTimeDistribution + NewIntroductionRequest! """ ep_listener = MockEndpointListener(self.endpoint(1)) self.overlay(0).send_ping(self.peer(1)) await self.deliver_messages() self.assertEqual(1, len(ep_listener.received_packets)) self.assertEqual(NewIntroductionRequestPayload.msg_id, ep_listener.received_packets[0][1][22]) async def test_deprecated_popular_torrents_request_no_live(self) -> None: """ The new protocol no longer uses PopularTorrentsRequest but still supports it. 
""" with self.assertReceivedBy(0, [TorrentsHealthPayload], message_filter=[TorrentsHealthPayload]) as received: self.overlay(0).ez_send(self.peer(1), PopularTorrentsRequest()) await self.deliver_messages() message, = received self.assertEqual(0, message.random_torrents_length) self.assertEqual(1, message.torrents_checked_length) self.assertEqual([], message.random_torrents) self.assertEqual((b"\x01" * 20, 7, 42, 1337), message.torrents_checked[0]) async def test_deprecated_popular_torrents_request_live(self) -> None: """ The new protocol no longer uses PopularTorrentsRequest but still supports it. """ with self.assertReceivedBy(0, [TorrentsHealthPayload], message_filter=[TorrentsHealthPayload]) as received: self.overlay(0).ez_send(self.peer(1), PopularTorrentsRequest()) await self.deliver_messages() message, = received self.assertEqual(0, message.random_torrents_length) self.assertEqual(1, message.torrents_checked_length) self.assertEqual([], message.random_torrents) self.assertEqual((b'\x01'*20, 7, 42, message.torrents_checked[0][3]), message.torrents_checked[0])
16,003
Python
.py
313
42.073482
115
0.659987
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,914
test_payload.py
Tribler_tribler/src/tribler/test_unit/core/content_discovery/test_payload.py
from __future__ import annotations from ipv8.test.base import TestBase from tribler.core.content_discovery.payload import ( PopularTorrentsRequest, RemoteSelectPayload, SelectResponsePayload, TorrentInfoFormat, TorrentsHealthPayload, VersionRequest, VersionResponse, ) from tribler.core.torrent_checker.dataclasses import HealthInfo, Source class TestContentDiscoveryPayloads(TestBase): """ Tests for the various payloads of the ContentDiscoveryCommunity. """ @classmethod def setUpClass(cls: TestContentDiscoveryPayloads) -> None: """ Create a test infohash. """ super().setUpClass() cls.infohash = b"\x01" * 20 def test_torrent_info_format(self) -> None: """ Test if TorrentInfoFormat initializes correctly. """ tif = TorrentInfoFormat(self.infohash, 7, 42, 1337) self.assertEqual(36, tif.length) self.assertEqual(self.infohash, tif.infohash) self.assertEqual(7, tif.seeders) self.assertEqual(42, tif.leechers) self.assertEqual(1337, tif.timestamp) self.assertEqual((self.infohash, 7, 42, 1337), tif.to_tuple()) def test_torrents_health_payload_empty(self) -> None: """ Test if TorrentsHealthPayload initializes correctly without health info. """ thp = TorrentsHealthPayload.create([], []) self.assertEqual(1, thp.msg_id) self.assertEqual(0, thp.random_torrents_length) self.assertEqual(0, thp.torrents_checked_length) self.assertEqual([], thp.random_torrents) self.assertEqual([], thp.torrents_checked) def test_torrents_health_payload_one_random(self) -> None: """ Test if TorrentsHealthPayload initializes correctly with one random health info. 
""" thp = TorrentsHealthPayload.create([HealthInfo(self.infohash, 1, 2, 3, True, Source.DHT, "tracker")], []) self.assertEqual(1, thp.msg_id) self.assertEqual(1, thp.random_torrents_length) self.assertEqual(0, thp.torrents_checked_length) self.assertEqual([(self.infohash, 1, 2, 3)], thp.random_torrents) self.assertEqual([], thp.torrents_checked) def test_torrents_health_payload_one_checked(self) -> None: """ Test if TorrentsHealthPayload initializes correctly with one checked health info. """ thp = TorrentsHealthPayload.create([], [HealthInfo(self.infohash, 1, 2, 3, True, Source.TRACKER, "tracker")]) self.assertEqual(1, thp.msg_id) self.assertEqual(0, thp.random_torrents_length) self.assertEqual(1, thp.torrents_checked_length) self.assertEqual([], thp.random_torrents) self.assertEqual([(self.infohash, 1, 2, 3)], thp.torrents_checked) def test_torrents_health_payload_many(self) -> None: """ Test if TorrentsHealthPayload initializes correctly with more health info. """ thp = TorrentsHealthPayload.create([ HealthInfo(self.infohash, 1, 2, 3, True, Source.TRACKER, "tracker1"), HealthInfo(self.infohash, 4, 5, 6, False, Source.POPULARITY_COMMUNITY, "tracker2") ], [ HealthInfo(self.infohash, 7, 8, 9, True, Source.DHT, "tracker3"), HealthInfo(self.infohash, 10, 11, 12, False, Source.UNKNOWN, "tracker4") ]) self.assertEqual(1, thp.msg_id) self.assertEqual(2, thp.random_torrents_length) self.assertEqual(2, thp.torrents_checked_length) self.assertEqual([(self.infohash, 1, 2, 3), (self.infohash, 4, 5, 6)], thp.random_torrents) self.assertEqual([(self.infohash, 7, 8, 9), (self.infohash, 10, 11, 12)], thp.torrents_checked) def test_popular_torrents_request(self) -> None: """ Test if PopularTorrentsRequest initializes correctly. """ ptr = PopularTorrentsRequest() self.assertEqual(2, ptr.msg_id) def test_version_request(self) -> None: """ Test if VersionRequest initializes correctly. 
""" vr = VersionRequest() self.assertEqual(101, vr.msg_id) def test_version_response(self) -> None: """ Test if VersionResponse initializes correctly. """ vr = VersionResponse("foo", "bar") self.assertEqual(102, vr.msg_id) self.assertEqual("foo", vr.version) self.assertEqual("bar", vr.platform) def test_remote_select_payload(self) -> None: """ Test if RemoteSelectPayload initializes correctly. """ rsp = RemoteSelectPayload(42, b"{}") self.assertEqual(201, rsp.msg_id) self.assertEqual(42, rsp.id) self.assertEqual(b"{}", rsp.json) def test_select_response_payload(self) -> None: """ Test if SelectResponsePayload initializes correctly. """ srp = SelectResponsePayload(42, b"foo") self.assertEqual(202, srp.msg_id) self.assertEqual(42, srp.id) self.assertEqual(b"foo", srp.raw_blob)
5,217
Python
.py
117
35.205128
119
0.635074
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,915
test_search_endpoint.py
Tribler_tribler/src/tribler/test_unit/core/content_discovery/restapi/test_search_endpoint.py
from __future__ import annotations from uuid import UUID from ipv8.keyvault.private.libnaclkey import LibNaCLSK from ipv8.peer import Peer from ipv8.peerdiscovery.network import Network from ipv8.test.base import TestBase from ipv8.test.mocking.endpoint import AutoMockEndpoint from tribler.core.content_discovery.community import ContentDiscoveryCommunity from tribler.core.content_discovery.restapi.search_endpoint import SearchEndpoint from tribler.core.restapi.rest_endpoint import HTTP_BAD_REQUEST from tribler.test_unit.base_restapi import MockRequest, response_to_json class MockContentDiscoveryCommunity(ContentDiscoveryCommunity): """ A mocked ContentDiscoveryCommunity. """ def __init__(self) -> None: """ Create a new mocked ContentDiscoveryCommunity. """ my_peer = Peer(LibNaCLSK(b"\x01" * 64)) super().__init__(self.settings_class(my_peer=my_peer, endpoint=AutoMockEndpoint(), network=Network())) def send_search_request(self, **kwargs) -> tuple[UUID, list[Peer]]: """ Fake the return values of a search request. """ return UUID(int=1), [self.my_peer] class SearchRequest(MockRequest): """ A MockRequest that mimics SearchRequests. """ def __init__(self, query: dict) -> None: """ Create a new SearchRequest. """ super().__init__(query, "PUT", "/search/remote") self.context = [MockContentDiscoveryCommunity()] class TestSearchEndpoint(TestBase): """ Tests for the SearchEndpoint REST endpoint. """ async def test_remote_search_bad_request(self) -> None: """ Test if a bad request returns the bad request status. """ endpoint = SearchEndpoint() response = await endpoint.remote_search(SearchRequest({"channel_pk": "GG"})) self.assertEqual(HTTP_BAD_REQUEST, response.status) async def test_remote_search(self) -> None: """ Test if a good search request returns a dict with the UUID and serving peers. 
""" endpoint = SearchEndpoint() response = await endpoint.remote_search(SearchRequest({"channel_pk": "AA", "fts_text": ""})) response_body_json = await response_to_json(response) self.assertEqual(200, response.status) self.assertEqual("00000000-0000-0000-0000-000000000001", response_body_json["request_uuid"]) self.assertEqual(["5b16b30807cdcb11f8214a5eb762c0dc1931c503"], response_body_json["peers"])
2,522
Python
.py
57
37.877193
110
0.697998
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,916
test_manager.py
Tribler_tribler/src/tribler/test_unit/core/recommender/test_manager.py
from __future__ import annotations from ipv8.test.base import TestBase from tribler.core.recommender.manager import Manager class TestManager(TestBase): """ Tests for the database manager. """ def setUp(self) -> None: """ Create a new memory-based manager. """ self.manager = Manager(":memory:") def test_add_query(self) -> None: """ Test if queries can be added and retrieved. """ self.manager.add_query('{"key":"value"}') result = self.manager.get_query(1) size = self.manager.get_total_queries() self.assertEqual(1, size) self.assertEqual(1, result.rowid) self.assertEqual(1, result.version) self.assertEqual('{"key":"value"}', result.json) def test_get_total_queries(self) -> None: """ Test if multiple queries are counted correctly. """ self.manager.add_query('{"key":"value"}') self.manager.add_query('{"key":"value"}') self.manager.add_query('{"key2":"value2"}') size = self.manager.get_total_queries() self.assertEqual(3, size)
1,190
Python
.py
32
28.25
57
0.59331
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,917
test_community.py
Tribler_tribler/src/tribler/test_unit/core/recommender/test_community.py
from __future__ import annotations import dataclasses import json from typing import cast from unittest.mock import patch from ipv8.keyvault.crypto import default_eccrypto from ipv8.peer import Peer from ipv8.test.base import TestBase from ipv8.test.mocking.ipv8 import MockIPv8 from tribler.core.recommender.community import ( RecommenderCommunity, RecommenderCommunityCrawler, RecommenderSettings, ResultItem, ) from tribler.core.recommender.payload import CrawlResponse class StubRecommenderCommunityCrawler(RecommenderCommunityCrawler): """ Stub class that avoids file I/O. """ def init_crawl_history(self) -> None: """ Don't load from disk. """ def finalize_query(self, peer: Peer, query_id: int, query: str, chosen_index: int, timestamp: int,results: list[ResultItem]) -> None: """ Don't write to disk. """ size, missing = self.crawl_history.get(peer.mid, (0, set())) missing.remove(query_id) self.crawl_history[peer.mid] = (size, missing) class MockManager: """ Using a memory-based mock is even faster than ``Manager(":memory:")``, tested in ``test_manager``. """ @dataclasses.dataclass class MockQuery: """ A database-equivalent for a Query. """ rowid: int version: int json: str def __init__(self) -> None: """ Don't do any database stuff. """ self.queries = [] def get_total_queries(self) -> int: """ Get the total number of queries that we know of. """ return len(self.queries) def get_query(self, query_id: int) -> MockQuery: """ Get the Query with a given id. """ return self.queries[query_id - 1] def add_query(self, json_data: str) -> None: """ Inject data into our database. """ self.queries += [self.MockQuery(len(self.queries) + 1, 0, json_data)] class TestRecommenderCommunity(TestBase[RecommenderCommunity]): """ Tests for the recommender community. """ def setUp(self) -> None: """ Create a new memory-based manager. 
""" self.node_id = 0 self.crawler_peer = Peer(default_eccrypto.generate_key("curve25519")) self.initialize(RecommenderCommunity, 2) def create_node(self, settings: RecommenderSettings | None = None, create_dht: bool = False, enable_statistics: bool = False) -> MockIPv8: """ Create a new memory-based community. """ community_class = RecommenderCommunity if self.node_id == 0 else StubRecommenderCommunityCrawler self.node_id += 1 settings = RecommenderSettings(manager=MockManager(), crawler_mid=self.crawler_peer.mid) return MockIPv8("low" if self.node_id==1 else self.crawler_peer, community_class, settings, create_dht, enable_statistics) def crawler_overlay(self) -> StubRecommenderCommunityCrawler: """ Get the overlay of the crawler (peer id 1). """ return cast(StubRecommenderCommunityCrawler, self.overlay(1)) async def test_crawl_table_empty(self) -> None: """ Test if an empty table can be crawled. """ with self.assertReceivedBy(1, [CrawlResponse]) as messages: self.crawler_overlay().crawl_next(self.peer(0)) await self.deliver_messages() response = json.loads(messages[0].data) self.assertEqual(0, response["version"]) self.assertEqual("table_size", response["type"]) self.assertEqual(0, response["total_queries"]) async def test_crawl_query_v0(self) -> None: """ Test if a single query can be crawled. 
""" self.overlay(0).manager.add_query('{"query": "test query", "chosen_index": 2, "results": [' f'{{"infohash": "{"01" * 20}", "seeders": 1, "leechers": 2}}, ' f'{{"infohash": "{"02" * 20}", "seeders": 3, "leechers": 4}}, ' f'{{"infohash": "{"03" * 20}", "seeders": 5, "leechers": 6}}' ']}') with self.assertReceivedBy(1, [CrawlResponse, CrawlResponse, CrawlResponse]) as messages: self.crawler_overlay().crawl_next(self.peer(0)) await self.deliver_messages() response1 = json.loads(messages[0].data) response2 = json.loads(messages[1].data) response3 = json.loads(messages[2].data) self.assertEqual("table_size", response1["type"]) self.assertEqual("query_info", response2["type"]) self.assertEqual(1, response2["query_id"]) self.assertEqual("query_fragment", response3["type"]) self.assertEqual(1, response3["query_id"]) self.assertListEqual(["01" * 20, "02" * 20, "03" * 20], response3["infohashes"]) self.assertListEqual([1, 3, 5], response3["seeders"]) self.assertListEqual([2, 4, 6], response3["leechers"]) self.assertIn(self.mid(0), self.crawler_overlay().crawl_history) self.assertEqual(1, self.crawler_overlay().crawl_history[self.mid(0)][0], "The known size should be 1") self.assertSetEqual(set(), self.crawler_overlay().crawl_history[self.mid(0)][1], "There should be no missing") async def test_crawl_query_v1(self) -> None: """ Test if a single query can be crawled. 
""" self.overlay(0).manager.add_query('{"query": "test query", "timestamp": 1234567890, "chosen_index": 2, "results": [' f'{{"infohash": "{"01" * 20}", "seeders": 1, "leechers": 2}}, ' f'{{"infohash": "{"02" * 20}", "seeders": 3, "leechers": 4}}, ' f'{{"infohash": "{"03" * 20}", "seeders": 5, "leechers": 6}}' ']}') with self.assertReceivedBy(1, [CrawlResponse, CrawlResponse, CrawlResponse]) as messages: self.crawler_overlay().crawl_next(self.peer(0)) await self.deliver_messages() response1 = json.loads(messages[0].data) response2 = json.loads(messages[1].data) response3 = json.loads(messages[2].data) self.assertEqual("table_size", response1["type"]) self.assertEqual("query_info", response2["type"]) self.assertEqual(1, response2["query_id"]) self.assertEqual("query_fragment", response3["type"]) self.assertEqual(1, response3["query_id"]) self.assertListEqual(["01" * 20, "02" * 20, "03" * 20], response3["infohashes"]) self.assertListEqual([1, 3, 5], response3["seeders"]) self.assertListEqual([2, 4, 6], response3["leechers"]) self.assertIn(self.mid(0), self.crawler_overlay().crawl_history) self.assertEqual(1, self.crawler_overlay().crawl_history[self.mid(0)][0], "The known size should be 1") self.assertSetEqual(set(), self.crawler_overlay().crawl_history[self.mid(0)][1], "There should be no missing") async def test_crawl_query_done(self) -> None: """ Test if a crawl after completion leads to no further requests for data. """ self.overlay(0).manager.add_query("{}") self.crawler_overlay().crawl_history[self.mid(0)] = (1, set()) with self.assertReceivedBy(1, [CrawlResponse]) as messages: self.crawler_overlay().crawl_next(self.peer(0)) await self.deliver_messages() response = json.loads(messages[0].data) self.assertEqual("table_size", response["type"]) self.assertEqual(1, response["total_queries"]) async def test_crawl_query_fragmented(self) -> None: """ Test if a fragmented crawl completes. 
""" self.overlay(0).manager.add_query('{"query": "test query", "chosen_index": 2, "results": [' f'{{"infohash": "{"01" * 20}", "seeders": 1, "leechers": 2}}, ' f'{{"infohash": "{"02" * 20}", "seeders": 3, "leechers": 4}}, ' f'{{"infohash": "{"03" * 20}", "seeders": 5, "leechers": 6}}' ']}') with patch.object(RecommenderCommunity, "MAX_RESULTS_IN_PACKET", 1), \ self.assertReceivedBy(1, [CrawlResponse] * 5) as messages: self.crawler_overlay().crawl_next(self.peer(0)) await self.deliver_messages() frag1_response = json.loads(messages[2].data) frag2_response = json.loads(messages[3].data) frag3_response = json.loads(messages[4].data) self.assertEqual("table_size", json.loads(messages[0].data)["type"]) self.assertEqual("query_info", json.loads(messages[1].data)["type"]) self.assertEqual("query_fragment", frag1_response["type"]) self.assertListEqual(["01" * 20], frag1_response["infohashes"]) self.assertEqual("query_fragment", frag2_response["type"]) self.assertListEqual(["02" * 20], frag2_response["infohashes"]) self.assertEqual("query_fragment", frag3_response["type"]) self.assertListEqual(["03" * 20], frag3_response["infohashes"])
9,631
Python
.py
189
39.206349
125
0.581715
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,918
test_payload.py
Tribler_tribler/src/tribler/test_unit/core/recommender/test_payload.py
from __future__ import annotations import json from ipv8.messaging.serialization import default_serializer from ipv8.test.base import TestBase from tribler.core.recommender.payload import Crawl, CrawlInfo, CrawlResponse class TestContentDiscoveryPayloads(TestBase): """ Tests for the various payloads of the RecommenderCommunity. """ def test_crawl_info(self) -> None: """ Test if CrawlInfo initializes correctly. """ ci = CrawlInfo(b"\x01" * 20, b"unknown") self.assertEqual(0, ci.msg_id) self.assertEqual(b"\x01" * 20, ci.mid) self.assertEqual(b"unknown", ci.unknown) def test_crawl_info_unpack(self) -> None: """ Test if CrawlInfo unpacks correctly. """ packed = b'\x01' * 20 + b'unknown' ci, _ = default_serializer.unpack_serializable(CrawlInfo, packed) self.assertEqual(0, ci.msg_id) self.assertEqual(b"\x01" * 20, ci.mid) self.assertEqual(b"unknown", ci.unknown) def test_crawl(self) -> None: """ Test if a Crawl message initializes correctly. """ c = Crawl(b"\x01" * 20, json.dumps({"key": "value"}).encode(), b"unknown") self.assertEqual(1, c.msg_id) self.assertEqual(b"\x01" * 20, c.mid) self.assertDictEqual({"key": "value"}, c.json()) self.assertEqual(b"unknown", c.unknown) def test_crawl_unpack(self) -> None: """ Test if a Crawl message unpacks correctly. """ packed = b'\x01' * 20 + b'\x00\x0F{"key":"value"}unknown' c, _ = default_serializer.unpack_serializable(Crawl, packed) self.assertEqual(1, c.msg_id) self.assertEqual(b"\x01" * 20, c.mid) self.assertDictEqual({"key": "value"}, c.json()) self.assertEqual(b"unknown", c.unknown) def test_crawl_response(self) -> None: """ Test if a CrawlResponse initializes correctly. """ cr = CrawlResponse(b"\x01" * 20, json.dumps({"key": "value"}).encode(), b"unknown") self.assertEqual(2, cr.msg_id) self.assertEqual(b"\x01" * 20, cr.mid) self.assertDictEqual({"key": "value"}, cr.json()) self.assertEqual(b"unknown", cr.unknown) def test_crawl_response_unpack(self) -> None: """ Test if a CrawlResponse unpacks correctly. 
""" packed = b'\x01' * 20 + b'\x00\x0F{"key":"value"}unknown' cr, _ = default_serializer.unpack_serializable(CrawlResponse, packed) self.assertEqual(2, cr.msg_id) self.assertEqual(b"\x01" * 20, cr.mid) self.assertDictEqual({"key": "value"}, cr.json()) self.assertEqual(b"unknown", cr.unknown)
2,801
Python
.py
64
34.546875
92
0.602664
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,919
test_tracker_manager.py
Tribler_tribler/src/tribler/test_unit/core/torrent_checker/test_tracker_manager.py
from __future__ import annotations from pathlib import Path from unittest.mock import Mock from ipv8.test.base import TestBase from tribler.core.libtorrent.trackers import get_uniformed_tracker_url from tribler.core.torrent_checker.tracker_manager import TrackerManager from tribler.test_unit.core.torrent_checker.mocks import MockTrackerState class MockTrackerManager(TrackerManager): """ A mocked TrackerManager that does not perform file io with the blacklist file. """ def __init__(self) -> None: """ Create a new MockTrackerManager. """ self.blacklist_contents = None super().__init__(Path("."), Mock(TrackerState=MockTrackerState())) def load_blacklist(self) -> None: """ Load the blacklist. """ if self.blacklist_contents: self.blacklist.extend([get_uniformed_tracker_url(url) for url in self.blacklist_contents.split("\n")]) class TestTrackerManager(TestBase): """ Tests for the TrackerManager class. """ def setUp(self) -> None: """ Create a new TrackerManager. """ self.tracker_manager = MockTrackerManager() MockTrackerState.instances = [] def test_add_tracker_invalid(self) -> None: """ Test if adding an invalid tracker works correctly. """ self.tracker_manager.add_tracker("http://test1.com") self.assertIsNone(self.tracker_manager.get_tracker_info("http://test1.com")) def test_add_tracker_valid(self) -> None: """ Test if adding a valid tracker works correctly. """ self.tracker_manager.add_tracker("http://test1.com:80/announce") self.assertIsNotNone(self.tracker_manager.get_tracker_info("http://test1.com:80/announce")) def test_remove_tracker(self) -> None: """ Test if removing a tracker works correctly. """ self.tracker_manager.add_tracker("http://test1.com:80/announce") self.tracker_manager.remove_tracker("http://test1.com:80/announce") self.assertIsNone(self.tracker_manager.get_tracker_info("http://test1.com:80/announce")) def test_update_tracker_info_non_existent(self) -> None: """ Test if a non-existent tracker's info is not updated. 
""" self.tracker_manager.update_tracker_info("http://nonexisting.com", True) self.assertIsNone(self.tracker_manager.get_tracker_info("http://nonexisting.com")) def test_update_tracker_info_failed(self) -> None: """ Test if the tracker info update failure is correctly updated. """ self.tracker_manager.add_tracker("http://test1.com:80/announce") self.tracker_manager.update_tracker_info("http://test1.com/announce", False) tracker_info = self.tracker_manager.get_tracker_info("http://test1.com/announce") self.assertIsNotNone(tracker_info) self.assertEqual(1, tracker_info['failures']) def test_update_tracker_info_success(self) -> None: """ Test if the tracker info update success is correctly updated. """ self.tracker_manager.add_tracker("http://test1.com:80/announce") self.tracker_manager.update_tracker_info("http://test1.com/announce", True) tracker_info = self.tracker_manager.get_tracker_info("http://test1.com/announce") self.assertTrue(tracker_info['is_alive']) def test_get_tracker_for_check_unknown(self) -> None: """ Test if the no tracker is returned when fetching from no eligible trackers. """ self.assertIsNone(self.tracker_manager.get_next_tracker()) def test_get_tracker_for_check_known(self) -> None: """ Test if the correct tracker is returned when fetching the next eligible tracker for the auto check. """ self.tracker_manager.add_tracker("http://test1.com:80/announce") self.assertEqual("http://test1.com/announce", self.tracker_manager.get_next_tracker().url) def test_get_tracker_for_check_blacklist(self) -> None: """ Test if the next tracker for autocheck is not in the blacklist. """ self.tracker_manager.add_tracker("http://test1.com:80/announce") self.tracker_manager.blacklist.append("http://test1.com/announce") self.assertIsNone(self.tracker_manager.get_next_tracker()) def test_load_blacklist_from_file_none(self) -> None: """ Test if we correctly load a blacklist without entries. 
""" self.tracker_manager.blacklist_contents = "" self.tracker_manager.load_blacklist() self.assertEqual([], self.tracker_manager.blacklist) def test_load_blacklist_from_file_single(self) -> None: """ Test if we correctly load a blacklist entry from a file. """ self.tracker_manager.blacklist_contents = "http://test1.com/announce" self.tracker_manager.load_blacklist() self.assertIn("http://test1.com/announce", self.tracker_manager.blacklist) def test_load_blacklist_from_file_multiple(self) -> None: """ Test if we correctly load blacklist entries from a file. """ self.tracker_manager.blacklist_contents = "http://test1.com/announce\nhttp://test2.com/announce" self.tracker_manager.load_blacklist() self.assertIn("http://test1.com/announce", self.tracker_manager.blacklist) self.assertIn("http://test2.com/announce", self.tracker_manager.blacklist)
5,537
Python
.py
115
40.191304
114
0.671058
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,920
test_torrent_checker.py
Tribler_tribler/src/tribler/test_unit/core/torrent_checker/test_torrent_checker.py
from __future__ import annotations import random import secrets import time from binascii import unhexlify from pathlib import Path from unittest.mock import AsyncMock, MagicMock, Mock, patch from ipv8.test.base import TestBase from ipv8.util import succeed import tribler from tribler.core.notifier import Notification, Notifier from tribler.core.torrent_checker.dataclasses import ( TOLERABLE_TIME_DRIFT, HealthInfo, TrackerResponse, ) from tribler.core.torrent_checker.torrent_checker import ( TORRENT_SELECTION_POOL_SIZE, TorrentChecker, aggregate_responses_for_infohash, ) from tribler.core.torrent_checker.torrentchecker_session import HttpTrackerSession, UdpSocketManager from tribler.core.torrent_checker.tracker_manager import TrackerManager from tribler.test_unit.core.torrent_checker.mocks import MockEntity, MockTorrentState, MockTrackerState from tribler.tribler_config import TriblerConfigManager class MockMiniTorrentMetadata(MockEntity): """ A partial mock of TorrentMetadata. """ instances = [] def __init__(self, infohash: bytes = b"", title: str = "", health: MockTorrentState | None = None) -> None: """ Create a new MockMiniTorrentMetadata. """ self.__class__.instances.append(self) self.title = title self.infohash = infohash self.health = health class TestTorrentChecker(TestBase): """ Tests for the TorrentChecker class. """ def setUp(self) -> None: """ Create a new tracker manager and a torrent checker. 
""" super().setUp() self.metadata_store = Mock() self.metadata_store.TorrentState = MockTorrentState() self.metadata_store.TrackerState = MockTrackerState() self.metadata_store.TorrentMetadata = MockMiniTorrentMetadata() self.metadata_store.TorrentState.__class__.instances = [] self.metadata_store.TrackerState.__class__.instances = [] self.metadata_store.TorrentMetadata.__class__.instances = [] self.tracker_manager = TrackerManager(state_dir=Path("."), metadata_store=self.metadata_store) self.torrent_checker = TorrentChecker(config=TriblerConfigManager(), tracker_manager=self.tracker_manager, download_manager=MagicMock(get_metainfo=AsyncMock()), notifier=MagicMock(), metadata_store=self.metadata_store) async def tearDown(self) -> None: """ Shut doown the torrent checker. """ await self.torrent_checker.shutdown() await super().tearDown() async def test_create_socket_fail(self) -> None: """ Test creation of the UDP socket of the torrent checker when it fails. """ def mocked_listen_on_udp() -> None: message = "Something went wrong" raise OSError(message) self.torrent_checker.socket_mgr = UdpSocketManager() self.torrent_checker.listen_on_udp = mocked_listen_on_udp await self.torrent_checker.create_socket_or_schedule() self.assertIsNone(self.torrent_checker.udp_transport) self.assertTrue(self.torrent_checker.is_pending_task_active("listen_udp_port")) async def test_health_check_blacklisted_trackers(self) -> None: """ Test if only cached results of a torrent are returned with only blacklisted trackers. 
""" tracker, = self.torrent_checker.mds.TrackerState.instances = [MockTrackerState(url="http://localhost/tracker")] self.torrent_checker.mds.TorrentState.instances = [MockTorrentState(infohash=b'a' * 20, seeders=5, leechers=10, trackers={tracker}, last_check=int(time.time()))] self.torrent_checker.tracker_manager.blacklist.append("http://localhost/tracker") result = await self.torrent_checker.check_torrent_health(b'a' * 20) self.assertEqual(5, result.seeders) self.assertEqual(10, result.leechers) async def test_health_check_cached(self) -> None: """ Test whether cached results of a torrent are returned when fetching the health of a torrent. """ tracker, = self.torrent_checker.mds.TrackerState.instances = [MockTrackerState(url="http://localhost/tracker")] self.torrent_checker.mds.TorrentState.instances = [MockTorrentState(infohash=b'a' * 20, seeders=5, leechers=10, trackers={tracker}, last_check=int(time.time()))] result = await self.torrent_checker.check_torrent_health(b'a' * 20) self.assertEqual(5, result.seeders) self.assertEqual(10, result.leechers) def test_load_torrents_check_from_db_no_self_checked(self) -> None: """ Test if the torrents_checked only considers self-checked torrents. """ self.torrent_checker.mds.TorrentState.instances = [MockTorrentState(infohash=secrets.token_bytes(20), seeders=random.randint(1, 100), leechers=random.randint(1, 100), last_check=int(time.time()), self_checked=False) for _ in range(10)] self.assertEqual(0, len(self.torrent_checker.torrents_checked)) def test_load_torrents_check_from_db_only_fresh(self) -> None: """ Test if the torrents_checked only considers fresh torrents. 
""" self.torrent_checker.mds.TorrentState.instances = [MockTorrentState(infohash=secrets.token_bytes(20), seeders=random.randint(1, 100), leechers=random.randint(1, 100), last_check=0, self_checked=True) for _ in range(10)] self.assertEqual(0, len(self.torrent_checker.torrents_checked)) def test_load_torrents_check_from_db_allow_fresh_self_checked(self) -> None: """ Test if the torrents_checked does consider fresh self-checked torrents. """ self.torrent_checker.mds.TorrentState.instances = [MockTorrentState(infohash=secrets.token_bytes(20), seeders=random.randint(1, 100), leechers=random.randint(1, 100), last_check=int(time.time()), self_checked=True) for _ in range(10)] self.assertEqual(10, len(self.torrent_checker.torrents_checked)) async def test_task_select_no_tracker(self) -> None: """ Test if we are not checking a random tracker when there are no trackers in the database. """ result = await self.torrent_checker.check_random_tracker() self.assertIsNone(result) async def test_check_random_tracker_shutdown(self) -> None: """ Test if we are not performing a tracker check when we are shutting down. """ await self.torrent_checker.shutdown() result = await self.torrent_checker.check_random_tracker() self.assertIsNone(result) async def test_check_random_tracker_not_alive(self) -> None: """ Test if we correctly update the tracker state when the number of failures is too large. """ self.torrent_checker.mds.TrackerState.instances = [MockTrackerState(url="http://localhost/tracker", failures=1000, alive=True)] tracker = self.torrent_checker.tracker_manager.TrackerState.get(lambda x: True) self.assertTrue(tracker.alive) async def test_task_select_tracker(self) -> None: """ Test if a random tracker can be selected. 
""" tracker, = self.torrent_checker.mds.TrackerState.instances = [MockTrackerState(url="http://localhost/tracker")] self.torrent_checker.mds.TorrentState.instances = [MockTorrentState(infohash=b'a' * 20, seeders=5, leechers=10, trackers={tracker})] controlled_session = HttpTrackerSession("127.0.0.1", ("localhost", 8475), "/announce", 5, None) controlled_session.connect_to_tracker = lambda: succeed(None) self.torrent_checker.create_session_for_request = lambda *args, **kwargs: controlled_session with patch.dict(tribler.core.torrent_checker.torrent_checker.__dict__, {"select": (lambda x: self.torrent_checker.mds.TorrentState.instances)}): result = await self.torrent_checker.check_random_tracker() self.assertIsNone(result) self.assertEqual(1, len(controlled_session.infohash_list)) await controlled_session.cleanup() async def test_tracker_test_error_resolve(self) -> None: """ Test if we capture the error when a tracker check fails. """ tracker, = self.tracker_manager.TrackerState.instances = [MockTrackerState(url="http://localhost/tracker")] self.torrent_checker.mds.TorrentState.instances = [ MockTorrentState(infohash=b'a' * 20, seeders=5, leechers=10, trackers={tracker}, last_check=int(time.time())) ] with patch.dict(tribler.core.torrent_checker.torrent_checker.__dict__, {"select": (lambda x: self.torrent_checker.mds.TorrentState.instances)}): result = await self.torrent_checker.check_random_tracker() self.assertIsNone(result) self.assertEqual({}, self.torrent_checker.sessions) async def test_tracker_no_infohashes(self) -> None: """ Test if the check of a tracker without associated torrents leads to no result. 
""" self.torrent_checker.tracker_manager.add_tracker('http://trackertest.com:80/announce') with patch.dict(tribler.core.torrent_checker.torrent_checker.__dict__, {"select": (lambda x: self.torrent_checker.mds.TorrentState.instances)}): result = await self.torrent_checker.check_random_tracker() self.assertIsNone(result) def test_get_valid_next_tracker_for_auto_check(self) -> None: """ Test if only valid tracker url are used for auto check. """ self.tracker_manager.TrackerState.instances = [ MockTrackerState("http://anno nce.torrentsmd.com:8080/announce"), MockTrackerState("http://announce.torrentsmd.com:8080/announce"), ] next_tracker = self.torrent_checker.get_next_tracker() self.assertEqual("http://announce.torrentsmd.com:8080/announce", next_tracker.url) def test_update_health(self) -> None: """ Test if torrent health can be updated. """ responses = [ TrackerResponse("udp://localhost:2801", [HealthInfo(b"\xee" * 20, leechers=1, seeders=2)]), TrackerResponse("DHT", [HealthInfo(b"\xee" * 20, leechers=12, seeders=13)]) ] health = aggregate_responses_for_infohash(b"\xee" * 20, responses) health.self_checked = True ts = MockTorrentState(infohash=b"\xee" * 20) self.torrent_checker.mds.TorrentState.instances = [ts] updated = self.torrent_checker.update_torrent_health(health) self.assertIsNotNone(updated) self.assertEqual(1, len(self.torrent_checker.torrents_checked)) self.assertEqual(12, ts.leechers) self.assertEqual(13, ts.seeders) async def test_check_local_torrents(self) -> None: """ Test if the random torrent health checking mechanism picks the right torrents. """ self.torrent_checker.mds.TorrentState.instances = [ MockTorrentState(bytes([i]) * 20, i, last_check=int(time.time()) if i < 20 else 0) for i in range(40) ] self.torrent_checker.mds.TorrentMetadata.instances = [ MockMiniTorrentMetadata(bytes([i]) * 20, f'torrent{i}', self.torrent_checker.mds.TorrentState.instances[i]) for i in range(40) ] stale_infohashes = [bytes([i]) * 20 for i in range(20, 40)] # 1. 
Popular torrents are in the front, and # 2. Older torrents are towards the back selection_range = (stale_infohashes[0:TORRENT_SELECTION_POOL_SIZE] + stale_infohashes[-TORRENT_SELECTION_POOL_SIZE:]) selected_torrents, _ = await self.torrent_checker.check_local_torrents() self.assertLessEqual(len(selected_torrents), TORRENT_SELECTION_POOL_SIZE) for t in selected_torrents: self.assertIn(t.infohash, selection_range) def test_update_torrent_health_invalid_health(self) -> None: """ Tests if invalid health is ignored in TorrentChecker.update_torrent_health(). """ health = HealthInfo(unhexlify('abcd0123'), last_check=int(time.time()) + TOLERABLE_TIME_DRIFT + 2) self.assertFalse(self.torrent_checker.update_torrent_health(health)) def test_update_torrent_health_not_self_checked(self) -> None: """ Tests if non-self-checked health is ignored in TorrentChecker.update_torrent_health(). """ health = HealthInfo(unhexlify('abcd0123')) self.assertFalse(self.torrent_checker.update_torrent_health(health)) def test_update_torrent_health_unknown_torrent(self) -> None: """ Tests if unknown torrent's health is ignored in TorrentChecker.update_torrent_health(). """ health = HealthInfo(unhexlify('abcd0123'), 1, 2, self_checked=True) self.assertFalse(self.torrent_checker.update_torrent_health(health)) async def test_update_torrent_health_no_replace(self) -> None: """ Tests if the TorrentChecker.notify() method is called even if the new health does not replace the old health. 
""" now = int(time.time()) mocked_handler = Mock() self.torrent_checker.notifier = Notifier() self.torrent_checker.notifier.add(Notification.torrent_health_updated, mocked_handler) self.torrent_checker.mds.TorrentState.instances = [MockTorrentState(infohash=unhexlify('abcd0123'), seeders=2, leechers=1, last_check=now, self_checked=True)] prev_health = self.torrent_checker.mds.TorrentState.instances[0].to_health() health = HealthInfo(unhexlify('abcd0123'), 1, 2, self_checked=True, last_check=now) self.assertFalse(self.torrent_checker.update_torrent_health(health)) notified = mocked_handler.call_args.kwargs self.assertEqual(prev_health.infohash, unhexlify(notified["infohash"])) self.assertEqual(prev_health.seeders, notified["num_seeders"]) self.assertEqual(prev_health.leechers, notified["num_leechers"]) self.assertEqual(prev_health.last_check, notified["last_tracker_check"])
16,018
Python
.py
276
43.623188
119
0.608626
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,921
mocks.py
Tribler_tribler/src/tribler/test_unit/core/torrent_checker/mocks.py
from __future__ import annotations from typing import TYPE_CHECKING, Callable, Iterator, Set from tribler.core.torrent_checker.dataclasses import HealthInfo if TYPE_CHECKING: from typing_extensions import Self class DBResult(list): """ Mock a Pony ORM result. """ def order_by(self, _: int) -> DBResult: """ Order by last check time. """ if len(self) > 0: if isinstance(self[0], MockTrackerState): self.sort(key=lambda x: x.last_check) elif isinstance(self[0], MockTorrentState): self.sort(key=lambda x: x.seeders, reverse=True) return self def limit(self, limit: int) -> DBResult: """ Limit the results to a given limit. """ while len(self) > limit: self.pop(-1) return self class MockEntity: """ A mocked Pony ORM Entity. """ instances: list def __call__(self: Self, *args, **kwargs) -> Self: # noqa: ANN002 """ Create a new MockEntity. """ return self.__class__(*args, **kwargs) def __iter__(self: Self) -> Iterator[Self]: """ Create an iterable from our known instances. """ return iter(self.__class__.instances) def select(self: Self, selector: Callable[[Self], bool]) -> DBResult: """ Apply a selector to our known instances. """ return DBResult(instance for instance in self.__class__.instances if selector(instance)) def get(self: Self, selector: Callable[[Self], bool] | None = None, **kwargs) -> Self | None: """ Get the instance belonging to the selector. """ for instance in self.instances: if selector is not None: if selector(instance): return instance elif all(getattr(instance, key) == kwargs[key] for key in kwargs): return instance return None def get_for_update(self: Self, **kwargs) -> Self | None: """ Get the instane belonging to the given keyword arguments. """ for instance in self.__class__.instances: if all(getattr(instance, key, None) == kwargs[key] for key in kwargs): return instance return None def set(self: Self, **kwargs) -> None: """ Set the given attributes to the given values. 
""" for key, value in kwargs.items(): setattr(self, key, value) def delete(self: Self) -> None: """ Delete this instance. """ self.__class__.instances.remove(self) class MockTrackerState(MockEntity): """ A mocked TrackerState Pony ORM database object. """ instances = [] def __init__(self, url: str = "", last_check: int = 0, alive: bool = True, torrents: Set | None = None, failures: int = 0) -> None: """ Create a new MockTrackerState and add it to our known instances. """ self.__class__.instances.append(self) self.rowid = 0 self.url = url self.last_check = last_check self.alive = alive self.torrents = torrents or set() self.failures = failures class MockTorrentState(MockEntity): """ A mocked TorrentState Pony ORM database object. """ instances = [] def __init__(self, infohash: bytes = b"", seeders: int = 0, leechers: int = 0, last_check: int = 0, # noqa: PLR0913 self_checked: bool = False, has_data: bool = True, metadata: Set | None = None, trackers: Set | None = None) -> None: """ Create a new MockTrackerState and add it to our known instances. """ self.__class__.instances.append(self) self.rowid = 0 self.infohash = infohash self.seeders = seeders self.leechers = leechers self.last_check = last_check self.self_checked = self_checked self.has_data = has_data self.metadata = metadata or set() self.trackers = trackers or set() @classmethod def from_health(cls: type[Self], health: HealthInfo) -> Self: """ Create from health info. """ return cls(infohash=health.infohash, seeders=health.seeders, leechers=health.leechers, last_check=health.last_check, self_checked=health.self_checked) def to_health(self) -> HealthInfo: """ Cast to health info. """ return HealthInfo(self.infohash, self.seeders, self.leechers, self.last_check, self.self_checked)
4,655
Python
.py
126
28.238095
120
0.585556
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,922
test_dataclasses.py
Tribler_tribler/src/tribler/test_unit/core/torrent_checker/test_dataclasses.py
import sys from ipv8.test.base import TestBase from tribler.core.torrent_checker.dataclasses import HealthInfo class TestHealthInfo(TestBase): """ Tests for the HealthInfo class. """ def test_is_valid_no_seeders(self) -> None: """ Test if health info with negative seeders is invalid. """ health = HealthInfo(b"\x00" * 20, -1, 200) self.assertFalse(health.is_valid()) def test_is_valid_no_leechers(self) -> None: """ Test if health info with negative leechers is invalid. """ health = HealthInfo(b"\x00" * 20, 200, -1) self.assertFalse(health.is_valid()) def test_is_valid_old(self) -> None: """ Test if old info is invalid. """ health = HealthInfo(b"\x00" * 20, 200, 200, last_check=sys.maxsize) self.assertFalse(health.is_valid()) def test_is_valid_healthy(self) -> None: """ Test if health info with recent healthy seeders and leechers is valid. """ health = HealthInfo(b"\x00" * 20, 200, 200) self.assertTrue(health.is_valid()) def test_old_old(self) -> None: """ Test if health info that is old is flagged as old. """ health = HealthInfo(b"\x00" * 20, 200, 200, last_check=0) self.assertTrue(health.old()) def test_old_new(self) -> None: """ Test if health info that is new is not flagged as old. """ health = HealthInfo(b"\x00" * 20, 200, 200) self.assertFalse(health.old()) def test_older_than(self) -> None: """ Test if old health info is older than new health info. """ health = HealthInfo(b"\x00" * 20, 200, 200, last_check=0) self.assertTrue(health.older_than(HealthInfo(b"\x00" * 20, 200, 200))) def test_much_older_than(self) -> None: """ Test if very old health info is older than new health info. """ health = HealthInfo(b"\x00" * 20, 200, 200, last_check=0) self.assertTrue(health.much_older_than(HealthInfo(b"\x00" * 20, 200, 200))) def test_should_replace_unrelated(self) -> None: """ Test if trying to replace unrelated health info raises an error. 
""" health = HealthInfo(b"\x00" * 20, 200, 200) with self.assertRaises(ValueError): health.should_replace(HealthInfo(b"\x01" * 20, 200, 200)) def test_should_replace_invalid(self) -> None: """ Test if invalid health info should not replace anything. """ health = HealthInfo(b"\x00" * 20, -1, 200) self.assertFalse(health.should_replace(HealthInfo(b"\x00" * 20, 200, 200))) def test_should_replace_own_self_checked(self) -> None: """ Test if self-checked health info should replace equal self-checked health info. """ health = HealthInfo(b"\x00" * 20, 200, 200, self_checked=True) self.assertFalse(health.should_replace(HealthInfo(b"\x00" * 20, 200, 200, self_checked=True))) def test_should_replace_not_self_checked_old(self) -> None: """ Test if self-checked health info should replace old non-self-checked health info. """ health = HealthInfo(b"\x00" * 20, 200, 200, self_checked=True) self.assertTrue(health.should_replace(HealthInfo(b"\x00" * 20, 200, 200, last_check=0))) def test_should_replace_not_self_checked_lower(self) -> None: """ Test if self-checked health info should replace non-self-checked health info with less seeders/leechers. """ health = HealthInfo(b"\x00" * 20, 200, 200, self_checked=True) self.assertTrue(health.should_replace(HealthInfo(b"\x00" * 20, 100, 200))) def test_should_replace_old(self) -> None: """ Test if newer health info should replace older health info. """ health = HealthInfo(b"\x00" * 20, 200, 200) self.assertTrue(health.should_replace(HealthInfo(b"\x00" * 20, 200, 200, last_check=0))) def test_should_replace_self_checked(self) -> None: """ Test if health info should replace self-checked health info. """ health = HealthInfo(b"\x00" * 20, 200, 200) self.assertFalse(health.should_replace(HealthInfo(b"\x00" * 20, 200, 200, self_checked=True))) def test_should_replace_equivalent_lower(self) -> None: """ Test if health info should replace other health info with less seeders/leechers. 
""" health = HealthInfo(b"\x00" * 20, 200, 200) self.assertTrue(health.should_replace(HealthInfo(b"\x00" * 20, 100, 200)))
4,690
Python
.py
104
36.75
112
0.618242
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,923
test_torrentchecker_session.py
Tribler_tribler/src/tribler/test_unit/core/torrent_checker/test_torrentchecker_session.py
import struct from asyncio import CancelledError, Future, ensure_future, sleep from unittest.mock import Mock, patch from aiohttp.web_exceptions import HTTPBadRequest from ipv8.test.base import TestBase from ipv8.util import succeed from libtorrent import bencode from tribler.core.torrent_checker.dataclasses import HealthInfo from tribler.core.torrent_checker.torrentchecker_session import ( FakeBep33DHTSession, FakeDHTSession, HttpTrackerSession, UdpSocketManager, UdpTrackerSession, ) class MockUdpSocketManager: """ A mocked UDP socket manager. """ transport = 1 def __init__(self) -> None: """ Create a new MockUdpSocketManager. """ self.response = None self.tracker_sessions = {} def send_request(self, data: bytes, tracker_session: UdpTrackerSession) -> Future: """ Fake sending a request and return the registered response. """ return succeed(self.response) class TestTrackerSession(TestBase): """ Tests for the TrackerSession classes. """ def setUp(self) -> None: """ Create a fake udp socket manager. """ self.fake_udp_socket_manager = MockUdpSocketManager() self.fake_dht_session = FakeDHTSession(Mock(), 10) self.session = None async def tearDown(self) -> None: """ Clean the registered session and helpers. """ if self.session is not None: await self.session.cleanup() await self.fake_dht_session.shutdown_task_manager() await super().tearDown() async def test_httpsession_scrape_no_body(self) -> None: """ Test if processing a scrape response of None leads to a ValueError. """ self.session = HttpTrackerSession("localhost", ("localhost", 8475), "/announce", 5, None) with self.assertRaises(ValueError): self.session.process_scrape_response(None) self.assertTrue(self.session.is_failed) async def test_httpsession_bdecode_fails(self) -> None: """ Test if processing a scrape response of an empty (bencoded) dictionary leads to a ValueError. 
""" self.session = HttpTrackerSession("localhost", ("localhost", 8475), "/announce", 5, None) with self.assertRaises(ValueError): self.session.process_scrape_response(bencode({})) self.assertTrue(self.session.is_failed) async def test_httpsession_code_not_200(self) -> None: """ Test if getting a HTTP code 400 leads to a ValueError when connecting. """ self.session = HttpTrackerSession("localhost", ("localhost", 8475), "/announce", 5, None) def fake_request(_: str) -> None: raise HTTPBadRequest with self.assertRaises(ValueError), patch.object(self.session.session, "get", fake_request): await self.session.connect_to_tracker() self.assertTrue(self.session.is_failed) async def test_httpsession_failure_reason_in_dict(self) -> None: """ Test if processing a scrape response of a failed (bencoded) dictionary leads to a ValueError. """ self.session = HttpTrackerSession("localhost", ("localhost", 8475), "/announce", 5, None) with self.assertRaises(ValueError): self. session.process_scrape_response(bencode({'failure reason': 'test'})) self.assertTrue(self.session.is_failed) async def test_httpsession_unicode_err(self) -> None: """ Test if connecting to a tracker with a unicode error in the url raises a UnicodeEncodeError. """ self.session = HttpTrackerSession("retracker.local", ("retracker.local", 80), "/announce?comment=%26%23%3B%28%2C%29%5B%5D%E3%5B%D4%E8%EB%FC%EC%EE%E2", 5, None) with self.assertRaises(UnicodeEncodeError): await self.session.connect_to_tracker() self.assertFalse(self.session.is_failed) async def test_pop_finished_transaction(self) -> None: """ Test if receiving a datagram for an already finished tracker session does not result in InvalidStateError. 
""" transaction_id = 123 mgr = UdpSocketManager() mgr.connection_made(Mock()) task = ensure_future(mgr.send_request(Mock(), Mock(proxy=None, transaction_id=transaction_id))) await sleep(0) self.assertNotEqual({}, mgr.tracker_sessions) data = struct.pack("!iiq", 124, transaction_id, 126) mgr.datagram_received(data, None) self.assertEqual({}, mgr.tracker_sessions) await task self.assertTrue(task.done()) async def test_proxy_transport(self) -> None: """ Test if the UdpSocketManager uses a proxy if specified. """ mgr = UdpSocketManager() mgr.connection_made(Mock()) mgr.proxy_transports['proxy_url'] = Mock() _ = ensure_future(mgr.send_request(b'', Mock(proxy='proxy_url', transaction_id=123))) await sleep(0) mgr.proxy_transports['proxy_url'].sendto.assert_called_once() mgr.transport.assert_not_called() mgr.tracker_sessions[123].cancel() _ = ensure_future(mgr.send_request(b'', Mock(proxy=None, transaction_id=123))) await sleep(0) mgr.proxy_transports['proxy_url'].sendto.assert_called_once() mgr.transport.sendto.assert_called_once() mgr.tracker_sessions[123].cancel() del _ async def test_httpsession_cancel_operation(self) -> None: """ Test if a canceled task is propagated through the HTTP session. """ self.session = HttpTrackerSession("127.0.0.1", ("localhost", 8475), "/announce", 5, None) task = ensure_future(self.session.connect_to_tracker()) with self.assertRaises(CancelledError): task.cancel() await task async def test_udpsession_cancel_operation(self) -> None: """ Test if a canceled task is propagated through the UDP session. """ self.session = UdpTrackerSession("127.0.0.1", ("localhost", 8475), "/announce", 0, None, self.fake_udp_socket_manager) task = ensure_future(self.session.connect_to_tracker()) with self.assertRaises(CancelledError): task.cancel() await task async def test_udpsession_handle_response_wrong_len(self) -> None: """ Test if, after receiving a correct packet, a session should still be in a failed state. 
""" self.session = UdpTrackerSession("localhost", ("localhost", 4782), "/announce", 0, None, self.fake_udp_socket_manager) self.fake_udp_socket_manager.response = b"too short" with self.assertRaises(ValueError): await self.session.connect() self.session.action, self.session.transaction_id = 123, 124 self.fake_udp_socket_manager.response = struct.pack("!iiq", 123, 124, 126) await self.session.connect() self.assertTrue(self.session.is_failed) async def test_udpsession_no_port(self) -> None: """ Test if a UDP session without a port raises a ValueError when connecting. """ self.session = UdpTrackerSession("localhost", ("localhost", 4782), "/announce", 0, None, self.fake_udp_socket_manager) self.fake_udp_socket_manager.transport = None with self.assertRaises(ValueError): await self.session.connect() self.assertTrue(self.session.is_failed) async def test_udpsession_handle_connection_wrong_action_transaction(self) -> None: """ Test if a wrong action response leads to a ValueError when scraping. """ self.session = UdpTrackerSession("localhost", ("localhost", 4782), "/announce", 0, None, self.fake_udp_socket_manager) self.fake_udp_socket_manager.response = struct.pack("!qq4s", 123, 123, b"test") with self.assertRaises(ValueError): await self.session.scrape() self.assertTrue(self.session.is_failed) async def test_udpsession_handle_packet(self) -> None: """ Test if a normal UDP packet leads to a non-failed UDP session. """ self.session = UdpTrackerSession("localhost", ("localhost", 4782), "/announce", 0, None, self.fake_udp_socket_manager) self.session.action, self.session.transaction_id = 123, 124 self.fake_udp_socket_manager.response = struct.pack("!iiq", 123, 124, 126) await self.session.connect() self.assertFalse(self.session.is_failed) async def test_udpsession_handle_wrong_action_transaction(self) -> None: """ Test if a wrong action leads to ValueError when connecting. 
""" self.session = UdpTrackerSession("localhost", ("localhost", 4782), "/announce", 0, None, self.fake_udp_socket_manager) self.fake_udp_socket_manager.response = struct.pack("!qq4s", 123, 123, b"test") with self.assertRaises(ValueError): await self.session.connect() self.assertTrue(self.session.is_failed) async def test_udpsession_mismatch(self) -> None: """ Test if a UDP session mismatch leads to a ValueError when scraping. """ self.session = UdpTrackerSession("localhost", ("localhost", 4782), "/announce", 0, None, self.fake_udp_socket_manager) self.session.action, self.session.transaction_id, self.session.infohash_list = 123, 124, [b'\x00' * 20] self.fake_udp_socket_manager.response = struct.pack("!ii", 123, 124) with self.assertRaises(ValueError): await self.session.scrape() self.assertTrue(self.session.is_failed) async def test_udpsession_response_too_short(self) -> None: """ Test if truncated response leads to a ValueError when scraping. """ self.session = UdpTrackerSession("localhost", ("localhost", 4782), "/announce", 0, None, self.fake_udp_socket_manager) self.fake_udp_socket_manager.response = struct.pack("!i", 123) with self.assertRaises(ValueError): await self.session.scrape() self.assertTrue(self.session.is_failed) async def test_udpsession_response_wrong_transaction_id(self) -> None: """ Test if a response with a wrong transaction id leads to a ValueError when scraping. """ self.session = UdpTrackerSession("localhost", ("localhost", 4782), "/announce", 0, None, self.fake_udp_socket_manager) self.fake_udp_socket_manager.response = struct.pack("!ii", 0, 1337) with self.assertRaises(ValueError): await self.session.scrape() self.assertTrue(self.session.is_failed) async def test_udpsession_response_list_len_mismatch(self) -> None: """ Test if a response with a wrong size leads to a ValueError when scraping. 
""" self.session = UdpTrackerSession("localhost", ("localhost", 4782), "/announce", 0, None, self.fake_udp_socket_manager) self.session.action, self.session.transaction_id, self.session.infohash_list = 123, 123, [b"test", b"test2"] self.fake_udp_socket_manager.response = struct.pack("!iiiii", 123, 123, 0, 1, 2) with self.assertRaises(ValueError): await self.session.scrape() self.assertTrue(self.session.is_failed) async def test_udpsession_correct_handle(self) -> None: """ Test if correct connect and scrape responses do not lead to a failed session. """ self.session = UdpTrackerSession("localhost", ("localhost", 4782), "/announce", 5, None, self.fake_udp_socket_manager) self.session.ip_address = "127.0.0.1" self.session.infohash_list.append(b'test') self.fake_udp_socket_manager.response = struct.pack("!iiq", 0, self.session.transaction_id, 2) await self.session.connect() self.fake_udp_socket_manager.response = struct.pack("!iiiii", 2, self.session.transaction_id, 0, 1, 2) await self.session.scrape() self.assertFalse(self.session.is_failed) async def test_big_correct_run(self) -> None: """ Test if correct connect and scrape responses with infohashes do not lead to a failed session. """ self.session = UdpTrackerSession("localhost", ("192.168.1.1", 1234), "/announce", 0, None, self.fake_udp_socket_manager) self.fake_udp_socket_manager.response = struct.pack("!iiq", self.session.action, self.session.transaction_id, 126) await self.session.connect() self.session.infohash_list = [b"test"] self.fake_udp_socket_manager.response = struct.pack("!iiiii", self.session.action, self.session.transaction_id, 0, 1, 2) await self.session.scrape() self.assertTrue(self.session.is_finished) async def test_http_unprocessed_infohashes(self) -> None: """ Test if a HTTP session that receives infohashes leads to a finished scrape. 
""" self.session = HttpTrackerSession("localhost", ("localhost", 8475), "/announce", 5, None) self.session.infohash_list.append(b"test") response = bencode({"files": {b"a" * 20: {"complete": 10, "incomplete": 10}}}) self.session.process_scrape_response(response) self.assertTrue(self.session.is_finished) async def test_failed_unicode(self) -> None: """ Test if response with non-unicode symbols leads to a ValueError when processing a scrape. """ self.session = HttpTrackerSession("localhost", ("localhost", 8475), "/announce", 5, None) with self.assertRaises(ValueError): self.session.process_scrape_response(bencode({'failure reason': '\xe9'})) def test_failed_unicode_udp(self) -> None: """ Test if failing a session with non-unicode symbols leads to a ValueError. """ self.session = UdpTrackerSession("localhost", ("localhost", 8475), "/announce", 0, None, self.fake_udp_socket_manager) with self.assertRaises(ValueError): self.session.failed('\xd0') async def test_connect_to_tracker(self) -> None: """ Test if metainfo can be looked up for a DHT session. """ metainfo = {b'seeders': 42, b'leechers': 42} self.fake_dht_session.download_manager.get_metainfo = Mock(return_value=succeed(metainfo)) self.fake_dht_session.add_infohash(b'a' * 20) response = await self.fake_dht_session.connect_to_tracker() self.assertEqual("DHT", response.url) self.assertEqual(1, len(response.torrent_health_list)) self.assertEqual(42, response.torrent_health_list[0].leechers) self.assertEqual(42, response.torrent_health_list[0].seeders) async def test_connect_to_tracker_fail(self) -> None: """ Test if the DHT session raises a TimeoutError when the metainfo lookup fails. """ self.fake_dht_session.download_manager.get_metainfo = Mock(side_effect=TimeoutError) self.fake_dht_session.add_infohash(b'a' * 20) with self.assertRaises(TimeoutError): await self.fake_dht_session.connect_to_tracker() async def test_connect_to_tracker_bep33(self) -> None: """ Test the metainfo lookup of the BEP33 DHT session. 
""" infohash_health = HealthInfo(b"a" * 20, seeders=1, leechers=2) mock_dlmgr = Mock(dht_health_manager=Mock(get_health=Mock(return_value=succeed([infohash_health])))) self.session = FakeBep33DHTSession(mock_dlmgr, 10) self.session.add_infohash(b"a" * 20) response = await self.session.connect_to_tracker() self.assertEqual("DHT", response.url) self.assertEqual(1, len(response.torrent_health_list)) self.assertEqual(2, response.torrent_health_list[0].leechers) self.assertEqual(1, response.torrent_health_list[0].seeders)
16,494
Python
.py
320
41.478125
126
0.639731
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,924
test_client.py
Tribler_tribler/src/tribler/test_unit/core/socks5/test_client.py
from __future__ import annotations from asyncio import sleep from unittest.mock import Mock, call from ipv8.test.base import TestBase from tribler.core.socks5.client import Socks5Client, Socks5ClientUDPConnection, Socks5Error class MockSocks5Client(Socks5Client): """ A Mocked Socks5Client that does not pull any event loop constructs. The following three methods are tested in test_server. """ async def _login(self) -> None: """ Fake a login. """ await sleep(0) self.transport = Mock() async def _associate_udp(self, local_addr: tuple[str, int] | None = None) -> None: """ Fake a UDP association. """ await sleep(0) self.connection = Socks5ClientUDPConnection(self.callback) async def _connect_tcp(self, target_addr: tuple[str, int]) -> None: """ Fake a TCP connection. """ await sleep(0) self.connected_to = target_addr class TestSocks5Client(TestBase): """ Tests for the Socks5Client class. """ def test_data_received_connected(self) -> None: """ Test if data is fed to the registered callback when a connection is open. """ callback = Mock() client = MockSocks5Client(None, callback) client.connected_to = ("localhost", 80) client.data_received(b"test") self.assertEqual(call(b"test", ("localhost", 80)), callback.call_args) async def test_data_received_queue_unconnected(self) -> None: """ Test if data is put in a single-item queue when no connection is open. """ callback = Mock() client = MockSocks5Client(None, callback) client.data_received(b"test") self.assertEqual(None, callback.call_args) self.assertTrue(client.queue.full()) self.assertEqual(b"test", await client.queue.get()) def test_tcp_connection_lost(self) -> None: """ Test if the tranport is set to None when a TCP connection is lost. 
""" client = MockSocks5Client(None, None) client.connected_to = ("localhost", 80) client.connection_lost(None) self.assertIsNone(client.transport) self.assertFalse(client.connected) def test_udp_connection_lost(self) -> None: """ Test if the tranport is set to None when a UDP connection is lost. """ client = MockSocks5Client(None, None) client.connection = Socks5ClientUDPConnection(client.callback) client.connection_lost(None) self.assertIsNone(client.transport) self.assertFalse(client.associated) async def test_associate_udp(self) -> None: """ Test if a client can associate through UDP. """ client = MockSocks5Client(None, None) await client.associate_udp() self.assertTrue(client.associated) async def test_disallow_associate_udp_on_tcp(self) -> None: """ Test if a client cannot associate through UDP using a pre-established TCP connection. """ client = MockSocks5Client(None, None) await client.connect_tcp(("localhost", 80)) with self.assertRaises(Socks5Error): await client.associate_udp() self.assertFalse(client.associated) async def test_connect_tcp(self) -> None: """ Test if a client can connect through TCP. """ client = MockSocks5Client(None, None) await client.connect_tcp(("localhost", 80)) self.assertTrue(client.connected) async def test_disallow_connect_tcp_on_udp(self) -> None: """ Test if a client cannot connect through TCP using a pre-established UDP association. """ client = MockSocks5Client(None, None) await client.associate_udp() with self.assertRaises(Socks5Error): await client.connect_tcp(("localhost", 80)) self.assertFalse(client.connected) async def test_send_to(self) -> None: """ Test if we can send over an established UDP connection. 
""" client = MockSocks5Client(None, None) await client.associate_udp() client.connection.transport = Mock() client.sendto(b"test", ("localhost", 80)) self.assertEqual(call(b"\x00\x00\x00\x03\tlocalhost\x00Ptest", None), client.connection.transport.sendto.call_args) async def test_send_to_no_connection(self) -> None: """ Test if we cannot send without an established UDP connection. """ client = MockSocks5Client(None, None) with self.assertRaises(Socks5Error): client.sendto(b"test", ("localhost", 80)) async def test_write(self) -> None: """ Test if we can write over an established TCP connection. """ client = MockSocks5Client(None, None) await client.connect_tcp(("localhost", 80)) client.write(b"test") self.assertEqual(call(b"test"), client.transport.write.call_args) async def test_write_no_connection(self) -> None: """ Test if we cannot write without an established TCP connection. """ client = MockSocks5Client(None, None) with self.assertRaises(Socks5Error): client.write(b"test")
5,379
Python
.py
133
31.93985
93
0.638916
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,925
test_aiohttp_connector.py
Tribler_tribler/src/tribler/test_unit/core/socks5/test_aiohttp_connector.py
from __future__ import annotations import asyncio import socket import sys from unittest.mock import Mock, patch from aiohttp import ClientRequest, ClientTimeout from ipv8.test.base import TestBase from yarl import URL from tribler.core.socks5.client import Socks5Client class MockSocks5Client(Socks5Client): """ Mocks a Socks5Client's TCP connection call. """ instance = None async def connect_tcp(self, target_addr: tuple[str, int]) -> None: """ Fake a TCP connection establishment. """ MockSocks5Client.instance = self await asyncio.sleep(0) # Give the parent some time to process the timeout check self.connected_to = target_addr self.transport = Mock() class TestSocks5Connector(TestBase): """ Tests for the Socks5Connector class. """ async def setUp(self) -> None: """ Create a patched connector. """ super().setUp() self.connection = None # Make sure are imports are clean (note: these may have been imported in other TestCases). for module_name in list(sys.modules.keys()): if module_name.startswith("tribler.core.socks5.") and module_name != "tribler.core.socks5.client": del sys.modules[module_name] # Patch the module. with patch(target="tribler.core.socks5.client.Socks5Client", new=MockSocks5Client) as self.client: from tribler.core.socks5.aiohttp_connector import Socks5Connector self.connector = Socks5Connector(None) async def tearDown(self) -> None: """ Close any connection and the connector. """ if self.connection is not None: self.connection.close() await self.connector.close() await super().tearDown() async def test_connect_without_timeout(self) -> None: """ Test if connect can be called without a timeout. """ self.connection = await self.connector.connect(ClientRequest("GET", URL("http://localhost/")), [], ClientTimeout()) self.assertEqual(("localhost", 80), self.client.instance.connected_to) async def test_connect_with_timeout(self) -> None: """ Test if connect can be called with a timeout. 
""" self.connection = await self.connector.connect(ClientRequest("GET", URL("http://localhost/")), [], ClientTimeout(sock_connect=1.0)) self.assertEqual(("localhost", 80), self.client.instance.connected_to) async def test_connect_pass_timeout(self) -> None: """ Test if connect correctly times out. """ with self.assertRaises(asyncio.exceptions.TimeoutError): self.connection = await self.connector.connect(ClientRequest("GET", URL("http://localhost/")), [], ClientTimeout(sock_connect=0.0)) async def test_resolver(self) -> None: """ Test if the resolver creates a correctly resolved dict. """ from tribler.core.socks5.aiohttp_connector import FakeResolver resolver = FakeResolver() resolved, = await resolver.resolve("testhostname", 8, socket.AF_INET6) self.assertEqual("testhostname", resolved["hostname"]) self.assertEqual("testhostname", resolved["host"]) self.assertEqual(8, resolved["port"]) self.assertEqual(socket.AF_INET6, resolved["family"]) self.assertEqual(0, resolved["proto"]) self.assertEqual(0, resolved["flags"])
3,660
Python
.py
82
35.060976
110
0.633146
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,926
test_server.py
Tribler_tribler/src/tribler/test_unit/core/socks5/test_server.py
from __future__ import annotations import socket from asyncio import Transport, get_event_loop, sleep from asyncio.base_events import Server from typing import Callable from unittest.mock import Mock, patch from ipv8.test.base import TestBase from tribler.core.socks5.client import Socks5Client, Socks5Error from tribler.core.socks5.conversion import UdpPacket, socks5_serializer from tribler.core.socks5.server import Socks5Server class MockTransport(Transport): """ Mocked Transport that offloads to a callback. """ def __init__(self, callback: Callable[[bytes], None]) -> None: """ Create a new MockTransport. """ super().__init__() self.callback = callback def write(self, data: bytes) -> None: """ Pretend to write, actually just call the callback. """ self.callback(data) def sendto(self, data: bytes, _: tuple) -> None: """ Pretend to write, actually just call the callback. """ self.callback(data) def close(self) -> None: """ Pretend to close. """ def get_extra_info(self, key: str) -> dict: """ Fake extra info. """ if key == "socket": return Mock(getsockname=Mock(return_value=('0.0.0.0', 0))) return ('0.0.0.0', 0) class MockServer(Server): """ A Server with a mocked close method. """ def close(self) -> None: """ Pretend to close. """ self._sockets = [] super().close() class TestSocks5Server(TestBase): """ Tests for the Socks5Server class. """ def setUp(self) -> None: """ Create a new server. """ self.socks5_server = Socks5Server(1, output_stream=Mock()) self.client_connection = None self.client_transport = None self.mock_server = None async def tearDown(self) -> None: """ Stop the server. """ await self.socks5_server.stop() await super().tearDown() def deliver_to_client(self, data: bytes) -> None: """ Fake a Server sending to a client. 
""" self.client_connection.queue.put_nowait(data) async def create_server(self, protocol_factory, host=None, port=None, *, # noqa: ANN001, PLR0913 family=socket.AF_UNSPEC, flags=socket.AI_PASSIVE, sock=None, backlog=100, # noqa: ANN001 ssl=None, reuse_address=None, reuse_port=None, ssl_handshake_timeout=None, # noqa: ANN001 start_serving=True) -> Server: # noqa: ANN001 """ Mock the event loop's create_server call. """ sockets = [Mock(getsockname=Mock(return_value=('0.0.0.0', 0)))] self.mock_server = MockServer(get_event_loop(), sockets, protocol_factory, ssl, backlog, ssl_handshake_timeout) connection = protocol_factory() connection.transport = MockTransport(self.deliver_to_client) return self.mock_server async def create_connection(self, protocol_factory, host=None, port=None, *, ssl=None, # noqa: ANN001, PLR0913 family=0, proto=0, flags=0, sock=None, local_addr=None, # noqa: ANN001 server_hostname=None, ssl_handshake_timeout=None, # noqa: ANN001 happy_eyeballs_delay=None, interleave=None) -> tuple[Transport, None]: # noqa: ANN001 """ Mock the event loop's create_connection call. """ self.client_connection = protocol_factory() self.client_transport = MockTransport(self.socks5_server.sessions[0].data_received) return self.client_transport, None async def create_datagram_endpoint(self, protocol_factory, local_addr=None, # noqa: ANN001, PLR0913 remote_addr=None, *, family=0, proto=0, flags=0, # noqa: ANN001 reuse_address=None, reuse_port=None, allow_broadcast=None, # noqa: ANN001 sock=None) -> tuple[Transport, None]: # noqa: ANN001 """ Mock the event loop's create_datagram_endpoint call. """ protocol_factory().transport = self.client_transport return self.client_transport, None async def create_server_and_client(self) -> Socks5Client: """ Create a server and an associated client. 
""" with patch.object(get_event_loop(), "create_server", self.create_server): await self.socks5_server.start() with patch.object(get_event_loop(), "create_connection", self.create_connection),\ patch.object(get_event_loop(), "create_datagram_endpoint", self.create_datagram_endpoint): return Socks5Client(('127.0.0.1', self.socks5_server.port), Mock()) async def test_socks5_udp_associate(self) -> None: """ Test if sending a UDP associate request to the server succeeds. """ client = await self.create_server_and_client() with patch.object(get_event_loop(), "create_connection", self.create_connection): await client.associate_udp() self.assertTrue(client.associated) async def test_socks5_sendto_fail(self) -> None: """ Test if sending a UDP packet without a successful association fails. """ client = await self.create_server_and_client() with patch.object(get_event_loop(), "create_connection", self.create_connection), \ self.assertRaises(Socks5Error): client.sendto(b'\x00', ('127.0.0.1', 123)) async def test_socks5_sendto_success(self) -> None: """ Test if sending/receiving a UDP packet works correctly. """ client = await self.create_server_and_client() with patch.object(get_event_loop(), "create_connection", self.create_connection): await client.associate_udp() packet = socks5_serializer.pack_serializable(UdpPacket(0, 0, ('127.0.0.1', 123), b'\x00')) self.client_connection.connection.datagram_received(packet, None) await sleep(0) client.callback.assert_called_once_with(b'\x00', ('127.0.0.1', 123)) async def test_socks5_tcp_connect(self) -> None: """ Test if sending a TCP connect request to the server succeeds. """ client = await self.create_server_and_client() with patch.object(get_event_loop(), "create_connection", self.create_connection): await client.connect_tcp(('127.0.0.1', 123)) assert client.transport is not None assert client.connection is None async def test_socks5_write(self) -> None: """ Test if sending a TCP data to the server succeeds. 
""" client = await self.create_server_and_client() with patch.object(get_event_loop(), "create_connection", self.create_connection): await client.connect_tcp(('127.0.0.1', 123)) client.write(b' ') await sleep(.1) self.socks5_server.output_stream.on_socks5_tcp_data.assert_called_once_with(self.socks5_server.sessions[0], ('127.0.0.1', 123), b' ')
7,399
Python
.py
161
35.677019
119
0.604383
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,927
test_connection.py
Tribler_tribler/src/tribler/test_unit/core/socks5/test_connection.py
from asyncio import sleep from unittest.mock import Mock, call from ipv8.test.base import TestBase from tribler.core.socks5.connection import ConnectionState, Socks5Connection class TestSocks5Connection(TestBase): """ Tests for the Socks5Connection class. """ def test_connection_made(self) -> None: """ Test if the transport is updated when a connection is made. """ connection = Socks5Connection(None) transport = Mock() connection.connection_made(transport) self.assertEqual(transport, connection.transport) def test_data_received_incomplete_handshake(self) -> None: """ Test if the buffer is not consumed when an incomplete handshake is received. """ connection = Socks5Connection(None) connection.data_received(b"\x05") # Just the version self.assertEqual(b"\x05", connection.buffer) self.assertEqual(ConnectionState.BEFORE_METHOD_REQUEST, connection.state) def test_data_received_complete_handshake(self) -> None: """ Test if the buffer is consumed and a no-authentication method is chosen when a handshake is received. """ connection = Socks5Connection(None) connection.connection_made(Mock()) connection.data_received(b"\x05\x01\x00") # Version 5, 1 method(s): [0] self.assertEqual(b"", connection.buffer) self.assertEqual(ConnectionState.CONNECTED, connection.state) self.assertEqual(call(b"\x05\x00"), connection.transport.write.call_args) # Version 5, no authentication def test_data_received_incomplete_request(self) -> None: """ Test if the buffer is not consumed when an incomplete request is received. """ connection = Socks5Connection(None) connection.connection_made(Mock()) connection.state = ConnectionState.CONNECTED connection.data_received(b"\x05") # Just the version self.assertEqual(b"\x05", connection.buffer) async def test_data_received_complete_associate_request(self) -> None: """ Test if associate requests are properly responded to. 
""" connection = Socks5Connection(Mock()) connection.socksserver.rust_endpoint.create_udp_associate = Mock(return_value=1337) connection.connection_made(Mock(get_extra_info=Mock(return_value=("127.0.0.1", 1337)))) connection.state = ConnectionState.CONNECTED connection.data_received(b"\x05\x03\x00\x01\x7f\x00\x00\x01\x059") # Version 5, associate, rsv 0 await sleep(0) self.assertEqual(b"", connection.buffer) self.assertEqual(call(b"\x05\x00\x00\x01\x7f\x00\x00\x01\x059"), # Version 5, succeeded, rsv 0, localhost:1337 connection.transport.write.call_args) def test_data_received_complete_bind_request(self) -> None: """ Test if bind requests are properly responded to. """ connection = Socks5Connection(Mock()) connection.connection_made(Mock(get_extra_info=Mock(return_value=("127.0.0.1", 1337)))) connection.state = ConnectionState.CONNECTED connection.data_received(b"\x05\x02\x00\x01\x7f\x00\x00\x01\x059") # Version 5, bind, rsv 0, localhost:1337 self.assertEqual(b"", connection.buffer) self.assertEqual(ConnectionState.PROXY_REQUEST_ACCEPTED, connection.state) self.assertEqual(call(b"\x05\x00\x00\x01\x7f\x00\x00\x01\x049"), # Version 5, succeeded, rsv 0, localhost:1081 connection.transport.write.call_args) def test_data_received_complete_connect_request(self) -> None: """ Test if connect requests are properly responded to. 
""" connection = Socks5Connection(Mock()) connection.connection_made(Mock(get_extra_info=Mock(return_value=("127.0.0.1", 1337)))) connection.state = ConnectionState.CONNECTED connection.data_received(b"\x05\x01\x00\x01\x7f\x00\x00\x01\x059") # Version 5, connect, rsv 0, localhost:1337 self.assertEqual(b"", connection.buffer) self.assertEqual(("127.0.0.1", 1337), connection.connect_to) self.assertEqual(call(b"\x05\x00\x00\x01\x7f\x00\x00\x01\x049"), # Version 5, succeeded, rsv 0, localhost:1081 connection.transport.write.call_args) def test_data_received_complete_unknown_request(self) -> None: """ Test if connect requests are actively refused if the command is unknown. """ connection = Socks5Connection(Mock()) connection.connection_made(Mock(get_extra_info=Mock(return_value=("127.0.0.1", 1337)))) connection.state = ConnectionState.CONNECTED connection.data_received(b"\x05\xAA\x00\x01\x7f\x00\x00\x01\x059") # Version 5, ???? self.assertEqual(b"", connection.buffer) self.assertEqual(call(b"\x05\x07\x00\x01\x00\x00\x00\x00\x00\x00"), # Version 5, unsupported, rsv 0, 0.0.0.0:0 connection.transport.write.call_args) def test_connection_lost(self) -> None: """ Test if the socks server is informed of connection losses. """ connection = Socks5Connection(Mock()) connection.connection_lost(None) self.assertEqual(call(connection), connection.socksserver.connection_lost.call_args) def test_close(self) -> None: """ Test if the udp connection and transport are closed when the connection is closed. """ connection = Socks5Connection(Mock()) udp_connection = Mock() transport = Mock() connection.udp_connection = udp_connection connection.transport = transport connection.close("test") self.assertIsNone(connection.transport) self.assertIsNone(connection.udp_connection) self.assertEqual(call(), udp_connection.close.call_args) self.assertEqual(call(), transport.close.call_args)
6,010
Python
.py
112
44.571429
119
0.674284
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,928
test_conversion.py
Tribler_tribler/src/tribler/test_unit/core/socks5/test_conversion.py
import struct from ipv8.messaging.interfaces.udp.endpoint import DomainAddress from ipv8.messaging.serialization import PackError from ipv8.test.base import TestBase from tribler.core.socks5.conversion import ( CommandRequest, CommandResponse, UdpPacket, socks5_serializer, ) class TestConversion(TestBase): """ Tests for the conversion logic. """ def test_decode_non_unicode_packet(self) -> None: """ Test decoding a non-unicode udp packet. """ encoded = b"\x00\x00\x00\x03\x19tracker1.good-tracker.com\x1f\x940x000" decoded, _ = socks5_serializer.unpack_serializable(UdpPacket, encoded) self.assertEqual(0, decoded.rsv) self.assertEqual(0, decoded.frag) self.assertEqual(DomainAddress('tracker1.good-tracker.com', 8084), decoded.destination) def test_decode_unicode_packet(self) -> None: """ Test decoding a unicode udp packet. """ encoded = b"\x00\x00\x00\x03 tracker1.unicode-tracker\xc3\x84\xc3\xa95\x11$\x00\x1f\x940x000" decoded, _ = socks5_serializer.unpack_serializable(UdpPacket, encoded) self.assertEqual(0, decoded.rsv) self.assertEqual(0, decoded.frag) self.assertEqual(DomainAddress('tracker1.unicode-tracker\xc4\xe95\x11$\x00', 8084), decoded.destination) def test_decode_udp_packet_fail(self) -> None: """ Test if decoding a badly encoded udp packet raises an exception. """ encoded = b'\x00\x00\x00\x03 tracker1.invalid-tracker\xc4\xe95\x11$\x00\x1f\x940x000' with self.assertRaises(PackError): socks5_serializer.unpack_serializable(UdpPacket, encoded) def test_decode_non_unicode_command_request(self) -> None: """ Test if command requests from non-unicode trackers can be decoded. 
""" encoded = b"\x05\x00\x00\x03\x19tracker1.good-tracker.com\x1f\x94" decoded, _ = socks5_serializer.unpack_serializable(CommandRequest, encoded) self.assertEqual(5, decoded.version) self.assertEqual(0, decoded.rsv) self.assertEqual(DomainAddress('tracker1.good-tracker.com', 8084), decoded.destination) def test_decode_unicode_command_request(self) -> None: """ Test if command requests from unicode trackers can be decoded. """ encoded = b"\x05\x00\x00\x03 tracker1.unicode-tracker\xc3\x84\xc3\xa95\x11$\x00\x1f\x94" decoded, _ = socks5_serializer.unpack_serializable(CommandResponse, encoded) self.assertEqual(5, decoded.version) self.assertEqual(0, decoded.rsv) self.assertEqual(DomainAddress('tracker1.unicode-tracker\xc4\xe95\x11$\x00', 8084), decoded.bind) def test_decode_command_request_fail_encoding(self) -> None: """ Test if decoding a badly encoded command request raises an exception. """ encoded = b'\x05\x00\x00\x03 tracker1.invalid-tracker\xc4\xe95\x11$\x00\x1f\x94' with self.assertRaises(PackError): socks5_serializer.unpack_serializable(CommandRequest, encoded) def test_decode_command_request_fail_invalid_address(self) -> None: """ Test if decoding a command request with an invalid address type raises an exception. """ with self.assertRaises(PackError): socks5_serializer.unpack_serializable(CommandRequest, struct.pack("!BBBB", 5, 0, 0, 5)) def test_decode_command_request_fail_ipv6(self) -> None: """ Test if decoding a command request with an ipv6 address raises an exception. """ with self.assertRaises(PackError): socks5_serializer.unpack_serializable(CommandRequest, struct.pack("!BBBB", 5, 0, 0, 4))
3,757
Python
.py
76
41.526316
112
0.689138
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,929
test_udp_connection.py
Tribler_tribler/src/tribler/test_unit/core/socks5/test_udp_connection.py
from asyncio import get_event_loop from unittest.mock import AsyncMock, Mock, call, patch from ipv8.messaging.interfaces.udp.endpoint import DomainAddress from ipv8.test.base import TestBase from tribler.core.socks5.udp_connection import RustUDPConnection, SocksUDPConnection class MockSocksUDPConnection(SocksUDPConnection): """ A SocksUDPConnection with a mocked open method. """ async def open(self) -> None: """ Fake opening a transport. """ self.transport = Mock() class TestSocksUDPConnection(TestBase): """ Tests for the SocksUDPConnection class. """ async def test_open(self) -> None: """ Test if opening a connection binds on 0.0.0.0:0. """ callback = AsyncMock(return_value=(Mock(), None)) connection = SocksUDPConnection(None, None) with patch.object(get_event_loop(), "create_datagram_endpoint", callback): await connection.open() self.assertEqual(("127.0.0.1", 0), callback.call_args.kwargs["local_addr"]) async def test_get_listen_port(self) -> None: """ Test if the listen port is determined from the transport address. """ connection = MockSocksUDPConnection(None, None) await connection.open() connection.transport.get_extra_info = Mock(return_value=("0.0.0.0", 42)) self.assertEqual(42, connection.get_listen_port()) async def test_send_without_remote(self) -> None: """ Test if data cannot be sent without a udp address. """ connection = MockSocksUDPConnection(None, None) await connection.open() self.assertFalse(connection.send_datagram(b"test")) async def test_send_with_remote(self) -> None: """ Test if data is sent to the registered udp address. """ connection = MockSocksUDPConnection(None, ("localhost", 1337)) await connection.open() self.assertTrue(connection.send_datagram(b"test")) self.assertEqual(call(b"test", ("localhost", 1337)), connection.transport.sendto.call_args) async def test_datagram_received_first(self) -> None: """ Test if the first received data updates the remote properly. 
""" socks_connection = Mock() socks_connection.socksserver.output_stream.on_socks5_udp_data = Mock(return_value=True) connection = MockSocksUDPConnection(socks_connection, None) await connection.open() value = connection.cb_datagram_received(b"\x00\x00\x00\x03\tlocalhost\x0590x000", ("localhost", 1337)) udp_payload = socks_connection.socksserver.output_stream.on_socks5_udp_data.call_args.args[1] self.assertTrue(value) self.assertEqual(("localhost", 1337), connection.remote_udp_address) self.assertEqual(0, udp_payload.rsv) self.assertEqual(0, udp_payload.frag) self.assertEqual(DomainAddress(host="localhost", port=1337), udp_payload.destination) self.assertEqual(b"0x000", udp_payload.data) async def test_datagram_received_wrong_source(self) -> None: """ Test if packets are dropped if they don't come from the remote. """ socks_connection = Mock() connection = MockSocksUDPConnection(socks_connection, ("localhost", 1337)) await connection.open() value = connection.cb_datagram_received(b"\x00\x00\x00\x03\tlocalhost\x0590x000", ("notlocalhost", 1337)) self.assertFalse(value) async def test_datagram_received_garbage(self) -> None: """ Test if packets are dropped if they don't contain valid UDP packets. """ socks_connection = Mock() connection = MockSocksUDPConnection(socks_connection, ("localhost", 1337)) await connection.open() value = connection.cb_datagram_received(b"\x00", ("localhost", 1337)) self.assertFalse(value) async def test_datagram_received_fragmented(self) -> None: """ Test if packets are dropped if they are fragmented. """ socks_connection = Mock() connection = MockSocksUDPConnection(socks_connection, ("localhost", 1337)) await connection.open() value = connection.cb_datagram_received(b"\x00\x00\x01\x03\tlocalhost\x0590x000", ("localhost", 1337)) self.assertFalse(value) async def test_datagram_received_no_destination(self) -> None: """ Test if packets are dropped if they don't specify a destination. 
""" socks_connection = Mock() connection = MockSocksUDPConnection(socks_connection, ("localhost", 1337)) await connection.open() transport = connection.transport connection.close() self.assertEqual(call(), transport.close.call_args) self.assertIsNone(connection.transport) async def test_open_rust_endpoint(self) -> None: """ Test if the Rust endpoint is associated when the connection is opened. """ rust_connection = RustUDPConnection(Mock(create_udp_associate=Mock(return_value=1337)), 1) await rust_connection.open() self.assertEqual(1337, rust_connection.port) self.assertEqual(1337, rust_connection.get_listen_port()) self.assertEqual(call(0, 1), rust_connection.rust_endpoint.create_udp_associate.call_args) async def test_open_rust_endpoint_twice(self) -> None: """ Test if the Rust endpoint is not opened twice. """ rust_connection = RustUDPConnection(Mock(create_udp_associate=Mock(return_value=42)), 1) rust_connection.port = 1337 await rust_connection.open() self.assertEqual(1337, rust_connection.port) self.assertEqual(1337, rust_connection.get_listen_port()) async def test_close_rust_endpoint(self) -> None: """ Test if the Rust endpoint gets closed with the connection. """ rust_connection = RustUDPConnection(Mock(create_udp_associate=Mock(return_value=1337)), 1) await rust_connection.open() rust_connection.close() self.assertEqual(call(1337), rust_connection.rust_endpoint.close_udp_associate.call_args) def test_close_unopened_rust_endpoint(self) -> None: """ Test if the Rust endpoint does not close an unopened connection. """ rust_connection = RustUDPConnection(Mock(), 1) rust_connection.close() self.assertIsNone(rust_connection.rust_endpoint.close_udp_associate.call_args)
6,570
Python
.py
137
39.620438
113
0.672247
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,930
test_community.py
Tribler_tribler/src/tribler/test_unit/core/knowledge/test_community.py
from __future__ import annotations import dataclasses from datetime import datetime, timezone from random import sample from typing import TYPE_CHECKING from unittest.mock import Mock from ipv8.keyvault.crypto import default_eccrypto from ipv8.test.base import TestBase from pony.orm import db_session from tribler.core.database.layers.knowledge import Operation, ResourceType from tribler.core.knowledge.community import ( KnowledgeCommunity, KnowledgeCommunitySettings, is_valid_resource, validate_operation, validate_resource, validate_resource_type, ) from tribler.core.knowledge.payload import StatementOperation, StatementOperationMessage if TYPE_CHECKING: from ipv8.community import CommunitySettings from ipv8.test.mocking.ipv8 import MockIPv8 @dataclasses.dataclass class Peer: """ A mocked Peer class. """ public_key: bytes added_at: datetime = dataclasses.field(default=datetime.now(tz=timezone.utc)) operations: set[StatementOp] = dataclasses.field(default_factory=set) @dataclasses.dataclass class Resource: """ A mocked Resource class. """ name: str type: int subject_statements: set[Statement] = dataclasses.field(default_factory=set) object_statements: set[Statement] = dataclasses.field(default_factory=set) torrent_healths: set = dataclasses.field(default_factory=set) trackers: set = dataclasses.field(default_factory=set) @dataclasses.dataclass class Statement: """ A mocked Statement class. """ subject: Resource object: Resource operations: set[StatementOp] added_count: int = 0 removed_count: int = 0 local_operation: int = 0 @dataclasses.dataclass class StatementOp: """ A mocked StatementOp class. """ statement: Statement peer: Peer operation: int clock: int signature: bytes updated_at: datetime = dataclasses.field(default=datetime.now(tz=timezone.utc)) auto_generated: bool = False class TestKnowledgeCommunity(TestBase[KnowledgeCommunity]): """ Tests for the KnowledgeCommunity. """ def setUp(self) -> None: """ Create two nodes. 
""" super().setUp() self.initialize(KnowledgeCommunity, 2, KnowledgeCommunitySettings(request_interval=0.1)) self.operations = [] self.signatures = [] self.statement_ops = [] def create_node(self, settings: CommunitySettings | None = None, create_dht: bool = False, enable_statistics: bool = False) -> MockIPv8: """ Create a mocked database and new key for each node. """ settings.db = Mock() settings.key = default_eccrypto.generate_key("curve25519") out = super().create_node(settings, create_dht, enable_statistics) out.overlay.cancel_all_pending_tasks() return out def create_operation(self, subject: str = "1" * 20, obj: str = "", sign_correctly: bool = True) -> StatementOperation: """ Create an operation with the given subject and object. """ operation = StatementOperation(subject_type=ResourceType.TORRENT, subject=subject, predicate=ResourceType.TAG, object=obj, operation=Operation.ADD, clock=len(self.operations) + 1, creator_public_key=self.overlay(0).key.pub().key_to_bin()) self.operations.append((operation, self.overlay(0).sign(operation) if sign_correctly else b'1' * 64)) self.statement_ops.append(StatementOp( statement=Statement( Resource(self.operations[-1][0].subject, self.operations[-1][0].subject_type), Resource(self.operations[-1][0].object, self.operations[-1][0].predicate), set() ), peer=Peer(self.overlay(0).key.pub().key_to_bin()), operation=self.operations[-1][0].operation, clock=self.operations[-1][0].clock, signature=self.operations[-1][1] )) return operation @db_session def fill_db(self) -> None: """ Create 5 correct, of which one has unicode characters, and 5 incorrect operations. """ for i in range(9): self.create_operation(obj=f'{i}' * 3, sign_correctly=i < 4) self.create_operation(subject='Контент', obj='Тэг', sign_correctly=True) self.overlay(0).db.knowledge.get_operations_for_gossip = lambda count: sample(self.statement_ops, count) async def test_gossip(self) -> None: """ Test if the 5 correct messages are gossiped. 
""" self.fill_db() with self.assertReceivedBy(1, [StatementOperationMessage] * 10) as received: self.overlay(1).request_operations() await self.deliver_messages() received_objects = {message.operation.object for message in received} self.assertEqual(10, len(received_objects)) self.assertEqual(5, len(self.overlay(1).db.knowledge.add_operation.call_args_list)) async def test_on_request_eat_exceptions(self) -> None: """ Test faulty statement ops have their ValueError caught. """ self.fill_db() self.statement_ops[0].statement.subject.name = "" # Fails validate_resource(operation.subject) with self.assertReceivedBy(1, [StatementOperationMessage] * 9) as received: self.overlay(1).request_operations() await self.deliver_messages() received_objects = {message.operation.object for message in received} self.assertEqual(9, len(received_objects)) self.assertEqual(4, len(self.overlay(1).db.knowledge.add_operation.call_args_list)) async def test_no_peers(self) -> None: """ Test if no error occurs in the community, in case there are no peers. """ self.overlay(1).network.remove_peer(self.peer(0)) self.fill_db() with self.assertReceivedBy(0, []), self.assertReceivedBy(1, []): self.overlay(1).request_operations() await self.deliver_messages() def test_valid_tag(self) -> None: """ Test if a normal tag is valid. """ tag = "Tar " validate_resource(tag) # no exception self.assertTrue(is_valid_resource(tag)) def test_invalid_tag_nothing(self) -> None: """ Test if nothing is not a valid tag. """ tag = "" self.assertFalse(is_valid_resource(tag)) with self.assertRaises(ValueError): validate_resource(tag) def test_invalid_tag_short(self) -> None: """ Test if a short tag is not valid. """ tag = "t" self.assertFalse(is_valid_resource(tag)) with self.assertRaises(ValueError): validate_resource(tag) def test_invalid_tag_long(self) -> None: """ Test if a long tag is not valid. 
""" tag = "t" * 51 self.assertFalse(is_valid_resource(tag)) with self.assertRaises(ValueError): validate_resource(tag) def test_correct_operation(self) -> None: """ Test if a correct operation is valid. """ for operation in Operation: validate_operation(operation) # no exception validate_operation(operation.value) # no exception def test_incorrect_operation(self) -> None: """ Test if an incorrect operation raises a ValueError. """ max_operation = max(Operation) with self.assertRaises(ValueError): validate_operation(max_operation.value + 1) def test_correct_relation(self) -> None: """ Test if a correct relation is valid. """ for relation in ResourceType: validate_resource_type(relation) # no exception validate_resource_type(relation.value) # no exception def test_incorrect_relation(self) -> None: """ Test if an incorrect relation raises a ValueError. """ max_relation = max(ResourceType) with self.assertRaises(ValueError): validate_operation(max_relation.value + 1)
8,315
Python
.py
209
31.507177
118
0.645129
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,931
test_content_bundling.py
Tribler_tribler/src/tribler/test_unit/core/knowledge/test_content_bundling.py
from ipv8.test.base import TestBase from tribler.core.knowledge.content_bundling import _create_name, calculate_diversity, group_content_by_number class TestContentBundling(TestBase): """ Tests for content bundling functionality. """ def test_group_content_by_number_empty_list(self) -> None: """ Test if group_content_by_number returns an empty dict if an empty list passed. """ self.assertEqual({}, group_content_by_number([])) def test_group_content_by_number(self) -> None: """ Test if group_content_by_number group content by a first number. """ content_list = [ {"name": "item 2"}, {"name": "item 1"}, {"name": "item with number1"}, {"name": "item with number 2 and 3"}, {"name": "item without number"}, {"item": "without a name"}, {"item": "without a name but with 1 number"}, ] actual = group_content_by_number(content_list) expected = { "Item 2": [{"name": "item 2"}, {"name": "item with number 2 and 3"}], "Item 1": [{"name": "item 1"}, {"name": "item with number1"}] } self.assertEqual(expected, actual) def test_group_content_by_number_extract_no_spaces(self) -> None: """ Test if group_content_by_number extracts correct group name from text without spaces. """ actual = group_content_by_number([{"name": "text123"}], min_group_size=1) self.assertEqual({"Text 123": [{"name": "text123"}]}, actual) def test_group_content_by_number_extract_period(self) -> None: """ Test if group_content_by_number extracts correct group name from text with a period. """ actual = group_content_by_number([{"name": "text.123"}], min_group_size=1) self.assertEqual({"Text 123": [{"name": "text.123"}]}, actual) def test_group_content_by_number_extract_complex(self) -> None: """ Test if group_content_by_number extracts correct group name from text with many numbers and strings. 
""" actual = group_content_by_number([{"name": "123any345text678"}], min_group_size=1) self.assertEqual({"Text 123": [{"name": "123any345text678"}]}, actual) def test_group_content_by_number_extract_simplify_number(self) -> None: """ Test if group_content_by_number extracts correct group name from text with a 0-prepended number. """ actual = group_content_by_number([{"name": "012"}], min_group_size=1) self.assertEqual({"12": [{"name": "012"}]}, actual) def test_create_name(self) -> None: """ Test if _create_name creates a group name based on the most common word in the title. """ content_list = [ {"name": "Individuals and interactions over processes and tools"}, {"name": "Working software over comprehensive documentation"}, {"name": "Customer collaboration over contract negotiation"}, {"name": "Responding to change over following a plan"}, ] self.assertEqual("Over 1", _create_name(content_list, "1", min_word_length=4)) def test_create_name_non_latin(self) -> None: """ Test if _create_name creates a group name based on the most common word in the title with non-latin characters. """ content_list = [ {"name": "Может быть величайшим триумфом человеческого гения является то, "}, {"name": "что человек может понять вещи, которые он уже не в силах вообразить"}, ] self.assertEqual("Может 2", _create_name(content_list, "2")) def test_calculate_diversity_match_one(self) -> None: """ Test if calculate_diversity finds one other word and calculates the CTTR. """ content_list = [{"name": "word wor wo w"}] self.assertEqual(10, int(10.0 * calculate_diversity(content_list, 3))) def test_calculate_diversity_match_two(self) -> None: """ Test if calculate_diversity finds two other words and calculates the CTTR. """ content_list = [{"name": "word wor wo w"}] self.assertEqual(12, int(10.0 * calculate_diversity(content_list, 2))) def test_calculate_diversity_match_three(self) -> None: """ Test if calculate_diversity finds three other words and calculates the CTTR. 
""" content_list = [{"name": "word wor wo w"}] self.assertEqual(14, int(10.0 * calculate_diversity(content_list, 1))) def test_calculate_diversity_match_all(self) -> None: """ Test if calculate_diversity finds all (three) other words and calculates the CTTR. """ content_list = [{"name": "word wor wo w"}] self.assertEqual(14, int(10.0 * calculate_diversity(content_list, 1))) def test_calculate_diversity_no_words(self) -> None: """ Test if calculate_diversity returns 0 if there are no words in the content list. """ content_list = [{"name": ""}] self.assertEqual(0, calculate_diversity(content_list)) def test_calculate_diversity(self) -> None: """ Test if calculate_diversity calculates diversity based on the text. """ self.assertEqual(70, int(100.0 * calculate_diversity([{"name": "The"}], min_word_length=3))) self.assertEqual(100, int(100.0 * calculate_diversity([{"name": "The quick"}], min_word_length=3))) self.assertEqual(122, int(100.0 * calculate_diversity([{"name": "The quick brown"}], min_word_length=3))) self.assertEqual(106, int(100.0 * calculate_diversity([{"name": "The quick brown the"}], min_word_length=3))) self.assertEqual(94, int(100.0 * calculate_diversity([{"name": "The quick brown the quick"}], min_word_length=3)))
6,218
Python
.py
114
42.842105
120
0.600236
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,932
test_operations_requests.py
Tribler_tribler/src/tribler/test_unit/core/knowledge/test_operations_requests.py
from ipv8.test.base import TestBase from tribler.core.knowledge.operations_requests import OperationsRequests class TestOperationsRequests(TestBase): """ Tests for the OperationsRequests class. """ def setUp(self) -> None: """ Create a new OperationsRequests to test with. """ super().setUp() self.operations_requests = OperationsRequests() def test_add_peer(self) -> None: """ Test if a peer can be registered. """ self.operations_requests.register_peer("peer", number_of_responses=10) self.assertEqual(10, self.operations_requests.requests["peer"]) def test_clear_requests(self) -> None: """ Test if requests can be cleared. """ self.operations_requests.register_peer("peer", number_of_responses=10) self.operations_requests.clear_requests() self.assertEqual(0, len(self.operations_requests.requests)) def test_valid_peer(self) -> None: """ Test if peers with a non-zero number of requests are seen as valid. """ self.operations_requests.register_peer("peer", number_of_responses=10) self.operations_requests.validate_peer("peer") self.assertIn("peer", self.operations_requests.requests) def test_missed_peer(self) -> None: """ Test if peers with zero requests are seen as invalid. """ with self.assertRaises(ValueError): self.operations_requests.validate_peer("peer") def test_invalid_peer(self) -> None: """ Test if validating a peer lowers it response count. """ self.operations_requests.register_peer("peer", number_of_responses=1) self.operations_requests.validate_peer("peer") with self.assertRaises(ValueError): self.operations_requests.validate_peer("peer")
1,899
Python
.py
46
33.086957
78
0.65506
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,933
test_payload.py
Tribler_tribler/src/tribler/test_unit/core/knowledge/test_payload.py
from __future__ import annotations from ipv8.test.base import TestBase from tribler.core.knowledge.payload import ( RawStatementOperationMessage, RequestStatementOperationMessage, StatementOperation, StatementOperationMessage, StatementOperationSignature, ) class TestHTTPPayloads(TestBase): """ Tests for the various payloads of the KnowledgeCommunity. """ def test_statement_operation(self) -> None: """ Test if StatementOperation initializes correctly. """ so = StatementOperation(1, "foo", 2, "bar", 3, 4, b"baz") self.assertEqual(1, so.subject_type) self.assertEqual("foo", so.subject) self.assertEqual(2, so.predicate) self.assertEqual("bar", so.object) self.assertEqual(3, so.operation) self.assertEqual(4, so.clock) self.assertEqual(b"baz", so.creator_public_key) def test_statement_operation_signature(self) -> None: """ Test if StatementOperationSignature initializes correctly. """ sos = StatementOperationSignature(b"test") self.assertEqual(b"test", sos.signature) def test_raw_statement_operation_message(self) -> None: """ Test if RawStatementOperationMessage initializes correctly. """ rsom = RawStatementOperationMessage(b"foo", b"bar") self.assertEqual(2, rsom.msg_id) self.assertEqual(b"foo", rsom.operation) self.assertEqual(b"bar", rsom.signature) def test_statement_operation_message(self) -> None: """ Test if StatementOperationMessage initializes correctly. """ som = StatementOperationMessage(b"foo", b"bar") self.assertEqual(2, som.msg_id) self.assertEqual(b"foo", som.operation) self.assertEqual(b"bar", som.signature) def test_request_statement_operation_message(self) -> None: """ Test if RequestStatementOperationMessage initializes correctly. """ rsom = RequestStatementOperationMessage(42) self.assertEqual(1, rsom.msg_id) self.assertEqual(42, rsom.count)
2,141
Python
.py
54
32.092593
71
0.677279
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,934
test_knowledge_endpoint.py
Tribler_tribler/src/tribler/test_unit/core/knowledge/restapi/test_knowledge_endpoint.py
from unittest.mock import Mock from aiohttp.web_urldispatcher import UrlMappingMatchInfo from ipv8.keyvault.crypto import default_eccrypto from ipv8.peer import Peer from ipv8.peerdiscovery.network import Network from ipv8.test.base import TestBase from ipv8.test.mocking.endpoint import AutoMockEndpoint from tribler.core.database.layers.knowledge import ResourceType from tribler.core.database.tribler_database import TriblerDatabase from tribler.core.knowledge.community import KnowledgeCommunity, KnowledgeCommunitySettings from tribler.core.knowledge.payload import StatementOperation from tribler.core.knowledge.restapi.knowledge_endpoint import KnowledgeEndpoint from tribler.test_unit.base_restapi import MockRequest, response_to_json class MockCommunity(KnowledgeCommunity): """ An inert KnowledgeCommunity. """ community_id = b"\x00" * 20 def __init__(self, settings: KnowledgeCommunitySettings) -> None: """ Create a new MockCommunity. """ super().__init__(settings) self.cancel_all_pending_tasks() def sign(self, operation: StatementOperation) -> bytes: """ Fake a signature. """ return b"" class UpdateKnowledgeEntriesRequest(MockRequest): """ A MockRequest that mimics UpdateKnowledgeEntriesRequests. """ def __init__(self, query: dict, infohash: str, db: TriblerDatabase) -> None: """ Create a new UpdateKnowledgeEntriesRequest. """ super().__init__(query, "PATCH", f"/knowledge/{infohash}") self._infohash = infohash self.context = [db] async def json(self) -> dict: """ Get the json equivalent of the query (i.e., just the query). """ return self._query @property def match_info(self) -> UrlMappingMatchInfo: """ Get the match info (the infohash in the url). """ return UrlMappingMatchInfo({"infohash": self._infohash}, Mock()) class GetTagSuggestionsRequest(MockRequest): """ A MockRequest that mimics GetTagSuggestionsRequests. """ def __init__(self, infohash: str, db: TriblerDatabase) -> None: """ Create a new GetTagSuggestionsRequest. 
""" super().__init__({}, "GET", f"/knowledge/{infohash}/tag_suggestions") self._infohash = infohash self.context = [db] @property def match_info(self) -> UrlMappingMatchInfo: """ Get the match info (the infohash in the url). """ return UrlMappingMatchInfo({"infohash": self._infohash}, Mock()) class TestKnowledgeEndpoint(TestBase): """ Tests for the KnowledgeEndpoint REST endpoint. """ def setUp(self) -> None: """ Create a new endpoint and a mock community. """ super().setUp() key = default_eccrypto.generate_key("curve25519") settings = KnowledgeCommunitySettings( endpoint=AutoMockEndpoint(), my_peer=Peer(key), network=Network(), key=key, db=Mock() ) self.endpoint = KnowledgeEndpoint() self.endpoint.db = settings.db self.endpoint.community = MockCommunity(settings) def tag_to_statement(self, tag: str) -> dict: """ Convert a tag to a statement dictionary. """ return {"predicate": ResourceType.TAG, "object": tag} async def test_add_tag_invalid_infohash(self) -> None: """ Test if an error is returned if we try to add a tag to content with an invalid infohash. """ post_data = {"knowledge": [self.tag_to_statement("abc"), self.tag_to_statement("def")]} response = await self.endpoint.update_knowledge_entries(UpdateKnowledgeEntriesRequest(post_data, "3f3", self.endpoint.db)) response_body_json = await response_to_json(response) self.assertEqual(400, response.status) self.assertEqual("Invalid infohash", response_body_json["error"]) async def test_add_invalid_tag_too_short(self) -> None: """ Test whether an error is returned if we try to add a tag that is too short or long. 
""" post_data = {"statements": [self.tag_to_statement("a")]} response = await self.endpoint.update_knowledge_entries(UpdateKnowledgeEntriesRequest(post_data, "a" * 40, self.endpoint.db)) response_body_json = await response_to_json(response) self.assertEqual(400, response.status) self.assertEqual("Invalid tag length", response_body_json["error"]) async def test_add_invalid_tag_too_long(self) -> None: """ Test whether an error is returned if we try to add a tag that is too short or long. """ post_data = {"statements": [self.tag_to_statement("a" * 60)]} response = await self.endpoint.update_knowledge_entries(UpdateKnowledgeEntriesRequest(post_data, "a" * 40, self.endpoint.db)) response_body_json = await response_to_json(response) self.assertEqual(400, response.status) self.assertEqual("Invalid tag length", response_body_json["error"]) async def test_modify_tags(self) -> None: """ Test modifying tags. """ post_data = {"statements": [self.tag_to_statement("abc"), self.tag_to_statement("def")]} self.endpoint.db.knowledge.get_statements = Mock(return_value=[]) self.endpoint.db.knowledge.get_clock = Mock(return_value=0) response = await self.endpoint.update_knowledge_entries(UpdateKnowledgeEntriesRequest(post_data, "a" * 40, self.endpoint.db)) response_body_json = await response_to_json(response) self.assertEqual(200, response.status) self.assertTrue(response_body_json["success"]) async def test_modify_tags_no_community(self) -> None: """ Test if the KnowledgeEndpoint can function without a community. 
""" self.endpoint.community = None post_data = {"statements": [self.tag_to_statement("abc"), self.tag_to_statement("def")]} self.endpoint.db.knowledge.get_statements = Mock(return_value=[]) self.endpoint.db.knowledge.get_clock = Mock(return_value=0) response = await self.endpoint.update_knowledge_entries(UpdateKnowledgeEntriesRequest(post_data, "a" * 40, self.endpoint.db)) response_body_json = await response_to_json(response) self.assertEqual(200, response.status) self.assertTrue(response_body_json["success"]) async def test_get_suggestions_invalid_infohash(self) -> None: """ Test if an error is returned if we fetch suggestions from content with an invalid infohash. """ response = await self.endpoint.get_tag_suggestions(GetTagSuggestionsRequest("3f3", self.endpoint.db)) response_body_json = await response_to_json(response) self.assertEqual(400, response.status) self.assertEqual("Invalid infohash", response_body_json["error"]) async def test_get_suggestions(self) -> None: """ Test if we can successfully fetch suggestions from content. """ self.endpoint.db.knowledge.get_suggestions = Mock(return_value=["test"]) response = await self.endpoint.get_tag_suggestions(GetTagSuggestionsRequest("a" * 40, self.endpoint.db)) response_body_json = await response_to_json(response) self.assertEqual(200, response.status) self.assertEqual(["test"], response_body_json["suggestions"])
8,004
Python
.py
165
38.115152
114
0.631619
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,935
test_store.py
Tribler_tribler/src/tribler/test_unit/core/database/test_store.py
from __future__ import annotations from ipv8.community import Community, CommunitySettings from ipv8.keyvault.crypto import default_eccrypto from ipv8.test.base import TestBase from ipv8.test.mocking.ipv8 import MockIPv8 from pony.orm import db_session from tribler.core.database.orm_bindings.torrent_metadata import entries_to_chunk from tribler.core.database.serialization import NULL_KEY, int2time from tribler.core.database.store import MetadataStore, ObjState class MockCommunity(Community): """ An empty community. """ community_id = b"\x00" * 20 class TestMetadataStore(TestBase[MockCommunity]): """ Tests for the MetadataStore class. """ def setUp(self) -> None: """ Create a single node and its metadata store. """ super().setUp() self.initialize(MockCommunity, 1) self.metadata_store = MetadataStore(":memory:", self.private_key(0), check_tables=False) def create_node(self, settings: CommunitySettings | None = None, create_dht: bool = False, enable_statistics: bool = False) -> MockIPv8: """ Create a node with a curve25519 key. """ return MockIPv8("curve25519", MockCommunity, settings, create_dht, enable_statistics) @db_session def test_squash_mdblobs(self) -> None: """ Test if mdblobs can be squashed and processed again. """ md_list = [ self.metadata_store.TorrentMetadata(title=f'test torrent {i}', infohash=bytes([i]) * 20, torrent_date=int2time(i)) for i in range(10) ] chunk, _ = entries_to_chunk(md_list, chunk_size=999999999999999) signatures = [d.signature for d in md_list] for d in md_list: d.delete() uncompressed = self.metadata_store.process_compressed_mdblob(chunk, skip_personal_metadata_payload=False) self.assertEqual(signatures, [d.md_obj.signature for d in uncompressed]) @db_session def test_squash_mdblobs_multiple_chunks(self) -> None: """ Test if multiple mdblobs can be squashed and processed again. 
""" md_list = [ self.metadata_store.TorrentMetadata(title=f'test torrent {i}', infohash=bytes([i]) * 20, id_=i, torrent_date=int2time(i), timestamp=i) for i in range(10) ] for md in md_list: md.public_key = self.metadata_store.my_public_key_bin md.signature = md.serialized(self.metadata_store.my_key)[-64:] # Test splitting into multiple chunks chunks1, index = entries_to_chunk(md_list, chunk_size=1) chunks2, _ = entries_to_chunk(md_list, chunk_size=999999999999999, start_index=index) signatures = [d.signature for d in md_list] for d in md_list: d.delete() uncompressed1 = self.metadata_store.process_compressed_mdblob(chunks1, skip_personal_metadata_payload=False) uncompressed2 = self.metadata_store.process_compressed_mdblob(chunks2, skip_personal_metadata_payload=False) self.assertEqual(signatures[:index], [d.md_obj.signature for d in uncompressed1]) self.assertEqual(signatures[index:], [d.md_obj.signature for d in uncompressed2]) @db_session def test_process_invalid_compressed_mdblob(self) -> None: """ Test if an invalid compressed mdblob does not crash Tribler. """ self.assertEqual([], self.metadata_store.process_compressed_mdblob(b"abcdefg")) @db_session def test_process_forbidden_null_key_payload(self) -> None: """ Test if payloads with NULL keys are not processed. """ md = self.metadata_store.TorrentMetadata(title="test torrent", infohash=b"\x01" * 20, id_=0, torrent_date=int2time(0), timestamp=0, public_key=NULL_KEY) payload = md.payload_class.from_dict(**md.to_dict()) self.assertEqual([], self.metadata_store.process_payload(payload)) @db_session def test_process_forbidden_type_payload(self) -> None: """ Test if payloads that are not torrent type are not processed. 
""" md = self.metadata_store.TorrentMetadata(title="test torrent", infohash=b"\x01" * 20, id_=0, torrent_date=int2time(0), timestamp=0, public_key=NULL_KEY) payload = md.payload_class.from_dict(**md.to_dict()) payload.metadata_type = 9999 self.assertEqual([], self.metadata_store.process_payload(payload)) @db_session def test_process_external_payload(self) -> None: """ Test if processing an external payload works. """ other_key = default_eccrypto.generate_key("curve25519") md = self.metadata_store.TorrentMetadata(title="test torrent", infohash=b"\x01" * 20, id_=0, timestamp=0, torrent_date=int2time(0), public_key=other_key.key_to_bin()) payload = md.payload_class.from_signed_blob(md.serialized(other_key)) # Check if node metadata object is properly created on payload processing result, = self.metadata_store.process_payload(payload) self.assertEqual(ObjState.NEW_OBJECT, result.obj_state) self.assertEqual(payload.metadata_type, result.md_obj.to_dict()['metadata_type']) # Check that we flag this as duplicate in case we already know about the local node result, = self.metadata_store.process_payload(payload) self.assertEqual(ObjState.DUPLICATE_OBJECT, result.obj_state) @db_session def test_process_external_payload_invalid_sig(self) -> None: """ Test that payloads with an invalid signature are not processed. """ other_key = default_eccrypto.generate_key("curve25519") md = self.metadata_store.TorrentMetadata(title="test torrent", infohash=b"\x01" * 20, id_=0, timestamp=0, torrent_date=int2time(0), public_key=other_key.key_to_bin()) payload = md.payload_class.from_signed_blob(md.serialized(other_key)) payload.signature = bytes(127 ^ byte for byte in payload.signature) md.delete() self.assertEqual([], self.metadata_store.process_payload(payload)) @db_session def test_process_payload_invalid_metadata_type(self) -> None: """ Test if payloads with an invalid metadata type are not processed. 
""" other_key = default_eccrypto.generate_key("curve25519") md = self.metadata_store.TorrentMetadata(title="test torrent", infohash=b"\x01" * 20, id_=0, timestamp=0, torrent_date=int2time(0), public_key=other_key.key_to_bin()) payload = md.payload_class.from_signed_blob(md.serialized(other_key)) payload.metadata_type = -1 md.delete() self.assertEqual([], self.metadata_store.process_payload(payload)) @db_session def test_process_payload_skip_personal(self) -> None: """ Test if payloads with our own signature are not processed. """ md = self.metadata_store.TorrentMetadata(title="test torrent", infohash=b"\x01" * 20, id_=0, timestamp=0, torrent_date=int2time(0), public_key=self.key_bin(0)) payload = md.payload_class.from_signed_blob(md.serialized(self.private_key(0))) md.delete() self.assertEqual([], self.metadata_store.process_payload(payload)) @db_session def test_process_payload_ffa(self) -> None: """ Test if FFA entries are correctly added. """ infohash = b"1" * 20 ffa_title = "abcabc" ffa_torrent = self.metadata_store.TorrentMetadata.add_ffa_from_dict({"infohash": infohash, "title": ffa_title}) ffa_payload = self.metadata_store.TorrentMetadata.payload_class.from_signed_blob(ffa_torrent.serialized()) ffa_torrent.delete() # Assert that FFA is never added to DB if there is already a signed entry with the same infohash signed_md = self.metadata_store.TorrentMetadata(infohash=infohash, title='') self.metadata_store.TorrentMetadata.payload_class.from_signed_blob(signed_md.serialized()) self.assertEqual([], self.metadata_store.process_payload(ffa_payload)) self.assertIsNone(self.metadata_store.TorrentMetadata.get(title=ffa_title)) signed_md.delete() # Add an FFA from the payload result = self.metadata_store.process_payload(ffa_payload)[0] self.assertEqual(ObjState.NEW_OBJECT, result.obj_state) self.assertIsNotNone(self.metadata_store.TorrentMetadata.get(title=ffa_title)) self.assertEqual([], self.metadata_store.process_payload(ffa_payload)) @db_session def 
test_get_entries_query_sort_by_size(self) -> None: """ Test if entries are properly sorted by size. """ self.metadata_store.TorrentMetadata.add_ffa_from_dict({"infohash": b"\xab" * 20, "title": "abc", "size": 20}) self.metadata_store.TorrentMetadata.add_ffa_from_dict({"infohash": b"\xcd" * 20, "title": "def", "size": 1}) self.metadata_store.TorrentMetadata.add_ffa_from_dict({"infohash": b"\xef" * 20, "title": "ghi", "size": 10}) ordered1, ordered2, ordered3 = self.metadata_store.get_entries_query(sort_by="size", sort_desc=True)[:] self.assertEqual(20, ordered1.size) self.assertEqual(10, ordered2.size) self.assertEqual(1, ordered3.size) @db_session def test_get_entries_query_deprecated(self) -> None: """ Test if the get entries query ignores invalid arguments. """ self.metadata_store.TorrentMetadata.add_ffa_from_dict({"infohash": b"\xab" * 20, "title": "abc", "size": 20}) self.metadata_store.TorrentMetadata.add_ffa_from_dict({"infohash": b"\xcd" * 20, "title": "def", "size": 1}) self.metadata_store.TorrentMetadata.add_ffa_from_dict({"infohash": b"\xef" * 20, "title": "ghi", "size": 10}) ordered1, ordered2, ordered3 = self.metadata_store.get_entries_query(sort_by="size", sort_desc=True, exclude_deleted="1")[:] self.assertEqual(20, ordered1.size) self.assertEqual(10, ordered2.size) self.assertEqual(1, ordered3.size)
10,555
Python
.py
191
44.926702
119
0.64359
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,936
test_ranks.py
Tribler_tribler/src/tribler/test_unit/core/database/test_ranks.py
from __future__ import annotations from collections import deque from ipv8.test.base import TestBase from tribler.core.database.ranks import ( find_word_and_rotate_title, freshness_rank, seeders_rank, title_rank, torrent_rank, ) class TestRanks(TestBase): """ Tests for the ranking logic. """ def test_title_rank_exact_match(self) -> None: """ Test if an exact title match leads to a score of exactly 1. """ self.assertEqual(1, title_rank("Big Buck Bunny", "Big Buck Bunny")) def test_title_rank_no_match(self) -> None: """ Test if input of different titles leads to a score less than 1. """ self.assertLess(title_rank("Big Buck Bunny", "Aji Ayvl Aybbu"), 1) def test_freshness_rank_zero(self) -> None: """ Test if invalid or unknown freshness has the lowest rank. """ self.assertEqual(0, freshness_rank(-1)) self.assertEqual(0, freshness_rank(None)) def test_freshness_rank_max(self) -> None: """ Test if maximum freshness has the highest rank. """ self.assertEqual(1, freshness_rank(0)) def test_freshness_rank_relative(self) -> None: """ Test if fresh torrents have a higher score than old torrents. """ self.assertLess(freshness_rank(10), freshness_rank(1)) def test_seeders_rank_zero(self) -> None: """ Test if a torrent without seeders scores a 0. """ self.assertEqual(0, seeders_rank(0)) def test_seeders_rank_many(self) -> None: """ Test if a torrent with many seeders scores more than a 0. """ self.assertGreater(seeders_rank(1000000), 0) def test_seeders_rank_relative(self) -> None: """ Test if a torrent with more seeders scores higher. """ self.assertGreaterEqual(seeders_rank(100), seeders_rank(10)) def test_seeders_rank_leechers(self) -> None: """ Test if a torrent with more leechers scores higher. """ self.assertGreaterEqual(seeders_rank(10, 100), seeders_rank(10, 10)) def test_torrent_rank_exact_match(self) -> None: """ Test if an exact title match leads to a score of exactly 0.81. This is 90% (no seeders/leechers) of 90% (no freshness) of an exact match score of 1. 
""" self.assertEqual(0.81, torrent_rank("Big Buck Bunny", "Big Buck Bunny")) def test_torrent_rank__no_freshness(self) -> None: """ Test if no freshness is worse than any freshness. """ rank1 = torrent_rank("Big Buck Bunny", "Big Buck Bunny", seeders=1000, freshness=1) rank2 = torrent_rank("Big Buck Bunny", "Big Buck Bunny", seeders=1000, freshness=None) self.assertGreaterEqual(rank1, rank2) def test_torrent_rank_freshness(self) -> None: """ Test if recent freshness is better than old freshness. """ rank1 = torrent_rank("Big Buck Bunny", "Big Buck Bunny", seeders=1000, freshness=1) rank2 = torrent_rank("Big Buck Bunny", "Big Buck Bunny", seeders=1000, freshness=100) self.assertGreaterEqual(rank1, rank2) def test_torrent_rank_position(self) -> None: """ Test if word matches are preferred close to the start. """ rank1 = torrent_rank("Big Buck Bunny", "Big Buck Bunny") # Exact rank2 = torrent_rank("Big Buck Bunny", "Big Buck Bunny II") rank3 = torrent_rank("Big Buck Bunny", "Big Buck Brown Bunny") rank4 = torrent_rank("Big Buck Bunny", "Big Bad Buck Bunny") rank5 = torrent_rank("Big Buck Bunny", "Boring Big Buck Bunny") self.assertGreaterEqual(rank1, rank2) self.assertGreaterEqual(rank2, rank3) self.assertGreaterEqual(rank3, rank4) self.assertGreaterEqual(rank4, rank5) def test_torrent_rank_intermediate(self) -> None: """ Test if word matches are preferred with less words in between. """ rank1 = torrent_rank("Big Buck Bunny", "Big Buck Bunny") # Exact rank2 = torrent_rank("Big Buck Bunny", "Big Buck A Bunny") rank3 = torrent_rank("Big Buck Bunny", "Big Buck A B Bunny") rank4 = torrent_rank("Big Buck Bunny", "Big Buck A B C Bunny") self.assertGreaterEqual(rank1, rank2) self.assertGreaterEqual(rank2, rank3) self.assertGreaterEqual(rank3, rank4) def test_torrent_rank_mis_position_one(self) -> None: """ Test if word matches are preferred with mismatches further on in the item. 
""" rank1 = torrent_rank("Big Buck Bunny", "Big A Buck Bunny") rank2 = torrent_rank("Big Buck Bunny", "A Big Buck Bunny") self.assertGreaterEqual(rank1, rank2) def test_torrent_rank_mis_position_many(self) -> None: """ Test if word group matches are preferred with mismatches further on in the item. """ rank1 = torrent_rank("Big Buck Bunny", "Big A B C Buck Bunny") rank2 = torrent_rank("Big Buck Bunny", "A B C Big Buck Bunny") self.assertGreaterEqual(rank1, rank2) def test_torrent_rank_reorder(self) -> None: """ Test if wrong order of words in the title imposes a penalty to the rank. """ rank1 = torrent_rank("Big Buck Bunny", "Big Buck Bunny") # Exact rank2 = torrent_rank("Big Buck Bunny", "Big Bunny Buck") self.assertGreaterEqual(rank1, rank2) def test_torrent_rank_miss(self) -> None: """ Test if missed words in the title imposes a penalty to the rank. """ rank1 = torrent_rank("Big Buck Bunny", "Big Buck Bunny") # Exact rank2 = torrent_rank("Big Buck Bunny", "Big Buck") rank3 = torrent_rank("Big Buck Bunny", "Big Bunny") rank4 = torrent_rank("Big Buck Bunny", "Buck Bunny") self.assertGreaterEqual(rank1, rank2) self.assertGreaterEqual(rank2, rank3) self.assertGreaterEqual(rank3, rank4) def test_find_word_first(self) -> None: """ Test if a matched first word gets popped from the queue. """ title = deque(["A", "B", "C"]) self.assertEqual((True, 0), find_word_and_rotate_title("A", title)) self.assertEqual(deque(["B", "C"]), title) def test_find_word_skip_one(self) -> None: """ Test if the number of skipped words is returned correctly when skipping over one. """ title = deque(["A", "B", "C"]) self.assertEqual((True, 1), find_word_and_rotate_title("B", title)) self.assertEqual(deque(["C", "A"]), title) def test_find_word_skip_many(self) -> None: """ Test if the number of skipped words is returned correctly when skipping over many. 
""" title = deque(["A", "B", "X", "Y", "C"]) self.assertEqual((True, 4), find_word_and_rotate_title("C", title)) self.assertEqual(deque(["A", "B", "X", "Y"]), title) def test_find_word_not_found(self) -> None: """ Test if the False is returned when a word is not found. """ title = deque(["A", "C", "X"]) self.assertEqual((False, 0), find_word_and_rotate_title("B", title)) self.assertEqual(deque(["A", "C", "X"]), title)
7,361
Python
.py
164
36.664634
94
0.62107
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,937
test_tribler_database.py
Tribler_tribler/src/tribler/test_unit/core/database/test_tribler_database.py
from ipv8.test.base import TestBase from pony.orm import db_session from tribler.core.database.tribler_database import TriblerDatabase class TestTriblerDatabase(TestBase): """ Tests for the TriblerDatabase class. """ def setUp(self) -> None: """ Create a database instance. """ super().setUp() self.db = TriblerDatabase(":memory:") @db_session def test_set_misc(self) -> None: """ Test if set_misc works as expected. """ self.db.set_misc(key='string', value='value') self.db.set_misc(key='integer', value="1") self.assertEqual("value", self.db.get_misc(key='string')) self.assertEqual("1", self.db.get_misc(key='integer')) @db_session def test_non_existent_misc(self) -> None: """ Test if get_misc returns proper values. """ self.assertIsNone(self.db.get_misc(key="non existent")) self.assertEqual("42", self.db.get_misc(key="non existent", default="42")) @db_session def test_default_version(self) -> None: """ Test if the default version is equal to ``CURRENT_VERSION``. """ self.assertEqual(TriblerDatabase.CURRENT_VERSION, self.db.version) @db_session def test_version_getter_and_setter(self) -> None: """ Test if the version getter and setter work as expected. """ self.db.version = 42 self.assertEqual(42, self.db.version) @db_session def test_version_getter_unsupported_type(self) -> None: """ Test if the version getter raises a TypeError if the type is not supported. """ with self.assertRaises(TypeError): self.db.version = 'string'
1,762
Python
.py
49
28.44898
83
0.622797
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,938
test_serialization.py
Tribler_tribler/src/tribler/test_unit/core/database/test_serialization.py
from __future__ import annotations from datetime import datetime, timezone from ipv8.keyvault.crypto import default_eccrypto from ipv8.test.base import TestBase from tribler.core.database.serialization import ( REGULAR_TORRENT, HealthItemsPayload, SignedPayload, TorrentMetadataPayload, UnknownBlobTypeException, int2time, read_payload_with_offset, time2int, ) class TestSerialization(TestBase): """ Tests for the serialization logic. """ def test_time2int(self) -> None: """ Test if time2int normalizes timestamps based on the supplied epoch. """ date = datetime.fromtimestamp(1234, tz=timezone.utc) epoch = datetime.fromtimestamp(1000, tz=timezone.utc) self.assertEqual(234, time2int(date, epoch)) def test_int2time(self) -> None: """ Test if int2time normalizes timestamps based on the supplied epoch. """ epoch = datetime.fromtimestamp(1000, tz=timezone.utc) self.assertEqual(datetime.fromtimestamp(1234, tz=timezone.utc), int2time(234, epoch)) def test_read_payload_with_offset_unknown(self) -> None: """ Test if unknown payload formats throw a UnknownBlobTypeException. """ with self.assertRaises(UnknownBlobTypeException): read_payload_with_offset(b"\xFF\xFF") def test_read_payload_with_offset_regular_torrent(self) -> None: """ Test if TorrentMetadataPayload payload formats are correctly decoded. 
""" payload = TorrentMetadataPayload(metadata_type=REGULAR_TORRENT, reserved_flags=0, public_key=b"\x00" * 64, id_=7, origin_id=1337, timestamp=10, infohash=b"\x01" * 20, size=42, torrent_date=int2time(0), title="test", tags="tags", tracker_info="") unserialized, offset = read_payload_with_offset(payload.serialized()) self.assertEqual(payload.metadata_type, unserialized.metadata_type) self.assertEqual(payload.reserved_flags, unserialized.reserved_flags) self.assertEqual(payload.public_key, unserialized.public_key) self.assertEqual(payload.id_, unserialized.id_) self.assertEqual(payload.origin_id, unserialized.origin_id) self.assertEqual(payload.timestamp, unserialized.timestamp) self.assertEqual(payload.infohash, unserialized.infohash) self.assertEqual(payload.size, unserialized.size) self.assertEqual(payload.torrent_date, unserialized.torrent_date) self.assertEqual(payload.title, unserialized.title) self.assertEqual(payload.tags, unserialized.tags) self.assertEqual(payload.tracker_info, unserialized.tracker_info) def test_signed_payload_sign(self) -> None: """ Test if signing a SignedPayload and unpacking it, leads to the same payload. 
""" private_key = default_eccrypto.generate_key("curve25519") payload = SignedPayload(9999, 0, private_key.pub().key_to_bin()[10:]) payload.add_signature(private_key) unserialized = SignedPayload.from_signed_blob(payload.serialized() + payload.signature) self.assertTrue(payload.has_signature()) self.assertTrue(payload.check_signature()) self.assertTrue(unserialized.has_signature()) self.assertTrue(unserialized.check_signature()) self.assertEqual(payload.metadata_type, unserialized.metadata_type) self.assertEqual(payload.reserved_flags, unserialized.reserved_flags) self.assertEqual(payload.public_key, unserialized.public_key) self.assertEqual(payload.signature, unserialized.signature) def test_signed_payload_to_dict_signed(self) -> None: """ Test if converting a signed SignedPayload to a dict and loading it again, leads to the same payload. """ private_key = default_eccrypto.generate_key("curve25519") payload = SignedPayload(9999, 0, private_key.pub().key_to_bin()[10:]) payload.add_signature(private_key) unserialized = SignedPayload.from_dict(**payload.to_dict()) self.assertTrue(payload.has_signature()) self.assertTrue(payload.check_signature()) self.assertTrue(unserialized.has_signature()) self.assertTrue(unserialized.check_signature()) self.assertEqual(payload.metadata_type, unserialized.metadata_type) self.assertEqual(payload.reserved_flags, unserialized.reserved_flags) self.assertEqual(payload.public_key, unserialized.public_key) self.assertEqual(payload.signature, unserialized.signature) def test_signed_payload_to_dict_unsigned(self) -> None: """ Test if converting an unsigned SignedPayload to a dict and loading it again, leads to the same payload. 
""" payload = SignedPayload(9999, 0, b"\x00" * 64) unserialized = SignedPayload.from_dict(**payload.to_dict()) self.assertFalse(payload.has_signature()) self.assertFalse(unserialized.has_signature()) self.assertEqual(payload.metadata_type, unserialized.metadata_type) self.assertEqual(payload.reserved_flags, unserialized.reserved_flags) self.assertEqual(payload.public_key, unserialized.public_key) self.assertEqual(payload.signature, unserialized.signature) def test_get_magnet(self) -> None: """ Test if TorrentMetadataPayload can generated magnet links from its infohash. """ payload = TorrentMetadataPayload(metadata_type=REGULAR_TORRENT, reserved_flags=0, public_key=b"\x00" * 64, id_=7, origin_id=1337, timestamp=10, infohash=b"\x01" * 20, size=42, torrent_date=int2time(0), title="test", tags="tags", tracker_info="") self.assertEqual("magnet:?xt=urn:btih:0101010101010101010101010101010101010101&dn=test", payload.get_magnet()) def test_auto_convert_torrent_date(self) -> None: """ Test if TorrentMetadataPayload automatically converts its torrent date from int to datetime. """ payload1 = TorrentMetadataPayload(metadata_type=REGULAR_TORRENT, reserved_flags=0, public_key=b"\x00" * 64, id_=7, origin_id=1337, timestamp=10, infohash=b"\x01" * 20, size=42, torrent_date=int2time(0), title="test", tags="tags", tracker_info="") payload2 = TorrentMetadataPayload(metadata_type=REGULAR_TORRENT, reserved_flags=0, public_key=b"\x00" * 64, id_=7, origin_id=1337, timestamp=10, infohash=b"\x01" * 20, size=42, torrent_date=0, title="test", tags="tags", tracker_info="") unserialized1 = TorrentMetadataPayload.from_signed_blob(payload1.serialized() + payload1.signature) unserialized2 = TorrentMetadataPayload.from_signed_blob(payload2.serialized() + payload2.signature) self.assertEqual(unserialized1.torrent_date, unserialized2.torrent_date) def test_health_items_payload(self) -> None: """ Test if HealthItemsPayload is correctly unpacked. 
""" payload = HealthItemsPayload(b"1,2,3;4,5,6;7,8,9;") self.assertEqual([(1, 2, 3), (4, 5, 6), (7, 8, 9)], HealthItemsPayload.unpack(payload.serialize())) def test_health_items_payload_no_data(self) -> None: """ Test if HealthItemsPayload without data is correctly unpacked. """ payload = HealthItemsPayload(b"") self.assertEqual([], HealthItemsPayload.unpack(payload.serialize())) def test_health_items_payload_missing_data(self) -> None: """ Test if HealthItemsPayload with missing data gets error values. """ payload = HealthItemsPayload(b";1,2,3;") self.assertEqual([(0, 0, 0), (1, 2, 3)], HealthItemsPayload.unpack(payload.serialize())) def test_health_items_payload_illegal_data(self) -> None: """ Test if HealthItemsPayload with illegal data gets error values. """ payload = HealthItemsPayload(b"a,b,c;1,2,3;") self.assertEqual([(0, 0, 0), (1, 2, 3)], HealthItemsPayload.unpack(payload.serialize())) def test_health_items_payload_negative_data(self) -> None: """ Test if HealthItemsPayload with negatve data gets error values. """ payload = HealthItemsPayload(b"-1,-1,-1;1,2,3;") self.assertEqual([(0, 0, 0), (1, 2, 3)], HealthItemsPayload.unpack(payload.serialize()))
8,658
Python
.py
153
46.477124
115
0.668753
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,939
test_database_endpoint.py
Tribler_tribler/src/tribler/test_unit/core/database/restapi/test_database_endpoint.py
from __future__ import annotations from asyncio import sleep from typing import TYPE_CHECKING, Callable from unittest.mock import AsyncMock, Mock, call from aiohttp.web_urldispatcher import UrlMappingMatchInfo from ipv8.test.base import TestBase from multidict import MultiDict, MultiDictProxy from tribler.core.database.layers.knowledge import ResourceType, SimpleStatement from tribler.core.database.restapi.database_endpoint import DatabaseEndpoint, parse_bool from tribler.core.database.serialization import REGULAR_TORRENT from tribler.core.restapi.rest_endpoint import HTTP_BAD_REQUEST from tribler.test_unit.base_restapi import MockRequest, response_to_json if TYPE_CHECKING: from tribler.core.database.store import MetadataStore class TorrentHealthRequest(MockRequest): """ A MockRequest that mimics TorrentHealthRequests. """ def __init__(self, query: dict, infohash: str, mds: MetadataStore | None) -> None: """ Create a new TorrentHealthRequest. """ super().__init__(query, "GET", f"/metadata/torrents/{infohash}/health") self._infohash = infohash self.context = (mds,) @property def match_info(self) -> UrlMappingMatchInfo: """ Get the match info (the infohash in the url). """ return UrlMappingMatchInfo({"infohash": self._infohash}, Mock()) class PopularTorrentsRequest(MockRequest): """ A MockRequest that mimics PopularTorrentsRequests. """ def __init__(self, query: dict, mds: MetadataStore | None) -> None: """ Create a new PopularTorrentsRequest. """ super().__init__(query, "GET", "/metadata/torrents/popular") self.context = (mds,) class SearchLocalRequest(MockRequest): """ A MockRequest that mimics SearchLocalRequests. """ def __init__(self, query: dict, mds: MetadataStore | None) -> None: """ Create a new SearchLocalRequest. 
""" default_query = {"fts_text": ""} default_query.update(query) super().__init__(default_query, "GET", "/metadata/search/local") self.context = (mds,) class SearchCompletionsRequest(MockRequest): """ A MockRequest that mimics SearchCompletionsRequests. """ def __init__(self, query: dict, mds: MetadataStore | None) -> None: """ Create a new SearchCompletionsRequest. """ super().__init__(query, "GET", "/metadata/search/completions") self.context = (mds, ) class TestDatabaseEndpoint(TestBase): """ Tests for the DatabaseEndpoint REST endpoint. """ async def mds_run_now(self, callback: Callable[[], tuple[dict, int, int]]) -> tuple[dict, int, int]: """ Run an mds callback immediately. """ await sleep(0) return callback() def test_sanitize(self) -> None: """ Test if parameters are properly sanitized. """ soiled = MultiDictProxy(MultiDict([("first", "7"), ("last", "42"), ("sort_by", "name"), ("sort_desc", "0"), ("hide_xxx", "0"), ("category", "TEST"), ("origin_id", "13"), ("tags", "tag1"), ("tags", "tag2"), ("tags", "tag3"), ("max_rowid", "1337"), ("channel_pk", "AA")])) sanitized = DatabaseEndpoint.sanitize_parameters(soiled) self.assertEqual(7, sanitized["first"]) self.assertEqual(42, sanitized["last"]) self.assertEqual("title", sanitized["sort_by"]) self.assertFalse(sanitized["sort_desc"]) self.assertFalse(sanitized["hide_xxx"]) self.assertEqual("TEST", sanitized["category"]) self.assertEqual(13, sanitized["origin_id"]) self.assertEqual(["tag1", "tag2", "tag3"], sanitized["tags"]) self.assertEqual(1337, sanitized["max_rowid"]) self.assertEqual(b"\xaa", sanitized["channel_pk"]) def test_parse_bool(self) -> None: """ Test if parse bool fulfills its promises. """ self.assertTrue(parse_bool("true")) self.assertTrue(parse_bool("1")) self.assertFalse(parse_bool("false")) self.assertFalse(parse_bool("0")) def test_add_statements_to_metadata_list(self) -> None: """ Test if statements can be added to an existing metadata dict. 
""" metadata = {"type": REGULAR_TORRENT, "infohash": "AA"} endpoint = DatabaseEndpoint() endpoint.tribler_db = Mock(knowledge=Mock(get_simple_statements=Mock(return_value=[ SimpleStatement(ResourceType.TORRENT, "AA", ResourceType.TAG, "tag") ]))) endpoint.add_statements_to_metadata_list([metadata]) self.assertEqual(ResourceType.TORRENT, metadata["statements"][0]["subject_type"]) self.assertEqual("AA", metadata["statements"][0]["subject"]) self.assertEqual(ResourceType.TAG, metadata["statements"][0]["predicate"]) self.assertEqual("tag", metadata["statements"][0]["object"]) async def test_get_torrent_health_bad_timeout(self) -> None: """ Test if a bad timeout value in get_torrent_health leads to a HTTP_BAD_REQUEST status. """ endpoint = DatabaseEndpoint() response = await endpoint.get_torrent_health(TorrentHealthRequest({"timeout": "AA"}, "AA", endpoint.mds)) self.assertEqual(HTTP_BAD_REQUEST, response.status) async def test_get_torrent_health_no_checker(self) -> None: """ Test if calling get_torrent_health without a torrent checker leads to a false checking status. """ endpoint = DatabaseEndpoint() response = await endpoint.get_torrent_health(TorrentHealthRequest({}, "AA", endpoint.mds)) response_body_json = await response_to_json(response) self.assertEqual(200, response.status) self.assertFalse(response_body_json["checking"]) async def test_get_torrent_health(self) -> None: """ Test if calling get_torrent_health with a valid request leads to a true checking status. 
""" endpoint = DatabaseEndpoint() check_torrent_health = AsyncMock() endpoint.torrent_checker = Mock(check_torrent_health=check_torrent_health) response = await endpoint.get_torrent_health(TorrentHealthRequest({}, "AA", endpoint.mds)) response_body_json = await response_to_json(response) self.assertEqual(200, response.status) self.assertTrue(response_body_json["checking"]) self.assertEqual(call(b'\xaa', timeout=20, scrape_now=True), check_torrent_health.call_args) def test_add_download_progress_to_metadata_list(self) -> None: """ Test if progress can be added to an existing metadata dict. """ metadata = {"type": REGULAR_TORRENT, "infohash": "AA"} download = Mock(get_state=Mock(return_value=Mock(get_progress=Mock(return_value=1.0))), tdef=Mock(infohash="AA")) endpoint = DatabaseEndpoint() endpoint.download_manager = Mock(get_download=Mock(return_value=download), metainfo_requests=[]) endpoint.add_download_progress_to_metadata_list([metadata]) self.assertEqual(1.0, metadata["progress"]) def test_add_download_progress_to_metadata_list_none(self) -> None: """ Test if progress is not added to an existing metadata dict if no download exists. """ metadata = {"type": REGULAR_TORRENT, "infohash": "AA"} endpoint = DatabaseEndpoint() endpoint.download_manager = Mock(get_download=Mock(return_value=None), metainfo_requests=[]) endpoint.add_download_progress_to_metadata_list([metadata]) self.assertNotIn("progress", metadata) def test_add_download_progress_to_metadata_list_metainfo_requests(self) -> None: """ Test if progress is not added to an existing metadata dict if it is in metainfo_requests. 
""" metadata = {"type": REGULAR_TORRENT, "infohash": "AA"} download = Mock(get_state=Mock(return_value=Mock(get_progress=Mock(return_value=1.0))), tdef=Mock(infohash="AA")) endpoint = DatabaseEndpoint() endpoint.download_manager = Mock(get_download=Mock(return_value=download), metainfo_requests=["AA"]) endpoint.add_download_progress_to_metadata_list([metadata]) self.assertNotIn("progress", metadata) async def test_get_popular_torrents(self) -> None: """ Test if we can bring everything together into a popular torrents request. Essentially, this combines ``add_download_progress_to_metadata_list`` and ``add_statements_to_metadata_list``. """ metadata = {"type": REGULAR_TORRENT, "infohash": "AA"} endpoint = DatabaseEndpoint() endpoint.tribler_db = Mock(knowledge=Mock(get_simple_statements=Mock(return_value=[ SimpleStatement(ResourceType.TORRENT, "AA", ResourceType.TAG, "tag") ]))) download = Mock(get_state=Mock(return_value=Mock(get_progress=Mock(return_value=1.0))), tdef=Mock(infohash="AA")) endpoint.download_manager = Mock(get_download=Mock(return_value=download), metainfo_requests=[]) endpoint.mds = Mock(get_entries=Mock(return_value=[Mock(to_simple_dict=Mock(return_value=metadata))])) response = await endpoint.get_popular_torrents(PopularTorrentsRequest(metadata, endpoint.mds)) response_body_json = await response_to_json(response) response_results = response_body_json["results"][0] self.assertEqual(200, response.status) self.assertEqual(1, response_body_json["first"]) self.assertEqual(50, response_body_json["last"]) self.assertEqual(300, response_results["type"]) self.assertEqual("AA", response_results["infohash"]) self.assertEqual(1.0, response_results["progress"]) self.assertEqual(ResourceType.TORRENT.value, response_results["statements"][0]["subject_type"]) self.assertEqual("AA", response_results["statements"][0]["subject"]) self.assertEqual(ResourceType.TAG.value, response_results["statements"][0]["predicate"]) self.assertEqual("tag", 
response_results["statements"][0]["object"]) async def test_local_search_bad_query(self) -> None: """ Test if a bad value leads to a bad request status. """ endpoint = DatabaseEndpoint() response = await endpoint.local_search(SearchLocalRequest({"first": "bla"}, endpoint.mds)) self.assertEqual(HTTP_BAD_REQUEST, response.status) async def test_local_search_errored_search(self) -> None: """ Test if a search that threw an Exception leads to a bad request status. The exception here stems from the ``mds`` being set to ``None``. """ endpoint = DatabaseEndpoint() endpoint.tribler_db = Mock() response = await endpoint.local_search(SearchLocalRequest({}, endpoint.mds)) self.assertEqual(HTTP_BAD_REQUEST, response.status) async def test_local_search_no_knowledge(self) -> None: """ Test if performing a local search without a tribler db set returns mds results. """ endpoint = DatabaseEndpoint() endpoint.tribler_db = Mock() endpoint.mds = Mock(run_threaded=self.mds_run_now, get_total_count=Mock(), get_max_rowid=Mock(), get_entries=Mock(return_value=[Mock(to_simple_dict=Mock(return_value={"test": "test", "type": -1}))])) response = await endpoint.local_search(SearchLocalRequest({}, endpoint.mds)) response_body_json = await response_to_json(response) self.assertEqual(200, response.status) self.assertEqual("test", response_body_json["results"][0]["test"]) self.assertEqual(1, response_body_json["first"]) self.assertEqual(50, response_body_json["last"]) self.assertEqual(None, response_body_json["sort_by"]) self.assertEqual(True, response_body_json["sort_desc"]) async def test_local_search_no_knowledge_include_total(self) -> None: """ Test if performing a local search with requested total, includes a total. 
""" endpoint = DatabaseEndpoint() endpoint.tribler_db = Mock() endpoint.mds = Mock(run_threaded=self.mds_run_now, get_total_count=Mock(return_value=1), get_max_rowid=Mock(return_value=7), get_entries=Mock(return_value=[Mock(to_simple_dict=Mock(return_value={"test": "test", "type": -1}))])) response = await endpoint.local_search(SearchLocalRequest({"include_total": "I would like this"}, endpoint.mds)) response_body_json = await response_to_json(response) self.assertEqual(200, response.status) self.assertEqual("test", response_body_json["results"][0]["test"]) self.assertEqual(1, response_body_json["first"]) self.assertEqual(50, response_body_json["last"]) self.assertEqual(None, response_body_json["sort_by"]) self.assertEqual(True, response_body_json["sort_desc"]) self.assertEqual(1, response_body_json["total"]) self.assertEqual(7, response_body_json["max_rowid"]) async def test_completions_bad_query(self) -> None: """ Test if a missing query leads to a bad request status. """ endpoint = DatabaseEndpoint() response = await endpoint.completions(SearchCompletionsRequest({}, endpoint.mds)) self.assertEqual(HTTP_BAD_REQUEST, response.status) async def test_completions_lowercase_search(self) -> None: """ Test if a normal lowercase search leads to results. """ endpoint = DatabaseEndpoint() endpoint.mds = Mock(get_auto_complete_terms=Mock(return_value=["test1", "test2"])) response = await endpoint.completions(SearchCompletionsRequest({"q": "test"}, endpoint.mds)) response_body_json = await response_to_json(response) self.assertEqual(200, response.status) self.assertEqual(["test1", "test2"], response_body_json["completions"]) self.assertEqual(call("test", max_terms=5), endpoint.mds.get_auto_complete_terms.call_args) async def test_completions_mixed_case_search(self) -> None: """ Test if a mixed case search leads to results. 
""" endpoint = DatabaseEndpoint() endpoint.mds = Mock(get_auto_complete_terms=Mock(return_value=["test1", "test2"])) response = await endpoint.completions(SearchCompletionsRequest({"q": "TeSt"}, endpoint.mds)) response_body_json = await response_to_json(response) self.assertEqual(200, response.status) self.assertEqual(["test1", "test2"], response_body_json["completions"]) self.assertEqual(call("test", max_terms=5), endpoint.mds.get_auto_complete_terms.call_args)
15,222
Python
.py
282
44.397163
120
0.647079
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,940
test_health.py
Tribler_tribler/src/tribler/test_unit/core/database/layers/test_health.py
from __future__ import annotations

from types import SimpleNamespace
from typing import TYPE_CHECKING

from ipv8.test.base import TestBase

from tribler.core.database.layers.health import HealthDataAccessLayer, ResourceType
from tribler.core.torrent_checker.dataclasses import HealthInfo, Source

if TYPE_CHECKING:
    from typing_extensions import Self


class MockResource:
    """
    A mocked Resource that stores its call kwargs.
    """

    def __init__(self, **kwargs) -> None:
        """
        Create a MockResource and store its kwargs.
        """
        self.get_kwargs = kwargs

    @classmethod
    def get(cls: type[Self], **kwargs) -> type[Self]:
        """
        Fake a search using the given kwargs and return an instance of ourselves.
        """
        return cls(**kwargs)

    @classmethod
    def get_for_update(cls: type[Self], /, **kwargs) -> type[Self] | None:
        """
        Mimic fetching the resource from the database.
        """
        del kwargs
        return None


class MockEntity(MockResource, SimpleNamespace):
    """
    Allow a db binding to write whatever they want to this class.
    """

    # Class-level registry of all created instances, shared across subclasses
    # unless a test resets it (see MockKnowledgeDataAccessLayer.__init__).
    CREATED = []

    def __init__(self, **kwargs) -> None:
        """
        Create a new MockEntity and add it to the CREATED list.
        """
        super().__init__(**kwargs)
        self.CREATED.append(self)


class MockDatabase:
    """
    Mock the bindings that others will inherit from.
    """

    Entity = MockEntity
    Resource = MockResource


class MockKnowledgeDataAccessLayer:
    """
    A mocked KnowledgeDataAccessLayer.
    """

    def __init__(self) -> None:
        """
        Create a new mocked KnowledgeDataAccessLayer.
        """
        self.instance = MockDatabase()
        self.Resource = self.instance.Resource
        # Reset the shared CREATED registry so tests do not leak entities into each other.
        self.instance.Entity.CREATED = []


class TestHealthDataAccessLayer(TestBase):
    """
    Tests for the HealthDataAccessLayer.
    """

    def test_get_torrent_health(self) -> None:
        """
        Test if torrents with the correct infohash are retrieved for their health info.
        """
        hdal = HealthDataAccessLayer(MockKnowledgeDataAccessLayer())

        health = hdal.get_torrent_health("01" * 20)

        self.assertEqual("01" * 20, health.get_kwargs["torrent"].get_kwargs["name"])
        self.assertEqual(ResourceType.TORRENT, health.get_kwargs["torrent"].get_kwargs["type"])

    def test_add_torrent_health(self) -> None:
        """
        Test if adding torrent health leads to the correct database calls.
        """
        hdal = HealthDataAccessLayer(MockKnowledgeDataAccessLayer())
        hdal.TorrentHealth = MockEntity

        hdal.add_torrent_health(HealthInfo(b"\x01" * 20, 7, 42, 1337))
        added, = hdal.TorrentHealth.CREATED

        # NOTE(review): the expected name b"01" * 20 appears to be the hexlified form of the
        # b"\x01" * 20 infohash passed in above — confirm against HealthDataAccessLayer.
        self.assertEqual(b"01" * 20, added.get_kwargs["torrent"].get_kwargs["name"])
        self.assertEqual(ResourceType.TORRENT, added.get_kwargs["torrent"].get_kwargs["type"])
        self.assertEqual(7, added.seeders)
        self.assertEqual(42, added.leechers)
        self.assertEqual(Source.UNKNOWN, added.source)
3,102
Python
.py
83
30.542169
95
0.660649
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,941
test_knowledge.py
Tribler_tribler/src/tribler/test_unit/core/database/layers/test_knowledge.py
from __future__ import annotations from types import SimpleNamespace from typing import TYPE_CHECKING from ipv8.test.base import TestBase from tribler.core.database.layers.health import ResourceType from tribler.core.database.layers.knowledge import KnowledgeDataAccessLayer, Operation from tribler.core.knowledge.payload import StatementOperation if TYPE_CHECKING: from typing_extensions import Self class MockResource: """ A mocked Resource that stored is call kwargs. """ def __init__(self, **kwargs) -> None: """ Create a MockResouce and store its kwargs. """ self.get_kwargs = kwargs @classmethod def get(cls: type[Self], **kwargs) -> type[Self]: """ Fake a search using the given kwargs and return an instance of ourselves. """ return cls(**kwargs) @classmethod def get_for_update(cls: type[Self], /, **kwargs) -> type[Self] | None: """ Mimic fetching the resource from the database. """ return cls(**kwargs) class MockEntity(MockResource, SimpleNamespace): """ Allow a db binding to write whatever they want to this class. """ CREATED = [] def __init__(self, **kwargs) -> None: """ Create a new MockEntity and add it to the CREATED list. """ super().__init__(**kwargs) self.CREATED.append(self) class MockStatement(MockEntity): """ A mocked Statement. """ def update_counter(self, operation: Operation, increment: int = 1, is_local_peer: bool = False) -> None: """ Fake a counter update and store the calling args. """ self.update_counter_arg_op = operation self.update_counter_arg_increment = increment self.update_counter_arg_local_peer = is_local_peer class MockStatementOp(MockEntity): """ A mocked StatementOp. """ clock = 0 operation = Operation.ADD def set(self, **kwargs) -> None: """ Fake a set and store the calling args. """ self.set_kwargs = kwargs def __hash__(self) -> int: """ We need a hash. """ return 0 @classmethod def select_random(cls: type[Self], count: int) -> list[Self]: """ Fake random selection. 
""" out = [] for i in range(count): statement_op = MockStatementOp() statement_op.id = i statement_op.statement = None statement_op.peer = None statement_op.operation = Operation.ADD.value statement_op.clock = 0 statement_op.signature = b"" statement_op.updated_at = 0 statement_op.auto_generated = False out.append(statement_op) return out class MockStatementOpMissing(MockStatement): """ A mocked StatementOp that does not exist. """ @classmethod def get_for_update(cls: type[Self], /, **kwargs) -> type[Self] | None: """ It did not exist before. """ del kwargs return None class MockDatabase: """ Mock the bindings that others will inherit from. """ Entity = MockEntity Resource = MockResource class TestKnowledgeDataAccessLayer(TestBase): """ Tests for the KnowledgeDataAccessLayer. """ def setUp(self) -> None: """ Mock all bindings. """ super().setUp() self.kdal = KnowledgeDataAccessLayer(MockDatabase()) self.kdal.Statement = MockStatement self.kdal.Statement.CREATED = [] self.kdal.StatementOp = MockStatementOpMissing self.kdal.StatementOp.CREATED = [] def get_created(self, entity_type: type[MockEntity]) -> list[MockEntity]: """ Get all instances of a particular mock entity type that have been created. """ return [entity for entity in entity_type.CREATED if isinstance(entity, entity_type)] def add_searchable_statement(self, subj: str = "\x01" * 20, obj: str = "test tag") -> None: """ Inject a Statement that can be searched for. 
""" self.kdal.add_auto_generated_operation(ResourceType.TORRENT, subj, ResourceType.TAG, obj) statement, = self.get_created(self.kdal.Statement) statement.subject_statements = self.kdal.Statement statement.object = SimpleNamespace() statement.object.name = statement.get_kwargs["object"].get_kwargs["name"] statement.object.type = statement.get_kwargs["object"].get_kwargs["type"] statement.subject = SimpleNamespace() statement.subject.name = statement.get_kwargs["subject"].get_kwargs["name"] statement.subject.type = statement.get_kwargs["subject"].get_kwargs["type"] self.kdal.get_statements = lambda **kwargs: [statement] def test_add_operation_update(self) -> None: """ Test if operations are correctly updated in the ORM. """ statement_operation = StatementOperation(subject_type=ResourceType.TORRENT, subject=b"\x01" * 20, predicate=ResourceType.TAG, object='test tag', operation=Operation.ADD, clock=1, creator_public_key=b"\x02" * 64) self.kdal.StatementOp = MockStatementOp self.kdal.StatementOp.CREATED = [] value = self.kdal.add_operation(statement_operation, b"\x00" * 32, True) statement, = self.get_created(self.kdal.Statement) statement_op, = self.get_created(self.kdal.StatementOp) self.assertTrue(value) self.assertEqual(b"\x01" * 20, statement.get_kwargs["subject"].get_kwargs["name"]) self.assertEqual(ResourceType.TORRENT, statement.get_kwargs["subject"].get_kwargs["type"]) self.assertEqual("test tag", statement.get_kwargs["object"].get_kwargs["name"]) self.assertEqual(ResourceType.TAG, statement.get_kwargs["object"].get_kwargs["type"]) self.assertEqual(Operation.ADD, statement.update_counter_arg_op) self.assertEqual(1, statement.update_counter_arg_increment) self.assertTrue(statement.update_counter_arg_local_peer) self.assertEqual(Operation.ADD, statement_op.set_kwargs["operation"]) self.assertEqual(1, statement_op.set_kwargs["clock"]) self.assertEqual(b"\x00" * 32, statement_op.set_kwargs["signature"]) self.assertFalse(statement_op.set_kwargs["auto_generated"]) 
def test_add_operation_past(self) -> None: """ Test if operations are not added to the ORM if their clock is in the past. """ statement_operation = StatementOperation(subject_type=ResourceType.TORRENT, subject=b"\x01" * 20, predicate=ResourceType.TAG, object='test tag', operation=Operation.ADD, clock=-1, creator_public_key=b"\x02" * 64) self.kdal.StatementOp = MockStatementOp self.kdal.StatementOp.CREATED = [] value = self.kdal.add_operation(statement_operation, b"\x00" * 32, True) self.assertFalse(value) def test_add_operation_missing(self) -> None: """ Test if operations are correctly added to the ORM is the statement op did not exist before. """ statement_operation = StatementOperation(subject_type=ResourceType.TORRENT, subject=b"\x01" * 20, predicate=ResourceType.TAG, object='test tag', operation=Operation.ADD, clock=1, creator_public_key=b"\x02" * 64) value = self.kdal.add_operation(statement_operation, b"\x00" * 32, True) statement, = self.get_created(self.kdal.Statement) self.assertTrue(value) self.assertEqual(b"\x01" * 20, statement.get_kwargs["subject"].get_kwargs["name"]) self.assertEqual(ResourceType.TORRENT, statement.get_kwargs["subject"].get_kwargs["type"]) self.assertEqual("test tag", statement.get_kwargs["object"].get_kwargs["name"]) self.assertEqual(ResourceType.TAG, statement.get_kwargs["object"].get_kwargs["type"]) self.assertEqual(Operation.ADD, statement.update_counter_arg_op) self.assertEqual(1, statement.update_counter_arg_increment) self.assertTrue(statement.update_counter_arg_local_peer) def test_add_auto_generated_operation(self) -> None: """ Test if auto generated operations are correctly added to the ORM. 
""" value = self.kdal.add_auto_generated_operation(ResourceType.TORRENT, "\x01" * 20, ResourceType.TAG, 'test tag') statement, = self.get_created(self.kdal.Statement) self.assertTrue(value) self.assertEqual("\x01" * 20, statement.get_kwargs["subject"].get_kwargs["name"]) self.assertEqual(ResourceType.TORRENT, statement.get_kwargs["subject"].get_kwargs["type"]) self.assertEqual("test tag", statement.get_kwargs["object"].get_kwargs["name"]) self.assertEqual(ResourceType.TAG, statement.get_kwargs["object"].get_kwargs["type"]) self.assertEqual(Operation.ADD, statement.update_counter_arg_op) self.assertEqual(1, statement.update_counter_arg_increment) self.assertFalse(statement.update_counter_arg_local_peer) def test_get_objects(self) -> None: """ Test if objects are correctly retrieved from the ORM. """ self.add_searchable_statement(obj="test tag") value = self.kdal.get_objects() self.assertEqual(["test tag"], value) def test_get_subjects(self) -> None: """ Test if subjects are correctly retrieved from the ORM. """ self.add_searchable_statement(subj="\x01" * 20) value = self.kdal.get_subjects() self.assertEqual(["\x01" * 20], value) def test_get_suggestions(self) -> None: """ Test if suggestions are correctly retrieved from the ORM. """ self.add_searchable_statement(subj="\x01" * 20, obj="test tag") value = self.kdal.get_suggestions(subject="\x01" * 20) self.assertEqual(["test tag"], value) def test_get_operations_for_gossip(self) -> None: """ Test if. """ self.kdal.StatementOp = MockStatementOp selected, = self.kdal.get_operations_for_gossip(1) self.assertFalse(selected.auto_generated) self.assertEqual(0, selected.clock) self.assertEqual(0, selected.id) self.assertEqual(Operation.ADD.value, selected.operation)
10,653
Python
.py
233
36.643777
119
0.63875
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,942
test_layer.py
Tribler_tribler/src/tribler/test_unit/core/database/layers/test_layer.py
from __future__ import annotations from typing import TYPE_CHECKING from ipv8.test.base import TestBase from tribler.core.database.layers.layer import Layer if TYPE_CHECKING: from typing_extensions import Self class MockEntity: """ A mocked-up database entity. """ def __init__(self, /, **kwargs) -> None: """ Create a new MockEntity and store the kwargs. """ self.init_kwargs = kwargs @classmethod def get_for_update(cls: type[Self], /, **kwargs) -> type[Self] | None: """ Mimic fetching the entity from the database. """ return cls(**kwargs) class MockUnknownEntity(MockEntity): """ A mocked-up database entity that cannot be retrieved. """ @classmethod def get_for_update(cls: type[Self], /, **kwargs) -> type[Self] | None: """ Mimic fetching the entity from the database. """ del kwargs return None class TestLayer(TestBase): """ Tests for the Layer base class. """ def test_retrieve_existing(self) -> None: """ Test retrieving a known entity. """ layer = Layer() value = layer.get_or_create(MockEntity) self.assertIsNotNone(value) self.assertEqual(value.init_kwargs, {}) def test_create_no_kwargs_no_create(self) -> None: """ Test creating a new entity without kwargs or create kwargs. """ layer = Layer() value = layer.get_or_create(MockUnknownEntity) self.assertIsNotNone(value) self.assertEqual(value.init_kwargs, {}) def test_create_no_kwargs_with_create(self) -> None: """ Test creating a new entity without kwargs but with create kwargs. """ layer = Layer() value = layer.get_or_create(MockUnknownEntity, create_kwargs={"a": 1}) self.assertIsNotNone(value) self.assertEqual(value.init_kwargs, {"a": 1}) def test_create_with_kwargs_no_create(self) -> None: """ Test creating a new entity with kwargs but without create kwargs. 
""" layer = Layer() value = layer.get_or_create(MockUnknownEntity, a=1) self.assertIsNotNone(value) self.assertEqual(value.init_kwargs, {"a": 1}) def test_create_with_kwargs_with_create(self) -> None: """ Test creating a new entity with both kwargs and create kwargs. """ layer = Layer() value = layer.get_or_create(MockUnknownEntity, create_kwargs={"a": 1}, b=2) self.assertIsNotNone(value) self.assertEqual(value.init_kwargs, {"a": 1, "b": 2})
2,675
Python
.py
76
27.723684
83
0.620381
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,943
test_torrent_metadata.py
Tribler_tribler/src/tribler/test_unit/core/database/orm_bindings/test_torrent_metadata.py
from __future__ import annotations from ipv8.test.base import TestBase from tribler.core.database.orm_bindings.torrent_metadata import entries_to_chunk, infohash_to_id, tdef_to_metadata_dict from tribler.core.libtorrent.torrentdef import TorrentDefNoMetainfo class MockTorrentMetadata: """ Mocked torrent metadata. """ def __init__(self, start: int, stop: int) -> None: """ Create mocked data and base its serialized form on the given range. """ self.start = start self.stop = stop def serialized(self) -> bytes: """ Serialize this mock to bytes. """ return bytes(list(range(self.start, self.stop))) def serialized_health(self) -> bytes: """ Serialize fake health to bytes. """ return b"\x07" class TestTorrentMetadata(TestBase): """ Tests for the TorrentMetadata helpers. """ def test_infohash_to_id(self) -> None: """ Test if ids are calculated by the first 8 bytes of an infohash. """ infohash1 = b"\x01" * 20 infohash2 = b"\x01" * 8 + b"\x00" * 12 infohash3 = b"\x01" * 7 + b"\x00" * 13 self.assertEqual(72340172838076673, infohash_to_id(infohash1)) self.assertEqual(72340172838076673, infohash_to_id(infohash2)) self.assertEqual(72340172838076672, infohash_to_id(infohash3)) def test_tdef_to_metadata_dict(self) -> None: """ Test if TorrentDef instances are correctly represented by dictionaries. """ tdef = TorrentDefNoMetainfo(b"\x01" * 20, b"torrent name", b"http://test.url/") value = tdef_to_metadata_dict(tdef) self.assertEqual(b"\x01" * 20, value["infohash"]) self.assertEqual("torrent name", value["title"]) self.assertEqual("Unknown", value["tags"]) self.assertEqual(0, value["size"]) self.assertEqual("", value["tracker_info"]) def test_entries_to_chunk_last_index_no_fit(self) -> None: """ Test if the last index of entries_to_chunk is correctly given if the data does not fit. 
""" _, last_index = entries_to_chunk([MockTorrentMetadata(0, 99), MockTorrentMetadata(100, 199)], 1) self.assertEqual(1, last_index) def test_entries_to_chunk_last_index_fit(self) -> None: """ Test if the last index of entries_to_chunk is correctly given if the data does fit. """ _, last_index = entries_to_chunk([MockTorrentMetadata(0, 99), MockTorrentMetadata(100, 199)], 400) self.assertEqual(2, last_index) def test_entries_to_chunk_health(self) -> None: """ Test if entries_to_chunk correctly gives the health data. """ chunks, last_index = entries_to_chunk([MockTorrentMetadata(0, 99), MockTorrentMetadata(100, 199)], 1, 0, True) *_, health = chunks self.assertEqual(1, last_index) self.assertEqual(7, health)
2,979
Python
.py
69
35.391304
119
0.638408
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,944
test_trackers.py
Tribler_tribler/src/tribler/test_unit/core/libtorrent/test_trackers.py
from ipv8.test.base import TestBase

from tribler.core.libtorrent.trackers import (
    MalformedTrackerURLException,
    add_url_params,
    get_uniformed_tracker_url,
    is_valid_url,
    parse_tracker_url,
)


class TestTrackers(TestBase):
    """
    Tests for the tracker-related functionality.
    """

    def test_get_uniformed_tracker_url_proper_udp(self) -> None:
        """
        Test if a proper UDP URL is not transformed.
        """
        self.assertEqual("udp://tracker.example.com:80",
                         get_uniformed_tracker_url("udp://tracker.example.com:80"))

    def test_get_uniformed_tracker_url_udp_no_port(self) -> None:
        """
        Test if a UDP URL without a port leads to a None value.
        """
        self.assertIsNone(get_uniformed_tracker_url("udp://tracker.example.com"))

    def test_get_uniformed_tracker_url_remove_path(self) -> None:
        """
        Test if a UDP URL with a path has its path removed.
        """
        self.assertEqual("udp://tracker.example.com:80",
                         get_uniformed_tracker_url("udp://tracker.example.com:80/announce"))

    def test_get_uniformed_tracker_url_trailing_zeros(self) -> None:
        """
        Test if a UDP URL with trailing zeros has the zeros removed.
        """
        self.assertEqual("udp://tracker.example.com:80",
                         get_uniformed_tracker_url("udp://tracker.example.com:80\x00"))

    def test_get_uniformed_tracker_url_trailing_other(self) -> None:
        """
        Test if a UDP URL with trailing garbage leads to a None value.
        """
        self.assertIsNone(get_uniformed_tracker_url("udp://tracker.example.com:80\xFF"))

    def test_get_uniformed_tracker_url_proper_http(self) -> None:
        """
        Test if a proper HTTP URL is not transformed.
        """
        self.assertEqual("http://tracker.example.com:6969/announce",
                         get_uniformed_tracker_url("http://tracker.example.com:6969/announce"))

    def test_get_uniformed_tracker_url_http_trailing_fwdslash(self) -> None:
        """
        Test if a HTTP URL with a trailing forward slash has this postfix removed.
        """
        self.assertEqual("http://tracker.example.com:6969/announce",
                         get_uniformed_tracker_url("http://tracker.example.com:6969/announce/"))

    def test_get_uniformed_tracker_url_http_no_path(self) -> None:
        """
        Test if a HTTP URL without a path leads to a None value.
        """
        self.assertIsNone(get_uniformed_tracker_url("http://tracker.example.com"))

    def test_get_uniformed_tracker_url_http_nbsp(self) -> None:
        """
        Test if a HTTP URL with a trailing non-breaking space.
        """
        self.assertIsNone(get_uniformed_tracker_url("http://tracker.example.com\xa0"))

    def test_get_uniformed_tracker_url_http_no_port(self) -> None:
        """
        Test if a HTTP URL without a port is not transformed.
        """
        self.assertEqual("http://tracker.example.com/announce",
                         get_uniformed_tracker_url("http://tracker.example.com/announce/"))

    def test_get_uniformed_tracker_url_http_standard_port(self) -> None:
        """
        Test if a HTTP URL with the standard port has its port removed.
        """
        self.assertEqual("http://tracker.example.com/announce",
                         get_uniformed_tracker_url("http://tracker.example.com:80/announce/"))

    def test_get_uniformed_tracker_url_bad_url(self) -> None:
        """
        Test if a HTTP URL bad URL encoding leads to a None value.
        """
        self.assertIsNone(get_uniformed_tracker_url("http://tracker.example.com/?do=upload"))

    def test_get_uniformed_tracker_url_truncated(self) -> None:
        """
        Test if a truncated HTTP URL leads to a None value.
        """
        self.assertIsNone(get_uniformed_tracker_url("http://tracker.example.com/anno..."))

    def test_get_uniformed_tracker_url_https_no_port(self) -> None:
        """
        Test if a HTTPS URL without a port is not transformed.
        """
        self.assertEqual("https://tracker.example.com/announce",
                         get_uniformed_tracker_url("https://tracker.example.com/announce/"))

    def test_get_uniformed_tracker_url_https_standard_port(self) -> None:
        """
        Test if a HTTPS URL with the standard port has its port removed.
        """
        self.assertEqual("https://tracker.example.com/announce",
                         get_uniformed_tracker_url("https://tracker.example.com:443/announce/"))

    def test_get_uniformed_tracker_url_unknown_scheme(self) -> None:
        """
        Test if a URL with an unknown scheme leads to a None value.
        """
        self.assertIsNone(get_uniformed_tracker_url("unknown://tracker.example.com/announce"))

    def test_get_uniformed_tracker_url_corrupt_url(self) -> None:
        """
        Test if a URL with a corrupt URL leads to a None value.
        """
        self.assertIsNone(get_uniformed_tracker_url("http://tracker.examp\xffle.com/announce"))

    def test_get_uniformed_tracker_url_empty(self) -> None:
        """
        Test if an empty URL leads to a None value.
        """
        self.assertIsNone(get_uniformed_tracker_url(""))

    def test_parse_tracker_url_udp_with_port(self) -> None:
        """
        Test if tracker UDP URLs with a port are parsed correctly.
        """
        self.assertEqual(("udp", ("tracker.example.com", 80), ""),
                         parse_tracker_url("udp://tracker.example.com:80"))

    def test_parse_tracker_url_http_with_port(self) -> None:
        """
        Test if tracker HTTP URLs with a port are parsed correctly.
        """
        self.assertEqual(("http", ("tracker.example.com", 6969), "/announce"),
                         parse_tracker_url("http://tracker.example.com:6969/announce"))

    def test_parse_tracker_url_https_with_port(self) -> None:
        """
        Test if tracker HTTPS URLs with a port are parsed correctly.
        """
        self.assertEqual(("https", ("tracker.example.com", 6969), "/announce"),
                         parse_tracker_url("https://tracker.example.com:6969/announce"))

    def test_parse_tracker_url_http_without_port(self) -> None:
        """
        Test if tracker HTTP URLs without a port are parsed correctly (default port 80).
        """
        self.assertEqual(("http", ("tracker.example.com", 80), "/announce"),
                         parse_tracker_url("http://tracker.example.com/announce"))

    def test_parse_tracker_url_https_without_port(self) -> None:
        """
        Test if tracker HTTPS URLs without a port are parsed correctly (default port 443).
        """
        self.assertEqual(("https", ("tracker.example.com", 443), "/announce"),
                         parse_tracker_url("https://tracker.example.com/announce"))

    def test_parse_tracker_url_http_non_standard_port(self) -> None:
        """
        Test if tracker HTTP URLs with a non-standard port are parsed correctly.
        """
        self.assertEqual(("http", ("ipv6.tracker.example.com", 6969), "/announce"),
                         parse_tracker_url("http://ipv6.tracker.example.com:6969/announce"))

    def test_parse_tracker_url_https_non_standard_port(self) -> None:
        """
        Test if tracker HTTPS URLs with a non-standard port are parsed correctly.
        """
        self.assertEqual(("https", ("ipv6.tracker.example.com", 6969), "/announce"),
                         parse_tracker_url("https://ipv6.tracker.example.com:6969/announce"))

    def test_parse_tracker_url_unknown_scheme(self) -> None:
        """
        Test if tracker URLs with an unknown scheme raise a MalformedTrackerURLException.
        """
        with self.assertRaises(MalformedTrackerURLException):
            parse_tracker_url("unknown://ipv6.tracker.example.com:6969/announce")

    def test_parse_tracker_url_http_bad_url(self) -> None:
        """
        Test if tracker HTTP URLs with a bad URL raise a MalformedTrackerURLException.
        """
        with self.assertRaises(MalformedTrackerURLException):
            parse_tracker_url("http://tracker.example.com:6969/announce( %(")

    def test_parse_tracker_url_https_bad_url(self) -> None:
        """
        Test if tracker HTTPS URLs with a bad URL raise a MalformedTrackerURLException.
        """
        with self.assertRaises(MalformedTrackerURLException):
            parse_tracker_url("https://tracker.example.com:6969/announce( %(")

    def test_parse_tracker_url_unknown_scheme_no_path(self) -> None:
        """
        Test if tracker URLs with an unknown scheme and no path raise a MalformedTrackerURLException.
        """
        with self.assertRaises(MalformedTrackerURLException):
            parse_tracker_url("unknown://tracker.example.com:80")

    def test_parse_tracker_url_http_no_path(self) -> None:
        """
        Test if tracker HTTP URLs without a path raise a MalformedTrackerURLException.
        """
        with self.assertRaises(MalformedTrackerURLException):
            parse_tracker_url("http://ipv6.tracker.example.com:6969")

    def test_parse_tracker_url_https_no_path(self) -> None:
        """
        Test if tracker HTTPS URLs without a path raise a MalformedTrackerURLException.
        """
        with self.assertRaises(MalformedTrackerURLException):
            parse_tracker_url("https://ipv6.tracker.example.com:6969")

    def test_parse_tracker_url_udp_no_port(self) -> None:
        """
        Test if tracker UDP URLs without a port raise a MalformedTrackerURLException.
        """
        with self.assertRaises(MalformedTrackerURLException):
            parse_tracker_url("udp://tracker.example.com")

    def test_add_url_param_some_present(self) -> None:
        """
        Test if appending parameters to a URL that already has a query string works.
        """
        updated = add_url_params("http://stackoverflow.com/test?answers=true",
                                 {"answers": False, "data": ["some", "values"]})

        self.assertIn("data=values", updated)
        self.assertIn("answers=false", updated)

    def test_add_url_param_clean(self) -> None:
        """
        Test if adding parameters to a URL without a query string works.
        """
        updated = add_url_params("http://stackoverflow.com/test", {"data": ["some", "values"]})

        self.assertIn("data=some", updated)
        self.assertIn("data=values", updated)

    def test_valid_url_invalid_with_space(self) -> None:
        """
        Test if a URL with a space is invalid.
        """
        self.assertFalse(is_valid_url("http://anno nce.torrentsmd.com:8080/announce"))

    def test_valid_url_wild(self) -> None:
        """
        Test if a normal dotcom URL is valid (note: a trailing space is accepted).
        """
        self.assertTrue(is_valid_url("http://announce.torrentsmd.com:8080/announce "))

    def test_valid_url_localhost(self) -> None:
        """
        Test if a localhost URL is valid.
        """
        self.assertTrue(is_valid_url("http://localhost:1920/announce"))

    def test_valid_url(self) -> None:
        """
        Test if a UDP URL is valid.
        """
        self.assertTrue(is_valid_url("udp://localhost:1264"))
11,563
Python
.py
252
37.313492
108
0.633871
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,945
test_torrentdef.py
Tribler_tribler/src/tribler/test_unit/core/libtorrent/test_torrentdef.py
from pathlib import Path
from unittest.mock import AsyncMock, Mock, patch

import libtorrent
from aiohttp import ClientResponseError
from ipv8.test.base import TestBase

from tribler.core.libtorrent.torrentdef import TorrentDef, TorrentDefNoMetainfo
from tribler.test_unit.core.libtorrent.mocks import TORRENT_WITH_DIRS, TORRENT_WITH_DIRS_CONTENT


class TestTorrentDef(TestBase):
    """
    Tests for the TorrentDef class.
    """

    def test_tdef_init_parameters(self) -> None:
        """
        Test if a TorrentDef object can be initialized with parameters.
        """
        tdef_params = TorrentDef(torrent_parameters={b"announce": "http://test.com"})
        self.assertIn(b"announce", tdef_params.torrent_parameters)

    def test_create_invalid_tdef_empty_metainfo(self) -> None:
        """
        Test if creating a TorrentDef object without metainfo results in a ValueError.
        """
        with self.assertRaises(ValueError):
            TorrentDef(metainfo={})

    def test_create_invalid_tdef_empty_metainfo_validate(self) -> None:
        """
        Test if creating a TorrentDef object without metainfo and with validation results in a ValueError.
        """
        with self.assertRaises(ValueError):
            TorrentDef(metainfo={}, ignore_validation=False)

    def test_create_invalid_tdef_empty_info(self) -> None:
        """
        Test if creating a TorrentDef object with metainfo but an empty info dictionary results in a ValueError.
        """
        with self.assertRaises(ValueError):
            TorrentDef(metainfo={b"info": {}})

    def test_create_invalid_tdef_empty_info_validate(self) -> None:
        """
        Test if a TorrentDef object with metainfo but an empty info dict with validation results in a ValueError.
        """
        with self.assertRaises(ValueError):
            TorrentDef(metainfo={b"info": {}}, ignore_validation=False)

    def test_get_name_utf8_unknown(self) -> None:
        """
        Test if we can successfully get the UTF-8 name.
        """
        tdef = TorrentDef()
        tdef.set_name(b"\xA1\xC0")
        # The raw name bytes are decoded via the encoding field set here (euc_kr).
        tdef.torrent_parameters[b"encoding"] = b"euc_kr"
        self.assertEqual("\xf7", tdef.get_name_utf8())

    def test_get_name_utf8(self) -> None:
        """
        Check if we can successfully get the UTF-8 encoded torrent name when using a different encoding.
        """
        tdef = TorrentDef()
        tdef.set_name(b"\xA1\xC0")
        self.assertEqual("\xa1\xc0", tdef.get_name_utf8())

    def test_is_private_non_private(self) -> None:
        """
        Test if a torrent marked with private = 0 is not seen as private.
        """
        tdef = TorrentDef()
        tdef.metainfo = {b"info": {b"private": 0}}
        self.assertFalse(tdef.is_private())

    def test_is_private_private(self) -> None:
        """
        Test if a torrent marked with private = 1 is seen as private.
        """
        tdef = TorrentDef()
        tdef.metainfo = {b"info": {b"private": 1}}
        self.assertTrue(tdef.is_private())

    def test_is_private_i1e(self) -> None:
        """
        Test if an invalid but common private field setting of i1e (instead of 1) is not valid as private.
        """
        tdef = TorrentDef()
        tdef.metainfo = {b"info": {b"private": b"i1e"}}
        self.assertFalse(tdef.is_private())

    def test_is_private_i0e(self) -> None:
        """
        Test if an invalid but common private field setting of i0e (instead of 0) is not valid as private.
        """
        tdef = TorrentDef()
        tdef.metainfo = {b"info": {b"private": b"i0e"}}
        self.assertFalse(tdef.is_private())

    def test_load_private_from_memory_nothing(self) -> None:
        """
        Test if loading the private field set to nothing from an existing torrent works correctly.
        """
        tdef = TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT)
        self.assertFalse(tdef.is_private())

    def test_load_private_from_memory_public(self) -> None:
        """
        Test if loading the private field set to 0 from an existing torrent works correctly.
        """
        # Splice a bencoded "private" entry (i0e) right after the opening of the info dict.
        info_dict_index = TORRENT_WITH_DIRS_CONTENT.find(b"4:infod") + 7
        new_content = (TORRENT_WITH_DIRS_CONTENT[:info_dict_index] + b"7:privatei0e"
                       + TORRENT_WITH_DIRS_CONTENT[info_dict_index:])
        tdef = TorrentDef.load_from_memory(new_content)
        self.assertFalse(tdef.is_private())

    def test_load_private_from_memory_private(self) -> None:
        """
        Test if loading the private field set to 1 from an existing torrent works correctly.
        """
        # Splice a bencoded "private" entry (i1e) right after the opening of the info dict.
        info_dict_index = TORRENT_WITH_DIRS_CONTENT.find(b"4:infod") + 7
        new_content = (TORRENT_WITH_DIRS_CONTENT[:info_dict_index] + b"7:privatei1e"
                       + TORRENT_WITH_DIRS_CONTENT[info_dict_index:])
        tdef = TorrentDef.load_from_memory(new_content)
        self.assertTrue(tdef.is_private())

    async def test_load_from_url(self) -> None:
        """
        Test if torrents can be loaded from a URL.
        """
        # Patch the aiohttp session so the "downloaded" body is our in-memory torrent bytes.
        response_mock = AsyncMock(read=AsyncMock(return_value=TORRENT_WITH_DIRS_CONTENT))
        with patch("aiohttp.ClientSession", Mock(return_value=Mock(
                get=AsyncMock(return_value=response_mock)))
        ):
            tdef = await TorrentDef.load_from_url("http://127.0.0.1:1234/ubuntu.torrent")
        self.assertEqual(b"\xb3\xba\x19\xc93\xda\x95\x84k\xfd\xf7Z\xd0\x8a\x94\x9cl\xea\xc7\xbc", tdef.infohash)

    async def test_load_from_url_404(self) -> None:
        """
        Test if 404 errors are not caught.
        """
        with patch("aiohttp.ClientSession", Mock(return_value=Mock(
                get=AsyncMock(side_effect=ClientResponseError(None, None, status=404))))
        ), self.assertRaises(ClientResponseError):
            await TorrentDef.load_from_url("http://127.0.0.1:1234/ubuntu.torrent")

    def test_torrent_default_encoding(self) -> None:
        """
        Test if the default encoding is set to UTF-8.
        """
        tdef = TorrentDef()
        self.assertEqual("utf-8", tdef.get_encoding())

    def test_torrent_encoding(self) -> None:
        """
        Test if the encoding can be set to any string.
        """
        tdef = TorrentDef()
        tdef.set_encoding(b"my_fancy_encoding")
        self.assertEqual("my_fancy_encoding", tdef.get_encoding())

    def test_set_tracker_invalid_url(self) -> None:
        """
        Test if setting an invalid tracker raises a ValueError.
        """
        tdef = TorrentDef()
        with self.assertRaises(ValueError):
            tdef.set_tracker("http/tracker.org")

    def test_set_tracker_strip_slash(self) -> None:
        """
        Test if the final slash in a tracker URL is stripped.
        """
        tdef = TorrentDef()
        tdef.set_tracker("http://tracker.org/")
        self.assertEqual("http://tracker.org", tdef.torrent_parameters[b"announce"])

    def test_set_tracker(self) -> None:
        """
        Test if a tracker can be set to a valid URL.
        """
        tdef = TorrentDef()
        tdef.set_tracker("http://tracker.org")
        self.assertSetEqual({'http://tracker.org'}, tdef.get_trackers())

    def test_get_trackers(self) -> None:
        """
        Test if get_trackers returns flat set of trackers.
        """
        tdef = TorrentDef()
        # Tiered tracker hierarchy should be flattened into a single set.
        tdef.get_tracker_hierarchy = Mock(return_value=[["t1", "t2"], ["t3"], ["t4"]])
        trackers = tdef.get_trackers()
        self.assertSetEqual({"t1", "t2", "t3", "t4"}, trackers)

    def test_get_default_nr_pieces(self) -> None:
        """
        Test if the default number of pieces is zero.
        """
        tdef = TorrentDef()
        self.assertEqual(0, tdef.get_nr_pieces())

    def test_get_nr_pieces(self) -> None:
        """
        Test if the number of pieces can be retrieved from a TorrentDef.
        """
        tdef = TorrentDef()
        # 40 bytes of piece hashes at 20 bytes per SHA-1 hash yields 2 pieces.
        tdef.metainfo = {b"info": {b"pieces": b"a" * 40}}
        self.assertEqual(2, tdef.get_nr_pieces())

    def test_is_multifile_empty(self) -> None:
        """
        Test if an empty TorrentDef is not classified as a multifile torrent.
        """
        tdef = TorrentDef()
        self.assertFalse(tdef.is_multifile_torrent())

    def test_is_multifile(self) -> None:
        """
        Test if a TorrentDef is correctly classified as multifile torrent.
        """
        tdef = TorrentDef(metainfo={b"info": {b"files": [b"a"]}})
        self.assertTrue(tdef.is_multifile_torrent())

    def test_set_piece_length_invalid_type(self) -> None:
        """
        Test if the piece length cannot be set to something other than an int.
        """
        tdef = TorrentDef()
        with self.assertRaises(ValueError):
            tdef.set_piece_length(b"20")

    def test_get_piece_length(self) -> None:
        """
        Test if the default piece length is zero.
        """
        tdef = TorrentDef()
        self.assertEqual(0, tdef.get_piece_length())

    def test_load_from_dict(self) -> None:
        """
        Test if a TorrentDef can be loaded from a dictionary.
        """
        metainfo = {b"info": libtorrent.bdecode(TORRENT_WITH_DIRS.metadata())}
        tdef = TorrentDef.load_from_dict(metainfo)
        self.assertEqual(b"\xb3\xba\x19\xc93\xda\x95\x84k\xfd\xf7Z\xd0\x8a\x94\x9cl\xea\xc7\xbc", tdef.infohash)

    def test_torrent_no_metainfo(self) -> None:
        """
        Test if a TorrentDefNoMetainfo can be constructed from torrent file information.
        """
        tdef = TorrentDefNoMetainfo(b"12345678901234567890", b"ubuntu.torrent", "http://google.com")
        self.assertEqual(b"ubuntu.torrent", tdef.get_name())
        self.assertEqual(b"12345678901234567890", tdef.get_infohash())
        self.assertEqual(0, tdef.get_length())
        self.assertIsNone(tdef.get_metainfo())
        self.assertEqual(b"http://google.com", tdef.get_url())
        self.assertFalse(tdef.is_multifile_torrent())
        self.assertEqual("ubuntu.torrent", tdef.get_name_as_unicode())
        self.assertEqual([], tdef.get_files())
        self.assertEqual([], tdef.get_files_with_length())
        self.assertEqual(0, len(tdef.get_trackers()))
        self.assertFalse(tdef.is_private())
        self.assertEqual("ubuntu.torrent", tdef.get_name_utf8())
        self.assertEqual(0, tdef.get_nr_pieces())
        self.assertIsNone(tdef.torrent_info)

    def test_torrent_no_metainfo_load_info(self) -> None:
        """
        Test if a TorrentDefNoMetainfo does not load torrent info if there is none to load by definition.
        """
        tdef = TorrentDefNoMetainfo(b"12345678901234567890", b"ubuntu.torrent", "http://google.com")
        tdef.load_torrent_info()
        self.assertIsNone(tdef.torrent_info)

    def test_magnet_no_metainfo(self) -> None:
        """
        Test if a TorrentDefNoMetainfo can be constructed from magnet link information.
        """
        torrent2 = TorrentDefNoMetainfo(b"12345678901234567890", b"ubuntu.torrent", "magnet:")
        self.assertEqual(0, len(torrent2.get_trackers()))

    def test_get_length(self) -> None:
        """
        Test if a TorrentDef has 0 length by default.
        """
        tdef = TorrentDef()
        self.assertEqual(0, tdef.get_length())

    def test_get_index(self) -> None:
        """
        Test if we can successfully get the index of a file in a torrent.
        """
        tdef = TorrentDef(metainfo={b"info": {b"files": [{b"path": [b"a.txt"], b"length": 123}]}})
        self.assertEqual(0, tdef.get_index_of_file_in_files("a.txt"))
        with self.assertRaises(ValueError):
            tdef.get_index_of_file_in_files("b.txt")
        with self.assertRaises(ValueError):
            tdef.get_index_of_file_in_files(None)

    def test_get_index_utf8(self) -> None:
        """
        Test if we can successfully get the index of a path.utf-8 file in a torrent.
        """
        # When both keys are present, the path.utf-8 name ("b.txt") is the one looked up.
        tdef = TorrentDef({b"info": {b"files": [{b"path": [b"a.txt"], b"path.utf-8": [b"b.txt"],
                                                 b"length": 123}]}})
        self.assertEqual(0, tdef.get_index_of_file_in_files("b.txt"))

    def test_get_index_no_metainfo(self) -> None:
        """
        Test if attempting to fetch a file from a TorrentDef without metainfo results in a ValueError.
        """
        tdef = TorrentDef(metainfo=None)
        with self.assertRaises(ValueError):
            tdef.get_index_of_file_in_files("b.txt")

    def test_get_name_as_unicode_path_utf8(self) -> None:
        """
        Test if names for files with a name.utf-8 path can be decoded.
        """
        name_bytes = b"\xe8\xaf\xad\xe8\xa8\x80\xe5\xa4\x84\xe7\x90\x86"
        name_unicode = name_bytes.decode()
        tdef = TorrentDef(metainfo={b"info": {b"name.utf-8": name_bytes}})
        self.assertEqual(name_unicode, tdef.get_name_as_unicode())

    def test_get_name_as_unicode(self) -> None:
        """
        Test if normal UTF-8 names can be decoded.
        """
        name_bytes = b"\xe8\xaf\xad\xe8\xa8\x80\xe5\xa4\x84\xe7\x90\x86"
        name_unicode = name_bytes.decode()
        tdef = TorrentDef(metainfo={b"info": {b"name": name_bytes}})
        self.assertEqual(name_unicode, tdef.get_name_as_unicode())

    def test_get_name_as_unicode_replace_illegal(self) -> None:
        """
        Test if illegal characters are replaced with question marks.

        Note: the FF byte ruins the unicode encoding for all following bytes.
        """
        name_bytes = b"\xe8\xaf\xad\xe8\xa8\x80\xe5\xa4\x84\xe7\x90\x86"
        tdef = TorrentDef(metainfo={b"info": {b"name": b"test\xff" + name_bytes}})
        # Everything from the invalid byte onward is replaced, one "?" per byte.
        self.assertEqual("test" + "?" * len(b"\xff" + name_bytes), tdef.get_name_as_unicode())

    def test_get_files_with_length(self) -> None:
        """
        Test if get_files_with_length returns the correct result for normal path names.
        """
        # NOTE(review): this test feeds b"path.utf-8" entries despite the "normal path names"
        # docstring — possibly swapped with test_get_files_with_length_valid_path_utf8; confirm.
        name_bytes = b"\xe8\xaf\xad\xe8\xa8\x80\xe5\xa4\x84\xe7\x90\x86"
        name_unicode = name_bytes.decode()
        tdef = TorrentDef(metainfo={b"info": {b"files": [{b"path.utf-8": [name_bytes], b"length": 123},
                                                         {b"path.utf-8": [b"file.txt"], b"length": 456}]}})
        self.assertEqual([(Path(name_unicode), 123), (Path('file.txt'), 456)], tdef.get_files_with_length())

    def test_get_files_with_length_valid_path_utf8(self) -> None:
        """
        Test if get_files_with_length returns the correct result for path.utf-8 names.
        """
        name_bytes = b"\xe8\xaf\xad\xe8\xa8\x80\xe5\xa4\x84\xe7\x90\x86"
        name_unicode = name_bytes.decode()
        tdef = TorrentDef(metainfo={b"info": {b"files": [{b"path": [name_bytes], b"length": 123},
                                                         {b"path": [b"file.txt"], b"length": 456}]}})
        self.assertEqual([(Path(name_unicode), 123), (Path('file.txt'), 456)], tdef.get_files_with_length())

    def test_get_files_with_length_illegal_path(self) -> None:
        """
        Test if get_files_with_length sanitizes files with invalid path names.
        """
        name_bytes = b"\xe8\xaf\xad\xe8\xa8\x80\xe5\xa4\x84\xe7\x90\x86"
        tdef = TorrentDef(metainfo={b"info": {b"files": [{b"path": [b"test\xff" + name_bytes], b"length": 123},
                                                         {b"path": [b"file.txt"], b"length": 456}]}})
        self.assertEqual([(Path("test?????????????"), 123), (Path("file.txt"), 456)],
                         tdef.get_files_with_length())

    def test_get_files_with_length_illegal_path_utf8(self) -> None:
        """
        Test if get_files_with_length sanitizes files with invalid path.utf-8 names.
        """
        # An invalid path.utf-8 entry is dropped entirely (unlike b"path", which is sanitized).
        name_bytes = b"\xe8\xaf\xad\xe8\xa8\x80\xe5\xa4\x84\xe7\x90\x86"
        tdef = TorrentDef(metainfo={b"info": {b"files": [{b"path.utf-8": [b"test\xff" + name_bytes],
                                                          b"length": 123},
                                                         {b"path": [b"file.txt"], b"length": 456}]}})
        self.assertEqual([(Path("file.txt"), 456)], tdef.get_files_with_length())

    def test_load_torrent_info(self) -> None:
        """
        Test if load_torrent_info() loads the torrent info.
        """
        tdef = TorrentDef(metainfo={
            b"info": {
                b"name": b"torrent name",
                b"files": [{b"path": [b"a.txt"], b"length": 123}],
                b"piece length": 128,
                b"pieces": b"\x00" * 20
            }
        })
        tdef.load_torrent_info()
        self.assertTrue(tdef.torrent_info_loaded())
        self.assertIsNotNone(tdef.torrent_info)

    def test_lazy_load_torrent_info(self) -> None:
        """
        Test if accessing torrent_info loads the torrent info.
        """
        tdef = TorrentDef(metainfo={
            b"info": {
                b"name": b"torrent name",
                b"files": [{b"path": [b"a.txt"], b"length": 123}],
                b"piece length": 128,
                b"pieces": b"\x00" * 20
            }
        })
        # Not loaded yet; the attribute access itself triggers loading.
        self.assertFalse(tdef.torrent_info_loaded())
        self.assertIsNotNone(tdef.torrent_info)
        self.assertTrue(tdef.torrent_info_loaded())

    def test_generate_tree(self) -> None:
        """
        Test if a torrent tree can be generated from a TorrentDef.
        """
        tdef = TorrentDef(metainfo={
            b"info": {
                b"name": b"torrent name",
                b"files": [{b"path": [b"a.txt"], b"length": 123}],
                b"piece length": 128,
                b"pieces": b"\x00" * 20
            }
        })
        tree = tdef.torrent_file_tree
        self.assertEqual(123, tree.find(Path("torrent name") / "a.txt").size)
17,801
Python
.py
378
37.343915
117
0.602795
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,946
test_torrent_file_tree.py
Tribler_tribler/src/tribler/test_unit/core/libtorrent/test_torrent_file_tree.py
from pathlib import Path from ipv8.test.base import TestBase from tribler.core.libtorrent.torrent_file_tree import TorrentFileTree from tribler.test_unit.core.libtorrent.mocks import TORRENT_UBUNTU_FILE, TORRENT_WITH_DIRS class TestTorrentFileTree(TestBase): """ Tests for the TorrentFileTree class. """ def test_file_natsort_numbers(self) -> None: """ Test the natural sorting of File instances for numbers only. """ self.assertLess(TorrentFileTree.File("01", 0, 0), TorrentFileTree.File("10", 0, 0)) self.assertLessEqual(TorrentFileTree.File("010", 0, 0), TorrentFileTree.File("10", 0, 0)) self.assertEqual(TorrentFileTree.File("010", 0, 0), TorrentFileTree.File("10", 0, 0)) self.assertGreaterEqual(TorrentFileTree.File("010", 0, 0), TorrentFileTree.File("10", 0, 0)) self.assertGreater(TorrentFileTree.File("011", 0, 0), TorrentFileTree.File("10", 0, 0)) self.assertGreater(TorrentFileTree.File("11", 0, 0), TorrentFileTree.File("10", 0, 0)) self.assertNotEqual(TorrentFileTree.File("11", 0, 0), TorrentFileTree.File("10", 0, 0)) def test_file_natsort_compound(self) -> None: """ Test the natural sorting of File instances for names mixing numbers and text. 
""" self.assertNotEqual(TorrentFileTree.File("a1b", 0, 0), TorrentFileTree.File("a10b", 0, 0)) self.assertLess(TorrentFileTree.File("a1b", 0, 0), TorrentFileTree.File("a10b", 0, 0)) self.assertLess(TorrentFileTree.File("a10b", 0, 0), TorrentFileTree.File("b10b", 0, 0)) self.assertLessEqual(TorrentFileTree.File("a10b", 0, 0), TorrentFileTree.File("a010b", 0, 0)) self.assertEqual(TorrentFileTree.File("a10b", 0, 0), TorrentFileTree.File("a010b", 0, 0)) self.assertGreaterEqual(TorrentFileTree.File("a10b", 0, 0), TorrentFileTree.File("a010b", 0, 0)) self.assertGreater(TorrentFileTree.File("a010c", 0, 0), TorrentFileTree.File("a10b", 0, 0)) self.assertGreater(TorrentFileTree.File("a010c", 0, 0), TorrentFileTree.File("a0010b", 0, 0)) def test_create_from_flat_torrent(self) -> None: """ Test if we can correctly represent a torrent with a single file. """ tree = TorrentFileTree.from_lt_file_storage(TORRENT_UBUNTU_FILE.files()) self.assertEqual(0, len(tree.root.directories)) self.assertEqual(1, len(tree.root.files)) self.assertEqual(0, tree.root.files[0].index) self.assertEqual("ubuntu-15.04-desktop-amd64.iso", tree.root.files[0].name) self.assertEqual(1150844928, tree.root.files[0].size) self.assertEqual(1150844928, tree.root.size) def test_create_from_torrent_wdirs(self) -> None: """ Test if we can correctly represent a torrent with multiple files and directories. """ tree = TorrentFileTree.from_lt_file_storage(TORRENT_WITH_DIRS.files()) self.assertEqual(1, len(tree.root.directories)) self.assertEqual(0, len(tree.root.files)) self.assertEqual(36, tree.root.size) self.assertTrue(tree.root.directories["torrent_create"].collapsed) def test_create_from_torrent_wdirs_expand(self) -> None: """ Test if we can expand directories. 
""" tree = TorrentFileTree.from_lt_file_storage(TORRENT_WITH_DIRS.files()) tree.expand(Path("") / "torrent_create") subdir = tree.root.directories["torrent_create"] self.assertEqual(2, len(subdir.directories)) self.assertEqual(1, len(subdir.files)) self.assertEqual(36, subdir.size) self.assertFalse(subdir.collapsed) def test_create_from_torrent_wdirs_collapse(self) -> None: """ Test if we can collapse directories, remembering the uncollapsed state of child directories. """ tree = TorrentFileTree.from_lt_file_storage(TORRENT_WITH_DIRS.files()) tree.expand(Path("") / "torrent_create" / "abc") subdir = tree.root.directories["torrent_create"] tree.collapse(Path("") / "torrent_create") self.assertEqual(1, len(tree.root.directories)) self.assertEqual(0, len(tree.root.files)) self.assertEqual(36, tree.root.size) self.assertTrue(subdir.collapsed) self.assertFalse(subdir.directories["abc"].collapsed) # Note: this is not visible to the user! def test_expand_drop_nonexistent(self) -> None: """ Test if we expand the directory up to the point where we have it. This is edge-case behavior. """ tree = TorrentFileTree.from_lt_file_storage(TORRENT_WITH_DIRS.files()) subdir = tree.root.directories["torrent_create"].directories["abc"] tree.expand(Path("") / "torrent_create" / "abc" / "idontexist") self.assertFalse(tree.root.directories["torrent_create"].collapsed) self.assertFalse(subdir.collapsed) def test_collapse_drop_nonexistent(self) -> None: """ Test if we collapse the directory only if it exists. This is edge-case behavior. """ tree = TorrentFileTree.from_lt_file_storage(TORRENT_WITH_DIRS.files()) tree.expand(Path("") / "torrent_create" / "abc") subdir = tree.root.directories["torrent_create"].directories["abc"] tree.collapse(Path("") / "torrent_create" / "abc" / "idontexist") self.assertFalse(subdir.collapsed) def test_to_str_flat(self) -> None: """ Test if we can print trees with a single file. 
""" tree = TorrentFileTree.from_lt_file_storage(TORRENT_UBUNTU_FILE.files()) expected = """TorrentFileTree( Directory('', \tdirectories=[], \tfiles=[ \t\tFile(0, ubuntu-15.04-desktop-amd64.iso, 1150844928 bytes)], 1150844928 bytes) )""" self.assertEqual(expected, str(tree)) def test_to_str_wdirs_collapsed(self) -> None: """ Test if we can print trees with a collapsed directories. """ tree = TorrentFileTree.from_lt_file_storage(TORRENT_WITH_DIRS.files()) expected = """TorrentFileTree( Directory('', \tdirectories=[ \t\tCollapsedDirectory('torrent_create', 36 bytes) \t], \tfiles=[], 36 bytes) )""" self.assertEqual(expected, str(tree)) def test_to_str_wdirs_expanded(self) -> None: """ Test if we can print trees with files and collapsed and uncollapsed directories. """ tree = TorrentFileTree.from_lt_file_storage(TORRENT_WITH_DIRS.files()) tree.expand(Path("") / "torrent_create") tree.expand(Path("") / "torrent_create" / "def") expected = """TorrentFileTree( Directory('', \tdirectories=[ \t\tDirectory('torrent_create', \t\t\tdirectories=[ \t\t\t\tCollapsedDirectory('abc', 18 bytes), \t\t\t\tDirectory('def', \t\t\t\t\tdirectories=[], \t\t\t\t\tfiles=[ \t\t\t\t\t\tFile(4, file5.txt, 6 bytes) \t\t\t\t\t\tFile(3, file6.avi, 6 bytes)], 12 bytes) \t\t\t], \t\t\tfiles=[ \t\t\t\tFile(5, file1.txt, 6 bytes)], 36 bytes) \t], \tfiles=[], 36 bytes) )""" self.assertEqual(expected, str(tree)) def test_get_dir(self) -> None: """ Tests if we can retrieve a Directory instance. """ tree = TorrentFileTree.from_lt_file_storage(TORRENT_WITH_DIRS.files()) result = tree.find(Path("") / "torrent_create") self.assertIsInstance(result, TorrentFileTree.Directory) self.assertEqual(2, len(result.directories)) self.assertEqual(1, len(result.files)) self.assertTrue(result.collapsed) def test_get_file(self) -> None: """ Tests if we can retrieve a File instance. 
""" tree = TorrentFileTree.from_lt_file_storage(TORRENT_WITH_DIRS.files()) result = tree.find(Path("") / "torrent_create" / "def" / "file6.avi") self.assertIsInstance(result, TorrentFileTree.File) self.assertEqual(6, result.size) self.assertEqual("file6.avi", result.name) self.assertEqual(3, result.index) def test_get_none(self) -> None: """ Tests if we get a None result when getting a non-existent Path. """ tree = TorrentFileTree.from_lt_file_storage(TORRENT_WITH_DIRS.files()) result = tree.find(Path("") / "torrent_create" / "def" / "file6.txt") self.assertIsNone(result) def test_get_from_empty(self) -> None: """ Tests if we get a None result when getting from an empty folder. """ tree = TorrentFileTree(None) result = tree.find(Path("") / "file.txt") self.assertIsNone(result) def test_is_dir_dir(self) -> None: """ Tests if we correctly classify a Directory instance as a dir. """ tree = TorrentFileTree.from_lt_file_storage(TORRENT_WITH_DIRS.files()) result = tree.path_is_dir(Path("") / "torrent_create") self.assertTrue(result) def test_is_dir_file(self) -> None: """ Tests if we correctly classify a File to not be a dir. """ tree = TorrentFileTree.from_lt_file_storage(TORRENT_WITH_DIRS.files()) result = tree.path_is_dir(Path("") / "torrent_create" / "def" / "file6.avi") self.assertFalse(result) def test_is_dir_none(self) -> None: """ Tests if we correctly classify a non-existent Path to not be a dir. """ tree = TorrentFileTree.from_lt_file_storage(TORRENT_WITH_DIRS.files()) result = tree.path_is_dir(Path("") / "torrent_create" / "def" / "file6.txt") self.assertFalse(result) def test_find_next_dir_next_in_list(self) -> None: """ Test if we can get the full path of the next dir in a list of dirs. 
""" tree = TorrentFileTree.from_lt_file_storage(TORRENT_WITH_DIRS.files()) _, path = tree.find_next_directory(Path("") / "torrent_create" / "abc") self.assertEqual(Path("torrent_create") / "def", path) def test_find_next_dir_last_in_torrent(self) -> None: """ Test if we can get the directory after the final directory is None. """ tree = TorrentFileTree.from_lt_file_storage(TORRENT_WITH_DIRS.files()) result = tree.find_next_directory(Path("") / "torrent_create") self.assertIsNone(result) def test_find_next_dir_jump_to_files(self) -> None: """ Test if we can get the directory after reaching the final directory in a list of subdirectories. From torrent_create/abc/newdir we should jump up to torrent_create/abc. """ tree = TorrentFileTree.from_lt_file_storage(TORRENT_WITH_DIRS.files()) _, path = tree.find_next_directory(Path("") / "torrent_create" / "def") self.assertEqual(Path("torrent_create") / "file1.txt", path) def test_view_lbl_flat(self) -> None: """ Test if we can loop through a single-file torrent line-by-line. """ tree = TorrentFileTree.from_lt_file_storage(TORRENT_UBUNTU_FILE.files()) results = [] result = "" while result := tree.view(Path(result), 1): result, = result results.append(result) self.assertEqual(["ubuntu-15.04-desktop-amd64.iso"], results) def test_view_lbl_collapsed(self) -> None: """ Test if we can loop through a collapsed torrent line-by-line. """ tree = TorrentFileTree.from_lt_file_storage(TORRENT_WITH_DIRS.files()) results = [] result = "" while result := tree.view(Path(result), 1): result, = result results.append(result) self.assertEqual(["torrent_create"], results) def test_view_lbl_expanded(self) -> None: """ Test if we can loop through a expanded torrent line-by-line. 
""" tree = TorrentFileTree.from_lt_file_storage(TORRENT_WITH_DIRS.files()) tree.expand(Path("") / "torrent_create" / "abc") tree.expand(Path("") / "torrent_create" / "def") results = [] result = "" while result := tree.view(Path(result), 1): result, = result results.append(Path(result)) self.assertEqual([ Path("torrent_create"), Path("torrent_create") / "abc", Path("torrent_create") / "abc" / "file2.txt", Path("torrent_create") / "abc" / "file3.txt", Path("torrent_create") / "abc" / "file4.txt", Path("torrent_create") / "def", Path("torrent_create") / "def" / "file5.txt", Path("torrent_create") / "def" / "file6.avi", Path("torrent_create") / "file1.txt" ], results) def test_view_2_expanded(self) -> None: """ Test if we can loop through an expanded torrent with a view of two items. """ tree = TorrentFileTree.from_lt_file_storage(TORRENT_WITH_DIRS.files()) tree.expand(Path("") / "torrent_create" / "abc") tree.expand(Path("") / "torrent_create" / "def") results = [] result = [""] while result := tree.view(Path(result[-1]), 2): results.append([Path(r) for r in result]) self.assertEqual([ [Path("torrent_create"), Path("torrent_create") / "abc"], [Path("torrent_create") / "abc" / "file2.txt", Path("torrent_create") / "abc" / "file3.txt",], [Path("torrent_create") / "abc" / "file4.txt", Path("torrent_create") / "def"], [Path("torrent_create") / "def" / "file5.txt", Path("torrent_create") / "def" / "file6.avi"], [Path("torrent_create") / "file1.txt"] ], results) def test_view_full_expanded(self) -> None: """ Test if we can loop through a expanded torrent with a view the size of the tree. 
""" tree = TorrentFileTree.from_lt_file_storage(TORRENT_WITH_DIRS.files()) tree.expand(Path("") / "torrent_create" / "abc") tree.expand(Path("") / "torrent_create" / "def") result = tree.view(Path(""), 9) self.assertEqual([ Path("torrent_create"), Path("torrent_create") / "abc", Path("torrent_create") / "abc" / "file2.txt", Path("torrent_create") / "abc" / "file3.txt", Path("torrent_create") / "abc" / "file4.txt", Path("torrent_create") / "def", Path("torrent_create") / "def" / "file5.txt", Path("torrent_create") / "def" / "file6.avi", Path("torrent_create") / "file1.txt" ], [Path(r) for r in result]) def test_view_over_expanded(self) -> None: """ Test if we can loop through an expanded torrent with a view larger than the size of the tree. """ tree = TorrentFileTree.from_lt_file_storage(TORRENT_WITH_DIRS.files()) tree.expand(Path("") / "torrent_create" / "abc") tree.expand(Path("") / "torrent_create" / "def") result = tree.view(Path(""), 10) self.assertEqual([ Path("torrent_create"), Path("torrent_create") / "abc", Path("torrent_create") / "abc" / "file2.txt", Path("torrent_create") / "abc" / "file3.txt", Path("torrent_create") / "abc" / "file4.txt", Path("torrent_create") / "def", Path("torrent_create") / "def" / "file5.txt", Path("torrent_create") / "def" / "file6.avi", Path("torrent_create") / "file1.txt" ], [Path(r) for r in result]) def test_view_full_collapsed(self) -> None: """ Test if we can loop through a expanded directory with only collapsed dirs. """ tree = TorrentFileTree.from_lt_file_storage(TORRENT_WITH_DIRS.files()) tree.expand(Path("") / "torrent_create") result = tree.view(Path(""), 4) self.assertEqual([ Path("torrent_create"), Path("torrent_create") / "abc", Path("torrent_create") / "def", Path("torrent_create") / "file1.txt" ], [Path(r) for r in result]) def test_view_start_at_collapsed(self) -> None: """ Test if we can form a view starting at a collapsed directory. 
""" tree = TorrentFileTree.from_lt_file_storage(TORRENT_WITH_DIRS.files()) tree.expand(Path("torrent_create")) result = tree.view(Path("torrent_create") / "abc", 2) self.assertEqual([Path("torrent_create") / "def", Path("torrent_create") / "file1.txt"], [Path(r) for r in result]) def test_select_start_selected(self) -> None: """ Test if all files start selected. """ tree = TorrentFileTree.from_lt_file_storage(TORRENT_WITH_DIRS.files()) self.assertTrue(tree.find(Path("torrent_create") / "abc" / "file2.txt").selected) self.assertTrue(tree.find(Path("torrent_create") / "abc" / "file3.txt").selected) self.assertTrue(tree.find(Path("torrent_create") / "abc" / "file4.txt").selected) self.assertTrue(tree.find(Path("torrent_create") / "def" / "file5.txt").selected) self.assertTrue(tree.find(Path("torrent_create") / "def" / "file6.avi").selected) self.assertTrue(tree.find(Path("torrent_create") / "file1.txt").selected) def test_select_nonexistent(self) -> None: """ Test selecting a non-existent path. """ tree = TorrentFileTree.from_lt_file_storage(TORRENT_WITH_DIRS.files()) tree.set_selected(Path("."), False) tree.set_selected(Path("I don't exist"), True) self.assertFalse(tree.find(Path("torrent_create") / "abc" / "file2.txt").selected) self.assertFalse(tree.find(Path("torrent_create") / "abc" / "file3.txt").selected) self.assertFalse(tree.find(Path("torrent_create") / "abc" / "file4.txt").selected) self.assertFalse(tree.find(Path("torrent_create") / "def" / "file5.txt").selected) self.assertFalse(tree.find(Path("torrent_create") / "def" / "file6.avi").selected) self.assertFalse(tree.find(Path("torrent_create") / "file1.txt").selected) def test_select_file(self) -> None: """ Test selecting a path pointing to a single file. 
""" tree = TorrentFileTree.from_lt_file_storage(TORRENT_WITH_DIRS.files()) tree.set_selected(Path("."), False) tree.set_selected(Path("torrent_create") / "abc" / "file2.txt", True) self.assertTrue(tree.find(Path("torrent_create") / "abc" / "file2.txt").selected) self.assertFalse(tree.find(Path("torrent_create") / "abc" / "file3.txt").selected) self.assertFalse(tree.find(Path("torrent_create") / "abc" / "file4.txt").selected) self.assertFalse(tree.find(Path("torrent_create") / "def" / "file5.txt").selected) self.assertFalse(tree.find(Path("torrent_create") / "def" / "file6.avi").selected) self.assertFalse(tree.find(Path("torrent_create") / "file1.txt").selected) def test_select_flatdir(self) -> None: """ Test selecting a path pointing to a directory with no subdirectories, only files. """ tree = TorrentFileTree.from_lt_file_storage(TORRENT_WITH_DIRS.files()) tree.set_selected(Path("."), False) tree.set_selected(Path("torrent_create") / "abc", True) self.assertTrue(tree.find(Path("torrent_create") / "abc" / "file2.txt").selected) self.assertTrue(tree.find(Path("torrent_create") / "abc" / "file3.txt").selected) self.assertTrue(tree.find(Path("torrent_create") / "abc" / "file4.txt").selected) self.assertFalse(tree.find(Path("torrent_create") / "def" / "file5.txt").selected) self.assertFalse(tree.find(Path("torrent_create") / "def" / "file6.avi").selected) self.assertFalse(tree.find(Path("torrent_create") / "file1.txt").selected) def test_select_deepdir(self) -> None: """ Test selecting a path pointing to a directory with no bdirectories and files. 
""" tree = TorrentFileTree.from_lt_file_storage(TORRENT_WITH_DIRS.files()) tree.set_selected(Path("."), False) tree.set_selected(Path("torrent_create"), True) self.assertTrue(tree.find(Path("torrent_create") / "abc" / "file2.txt").selected) self.assertTrue(tree.find(Path("torrent_create") / "abc" / "file3.txt").selected) self.assertTrue(tree.find(Path("torrent_create") / "abc" / "file4.txt").selected) self.assertTrue(tree.find(Path("torrent_create") / "def" / "file5.txt").selected) self.assertTrue(tree.find(Path("torrent_create") / "def" / "file6.avi").selected) self.assertTrue(tree.find(Path("torrent_create") / "file1.txt").selected)
20,759
Python
.py
408
42.164216
106
0.622309
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,947
mocks.py
Tribler_tribler/src/tribler/test_unit/core/libtorrent/mocks.py
import libtorrent TORRENT_WITH_DIRS_CONTENT = ( b'd' b'7:comment16:Test description' b'10:created by27:Tribler version: 7.10.0-GIT' b'13:creation datei1634911081e' b'4:infod' b'5:filesl' b'd' b'6:lengthi6e' b'4:pathl' b'3:abc' b'9:file2.txt' b'e' b'e' b'd' b'6:lengthi6e' b'4:pathl' b'3:abc' b'9:file3.txt' b'e' b'e' b'd' b'6:lengthi6e' b'4:pathl' b'3:abc' b'9:file4.txt' b'e' b'e' b'd' b'6:lengthi6e' b'4:pathl' b'3:def' b'9:file6.avi' b'e' b'e' b'd' b'6:lengthi6e' b'4:pathl' b'3:def' b'9:file5.txt' b'e' b'e' b'd' b'6:lengthi6e' b'4:pathl' b'9:file1.txt' b'e' b'e' b'e' b'4:name14:torrent_create' b'12:piece lengthi16384e' b'6:pieces20:\xdd\xed}"\xe2\xabE\x04\xe5\x8e\xe0\xb3\x1a\xd4\xba\xfe\xc0\xce\xe1W' b'e' b'e' ) TORRENT_WITH_VIDEO = ( b'd' b'4:infod' b'6:lengthi10e' b'4:name13:somevideo.mp4' b'12:piece lengthi524288e' b'6:pieces10:\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01' b'e' b'e' ) TORRENT_WITH_DIRS = libtorrent.torrent_info(libtorrent.bdecode(TORRENT_WITH_DIRS_CONTENT)) """ Torrent structure: > [Directory] torrent_create > > [Directory] abc > > > [File] file2.txt (6 bytes) > > > [File] file3.txt (6 bytes) > > > [File] file4.txt (6 bytes) > > [Directory] def > > > [File] file5.txt (6 bytes) > > > [File] file6.avi (6 bytes) > > [File] file1.txt (6 bytes) """ TORRENT_UBUNTU_FILE_CONTENT = ( b'd' b'8:announce39:http://torrent.ubuntu.com:6969/announce' b'13:announce-listl' b'l39:http://torrent.ubuntu.com:6969/announce' b'e' b'l' b'44:http://ipv6.torrent.ubuntu.com:6969/announcee' b'e' b'7:comment29:Ubuntu CD releases.ubuntu.com' b'13:creation datei1429786237e' b'4:infod' b'6:lengthi1150844928e' b'4:name30:ubuntu-15.04-desktop-amd64.iso' b'12:piece lengthi524288e' b'6:pieces43920:' + (b'\x01' * 43920) + b'e' b'e' ) TORRENT_UBUNTU_FILE = libtorrent.torrent_info(libtorrent.bdecode(TORRENT_UBUNTU_FILE_CONTENT)) """ Torrent structure: > [File] ubuntu-15.04-desktop-amd64.iso (1150844928 bytes) """
3,100
Python
.py
103
17.660194
94
0.443924
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,948
test_uris.py
Tribler_tribler/src/tribler/test_unit/core/libtorrent/test_uris.py
from pathlib import PurePosixPath, PureWindowsPath from unittest.mock import AsyncMock, Mock, patch from ipv8.test.base import TestBase from multidict import istr import tribler from tribler.core.libtorrent.uris import unshorten, url_to_path class TestURIs(TestBase): """ Tests for the URI-related functionality. """ def test_url_to_path_unc(self) -> None: """ Test if UNC URLs can be converted to Paths. """ with patch("os.name", "nt"), patch.dict(tribler.core.libtorrent.uris.__dict__, {"Path": PureWindowsPath}): path = url_to_path("file://server/share/path") self.assertEqual(r"\\server\share\path", path) def test_url_to_path_win_path(self) -> None: """ Test if normal Windows file URLs can be converted to Paths. """ with patch("os.name", "nt"), patch.dict(tribler.core.libtorrent.uris.__dict__, {"Path": PureWindowsPath}): path = url_to_path("file:///C:/path/to/file") self.assertEqual(r"C:\path\to\file", path) def test_url_to_path_win_path_with_spaces(self) -> None: """ Test if Windows file URLs with spaces can be converted to Paths. """ with patch("os.name", "nt"), patch.dict(tribler.core.libtorrent.uris.__dict__, {"Path": PureWindowsPath}): path = url_to_path("file:///C:/path/to/file%20with%20space") self.assertEqual(r"C:\path\to\file with space", path) def test_url_to_path_win_path_with_special_chars(self) -> None: """ Test if Windows file URLs with special characters can be converted to Paths. """ with patch("os.name", "nt"), patch.dict(tribler.core.libtorrent.uris.__dict__, {"Path": PureWindowsPath}): path = url_to_path("file:///C:/%2520%2521file") self.assertEqual(r"C:\%20%21file", path) def test_url_to_path_posix_file(self) -> None: """ Test if normal POSIX file URLs can be converted to Paths. 
""" with patch("os.name", "posix"), patch.dict(tribler.core.libtorrent.uris.__dict__, {"Path": PurePosixPath}): path = url_to_path("file:///path/to/file") self.assertEqual("/path/to/file", path) def test_url_to_path_posix_file_with_space(self) -> None: """ Test if POSIX file URLs with spaces can be converted to Paths. """ with patch("os.name", "posix"), patch.dict(tribler.core.libtorrent.uris.__dict__, {"Path": PurePosixPath}): path = url_to_path("file:///path/to/file%20with%20space") self.assertEqual("/path/to/file with space", path) def test_url_to_path_posix_file_with_special_chars(self) -> None: """ Test if POSIX file URLs with special characters can be converted to Paths. """ with patch("os.name", "posix"), patch.dict(tribler.core.libtorrent.uris.__dict__, {"Path": PurePosixPath}): path = url_to_path("file:///path/to/%2520%2521file") self.assertEqual("/path/to/%20%21file", path) def test_url_to_path_posix_file_with_double_slash(self) -> None: """ Test if POSIX file URLs with double slashes can be converted to Paths. """ with patch("os.name", "posix"), patch.dict(tribler.core.libtorrent.uris.__dict__, {"Path": PurePosixPath}): path = url_to_path("file:////path/to/file") self.assertEqual("//path/to/file", path) def test_url_to_path_posix_file_absolute(self) -> None: """ Test if POSIX file URLs starting at root can be converted to Paths. """ with patch("os.name", "posix"), patch.dict(tribler.core.libtorrent.uris.__dict__, {"Path": PurePosixPath}): path = url_to_path("file:/path") self.assertEqual("/path", path) def test_url_to_path_posix_file_host(self) -> None: """ Test if POSIX file URLs starting at a host can be converted to Paths. """ with patch("os.name", "posix"), patch.dict(tribler.core.libtorrent.uris.__dict__, {"Path": PurePosixPath}): path = url_to_path("file://localhost/path") self.assertEqual("/path", path) async def test_unshorten_non_http_https(self) -> None: """ Test if URLs with non-HTTP(s) schemes are not followed. 
""" url = "udp://tracker.example.com/" unshortened = await unshorten(url) self.assertEqual(url, unshortened) async def test_unshorten_non_redirect(self) -> None: """ Test if following URLs that are not redirected are ignored. """ url = "http://tracker.example.com/" with patch.dict(tribler.core.libtorrent.uris.__dict__, {"ClientSession": Mock(return_value=Mock( __aexit__=AsyncMock(), __aenter__=AsyncMock(return_value=AsyncMock(get=AsyncMock(return_value=AsyncMock( __aexit__=AsyncMock(), __aenter__=AsyncMock(return_value=Mock(status=200, headers={istr("Location"): "test"})) )))) ))}): unshortened = await unshorten(url) self.assertEqual(url, unshortened) async def test_unshorten_redirect_no_location(self) -> None: """ Test if following URLs that are redirected but specify no location are ignored. """ url = "http://tracker.example.com/" with patch.dict(tribler.core.libtorrent.uris.__dict__, {"ClientSession": Mock(return_value=Mock( __aexit__=AsyncMock(), __aenter__=AsyncMock(return_value=AsyncMock(get=AsyncMock(return_value=AsyncMock( __aexit__=AsyncMock(), __aenter__=AsyncMock(return_value=Mock(status=301, headers={})) )))) ))}): unshortened = await unshorten(url) self.assertEqual(url, unshortened) async def test_unshorten_redirect(self) -> None: """ Test if following URLs that are properly redirected are followed. """ url = "http://tracker.example.com/" with patch.dict(tribler.core.libtorrent.uris.__dict__, {"ClientSession": Mock(return_value=Mock( __aexit__=AsyncMock(), __aenter__=AsyncMock(return_value=AsyncMock(get=AsyncMock(return_value=AsyncMock( __aexit__=AsyncMock(), __aenter__=AsyncMock(return_value=Mock(status=301, headers={istr("Location"): "test"})) )))) ))}): unshortened = await unshorten(url) self.assertEqual("test", unshortened)
6,604
Python
.py
129
41.356589
115
0.608945
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,949
test_torrents.py
Tribler_tribler/src/tribler/test_unit/core/libtorrent/test_torrents.py
from io import StringIO from pathlib import Path from unittest.mock import Mock import libtorrent from configobj import ConfigObj from ipv8.test.base import TestBase from tribler.core.libtorrent.download_manager.download import Download from tribler.core.libtorrent.download_manager.download_config import SPEC_CONTENT, DownloadConfig from tribler.core.libtorrent.download_manager.stream import Stream from tribler.core.libtorrent.torrentdef import TorrentDef, TorrentDefNoMetainfo from tribler.core.libtorrent.torrents import ( check_handle, check_vod, common_prefix, create_torrent_file, get_info_from_handle, require_handle, ) from tribler.test_unit.core.libtorrent.mocks import TORRENT_WITH_DIRS_CONTENT class TestTorrents(TestBase): """ Tests for the torrent-related functionality. """ def test_check_handle_default_missing_handle(self) -> None: """ Test if the default value is returned for missing handles. """ download = Download(TorrentDefNoMetainfo(b"\x01" * 20, b"name", None), None, checkpoint_disabled=True, config=DownloadConfig(ConfigObj(StringIO(SPEC_CONTENT)))) self.assertEqual("default", (check_handle("default")(Download.get_def)(download))) def test_check_handle_default_invalid_handle(self) -> None: """ Test if the default value is returned for invalid handles. """ download = Download(TorrentDefNoMetainfo(b"\x01" * 20, b"name", None), None, checkpoint_disabled=True, config=DownloadConfig(ConfigObj(StringIO(SPEC_CONTENT)))) download.handle = Mock(is_valid=Mock(return_value=False)) self.assertEqual("default", (check_handle("default")(Download.get_def)(download))) def test_check_handle_default_valid_handle(self) -> None: """ Test if the given method is called for valid handles. 
""" download = Download(TorrentDefNoMetainfo(b"\x01" * 20, b"name", None), None, checkpoint_disabled=True, config=DownloadConfig(ConfigObj(StringIO(SPEC_CONTENT)))) download.handle = Mock(is_valid=Mock(return_value=True)) self.assertEqual(download.tdef, (check_handle("default")(Download.get_def)(download))) async def test_require_handle_invalid_handle(self) -> None: """ Test if None is returned for invalid handles. """ download = Download(TorrentDefNoMetainfo(b"\x01" * 20, b"name", None), None, checkpoint_disabled=True, config=DownloadConfig(ConfigObj(StringIO(SPEC_CONTENT)))) download.handle = Mock(is_valid=Mock(return_value=False)) result = await require_handle(Download.get_def)(download) self.assertIsNone(result) async def test_require_handle_valid_handle(self) -> None: """ Test if the result of the given method is given for valid handles. """ download = Download(TorrentDefNoMetainfo(b"\x01" * 20, b"name", None), None, checkpoint_disabled=True, config=DownloadConfig(ConfigObj(StringIO(SPEC_CONTENT)))) download.handle = Mock(is_valid=Mock(return_value=True)) result = await require_handle(Download.get_def)(download) self.assertEqual(download.tdef, result) async def test_require_handle_ignore_runtime_errors(self) -> None: """ Test if runtime errors are ignored in functions. """ def callback(_: Download) -> None: """ Raise a RuntimeError. NOTE: THIS IS LOGGED, THE TEST IS NOT FAILING. """ raise RuntimeError download = Download(TorrentDefNoMetainfo(b"\x01" * 20, b"name", None), None, checkpoint_disabled=True, config=DownloadConfig(ConfigObj(StringIO(SPEC_CONTENT)))) download.handle = Mock(is_valid=Mock(return_value=True)) future = require_handle(callback)(download) await future self.assertIsNone(future.result()) async def test_require_handle_set_exception(self) -> None: """ Test if exceptions other than runtime errors are set on the future. """ def callback(_: Download) -> None: """ Raise a non-RuntimeError. NOTE: THIS IS LOGGED, THE TEST IS NOT FAILING. 
""" raise ValueError download = Download(TorrentDefNoMetainfo(b"\x01" * 20, b"name", None), None, checkpoint_disabled=True, config=DownloadConfig(ConfigObj(StringIO(SPEC_CONTENT)))) download.handle = Mock(is_valid=Mock(return_value=True)) future = require_handle(callback)(download) with self.assertRaises(ValueError): await future async def test_check_vod_disabled(self) -> None: """ Test if the default value is returned for disabled vod mode. """ stream = Stream(Download(TorrentDefNoMetainfo(b"\x01" * 20, b"name", None), None, checkpoint_disabled=True, config=DownloadConfig(ConfigObj(StringIO(SPEC_CONTENT))))) stream.close() result = check_vod("default")(Stream.enabled)(stream) self.assertEqual("default", result) async def test_check_vod_enabled(self) -> None: """ Test if the function result is returned for enabled vod mode. """ download = Download(TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT), None, checkpoint_disabled=True, config=DownloadConfig(ConfigObj(StringIO(SPEC_CONTENT)))) download.handle = Mock(is_valid=Mock(return_value=True), file_priorities=Mock(return_value=[0]), torrent_file=Mock(return_value=Mock(map_file=Mock(return_value=Mock(piece=0))))) stream = Stream(download) download.lt_status = Mock(state=3, paused=False, pieces=[]) await stream.enable() result = check_vod("default")(Stream.bytetopiece)(stream, 0) self.assertEqual(0, result) def test_common_prefix_single(self) -> None: """ Test if a single file can a prefix of nothing. """ self.assertEqual(Path("."), common_prefix([Path("hello.txt")])) def test_common_prefix_single_many(self) -> None: """ Test if a many single files have a prefix of nothing. """ self.assertEqual(Path("."), common_prefix([Path("hello.txt"), Path("file.txt"), Path("foo.txt")])) def test_common_prefix_single_in_folder(self) -> None: """ Test if a single file can a prefix of its parent directory. 
""" self.assertEqual(Path("folder") / "directory", common_prefix([Path("folder") / "directory" / "hello.txt"])) def test_common_prefix_single_many_in_folder(self) -> None: """ Test if many single files have a prefix of their parent directory. """ result = common_prefix([ Path("folder") / "directory" / "hello.txt", Path("folder") / "directory" / "subdirectory" / "hello.txt", Path("folder") / "directory" / "file.txt" ]) self.assertEqual(Path("folder") / "directory", result) def test_get_info_from_handle_no_attribute(self) -> None: """ Test if a handle has no torrent info attribute that None is returned. """ self.assertIsNone(get_info_from_handle(object())) def test_get_info_from_handle_runtime_error(self) -> None: """ Test if fetching a handle raise a RuntimeError that None is returned. """ self.assertIsNone(get_info_from_handle(Mock(torrent_file=Mock(side_effect=RuntimeError("test"))))) def test_get_info_from_handle_legacy(self) -> None: """ Test if fetching a handle raise a RuntimeError that None is returned. """ torrent_file = "I am a torrent file" self.assertEqual(torrent_file, get_info_from_handle(Mock(torrent_file=Mock(return_value=torrent_file)))) def test_create_torrent_file_defaults(self) -> None: """ Test if torrents can be created from an existing file without any parameters. """ result = create_torrent_file([Path(__file__).absolute()], {}) self.assertTrue(result["success"]) self.assertIsNone(result["torrent_file_path"]) self.assertEqual(b"test_torrents.py", libtorrent.bdecode(result["metainfo"])[b"info"][b"name"]) def test_create_torrent_file_with_piece_length(self) -> None: """ Test if torrents can be created with a specified piece length. """ result = create_torrent_file([Path(__file__).absolute()], {b"piece length": 16}) self.assertEqual(16, libtorrent.bdecode(result["metainfo"])[b"info"][b"piece length"]) def test_create_torrent_file_with_comment(self) -> None: """ Test if torrents can be created with a specified comment. 
""" result = create_torrent_file([Path(__file__).absolute()], {b"comment": b"test"}) self.assertEqual(b"test", libtorrent.bdecode(result["metainfo"])[b"comment"]) def test_create_torrent_file_with_created_by(self) -> None: """ Test if torrents can be created with a specified created by field. """ result = create_torrent_file([Path(__file__).absolute()], {b"created by": b"test"}) self.assertEqual(b"test", libtorrent.bdecode(result["metainfo"])[b"created by"]) def test_create_torrent_file_with_announce(self) -> None: """ Test if torrents can be created with a specified announce field. """ result = create_torrent_file([Path(__file__).absolute()], {b"announce": b"http://127.0.0.1/announce"}) self.assertEqual(b"http://127.0.0.1/announce", libtorrent.bdecode(result["metainfo"])[b"announce"]) def test_create_torrent_file_with_announce_list(self) -> None: """ Test if torrents can be created with a specified announce list. Note that the announce list becomes a list of lists after creating the torrent. """ tracker_list = [[b"http://127.0.0.1/announce"], [b"http://10.0.0.2/announce"]] result = create_torrent_file([Path(__file__).absolute()], {b"announce-list": tracker_list}) self.assertEqual(tracker_list, libtorrent.bdecode(result["metainfo"])[b"announce-list"]) def test_create_torrent_file_with_nodes(self) -> None: """ Test if torrents can be created with a specified node list. """ node_list = [[b"127.0.0.1", 80], [b"10.0.0.2", 22]] result = create_torrent_file([Path(__file__).absolute()], {b"nodes": node_list}) self.assertEqual(node_list, libtorrent.bdecode(result["metainfo"])[b"nodes"]) def test_create_torrent_file_with_http_seed(self) -> None: """ Test if torrents can be created with a specified http seed. 
""" result = create_torrent_file([Path(__file__).absolute()], {b"httpseeds": b"http://127.0.0.1/file"}) self.assertEqual(b"http://127.0.0.1/file", libtorrent.bdecode(result["metainfo"])[b"httpseeds"]) def test_create_torrent_file_with_url_list(self) -> None: """ Test if torrents can be created with a specified url list. """ result = create_torrent_file([Path(__file__).absolute()], {b"urllist": b"http://127.0.0.1/file"}) self.assertEqual(b"http://127.0.0.1/file", libtorrent.bdecode(result["metainfo"])[b"url-list"])
11,581
Python
.py
216
44.314815
115
0.643615
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,950
test_torrentinfo_endpoint.py
Tribler_tribler/src/tribler/test_unit/core/libtorrent/restapi/test_torrentinfo_endpoint.py
import json from asyncio import TimeoutError from binascii import unhexlify from ssl import SSLError from unittest.mock import AsyncMock, Mock, patch from aiohttp import ClientConnectorCertificateError, ClientConnectorError, ClientResponseError, ServerConnectionError from ipv8.test.base import TestBase from ipv8.util import succeed import tribler from tribler.core.libtorrent.download_manager.download_manager import MetainfoLookup from tribler.core.libtorrent.restapi.torrentinfo_endpoint import TorrentInfoEndpoint, recursive_unicode from tribler.core.libtorrent.torrentdef import TorrentDef, TorrentDefNoMetainfo from tribler.core.restapi.rest_endpoint import HTTP_BAD_REQUEST, HTTP_INTERNAL_SERVER_ERROR from tribler.test_unit.base_restapi import MockRequest, response_to_json from tribler.test_unit.core.libtorrent.mocks import TORRENT_WITH_DIRS_CONTENT class GetTorrentInfoRequest(MockRequest): """ A MockRequest that mimics GetTorrentInfoRequests. """ def __init__(self, query: dict) -> None: """ Create a new GetTorrentInfoRequest. """ super().__init__(query, "GET", "/torrentinfo") async def mock_unshorten(uri: str) -> str: """ Don't following links. """ return uri class TestTorrentInfoEndpoint(TestBase): """ Tests for the TorrentInfoEndpoint class. """ def setUp(self) -> None: """ Create a mocked DownloadManager and a TorrentInfoEndpoint. """ super().setUp() self.download_manager = Mock() self.endpoint = TorrentInfoEndpoint(self.download_manager) async def test_get_torrent_info_bad_hops(self) -> None: """ Test if a graceful error is returned when the supplied number of hops is an int value. 
""" response = await self.endpoint.get_torrent_info(GetTorrentInfoRequest({"hops": "foo", "uri": "file://"})) response_body_json = await response_to_json(response) self.assertEqual(HTTP_BAD_REQUEST, response.status) self.assertEqual("wrong value of 'hops' parameter: foo", response_body_json["error"]) async def test_get_torrent_info_bad_scheme(self) -> None: """ Test if a graceful error is returned when the supplied the URI scheme is unknown. """ response = await self.endpoint.get_torrent_info(GetTorrentInfoRequest({"hops": 0, "uri": "xxx://"})) response_body_json = await response_to_json(response) self.assertEqual(HTTP_BAD_REQUEST, response.status) self.assertEqual("invalid uri", response_body_json["error"]) async def test_get_torrent_info_no_metainfo(self) -> None: """ Test if a graceful error is returned when no metainfo is available for a file. """ tdef = TorrentDefNoMetainfo(b"\x01" * 20, b"test") with patch("tribler.core.libtorrent.torrentdef.TorrentDef.load", AsyncMock(return_value=tdef)): response = await self.endpoint.get_torrent_info(GetTorrentInfoRequest({"hops": 0, "uri": "file://"})) response_body_json = await response_to_json(response) self.assertEqual(HTTP_INTERNAL_SERVER_ERROR, response.status) self.assertEqual("metainfo error", response_body_json["error"]) async def test_get_torrent_info_file_filenotfounderror(self) -> None: """ Test if a graceful error is returned when a FileNotFoundError occurs when loading a torrent. 
""" with patch("tribler.core.libtorrent.torrentdef.TorrentDef.load", AsyncMock(side_effect=FileNotFoundError)): response = await self.endpoint.get_torrent_info(GetTorrentInfoRequest({"hops": 0, "uri": "file://"})) response_body_json = await response_to_json(response) self.assertEqual(HTTP_INTERNAL_SERVER_ERROR, response.status) self.assertEqual("error while decoding torrent file: .", response_body_json["error"]) async def test_get_torrent_info_file_typeerror(self) -> None: """ Test if a graceful error is returned when a TypeError occurs when loading a torrent. """ with patch("tribler.core.libtorrent.torrentdef.TorrentDef.load", AsyncMock(side_effect=TypeError)): response = await self.endpoint.get_torrent_info(GetTorrentInfoRequest({"hops": 0, "uri": "file://"})) response_body_json = await response_to_json(response) self.assertEqual(HTTP_INTERNAL_SERVER_ERROR, response.status) self.assertEqual("error while decoding torrent file: .", response_body_json["error"]) async def test_get_torrent_info_file_valueerror(self) -> None: """ Test if a graceful error is returned when a ValueError occurs when loading a torrent. """ with patch("tribler.core.libtorrent.torrentdef.TorrentDef.load", AsyncMock(side_effect=ValueError)): response = await self.endpoint.get_torrent_info(GetTorrentInfoRequest({"hops": 0, "uri": "file://"})) response_body_json = await response_to_json(response) self.assertEqual(HTTP_INTERNAL_SERVER_ERROR, response.status) self.assertEqual("error while decoding torrent file: .", response_body_json["error"]) async def test_get_torrent_info_file_runtimeerror(self) -> None: """ Test if a graceful error is returned when a RuntimeError occurs when loading a torrent. 
""" with patch("tribler.core.libtorrent.torrentdef.TorrentDef.load", AsyncMock(side_effect=RuntimeError)): response = await self.endpoint.get_torrent_info(GetTorrentInfoRequest({"hops": 0, "uri": "file://"})) response_body_json = await response_to_json(response) self.assertEqual(HTTP_INTERNAL_SERVER_ERROR, response.status) self.assertEqual("error while decoding torrent file: .", response_body_json["error"]) async def test_get_torrent_info_magnet_runtimeerror_compat(self) -> None: """ Test if a graceful error is returned when a RuntimeError occurs when loading a magnet in compatibility mode. """ with patch.dict(tribler.core.libtorrent.restapi.torrentinfo_endpoint.__dict__, {"lt": Mock(parse_magnet_uri=Mock(side_effect=RuntimeError))}): response = await self.endpoint.get_torrent_info(GetTorrentInfoRequest({"hops": 0, "uri": "magnet://"})) response_body_json = await response_to_json(response) self.assertEqual(HTTP_BAD_REQUEST, response.status) self.assertEqual("Error while getting an infohash from magnet: RuntimeError: ", response_body_json["error"]) async def test_get_torrent_info_magnet_runtimeerror_modern(self) -> None: """ Test if a graceful error is returned when a RuntimeError occurs when loading a magnet. """ with patch.dict(tribler.core.libtorrent.restapi.torrentinfo_endpoint.__dict__, {"lt": Mock(parse_magnet_uri=type("test", (object, ), { "info_hash": property(Mock(side_effect=RuntimeError)), "__init__": lambda _, __: None }))}): response = await self.endpoint.get_torrent_info(GetTorrentInfoRequest({"hops": 0, "uri": "magnet://"})) response_body_json = await response_to_json(response) self.assertEqual(HTTP_BAD_REQUEST, response.status) self.assertEqual("Error while getting an infohash from magnet: RuntimeError: ", response_body_json["error"]) async def test_get_torrent_info_magnet_no_metainfo(self) -> None: """ Test if a graceful error is returned when no metainfo is available for a magnet. 
""" self.download_manager.get_metainfo = AsyncMock(return_value=None) with patch.dict(tribler.core.libtorrent.restapi.torrentinfo_endpoint.__dict__, {"lt": Mock(parse_magnet_uri=Mock(return_value={"info_hash": b"\x01" * 20}))}): response = await self.endpoint.get_torrent_info(GetTorrentInfoRequest({"hops": 0, "uri": "magnet://"})) response_body_json = await response_to_json(response) self.assertEqual(HTTP_INTERNAL_SERVER_ERROR, response.status) self.assertEqual("metainfo error", response_body_json["error"]) self.assertEqual(b"\x01" * 20, self.download_manager.get_metainfo.call_args.args[0]) self.assertEqual(60, self.download_manager.get_metainfo.call_args.kwargs["timeout"]) self.assertEqual(0, self.download_manager.get_metainfo.call_args.kwargs["hops"]) self.assertEqual("magnet://", self.download_manager.get_metainfo.call_args.kwargs["url"]) async def test_get_torrent_info_http_serverconnectionerror(self) -> None: """ Test if a graceful error is returned when a ServerConnectionError occurs when loading from an HTTP link. """ with patch.dict(tribler.core.libtorrent.restapi.torrentinfo_endpoint.__dict__, {"unshorten": mock_unshorten}), \ patch("tribler.core.libtorrent.restapi.torrentinfo_endpoint.query_uri", AsyncMock(side_effect=ServerConnectionError("test"))): response = await self.endpoint.get_torrent_info(GetTorrentInfoRequest({"hops": 0, "uri": "http://127.0.0.1/file"})) response_body_json = await response_to_json(response) self.assertEqual(HTTP_INTERNAL_SERVER_ERROR, response.status) self.assertEqual("test", response_body_json["error"]) async def test_get_torrent_info_http_clientresponseerror(self) -> None: """ Test if a graceful error is returned when a ClientResponseError occurs when loading from an HTTP link. 
""" with patch.dict(tribler.core.libtorrent.restapi.torrentinfo_endpoint.__dict__, {"unshorten": mock_unshorten}), \ patch("tribler.core.libtorrent.restapi.torrentinfo_endpoint.query_uri", AsyncMock(side_effect=ClientResponseError(Mock(real_url="test"), ()))): response = await self.endpoint.get_torrent_info(GetTorrentInfoRequest({"hops": 0, "uri": "http://127.0.0.1/file"})) response_body_json = await response_to_json(response) self.assertEqual(HTTP_INTERNAL_SERVER_ERROR, response.status) self.assertEqual("0, message='', url='test'", response_body_json["error"]) async def test_get_torrent_info_http_sslerror(self) -> None: """ Test if a graceful error is returned when a SSLError occurs when loading from an HTTP link. """ with patch.dict(tribler.core.libtorrent.restapi.torrentinfo_endpoint.__dict__, {"unshorten": mock_unshorten}), \ patch("tribler.core.libtorrent.restapi.torrentinfo_endpoint.query_uri", AsyncMock(side_effect=SSLError("test"))): response = await self.endpoint.get_torrent_info(GetTorrentInfoRequest({"hops": 0, "uri": "http://127.0.0.1/file"})) response_body_json = await response_to_json(response) self.assertEqual(HTTP_INTERNAL_SERVER_ERROR, response.status) self.assertEqual("('test',)", response_body_json["error"]) async def test_get_torrent_info_http_clientconnectorerror(self) -> None: """ Test if a graceful error is returned when a ClientConnectorError occurs when loading from an HTTP link. 
""" with patch.dict(tribler.core.libtorrent.restapi.torrentinfo_endpoint.__dict__, {"unshorten": mock_unshorten}), \ patch("tribler.core.libtorrent.restapi.torrentinfo_endpoint.query_uri", AsyncMock(side_effect=ClientConnectorError(Mock(ssl="default", host="test", port=42), OSError("test")))): response = await self.endpoint.get_torrent_info(GetTorrentInfoRequest({"hops": 0, "uri": "http://127.0.0.1/file"})) response_body_json = await response_to_json(response) self.assertEqual(HTTP_INTERNAL_SERVER_ERROR, response.status) self.assertEqual("Cannot connect to host test:42 ssl:default [None]", response_body_json["error"]) async def test_get_torrent_info_http_timeouterror(self) -> None: """ Test if a graceful error is returned when a TimeoutError occurs when loading from an HTTP link. """ with patch.dict(tribler.core.libtorrent.restapi.torrentinfo_endpoint.__dict__, {"unshorten": mock_unshorten}), \ patch("tribler.core.libtorrent.restapi.torrentinfo_endpoint.query_uri", AsyncMock(side_effect=TimeoutError("test"))): response = await self.endpoint.get_torrent_info(GetTorrentInfoRequest({"hops": 0, "uri": "http://127.0.0.1/file"})) response_body_json = await response_to_json(response) self.assertEqual(HTTP_INTERNAL_SERVER_ERROR, response.status) self.assertEqual("test", response_body_json["error"]) async def test_get_torrent_info_http_valueerror(self) -> None: """ Test if a graceful error is returned when a ValueError occurs when loading from an HTTP link. 
""" with patch.dict(tribler.core.libtorrent.restapi.torrentinfo_endpoint.__dict__, {"unshorten": mock_unshorten}), \ patch("tribler.core.libtorrent.restapi.torrentinfo_endpoint.query_uri", AsyncMock(side_effect=ValueError("test"))): response = await self.endpoint.get_torrent_info(GetTorrentInfoRequest({"hops": 0, "uri": "http://127.0.0.1/file"})) response_body_json = await response_to_json(response) self.assertEqual(HTTP_INTERNAL_SERVER_ERROR, response.status) self.assertEqual("test", response_body_json["error"]) async def test_get_torrent_info_https_serverconnectionerror(self) -> None: """ Test if a graceful error is returned when a ServerConnectionError occurs when loading from an HTTPS link. """ with patch.dict(tribler.core.libtorrent.restapi.torrentinfo_endpoint.__dict__, {"unshorten": mock_unshorten}), \ patch("tribler.core.libtorrent.restapi.torrentinfo_endpoint.query_uri", AsyncMock(side_effect=ServerConnectionError("test"))): response = await self.endpoint.get_torrent_info(GetTorrentInfoRequest({"hops": 0, "uri": "https://127.0.0.1/file"})) response_body_json = await response_to_json(response) self.assertEqual(HTTP_INTERNAL_SERVER_ERROR, response.status) self.assertEqual("test", response_body_json["error"]) async def test_get_torrent_info_https_clientresponseerror(self) -> None: """ Test if a graceful error is returned when a ClientResponseError occurs when loading from an HTTPS link. 
""" with patch.dict(tribler.core.libtorrent.restapi.torrentinfo_endpoint.__dict__, {"unshorten": mock_unshorten}), \ patch("tribler.core.libtorrent.restapi.torrentinfo_endpoint.query_uri", AsyncMock(side_effect=ClientResponseError(Mock(real_url="test"), ()))): response = await self.endpoint.get_torrent_info(GetTorrentInfoRequest({"hops": 0, "uri": "https://127.0.0.1/file"})) response_body_json = await response_to_json(response) self.assertEqual(HTTP_INTERNAL_SERVER_ERROR, response.status) self.assertEqual("0, message='', url='test'", response_body_json["error"]) async def test_get_torrent_info_https_sslerror(self) -> None: """ Test if a graceful error is returned when a SSLError occurs when loading from an HTTPS link. """ with patch.dict(tribler.core.libtorrent.restapi.torrentinfo_endpoint.__dict__, {"unshorten": mock_unshorten}), \ patch("tribler.core.libtorrent.restapi.torrentinfo_endpoint.query_uri", AsyncMock(side_effect=SSLError("test"))): response = await self.endpoint.get_torrent_info(GetTorrentInfoRequest({"hops": 0, "uri": "https://127.0.0.1/file"})) response_body_json = await response_to_json(response) self.assertEqual(HTTP_INTERNAL_SERVER_ERROR, response.status) self.assertEqual("('test',)", response_body_json["error"]) async def test_get_torrent_info_https_clientconnectorerror(self) -> None: """ Test if a graceful error is returned when a ClientConnectorError occurs when loading from an HTTPS link. 
""" with patch.dict(tribler.core.libtorrent.restapi.torrentinfo_endpoint.__dict__, {"unshorten": mock_unshorten}), \ patch("tribler.core.libtorrent.restapi.torrentinfo_endpoint.query_uri", AsyncMock(side_effect=ClientConnectorError(Mock(ssl="default", host="test", port=42), OSError("test")))): response = await self.endpoint.get_torrent_info(GetTorrentInfoRequest({"hops": 0, "uri": "https://127.0.0.1/file"})) response_body_json = await response_to_json(response) self.assertEqual(HTTP_INTERNAL_SERVER_ERROR, response.status) self.assertEqual("Cannot connect to host test:42 ssl:default [None]", response_body_json["error"]) async def test_get_torrent_info_https_timeouterror(self) -> None: """ Test if a graceful error is returned when a TimeoutError occurs when loading from an HTTPS link. """ with patch.dict(tribler.core.libtorrent.restapi.torrentinfo_endpoint.__dict__, {"unshorten": mock_unshorten}), \ patch("tribler.core.libtorrent.restapi.torrentinfo_endpoint.query_uri", AsyncMock(side_effect=TimeoutError("test"))): response = await self.endpoint.get_torrent_info(GetTorrentInfoRequest({"hops": 0, "uri": "https://127.0.0.1/file"})) response_body_json = await response_to_json(response) self.assertEqual(HTTP_INTERNAL_SERVER_ERROR, response.status) self.assertEqual("test", response_body_json["error"]) async def test_get_torrent_info_https_valueerror(self) -> None: """ Test if a graceful error is returned when a ValueError occurs when loading from an HTTPS link. 
""" with patch.dict(tribler.core.libtorrent.restapi.torrentinfo_endpoint.__dict__, {"unshorten": mock_unshorten}), \ patch("tribler.core.libtorrent.restapi.torrentinfo_endpoint.query_uri", AsyncMock(side_effect=ValueError("test"))): response = await self.endpoint.get_torrent_info(GetTorrentInfoRequest({"hops": 0, "uri": "https://127.0.0.1/file"})) response_body_json = await response_to_json(response) self.assertEqual(HTTP_INTERNAL_SERVER_ERROR, response.status) self.assertEqual("test", response_body_json["error"]) async def test_get_torrent_info_https_certificate_error(self) -> None: """ Test if it is returned that an invalid certificate for an HTTPS request was used. """ async def server_response(_: str, valid_cert: bool = True) -> bytes: """ Pretend that the request causes a certificate error in strict "valid_cert" mode. """ if valid_cert: raise ClientConnectorCertificateError(None, RuntimeError("Invalid certificate test error!")) return await succeed(b"d4:infod6:lengthi0eee") with patch.dict(tribler.core.libtorrent.restapi.torrentinfo_endpoint.__dict__, {"unshorten": mock_unshorten}), \ patch("tribler.core.libtorrent.restapi.torrentinfo_endpoint.query_uri", server_response): response = await self.endpoint.get_torrent_info(GetTorrentInfoRequest({"hops": 0, "uri": "https://127.0.0.1/file"})) response_body_json = await response_to_json(response) self.assertEqual(200, response.status) self.assertFalse(response_body_json["valid_certificate"]) async def test_get_torrent_info_http_no_metainfo(self) -> None: """ Test if a graceful error is returned when no metainfo is available for a http link. 
""" self.download_manager.get_metainfo = AsyncMock(return_value=None) with patch.dict(tribler.core.libtorrent.restapi.torrentinfo_endpoint.__dict__, {"unshorten": mock_unshorten}), \ patch("tribler.core.libtorrent.restapi.torrentinfo_endpoint.query_uri", AsyncMock(return_value=b"de")): response = await self.endpoint.get_torrent_info(GetTorrentInfoRequest({"hops": 0, "uri": "http://127.0.0.1/file"})) response_body_json = await response_to_json(response) self.assertEqual(HTTP_INTERNAL_SERVER_ERROR, response.status) self.assertEqual("metainfo error", response_body_json["error"]) async def test_get_torrent_info_https_no_metainfo(self) -> None: """ Test if a graceful error is returned when no metainfo is available for a https link. """ self.download_manager.get_metainfo = AsyncMock(return_value=None) with patch.dict(tribler.core.libtorrent.restapi.torrentinfo_endpoint.__dict__, {"unshorten": mock_unshorten}), \ patch("tribler.core.libtorrent.restapi.torrentinfo_endpoint.query_uri", AsyncMock(return_value=b"de")): response = await self.endpoint.get_torrent_info(GetTorrentInfoRequest({"hops": 0, "uri": "https://127.0.0.1/file"})) response_body_json = await response_to_json(response) self.assertEqual(HTTP_INTERNAL_SERVER_ERROR, response.status) self.assertEqual("metainfo error", response_body_json["error"]) async def test_get_torrent_info_http_redirect_magnet_no_metainfo(self) -> None: """ Test if a graceful error is returned when no metainfo is available for a magnet returned by a HTTP link. 
""" self.download_manager.get_metainfo = AsyncMock(return_value=None) with patch.dict(tribler.core.libtorrent.restapi.torrentinfo_endpoint.__dict__, {"unshorten": mock_unshorten}), \ patch("tribler.core.libtorrent.restapi.torrentinfo_endpoint.query_uri", AsyncMock(return_value=b"magnet://")), \ patch.dict(tribler.core.libtorrent.restapi.torrentinfo_endpoint.__dict__, {"lt": Mock(parse_magnet_uri=Mock(return_value={"info_hash": b"\x01" * 20}))}): response = await self.endpoint.get_torrent_info(GetTorrentInfoRequest({"hops": 0, "uri": "http://127.0.0.1/file"})) response_body_json = await response_to_json(response) self.assertEqual(HTTP_INTERNAL_SERVER_ERROR, response.status) self.assertEqual("metainfo error", response_body_json["error"]) self.assertEqual(b"\x01" * 20, self.download_manager.get_metainfo.call_args.args[0]) self.assertEqual(60.0, self.download_manager.get_metainfo.call_args.kwargs["timeout"]) self.assertEqual(0, self.download_manager.get_metainfo.call_args.kwargs["hops"]) self.assertEqual("magnet://", self.download_manager.get_metainfo.call_args.kwargs["url"]) async def test_get_torrent_info_https_redirect_magnet_no_metainfo(self) -> None: """ Test if a graceful error is returned when no metainfo is available for a magnet returned by a HTTPS link. 
""" self.download_manager.get_metainfo = AsyncMock(return_value=None) with patch.dict(tribler.core.libtorrent.restapi.torrentinfo_endpoint.__dict__, {"unshorten": mock_unshorten}), \ patch("tribler.core.libtorrent.restapi.torrentinfo_endpoint.query_uri", AsyncMock(return_value=b"magnet://")), \ patch.dict(tribler.core.libtorrent.restapi.torrentinfo_endpoint.__dict__, {"lt": Mock(parse_magnet_uri=Mock(return_value={"info_hash": b"\x01" * 20}))}): response = await self.endpoint.get_torrent_info(GetTorrentInfoRequest({"hops": 0, "uri": "https://127.0.0.1/file"})) response_body_json = await response_to_json(response) self.assertEqual(HTTP_INTERNAL_SERVER_ERROR, response.status) self.assertEqual("metainfo error", response_body_json["error"]) self.assertEqual(b"\x01" * 20, self.download_manager.get_metainfo.call_args.args[0]) self.assertEqual(60.0, self.download_manager.get_metainfo.call_args.kwargs["timeout"]) self.assertEqual(0, self.download_manager.get_metainfo.call_args.kwargs["hops"]) self.assertEqual("magnet://", self.download_manager.get_metainfo.call_args.kwargs["url"]) async def test_get_torrent_info_invalid_response_empty_metainfo(self) -> None: """ Test if a graceful error is returned when the metainfo is empty for a file. """ tdef = TorrentDef().load_from_memory(TORRENT_WITH_DIRS_CONTENT) tdef.metainfo = 3 with patch("tribler.core.libtorrent.torrentdef.TorrentDef.load", AsyncMock(return_value=tdef)): response = await self.endpoint.get_torrent_info(GetTorrentInfoRequest({"hops": 0, "uri": "file://"})) response_body_json = await response_to_json(response) self.assertEqual(HTTP_INTERNAL_SERVER_ERROR, response.status) self.assertEqual("invalid response", response_body_json["error"]) async def test_get_torrent_info_invalid_response_empty_info(self) -> None: """ Test if a graceful error is returned when the metainfo is empty for a file. 
""" tdef = TorrentDef().load_from_memory(TORRENT_WITH_DIRS_CONTENT) tdef.metainfo.pop(b"info") with patch("tribler.core.libtorrent.torrentdef.TorrentDef.load", AsyncMock(return_value=tdef)): response = await self.endpoint.get_torrent_info(GetTorrentInfoRequest({"hops": 0, "uri": "file://"})) response_body_json = await response_to_json(response) self.assertEqual(HTTP_INTERNAL_SERVER_ERROR, response.status) self.assertEqual("invalid response", response_body_json["error"]) async def test_get_torrent_info_valid_download(self) -> None: """ Test if a valid download has its info returned correctly. """ tdef = TorrentDef().load_from_memory(TORRENT_WITH_DIRS_CONTENT) self.download_manager.metainfo_requests = {} with patch("tribler.core.libtorrent.torrentdef.TorrentDef.load", AsyncMock(return_value=tdef)): response = await self.endpoint.get_torrent_info(GetTorrentInfoRequest({"hops": 0, "uri": "file://"})) response_body_json = await response_to_json(response) response_info_json = json.loads(unhexlify(response_body_json["metainfo"])) self.assertEqual(200, response.status) self.assertTrue(response_body_json["download_exists"]) self.assertEqual(tdef.metainfo[b"comment"].decode(), response_info_json["comment"]) self.assertEqual(tdef.metainfo[b"created by"].decode(), response_info_json["created by"]) self.assertEqual(tdef.metainfo[b"creation date"], response_info_json["creation date"]) self.assertEqual(tdef.metainfo[b"info"][b"name"].decode(), response_info_json["info"]["name"]) self.assertEqual(tdef.metainfo[b"info"][b"piece length"], response_info_json["info"]["piece length"]) async def test_get_torrent_info_valid_metainfo_request(self) -> None: """ Test if a valid metainfo request has its info returned correctly. 
""" tdef = TorrentDef().load_from_memory(TORRENT_WITH_DIRS_CONTENT) download = self.download_manager.downloads.get(tdef.infohash) self.download_manager.metainfo_requests = {tdef.infohash: MetainfoLookup(download, 1)} with patch("tribler.core.libtorrent.torrentdef.TorrentDef.load", AsyncMock(return_value=tdef)): response = await self.endpoint.get_torrent_info(GetTorrentInfoRequest({"hops": 0, "uri": "file://"})) response_body_json = await response_to_json(response) response_info_json = json.loads(unhexlify(response_body_json["metainfo"])) self.assertEqual(200, response.status) self.assertFalse(response_body_json["download_exists"]) self.assertEqual(tdef.metainfo[b"comment"].decode(), response_info_json["comment"]) self.assertEqual(tdef.metainfo[b"created by"].decode(), response_info_json["created by"]) self.assertEqual(tdef.metainfo[b"creation date"], response_info_json["creation date"]) self.assertEqual(tdef.metainfo[b"info"][b"name"].decode(), response_info_json["info"]["name"]) self.assertEqual(tdef.metainfo[b"info"][b"piece length"], response_info_json["info"]["piece length"]) def test_recursive_unicode_empty(self) -> None: """ Test if empty items can be recursively converted to unicode. """ self.assertEqual({}, recursive_unicode({})) self.assertEqual([], recursive_unicode([])) self.assertEqual("", recursive_unicode(b"")) self.assertEqual("", recursive_unicode("")) self.assertIsNone(recursive_unicode(None)) def test_recursive_unicode_unicode_decode_error(self) -> None: """ Test if recursive unicode raises an exception on invalid bytes. """ with self.assertRaises(UnicodeDecodeError): recursive_unicode(b'\x80') def test_recursive_unicode_unicode_decode_error_ignore_errors(self) -> None: """ Test if recursive unicode ignores errors on invalid bytes and returns the converted bytes by using chr(). 
""" self.assertEqual("\x80", recursive_unicode(b'\x80', ignore_errors=True)) def test_recursive_unicode_complex_object(self) -> None: """ Test if a complex object can be recursively converted to unicode. """ obj = {"list": [b"binary",{}], "sub dict": {"sub list": [1, b"binary",{"": b""}]}} expected = {"list": ["binary", {}], "sub dict": {"sub list": [1, "binary", {"": ""}]}} self.assertEqual(expected, recursive_unicode(obj))
31,742
Python
.py
468
54.423077
120
0.635889
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,951
test_libtorrent_endpoint.py
Tribler_tribler/src/tribler/test_unit/core/libtorrent/restapi/test_libtorrent_endpoint.py
from asyncio import Future, ensure_future, sleep from binascii import hexlify from unittest.mock import Mock from ipv8.test.base import TestBase from tribler.core.libtorrent.restapi.libtorrent_endpoint import LibTorrentEndpoint from tribler.test_unit.base_restapi import MockRequest, response_to_json class GetLibtorrentSettingsRequest(MockRequest): """ A MockRequest that mimics GetLibtorrentSettingsRequests. """ def __init__(self, query: dict) -> None: """ Create a new GetLibtorrentSettingsRequest. """ super().__init__(query, "GET", "/libtorrent/settings") class GetLibtorrentSessionInfoRequest(MockRequest): """ A MockRequest that mimics GetLibtorrentSessionInfoRequests. """ def __init__(self, query: dict) -> None: """ Create a new GetLibtorrentSessionInfoRequest. """ super().__init__(query, "GET", "/libtorrent/session") class TestLibTorrentEndpoint(TestBase): """ Tests for the LibTorrentEndpoint class. """ def setUp(self) -> None: """ Create a mocked DownloadManager and a CreateTorrentEndpoint. """ super().setUp() self.download_manager = Mock() self.endpoint = LibTorrentEndpoint(self.download_manager) async def test_get_settings_unknown_unspecified_hops_default(self) -> None: """ Test if getting settings for an unspecified number of hops defaults to 0 hops. """ self.download_manager.ltsessions = {} response = await self.endpoint.get_libtorrent_settings(GetLibtorrentSettingsRequest({})) response_body_json = await response_to_json(response) self.assertEqual(200, response.status) self.assertEqual(0, response_body_json["hop"]) self.assertEqual({}, response_body_json["settings"]) async def test_get_settings_unknown_specified_hops_default(self) -> None: """ Test if getting settings for an unknown number of hops defaults to 0 hops. 
""" self.download_manager.ltsessions = {} response = await self.endpoint.get_libtorrent_settings(GetLibtorrentSettingsRequest({"hop": 1})) response_body_json = await response_to_json(response) self.assertEqual(200, response.status) self.assertEqual(1, response_body_json["hop"]) self.assertEqual({}, response_body_json["settings"]) async def test_get_settings_zero_hops(self) -> None: """ Test if getting settings for zero hops gives extended info. """ fut = Future() fut.set_result(Mock()) self.download_manager.ltsessions = {0: fut} self.download_manager.get_session_settings = Mock(return_value={"peer_fingerprint": "test", "test": "test"}) response = await self.endpoint.get_libtorrent_settings(GetLibtorrentSettingsRequest({})) response_body_json = await response_to_json(response) self.assertEqual(200, response.status) self.assertEqual(0, response_body_json["hop"]) self.assertEqual("test", response_body_json["settings"]["test"]) self.assertEqual(hexlify(b"test").decode(), response_body_json["settings"]["peer_fingerprint"]) async def test_get_settings_more_hops(self) -> None: """ Test if getting settings for more hops leaves out extended info. """ fut = Future() fut.set_result(Mock(get_settings=Mock(return_value={"test": "test"}))) self.download_manager.ltsessions = {2: fut} response = await self.endpoint.get_libtorrent_settings(GetLibtorrentSettingsRequest({"hop": 2})) response_body_json = await response_to_json(response) self.assertEqual(200, response.status) self.assertEqual(2, response_body_json["hop"]) self.assertEqual("test", response_body_json["settings"]["test"]) async def test_get_session_info_unknown_unspecified_hops_default(self) -> None: """ Test if getting session info for an unspecified number of hops defaults to 0 hops. 
""" self.download_manager.ltsessions = {} response = await self.endpoint.get_libtorrent_session_info(GetLibtorrentSettingsRequest({})) response_body_json = await response_to_json(response) self.assertEqual(200, response.status) self.assertEqual(0, response_body_json["hop"]) self.assertEqual({}, response_body_json["session"]) async def test_get_session_info_unknown_specified_hops_default(self) -> None: """ Test if getting session info for an unknown number of hops defaults to 0 hops. """ self.download_manager.ltsessions = {} response = await self.endpoint.get_libtorrent_session_info(GetLibtorrentSettingsRequest({"hop": 1})) response_body_json = await response_to_json(response) self.assertEqual(200, response.status) self.assertEqual(1, response_body_json["hop"]) self.assertEqual({}, response_body_json["session"]) async def test_get_session_info_known(self) -> None: """ Test if getting session info for a known number of hops forwards the known settings. """ fut = Future() fut.set_result(Mock()) self.download_manager.ltsessions = {0: fut} response_future = ensure_future(self.endpoint.get_libtorrent_session_info(GetLibtorrentSettingsRequest({}))) await sleep(0) self.download_manager.session_stats_callback(Mock(values={"test": "test"})) response = await response_future response_body_json = await response_to_json(response) self.assertEqual(200, response.status) self.assertEqual(0, response_body_json["hop"]) self.assertEqual("test", response_body_json["session"]["test"])
5,814
Python
.py
116
42.112069
116
0.677493
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,952
test_downloads_endpoint.py
Tribler_tribler/src/tribler/test_unit/core/libtorrent/restapi/test_downloads_endpoint.py
""" Keep this text for the test_stream test. """ from __future__ import annotations from binascii import hexlify from io import StringIO from pathlib import Path from unittest.mock import AsyncMock, Mock, call, patch from aiohttp.web_urldispatcher import UrlMappingMatchInfo from configobj import ConfigObj from ipv8.test.base import TestBase from validate import Validator from tribler.core.libtorrent.download_manager.download import Download from tribler.core.libtorrent.download_manager.download_config import SPEC_CONTENT, DownloadConfig from tribler.core.libtorrent.download_manager.stream import Stream from tribler.core.libtorrent.restapi.downloads_endpoint import DownloadsEndpoint from tribler.core.libtorrent.torrentdef import TorrentDef, TorrentDefNoMetainfo from tribler.core.restapi.rest_endpoint import HTTP_BAD_REQUEST, HTTP_INTERNAL_SERVER_ERROR, HTTP_NOT_FOUND from tribler.test_unit.base_restapi import BodyCapture, MockRequest, response_to_bytes, response_to_json from tribler.test_unit.core.libtorrent.mocks import TORRENT_WITH_DIRS, TORRENT_WITH_DIRS_CONTENT, TORRENT_WITH_VIDEO from tribler.tribler_config import TriblerConfigManager class MockTriblerConfigManager(TriblerConfigManager): """ A memory-based TriblerConfigManager. """ def write(self) -> None: """ Don't actually write to any file. """ class GetDownloadsRequest(MockRequest): """ A MockRequest that mimics GetDownloadsRequests. """ def __init__(self, query: dict) -> None: """ Create a new GetDownloadsRequest. """ super().__init__(query, "GET", "/downloads") class AddDownloadRequest(MockRequest): """ A MockRequest that mimics AddDownloadRequests. """ def __init__(self, query: dict) -> None: """ Create a new AddDownloadRequest. """ super().__init__(query, "PUT", "/downloads") async def json(self) -> dict: """ Get the json equivalent of the query (i.e., just the query). """ return self._query class DeleteDownloadRequest(MockRequest): """ A MockRequest that mimics DeleteDownloadRequests. 
""" def __init__(self, query: dict, infohash: str) -> None: """ Create a new DeleteDownloadRequest. """ super().__init__(query, "DELETE", f"/downloads/{infohash}") self._infohash = infohash @property def match_info(self) -> UrlMappingMatchInfo: """ Get the match info (the infohash in the url). """ return UrlMappingMatchInfo({"infohash": self._infohash}, Mock()) async def json(self) -> dict: """ Get the json equivalent of the query (i.e., just the query). """ return self._query class UpdateDownloadRequest(MockRequest): """ A MockRequest that mimics UpdateDownloadRequests. """ def __init__(self, query: dict, infohash: str) -> None: """ Create a new UpdateDownloadRequest. """ super().__init__(query, "PATCH", f"/downloads/{infohash}") self._infohash = infohash @property def match_info(self) -> UrlMappingMatchInfo: """ Get the match info (the infohash in the url). """ return UrlMappingMatchInfo({"infohash": self._infohash}, Mock()) async def json(self) -> dict: """ Get the json equivalent of the query (i.e., just the query). """ return self._query class GetTorrentRequest(MockRequest): """ A MockRequest that mimics GetTorrentRequests. """ def __init__(self, query: dict, infohash: str) -> None: """ Create a new GetTorrentRequest. """ super().__init__(query, "GET", f"/downloads/{infohash}/torrent") self._infohash = infohash @property def match_info(self) -> UrlMappingMatchInfo: """ Get the match info (the infohash in the url). """ return UrlMappingMatchInfo({"infohash": self._infohash}, Mock()) class GetFilesRequest(MockRequest): """ A MockRequest that mimics GetFilesRequests. """ def __init__(self, query: dict, infohash: str) -> None: """ Create a new GetFilesRequest. """ super().__init__(query, "GET", f"/downloads/{infohash}/files") self._infohash = infohash @property def match_info(self) -> UrlMappingMatchInfo: """ Get the match info (the infohash in the url). 
""" return UrlMappingMatchInfo({"infohash": self._infohash}, Mock()) class ExpandTreeDirectoryRequest(MockRequest): """ A MockRequest that mimics ExpandTreeDirectoryRequests. """ def __init__(self, query: dict, infohash: str) -> None: """ Create a new ExpandTreeDirectoryRequest. """ super().__init__(query, "GET", f"/downloads/{infohash}/files/expand") self._infohash = infohash @property def match_info(self) -> UrlMappingMatchInfo: """ Get the match info (the infohash in the url). """ return UrlMappingMatchInfo({"infohash": self._infohash}, Mock()) class CollapseTreeDirectoryRequest(MockRequest): """ A MockRequest that mimics CollapseTreeDirectoryRequests. """ def __init__(self, query: dict, infohash: str) -> None: """ Create a new CollapseTreeDirectoryRequest. """ super().__init__(query, "GET", f"/downloads/{infohash}/files/collapse") self._infohash = infohash @property def match_info(self) -> UrlMappingMatchInfo: """ Get the match info (the infohash in the url). """ return UrlMappingMatchInfo({"infohash": self._infohash}, Mock()) class SelectTreePathRequest(MockRequest): """ A MockRequest that mimics SelectTreePathRequests. """ def __init__(self, query: dict, infohash: str) -> None: """ Create a new SelectTreePathRequest. """ super().__init__(query, "GET", f"/downloads/{infohash}/files/select") self._infohash = infohash @property def match_info(self) -> UrlMappingMatchInfo: """ Get the match info (the infohash in the url). """ return UrlMappingMatchInfo({"infohash": self._infohash}, Mock()) class DeselectTreePathRequest(MockRequest): """ A MockRequest that mimics DeselectTreePathRequests. """ def __init__(self, query: dict, infohash: str) -> None: """ Create a new DeselectTreePathRequest. """ super().__init__(query, "GET", f"/downloads/{infohash}/files/deselect") self._infohash = infohash @property def match_info(self) -> UrlMappingMatchInfo: """ Get the match info (the infohash in the url). 
""" return UrlMappingMatchInfo({"infohash": self._infohash}, Mock()) class GenericTrackerRequest(MockRequest): """ A MockRequest that mimics requests to add, remove or check trackers. """ def __init__(self, infohash: str, url: str | None, method: str, sub_endpoint: str) -> None: """ Create a new AddDownloadRequest. """ super().__init__({"url": url}, method, f"/downloads/{infohash}/{sub_endpoint}") self._infohash = infohash async def json(self) -> dict: """ Get the json equivalent of the query (i.e., just the query). """ return self._query @property def match_info(self) -> UrlMappingMatchInfo: """ Get the match info (the infohash in the url). """ return UrlMappingMatchInfo({"infohash": self._infohash}, Mock()) class StreamRequest(MockRequest): """ A MockRequest that mimics StreamRequests. """ __slots__ = ['http_range'] def __init__(self, query: dict, infohash: str, fileindex: int, **kwargs) -> None: """ Create a new StreamRequest. """ super().__init__(query, "GET", f"/downloads/{infohash}/stream/{fileindex}") self._infohash = infohash self._fileindex = fileindex self._payload_writer = BodyCapture() self.http_range = Mock(**kwargs) def get_transmitted(self) -> bytes: """ Get the received bytes from the writer. """ return self._payload_writer.getvalue() @property def match_info(self) -> UrlMappingMatchInfo: """ Get the match info (the infohash in the url). """ return UrlMappingMatchInfo({"infohash": self._infohash, "fileindex": self._fileindex}, Mock()) class TestDownloadsEndpoint(TestBase): """ Tests for the DownloadsEndpoint class. """ def setUp(self) -> None: """ Create a mocked DownloadManager and a DownloadsEndpoint. """ super().setUp() self.download_manager = Mock() self.download_manager.config = MockTriblerConfigManager() self.endpoint = DownloadsEndpoint(self.download_manager) def set_loaded_downloads(self, downloads: list[Download] | None = None) -> None: """ Set the status for all checkpoints being loaded. 
""" self.download_manager.get_downloads = Mock(return_value=downloads or []) self.download_manager.checkpoints_count = len(downloads) self.download_manager.checkpoints_loaded = len(downloads) self.download_manager.all_checkpoints_are_loaded = True def create_mock_download(self) -> Download: """ Create a mocked Download. """ defaults = ConfigObj(StringIO(SPEC_CONTENT)) conf = ConfigObj() conf.configspec = defaults conf.validate(Validator()) config = DownloadConfig(conf) config.set_dest_dir(Path("")) return Download(TorrentDefNoMetainfo(b"\x01" * 20, b"test"), None, config, hidden=False, checkpoint_disabled=True) async def test_get_downloads_unloaded(self) -> None: """ Test if a clean response is returned if the checkpoints are not loaded yet. """ self.download_manager.checkpoints_count = 1 self.download_manager.checkpoints_loaded = 0 self.download_manager.all_checkpoints_are_loaded = False self.download_manager.get_downloads = Mock(return_value=[]) response = await self.endpoint.get_downloads(GetDownloadsRequest({})) response_body_json = await response_to_json(response) self.assertEqual(200, response.status) self.assertEqual([], response_body_json["downloads"]) self.assertEqual(1, response_body_json["checkpoints"]["total"]) self.assertEqual(0, response_body_json["checkpoints"]["loaded"]) self.assertFalse(response_body_json["checkpoints"]["all_loaded"]) async def test_get_downloads_no_downloads(self) -> None: """ Test if an empty list is returned if there are no downloads. 
""" self.set_loaded_downloads([]) response = await self.endpoint.get_downloads(GetDownloadsRequest({})) response_body_json = await response_to_json(response) self.assertEqual(200, response.status) self.assertEqual([], response_body_json["downloads"]) self.assertEqual(0, response_body_json["checkpoints"]["total"]) self.assertEqual(0, response_body_json["checkpoints"]["loaded"]) self.assertTrue(response_body_json["checkpoints"]["all_loaded"]) async def test_get_downloads_hidden_download(self) -> None: """ Test if an empty list is returned if there are only hidden downloads. """ self.set_loaded_downloads([Download(TorrentDefNoMetainfo(b"\x01" * 20, b"test"), None, Mock(), hidden=True, checkpoint_disabled=True)]) response = await self.endpoint.get_downloads(GetDownloadsRequest({})) response_body_json = await response_to_json(response) self.assertEqual(200, response.status) self.assertEqual([], response_body_json["downloads"]) self.assertEqual(1, response_body_json["checkpoints"]["total"]) self.assertEqual(1, response_body_json["checkpoints"]["loaded"]) self.assertTrue(response_body_json["checkpoints"]["all_loaded"]) async def test_get_downloads_normal_download(self) -> None: """ Test if the information of a normal download is correctly presented. 
""" self.set_loaded_downloads([self.create_mock_download()]) response = await self.endpoint.get_downloads(GetDownloadsRequest({})) response_body_json = await response_to_json(response) self.assertEqual(200, response.status) self.assertFalse(response_body_json["downloads"][0]["anon_download"]) self.assertEqual(".", response_body_json["downloads"][0]["destination"]) self.assertEqual("", response_body_json["downloads"][0]["error"]) self.assertEqual(0.0, response_body_json["downloads"][0]["eta"]) self.assertEqual(0, response_body_json["downloads"][0]["hops"]) self.assertEqual("0101010101010101010101010101010101010101", response_body_json["downloads"][0]["infohash"]) self.assertEqual(0, response_body_json["downloads"][0]["max_download_speed"]) self.assertEqual(0, response_body_json["downloads"][0]["max_upload_speed"]) self.assertEqual("test", response_body_json["downloads"][0]["name"]) self.assertEqual(0, response_body_json["downloads"][0]["num_connected_peers"]) self.assertEqual(0, response_body_json["downloads"][0]["num_connected_seeds"]) self.assertEqual(0, response_body_json["downloads"][0]["num_peers"]) self.assertEqual(0, response_body_json["downloads"][0]["num_seeds"]) self.assertEqual(0, response_body_json["downloads"][0]["progress"]) self.assertEqual(0, response_body_json["downloads"][0]["all_time_ratio"]) self.assertFalse(response_body_json["downloads"][0]["safe_seeding"]) self.assertEqual(0, response_body_json["downloads"][0]["size"]) self.assertEqual(0, response_body_json["downloads"][0]["speed_down"]) self.assertEqual(0, response_body_json["downloads"][0]["speed_up"]) self.assertEqual("STOPPED", response_body_json["downloads"][0]["status"]) self.assertEqual(5, response_body_json["downloads"][0]["status_code"]) self.assertEqual(0, response_body_json["downloads"][0]["time_added"]) self.assertEqual(0, response_body_json["downloads"][0]["all_time_download"]) self.assertEqual(0, response_body_json["downloads"][0]["total_pieces"]) self.assertEqual(0, 
response_body_json["downloads"][0]["all_time_upload"]) self.assertEqual([], response_body_json["downloads"][0]["trackers"]) self.assertEqual(1, response_body_json["checkpoints"]["total"]) self.assertEqual(1, response_body_json["checkpoints"]["loaded"]) self.assertTrue(response_body_json["checkpoints"]["all_loaded"]) async def test_get_downloads_filter_download_pass(self) -> None: """ Test if the information of a download that passes the filter is correctly presented. """ download = self.create_mock_download() download.handle = Mock(is_valid=Mock(return_value=False), status=Mock(return_value=Mock(pieces=[False]))) self.set_loaded_downloads([download]) response = await self.endpoint.get_downloads(GetDownloadsRequest({"infohash": "01" * 20, "get_peers": "1", "get_pieces": "1", "get_availability": "1"})) response_body_json = await response_to_json(response) self.assertEqual(200, response.status) self.assertEqual("01" * 20, response_body_json["downloads"][0]["infohash"]) self.assertEqual([], response_body_json["downloads"][0]["peers"]) self.assertEqual("", response_body_json["downloads"][0]["pieces"]) self.assertEqual(0, response_body_json["downloads"][0]["availability"]) async def test_get_downloads_filter_download_fail(self) -> None: """ Test if the information of a download that does not pass the filter is correctly presented. 
""" download = self.create_mock_download() download.handle = Mock(is_valid=Mock(return_value=False), status=Mock(return_value=Mock(pieces=[False]))) self.set_loaded_downloads([download]) response = await self.endpoint.get_downloads(GetDownloadsRequest({"infohash": "02" * 20, "get_peers": "1", "get_pieces": "1", "get_availability": "1"})) response_body_json = await response_to_json(response) self.assertEqual(200, response.status) self.assertEqual("01" * 20, response_body_json["downloads"][0]["infohash"]) self.assertNotIn("peers", response_body_json["downloads"][0]) self.assertNotIn("pieces", response_body_json["downloads"][0]) self.assertNotIn("availability", response_body_json["downloads"][0]) async def test_get_downloads_stream_download(self) -> None: """ Test if the information of a steaming download is correctly presented. """ download = self.create_mock_download() download.handle = Mock(is_valid=Mock(return_value=False)) download.stream = Stream(download) download.stream.close() self.set_loaded_downloads([download]) response = await self.endpoint.get_downloads(GetDownloadsRequest({})) response_body_json = await response_to_json(response) self.assertEqual(200, response.status) self.assertFalse(response_body_json["downloads"][0]["anon_download"]) self.assertEqual(0.0, response_body_json["downloads"][0]["vod_prebuffering_progress"]) self.assertEqual(0.0, response_body_json["downloads"][0]["vod_prebuffering_progress_consec"]) self.assertEqual(0.0, response_body_json["downloads"][0]["vod_header_progress"]) self.assertEqual(0.0, response_body_json["downloads"][0]["vod_footer_progress"]) async def test_add_download_no_uri(self) -> None: """ Test if a graceful error is returned when no uri is given. 
""" response = await self.endpoint.add_download(AddDownloadRequest({})) response_body_json = await response_to_json(response) self.assertEqual(HTTP_BAD_REQUEST, response.status) self.assertEqual("uri parameter missing", response_body_json["error"]) async def test_add_download_unsafe_anon_error(self) -> None: """ Test if a graceful error is returned when safe seeding is not enabled in anonymous mode. """ download = self.create_mock_download() with patch("tribler.core.libtorrent.download_manager.download_config.DownloadConfig.from_defaults", lambda _: download.config): response = await self.endpoint.add_download(AddDownloadRequest({"uri": "http://127.0.0.1/file", "anon_hops": 1, "safe_seeding": 0})) response_body_json = await response_to_json(response) self.assertEqual(HTTP_BAD_REQUEST, response.status) self.assertEqual("Cannot set anonymous download without safe seeding enabled", response_body_json["error"]) async def test_add_download_default_parameters(self) -> None: """ Test if the default parameters are set when adding a download. """ download = self.create_mock_download() self.download_manager.start_download_from_uri = AsyncMock(return_value=download) with patch("tribler.core.libtorrent.download_manager.download_config.DownloadConfig.from_defaults", lambda _: download.config): response = await self.endpoint.add_download(AddDownloadRequest({"uri": "http://127.0.0.1/file"})) response_body_json = await response_to_json(response) self.assertEqual(200, response.status) self.assertTrue(response_body_json["started"]) self.assertEqual("01" * 20, response_body_json["infohash"]) self.assertEqual(0, download.config.get_hops()) self.assertFalse(download.config.get_safe_seeding()) self.assertEqual(Path(""), download.config.get_dest_dir()) self.assertEqual([], download.config.get_selected_files()) async def test_add_download_custom_parameters(self) -> None: """ Test if the custom parameters are set when adding a download. 
""" download = self.create_mock_download() self.download_manager.start_download_from_uri = AsyncMock(return_value=download) with patch("tribler.core.libtorrent.download_manager.download_config.DownloadConfig.from_defaults", lambda _: download.config): response = await self.endpoint.add_download(AddDownloadRequest({"uri": "http://127.0.0.1/file", "safe_seeding": 1, "selected_files": [0], "destination": "foo", "anon_hops": 1})) response_body_json = await response_to_json(response) self.assertEqual(200, response.status) self.assertTrue(response_body_json["started"]) self.assertEqual("01" * 20, response_body_json["infohash"]) self.assertEqual(1, download.config.get_hops()) self.assertTrue(download.config.get_safe_seeding()) self.assertEqual(Path("foo"), download.config.get_dest_dir()) self.assertEqual([0], download.config.get_selected_files()) async def test_add_download_failed(self) -> None: """ Test if a graceful error is returned when adding a download failed. """ download = self.create_mock_download() self.download_manager.start_download_from_uri = AsyncMock(side_effect=Exception("invalid uri")) with patch("tribler.core.libtorrent.download_manager.download_config.DownloadConfig.from_defaults", lambda _: download.config): response = await self.endpoint.add_download(AddDownloadRequest({"uri": "http://127.0.0.1/file"})) response_body_json = await response_to_json(response) self.assertEqual(HTTP_INTERNAL_SERVER_ERROR, response.status) self.assertEqual("invalid uri", response_body_json["error"]) async def test_delete_download_no_remove_data(self) -> None: """ Test if a graceful error is returned when no remove data is supplied when deleting a download. 
""" response = await self.endpoint.delete_download(DeleteDownloadRequest({}, "01" * 20)) response_body_json = await response_to_json(response) self.assertEqual(HTTP_BAD_REQUEST, response.status) self.assertEqual("remove_data parameter missing", response_body_json["error"]) async def test_delete_download_no_download(self) -> None: """ Test if a graceful error is returned when no download is found for removal. """ self.download_manager.get_download = Mock(return_value=None) response = await self.endpoint.delete_download(DeleteDownloadRequest({"remove_data": False}, "01" * 20)) response_body_json = await response_to_json(response) self.assertEqual(HTTP_NOT_FOUND, response.status) self.assertEqual("this download does not exist", response_body_json["error"]) async def test_delete_download_no_data(self) -> None: """ Test if the requested download is removed without removing data on request. """ download = self.create_mock_download() self.download_manager.get_download = Mock(return_value=download) self.download_manager.remove_download = AsyncMock() response = await self.endpoint.delete_download(DeleteDownloadRequest({"remove_data": False}, "01" * 20)) response_body_json = await response_to_json(response) self.assertEqual(200, response.status) self.assertTrue(response_body_json["removed"]) self.assertEqual("01" * 20, response_body_json["infohash"]) self.assertEqual(call(download, remove_content=False), self.download_manager.remove_download.call_args) async def test_delete_download_with_data(self) -> None: """ Test if the requested download and its data is removed on request. 
""" download = self.create_mock_download() self.download_manager.get_download = Mock(return_value=download) self.download_manager.remove_download = AsyncMock() response = await self.endpoint.delete_download(DeleteDownloadRequest({"remove_data": True}, "01" * 20)) response_body_json = await response_to_json(response) self.assertEqual(200, response.status) self.assertTrue(response_body_json["removed"]) self.assertEqual("01" * 20, response_body_json["infohash"]) self.assertEqual(call(download, remove_content=True), self.download_manager.remove_download.call_args) async def test_delete_download_delete_failed(self) -> None: """ Test if a graceful error is returned when the deletion of an existing download fails. """ download = self.create_mock_download() self.download_manager.get_download = Mock(return_value=download) self.download_manager.remove_download = AsyncMock(side_effect=OSError) response = await self.endpoint.delete_download(DeleteDownloadRequest({"remove_data": False}, "01" * 20)) response_body_json = await response_to_json(response) self.assertEqual(HTTP_INTERNAL_SERVER_ERROR, response.status) self.assertEqual("OSError", response_body_json["error"]["code"]) self.assertTrue(response_body_json["error"]["handled"]) self.assertEqual("", response_body_json["error"]["message"]) async def test_update_download_no_download(self) -> None: """ Test if a graceful error is returned when no download is found to update. """ self.download_manager.get_download = Mock(return_value=None) response = await self.endpoint.update_download(UpdateDownloadRequest({}, "01" * 20)) response_body_json = await response_to_json(response) self.assertEqual(HTTP_NOT_FOUND, response.status) self.assertEqual("this download does not exist", response_body_json["error"]) async def test_update_download_anon_hops_garbage(self) -> None: """ Test if anon hops can only exist as the only parameter. 
""" response = await self.endpoint.update_download(UpdateDownloadRequest({"anon_hops": 1, "foo": "bar"}, "01" * 20)) response_body_json = await response_to_json(response) self.assertEqual(HTTP_BAD_REQUEST, response.status) self.assertEqual("anon_hops must be the only parameter in this request", response_body_json["error"]) async def test_update_download_anon_hops_update(self) -> None: """ Test if anon hops can be updated. """ download = self.create_mock_download() self.download_manager.get_download = Mock(return_value=download) self.download_manager.update_hops = AsyncMock() response = await self.endpoint.update_download(UpdateDownloadRequest({"anon_hops": 1}, "01" * 20)) response_body_json = await response_to_json(response) self.assertEqual(200, response.status) self.assertTrue(response_body_json["modified"]) self.assertEqual("01" * 20, response_body_json["infohash"]) async def test_update_download_anon_hops_update_failed(self) -> None: """ Test if a graceful error is returned when updating the anon hops failed. """ download = self.create_mock_download() self.download_manager.get_download = Mock(return_value=download) self.download_manager.update_hops = AsyncMock(side_effect=OSError) response = await self.endpoint.update_download(UpdateDownloadRequest({"anon_hops": 1}, "01" * 20)) response_body_json = await response_to_json(response) self.assertEqual(HTTP_INTERNAL_SERVER_ERROR, response.status) self.assertEqual("OSError", response_body_json["error"]["code"]) self.assertTrue(response_body_json["error"]["handled"]) self.assertEqual("", response_body_json["error"]["message"]) async def test_update_download_selected_files_out_of_range(self) -> None: """ Test if a graceful error is returned when the selected files are out of range. 
""" download = self.create_mock_download() download.tdef.metainfo = {b"info": {b"files": [{b"path": {b"hi.txt"}, b"length": 0}]}} self.download_manager.get_download = Mock(return_value=download) response = await self.endpoint.update_download(UpdateDownloadRequest({"selected_files": [99999999999]}, "01" * 20)) response_body_json = await response_to_json(response) self.assertEqual(HTTP_BAD_REQUEST, response.status) self.assertEqual("index out of range", response_body_json["error"]) async def test_update_download_selected_files(self) -> None: """ Test if the selected files of a download can be updated. """ download = self.create_mock_download() download.tdef.metainfo = {b"info": {b"files": [{b"path": {b"hi.txt"}, b"length": 0}]}} self.download_manager.get_download = Mock(return_value=download) response = await self.endpoint.update_download(UpdateDownloadRequest({"selected_files": [0]}, "01" * 20)) response_body_json = await response_to_json(response) self.assertEqual(200, response.status) self.assertTrue(response_body_json["modified"]) self.assertEqual("01" * 20, response_body_json["infohash"]) async def test_update_download_unknown_state(self) -> None: """ Test if a graceful error is returned when a download is set to an unknown state. """ download = self.create_mock_download() self.download_manager.get_download = Mock(return_value=download) response = await self.endpoint.update_download(UpdateDownloadRequest({"state": "foo"}, "01" * 20)) response_body_json = await response_to_json(response) self.assertEqual(HTTP_BAD_REQUEST, response.status) self.assertEqual("unknown state parameter", response_body_json["error"]) async def test_update_download_state_resume(self) -> None: """ Test if a download can be set to the resume state. 
""" download = self.create_mock_download() download.config.set_user_stopped(True) self.download_manager.get_download = Mock(return_value=download) response = await self.endpoint.update_download(UpdateDownloadRequest({"state": "resume"}, "01" * 20)) response_body_json = await response_to_json(response) self.assertEqual(200, response.status) self.assertTrue(response_body_json["modified"]) self.assertEqual("01" * 20, response_body_json["infohash"]) self.assertFalse(download.config.get_user_stopped()) async def test_update_download_state_stop(self) -> None: """ Test if a download can be set to the stopped state. """ download = self.create_mock_download() download.config.set_user_stopped(False) self.download_manager.get_download = Mock(return_value=download) response = await self.endpoint.update_download(UpdateDownloadRequest({"state": "stop"}, "01" * 20)) response_body_json = await response_to_json(response) self.assertEqual(200, response.status) self.assertTrue(response_body_json["modified"]) self.assertEqual("01" * 20, response_body_json["infohash"]) self.assertTrue(download.config.get_user_stopped()) async def test_update_download_state_recheck(self) -> None: """ Test if a download can be set to the recheck state. 
""" download = self.create_mock_download() download.tdef = Mock(get_infohash=Mock(return_value=b"\x01" * 20)) download.handle = Mock(is_valid=Mock(return_value=True)) self.download_manager.get_download = Mock(return_value=download) response = await self.endpoint.update_download(UpdateDownloadRequest({"state": "recheck"}, "01" * 20)) response_body_json = await response_to_json(response) self.assertEqual(200, response.status) self.assertTrue(response_body_json["modified"]) self.assertEqual("01" * 20, response_body_json["infohash"]) self.assertEqual(call(), download.handle.force_recheck.call_args) async def test_update_download_state_move_storage_no_dest_dir(self) -> None: """ Test if a graceful error is returned when no dest dir is specified when setting the move storage state. """ download = self.create_mock_download() self.download_manager.get_download = Mock(return_value=download) response = await self.endpoint.update_download(UpdateDownloadRequest({"state": "move_storage", "dest_dir": "I don't exist"}, "01" * 20)) response_body_json = await response_to_json(response) self.assertEqual(HTTP_BAD_REQUEST, response.status) self.assertEqual("Target directory (I don't exist) does not exist", response_body_json["error"]) async def test_update_download_state_move_storage(self) -> None: """ Test if a download can be set to the move_storage state. 
""" download = self.create_mock_download() download.tdef = Mock(get_infohash=Mock(return_value=b"\x01" * 20)) download.handle = Mock(is_valid=Mock(return_value=True)) self.download_manager.get_download = Mock(return_value=download) response = await self.endpoint.update_download(UpdateDownloadRequest({"state": "move_storage", "dest_dir": str(Path(__file__).parent)}, "01" * 20)) response_body_json = await response_to_json(response) self.assertEqual(200, response.status) self.assertTrue(response_body_json["modified"]) self.assertEqual("01" * 20, response_body_json["infohash"]) self.assertEqual(call(str(Path(__file__).parent)), download.handle.move_storage.call_args) async def test_update_download_nothing(self) -> None: """ Test if a download can be updated with nothing. """ download = self.create_mock_download() self.download_manager.get_download = Mock(return_value=download) response = await self.endpoint.update_download(UpdateDownloadRequest({}, "01" * 20)) response_body_json = await response_to_json(response) self.assertEqual(200, response.status) self.assertTrue(response_body_json["modified"]) self.assertEqual("01" * 20, response_body_json["infohash"]) async def test_get_torrent_no_download(self) -> None: """ Test if a graceful error is returned when no download is found. """ self.download_manager.get_download = Mock(return_value=None) response = await self.endpoint.get_torrent(GetTorrentRequest({}, "01" * 20)) response_body_json = await response_to_json(response) self.assertEqual(HTTP_NOT_FOUND, response.status) self.assertEqual("this download does not exist", response_body_json["error"]) async def test_get_torrent_no_torrent_data(self) -> None: """ Test if a graceful error is returned when no torrent data is found. 
""" self.download_manager.get_download = Mock(return_value=Mock(get_torrent_data=Mock(return_value=None))) response = await self.endpoint.get_torrent(GetTorrentRequest({}, "01" * 20)) response_body_json = await response_to_json(response) self.assertEqual(HTTP_NOT_FOUND, response.status) self.assertEqual("this download does not exist", response_body_json["error"]) async def test_get_torrent(self) -> None: """ Test if torrent data is correctly sent over on request. """ download = self.create_mock_download() download.handle = Mock(is_valid=Mock(return_value=True), torrent_file=Mock(return_value=TORRENT_WITH_DIRS)) self.download_manager.get_download = Mock(return_value=download) response = await self.endpoint.get_torrent(GetTorrentRequest({}, "01" * 20)) response_body_bytes = await response_to_bytes(response) self.assertEqual(200, response.status) self.assertEqual(TORRENT_WITH_DIRS_CONTENT, response_body_bytes) async def test_get_files_no_download(self) -> None: """ Test if a graceful error is returned when no download is found. """ self.download_manager.get_download = Mock(return_value=None) response = await self.endpoint.get_files(GetFilesRequest({}, "01" * 20)) response_body_json = await response_to_json(response) self.assertEqual(HTTP_NOT_FOUND, response.status) self.assertEqual("this download does not exist", response_body_json["error"]) async def test_get_files_without_path(self) -> None: """ Test if the files of a download get be retrieved without specifying a starting path. 
""" download = self.create_mock_download() download.tdef = TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT) self.download_manager.get_download = Mock(return_value=download) response = await self.endpoint.get_files(GetFilesRequest({}, "01" * 20)) response_body_json = await response_to_json(response) self.assertEqual(200, response.status) self.assertEqual("01" * 20, response_body_json["infohash"]) self.assertEqual(6, len(response_body_json["files"])) async def test_get_files_with_path_unloaded(self) -> None: """ Test if the special loading state is returned when using a starting path and an unloaded torrent. """ download = self.create_mock_download() download.tdef = TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT) self.download_manager.get_download = Mock(return_value=download) response = await self.endpoint.get_files(GetFilesRequest({"view_start_path": "def/file6.avi", "view_size": 2}, "01" * 20)) response_body_json = await response_to_json(response) self.assertEqual(200, response.status) self.assertEqual("01" * 20, response_body_json["infohash"]) self.assertEqual(1, len(response_body_json["files"])) self.assertEqual(-3, response_body_json["files"][0]["index"]) self.assertEqual("loading...", response_body_json["files"][0]["name"]) self.assertEqual(0, response_body_json["files"][0]["size"]) self.assertEqual(0, response_body_json["files"][0]["included"]) self.assertEqual(0.0, response_body_json["files"][0]["progress"]) async def test_get_files_with_path(self) -> None: """ Test if the files of a download get be retrieved using a starting path. 
""" download = self.create_mock_download() download.tdef = TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT) download.tdef.load_torrent_info() download.tdef.torrent_file_tree.expand(Path("torrent_create/abc")) self.download_manager.get_download = Mock(return_value=download) response = await self.endpoint.get_files(GetFilesRequest({"view_start_path": "torrent_create", "view_size": 2}, "01" * 20)) response_body_json = await response_to_json(response) self.assertEqual(200, response.status) self.assertEqual("01" * 20, response_body_json["infohash"]) self.assertEqual(2, len(response_body_json["files"])) self.assertEqual(-2, response_body_json["files"][0]["index"]) self.assertEqual(Path("torrent_create/abc"), Path(response_body_json["files"][0]["name"])) self.assertEqual(18, response_body_json["files"][0]["size"]) self.assertFalse(response_body_json["files"][0]["included"]) self.assertEqual(0.0, response_body_json["files"][0]["progress"]) self.assertEqual(0, response_body_json["files"][1]["index"]) self.assertEqual(Path("torrent_create/abc/file2.txt"), Path(response_body_json["files"][1]["name"])) self.assertEqual(6, response_body_json["files"][1]["size"]) self.assertTrue(response_body_json["files"][1]["included"]) self.assertEqual(0.0, response_body_json["files"][1]["progress"]) async def test_collapse_tree_directory_no_download(self) -> None: """ Test if a graceful error is returned when no download is found. """ self.download_manager.get_download = Mock(return_value=None) response = await self.endpoint.collapse_tree_directory(CollapseTreeDirectoryRequest({}, "01" * 20)) response_body_json = await response_to_json(response) self.assertEqual(HTTP_NOT_FOUND, response.status) self.assertEqual("this download does not exist", response_body_json["error"]) async def test_collapse_tree_directory(self) -> None: """ Test if a file tree directory can be collapsed. 
""" download = self.create_mock_download() download.tdef = TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT) download.tdef.load_torrent_info() download.tdef.torrent_file_tree.expand(Path("torrent_create/abc")) self.download_manager.get_download = Mock(return_value=download) response = await self.endpoint.collapse_tree_directory(CollapseTreeDirectoryRequest( {"path": "torrent_create/abc"}, "01" * 20 )) response_body_json = await response_to_json(response) self.assertEqual(200, response.status) self.assertTrue(download.tdef.torrent_file_tree.find(Path("torrent_create/abc")).collapsed) self.assertEqual("torrent_create/abc", response_body_json["path"]) async def test_expand_tree_directory_no_download(self) -> None: """ Test if a graceful error is returned when no download is found. """ self.download_manager.get_download = Mock(return_value=None) response = await self.endpoint.expand_tree_directory(ExpandTreeDirectoryRequest({}, "01" * 20)) response_body_json = await response_to_json(response) self.assertEqual(HTTP_NOT_FOUND, response.status) self.assertEqual("this download does not exist", response_body_json["error"]) async def test_expand_tree_directory(self) -> None: """ Test if a file tree directory can be expanded. """ download = self.create_mock_download() download.tdef = TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT) download.tdef.load_torrent_info() self.download_manager.get_download = Mock(return_value=download) response = await self.endpoint.expand_tree_directory(ExpandTreeDirectoryRequest( {"path": "torrent_create/abc"}, "01" * 20 )) response_body_json = await response_to_json(response) self.assertEqual(200, response.status) self.assertFalse(download.tdef.torrent_file_tree.find(Path("torrent_create/abc")).collapsed) self.assertEqual("torrent_create/abc", response_body_json["path"]) async def test_select_tree_path_no_download(self) -> None: """ Test if a graceful error is returned when no download is found. 
""" self.download_manager.get_download = Mock(return_value=None) response = await self.endpoint.select_tree_path(SelectTreePathRequest({}, "01" * 20)) response_body_json = await response_to_json(response) self.assertEqual(HTTP_NOT_FOUND, response.status) self.assertEqual("this download does not exist", response_body_json["error"]) async def test_select_tree_path(self) -> None: """ Test if a tree path can be selected. """ download = self.create_mock_download() download.handle = Mock(is_valid=Mock(return_value=True)) download.tdef = TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT) download.tdef.load_torrent_info() self.download_manager.get_download = Mock(return_value=download) response = await self.endpoint.select_tree_path(SelectTreePathRequest( {"path": "torrent_create/def/file6.avi"}, "01" * 20 )) self.assertEqual(200, response.status) self.assertTrue(download.tdef.torrent_file_tree.find(Path("torrent_create/def/file6.avi")).selected) async def test_deselect_tree_path_no_download(self) -> None: """ Test if a graceful error is returned when no download is found. """ self.download_manager.get_download = Mock(return_value=None) response = await self.endpoint.deselect_tree_path(DeselectTreePathRequest({}, "01" * 20)) response_body_json = await response_to_json(response) self.assertEqual(HTTP_NOT_FOUND, response.status) self.assertEqual("this download does not exist", response_body_json["error"]) async def test_deselect_tree_path(self) -> None: """ Test if a tree path can be deselected. 
""" download = self.create_mock_download() download.handle = Mock(is_valid=Mock(return_value=True)) download.tdef = TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT) download.tdef.load_torrent_info() self.download_manager.get_download = Mock(return_value=download) response = await self.endpoint.deselect_tree_path(DeselectTreePathRequest( {"path": "torrent_create/def/file6.avi"}, "01" * 20 )) self.assertEqual(200, response.status) self.assertFalse(download.tdef.torrent_file_tree.find(Path("torrent_create/def/file6.avi")).selected) async def test_stream_no_download(self) -> None: """ Test if a graceful error is returned when no download is found. """ self.download_manager.get_download = Mock(return_value=None) response = await self.endpoint.stream(StreamRequest({}, "01" * 20, 0)) response_body_json = await response_to_json(response) self.assertEqual(HTTP_NOT_FOUND, response.status) self.assertEqual("this download does not exist", response_body_json["error"]) async def test_stream_unsatisfiable(self) -> None: """ Test if a graceful error is returned when the requested stream start is unsatisfiable. """ download = self.create_mock_download() download.handle = Mock(is_valid=Mock(return_value=False)) download.stream = Stream(download) download.stream.infohash = b"\x01" * 20 download.stream.fileindex = 0 download.stream.filesize = 0 download.tdef = TorrentDef.load_from_memory(TORRENT_WITH_VIDEO) self.download_manager.get_download = Mock(return_value=download) request = StreamRequest({}, "01" * 20, 0, start=100, stop=200) with patch("tribler.core.libtorrent.download_manager.stream.Stream.enable", AsyncMock()): response = await self.endpoint.stream(request) await response.prepare(request) self.assertEqual(416, response.status) self.assertEqual("Requested Range Not Satisfiable", response.reason) async def test_stream(self) -> None: """ Test if files can be streamed from a download. Note: we read the first byte of this test file ("). 
""" download = self.create_mock_download() download.handle = Mock(is_valid=Mock(return_value=False)) download.stream = Stream(download) download.stream.close() download.stream.infohash = b"\x01" * 20 download.stream.fileindex = 0 download.stream.filesize = 1 download.stream.filename = Path(__file__) download.stream.mapfile = Mock(return_value=Mock(piece=0)) download.stream.firstpiece = 0 download.stream.lastpiece = 0 download.stream.prebuffsize = 0 download.stream.enable = AsyncMock() download.lt_status = Mock(pieces=[True]) download.tdef = TorrentDef.load_from_memory(TORRENT_WITH_VIDEO) self.download_manager.get_download = Mock(return_value=download) request = StreamRequest({}, "01" * 20, 0, start=0, stop=1) with patch("tribler.core.libtorrent.download_manager.stream.Stream.enable", AsyncMock()): response = await self.endpoint.stream(request) await response.prepare(request) self.assertEqual(206, response.status) self.assertEqual(b'"', request.get_transmitted()) async def test_add_tracker(self) -> None: """ Test if trackers can be added to a download. """ trackers = ["http://127.0.0.1/somethingelse"] download = self.create_mock_download() download.handle = Mock(is_valid=Mock(return_value=True), trackers=Mock(return_value=trackers), add_tracker=lambda tracker_dict: trackers.append(tracker_dict["url"])) self.download_manager.get_download = Mock(return_value=download) url = "http://127.0.0.1/announce" response = await self.endpoint.add_tracker( GenericTrackerRequest(hexlify(download.tdef.infohash).decode(), url, "PUT", "trackers") ) response_body_json = await response_to_json(response) self.assertEqual(200, response.status) self.assertTrue(response_body_json["added"]) self.assertListEqual(["http://127.0.0.1/somethingelse", url], trackers) self.assertEqual(call(0, 1), download.handle.force_reannounce.call_args) async def test_add_tracker_no_download(self) -> None: """ Test if adding a tracker fails when no download is found. 
""" self.download_manager.get_download = Mock(return_value=None) response = await self.endpoint.add_tracker(GenericTrackerRequest("AA" * 20, "http://127.0.0.1/announce", "PUT", "trackers")) self.assertEqual(404, response.status) async def test_add_tracker_no_url(self) -> None: """ Test if adding a tracker fails when no tracker url is given. """ download = self.create_mock_download() download.handle = Mock(is_valid=Mock(return_value=True), trackers=Mock(return_value=[])) self.download_manager.get_download = Mock(return_value=download) response = await self.endpoint.add_tracker(GenericTrackerRequest("AA" * 20, None, "PUT", "trackers")) self.assertEqual(400, response.status) async def test_add_tracker_handle_error(self) -> None: """ Test if adding a tracker fails when a libtorrent internal error occurs. """ trackers = ["http://127.0.0.1/somethingelse"] download = self.create_mock_download() download.handle = Mock(is_valid=Mock(return_value=True), trackers=Mock(return_value=trackers), add_tracker=Mock(side_effect=RuntimeError("invalid torrent handle used"))) self.download_manager.get_download = Mock(return_value=download) url = "http://127.0.0.1/announce" response = await self.endpoint.add_tracker( GenericTrackerRequest(hexlify(download.tdef.infohash).decode(), url, "PUT", "trackers") ) response_body_json = await response_to_json(response) self.assertEqual(500, response.status) self.assertEqual("invalid torrent handle used", response_body_json["error"]) async def test_remove_tracker(self) -> None: """ Test if trackers can be removed from a download. 
""" trackers = [{"url": "http://127.0.0.1/somethingelse", "verified": True}, {"url": "http://127.0.0.1/announce", "verified": True}] download = self.create_mock_download() download.handle = Mock(is_valid=Mock(return_value=True), trackers=Mock(return_value=trackers), replace_trackers=lambda new_trackers: (trackers.clear() is trackers.extend(new_trackers))) self.download_manager.get_download = Mock(return_value=download) url = "http://127.0.0.1/announce" response = await self.endpoint.remove_tracker( GenericTrackerRequest(hexlify(download.tdef.infohash).decode(), url, "DELETE", "trackers") ) response_body_json = await response_to_json(response) self.assertEqual(200, response.status) self.assertTrue(response_body_json["removed"]) self.assertListEqual([{"url": "http://127.0.0.1/somethingelse", "verified": True}], trackers) async def test_remove_tracker_no_download(self) -> None: """ Test if removing a tracker fails when no download is found. """ self.download_manager.get_download = Mock(return_value=None) response = await self.endpoint.remove_tracker(GenericTrackerRequest("AA" * 20, "http://127.0.0.1/announce", "DELETE", "trackers")) self.assertEqual(404, response.status) async def test_remove_tracker_no_url(self) -> None: """ Test if removing a tracker fails when no tracker url is given. """ download = self.create_mock_download() download.handle = Mock(is_valid=Mock(return_value=True), trackers=Mock(return_value=[])) self.download_manager.get_download = Mock(return_value=download) response = await self.endpoint.remove_tracker(GenericTrackerRequest("AA" * 20, None, "DELETE", "trackers")) self.assertEqual(400, response.status) async def test_remove_tracker_handle_error(self) -> None: """ Test if removing a tracker fails when a libtorrent internal error occurs. 
""" trackers = [{"url": "http://127.0.0.1/somethingelse", "verified": True}, {"url": "http://127.0.0.1/announce", "verified": True}] download = self.create_mock_download() download.handle = Mock(is_valid=Mock(return_value=True), trackers=Mock(return_value=trackers), replace_trackers=Mock(side_effect=RuntimeError("invalid torrent handle used"))) self.download_manager.get_download = Mock(return_value=download) url = "http://127.0.0.1/announce" response = await self.endpoint.remove_tracker( GenericTrackerRequest(hexlify(download.tdef.infohash).decode(), url, "DELETE", "trackers") ) response_body_json = await response_to_json(response) self.assertEqual(500, response.status) self.assertEqual("invalid torrent handle used", response_body_json["error"]) async def test_tracker_force_announce(self) -> None: """ Test if trackers can be force announced. """ trackers = [{"url": "http://127.0.0.1/somethingelse", "verified": True}, {"url": "http://127.0.0.1/announce", "verified": True}] download = self.create_mock_download() download.handle = Mock(is_valid=Mock(return_value=True), trackers=Mock(return_value=trackers)) self.download_manager.get_download = Mock(return_value=download) url = "http://127.0.0.1/announce" response = await self.endpoint.tracker_force_announce( GenericTrackerRequest(hexlify(download.tdef.infohash).decode(), url, "PUT", "tracker_force_announce") ) response_body_json = await response_to_json(response) self.assertEqual(200, response.status) self.assertTrue(response_body_json["forced"]) self.assertEqual(call(0, 1), download.handle.force_reannounce.call_args) async def test_tracker_force_announce_no_download(self) -> None: """ Test if force-announcing a tracker fails when no download is found. 
""" self.download_manager.get_download = Mock(return_value=None) response = await self.endpoint.tracker_force_announce( GenericTrackerRequest("AA" * 20, "http://127.0.0.1/announce", "PUT", "tracker_force_announce") ) self.assertEqual(404, response.status) async def test_tracker_force_announce_no_url(self) -> None: """ Test if force-announcing a tracker fails when no tracker url is given. """ download = self.create_mock_download() download.handle = Mock(is_valid=Mock(return_value=True), trackers=Mock(return_value=[])) self.download_manager.get_download = Mock(return_value=download) response = await self.endpoint.tracker_force_announce(GenericTrackerRequest("AA" * 20, None, "PUT", "tracker_force_announce")) self.assertEqual(400, response.status) async def test_tracker_force_announce_handle_error(self) -> None: """ Test if force-announcing a tracker fails when a libtorrent internal error occurs. """ trackers = [{"url": "http://127.0.0.1/somethingelse", "verified": True}, {"url": "http://127.0.0.1/announce", "verified": True}] download = self.create_mock_download() download.handle = Mock(is_valid=Mock(return_value=True), trackers=Mock(return_value=trackers), force_reannounce=Mock(side_effect=RuntimeError("invalid torrent handle used"))) self.download_manager.get_download = Mock(return_value=download) url = "http://127.0.0.1/announce" response = await self.endpoint.tracker_force_announce( GenericTrackerRequest(hexlify(download.tdef.infohash).decode(), url, "PUT", "tracker_force_announce") ) response_body_json = await response_to_json(response) self.assertEqual(500, response.status) self.assertEqual("invalid torrent handle used", response_body_json["error"])
59,146
Python
.py
1,072
45.313433
119
0.649221
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,953
test_create_torrent_endpoint.py
Tribler_tribler/src/tribler/test_unit/core/libtorrent/restapi/test_create_torrent_endpoint.py
import base64 from io import StringIO from pathlib import Path from unittest.mock import AsyncMock, Mock, patch from configobj import ConfigObj from ipv8.test.base import TestBase import tribler.core.libtorrent.restapi.create_torrent_endpoint as ep_module from tribler.core.libtorrent.download_manager.download_config import SPEC_CONTENT, DownloadConfig from tribler.core.libtorrent.restapi.create_torrent_endpoint import CreateTorrentEndpoint from tribler.core.restapi.rest_endpoint import HTTP_BAD_REQUEST, HTTP_INTERNAL_SERVER_ERROR from tribler.test_unit.base_restapi import MockRequest, response_to_json from tribler.test_unit.core.libtorrent.mocks import TORRENT_WITH_DIRS_CONTENT from tribler.tribler_config import TriblerConfigManager class MockTriblerConfigManager(TriblerConfigManager): """ A memory-based TriblerConfigManager. """ def write(self) -> None: """ Don't actually write to any file. """ class CreateTorrentRequest(MockRequest): """ A MockRequest that mimics CreateTorrentRequests. """ def __init__(self, query: dict) -> None: """ Create a new CreateTorrentRequest. """ super().__init__(query, "POST", "/createtorrent") async def json(self) -> dict: """ Get the json equivalent of the query (i.e., just the query). """ return self._query class TestCreateTorrentEndpoint(TestBase): """ Tests for the CreateTorrentEndpoint class. """ def setUp(self) -> None: """ Create a mocked DownloadManager and a CreateTorrentEndpoint. """ super().setUp() self.download_manager = Mock() self.endpoint = CreateTorrentEndpoint(self.download_manager) async def test_no_files(self) -> None: """ Test if a request without files leads to a bad request status. 
""" response = await self.endpoint.create_torrent(CreateTorrentRequest({})) response_body_json = await response_to_json(response) self.assertEqual(HTTP_BAD_REQUEST, response.status) self.assertEqual("files parameter missing", response_body_json["error"]) async def test_failure_oserror(self) -> None: """ Test if processing a request that leads to an OSError is gracefully reported. """ with patch.dict(ep_module.__dict__, {"create_torrent_file": Mock(side_effect=OSError("test"))}): response = await self.endpoint.create_torrent(CreateTorrentRequest({"files": [str(Path(__file__))]})) response_body_json = await response_to_json(response) self.assertEqual(HTTP_INTERNAL_SERVER_ERROR, response.status) self.assertTrue(response_body_json["error"]["handled"]) self.assertEqual("OSError", response_body_json["error"]["code"]) self.assertEqual("test", response_body_json["error"]["message"]) async def test_failure_unicodedecodeerror(self) -> None: """ Test if processing a request that leads to an OSError is gracefully reported. """ with patch.dict(ep_module.__dict__, {"create_torrent_file": Mock(side_effect=UnicodeDecodeError("utf-8", b"", 0, 1, "𓀬"))}): response = await self.endpoint.create_torrent(CreateTorrentRequest({"files": [str(Path(__file__))]})) response_body_json = await response_to_json(response) self.assertEqual(HTTP_INTERNAL_SERVER_ERROR, response.status) self.assertTrue(response_body_json["error"]["handled"]) self.assertEqual("UnicodeDecodeError", response_body_json["error"]["code"]) async def test_failure_runtimeerror(self) -> None: """ Test if processing a request that leads to an RuntimeError is gracefully reported. 
""" with patch.dict(ep_module.__dict__, {"create_torrent_file": Mock(side_effect=RuntimeError("test"))}): response = await self.endpoint.create_torrent(CreateTorrentRequest({"files": [str(Path(__file__))]})) response_body_json = await response_to_json(response) self.assertEqual(HTTP_INTERNAL_SERVER_ERROR, response.status) self.assertTrue(response_body_json["error"]["handled"]) self.assertEqual("RuntimeError", response_body_json["error"]["code"]) self.assertEqual("test", response_body_json["error"]["message"]) async def test_create_default(self) -> None: """ Test if creating a torrent from defaults works. """ mocked_create_torrent_file = Mock(return_value={"metainfo": TORRENT_WITH_DIRS_CONTENT}) with patch.dict(ep_module.__dict__, {"create_torrent_file": mocked_create_torrent_file}): response = await self.endpoint.create_torrent(CreateTorrentRequest({"files": [str(Path(__file__))]})) response_body_json = await response_to_json(response) _, call_params, __ = mocked_create_torrent_file.call_args.args self.assertEqual(200, response.status) self.assertEqual(TORRENT_WITH_DIRS_CONTENT, base64.b64decode(response_body_json["torrent"])) self.assertFalse(call_params[b"nodes"]) self.assertFalse(call_params[b"httpseeds"]) self.assertFalse(call_params[b"encoding"]) self.assertEqual(0, call_params[b"piece length"]) async def test_create_with_comment(self) -> None: """ Test if creating a torrent with a custom comment works. 
""" mocked_create_torrent_file = Mock(return_value={"metainfo": TORRENT_WITH_DIRS_CONTENT}) with patch.dict(ep_module.__dict__, {"create_torrent_file": mocked_create_torrent_file}): response = await self.endpoint.create_torrent(CreateTorrentRequest({"files": [str(Path(__file__))], "description": "test"})) response_body_json = await response_to_json(response) _, call_params, __ = mocked_create_torrent_file.call_args.args self.assertEqual(200, response.status) self.assertEqual(TORRENT_WITH_DIRS_CONTENT, base64.b64decode(response_body_json["torrent"])) self.assertEqual(b"test", call_params[b"comment"]) async def test_create_with_trackers(self) -> None: """ Test if creating a torrent with custom trackers works. """ mocked_create_torrent_file = Mock(return_value={"metainfo": TORRENT_WITH_DIRS_CONTENT}) with patch.dict(ep_module.__dict__, {"create_torrent_file": mocked_create_torrent_file}): response = await self.endpoint.create_torrent(CreateTorrentRequest({ "files": [str(Path(__file__))], "trackers": ["http://127.0.0.1/announce", "http://10.0.0.2/announce"] })) response_body_json = await response_to_json(response) _, call_params, __ = mocked_create_torrent_file.call_args.args self.assertEqual(200, response.status) self.assertEqual(TORRENT_WITH_DIRS_CONTENT, base64.b64decode(response_body_json["torrent"])) self.assertEqual(b"http://127.0.0.1/announce", call_params[b"announce"]) self.assertEqual([b"http://127.0.0.1/announce", b"http://10.0.0.2/announce"], call_params[b"announce-list"]) async def test_create_with_name(self) -> None: """ Test if creating a torrent with a custom name works. 
""" mocked_create_torrent_file = Mock(return_value={"metainfo": TORRENT_WITH_DIRS_CONTENT}) with patch.dict(ep_module.__dict__, {"create_torrent_file": mocked_create_torrent_file}): response = await self.endpoint.create_torrent(CreateTorrentRequest({"files": [str(Path(__file__))], "name": "test"})) response_body_json = await response_to_json(response) _, call_params, __ = mocked_create_torrent_file.call_args.args self.assertEqual(200, response.status) self.assertEqual(TORRENT_WITH_DIRS_CONTENT, base64.b64decode(response_body_json["torrent"])) self.assertEqual(b"test", call_params[b"name"]) async def test_create_and_start(self) -> None: """ Test if creating and starting a download works if download is set to 1. """ self.download_manager.config = MockTriblerConfigManager() self.download_manager.start_download = AsyncMock() mocked_create = Mock(return_value={"metainfo": TORRENT_WITH_DIRS_CONTENT, "base_dir": str(Path(__file__).parent)}) with patch("tribler.core.libtorrent.download_manager.download_config.DownloadConfig.from_defaults", lambda _: DownloadConfig(ConfigObj(StringIO(SPEC_CONTENT)))), patch.dict(ep_module.__dict__, {"create_torrent_file": mocked_create}): response = await self.endpoint.create_torrent(CreateTorrentRequest({"files": [str(Path(__file__))], "download": "1"})) response_body_json = await response_to_json(response) _, tdef, __ = self.download_manager.start_download.call_args.args self.assertEqual(200, response.status) self.assertEqual(TORRENT_WITH_DIRS_CONTENT, base64.b64decode(response_body_json["torrent"])) self.assertEqual(b"\xb3\xba\x19\xc93\xda\x95\x84k\xfd\xf7Z\xd0\x8a\x94\x9cl\xea\xc7\xbc", tdef.infohash)
9,680
Python
.py
164
47.823171
116
0.639241
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,954
test_dht_health_manager.py
Tribler_tribler/src/tribler/test_unit/core/libtorrent/download_manager/test_dht_health_manager.py
from asyncio import Future from binascii import unhexlify from unittest.mock import Mock from ipv8.test.base import TestBase from tribler.core.libtorrent.download_manager.dht_health_manager import DHTHealthManager class TestDHTHealthManager(TestBase): """ Tests for the DHTHealthManager class. """ bf_contents = unhexlify("F6C3F5EAA07FFD91BDE89F777F26FB2BFF37BDB8FB2BBAA2FD3DDDE7BACFFF75EE7CCBAE" "FE5EEDB1FBFAFF67F6ABFF5E43DDBCA3FD9B9FFDF4FFD3E9DFF12D1BDF59DB53DBE9FA5B" "7FF3B8FDFCDE1AFB8BEDD7BE2F3EE71EBBBFE93BCDEEFE148246C2BC5DBFF7E7EFDCF24F" "D8DC7ADFFD8FFFDFDDFFF7A4BBEEDF5CB95CE81FC7FCFF1FF4FFFFDFE5F7FDCBB7FD79B3" "FA1FC77BFE07FFF905B7B7FFC7FEFEFFE0B8370BB0CD3F5B7F2BD93FEB4386CFDD6F7FD5" "BFAF2E9EBFFFFEECD67ADBF7C67F17EFD5D75EBA6FFEBA7FFF47A91EB1BFBB53E8ABFB57" "62ABE8FF237279BFEFBFEEF5FFC5FEBFDFE5ADFFADFEE1FB737FFFFBFD9F6AEFFEEE76B6" "FD8F72EF") def setUp(self) -> None: """ Create a mocked DHTHealthManager. """ super().setUp() self.manager = DHTHealthManager(lt_session=Mock()) async def tearDown(self) -> None: """ Shut down the health manager. """ await self.manager.shutdown_task_manager() await super().tearDown() async def test_get_health(self) -> None: """ Test if the health of a trackerless torrent can be fetched. """ health = await self.manager.get_health(b"a" * 20, timeout=0.01) self.assertEqual(b"a" * 20, health.infohash) async def test_existing_get_health(self) -> None: """ Test if the same future is returned for the same query. """ lookup_future = self.manager.get_health(b"a" * 20, timeout=0.01) self.assertEqual(lookup_future, self.manager.get_health(b"a" * 20, timeout=0.01)) await lookup_future async def test_combine_bloom_filters_equal(self) -> None: """ Test if two bloom equal filters can be combined. """ bf1 = bytearray(b"a" * 256) bf2 = bytearray(b"a" * 256) self.assertEqual(bf1, self.manager.combine_bloomfilters(bf1, bf2)) async def test_combine_bloom_filters_different(self) -> None: """ Test if two bloom different filters can be combined. 
""" bf1 = bytearray(b"\0" * 256) bf2 = bytearray(b"b" * 256) self.assertEqual(bf2, self.manager.combine_bloomfilters(bf1, bf2)) def test_get_size_from_bloom_filter(self) -> None: """ Test if we can successfully estimate the size from a bloom filter. See http://www.bittorrent.org/beps/bep_0033.html """ bf = bytearray(TestDHTHealthManager.bf_contents) self.assertEqual(1224, self.manager.get_size_from_bloomfilter(bf)) def test_get_size_from_bloom_filter_maximum(self) -> None: """ Test if we can successfully estimate the size of a bloom filter at maximum capacity. """ bf = bytearray(b"\xff" * 256) self.assertEqual(6000, self.manager.get_size_from_bloomfilter(bf)) def test_receive_bloomfilters_nothing(self) -> None: """ Test if just receiving a transactions id does nothing to the bloom filter. """ self.manager.received_bloomfilters("1") self.assertEqual({}, self.manager.bf_seeders) self.assertEqual({}, self.manager.bf_peers) def test_receive_bloomfilters(self) -> None: """ Test if the right operations happens when receiving a bloom filter. """ infohash = b"a" * 20 self.manager.lookup_futures[infohash] = Future() self.manager.bf_seeders[infohash] = bytearray(256) self.manager.bf_peers[infohash] = bytearray(256) self.manager.requesting_bloomfilters("1", infohash) self.manager.received_bloomfilters("1", bf_seeds=bytearray(b"\xee" * 256), bf_peers=bytearray(b"\xff" * 256)) self.assertEqual(bytearray(b"\xee" * 256), self.manager.bf_seeders[infohash]) self.assertEqual(bytearray(b"\xff" * 256), self.manager.bf_peers[infohash])
4,381
Python
.py
90
38.222222
102
0.644866
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,955
test_stream.py
Tribler_tribler/src/tribler/test_unit/core/libtorrent/download_manager/test_stream.py
from __future__ import annotations from io import BytesIO, StringIO from pathlib import Path from unittest.mock import AsyncMock, Mock, call from configobj import ConfigObj from ipv8.test.base import TestBase from validate import Validator from tribler.core.libtorrent.download_manager.download import Download from tribler.core.libtorrent.download_manager.download_config import SPEC_CONTENT, DownloadConfig from tribler.core.libtorrent.download_manager.stream import Stream, StreamChunk from tribler.core.libtorrent.torrentdef import TorrentDef from tribler.test_unit.core.libtorrent.mocks import TORRENT_WITH_DIRS_CONTENT class MockStreamChunk(StreamChunk): """ StreamChunk with a mocked open method. """ async def open(self) -> None: """ Fake opening a file. """ if self.file is None: self.file = BytesIO() self.file.seek(0) class TestStreamChunk(TestBase): """ Tests for the StreamChunk class. """ def setUp(self) -> None: """ Create a new mocked stream chunk. """ super().setUp() self.chunk = MockStreamChunk(Mock(), 0) self.chunk.stream.cursorpiecemap = {0: [False, []]} def create_mock_content(self, content: bytes, piece_length: int = 1) -> None: """ Set the value of the stream to certain content. """ content_end = len(content) // piece_length self.chunk.stream.updateprios = AsyncMock() self.chunk.stream.iterpieces = lambda have, startfrom: list(range(startfrom, content_end)) self.chunk.stream.pieceshave = [True] * content_end self.chunk.stream.lastpiece = content_end self.chunk.stream.bytetopiece = lambda x: x // piece_length self.chunk.stream.prebuffsize = 1 self.chunk.stream.piecelen = piece_length self.chunk.file = BytesIO() self.chunk.file.write(b"content") self.chunk.file.seek(0) async def test_stream_error(self) -> None: """ Test if a chunk context closes the chunk when an exception occurs. 
""" with self.assertRaises(RuntimeError): async with self.chunk: raise RuntimeError self.assertIsNone(self.chunk.file) self.assertTrue(self.chunk.isclosed) async def test_read_empty(self) -> None: """ Test if all bytes can be read from a chunk when it has no data. """ self.chunk.stream.updateprios = AsyncMock() self.chunk.stream.iterpieces = Mock(return_value=[]) self.chunk.stream.lastpiece = 0 self.chunk.stream.bytetopiece = Mock(return_value=-1) async with self.chunk: value = await self.chunk.read() self.assertEqual(b"", value) async def test_read_single_piece(self) -> None: """ Test if all bytes can be read from a chunk when its data is in a single piece. """ self.create_mock_content(b"content", len(b"content")) async with self.chunk: streamed = await self.chunk.read() self.assertEqual(b"content", streamed) async def test_read_multiple_pieces(self) -> None: """ Test if all bytes can be read from a chunk when its data is in multiple pieces. """ self.create_mock_content(b"content", 1) streamed = b"" async with self.chunk: for _ in range(len(b"content")): streamed += await self.chunk.read() self.assertEqual(b"content", streamed) async def test_seek(self) -> None: """ Test if we can seek to a certain piece. """ self.create_mock_content(b"content", 1) value = await self.chunk.seek(3) streamed = b"" async with self.chunk: for _ in range(len(b"tent")): streamed += await self.chunk.read() self.assertEqual([3], value) self.assertEqual(b"tent", streamed) def test_pause(self) -> None: """ Test if the chunk can tell its stream to pause. """ self.chunk.stream.cursorpiecemap[1] = [False, [0]] result = self.chunk.pause() self.assertTrue(result) self.assertTrue(self.chunk.stream.cursorpiecemap[0][0]) def test_pause_no_next(self) -> None: """ Test if the chunk cannot tell its stream to pause if it is at the cursor position. 
""" result = self.chunk.pause() self.assertFalse(result) def test_pause_already_paused(self) -> None: """ Test if the chunk cannot tell its stream to pause if its chunk is already paused. """ self.chunk.stream.cursorpiecemap[0][0] = True result = self.chunk.pause() self.assertFalse(result) def test_resume(self) -> None: """ Test if the chunk can tell its stream to resume. """ self.chunk.stream.cursorpiecemap[0] = [True, [0]] self.chunk.stream.cursorpiecemap[1] = [True, [1]] result = self.chunk.resume() self.assertTrue(result) self.assertFalse(self.chunk.stream.cursorpiecemap[0][0]) def test_resume_no_next(self) -> None: """ Test if the chunk can tell its stream to resume even if it is at the cursor position. """ self.chunk.stream.cursorpiecemap[0] = [True, []] result = self.chunk.resume() self.assertTrue(result) def test_resume_not_paused(self) -> None: """ Test if the chunk cannot tell its stream to resume if its chunk is not paused. """ result = self.chunk.resume() self.assertFalse(result) class TestStream(TestBase): """ Tests for the Stream class. """ def create_mock_download(self) -> Download: """ Create a mocked DownloadConfig. """ defaults = ConfigObj(StringIO(SPEC_CONTENT)) conf = ConfigObj() conf.configspec = defaults conf.validate(Validator()) config = DownloadConfig(conf) config.set_dest_dir(Path("")) download = Download(TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT), None, config, checkpoint_disabled=True) download.handle = Mock(is_valid=Mock(return_value=True), file_priorities=Mock(return_value=[0] * 6), torrent_file=Mock(return_value=download.tdef.torrent_info)) download.lt_status = Mock(state=3, paused=False, pieces=[]) return download def convert_to_piece_size(self, download: Download, piece_size: int) -> None: """ Convert the mock download to use a different piece size. Don't call this with anything other than the download from create_mock_download(). The pieces are 20 byte hashes for the number of pieces in a 6-byte file per 6 files. 
""" download.tdef.metainfo[b'info'][b'piece length'] = piece_size download.tdef.torrent_parameters[b'piece length'] = piece_size download.tdef.metainfo[b'info'][b'pieces'] = b"\x01" * 20 * (6 // piece_size) * 6 download.tdef.invalidate_torrent_info() async def test_enable(self) -> None: """ Test if a stream can be enabled without known pieces. """ stream = Stream(self.create_mock_download()) await stream.enable(fileindex=0) self.assertEqual(Path("torrent_create") / "abc" / "file2.txt", stream.filename) self.assertEqual(0, stream.firstpiece) self.assertEqual(-1, stream.lastpiece) async def test_enable_have_pieces(self) -> None: """ Test if a stream can be enabled when we already have pieces. """ download = self.create_mock_download() download.lt_status.pieces = [False] stream = Stream(download) await stream.enable(fileindex=0) self.assertEqual(Path("torrent_create") / "abc" / "file2.txt", stream.filename) self.assertEqual(0, stream.firstpiece) self.assertEqual(0, stream.lastpiece) async def test_bytes_to_pieces(self) -> None: """ Test if we can get the pieces from byte indices. """ download = self.create_mock_download() download.lt_status.pieces = [False, True] self.convert_to_piece_size(download, 3) stream = Stream(download) await stream.enable(fileindex=0) for i in range(7): self.assertEqual(i // 3, stream.bytetopiece(i)) self.assertEqual([0, 1], stream.bytestopieces(0, 6)) async def test_calculateprogress_empty(self) -> None: """ Test if progress can be calculated without requested pieces. """ download = self.create_mock_download() download.lt_status.pieces = [False] stream = Stream(download) await stream.enable(fileindex=0) self.assertEqual(1.0, stream.calculateprogress([], False)) async def test_calculateprogress_not_done(self) -> None: """ Test if progress can be calculated without for a given piece that is not done. 
""" download = self.create_mock_download() download.lt_status.pieces = [False] stream = Stream(download) await stream.enable(fileindex=0) self.assertEqual(0.0, stream.calculateprogress([0], False)) async def test_calculateprogress_done(self) -> None: """ Test if progress can be calculated without for a given piece that is done. """ download = self.create_mock_download() download.lt_status.pieces = [True] stream = Stream(download) await stream.enable(fileindex=0) self.assertEqual(1.0, stream.calculateprogress([0], False)) async def test_calculateprogress_partial(self) -> None: """ Test if progress can be calculated without for two pieces of which one is done and one is not. """ download = self.create_mock_download() download.lt_status.pieces = [False, True] self.convert_to_piece_size(download, 3) stream = Stream(download) await stream.enable(fileindex=0) self.assertEqual(0.5, stream.calculateprogress([0, 1], False)) async def test_updateprios_no_headers_all_missing(self) -> None: """ Test if priorities are set to retrieve missing pieces. """ download = self.create_mock_download() download.lt_status.pieces = [False] * 12 self.convert_to_piece_size(download, 3) download.handle.piece_priorities = Mock(return_value=[0] * 12) # 6 files, 2 pieces per file stream = Stream(download) await stream.enable(fileindex=0) await stream.updateprios() self.assertEqual(call([7, 7, 7] + [0] * 9), download.handle.prioritize_pieces.call_args) async def test_updateprios_no_headers_single_missing(self) -> None: """ Test if priorities are set to retrieve a missing piece. 
""" download = self.create_mock_download() download.lt_status.pieces = [False] + [True] * 11 self.convert_to_piece_size(download, 3) download.handle.piece_priorities = Mock(return_value=[0] * 12) # 6 files, 2 pieces per file stream = Stream(download) await stream.enable(fileindex=0) await stream.updateprios() self.assertEqual(call([7] + [0] * 11), download.handle.prioritize_pieces.call_args) async def test_updateprios_no_headers_sparse(self) -> None: """ Test if priorities can be updated for sparesely retrieved pieces. """ download = self.create_mock_download() download.lt_status.pieces = [False, True] * 6 self.convert_to_piece_size(download, 3) download.handle.piece_priorities = Mock(return_value=[0] * 12) # 6 files, 2 pieces per file stream = Stream(download) await stream.enable(fileindex=0) await stream.updateprios() self.assertEqual(call([7, 0, 7, 0] + [0] * 8), download.handle.prioritize_pieces.call_args) async def test_updateprios_headers_all_missing(self) -> None: """ Test if priorities are set to retrieve missing headers before anything else. """ download = self.create_mock_download() download.lt_status.pieces = [False] * 12 self.convert_to_piece_size(download, 3) download.handle.piece_priorities = Mock(return_value=[0] * 12) # 6 files, 2 pieces per file stream = Stream(download) await stream.enable(fileindex=0) stream.headerpieces = [0] stream.footerpieces = [11] await stream.updateprios() self.assertEqual(call([7] + [0] * 11), download.handle.prioritize_pieces.call_args) async def test_updateprios_footer_missing(self) -> None: """ Test if priorities are set to retrieve the first pieces with less priority if the footer is missing. 
""" download = self.create_mock_download() download.lt_status.pieces = [True] + [False] * 11 self.convert_to_piece_size(download, 3) download.handle.piece_priorities = Mock(return_value=[0] * 12) # 6 files, 2 pieces per file stream = Stream(download) await stream.enable(fileindex=0) stream.headerpieces = [0] stream.footerpieces = [11] await stream.updateprios() self.assertEqual(call([0, 1, 1] + [0] * 9), download.handle.prioritize_pieces.call_args) async def test_updateprios_header_footer_available(self) -> None: """ Test if priorities are set to retrieve the first pieces with less priority if the header and footer are there. """ download = self.create_mock_download() download.lt_status.pieces = [True] + [False] * 10 + [True] self.convert_to_piece_size(download, 3) download.handle.piece_priorities = Mock(return_value=[0] * 12) # 6 files, 2 pieces per file stream = Stream(download) await stream.enable(fileindex=0) stream.headerpieces = [0] stream.footerpieces = [11] await stream.updateprios() self.assertEqual(call([0, 1, 1] + [0] * 9), download.handle.prioritize_pieces.call_args) async def test_updateprios_header_footer_prebuffer(self) -> None: """ Test if priorities are set to retrieve the prebuffer pieces with higher priority. """ download = self.create_mock_download() download.lt_status.pieces = [True] + [False] * 10 + [True] self.convert_to_piece_size(download, 3) download.handle.piece_priorities = Mock(return_value=[0] * 12) # 6 files, 2 pieces per file stream = Stream(download) await stream.enable(fileindex=0) stream.headerpieces = [0] stream.prebuffpieces = [2, 3, 4] stream.footerpieces = [11] await stream.updateprios() self.assertEqual(call([0, 1, 7] + [0] * 9), download.handle.prioritize_pieces.call_args) async def test_resetprios_default(self) -> None: """ Test if streams can be reset to the default priority (4) for all pieces. 
""" download = self.create_mock_download() download.handle.piece_priorities = Mock(return_value=[2]) stream = Stream(download) await stream.enable(fileindex=0) stream.resetprios() self.assertEqual(call([4]), download.handle.prioritize_pieces.call_args) async def test_resetprios_prio(self) -> None: """ Test if streams can be reset to the given priority for all pieces. """ download = self.create_mock_download() download.handle.piece_priorities = Mock(return_value=[2]) stream = Stream(download) await stream.enable(fileindex=0) stream.resetprios(prio=6) self.assertEqual(call([6]), download.handle.prioritize_pieces.call_args) async def test_resetprios_given(self) -> None: """ Test if streams can be reset to the given priority for a given pieces. """ download = self.create_mock_download() download.handle.piece_priorities = Mock(return_value=[2]) stream = Stream(download) await stream.enable(fileindex=0) stream.resetprios(pieces=[0], prio=6) self.assertEqual(call([6]), download.handle.prioritize_pieces.call_args)
16,460
Python
.py
364
36.491758
118
0.638358
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,956
test_download_state.py
Tribler_tribler/src/tribler/test_unit/core/libtorrent/download_manager/test_download_state.py
from io import StringIO from pathlib import Path from unittest.mock import Mock import libtorrent from configobj import ConfigObj from ipv8.test.base import TestBase from tribler.core.libtorrent.download_manager.download import Download from tribler.core.libtorrent.download_manager.download_config import SPEC_CONTENT, DownloadConfig from tribler.core.libtorrent.download_manager.download_state import DOWNLOAD, UPLOAD, DownloadState, DownloadStatus from tribler.core.libtorrent.torrentdef import TorrentDef, TorrentDefNoMetainfo from tribler.test_unit.core.libtorrent.mocks import TORRENT_WITH_DIRS_CONTENT class TestDownloadState(TestBase): """ Tests for the DownloadState class. """ base_peer_info = {"client": "unknown", "pid": Mock(to_bytes=Mock(return_value=b"\x01")), "ip": ("127.0.0.1", 42), "flags":0, "local_connection": 1, "remote_interested": 0, "remote_choked": 0, "upload_only": 1, "upload_queue_length": 1, "used_send_buffer": 1, "interesting": 1, "choked": 0, "seed": 1} def test_initialize(self) -> None: """ Test if DownloadState gets properly initialized from a download without a status. 
""" download = Download(TorrentDefNoMetainfo(b"\x01" * 20, b"name", None), None, checkpoint_disabled=True, config=DownloadConfig(ConfigObj(StringIO(SPEC_CONTENT)))) download_state = DownloadState(download, None, None) self.assertEqual(download, download_state.get_download()) self.assertEqual(0, download_state.get_progress()) self.assertIsNone(download_state.get_error()) self.assertEqual(0, download_state.get_current_speed(UPLOAD)) self.assertEqual(0, download_state.total_upload) self.assertEqual(0, download_state.total_download) self.assertEqual(0, download_state.total_payload_download) self.assertEqual(0, download_state.total_payload_upload) self.assertEqual(0, download_state.all_time_upload) self.assertEqual(0, download_state.all_time_download) self.assertEqual((0, 0), download_state.get_num_seeds_peers()) self.assertEqual([], download_state.get_peer_list()) def test_initialize_with_status(self) -> None: """ Test if DownloadState gets properly initialized from a download with a status. """ download = Download(TorrentDefNoMetainfo(b"\x01" * 20, b"name", None), None, checkpoint_disabled=True, config=DownloadConfig(ConfigObj(StringIO(SPEC_CONTENT)))) download_state = DownloadState(download, libtorrent.torrent_status(), None) self.assertEqual(DownloadStatus.HASHCHECKING, download_state.get_status()) self.assertEqual(0, download_state.get_current_speed(UPLOAD)) self.assertEqual(0, download_state.get_current_speed(DOWNLOAD)) self.assertEqual(0, download_state.get_eta()) self.assertEqual((0, 0), download_state.get_num_seeds_peers()) self.assertEqual([], download_state.get_pieces_complete()) self.assertEqual((0, 0), download_state.get_pieces_total_complete()) self.assertEqual(0, download_state.get_seeding_time()) self.assertEqual([], download_state.get_peer_list()) def test_initialize_with_mocked_status(self) -> None: """ Test if DownloadState gets properly initialized from a download with a mocked status. 
""" download = Download(TorrentDefNoMetainfo(b"\x01" * 20, b"name", None), None, checkpoint_disabled=True, config=DownloadConfig(ConfigObj(StringIO(SPEC_CONTENT)))) download.config.set_selected_files(["test"]) download_state = DownloadState(download, Mock(num_pieces=6, pieces=[1, 1, 1, 0, 0, 0], progress=0.75, total_upload=100, total_download=200, total_payload_upload=30, total_payload_download=100, all_time_upload=200, all_time_download=1000), None) self.assertEqual([1, 1, 1, 0, 0, 0], download_state.get_pieces_complete()) self.assertEqual((6, 3), download_state.get_pieces_total_complete()) self.assertEqual(["test"], download_state.get_selected_files()) self.assertEqual(0.75, download_state.get_progress()) self.assertEqual(100, download_state.total_upload) self.assertEqual(200, download_state.total_download) self.assertEqual(30, download_state.total_payload_upload) self.assertEqual(100, download_state.total_payload_download) self.assertEqual(200, download_state.all_time_upload) self.assertEqual(1000, download_state.all_time_download) def test_all_time_ratio_no_lt_status(self) -> None: """ Test if the all-time ratio is 0 when the libtorrent status is None. """ state = DownloadState(Mock(), None, None) self.assertEqual(0, state.get_all_time_ratio()) def test_all_time_ratio(self) -> None: """ Test if the all-time ratio is the fraction of the all-time up and down. """ state = DownloadState(Mock(), Mock(all_time_upload=200, all_time_download=1000), None) self.assertEqual(0.2, state.get_all_time_ratio()) def test_all_time_ratio_no_all_time_download(self) -> None: """ Test if the all-time ratio is 0 when the all-time up and down are both 0. """ state = DownloadState(Mock(), Mock(all_time_upload=0, all_time_download=0), None) self.assertEqual(0, state.get_all_time_ratio()) def test_all_time_ratio_no_all_time_download_inf(self) -> None: """ Test if the all-time ratio is 0 when the all-time download is 0. 
""" state = DownloadState(Mock(), Mock(all_time_upload=200, all_time_download=0), None) self.assertEqual(-1, state.get_all_time_ratio()) def test_get_files_completion(self) -> None: """ Testing if the right completion of files is returned. Each file is 6 bytes, so a file progress of 3 bytes is 0.5 completion. """ download = Download(TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT), None, checkpoint_disabled=True, config=DownloadConfig(ConfigObj(StringIO(SPEC_CONTENT)))) download.handle = Mock(is_valid=Mock(return_value=True), file_progress=Mock(return_value=[3] * 6)) download_state = DownloadState(download, Mock(), None) self.assertEqual([(Path('abc/file2.txt'), 0.5), (Path('abc/file3.txt'), 0.5), (Path('abc/file4.txt'), 0.5), (Path('def/file6.avi'), 0.5), (Path('def/file5.txt'), 0.5), (Path('file1.txt'), 0.5)], download_state.get_files_completion()) def test_get_files_completion_no_progress(self) -> None: """ Testing if file progress is not given if no file progress is available. """ download = Download(TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT), None, checkpoint_disabled=True, config=DownloadConfig(ConfigObj(StringIO(SPEC_CONTENT)))) download.handle = Mock(is_valid=Mock(return_value=True), file_progress=Mock(return_value=[])) download_state = DownloadState(download, Mock(), None) self.assertEqual([], download_state.get_files_completion()) def test_get_files_completion_zero_length_file(self) -> None: """ Testing if file progress is 100% for a file of 0 bytes. 
""" download = Download(TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT), None, checkpoint_disabled=True, config=DownloadConfig(ConfigObj(StringIO(SPEC_CONTENT)))) for file_spec in download.tdef.metainfo[b"info"][b"files"]: file_spec[b"length"] = 0 download.handle = Mock(is_valid=Mock(return_value=True), file_progress=Mock(return_value=[])) download_state = DownloadState(download, Mock(), None) for _, progress in download_state.get_files_completion(): self.assertEqual(1.0, progress) def test_get_availability_incomplete(self) -> None: """ Testing if the right availability of a file is returned if another peer has no pieces. """ download = Download(TorrentDefNoMetainfo(b"\x01" * 20, b"name", None), None, checkpoint_disabled=True, config=DownloadConfig(ConfigObj(StringIO(SPEC_CONTENT)))) download.handle = Mock(is_valid=Mock(return_value=True), file_progress=Mock(return_value=[]), get_peer_info=Mock(return_value=[Mock(**TestDownloadState.base_peer_info, pieces=[False] * 6, completed=0)])) download_state = DownloadState(download, Mock(pieces=[]), 0.6) self.assertEqual(0, download_state.get_availability()) def test_get_availability_complete(self) -> None: """ Testing if the right availability of a file is returned if another peer has all pieces. """ download = Download(TorrentDefNoMetainfo(b"\x01" * 20, b"name", None), None, checkpoint_disabled=True, config=DownloadConfig(ConfigObj(StringIO(SPEC_CONTENT)))) download.handle = Mock(is_valid=Mock(return_value=True), file_progress=Mock(return_value=[]), get_peer_info=Mock(return_value=[Mock(**TestDownloadState.base_peer_info, pieces=[True] * 6, completed=1)])) download_state = DownloadState(download, Mock(pieces=[]), 0.6) self.assertEqual(1.0, download_state.get_availability()) def test_get_availability_mixed(self) -> None: """ Testing if the right availability of a file is returned if one peer is complete and the other is not. 
""" download = Download(TorrentDefNoMetainfo(b"\x01" * 20, b"name", None), None, checkpoint_disabled=True, config=DownloadConfig(ConfigObj(StringIO(SPEC_CONTENT)))) download.handle = Mock(is_valid=Mock(return_value=True), file_progress=Mock(return_value=[]), get_peer_info=Mock(return_value=[Mock(**TestDownloadState.base_peer_info, pieces=[True] * 6, completed=1), Mock(**TestDownloadState.base_peer_info, pieces=[False] * 6, completed=0)])) download_state = DownloadState(download, Mock(pieces=[]), 0.6) self.assertEqual(1.0, download_state.get_availability()) def test_get_files_completion_semivalid_handle(self) -> None: """ Testing whether no file completion is returned for valid handles that have invalid file_progress. This case mirrors https://github.com/Tribler/tribler/issues/6454 """ download = Download(TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT), None, checkpoint_disabled=True, config=DownloadConfig(ConfigObj(StringIO(SPEC_CONTENT)))) download.handle = Mock(is_valid=Mock(return_value=True), file_progress=Mock(side_effect=RuntimeError("invalid torrent handle used"))) download_state = DownloadState(download, Mock(), None) self.assertEqual([], download_state.get_files_completion())
11,705
Python
.py
178
52.898876
117
0.636498
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,957
test_download_config.py
Tribler_tribler/src/tribler/test_unit/core/libtorrent/download_manager/test_download_config.py
from io import StringIO from pathlib import Path from unittest.mock import Mock, call, patch from configobj import ConfigObj from ipv8.test.base import TestBase from validate import Validator from tribler.core.libtorrent.download_manager.download_config import SPEC_CONTENT, DownloadConfig class TestDownloadConfig(TestBase): """ Tests for the DownloadConfig class. """ def setUp(self) -> None: """ Create a new config with a memory spec. """ defaults = ConfigObj(StringIO(SPEC_CONTENT)) conf = ConfigObj() conf.configspec = defaults conf.validate(Validator()) self.download_config = DownloadConfig(conf) self.download_config.set_dest_dir(Path("")) def test_set_dest_dir(self) -> None: """ Test if the destination directory is correctly. """ self.download_config.set_dest_dir(Path("bla")) self.assertEqual(Path("bla"), self.download_config.get_dest_dir()) def test_set_hops(self) -> None: """ Test if the hops are correctly. """ self.download_config.set_hops(4) self.assertEqual(4, self.download_config.get_hops()) def test_set_safe_seeding(self) -> None: """ Test if safe seeding setting is set correctly. """ self.download_config.set_safe_seeding(False) self.assertFalse(self.download_config.get_safe_seeding()) def test_set_selected_files(self) -> None: """ Test if the selected files are set correctly. """ self.download_config.set_selected_files([1]) self.assertEqual([1], self.download_config.get_selected_files()) def test_set_bootstrap_download(self) -> None: """ Test if the bootstrap download flag is set correctly. """ self.download_config.set_bootstrap_download(True) self.assertTrue(self.download_config.get_bootstrap_download()) def test_set_user_stopped(self) -> None: """ Test if the user stopped flag is set correctly. """ self.download_config.set_user_stopped(False) self.assertFalse(self.download_config.get_user_stopped()) def test_downloadconfig_copy(self) -> None: """ Test if the download config can be copied. 
""" dlcfg_copy = self.download_config.copy() self.assertEqual(0, dlcfg_copy.get_hops()) self.assertEqual(self.download_config.get_dest_dir(), dlcfg_copy.get_dest_dir()) def test_download_save_load(self) -> None: """ Test if configs can be written. """ fake_write = Mock() with patch.object(self.download_config.config, "write", fake_write): self.download_config.write(Path("fake_output")) self.assertEqual(call(), fake_write.call_args)
2,860
Python
.py
72
31.916667
97
0.648228
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,958
test_download.py
Tribler_tribler/src/tribler/test_unit/core/libtorrent/download_manager/test_download.py
from asyncio import Future, ensure_future, sleep from binascii import hexlify from io import StringIO from pathlib import Path from unittest.mock import Mock, call, patch import libtorrent from configobj import ConfigObj from ipv8.test.base import TestBase from ipv8.util import succeed from validate import Validator import tribler from tribler.core.libtorrent.download_manager.download import Download, IllegalFileIndex, SaveResumeDataError from tribler.core.libtorrent.download_manager.download_config import SPEC_CONTENT, DownloadConfig from tribler.core.libtorrent.torrentdef import TorrentDef, TorrentDefNoMetainfo from tribler.core.notifier import Notification, Notifier from tribler.test_unit.core.libtorrent.mocks import TORRENT_UBUNTU_FILE_CONTENT, TORRENT_WITH_DIRS_CONTENT class PermissionErrorDownloadConfig(DownloadConfig): """ A mocked DownloadConfig that raises a PermissionError on write. """ def write(self, filename: Path) -> None: """ Perform a crash. """ self.config["TEST_CRASH"] = True raise PermissionError class TestDownload(TestBase): """ Tests for the Download class. """ def create_mock_download_config(self) -> DownloadConfig: """ Create a mocked DownloadConfig. """ defaults = ConfigObj(StringIO(SPEC_CONTENT)) conf = ConfigObj() conf.configspec = defaults conf.validate(Validator()) config = DownloadConfig(conf) config.set_dest_dir(Path("")) return config def test_download_get_magnet_link_no_handle(self) -> None: """ Test if a download without a handle does not have a magnet link. """ download = Download(TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT), None, checkpoint_disabled=True, config=self.create_mock_download_config()) self.assertIsNone(download.get_magnet_link()) def test_download_get_atp(self) -> None: """ Test if the atp can be retrieved from a download. 
""" download = Download(TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT), None, checkpoint_disabled=True, config=self.create_mock_download_config()) atp = download.get_atp() self.assertEqual(".", atp["save_path"]) self.assertIn("flags", atp) self.assertIn("storage_mode", atp) self.assertIn("ti", atp) def test_download_resume(self) -> None: """ Test if a download can be resumed. """ download = Download(TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT), None, checkpoint_disabled=True, config=self.create_mock_download_config()) download.handle = Mock(is_valid=Mock(return_value=True)) download.resume() self.assertFalse(download.config.get_user_stopped()) self.assertEqual(call(False), download.handle.set_upload_mode.call_args) self.assertEqual(call(), download.handle.resume.call_args) async def test_save_resume(self) -> None: """ Test if a download is resumed after fetching the save/resume data. """ download = Download(TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT), None, checkpoint_disabled=True, config=self.create_mock_download_config()) download.futures["save_resume_data"] = succeed(True) future = ensure_future(download.save_resume_data()) while "save_resume_data_alert" not in download.futures: await sleep(0) download.process_alert(Mock(), "save_resume_data_alert") await future self.assertTrue(future.done()) def test_move_storage(self) -> None: """ Test if storage can be moved. """ download = Download(TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT), None, checkpoint_disabled=True, config=self.create_mock_download_config()) download.handle = Mock(is_valid=Mock(return_value=True)) success = download.move_storage(Path("some_path")) self.assertTrue(success) self.assertEqual(Path("some_path"), download.config.get_dest_dir()) self.assertEqual(call("some_path"), download.handle.move_storage.call_args) def test_move_storage_no_metainfo(self) -> None: """ Test if storage is not moved for torrents without metainfo. 
""" download = Download(TorrentDefNoMetainfo(b"\x01" * 20, b"name"), None, checkpoint_disabled=True, config=self.create_mock_download_config()) download.handle = Mock(is_valid=Mock(return_value=True)) success = download.move_storage(Path("some_path")) self.assertTrue(success) self.assertEqual(Path("some_path"), download.config.get_dest_dir()) self.assertIsNone(download.handle.move_storage.call_args) async def test_save_checkpoint_disabled(self) -> None: """ Test if checkpoints are not saved if checkpointing is disabled. """ download = Download(TorrentDefNoMetainfo(b"\x01" * 20, b"name"), None, checkpoint_disabled=True, config=self.create_mock_download_config()) download.handle = Mock(is_valid=Mock(return_value=False)) value = await download.checkpoint() self.assertIsNone(value) async def test_save_checkpoint_handle_no_data(self) -> None: """ Test if checkpoints are not saved if the handle specifies that it does not need resume data. """ download = Download(TorrentDefNoMetainfo(b"\x01" * 20, b"name"), None, checkpoint_disabled=True, config=self.create_mock_download_config()) download.checkpoint_disabled = False download.handle = Mock(is_valid=Mock(return_value=True), need_save_resume_data=Mock(return_value=False)) value = await download.checkpoint() self.assertIsNone(value) async def test_save_checkpoint_no_handle_no_existing(self) -> None: """ Test if checkpoints are saved for torrents without a handle and no existing checkpoint file. 
""" alerts = [] download = Download(TorrentDefNoMetainfo(b"\x01" * 20, b"name"), None, checkpoint_disabled=True, config=self.create_mock_download_config()) download.checkpoint_disabled = False download.download_manager = Mock(get_checkpoint_dir=Mock(return_value=Path("foo"))) download.alert_handlers["save_resume_data_alert"] = [alerts.append] with patch.dict(tribler.core.libtorrent.download_manager.download.__dict__, {"Path": Mock(return_value=Mock(is_file=Mock(return_value=False)))}): value = await download.checkpoint() self.assertIsNone(value) self.assertEqual(None, alerts[0].category()) self.assertEqual(b"libtorrent resume file", alerts[0].resume_data[b"file-format"]) self.assertEqual(1, alerts[0].resume_data[b"file-version"]) self.assertEqual(b"\x01" * 20, alerts[0].resume_data[b"info-hash"]) async def test_save_checkpoint_no_handle_existing(self) -> None: """ Test if existing checkpoints are not overwritten by checkpoints without data. """ alerts = [] download = Download(TorrentDefNoMetainfo(b"\x01" * 20, b"name"), None, checkpoint_disabled=True, config=self.create_mock_download_config()) download.checkpoint_disabled = False download.download_manager = Mock(get_checkpoint_dir=Mock(return_value=Path("foo"))) download.alert_handlers["save_resume_data_alert"] = [alerts.append] with patch.dict(tribler.core.libtorrent.download_manager.download.__dict__, {"Path": Mock(return_value=Mock(is_file=Mock(return_value=True)))}): value = await download.checkpoint() self.assertIsNone(value) self.assertEqual([], alerts) def test_selected_files_default(self) -> None: """ Test if the default selected files are no files. 
""" download = Download(TorrentDefNoMetainfo(b"\x01" * 20, b"name"), None, checkpoint_disabled=True, config=self.create_mock_download_config()) download.handle = Mock(file_priorities=Mock(return_value=[0, 0])) self.assertEqual([], download.config.get_selected_files()) self.assertEqual([0, 0], download.get_file_priorities()) def test_selected_files_last(self) -> None: """ Test if the last selected file in a list of files gets correctly selected. """ download = Download(TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT), None, checkpoint_disabled=True, config=self.create_mock_download_config()) download.handle = Mock(file_priorities=Mock(return_value=[0, 4])) download.set_selected_files([1]) self.assertEqual([1], download.config.get_selected_files()) self.assertEqual([0, 4], download.get_file_priorities()) def test_selected_files_first(self) -> None: """ Test if the first selected file in a list of files gets correctly selected. """ download = Download(TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT), None, checkpoint_disabled=True, config=self.create_mock_download_config()) download.handle = Mock(file_priorities=Mock(return_value=[4, 0])) download.set_selected_files([0]) self.assertEqual([0], download.config.get_selected_files()) self.assertEqual([4, 0], download.get_file_priorities()) def test_selected_files_all(self) -> None: """ Test if all files can be selected. """ download = Download(TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT), None, checkpoint_disabled=True, config=self.create_mock_download_config()) download.handle = Mock(file_priorities=Mock(return_value=[4, 4])) download.set_selected_files([0, 1]) self.assertEqual([0, 1], download.config.get_selected_files()) self.assertEqual([4, 4], download.get_file_priorities()) def test_selected_files_all_through_none(self) -> None: """ Test if all files can be selected by selecting None. 
""" download = Download(TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT), None, checkpoint_disabled=True, config=self.create_mock_download_config()) download.handle = Mock(file_priorities=Mock(return_value=[4, 4])) download.set_selected_files() self.assertEqual([], download.config.get_selected_files()) self.assertEqual([4, 4], download.get_file_priorities()) def test_selected_files_all_through_empty_list(self) -> None: """ Test if all files can be selected by selecting an empty list. """ download = Download(TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT), None, checkpoint_disabled=True, config=self.create_mock_download_config()) download.handle = Mock(file_priorities=Mock(return_value=[4, 4])) download.set_selected_files([]) self.assertEqual([], download.config.get_selected_files()) self.assertEqual([4, 4], download.get_file_priorities()) def test_get_share_mode_enabled(self) -> None: """ Test if we forward the enabled share mode when requested in the download. """ download = Download(TorrentDefNoMetainfo(b"\x01" * 20, b"name"), None, checkpoint_disabled=True, config=self.create_mock_download_config()) download.config.set_share_mode(True) self.assertTrue(download.get_share_mode()) def test_get_share_mode_disabled(self) -> None: """ Test if we forward the disabled share mode when requested in the download. """ download = Download(TorrentDefNoMetainfo(b"\x01" * 20, b"name"), None, checkpoint_disabled=True, config=self.create_mock_download_config()) download.config.set_share_mode(False) self.assertFalse(download.get_share_mode()) async def test_enable_share_mode(self) -> None: """ Test if the share mode can be enabled in a download. 
""" download = Download(TorrentDefNoMetainfo(b"\x01" * 20, b"name"), None, checkpoint_disabled=True, config=self.create_mock_download_config()) download.handle = Mock(is_valid=Mock(return_value=True)) download.set_share_mode(True) await sleep(0) self.assertTrue(download.config.get_share_mode()) self.assertEqual(call(True), download.handle.set_share_mode.call_args) async def test_disable_share_mode(self) -> None: """ Test if the share mode can be disabled in a download. """ download = Download(TorrentDefNoMetainfo(b"\x01" * 20, b"name"), None, checkpoint_disabled=True, config=self.create_mock_download_config()) download.handle = Mock(is_valid=Mock(return_value=True)) download.set_share_mode(False) await sleep(0) self.assertFalse(download.config.get_share_mode()) self.assertEqual(call(False), download.handle.set_share_mode.call_args) def test_get_num_connected_seeds_peers_no_handle(self) -> None: """ Test if connected peers and seeds are 0 if there is no handle. """ download = Download(TorrentDefNoMetainfo(b"\x01" * 20, b"name"), None, checkpoint_disabled=True, config=self.create_mock_download_config()) num_seeds, num_peers = download.get_num_connected_seeds_peers() self.assertEqual(0, num_seeds) self.assertEqual(0, num_peers) def test_get_num_connected_seeds_peers(self) -> None: """ Test if connected peers and seeds are correctly returned. """ download = Download(TorrentDefNoMetainfo(b"\x01" * 20, b"name"), None, checkpoint_disabled=True, config=self.create_mock_download_config()) download.handle = Mock(is_valid=Mock(return_value=True), get_peer_info=Mock(return_value=[ Mock(flags=140347, seed=1024), Mock(flags=140347, seed=128), Mock(flags=131242, seed=1024) ])) num_seeds, num_peers = download.get_num_connected_seeds_peers() self.assertEqual(1, num_seeds) self.assertEqual(2, num_peers) async def test_set_priority(self) -> None: """ Test if setting the priority calls the right methods in download. 
""" download = Download(TorrentDefNoMetainfo(b"\x01" * 20, b"name"), None, checkpoint_disabled=True, config=self.create_mock_download_config()) download.handle = Mock(is_valid=Mock(return_value=True)) download.set_priority(1234) await sleep(0) self.assertEqual(call(1234), download.handle.set_priority.call_args) def test_add_trackers(self) -> None: """ Test if trackers are added to the libtorrent handle. """ download = Download(TorrentDefNoMetainfo(b"\x01" * 20, b"name"), None, checkpoint_disabled=True, config=self.create_mock_download_config()) download.handle = Mock(is_valid=Mock(return_value=True)) download.add_trackers(["http://google.com"]) self.assertEqual(call({"url": "http://google.com", "verified": False}), download.handle.add_tracker.call_args) def test_process_error_alert(self) -> None: """ Test if error alerts are processed correctly. """ download = Download(TorrentDefNoMetainfo(b"\x01" * 20, b"name"), None, checkpoint_disabled=True, config=self.create_mock_download_config()) download.process_alert(Mock(msg=None, status_code=123, url="http://google.com", category=Mock(return_value=libtorrent.alert.category_t.error_notification)), "tracker_error_alert") self.assertEqual("HTTP status code 123", download.tracker_status["http://google.com"][1]) def test_process_error_alert_timeout(self) -> None: """ Test if timeout error alerts are processed correctly. """ download = Download(TorrentDefNoMetainfo(b"\x01" * 20, b"name"), None, checkpoint_disabled=True, config=self.create_mock_download_config()) download.process_alert(Mock(msg=None, status_code=0, url="http://google.com", category=Mock(return_value=libtorrent.alert.category_t.error_notification)), "tracker_error_alert") self.assertEqual("Timeout", download.tracker_status["http://google.com"][1]) def test_process_error_alert_not_working(self) -> None: """ Test if not working error alerts are processed correctly. 
""" download = Download(TorrentDefNoMetainfo(b"\x01" * 20, b"name"), None, checkpoint_disabled=True, config=self.create_mock_download_config()) download.process_alert(Mock(msg=None, status_code=-1, url="http://google.com", category=Mock(return_value=libtorrent.alert.category_t.error_notification)), "tracker_error_alert") self.assertEqual("Not working", download.tracker_status["http://google.com"][1]) def test_tracker_warning_alert(self) -> None: """ Test if a tracking warning alert is processed correctly. """ download = Download(TorrentDefNoMetainfo(b"\x01" * 20, b"name"), None, checkpoint_disabled=True, config=self.create_mock_download_config()) download.process_alert(Mock(message=Mock(return_value="test"), url="http://google.com", category=Mock(return_value=libtorrent.alert.category_t.error_notification)), "tracker_warning_alert") self.assertEqual("Warning: test", download.tracker_status["http://google.com"][1]) async def test_on_metadata_received_alert(self) -> None: """ Test if the right operations happen when we receive metadata. """ tdef = TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT) download = Download(tdef, None, checkpoint_disabled=True, config=self.create_mock_download_config()) download.handle = Mock(torrent_file=Mock(return_value=download.tdef.torrent_info), trackers=Mock(return_value=[{"url": "http://google.com"}])) download.tdef = None download.on_metadata_received_alert(Mock()) self.assertEqual(tdef.infohash, download.tdef.infohash) self.assertDictEqual(tdef.metainfo[b"info"], download.tdef.metainfo[b"info"]) self.assertEqual(b"http://google.com", download.tdef.metainfo[b"announce"]) def test_on_metadata_received_alert_unicode_error_encode(self) -> None: """ Test if no exception is raised when the url is not unicode compatible. 
""" tdef = TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT) download = Download(tdef, None, checkpoint_disabled=True, config=self.create_mock_download_config()) download.handle = Mock(trackers=Mock(return_value=[{"url": "\uD800"}]), torrent_file=Mock(return_value=download.tdef.torrent_info), get_peer_info=Mock(return_value=[])) download.tdef = None download.on_metadata_received_alert(Mock()) self.assertEqual(tdef.infohash, download.tdef.infohash) self.assertDictEqual(tdef.metainfo[b"info"], download.tdef.metainfo[b"info"]) self.assertNotIn(b"announce", download.tdef.metainfo) def test_on_metadata_received_alert_unicode_error_decode(self) -> None: """ Test if no exception is raised when the url is not unicode compatible. See: https://github.com/Tribler/tribler/issues/7223 """ tdef = TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT) download = Download(tdef, None, checkpoint_disabled=True, config=self.create_mock_download_config()) download.handle = Mock(trackers=lambda: [{"url": b"\xFD".decode()}], torrent_file=Mock(return_value=download.tdef.torrent_info), get_peer_info=Mock(return_value=[])) download.tdef = None download.on_metadata_received_alert(Mock()) self.assertEqual(tdef.infohash, download.tdef.infohash) self.assertDictEqual(tdef.metainfo[b"info"], download.tdef.metainfo[b"info"]) self.assertNotIn(b"announce", download.tdef.metainfo) def test_metadata_received_invalid_torrent_with_error(self) -> None: """ Test if no torrent def is loaded when a RuntimeError/ValueError occurs when parsing the metadata. 
""" tdef = TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT) download = Download(tdef, None, checkpoint_disabled=True, config=self.create_mock_download_config()) download.handle = Mock(trackers=Mock(return_value=[]), torrent_file=Mock(return_value=Mock(metadata=Mock(return_value=b""))), get_peer_info=Mock(return_value=[])) download.tdef = None download.on_metadata_received_alert(Mock()) self.assertIsNone(download.tdef) def test_torrent_checked_alert_no_pause_no_checkpoint(self) -> None: """ Test if no pause or checkpoint happens if the download state is such. """ download = Download(TorrentDefNoMetainfo(b"\x01" * 20, b"name"), None, checkpoint_disabled=True, config=self.create_mock_download_config()) download.checkpoint_disabled = False download.handle = Mock(is_valid=Mock(return_value=True), need_save_resume_data=Mock(return_value=False)) download.pause_after_next_hashcheck = False download.checkpoint_after_next_hashcheck = False download.process_alert(Mock(), "torrent_checked_alert") self.assertIsNone(download.handle.pause.call_args) self.assertIsNone(download.handle.need_save_resume_data.call_args) self.assertFalse(download.pause_after_next_hashcheck) self.assertFalse(download.checkpoint_after_next_hashcheck) def test_torrent_checked_alert_no_pause_checkpoint(self) -> None: """ Test if no pause but a checkpoint happens if the download state is such. 
""" download = Download(TorrentDefNoMetainfo(b"\x01" * 20, b"name"), None, checkpoint_disabled=True, config=self.create_mock_download_config()) download.checkpoint_disabled = False download.handle = Mock(is_valid=Mock(return_value=True), need_save_resume_data=Mock(return_value=False)) download.pause_after_next_hashcheck = False download.checkpoint_after_next_hashcheck = True download.process_alert(Mock(), "torrent_checked_alert") self.assertIsNone(download.handle.pause.call_args) self.assertEqual(call(), download.handle.need_save_resume_data.call_args) self.assertFalse(download.pause_after_next_hashcheck) self.assertFalse(download.checkpoint_after_next_hashcheck) def test_torrent_checked_alert_pause_no_checkpoint(self) -> None: """ Test if a pause but no checkpoint happens if the download state is such. """ download = Download(TorrentDefNoMetainfo(b"\x01" * 20, b"name"), None, checkpoint_disabled=True, config=self.create_mock_download_config()) download.checkpoint_disabled = False download.handle = Mock(is_valid=Mock(return_value=True), need_save_resume_data=Mock(return_value=False)) download.pause_after_next_hashcheck = True download.checkpoint_after_next_hashcheck = False download.process_alert(Mock(), "torrent_checked_alert") self.assertEqual(call(), download.handle.pause.call_args) self.assertIsNone(download.handle.need_save_resume_data.call_args) self.assertFalse(download.pause_after_next_hashcheck) self.assertFalse(download.checkpoint_after_next_hashcheck) def test_torrent_checked_alert_pause_checkpoint(self) -> None: """ Test if both a pause and a checkpoint happens if the download state is such. 
""" download = Download(TorrentDefNoMetainfo(b"\x01" * 20, b"name"), None, checkpoint_disabled=True, config=self.create_mock_download_config()) download.checkpoint_disabled = False download.handle = Mock(is_valid=Mock(return_value=True), need_save_resume_data=Mock(return_value=False)) download.pause_after_next_hashcheck = True download.checkpoint_after_next_hashcheck = True download.process_alert(Mock(), "torrent_checked_alert") self.assertEqual(call(), download.handle.pause.call_args) self.assertEqual(call(), download.handle.need_save_resume_data.call_args) self.assertFalse(download.pause_after_next_hashcheck) self.assertFalse(download.checkpoint_after_next_hashcheck) def test_tracker_reply_alert(self) -> None: """ Test if the tracker status is extracted from a reply alert. """ download = Download(TorrentDefNoMetainfo(b"\x01" * 20, b"name"), None, checkpoint_disabled=True, config=self.create_mock_download_config()) download.on_tracker_reply_alert(Mock(url="http://google.com", num_peers=42)) self.assertEqual((42, "Working"), download.tracker_status["http://google.com"]) def test_get_pieces_bitmask(self) -> None: """ Test if a correct pieces bitmask is returned when requested. """ download = Download(TorrentDefNoMetainfo(b"\x01" * 20, b"name"), None, checkpoint_disabled=True, config=self.create_mock_download_config()) download.handle = Mock(status=Mock(return_value=Mock(pieces=[True, False, True, False, False]))) self.assertEqual(b"oA==", download.get_pieces_base64()) async def test_resume_data_failed(self) -> None: """ Test if an error is raised when loading resume data failed. 
""" download = Download(TorrentDefNoMetainfo(b"\x01" * 20, b"name"), None, checkpoint_disabled=True, config=self.create_mock_download_config()) future = download.wait_for_alert("save_resume_data_alert", None, "save_resume_data_failed_alert", lambda _: SaveResumeDataError()) download.process_alert(Mock(msg="test error"), "save_resume_data_failed_alert") with self.assertRaises(SaveResumeDataError): await future async def test_on_state_changed_apply_ip_filter(self) -> None: """ Test if the ip filter gets enabled when in torrent status seeding (5) when hops are not zero. """ download = Download(TorrentDefNoMetainfo(b"\x01" * 20, b"name"), None, checkpoint_disabled=True, config=self.create_mock_download_config()) download.config.set_hops(1) download.handle = Mock(is_valid=Mock(return_value=True)) download.on_state_changed_alert(type("state_changed_alert", (object,), {"state": 5})) await sleep(0) self.assertEqual(call(True), download.handle.apply_ip_filter.call_args) async def test_on_state_changed_no_filter(self) -> None: """ Test if the ip filter does not get enabled when the hop count is zero. """ download = Download(TorrentDefNoMetainfo(b"\x01" * 20, b"name"), None, checkpoint_disabled=True, config=self.create_mock_download_config()) download.config.set_hops(0) download.handle = Mock(is_valid=Mock(return_value=True)) download.on_state_changed_alert(type("state_changed_alert", (object,), {"state": 5})) await sleep(0) self.assertEqual(call(False), download.handle.apply_ip_filter.call_args) async def test_on_state_changed_not_seeding(self) -> None: """ Test if the ip filter does not get enabled when the hop count is zero. 
""" download = Download(TorrentDefNoMetainfo(b"\x01" * 20, b"name"), None, checkpoint_disabled=True, config=self.create_mock_download_config()) download.config.set_hops(1) download.handle = Mock(is_valid=Mock(return_value=True)) download.on_state_changed_alert(type("state_changed_alert", (object,), {"state": 4})) await sleep(0) self.assertEqual(call(False), download.handle.apply_ip_filter.call_args) async def test_checkpoint_timeout(self) -> None: """ Testing whether making a checkpoint times out when we receive no alert from libtorrent. """ download = Download(TorrentDefNoMetainfo(b"\x01" * 20, b"name"), None, checkpoint_disabled=True, config=self.create_mock_download_config()) download.futures["save_resume_data"] = [Future()] task = ensure_future(download.save_resume_data()) await sleep(0) download.futures["save_resume_data_alert"][0][0].cancel() self.assertIsNone(await task) def test_on_save_resume_data_alert_permission_denied(self) -> None: """ Test if permission error in writing the download config does not crash the save resume alert handler. """ download = Download(TorrentDefNoMetainfo(b"\x01" * 20, b"name"), None, checkpoint_disabled=True, config=PermissionErrorDownloadConfig(self.create_mock_download_config().config)) download.checkpoint_disabled = False download.download_manager = Mock(get_checkpoint_dir=Mock(return_value=Path(__file__).absolute().parent)) download.on_save_resume_data_alert(Mock(resume_data={b"info-hash": b"\x01" * 20})) self.assertTrue(download.config.config["TEST_CRASH"]) self.assertEqual("name", download.config.config["download_defaults"]["name"]) async def test_get_tracker_status_unicode_decode_error(self) -> None: """ Test if a tracker status is returned when getting trackers leads to a UnicodeDecodeError. 
See: https://github.com/Tribler/tribler/issues/7036 """ download = Download(TorrentDefNoMetainfo(b"\x01" * 20, b"name"), None, checkpoint_disabled=True, config=self.create_mock_download_config()) fut = Future() fut.set_result(Mock(is_dht_running=Mock(return_value=False))) download.download_manager = Mock(get_session=Mock(return_value=fut)) download.handle = Mock(is_valid=Mock(return_value=True), get_peer_info=Mock( return_value=[Mock(source=1, dht=1, pex=0)] * 42 + [Mock(source=1, pex=1, dht=0)] * 7 ), trackers=Mock(side_effect=UnicodeDecodeError('', b'', 0, 0, ''))) result = download.get_tracker_status() self.assertEqual((42, "Disabled"), result["[DHT]"]) self.assertEqual((7, "Working"), result["[PeX]"]) def test_get_tracker_status_get_peer_info_error(self) -> None: """ Test if a tracker status is returned when getting peer info leads to a RuntimeError. """ download = Download(TorrentDefNoMetainfo(b"\x01" * 20, b"name"), None, checkpoint_disabled=True, config=self.create_mock_download_config()) download.download_manager = Mock(get_session=Mock(return_value=Mock(is_dht_running=Mock(return_value=True)))) download.handle = Mock(is_valid=Mock(return_value=True), get_peer_info=Mock(side_effect=RuntimeError), trackers=Mock(return_value=[])) result = download.get_tracker_status() self.assertEqual((0, "Working"), result["[DHT]"]) self.assertEqual((0, "Working"), result["[PeX]"]) async def test_shutdown(self) -> None: """ Test if the shutdown method closes the stream and clears the futures dictionary. """ download = Download(TorrentDefNoMetainfo(b"\x01" * 20, b"name"), None, checkpoint_disabled=True, config=self.create_mock_download_config()) download.stream = Mock() await download.shutdown() self.assertEqual({}, download.futures) self.assertEqual(call(), download.stream.close.call_args) def test_file_piece_range_flat(self) -> None: """ Test if the piece range of a single-file torrent is correctly determined. 
""" download = Download(TorrentDef.load_from_memory(TORRENT_UBUNTU_FILE_CONTENT), None, checkpoint_disabled=True, config=self.create_mock_download_config()) total_pieces = download.tdef.torrent_info.num_pieces() piece_range = download.file_piece_range(Path("ubuntu-15.04-desktop-amd64.iso")) self.assertEqual(piece_range, list(range(total_pieces))) def test_file_piece_range_minifiles(self) -> None: """ Test if the piece range of a file is correctly determined if multiple files exist in the same piece. """ download = Download(TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT), None, checkpoint_disabled=True, config=self.create_mock_download_config()) piece_range_a = download.file_piece_range(Path("torrent_create") / "abc" / "file2.txt") piece_range_b = download.file_piece_range(Path("torrent_create") / "abc" / "file3.txt") self.assertEqual([0], piece_range_a) self.assertEqual([0], piece_range_b) def test_file_piece_range_wide(self) -> None: """ Test if the piece range of a multi-file torrent is correctly determined. """ tdef = TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT) tdef.metainfo[b"info"][b"files"][0][b"length"] = 60000 tdef.metainfo[b"info"][b"pieces"] = b'\x01' * 80 download = Download(tdef, None, checkpoint_disabled=True, config=self.create_mock_download_config()) file1 = download.file_piece_range(Path("torrent_create") / "abc" / "file2.txt") other_indices = [download.file_piece_range(Path("torrent_create") / Path( *[p.decode() for p in tdef.metainfo[b"info"][b"files"][-1-i][b"path"]] )) for i in range(5)] self.assertEqual([0, 1, 2], file1) for piece_range in other_indices: self.assertEqual([3], piece_range) def test_file_piece_range_nonexistent(self) -> None: """ Test if the piece range of a non-existent file is correctly determined. 
""" download = Download(TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT), None, checkpoint_disabled=True, config=self.create_mock_download_config()) piece_range = download.file_piece_range(Path("I don't exist")) self.assertEqual([], piece_range) def test_file_completion_full(self) -> None: """ Test if a complete file shows 1.0 completion. """ download = Download(TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT), None, checkpoint_disabled=True, config=self.create_mock_download_config()) download.handle = Mock(is_valid=Mock(return_value=True), have_piece=Mock(return_value=True)) self.assertEqual(1.0, download.get_file_completion(Path("torrent_create") / "abc" / "file2.txt")) def test_file_completion_nonexistent(self) -> None: """ Test if an unknown path (does not exist in a torrent) shows 1.0 completion. """ download = Download(TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT), None, checkpoint_disabled=True, config=self.create_mock_download_config()) download.handle = Mock(is_valid=Mock(return_value=True)) self.assertEqual(1.0, download.get_file_completion(Path("I don't exist"))) def test_file_completion_directory(self) -> None: """ Test if a directory (does not exist in a torrent) shows 1.0 completion. """ download = Download(TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT), None, checkpoint_disabled=True, config=self.create_mock_download_config()) download.handle = Mock(is_valid=Mock(return_value=True), have_piece=Mock(return_value=True)) self.assertEqual(1.0, download.get_file_completion(Path("torrent_create"))) def test_file_completion_nohandle(self) -> None: """ Test if a file shows 0.0 completion if the torrent handle is not valid. 
""" download = Download(TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT), None, checkpoint_disabled=True, config=self.create_mock_download_config()) download.handle = Mock(is_valid=Mock(return_value=False), have_piece=Mock(return_value=True)) self.assertEqual(0.0, download.get_file_completion(Path("torrent_create") / "abc" / "file2.txt")) def test_file_completion_partial(self) -> None: """ Test if a file shows 0.0 completion if the torrent handle is not valid. """ download = Download(TorrentDef.load_from_memory(TORRENT_UBUNTU_FILE_CONTENT), None, checkpoint_disabled=True, config=self.create_mock_download_config()) total_pieces = download.tdef.torrent_info.num_pieces() expected = (total_pieces // 2) / total_pieces def fake_has_piece(piece_index: int) -> bool: return piece_index > total_pieces / 2 # total_pieces // 2 will return True download.handle = Mock(is_valid=Mock(return_value=True), have_piece=fake_has_piece) result = download.get_file_completion(Path("ubuntu-15.04-desktop-amd64.iso")) self.assertEqual(round(expected, 2), round(result, 2)) # Round to make sure we don't get float rounding errors def test_file_length(self) -> None: """ Test if we can get the length of a file. """ download = Download(TorrentDef.load_from_memory(TORRENT_UBUNTU_FILE_CONTENT), None, checkpoint_disabled=True, config=self.create_mock_download_config()) self.assertEqual(1150844928, download.get_file_length(Path("ubuntu-15.04-desktop-amd64.iso"))) def test_file_length_two(self) -> None: """ Test if we can get the length of a file in a multi-file torrent. 
""" download = Download(TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT), None, checkpoint_disabled=True, config=self.create_mock_download_config()) self.assertEqual(6, download.get_file_length(Path("torrent_create") / "abc" / "file2.txt")) self.assertEqual(6, download.get_file_length(Path("torrent_create") / "abc" / "file3.txt")) def test_file_length_nonexistent(self) -> None: """ Test if the length of a non-existent file is 0. """ download = Download(TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT), None, checkpoint_disabled=True, config=self.create_mock_download_config()) self.assertEqual(0, download.get_file_length(Path("I don't exist"))) def test_file_index_unloaded(self) -> None: """ Test if a non-existent path leads to the special unloaded index. """ download = Download(TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT), None, checkpoint_disabled=True, config=self.create_mock_download_config()) self.assertEqual(IllegalFileIndex.unloaded.value, download.get_file_index(Path("I don't exist"))) def test_file_index_directory_collapsed(self) -> None: """ Test if a collapsed-dir path leads to the special collapsed dir index. """ download = Download(TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT), None, checkpoint_disabled=True, config=self.create_mock_download_config()) self.assertEqual(IllegalFileIndex.collapsed_dir.value, download.get_file_index(Path("torrent_create"))) def test_file_index_directory_expanded(self) -> None: """ Test if an expanded-dir path leads to the special expanded dir index. """ download = Download(TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT), None, checkpoint_disabled=True, config=self.create_mock_download_config()) download.tdef.torrent_file_tree.expand(Path("torrent_create")) self.assertEqual(IllegalFileIndex.expanded_dir.value, download.get_file_index(Path("torrent_create"))) def test_file_index_file(self) -> None: """ Test if we can get the index of a file. 
""" download = Download(TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT), None, checkpoint_disabled=True, config=self.create_mock_download_config()) self.assertEqual(1, download.get_file_index(Path("torrent_create") / "abc" / "file3.txt")) def test_file_selected_nonexistent(self) -> None: """ Test if a non-existent file does not register as selected. """ download = Download(TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT), None, checkpoint_disabled=True, config=self.create_mock_download_config()) self.assertFalse(download.is_file_selected(Path("I don't exist"))) def test_file_selected_realfile(self) -> None: """ Test if a file starts off as selected. """ download = Download(TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT), None, checkpoint_disabled=True, config=self.create_mock_download_config()) self.assertTrue(download.is_file_selected(Path("torrent_create") / "abc" / "file3.txt")) def test_file_selected_directory(self) -> None: """ Test if a directory does not register as selected. """ download = Download(TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT), None, checkpoint_disabled=True, config=self.create_mock_download_config()) self.assertFalse(download.is_file_selected(Path("torrent_create") / "abc")) def test_on_torrent_finished_alert(self) -> None: """ Test if the torrent_finished notification is called when the torrent finishes. """ callback = Mock() download = Download(TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT), None, checkpoint_disabled=True, config=self.create_mock_download_config()) download.stream = Mock() download.handle = Mock(is_valid=Mock(return_value=True), status=Mock(return_value=Mock(total_download=7))) download.notifier = Notifier() download.notifier.add(Notification.torrent_finished, callback) download.on_torrent_finished_alert(Mock()) self.assertEqual(call(infohash=hexlify(download.tdef.infohash).decode(), name="torrent_create", hidden=False), callback.call_args)
44,043
Python
.py
745
47.983893
120
0.646761
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,959
test_download_manager.py
Tribler_tribler/src/tribler/test_unit/core/libtorrent/download_manager/test_download_manager.py
import asyncio import functools from asyncio import Future, ensure_future, sleep from binascii import hexlify from io import StringIO from pathlib import Path from unittest.mock import AsyncMock, MagicMock, Mock, call, patch from configobj import ConfigObj from configobj.validate import Validator, VdtParamError from ipv8.test.base import TestBase from ipv8.util import succeed import tribler from tribler.core.libtorrent.download_manager.download import Download from tribler.core.libtorrent.download_manager.download_config import SPEC_CONTENT, DownloadConfig from tribler.core.libtorrent.download_manager.download_manager import DownloadManager, MetainfoLookup from tribler.core.libtorrent.download_manager.download_state import DownloadState from tribler.core.libtorrent.torrentdef import TorrentDef, TorrentDefNoMetainfo from tribler.core.notifier import Notifier from tribler.test_unit.core.libtorrent.mocks import TORRENT_WITH_DIRS_CONTENT from tribler.tribler_config import TriblerConfigManager class MockTriblerConfigManager(TriblerConfigManager): """ A memory-based TriblerConfigManager. """ def write(self) -> None: """ Don't actually write to any file. """ class TestDownloadManager(TestBase): """ Tests for the DownloadManager class. """ def setUp(self) -> None: """ Create a download manager. """ super().setUp() self.manager = DownloadManager(MockTriblerConfigManager(), Notifier(), Mock()) for i in range(4): fut = Future() fut.set_result(Mock(status=Mock(dht_nodes=0), get_torrents=Mock(return_value=[]))) self.manager.ltsessions[i] = fut self.manager.set_download_states_callback(self.manager.sesscb_states_callback) async def tearDown(self) -> None: """ Shut down the download manager. """ await self.manager.shutdown_task_manager() await super().tearDown() def create_mock_download_config(self) -> DownloadConfig: """ Create a mocked DownloadConfig. 
""" defaults = ConfigObj(StringIO(SPEC_CONTENT)) conf = ConfigObj() conf.configspec = defaults conf.validate(Validator()) config = DownloadConfig(conf) config.set_dest_dir(Path("")) return config async def test_get_metainfo_valid_metadata(self) -> None: """ Testing if the metainfo is retrieved when the handle has valid metadata immediately. """ download = Download(TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT), None, checkpoint_disabled=True, config=DownloadConfig(ConfigObj(StringIO(SPEC_CONTENT)))) download.handle = Mock(is_valid=Mock(return_value=True)) download.get_state = Mock(return_value=Mock(get_num_seeds_peers=Mock(return_value=(42, 7)))) config = DownloadConfig(ConfigObj(StringIO(SPEC_CONTENT))) with patch.object(self.manager, "start_download", AsyncMock(return_value=download)), \ patch.object(self.manager, "remove_download", AsyncMock()), \ patch.object(DownloadConfig, "from_defaults", Mock(return_value=config)): metainfo = await self.manager.get_metainfo(download.tdef.infohash) self.assertEqual(7, metainfo[b"leechers"]) self.assertEqual(42, metainfo[b"seeders"]) self.assertEqual(download.tdef.metainfo, metainfo) async def test_get_metainfo_add_fail(self) -> None: """ Test if invalid metainfo leads to a return value of None. """ config = DownloadConfig(ConfigObj(StringIO(SPEC_CONTENT))) with patch.object(self.manager, "start_download", AsyncMock(side_effect=TypeError)), \ patch.object(DownloadConfig, "from_defaults", Mock(return_value=config)): self.assertIsNone(await self.manager.get_metainfo(b"\x00" * 20)) async def test_get_metainfo_duplicate_request(self) -> None: """ Test if the same request is returned when invoking get_metainfo twice with the same infohash. 
""" download = Download(TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT), None, checkpoint_disabled=True, config=DownloadConfig(ConfigObj(StringIO(SPEC_CONTENT)))) download.handle = Mock(is_valid=Mock(return_value=True)) config = DownloadConfig(ConfigObj(StringIO(SPEC_CONTENT))) with patch.object(self.manager, "start_download", AsyncMock(return_value=download)), \ patch.object(self.manager, "remove_download", AsyncMock()), \ patch.object(DownloadConfig, "from_defaults", Mock(return_value=config)): results = await asyncio.gather(self.manager.get_metainfo(b"\x00" * 20), self.manager.get_metainfo(b"\x00" * 20)) self.assertDictEqual(*results) async def test_get_metainfo_cache(self) -> None: """ Testing if cached metainfo is returned, if available. """ self.manager.metainfo_cache[b"a" * 20] = {'meta_info': 'test', 'time': 0} self.assertEqual("test", await self.manager.get_metainfo(b"a" * 20)) async def test_get_metainfo_with_already_added_torrent(self) -> None: """ Test if metainfo can be fetched for a torrent which is already in session. """ download = Download(TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT), None, checkpoint_disabled=True, config=DownloadConfig(ConfigObj(StringIO(SPEC_CONTENT)))) download.handle = Mock(is_valid=Mock(return_value=True)) self.manager.downloads[download.tdef.infohash] = download self.assertEqual(download.tdef.metainfo, await self.manager.get_metainfo(download.tdef.infohash)) async def test_start_download_while_getting_metainfo(self) -> None: """ Test if a torrent can be added while a metainfo request is running. 
""" info_download = Download(TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT), None, checkpoint_disabled=True, config=self.create_mock_download_config()) info_download.handle = Mock(is_valid=Mock(return_value=True)) self.manager.downloads[info_download.tdef.infohash] = info_download self.manager.metainfo_requests[info_download.tdef.infohash] = MetainfoLookup(info_download, 1) tdef = TorrentDefNoMetainfo(info_download.tdef.infohash, b"name", f"magnet:?xt=urn:btih:{hexlify(info_download.tdef.infohash).decode()}&") with patch.object(self.manager, "remove_download", AsyncMock()): download = await self.manager.start_download(tdef=tdef, config=info_download.config, checkpoint_disabled=True) self.assertNotEqual(info_download, download) async def test_start_download(self) -> None: """ Test if a torrent can be added to the download manager. """ mock_handle = Mock(info_hash=Mock(return_value=Mock(to_bytes=Mock(return_value=b"\x01" * 20))), is_valid=Mock(return_value=True)) mock_alert = type("add_torrent_alert", (object,), {"handle": mock_handle, "error": Mock(value=Mock(return_value=None)), "category": MagicMock(return_value=None)}) self.manager.ltsessions[0].result().async_add_torrent = lambda _: self.manager.process_alert(mock_alert()) with patch.object(self.manager, "remove_download", AsyncMock()): download = await self.manager.start_download(tdef=TorrentDefNoMetainfo(b"\x01" * 20, b""), config=self.create_mock_download_config(), checkpoint_disabled=True) self.assertEqual(mock_handle, await download.get_handle()) async def test_start_handle_wait_for_dht_timeout(self) -> None: """ Test if start handle waits no longer than the set timeout for the DHT to be ready. 
""" download = Download(TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT), None, checkpoint_disabled=True, config=self.create_mock_download_config()) download.handle = Mock(is_valid=Mock(return_value=True)) self.manager.dht_ready_task = Future() self.manager.dht_readiness_timeout = 0.001 self.assertIsNone(await self.manager.start_handle(download, {})) async def test_start_handle_wait_for_dht(self) -> None: """ Test if start handle waits for the DHT to be ready. """ download = Download(TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT), None, checkpoint_disabled=True, config=self.create_mock_download_config()) download.handle = Mock(is_valid=Mock(return_value=True)) self.manager.dht_ready_task = Future() self.manager.dht_readiness_timeout = 10 task = ensure_future(self.manager.start_handle(download, {})) self.manager.dht_ready_task.set_result(None) await task self.assertTrue(task.done()) async def test_start_download_existing_handle(self) -> None: """ Test if torrents can be added when there is a pre-existing handle. """ mock_handle = Mock(info_hash=Mock(return_value=Mock(to_bytes=Mock(return_value=b"\x01" * 20))), is_valid=Mock(return_value=True)) self.manager.ltsessions[0].result().get_torrents = Mock(return_value=[mock_handle]) download = await self.manager.start_download(tdef=TorrentDefNoMetainfo(b"\x01" * 20, b"name"), config=self.create_mock_download_config(), checkpoint_disabled=True) handle = await download.get_handle() self.assertEqual(mock_handle, handle) def test_convert_rate(self) -> None: """ Test if rates are correctly converted. """ self.assertEqual(-1, DownloadManager.convert_rate(0)) self.assertEqual(1, DownloadManager.convert_rate(-1)) self.assertEqual(1024, DownloadManager.convert_rate(1)) self.assertEqual(2048, DownloadManager.convert_rate(2)) self.assertEqual(-2048, DownloadManager.convert_rate(-2)) def test_reverse_convert_rate(self) -> None: """ Test if converted rates can be reversed. 
""" self.assertEqual(0, DownloadManager.reverse_convert_rate(-1)) self.assertEqual(-1, DownloadManager.reverse_convert_rate(1)) self.assertEqual(0, DownloadManager.reverse_convert_rate(0)) self.assertEqual(1, DownloadManager.reverse_convert_rate(1024)) self.assertEqual(-2, DownloadManager.reverse_convert_rate(-2048)) async def test_start_download_existing_download(self) -> None: """ Test if torrents can be added when there is a pre-existing download. """ download = Download(TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT), None, checkpoint_disabled=True, config=self.create_mock_download_config()) self.manager.downloads[download.tdef.infohash] = download value = await self.manager.start_download(tdef=TorrentDefNoMetainfo(download.tdef.infohash, b"name"), config=self.create_mock_download_config(), checkpoint_disabled=True) self.assertEqual(download, value) async def test_start_download_no_ti_url(self) -> None: """ Test if a ValueError is raised if we try to add a torrent without infohash or url. """ config = DownloadConfig(ConfigObj(StringIO(SPEC_CONTENT))) with self.assertRaises(ValueError), patch.object(DownloadConfig, "from_defaults", Mock(return_value=config)): await self.manager.start_download() def test_remove_unregistered_torrent(self) -> None: """ Test if torrents which aren't known are succesfully removed. """ mock_handle = Mock(is_valid=lambda: False) alert = type('torrent_removed_alert', (object,), {"handle": mock_handle, "info_hash": Mock(to_bytes=Mock(return_value=b"00" * 20))}) self.manager.process_alert(alert()) self.assertNotIn(b"\x00" * 20, self.manager.downloads) def test_set_proxy_settings(self) -> None: """ Test if the proxy settings can be set. 
""" self.manager.set_proxy_settings(self.manager.get_session(0).result(), 0, ("a", "1234"), ("abc", "def")) self.assertEqual(call({"proxy_type": 0, "proxy_hostnames": True, "proxy_peer_connections": True, "proxy_hostname": "a", "proxy_port": 1234, "proxy_username": "abc", "proxy_password": "def"}), self.manager.ltsessions[0].result().apply_settings.call_args) async def test_post_session_stats(self) -> None: """ Test if post_session_stats actually updates the state of libtorrent readiness for clean shutdown. """ self.manager.post_session_stats() await sleep(0) self.manager.ltsessions[0].result().post_session_stats.assert_called_once() async def test_load_checkpoint_no_metainfo(self) -> None: """ Test if no checkpoint can be loaded from a file with no metainfo. """ with patch.dict(tribler.core.libtorrent.download_manager.download_manager.__dict__, {"ConfigObj": Mock(return_value=self.create_mock_download_config().config)}): value = await self.manager.load_checkpoint("foo.conf") self.assertFalse(value) async def test_load_checkpoint_empty_metainfo(self) -> None: """ Test if no checkpoint can be loaded from a file with empty metainfo. """ download_config = self.create_mock_download_config() download_config.set_metainfo({b"info": {}, b"url": ""}) with patch.dict(tribler.core.libtorrent.download_manager.download_manager.__dict__, {"ConfigObj": Mock(return_value=download_config.config)}): value = await self.manager.load_checkpoint("foo.conf") self.assertFalse(value) async def test_load_checkpoint_bad_url(self) -> None: """ Test if no checkpoint can be loaded from a file with a bad url. 
""" download_config = self.create_mock_download_config() download_config.set_metainfo({b"url": b"\x80", b"info": { b"name": b"torrent name", b"files": [{b"path": [b"a.txt"], b"length": 123}], b"piece length": 128, b"pieces": b"\x00" * 20 }}) with patch.dict(tribler.core.libtorrent.download_manager.download_manager.__dict__, {"ConfigObj": Mock(return_value=download_config.config)}): value = await self.manager.load_checkpoint("foo.conf") self.assertFalse(value) async def test_load_checkpoint(self) -> None: """ Test if a checkpoint can be loaded. """ download_config = self.create_mock_download_config() download_config.set_metainfo({b"info": { b"name": b"torrent name", b"files": [{b"path": [b"a.txt"], b"length": 123}], b"piece length": 128, b"pieces": b"\x00" * 20 }}) download_config.set_dest_dir(Path(__file__).absolute().parent) with patch.dict(tribler.core.libtorrent.download_manager.download_manager.__dict__, {"ConfigObj": Mock(return_value=download_config.config)}), \ patch.object(self.manager, "start_download", AsyncMock()): value = await self.manager.load_checkpoint("foo.conf") self.assertTrue(value) async def test_load_checkpoint_file_not_found(self) -> None: """ Test if no checkpoint can be loaded if a specified file is not found. """ with patch.dict(tribler.core.libtorrent.download_manager.download_manager.__dict__, {"ConfigObj": Mock(side_effect=OSError('Config file not found: "foo.conf".'))}): value = await self.manager.load_checkpoint("foo.conf") self.assertFalse(value) async def test_load_checkpoint_file_corrupt(self) -> None: """ Test if no checkpoint can be loaded if the specified file is corrupt. """ with patch.dict(tribler.core.libtorrent.download_manager.download_manager.__dict__, {"ConfigObj": Mock(side_effect=VdtParamError("key", "value"))}): value = await self.manager.load_checkpoint("foo.conf") self.assertFalse(value) async def test_download_manager_start(self) -> None: """ Test if all (zero) checkpoints are loaded when starting without downloads. 
""" self.manager.start() await sleep(0) await self.manager.get_task("start") self.assertTrue(self.manager.all_checkpoints_are_loaded) async def test_readd_download_safe_seeding(self) -> None: """ Test if a download is re-added when doing safe seeding. Safe seeding should be turned off and the number of hops should be retrieved from the defaults. """ config = self.create_mock_download_config() config.set_safe_seeding(True) download = Download(TorrentDefNoMetainfo(b"\x01" * 20, b"name", None), None, checkpoint_disabled=True, config=config) download.futures["save_resume_data"] = succeed(True) download_state = DownloadState(download, Mock(state=4, paused=False), None) self.manager.downloads = {b"\x01" * 20: download} self.manager.config.set("libtorrent/download_defaults/number_hops", 42) with patch.object(self.manager, "start_download", AsyncMock(return_value=download)) as start_download, \ patch.object(self.manager, "remove_download", AsyncMock()) as remove_download: future = ensure_future(self.manager.sesscb_states_callback([download_state])) while "save_resume_data_alert" not in download.futures: await sleep(0) download.process_alert(Mock(), "save_resume_data_alert") await future self.assertEqual(call(download), remove_download.call_args) self.assertEqual(download.tdef, start_download.call_args.kwargs["tdef"]) self.assertFalse(start_download.call_args.kwargs["config"].get_safe_seeding()) self.assertEqual(42, start_download.call_args.kwargs["config"].get_hops()) def test_get_downloads_by_name(self) -> None: """ Test if downloads can be retrieved by name. 
""" download = Download(TorrentDefNoMetainfo(b"\x01" * 20, b"name", None), None, checkpoint_disabled=True, config=DownloadConfig(ConfigObj(StringIO(SPEC_CONTENT)))) self.manager.downloads = {b"\x01" * 20: download} self.assertEqual([download], self.manager.get_downloads_by_name("name")) self.assertEqual([], self.manager.get_downloads_by_name("bla")) async def test_start_download_from_magnet_no_name(self) -> None: """ Test if a download is started with `Unknown name` name when the magnet has no name. """ magnet = f'magnet:?xt=urn:btih:{"A" * 40}' with patch.object(self.manager, "start_download", AsyncMock()) as start_download: await self.manager.start_download_from_uri(magnet) self.assertEqual(b'Unknown name', start_download.call_args.kwargs["tdef"].get_name()) async def test_start_download_from_magnet_with_name(self) -> None: """ Test if a download is started with `Unknown name` name when the magnet has no name. """ magnet = f'magnet:?xt=urn:btih:{"A" * 40}&dn=AwesomeTorrent' with patch.object(self.manager, "start_download", AsyncMock()) as start_download: await self.manager.start_download_from_uri(magnet) self.assertEqual(b'AwesomeTorrent', start_download.call_args.kwargs["tdef"].get_name()) def test_update_trackers(self) -> None: """ Test if trackers can be updated for an existing download. """ download = Download(TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT), None, checkpoint_disabled=True, config=self.create_mock_download_config()) self.manager.downloads[download.tdef.infohash] = download self.manager.update_trackers(download.tdef.infohash, [b"127.0.0.1/test-announce1"]) self.assertEqual(b"127.0.0.1/test-announce1", download.tdef.metainfo[b"announce"]) self.assertNotIn(b"announce-list", download.tdef.metainfo) def test_update_trackers_list(self) -> None: """ Test if multiple trackers are correctly added as an announce list instead of a the singular announce. 
""" download = Download(TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT), None, checkpoint_disabled=True, config=self.create_mock_download_config()) self.manager.downloads[download.tdef.infohash] = download self.manager.update_trackers(download.tdef.infohash, [f"127.0.0.1/test-announce{i}".encode() for i in range(2)]) self.assertNotIn(b"announce", download.tdef.metainfo) self.assertSetEqual({f"127.0.0.1/test-announce{i}".encode() for i in range(2)}, {announce_url[0] for announce_url in download.tdef.metainfo[b"announce-list"]}) def test_update_trackers_list_append(self) -> None: """ Test if trackers can be updated in sequence. """ download = Download(TorrentDef.load_from_memory(TORRENT_WITH_DIRS_CONTENT), None, checkpoint_disabled=True, config=self.create_mock_download_config()) self.manager.downloads[download.tdef.infohash] = download self.manager.update_trackers(download.tdef.infohash, [b"127.0.0.1/test-announce0"]) self.manager.update_trackers(download.tdef.infohash, [b"127.0.0.1/test-announce1"]) self.assertNotIn(b"announce", download.tdef.metainfo) self.assertSetEqual({f"127.0.0.1/test-announce{i}".encode() for i in range(2)}, {announce_url[0] for announce_url in download.tdef.metainfo[b"announce-list"]}) async def test_get_download_rate_limit(self) -> None: """ Test if the download rate limit can be set. """ settings = {} self.manager.ltsessions[0] = Future() self.manager.ltsessions[0].set_result(Mock( get_settings=Mock(return_value=settings), download_rate_limit=functools.partial(settings.get, "download_rate_limit") )) await self.manager.set_download_rate_limit(42) self.assertEqual(42, await self.manager.get_download_rate_limit()) async def test_get_upload_rate_limit(self) -> None: """ Test if the upload rate limit can be set. 
""" settings = {} self.manager.ltsessions[0] = Future() self.manager.ltsessions[0].set_result(Mock( get_settings=Mock(return_value=settings), upload_rate_limit = functools.partial(settings.get, "upload_rate_limit") )) await self.manager.set_upload_rate_limit(42) self.assertEqual(42, await self.manager.get_upload_rate_limit())
23,984
Python
.py
418
46.093301
120
0.640666
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,960
test_database.py
Tribler_tribler/src/tribler/test_unit/core/rendezvous/test_database.py
from ipv8.keyvault.crypto import default_eccrypto from ipv8.peer import Peer from ipv8.test.base import TestBase from tribler.core.rendezvous.database import RendezvousDatabase class TestRendezvousDatabase(TestBase): """ Tests for the RendezvousDatabase class. """ def setUp(self) -> None: """ Create a peer and a memory-based database. """ super().setUp() self.peer = Peer(default_eccrypto.generate_key("curve25519").pub()) self.memdb = RendezvousDatabase(":memory:") async def tearDown(self) -> None: """ Shut down the database. """ self.memdb.shutdown() await super().tearDown() def test_retrieve_no_certificates(self) -> None: """ Test if we start out without certificates. """ retrieved = self.memdb.get(self.peer) self.assertEqual(0, len(retrieved)) def test_retrieve_single_certificate(self) -> None: """ Test if we can add a single certificate. """ start_timestamp, stop_timestamp = range(1, 3) self.memdb.add(self.peer, start_timestamp, stop_timestamp) retrieved = self.memdb.get(self.peer) self.assertEqual(1, len(retrieved)) self.assertEqual((start_timestamp, stop_timestamp), (retrieved[0].start, retrieved[0].stop)) def test_retrieve_multiple_certificates(self) -> None: """ Test if we can add multiple certificates. """ start_timestamp1, stop_timestamp1, start_timestamp2, stop_timestamp2 = range(1, 5) self.memdb.add(self.peer, start_timestamp1, stop_timestamp1) self.memdb.add(self.peer, start_timestamp2, stop_timestamp2) retrieved = self.memdb.get(self.peer) self.assertEqual(2, len(retrieved)) self.assertEqual((start_timestamp1, stop_timestamp1), (retrieved[0].start, retrieved[0].stop)) self.assertEqual((start_timestamp2, stop_timestamp2), (retrieved[1].start, retrieved[1].stop)) def test_retrieve_filter_certificates(self) -> None: """ Test if we can retrieve certificates with a filter. 
""" peer2 = Peer(default_eccrypto.generate_key("curve25519").pub()) start_timestamp1, stop_timestamp1, start_timestamp2, stop_timestamp2 = range(1, 5) self.memdb.add(self.peer, start_timestamp1, stop_timestamp1) self.memdb.add(peer2, start_timestamp2, stop_timestamp2) retrieved = self.memdb.get(self.peer) self.assertEqual(1, len(retrieved)) self.assertEqual((start_timestamp1, stop_timestamp1), (retrieved[0].start, retrieved[0].stop))
2,654
Python
.py
58
37.793103
102
0.665504
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,961
test_community.py
Tribler_tribler/src/tribler/test_unit/core/rendezvous/test_community.py
from __future__ import annotations import os import socket from typing import TYPE_CHECKING from ipv8.test.base import TestBase from ipv8.test.mocking.endpoint import MockEndpointListener from tribler.core.rendezvous.community import RendezvousCommunity, RendezvousSettings from tribler.core.rendezvous.database import RendezvousDatabase from tribler.core.rendezvous.payload import PullRecordPayload, RecordPayload if TYPE_CHECKING: from ipv8.community import CommunitySettings from ipv8.test.mocking.ipv8 import MockIPv8 class TestUserActivityCommunity(TestBase[RendezvousCommunity]): """ Tests for the rendezvous community. """ def setUp(self) -> None: """ Create two communities. """ super().setUp() self.initialize(RendezvousCommunity, 2) def create_node(self, settings: CommunitySettings | None = None, create_dht: bool = False, enable_statistics: bool = False) -> MockIPv8: """ Add a memory database to both nodes. """ return super().create_node(RendezvousSettings(database=RendezvousDatabase(":memory:")), create_dht, enable_statistics) def database(self, i: int) -> RendezvousDatabase: """ Get the database manager of node i. """ return self.overlay(i).composition.database async def test_pull_known_crawler(self) -> None: """ Test if a known crawler is allowed to crawl. 
""" self.database(1).add(self.peer(0), 0.0, 10.0) self.overlay(1).composition.crawler_mid = self.mid(0) with self.assertReceivedBy(0, [RecordPayload]) as received: self.overlay(0).ez_send(self.peer(1), PullRecordPayload(self.mid(1))) await self.deliver_messages() payload, = received self.assertEqual(self.key_bin(0), payload.public_key) self.assertEqual(socket.inet_pton(socket.AF_INET6 if int(os.environ.get("TEST_IPV8_WITH_IPV6", "0")) else socket.AF_INET, self.address(0)[0]), payload.ip) self.assertEqual(self.address(0)[1], payload.port) self.assertEqual(0.0, payload.start) self.assertEqual(10.0, payload.stop) async def test_pull_unknown_crawler(self) -> None: """ Test if an unknown crawler does not receive any information. """ self.database(1).add(self.peer(0), 0.0, 10.0) self.overlay(1).composition.crawler_mid = bytes(b ^ 255 for b in self.mid(0)) with self.assertReceivedBy(0, []): self.overlay(0).ez_send(self.peer(1), PullRecordPayload(self.mid(1))) await self.deliver_messages() async def test_pull_replay_attack(self) -> None: """ Test if an unknown crawler does not receive any information. """ self.add_node_to_experiment(self.create_node()) self.database(1).add(self.peer(0), 0.0, 10.0) self.overlay(1).composition.crawler_mid = self.mid(0) self.overlay(2).composition.crawler_mid = self.mid(0) packet_sniffer = MockEndpointListener(self.overlay(1).endpoint) self.overlay(1).endpoint.add_listener(packet_sniffer) self.overlay(0).ez_send(self.peer(1), PullRecordPayload(self.mid(1))) await self.deliver_messages() packet, _ = packet_sniffer.received_packets _, data = packet with self.assertReceivedBy(1, []): self.endpoint(1).send(self.address(2), data) await self.deliver_messages()
3,577
Python
.py
76
38.289474
108
0.657192
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,962
test_rendezvous_hook.py
Tribler_tribler/src/tribler/test_unit/core/rendezvous/test_rendezvous_hook.py
from __future__ import annotations

from ipv8.keyvault.crypto import default_eccrypto
from ipv8.peer import Peer
from ipv8.peerdiscovery.network import Network
from ipv8.test.base import TestBase

from tribler.core.rendezvous.database import RendezvousDatabase
from tribler.core.rendezvous.rendezvous_hook import RendezvousHook


class MockedRendezvousHook(RendezvousHook):
    """
    A mocked RendezvousHook that allows for time to be controlled.
    """

    def __init__(self, rendezvous_db: RendezvousDatabase, mocked_time: float | None = None) -> None:
        """
        Create a new MockedRendezvousHook with a certain time set.

        :param rendezvous_db: the database that peer sessions are written to.
        :param mocked_time: the fake "current" time, or None to fall back to the real clock.
        """
        super().__init__(rendezvous_db)
        self.mocked_time = mocked_time

    @property
    def current_time(self) -> float:
        """
        Fetch the current time: the mocked time if one is set, otherwise the parent's real time.
        """
        if self.mocked_time is None:
            return super().current_time
        return self.mocked_time


class TestRendezvousDatabase(TestBase):
    """
    Tests for the RendezvousHook class.
    """

    def setUp(self) -> None:
        """
        Create a peer and a memory-based database.
        """
        super().setUp()
        self.peer = Peer(default_eccrypto.generate_key("curve25519").pub())
        self.memdb = RendezvousDatabase(":memory:")
        self.hook = MockedRendezvousHook(self.memdb)

    async def tearDown(self) -> None:
        """
        Shut down the database.
        """
        self.memdb.shutdown()
        # An empty Network: the hook has no remaining peers to write out on shutdown.
        self.hook.shutdown(Network())
        await super().tearDown()

    def test_peer_added(self) -> None:
        """
        Test if peers are not added to the database after an add hook yet.
        """
        self.hook.on_peer_added(self.peer)

        retrieved = self.memdb.get(self.peer)

        # Sessions are only persisted on removal/shutdown, not on add.
        self.assertEqual(0, len(retrieved))

    def test_peer_removed(self) -> None:
        """
        Test if peers are correctly added to the database after a remove hook.
        """
        self.hook.on_peer_added(self.peer)
        # Pretend one second passed between the add and the remove.
        self.hook.mocked_time = self.peer.creation_time + 1.0
        self.hook.on_peer_removed(self.peer)

        retrieved = self.memdb.get(self.peer)

        self.assertEqual(1, len(retrieved))
        self.assertEqual((self.peer.creation_time, self.hook.mocked_time),
                         (retrieved[0].start, retrieved[0].stop))

    def test_peer_store_on_shutdown(self) -> None:
        """
        Test if peers are correctly added to the database when shutting down.
        """
        network = Network()
        network.add_verified_peer(self.peer)
        self.hook.on_peer_added(self.peer)
        self.hook.mocked_time = self.peer.creation_time + 1.0

        self.hook.shutdown(network)

        retrieved = self.memdb.get(self.peer)
        self.assertEqual(1, len(retrieved))
        self.assertEqual((self.peer.creation_time, self.hook.mocked_time),
                         (retrieved[0].start, retrieved[0].stop))

    def test_peer_ignore_future(self) -> None:
        """
        Test if peers are not added from the future.
        """
        self.hook.on_peer_added(self.peer)
        # A removal time BEFORE the peer's creation time must be rejected by the hook.
        self.hook.mocked_time = self.peer.creation_time - 1.0
        self.hook.on_peer_removed(self.peer)

        retrieved = self.memdb.get(self.peer)

        self.assertEqual(0, len(retrieved))
3,270
Python
.py
82
32.170732
115
0.648768
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,963
components.py
Tribler_tribler/src/tribler/core/components.py
from __future__ import annotations

from pathlib import Path
from typing import TYPE_CHECKING, Type, cast

from ipv8.bootstrapping.dispersy.bootstrapper import DispersyBootstrapper
from ipv8.community import Community
from ipv8.configuration import DISPERSY_BOOTSTRAPPER
from ipv8.loader import CommunityLauncher, after, kwargs, overlay, precondition, set_in_session, walk_strategy
from ipv8.overlay import Overlay, SettingsClass
from ipv8.peerdiscovery.discovery import DiscoveryStrategy, RandomWalk

if TYPE_CHECKING:
    from ipv8.bootstrapping.bootstrapper_interface import Bootstrapper
    from ipv8.peer import Peer
    from ipv8.types import IPv8

    from tribler.core.restapi.rest_endpoint import RESTEndpoint
    from tribler.core.session import Session


class CommunityLauncherWEndpoints(CommunityLauncher):
    """
    A CommunityLauncher that can supply endpoints.
    """

    def get_endpoints(self) -> list[RESTEndpoint]:
        """
        Get a list of endpoints that should be loaded.
        """
        return []


class BaseLauncher(CommunityLauncherWEndpoints):
    """
    The base class for all Tribler Community launchers.
    """

    def get_overlay_class(self) -> type[Community]:
        """
        Overwrite this to return the correct Community type.
        """
        raise NotImplementedError

    def get_bootstrappers(self, session: Session) -> list[tuple[type[Bootstrapper], dict]]:
        """
        Simply use the old Dispersy bootstrapper format.
        """
        return [(DispersyBootstrapper, DISPERSY_BOOTSTRAPPER["init"])]

    def get_walk_strategies(self) -> list[tuple[type[DiscoveryStrategy], dict, int]]:
        """
        Adhere to the default walking behavior: random walk with a maximum of 20 peers.
        """
        return [(RandomWalk, {}, 20)]

    def get_my_peer(self, ipv8: IPv8, session: Session) -> Peer:
        """
        Get the default key.
        """
        return ipv8.keys["anonymous id"]


class Component(Community):
    """
    A glorified TaskManager. This should also really be a TaskManager.

    I did not make this a TaskManager because I am lazy - Quinten (2024)
    """

    def __init__(self, settings: SettingsClass) -> None:
        """
        Create a new inert fake Community.
        """
        settings.community_id = self.__class__.__name__.encode()
        # Bypass Community.__init__ on purpose: skip community registration and networking setup.
        Overlay.__init__(self, settings)
        self.cancel_pending_task("discover_lan_addresses")
        self.endpoint.remove_listener(self)
        # No bootstrapping and no peers: this "community" never talks to the network.
        self.bootstrappers: list[Bootstrapper] = []
        self.max_peers = 0
        self._prefix = settings.community_id
        self.settings = settings


class ComponentLauncher(CommunityLauncherWEndpoints):
    """
    A launcher for components that simply need a TaskManager, not a full Community.
    """

    def get_overlay_class(self) -> type[Community]:
        """
        Create a fake Community, dynamically subclassed from Component and named after this launcher.
        """
        return cast(Type[Community], type(f"{self.__class__.__name__}", (Component,), {}))

    def get_my_peer(self, ipv8: IPv8, session: Session) -> Peer:
        """
        Our peer still uses the Tribler default key.
        """
        return ipv8.keys["anonymous id"]


@set_in_session("content_discovery_community")
@after("DatabaseComponent")
@precondition('session.config.get("database/enabled")')
@precondition('session.config.get("torrent_checker/enabled")')
@precondition('session.config.get("content_discovery_community/enabled")')
@overlay("tribler.core.content_discovery.community", "ContentDiscoveryCommunity")
@kwargs(metadata_store="session.mds", torrent_checker="session.torrent_checker", notifier="session.notifier")
class ContentDiscoveryComponent(BaseLauncher):
    """
    Launch instructions for the content discovery community.
    """

    def finalize(self, ipv8: IPv8, session: Session, community: Community) -> None:
        """
        When we are done launching, register our REST API.
        """
        session.rest_manager.get_endpoint("/api/search").content_discovery_community = community

    def get_endpoints(self) -> list[RESTEndpoint]:
        """
        Add the search endpoint.
        """
        from tribler.core.content_discovery.restapi.search_endpoint import SearchEndpoint
        return [*super().get_endpoints(), SearchEndpoint()]


@precondition('session.config.get("database/enabled")')
class DatabaseComponent(ComponentLauncher):
    """
    Launch instructions for the database.
    """

    def prepare(self, ipv8: IPv8, session: Session) -> None:
        """
        Create the database instances we need for Tribler.
        """
        from tribler.core.database.store import MetadataStore
        from tribler.core.database.tribler_database import TriblerDatabase
        from tribler.core.notifier import Notification

        db_path = str(Path(session.config.get_version_state_dir()) / "sqlite" / "tribler.db")
        mds_path = str(Path(session.config.get_version_state_dir()) / "sqlite" / "metadata.db")
        # The memory_db setting switches both databases to volatile in-memory storage.
        if session.config.get("memory_db"):
            db_path = ":memory:"
            mds_path = ":memory:"

        session.db = TriblerDatabase(db_path)
        session.mds = MetadataStore(
            mds_path,
            session.ipv8.keys["anonymous id"].key,
            notifier=session.notifier,
            disable_sync=False
        )
        session.notifier.add(Notification.torrent_metadata_added, session.mds.TorrentMetadata.add_ffa_from_dict)

    def finalize(self, ipv8: IPv8, session: Session, community: Community) -> None:
        """
        When we are done launching, register our REST API.
        """
        session.rest_manager.get_endpoint("/api/downloads").mds = session.mds
        session.rest_manager.get_endpoint("/api/statistics").mds = session.mds
        db_endpoint = session.rest_manager.get_endpoint("/api/metadata")
        db_endpoint.download_manager = session.download_manager
        db_endpoint.mds = session.mds
        db_endpoint.tribler_db = session.db

    def get_endpoints(self) -> list[RESTEndpoint]:
        """
        Add the database endpoint.
        """
        from tribler.core.database.restapi.database_endpoint import DatabaseEndpoint
        return [*super().get_endpoints(), DatabaseEndpoint()]


@set_in_session("knowledge_community")
@after("DatabaseComponent")
@precondition('session.config.get("database/enabled")')
@precondition('session.config.get("knowledge_community/enabled")')
@overlay("tribler.core.knowledge.community", "KnowledgeCommunity")
@kwargs(db="session.db", key='session.ipv8.keys["secondary"].key')
class KnowledgeComponent(CommunityLauncherWEndpoints):
    """
    Launch instructions for the knowledge community.
    """

    def finalize(self, ipv8: IPv8, session: Session, community: Community) -> None:
        """
        When we are done launching, register our REST API.
        """
        endpoint = session.rest_manager.get_endpoint("/api/knowledge")
        endpoint.db = session.db
        endpoint.community = community

    def get_endpoints(self) -> list[RESTEndpoint]:
        """
        Add the knowledge endpoint.
        """
        from tribler.core.knowledge.restapi.knowledge_endpoint import KnowledgeEndpoint
        return [*super().get_endpoints(), KnowledgeEndpoint()]


@after("DatabaseComponent")
@precondition('session.config.get("rendezvous/enabled")')
@overlay("tribler.core.rendezvous.community", "RendezvousCommunity")
class RendezvousComponent(BaseLauncher):
    """
    Launch instructions for the rendezvous community.
    """

    def get_kwargs(self, session: Session) -> dict:
        """
        Create and forward the rendezvous database for the Community.
        """
        from tribler.core.rendezvous.database import RendezvousDatabase

        out = super().get_kwargs(session)
        out["database"] = (RendezvousDatabase(db_path=Path(session.config.get_version_state_dir())
                                              / "sqlite" / "rendezvous.db"))
        return out

    def finalize(self, ipv8: IPv8, session: Session, community: Community) -> None:
        """
        Start listening to peer connections after starting.
        """
        from tribler.core.rendezvous.community import RendezvousCommunity
        from tribler.core.rendezvous.rendezvous_hook import RendezvousHook

        rendezvous_hook = RendezvousHook(cast(RendezvousCommunity, community).composition.database)
        ipv8.network.add_peer_observer(rendezvous_hook)


@precondition('session.config.get("torrent_checker/enabled")')
class TorrentCheckerComponent(ComponentLauncher):
    """
    Launch instructions for the torrent checker.
    """

    def prepare(self, overlay_provider: IPv8, session: Session) -> None:
        """
        Initialize the torrent checker and the torrent manager.
        """
        from tribler.core.torrent_checker.torrent_checker import TorrentChecker
        from tribler.core.torrent_checker.tracker_manager import TrackerManager

        tracker_manager = TrackerManager(state_dir=Path(session.config.get_version_state_dir()),
                                         metadata_store=session.mds)
        torrent_checker = TorrentChecker(config=session.config,
                                         download_manager=session.download_manager,
                                         notifier=session.notifier,
                                         tracker_manager=tracker_manager,
                                         metadata_store=session.mds,
                                         socks_listen_ports=[s.port for s in session.socks_servers])
        session.torrent_checker = torrent_checker

    def finalize(self, ipv8: IPv8, session: Session, community: Community) -> None:
        """
        When we are done launching, register our REST API.
        """
        community.register_task("Start torrent checker", session.torrent_checker.initialize)
        session.rest_manager.get_endpoint("/api/metadata").torrent_checker = session.torrent_checker


@set_in_session("dht_discovery_community")
@precondition('session.config.get("dht_discovery/enabled")')
@overlay("ipv8.dht.discovery", "DHTDiscoveryCommunity")
class DHTDiscoveryComponent(BaseLauncher):
    """
    Launch instructions for the DHT discovery community.
    """

    def finalize(self, ipv8: IPv8, session: Session, community: Community) -> None:
        """
        When we are done launching, register our REST API.
        """
        session.rest_manager.get_endpoint("/api/ipv8").endpoints["/dht"].dht = community


@precondition('session.config.get("recommender/enabled")')
@overlay("tribler.core.recommender.community", "RecommenderCommunity")
class RecommenderComponent(BaseLauncher):
    """
    Launch instructions for the user recommender community.
    """

    def get_kwargs(self, session: Session) -> dict:
        """
        Create and forward the recommender database manager for the Community.
        """
        from tribler.core.recommender.manager import Manager

        db_path = str(Path(session.config.get_version_state_dir()) / "sqlite" / "recommender.db")
        if session.config.get("memory_db"):
            db_path = ":memory:"

        out = super().get_kwargs(session)
        out["manager"] = Manager(db_path)
        return out

    def finalize(self, ipv8: IPv8, session: Session, community: Community) -> None:
        """
        When we are done launching, register our REST API.
        """
        from tribler.core.recommender.community import RecommenderCommunity

        endpoint = session.rest_manager.get_endpoint("/api/recommender")
        endpoint.manager = cast(RecommenderCommunity, community).manager

    def get_endpoints(self) -> list[RESTEndpoint]:
        """
        Add the recommender endpoint.
        """
        from tribler.core.recommender.restapi.endpoint import RecommenderEndpoint
        return [*super().get_endpoints(), RecommenderEndpoint()]


@set_in_session("tunnel_community")
@precondition('session.config.get("tunnel_community/enabled")')
@after("DHTDiscoveryComponent")
@walk_strategy("tribler.core.tunnel.discovery", "GoldenRatioStrategy", -1)
@overlay("tribler.core.tunnel.community", "TriblerTunnelCommunity")
class TunnelComponent(BaseLauncher):
    """
    Launch instructions for the tunnel community.
    """

    def get_kwargs(self, session: Session) -> dict:
        """
        Extend our community arguments with all necessary config settings and objects.
        """
        from ipv8.dht.discovery import DHTDiscoveryCommunity
        from ipv8.dht.provider import DHTCommunityProvider

        out = super().get_kwargs(session)
        out["exitnode_cache"] = Path(session.config.get_version_state_dir()) / "exitnode_cache.dat"
        out["notifier"] = session.notifier
        out["download_manager"] = session.download_manager
        out["socks_servers"] = session.socks_servers
        out["min_circuits"] = session.config.get("tunnel_community/min_circuits")
        out["max_circuits"] = session.config.get("tunnel_community/max_circuits")
        out["default_hops"] = session.config.get("libtorrent/download_defaults/number_hops")
        # The DHT provider is only available when the (optional) DHT discovery community is loaded.
        out["dht_provider"] = (DHTCommunityProvider(session.ipv8.get_overlay(DHTDiscoveryCommunity),
                                                    session.config.get("ipv8/port"))
                               if session.ipv8.get_overlay(DHTDiscoveryCommunity) else None)
        return out

    def finalize(self, ipv8: IPv8, session: Session, community: Community) -> None:
        """
        When we are done launching, register our REST API.
        """
        session.rest_manager.get_endpoint("/api/downloads").tunnel_community = community
        session.rest_manager.get_endpoint("/api/ipv8").endpoints["/tunnel"].tunnels = community


@precondition('session.config.get("versioning/enabled")')
class VersioningComponent(ComponentLauncher):
    """
    Launch instructions for the versioning of Tribler.
    """

    def finalize(self, ipv8: IPv8, session: Session, community: Community) -> None:
        """
        When we are done launching, register our REST API.
        """
        from tribler.core.versioning.manager import VersioningManager

        session.rest_manager.get_endpoint("/api/versioning").versioning_manager = VersioningManager(
            community, session.config
        )

    def get_endpoints(self) -> list[RESTEndpoint]:
        """
        Add the versioning endpoint.
        """
        from tribler.core.versioning.restapi.versioning_endpoint import VersioningEndpoint
        return [*super().get_endpoints(), VersioningEndpoint()]
14,598
Python
.py
309
39.252427
112
0.678252
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,964
notifier.py
Tribler_tribler/src/tribler/core/notifier.py
from __future__ import annotations import typing from collections import defaultdict from enum import Enum from typing import Callable from ipv8.messaging.anonymization.tunnel import Circuit class Desc(typing.NamedTuple): """ A Notification callback descriptor. """ name: str fields: list[str] types: list[tuple[type, ...] | type] class Notification(Enum): """ All possible global events that happen in Tribler. """ torrent_finished = Desc("torrent_finished", ["infohash", "name", "hidden"], [str, str, bool]) torrent_status_changed = Desc("torrent_status_changed", ["infohash", "status"], [str, str]) tribler_shutdown_state = Desc("tribler_shutdown_state", ["state"], [str]) tribler_new_version = Desc("tribler_new_version", ["version"], [str]) remote_query_results = Desc("remote_query_results", ["query", "results", "uuid", "peer"], [str, list, str, str]) local_query_results = Desc("local_query_results", ["query", "results"], [str, list]) circuit_removed = Desc("circuit_removed", ["circuit", "additional_info"], [str, Circuit]) tunnel_removed = Desc("tunnel_removed", ["circuit_id", "bytes_up", "bytes_down", "uptime", "additional_info"], [int, int, int, float, str]) watch_folder_corrupt_file = Desc("watch_folder_corrupt_file", ["file_name"], [str]) torrent_health_updated = Desc("torrent_health_updated", ["infohash", "num_seeders", "num_leechers", "last_tracker_check", "health"], [str, int, int, int, str]) low_space = Desc("low_space", ["disk_usage_data"], [dict]) events_start = Desc("events_start", ["public_key", "version"], [str, str]) tribler_exception = Desc("tribler_exception", ["error"], [dict]) content_discovery_community_unknown_torrent_added = Desc("content_discovery_community_unknown_torrent_added", [], []) report_config_error = Desc("report_config_error", ["error"], [str]) peer_disconnected = Desc("peer_disconnected", ["peer_id"], [bytes]) tribler_torrent_peer_update = Desc("tribler_torrent_peer_update", ["peer_id", "infohash", "balance"], [bytes, bytes, int]) 
torrent_metadata_added = Desc("torrent_metadata_added", ["metadata"], [dict]) new_torrent_metadata_created = Desc("new_torrent_metadata_created", ["infohash", "title"], [(bytes, type(None)), (str, type(None))]) class Notifier: """ The class responsible for managing and calling observers of global Tribler events. """ def __init__(self) -> None: """ Create a new notifier. """ self.observers: dict[Notification, list[Callable[..., None]]] = defaultdict(list) self.delegates: set[Callable[..., None]] = set() def add(self, topic: Notification, observer: Callable[..., None]) -> None: """ Add an observer for the given Notification type. """ self.observers[topic].append(observer) def notify(self, topic: Notification | str, /, **kwargs) -> None: """ Notify all observers that have subscribed to the given topic. """ notification = getattr(Notification, topic) if isinstance(topic, str) else topic topic_name, args, types = notification.value if set(args) ^ set(kwargs.keys()): message = f"{topic_name} expecting arguments {args} (of types {types}) but received {kwargs}" raise ValueError(message) for observer in self.observers[notification]: observer(**kwargs) for delegate in self.delegates: delegate(notification, **kwargs)
3,787
Python
.py
70
45
116
0.618521
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,965
__init__.py
Tribler_tribler/src/tribler/core/__init__.py
""" Tribler is a privacy enhanced BitTorrent client with P2P content discovery. """
84
Python
.py
3
27
75
0.790123
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,966
session.py
Tribler_tribler/src/tribler/core/session.py
from __future__ import annotations import asyncio import logging import sys from asyncio import AbstractEventLoop, Event from contextlib import contextmanager from traceback import format_exception from typing import TYPE_CHECKING, Any, Generator, Type, cast import aiohttp from ipv8.loader import IPv8CommunityLoader from ipv8_service import IPv8 from tribler.core.components import ( ContentDiscoveryComponent, DatabaseComponent, DHTDiscoveryComponent, KnowledgeComponent, RecommenderComponent, RendezvousComponent, TorrentCheckerComponent, TunnelComponent, VersioningComponent, ) from tribler.core.libtorrent.download_manager.download_manager import DownloadManager from tribler.core.libtorrent.restapi.create_torrent_endpoint import CreateTorrentEndpoint from tribler.core.libtorrent.restapi.downloads_endpoint import DownloadsEndpoint from tribler.core.libtorrent.restapi.libtorrent_endpoint import LibTorrentEndpoint from tribler.core.libtorrent.restapi.torrentinfo_endpoint import TorrentInfoEndpoint from tribler.core.notifier import Notification, Notifier from tribler.core.restapi.events_endpoint import EventsEndpoint from tribler.core.restapi.file_endpoint import FileEndpoint from tribler.core.restapi.ipv8_endpoint import IPv8RootEndpoint from tribler.core.restapi.rest_manager import RESTManager from tribler.core.restapi.settings_endpoint import SettingsEndpoint from tribler.core.restapi.shutdown_endpoint import ShutdownEndpoint from tribler.core.restapi.statistics_endpoint import StatisticsEndpoint from tribler.core.restapi.webui_endpoint import WebUIEndpoint from tribler.core.socks5.server import Socks5Server if TYPE_CHECKING: from types import TracebackType from tribler.core.database.store import MetadataStore from tribler.core.database.tribler_database import TriblerDatabase from tribler.core.torrent_checker.torrent_checker import TorrentChecker from tribler.tribler_config import TriblerConfigManager logger = logging.getLogger(__name__) @contextmanager def 
rust_enhancements(session: Session) -> Generator[None, None, None]: """ Attempt to import the IPv8 Rust anonymization backend. """ use_fallback = session.config.get("statistics") if_specs = [ifc for ifc in session.config.configuration["ipv8"]["interfaces"] if ifc["interface"] == "UDPIPv4"] if not use_fallback: try: from ipv8.messaging.interfaces.dispatcher.endpoint import INTERFACES from ipv8_rust_tunnels.endpoint import RustEndpoint INTERFACES["UDPIPv4"] = RustEndpoint for ifc in if_specs: ifc["worker_threads"] = ifc.get("worker_threads", session.config.get("tunnel_community/max_circuits")) yield if if_specs: for server in session.socks_servers: ipv4_endpoint = session.ipv8.endpoint.interfaces["UDPIPv4"] server.rust_endpoint = ipv4_endpoint if isinstance(ipv4_endpoint, RustEndpoint) else None except ImportError: logger.info("Rust endpoint not found (pip install ipv8-rust-tunnels).") use_fallback = True if use_fallback: # Make sure there are no ``worker_threads`` settings fed into non-Rust endpoints. previous_values = [("worker_threads" in ifc, ifc.pop("worker_threads")) for ifc in if_specs] yield # Restore ``worker_threads`` settings, if they were there. for i, (has_previous_value, previous_value) in enumerate(previous_values): if has_previous_value: if_specs[i]["worker_threads"] = previous_value async def _is_url_available(url: str, timeout: int=1) -> bool: async with aiohttp.ClientSession() as session: try: async with session.get(url, timeout=timeout): return True except (asyncio.TimeoutError, aiohttp.client_exceptions.ClientConnectorError): return False class Session: """ A session manager that manages all components. """ def __init__(self, config: TriblerConfigManager) -> None: """ Create a new session without initializing any components yet. 
""" self.config = config self.shutdown_event = Event() self.notifier = Notifier() # Libtorrent self.download_manager = DownloadManager(self.config, self.notifier) self.socks_servers = [Socks5Server(port) for port in self.config.get("libtorrent/socks_listen_ports")] # IPv8 with rust_enhancements(self): self.ipv8 = IPv8(self.config.get("ipv8"), enable_statistics=self.config.get("statistics")) self.loader = IPv8CommunityLoader() # REST self.rest_manager = RESTManager(self.config) # Optional globals, set by components: self.db: TriblerDatabase | None = None self.mds: MetadataStore | None = None self.torrent_checker: TorrentChecker | None = None def register_launchers(self) -> None: """ Register all IPv8 launchers that allow communities to be loaded. """ for launcher_class in [ContentDiscoveryComponent, DatabaseComponent, DHTDiscoveryComponent, KnowledgeComponent, RecommenderComponent, RendezvousComponent, TorrentCheckerComponent, TunnelComponent, VersioningComponent]: instance = launcher_class() for rest_ep in instance.get_endpoints(): self.rest_manager.add_endpoint(rest_ep) self.loader.set_launcher(instance) def register_rest_endpoints(self) -> None: """ Register all core REST endpoints without initializing them. 
""" self.rest_manager.add_endpoint(WebUIEndpoint()) self.rest_manager.add_endpoint(FileEndpoint()) self.rest_manager.add_endpoint(CreateTorrentEndpoint(self.download_manager)) self.rest_manager.add_endpoint(DownloadsEndpoint(self.download_manager)) self.rest_manager.add_endpoint(EventsEndpoint(self.notifier)) self.rest_manager.add_endpoint(IPv8RootEndpoint()) self.rest_manager.add_endpoint(LibTorrentEndpoint(self.download_manager)) self.rest_manager.add_endpoint(SettingsEndpoint(self.config)) self.rest_manager.add_endpoint(ShutdownEndpoint(self.shutdown_event.set)) self.rest_manager.add_endpoint(StatisticsEndpoint()) self.rest_manager.add_endpoint(TorrentInfoEndpoint(self.download_manager)) def _except_hook(self, typ: Type[BaseException], value: BaseException, traceback: TracebackType | None) -> None: """ Handle an uncaught exception. Note: at this point the REST interface is available. Note2: ignored BaseExceptions are BaseExceptionGroup, GeneratorExit, KeyboardInterrupt and SystemExit """ logger.exception("Uncaught exception: %s", "".join(format_exception(typ, value, traceback))) if isinstance(value, Exception): cast(EventsEndpoint, self.rest_manager.get_endpoint("/api/events")).on_tribler_exception(value) def _asyncio_except_hook(self, loop: AbstractEventLoop, context: dict[str, Any]) -> None: """ Handle an uncaught asyncio exception. Note: at this point the REST interface is available. Note2: ignored BaseExceptions are BaseExceptionGroup, GeneratorExit, KeyboardInterrupt and SystemExit """ exc = context.get("exception") if isinstance(exc, Exception): logger.exception("Uncaught async exception: %s", "".join(format_exception(exc.__class__, exc, exc.__traceback__))) cast(EventsEndpoint, self.rest_manager.get_endpoint("/api/events")).on_tribler_exception(exc) raise exc def attach_exception_handler(self) -> None: """ Hook ourselves in as the general exception handler. 
""" sys.excepthook = self._except_hook asyncio.get_running_loop().set_exception_handler(self._asyncio_except_hook) async def start(self) -> None: """ Initialize and launch all components and REST endpoints. """ self.register_rest_endpoints() self.register_launchers() # REST (1/2) await self.rest_manager.start() self.attach_exception_handler() # Libtorrent for server in self.socks_servers: await server.start() self.download_manager.socks_listen_ports = [s.port for s in self.socks_servers] await self.download_manager.initialize() self.download_manager.start() # IPv8 self.loader.load(self.ipv8, self) await self.ipv8.start() # REST (2/2) self.rest_manager.get_endpoint("/api/ipv8").initialize(self.ipv8) self.rest_manager.get_endpoint("/api/statistics").ipv8 = self.ipv8 if self.config.get("statistics"): self.rest_manager.get_endpoint("/api/ipv8").endpoints["/overlays"].enable_overlay_statistics(True, None, True) async def find_api_server(self) -> str | None: """ Find the API server, if available. """ if port := self.config.get("api/http_port_running"): http_url = f'http://{self.config.get("api/http_host")}:{port}' if await _is_url_available(http_url): return http_url if port := self.config.get("api/https_port_running"): https_url = f'https://{self.config.get("api/https_host")}:{port}' if await _is_url_available(https_url): return https_url return None async def shutdown(self) -> None: """ Shut down all connections and components. 
""" # Stop network event generators if self.torrent_checker: self.notifier.notify(Notification.tribler_shutdown_state, state="Shutting down torrent checker.") await self.torrent_checker.shutdown() if self.ipv8: self.notifier.notify(Notification.tribler_shutdown_state, state="Shutting down IPv8 peer-to-peer overlays.") await self.ipv8.stop() # Stop libtorrent managers self.notifier.notify(Notification.tribler_shutdown_state, state="Shutting down download manager.") await self.download_manager.shutdown() self.notifier.notify(Notification.tribler_shutdown_state, state="Shutting down local SOCKS5 interface.") for server in self.socks_servers: await server.stop() # Stop database activities if self.db: self.notifier.notify(Notification.tribler_shutdown_state, state="Shutting down general-purpose database.") self.db.shutdown() if self.mds: self.notifier.notify(Notification.tribler_shutdown_state, state="Shutting down metadata database.") self.mds.shutdown() # Stop communication with the GUI self.notifier.notify(Notification.tribler_shutdown_state, state="Shutting down GUI connection. Going dark.") await self.rest_manager.stop()
11,271
Python
.py
222
41.720721
120
0.68683
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,967
events_endpoint.py
Tribler_tribler/src/tribler/core/restapi/events_endpoint.py
from __future__ import annotations

import json
import time
from asyncio import CancelledError, Event, Future, Queue
from contextlib import suppress
from importlib.metadata import PackageNotFoundError, version
from traceback import format_exception
from typing import TYPE_CHECKING, TypedDict

import marshmallow.fields
from aiohttp import web
from aiohttp_apispec import docs
from ipv8.REST.schema import schema

from tribler.core.notifier import Notification, Notifier
from tribler.core.restapi.rest_endpoint import RESTEndpoint

if TYPE_CHECKING:
    from aiohttp.abc import Request

    from ipv8.messaging.anonymization.tunnel import Circuit

# The only notification topics that are forwarded to the GUI over the event stream.
topics_to_send_to_gui = [
    Notification.torrent_status_changed,
    Notification.tunnel_removed,
    Notification.watch_folder_corrupt_file,
    Notification.tribler_new_version,
    Notification.tribler_exception,
    Notification.torrent_finished,
    Notification.torrent_health_updated,
    Notification.tribler_shutdown_state,
    Notification.remote_query_results,
    Notification.low_space,
    Notification.report_config_error,
]


class MessageDict(TypedDict):
    """
    A message fit for the GUI, usually forwarded from the Notifier.
    """

    topic: str
    kwargs: dict[str, str]


class EventsEndpoint(RESTEndpoint):
    """
    Important events in Tribler are returned over the events endpoint. This connection is held open. Each event
    is pushed over this endpoint in the form of a JSON dictionary. Each JSON dictionary contains a type field that
    indicates the type of the event. Individual events are separated by a newline character.
    """

    path = "/api/events"

    def __init__(self, notifier: Notifier, public_key: str | None = None) -> None:
        """
        Create a new events endpoint.

        :param notifier: the notifier whose messages are forwarded to the GUI.
        :param public_key: the public key announced to the GUI in the initial message, if any.
        """
        # Set once this endpoint starts shutting down; created before super().__init__().
        self.shutdown_event = Event()
        super().__init__()
        # All currently-open event stream connections.
        self.events_responses: list[web.StreamResponse] = []
        # The first error that occurred before any GUI connection existed (delivered on connect).
        self.undelivered_error: Exception | None = None
        self.public_key = public_key
        self.notifier = notifier
        self.queue: Queue[MessageDict] = Queue()
        self.register_task("Process queue", self.process_queue)
        notifier.add(Notification.circuit_removed, self.on_circuit_removed)
        notifier.delegates.add(self.on_notification)
        self.app.add_routes([web.get("", self.get_events)])

    @property
    def _shutdown(self) -> bool:
        # Whether this endpoint is shutting down.
        return self.shutdown_event.is_set()

    @_shutdown.setter
    def _shutdown(self, value: bool) -> None:
        # One-way flag: the event is only ever set, never cleared again.
        if value:
            self.shutdown_event.set()

    def on_notification(self, topic: Notification, **kwargs) -> None:
        """
        Callback for when a notification is received. Check if we should forward it to the GUI.
        """
        if topic in topics_to_send_to_gui:
            self.send_event({"topic": topic.value.name, "kwargs": kwargs})

    def on_circuit_removed(self, circuit: Circuit, additional_info: str) -> None:
        """
        Special handler for circuit removal notifications.

        The original notification contains non-JSON-serializable argument, so we send another one to GUI.
        """
        self.notifier.notify(Notification.tunnel_removed,
                             circuit_id=circuit.circuit_id,
                             bytes_up=circuit.bytes_up,
                             bytes_down=circuit.bytes_down,
                             uptime=time.time() - circuit.creation_time,
                             additional_info=additional_info)

    def initial_message(self) -> MessageDict:
        """
        Create the initial message to announce to the GUI.
        """
        try:
            v = version("tribler")
        except PackageNotFoundError:
            # Running from source/GIT instead of an installed package.
            v = "git"
        return {
            "topic": Notification.events_start.value.name,
            "kwargs": {"public_key": self.public_key or "", "version": v}
        }

    def error_message(self, reported_error: Exception) -> MessageDict:
        """
        Create an error message for the GUI.

        :param reported_error: the exception to serialize into the message.
        """
        return {
            "topic": Notification.tribler_exception.value.name,
            "kwargs": {
                "error": str(reported_error),
                "traceback": "".join(format_exception(type(reported_error), reported_error,
                                                      reported_error.__traceback__))},
        }

    def encode_message(self, message: MessageDict) -> bytes:
        """
        Use JSON to dump the given message to bytes (Server-Sent Events "event:"/"data:" framing).
        """
        try:
            event = message.get("topic", "message").encode()
            data = json.dumps(message.get("kwargs", {})).encode()
            return b"event: " + event + b"\ndata: " + data + b"\n\n"
        except (UnicodeDecodeError, TypeError) as e:
            # The message contains invalid characters; fix them
            self._logger.exception("Event contains non-unicode characters, dropping %s", repr(message))
            return self.encode_message(self.error_message(e))

    def has_connection_to_gui(self) -> bool:
        """
        Whether the GUI has responded before.
        """
        return bool(self.events_responses)

    def should_skip_message(self, message: MessageDict) -> bool:
        """
        Returns True if EventsEndpoint should skip sending message to GUI due to a shutdown or no connection to GUI.
        Issue an appropriate warning if the message cannot be sent.
        """
        if self._shutdown:
            self._logger.warning("Shutdown is in progress, skip message: %s", str(message))
            return True
        if not self.has_connection_to_gui():
            self._logger.warning("No connections to GUI, skip message: %s", str(message))
            return True
        return False

    def send_event(self, message: MessageDict) -> None:
        """
        Put event message to a queue to be sent to GUI.
        """
        if not self.should_skip_message(message):
            self.queue.put_nowait(message)

    async def process_queue(self) -> None:
        """
        Drain the message queue, forwarding each queued message to the GUI.
        """
        while True:
            message = await self.queue.get()
            # Conditions may have changed since the message was queued; re-check before writing.
            if not self.should_skip_message(message):
                await self._write_data(message)

    async def _write_data(self, message: MessageDict) -> None:
        """
        Write data over the event socket if it's open.
        """
        self._logger.debug("Write message: %s", str(message))
        try:
            message_bytes = self.encode_message(message)
        except Exception as e:
            # if a notification arguments contains non-JSON-serializable data, the exception should be logged
            self._logger.exception("%s: %s", str(e), repr(message))
            return
        processed_responses = []
        for response in self.events_responses:
            try:
                await response.write(message_bytes)
                # by creating the list with processed responses we want to remove responses with
                # ConnectionResetError from `self.events_responses`:
                processed_responses.append(response)
            except ConnectionResetError as e:
                # The connection was closed by GUI
                self._logger.warning(e, exc_info=True)
        self.events_responses = processed_responses

    def on_tribler_exception(self, reported_error: Exception) -> None:
        """
        An exception has occurred in Tribler.
        """
        if self._shutdown:
            self._logger.warning("Ignoring tribler exception, because the endpoint is shutting down.")
            return
        message = self.error_message(reported_error)
        if self.has_connection_to_gui():
            self.send_event(message)
        elif not self.undelivered_error:
            # If there are several undelivered errors, we store the first error as more important and skip other
            self.undelivered_error = reported_error

    @docs(
        tags=["General"],
        summary="Open an EventStream for receiving Tribler events.",
        responses={
            200: {
                "schema": schema(EventsResponse={"type": marshmallow.fields.String,
                                                 "event": marshmallow.fields.Dict})
            }
        }
    )
    async def get_events(self, request: Request) -> web.StreamResponse:
        """
        A GET request to this endpoint will open the event connection.

        **Example request**:

        .. sourcecode:: none

            curl -X GET http://localhost:20100/events
        """
        # Setting content-type to text/event-stream to ensure browsers will handle the content properly
        response = web.StreamResponse(status=200,
                                      reason="OK",
                                      headers={"Content-Type": "text/event-stream",
                                               "Cache-Control": "no-cache",
                                               "Connection": "keep-alive"})
        await response.prepare(request)
        await response.write(self.encode_message(self.initial_message()))

        # An error may have happened before any connection existed: deliver it now, exactly once.
        if self.undelivered_error:
            error = self.undelivered_error
            self.undelivered_error = None
            await response.write(self.encode_message(self.error_message(error)))

        self.events_responses.append(response)

        try:
            # Hold the connection open until the endpoint shuts down (or the stream is cancelled).
            await self.shutdown_event.wait()
        except CancelledError:
            self._logger.warning("Event stream was canceled")
        else:
            self._logger.info("Event stream was closed due to shutdown")

        # A ``shutdown()`` on our parent may have cancelled ``_handler_waiter`` before this method returns.
        # If we leave this be, an error will be raised if the ``Future`` result is set after this method returns.
        # See: https://github.com/Tribler/tribler/issues/8156
        if request.protocol._handler_waiter and request.protocol._handler_waiter.cancelled():  # noqa: SLF001
            request.protocol._handler_waiter = Future()  # noqa: SLF001

        # See: https://github.com/Tribler/tribler/pull/7906
        with suppress(ValueError):
            self.events_responses.remove(response)

        return response
10,307
Python
.py
227
35.176211
118
0.62642
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,968
shutdown_endpoint.py
Tribler_tribler/src/tribler/core/restapi/shutdown_endpoint.py
from typing import Callable

from aiohttp import web
from aiohttp_apispec import docs
from ipv8.REST.schema import schema
from marshmallow.fields import Boolean

from tribler.core.restapi.rest_endpoint import RESTEndpoint, RESTResponse


class ShutdownEndpoint(RESTEndpoint):
    """
    With this endpoint you can shut down Tribler.
    """

    path = "/api/shutdown"

    def __init__(self, shutdown_callback: Callable[[], None]) -> None:
        """
        Create a new shutdown endpoint.

        :param shutdown_callback: the callable invoked when a shutdown is requested.
        """
        super().__init__()
        self.shutdown_callback = shutdown_callback
        self.app.add_routes([web.put("", self.shutdown_request)])

    @docs(
        tags=["General"],
        summary="Shutdown Tribler.",
        responses={
            200: {
                "schema": schema(TriblerShutdownResponse={
                    "shutdown": Boolean
                })
            }
        }
    )
    async def shutdown_request(self, _: web.Request) -> RESTResponse:
        """
        Shutdown Tribler.

        Fix: aiohttp handlers must be coroutines (plain-function handlers are deprecated in aiohttp 3.x),
        and every other Tribler endpoint handler is declared ``async`` — made this one consistent.

        :return: a JSON body confirming that the shutdown was initiated.
        """
        self._logger.info("Received a shutdown request from GUI")
        self.shutdown_callback()
        return RESTResponse({"shutdown": True})
1,173
Python
.py
36
24.833333
73
0.613274
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,969
settings_endpoint.py
Tribler_tribler/src/tribler/core/restapi/settings_endpoint.py
from __future__ import annotations

from aiohttp import web
from aiohttp_apispec import docs, json_schema
from ipv8.REST.schema import schema
from marshmallow.fields import Boolean

from tribler.core.libtorrent.download_manager.download_manager import DownloadManager
from tribler.core.restapi.rest_endpoint import RESTEndpoint, RESTResponse
from tribler.tribler_config import TriblerConfigManager


class SettingsEndpoint(RESTEndpoint):
    """
    This endpoint is responsible for handing all requests regarding settings and configuration.
    """

    path = "/api/settings"

    def __init__(self, tribler_config: TriblerConfigManager, download_manager: DownloadManager | None = None) -> None:
        """
        Create a new settings endpoint.

        Fix: ``download_manager`` had an implicit-Optional annotation (``DownloadManager = None``), which
        PEP 484 disallows — annotated as ``DownloadManager | None`` (lazily evaluated via the future import).

        :param tribler_config: the configuration that is read by and updated through this endpoint.
        :param download_manager: optional download manager whose rate limits are refreshed after updates.
        """
        super().__init__()
        self.config = tribler_config
        self.download_manager = download_manager
        self.app.add_routes([web.get("", self.get_settings),
                             web.post("", self.update_settings)])

    @docs(
        tags=["General"],
        summary="Return all the session settings that can be found in Tribler.",
        responses={
            200: {
                "schema": schema(GetTriblerSettingsResponse={})
            }
        },
        description="This endpoint returns all the session settings that can be found in Tribler.\n\n It also "
                    "returns the runtime-determined ports"
    )
    async def get_settings(self, request: web.Request) -> RESTResponse:
        """
        Return all the session settings that can be found in Tribler.
        """
        self._logger.info("Get settings. Request: %s", str(request))
        return RESTResponse({
            "settings": self.config.configuration,
        })

    @docs(
        tags=["General"],
        summary="Update Tribler settings.",
        responses={
            200: {
                "schema": schema(UpdateTriblerSettingsResponse={"modified": Boolean})
            }
        }
    )
    @json_schema(schema(UpdateTriblerSettingsRequest={}))
    async def update_settings(self, request: web.Request) -> RESTResponse:
        """
        Update Tribler settings.

        :return: ``{"modified": True}`` after the merged configuration has been written to disk.
        """
        settings = await request.json()
        self._logger.info("Received settings: %s", settings)
        self._recursive_merge_settings(self.config.configuration, settings)
        self.config.write()
        if self.download_manager:
            # Rate limits may have changed; push them to the running download manager.
            self.download_manager.update_max_rates_from_config()
        return RESTResponse({"modified": True})

    def _recursive_merge_settings(self, existing: dict, updates: dict, top: bool = True) -> None:
        """
        Merge ``updates`` into ``existing`` in place, only touching keys that already exist.

        :param existing: the current configuration (sub-)dictionary, modified in place.
        :param updates: the requested changes; unknown keys are ignored.
        :param top: whether this call operates on the top-level configuration dictionary.
        """
        for key in existing:
            # Ignore top-level ui entry
            if top and key == "ui":
                continue
            value = updates.get(key, existing[key])
            if isinstance(value, dict):
                self._recursive_merge_settings(existing[key], value, False)
            existing[key] = value
        # Since the core doesn't need to be aware of the GUI settings, we just copy them.
        if top and "ui" in updates:
            existing["ui"] = existing.get("ui", {})
            existing["ui"].update(updates["ui"])
3,150
Python
.py
74
33.405405
119
0.630016
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,970
rest_endpoint.py
Tribler_tribler/src/tribler/core/restapi/rest_endpoint.py
from __future__ import annotations

import json
import logging
from typing import TYPE_CHECKING, Dict

from aiohttp import web
from ipv8.taskmanager import TaskManager

if TYPE_CHECKING:
    from ipv8.REST.root_endpoint import RootEndpoint as IPV8RootEndpoint

# HTTP status codes used throughout Tribler's REST endpoints.
HTTP_BAD_REQUEST = 400
HTTP_UNAUTHORIZED = 401
HTTP_NOT_FOUND = 404
HTTP_REQUEST_ENTITY_TOO_LARGE = 413
HTTP_INTERNAL_SERVER_ERROR = 500

MAX_REQUEST_SIZE = 16 * 1024 ** 2  # 16 MB


class RESTEndpoint(TaskManager):
    """
    The base class for all Tribler REST endpoints.
    """

    # The URL prefix under which this endpoint is mounted; set by subclasses.
    path: str

    def __init__(self, middlewares: tuple = (), client_max_size: int = MAX_REQUEST_SIZE) -> None:
        """
        Create a new REST endpoint.

        :param middlewares: the aiohttp middlewares to install on this endpoint's sub-application.
        :param client_max_size: the maximum allowed size, in bytes, of incoming request bodies.
        """
        super().__init__()
        self._logger = logging.getLogger(self.__class__.__name__)
        self.app = web.Application(middlewares=middlewares, client_max_size=client_max_size)


class RootEndpoint(RESTEndpoint):
    """
    Create a new root endpoint. Essentially, this is the same as any other REST endpoint, but it is supposed
    to be the top in the hierarchy.
    """

    def __init__(self, middlewares: tuple = (), client_max_size: int = MAX_REQUEST_SIZE) -> None:
        """
        Create a new root endpoint.

        :param middlewares: the aiohttp middlewares to install on the root application.
        :param client_max_size: the maximum allowed size, in bytes, of incoming request bodies.
        """
        super().__init__(middlewares, client_max_size)
        self.endpoints: Dict[str, RESTEndpoint] = {}

    def add_endpoint(self, prefix: str, endpoint: RESTEndpoint | IPV8RootEndpoint) -> None:
        """
        Add an endpoint to this root endpoint.

        :param prefix: the URL prefix (including the leading forward slash) to mount the endpoint under.
        :param endpoint: the endpoint whose app becomes a sub-application of this root app.
        """
        self.endpoints[prefix] = endpoint
        self.app.add_subapp(prefix, endpoint.app)


class RESTResponse(web.Response):
    """
    A Tribler response for web requests. JSON-compatible response bodies are automatically converted to JSON type.
    """

    def __init__(self, body: dict | list | bytes | str | None = None, headers: dict | None = None,
                 content_type: str | None = None, status: int = 200, **kwargs) -> None:
        """
        Create a new rest response.

        :param body: the response payload; dicts and lists are serialized to JSON automatically.
        :param headers: optional response headers.
        :param content_type: the content type; forced to ``application/json`` for dict/list bodies.
        :param status: the HTTP status code.
        """
        if isinstance(body, (dict, list)):
            body = json.dumps(body)
            content_type = "application/json"
        super().__init__(body=body, headers=headers, content_type=content_type, status=status, **kwargs)


def return_handled_exception(exception: Exception) -> RESTResponse:
    """
    Create a RESTResponse that tells the user that an exception was handled.

    :param exception: the handled exception
    :return: JSON dictionary describing the exception
    """
    return RESTResponse({
        "error": {
            "handled": True,
            "code": exception.__class__.__name__,
            "message": str(exception)
        }
    }, status=HTTP_INTERNAL_SERVER_ERROR)
2,813
Python
.py
71
33.225352
112
0.654779
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,971
rest_manager.py
Tribler_tribler/src/tribler/core/restapi/rest_manager.py
from __future__ import annotations

import logging
import ssl
import traceback
from asyncio.base_events import Server
from functools import wraps
from importlib.metadata import PackageNotFoundError, version
from pathlib import Path
from typing import TYPE_CHECKING, Awaitable, Callable, Generic, TypeVar, cast

from aiohttp import tcp_helpers, web, web_protocol
from aiohttp.web_exceptions import HTTPNotFound, HTTPRequestEntityTooLarge
from aiohttp_apispec import AiohttpApiSpec
from apispec.core import VALID_METHODS_OPENAPI_V2

from tribler.core.restapi.rest_endpoint import (
    HTTP_INTERNAL_SERVER_ERROR,
    HTTP_NOT_FOUND,
    HTTP_REQUEST_ENTITY_TOO_LARGE,
    HTTP_UNAUTHORIZED,
    RESTEndpoint,
    RESTResponse,
    RootEndpoint,
)

if TYPE_CHECKING:
    import asyncio

    from aiohttp.abc import Request

    from tribler.tribler_config import TriblerConfigManager

ComponentsType = TypeVar("ComponentsType", bound=tuple[type])


class TriblerRequest(Request, Generic[ComponentsType]):
    """
    A request that guarantees that the given components are not None in its ``context`` attribute.
    """

    # Populated by required_components_middleware before the handler is invoked.
    context: ComponentsType


logger = logging.getLogger(__name__)

RESTEndpointType = TypeVar("RESTEndpointType", bound=RESTEndpoint)


@wraps(tcp_helpers.tcp_keepalive)
def wrap_tcp_keepalive(transport: asyncio.Transport) -> None:
    """
    A wrapper around aiohttp's tcp_keepalive that catches OSError 22 instances.

    See https://github.com/Tribler/tribler/issues/6429
    """
    try:
        # ``@wraps`` stores the original tcp_keepalive in ``__wrapped__``; delegate to it.
        wrap_tcp_keepalive.__wrapped__(transport)
    except OSError as e:
        logger.warning("Setting tcp_keepalive on aiohttp socket failed!")
        # errno 22 (EINVAL) is swallowed; anything else is re-raised.
        if e.errno != 22:
            raise


# Monkey-patch aiohttp so connections go through the guarded keepalive wrapper above.
web_protocol.tcp_keepalive = wrap_tcp_keepalive


@web.middleware
class ApiKeyMiddleware:
    """
    Middleware to check if REST requests include an API key. The key can be in:

     - The ``X-Api-Key`` header.
     - The ``apikey`` query parameter.
     - The ``api_key`` cookie.
    """

    def __init__(self, api_key: str) -> None:
        """
        Initialize the middleware with the given API key.

        :param api_key: the key requests must present; an empty/falsy key disables the check.
        """
        self.api_key = api_key

    async def __call__(self, request: Request, handler: Callable[[Request], Awaitable[RESTResponse]]) -> RESTResponse:
        """
        Call this middleware.
        """
        if self.authenticate(request):
            return await handler(request)
        return RESTResponse({"error": "Unauthorized access"}, status=HTTP_UNAUTHORIZED)

    def authenticate(self, request: Request) -> bool:
        """
        Is the given request authenticated using an API key.
        """
        # Documentation and UI assets are served without authentication.
        if any(request.path.startswith(path) for path in ["/docs", "/static", "/ui"]):
            return True
        # The api key can either be in the headers or as part of the url query
        api_key = request.headers.get("X-Api-Key") or request.query.get("key") or request.cookies.get("api_key")
        expected_api_key = self.api_key
        return not expected_api_key or expected_api_key == api_key


@web.middleware
async def error_middleware(request: Request, handler: Callable[[Request], Awaitable[RESTResponse]]) -> RESTResponse:
    """
    Middleware to return nicely-formatted errors when common exceptions occur.
    """
    try:
        response = await handler(request)
    except ConnectionResetError:
        # A client closes the connection. It is not the Core error, nothing to handle or report this.
        # We cannot return response, as the connection is already closed, so we just propagate the exception
        # without reporting it to Sentry. The exception will be printed to the log by aiohttp.server.log_exception()
        raise
    except HTTPNotFound:
        return RESTResponse({"error": {
            "handled": True,
            "message": f"Could not find {request.path}"
        }}, status=HTTP_NOT_FOUND)
    except HTTPRequestEntityTooLarge as http_error:
        return RESTResponse({"error": {
            "handled": True,
            "message": http_error.text,
        }}, status=HTTP_REQUEST_ENTITY_TOO_LARGE)
    except Exception as e:
        # Unknown exception: report the full traceback back to the caller as "unhandled".
        full_exception = traceback.format_exc()
        return RESTResponse({"error": {
            "handled": False,
            "code": e.__class__.__name__,
            "message": str(full_exception)
        }}, status=HTTP_INTERNAL_SERVER_ERROR)
    return response


@web.middleware
async def ui_middleware(request: Request, handler: Callable[[Request], Awaitable[RESTResponse]]) -> RESTResponse:
    """
    Forward request to a unknown pathname to /ui. This enables the GUI to request e.g. /index.html
    instead of using /ui/index.html.
    """
    if not any(request.path.startswith(path) for path in ["/docs", "/static", "/ui", "/api"]):
        raise web.HTTPFound('/ui' + request.rel_url.path)
    return await handler(request)


@web.middleware
async def required_components_middleware(request: Request,
                                         handler: Callable[[Request], Awaitable[RESTResponse]]) -> RESTResponse:
    """
    Read a handler's required components and return HTTP_NOT_FOUND if they have not been set (yet).
    """
    # Unwrap decorator layers (e.g. @docs) to reach the actual bound handler method.
    source_handler = handler
    while hasattr(source_handler, "__wrapped__"):
        source_handler = source_handler.__wrapped__
    if hasattr(source_handler, "__self__") and hasattr(source_handler.__self__, "required_components"):
        comps = [getattr(source_handler.__self__, name) for name in source_handler.__self__.required_components]
        if any(comp is None for comp in comps):
            return RESTResponse({"error": {
                "handled": True,
                "message": f"Required components not initialized to serve {request.path}"
            }}, status=HTTP_NOT_FOUND)
        request.context = comps
    return await handler(request)


class RESTManager:
    """
    This class is responsible for managing the startup and closing of the Tribler HTTP API.
    """

    def __init__(self, config: TriblerConfigManager, shutdown_timeout: int = 1) -> None:
        """
        Create a new REST manager.

        :param config: the configuration that provides hosts, ports, the API key and the state dir.
        :param shutdown_timeout: the number of seconds to wait for connections to close on shutdown.
        """
        super().__init__()
        self._logger = logging.getLogger(self.__class__.__name__)
        self.root_endpoint = RootEndpoint(middlewares=(ui_middleware, ApiKeyMiddleware(config.get("api/key")),
                                                       error_middleware, required_components_middleware))
        self.runner: web.AppRunner | None = None
        self.site: web.TCPSite | None = None
        self.site_https: web.TCPSite | None = None
        self.config = config
        self.state_dir = config.get("state_dir")
        self.http_host = self.config.get("api/http_host")
        self.https_host = self.config.get("api/https_host")
        self.shutdown_timeout = shutdown_timeout

    def add_endpoint(self, endpoint: RESTEndpointType) -> RESTEndpointType:
        """
        Add a REST endpoint to the root endpoint.

        :param endpoint: the endpoint to mount under its own ``path``.
        :return: the endpoint that was passed in, for chaining.
        """
        self.root_endpoint.add_endpoint(endpoint.path, endpoint)
        return endpoint

    def get_endpoint(self, name: str) -> RESTEndpoint | None:
        """
        Get an endpoint by its name, including the first forward slash.

        :return: the registered endpoint, or None if no endpoint is registered under that name.
        """
        return self.root_endpoint.endpoints.get(name)

    def get_api_port(self) -> int | None:
        """
        Get the API port of the currently running server, or None if the HTTP site is not running.
        """
        if self.site:
            return cast(Server, self.site._server).sockets[0].getsockname()[1]  # noqa: SLF001
        return None

    async def start(self) -> None:
        """
        Starts the HTTP API with the listen port as specified in the session configuration.
        """
        self._logger.info("Starting RESTManager...")
        try:
            v = version("tribler")
        except PackageNotFoundError:
            v = "git"

        # Not using setup_aiohttp_apispec here, as we need access to the APISpec to set the security scheme
        aiohttp_apispec = AiohttpApiSpec(
            url="/docs/swagger.json",
            app=self.root_endpoint.app,
            title="Tribler REST API documentation",
            version=f"Tribler {v}",
            swagger_path="/docs"
        )
        if self.config.get("api/key"):
            self._logger.info("Set security scheme and apply to all endpoints")
            aiohttp_apispec.spec.options["security"] = [{"apiKey": []}]
            api_key_scheme = {"type": "apiKey", "in": "header", "name": "X-Api-Key"}
            aiohttp_apispec.spec.components.security_scheme("apiKey", api_key_scheme)

        if "head" in VALID_METHODS_OPENAPI_V2:
            self._logger.info("Remove head")
            VALID_METHODS_OPENAPI_V2.remove("head")

        self.runner = web.AppRunner(self.root_endpoint.app, access_log=None, shutdown_timeout=self.shutdown_timeout)
        await self.runner.setup()

        if self.config.get("api/http_enabled"):
            self._logger.info("Http enabled")
            await self.start_http_site(self.runner)

        if self.config.get("api/https_enabled"):
            self._logger.info("Https enabled")
            await self.start_https_site(self.runner)

        api_port = self.get_api_port()
        # NOTE(review): api_port is None when the HTTP site is disabled; the "%d" placeholders below
        # would then fail to format - confirm whether HTTPS-only configurations are expected here.
        self._logger.info("Swagger docs: http://%s:%d/docs", self.http_host, api_port)
        self._logger.info("Swagger JSON: http://%s:%d/docs/swagger.json", self.http_host, api_port)

    async def start_http_site(self, runner: web.AppRunner) -> None:
        """
        Start serving HTTP requests.

        :param runner: the prepared app runner to bind the TCP site to.
        :raises BaseException: whatever prevented the site from starting, after logging it.
        """
        # Port 0 makes the OS pick a free port; the real port is read back below.
        api_port = self.config.get("api/http_port") or 0

        self.site = web.TCPSite(runner, self.http_host, api_port, shutdown_timeout=self.shutdown_timeout)
        self._logger.info("Starting HTTP REST API server on port %d...", api_port)

        try:
            await self.site.start()
        except BaseException as e:
            self._logger.exception("Can't start HTTP REST API on port %d: %s: %s", api_port,
                                   e.__class__.__name__, str(e))
            raise

        # Persist the actually-bound port so other processes (e.g. the GUI) can find the server.
        current_port = api_port or cast(Server, self.site._server).sockets[0].getsockname()[1]  # noqa: SLF001
        self.config.set("api/http_port_running", current_port)
        self.config.write()

        self._logger.info("HTTP REST API server started on port %d", self.get_api_port())

    async def start_https_site(self, runner: web.AppRunner) -> None:
        """
        Start serving HTTPS requests.

        :param runner: the prepared app runner to bind the TLS site to.
        """
        ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
        # The certificate (including its key) is expected at <state_dir>/https_certfile.
        ssl_context.load_cert_chain(Path(self.config.get("state_dir")) / "https_certfile")

        port = self.config.get("api/https_port")
        self.site_https = web.TCPSite(runner, self.https_host, port, ssl_context=ssl_context)

        await self.site_https.start()
        self._logger.info("Started HTTPS REST API: %s", self.site_https.name)

        # Persist the actually-bound port so other processes can find the server.
        current_port = port or cast(Server, self.site_https._server).sockets[0].getsockname()[1]  # noqa: SLF001
        self.config.set("api/https_port_running", current_port)
        self.config.write()

    async def stop(self) -> None:
        """
        Clean up all the REST endpoints and connections.
        """
        self._logger.info("Stopping...")
        if self.runner:
            await self.runner.cleanup()
        for endpoint in self.root_endpoint.endpoints.values():
            await endpoint.shutdown_task_manager()
        self._logger.info("Stopped")
11,467
Python
.py
249
37.726908
118
0.647054
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,972
file_endpoint.py
Tribler_tribler/src/tribler/core/restapi/file_endpoint.py
import contextlib
import logging
import platform
from pathlib import Path

from aiohttp import web

from tribler.core.restapi.rest_endpoint import HTTP_NOT_FOUND, RESTEndpoint, RESTResponse

if platform.system() == 'Windows':
    import win32api


class FileEndpoint(RESTEndpoint):
    """
    This endpoint allows clients to view the server's file structure remotely.
    """

    path = '/api/files'

    def __init__(self) -> None:
        """
        Create a new file endpoint.
        """
        super().__init__()
        self._logger = logging.getLogger(self.__class__.__name__)
        self.app.add_routes([web.get('/browse', self.browse),
                             web.get('/list', self.list)])

    async def browse(self, request: web.Request) -> RESTResponse:
        """
        Return all files/directories found in the specified path.
        """
        requested = request.query.get('path', "")
        include_files = request.query.get('files') == "1"

        # On Windows, "/" is interpreted as the set of logical drives instead of a directory.
        if requested == "/" and platform.system() == 'Windows':
            drive_entries = [{"name": drive, "path": drive, "dir": True}
                             for drive in win32api.GetLogicalDriveStrings().split("\000") if drive]
            return RESTResponse({"current": "Root", "paths": drive_entries})

        # Walk upward from the requested path until an existing directory is found.
        directory = Path(requested).resolve()
        while not directory.is_dir():
            directory = directory.parent

        # Collect the children, silently skipping entries we may not inspect. Directories sort first.
        entries = []
        for child in directory.iterdir():
            if include_files or child.is_dir():
                with contextlib.suppress(PermissionError):
                    entries.append({"name": child.name, "path": str(child.resolve()), "dir": child.is_dir()})
        entries.sort(key=lambda entry: not entry["dir"])

        # Prepend the parent directory; ".." of the filesystem root stays at "/".
        parent = directory.parent
        entries.insert(0, {
            "name": "..",
            "path": str(parent.resolve()) if directory != parent else "/",
            "dir": True,
        })

        return RESTResponse({"current": str(directory.resolve()), "paths": entries})

    async def list(self, request: web.Request) -> RESTResponse:
        """
        Return all files found in the specified path.
        """
        path = Path(request.query.get('path', ""))
        recurse = request.query.get('recursively') != "0"

        if not path.exists():
            return RESTResponse({"error": f"Directory {path} does not exist"}, status=HTTP_NOT_FOUND)

        pattern = f"{'**/' if recurse else ''}*"
        found = [{"name": entry.name, "path": str(entry.resolve())}
                 for entry in path.glob(pattern) if entry.is_file()]
        return RESTResponse({"paths": found})
3,070
Python
.py
74
30.108108
102
0.543318
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,973
statistics_endpoint.py
Tribler_tribler/src/tribler/core/restapi/statistics_endpoint.py
from __future__ import annotations from typing import TYPE_CHECKING from aiohttp import web from aiohttp_apispec import docs from ipv8.REST.schema import schema from marshmallow.fields import Integer, String from tribler.core.restapi.rest_endpoint import MAX_REQUEST_SIZE, RESTEndpoint, RESTResponse if TYPE_CHECKING: from ipv8.types import IPv8 from tribler.core.database.store import MetadataStore class StatisticsEndpoint(RESTEndpoint): """ This endpoint is responsible for handing requests regarding statistics in Tribler. """ path = "/api/statistics" def __init__(self, middlewares: tuple = (), client_max_size: int = MAX_REQUEST_SIZE) -> None: """ Create a new statistics endpoint. """ super().__init__(middlewares, client_max_size) self.mds: MetadataStore | None = None self.ipv8: IPv8 | None = None self.app.add_routes([web.get("/tribler", self.get_tribler_stats), web.get("/ipv8", self.get_ipv8_stats)]) @docs( tags=["General"], summary="Return general statistics of Tribler.", responses={ 200: { "schema": schema(TriblerStatisticsResponse={ "statistics": schema(TriblerStatistics={ "database_size": Integer, "torrent_queue_stats": [ schema(TorrentQueueStats={ "failed": Integer, "total": Integer, "type": String, "pending": Integer, "success": Integer }) ], }) }) } } ) def get_tribler_stats(self, _: web.Request) -> RESTResponse: """ Return general statistics of Tribler. """ stats_dict = {} if self.mds: stats_dict = {"db_size": self.mds.get_db_file_size(), "num_torrents": self.mds.get_num_torrents()} return RESTResponse({"tribler_statistics": stats_dict}) @docs( tags=["General"], summary="Return general statistics of IPv8.", responses={ 200: { "schema": schema(IPv8StatisticsResponse={ "statistics": schema(IPv8Statistics={ "total_up": Integer, "total_down": Integer }) }) } } ) def get_ipv8_stats(self, _: web.Request) -> RESTResponse: """ Return general statistics of IPv8. 
""" stats_dict = {} if self.ipv8: stats_dict = { "total_up": self.ipv8.endpoint.bytes_up, "total_down": self.ipv8.endpoint.bytes_down } return RESTResponse({"ipv8_statistics": stats_dict})
3,015
Python
.py
80
25.025
97
0.522081
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,974
ipv8_endpoint.py
Tribler_tribler/src/tribler/core/restapi/ipv8_endpoint.py
from ipv8.REST.root_endpoint import RootEndpoint

from tribler.core.restapi.rest_endpoint import RESTEndpoint


class IPv8RootEndpoint(RootEndpoint, RESTEndpoint):
    """
    Make the IPv8 REST endpoint Tribler-compatible.
    """

    # Mount point within Tribler's REST hierarchy.
    path = "/api/ipv8"

    def __init__(self) -> None:
        """
        Create a new IPv8 endpoint.
        """
        # Both base initializers are invoked explicitly instead of a single super().__init__() call.
        # NOTE(review): presumably the two frameworks do not cooperate via super() - confirm against
        # the IPv8 RootEndpoint implementation before changing this to cooperative initialization.
        RESTEndpoint.__init__(self)
        RootEndpoint.__init__(self)
422
Python
.py
13
26.384615
59
0.662531
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,975
webui_endpoint.py
Tribler_tribler/src/tribler/core/restapi/webui_endpoint.py
from __future__ import annotations import logging import mimetypes from aiohttp import ClientSession, web import tribler from tribler.core.restapi.rest_endpoint import RESTEndpoint, RESTResponse class WebUIEndpoint(RESTEndpoint): """ This endpoint serves files used by the web UI. """ path = '/ui' def __init__(self) -> None: """ Create a new webUI endpoint. """ super().__init__() self._logger = logging.getLogger(self.__class__.__name__) self.app.add_routes([web.get("/{path:.*}", self.return_files)]) self.webui_root = tribler.get_webui_root() self.has_dist = (self.webui_root / "dist").exists() self.session = ClientSession() if not self.has_dist else None async def return_files(self, request: web.Request) -> RESTResponse | web.FileResponse: """ Return the file at the requested path. """ path = request.match_info["path"] or "index.html" if self.session: async with self.session.get(f"http://localhost:5173/{path}") as client_response: return RESTResponse(body=await client_response.read(), content_type=client_response.content_type) else: resource = self.webui_root / "dist" / path response = web.FileResponse(resource) if path.endswith(".tsx"): response.content_type = "application/javascript" elif (guessed_type := mimetypes.guess_type(path)[0]) is not None: response.content_type = guessed_type else: response.content_type = "application/octet-stream" return response async def shutdown_task_manager(self) -> None: """ Shutdown the taskmanager. """ await super().shutdown_task_manager() if self.session: await self.session.close()
1,901
Python
.py
46
32.521739
113
0.62039
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,976
manager.py
Tribler_tribler/src/tribler/core/versioning/manager.py
from __future__ import annotations

import logging
import os
import platform
import shutil
from importlib.metadata import PackageNotFoundError, version
from pathlib import Path
from typing import TYPE_CHECKING

from aiohttp import ClientSession
from packaging.version import Version

from tribler.tribler_config import TriblerConfigManager
from tribler.upgrade_script import FROM, TO, upgrade

if TYPE_CHECKING:
    from ipv8.taskmanager import TaskManager

logger = logging.getLogger(__name__)


class VersioningManager:
    """
    Version related logic.
    """

    def __init__(self, task_manager: TaskManager, config: TriblerConfigManager | None) -> None:
        """
        Create a new versioning manager.

        :param task_manager: used to run the (blocking) upgrade in an executor.
        :param config: the Tribler configuration; a fresh default config is created when None is given.
        """
        super().__init__()
        self.task_manager = task_manager
        self.config = config or TriblerConfigManager()

    def get_current_version(self) -> str | None:
        """
        Get the current release version, or None when running from archive or GIT.
        """
        try:
            # Resolved from package metadata, so this only works for installed releases.
            return version("tribler")
        except PackageNotFoundError:
            return None

    def get_versions(self) -> list[str]:
        """
        Get all versions in our state directory.
        """
        # Every sub-directory of the state dir is treated as a version name.
        return [p for p in os.listdir(self.config.get("state_dir"))
                if os.path.isdir(os.path.join(self.config.get("state_dir"), p))]

    async def check_version(self) -> str | None:
        """
        Check the tribler.org + GitHub websites for a new version.

        Returns the newer version string, or None when no newer version exists
        (or no URL responded, or we are running from GIT).
        """
        current_version = self.get_current_version()
        if current_version is None:
            return None
        headers = {
            "User-Agent": (f"Tribler/{current_version} "
                           f"(machine={platform.machine()}; os={platform.system()} {platform.release()}; "
                           f"python={platform.python_version()}; executable={platform.architecture()[0]})")
        }
        urls = [
            f"https://release.tribler.org/releases/latest?current={current_version}",
            "https://api.github.com/repos/tribler/tribler/releases/latest"
        ]
        for url in urls:
            try:
                async with ClientSession(raise_for_status=True) as session:
                    # NOTE(review): a bare float for ``timeout`` relies on
                    # deprecated aiohttp behaviour (newer versions expect a
                    # ClientTimeout) — confirm against the pinned aiohttp version.
                    response = await session.get(url, headers=headers, timeout=5.0)
                    # GitHub serves JSON with a generic content type, hence content_type=None.
                    response_dict = await response.json(content_type=None)
                    response_version = response_dict["name"]
                    if response_version.startswith("v"):
                        response_version = response_version[1:]
            except Exception as e:
                logger.info(e)
                continue  # Case 1: this failed, but we may still have another URL to check. Continue.
            if Version(response_version) > Version(current_version):
                return response_version  # Case 2: we found a newer version. Stop.
            break  # Case 3: we got a response, but we are already at a newer or equal version. Stop.
        return None  # Either Case 3 or repeated Case 1: no URLs responded. No new version available.

    def can_upgrade(self) -> str | bool:
        """
        Check if we have old database/download files to port to our current version.

        Returns the version that can be upgraded from.
        """
        if os.path.isfile(os.path.join(self.config.get_version_state_dir(), ".upgraded")):
            return False  # We have the upgraded marker: nothing to do.
        if FROM not in self.get_versions():
            return False  # We can't upgrade from this version.
        return FROM if (self.get_current_version() in [None, TO]) else False  # Always allow upgrades to git (None).

    def perform_upgrade(self) -> None:
        """
        Upgrade old database/download files to our current version.

        The upgrade runs as an executor task named "Upgrade"; completion can be
        polled by checking for that task on the task manager.
        """
        src_dir = Path(self.config.get("state_dir")) / FROM
        dst_dir = Path(self.config.get_version_state_dir())
        self.task_manager.register_executor_task("Upgrade", upgrade, self.config,
                                                 str(src_dir.expanduser().absolute()),
                                                 str(dst_dir.expanduser().absolute()))

    def remove_version(self, version: str) -> None:
        """
        Remove the files for a version.

        NOTE: the ``version`` parameter shadows ``importlib.metadata.version``
        within this method; do not call the module-level ``version()`` here.
        """
        shutil.rmtree(os.path.join(self.config.get("state_dir"), version), ignore_errors=True)
4,430
Python
.py
95
36.294737
116
0.614226
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,977
versioning_endpoint.py
Tribler_tribler/src/tribler/core/versioning/restapi/versioning_endpoint.py
from __future__ import annotations from typing import TYPE_CHECKING from aiohttp import web from aiohttp_apispec import docs from ipv8.REST.schema import schema from marshmallow.fields import Bool, List, String from tribler.core.restapi.rest_endpoint import HTTP_BAD_REQUEST, MAX_REQUEST_SIZE, RESTEndpoint, RESTResponse from tribler.tribler_config import VERSION_SUBDIR if TYPE_CHECKING: from typing_extensions import TypeAlias from tribler.core.restapi.rest_manager import TriblerRequest from tribler.core.versioning.manager import VersioningManager RequestType: TypeAlias = TriblerRequest[tuple[VersioningManager]] class VersioningEndpoint(RESTEndpoint): """ An endpoint for version determination and upgrading from the previous version. """ path = "/api/versioning" def __init__(self, middlewares: tuple = (), client_max_size: int = MAX_REQUEST_SIZE) -> None: """ Create a new endpoint to create torrents. """ super().__init__(middlewares, client_max_size) self.versioning_manager: VersioningManager | None = None self.required_components = ("versioning_manager",) self.app.add_routes([ web.get("/versions", self.get_versions), web.get("/versions/current", self.get_current_version), web.get("/versions/check", self.check_version), web.delete("/versions/{version}", self.remove_version), web.post("/upgrade", self.perform_upgrade), web.get("/upgrade/available", self.can_upgrade), web.get("/upgrade/working", self.is_upgrading) ]) @docs( tags=["Versioning"], summary="Get the current release version or whether we are running from source.", responses={ 200: { "schema": schema(CurrentVersionResponse={"version": String}) } } ) async def get_current_version(self, request: RequestType) -> RESTResponse: """ Get the current release version, or None when running from archive or GIT. 
""" return RESTResponse({"version": request.context[0].get_current_version() or "git"}) @docs( tags=["Versioning"], summary="Get all versions in our state directory.", responses={ 200: { "schema": schema(GetVersionsResponse={"versions": List(String), "current": String}) } } ) async def get_versions(self, request: RequestType) -> RESTResponse: """ Get all versions in our state directory. """ return RESTResponse({"versions": request.context[0].get_versions(), "current": VERSION_SUBDIR}) @docs( tags=["Versioning"], summary="Check the tribler.org + GitHub websites for a new version.", responses={ 200: { "schema": schema(CheckVersionResponse={"new_version": String, "has_version": Bool}) } } ) async def check_version(self, request: RequestType) -> RESTResponse: """ Check the tribler.org + GitHub websites for a new version. """ new_version = await request.context[0].check_version() return RESTResponse({"new_version": new_version or "", "has_version": new_version is not None}) @docs( tags=["Versioning"], summary="Check if we have old database/download files to port to our current version.", responses={ 200: { "schema": schema(CanUpgradeResponse={"can_upgrade": String}) } } ) async def can_upgrade(self, request: RequestType) -> RESTResponse: """ Check if we have old database/download files to port to our current version. """ return RESTResponse({"can_upgrade": request.context[0].can_upgrade()}) @docs( tags=["Versioning"], summary="Perform an upgrade.", responses={ 200: { "schema": schema(PerformUpgradeResponse={"success": Bool}) } } ) async def perform_upgrade(self, request: RequestType) -> RESTResponse: """ Perform an upgrade. """ request.context[0].perform_upgrade() return RESTResponse({"success": True}) @docs( tags=["Versioning"], summary="Check if the upgrade is still running.", responses={ 200: { "schema": schema(IsUpgradingResponse={"running": Bool}) } } ) async def is_upgrading(self, request: RequestType) -> RESTResponse: """ Check if the upgrade is still running. 
""" return RESTResponse({"running": request.context[0].task_manager.get_task("Upgrade") is not None}) @docs( tags=["Versioning"], summary="Check if the upgrade is still running.", parameters=[{ "in": "path", "name": "version", "description": "The version to remove.", "type": "string", "required": "true" }], responses={ 200: { "schema": schema(RemoveVersionResponse={"success": Bool}) }, HTTP_BAD_REQUEST: { "schema": schema(RemoveVersionNotFoundResponse={"error": String}) } } ) async def remove_version(self, request: RequestType) -> RESTResponse: """ Remove the files for a version. """ version = request.match_info["version"] if not version: return RESTResponse({"error": "No version given"}, status=HTTP_BAD_REQUEST) request.context[0].remove_version(version) return RESTResponse({"success": True})
5,769
Python
.py
148
29.993243
109
0.606104
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,978
community.py
Tribler_tribler/src/tribler/core/tunnel/community.py
from __future__ import annotations import hashlib import math import time from asyncio import Future, open_connection from asyncio import TimeoutError as AsyncTimeoutError from binascii import hexlify, unhexlify from collections import Counter from typing import TYPE_CHECKING, Awaitable import async_timeout from ipv8.messaging.anonymization.community import unpack_cell from ipv8.messaging.anonymization.hidden_services import HiddenTunnelCommunity, HiddenTunnelSettings from ipv8.messaging.anonymization.tunnel import ( CIRCUIT_STATE_READY, CIRCUIT_TYPE_IP_SEEDER, CIRCUIT_TYPE_RP_SEEDER, PEER_FLAG_EXIT_BT, PEER_FLAG_EXIT_IPV8, Circuit, ) from ipv8.peerdiscovery.network import Network from ipv8.taskmanager import task from ipv8.util import succeed from libtorrent import bdecode from tribler.core.libtorrent.download_manager.download_state import DownloadState, DownloadStatus from tribler.core.notifier import Notification, Notifier from tribler.core.tunnel.caches import HTTPRequestCache from tribler.core.tunnel.dispatcher import TunnelDispatcher from tribler.core.tunnel.payload import HTTPRequestPayload, HTTPResponsePayload if TYPE_CHECKING: from pathlib import Path from ipv8.messaging.anonymization.exit_socket import TunnelExitSocket from ipv8.messaging.anonymization.payload import CreatedPayload, CreatePayload, ExtendedPayload from ipv8.types import Address from tribler.core.libtorrent.download_manager.download import Download from tribler.core.libtorrent.download_manager.download_manager import DownloadManager from tribler.core.socks5.server import Socks5Server DESTROY_REASON_BALANCE = 65535 PEER_FLAG_EXIT_HTTP = 32768 MAX_HTTP_PACKET_SIZE = 1400 def is_bencoded(x: bytes) -> bool: """ Returns True is x appears to be valid bencoded byte string. """ return bdecode(x) is not None class TriblerTunnelSettings(HiddenTunnelSettings): """ Settings for Tribler's tunnel community. 
""" exitnode_cache: Path | None = None notifier: Notifier download_manager: DownloadManager socks_servers: list[Socks5Server] exitnode_enabled: bool = False default_hops: int = 0 class TriblerTunnelCommunity(HiddenTunnelCommunity): """ This community is built upon the anonymous messaging layer in IPv8. It adds support for libtorrent anonymous downloads. """ community_id = unhexlify("a3591a6bd89bbaca0974062a1287afcfbc6fd6bc") settings_class = TriblerTunnelSettings def __init__(self, settings: TriblerTunnelSettings) -> None: """ Create a new tunnel community. """ super().__init__(settings) self.settings.endpoint = self.crypto_endpoint if settings.exitnode_enabled: self.settings.peer_flags |= {PEER_FLAG_EXIT_BT, PEER_FLAG_EXIT_IPV8, PEER_FLAG_EXIT_HTTP} self.logger.info("Using %s with flags %s", self.endpoint.__class__.__name__, self.settings.peer_flags) self.bittorrent_peers: dict[Download, set[tuple[str, int]]] = {} self.dispatcher = TunnelDispatcher(self) self.download_states: dict[bytes, DownloadStatus] = {} self.last_forced_announce: dict[bytes, float] = {} if settings.socks_servers: self.dispatcher.set_socks_servers(settings.socks_servers) for server in settings.socks_servers: server.output_stream = self.dispatcher self.add_cell_handler(HTTPRequestPayload, self.on_http_request) self.add_cell_handler(HTTPResponsePayload, self.on_http_response) if settings.exitnode_cache is not None: self.register_task("Load cached exitnodes", self.restore_exitnodes_from_disk, delay=0.5) self.register_task('Poll download manager for new or changed downloads', self._poll_download_manager, interval=1.0) async def _poll_download_manager(self) -> None: """ Get the latest download states from the download manager. 
""" # This must run in all circumstances, so catch all exceptions try: dl_states = self.settings.download_manager.get_last_download_states() self.monitor_downloads(dl_states) except Exception as e: self.logger.exception("Error on polling Download Manager: %s", e) def cache_exitnodes_to_disk(self) -> None: """ Write a copy of the exit_candidates to the file self.settings.exitnode_cache. """ exit_nodes = Network() for peer in self.get_candidates(PEER_FLAG_EXIT_BT): exit_nodes.add_verified_peer(peer) snapshot = exit_nodes.snapshot() self.logger.info("Writing exit nodes to cache file: %s", str(self.settings.exitnode_cache)) try: self.settings.exitnode_cache.write_bytes(snapshot) except OSError as e: self.logger.warning("%s: %s", e.__class__.__name__, str(e)) def restore_exitnodes_from_disk(self) -> None: """ Send introduction requests to peers stored in the file self.settings.exitnode_cache. """ if self.settings.exitnode_cache.is_file(): self.logger.debug('Loading exit nodes from cache: %s', self.settings.exitnode_cache) exit_nodes = Network() with self.settings.exitnode_cache.open('rb') as cache: exit_nodes.load_snapshot(cache.read()) for exit_node in exit_nodes.get_walkable_addresses(): self.endpoint.send(exit_node, self.create_introduction_request(exit_node)) else: self.logger.warning("Could not retrieve backup exitnode cache, file does not exist!") def should_join_circuit(self, create_payload: CreatePayload, previous_node_address: Address) -> Future[bool]: """ Check whether we should join a circuit. Returns a future that fires with a boolean. """ joined_circuits = len(self.relay_from_to) + len(self.exit_sockets) if self.settings.max_joined_circuits <= joined_circuits: self.logger.warning("too many relays (%d)", joined_circuits) return succeed(False) return succeed(True) def readd_bittorrent_peers(self) -> None: """ Add the special IPs that belong to circuits to a download. 
""" for torrent, peers in list(self.bittorrent_peers.items()): infohash = hexlify(torrent.tdef.get_infohash()) for peer in peers: self.logger.info("Re-adding peer %s to torrent %s", peer, infohash) torrent.add_peer(peer) del self.bittorrent_peers[torrent] def update_torrent(self, peers: set[tuple[str, int]], download: Download) -> None: """ Ensure that the given peers are registered in the given download. """ if not download.handle or not download.handle.is_valid(): return peers = peers.intersection({pi.ip for pi in download.handle.get_peer_info()}) if peers: if download not in self.bittorrent_peers: self.bittorrent_peers[download] = peers else: self.bittorrent_peers[download] = peers | self.bittorrent_peers[download] # If there are active circuits, add peers immediately. Otherwise postpone. if self.find_circuits(): self.readd_bittorrent_peers() def remove_circuit(self, circuit_id: int, additional_info: str = "", remove_now: bool = False, destroy: bool = False) -> Awaitable[None]: """ Remove the circuit that belongs to the given circuit id. 
""" if circuit_id not in self.circuits: self.logger.warning("Circuit %d not found when trying to remove it", circuit_id) return succeed(None) circuit = self.circuits[circuit_id] # Send the notification if self.settings.notifier: self.settings.notifier.notify(Notification.circuit_removed, circuit=circuit, additional_info=additional_info) affected_peers = self.dispatcher.circuit_dead(circuit) # Make sure the circuit is marked as closing, otherwise we may end up reusing it circuit.close() if self.settings.download_manager: for download in self.settings.download_manager.get_downloads(): self.update_torrent(affected_peers, download) # Now we actually remove the circuit return super().remove_circuit(circuit_id, additional_info=additional_info, remove_now=remove_now, destroy=destroy) @task async def remove_relay(self, circuit_id: int, additional_info: str = "", remove_now: bool = False, destroy: bool =False) -> None: """ Callback for when a relay is removed. """ removed_relay = await super().remove_relay(circuit_id, additional_info=additional_info, remove_now=remove_now, destroy=destroy) if self.settings.notifier and removed_relay is not None: self.settings.notifier.notify(Notification.circuit_removed, circuit=removed_relay, additional_info=additional_info) def remove_exit_socket(self, circuit_id: int, additional_info:str = "", remove_now: bool = False, destroy: bool = False) -> TunnelExitSocket | None: """ Remove the exit socket that belongs to the given circuit id. 
""" if circuit_id in self.exit_sockets and self.settings.notifier: exit_socket = self.exit_sockets[circuit_id] self.settings.notifier.notify(Notification.circuit_removed, circuit=exit_socket, additional_info=additional_info) return super().remove_exit_socket(circuit_id, additional_info=additional_info, remove_now=remove_now, destroy=destroy) def _ours_on_created_extended(self, circuit_id: int, payload: CreatedPayload | ExtendedPayload) -> None: """ Callback for when we receive either a Created or and Extended payload. """ super()._ours_on_created_extended(circuit_id, payload) circuit = self.circuits.get(circuit_id) if circuit and circuit.state == CIRCUIT_STATE_READY: # Re-add BitTorrent peers, if needed. self.readd_bittorrent_peers() def on_raw_data(self, circuit: Circuit, origin: tuple[str, int], data: bytes) -> None: """ Let our dispatcher know that we have incoming data. """ self.dispatcher.on_incoming_from_tunnel(self, circuit, origin, data) def monitor_downloads(self, dslist: list[DownloadState]) -> None: """ Periodically check the Tribler downloads for state changes. """ # Monitor downloads with anonymous flag set, and build rendezvous/introduction points when needed. new_states = {} hops = {} active_downloads_per_hop = {} # Ensure that we stay within the allowed number of circuits for the default hop count. 
default_hops = self.settings.default_hops if default_hops > 0: active_downloads_per_hop[default_hops] = 0 for ds in dslist: download = ds.get_download() # Metainfo downloads are alive for a short period, and don't warrant additional (e2e) circuit creation if download.hidden is True: continue hop_count = download.config.get_hops() if hop_count > 0: # Convert the real infohash to the infohash used for looking up introduction points real_info_hash = download.get_def().get_infohash() info_hash = self.get_lookup_info_hash(real_info_hash) hops[info_hash] = hop_count new_states[info_hash] = ds.get_status() active = [DownloadStatus.DOWNLOADING, DownloadStatus.SEEDING, DownloadStatus.METADATA] if download.get_state().get_status() in active: active_downloads_per_hop[hop_count] = active_downloads_per_hop.get(hop_count, 0) + 1 # Ugly work-around for the libtorrent DHT not making any requests # after a period of having no circuits if self.last_forced_announce.get(real_info_hash, 0) + 60 <= time.time() \ and self.find_circuits(hops=hop_count) \ and not ds.get_peer_list() \ and self.settings.download_manager.has_session(hop_count): download.force_dht_announce() self.last_forced_announce[real_info_hash] = time.time() # Request 1 circuit per download while ensuring that the total number of circuits requested per hop count # stays within min_circuits and max_circuits. self.circuits_needed = {hop_count: min(max(download_count, self.settings.min_circuits), self.settings.max_circuits) for hop_count, download_count in active_downloads_per_hop.items()} self.monitor_hidden_swarms(new_states, hops) self.download_states = new_states def monitor_hidden_swarms(self, new_states: dict[bytes, DownloadStatus], hops: dict[bytes, int]) -> None: """ Update the known swarms based on the changed states. 
""" ip_counter = Counter([c.info_hash for c in list(self.circuits.values()) if c.ctype == CIRCUIT_TYPE_IP_SEEDER]) for info_hash in set(list(new_states) + list(self.download_states)): new_state = new_states.get(info_hash) old_state = self.download_states.get(info_hash, None) state_changed = new_state != old_state # Join/leave hidden swarm as needed. active = [DownloadStatus.DOWNLOADING, DownloadStatus.SEEDING, DownloadStatus.METADATA] if state_changed and new_state in active: if old_state != DownloadStatus.METADATA or new_state != DownloadStatus.DOWNLOADING: self.join_swarm(info_hash, hops[info_hash], seeding=new_state == DownloadStatus.SEEDING, callback=lambda addr, ih=info_hash: self.on_e2e_finished(addr, ih)) elif state_changed and new_state in [DownloadStatus.STOPPED, None]: self.leave_swarm(info_hash) # Ensure we have enough introduction points for this infohash. Currently, we only create 1. if new_state == DownloadStatus.SEEDING: for _ in range(1 - ip_counter.get(info_hash, 0)): self.logger.info("Create introducing circuit for %s", hexlify(info_hash)) self.create_introduction_point(info_hash) def on_e2e_finished(self, address: Address, info_hash: bytes) -> None: """ Callback for when an end-to-end connection has been established. """ dl = self.get_download(info_hash) if dl: dl.add_peer(address) else: self.logger.error("Could not find download for adding hidden services peer %s:%d!", *address) def on_rendezvous_established(self, source_address: Address, data: bytes, circuit_id: int) -> None: """ Callback for when a rendezvous node has been established. """ super().on_rendezvous_established(source_address, data, circuit_id) circuit = self.circuits.get(circuit_id) if circuit and self.settings.download_manager: self.update_ip_filter(circuit.info_hash) def update_ip_filter(self, info_hash: bytes) -> None: """ Set the IP filter setting for the given infohash. 
""" download = self.get_download(info_hash) if download is not None: lt_session = self.settings.download_manager.get_session(download.config.get_hops()) ip_addresses = [self.circuit_id_to_ip(c.circuit_id) for c in self.find_circuits(ctype=CIRCUIT_TYPE_RP_SEEDER)] self.settings.download_manager.update_ip_filter(lt_session, ip_addresses) def get_download(self, lookup_info_hash: bytes) -> Download | None: """ Match the given infohash to a download (or None if it cannot be found). """ if not self.settings.download_manager: return None for download in self.settings.download_manager.get_downloads(): if lookup_info_hash == self.get_lookup_info_hash(download.get_def().get_infohash()): return download return None @task async def create_introduction_point(self, info_hash: bytes, required_ip: Address | None = None) -> None: """ Start creating an introduction point. """ download = self.get_download(info_hash) if download and self.settings.socks_servers: # We now have to associate the SOCKS5 UDP connection with the libtorrent listen port ourselves. # The reason for this is that libtorrent does not include the source IP/port in an SOCKS5 ASSOCIATE message. # Starting from libtorrent 1.2.4 on Windows, listen_port() returns 0 if used in combination with a # SOCKS5 proxy. Therefore on Windows, we resort to using ports received through listen_succeeded_alert. 
hops = download.config.get_hops() lt_listen_interfaces = [k for k in self.settings.download_manager.listen_ports.get(hops) if k != "127.0.0.1"] lt_listen_port = (self.settings.download_manager.listen_ports.get(hops)[lt_listen_interfaces[0]] if lt_listen_interfaces else None) lt_listen_port = lt_listen_port or self.settings.download_manager.get_session(hops).listen_port() for session in self.settings.socks_servers[hops - 1].sessions: connection = session.udp_connection if connection and lt_listen_port and connection.remote_udp_address is None: connection.remote_udp_address = ("127.0.0.1", lt_listen_port) await super().create_introduction_point(info_hash, required_ip=required_ip) async def unload(self) -> None: """ Shut down our dispatcher and cache the known exit nodes. """ await self.dispatcher.shutdown_task_manager() if self.settings.exitnode_cache is not None: self.cache_exitnodes_to_disk() await super().unload() def get_lookup_info_hash(self, info_hash: bytes) -> bytes: """ Get the SHA-1 hash to lookup for a given torrent info hash. """ return hashlib.sha1(b"tribler anonymous download" + hexlify(info_hash)).digest() @unpack_cell(HTTPRequestPayload) async def on_http_request(self, source_address: Address, payload: HTTPRequestPayload, # noqa: C901 circuit_id: int) -> None: """ Callback for when an HTTP request is received. 
""" if circuit_id not in self.exit_sockets: self.logger.warning("Received unexpected http-request") return if len([cache for cache in self.request_cache._identifiers.values() # noqa: SLF001 if isinstance(cache, HTTPRequestCache) and cache.circuit_id == circuit_id]) > 5: self.logger.warning("Too many HTTP requests coming from circuit %s") return self.logger.debug("Got http-request from %s", source_address) writer = None try: async with async_timeout.timeout(10): self.logger.debug("Opening TCP connection to %s", payload.target) reader, writer = await open_connection(*payload.target) writer.write(payload.request) response = b"" while True: line = await reader.readline() response += line if not line.strip(): # Read HTTP response body (1MB max) response += await reader.read(1024 ** 2) break except OSError: self.logger.warning("Tunnel HTTP request failed") return except AsyncTimeoutError: self.logger.warning("Tunnel HTTP request timed out") return finally: if writer: writer.close() if not response.startswith(b"HTTP/1.1 307"): _, _, bencoded_data = response.partition(b'\r\n\r\n') if not is_bencoded(bencoded_data): self.logger.warning("Tunnel HTTP request not allowed") return num_cells = math.ceil(len(response) / MAX_HTTP_PACKET_SIZE) for i in range(num_cells): self.send_cell(source_address, HTTPResponsePayload(circuit_id, payload.identifier, i, num_cells, response[i * MAX_HTTP_PACKET_SIZE:(i + 1) * MAX_HTTP_PACKET_SIZE])) @unpack_cell(HTTPResponsePayload) def on_http_response(self, source_address: Address, payload: HTTPResponsePayload, circuit_id: int) -> None: """ Callback for when an HTTP response is received. 
""" if not self.request_cache.has("http-request", payload.identifier): self.logger.warning("Received unexpected http-response") return cache = self.request_cache.get("http-request", payload.identifier) if cache.circuit_id != payload.circuit_id: self.logger.warning("Received http-response from wrong circuit %s != %s", cache.circuit_id, circuit_id) return self.logger.debug("Got http-response from %s", source_address) if cache.add_response(payload): self.request_cache.pop("http-request", payload.identifier) async def perform_http_request(self, destination: Address, request: bytes, hops: int = 1) -> bytes: """ Perform the actual HTTP request to service the given request. """ # We need a circuit that supports HTTP requests, meaning that the circuit will have to end # with a node that has the PEER_FLAG_EXIT_HTTP flag set. circuit = None circuits = self.find_circuits(exit_flags=[PEER_FLAG_EXIT_HTTP]) if circuits: circuit = circuits[0] else: # Try to create a circuit. Attempt at most 3 times. for _ in range(3): circuit = self.create_circuit(hops, exit_flags=[PEER_FLAG_EXIT_HTTP]) if circuit and await circuit.ready: break if not circuit or circuit.state != CIRCUIT_STATE_READY: msg = "No HTTP circuit available" raise RuntimeError(msg) cache = self.request_cache.add(HTTPRequestCache(self, circuit.circuit_id)) self.send_cell(circuit.hop.address, HTTPRequestPayload(circuit.circuit_id, cache.number, destination, request)) return await cache.response_future
23,511
Python
.py
435
42.473563
120
0.636043
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,979
caches.py
Tribler_tribler/src/tribler/core/tunnel/caches.py
from __future__ import annotations from asyncio import Future from typing import TYPE_CHECKING from ipv8.requestcache import RandomNumberCache if TYPE_CHECKING: from tribler.core.tunnel.community import TriblerTunnelCommunity from tribler.core.tunnel.payload import HTTPResponsePayload class HTTPRequestCache(RandomNumberCache): """ A cache to keep track of an HTTP request. """ def __init__(self, community: TriblerTunnelCommunity, circuit_id: int) -> None: """ Create a new HTTP request cache. """ super().__init__(community.request_cache, "http-request") self.circuit_id = circuit_id self.response: dict[int, bytes] = {} self.response_future: Future[bytes] = Future() self.register_future(self.response_future) def add_response(self, payload: HTTPResponsePayload) -> bool: """ Add a received response payload that belongs to this cache. """ self.response[payload.part] = payload.response if len(self.response) == payload.total and not self.response_future.done(): self.response_future.set_result(b"".join([t[1] for t in sorted(self.response.items())])) return True return False def on_timeout(self) -> None: """ We don't need to do anything on timeout. """
1,360
Python
.py
33
34.151515
100
0.670963
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,980
payload.py
Tribler_tribler/src/tribler/core/tunnel/payload.py
from __future__ import annotations

from ipv8.messaging.anonymization.payload import CellablePayload
from ipv8.messaging.lazy_payload import vp_compile


@vp_compile
class HTTPRequestPayload(CellablePayload):
    """
    A request to an HTTP address.
    """

    msg_id = 28
    # Serializer format codes (ipv8): "I" = 32-bit unsigned int, "address" packs
    # a (host, port) pair, "varlenH" = byte string with a 16-bit length prefix.
    format_list = ["I", "I", "address", "varlenH"]
    names = ["circuit_id", "identifier", "target", "request"]

    circuit_id: int
    identifier: int
    target: tuple[str, int]
    request: bytes


@vp_compile
class HTTPResponsePayload(CellablePayload):
    """
    A response after executing an HTTP request.
    """

    msg_id = 29
    # "part"/"total" ("H" = 16-bit unsigned int) allow a large response to be
    # split over multiple cells and reassembled by the receiver.
    format_list = ["I", "I", "H", "H", "varlenH"]
    names = ["circuit_id", "identifier", "part", "total", "response"]

    circuit_id: int
    identifier: int
    part: int
    total: int
    response: bytes
826
Python
.py
28
25.178571
69
0.669202
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,981
dispatcher.py
Tribler_tribler/src/tribler/core/tunnel/dispatcher.py
from __future__ import annotations

import random
from collections import defaultdict
from typing import TYPE_CHECKING

from ipv8.messaging.anonymization.tunnel import (
    CIRCUIT_ID_PORT,
    CIRCUIT_STATE_READY,
    CIRCUIT_TYPE_DATA,
    CIRCUIT_TYPE_RP_DOWNLOADER,
    CIRCUIT_TYPE_RP_SEEDER,
    Circuit,
)
from ipv8.taskmanager import TaskManager, task

from tribler.core.socks5.conversion import UdpPacket, socks5_serializer

if TYPE_CHECKING:
    from asyncio import Future

    from ipv8.messaging.interfaces.udp.endpoint import DomainAddress, UDPv4Address

    from tribler.core.socks5.connection import Socks5Connection
    from tribler.core.socks5.server import Socks5Server
    from tribler.core.socks5.udp_connection import RustUDPConnection, SocksUDPConnection
    from tribler.core.tunnel.community import TriblerTunnelCommunity


class TunnelDispatcher(TaskManager):
    """
    This class is responsible for dispatching SOCKS5 traffic to the right circuits and vice versa.
    This dispatcher acts as a "secondary" proxy between the SOCKS5 UDP session and the tunnel community.
    """

    def __init__(self, tunnels: TriblerTunnelCommunity) -> None:
        """
        Create a new dispatcher.
        """
        super().__init__()
        self.tunnels = tunnels
        self.socks_servers: list[Socks5Server] = []

        # Map to keep track of the circuits associated with each destination.
        self.con_to_cir: dict[Socks5Connection, dict[DomainAddress | UDPv4Address, Circuit]] = defaultdict(dict)
        # Map to keep track of the circuit id to UDP connection.
        self.cid_to_con: dict[int, Socks5Connection] = {}
        self.register_task("check_connections", self.check_connections, interval=30)

    def set_socks_servers(self, socks_servers: list[Socks5Server]) -> None:
        """
        Set the available Socks5 servers.

        The index of a server in this list determines the hop count it serves (index + 1 hops).
        """
        self.socks_servers = socks_servers

    def on_incoming_from_tunnel(self, community: TriblerTunnelCommunity, circuit: Circuit, origin: tuple[str, int],
                                data: bytes) -> bool:
        """
        We received some data from the tunnel community. Dispatch it to the right UDP SOCKS5 socket.

        :returns: True when the data was delivered to a SOCKS5 UDP connection, False otherwise.
        """
        if circuit.ctype in [CIRCUIT_TYPE_RP_DOWNLOADER, CIRCUIT_TYPE_RP_SEEDER]:
            # For rendezvous circuits, present a synthetic origin derived from the circuit id.
            origin = (community.circuit_id_to_ip(circuit.circuit_id), CIRCUIT_ID_PORT)

        try:
            connection = self.cid_to_con[circuit.circuit_id]
        except KeyError:
            # No known connection for this circuit: pick a session on the SOCKS5 server
            # that matches the circuit's hop count (RP downloader circuits use one hop fewer).
            session_hops = circuit.goal_hops if circuit.ctype != CIRCUIT_TYPE_RP_DOWNLOADER else circuit.goal_hops - 1
            if session_hops > len(self.socks_servers) or not self.socks_servers[session_hops - 1].sessions:
                self._logger.exception("No connection found for %d hops", session_hops)
                return False
            connection = next((s for s in self.socks_servers[session_hops - 1].sessions
                               if s.udp_connection and s.udp_connection.remote_udp_address), None)

        # NOTE(review): this guard appears to apply to both the cached and the freshly-selected
        # connection (a cached connection may have lost its UDP association) — confirm placement.
        if connection is None or connection.udp_connection is None:
            self._logger.error("Connection has closed or has not gotten an UDP associate")
            self.connection_dead(connection)
            return False

        packet = socks5_serializer.pack_serializable(UdpPacket(0, 0, origin, data))
        connection.udp_connection.send_datagram(packet)
        return True

    def on_socks5_udp_data(self, udp_connection: SocksUDPConnection, request: UdpPacket) -> bool:
        """
        We received some data from the SOCKS5 server (from the SOCKS5 client). This method
        selects a circuit to send this data over to the final destination.

        :returns: True when the data was sent over a ready circuit, False otherwise.
        """
        connection = udp_connection.socksconnection
        try:
            circuit = self.con_to_cir[connection][request.destination]
        except KeyError:
            circuit = self.select_circuit(connection, request)

            if circuit is None:
                return False

        if circuit.state != CIRCUIT_STATE_READY:
            self._logger.debug("Circuit not ready, dropping %d bytes to %s", len(request.data), request.destination)
            return False

        self._logger.debug("Sending data over circuit %d destined for %r:%r", circuit.circuit_id,
                           *request.destination)
        self.tunnels.send_data(circuit.hop.address, circuit.circuit_id, request.destination, ('0.0.0.0', 0),
                               request.data)
        return True

    @task
    async def on_socks5_tcp_data(self, tcp_connection: Socks5Connection, destination: tuple[str, int],
                                 request: bytes) -> bool:
        """
        Callback for when we received Socks5 data over TCP.

        Performs the HTTP request through the tunnel community and writes the response
        back over the TCP connection's transport, closing it afterwards.
        """
        self._logger.debug("Got request for %s: %s", destination, request)
        hops = self.socks_servers.index(tcp_connection.socksserver) + 1
        try:
            response = await self.tunnels.perform_http_request(destination, request, hops)
            self._logger.debug("Got response from %s: %s", destination, response)
        except RuntimeError as e:
            self._logger.info("Failed to get HTTP response using tunnels: %s", e)
            return False

        transport = tcp_connection.transport
        if not transport:
            return False

        if response:
            transport.write(response)
        transport.close()
        return True

    def select_circuit(self, connection: Socks5Connection, request: UdpPacket) -> Circuit | None:
        """
        Select a circuit for the given connection and request.

        Note: despite the original ``int | None`` annotation, this method returns a
        ``Circuit`` object (callers access ``circuit.state`` and ``circuit.hop``).

        :returns: a ready circuit, or None when none is available yet (a new circuit
            may have been requested; the data will be retried once it is ready).
        """
        def add_data_if_result(result_func: Future[Circuit | None],
                               connection: SocksUDPConnection | RustUDPConnection | None = connection.udp_connection,
                               request: UdpPacket = request) -> bool | None:
            # Retry sending the original datagram once the newly created circuit is ready.
            if result_func.result() is None:
                return None
            return self.on_socks5_udp_data(connection, request)

        if request.destination[1] == CIRCUIT_ID_PORT:
            # Destination is a synthetic circuit-id address: route back over that rendezvous circuit.
            circuit = self.tunnels.circuits.get(self.tunnels.ip_to_circuit_id(request.destination[0]))
            if circuit and circuit.state == CIRCUIT_STATE_READY and \
                    circuit.ctype in [CIRCUIT_TYPE_RP_DOWNLOADER, CIRCUIT_TYPE_RP_SEEDER]:
                return circuit

        hops = self.socks_servers.index(connection.socksserver) + 1
        # Candidate circuits: right hop count, ready, data-type, and not claimed by another connection.
        options = [c for c in self.tunnels.circuits.values()
                   if c.goal_hops == hops and c.state == CIRCUIT_STATE_READY and c.ctype == CIRCUIT_TYPE_DATA
                   and self.cid_to_con.get(c.circuit_id, connection) == connection]

        if not options:
            # We allow each connection to claim at least 1 circuit. If no such circuit exists we'll create one.
            if connection in self.cid_to_con.values():
                self._logger.debug("No circuit for sending data to %s", request.destination)
                return None

            circuit = self.tunnels.create_circuit(goal_hops=hops)
            if circuit is None:
                self._logger.debug("Failed to create circuit for data to %s", request.destination)
                return None

            self._logger.debug("Creating circuit for data to %s. Retrying later..", request.destination)
            self.cid_to_con[circuit.circuit_id] = connection
            circuit.ready.add_done_callback(add_data_if_result)
            return None

        circuit = random.choice(options)
        self.cid_to_con[circuit.circuit_id] = connection
        self.con_to_cir[connection][request.destination] = circuit
        self._logger.debug("Select circuit %d for %s", circuit.circuit_id, request.destination)
        return circuit

    def circuit_dead(self, broken_circuit: Circuit) -> set[tuple[str, int]]:
        """
        When a circuit dies, we update the destinations dictionary and remove all peers that are affected.

        :returns: the set of destinations that were routed over the broken circuit.
        """
        con = self.cid_to_con.pop(broken_circuit.circuit_id, None)
        destinations = set()
        destination_to_circuit = self.con_to_cir.get(con, {})
        for destination, circuit in list(destination_to_circuit.items()):
            if circuit == broken_circuit:
                destination_to_circuit.pop(destination, None)
                destinations.add(destination)
        self._logger.debug("Deleted %d peers from destination list", len(destinations))
        return destinations

    def connection_dead(self, connection: Socks5Connection | None) -> None:
        """
        Callback for when a given connection is dead.

        Removes the connection's destination map and releases all circuits it had claimed.
        Accepts None (e.g. when no connection could be selected for incoming tunnel data).
        """
        self.con_to_cir.pop(connection, None)
        for cid, con in list(self.cid_to_con.items()):
            if con == connection:
                self.cid_to_con.pop(cid, None)
        self._logger.error("Detected closed connection")

    def check_connections(self) -> None:
        """
        Mark connections as dead if they don't have an underlying UDP connection.
        """
        for connection in list(self.cid_to_con.values()):
            if not connection.udp_connection:
                self.connection_dead(connection)
9,214
Python
.py
172
42.645349
119
0.646425
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,982
discovery.py
Tribler_tribler/src/tribler/core/tunnel/discovery.py
from __future__ import annotations

import time
from random import choice
from typing import TYPE_CHECKING

from ipv8.messaging.anonymization.tunnel import PEER_FLAG_EXIT_BT
from ipv8.peerdiscovery.discovery import DiscoveryStrategy

if TYPE_CHECKING:
    from ipv8.types import Peer

    from tribler.core.tunnel.community import TriblerTunnelCommunity


class GoldenRatioStrategy(DiscoveryStrategy):
    """
    Strategy for removing peers once we have too many in the TunnelCommunity.

    When the fraction of "normal" (non-exit) peers exceeds the configured golden ratio,
    a normal peer is dropped; when it falls below the ratio, an exit peer is dropped.
    """

    def __init__(self, overlay: TriblerTunnelCommunity, golden_ratio: float = 9 / 16, target_peers: int = 23) -> None:
        """
        Initialize the GoldenRatioStrategy.

        :param overlay: the overlay instance to walk over
        :param golden_ratio: the ratio of normal/exit node peers to pursue (between 0.0 and 1.0)
        :param target_peers: the amount of peers at which to start removing (>0)
        """
        super().__init__(overlay)
        self.golden_ratio = golden_ratio
        self.target_peers = target_peers
        # Timestamp of the last manual introduction request we sent, per peer.
        self.intro_sent: dict[Peer, float] = {}

        assert target_peers > 0
        assert 0.0 <= golden_ratio <= 1.0

    def take_step(self) -> None:
        """
        We are asked to update, see if we have enough peers to start culling them.
        If we do have enough peers, select a suitable peer to remove.
        """
        with self.walk_lock:
            known_peers = self.overlay.get_peers()

            # Forget introduction bookkeeping for peers that have left the overlay.
            self.intro_sent = {p: t for p, t in self.intro_sent.items() if p in known_peers}

            # Some of the peers in the community could have been discovered using the DiscoveryCommunity. If this
            # happens we have no knowledge of their peer_flags. In order to still get the flags we send them an
            # introduction request manually (re-sent at most every 300 seconds).
            timestamp = time.time()
            for candidate in known_peers:
                if candidate in self.overlay.candidates:
                    continue
                if timestamp > self.intro_sent.get(candidate, 0) + 300:
                    self.overlay.send_introduction_request(candidate)
                    self.intro_sent[candidate] = timestamp

            total = len(known_peers)
            if total <= self.target_peers:
                return

            exit_peers = set(self.overlay.get_candidates(PEER_FLAG_EXIT_BT))
            normal_fraction = 1.0 - len(exit_peers) / total  # total > 0 per definition
            if normal_fraction < self.golden_ratio:
                # Too few normal peers relative to exits: drop a random exit peer.
                self.overlay.network.remove_peer(choice(list(exit_peers)))
            elif normal_fraction > self.golden_ratio:
                # Too many normal peers: drop a random non-exit peer.
                self.overlay.network.remove_peer(choice(list(set(self.overlay.get_peers()) - exit_peers)))
3,009
Python
.py
57
42.789474
118
0.64852
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,983
community.py
Tribler_tribler/src/tribler/core/content_discovery/community.py
from __future__ import annotations

import json
import random
import sys
import time
import uuid
from binascii import hexlify, unhexlify
from importlib.metadata import PackageNotFoundError, version
from itertools import count
from typing import TYPE_CHECKING, Any, Callable, Sequence

from ipv8.community import Community, CommunitySettings
from ipv8.lazy_community import lazy_wrapper
from ipv8.requestcache import RequestCache
from pony.orm import OperationalError, db_session

from tribler.core.content_discovery.cache import SelectRequest
from tribler.core.content_discovery.payload import (
    PopularTorrentsRequest,
    RemoteSelectPayload,
    SelectResponsePayload,
    TorrentsHealthPayload,
    VersionRequest,
    VersionResponse,
)
from tribler.core.database.layers.knowledge import ResourceType
from tribler.core.database.orm_bindings.torrent_metadata import LZ4_EMPTY_ARCHIVE, entries_to_chunk
from tribler.core.database.store import MetadataStore, ObjState, ProcessingResult
from tribler.core.knowledge.community import is_valid_resource
from tribler.core.notifier import Notification, Notifier
from tribler.core.torrent_checker.dataclasses import HealthInfo

if TYPE_CHECKING:
    from ipv8.types import Peer

    from tribler.core.database.orm_bindings.torrent_metadata import TorrentMetadata
    from tribler.core.database.tribler_database import TriblerDatabase
    from tribler.core.torrent_checker.torrent_checker import TorrentChecker


class ContentDiscoverySettings(CommunitySettings):
    """
    The settings for the content discovery community.
    """

    random_torrent_interval: float = 5  # seconds
    random_torrent_count: int = 10
    max_query_peers: int = 20
    maximum_payload_size: int = 1300
    max_response_size: int = 100  # Max number of entries returned by SQL query

    # Dict fields whose values travel hex-encoded in JSON and binary in the database.
    binary_fields: Sequence[str] = ("infohash", "channel_pk")
    # Query parameters that are no longer supported; selects using them get empty results.
    deprecated_parameters: Sequence[str] = ("subscribed", "attribute_ranges", "complete_channel")

    metadata_store: MetadataStore
    torrent_checker: TorrentChecker
    tribler_db: TriblerDatabase | None = None
    notifier: Notifier | None = None


class ContentDiscoveryCommunity(Community):
    """
    Community for disseminating the content across the network.
    """

    community_id = unhexlify("9aca62f878969c437da9844cba29a134917e1648")
    settings_class = ContentDiscoverySettings

    def __init__(self, settings: ContentDiscoverySettings) -> None:
        """
        Create a new overlay for content discovery.
        """
        super().__init__(settings)
        self.composition = settings

        self.add_message_handler(TorrentsHealthPayload, self.on_torrents_health)
        self.add_message_handler(PopularTorrentsRequest, self.on_popular_torrents_request)
        self.add_message_handler(VersionRequest, self.on_version_request)
        self.add_message_handler(VersionResponse, self.on_version_response)
        self.add_message_handler(RemoteSelectPayload, self.on_remote_select)
        self.add_message_handler(SelectResponsePayload, self.on_remote_select_response)
        # Message 209 was the EVA-based remote select; it is no longer supported.
        self.add_message_handler(209, self.on_deprecated_message)
        self.deprecated_message_names[209] = "RemoteSelectPayloadEva"

        self.request_cache = RequestCache()

        self.remote_queries_in_progress = 0
        self.next_remote_query_num = count().__next__  # generator of sequential numbers, for logging & debug purposes

        self.logger.info("Content Discovery Community initialized (peer mid %s)", hexlify(self.my_peer.mid))
        self.register_task("gossip_random_torrents", self.gossip_random_torrents_health,
                           interval=self.composition.random_torrent_interval)

    async def unload(self) -> None:
        """
        Shut down the request cache.
        """
        await self.request_cache.shutdown()
        await super().unload()

    def sanitize_dict(self, parameters: dict[str, Any], decode: bool = True) -> None:
        """
        Convert the binary values in the given dictionary to (decode=True) and from (decode=False) hex format.

        Mutates ``parameters`` in place; only the fields listed in ``binary_fields`` are touched.
        """
        for field in self.composition.binary_fields:
            value = parameters.get(field)
            if value is not None:
                parameters[field] = unhexlify(value.encode()) if decode else hexlify(value.encode()).decode()

    def sanitize_query(self, query_dict: dict[str, Any], cap: int = 100) -> dict[str, Any]:
        """
        Convert the values in a query to the appropriate format and supply missing values.

        :param cap: maximum size of the [first, last) result window.
        :returns: a new, sanitized dictionary (the input is not modified).
        """
        sanitized_dict = dict(query_dict)

        # We impose a cap on max numbers of returned entries to prevent DDOS-like attacks
        first = sanitized_dict.get("first") or 0
        last = sanitized_dict.get("last")
        last = last if (last is not None and last <= (first + cap)) else (first + cap)
        sanitized_dict.update({"first": first, "last": last})

        # convert hex fields to binary
        self.sanitize_dict(sanitized_dict, decode=True)

        return sanitized_dict

    def convert_to_json(self, parameters: dict[str, Any]) -> str:
        """
        Sanitize and dump the given dictionary to a string using JSON.
        """
        sanitized = dict(parameters)
        # Convert metadata_type to an int list if it is a string
        if "metadata_type" in sanitized and isinstance(sanitized["metadata_type"], str):
            sanitized["metadata_type"] = [int(sanitized["metadata_type"])]

        self.sanitize_dict(sanitized, decode=False)

        if "origin_id" in parameters:
            sanitized["origin_id"] = int(parameters["origin_id"])

        return json.dumps(sanitized)

    def get_alive_checked_torrents(self) -> list[HealthInfo]:
        """
        Get torrents that we know have seeders AND leechers.
        """
        if not self.composition.torrent_checker:
            return []

        # Filter torrents that have seeders
        return [health for health in self.composition.torrent_checker.torrents_checked.values()
                if health.seeders > 0 and health.leechers >= 0]

    def gossip_random_torrents_health(self) -> None:
        """
        Gossip random torrent health information to another peer.

        Also asks up to 5 random peers for their popular torrents.
        """
        peers = self.get_peers()
        if not peers or not self.composition.torrent_checker:
            return

        self.ez_send(random.choice(peers), TorrentsHealthPayload.create(self.get_random_torrents(), {}))

        for p in random.sample(peers, min(len(peers), 5)):
            self.ez_send(p, PopularTorrentsRequest())

    @lazy_wrapper(TorrentsHealthPayload)
    async def on_torrents_health(self, peer: Peer, payload: TorrentsHealthPayload) -> None:
        """
        Callback for when we receive torrent health.

        Stores each health record and queries the sender for the matching metadata entry.
        """
        self.logger.debug("Received torrent health information for %d popular torrents"
                          " and %d random torrents", len(payload.torrents_checked), len(payload.random_torrents))

        health_tuples = payload.random_torrents + payload.torrents_checked
        health_list = [HealthInfo(infohash, last_check=last_check, seeders=seeders, leechers=leechers)
                       for infohash, seeders, leechers, last_check in health_tuples]

        for health_info in health_list:
            # Get a single result per infohash to avoid duplicates
            with db_session:
                self.composition.metadata_store.process_torrent_health(health_info)
            infohash = hexlify(health_info.infohash).decode()
            self.send_remote_select(peer=peer, infohash=infohash, last=1)

    @db_session
    def process_torrents_health(self, health_list: list[HealthInfo]) -> set[bytes]:
        """
        Get the infohashes that we did not know about before from the given health list.
        """
        infohashes_to_resolve = set()
        for health in health_list:
            added = self.composition.metadata_store.process_torrent_health(health)
            if added:
                infohashes_to_resolve.add(health.infohash)
        return infohashes_to_resolve

    @lazy_wrapper(PopularTorrentsRequest)
    async def on_popular_torrents_request(self, peer: Peer, payload: PopularTorrentsRequest) -> None:
        """
        Callback for when we receive a request for popular torrents.
        """
        self.logger.debug("Received popular torrents health request")
        popular_torrents = self.get_random_torrents()
        self.ez_send(peer, TorrentsHealthPayload.create({}, popular_torrents))

    def get_random_torrents(self) -> list[HealthInfo]:
        """
        Get torrent health info for torrents that were alive, last we know of.

        :returns: at most ``random_torrent_count`` randomly sampled entries.
        """
        checked_and_alive = self.get_alive_checked_torrents()
        if not checked_and_alive:
            return []

        num_torrents_to_send = min(self.composition.random_torrent_count, len(checked_and_alive))
        return random.sample(checked_and_alive, num_torrents_to_send)

    def get_random_peers(self, sample_size: int | None = None) -> list[Peer]:
        """
        Randomly sample sample_size peers from the complete list of our peers.
        """
        all_peers = self.get_peers()
        return random.sample(all_peers, min(sample_size or len(all_peers), len(all_peers)))

    def send_search_request(self, **kwargs) -> tuple[uuid.UUID, list[Peer]]:
        """
        Send a remote query request to multiple random peers to search for some terms.

        :returns: the uuid identifying this search and the list of queried peers.
        """
        request_uuid = uuid.uuid4()

        def notify_gui(request: SelectRequest, processing_results: list[ProcessingResult]) -> None:
            # Forward only the entries that were new to us to the GUI notifier.
            results = [
                r.md_obj.to_simple_dict()
                for r in processing_results
                if r.obj_state == ObjState.NEW_OBJECT
            ]
            if self.composition.notifier:
                self.composition.notifier.notify(Notification.remote_query_results,
                                                 query=kwargs.get("txt_filter"),
                                                 results=results,
                                                 uuid=str(request_uuid),
                                                 peer=hexlify(request.peer.mid).decode())

        peers_to_query = self.get_random_peers(self.composition.max_query_peers)

        for p in peers_to_query:
            self.send_remote_select(p, **kwargs, processing_callback=notify_gui)

        return request_uuid, peers_to_query

    @lazy_wrapper(VersionRequest)
    async def on_version_request(self, peer: Peer, _: VersionRequest) -> None:
        """
        Callback for when our Tribler version and Operating System is requested.
        """
        try:
            v = version("tribler")
        except PackageNotFoundError:
            # Not an installed package: assume a source checkout.
            v = "git"
        version_response = VersionResponse(f"Tribler {v}", sys.platform)
        self.ez_send(peer, version_response)

    @lazy_wrapper(VersionResponse)
    async def on_version_response(self, peer: Peer, payload: VersionResponse) -> None:
        """
        Callback for when we receive a Tribler version and Operating System of a peer.
        """

    def send_remote_select(self, peer: Peer,
                           processing_callback: Callable[[SelectRequest, list[ProcessingResult]], None] | None = None,
                           **kwargs) -> SelectRequest:
        """
        Query a peer using an SQL statement descriptions (kwargs).

        :returns: the request cache entry tracking this select.
        """
        request = SelectRequest(self.request_cache, kwargs, peer, processing_callback, self._on_query_timeout)
        self.request_cache.add(request)

        self.logger.debug("Select to %s with (%s)", hexlify(peer.mid).decode(), str(kwargs))
        self.ez_send(peer, RemoteSelectPayload(request.number, self.convert_to_json(kwargs).encode()))
        return request

    def should_limit_rate_for_query(self, sanitized_parameters: dict[str, Any]) -> bool:
        """
        Don't allow too many queries with potentially heavy database load.
        """
        return "txt_filter" in sanitized_parameters

    async def process_rpc_query_rate_limited(self, sanitized_parameters: dict[str, Any]) -> list:
        """
        Process the given query and return results.

        Heavy (text-filter) queries are dropped while another remote query is in progress.
        """
        query_num = self.next_remote_query_num()
        if self.remote_queries_in_progress and self.should_limit_rate_for_query(sanitized_parameters):
            self.logger.warning("Ignore remote query %d as another one is already processing. The ignored query: %s",
                                query_num, sanitized_parameters)
            return []

        self.logger.info("Process remote query %d: %s", query_num, sanitized_parameters)
        self.remote_queries_in_progress += 1
        t = time.time()
        try:
            return await self.process_rpc_query(sanitized_parameters)
        finally:
            self.remote_queries_in_progress -= 1
            self.logger.info("Remote query %d processed in %f seconds: %s",
                             query_num, time.time() - t, sanitized_parameters)

    async def process_rpc_query(self, sanitized_parameters: dict[str, Any]) -> list:
        """
        Retrieve the result of a database query from a third party, encoded as raw JSON bytes (through `dumps`).

        :raises TypeError: if the JSON contains invalid keys.
        :raises ValueError: if no JSON could be decoded.
        :raises pony.orm.dbapiprovider.OperationalError: if an illegal query was performed.
        """
        if self.composition.tribler_db:
            # tags should be extracted because `get_entries_threaded` doesn't expect them as a parameter
            tags = sanitized_parameters.pop("tags", None)

            infohash_set = self.composition.tribler_db.instance(self.search_for_tags, tags)
            if infohash_set:
                sanitized_parameters["infohash_set"] = {bytes.fromhex(s) for s in infohash_set}

        # exclude_deleted should be extracted because `get_entries_threaded` doesn't expect it as a parameter
        sanitized_parameters.pop("exclude_deleted", None)

        return await self.composition.metadata_store.get_entries_threaded(**sanitized_parameters)

    @db_session
    def search_for_tags(self, tags: list[str] | None) -> set[str] | None:
        """
        Query our local database for the given tags.

        :returns: the infohashes tagged with all given (valid) tags, or None when
            there are no tags or no tag database.
        """
        if not tags or not self.composition.tribler_db:
            return None
        valid_tags = {tag for tag in tags if is_valid_resource(tag)}
        return self.composition.tribler_db.knowledge.get_subjects_intersection(
            subjects_type=ResourceType.TORRENT,
            objects=valid_tags,
            predicate=ResourceType.TAG,
            case_sensitive=False
        )

    def send_db_results(self, peer: Peer, request_payload_id: int, db_results: list[TorrentMetadata]) -> None:
        """
        Send the given results to the given peer, chunked to fit the maximum payload size.
        """
        # Special case of empty results list - sending empty lz4 archive
        if len(db_results) == 0:
            self.ez_send(peer, SelectResponsePayload(request_payload_id, LZ4_EMPTY_ARCHIVE))
            return

        index = 0
        while index < len(db_results):
            transfer_size = self.composition.maximum_payload_size
            data, index = entries_to_chunk(db_results, transfer_size, start_index=index, include_health=True)
            payload = SelectResponsePayload(request_payload_id, data)
            self.ez_send(peer, payload)

    @lazy_wrapper(RemoteSelectPayload)
    async def on_remote_select(self, peer: Peer, request_payload: RemoteSelectPayload) -> None:
        """
        Callback for when another peer queries us.

        Malformed or illegal queries are logged and dropped rather than answered.
        """
        try:
            sanitized_parameters = self.parse_parameters(request_payload.json)
            # Drop selects with deprecated queries
            if any(param in sanitized_parameters for param in self.composition.deprecated_parameters):
                self.logger.warning("Remote select with deprecated parameters: %s", str(sanitized_parameters))
                self.ez_send(peer, SelectResponsePayload(request_payload.id, LZ4_EMPTY_ARCHIVE))
                return
            db_results = await self.process_rpc_query_rate_limited(sanitized_parameters)

            self.send_db_results(peer, request_payload.id, db_results)
        except (OperationalError, TypeError, ValueError) as error:
            self.logger.exception("Remote select error: %s. Request content: %s",
                                  str(error), repr(request_payload.json))

    def parse_parameters(self, json_bytes: bytes) -> dict[str, Any]:
        """
        Load a (JSON) dict from the given bytes and sanitize it to use as a database query.
        """
        return self.sanitize_query(json.loads(json_bytes), self.composition.max_response_size)

    @lazy_wrapper(SelectResponsePayload)
    async def on_remote_select_response(self, peer: Peer,
                                        response_payload: SelectResponsePayload) -> list[ProcessingResult] | None:
        """
        Match the response that we received from the network to a query cache
        and process it by adding the corresponding entries to the MetadataStore database.
        This processes both direct responses and pushback (updates) responses.
        """
        self.logger.debug("Response from %s", hexlify(peer.mid).decode())

        request: SelectRequest | None = self.request_cache.get(hexlify(peer.mid).decode(), response_payload.id)
        if request is None:
            return None

        # Check for limit on the number of packets per request
        if request.packets_limit > 1:
            request.packets_limit -= 1
        else:
            self.request_cache.pop(hexlify(peer.mid).decode(), response_payload.id)

        processing_results = await self.composition.metadata_store.process_compressed_mdblob_threaded(
            response_payload.raw_blob
        )
        self.logger.debug("Response result: %s", str(processing_results))

        if isinstance(request, SelectRequest) and request.processing_callback:
            request.processing_callback(request, processing_results)

        # Remember that at least a single packet was received from the queried peer.
        if isinstance(request, SelectRequest):
            request.peer_responded = True

        return processing_results

    def _on_query_timeout(self, request_cache: SelectRequest) -> None:
        """
        Remove a peer if it failed to respond to our select request.
        """
        if not request_cache.peer_responded:
            self.logger.debug(
                "Remote query timeout, deleting peer: %s %s %s",
                str(request_cache.peer.address),
                hexlify(request_cache.peer.mid).decode(),
                str(request_cache.request_kwargs),
            )
            self.network.remove_peer(request_cache.peer)

    def send_ping(self, peer: Peer) -> None:
        """
        Send a ping to a peer to keep it alive.
        """
        self.send_introduction_request(peer)
19,109
Python
.py
364
42.598901
118
0.661346
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,984
cache.py
Tribler_tribler/src/tribler/core/content_discovery/cache.py
from __future__ import annotations

from binascii import hexlify
from typing import TYPE_CHECKING, Callable

from ipv8.requestcache import RandomNumberCache, RequestCache
from typing_extensions import Self

if TYPE_CHECKING:
    from ipv8.types import Peer

    from tribler.core.database.store import ProcessingResult


class SelectRequest(RandomNumberCache):
    """
    Tracks the response packets arriving from a peer while a select request is being answered.
    """

    def __init__(self, request_cache: RequestCache, request_kwargs: dict, peer: Peer,
                 processing_callback: Callable[[Self, list[ProcessingResult]], None] | None = None,
                 timeout_callback: Callable[[Self], None] | None = None) -> None:
        """
        Create a new select request cache.
        """
        super().__init__(request_cache, hexlify(peer.mid).decode())
        self.peer = peer
        self.request_kwargs = request_kwargs

        # Invoked with the processing results of each response payload.
        self.processing_callback = processing_callback
        # Invoked if the peer never answers before the cache times out.
        self.timeout_callback = timeout_callback

        # Cap on how many packets a single request may receive from one peer:
        # a safety precaution against spam/flooding.
        self.packets_limit = 10

        # Set to True as soon as the queried peer returns at least one packet.
        self.peer_responded = False

    def on_timeout(self) -> None:
        """
        Call the timeout callback, if one is registered.
        """
        if self.timeout_callback is None:
            return
        self.timeout_callback(self)
1,644
Python
.py
35
39.457143
99
0.68793
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,985
payload.py
Tribler_tribler/src/tribler/core/content_discovery/payload.py
from __future__ import annotations from typing import TYPE_CHECKING from ipv8.messaging.lazy_payload import VariablePayload, vp_compile from ipv8.messaging.serialization import default_serializer from typing_extensions import Self if TYPE_CHECKING: from tribler.core.torrent_checker.dataclasses import HealthInfo @vp_compile class TorrentInfoFormat(VariablePayload): """ For a given infohash at a given time: the known seeders and leechers. """ format_list = ["20s", "I", "I", "Q"] names = ["infohash", "seeders", "leechers", "timestamp"] length = 36 infohash: bytes seeders: int leechers: int timestamp: int def to_tuple(self) -> tuple[bytes, int, int, int]: """ Convert this payload to a tuple. """ return self.infohash, self.seeders, self.leechers, self.timestamp @classmethod def from_list_bytes(cls: type[Self], serialized: bytes) -> list[Self]: """ Convert the given bytes to a list of this payload. """ return default_serializer.unpack_serializable_list([cls] * (len(serialized) // cls.length), serialized, consume_all=False)[:-1] @vp_compile class TorrentsHealthPayload(VariablePayload): """ A payload for lists of health information. For backward compatibility, this payload includes two lists. Originally, one list was for random torrents and one list was for torrents that we personally checked. Now, only one is used. """ msg_id = 1 format_list = ["I", "I", "varlenI", "raw"] # Number of random torrents, number of torrents checked by you names = ["random_torrents_length", "torrents_checked_length", "random_torrents", "torrents_checked"] random_torrents_length: int torrents_checked_length: int random_torrents: list[tuple[bytes, int, int, int]] torrents_checked: list[tuple[bytes, int, int, int]] def fix_pack_random_torrents(self, value: list[tuple[bytes, int, int, int]]) -> bytes: """ Convert the list of random torrent info tuples to bytes. 
""" return b"".join(default_serializer.pack_serializable(TorrentInfoFormat(*sublist)) for sublist in value) def fix_pack_torrents_checked(self, value: list[tuple[bytes, int, int, int]]) -> bytes: """ Convert the list of checked torrent info tuples to bytes. """ return b"".join(default_serializer.pack_serializable(TorrentInfoFormat(*sublist)) for sublist in value) @classmethod def fix_unpack_random_torrents(cls: type[Self], value: bytes) -> list[tuple[bytes, int, int, int]]: """ Convert the raw data back to a list of random torrent info tuples. """ return [payload.to_tuple() for payload in TorrentInfoFormat.from_list_bytes(value)] @classmethod def fix_unpack_torrents_checked(cls: type[Self], value: bytes) -> list[tuple[bytes, int, int, int]]: """ Convert the raw data back to a list of checked torrent info tuples. """ return [payload.to_tuple() for payload in TorrentInfoFormat.from_list_bytes(value)] @classmethod def create(cls: type[Self], random_torrents_checked: list[HealthInfo], popular_torrents_checked: list[HealthInfo]) -> Self: """ Create a payload from the given lists. """ random_torrent_tuples = [(health.infohash, health.seeders, health.leechers, health.last_check) for health in random_torrents_checked] popular_torrent_tuples = [(health.infohash, health.seeders, health.leechers, health.last_check) for health in popular_torrents_checked] return cls(len(random_torrents_checked), len(popular_torrents_checked), random_torrent_tuples, popular_torrent_tuples) @vp_compile class PopularTorrentsRequest(VariablePayload): """ A request to be sent the health information of popular torrents. """ msg_id = 2 @vp_compile class VersionRequest(VariablePayload): """ A request for the Tribler version and Operating System of a peer. """ msg_id = 101 @vp_compile class VersionResponse(VariablePayload): """ A response to a request for Tribler version and OS. 
""" msg_id = 102 format_list = ["varlenI", "varlenI"] names = ["version", "platform"] version: str platform: str def fix_pack_version(self, value: str) -> bytes: """ Convert the (utf-8) Tribler version string to bytes. """ return value.encode() def fix_pack_platform(self, value: str) -> bytes: """ Convert the (utf-8) platform description string to bytes. """ return value.encode() @classmethod def fix_unpack_version(cls: type[Self], value: bytes) -> str: """ Convert the packed Tribler version back to a string. """ return value.decode() @classmethod def fix_unpack_platform(cls: type[Self], value: bytes) -> str: """ Convert the packed platform description back to a string. """ return value.decode() @vp_compile class RemoteSelectPayload(VariablePayload): """ A payload to sent SQL queries to other peers. """ msg_id = 201 format_list = ["I", "varlenH"] names = ["id", "json"] id: int json: bytes @vp_compile class SelectResponsePayload(VariablePayload): """ A response to a select request. """ msg_id = 202 format_list = ["I", "raw"] names = ["id", "raw_blob"] id: int raw_blob: bytes
5,683
Python
.py
143
32.748252
113
0.653573
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,986
search_endpoint.py
Tribler_tribler/src/tribler/core/content_discovery/restapi/search_endpoint.py
from __future__ import annotations from binascii import hexlify from typing import TYPE_CHECKING from aiohttp import web from aiohttp_apispec import docs, querystring_schema from ipv8.REST.schema import schema from marshmallow.fields import Integer, List, String from typing_extensions import TypeAlias from tribler.core.database.queries import to_fts_query from tribler.core.database.restapi.database_endpoint import DatabaseEndpoint from tribler.core.database.restapi.schema import MetadataParameters from tribler.core.restapi.rest_endpoint import HTTP_BAD_REQUEST, MAX_REQUEST_SIZE, RESTEndpoint, RESTResponse if TYPE_CHECKING: from tribler.core.content_discovery.community import ContentDiscoveryCommunity from tribler.core.restapi.rest_manager import TriblerRequest RequestType: TypeAlias = TriblerRequest[tuple[ContentDiscoveryCommunity]] class RemoteQueryParameters(MetadataParameters): """ The REST API schema for requets to other peers. """ uuid = String() channel_pk = String(description="Channel to query, must also define origin_id") origin_id = Integer(default=None, description="Peer id to query, must also define channel_pk") class SearchEndpoint(RESTEndpoint): """ This endpoint is responsible for searching in channels and torrents present in the local Tribler database. """ path = "/api/search" def __init__(self, middlewares: tuple = (), client_max_size: int = MAX_REQUEST_SIZE) -> None: """ Create a new search endpoint. 
""" super().__init__(middlewares, client_max_size) self.content_discovery_community = None self.required_components = ("content_discovery_community", ) self.app.add_routes([web.put("/remote", self.remote_search)]) @docs( tags=["Metadata"], summary="Perform a search for a given query.", responses={ 200: { "schema": schema(RemoteSearchResponse={"request_uuid": String(), "peers": List(String())}), "examples": { "Success": { "request_uuid": "268560c0-3f28-4e6e-9d85-d5ccb0269693", "peers": ["50e9a2ce646c373985a8e827e328830e053025c6", "107c84e5d9636c17b46c88c3ddb54842d80081b0"] } } } }, ) @querystring_schema(RemoteQueryParameters) async def remote_search(self, request: RequestType) -> RESTResponse: """ Perform a search for a given query. """ self._logger.info("Create remote search request") # Results are returned over the Events endpoint. try: sanitized = DatabaseEndpoint.sanitize_parameters(request.query) except (ValueError, KeyError) as e: return RESTResponse({"error": f"Error processing request parameters: {e}"}, status=HTTP_BAD_REQUEST) query = request.query.get("fts_text") if query is None: return RESTResponse({"error": f"Got search with no fts_text: {dict(request.query)}"}, status=HTTP_BAD_REQUEST) if t_filter := request.query.get("filter"): query += f" {t_filter}" fts = to_fts_query(query) sanitized["txt_filter"] = fts self._logger.info("Parameters: %s", str(sanitized)) self._logger.info("FTS: %s", fts) request_uuid, peers_list = request.context[0].send_search_request(**sanitized) peers_mid_list = [hexlify(p.mid).decode() for p in peers_list] return RESTResponse({"request_uuid": str(request_uuid), "peers": peers_mid_list})
3,680
Python
.py
76
39.697368
112
0.664065
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,987
community.py
Tribler_tribler/src/tribler/core/recommender/community.py
from __future__ import annotations import json import os from binascii import hexlify, unhexlify from typing import TYPE_CHECKING, TypedDict, cast from ipv8.community import Community, CommunitySettings from ipv8.lazy_community import lazy_wrapper from ipv8.requestcache import NumberCache, RequestCache from tribler.core.recommender.payload import Crawl, CrawlInfo, CrawlResponse if TYPE_CHECKING: from ipv8.types import Peer from tribler.core.recommender.manager import Manager class ResultItem(TypedDict): """ A displayed result. """ infohash: str seeders: int leechers: int def create_crawl_fragment(query_id: int, from_id: int, to_id: int = -1) -> dict: """ A request to get results with id ``from_id`` to ``to_id`` for the query ``query_id``. """ return { "version": 0, "type": "query_fragment", "query_id": query_id, "from_id": from_id, "to_id": to_id } def create_crawl_fragment_response(query_id: int, from_id: int, to_id: int, infohashes: list[str], seeders: list[int], leechers: list[int]) -> dict: """ A response to with results that have id ``from_id`` to ``to_id`` for the query ``query_id``. """ return { "version": 0, "type": "query_fragment", "query_id": query_id, "from_id": from_id, "to_id": to_id, "infohashes": infohashes, "seeders": seeders, "leechers": leechers, } def create_crawl_query_info(query_id: int) -> dict: """ A request to get the number of available results for the query with the id ``query_id``. """ return { "version": 0, "type": "query_info", "query_id": query_id } def create_crawl_query_info_response(query_id: int, timestamp: int, results: int, chosen_index: int, query: str) -> dict: """ A response with the number of available results for the query with the id ``query_id``. """ return { "version": 1, "type": "query_info", "query_id": query_id, "results": results, "chosen_index": chosen_index, "timestamp": timestamp, "query": query } def create_crawl_table_size_response(total_queries: int) -> dict: """ A response with the number of performed queries. 
""" return { "version": 0, "type": "table_size", "total_queries": total_queries } class RecommenderSettings(CommunitySettings): """ Settings for the RecommenderCommunity. """ crawler_mid: bytes = b"Jy\xa9\x90G\x86\xec[\xde\xda\xf8(\xe6\x81l\xa2\xe0\xba\xaf\xac" manager: Manager crawl_directory: str = "crawl" class RecommenderCommunity(Community): """ A community that allows peer-to-peer learning to rank. """ community_id = b"RecommenderOverlay\x00\x00" settings_class = RecommenderSettings MAX_RESULTS_IN_PACKET = 14 def __init__(self, settings: RecommenderSettings) -> None: """ Create a new recommender community. """ super().__init__(settings) self.manager = settings.manager self.crawler_mid = settings.crawler_mid self.add_message_handler(CrawlInfo, self.on_crawl_info) self.add_message_handler(Crawl, self.on_crawl) self.add_message_handler(CrawlResponse, self.on_crawl_response) def json_pack(self, info: dict) -> bytes: """ Convert crawl request/response dictionary to a bytes string. """ return json.dumps(info, separators=(',', ':')).encode() def valid_crawler(self, peer: Peer, mid: bytes) -> bool: """ Check if the given peer is allowed to crawl. """ return peer.mid == self.crawler_mid and mid == self.my_peer.mid @lazy_wrapper(CrawlInfo) def on_crawl_info(self, peer: Peer, payload: CrawlInfo) -> None: """ Process a crawl info message. """ if not self.valid_crawler(peer, payload.mid): self.logger.warning("Unknown peer attempting to crawl us!") return self.ez_send(peer, CrawlResponse(peer.mid, self.json_pack(create_crawl_table_size_response( total_queries=self.manager.get_total_queries() )), b"")) def process_query_fragment(self, peer: Peer, request: dict) -> None: """ We received a query fragment. 
""" query = self.manager.get_query(request["query_id"]) unpacked = json.loads(query.json) results = unpacked["results"][request["from_id"]: request["to_id"]][:self.MAX_RESULTS_IN_PACKET] self.ez_send(peer, CrawlResponse(peer.mid, self.json_pack(create_crawl_fragment_response( query_id=query.rowid, from_id=request["from_id"], to_id=request["from_id"] + len(results), infohashes=[r["infohash"] for r in results], seeders=[r["seeders"] for r in results], leechers=[r["leechers"] for r in results] )), b"")) def process_query_info(self, peer: Peer, request: dict) -> None: """ We received a query info request. """ query = self.manager.get_query(request["query_id"]) unpacked = json.loads(query.json) self.ez_send(peer, CrawlResponse(peer.mid, self.json_pack(create_crawl_query_info_response( query_id=query.rowid, results=len(unpacked["results"]), chosen_index=unpacked["chosen_index"], timestamp=unpacked.get("timestamp", 0), query=unpacked["query"], )), b"")) @lazy_wrapper(Crawl) def on_crawl(self, peer: Peer, payload: Crawl) -> None: """ Process a crawl message. """ if not self.valid_crawler(peer, payload.mid): self.logger.warning("Unknown peer attempting to crawl us!") return request = payload.json() request_type = request.get("type") if request_type == "query_fragment": self.process_query_fragment(peer, request) elif request_type == "query_info": self.process_query_info(peer, request) else: self.logger.warning("Crawler sent unknown request type!") @lazy_wrapper(CrawlResponse) def on_crawl_response(self, peer: Peer, payload: CrawlResponse) -> None: """ Process a crawl response message. """ self.logger.warning("Received a crawl response from %s, even though we are not a crawler!", str(peer)) class PartialQueryCache(NumberCache): """ Cache a partially fetched query response. """ def __init__(self, request_cache: RequestCache, peer: Peer, response: dict) -> None: """ A cache for a query that is being retrieved. 
""" super().__init__(request_cache, hexlify(peer.mid).decode(), response["query_id"]) self.peer = peer self.query_id = response["query_id"] self.total_results = response["results"] self.results: list[ResultItem | None] = [None] * self.total_results self.chosen_index = response["chosen_index"] self.timestamp = response.get("timestamp", 0) self.query = response["query"] def get_next_range(self) -> tuple[int, int] | None: """ Get the next range of value to request. """ try: start_index = self.results.index(None) except ValueError: return None end_index = min(start_index + RecommenderCommunity.MAX_RESULTS_IN_PACKET, len(self.results)) for i in range(start_index, end_index): if self.results[i] is not None: end_index = i return start_index, end_index def process_fragment(self, response: dict) -> None: """ Merge a fragment into our pending results. """ for i in range(len(response["infohashes"])): self.results[response["from_id"] + i] = ResultItem( infohash=response["infohashes"][i], seeders=response["seeders"][i], leechers=response["leechers"][i] ) @property def timeout_delay(self) -> float: """ Consider this peer as dropped after 2 minutes. """ return 120.0 class RecommenderCommunityCrawler(RecommenderCommunity): """ The crawler of the recommender community. """ def __init__(self, settings: RecommenderSettings) -> None: """ Create a new recommender community. """ super().__init__(settings) assert self.my_peer.mid == settings.crawler_mid, "You are not the crawler, begone!" self.request_cache = RequestCache() self.crawl_history: dict[bytes, tuple[int, set[int]]] = {} self.crawl_directory = settings.crawl_directory """ Peer -> (table_size, missing_queries) """ self.init_crawl_history() self.decode_map[CrawlResponse.msg_id] = self.on_crawl_response def init_crawl_history(self) -> None: """ Initialize the crawl history from local files. 
""" os.makedirs(self.crawl_directory, exist_ok=True) crawl_dirs = os.listdir(self.crawl_directory) for crawl_dir in crawl_dirs: peer_mid = unhexlify(crawl_dir) peer_dir = os.path.join(self.crawl_directory, crawl_dir) max_id = 0 found = set() for query_file in os.listdir(peer_dir): query_id = int(query_file.split(".")[0]) found.add(query_id) max_id = max(max_id, query_id) if max_id == 0: # No files self.crawl_history[peer_mid] = (0, set()) # Size 0, no missing ids else: missing = set(range(1, max_id + 1)) - found # Row ids are base 1, not base 0! self.crawl_history[peer_mid] = (max_id, missing) def finalize_query(self, peer: Peer, query_id: int, query: str, chosen_index: int, timestamp: int, results: list[ResultItem]) -> None: """ Update self.crawl_history and write the results to a file. """ query_dir = os.path.join(self.crawl_directory, hexlify(peer.mid).decode()) os.makedirs(query_dir, exist_ok=True) json_dict = { "query": query, "timestamp": timestamp, "chosen_index": chosen_index, "results": results } with open(f"{query_id}.json", "w") as handle: json.dump(json_dict, handle) size, missing = self.crawl_history.get(peer.mid, (0, set())) missing.remove(query_id) self.crawl_history[peer.mid] = (size, missing) async def unload(self) -> None: """ Destroy this overlay. """ await self.request_cache.shutdown() await super().unload() def crawl_next(self, peer: Peer) -> None: """ Every exchange starts with a request for crawl info (this can update over time). """ self.ez_send(peer, CrawlInfo(peer.mid, b"")) def process_table_size_response(self, peer: Peer, response: dict) -> None: """ We got the table size info from a peer. """ previous_size, missing = self.crawl_history.get(peer.mid, (0, set())) current_size = response["total_queries"] if current_size > previous_size: missing |= {i + 1 for i in range(previous_size, current_size)} # Row ids are base 1, not base 0! 
self.crawl_history[peer.mid] = (current_size, missing) if missing: self.ez_send(peer, Crawl(peer.mid, self.json_pack(create_crawl_query_info( query_id=min(missing) )), b"")) else: self.logger.info("Nothing left to crawl for %s.", str(peer)) def process_query_info_response(self, peer: Peer, response: dict) -> None: """ We got the query info from a peer. """ cache = PartialQueryCache(self.request_cache, peer, response) next_range = cache.get_next_range() if next_range is None: self.logger.info("Query %d is empty for %s.", response["query_id"], str(peer)) self.finalize_query(peer, cache.query_id, cache.query, cache.chosen_index, cache.timestamp, []) else: self.request_cache.add(cache) self.ez_send(peer, Crawl(peer.mid, self.json_pack(create_crawl_fragment( query_id=response["query_id"], from_id=next_range[0], to_id=next_range[1] )), b"")) def process_query_fragment_response(self, peer: Peer, response: dict) -> None: """ We got a query fragment from a peer. """ cache = cast(PartialQueryCache, self.request_cache.pop(hexlify(peer.mid).decode(), response["query_id"])) cache.process_fragment(response) next_range = cache.get_next_range() if next_range is None: self.logger.info("Query %d has completed for %s.", response["query_id"], str(peer)) self.finalize_query(peer, cache.query_id, cache.query, cache.chosen_index, cache.timestamp, cast(list[ResultItem] , cache.results)) else: self.request_cache.add(cache) # Reset the two-minute timer self.ez_send(peer, Crawl(peer.mid, self.json_pack(create_crawl_fragment( query_id=response["query_id"], from_id=next_range[0], to_id=next_range[1] )), b"")) @lazy_wrapper(CrawlResponse) def on_crawl_response(self, peer: Peer, payload: CrawlResponse) -> None: """ Process a crawl response message. 
""" response = payload.json() request_type = response.get("type") if request_type == "query_fragment": self.process_query_fragment_response(peer, response) elif request_type == "query_info": self.process_query_info_response(peer, response) elif request_type == "table_size": self.process_table_size_response(peer, response) else: self.logger.warning("Crawlee sent unknown response type!")
14,330
Python
.py
347
32.270893
121
0.599899
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,988
manager.py
Tribler_tribler/src/tribler/core/recommender/manager.py
from __future__ import annotations from pathlib import Path from typing import TYPE_CHECKING from pony.orm import Database, db_session from pony.orm import max as pony_max from tribler.core.recommender.orm_query import define_binding if TYPE_CHECKING: from tribler.core.recommender.orm_query import Query class Manager: """ Database manager for Query objects. """ def __init__(self, db_filename: str) -> None: """ Create a new database connection to retrieve and store Query objects. """ self.db = Database() if db_filename == ":memory:": create_db = True db_path_string = ":memory:" else: create_db = not Path(db_filename).exists() db_path_string = str(db_filename) self.Query = define_binding(self.db) self.db.bind(provider="sqlite", filename=db_path_string, create_db=create_db, timeout=120.0) self.db.generate_mapping(create_tables=create_db, check_tables=True) def get_total_queries(self) -> int: """ Get the total number of queries that we know of. """ with db_session: return pony_max(q.rowid for q in self.Query) or 0 def get_query(self, query_id: int) -> Query: """ Get the Query with a given id. """ with db_session: return self.Query.get(rowid=query_id) def add_query(self, json_data: str) -> None: """ Inject data into our database. """ with db_session: self.Query(version=1, json=json_data)
1,601
Python
.py
44
28.636364
100
0.623057
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,989
payload.py
Tribler_tribler/src/tribler/core/recommender/payload.py
import json from typing import Any from ipv8.messaging.lazy_payload import VariablePayloadWID, vp_compile @vp_compile class CrawlInfo(VariablePayloadWID): """ A request for crawlable information. """ msg_id = 0 format_list = ["20s", "raw"] names = ["mid", "unknown"] mid: bytes unknown: bytes class JSONPayload: """ A generic JSON-based payload. """ format_list = ["20s", "varlenH", "raw"] names = ["mid", "data", "unknown"] mid: bytes data: bytes unknown: bytes def json(self) -> Any: # noqa: ANN401 """ Convert the data of this object to JSON. """ return json.loads(self.data) @vp_compile class Crawl(JSONPayload, VariablePayloadWID): """ A request for crawlable information. """ msg_id = 1 @vp_compile class CrawlResponse(JSONPayload, VariablePayloadWID): """ A response with crawlable information. """ msg_id = 2
967
Python
.py
39
19.974359
70
0.646542
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,990
orm_query.py
Tribler_tribler/src/tribler/core/recommender/orm_query.py
from __future__ import annotations from typing import TYPE_CHECKING from pony import orm if TYPE_CHECKING: from dataclasses import dataclass from typing import Iterator from pony.orm import Database from pony.orm.core import Entity class IterQuery(type): # noqa: D101 def __iter__(cls) -> Iterator[Query]: ... # noqa: D105 @dataclass class Query(Entity, metaclass=IterQuery): """ A generic query data object. """ rowid: int version: int json: str """ { chosen_index: int, timestamp: int, query: str, results: [{infohash: str, seeders: int, leechers: int}] } """ def define_binding(db: Database) -> type[Query]: """ Create the Query binding. """ class Query(db.Entity): """ This ORM binding class is intended to store generic Query objects. """ rowid = orm.PrimaryKey(int, size=64, auto=True) version = orm.Required(int, size=16) json = orm.Required(str) return Query
1,116
Python
.py
38
21.868421
74
0.595484
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,991
endpoint.py
Tribler_tribler/src/tribler/core/recommender/restapi/endpoint.py
from __future__ import annotations from typing import TYPE_CHECKING from aiohttp import web from aiohttp_apispec import docs, json_schema from ipv8.REST.schema import schema from marshmallow.fields import Boolean, Integer, List, Nested, String from tribler.core.restapi.rest_endpoint import MAX_REQUEST_SIZE, RESTEndpoint, RESTResponse if TYPE_CHECKING: from typing_extensions import TypeAlias from tribler.core.recommender.manager import Manager from tribler.core.restapi.rest_manager import TriblerRequest RequestType: TypeAlias = TriblerRequest[tuple[Manager]] class RecommenderEndpoint(RESTEndpoint): """ This is the top-level endpoint class that allows the GUI to communicate what has been clicked by a user. """ path = "/api/recommender" def __init__(self, middlewares: tuple = (), client_max_size: int = MAX_REQUEST_SIZE) -> None: """ Create a new database endpoint. """ super().__init__(middlewares, client_max_size) self.manager: Manager | None = None self.required_components = ("manager", ) self.app.add_routes([web.put("/clicked", self.put_clicked)]) @docs( tags=["Recommender"], summary="Set the preference relationship between infohashes.", parameters=[], responses={ 200: { "schema": schema(ClickedResponse={"added": Boolean}), "examples": {"added": True} } } ) @json_schema(schema(ClickedRequest={ "query": (String, "The query that led to the list of results"), "chosen_index": (String, "The winning result index in the results list"), "timestamp": (Integer, "The timestamp of the query"), "results": (List(Nested(schema(ClickedResult={"infohash": (String, "A displayed infohash"), "seeders": (Integer, "Its displayed number of seeders"), "leechers": (Integer, "Its displayed number of seeders")}))), "The displayed infohashes"), })) async def put_clicked(self, request: RequestType) -> RESTResponse: """ The user has clicked one infohash over others. 
We expect the format: { query: str, chosen_index: int, timestamp: int, results: list[{ infohash: str, seeders: int, leechers: int }] } """ parameters = await request.text() request.context[0].add_query(parameters) return RESTResponse({"added": True})
2,683
Python
.py
63
32.634921
115
0.609063
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,992
torrent_checker.py
Tribler_tribler/src/tribler/core/torrent_checker/torrent_checker.py
from __future__ import annotations import asyncio import logging import random import time from asyncio import CancelledError, DatagramTransport from binascii import hexlify from collections import defaultdict from typing import TYPE_CHECKING, Any, Dict, List, Tuple, cast from ipv8.taskmanager import TaskManager from pony.orm import db_session, desc, select from pony.utils import between from tribler.core.libtorrent.trackers import MalformedTrackerURLException, is_valid_url from tribler.core.notifier import Notification, Notifier from tribler.core.torrent_checker.dataclasses import HEALTH_FRESHNESS_SECONDS, HealthInfo, TrackerResponse from tribler.core.torrent_checker.torrentchecker_session import ( FakeDHTSession, TrackerSession, UdpSocketManager, create_tracker_session, ) from tribler.core.torrent_checker.tracker_manager import MAX_TRACKER_FAILURES, TrackerManager if TYPE_CHECKING: from tribler.core.database.store import MetadataStore from tribler.core.libtorrent.download_manager.download_manager import DownloadManager from tribler.tribler_config import TriblerConfigManager TRACKER_SELECTION_INTERVAL = 1 # The interval for querying a random tracker TORRENT_SELECTION_INTERVAL = 10 # The interval for checking the health of a random torrent MIN_TORRENT_CHECK_INTERVAL = 900 # How much time we should wait before checking a torrent again TORRENT_CHECK_RETRY_INTERVAL = 30 # Interval when the torrent was successfully checked for the last time MAX_TORRENTS_CHECKED_PER_SESSION = 5 # (5 random + 5 per tracker = 10 torrents) per 10 seconds TORRENT_SELECTION_POOL_SIZE = 2 # How many torrents to check (popular or random) during periodic check USER_CHANNEL_TORRENT_SELECTION_POOL_SIZE = 5 # How many torrents to check from user's channel during periodic check TORRENTS_CHECKED_RETURN_SIZE = 240 # Estimated torrents checked on default 4 hours idle run def aggregate_responses_for_infohash(infohash: bytes, responses: List[TrackerResponse]) -> HealthInfo: """ Finds the "best" health 
info (with the max number of seeders) for a specified infohash. """ result = HealthInfo(infohash, last_check=0) for response in responses: for health in response.torrent_health_list: if health.infohash == infohash and health > result: result = health return result class TorrentChecker(TaskManager): """ A class to check the health of torrents. """ def __init__(self, config: TriblerConfigManager, download_manager: DownloadManager, notifier: Notifier, tracker_manager: TrackerManager, metadata_store: MetadataStore, socks_listen_ports: List[int] | None = None) -> None: """ Create a new TorrentChecker. """ super().__init__() self._logger = logging.getLogger(self.__class__.__name__) self.tracker_manager = tracker_manager self.mds = metadata_store self.download_manager = download_manager self.notifier = notifier self.config = config self.socks_listen_ports = socks_listen_ports self._should_stop = False self.sessions: dict[str, list[TrackerSession]] = defaultdict(list) self.socket_mgr = UdpSocketManager() self.udp_transport: DatagramTransport | None = None # We keep track of the results of popular torrents checked by you. # The content_discovery community gossips this information around. self._torrents_checked: Dict[bytes, HealthInfo] | None = None async def initialize(self) -> None: """ Start all the looping tasks for the checker and creata socket. """ self.register_task("check random tracker", self.check_random_tracker, interval=TRACKER_SELECTION_INTERVAL) self.register_task("check local torrents", self.check_local_torrents, interval=TORRENT_SELECTION_INTERVAL) await self.create_socket_or_schedule() async def listen_on_udp(self) -> DatagramTransport: """ Start listening on a UDP port. """ loop = asyncio.get_event_loop() transport, _ = await loop.create_datagram_endpoint(lambda: self.socket_mgr, local_addr=('0.0.0.0', 0)) return transport async def create_socket_or_schedule(self) -> None: """ This method attempts to bind to a UDP port. If it fails for some reason (i.e. 
no network connection), we try again later. """ try: self.udp_transport = await self.listen_on_udp() except OSError as e: self._logger.exception("Error when creating UDP socket in torrent checker: %s", e) self.register_task("listen_udp_port", self.create_socket_or_schedule, delay=10) async def shutdown(self) -> None: """ Shutdown the torrent health checker. Once shut down it can't be started again. :returns A deferred that will fire once the shutdown has completed. """ self._should_stop = True if self.udp_transport: self.udp_transport.close() self.udp_transport = None await self.shutdown_task_manager() async def check_random_tracker(self) -> None: """ Calling this method will fetch a random tracker from the database, select some torrents that have this tracker, and perform a request to these trackers. Return whether the check was successful. """ if self._should_stop: self._logger.warning("Not performing tracker check since we are shutting down") return tracker = self.get_next_tracker() if not tracker: self._logger.warning("No tracker to select from to check torrent health, skip") return # get the torrents that should be checked url = tracker.url with db_session: dynamic_interval = TORRENT_CHECK_RETRY_INTERVAL * (2 ** tracker.failures) torrents = select(ts for ts in tracker.torrents if ts.has_data == 1 # The condition had to be written this way for the index to work and ts.last_check + dynamic_interval < int(time.time())) infohashes = [t.infohash for t in torrents[:MAX_TORRENTS_CHECKED_PER_SESSION]] if len(infohashes) == 0: # We have no torrent to recheck for this tracker. Still update the last_check for this tracker. 
self._logger.info("No torrent to check for tracker %s", url) self.tracker_manager.update_tracker_info(url) return try: session = self.create_session_for_request(url, timeout=30) except MalformedTrackerURLException as e: session = None # Remove the tracker from the database self.tracker_manager.remove_tracker(url) self._logger.warning(e) if session is None: self._logger.warning('A session cannot be created. The torrent check procedure has been cancelled.') return # We shuffle the list so that different infohashes are checked on subsequent scrape requests if the total # number of infohashes exceeds the maximum number of infohashes we check. random.shuffle(infohashes) for infohash in infohashes: session.add_infohash(infohash) self._logger.info("Selected %d new torrents to check on random tracker: %s", len(infohashes), url) try: response = await self.get_tracker_response(session) except Exception as e: self._logger.warning(e) else: health_list = response.torrent_health_list self._logger.info("Received %d health info results from tracker: %s", len(health_list), str(health_list)) async def get_tracker_response(self, session: TrackerSession) -> TrackerResponse: """ Get the response from a given session. 
""" t1 = time.time() try: result = await session.connect_to_tracker() except CancelledError: self._logger.info("Tracker session is being cancelled: %s", session.tracker_url) raise except Exception as e: exception_str = str(e).replace('\n]', ']') self._logger.warning("Got session error for the tracker: %s\n%s", session.tracker_url, exception_str) self.tracker_manager.update_tracker_info(session.tracker_url, False) raise e # noqa: TRY201 finally: await self.clean_session(session) t2 = time.time() self._logger.info("Got response from %s in %f seconds: %s", session.__class__.__name__, round(t2 - t1, 3), str(result)) with db_session: for health in result.torrent_health_list: self.update_torrent_health(health) return result @property def torrents_checked(self) -> Dict[bytes, HealthInfo]: """ Get the checked torrents and their health information. """ if self._torrents_checked is None: self._torrents_checked = self.load_torrents_checked_from_db() lines = '\n'.join(f' {health}' for health in sorted(self._torrents_checked.values(), key=lambda health: -health.last_check)) self._logger.info("Initially loaded self-checked torrents:\n%s", lines) return self._torrents_checked @db_session def load_torrents_checked_from_db(self) -> Dict[bytes, HealthInfo]: """ Load the health information from the database. """ result = {} now = int(time.time()) last_fresh_time = now - HEALTH_FRESHNESS_SECONDS checked_torrents = list(self.mds.TorrentState .select(lambda g: g.has_data == 1 # Had to be written this way for index to work and g.self_checked and between(g.last_check, last_fresh_time, now)) .order_by(lambda g: (desc(g.seeders), g.last_check)) .limit(TORRENTS_CHECKED_RETURN_SIZE)) for torrent in checked_torrents: result[torrent.infohash] = HealthInfo(torrent.infohash, torrent.seeders, torrent.leechers, last_check=torrent.last_check, self_checked=True) return result @db_session def torrents_to_check(self) -> list: """ Two categories of torrents are selected (popular & old). 
From the pool of selected torrents, a certain number of them are submitted for health check. The torrents that are within the freshness window are excluded from the selection considering the health information is still fresh. 1. Popular torrents (50%) The indicator for popularity here is considered as the seeder count with direct proportionality assuming more seeders -> more popular. There could be other indicators to be introduced later. 2. Old torrents (50%) By old torrents, we refer to those checked quite farther in the past, sorted by the last_check value. """ last_fresh_time = time.time() - HEALTH_FRESHNESS_SECONDS popular_torrents = list(self.mds.TorrentState.select( lambda g: g.has_data == 1 # The condition had to be written this way for the partial index to work and g.last_check < last_fresh_time ).order_by(lambda g: (desc(g.seeders), g.last_check)).limit(TORRENT_SELECTION_POOL_SIZE)) old_torrents = list(self.mds.TorrentState.select( lambda g: g.has_data == 1 # The condition had to be written this way for the partial index to work and g.last_check < last_fresh_time ).order_by(lambda g: (g.last_check, desc(g.seeders))).limit(TORRENT_SELECTION_POOL_SIZE)) selected_torrents = popular_torrents + old_torrents return random.sample(selected_torrents, min(TORRENT_SELECTION_POOL_SIZE, len(selected_torrents))) async def check_local_torrents(self) -> Tuple[List, List]: """ Perform a full health check on a few popular and old torrents in the database. """ selected_torrents = self.torrents_to_check() self._logger.info("Check %d local torrents", len(selected_torrents)) results = [await self.check_torrent_health(t.infohash) for t in selected_torrents] self._logger.info("Results for local torrents check: %s", str(results)) return selected_torrents, results def get_next_tracker(self) -> Any | None: # noqa: ANN401 """ Return the next unchecked tracker. 
""" while tracker := self.tracker_manager.get_next_tracker(): url = tracker.url if not is_valid_url(url): self.tracker_manager.remove_tracker(url) elif tracker.failures >= MAX_TRACKER_FAILURES: self.tracker_manager.update_tracker_info(url, is_successful=False) else: return tracker return None def is_blacklisted_tracker(self, tracker_url: str) -> bool: """ Check if a given url is in the blacklist. """ return tracker_url in self.tracker_manager.blacklist @db_session def get_valid_trackers_of_torrent(self, infohash: bytes) -> set[str]: """ Get a set of valid trackers for torrent. Also remove any invalid torrent. """ db_tracker_list = self.mds.TorrentState.get(infohash=infohash).trackers return {tracker.url for tracker in db_tracker_list if is_valid_url(tracker.url) and not self.is_blacklisted_tracker(tracker.url)} async def check_torrent_health(self, infohash: bytes, timeout: float = 20, scrape_now: bool = False) -> HealthInfo: """ Check the health of a torrent with a given infohash. :param infohash: Torrent infohash. 
:param timeout: The timeout to use in the performed requests :param scrape_now: Flag whether we want to force scraping immediately """ infohash_hex = hexlify(infohash).decode() self._logger.info("Check health for the torrent: %s", infohash_hex) tracker_set = [] # We first check whether the torrent is already in the database and checked before with db_session: torrent_state = self.mds.TorrentState.get(infohash=infohash) if torrent_state: last_check = torrent_state.last_check time_diff = time.time() - last_check if time_diff < MIN_TORRENT_CHECK_INTERVAL and not scrape_now: self._logger.info("Time interval too short, not doing torrent health check for %s", infohash_hex) return torrent_state.to_health() # get torrent's tracker list from DB tracker_set = self.get_valid_trackers_of_torrent(torrent_state.infohash) self._logger.info("Trackers for %s: %s", infohash_hex, str(tracker_set)) coroutines = [] for tracker_url in tracker_set: if session := self.create_session_for_request(tracker_url, timeout=timeout): session.add_infohash(infohash) coroutines.append(self.get_tracker_response(session)) session = FakeDHTSession(self.download_manager, timeout) session.add_infohash(infohash) self._logger.info("DHT session has been created for %s: %s", infohash_hex, str(session)) self.sessions["DHT"].append(session) coroutines.append(self.get_tracker_response(session)) responses = await asyncio.gather(*coroutines, return_exceptions=True) self._logger.info("%d responses for %s have been received: %s", len(responses), infohash_hex, str(responses)) successful_responses = [response for response in responses if not isinstance(response, Exception)] health = aggregate_responses_for_infohash(infohash, cast(List[TrackerResponse], successful_responses)) if health.last_check == 0: self.notify(health) # We don't need to store this in the db, but we still need to notify the GUI else: self.update_torrent_health(health) return health def create_session_for_request(self, tracker_url: str, timeout: 
float = 20) -> TrackerSession | None: """ Create a tracker session for a given url. """ self._logger.debug("Creating a session for the request: %s", tracker_url) required_hops = self.config.get("libtorrent/download_defaults/number_hops") actual_hops = len(self.socks_listen_ports or []) if required_hops > actual_hops: self._logger.warning("Dropping the request. Required amount of hops not reached. " "Required hops: %d. Actual hops: %d", required_hops, actual_hops) return None listen_ports = cast(List[int], self.socks_listen_ports) # Guaranteed by check above proxy = ('127.0.0.1', listen_ports[required_hops - 1]) if required_hops > 0 else None session = create_tracker_session(tracker_url, timeout, proxy, self.socket_mgr) self._logger.info("Tracker session has been created: %s", str(session)) self.sessions[tracker_url].append(session) return session async def clean_session(self, session: TrackerSession) -> None: """ Destruct a given session. """ url = session.tracker_url self.tracker_manager.update_tracker_info(url, not session.is_failed) # Remove the session from our session list dictionary self.sessions[url].remove(session) if len(self.sessions[url]) == 0 and url != "DHT": del self.sessions[url] await session.cleanup() self._logger.debug('Session has been cleaned up') def update_torrent_health(self, health: HealthInfo) -> bool: """ Updates the torrent state in the database if it already exists, otherwise do nothing. Returns True if the update was successful, False otherwise. """ if not health.is_valid(): self._logger.warning("Invalid health info ignored: %s", health) return False if not health.self_checked: self._logger.error("Self-checked torrent health expected. 
Got: %s", health) return False self._logger.debug("Update torrent health: %s", health) with db_session: # Update torrent state torrent_state = self.mds.TorrentState.get_for_update(infohash=health.infohash) if not torrent_state: self._logger.warning("Unknown torrent: %s", hexlify(health.infohash).decode()) return False prev_health = torrent_state.to_health() if not health.should_replace(prev_health): self._logger.info("Skip health update, the health in the database is fresher or have more seeders") self.notify(prev_health) # to update UI state from "Checking..." return False torrent_state.set(seeders=health.seeders, leechers=health.leechers, last_check=health.last_check, self_checked=True) if health.seeders > 0 or health.leechers > 0: self.torrents_checked[health.infohash] = health else: self.torrents_checked.pop(health.infohash, None) self.notify(health) return True def notify(self, health: HealthInfo) -> None: """ Send a health update to the GUI. """ self.notifier.notify(Notification.torrent_health_updated, infohash=hexlify(health.infohash).decode(), num_seeders=health.seeders, num_leechers=health.leechers, last_tracker_check=health.last_check, health='updated')
20,264
Python
.py
378
43.002646
119
0.646519
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,993
torrentchecker_session.py
Tribler_tribler/src/tribler/core/torrent_checker/torrentchecker_session.py
from __future__ import annotations import logging import random import socket import struct import time from abc import ABCMeta, abstractmethod from asyncio import DatagramProtocol, Future, TimeoutError, ensure_future, get_event_loop from typing import TYPE_CHECKING, Any, List, NoReturn, cast import async_timeout import libtorrent as lt from aiohttp import ClientResponseError, ClientSession, ClientTimeout from ipv8.taskmanager import TaskManager from tribler.core.libtorrent.trackers import add_url_params, parse_tracker_url from tribler.core.socks5.aiohttp_connector import Socks5Connector from tribler.core.socks5.client import Socks5Client from tribler.core.torrent_checker.dataclasses import HealthInfo, TrackerResponse if TYPE_CHECKING: from ipv8.messaging.interfaces.udp.endpoint import DomainAddress from tribler.core.libtorrent.download_manager import DownloadManager # Although these are the actions for UDP trackers, they can still be used as # identifiers. TRACKER_ACTION_CONNECT = 0 TRACKER_ACTION_ANNOUNCE = 1 TRACKER_ACTION_SCRAPE = 2 UDP_TRACKER_INIT_CONNECTION_ID = 0x41727101980 MAX_INFOHASHES_IN_SCRAPE = 60 class TrackerSession(TaskManager): """ A single session to query some (subclass) type of trackers. """ __meta__ = ABCMeta def __init__(self, tracker_type: str, tracker_url: str, tracker_address: tuple[str, int], announce_page: str, timeout: float) -> None: """ Initialize the base fields of this class. """ super().__init__() self._logger = logging.getLogger(self.__class__.__name__) # tracker type in lowercase self.tracker_type = tracker_type self.tracker_url = tracker_url self.tracker_address = tracker_address # if this is a nonempty string it starts with '/'. 
self.announce_page = announce_page self.timeout = timeout self.infohash_list: list[bytes] = [] self.last_contact = 0 # some flags self.is_initiated = False # you cannot add requests to a session if it has been initiated self.is_finished = False self.is_failed = False def __str__(self) -> str: """ Format this class as a human-readable string. """ return f"{self.__class__.__name__}[{self.tracker_type}, {self.tracker_url}]" async def cleanup(self) -> None: """ Shutdown and invalidate. """ await self.shutdown_task_manager() self.infohash_list = [] def has_infohash(self, infohash: bytes) -> bool: """ Whether our list of infohashes to check includes the given infohash. """ return infohash in self.infohash_list def add_infohash(self, infohash: bytes) -> None: """ Adds an infohash into this session. :param infohash: The infohash to be added. """ assert not self.is_initiated, "Must not add request to an initiated session." assert not self.has_infohash(infohash), "Must not add duplicate requests" if len(self.infohash_list) < MAX_INFOHASHES_IN_SCRAPE: self.infohash_list.append(infohash) def failed(self, msg: str | None = None) -> NoReturn: """ This method handles everything that needs to be done when one step in the session has failed and thus no data can be obtained. :raises ValueError: always. """ if not self.is_failed: self.register_anonymous_task("Cleanup", self.cleanup) self.is_failed = True result_msg = f"{self.tracker_type} tracker failed for url {self.tracker_url}" if msg: result_msg += f" (error: {msg})" raise ValueError(result_msg) @abstractmethod async def connect_to_tracker(self) -> TrackerResponse: """Does some work when a connection has been established.""" class HttpTrackerSession(TrackerSession): """ A session for HTTP tracker checks. """ def __init__(self, tracker_url: str, tracker_address: tuple[str, int], announce_page: str, timeout: float, proxy: tuple) -> None: """ Create a new HTTP tracker session. 
""" super().__init__("http", tracker_url, tracker_address, announce_page, timeout) self.session = ClientSession(connector=Socks5Connector(proxy) if proxy else None, raise_for_status=True, timeout=ClientTimeout(total=self.timeout)) async def connect_to_tracker(self) -> TrackerResponse: """ Create the HTTP GET message. """ # Note: some trackers have strange URLs, e.g., # http://moviezone.ws/announce.php?passkey=8ae51c4b47d3e7d0774a720fa511cc2a # which has some sort of 'key' as parameter, so we need to use the add_url_params # utility function to handle such cases. url = add_url_params("http://{}:{}{}".format(self.tracker_address[0], self.tracker_address[1], self.announce_page.replace("announce", "scrape")), {"info_hash": self.infohash_list}) # no more requests can be appended to this session self.is_initiated = True self.last_contact = int(time.time()) try: self._logger.debug("%s HTTP SCRAPE message sent: %s", self, url) async with self.session, self.session.get(url.encode("ascii").decode()) as response: body = await response.read() except UnicodeEncodeError: raise except ClientResponseError as e: self._logger.warning("%s HTTP SCRAPE error response code %s", self, e.status) self.failed(msg=f"error code {e.status}") except Exception as e: self.failed(msg=str(e)) return self.process_scrape_response(body) def process_scrape_response(self, body: bytes | None) -> TrackerResponse: """ This function handles the response body of an HTTP result from an HTTP tracker. """ if body is None: self.failed(msg="no response body") response_dict = cast(dict[bytes, Any], lt.bdecode(body)) if not response_dict: self.failed(msg="no valid response") health_list: List[HealthInfo] = [] now = int(time.time()) unprocessed_infohashes = set(self.infohash_list) files = response_dict.get(b"files") if isinstance(files, dict): for infohash, file_info in files.items(): seeders = leechers = 0 if isinstance(file_info, dict): # "complete: number of peers with the entire file, i.e. 
seeders (integer)" # - https://wiki.theory.org/BitTorrentSpecification#Tracker_.27scrape.27_Convention seeders = file_info.get(b"complete", 0) leechers = file_info.get(b"incomplete", 0) unprocessed_infohashes.discard(infohash) health_list.append(HealthInfo(infohash, seeders, leechers, last_check=now, self_checked=True)) elif b"failure reason" in response_dict: self._logger.info("%s Failure as reported by tracker [%s]", self, repr(response_dict[b"failure reason"])) self.failed(msg=repr(response_dict[b"failure reason"])) # handle the infohashes with no result (seeders/leechers = 0/0) health_list.extend(HealthInfo(infohash=infohash, last_check=now, self_checked=True) for infohash in unprocessed_infohashes) self.is_finished = True return TrackerResponse(url=self.tracker_url, torrent_health_list=health_list) async def cleanup(self) -> None: """ Cleans the session by cancelling all deferreds and closing sockets. """ await self.session.close() await super().cleanup() class UdpSocketManager(DatagramProtocol): """ The UdpSocketManager ensures that the network packets are forwarded to the right UdpTrackerSession. """ def __init__(self) -> None: """ Create a new UDP socket protocol for trackers. """ self._logger = logging.getLogger(self.__class__.__name__) self.tracker_sessions: dict[int, Future[bytes]] = {} self.transport: Socks5Client | None = None self.proxy_transports: dict[tuple, Socks5Client] = {} def connection_made(self, transport: Socks5Client) -> None: """ Callback for when a connection is established. """ self.transport = transport async def send_request(self, data: bytes, tracker_session: UdpTrackerSession) -> RuntimeError | bytes: """ Send a request and wait for the answer. 
""" transport: Socks5Client | None = self.transport proxy = tracker_session.proxy if proxy: transport = self.proxy_transports.get(proxy, Socks5Client(proxy, self.datagram_received)) if not transport.associated: await transport.associate_udp() if proxy not in self.proxy_transports: self.proxy_transports[proxy] = transport if transport is None: return RuntimeError("Unable to write without transport") host = tracker_session.ip_address or tracker_session.tracker_address[0] try: transport.sendto(data, (host, tracker_session.port)) f = self.tracker_sessions[tracker_session.transaction_id] = Future() return await f except OSError as e: self._logger.warning("Unable to write data to %s:%d - %s", tracker_session.ip_address, tracker_session.port, e) return RuntimeError("Unable to write to socket - " + str(e)) def transport_received(self, data: bytes) -> None: """ If the incoming data is valid, find the tracker session and give it the data. """ if data and len(data) >= 4: transaction_id = struct.unpack_from("!i", data, 4)[0] if transaction_id in self.tracker_sessions: session = self.tracker_sessions.pop(transaction_id) if not session.done(): session.set_result(data) def datagram_received(self, data: bytes, _: DomainAddress | tuple[str, int]) -> None: """ If the incoming data is valid, find the tracker session and give it the data. """ self.transport_received(data) class UdpTrackerSession(TrackerSession): """ The UDPTrackerSession makes a connection with a UDP tracker and queries seeders and leechers for one or more infohashes. It handles the message serialization and communication with the torrent checker by making use of Deferred (asynchronously). """ # A list of transaction IDs that have been used in order to avoid conflict. _active_session_dict: dict[UdpTrackerSession, int] = {} def __init__(self, tracker_url: str, tracker_address: tuple[str, int], announce_page: str, timeout: float, proxy: tuple, socket_mgr: UdpSocketManager) -> None: """ Create a session for UDP trackers. 
""" super().__init__("udp", tracker_url, tracker_address, announce_page, timeout) self._logger.setLevel(logging.INFO) self._connection_id = 0 self.transaction_id = 0 self.port = tracker_address[1] self.ip_address = None self.socket_mgr = socket_mgr self.proxy = proxy # prepare connection message self._connection_id = UDP_TRACKER_INIT_CONNECTION_ID self.action = TRACKER_ACTION_CONNECT self.generate_transaction_id() def generate_transaction_id(self) -> None: """ Generates a unique transaction id and stores this in the _active_session_dict set. """ while True: # make sure there is no duplicated transaction IDs transaction_id = random.randint(0, 2147483647) if transaction_id not in UdpTrackerSession._active_session_dict.values(): UdpTrackerSession._active_session_dict[self] = transaction_id self.transaction_id = transaction_id break def remove_transaction_id(self) -> None: """ Removes an session and its corresponding id from the _active_session_dict set and the socket manager. """ if self in UdpTrackerSession._active_session_dict: del UdpTrackerSession._active_session_dict[self] # Checking for socket_mgr is a workaround for race condition # in Tribler Session startup/shutdown that sometimes causes # unit tests to fail on teardown. if self.socket_mgr and self.transaction_id in self.socket_mgr.tracker_sessions: self.socket_mgr.tracker_sessions.pop(self.transaction_id) async def cleanup(self) -> None: """ Cleans the session by cancelling all deferreds and closing sockets. :return: A deferred that fires once the cleanup is done. """ await super().cleanup() self.remove_transaction_id() async def connect_to_tracker(self) -> TrackerResponse: """ Connects to the tracker and starts querying for seed and leech data. 
:return: A dictionary containing seed/leech information per infohash """ # No more requests can be appended to this session self.is_initiated = True # Clean old tasks if present await self.cancel_pending_task("result") await self.cancel_pending_task("resolve") try: async with async_timeout.timeout(self.timeout): # We only resolve the hostname if we're not using a proxy. # If a proxy is used, the TunnelCommunity will resolve the hostname at the exit nodes. if not self.proxy: # Resolve the hostname to an IP address if not done already coro = get_event_loop().getaddrinfo(self.tracker_address[0], 0, family=socket.AF_INET) if isinstance(coro, Future): infos = await coro # In Python <=3.6 getaddrinfo returns a Future else: infos = await self.register_anonymous_task("resolve", ensure_future(coro)) self.ip_address = infos[0][-1][0] await self.connect() return await self.scrape() except TimeoutError: self.failed(msg="request timed out") except socket.gaierror as e: self.failed(msg=str(e)) async def connect(self) -> None: """ Creates a connection message and calls the socket manager to send it. """ if not self.socket_mgr.transport: self.failed(msg="UDP socket transport not ready") # Initiate the connection message = struct.pack("!qii", self._connection_id, self.action, self.transaction_id) raw_response = await self.socket_mgr.send_request(message, self) if isinstance(raw_response, Exception): self.failed(msg=str(raw_response)) response = cast(bytes, raw_response) # check message size if len(response) < 16: self._logger.error("%s Invalid response for UDP CONNECT: %s", self, repr(response)) self.failed(msg="invalid response size") # check the response action, transaction_id = struct.unpack_from("!ii", response, 0) if action != self.action or transaction_id != self.transaction_id: # get error message errmsg_length = len(response) - 8 error_message, = struct.unpack_from("!" 
+ str(errmsg_length) + "s", response, 8) self._logger.info("%s Error response for UDP CONNECT [%s]: %s", self, repr(response), repr(error_message)) self.failed(msg=error_message.decode(errors="ignore")) # update action and IDs self._connection_id = struct.unpack_from("!q", response, 8)[0] self.action = TRACKER_ACTION_SCRAPE self.generate_transaction_id() self.last_contact = int(time.time()) async def scrape(self) -> TrackerResponse: """ Parse the response of a tracker. """ fmt = "!qii" + ("20s" * len(self.infohash_list)) message = struct.pack(fmt, self._connection_id, self.action, self.transaction_id, *self.infohash_list) # Send the scrape message raw_response = await self.socket_mgr.send_request(message, self) if isinstance(raw_response, Exception): self.failed(msg=str(raw_response)) response = cast(bytes, raw_response) # check message size if len(response) < 8: self._logger.info("%s Invalid response for UDP SCRAPE: %s", self, repr(response)) self.failed("invalid message size") # check response action, transaction_id = struct.unpack_from("!ii", response, 0) if action != self.action or transaction_id != self.transaction_id: # get error message errmsg_length = len(response) - 8 error_message, = struct.unpack_from("!" + str(errmsg_length) + "s", response, 8) self._logger.info("%s Error response for UDP SCRAPE: [%s] [%s]", self, repr(response), repr(error_message)) self.failed(msg=error_message.decode(errors="ignore")) # get results if len(response) - 8 != len(self.infohash_list) * 12: self._logger.info("%s UDP SCRAPE response mismatch: %s", self, len(response)) self.failed(msg="invalid response size") offset = 8 response_list = [] now = int(time.time()) for infohash in self.infohash_list: complete, _downloaded, incomplete = struct.unpack_from("!iii", response, offset) offset += 12 # Store the information in the hash dict to be returned. # Sow complete as seeders. "complete: number of peers with the entire file, i.e. 
seeders (integer)" # - https://wiki.theory.org/BitTorrentSpecification#Tracker_.27scrape.27_Convention response_list.append(HealthInfo(infohash, seeders=complete, leechers=incomplete, last_check=now, self_checked=True)) # close this socket and remove its transaction ID from the list self.remove_transaction_id() self.last_contact = int(time.time()) self.is_finished = True return TrackerResponse(url=self.tracker_url, torrent_health_list=response_list) class FakeDHTSession(TrackerSession): """ Fake TrackerSession that manages DHT requests. """ def __init__(self, download_manager: DownloadManager, timeout: float) -> None: """ Create a new fake DHT tracker session. """ super().__init__("DHT", "DHT", ("DHT", 0), "DHT", timeout) self.download_manager = download_manager async def connect_to_tracker(self) -> TrackerResponse: """ Query the bittorrent DHT. """ health_list = [] now = int(time.time()) for infohash in self.infohash_list: metainfo = await self.download_manager.get_metainfo(infohash, timeout=self.timeout) if metainfo is None: continue health = HealthInfo(infohash, seeders=metainfo[b"seeders"], leechers=metainfo[b"leechers"], last_check=now, self_checked=True) health_list.append(health) return TrackerResponse(url="DHT", torrent_health_list=health_list) class FakeBep33DHTSession(FakeDHTSession): """ Fake session for a BEP33 lookup. """ async def connect_to_tracker(self) -> TrackerResponse: """ Query the bittorrent DHT using BEP33 to avoid joining the swarm. 
""" coros = [self.download_manager.dht_health_manager.get_health(infohash, timeout=self.timeout) for infohash in self.infohash_list] results: list[HealthInfo] = [] for coroutine in coros: local_results = [result for result in (await coroutine) if not isinstance(result, Exception)] results = [*results, *local_results] return TrackerResponse(url="DHT", torrent_health_list=results) def create_tracker_session(tracker_url: str, timeout: float, proxy: tuple, socket_manager: UdpSocketManager) -> TrackerSession: """ Creates a tracker session with the given tracker URL. :param tracker_url: The given tracker URL. :param timeout: The timeout for the session. :return: The tracker session. """ tracker_type, tracker_address, announce_page = parse_tracker_url(tracker_url) if tracker_type == "udp": return UdpTrackerSession(tracker_url, tracker_address, announce_page, timeout, proxy, socket_manager) return HttpTrackerSession(tracker_url, tracker_address, announce_page, timeout, proxy)
21,279
Python
.py
431
39.324826
117
0.63374
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,994
tracker_manager.py
Tribler_tribler/src/tribler/core/torrent_checker/tracker_manager.py
from __future__ import annotations import logging import time from pathlib import Path from typing import TYPE_CHECKING from pony.orm import count, db_session from tribler.core.libtorrent.trackers import get_uniformed_tracker_url if TYPE_CHECKING: from tribler.core.database.store import MetadataStore MAX_TRACKER_FAILURES = 5 # if a tracker fails this amount of times in a row, its 'is_alive' will be marked as 0 (dead). TRACKER_RETRY_INTERVAL = 60 # A "dead" tracker will be retired every 60 seconds class TrackerManager: """ A manager for tracker info in the database. """ def __init__(self, state_dir: Path | None = None, metadata_store: MetadataStore = None) -> None: """ Create a new tracker manager. """ self._logger = logging.getLogger(self.__class__.__name__) self.state_dir = state_dir self.TrackerState = metadata_store.TrackerState self.blacklist: list[str] = [] self.load_blacklist() def load_blacklist(self) -> None: """ Load the tracker blacklist from tracker_blacklist.txt in the session state directory. Entries are newline separated and are supposed to be sanitized. """ blacklist_file = (Path(self.state_dir or ".") / "tracker_blacklist.txt").absolute() if blacklist_file.exists(): with open(blacklist_file) as blacklist_file_handle: # Note that get_uniformed_tracker_url will strip the newline at the end of .readlines() self.blacklist.extend([get_uniformed_tracker_url(url) for url in blacklist_file_handle.readlines()]) else: self._logger.info("No tracker blacklist file found at %s.", blacklist_file) def get_tracker_info(self, tracker_url: str) -> dict[str, str | float] | None: """ Gets the tracker information with the given tracker URL. :param tracker_url: The given tracker URL. :return: The tracker info dict if exists, None otherwise. 
""" sanitized_tracker_url = get_uniformed_tracker_url(tracker_url) if tracker_url != "DHT" else tracker_url with db_session: tracker = list(self.TrackerState.select(lambda g: g.url == sanitized_tracker_url)) if tracker: return { "id": tracker[0].url, "last_check": tracker[0].last_check, "failures": tracker[0].failures, "is_alive": tracker[0].alive } return None def add_tracker(self, tracker_url: str) -> None: """ Adds a new tracker into the tracker info dict and the database. :param tracker_url: The new tracker URL to be added. """ sanitized_tracker_url = get_uniformed_tracker_url(tracker_url) if sanitized_tracker_url is None: self._logger.warning("skip invalid tracker: %s", repr(tracker_url)) return with db_session: num = count(g for g in self.TrackerState if g.url == sanitized_tracker_url) if num > 0: self._logger.debug("skip existing tracker: %s", repr(tracker_url)) return # insert into database self.TrackerState(url=sanitized_tracker_url, last_check=0, failures=0, alive=True, torrents={}) def remove_tracker(self, tracker_url: str) -> None: """ Remove a given tracker from the database. URL is sanitized first and removed from the database. If the URL is ill formed then try removing the non- sanitized version. :param tracker_url: The URL of the tracker to be deleted. """ sanitized_tracker_url = get_uniformed_tracker_url(tracker_url) with db_session: options = self.TrackerState.select(lambda g: g.url in [tracker_url, sanitized_tracker_url]) for option in options[:]: option.delete() @db_session def update_tracker_info(self, tracker_url: str, is_successful: bool = True) -> None: """ Updates a tracker information. :param tracker_url: The given tracker_url. :param is_successful: If the check was successful. 
""" if tracker_url == "DHT": return sanitized_tracker_url = get_uniformed_tracker_url(tracker_url) tracker = self.TrackerState.get(lambda g: g.url == sanitized_tracker_url) if not tracker: self._logger.error("Trying to update the tracker info of an unknown tracker URL") return current_time = int(time.time()) failures = 0 if is_successful else tracker.failures + 1 is_alive = failures < MAX_TRACKER_FAILURES # update the dict tracker.last_check = current_time tracker.failures = failures tracker.alive = is_alive self._logger.info("Tracker updated: %s. Alive: %s. Failures: %d.", tracker.url, str(is_alive), failures) @db_session def get_next_tracker(self) -> str | None: """ Gets the next tracker. :return: The next tracker for torrent-checking. """ tracker = self.TrackerState.select( lambda g: str(g.url) and g.alive and g.last_check + TRACKER_RETRY_INTERVAL <= int(time.time()) and str(g.url) not in self.blacklist ).order_by(self.TrackerState.last_check).limit(1) if not tracker: return None return tracker[0]
5,687
Python
.py
122
36.016393
120
0.611091
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,995
dataclasses.py
Tribler_tribler/src/tribler/core/torrent_checker/dataclasses.py
from __future__ import annotations import time from dataclasses import dataclass, field from enum import IntEnum MINUTE = 60 HOUR = MINUTE * 60 TOLERABLE_TIME_DRIFT = MINUTE # When receiving health from another peer, how far the timestamp can be in the future? TORRENT_CHECK_WINDOW = MINUTE # When asking multiple trackers in parallel, we ignore this time difference in responses HEALTH_FRESHNESS_SECONDS = 4 * HOUR # Number of seconds before a torrent health is considered stale. Default: 4 hours class Source(IntEnum): """ Source of the Torrent Health information. """ UNKNOWN = 0 DHT = 1 TRACKER = 2 POPULARITY_COMMUNITY = 3 @dataclass(order=True) class HealthInfo: """ An entry that described the health of a torrent at a particular moment in time. """ infohash: bytes = field(repr=False) seeders: int = 0 leechers: int = 0 last_check: int = field(default_factory=lambda: int(time.time())) self_checked: bool = False source: Source = Source.UNKNOWN tracker: str = '' def is_valid(self) -> bool: """ Whether the reported seeders and leechers are > 0 and the check was performed at most a minute in the future. """ return self.seeders >= 0 and self.leechers >= 0 and self.last_check < int(time.time()) + TOLERABLE_TIME_DRIFT def old(self) -> bool: """ Whether this check is more than 4 hours old. """ now = int(time.time()) return self.last_check < now - HEALTH_FRESHNESS_SECONDS def older_than(self, other: HealthInfo) -> bool: """ Whether this check is older than the given check. """ return self.last_check < other.last_check - TORRENT_CHECK_WINDOW def much_older_than(self, other: HealthInfo) -> bool: """ Whether this check is more than 4 hours older than the given check. """ return self.last_check + HEALTH_FRESHNESS_SECONDS < other.last_check def should_replace(self, prev: HealthInfo) -> bool: """ Whether this check should replace the given check. 
""" if self.infohash != prev.infohash: msg = "An attempt to compare health for different infohashes" raise ValueError(msg) if not self.is_valid(): return False # Health info with future last_check time is ignored if self.self_checked: return not prev.self_checked \ or prev.older_than(self) \ or (self.seeders, self.leechers) > (prev.seeders, prev.leechers) if self.older_than(prev): # Always ignore a new health info if it is older than the previous health info return False if prev.self_checked and not prev.old(): # The previous self-checked health info is fresh enough, do not replace it with a remote health info return False if prev.much_older_than(self): # The previous health info (that can be self-checked ot not) is very old, # let's replace it with a more recent remote health info return True # self is a remote health info that isn't older than previous health info, but isn't much fresher as well return (self.seeders, self.leechers) > (prev.seeders, prev.leechers) @dataclass class TrackerResponse: """ A list of health responses for the given url. """ url: str torrent_health_list: list[HealthInfo]
3,497
Python
.py
82
35.329268
119
0.65812
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,996
client.py
Tribler_tribler/src/tribler/core/socks5/client.py
from __future__ import annotations

import ipaddress
import logging
import socket
from asyncio import BaseTransport, DatagramProtocol, DatagramTransport, Protocol, Queue, WriteTransport, get_event_loop
from typing import Callable, cast

from ipv8.messaging.interfaces.udp.endpoint import DomainAddress
from ipv8.messaging.serialization import PackError

from tribler.core.socks5.conversion import (
    REQ_CMD_CONNECT,
    REQ_CMD_UDP_ASSOCIATE,
    SOCKS_AUTH_ANON,
    SOCKS_VERSION,
    CommandRequest,
    CommandResponse,
    MethodsRequest,
    MethodsResponse,
    UdpPacket,
    socks5_serializer,
)


class Socks5Error(Exception):
    """
    Errors with the SOCKS5 protocol.
    """


class Socks5ClientUDPConnection(DatagramProtocol):
    """
    A datagram protocol for Socks5 connections.
    """

    def __init__(self, callback: Callable[[bytes, DomainAddress | tuple], None]) -> None:
        """
        Create a new Socks5 udp connection.

        :param callback: called with (payload, origin address) for every successfully decoded UDP packet.
        """
        self.callback = callback
        self.transport: DatagramTransport | None = None
        # UDP address of the proxy; filled in by Socks5Client._associate_udp from the server's bind address.
        self.proxy_udp_addr = None
        self.logger = logging.getLogger(self.__class__.__name__)

    def connection_made(self, transport: BaseTransport) -> None:
        """
        Callback for when a transport is available.
        """
        self.transport = cast(DatagramTransport, transport)

    def datagram_received(self, data: bytes, _: tuple) -> None:
        """
        Callback for when data is received over our transport.

        Malformed packets are logged and dropped; decoded packets are forwarded to our callback.
        """
        try:
            request, _ = socks5_serializer.unpack_serializable(UdpPacket, data)
        except PackError:
            self.logger.warning("Error while decoding packet", exc_info=True)
        else:
            self.callback(request.data, request.destination)

    def sendto(self, data: bytes, target_addr: DomainAddress | tuple) -> None:
        """
        Attempt to send the given data to the given address.

        Does nothing when no transport is available yet.
        """
        if self.transport is None:
            return
        # Hosts that are not IPv4 dotted quads are sent as SOCKS5 domain-name addresses.
        try:
            ipaddress.IPv4Address(target_addr[0])
        except ipaddress.AddressValueError:
            target_addr = DomainAddress(*target_addr)
        packet = socks5_serializer.pack_serializable(UdpPacket(0, 0, target_addr, data))
        self.transport.sendto(packet, self.proxy_udp_addr)


class Socks5Client(Protocol):
    """
    This object represents a minimal Socks5 client. Both TCP and UDP are supported.
    """

    def __init__(self, proxy_addr: tuple, callback: Callable[[bytes, DomainAddress | tuple], None]) -> None:
        """
        Create a client for the given proxy address and call the given callback with incoming data.
        """
        self.proxy_addr = proxy_addr
        self.callback = callback
        self.transport: WriteTransport | None = None
        self.connection: Socks5ClientUDPConnection | None = None
        self.connected_to: DomainAddress | tuple | None = None
        # Holds at most one pending server reply during the SOCKS5 handshake, consumed by _send().
        self.queue: Queue[bytes] = Queue(maxsize=1)

    def data_received(self, data: bytes) -> None:
        """
        Callback for when data comes in.

        Call our registered callback or save the incoming data for calling back later.
        """
        if self.connected_to:
            self.callback(data, self.connected_to)
        elif self.queue.empty():
            self.queue.put_nowait(data)

    def connection_lost(self, _: Exception | None) -> None:
        """
        Callback for when the connection is dropped.
        """
        self.transport = None

    async def _send(self, data: bytes) -> bytes:
        """
        Send data to the remote and wait for an answer (the next datagram placed on our queue).
        """
        cast(WriteTransport, self.transport).write(data)
        return await self.queue.get()

    async def _login(self) -> None:
        """
        Send a login (anonymous-authentication method negotiation).

        :raises Socks5Error: If the proxy server is unsupported.
        """
        self.transport, _ = await get_event_loop().create_connection(lambda: self, *self.proxy_addr)

        request = MethodsRequest(SOCKS_VERSION, [SOCKS_AUTH_ANON])
        data = await self._send(socks5_serializer.pack_serializable(request))
        response, _ = socks5_serializer.unpack_serializable(MethodsResponse, data)

        if response.version != SOCKS_VERSION or response.method != SOCKS_AUTH_ANON:
            msg = "Unsupported proxy server"
            raise Socks5Error(msg)

    async def _associate_udp(self, local_addr: tuple | None = None) -> None:
        """
        Send an associate request to the connection.

        :raises Socks5Error: If the proxy server is unsupported or the association fails.
        """
        local_addr = local_addr or ('127.0.0.1', 0)
        connection = Socks5ClientUDPConnection(self.callback)
        transport, _ = await get_event_loop().create_datagram_endpoint(lambda: connection, local_addr=local_addr)
        sock = transport.get_extra_info("socket")

        request = CommandRequest(SOCKS_VERSION, REQ_CMD_UDP_ASSOCIATE, 0, sock.getsockname())
        data = await self._send(socks5_serializer.pack_serializable(request))
        response, _ = socks5_serializer.unpack_serializable(CommandResponse, data)
        # The server's bind address is where our UDP datagrams must be relayed to.
        connection.proxy_udp_addr = response.bind

        if response.version != SOCKS_VERSION:
            msg = "Unsupported proxy server"
            raise Socks5Error(msg)

        if response.reply > 0:
            msg = "UDP associate failed"
            raise Socks5Error(msg)

        self.connection = connection

    async def _connect_tcp(self, target_addr: tuple) -> None:
        """
        Connect to the given address using TCP.

        :raises Socks5Error: If the proxy server is unsupported or the connect fails.
        """
        # Hosts that are not IPv4 dotted quads are sent as SOCKS5 domain-name addresses.
        try:
            socket.inet_aton(target_addr[0])
        except (ValueError, OSError):
            target_addr = DomainAddress(*target_addr)

        request = CommandRequest(SOCKS_VERSION, REQ_CMD_CONNECT, 0, target_addr)
        data = await self._send(socks5_serializer.pack_serializable(request))
        response, _ = socks5_serializer.unpack_serializable(CommandResponse, data)

        if response.version != SOCKS_VERSION:
            msg = "Unsupported proxy server"
            raise Socks5Error(msg)

        if response.reply > 0:
            msg = "TCP connect failed"
            raise Socks5Error(msg)

        self.connected_to = target_addr

    @property
    def connected(self) -> bool:
        """
        Whether this client is connected over TCP.
        """
        return self.transport is not None and self.connected_to is not None

    @property
    def associated(self) -> bool:
        """
        Whether this client is associated over UDP.
        """
        return self.transport is not None and self.connection is not None

    async def associate_udp(self) -> None:
        """
        Login and associate with the proxy.

        :raises Socks5Error: If this client is already used for a TCP connection.
        """
        if self.connected:
            connection = cast(tuple, self.connected_to)
            msg = f"Client already used for connecting to {connection[0]}:{connection[1]}"
            raise Socks5Error(msg)
        if not self.associated:
            await self._login()
            await self._associate_udp()

    def sendto(self, data: bytes, target_addr: tuple) -> None:
        """
        Attempt to send data to the given address.

        :raises Socks5Error: If we have not associated UDP yet.
        """
        if not self.associated:
            msg = "Not associated yet. First call associate_udp."
            raise Socks5Error(msg)
        cast(Socks5ClientUDPConnection, self.connection).sendto(data, target_addr)

    async def connect_tcp(self, target_addr: tuple) -> None:
        """
        Login and connect to the proxy using TCP.

        :raises Socks5Error: If this client is already used for UDP communication.
        """
        if self.associated:
            msg = "Client already used for UDP communication"
            raise Socks5Error(msg)
        if not self.connected:
            await self._login()
            await self._connect_tcp(target_addr)

    def write(self, data: bytes) -> None:
        """
        Write to whatever transport we have.

        :raises Socks5Error: If we are not connected over TCP yet.
        """
        if not self.connected:
            msg = "Not connected yet. First call connect_tcp."
            raise Socks5Error(msg)
        cast(WriteTransport, self.transport).write(data)
8,162
Python
.py
196
33.066327
119
0.641262
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,997
udp_connection.py
Tribler_tribler/src/tribler/core/socks5/udp_connection.py
from __future__ import annotations

import logging
from asyncio import DatagramProtocol, DatagramTransport, get_event_loop
from typing import TYPE_CHECKING, cast

from ipv8.messaging.serialization import PackError

from tribler.core.socks5.conversion import UdpPacket, socks5_serializer

if TYPE_CHECKING:
    from ipv8.messaging.interfaces.udp.endpoint import DomainAddress
    from ipv8_rust_tunnels.endpoint import RustEndpoint

    from tribler.core.socks5.connection import Socks5Connection


class SocksUDPConnection(DatagramProtocol):
    """
    A datagram protocol for SOCKS5 traffic.
    """

    def __init__(self, socksconnection: Socks5Connection, remote_udp_address: DomainAddress | tuple | None) -> None:
        """
        Create a new socks5 protocol.

        :param socksconnection: the parent SOCKS5 connection this UDP association belongs to.
        :param remote_udp_address: the only peer allowed to send us datagrams;
            ("0.0.0.0", 0) means "not specified" and is treated the same as None.
        """
        self._logger = logging.getLogger(self.__class__.__name__)
        self.socksconnection = socksconnection
        self.transport: DatagramTransport | None = None
        # A client may send ("0.0.0.0", 0) when it does not know its own address yet;
        # in that case the first incoming datagram determines the remote (see cb_datagram_received).
        self.remote_udp_address = remote_udp_address if remote_udp_address != ("0.0.0.0", 0) else None

    async def open(self) -> None:
        """
        Open a transport for this protocol, listening on an OS-chosen port on localhost.
        """
        self.transport, _ = await get_event_loop().create_datagram_endpoint(lambda: self, local_addr=("127.0.0.1", 0))

    def get_listen_port(self) -> int:
        """
        Retrieve the listen port for this protocol, or 0 when no transport is open.
        """
        if self.transport:
            _, port = self.transport.get_extra_info("sockname")
        else:
            port = 0
        return port

    def send_datagram(self, data: bytes) -> bool:
        """
        Send a datagram to the known remote address. Returns False if there is no remote yet.
        """
        if self.remote_udp_address:
            cast(DatagramTransport, self.transport).sendto(data, self.remote_udp_address)
            return True
        self._logger.error("cannot send data, no clue where to send it to")
        return False

    def datagram_received(self, data: bytes, source: tuple) -> None:
        """
        The callback for when data is handed to our protocol.
        """
        self.cb_datagram_received(data, source)

    def cb_datagram_received(self, data: bytes, source: tuple) -> bool:
        """
        The callback for when data is handed to our protocol and whether the handling succeeded.

        Only datagrams from the (first) remote address are accepted; fragmented packets
        (frag != 0) are not supported and are dropped.
        """
        # If remote_address was not set before, use first one
        if self.remote_udp_address is None:
            self.remote_udp_address = source

        if self.remote_udp_address == source:
            try:
                request, _ = socks5_serializer.unpack_serializable(UdpPacket, data)
            except PackError:
                self._logger.warning("Cannot serialize UDP packet")
                return False

            if request.frag == 0:
                output_stream = self.socksconnection.socksserver.output_stream
                # When the tunnel community has not started yet, output_stream is None and
                # the data is silently swallowed (falls through to the debug log below).
                if output_stream is not None:
                    return output_stream.on_socks5_udp_data(self, request)
            self._logger.debug("No support for fragmented data or without destination host, dropping")
        else:
            self._logger.debug("Ignoring data from %s:%d, is not %s:%d", *source, *self.remote_udp_address)

        return False

    def close(self) -> None:
        """
        Close the transport associated with this protocol.
        """
        if self.transport:
            self.transport.close()
            self.transport = None


class RustUDPConnection:
    """
    A UDP connection that is offloaded to IPv8 Rust.
    """

    def __init__(self, rust_endpoint: RustEndpoint, hops: int) -> None:
        """
        Create a new connection.

        :param rust_endpoint: the Rust endpoint that performs the actual UDP handling.
        :param hops: the number of tunnel hops to use for this association.
        """
        self.rust_endpoint = rust_endpoint
        self.hops = hops
        self.port: int | None = None  # The claimed UDP-associate port; None while closed.
        self.logger = logging.getLogger(self.__class__.__name__)

    @property
    def remote_udp_address(self) -> None:
        """
        Ensure this connection doesn't get picked up by the dispatcher.
        """
        return

    @remote_udp_address.setter
    def remote_udp_address(self, address: tuple) -> None:
        """
        Set the remote address for this connection (delegated to the Rust endpoint).
        """
        self.rust_endpoint.set_udp_associate_default_remote(address)

    async def open(self) -> None:
        """
        Allow connections to be established.

        Opening an already-open connection is logged and ignored.
        """
        if self.port is not None:
            self.logger.error("UDP connection is already open on port %s", self.port)
            return

        self.port = self.rust_endpoint.create_udp_associate(0, self.hops)

    def get_listen_port(self) -> int:
        """
        Get the claimed port for this connection, or 0 when closed.
        """
        return self.port or 0

    def close(self) -> None:
        """
        Close the underlying connection. Safe to call when already closed.
        """
        if self.port is not None:
            self.rust_endpoint.close_udp_associate(self.port)
            self.port = None
5,036
Python
.py
123
32.056911
118
0.62697
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,998
__init__.py
Tribler_tribler/src/tribler/core/socks5/__init__.py
""" The Socks5 package contains a basic SOCKS5 server is included which reserves circuits for its client connections and tunnels UDP packets over them back and forth. """
171
Python
.py
4
41.75
85
0.814371
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)
22,999
conversion.py
Tribler_tribler/src/tribler/core/socks5/conversion.py
from __future__ import annotations

import logging
import socket
import struct
from typing import Any, Union

from ipv8.messaging.interfaces.udp.endpoint import DomainAddress, UDPv4Address
from ipv8.messaging.lazy_payload import VariablePayload, vp_compile
from ipv8.messaging.serialization import DefaultStruct, ListOf, Packer, Serializer

# Some constants used in the RFC 1928 specification
SOCKS_VERSION = 0x05

SOCKS_AUTH_ANON = 0x00
SOCKS_AUTH_PWD = 0x01

ADDRESS_TYPE_IPV4 = 0x01
ADDRESS_TYPE_DOMAIN_NAME = 0x03
ADDRESS_TYPE_IPV6 = 0x04

REQ_CMD_CONNECT = 0x01
REQ_CMD_BIND = 0x02
REQ_CMD_UDP_ASSOCIATE = 0x03

REP_SUCCEEDED = 0x00
REP_GENERAL_SOCKS_SERVER_FAIL = 0x01
REP_CONNECTION_NOT_ALLOWED_BY_RULE_SET = 0x02
REP_NETWORK_UNREACHABLE = 0x03
REP_HOST_UNREACHABLE = 0x04
REP_CONNECTION_REFUSED = 0x05
REP_TTL_EXPIRED = 0x06
REP_COMMAND_NOT_SUPPORTED = 0x07
REP_ADDRESS_TYPE_NOT_SUPPORTED = 0x08

logger = logging.getLogger(__name__)


@vp_compile
class MethodsRequest(VariablePayload):
    """
    A request for supported methods.
    """

    names = ["version", "methods"]
    format_list = ["B", "list_of_chars"]

    version: int
    methods: list[int]


@vp_compile
class MethodsResponse(VariablePayload):
    """
    A response for supported methods.
    """

    names = ["version", "method"]
    format_list = ["B", "B"]

    version: int
    method: int


@vp_compile
class CommandRequest(VariablePayload):
    """
    A request for commands to be executed.
    """

    names = ["version", "cmd", "rsv", "destination"]
    format_list = ["B", "B", "B", "socks5_address"]

    version: int
    cmd: int
    rsv: int
    destination: DomainAddress | UDPv4Address


@vp_compile
class CommandResponse(VariablePayload):
    """
    A reply to share the result of (an attempt at) executing a command.
    """

    names = ["version", "reply", "rsv", "bind"]
    format_list = ["B", "B", "B", "socks5_address"]

    version: int
    reply: int
    rsv: int
    # Fixed: this field is named "bind" (see ``names`` and callers reading ``response.bind``);
    # it was previously mis-annotated as "destination".
    bind: DomainAddress | UDPv4Address


@vp_compile
class UdpPacket(VariablePayload):
    """
    A general wrapper for UDP packets.
    """

    names = ["rsv", "frag", "destination", "data"]
    format_list = ["H", "B", "socks5_address", "raw"]

    rsv: int
    frag: int
    destination: DomainAddress | UDPv4Address
    data: bytes


class Socks5Address(Packer[Union[DomainAddress, tuple], Any]):
    """
    A socks5 address data packer.
    """

    def pack(self, data: DomainAddress | tuple) -> bytes:
        """
        Pack the given data.

        :raises InvalidAddressException: if the data could not be packed.
        """
        if isinstance(data, DomainAddress):
            host = data[0].encode()
            return struct.pack(">BB", ADDRESS_TYPE_DOMAIN_NAME, len(host)) + host + struct.pack(">H", data[1])
        if isinstance(data, tuple):
            return struct.pack(">B4sH", ADDRESS_TYPE_IPV4, socket.inet_aton(data[0]), data[1])
        msg = f"Could not pack address {data}"
        raise InvalidAddressException(msg)

    def unpack(self, data: bytes, offset: int, unpack_list: list, *args: Any) -> int:  # noqa: ANN401
        """
        Unpack the given bytes to an address, append it to ``unpack_list`` and return the new offset.

        :raises InvalidAddressException: if the address type is unknown or the host cannot be decoded.
        :raises IPv6AddressError: if the address is an (unsupported) IPv6 address.
        """
        address_type, = struct.unpack_from(">B", data, offset)
        offset += 1
        if address_type == ADDRESS_TYPE_IPV4:
            host = socket.inet_ntoa(data[offset:offset + 4])
            port, = struct.unpack_from(">H", data, offset + 4)
            offset += 6
            address = UDPv4Address(host, port)
        elif address_type == ADDRESS_TYPE_DOMAIN_NAME:
            domain_length, = struct.unpack_from(">B", data, offset)
            offset += 1
            try:
                host = data[offset:offset + domain_length].decode()
            except UnicodeDecodeError as e:
                # Report the raw bytes: the decoded host is unavailable precisely because decoding failed
                # (the previous message always interpolated a pre-initialized empty string here).
                msg = f"Could not decode host {data[offset:offset + domain_length]!r}"
                raise InvalidAddressException(msg) from e
            port, = struct.unpack_from(">H", data, offset + domain_length)
            offset += domain_length + 2
            address = DomainAddress(host, port)
        elif address_type == ADDRESS_TYPE_IPV6:
            raise IPv6AddressError
        else:
            msg = f"Could not unpack address type {address_type}"
            raise InvalidAddressException(msg)
        unpack_list.append(address)
        return offset


class InvalidAddressException(Exception):
    """
    An address could not be packed or unpacked.
    """


class IPv6AddressError(NotImplementedError):
    """
    An attempt was made to unpack an IPv6 address.
    """

    def __str__(self) -> str:
        """
        Get the textual representation of this error.
        """
        return "IPV6 support not implemented"


socks5_serializer = Serializer()
socks5_serializer.add_packer("list_of_chars", ListOf(DefaultStruct(">B")))
socks5_serializer.add_packer("socks5_address", Socks5Address())
4,919
Python
.py
141
28.907801
110
0.654983
Tribler/tribler
4,768
443
131
GPL-3.0
9/5/2024, 5:13:26 PM (Europe/Amsterdam)