id int64 0 458k | file_name stringlengths 4 119 | file_path stringlengths 14 227 | content stringlengths 24 9.96M | size int64 24 9.96M | language stringclasses 1 value | extension stringclasses 14 values | total_lines int64 1 219k | avg_line_length float64 2.52 4.63M | max_line_length int64 5 9.91M | alphanum_fraction float64 0 1 | repo_name stringlengths 7 101 | repo_stars int64 100 139k | repo_forks int64 0 26.4k | repo_open_issues int64 0 2.27k | repo_license stringclasses 12 values | repo_extraction_date stringclasses 433 values |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
23,000 | connection.py | Tribler_tribler/src/tribler/core/socks5/connection.py | from __future__ import annotations
import logging
from asyncio import BaseTransport, Protocol, WriteTransport, ensure_future
from typing import TYPE_CHECKING, cast
from ipv8.messaging.serialization import PackError
from tribler.core.socks5.conversion import (
REP_COMMAND_NOT_SUPPORTED,
REP_SUCCEEDED,
REQ_CMD_BIND,
REQ_CMD_CONNECT,
REQ_CMD_UDP_ASSOCIATE,
SOCKS_VERSION,
CommandRequest,
CommandResponse,
MethodsRequest,
MethodsResponse,
socks5_serializer,
)
from tribler.core.socks5.udp_connection import RustUDPConnection, SocksUDPConnection
if TYPE_CHECKING:
from tribler.core.socks5.server import Socks5Server
class ConnectionState:
    """
    Enumeration of possible SOCKS5 connection states.
    """

    # Initial state: no METHOD (handshake) request has been received yet.
    BEFORE_METHOD_REQUEST = "BEFORE_METHOD_REQUEST"
    # A METHOD request has been received (NOTE: not assigned anywhere in this module).
    METHOD_REQUESTED = "METHOD_REQUESTED"
    # The handshake completed; the next incoming message should be a SOCKS5 request.
    CONNECTED = "CONNECTED"
    # A proxy request (CONNECT/BIND/UDP ASSOCIATE) has been received.
    PROXY_REQUEST_RECEIVED = "PROXY_REQUEST_RECEIVED"
    # A BIND request has been answered with success.
    PROXY_REQUEST_ACCEPTED = "PROXY_REQUEST_ACCEPTED"
class Socks5Connection(Protocol):
    """
    SOCKS5 TCP Connection handler.

    Supports a subset of the SOCKS5 protocol, no authentication and no support for TCP BIND requests.
    """

    def __init__(self, socksserver: Socks5Server) -> None:
        """
        Create a socks 5 connection.

        :param socksserver: the server this connection belongs to (used for data dispatch and teardown).
        """
        super().__init__()
        self._logger = logging.getLogger(self.__class__.__name__)
        self._logger.setLevel(logging.WARNING)
        self.socksserver = socksserver
        self.transport: WriteTransport | None = None
        # Destination of an accepted TCP CONNECT request, if any (set in _try_request).
        self.connect_to = None
        self.udp_connection: RustUDPConnection | SocksUDPConnection | None = None

        self.state = ConnectionState.BEFORE_METHOD_REQUEST
        # Accumulates incoming bytes until a complete SOCKS5 message can be parsed.
        self.buffer = b""

    def connection_made(self, transport: BaseTransport) -> None:
        """
        Callback for when a connection is made.
        """
        self.transport = cast(WriteTransport, transport)

    def data_received(self, data: bytes) -> None:
        """
        Callback for when data comes in, try to form a message.
        """
        self.buffer = self.buffer + data
        while len(self.buffer) > 0:
            # We are at the initial state, so we expect a handshake request.
            if self.state == ConnectionState.BEFORE_METHOD_REQUEST:
                if not self._try_handshake():
                    break  # Not enough bytes so wait till we got more

            # The handshake is done, so we expect a SOCKS5 request next.
            elif self.state == ConnectionState.CONNECTED:
                if not self._try_request():
                    break  # Not enough bytes so wait till we got more
            # A TCP CONNECT was accepted: relay all further data to the tunnel community.
            elif self.connect_to:
                if self.socksserver.output_stream is not None:
                    # Swallow the data in case the tunnel community has not started yet
                    self.socksserver.output_stream.on_socks5_tcp_data(self, self.connect_to, self.buffer)
                self.buffer = b""
            else:
                self._logger.error("Throwing away buffer, not in CONNECTED or BEFORE_METHOD_REQUEST state")
                self.buffer = b""

    def _try_handshake(self) -> bool:
        """
        Try to read a HANDSHAKE request.

        :return: False if command could not been processes due to lack of bytes, True otherwise
        """
        try:
            request, offset = socks5_serializer.unpack_serializable(MethodsRequest, self.buffer)
        except PackError:
            # No (complete) HANDSHAKE received, so dont do anything
            return False

        # Consume the buffer
        self.buffer = self.buffer[offset:]

        # Only accept NO AUTH
        if request.version != SOCKS_VERSION or 0x00 not in request.methods:
            self._logger.error("Client has sent INVALID METHOD REQUEST")
            self.buffer = b""
            self.close()
            return False

        self._logger.info("Client has sent METHOD REQUEST")

        # Respond that we would like to use NO AUTHENTICATION (0x00)
        if self.state is not ConnectionState.CONNECTED:
            response = socks5_serializer.pack_serializable(MethodsResponse(SOCKS_VERSION, 0))
            cast(WriteTransport, self.transport).write(response)

        # We are connected now, the next incoming message will be a REQUEST
        self.state = ConnectionState.CONNECTED
        return True

    def _try_request(self) -> bool:
        """
        Try to consume a REQUEST message and respond whether we will accept the
        request.

        Will setup a TCP relay or an UDP socket to accommodate TCP RELAY and
        UDP ASSOCIATE requests. After a TCP relay is set up the handler will
        deactivate itself and change the Connection to a TcpRelayConnection.
        Further data will be passed on to that handler.

        :return: False if command could not been processes due to lack of bytes, True otherwise
        """
        self._logger.debug("Client has sent PROXY REQUEST")

        try:
            request, offset = socks5_serializer.unpack_serializable(CommandRequest, self.buffer)
        except PackError:
            return False

        self.buffer = self.buffer[offset:]

        self.state = ConnectionState.PROXY_REQUEST_RECEIVED

        if request.cmd == REQ_CMD_UDP_ASSOCIATE:
            # The UDP setup is asynchronous; the response is written once the socket is open.
            ensure_future(self.on_udp_associate_request(request))  # noqa: RUF006
        elif request.cmd == REQ_CMD_BIND:
            payload = CommandResponse(SOCKS_VERSION, REP_SUCCEEDED, 0, ("127.0.0.1", 1081))
            response = socks5_serializer.pack_serializable(payload)
            cast(WriteTransport, self.transport).write(response)
            self.state = ConnectionState.PROXY_REQUEST_ACCEPTED
        elif request.cmd == REQ_CMD_CONNECT:
            self._logger.info("Accepting TCP CONNECT request to %s:%d", *request.destination)
            self.connect_to = request.destination
            # The state stays PROXY_REQUEST_RECEIVED; data_received() will relay
            # further data to the tunnel community based on connect_to.
            payload = CommandResponse(SOCKS_VERSION, REP_SUCCEEDED, 0, ("127.0.0.1", 1081))
            response = socks5_serializer.pack_serializable(payload)
            cast(WriteTransport, self.transport).write(response)
        else:
            self.deny_request()

        return True

    def deny_request(self) -> None:
        """
        Deny the current SOCKS5 request.

        Sends a "command not supported" reply and returns to the CONNECTED state.
        """
        self.state = ConnectionState.CONNECTED

        payload = CommandResponse(SOCKS_VERSION, REP_COMMAND_NOT_SUPPORTED, 0, ("0.0.0.0", 0))
        response = socks5_serializer.pack_serializable(payload)
        cast(WriteTransport, self.transport).write(response)
        self._logger.error("DENYING SOCKS5 request")

    async def on_udp_associate_request(self, request: CommandRequest) -> None:
        """
        Callback for when the connection has associated.
        """
        # The DST.ADDR and DST.PORT fields contain the address and port that the client expects
        # to use to send UDP datagrams on for the association. The server MAY use this information
        # to limit access to the association.
        if self.socksserver and self.socksserver.rust_endpoint:
            self.udp_connection = RustUDPConnection(self.socksserver.rust_endpoint, self.socksserver.hops)
        else:
            self.udp_connection = SocksUDPConnection(self, request.destination)
        await self.udp_connection.open()
        # Report back the address of our UDP listen socket in the BIND fields of the reply.
        ip, _ = cast(WriteTransport, self.transport).get_extra_info('sockname')
        port = self.udp_connection.get_listen_port()

        self._logger.info("Accepting UDP ASSOCIATE request to %s:%d (BIND addr %s:%d)", ip, port, *request.destination)
        payload = CommandResponse(SOCKS_VERSION, REP_SUCCEEDED, 0, (ip, port))
        response = socks5_serializer.pack_serializable(payload)
        cast(WriteTransport, self.transport).write(response)

    def connection_lost(self, _: Exception | None) -> None:
        """
        Callback for when the connection is suddenly terminated.
        """
        self.socksserver.connection_lost(self)

    def close(self, reason: str = "unspecified") -> None:
        """
        Close the session.

        :param reason: a human-readable reason, used for logging only.
        """
        self._logger.info("Closing session, reason %s", reason)
        if self.udp_connection:
            self.udp_connection.close()
            self.udp_connection = None

        if self.transport:
            self.transport.close()
            self.transport = None
| 8,342 | Python | .py | 177 | 37.836158 | 119 | 0.656738 | Tribler/tribler | 4,768 | 443 | 131 | GPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,001 | aiohttp_connector.py | Tribler_tribler/src/tribler/core/socks5/aiohttp_connector.py | from __future__ import annotations
import socket
from asyncio import BaseTransport, wait_for
from typing import TYPE_CHECKING, Callable
from aiohttp import TCPConnector
from aiohttp.abc import AbstractResolver
from tribler.core.socks5.client import Socks5Client, Socks5ClientUDPConnection
if TYPE_CHECKING:
from aiohttp.abc import ResolveResult
class FakeResolver(AbstractResolver):
    """
    Pretend to resolve an address. Just echo it back.

    This keeps host names unresolved so they can be forwarded to the SOCKS5 proxy as-is.
    """

    async def resolve(self, host: str, port: int = 0, family: int = socket.AF_INET) -> list[ResolveResult]:
        """
        Resolve a host to itself.

        :param host: the host name, echoed back unchanged as both "hostname" and "host".
        :param port: the port to report in the result.
        :param family: the address family to report in the result.
        """
        return [{"hostname": host,
                 "host": host, "port": port,
                 "family": family, "proto": 0,
                 "flags": 0}]

    async def close(self) -> None:
        """
        Close this resolver. Nothing needs to be released.
        """
class Socks5Connector(TCPConnector):
    """
    A connector for Socks5 to create clients.
    """

    def __init__(self, proxy_addr: tuple, **kwargs) -> None:
        """
        Create a new connector.

        :param proxy_addr: the (host, port) address of the SOCKS5 proxy to connect through.
        """
        # Host names must reach the proxy unresolved, so install the echo resolver.
        kwargs["resolver"] = FakeResolver()

        super().__init__(**kwargs)
        self.proxy_addr = proxy_addr

    async def _wrap_create_connection(self,  # type: ignore[override]
                                      protocol_factory: Callable[[], Socks5ClientUDPConnection],
                                      **kwargs) -> tuple[BaseTransport, Socks5ClientUDPConnection]:
        """
        Create a transport and its associated connection.

        Connects through the SOCKS5 proxy instead of directly, then hands the
        established transport over to the aiohttp protocol.
        """
        client = Socks5Client(self.proxy_addr, lambda *_: None)

        # The FakeResolver echoed the original host/port into the address infos.
        host, port = kwargs.pop("addr_infos")[0][-1]
        if "timeout" in kwargs and hasattr(kwargs["timeout"], "sock_connect"):
            await wait_for(client.connect_tcp((host, port)), timeout=kwargs["timeout"].sock_connect)
        else:
            await client.connect_tcp((host, port))

        proto = protocol_factory()
        transport = client.transport
        # Re-wire the client's transport to the aiohttp protocol (deliberate private access).
        transport._protocol = proto  # noqa: SLF001
        proto.transport = transport
        return transport, proto
| 2,118 | Python | .py | 53 | 31.226415 | 107 | 0.617073 | Tribler/tribler | 4,768 | 443 | 131 | GPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,002 | server.py | Tribler_tribler/src/tribler/core/socks5/server.py | from __future__ import annotations
import logging
from asyncio import get_event_loop
from typing import TYPE_CHECKING, List
from tribler.core.socks5.connection import Socks5Connection
if TYPE_CHECKING:
from asyncio.base_events import Server
from ipv8_rust_tunnels.endpoint import RustEndpoint
from tribler.core.tunnel.dispatcher import TunnelDispatcher
class Socks5Server:
    """
    This object represents a Socks5 server.
    """

    def __init__(self, hops: int, port: int | None = None, output_stream: TunnelDispatcher | None = None,
                 rust_endpoint: RustEndpoint | None = None) -> None:
        """
        Create a new Socks5 server.

        :param hops: the number of tunnel hops for this server's traffic.
        :param port: the TCP port to listen on; None (or 0) lets the OS pick one.
        :param output_stream: the dispatcher that receives SOCKS5 TCP data, if available.
        :param rust_endpoint: optional Rust-based endpoint used for UDP associations.
        """
        self._logger = logging.getLogger(self.__class__.__name__)
        self.hops = hops
        self.port = port
        self.output_stream = output_stream
        self.server: Server | None = None
        self.sessions: List[Socks5Connection] = []
        self.rust_endpoint = rust_endpoint

    async def start(self) -> None:
        """
        Start the socks5 server by listening on the specified TCP ports.
        """
        def build_protocol() -> Socks5Connection:
            # Track every accepted connection so it can be closed on stop().
            socks5connection = Socks5Connection(self)
            self.sessions.append(socks5connection)
            return socks5connection

        self.server = await get_event_loop().create_server(build_protocol, "127.0.0.1",
                                                           self.port)  # type: ignore[arg-type]
        server_socket = self.server.sockets[0]
        # The OS may have assigned a port (when self.port was None/0): record the actual one.
        _, self.port = server_socket.getsockname()[:2]
        self._logger.info("Started SOCKS5 server on port %i", self.port)

    async def stop(self) -> None:
        """
        Stop the socks5 server.
        """
        # Close sessions with an explicit loop instead of a side-effect-only list
        # comprehension, which hid the intent and allocated a useless list.
        for session in self.sessions:
            session.close("stopping")
        self.sessions = []

        if self.server:
            self.server.close()
            await self.server.wait_closed()

    def connection_lost(self, socks5connection: Socks5Connection) -> None:
        """
        Close the connection if the connection drops.
        """
        self._logger.debug("SOCKS5 TCP connection lost")
        if socks5connection in self.sessions:
            self.sessions.remove(socks5connection)
            socks5connection.close()
| 2,282 | Python | .py | 55 | 32.436364 | 105 | 0.631555 | Tribler/tribler | 4,768 | 443 | 131 | GPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,003 | community.py | Tribler_tribler/src/tribler/core/knowledge/community.py | import random
from binascii import unhexlify
from cryptography.exceptions import InvalidSignature
from ipv8.community import Community, CommunitySettings
from ipv8.keyvault.private.libnaclkey import LibNaCLSK
from ipv8.lazy_community import lazy_wrapper
from ipv8.types import Key, Peer
from pony.orm import db_session
from tribler.core.database.layers.knowledge import Operation, ResourceType
from tribler.core.database.tribler_database import TriblerDatabase
from tribler.core.knowledge.operations_requests import OperationsRequests, PeerValidationError
from tribler.core.knowledge.payload import (
RawStatementOperationMessage,
RequestStatementOperationMessage,
StatementOperation,
StatementOperationMessage,
StatementOperationSignature,
)
REQUESTED_OPERATIONS_COUNT = 10
CLEAR_ALL_REQUESTS_INTERVAL = 10 * 60 # 10 minutes
class KnowledgeCommunitySettings(CommunitySettings):
    """
    Settings for the knowledge community.
    """

    # The database that stores knowledge statements and their operations.
    db: TriblerDatabase
    # The key used to sign our own statement operations.
    key: LibNaCLSK
    # Interval (in seconds) between gossip requests for operations.
    request_interval: int = 5
class KnowledgeCommunity(Community):
    """
    Community for disseminating knowledge across the network.
    """

    community_id = unhexlify("d7f7bdc8bcd3d9ad23f06f25aa8aab6754eb23a0")
    settings_class = KnowledgeCommunitySettings

    def __init__(self, settings: KnowledgeCommunitySettings) -> None:
        """
        Create a new knowledge community.
        """
        super().__init__(settings)
        self.db = settings.db
        self.key = settings.key
        self.requests = OperationsRequests()

        self.add_message_handler(RawStatementOperationMessage, self.on_message)
        self.add_message_handler(RequestStatementOperationMessage, self.on_request)

        self.register_task("request_operations", self.request_operations, interval=settings.request_interval)
        self.register_task("clear_requests", self.requests.clear_requests, interval=CLEAR_ALL_REQUESTS_INTERVAL)
        self.logger.info("Knowledge community initialized")

    def request_operations(self) -> None:
        """
        Contact peers to request operations.

        Picks one random peer and registers how many responses we accept from it.
        """
        if not self.get_peers():
            return

        peer = random.choice(self.get_peers())
        self.requests.register_peer(peer, REQUESTED_OPERATIONS_COUNT)
        self.logger.info("-> request %d operations from peer %s", REQUESTED_OPERATIONS_COUNT, peer.mid.hex())
        self.ez_send(peer, RequestStatementOperationMessage(count=REQUESTED_OPERATIONS_COUNT))

    @lazy_wrapper(RawStatementOperationMessage)
    def on_message(self, peer: Peer, raw: RawStatementOperationMessage) -> None:
        """
        Callback for when a raw statement operation message is received.

        Verifies the sender's response budget, the signature over the raw packed
        operation, and the operation's contents, before storing it.
        """
        operation, _ = self.serializer.unpack_serializable(StatementOperation, raw.operation)
        signature, _ = self.serializer.unpack_serializable(StatementOperationSignature, raw.signature)
        self.logger.debug("<- message received: %s", str(operation))
        try:
            remote_key = self.crypto.key_from_public_bin(operation.creator_public_key)

            self.requests.validate_peer(peer)
            # The signature covers the raw packed bytes, not the re-serialized operation.
            self.verify_signature(packed_message=raw.operation, key=remote_key, signature=signature.signature,
                                  operation=operation)
            self.validate_operation(operation)

            with db_session():
                is_added = self.db.knowledge.add_operation(operation, signature.signature)
                if is_added:
                    s = f"+ operation added ({operation.object!r} \"{operation.predicate}\" {operation.subject!r})"
                    self.logger.info(s)

        except PeerValidationError as e:  # peer has exhausted his response count
            self.logger.warning(e)
        except ValueError as e:  # validation error
            self.logger.warning(e)
        except InvalidSignature as e:  # signature verification error
            self.logger.warning(e)

    @lazy_wrapper(RequestStatementOperationMessage)
    def on_request(self, peer: Peer, operation: RequestStatementOperationMessage) -> None:
        """
        Callback for when statement operations are requested.

        Responds with up to REQUESTED_OPERATIONS_COUNT random operations from the database.
        """
        operations_count = min(max(1, operation.count), REQUESTED_OPERATIONS_COUNT)
        self.logger.debug("<- peer %s requested %d operations", peer.mid.hex(), operations_count)

        with db_session:
            random_operations = self.db.knowledge.get_operations_for_gossip(count=operations_count)

            self.logger.debug("Response %d operations", len(random_operations))
            sent_operations = []
            for op in random_operations:
                try:
                    # NOTE(review): predicate is taken from op.statement.object.type rather than
                    # a dedicated predicate field -- confirm this matches the storage schema.
                    operation = StatementOperation(
                        subject_type=op.statement.subject.type,
                        subject=op.statement.subject.name,
                        predicate=op.statement.object.type,
                        object=op.statement.object.name,
                        operation=op.operation,
                        clock=op.clock,
                        creator_public_key=op.peer.public_key,
                    )
                    self.validate_operation(operation)
                    signature = StatementOperationSignature(signature=op.signature)
                    self.ez_send(peer, StatementOperationMessage(operation=operation, signature=signature))
                    sent_operations.append(operation)
                except ValueError as e:  # validation error
                    self.logger.warning(e)
            if sent_operations:
                sent_tags_info = ", ".join(f"({t})" for t in sent_operations)
                self.logger.debug("-> sent operations (%s) to peer: %s", sent_tags_info, peer.mid.hex())

    @staticmethod
    def validate_operation(operation: StatementOperation) -> None:
        """
        Check if an operation is valid and raise an exception if it is not.

        :raises ValueError: If the operation failed to validate.
        """
        validate_resource(operation.subject)
        validate_resource(operation.object)
        validate_operation(operation.operation)
        validate_resource_type(operation.subject_type)
        validate_resource_type(operation.predicate)

    def verify_signature(self, packed_message: bytes, key: Key, signature: bytes,
                         operation: StatementOperation) -> None:
        """
        Check if a signature is valid for the given message and raise an exception if it is not.

        :raises InvalidSignature: If the message is not correctly signed.
        """
        if not self.crypto.is_valid_signature(key, packed_message, signature):
            msg = f"Invalid signature for {operation}"
            raise InvalidSignature(msg)

    def sign(self, operation: StatementOperation) -> bytes:
        """
        Sign the given operation using our key.
        """
        packed = self.serializer.pack_serializable(operation)
        return self.crypto.create_signature(self.key, packed)
def validate_resource(resource: str) -> None:
    """
    Check that a resource name's length falls within the allowed bounds.

    :raises ValueError: If the case the resource is not valid.
    """
    if not (MIN_RESOURCE_LENGTH <= len(resource) <= MAX_RESOURCE_LENGTH):
        raise ValueError(f"Tag length should be in range [{MIN_RESOURCE_LENGTH}..{MAX_RESOURCE_LENGTH}]")
def is_valid_resource(resource: str) -> bool:
    """
    Return whether the given resource name is valid (i.e., passes ``validate_resource``).
    """
    try:
        validate_resource(resource)
    except ValueError:
        return False
    else:
        return True
def validate_operation(operation: int) -> None:
    """
    Validate the incoming operation.

    :raises ValueError: If the case the operation is not valid.
    """
    # Constructing the enum raises ValueError for unknown operation values.
    Operation(operation)
def validate_resource_type(t: int) -> None:
    """
    Validate the resource type.

    :raises ValueError: If the case the type is not valid.
    """
    # Constructing the enum raises ValueError for unknown type values.
    ResourceType(t)
# Allowed length range (inclusive) for resource names, enforced by validate_resource().
MIN_RESOURCE_LENGTH = 2
MAX_RESOURCE_LENGTH = 50
| 8,061 | Python | .py | 169 | 38.449704 | 115 | 0.67401 | Tribler/tribler | 4,768 | 443 | 131 | GPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,004 | payload.py | Tribler_tribler/src/tribler/core/knowledge/payload.py | from ipv8.messaging.lazy_payload import VariablePayload, vp_compile
@vp_compile
class StatementOperation(VariablePayload):
    """
    A single operation on a (subject, predicate, object) knowledge statement.

    Do not change the format of the StatementOperation, because this will result in an invalid signature.
    """

    names = ["subject_type", "subject", "predicate", "object", "operation", "clock", "creator_public_key"]
    format_list = ["q", "varlenHutf8", "q", "varlenHutf8", "q", "q", "74s"]

    subject_type: int  # ResourceType enum
    subject: str
    predicate: int  # ResourceType enum
    object: str
    operation: int  # Operation enum
    clock: int  # This is the lamport-like clock that unique for each quadruple {public_key, subject, predicate, object}
    creator_public_key: bytes
@vp_compile
class StatementOperationSignature(VariablePayload):
    """
    A single signature of an unknown statement.
    """

    names = ["signature"]
    format_list = ["64s"]

    signature: bytes  # A fixed 64-byte signature over a packed StatementOperation.
@vp_compile
class RawStatementOperationMessage(VariablePayload):
    """
    RAW payload class is used for reducing ipv8 unpacking operations.
    For more information take a look at: https://github.com/Tribler/tribler/pull/6396#discussion_r728334323
    """

    names = ["operation", "signature"]
    format_list = ["varlenH", "varlenH"]

    operation: bytes  # A still-packed StatementOperation, unpacked lazily by the receiver.
    signature: bytes  # A still-packed StatementOperationSignature.
    # Intentionally shares msg_id 2 with StatementOperationMessage: both describe the
    # same wire message, this class being the raw (receive-side) view.
    msg_id = 2
@vp_compile
class StatementOperationMessage(VariablePayload):
    """
    An operation coupled to a signature.
    """

    names = ["operation", "signature"]
    format_list = [StatementOperation, StatementOperationSignature]

    # Nested payloads serialized through their own VariablePayload formats
    # (previous ``bytes`` annotations did not match ``format_list``).
    operation: StatementOperation
    signature: StatementOperationSignature
    # Shares msg_id 2 with RawStatementOperationMessage (same wire message).
    msg_id = 2
@vp_compile
class RequestStatementOperationMessage(VariablePayload):
    """
    Request a given number of statements.
    """

    names = ["count"]
    format_list = ["q"]

    count: int  # The number of operations being requested.
    msg_id = 1
| 1,836 | Python | .py | 53 | 30.075472 | 120 | 0.705448 | Tribler/tribler | 4,768 | 443 | 131 | GPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,005 | content_bundling.py | Tribler_tribler/src/tribler/core/knowledge/content_bundling.py | from __future__ import annotations
import logging
import math
import re
from collections import defaultdict
from itertools import chain
from typing import Dict, Iterable, TypedDict, cast
logger = logging.getLogger(__name__)
class DictWithName(TypedDict):
    """
    A dictionary that has a "name" key.
    """

    name: str  # The (title) name of the content item.
def _words_pattern(min_word_length: int = 3) -> str:
return r"[^\W\d_]{" + str(min_word_length) + ",}"
def _create_name(content_list: list[DictWithName], number: str, min_word_length: int = 4) -> str:
    """
    Create a name for a group of content items based on the most common word in the title.

    If several most frequently occurring words are found, preference is given to the longest word.

    :param content_list: list of content items
    :param number: group number
    :param min_word_length: minimum word length to be considered as a candidate for the group name
    :returns: created group name. The name is capitalized.
    """
    pattern = _words_pattern(min_word_length)
    word_counts: defaultdict[str, int] = defaultdict(int)
    for item in content_list:
        # Count each word at most once per title, case-insensitively.
        for word in {match.lower() for match in re.findall(pattern, item["name"]) if match}:
            word_counts[word] += 1

    if not word_counts:
        return number

    highest = max(word_counts.values())
    best_word = max((word for word, count in word_counts.items() if count == highest), key=len)
    group_name = f"{best_word} {number}"
    # Only the first character is upper-cased; the rest of the name is preserved.
    return group_name[0].capitalize() + group_name[1:]
def calculate_diversity(content_list: Iterable[DictWithName], min_word_length: int = 4) -> float:
    """
    Calculate the diversity of words in the titles of the content list.

    The diversity calculation based on Corrected Type-Token Ratio (CTTR) formula:
    unique words divided by the square root of twice the total word count.

    :param content_list: list of content items. Each item should have a "name" key with a title.
    :param min_word_length: minimum word length to be considered as a word in the title.
    :returns: diversity of words in the titles
    """
    pattern = _words_pattern(min_word_length)
    all_words: list[str] = []
    for item in content_list:
        all_words.extend(match.lower() for match in re.findall(pattern, item["name"]) if match)

    if not all_words:
        return 0
    return len(set(all_words)) / math.sqrt(2 * len(all_words))
def group_content_by_number(content_list: Iterable[dict],
                            min_group_size: int = 2) -> Dict[str, list[DictWithName]]:
    """
    Group content by the first number in the title, keeping the order in which groups were found.

    :param content_list: list of content items. Items without a "name" key, or whose title
                         contains no digits, are ignored.
    :param min_group_size: minimum number of content items in a group; smaller groups are dropped.
    :returns: mapping from generated group name to its list of content items
    """
    buckets: defaultdict[str, list[DictWithName]] = defaultdict(list)
    for item in content_list:
        if "name" not in item:
            continue
        match = re.search(r"\d+", item["name"])
        if match is None:
            continue
        # Normalize leading zeros ("007" -> "7"); an all-zero number maps to "0".
        key = match.group(0).lstrip("0") or "0"
        buckets[key].append(cast(DictWithName, item))

    return {_create_name(items, key): items
            for key, items in buckets.items() if len(items) >= min_group_size}
| 3,575 | Python | .py | 70 | 43.942857 | 120 | 0.664745 | Tribler/tribler | 4,768 | 443 | 131 | GPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,006 | operations_requests.py | Tribler_tribler/src/tribler/core/knowledge/operations_requests.py | from __future__ import annotations
from collections import defaultdict
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from ipv8.types import Peer
class PeerValidationError(ValueError):
    """
    A peer has exceeded their number of responses.

    Raised by OperationsRequests.validate_peer when a peer's response budget is exhausted.
    """
class OperationsRequests:
    """
    This class is design for controlling requests during pull-based gossip.

    The main idea:
    * Before a request, a client registered a peer with some number of expected responses
    * While a response, the controller decrements number of expected responses for this peer
    * The controller validates response by checking that expected responses for this peer is greater then 0
    """

    def __init__(self) -> None:
        """
        Initialize the per-peer response budget bookkeeping.
        """
        self.requests: dict[Peer, int] = defaultdict(int)

    def register_peer(self, peer: Peer, number_of_responses: int) -> None:
        """
        Set the number of allowed responses for a given peer.
        """
        self.requests[peer] = number_of_responses

    def validate_peer(self, peer: Peer) -> None:
        """
        Decrement the response budget of a peer, rejecting peers whose budget is exhausted.

        :raises PeerValidationError: When the given peer has no responses remaining.
        """
        remaining = self.requests[peer]
        if remaining <= 0:
            raise PeerValidationError(f"Peer has exhausted his response count {peer}")
        self.requests[peer] = remaining - 1

    def clear_requests(self) -> None:
        """
        Reset all allowed responses for all peers to 0.
        """
        self.requests = defaultdict(int)
| 1,721 | Python | .py | 41 | 34.804878 | 117 | 0.673065 | Tribler/tribler | 4,768 | 443 | 131 | GPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,007 | knowledge_endpoint.py | Tribler_tribler/src/tribler/core/knowledge/restapi/knowledge_endpoint.py | from __future__ import annotations
import binascii
from typing import TYPE_CHECKING
from aiohttp import web
from aiohttp_apispec import docs
from ipv8.REST.schema import schema
from marshmallow import Schema
from marshmallow.fields import Boolean, List, String
from pony.orm import db_session
from typing_extensions import TypeAlias
from tribler.core.database.layers.knowledge import Operation, ResourceType
from tribler.core.knowledge.community import KnowledgeCommunity, is_valid_resource
from tribler.core.knowledge.payload import StatementOperation
from tribler.core.restapi.rest_endpoint import HTTP_BAD_REQUEST, MAX_REQUEST_SIZE, RESTEndpoint, RESTResponse
if TYPE_CHECKING:
from tribler.core.database.tribler_database import TriblerDatabase
from tribler.core.restapi.rest_manager import TriblerRequest
RequestType: TypeAlias = TriblerRequest[tuple[TriblerDatabase, KnowledgeCommunity]]
class HandledErrorSchema(Schema):
    """
    The REST schema for knowledge errors.
    """

    error = String(description="Optional field describing any failures that may have occurred", required=True)
class KnowledgeEndpoint(RESTEndpoint):
    """
    Top-level endpoint for knowledge management.
    """

    path = "/api/knowledge"

    def __init__(self, middlewares: tuple = (), client_max_size: int = MAX_REQUEST_SIZE) -> None:
        """
        Create a new knowledge endpoint.
        """
        super().__init__(middlewares, client_max_size)

        self.db: TriblerDatabase | None = None
        self.required_components = ("db", )
        self.community: KnowledgeCommunity | None = None

        self.app.add_routes(
            [
                web.patch("/{infohash}", self.update_knowledge_entries),
                web.get("/{infohash}/tag_suggestions", self.get_tag_suggestions),
            ]
        )

    @staticmethod
    def validate_infohash(infohash: str) -> tuple[bool, RESTResponse | None]:
        """
        Check if the given bytes are a string of 40 HEX-character bytes.

        :returns: (True, None) on success, or (False, an error response) on failure.
        """
        try:
            # NOTE(review): only the length is checked; len() cannot raise binascii.Error,
            # so the except branch below appears unreachable and 40-character non-hex
            # strings pass validation. Presumably unhexlify(infohash) was once called
            # here to verify the characters -- confirm and restore if so.
            if len(infohash) != 40:
                return False, RESTResponse({"error": "Invalid infohash"}, status=HTTP_BAD_REQUEST)
        except binascii.Error:
            return False, RESTResponse({"error": "Invalid infohash"}, status=HTTP_BAD_REQUEST)
        return True, None

    @docs(
        tags=["General"],
        summary="Update the metadata associated with a particular torrent.",
        responses={
            200: {
                "schema": schema(UpdateTagsResponse={"success": Boolean()})
            },
            HTTP_BAD_REQUEST: {
                "schema": HandledErrorSchema, "example": {"error": "Invalid tag length"}},
        },
        description="This endpoint updates a particular torrent with the provided metadata."
    )
    async def update_knowledge_entries(self, request: RequestType) -> RESTResponse:
        """
        Update the metadata associated with a particular torrent.
        """
        params = await request.json()
        infohash = request.match_info["infohash"]
        ih_valid, error_response = KnowledgeEndpoint.validate_infohash(infohash)
        if not ih_valid:
            return error_response

        # Validate whether the size of the tag is within the allowed range and filter out duplicate tags.
        statements = []
        self._logger.info("Statements about %s: %s", infohash, str(params["statements"]))
        for statement in params["statements"]:
            obj = statement["object"]
            if not is_valid_resource(obj):
                return RESTResponse({"error": "Invalid tag length"}, status=HTTP_BAD_REQUEST)

            statements.append(statement)

        self.modify_statements(request.context[0], infohash, statements)

        return RESTResponse({"success": True})

    @db_session
    def modify_statements(self, db: TriblerDatabase, infohash: str, statements: list) -> None:
        """
        Modify the statements of a particular content item.

        Computes the diff between stored and submitted statements, and records a
        signed ADD/REMOVE operation for every difference. No-op if the knowledge
        community is not available.
        """
        if not self.community:
            return

        # First, get the current statements and compute the diff between the old and new statements
        old_statements = db.knowledge.get_statements(subject_type=ResourceType.TORRENT, subject=infohash)
        old_statements = {(stmt.predicate, stmt.object) for stmt in old_statements}
        self._logger.info("Old statements: %s", old_statements)
        new_statements = {(stmt["predicate"], stmt["object"]) for stmt in statements}
        self._logger.info("New statements: %s", new_statements)
        added_statements = new_statements - old_statements
        removed_statements = old_statements - new_statements

        # Create individual statement operations for the added/removed statements
        public_key = self.community.key.pub().key_to_bin()
        for stmt in added_statements.union(removed_statements):
            predicate, obj = stmt
            type_of_operation = Operation.ADD if stmt in added_statements else Operation.REMOVE
            operation = StatementOperation(subject_type=ResourceType.TORRENT, subject=infohash,
                                           predicate=predicate,
                                           object=obj, operation=type_of_operation, clock=0,
                                           creator_public_key=public_key)
            # Advance the local lamport-like clock for this quadruple before signing.
            operation.clock = db.knowledge.get_clock(operation) + 1
            signature = self.community.sign(operation)
            db.knowledge.add_operation(operation, signature, is_local_peer=True)

        self._logger.info("Added statements: %s", added_statements)
        self._logger.info("Removed statements: %s", removed_statements)

    @docs(
        tags=["General"],
        summary="Get tag suggestions for a torrent with a particular infohash.",
        responses={
            200: {
                "schema": schema(SuggestedTagsResponse={"suggestions": List(String)})
            },
            HTTP_BAD_REQUEST: {
                "schema": HandledErrorSchema, "example": {"error": "Invalid infohash"}},
        },
        description="This endpoint updates a particular torrent with the provided tags."
    )
    async def get_tag_suggestions(self, request: RequestType) -> RESTResponse:
        """
        Get suggested tags for a particular torrent.
        """
        infohash = request.match_info["infohash"]
        ih_valid, error_response = KnowledgeEndpoint.validate_infohash(infohash)
        if not ih_valid:
            return error_response

        with db_session:
            suggestions = request.context[0].knowledge.get_suggestions(subject=infohash, predicate=ResourceType.TAG)
            return RESTResponse({"suggestions": suggestions})
| 6,741 | Python | .py | 136 | 40.058824 | 116 | 0.660027 | Tribler/tribler | 4,768 | 443 | 131 | GPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,008 | tribler_database.py | Tribler_tribler/src/tribler/core/database/tribler_database.py | from __future__ import annotations
import logging
import os
from pathlib import Path
from typing import TYPE_CHECKING, cast
from pony import orm
from pony.orm import Database, db_session
from tribler.core.database.layers.health import HealthDataAccessLayer
from tribler.core.database.layers.knowledge import KnowledgeDataAccessLayer
if TYPE_CHECKING:
import dataclasses
    # Type-checking-only stub that mirrors the pony entity created in
    # TriblerDatabase.define_binding(); never instantiated at runtime.
    @dataclasses.dataclass
    class Misc:
        """
        A miscellaneous key value mapping in the database.
        """

        name: str
        value: str | None

        def __init__(self, name: str) -> None: ...  # noqa: D107

        @staticmethod
        def get(name: str) -> Misc | None: ...  # noqa: D102

        @staticmethod
        def get_for_update(name: str) -> Misc | None: ...  # noqa: D102
MEMORY = ":memory:"
class TriblerDatabase:
    """
    A wrapper for the Tribler database.

    Bundles a Pony ``Database`` instance together with the knowledge and health
    data-access layers, plus a small Misc key/value table that (among other
    things) stores the schema version.
    """
    # The schema version written into a freshly created database.
    CURRENT_VERSION = 1
    _SCHEME_VERSION_KEY = "scheme_version"
    def __init__(self, filename: str | None = None, *, create_tables: bool = True, **generate_mapping_kwargs) -> None:
        """
        Create a new tribler database.

        :param filename: path to the sqlite file; ``None`` (or ":memory:") selects an in-memory database.
        :param create_tables: forwarded to Pony's ``generate_mapping``.
        :param generate_mapping_kwargs: extra keyword arguments for Pony's ``generate_mapping``.
        """
        self.instance = Database()
        # Layer order matters: the health layer is built on top of the knowledge layer.
        self.knowledge = KnowledgeDataAccessLayer(self.instance)
        self.health = HealthDataAccessLayer(self.knowledge)
        self.Misc = self.define_binding(self.instance)
        # Convenience aliases for the entities defined by the layers above.
        self.Peer = self.knowledge.Peer
        self.Statement = self.knowledge.Statement
        self.Resource = self.knowledge.Resource
        self.StatementOp = self.knowledge.StatementOp
        self.TorrentHealth = self.health.TorrentHealth
        self.Tracker = self.health.Tracker
        filename = filename or MEMORY
        # A missing file (or an in-memory db) means we must create the schema from scratch.
        db_does_not_exist = filename == MEMORY or not os.path.isfile(filename)
        if filename != MEMORY:
            Path(filename).parent.mkdir(parents=True, exist_ok=True)
        self.instance.bind(provider='sqlite', filename=filename, create_db=db_does_not_exist)
        generate_mapping_kwargs['create_tables'] = create_tables
        self.instance.generate_mapping(**generate_mapping_kwargs)
        self.logger = logging.getLogger(self.__class__.__name__)
        if db_does_not_exist:
            # Fresh database: record the current schema version.
            self.fill_default_data()
    @staticmethod
    def define_binding(db: Database) -> type[Misc]:
        """
        Define common bindings.

        Declares the Misc key/value entity on the given Pony database instance.
        """
        class Misc(db.Entity):
            name = orm.PrimaryKey(str)
            value = orm.Optional(str)
        return Misc
    @db_session
    def fill_default_data(self) -> None:
        """
        Add a misc entry for the database version.
        """
        self.logger.info("Filling the DB with the default data")
        self.set_misc(self._SCHEME_VERSION_KEY, str(self.CURRENT_VERSION))
    def get_misc(self, key: str, default: str | None = None) -> str | None:
        """
        Retrieve a value from the database or return the default value if it is not found.
        """
        data = self.Misc.get(name=key)
        return data.value if data else default
    def set_misc(self, key: str, value: str) -> None:
        """
        Set or add the value of a given key.
        """
        # get_for_update acquires the row for writing; fall back to creating the row.
        obj = self.Misc.get_for_update(name=key) or self.Misc(name=key)
        obj.value = value
    @property
    def version(self) -> int:
        """
        Get the database version.
        """
        # "0" is returned for databases that predate the scheme_version key.
        return int(cast(str, self.get_misc(key=self._SCHEME_VERSION_KEY, default="0")))
    @version.setter
    def version(self, value: int) -> None:
        """
        Set the database version.

        :raises TypeError: if the given value is not an integer.
        """
        if not isinstance(value, int):
            msg = "DB version should be integer"
            raise TypeError(msg)
        self.set_misc(key=self._SCHEME_VERSION_KEY, value=str(value))
    def shutdown(self) -> None:
        """
        Disconnect from the database.
        """
        self.instance.disconnect()
| 3,901 | Python | .py | 102 | 30.411765 | 118 | 0.631174 | Tribler/tribler | 4,768 | 443 | 131 | GPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,009 | queries.py | Tribler_tribler/src/tribler/core/database/queries.py | from __future__ import annotations
import re
from dataclasses import dataclass, field
fts_query_re = re.compile(r"\w+", re.UNICODE)
@dataclass
class Query:
"""
A simple unpacking of a user query.
"""
original_query: str
tags: set[str] = field(default_factory=set)
fts_text: str = ""
def to_fts_query(text: str | None) -> str | None:
"""
Convert the given text to FTS-compatible text.
"""
if not text:
return None
words = [f'"{w}"' for w in fts_query_re.findall(text) if w]
if not words:
return None
return " ".join(words)
| 599 | Python | .py | 22 | 22.727273 | 63 | 0.642606 | Tribler/tribler | 4,768 | 443 | 131 | GPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,010 | serialization.py | Tribler_tribler/src/tribler/core/database/serialization.py | from __future__ import annotations
import struct
from binascii import hexlify
from datetime import datetime, timedelta
from typing import TYPE_CHECKING
from ipv8.keyvault.crypto import default_eccrypto
from ipv8.messaging.lazy_payload import VariablePayload, vp_compile
from ipv8.messaging.serialization import VarLenUtf8, default_serializer
from typing_extensions import Self
if TYPE_CHECKING:
from ipv8.types import PrivateKey
# Register the "varlenIutf8" packer (UTF-8 text with a 4-byte big-endian length prefix)
# used by the payload format lists below.
default_serializer.add_packer("varlenIutf8", VarLenUtf8(">I"))
# Reference point for serializing timestamps as integer second counts.
EPOCH = datetime(1970, 1, 1)  # noqa: DTZ001

SIGNATURE_SIZE = 64
NULL_SIG = b'\x00' * 64
NULL_KEY = b'\x00' * 64

# Metadata type discriminators (wire-format values).
# Should have been an enum, but in Python its unwieldy.
TYPELESS = 100
CHANNEL_NODE = 200
METADATA_NODE = 210
COLLECTION_NODE = 220
JSON_NODE = 230
CHANNEL_DESCRIPTION = 231
BINARY_NODE = 240
CHANNEL_THUMBNAIL = 241
REGULAR_TORRENT = 300
CHANNEL_TORRENT = 400
DELETED = 500
SNIPPET = 600


def time2int(date_time: datetime, epoch: datetime = EPOCH) -> int:
    """
    Serialize a datetime as a whole number of seconds since ``epoch``.

    :param date_time: The datetime object to convert.
    :param epoch: The epoch time, defaults to Jan 1, 1970.
    :return: The int representation of date_time.

    WARNING: TZ-aware timestamps are madhouse...
    """
    delta = date_time - epoch
    return int(delta.total_seconds())


def int2time(timestamp: int, epoch: datetime = EPOCH) -> datetime:
    """
    Deserialize an integer second count back into a datetime relative to ``epoch``.

    :param timestamp: The timestamp to be converted.
    :param epoch: The epoch time, defaults to Jan 1, 1970.
    :return: The datetime representation of timestamp.
    """
    return epoch + timedelta(seconds=timestamp)
class UnknownBlobTypeException(Exception):
    """
    A block was received with an unknown blob type.

    Raised by ``read_payload_with_offset`` when the leading metadata-type
    discriminator of a payload is not present in ``METADATA_TYPE_TO_PAYLOAD_CLASS``.
    We only support type:

     - 300, REGULAR_TORRENT
    """
def read_payload_with_offset(data: bytes, offset: int = 0) -> tuple[TorrentMetadataPayload, int]:
    """
    Read the next payload from the data buffer (at the given offset).

    :param data: a buffer holding one or more serialized payloads.
    :param offset: the position in ``data`` where the next payload starts.
    :return: the deserialized payload and the offset just past it (signature included).
    :raises UnknownBlobTypeException: if the metadata type has no registered payload class.
    """
    # First we have to determine the actual payload type: the leading two bytes
    # are a big-endian unsigned short discriminator.
    metadata_type = struct.unpack_from('>H', data, offset=offset)[0]
    payload_class = METADATA_TYPE_TO_PAYLOAD_CLASS.get(metadata_type)
    if payload_class is not None:
        payload, offset = default_serializer.unpack_serializable(payload_class, data, offset=offset)
        # The signature trails the serialized fields; use the module constant
        # instead of repeating the magic number 64.
        payload.signature = data[offset: offset + SIGNATURE_SIZE]
        return payload, offset + SIGNATURE_SIZE

    # Unknown metadata type, raise exception
    raise UnknownBlobTypeException
@vp_compile
class SignedPayload(VariablePayload):
    """
    A payload that captures a public key and metadata type and supports adding a signature over its contents.

    This payload can be extended to allow more data to be signed.
    """
    names = ["metadata_type", "reserved_flags", "public_key"]
    format_list = ["H", "H", "64s"]
    # NOTE: the signature is deliberately absent from names/format_list, so it is NOT
    # part of the packed representation; it is appended/stripped manually (see
    # from_signed_blob and read_payload_with_offset).
    signature: bytes = NULL_SIG
    metadata_type: int
    reserved_flags: int
    public_key: bytes
    def serialized(self) -> bytes:
        """
        Pack this serializable (without the trailing signature).
        """
        return default_serializer.pack_serializable(self)
    @classmethod
    def from_signed_blob(cls: type[Self], serialized: bytes) -> Self:
        """
        Read a SignedPayload from the given serialized form.

        Everything after the packed fields is taken to be the signature.
        """
        payload, offset = default_serializer.unpack_serializable(cls, serialized)
        payload.signature = serialized[offset:]
        return payload
    def to_dict(self) -> dict:
        """
        Convert this payload to a dictionary (all packed fields plus the signature).
        """
        return {name: getattr(self, name) for name in ([*self.names, "signature"])}
    @classmethod
    def from_dict(cls: type[Self], **kwargs) -> Self:
        """
        Create a payload from the given data (an unpacked dict).

        Unknown keys are silently ignored; a "signature" key, if present, is attached.
        """
        out = cls(**{key: value for key, value in kwargs.items() if key in cls.names})
        signature = kwargs.get("signature")
        if signature is not None:
            out.signature = signature
        return out
    def add_signature(self, key: PrivateKey) -> None:
        """
        Create a signature for this payload.
        """
        # Strip the 10-byte "LibNaCLPK:" prefix; check_signature() adds it back.
        self.public_key = key.pub().key_to_bin()[10:]
        self.signature = default_eccrypto.create_signature(key, self.serialized())
    def has_signature(self) -> bool:
        """
        Check if this payload has an attached signature already.
        """
        return self.public_key != NULL_KEY or self.signature != NULL_SIG
    def check_signature(self) -> bool:
        """
        Check if the signature attached to this payload is valid for this payload.
        """
        return default_eccrypto.is_valid_signature(
            default_eccrypto.key_from_public_bin(b"LibNaCLPK:" + self.public_key),
            self.serialized(),
            self.signature
        )
@vp_compile
class ChannelNodePayload(SignedPayload):
    """
    A signed payload that also includes an id, origin id, and a timestamp.
    """
    names = [*SignedPayload.names, "id_", "origin_id", "timestamp"]
    # Three unsigned 64-bit integers appended after the SignedPayload fields.
    format_list = [*SignedPayload.format_list, "Q", "Q", "Q"]
    id_: int
    origin_id: int
    timestamp: int
@vp_compile
class MetadataNodePayload(ChannelNodePayload):
    """
    Deprecated, do not use.

    Kept as the base class of CollectionNodePayload so legacy blobs stay parseable.
    """
    names = [*ChannelNodePayload.names, "title", "tags"]
    format_list = [*ChannelNodePayload.format_list, "varlenIutf8", "varlenIutf8"]
@vp_compile
class JsonNodePayload(ChannelNodePayload):
    """
    Deprecated, do not use.

    Kept so legacy CHANNEL_DESCRIPTION blobs remain parseable (see METADATA_TYPE_TO_PAYLOAD_CLASS).
    """
    names = [*ChannelNodePayload.names, "json_text"]
    format_list = [*ChannelNodePayload.format_list, "varlenIutf8"]
@vp_compile
class BinaryNodePayload(ChannelNodePayload):
    """
    Deprecated, do not use.

    Kept so legacy CHANNEL_THUMBNAIL blobs remain parseable (see METADATA_TYPE_TO_PAYLOAD_CLASS).
    """
    names = [*ChannelNodePayload.names, "binary_data", "data_type"]
    format_list = [*ChannelNodePayload.format_list, "varlenI", "varlenIutf8"]
@vp_compile
class CollectionNodePayload(MetadataNodePayload):
    """
    Deprecated, do not use.

    Kept so legacy COLLECTION_NODE blobs remain parseable (see METADATA_TYPE_TO_PAYLOAD_CLASS).
    """
    names = [*MetadataNodePayload.names, "num_entries"]
    format_list = [*MetadataNodePayload.format_list, "Q"]
@vp_compile
class TorrentMetadataPayload(ChannelNodePayload):
    """
    Payload for metadata that stores a torrent.
    """
    names = [*ChannelNodePayload.names, "infohash", "size", "torrent_date", "title", "tags", "tracker_info"]
    # "20s" carries the raw 20-byte infohash; torrent_date travels as an unsigned int ("I").
    format_list = [*ChannelNodePayload.format_list, "20s", "Q", "I", "varlenIutf8", "varlenIutf8", "varlenIutf8"]
    infohash: bytes
    size: int
    torrent_date: int
    title: str
    tags: str
    tracker_info: str
    # fix_pack_<field>/fix_unpack_<field> follow the ipv8 VariablePayload hook convention:
    # they are invoked automatically when the same-named field is (de)serialized.
    def fix_pack_torrent_date(self, value: datetime | int) -> int:
        """
        Auto-convert the torrent date to an integer if it is a ``datetime`` object.
        """
        if isinstance(value, datetime):
            return time2int(value)
        return value
    @classmethod
    def fix_unpack_torrent_date(cls: type[Self], value: int) -> datetime:
        """
        Auto-convert the torrent data from the integer wire format to a ``datetime`` object.
        """
        return int2time(value)
    def get_magnet(self) -> str:
        """
        Create a magnet link for this payload.

        The tracker parameter is only appended when tracker_info is non-empty.
        """
        return (f"magnet:?xt=urn:btih:{hexlify(self.infohash).decode()}&dn={self.title}"
                + (f"&tr={self.tracker_info}" if self.tracker_info else ""))
@vp_compile
class ChannelMetadataPayload(TorrentMetadataPayload):
    """
    Deprecated, do not use.

    Kept so legacy CHANNEL_TORRENT blobs remain parseable (see METADATA_TYPE_TO_PAYLOAD_CLASS).
    """
    names = [*TorrentMetadataPayload.names, "num_entries", "start_timestamp"]
    format_list = [*TorrentMetadataPayload.format_list, "Q", "Q"]
@vp_compile
class DeletedMetadataPayload(SignedPayload):
    """
    Deprecated, do not use.

    Kept so legacy DELETED blobs remain parseable (see METADATA_TYPE_TO_PAYLOAD_CLASS).
    """
    names = [*SignedPayload.names, "delete_signature"]
    format_list = [*SignedPayload.format_list, "64s"]
# Maps wire-format metadata-type discriminators to their payload classes.
# Used by read_payload_with_offset to dispatch deserialization; any type not
# listed here raises UnknownBlobTypeException.
METADATA_TYPE_TO_PAYLOAD_CLASS = {
    REGULAR_TORRENT: TorrentMetadataPayload,
    CHANNEL_TORRENT: ChannelMetadataPayload,
    COLLECTION_NODE: CollectionNodePayload,
    CHANNEL_THUMBNAIL: BinaryNodePayload,
    CHANNEL_DESCRIPTION: JsonNodePayload,
    DELETED: DeletedMetadataPayload,
}
@vp_compile
class HealthItemsPayload(VariablePayload):
    """
    Payload for health item information. See the details of binary format in MetadataCompressor class description.
    """
    format_list = ["varlenI"]
    names = ["data"]
    data: bytes
    def serialize(self) -> bytes:
        """
        Convert this payload to bytes.
        """
        return default_serializer.pack_serializable(self)
    @classmethod
    def unpack(cls: type[Self], data: bytes) -> list[tuple[int, int, int]]:
        """
        Unpack this payload from the given data buffer.

        :return: one (seeders, leechers, last_check) tuple per health item.
        """
        data = default_serializer.unpack_serializable(cls, data)[0].data
        # Items are ';'-terminated, so the trailing split element is always empty and dropped.
        items = data.split(b';')[:-1]
        return [cls.parse_health_data_item(item) for item in items]
    @classmethod
    def parse_health_data_item(cls: type[Self], item: bytes) -> tuple[int, int, int]:
        """
        Convert the given bytes to ``(seeders, leechers, last_check)`` format.

        Malformed or negative-valued items degrade to (0, 0, 0) instead of raising.
        """
        if not item:
            return 0, 0, 0
        # The format is forward-compatible: currently only three first elements of data are used,
        # and later it is possible to add more fields without breaking old clients
        try:
            seeders, leechers, last_check = map(int, item.split(b',')[:3])
        except Exception:
            return 0, 0, 0
        # Safety check: seeders, leechers and last_check values cannot be negative
        if seeders < 0 or leechers < 0 or last_check < 0:
            return 0, 0, 0
        return seeders, leechers, last_check
| 9,665 | Python | .py | 255 | 31.941176 | 114 | 0.671201 | Tribler/tribler | 4,768 | 443 | 131 | GPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,011 | ranks.py | Tribler_tribler/src/tribler/core/database/ranks.py | """
Search utilities.
Author(s): Jelle Roozenburg, Arno Bakker, Alexander Kozlovsky
"""
from __future__ import annotations
import re
import time
from collections import deque
# Number of seconds in one day; used to express torrent freshness in days.
SECONDS_IN_DAY = 60 * 60 * 24
def item_rank(query: str, item: dict) -> float:
    """
    Calculates the torrent rank for item received from remote query. Returns the torrent rank value in range [0, 1].

    :param query: a user-defined query string
    :param item: a dict with torrent info.
                 Should include key `name`, can include `num_seeders`, `num_leechers`, `created`
    :return: the torrent rank value in range [0, 1]
    """
    created = item.get("created", 0)
    # A non-positive creation timestamp means the creation date is unknown.
    age_seconds = time.time() - created if created > 0 else None
    return torrent_rank(
        query,
        item["name"],
        item.get("num_seeders", 0),
        item.get("num_leechers", 0),
        age_seconds,
    )
def torrent_rank(query: str, title: str, seeders: int = 0, leechers: int = 0, freshness: float | None = None) -> float:
    """
    Calculates search rank for a torrent.

    :param query: a user-defined query string
    :param title: a torrent name
    :param seeders: the number of seeders
    :param leechers: the number of leechers
    :param freshness: the number of seconds since the torrent creation. Zero or negative value means the torrent
                      creation date is unknown. It is more convenient to use comparing to a timestamp, as it avoids
                      using the `time()` function call and simplifies testing.
    :return: the torrent rank value in range [0, 1]
    """
    text_score = title_rank(query or '', title or '')
    # Squeeze the swarm and freshness scores into [0.9, 1] so they only mildly
    # modulate the dominant text score.
    swarm_score = (seeders_rank(seeders or 0, leechers or 0) + 9) / 10
    age_score = (freshness_rank(freshness) + 9) / 10
    return text_score * swarm_score * age_score
def seeders_rank(seeders: int, leechers: int = 0) -> float:
    """
    Calculates rank based on the number of torrent's seeders and leechers.

    A leecher counts for one tenth of a seeder; the weighted swarm size is then
    normalized so that 100 weighted peers score exactly 0.5.

    :param seeders: the number of seeders for the torrent.
    :param leechers: the number of leechers for the torrent.
    :return: the torrent rank based on seeders and leechers, normalized to the range [0, 1]
    """
    weighted_swarm = seeders + 0.1 * leechers
    return weighted_swarm / (100 + weighted_swarm)
def freshness_rank(freshness: float | None) -> float:
"""
Calculates a rank value based on the torrent freshness. The result is normalized to the range [0, 1].
:param freshness: number of seconds since the torrent creation.
None means the actual torrent creation date is unknown.
Negative values treated as invalid values and give the same result as None
:return: the torrent rank based on freshness. The result is normalized to the range [0, 1]
"""
if freshness is None or freshness < 0:
return 0
days = freshness / SECONDS_IN_DAY
return 1 / (1 + days / 30)
# Extracts runs of word characters; used to tokenize queries and titles.
word_re = re.compile(r'\w+', re.UNICODE)


def title_rank(query: str, title: str) -> float:
    """
    Calculate the similarity of the title string to a query string as a float value in range [0, 1].

    :param query: a user-defined query string
    :param title: a torrent name
    :return: the similarity of the title string to a query string as a float value in range [0, 1]
    """
    # Compare case-insensitively, word by word.
    query_words = word_re.findall(query.lower())
    title_words = word_re.findall(title.lower())
    return calculate_rank(query_words, title_words)
# These coefficients are found empirically. Their exact values are not very important for a relative ranking of results

# The first word in a query is considered as a more important than the next one and so on,
# 5 means the 5th word in a query is twice as less important as the first one
POSITION_COEFF = 5

# Some big value for a penalty if a query word is totally missed from a torrent title
MISSED_WORD_PENALTY = 10

# If a torrent title contains some words at the very end that are not mentioned in a query, we add a very slight
# penalty for them. The *bigger* the REMAINDER_COEFF is, the *smaller* penalty we add for this excess words
REMAINDER_COEFF = 10

# The exact value of this coefficient is not important. It is used to convert total_error value to a rank value.
# The total_error value is some positive number. We want to have the resulted rank in range [0, 1].
RANK_NORMALIZATION_COEFF = 10


def calculate_rank(query: list[str], title: list[str]) -> float:
    """
    Calculates the similarity of the title to the query as a float value in range [0, 1].

    :param query: list of query words
    :param title: list of title words
    :return: the similarity of the title to the query as a float value in range [0, 1]
    """
    if not query:
        return 1.0
    if not title:
        return 0.0

    remaining_title = deque(title)
    total_error = 0.0
    for position, query_word in enumerate(query):
        # Words early in the query weigh more than later ones.
        word_weight = POSITION_COEFF / (POSITION_COEFF + position)
        found, skipped = find_word_and_rotate_title(query_word, remaining_title)
        if found:
            # Matched: pay only for the title words that had to be skipped over.
            total_error += skipped * word_weight
        else:
            # Not matched at all: a much heavier penalty.
            total_error += MISSED_WORD_PENALTY * word_weight

    # Title words never matched by any query word add a slight residual penalty.
    remainder_weight = 1 / (REMAINDER_COEFF + len(query))
    total_error += len(remaining_title) * remainder_weight

    # Map the accumulated (non-negative) error onto the (0, 1] range.
    return RANK_NORMALIZATION_COEFF / (RANK_NORMALIZATION_COEFF + total_error)


def find_word_and_rotate_title(word: str, title: deque[str]) -> tuple[bool, int]:
    """
    Finds the query word in the title. Returns whether it was found or not and the number of skipped words in the title.

    For efficiency this function mutates `title` in place: the first occurrence of the
    found word is removed and all words that preceded it are rotated to the end of the
    deque. This makes repeated calls for subsequent query words cheap and order-aware.

    Example: find_word_and_rotate_title('A', deque(['X', 'Y', 'A', 'B', 'C'])) returns (True, 2)
    and leaves the deque as deque(['B', 'C', 'X', 'Y']).

    :param word: a word from the user-defined query string
    :param title: a deque of words in the title
    :return: a two-elements tuple, whether the word was found in the title and the number of skipped words
    """
    try:
        skipped = title.index(word)
    except ValueError:
        # The word does not occur in the (remaining) title at all.
        return False, 0

    title.rotate(-skipped)  # move the skipped words to the end of the deque
    title.popleft()  # drop the matched word itself
    return True, skipped
| 7,457 | Python | .py | 135 | 49.614815 | 120 | 0.69515 | Tribler/tribler | 4,768 | 443 | 131 | GPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,012 | store.py | Tribler_tribler/src/tribler/core/database/store.py | from __future__ import annotations
import enum
import logging
import re
import threading
from asyncio import get_running_loop
from dataclasses import dataclass, field
from datetime import datetime, timedelta
from os.path import getsize
from pathlib import Path
from time import sleep, time
from typing import TYPE_CHECKING, Any, Callable
from lz4.frame import LZ4FrameDecompressor
from pony import orm
from pony.orm import Database, db_session, desc, left_join, raw_sql, select
from pony.orm.dbproviders.sqlite import keep_exception
from tribler.core.database.orm_bindings import misc, torrent_metadata, tracker_state
from tribler.core.database.orm_bindings import torrent_state as torrent_state_
from tribler.core.database.orm_bindings.torrent_metadata import NULL_KEY_SUBST
from tribler.core.database.ranks import torrent_rank
from tribler.core.database.serialization import (
CHANNEL_TORRENT,
COLLECTION_NODE,
NULL_KEY,
REGULAR_TORRENT,
HealthItemsPayload,
TorrentMetadataPayload,
read_payload_with_offset,
)
from tribler.core.torrent_checker.dataclasses import HealthInfo
if TYPE_CHECKING:
from sqlite3 import Connection
from ipv8.types import PrivateKey
from pony.orm.core import Entity, Query
from tribler.core.database.layers.layer import EntityImpl
from tribler.core.database.orm_bindings.torrent_metadata import TorrentMetadata
from tribler.core.notifier import Notifier
class ObjState(enum.Enum):
    """
    Different states of information in the store.

    Carried inside ProcessingResult to tell callers how a received payload
    related to the locally stored version of the same object.
    """
    UPDATED_LOCAL_VERSION = enum.auto()  # We updated the local version of the ORM object with the received one
    LOCAL_VERSION_NEWER = enum.auto()  # The local version of the ORM object is newer than the received one
    LOCAL_VERSION_SAME = enum.auto()  # The local version of the ORM object is the same as the received one
    NEW_OBJECT = enum.auto()  # The received object is unknown to us and thus added to ORM
    DUPLICATE_OBJECT = enum.auto()  # We already know about the received object
@dataclass
class ProcessingResult:
    """
    This class is used to return results of processing of a payload by process_payload.
    It includes the ORM object created as a result of processing, the state of the object
    as indicated by ObjState enum, and missing dependencies list that includes a list of query
    arguments for get_entries to query the sender back through Remote Query Community.
    """
    # The ORM entity created or matched while processing the payload.
    md_obj: EntityImpl
    # An ObjState value describing how the payload related to local data.
    obj_state: object
    # get_entries() query kwargs to request missing dependencies from the sender.
    missing_deps: list = field(default_factory=list)
# Schema versions stemming from beta releases (presumably treated specially by the upgrader -- confirm there).
BETA_DB_VERSIONS = [0, 1, 2, 3, 4, 5]
CURRENT_DB_VERSION = 15
# NOTE(review): presumed bounds for the adaptive batch size used by process_squashed_mdblob -- TODO confirm
MIN_BATCH_SIZE = 10
MAX_BATCH_SIZE = 1000
POPULAR_TORRENTS_FRESHNESS_PERIOD = 60 * 60 * 24  # Last day
POPULAR_TORRENTS_COUNT = 100
# This table should never be used from ORM directly.
# It is created as a VIRTUAL table by raw SQL and
# maintained by SQL triggers.
sql_create_fts_table = """
CREATE VIRTUAL TABLE IF NOT EXISTS FtsIndex USING FTS5
(title, content='ChannelNode', prefix = '2 3 4 5',
tokenize='porter unicode61 remove_diacritics 1');"""
sql_add_fts_trigger_insert = """
CREATE TRIGGER IF NOT EXISTS fts_ai AFTER INSERT ON ChannelNode
BEGIN
INSERT INTO FtsIndex(rowid, title) VALUES (new.rowid, new.title);
END;"""
sql_add_fts_trigger_delete = """
CREATE TRIGGER IF NOT EXISTS fts_ad AFTER DELETE ON ChannelNode
BEGIN
DELETE FROM FtsIndex WHERE rowid = old.rowid;
END;"""
sql_add_fts_trigger_update = """
CREATE TRIGGER IF NOT EXISTS fts_au AFTER UPDATE ON ChannelNode BEGIN
DELETE FROM FtsIndex WHERE rowid = old.rowid;
INSERT INTO FtsIndex(rowid, title) VALUES (new.rowid, new.title);
END;"""
sql_add_torrentstate_trigger_after_insert = """
CREATE TRIGGER IF NOT EXISTS torrentstate_ai AFTER INSERT ON TorrentState
BEGIN
UPDATE "TorrentState" SET has_data = (last_check > 0) WHERE rowid = new.rowid;
END;
"""
sql_add_torrentstate_trigger_after_update = """
CREATE TRIGGER IF NOT EXISTS torrentstate_au AFTER UPDATE ON TorrentState
BEGIN
UPDATE "TorrentState" SET has_data = (last_check > 0) WHERE rowid = new.rowid;
END;
"""
sql_create_partial_index_torrentstate_last_check = """
CREATE INDEX IF NOT EXISTS idx_torrentstate__last_check__partial
ON TorrentState (last_check, seeders, leechers, self_checked)
WHERE has_data = 1;
"""
class MetadataStore:
"""
Storage of metadata for channels and torrents.
"""
    def __init__(
        self,
        db_filename: str,
        private_key: PrivateKey,
        disable_sync: bool = False,
        notifier: Notifier | None = None,
        check_tables: bool = True,
        db_version: int = CURRENT_DB_VERSION
    ) -> None:
        """
        Create a new metadata store.

        :param db_filename: the sqlite file path, or ":memory:" for an in-memory database.
        :param private_key: the key used to sign locally created metadata.
        :param disable_sync: disable sqlite journaling/synchronous writes (DB upgrades only!).
        :param notifier: the app-level notification service (forwarded to the TorrentMetadata binding).
        :param check_tables: forwarded to Pony's generate_mapping.
        :param db_version: the version number written into a freshly created database.
        """
        self.notifier = notifier  # Reference to app-level notification service
        self.db_path = db_filename
        self.my_key = private_key
        # Public key without the 10-byte serialization prefix, matching the payload wire format.
        self.my_public_key_bin = self.my_key.pub().key_to_bin()[10:]
        self._logger = logging.getLogger(self.__class__.__name__)
        self._shutting_down = False
        self.batch_size = 10  # reasonable number, a little bit more than typically fits in a single UDP packet
        self.reference_timedelta = timedelta(milliseconds=100)
        self.sleep_on_external_thread = 0.05  # sleep this amount of seconds between batches executed on external thread
        # We have to dynamically define/init ORM-managed entities here to be able to support
        # multiple sessions in Tribler. ORM-managed classes are bound to the database instance
        # at definition.
        self.db = Database()
        # This attribute is internally called by Pony on startup, though pylint cannot detect it
        # with the static analysis.
        @self.db.on_connect
        def on_connect(_: Database, connection: Connection) -> None:
            cursor = connection.cursor()
            cursor.execute("PRAGMA journal_mode = DELETE")
            cursor.execute("PRAGMA synchronous = NORMAL")
            cursor.execute("PRAGMA temp_store = MEMORY")
            cursor.execute("PRAGMA foreign_keys = ON")
            # Disable disk sync for special cases
            if disable_sync:
                # !!! ACHTUNG !!! This should be used only for special cases (e.g. DB upgrades), because
                # losing power during a write will corrupt the database.
                cursor.execute("PRAGMA journal_mode = 0")
                cursor.execute("PRAGMA synchronous = 0")
            # Expose the Python ranking function to SQL queries as search_rank(...).
            sqlite_rank = keep_exception(torrent_rank)
            connection.create_function('search_rank', 5, sqlite_rank)
        # Entity bindings must be declared before generate_mapping() below.
        self.MiscData = misc.define_binding(self.db)
        self.TrackerState = tracker_state.define_binding(self.db)
        self.TorrentState = torrent_state_.define_binding(self.db)
        self.TorrentMetadata = torrent_metadata.define_binding(
            self.db,
            notifier=notifier,
            tag_processor_version=0
        )
        if db_filename == ":memory:":
            create_db = True
            db_path_string = ":memory:"
        else:
            create_db = not Path(db_filename).exists()
            db_path_string = str(db_filename)
        self.db.bind(provider="sqlite", filename=db_path_string, create_db=create_db, timeout=120.0)
        self.db.generate_mapping(
            create_tables=create_db, check_tables=check_tables
        )  # Must be run out of session scope
        if create_db:
            # DDL (virtual table + triggers) needs its own ddl-enabled session.
            with db_session(ddl=True):
                self.db.execute(sql_create_fts_table)
                self.create_fts_triggers()
                self.create_torrentstate_triggers()
        if create_db:
            with db_session:
                self.MiscData(name="db_version", value=str(db_version))
def set_value(self, key: str, value: str) -> None:
"""
Set a generic key to a value.
"""
obj = self.MiscData.get_for_update(name=key) or self.MiscData(name=key)
obj.value = value
def get_value(self, key: str, default: str | None = None) -> str | None:
"""
Retrieve the value for a given key.
"""
data = self.MiscData.get(name=key)
return data.value if data else default
def drop_indexes(self) -> None:
"""
Drop the indices for this database.
"""
cursor = self.db.get_connection().cursor()
cursor.execute("select name from sqlite_master where type='index' and name like 'idx_%'")
for [index_name] in cursor.fetchall():
cursor.execute(f"drop index {index_name}")
    def get_objects_to_create(self) -> list[Entity]:
        """
        Get the objects that need to be created.

        Walks Pony's schema in table-creation order and returns the schema objects
        that do not exist in the database yet.
        """
        connection = self.db.get_connection()
        schema = self.db.schema
        provider = schema.provider
        return [db_object for table in schema.order_tables_to_create()
                for db_object in table.get_objects_to_create(set())
                if not db_object.exists(provider, connection)]
def get_db_file_size(self) -> int:
"""
Get the physical size on disk (always 0 for memory dbs).
"""
return 0 if self.db_path == ":memory:" else getsize(self.db_path)
def drop_fts_triggers(self) -> None:
"""
Drop the FTS triggers.
"""
cursor = self.db.get_connection().cursor()
cursor.execute("select name from sqlite_master where type='trigger' and name like 'fts_%'")
for [trigger_name] in cursor.fetchall():
cursor.execute(f"drop trigger {trigger_name}")
def create_fts_triggers(self) -> None:
"""
Create the FTS triggers.
"""
cursor = self.db.get_connection().cursor()
cursor.execute(sql_add_fts_trigger_insert)
cursor.execute(sql_add_fts_trigger_delete)
cursor.execute(sql_add_fts_trigger_update)
def fill_fts_index(self) -> None:
"""
Insert the FTS indices.
"""
cursor = self.db.get_connection().cursor()
cursor.execute("insert into FtsIndex(rowid, title) select rowid, title from ChannelNode")
def create_torrentstate_triggers(self) -> None:
"""
Create the torrent state triggers.
"""
cursor = self.db.get_connection().cursor()
cursor.execute(sql_add_torrentstate_trigger_after_insert)
cursor.execute(sql_add_torrentstate_trigger_after_update)
    def shutdown(self) -> None:
        """
        Disconnect the connection to the database.
        """
        # Flag the shutdown first so that in-flight processing can observe it
        # (presumably checked by the batch loops -- confirm in process_squashed_mdblob).
        self._shutting_down = True
        self.db.disconnect()
async def run_threaded(self, func: Callable, *args: Any, **kwargs) -> Any: # noqa: ANN401
"""
Run ``func`` threaded and close DB connection at the end of the execution.
:param func: the function to be executed threaded
:param args: args for the function call
:param kwargs: kwargs for the function call
:return: a result of the func call.
"""
def wrapper(): # noqa: ANN202
try:
return func(*args, **kwargs)
finally:
is_main_thread = threading.current_thread() is threading.main_thread()
if not is_main_thread:
self.db.disconnect()
return await get_running_loop().run_in_executor(None, wrapper)
async def process_compressed_mdblob_threaded(self, compressed_data: bytes, **kwargs) -> list[ProcessingResult]:
"""
Decompress the given data in a thread and return a list of uncompressed results.
"""
try:
return await self.run_threaded(self.process_compressed_mdblob, compressed_data, **kwargs)
except Exception as e:
self._logger.exception("DB transaction error when tried to process compressed mdblob: %s: %s",
e.__class__.__name__, str(e), exc_info=e)
return []
    def process_compressed_mdblob(self, compressed_data: bytes,
                                  skip_personal_metadata_payload: bool = True) -> list[ProcessingResult]:
        """
        Decompress the given data and return a list of uncompressed results.

        The LZ4 frame may be followed by trailing non-compressed bytes holding
        serialized health data, which are parsed via HealthItemsPayload.

        :param compressed_data: an LZ4 frame, optionally followed by health data.
        :param skip_personal_metadata_payload: forwarded to process_squashed_mdblob.
        :return: the processing results, or an empty list if decompression fails.
        """
        try:
            with LZ4FrameDecompressor() as decompressor:
                decompressed_data = decompressor.decompress(compressed_data)
                # unused_data holds whatever followed the end of the LZ4 frame.
                unused_data = decompressor.unused_data
        except RuntimeError as e:
            self._logger.warning("Unable to decompress mdblob: %s", str(e))
            return []
        health_info = None
        if unused_data:
            try:
                health_info = HealthItemsPayload.unpack(unused_data)
            except Exception as e:
                self._logger.warning("Unable to parse health information: %s: %s", type(e).__name__, str(e))
                # NOTE(review): unlike a decompression failure (which returns []), a health-parsing
                # failure is re-raised after logging -- confirm this asymmetry is intentional.
                raise
        return self.process_squashed_mdblob(decompressed_data, health_info=health_info,
                                            skip_personal_metadata_payload=skip_personal_metadata_payload)
def process_torrent_health(self, health: HealthInfo) -> bool:
"""
Adds or updates information about a torrent health for the torrent with the specified infohash value.
:param health: a health info of a torrent
:return: True if a new TorrentState object was added
"""
if not health.is_valid():
self._logger.warning("Invalid health info ignored: %s", str(health))
return False
torrent_state = self.TorrentState.get_for_update(infohash=health.infohash)
if torrent_state and health.should_replace(torrent_state.to_health()):
self._logger.debug("Update health info %s", str(health))
torrent_state.set(seeders=health.seeders, leechers=health.leechers, last_check=health.last_check,
self_checked=False)
return False
if not torrent_state:
self._logger.debug("Add health info %s", str(health))
self.TorrentState.from_health(health)
return True
return False
def process_squashed_mdblob(self, chunk_data: bytes, external_thread: bool = False,  # noqa: C901
                            health_info: list[tuple[int, int, int]] | None = None,
                            skip_personal_metadata_payload: bool = True) -> list[ProcessingResult]:
    """
    Process raw concatenated payloads blob. This routine breaks the database access into smaller batches.
    It uses a congestion-control like algorithm to determine the optimal batch size, targeting the
    batch processing time value of self.reference_timedelta.

    :param chunk_data: the blob itself, consists of one or more GigaChannel payloads concatenated together
    :param external_thread: if this is set to True, we add some sleep between batches to allow other threads
        to get the database lock. This is an ugly workaround for Python and asynchronous programming (locking)
        imperfections. It only makes sense to use it when this routine runs on a non-reactor thread.
    :param health_info: optional (seeders, leechers, last_check) tuples, one per payload in the blob
    :param skip_personal_metadata_payload: whether to ignore payloads signed with our own key
    :return: a list of tuples of (<metadata or payload>, <action type>)
    """
    offset = 0
    payload_list = []
    while offset < len(chunk_data):
        payload, offset = read_payload_with_offset(chunk_data, offset)
        if payload and isinstance(payload, TorrentMetadataPayload):
            # Silently ignore deprecated payloads
            payload_list.append(payload)

    # Health info is positional: only usable when it matches the payloads one-to-one.
    if health_info and len(health_info) == len(payload_list):
        with db_session:
            for payload, (seeders, leechers, last_check) in zip(payload_list, health_info):
                if hasattr(payload, "infohash"):
                    health = HealthInfo(payload.infohash, last_check=last_check,
                                        seeders=seeders, leechers=leechers)
                    self.process_torrent_health(health)

    result = []
    total_size = len(payload_list)
    start = 0
    while start < total_size:
        end = start + self.batch_size
        batch = payload_list[start:end]
        batch_start_time = datetime.now()  # noqa: DTZ005

        # We separate the sessions to minimize database locking.
        with db_session(immediate=True):
            for payload in batch:
                result.extend(self.process_payload(payload, skip_personal_metadata_payload))

        # Batch size adjustment
        batch_end_time = datetime.now() - batch_start_time  # noqa: DTZ005
        target_coeff = batch_end_time.total_seconds() / self.reference_timedelta.total_seconds()
        if len(batch) == self.batch_size:
            # Adjust batch size only for full batches
            if target_coeff < 0.8:
                self.batch_size += self.batch_size
            elif target_coeff > 1.0:
                self.batch_size = int(float(self.batch_size) / target_coeff)
            # we want to guarantee that at least something
            # will go through, but not too much
            self.batch_size = min(max(self.batch_size, MIN_BATCH_SIZE), MAX_BATCH_SIZE)
        # BUGFIX: the format string and its arguments were previously wrapped in a single
        # tuple, so %-formatting never happened and the raw tuple was logged instead.
        self._logger.debug("Added payload batch to DB (entries, seconds): %i %f",
                           self.batch_size, float(batch_end_time.total_seconds()))
        start = end
        if self._shutting_down:
            break

        if external_thread:
            sleep(self.sleep_on_external_thread)

    return result
@db_session
def process_payload(self, payload: TorrentMetadataPayload,
                    skip_personal_metadata_payload: bool = True) -> list[ProcessingResult]:
    """
    Write a payload to our database (if necessary).
    """
    # Don't process our own torrents
    if skip_personal_metadata_payload and payload.public_key == self.my_public_key_bin:
        return []

    # Don't process unknown/deprecated payloads
    if payload.metadata_type != REGULAR_TORRENT:
        return []

    # Don't process torrents with a bad signature
    if payload.has_signature() and not payload.check_signature():
        return []

    if payload.public_key == NULL_KEY:
        # Unsigned ("free-for-all") torrents go through a dedicated constructor.
        ffa_entry = self.TorrentMetadata.add_ffa_from_dict(payload.to_dict())
        if not ffa_entry:
            return []
        return [ProcessingResult(md_obj=ffa_entry, obj_state=ObjState.NEW_OBJECT)]

    # Do we already know about this object? In that case, we keep the first one (i.e., no versioning).
    known_entry = self.TorrentMetadata.get_for_update(public_key=payload.public_key, id_=payload.id_)
    if known_entry:
        return [ProcessingResult(md_obj=known_entry, obj_state=ObjState.DUPLICATE_OBJECT)]

    # Process signed torrents
    fresh_entry = self.TorrentMetadata.from_payload(payload)
    return [ProcessingResult(md_obj=fresh_entry, obj_state=ObjState.NEW_OBJECT)]
@db_session
def get_num_torrents(self) -> int:
    """
    Get the number of torrents in the database.

    Only entries with metadata_type == REGULAR_TORRENT are counted; the count
    runs server-side (Pony translates the lambda into a SQL COUNT).
    """
    return orm.count(self.TorrentMetadata.select(lambda g: g.metadata_type == REGULAR_TORRENT))
def search_keyword(self, query: str, origin_id: int | None = None) -> Query:
    """
    Search for an FTS query, potentially restricted to a given origin id.

    Requires FTS5 table "FtsIndex" to be generated and populated. FTS table is maintained automatically by SQL
    triggers. BM25 ranking is embedded in FTS5.

    :param query: the FTS match expression.
    :param origin_id: when given, restrict matches to ChannelNode rows with this origin_id.
    :return: a Pony query over TorrentMetadata rows whose rowid is in the FTS result set.
    """
    # Sanitize FTS query
    if not query or query == "*":
        return []

    if origin_id is not None:
        # When filtering a specific channel folder, we want to return all matching results
        fts_ids = raw_sql("""
            SELECT rowid FROM ChannelNode
            WHERE origin_id = $origin_id
            AND rowid IN (SELECT rowid FROM FtsIndex WHERE FtsIndex MATCH $query)
        """)
    else:
        # When searching through an entire database for some text queries, the database can contain hundreds
        # of thousands of matching torrents. The ranking of this number of torrents may be very expensive: we need
        # to retrieve each matching torrent info and the torrent state from the database for proper ordering.
        # They are scattered randomly through the entire database file, so fetching all these torrents is slow.
        # Also, the torrent_rank function used inside the final ORDER BY section is written in Python. It is about
        # 30 times slower than a possible similar function written in C due to SQLite-Python communication cost.
        #
        # To speed up the query, we limit and filter search results in several iterations, and each time apply
        # a more expensive ranking algorithm:
        #   * First, we quickly fetch at most 10000 of the most recent torrents that match the search criteria
        #     and ignore older torrents. This way, we avoid sorting all hundreds of thousands of matching torrents
        #     in degenerative cases. In typical cases, when the text query is specific enough, the number of
        #     matching torrents is not that big.
        #   * Then, we sort these 10000 torrents to prioritize torrents with seeders and restrict the number
        #     of torrents to just 1000.
        #   * Finally, in the main query, we apply a slow ranking function to these 1000 torrents to show the most
        #     relevant torrents at the top of the search result list.
        #
        # This multistep sort+limit sequence allows speedup queries up to two orders of magnitude. To further
        # speed up full-text search queries, we can rewrite the torrent_rank function to C one day.
        fts_ids = raw_sql("""
            SELECT fts.rowid
            FROM (
                SELECT rowid FROM FtsIndex WHERE FtsIndex MATCH $query ORDER BY rowid DESC LIMIT 10000
            ) fts
            LEFT JOIN ChannelNode cn on fts.rowid = cn.rowid
            LEFT JOIN main.TorrentState ts on cn.health = ts.rowid
            ORDER BY coalesce(ts.seeders, 0) DESC, fts.rowid DESC
            LIMIT 1000
        """)
    return left_join(g for g in self.TorrentMetadata if g.rowid in fts_ids)
@db_session
def get_entries_query(  # noqa: C901, PLR0913
        self,
        metadata_type: int | None = None,
        channel_pk: bytes | None = None,
        hide_xxx: bool = False,
        origin_id: int | None = None,
        sort_by: str | None = None,
        sort_desc: bool = True,
        max_rowid: int | None = None,
        txt_filter: str | None = None,
        category: str | None = None,
        infohash: bytes | None = None,
        infohash_set: set[bytes] | None = None,
        id_: int | None = None,
        self_checked_torrent: bool | None = None,
        health_checked_after: int | None = None,
        popular: bool | None = None,
        **kwargs
) -> Query:
    """
    This method implements REST-friendly way to get entries from the database.
    Warning! For Pony magic to work, iteration variable name (e.g. 'g') should be the same everywhere!

    Unknown keyword arguments are logged and ignored.

    :return: PonyORM query object corresponding to the given params.
    """
    if kwargs:
        self._logger.info("get_entries_query got ignored kwargs: %s", ", ".join(kwargs.keys()))
    if txt_filter:
        # Full-text search drives the base row set.
        pony_query = self.search_keyword(txt_filter, origin_id=origin_id)
    elif popular:
        if metadata_type != REGULAR_TORRENT:
            msg = "With `popular=True`, only `metadata_type=REGULAR_TORRENT` is allowed"
            raise TypeError(msg)

        # Popularity mode short-circuits: return the freshest seeded torrents directly.
        t = time() - POPULAR_TORRENTS_FRESHNESS_PERIOD
        return select(
            g for g in self.TorrentMetadata
            for health in self.TorrentState
            if health.has_data == 1  # The condition had to be written this way for the partial index to work
            and health.last_check >= t and (health.seeders > 0 or health.leechers > 0)
            and g.health == health
        ).order_by(
            lambda g: (desc(g.health.seeders), desc(g.health.leechers), desc(g.health.last_check))
        ).limit(POPULAR_TORRENTS_COUNT)
    else:
        pony_query = left_join(g for g in self.TorrentMetadata)

    # A single infohash is folded into the set-based filter below.
    infohash_set = infohash_set or ({infohash} if infohash else None)
    if max_rowid is not None:
        pony_query = pony_query.where(lambda g: g.rowid <= max_rowid)
    if metadata_type is not None:
        pony_query = pony_query.where(lambda g: g.metadata_type == metadata_type)
    pony_query = (
        pony_query.where(public_key=(b"" if channel_pk == NULL_KEY_SUBST else channel_pk))
        if channel_pk is not None
        else pony_query
    )
    # origin_id can be zero, for e.g. root channel
    pony_query = pony_query.where(id_=id_) if id_ is not None else pony_query
    pony_query = pony_query.where(origin_id=origin_id) if origin_id is not None else pony_query
    pony_query = pony_query.where(lambda g: g.tags == category) if category else pony_query
    pony_query = pony_query.where(lambda g: g.xxx == 0) if hide_xxx else pony_query
    pony_query = pony_query.where(lambda g: g.infohash in infohash_set) if infohash_set else pony_query
    pony_query = (
        pony_query.where(lambda g: g.health.self_checked == self_checked_torrent)
        if self_checked_torrent is not None
        else pony_query
    )
    if health_checked_after is not None:
        pony_query = pony_query.where(lambda g: g.health.has_data == 1  # Has to be written this way for index
                                      and g.health.last_check >= health_checked_after)

    # Sort the query; this default rowid ordering is overridden below when sort_by is set.
    pony_query = pony_query.sort_by("desc(g.rowid)" if sort_desc else "g.rowid")

    if sort_by == "HEALTH":
        pony_query = pony_query.sort_by(
            "(desc(g.health.seeders), desc(g.health.leechers))"
            if sort_desc
            else "(g.health.seeders, g.health.leechers)"
        )
    elif sort_by == "size":
        # Remark: this can be optimized to skip cases where size field does not matter
        # When querying for mixed channels / torrents lists, channels should have priority over torrents
        sort_expression = "desc(g.size)" if sort_desc else "g.size"
        pony_query = pony_query.sort_by(sort_expression)
    elif sort_by:
        # NOTE(review): sort_by is interpolated into raw SQL here; callers sanitize it
        # through json2pony_columns — confirm no other call sites pass user input directly.
        sort_expression = raw_sql(f"g.{sort_by} COLLATE NOCASE" + (" DESC" if sort_desc else ""))
        pony_query = pony_query.sort_by(sort_expression)
    if sort_by is None and txt_filter:
        """
        The following call of `sort_by` produces an ORDER BY expression that looks like this:

        ORDER BY
            case when "g"."metadata_type" = $CHANNEL_TORRENT then 1
                 when "g"."metadata_type" = $COLLECTION_NODE then 2
                 else 3 end,

            search_rank(
                $QUERY_STRING,
                g.title,
                torrentstate.seeders,
                torrentstate.leechers,
                $CURRENT_TIME - strftime('%s', g.torrent_date)
            ) DESC,

            "torrentstate"."last_check" DESC,

        So, the channel torrents and channel folders are always on top if they are not filtered out.
        Then regular torrents are selected in order of their relevance according to a search_rank() result.
        If two torrents have the same search rank, they are ordered by the last time they were checked.

        The search_rank() function is called directly from the SQLite query, but is implemented in Python,
        it is actually the torrent_rank() function from core/utilities/search_utils.py, wrapped with
        keep_exception() to return possible exception from SQLite to Python.

        The search_rank() function receives the following arguments:
          - the current query string (like "Big Buck Bunny");
          - the title of the current torrent;
          - the number of seeders;
          - the number of leechers;
          - the number of seconds since the torrent's creation time.
        """
        pony_query = pony_query.sort_by(
            f"""
            (1 if g.metadata_type == {CHANNEL_TORRENT} else 2 if g.metadata_type == {COLLECTION_NODE} else 3),
            raw_sql('''search_rank(
                $txt_filter, g.title, torrentstate.seeders, torrentstate.leechers,
                $int(time()) - strftime('%s', g.torrent_date)
            ) DESC'''),
            desc(g.health.last_check)  # just to trigger the TorrentState table inclusion into the left join
            """
        )
    return pony_query
async def get_entries_threaded(self, **kwargs) -> list[TorrentMetadata]:
    """
    Retrieve entries in a thread and return a list of results.

    Simply dispatches ``get_entries`` through ``run_threaded``.
    """
    threaded_call = self.run_threaded(self.get_entries, **kwargs)
    return await threaded_call
@db_session
def get_entries(self, first: int = 1, last: int | None = None, **kwargs) -> list[TorrentMetadata]:
    """
    Get some torrents. Optionally sort the results by a specific field, or filter the channels based
    on a keyword/whether you are subscribed to it.

    :return: A list of class members
    """
    window_start = (first or 1) - 1
    entries = self.get_entries_query(**kwargs)[window_start:last]
    # ACHTUNG! Calling to_simple_dict() here forces entry.health to load inside this
    # db_session, so that a later to_simple_dict() call can succeed.
    for entry in entries:
        entry.to_simple_dict()
    return entries
@db_session
def get_total_count(self, **kwargs) -> int | None:
    """
    Get total count of torrents that would be returned if there would be no pagination/limits/sort.
    """
    # Pagination and ordering do not affect the row count, so drop those parameters.
    for ignored_key in ("first", "last", "sort_by", "sort_desc"):
        kwargs.pop(ignored_key, None)
    return self.get_entries_query(**kwargs).count()
@db_session
def get_entries_count(self, **kwargs) -> int | None:
    """
    Get the count of torrents that would be returned if there would be no pagination/limits.
    """
    # Unlike get_total_count, only the pagination window is stripped here.
    for ignored_key in ("first", "last"):
        kwargs.pop(ignored_key, None)
    return self.get_entries_query(**kwargs).count()
@db_session
def get_max_rowid(self) -> int:
    """
    Get the highest-known row id.

    Returns 0 when the table is empty (the aggregate yields None in that case).
    """
    return select(max(obj.rowid) for obj in self.TorrentMetadata).get() or 0

# Tokenizer for free-text queries: extracts the individual word characters runs.
# Used by get_auto_complete_terms below.
fts_keyword_search_re = re.compile(r'\w+', re.UNICODE)
def get_auto_complete_terms(self, text: str, max_terms: int) -> list[str]:
    """
    Get the auto-completion terms for a given query.

    :param text: the (partial) query typed so far.
    :param max_terms: the maximum number of suggestions to return.
    """
    if not text:
        return []

    words = self.fts_keyword_search_re.findall(text)
    if not words:
        return []

    # FTS prefix query over all typed words, the last one treated as a prefix ('*').
    fts_query = '"{}"*'.format(" ".join(f"{word}" for word in words))
    # Regex that re-locates the typed words in a candidate title; group(1) captures the
    # separator after the last word, group(2) the continuation (rest of word / next word).
    suggestion_pattern = r'\W+'.join(word for word in words) + r'(\W*)((?:[.-]?\w)*)'
    suggestion_re = re.compile(suggestion_pattern, re.UNICODE)

    with db_session:
        titles = self.db.select("""
            cn.title
            FROM ChannelNode cn
            LEFT JOIN TorrentState ts ON cn.health = ts.rowid
            WHERE cn.rowid in (
                SELECT rowid FROM FtsIndex WHERE FtsIndex MATCH $fts_query ORDER BY rowid DESC LIMIT $max_terms
            )
            ORDER BY coalesce(ts.seeders, 0) DESC
        """, globals={"fts_query": fts_query, "max_terms": max_terms})

    result = []
    for title in titles:
        match = suggestion_re.search(title.lower())
        if match:
            # group(2) is the ending of the last word (if the word is not finished) or the next word
            continuation = match.group(2)
            if re.match(r'^.*\w$', text) and match.group(1):  # group(1) is non-word symbols (spaces, commas, etc.)
                continuation = match.group(1) + continuation
            suggestion = text + continuation
            if suggestion not in result:
                result.append(suggestion)
                if len(result) >= max_terms:
                    break

    return result
| 33,115 | Python | .py | 655 | 39.662595 | 120 | 0.619717 | Tribler/tribler | 4,768 | 443 | 131 | GPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,013 | database_endpoint.py | Tribler_tribler/src/tribler/core/database/restapi/database_endpoint.py | from __future__ import annotations
import asyncio
import json
import typing
from binascii import unhexlify
from dataclasses import asdict
from aiohttp import web
from aiohttp_apispec import docs, querystring_schema
from ipv8.REST.schema import schema
from marshmallow.fields import Boolean, Integer, String
from pony.orm import db_session
from typing_extensions import Self, TypeAlias
from tribler.core.database.layers.knowledge import ResourceType
from tribler.core.database.queries import to_fts_query
from tribler.core.database.restapi.schema import MetadataSchema, SearchMetadataParameters, TorrentSchema
from tribler.core.database.serialization import REGULAR_TORRENT
from tribler.core.notifier import Notification
from tribler.core.restapi.rest_endpoint import (
HTTP_BAD_REQUEST,
HTTP_NOT_FOUND,
MAX_REQUEST_SIZE,
RESTEndpoint,
RESTResponse,
)
if typing.TYPE_CHECKING:
from multidict import MultiDictProxy, MultiMapping
from tribler.core.database.store import MetadataStore
from tribler.core.database.tribler_database import TriblerDatabase
from tribler.core.libtorrent.download_manager.download_manager import DownloadManager
from tribler.core.restapi.rest_manager import TriblerRequest
from tribler.core.torrent_checker.torrent_checker import TorrentChecker
RequestType: TypeAlias = TriblerRequest[tuple[MetadataStore]]
# Default timeout for tracker health checks (see get_torrent_health).
TORRENT_CHECK_TIMEOUT = 20

# This dict is used to translate JSON fields into the columns used in Pony for _sorting_.
# id_ is not in the list because there is not index on it, so we never really want to sort on it.
json2pony_columns = {
    "category": "tags",
    "name": "title",
    "size": "size",
    "infohash": "infohash",
    "date": "torrent_date",
    "created": "torrent_date",
    "status": "status",
    "votes": "votes",
    "subscribed": "subscribed",
    "health": "HEALTH",
}
def parse_bool(obj: str) -> bool:
    """
    Parse input to boolean True or False.

    Allow parsing text 'false', 'true' '1', '0' to boolean.

    :param obj: Object to parse
    """
    # json.loads maps "true"/"false"/"1"/"0" to their Python values;
    # bool() then collapses the decoded value to a proper boolean.
    decoded = json.loads(obj)
    return bool(decoded)
class DatabaseEndpoint(RESTEndpoint):
    """
    This is the top-level endpoint class that serves other endpoints.

    /metadata
             /torrents
             /<public_key>
    """

    # URL prefix under which all routes below are registered.
    path = "/api/metadata"

    def __init__(self, middlewares: tuple = (), client_max_size: int = MAX_REQUEST_SIZE) -> None:
        """
        Create a new database endpoint.

        :param middlewares: aiohttp middlewares forwarded to the parent endpoint.
        :param client_max_size: maximum allowed request body size.
        """
        super().__init__(middlewares, client_max_size)
        # These attributes are injected externally after construction; every
        # handler must therefore tolerate them still being None.
        self.mds: MetadataStore | None = None
        self.required_components = ("mds", )

        self.download_manager: DownloadManager | None = None
        self.torrent_checker: TorrentChecker | None = None
        self.tribler_db: TriblerDatabase | None = None

        self.app.add_routes(
            [
                web.get("/torrents/{infohash}/health", self.get_torrent_health),
                web.get("/torrents/popular", self.get_popular_torrents),
                web.get("/search/local", self.local_search),
                web.get("/search/completions", self.completions)
            ]
        )
@classmethod
def sanitize_parameters(cls: type[Self],
                        parameters: MultiDictProxy | MultiMapping[str]
                        ) -> dict[str, str | float | list[str] | set[bytes] | bytes | None]:
    """
    Sanitize the parameters for a request that fetches channels.

    Optional keys are only included in the result when present in the request.
    """
    sanitized: dict[str, str | float | list[str] | set[bytes] | bytes | None] = {
        "first": int(parameters.get("first", 1)),
        "last": int(parameters.get("last", 50)),
        "sort_by": json2pony_columns.get(parameters.get("sort_by", "")),
        "sort_desc": parse_bool(parameters.get("sort_desc", "true")),
        "hide_xxx": parse_bool(parameters.get("hide_xxx", "false")),
        "category": parameters.get("category"),
    }

    if "tags" in parameters:
        sanitized["tags"] = parameters.getall("tags")
    # Integer-valued optional parameters share the same conversion.
    for numeric_key in ("max_rowid", "origin_id"):
        if numeric_key in parameters:
            sanitized[numeric_key] = int(parameters[numeric_key])
    if "channel_pk" in parameters:
        sanitized["channel_pk"] = unhexlify(parameters["channel_pk"])
    if "popular" in parameters and parse_bool(parameters.get("popular", "false")):
        # Popularity implies health-based ordering.
        sanitized["sort_by"] = "HEALTH"

    return sanitized
@db_session
def add_statements_to_metadata_list(self, contents_list: list[dict]) -> None:
    """
    Load statements from the database and attach them to the torrent descriptions in the content list.
    """
    if self.tribler_db is None:
        self._logger.error("Cannot add statements to metadata list: tribler_db is not set in %s",
                           self.__class__.__name__)
        return

    # Only regular torrent entries carry knowledge-graph statements.
    for entry in contents_list:
        if entry["type"] != REGULAR_TORRENT:
            continue
        statements = self.tribler_db.knowledge.get_simple_statements(
            subject_type=ResourceType.TORRENT,
            subject=entry["infohash"]
        )
        entry["statements"] = [asdict(statement) for statement in statements]
@docs(
    tags=["Metadata"],
    summary="Fetch the swarm health of a specific torrent.",
    parameters=[
        {
            "in": "path",
            "name": "infohash",
            # BUGFIX: description was copy-pasted from the download-removal endpoint.
            "description": "Infohash of the torrent to check",
            "type": "string",
            "required": True,
        },
        {
            "in": "query",
            "name": "timeout",
            "description": "Timeout to be used in the connections to the trackers",
            "type": "integer",
            "default": 20,
            "required": False,
        },
    ],
    responses={
        200: {
            "schema": schema(
                HealthCheckResponse={
                    "checking": Boolean()
                }
            ),
            "examples": [
                {"checking": 1},
            ],
        }
    },
)
async def get_torrent_health(self, request: RequestType) -> RESTResponse:
    """
    Fetch the swarm health of a specific torrent.

    The actual check runs in the background; the response only signals
    whether a check was started.
    """
    self._logger.info("Get torrent health request: %s", str(request))
    try:
        timeout = int(request.query.get("timeout", TORRENT_CHECK_TIMEOUT))
    except ValueError as e:
        return RESTResponse({"error": f"Error processing timeout parameter: {e}"}, status=HTTP_BAD_REQUEST)

    if self.torrent_checker is None:
        # Without a checker component there is nothing to do.
        return RESTResponse({"checking": False})

    infohash = unhexlify(request.match_info["infohash"])
    health_check_coro = self.torrent_checker.check_torrent_health(infohash, timeout=timeout, scrape_now=True)
    # Fire-and-forget: the task is registered so it is awaited/cancelled on shutdown.
    _ = self.register_anonymous_task("health_check", asyncio.ensure_future(health_check_coro))
    return RESTResponse({"checking": True})
def add_download_progress_to_metadata_list(self, contents_list: list[dict]) -> None:
    """
    Retrieve the download status from libtorrent and attach it to the torrent descriptions in the content list.
    """
    manager = self.download_manager
    if manager is None:
        return
    for entry in contents_list:
        if entry["type"] != REGULAR_TORRENT:
            continue
        download = manager.get_download(unhexlify(entry["infohash"]))
        # Downloads still waiting for metainfo are skipped: they have no usable progress yet.
        if download is not None and download.tdef.infohash not in manager.metainfo_requests:
            entry["progress"] = download.get_state().get_progress()
@docs(
    tags=["Metadata"],
    summary="Get the list of most popular torrents.",
    responses={
        200: {
            "schema": schema(
                GetPopularTorrentsResponse={
                    "results": [TorrentSchema],
                    "first": Integer(),
                    "last": Integer(),
                }
            )
        }
    },
)
async def get_popular_torrents(self, request: RequestType) -> RESTResponse:
    """
    Get the list of most popular torrents.
    """
    sanitized = self.sanitize_parameters(request.query)
    # `popular=True` forces metadata store's popularity query (see get_entries_query).
    sanitized["metadata_type"] = REGULAR_TORRENT
    sanitized["popular"] = True
    if t_filter := request.query.get("filter"):
        sanitized["txt_filter"] = t_filter

    with db_session:
        contents_list = [entry.to_simple_dict() for entry in request.context[0].get_entries(**sanitized)]

    # Enrich the plain dicts with live download progress and knowledge statements.
    self.add_download_progress_to_metadata_list(contents_list)
    self.add_statements_to_metadata_list(contents_list)
    response_dict = {
        "results": contents_list,
        "first": sanitized["first"],
        "last": sanitized["last"],
    }

    return RESTResponse(response_dict)
@docs(
    tags=["Metadata"],
    summary="Perform a search for a given query.",
    responses={
        200: {
            "schema": schema(
                SearchResponse={
                    "results": [MetadataSchema],
                    "first": Integer(),
                    "last": Integer(),
                    "sort_by": String(),
                    "sort_desc": Integer(),
                    "total": Integer(),
                }
            )
        }
    },
)
@querystring_schema(SearchMetadataParameters)
async def local_search(self, request: RequestType) -> RESTResponse:  # noqa: C901
    """
    Perform a search for a given query.

    The database work runs on the metadata store's executor thread; only the
    tag-to-infohash resolution happens on the event loop beforehand.
    """
    try:
        sanitized = self.sanitize_parameters(request.query)
        tags = sanitized.pop("tags", None)
    except (ValueError, KeyError):
        return RESTResponse({"error": "Error processing request parameters"}, status=HTTP_BAD_REQUEST)
    if self.tribler_db is None:
        return RESTResponse({"error": "Tribler DB not initialized"}, status=HTTP_NOT_FOUND)

    include_total = request.query.get("include_total", "")
    query = request.query.get("fts_text")
    if query is None:
        return RESTResponse({"error": f"Got search with no fts_text: {dict(request.query)}"},
                            status=HTTP_BAD_REQUEST)
    if t_filter := request.query.get("filter"):
        query += f" {t_filter}"
    fts = to_fts_query(query)
    sanitized["txt_filter"] = fts
    self._logger.info("FTS: %s", fts)

    mds: MetadataStore = request.context[0]

    def search_db() -> tuple[list[dict], int, int]:
        # Runs on the mds executor thread (see run_threaded below).
        with db_session:
            pony_query = mds.get_entries(**sanitized)
            search_results = [r.to_simple_dict() for r in pony_query]
            if include_total:
                total = mds.get_total_count(**sanitized)
                max_rowid = mds.get_max_rowid()
            else:
                total = max_rowid = None
        if self.download_manager is not None:
            self.download_manager.notifier.notify(Notification.local_query_results,
                                                  query=request.query.get("fts_text"),
                                                  results=list(search_results))
        return search_results, total, max_rowid

    try:
        with db_session:
            if tags:
                # Narrow the search to infohashes that carry ALL requested tags.
                infohash_set = self.tribler_db.knowledge.get_subjects_intersection(
                    subjects_type=ResourceType.TORRENT,
                    objects=set(typing.cast(list[str], tags)),
                    predicate=ResourceType.TAG,
                    case_sensitive=False)
                if infohash_set:
                    sanitized["infohash_set"] = {bytes.fromhex(s) for s in infohash_set}

        search_results, total, max_rowid = await mds.run_threaded(search_db)
    except Exception as e:
        self._logger.exception("Error while performing DB search: %s: %s", type(e).__name__, e)
        return RESTResponse(status=HTTP_BAD_REQUEST)

    self.add_statements_to_metadata_list(search_results)
    response_dict = {
        "results": search_results,
        "first": sanitized["first"],
        "last": sanitized["last"],
        "sort_by": sanitized["sort_by"],
        "sort_desc": sanitized["sort_desc"],
    }
    if include_total:
        response_dict.update(total=total, max_rowid=max_rowid)

    return RESTResponse(response_dict)
@docs(
    tags=["Metadata"],
    summary="Return auto-completion suggestions for a given query.",
    parameters=[{"in": "query", "name": "q", "description": "Search query", "type": "string", "required": True}],
    responses={
        200: {
            "schema": schema(
                CompletionsResponse={
                    "completions": [String],
                }
            ),
            "examples": {"completions": ["pioneer one", "pioneer movie"]},
        }
    },
)
async def completions(self, request: RequestType) -> RESTResponse:
    """
    Return auto-completion suggestions for a given query.
    """
    query_text = request.query.get("q")
    if query_text is None:
        return RESTResponse({"error": "query parameter missing"}, status=HTTP_BAD_REQUEST)

    normalized = query_text.strip().lower()
    suggestions = request.context[0].get_auto_complete_terms(normalized, max_terms=5)
    return RESTResponse({"completions": suggestions})
| 13,906 | Python | .py | 319 | 32.040752 | 117 | 0.575489 | Tribler/tribler | 4,768 | 443 | 131 | GPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,014 | schema.py | Tribler_tribler/src/tribler/core/database/restapi/schema.py | from marshmallow import Schema
from marshmallow.fields import Boolean, Float, Integer, List, String
class MetadataParameters(Schema):
    """
    The REST API schema for metadata parameters.
    """

    # Pagination window (1-based, inclusive).
    first = Integer(default=1, description="Limit the range of the query")
    last = Integer(default=50, description="Limit the range of the query")
    sort_by = String(description='Sorts results in forward or backward, based on column name (e.g. "id" vs "-id")')
    sort_desc = Boolean(default=True)
    txt_filter = String(description="FTS search on the chosen word* terms")
    hide_xxx = Boolean(default=False, description="Toggles xxx filter")
    category = String()
    exclude_deleted = Boolean(default=False)
    metadata_type = List(String(description='Limits query to certain metadata types (e.g. "torrent" or "channel")'))
class SearchMetadataParameters(MetadataParameters):
    """
    The REST API schema for search parameters.

    Extends the plain metadata parameters with result-counting options.
    """

    include_total = Boolean(default=False, description="Include total rows found in query response, expensive if "
                                                       "there is many rows")
    max_rowid = Integer(default=None, description="Only return results with rowid lesser than max_rowid")
class MetadataSchema(Schema):
    """
    The REST API schema for metadata itself.

    Base fields shared by every metadata entry returned by the API.
    """

    type = Integer()
    id = Integer()
    origin_id = Integer()
    public_key = String()
    name = String()
    category = String()
    # Only present when a matching download exists (attached by the endpoint).
    progress = Float(required=False)
class CollectionSchema(MetadataSchema):
    """
    The REST API schema for collected torrents.
    """

    torrents = Integer()
    state = String()
    description_flag = Boolean()
    thumbnail_flag = Boolean()
class TorrentSchema(MetadataSchema):
    """
    The REST API schema for a torrent.
    """

    status = Integer()
    # Hex-encoded infohash (the endpoints unhexlify this value when needed).
    infohash = String()
    size = Integer()
    num_seeders = Integer()
    num_leechers = Integer()
    last_tracker_check = Integer()
    updated = Integer()
class ChannelSchema(TorrentSchema, CollectionSchema):
    """
    The REST API schema for a channel.

    Combines torrent and collection fields with channel-specific state.
    """

    dirty = Boolean()
    subscribed = Boolean()
    votes = Float()
| 2,200 | Python | .py | 59 | 31.661017 | 116 | 0.681583 | Tribler/tribler | 4,768 | 443 | 131 | GPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,015 | layer.py | Tribler_tribler/src/tribler/core/database/layers/layer.py | from __future__ import annotations
from typing import TypeVar
from pony.orm.core import Entity
EntityImpl = TypeVar("EntityImpl", bound=Entity)
class Layer:
    """
    A generic database layer.
    """

    def get_or_create(self, cls: type[EntityImpl], create_kwargs: dict | None = None, **kwargs) -> EntityImpl:
        """
        Get or create a db entity.

        :param cls: The Entity's class.
        :param create_kwargs: Any necessary additional keyword arguments to create the entity.
        :param kwargs: Keyword arguments to find the entity.
        :returns: A new or existing instance.
        """
        existing = cls.get_for_update(**kwargs)
        if existing:
            return existing
        # Not found: merge in the creation-only attributes and instantiate.
        if create_kwargs:
            kwargs.update(create_kwargs)
        return cls(**kwargs)
| 811 | Python | .py | 22 | 29.363636 | 110 | 0.636829 | Tribler/tribler | 4,768 | 443 | 131 | GPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,016 | knowledge.py | Tribler_tribler/src/tribler/core/database/layers/knowledge.py | from __future__ import annotations
import datetime
import logging
from dataclasses import dataclass
from enum import IntEnum
from typing import TYPE_CHECKING, Callable, Iterator, List, Set
from pony import orm
from pony.orm import raw_sql
from pony.orm.core import Database, Entity, Query, UnrepeatableReadError, select
from pony.utils import between
from tribler.core.database.layers.layer import EntityImpl, Layer
from tribler.core.knowledge.payload import StatementOperation
# Initial clock value for statement operations.
CLOCK_START_VALUE = 0

# Sentinel "public key" attributed to operations that Tribler generates itself.
PUBLIC_KEY_FOR_AUTO_GENERATED_OPERATIONS = b"auto_generated"

SHOW_THRESHOLD = 1  # how many operation needed for showing a knowledge graph statement in the UI
HIDE_THRESHOLD = -2  # how many operation needed for hiding a knowledge graph statement in the UI
if TYPE_CHECKING:
    # Static-typing stubs mirroring the entity classes that
    # KnowledgeDataAccessLayer.define_binding() creates at runtime.
    import dataclasses

    from tribler.core.database.layers.health import TorrentHealth, Tracker

    @dataclasses.dataclass
    class Peer(EntityImpl):
        """
        Database type for a peer.
        """

        id: int
        public_key: bytes
        added_at: datetime.datetime | None
        operations: set[StatementOp]

        def __init__(self, public_key: bytes) -> None: ...  # noqa: D107

        @staticmethod
        def get(public_key: bytes) -> Peer | None: ...  # noqa: D102

        @staticmethod
        def get_for_update(public_key: bytes) -> Peer | None: ...  # noqa: D102

    @dataclasses.dataclass
    class Statement(EntityImpl):
        """
        Database type for a statement.
        """

        id: int
        subject: Resource
        object: Resource
        operations: set[StatementOp]
        added_count: int
        removed_count: int
        local_operation: int | None

        def __init__(self, subject: Resource, object: Resource) -> None: ...  # noqa: D107, A002

        @staticmethod
        def get(subject: Resource, object: Resource) -> Statement | None: ...  # noqa: D102, A002

        @staticmethod
        def get_for_update(subject: Resource, object: Resource) -> Statement | None: ...  # noqa: D102, A002

    class IterResource(type):  # noqa: D101
        def __iter__(cls) -> Iterator[Resource]: ...  # noqa: D105

    @dataclasses.dataclass
    class Resource(EntityImpl, metaclass=IterResource):
        """
        Database type for a resources.
        """

        id: int
        name: str
        type: int
        subject_statements: set[Statement]
        object_statements: set[Statement]
        torrent_healths: set[TorrentHealth]
        trackers: set[Tracker]

        def __init__(self, name: str, type: int) -> None: ...  # noqa: D107, A002

        @staticmethod
        def get(name: str, type: int) -> Resource | None: ...  # noqa: D102, A002

        @staticmethod
        def get_for_update(name: str, type: int) -> Resource | None: ...  # noqa: D102, A002

    @dataclasses.dataclass
    class StatementOp(EntityImpl):
        """
        Database type for a statement operation.
        """

        id: int
        statement: Statement
        peer: Peer
        operation: int
        clock: int
        signature: bytes
        updated_at: datetime.datetime
        auto_generated: bool

        def __init__(self, statement: Statement, peer: Peer, operation: int, clock: int,  # noqa: D107
                     signature: bytes, auto_generated: bool) -> None: ...

        @staticmethod
        def get(statement: Statement, peer: Peer) -> StatementOp | None: ...  # noqa: D102

        @staticmethod
        def get_for_update(statement: Statement, peer: Peer) -> StatementOp | None: ...  # noqa: D102
class Operation(IntEnum):
    """
    Available types of statement operations.

    NOTE(review): these values appear to be persisted via StatementOp.operation,
    so they should not be renumbered — confirm before changing.
    """

    ADD = 1  # +1 operation
    REMOVE = 2  # -1 operation
class ResourceType(IntEnum):
    """
    Description of available resources within the Knowledge Graph.

    These types are also used as predicates for the statements.

    Based on https://en.wikipedia.org/wiki/Dublin_Core
    """

    CONTRIBUTOR = 1
    COVERAGE = 2
    CREATOR = 3
    DATE = 4
    DESCRIPTION = 5
    FORMAT = 6
    IDENTIFIER = 7
    LANGUAGE = 8
    PUBLISHER = 9
    RELATION = 10
    RIGHTS = 11
    SOURCE = 12
    SUBJECT = 13
    TITLE = 14
    TYPE = 15

    # this is a section for extra (non-Dublin-Core) types
    TAG = 101
    TORRENT = 102
    CONTENT_ITEM = 103
@dataclass
class SimpleStatement:
    """
    A statement that reflects some (typed) attribute ``object`` of a given (typed) ``subject``.
    """

    subject_type: int  # ResourceType of the subject
    subject: str
    predicate: int  # ResourceType acting as the relation between subject and object
    object: str
class KnowledgeDataAccessLayer(Layer):
    """
    A database layer for knowledge.

    Stores a graph of (subject, predicate, object) statements together with the
    signed peer operations (ADD/REMOVE votes) that produced them.
    """

    def __init__(self, instance: orm.Database) -> None:
        """
        Create a new knowledge database layer.

        :param instance: the Pony ORM database to attach the entity bindings to.
        """
        self.logger = logging.getLogger(self.__class__.__name__)
        self.instance = instance
        # Defining the bindings registers the entities on ``instance`` as a side effect.
        self.Peer, self.Statement, self.Resource, self.StatementOp = self.define_binding(self.instance)

    @staticmethod
    def define_binding(db: Database) -> tuple[type[Peer], type[Statement], type[Resource], type[StatementOp]]:
        """
        Create the bindings for this layer.

        Note: the inner classes are Pony ORM entity definitions; merely defining
        them attaches them to ``db``.
        """
        class Peer(db.Entity):
            # A known peer, identified by its (unique) public key.
            id = orm.PrimaryKey(int, auto=True)
            public_key = orm.Required(bytes, unique=True)
            added_at = orm.Optional(datetime.datetime, default=datetime.datetime.utcnow)
            operations = orm.Set(lambda: StatementOp)

        class Statement(db.Entity):
            # A (subject, object) edge in the knowledge graph with vote counters.
            id = orm.PrimaryKey(int, auto=True)
            subject = orm.Required(lambda: Resource)
            object = orm.Required(lambda: Resource, index=True)
            operations = orm.Set(lambda: StatementOp)
            added_count = orm.Required(int, default=0)
            removed_count = orm.Required(int, default=0)
            local_operation = orm.Optional(int)  # the local user's own operation, in case they do(n't) want to see it

            orm.composite_key(subject, object)

            @property
            def score(self) -> int:
                # Net community vote: adds minus removes.
                return self.added_count - self.removed_count

            def update_counter(self, operation: Operation, increment: int = 1, is_local_peer: bool = False) -> None:
                """
                Update Statement's counter.

                :param operation: Resource operation.
                :param increment: the amount to add to the matching counter.
                :param is_local_peer: The flag indicates whether do we perform operations from a local user.
                """
                if is_local_peer:
                    # Remember the local user's own verdict separately from the counters.
                    self.local_operation = operation
                if operation == Operation.ADD:
                    self.added_count += increment
                if operation == Operation.REMOVE:
                    self.removed_count += increment

        class Resource(db.Entity):
            # A named, typed node in the knowledge graph (e.g. a torrent or a tag).
            id = orm.PrimaryKey(int, auto=True)
            name = orm.Required(str)
            type = orm.Required(int)  # ResourceType enum
            subject_statements = orm.Set(lambda: Statement, reverse="subject")
            object_statements = orm.Set(lambda: Statement, reverse="object")
            torrent_healths = orm.Set(lambda: db.TorrentHealth, reverse="torrent")
            trackers = orm.Set(lambda: db.Tracker, reverse="torrents")

            orm.composite_key(name, type)

        class StatementOp(db.Entity):
            # A signed ADD/REMOVE operation by a peer on a statement.
            id = orm.PrimaryKey(int, auto=True)
            statement = orm.Required(lambda: Statement)
            peer = orm.Required(lambda: Peer)
            operation = orm.Required(int)
            clock = orm.Required(int)  # per (peer, statement) version counter
            signature = orm.Required(bytes)
            updated_at = orm.Required(datetime.datetime, default=datetime.datetime.utcnow)
            auto_generated = orm.Required(bool, default=False)

            orm.composite_key(statement, peer)

        return Peer, Statement, Resource, StatementOp

    def _get_resources(self, resource_type: ResourceType | None, name: str | None, case_sensitive: bool) -> Query:
        """
        Get resources.

        :param resource_type: type of resources
        :param name: name of resources
        :param case_sensitive: if True, then Resources are selected in a case-sensitive manner.
        :returns: a Query object for requested resources
        """
        results = self.Resource.select()
        if name:
            results = results.filter(
                (lambda r: r.name == name) if case_sensitive else (lambda r: r.name.lower() == name.lower())
            )
        if resource_type:
            results = results.filter(lambda r: r.type == resource_type.value)
        return results

    def get_statements(self, source_type: ResourceType | None, source_name: str | None,
                       statements_getter: Callable[[Entity], Entity],
                       target_condition: Callable[[Statement], bool], condition: Callable[[Statement], bool],
                       case_sensitive: bool, ) -> Iterator[Statement]:
        """
        Get entities that satisfies the given condition.

        :param source_type: type of the resource to start from.
        :param source_name: name of the resource to start from.
        :param statements_getter: selects which statement set of the resource to traverse
            (``subject_statements`` or ``object_statements``).
        :param target_condition: filter applied on the statement's other endpoint.
        :param condition: visibility filter (e.g. the show-threshold condition).
        :param case_sensitive: if True, then Resources are selected in a case-sensitive manner.
        """
        for resource in self._get_resources(source_type, source_name, case_sensitive):
            # Highest-scoring statements first.
            results = orm.select(_ for _ in statements_getter(resource)
                                 .select(condition)
                                 .filter(target_condition)
                                 .order_by(lambda s: orm.desc(s.score)))
            yield from list(results)

    def add_operation(self, operation: StatementOperation, signature: bytes, is_local_peer: bool = False,
                      is_auto_generated: bool = False, counter_increment: int = 1) -> bool:
        """
        Add the operation that will be applied to a statement.

        :param operation: the class describes the adding operation
        :param signature: the signature of the operation
        :param is_local_peer: local operations processes differently than remote operations.
        :param is_auto_generated: the indicator of whether this resource was generated automatically or not
        :param counter_increment: the counter or "numbers" of adding operations
        :returns: True if the operation has been added/updated, False otherwise.
        """
        self.logger.debug('Add operation. %s "%s" %s',
                          str(operation.subject), str(operation.predicate), str(operation.object))
        # Ensure the peer, both resources and the statement row exist.
        peer = self.get_or_create(self.Peer, public_key=operation.creator_public_key)
        subject = self.get_or_create(self.Resource, name=operation.subject, type=operation.subject_type)
        obj = self.get_or_create(self.Resource, name=operation.object, type=operation.predicate)
        statement = self.get_or_create(self.Statement, subject=subject, object=obj)
        op = self.StatementOp.get_for_update(statement=statement, peer=peer)
        if not op:  # then insert
            self.StatementOp(statement=statement, peer=peer, operation=operation.operation,
                             clock=operation.clock, signature=signature, auto_generated=is_auto_generated)
            statement.update_counter(operation.operation, increment=counter_increment, is_local_peer=is_local_peer)
            return True
        # if it is a message from the past, then return
        if operation.clock <= op.clock:
            return False
        # To prevent endless incrementing of the operation, we apply the following logic:
        # 1. Decrement previous operation
        statement.update_counter(op.operation, increment=-counter_increment, is_local_peer=is_local_peer)
        # 2. Increment new operation
        statement.update_counter(operation.operation, increment=counter_increment, is_local_peer=is_local_peer)
        # 3. Update the operation entity
        op.set(operation=operation.operation, clock=operation.clock, signature=signature,
               updated_at=datetime.datetime.utcnow(), auto_generated=is_auto_generated)  # noqa: DTZ003
        return True

    def add_auto_generated_operation(self, subject_type: ResourceType, subject: str, predicate: ResourceType,
                                     obj: str) -> bool:
        """
        Add an autogenerated operation.

        The difference between "normal" and "autogenerated" operation is that the autogenerated operation will be added
        with the flag `is_auto_generated=True` and with the `PUBLIC_KEY_FOR_AUTO_GENERATED_TAGS` public key.

        :param subject_type: a type of adding subject. See: ResourceType enum.
        :param subject: a string that represents a subject of adding operation.
        :param predicate: the enum that represents a predicate of adding operation.
        :param obj: a string that represents an object of adding operation.
        :returns: True if the operation has been added/updated, False otherwise.
        """
        operation = StatementOperation(
            subject_type=subject_type,
            subject=subject,
            predicate=predicate,
            object=obj,
            operation=Operation.ADD,
            clock=CLOCK_START_VALUE,
            creator_public_key=PUBLIC_KEY_FOR_AUTO_GENERATED_OPERATIONS,
        )
        # Increment by SHOW_THRESHOLD so the statement is immediately visible.
        return self.add_operation(operation, signature=b"", is_local_peer=False, is_auto_generated=True,
                                  counter_increment=SHOW_THRESHOLD)

    @staticmethod
    def _show_condition(s: Statement) -> bool:
        """
        This function determines show condition for the statement.

        Show a statement if the local user added it, or if there is no local
        operation and the community score reaches SHOW_THRESHOLD.
        """
        return s.local_operation == Operation.ADD.value or not s.local_operation and s.score >= SHOW_THRESHOLD

    def get_objects(self, subject_type: ResourceType | None = None, subject: str | None = "",
                    predicate: ResourceType | None = None, case_sensitive: bool = True,
                    condition: Callable[[Statement], bool] | None = None) -> List[str]:
        """
        Get objects that satisfy the given subject and predicate.

        To understand the order of parameters, keep in mind the following generic construction:
        (<subject_type>, <subject>, <predicate>, <object>).
        So in the case of retrieving objects this construction becomes
        (<subject_type>, <subject>, <predicate>, ?).

        :param subject_type: a type of the subject.
        :param subject: a string that represents the subject.
        :param predicate: the enum that represents a predicate of querying operations.
        :param case_sensitive: if True, then Resources are selected in a case-sensitive manner.
        :param condition: an optional visibility filter; defaults to the show-threshold condition.
        :returns: a list of the strings representing the objects.
        """
        self.logger.debug("Get subjects for %s with %s", str(subject), str(predicate))
        statements = self.get_statements(
            source_type=subject_type,
            source_name=subject,
            statements_getter=lambda r: r.subject_statements,
            target_condition=(lambda s: s.object.type == predicate.value) if predicate else (lambda _: True),
            condition=condition or self._show_condition,
            case_sensitive=case_sensitive,
        )
        return [s.object.name for s in statements]

    def get_subjects(self, subject_type: ResourceType | None = None, predicate: ResourceType | None = None,
                     obj: str | None = "", case_sensitive: bool = True) -> List[str]:
        """
        Get subjects that satisfy the given object and predicate.

        To understand the order of parameters, keep in mind the following generic construction:
        (<subject_type>, <subject>, <predicate>, <object>).
        So in the case of retrieving subjects this construction becomes
        (<subject_type>, ?, <predicate>, <object>).

        :param subject_type: a type of the subject.
        :param obj: a string that represents the object.
        :param predicate: the enum that represents a predicate of querying operations.
        :param case_sensitive: if True, then Resources are selected in a case-sensitive manner.
        :returns: a list of the strings representing the subjects.
        """
        self.logger.debug("Get linked back resources for %s with %s", str(obj), str(predicate))
        statements = self.get_statements(
            source_type=predicate,
            source_name=obj,
            statements_getter=lambda r: r.object_statements,
            target_condition=(lambda s: s.subject.type == subject_type.value) if subject_type else (lambda _: True),
            condition=self._show_condition,
            case_sensitive=case_sensitive,
        )
        return [s.subject.name for s in statements]

    def get_simple_statements(self, subject_type: ResourceType | None = None, subject: str | None = "",
                              case_sensitive: bool = True) -> list[SimpleStatement]:
        """
        Get simple statements for the given subject search.

        :param subject_type: a type of the subject.
        :param subject: a string that represents the subject.
        :param case_sensitive: if True, then Resources are selected in a case-sensitive manner.
        :returns: plain SimpleStatement values detached from the database session.
        """
        statements = self.get_statements(
            source_type=subject_type,
            source_name=subject,
            statements_getter=lambda r: r.subject_statements,
            target_condition=lambda _: True,
            condition=self._show_condition,
            case_sensitive=case_sensitive,
        )
        results = []
        for s in statements:
            try:
                results.append(SimpleStatement(subject_type=s.subject.type, subject=s.subject.name,
                                               predicate=s.object.type, object=s.object.name))
            except UnrepeatableReadError as e:
                # The row changed or vanished under us (Pony optimistic check): skip it.
                self.logger.exception(e)
        return results

    def get_suggestions(self, subject_type: ResourceType | None = None, subject: str | None = "",
                        predicate: ResourceType | None = None, case_sensitive: bool = True) -> List[str]:
        """
        Get all suggestions for a particular subject.

        Suggestions are objects without a local operation whose score falls strictly
        between HIDE_THRESHOLD and SHOW_THRESHOLD (i.e. still "undecided").

        :param subject_type: a type of the subject.
        :param subject: a string that represents the subject.
        :param predicate: the enum that represents a predicate of querying operations.
        :param case_sensitive: if True, then Resources are selected in a case-sensitive manner.
        :returns: a list of the strings representing the objects.
        """
        self.logger.debug("Getting suggestions for %s with %s", str(subject), str(predicate))
        return self.get_objects(
            subject_type=subject_type,
            subject=subject,
            predicate=predicate,
            case_sensitive=case_sensitive,
            condition=lambda s: not s.local_operation and between(s.score, HIDE_THRESHOLD + 1, SHOW_THRESHOLD - 1)
        )

    def get_subjects_intersection(self, objects: Set[str],
                                  predicate: ResourceType | None,
                                  subjects_type: ResourceType = ResourceType.TORRENT,
                                  case_sensitive: bool = True) -> Set[str]:
        """
        Get all subjects that have a certain predicate.

        :param objects: the object names that must ALL be linked to each returned subject.
        :param predicate: the type of the objects.
        :param subjects_type: the type of the subjects to return.
        :param case_sensitive: if True, object names are matched case-sensitively.
        :returns: the names of the matching subjects.
        """
        if not objects:
            return set()
        if case_sensitive:
            name_condition = '"obj"."name" = $obj_name'
        else:
            name_condition = 'py_lower("obj"."name") = py_lower($obj_name)'
        query = select(r.name for r in self.Resource if r.type == subjects_type.value)
        for obj_name in objects:
            # One nested-IN filter per requested object: the result is the intersection.
            # The WHERE clause mirrors _show_condition in raw SQL form.
            query = query.filter(raw_sql("""
    r.id IN (
        SELECT "s"."subject"
        FROM "Statement" "s"
        WHERE (
            "s"."local_operation" = $(Operation.ADD.value)
            OR
            ("s"."local_operation" = 0 OR "s"."local_operation" IS NULL)
            AND ("s"."added_count" - "s"."removed_count") >= $SHOW_THRESHOLD
        ) AND "s"."object" IN (
            SELECT "obj"."id" FROM "Resource" "obj"
            WHERE "obj"."type" = $(predicate.value) AND $name_condition
        )
    )"""), globals={"obj_name": obj_name, "name_condition": name_condition, "SHOW_THRESHOLD": SHOW_THRESHOLD})
        return set(query)

    def get_clock(self, operation: StatementOperation) -> int:
        """
        Get the clock (int) of operation.

        Returns CLOCK_START_VALUE when the peer, resources, statement or
        operation do not exist yet.
        """
        peer = self.Peer.get(public_key=operation.creator_public_key)
        subject = self.Resource.get(name=operation.subject, type=operation.subject_type)
        obj = self.Resource.get(name=operation.object, type=operation.predicate)
        if not subject or not obj or not peer:
            return CLOCK_START_VALUE
        statement = self.Statement.get(subject=subject, object=obj)
        if not statement:
            return CLOCK_START_VALUE
        op = self.StatementOp.get(statement=statement, peer=peer)
        return op.clock if op else CLOCK_START_VALUE

    def get_operations_for_gossip(self, count: int = 10) -> set[Entity]:
        """
        Get random operations from the DB.

        Auto-generated operations are excluded: only real peer operations are gossiped.

        :param count: a limit for a resulting query
        """
        return self._get_random_operations_by_condition(
            condition=lambda so: not so.auto_generated,
            count=count
        )

    def _get_random_operations_by_condition(self, condition: Callable[[Entity], bool], count: int = 5,
                                            attempts: int = 100) -> set[Entity]:
        """
        Get `count` random operations that satisfy the given condition.

        This method was introduced as a fast alternative for the native Pony `random` method.

        :param condition: the condition by which the entities will be queried.
        :param count: the amount of entities to return.
        :param attempts: maximum attempt count for requesting the DB.
        :returns: a set of random operations (may contain fewer than `count` entries).
        """
        operations: set[Entity] = set()
        for _ in range(attempts):
            if len(operations) == count:
                return operations
            # Draw one random row and keep it only if it matches the condition.
            random_operations_list = self.StatementOp.select_random(1)
            if random_operations_list:
                operation = random_operations_list[0]
                if condition(operation):
                    operations.add(operation)
        return operations
| 21,968 | Python | .py | 447 | 38.651007 | 120 | 0.626792 | Tribler/tribler | 4,768 | 443 | 131 | GPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,017 | health.py | Tribler_tribler/src/tribler/core/database/layers/health.py | from __future__ import annotations
import logging
from binascii import hexlify
from datetime import datetime
from enum import IntEnum
from typing import TYPE_CHECKING
from pony import orm
from tribler.core.database.layers.layer import EntityImpl, Layer
if TYPE_CHECKING:
import dataclasses
from pony.orm import Database
from tribler.core.database.layers.knowledge import KnowledgeDataAccessLayer, Resource
from tribler.core.torrent_checker.dataclasses import HealthInfo
@dataclasses.dataclass
class TorrentHealth(EntityImpl):
    """
    Database type for torrent health information.

    Typing stub that mirrors the Pony ORM entity created in
    ``HealthDataAccessLayer.define_binding()``; only seen by type checkers.
    """

    id: int  # auto-generated primary key
    torrent: Resource
    seeders: int
    leechers: int
    source: int  # a Source enum value
    tracker: Tracker | None
    last_check: datetime

    def __init__(self, torrent: Resource) -> None: ...  # noqa: D107

    @staticmethod
    def get(torrent: Resource) -> TorrentHealth | None: ...  # noqa: D102

    @staticmethod
    def get_for_update(torrent: Resource) -> TorrentHealth | None: ...  # noqa: D102
@dataclasses.dataclass
class Tracker(EntityImpl):
    """
    Database type for tracker definitions.

    Typing stub that mirrors the Pony ORM entity created in
    ``HealthDataAccessLayer.define_binding()``; only seen by type checkers.
    """

    id: int  # auto-generated primary key
    url: str
    last_check: datetime | None
    alive: bool
    failures: int
    # Fixed: these two fields used assignments (``torrents = set[Resource]``),
    # which made them class attributes holding a generic alias instead of
    # annotated dataclass fields, unlike every other stub in these layers.
    torrents: set[Resource]
    torrent_health_set: set[TorrentHealth]

    def __init__(self, url: str) -> None: ...  # noqa: D107

    @staticmethod
    def get(url: str) -> Tracker | None: ...  # noqa: D102

    @staticmethod
    def get_for_update(url: str) -> Tracker | None: ...  # noqa: D102
class ResourceType(IntEnum):
    """
    Description of available resources within the Knowledge Graph.

    These types are also used as predicates for the statements.

    Based on https://en.wikipedia.org/wiki/Dublin_Core
    """

    CONTRIBUTOR = 1
    COVERAGE = 2
    CREATOR = 3
    DATE = 4
    DESCRIPTION = 5
    FORMAT = 6
    IDENTIFIER = 7
    LANGUAGE = 8
    PUBLISHER = 9
    RELATION = 10
    RIGHTS = 11
    SOURCE = 12
    SUBJECT = 13
    TITLE = 14
    TYPE = 15

    # this is a section for extra (non-Dublin-Core) types
    TAG = 101
    TORRENT = 102
    CONTENT_ITEM = 103
class HealthDataAccessLayer(Layer):
    """
    A layer that stores health information.
    """

    def __init__(self, knowledge_layer: KnowledgeDataAccessLayer) -> None:
        """
        Create a new health layer and initialize its bindings.

        :param knowledge_layer: the knowledge layer whose database instance (and
            Resource entity) this layer attaches to.
        """
        self.logger = logging.getLogger(self.__class__.__name__)
        self.instance = knowledge_layer.instance
        self.Resource = knowledge_layer.Resource
        self.TorrentHealth, self.Tracker = self.define_binding(self.instance)

    def get_torrent_health(self, infohash: str) -> TorrentHealth | None:
        """
        Get the health belonging to the given infohash.

        :param infohash: hex-encoded infohash of the torrent.
        :returns: the health entity, or None when the torrent is unknown.
        """
        if torrent := self.Resource.get(name=infohash, type=ResourceType.TORRENT):
            return self.TorrentHealth.get(torrent=torrent)
        return None

    @staticmethod
    def define_binding(db: Database) -> tuple[type[TorrentHealth], type[Tracker]]:
        """
        Create the bindings for this layer.

        Note: defining the inner Pony entity classes attaches them to ``db``.
        """
        class TorrentHealth(db.Entity):
            # Seeder/leecher snapshot for one torrent Resource.
            id = orm.PrimaryKey(int, auto=True)
            torrent = orm.Required(lambda: db.Resource, index=True)
            seeders = orm.Required(int, default=0)
            leechers = orm.Required(int, default=0)
            source = orm.Required(int, default=0)  # Source enum
            tracker = orm.Optional(lambda: Tracker)
            last_check = orm.Required(datetime, default=datetime.utcnow)

        class Tracker(db.Entity):
            # A tracker URL and its availability bookkeeping.
            id = orm.PrimaryKey(int, auto=True)
            url = orm.Required(str, unique=True)
            last_check = orm.Optional(datetime)
            alive = orm.Required(bool, default=True)
            failures = orm.Required(int, default=0)
            torrents = orm.Set(lambda: db.Resource)
            torrent_health_set = orm.Set(lambda: TorrentHealth, reverse='tracker')

        return TorrentHealth, Tracker

    def add_torrent_health(self, health_info: HealthInfo) -> None:
        """
        Store the given health info in the database.

        Creates the torrent Resource, its TorrentHealth row and (optionally) the
        Tracker row when they do not exist yet, then overwrites the counters.
        """
        # NOTE(review): hexlify() returns bytes, while Resource.name is declared
        # orm.Required(str) and get_torrent_health() queries with a str infohash —
        # confirm Pony coerces this, otherwise a .decode() seems to be needed here.
        torrent = self.get_or_create(
            self.Resource,
            name=hexlify(health_info.infohash),
            type=ResourceType.TORRENT
        )
        torrent_health = self.get_or_create(
            self.TorrentHealth,
            torrent=torrent
        )
        torrent_health.seeders = health_info.seeders
        torrent_health.leechers = health_info.leechers
        if health_info.tracker:
            torrent_health.tracker = self.get_or_create(
                self.Tracker,
                url=health_info.tracker
            )
        torrent_health.source = health_info.source
        torrent_health.last_check = datetime.utcfromtimestamp(health_info.last_check)  # noqa: DTZ004
| 5,002 | Python | .py | 135 | 28.851852 | 101 | 0.633823 | Tribler/tribler | 4,768 | 443 | 131 | GPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,018 | torrent_state.py | Tribler_tribler/src/tribler/core/database/orm_bindings/torrent_state.py | from __future__ import annotations
from typing import TYPE_CHECKING
from pony import orm
from typing_extensions import Self
from tribler.core.torrent_checker.dataclasses import HealthInfo
if TYPE_CHECKING:
from dataclasses import dataclass
from pony.orm import Database
from tribler.core.database.orm_bindings.torrent_metadata import TorrentMetadata
from tribler.core.database.orm_bindings.tracker_state import TrackerState
@dataclass
class TorrentState:
    """
    Database type for the state of a torrent.

    Typing stub that mirrors the Pony ORM entity created by ``define_binding()``;
    only seen by type checkers.
    """

    rowid: int  # auto-generated primary key
    infohash: bytes
    seeders: int | None
    leechers: int | None
    last_check: int | None  # UNIX timestamp of the last health check
    self_checked: bool | None  # True when this client measured the health itself
    has_data: bool
    metadata: set[TorrentMetadata]  # metadata entries sharing this swarm
    trackers: set[TrackerState]

    def __init__(self, infohash: bytes) -> None: ...  # noqa: D107

    @staticmethod
    def get(infohash: bytes) -> TorrentState | None: ...  # noqa: D102

    @staticmethod
    def get_for_update(infohash: bytes) -> TorrentState | None: ...  # noqa: D102
def define_binding(db: Database) -> type[TorrentState]:
    """
    Define the torrent state binding.
    """
    class TorrentState(db.Entity):
        """
        This ORM class represents torrent swarms. It is used by HealthChecker.
        """

        rowid = orm.PrimaryKey(int, auto=True)
        infohash = orm.Required(bytes, unique=True)
        seeders = orm.Optional(int, default=0)
        leechers = orm.Optional(int, default=0)
        last_check = orm.Optional(int, size=64, default=0)
        self_checked = orm.Optional(bool, default=False, sql_default='0')
        has_data = orm.Required(bool, default=False, sql_default='0', volatile=True)
        metadata = orm.Set('TorrentMetadata', reverse='health')
        trackers = orm.Set('TrackerState', reverse='torrents')

        @classmethod
        def from_health(cls: type[Self], health: HealthInfo) -> Self:
            # Build a swarm row from a measured HealthInfo snapshot.
            return cls(infohash=health.infohash, seeders=health.seeders, leechers=health.leechers,
                       last_check=health.last_check, self_checked=health.self_checked)

        def to_health(self) -> HealthInfo:
            # Inverse of from_health: export the row as a HealthInfo value object.
            return HealthInfo(self.infohash, self.seeders, self.leechers, self.last_check, self.self_checked)

    return TorrentState
| 2,353 | Python | .py | 53 | 36.471698 | 109 | 0.667982 | Tribler/tribler | 4,768 | 443 | 131 | GPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,019 | torrent_metadata.py | Tribler_tribler/src/tribler/core/database/orm_bindings/torrent_metadata.py | from __future__ import annotations
import random
from binascii import hexlify, unhexlify
from datetime import datetime
from struct import unpack
from typing import TYPE_CHECKING, Any
from lz4.frame import LZ4FrameCompressor
from pony import orm
from pony.orm import Database, db_session
from typing_extensions import Self
from tribler.core.database.serialization import (
EPOCH,
REGULAR_TORRENT,
HealthItemsPayload,
TorrentMetadataPayload,
time2int,
)
from tribler.core.libtorrent.trackers import get_uniformed_tracker_url
from tribler.core.notifier import Notification, Notifier
NULL_KEY_SUBST = b"\00"

CHANNEL_DIR_NAME_PK_LENGTH = 32  # It's not 40, so it could be distinguished from infohash
CHANNEL_DIR_NAME_ID_LENGTH = 16  # Zero-padded long int in hex form
CHANNEL_DIR_NAME_LENGTH = CHANNEL_DIR_NAME_PK_LENGTH + CHANNEL_DIR_NAME_ID_LENGTH

# Serialized form of an empty LZ4 frame (no payload).
LZ4_EMPTY_ARCHIVE = unhexlify("04224d184040c000000000")
LZ4_END_MARK_SIZE = 4  # in bytes, from original specification. We don't use CRC
HEALTH_ITEM_HEADER_SIZE = 4  # in bytes, len of varlenI header

# Metadata, torrents and channel statuses
NEW = 0  # The entry is newly created and is not published yet. It will be committed at the next commit.
TODELETE = 1  # The entry is marked to be removed at the next commit.
COMMITTED = 2  # The entry is committed and seeded.
UPDATED = 6  # One of the entry's properties was updated. It will be committed at the next commit.

PUBLIC_KEY_LEN = 64
if TYPE_CHECKING:
from dataclasses import dataclass
from tribler.core.database.orm_bindings.torrent_state import TorrentState
from tribler.core.libtorrent.torrentdef import TorrentDef
@dataclass
class TorrentMetadata:
    """
    Database type for torrent metadata.

    Typing stub that mirrors the Pony ORM entity created by ``define_binding()``;
    only seen by type checkers.
    """

    rowid: int  # auto-generated primary key
    infohash: bytes
    size: int | None
    torrent_date: datetime | None
    tracker_info: str | None
    title: str | None
    tags: str | None
    metadata_type: int  # discriminator; REGULAR_TORRENT for this class
    reserved_flags: int | None
    origin_id: int | None
    public_key: bytes  # b"" for free-for-all (unsigned) entries
    id_: int
    timestamp: int
    signature: bytes | None  # None for unsigned entries
    added_on: datetime | None
    status: int | None  # one of NEW/TODELETE/COMMITTED/UPDATED
    xxx: float | None
    health: TorrentState | None
    tag_processor_version: int

    def serialized_health(self) -> bytes: ...  # noqa: D102

    def serialized(self, key: bytes | None = None) -> bytes: ...  # noqa: D102

    def to_simple_dict(self) -> dict[str, str | bytes | float | None]: ...  # noqa: D102
def infohash_to_id(infohash: bytes) -> int:
    """
    This function is used to devise id_ from infohash in deterministic way. Used in FFA channels.
    """
    # Interpret the first 8 bytes as a big-endian signed 64-bit integer,
    # then drop the sign so the id is always non-negative.
    (signed_prefix,) = unpack(">q", infohash[:8])
    return abs(signed_prefix)
def tdef_to_metadata_dict(tdef: TorrentDef) -> dict:
    """
    Helper function to create a TorrentMetadata-compatible dict from TorrentDef.
    """
    category = "Unknown"

    # A missing or malformed creation date falls back to the epoch sentinel.
    try:
        creation_date = datetime.fromtimestamp(tdef.get_creation_date())  # noqa: DTZ006
    except (ValueError, TypeError):
        creation_date = EPOCH

    # Anything that is not raw bytes counts as "no tracker".
    raw_tracker = tdef.get_tracker()
    tracker_url = raw_tracker.decode() if isinstance(raw_tracker, bytes) else ""
    sanitized_tracker = get_uniformed_tracker_url(tracker_url) or ''

    return {
        "infohash": tdef.get_infohash(),
        "title": tdef.get_name_as_unicode()[:300],
        "tags": category[:200],
        "size": tdef.get_length(),
        # Dates before the epoch sentinel are clamped to it.
        "torrent_date": max(creation_date, EPOCH),
        "tracker_info": sanitized_tracker,
    }
def entries_to_chunk(metadata_list: list[TorrentMetadata], chunk_size: int, start_index: int = 0,
                     include_health: bool = False) -> tuple[bytes, int]:
    """
    Put serialized data of one or more metadata entries into a single binary chunk. The data is added
    incrementally until it stops fitting into the designated chunk size. The first entry is added
    regardless of violating the chunk size limit.

    The chunk format is:
        <LZ4-compressed sequence of serialized metadata entries>
        [<optional HealthItemsPayload>]

    For the details of the health info format see the documentation: doc/metadata_store/serialization_format.rst

    :param metadata_list: the list of metadata to process.
    :param chunk_size: the desired chunk size limit, in bytes.
    :param start_index: the index of the element of metadata_list from which the processing should start.
    :param include_health: if True, put metadata health information into the chunk.
    :return: (chunk, last_entry_index) tuple, where chunk is the resulting chunk in string form and
        last_entry_index is the index of the element of the input list that was put into the chunk the last.
    """
    if start_index >= len(metadata_list):
        msg = "Could not serialize chunk: incorrect start_index"
        raise Exception(msg, metadata_list, chunk_size, start_index)
    compressor = LZ4FrameCompressor(auto_flush=True)
    metadata_buffer = compressor.begin()
    health_buffer = b''
    index = 0
    # Account for the LZ4 frame header + end mark (and optional health header) up front.
    size = len(metadata_buffer) + LZ4_END_MARK_SIZE
    if include_health:
        size += HEALTH_ITEM_HEADER_SIZE
    for count in range(start_index, len(metadata_list)):
        metadata = metadata_list[count]
        metadata_bytes = compressor.compress(metadata.serialized())
        health_bytes = metadata.serialized_health() if include_health else b''
        size += len(metadata_bytes) + len(health_bytes)
        # NOTE(review): `count > 0` makes the break fire even on the FIRST processed
        # entry whenever start_index > 0, contradicting the "first entry is always
        # added" contract below (and returning index 0 + 1). Should this be
        # `count > start_index`? Confirm against callers.
        if size > chunk_size and count > 0:
            # The first entry is always added even if the resulted size exceeds the chunk size.
            # This lets higher levels decide what to do in this case, e.g. send it through EVA protocol.
            break
        metadata_buffer += metadata_bytes
        if include_health:
            health_buffer += health_bytes
        index = count
    result = metadata_buffer + compressor.flush()
    if include_health:
        result += HealthItemsPayload(health_buffer).serialize()
    return result, index + 1
def define_binding(db: Database, notifier: Notifier | None, # noqa: C901
tag_processor_version: int) -> type[TorrentMetadata]:
"""
Define the torrent metadata binding.
"""
class TorrentMetadata(db.Entity):
"""
This ORM binding class is intended to store Torrent objects, i.e. infohashes along with some related metadata.
"""
_discriminator_ = REGULAR_TORRENT
_table_ = "ChannelNode"
_CHUNK_SIZE_LIMIT = 1 * 1024 * 1024 # We use 1MB chunks as a workaround for Python's lack of string pointers
rowid = orm.PrimaryKey(int, size=64, auto=True)
# Serializable
infohash = orm.Required(bytes, index=True)
size = orm.Optional(int, size=64, default=0)
torrent_date = orm.Optional(datetime, default=datetime.utcnow, index=True)
tracker_info = orm.Optional(str, default='')
title = orm.Optional(str, default='')
tags = orm.Optional(str, default='')
metadata_type = orm.Discriminator(int, size=16)
reserved_flags = orm.Optional(int, size=16, default=0)
origin_id = orm.Optional(int, size=64, default=0, index=True)
public_key = orm.Required(bytes)
id_ = orm.Required(int, size=64)
timestamp = orm.Required(int, size=64, default=0)
# Signature is nullable. This means that "None" entries are stored in DB as NULLs instead of empty strings.
# NULLs are not checked for uniqueness and not indexed.
# This is necessary to store unsigned signatures without violating the uniqueness constraints.
signature = orm.Optional(bytes, unique=True, nullable=True, default=None)
orm.composite_key(public_key, id_)
orm.composite_index(public_key, origin_id)
# Local
added_on = orm.Optional(datetime, default=datetime.utcnow)
status = orm.Optional(int, default=COMMITTED)
xxx = orm.Optional(float, default=0)
health = orm.Optional('TorrentState', reverse='metadata')
tag_processor_version = orm.Required(int, default=0)
# Special class-level properties
payload_class = TorrentMetadataPayload
def __init__(self, *args: Any, **kwargs) -> None: # noqa: ANN401
# Any public keys + signatures are considered to be correct at this point, and should
# be checked after receiving the payload from the network.
if "health" not in kwargs and "infohash" in kwargs:
infohash = kwargs["infohash"]
health = db.TorrentState.get_for_update(infohash=infohash) or db.TorrentState(infohash=infohash)
kwargs["health"] = health
if "timestamp" not in kwargs:
kwargs["timestamp"] = time2int(datetime.utcnow()) * 1000 # noqa: DTZ003
if "id_" not in kwargs:
kwargs["id_"] = int(random.getrandbits(63))
# Free-for-all entries require special treatment
kwargs["public_key"] = kwargs.get("public_key", b"")
if kwargs["public_key"] == b"":
# We have to give the entry an unique sig to honor the DB constraints. We use the entry's id_
# as the sig to keep it unique and short. The uniqueness is guaranteed by DB as it already
# imposes uniqueness constraints on the id_+public_key combination.
kwargs["signature"] = None
super().__init__(*args, **kwargs)
if 'tracker_info' in kwargs:
self.add_tracker(kwargs["tracker_info"])
if notifier:
notifier.notify(Notification.new_torrent_metadata_created,
infohash=kwargs.get("infohash"), title=self.title)
self.tag_processor_version = tag_processor_version
def add_tracker(self, tracker_url: str) -> None:
sanitized_url = get_uniformed_tracker_url(tracker_url)
if sanitized_url:
tracker = db.TrackerState.get_for_update(url=sanitized_url) or db.TrackerState(url=sanitized_url)
self.health.trackers.add(tracker)
def before_update(self) -> None:
self.add_tracker(self.tracker_info)
def get_magnet(self) -> str:
return f"magnet:?xt=urn:btih:{hexlify(self.infohash).decode()}&dn={self.title}" + (
f"&tr={self.tracker_info}" if self.tracker_info else ""
)
@classmethod
@db_session
def add_ffa_from_dict(cls: type[Self], metadata: dict) -> Self | None:
# To produce a relatively unique id_ we take some bytes of the infohash and convert these to a number.
# abs is necessary as the conversion can produce a negative value, and we do not support that.
id_ = infohash_to_id(metadata["infohash"])
# Check that this torrent is yet unknown to GigaChannel, and if there is no duplicate FFA entry.
# Test for a duplicate id_+public_key is necessary to account for a (highly improbable) situation when
# two entries have different infohashes but the same id_. We do not want people to exploit this.
ih_blob = metadata["infohash"]
pk_blob = b""
if cls.exists(lambda g: (g.infohash == ih_blob) or (g.id_ == id_ and g.public_key == pk_blob)):
return None
# Add the torrent as a free-for-all entry if it is unknown to GigaChannel
return cls.from_dict(dict(metadata, public_key=b'', status=COMMITTED, id_=id_))
@db_session
def to_simple_dict(self) -> dict[str, str | float]:
"""
Return a basic dictionary with information about the channel.
"""
epoch = datetime.utcfromtimestamp(0) # noqa: DTZ004
return {
"name": self.title,
"category": self.tags,
"infohash": hexlify(self.infohash).decode(),
"size": self.size,
"num_seeders": self.health.seeders,
"num_leechers": self.health.leechers,
"last_tracker_check": self.health.last_check,
"created": int((self.torrent_date - epoch).total_seconds()),
"tag_processor_version": self.tag_processor_version,
"type": self.get_type(),
"id": self.id_,
"origin_id": self.origin_id,
"public_key": hexlify(self.public_key).decode(),
"status": self.status,
}
def get_type(self) -> int:
return self._discriminator_
@classmethod
def from_payload(cls: type[Self], payload: TorrentMetadataPayload) -> Self:
    """
    Construct an entry from a (deserialized) TorrentMetadataPayload.
    """
    return cls(**payload.to_dict())
@classmethod
def from_dict(cls: type[Self], dct: dict) -> Self:
    """
    Construct an entry directly from a keyword dict.
    """
    return cls(**dct)
@classmethod
@db_session
def get_with_infohash(cls: type[Self], infohash: bytes) -> Self | None:
    """
    Return the first entry with the given infohash, or None if there is none.
    """
    # .first() yields None on an empty result set.
    return cls.select(lambda g: g.infohash == infohash).first()
@classmethod
@db_session
def get_torrent_title(cls: type[Self], infohash: bytes) -> str | None:
    """
    Return the title of the entry with the given infohash, or None if unknown.
    """
    md = cls.get_with_infohash(infohash)
    return md.title if md else None
def serialized_health(self) -> bytes:
    """
    Compactly serialize this entry's health as ``b"seeders,leechers,last_check;"``.

    An absent or all-zero health record serializes to just ``b";"``.
    """
    health = self.health
    has_data = health and (health.seeders or health.leechers or health.last_check)
    if not has_data:
        return b";"
    return b"%d,%d,%d;" % (health.seeders or 0, health.leechers or 0, health.last_check or 0)
def serialized(self, key: bytes | None = None) -> bytes:
    """
    Serializes the object and returns the result with added signature (blob output).

    :param key: private key to sign object with
    :return: serialized_data+signature binary string
    """
    kwargs = self.to_dict()
    payload = self.payload_class.from_dict(**kwargs)
    # Prefer a signature already present in the dict; otherwise keep the payload's own.
    payload.signature = kwargs.pop("signature", None) or payload.signature
    if key:
        # Re-sign with the supplied private key, replacing any old signature.
        payload.add_signature(key)
    return payload.serialized() + payload.signature
return TorrentMetadata
| 14,336 | Python | .py | 282 | 41.404255 | 118 | 0.642765 | Tribler/tribler | 4,768 | 443 | 131 | GPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,020 | tracker_state.py | Tribler_tribler/src/tribler/core/database/orm_bindings/tracker_state.py | from __future__ import annotations
from typing import TYPE_CHECKING, Any
from pony import orm
from tribler.core.libtorrent.trackers import MalformedTrackerURLException, get_uniformed_tracker_url
if TYPE_CHECKING:
from dataclasses import dataclass
from pony.orm import Database
from tribler.core.database.orm_bindings.torrent_state import TorrentState
@dataclass
class TrackerState:
    """
    Database type for the state of a tracker.

    This is a typing-time stub (defined under TYPE_CHECKING); the real Pony
    entity is created in define_binding() below.
    """

    rowid: int  # auto-increment primary key
    url: str  # canonical tracker URL (unique)
    last_check: int | None  # timestamp of the last check (default 0)
    alive: bool | None  # whether the tracker is considered reachable (default True)
    torrents: set[TorrentState]  # torrents associated with this tracker
    failures: int | None  # failure counter (default 0)

    def __init__(self, url: str) -> None: ...  # noqa: D107

    @staticmethod
    def get(url: str) -> TrackerState | None: ...  # noqa: D102

    @staticmethod
    def get_for_update(url: str) -> TrackerState | None: ...  # noqa: D102
def define_binding(db: Database) -> type[TrackerState]:
    """
    Define the tracker state binding.

    :param db: the Pony database object to attach the entity to
    :return: the freshly created TrackerState entity class
    """

    class TrackerState(db.Entity):
        """
        This ORM class holds information about torrent trackers that TorrentChecker got while checking
        torrents' health.
        """

        rowid = orm.PrimaryKey(int, auto=True)
        url = orm.Required(str, unique=True)
        last_check = orm.Optional(int, size=64, default=0)
        alive = orm.Optional(bool, default=True)
        torrents = orm.Set('TorrentState', reverse='trackers')
        failures = orm.Optional(int, size=32, default=0)

        def __init__(self, *args: Any, **kwargs) -> None:  # noqa: ANN401
            """
            Create a tracker state entry; the 'url' kwarg is canonicalized first.

            :raises MalformedTrackerURLException: if the URL cannot be canonicalized.
            """
            # Sanitize and canonicalize the tracker URL
            sanitized = get_uniformed_tracker_url(kwargs['url'])
            if sanitized:
                kwargs['url'] = sanitized
            else:
                msg = f"Could not canonicalize tracker URL ({kwargs['url']})"
                raise MalformedTrackerURLException(msg)
            super().__init__(*args, **kwargs)

    return TrackerState
| 2,012 | Python | .py | 49 | 32.571429 | 102 | 0.630658 | Tribler/tribler | 4,768 | 443 | 131 | GPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,021 | __init__.py | Tribler_tribler/src/tribler/core/database/orm_bindings/__init__.py | """
This subpackage contains bindings of various Tribler metadata classes for PonyORM.
"""
| 91 | Python | .py | 3 | 29.333333 | 82 | 0.806818 | Tribler/tribler | 4,768 | 443 | 131 | GPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,022 | misc.py | Tribler_tribler/src/tribler/core/database/orm_bindings/misc.py | from __future__ import annotations
from typing import TYPE_CHECKING
from pony import orm
if TYPE_CHECKING:
import dataclasses
from pony.orm import Database
@dataclasses.dataclass
class MiscData:
    """
    A miscellaneous key value mapping in the database.

    This is a typing-time stub (defined under TYPE_CHECKING); the real Pony
    entity is created in define_binding() below.
    """

    name: str  # the key (primary key)
    value: str | None  # the stored string value

    def __init__(self, name: str) -> None: ...  # noqa: D107

    @staticmethod
    def get(name: str) -> MiscData | None: ...  # noqa: D102

    @staticmethod
    def get_for_update(name: str) -> MiscData | None: ...  # noqa: D102
def define_binding(db: Database) -> type[MiscData]:
    """
    Define the misc data binding.

    :param db: the Pony database object to attach the entity to
    :return: the freshly created MiscData entity class
    """

    class MiscData(db.Entity):
        """
        This binding is used to store all kinds of values, like DB version, counters, etc.
        """

        # 'name' acts as the key, 'value' as the (string) payload.
        name = orm.PrimaryKey(str)
        value = orm.Optional(str)

    return MiscData
| 940 | Python | .py | 29 | 25.517241 | 90 | 0.614955 | Tribler/tribler | 4,768 | 443 | 131 | GPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,023 | trackers.py | Tribler_tribler/src/tribler/core/libtorrent/trackers.py | from __future__ import annotations
import re
from http.client import HTTP_PORT, HTTPS_PORT
from json import dumps
from urllib.parse import ParseResult, parse_qsl, unquote, urlencode, urlparse, urlsplit
# URL scheme identifiers for the tracker types Tribler supports.
UDP = "udp"
HTTP = "http"
HTTPS = "https"

SUPPORTED_SCHEMES = {UDP, HTTP, HTTPS}
# Ports implied by the scheme; these are omitted from uniformed URLs.
DEFAULT_PORTS = {HTTP: HTTP_PORT, HTTPS: HTTPS_PORT}
class MalformedTrackerURLException(Exception):
    """
    The tracker URL is not valid.

    Raised by the URL parsing helpers in this module when a tracker URL cannot
    be canonicalized or violates scheme-specific constraints.
    """
# Splits raw tracker text into candidate URLs on whitespace, NUL, ';' and
# any trailing percent-encoded spaces.
delimiters_regex = re.compile(r'[\r\n\x00\s\t;]+(%20)*')
# Rough structural check for a tracker URL: scheme, host/IP, optional port/path.
url_regex = re.compile(
    r"^(?:http|udp|wss)s?://"  # http:// or https://
    r"(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|"  # domain...
    r"localhost|"  # localhost...
    r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})"  # ...or ip
    r"(?::\d+)?"  # optional port
    r"(?:/?|[/?]\S+)$", re.IGNORECASE)
# Punctuation that frequently trails pasted URLs and should be stripped.
remove_trailing_junk = re.compile(r"[,*.:]+\Z")
# Candidates containing "..." were visually truncated and are unusable.
truncated_url_detector = re.compile(r"\.\.\.")
def get_uniformed_tracker_url(tracker_url: str) -> str | None:
    """
    Canonicalize a tracker URL into the uniform <scheme>://<host>:<port>/<path> format.

    The input may contain several delimiter-separated candidates; the first one
    that survives sanitization wins.

    :param tracker_url: Tracker URL
    :return: the tracker in a uniform format <type>://<host>:<port>/<page>, or None
    """
    assert isinstance(tracker_url, str), f"tracker_url is not a str: {type(tracker_url)}"

    for candidate in re.split(delimiters_regex, tracker_url):
        # re.split may yield None (optional group) or empty strings.
        if not candidate:
            continue
        # Skip URLs that were visibly truncated.
        if re.search(truncated_url_detector, candidate):
            continue
        # Cheap structural check before the full parse.
        if not re.match(url_regex, candidate):
            continue
        try:
            cleaned = re.sub(remove_trailing_junk, "", candidate)
            scheme, (host, port), path = _parse_tracker_url(cleaned)
        except MalformedTrackerURLException:
            continue
        if scheme == UDP:
            return f"{scheme}://{host}:{port}"
        if scheme in {HTTP, HTTPS}:
            # HTTP(S) trackers must have a (non-empty) announce path.
            stripped_path = path.rstrip("/")
            if not stripped_path:
                continue
            port_part = "" if port == DEFAULT_PORTS[scheme] else f":{port}"
            return f"{scheme}://{host}{port_part}{stripped_path}"
    return None
def parse_tracker_url(tracker_url: str) -> tuple[str, tuple[str, int], str]:
    """
    Parses the tracker URL and checks whether it satisfies tracker URL constraints.
    Additionally, it also checks if the tracker URL is a uniform and valid URL.

    :param tracker_url: the URL of the tracker
    :returns: Tuple (scheme, (host, port), announce_path)
    :raises MalformedTrackerURLException: if the URL is not in its uniformed form
    """
    http_prefix = f"{HTTP}://"
    http_port_suffix = f":{HTTP_PORT}/"
    https_prefix = f"{HTTPS}://"
    https_port_suffix = f":{HTTPS_PORT}/"

    # Strip an explicit default port (":80/" or ":443/") so the URL can match
    # its uniformed counterpart below; only the first occurrence is replaced.
    url = tracker_url.lower()
    if url.startswith(http_prefix) and http_port_suffix in url:
        tracker_url = tracker_url.replace(http_port_suffix, "/", 1)

    if url.startswith(https_prefix) and https_port_suffix in url:
        tracker_url = tracker_url.replace(https_port_suffix, "/", 1)

    if tracker_url != get_uniformed_tracker_url(tracker_url):
        msg = f"Tracker URL is not sanitized ({tracker_url})."
        raise MalformedTrackerURLException(msg)

    return _parse_tracker_url(tracker_url)
def _parse_tracker_url(tracker_url: str) -> tuple[str, tuple[str, int], str]:
    """
    Split a tracker URL into its components and enforce scheme-specific constraints.

    - Only the supported schemes (udp, http, https) are accepted.
    - UDP trackers must specify a port.
    - HTTP(S) trackers must have an announce path; a missing port falls back
      to the scheme's default port.

    :param tracker_url: the URL of the tracker
    :returns: Tuple (scheme, (host, port), announce_path)
    :raises MalformedTrackerURLException: on any constraint violation
    """
    parsed = urlparse(tracker_url)
    scheme, host, port, path = parsed.scheme, parsed.hostname, parsed.port, parsed.path

    if host is None:
        msg = f"Could not resolve hostname from {tracker_url}."
        raise MalformedTrackerURLException(msg)
    if scheme not in SUPPORTED_SCHEMES:
        msg = f"Unsupported tracker type ({scheme})."
        raise MalformedTrackerURLException(msg)
    if scheme == UDP and not port:
        msg = f"Missing port for UDP tracker URL ({tracker_url})."
        raise MalformedTrackerURLException(msg)
    if scheme in {HTTP, HTTPS}:
        if not path:
            msg = f"Missing announce path for HTTP(S) tracker URL ({tracker_url})."
            raise MalformedTrackerURLException(msg)
        port = port or DEFAULT_PORTS[scheme]

    return scheme, (host, port or 0), path
def add_url_params(url: str, params: dict) -> str:
    """
    Add GET params to provided URL being aware of existing.

    >> url = 'http://stackoverflow.com/test?answers=true'
    >> new_params = {'answers': False, 'data': ['some','values']}
    >> add_url_params(url, new_params)
    'http://stackoverflow.com/test?data=some&data=values&answers=false'

    :param url: string of target URL
    :param params: dict containing requested params to be added
    :return: string with updated URL
    """
    # Unquote first so pre-encoded arguments are not lost.
    parsed = urlparse(unquote(url))
    # Merge the existing query arguments with the new parameters.
    merged = dict(parse_qsl(parsed.query))
    merged.update(params)
    # Bool and dict values are converted to JSON so they survive the round trip.
    merged.update({key: dumps(value) for key, value in merged.items() if isinstance(value, (bool, dict))})
    # Re-encode the combined arguments and rebuild the URL around them.
    encoded_query = urlencode(merged, doseq=True)
    rebuilt = ParseResult(parsed.scheme, parsed.netloc, parsed.path,
                          parsed.params, encoded_query, parsed.fragment)
    return rebuilt.geturl()
def is_valid_url(url: str) -> bool:
    """
    Checks whether the given URL is a valid URL.

    Both UDP and HTTP(S) URLs will be validated correctly.

    :param url: an object representing the URL
    :return: True when the URL has both a scheme and a network location
    """
    # URLs containing whitespace are never valid. The previous implementation
    # returned None here instead of False; both are falsy, so callers are
    # unaffected, but the function now honours its boolean contract.
    if " " in url.strip():
        return False
    # urlsplit does not recognize the udp scheme, so validate it as http.
    if url.lower().startswith("udp"):
        url = url.lower().replace("udp", "http", 1)
    split_url = urlsplit(url)

    return not (split_url[0] == "" or split_url[1] == "")
| 6,975 | Python | .py | 156 | 38.076923 | 111 | 0.648162 | Tribler/tribler | 4,768 | 443 | 131 | GPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,024 | torrentdef.py | Tribler_tribler/src/tribler/core/libtorrent/torrentdef.py | """
Author(s): Arno Bakker.
"""
from __future__ import annotations
import itertools
import logging
from asyncio import get_running_loop
from contextlib import suppress
from functools import cached_property
from hashlib import sha1
from pathlib import Path
from typing import TYPE_CHECKING, Any, Generator, Iterable, Literal, cast, overload
import aiohttp
import libtorrent as lt
from tribler.core.libtorrent.torrent_file_tree import TorrentFileTree
from tribler.core.libtorrent.trackers import is_valid_url
if TYPE_CHECKING:
    from os import PathLike

    # The dict subclasses below are typing-only stubs that describe the
    # structure of the bytes-keyed bencoded dictionaries via __getitem__
    # overloads. At runtime they are plain dict aliases (see the else branch).

    class FileDict(dict):  # noqa: D101

        @overload  # type: ignore[override]
        def __getitem__(self, key: Literal[b"length"]) -> int: ...

        @overload
        def __getitem__(self, key: Literal[b"path"]) -> list[bytes]: ...

        @overload
        def __getitem__(self, key: Literal[b"path.utf-8"]) -> list[bytes] | None: ...

        def __getitem__(self, key: bytes) -> Any: ...  # noqa: D105

    class InfoDict(dict):  # noqa: D101

        @overload  # type: ignore[override]
        def __getitem__(self, key: Literal[b"files"]) -> list[FileDict]: ...

        @overload
        def __getitem__(self, key: Literal[b"length"]) -> int: ...

        @overload
        def __getitem__(self, key: Literal[b"name"]) -> bytes: ...

        @overload
        def __getitem__(self, key: Literal[b"name.utf-8"]) -> bytes: ...

        @overload
        def __getitem__(self, key: Literal[b"piece length"]) -> int: ...

        @overload
        def __getitem__(self, key: Literal[b"pieces"]) -> bytes: ...

        def __getitem__(self, key: bytes) -> Any: ...  # noqa: D105

    class MetainfoDict(dict):  # noqa: D101

        @overload  # type: ignore[override]
        def __getitem__(self, key: Literal[b"announce"]) -> bytes | None: ...

        @overload
        def __getitem__(self, key: Literal[b"announce-list"]) -> list[list[bytes]] | None: ...

        @overload
        def __getitem__(self, key: Literal[b"comment"]) -> bytes | None: ...

        @overload
        def __getitem__(self, key: Literal[b"created by"]) -> bytes | None: ...

        @overload
        def __getitem__(self, key: Literal[b"creation date"]) -> bytes | None: ...

        @overload
        def __getitem__(self, key: Literal[b"encoding"]) -> bytes | None: ...

        @overload
        def __getitem__(self, key: Literal[b"info"]) -> InfoDict: ...

        @overload
        def __getitem__(self, key: Literal[b"httpseeds"]) -> list[bytes] | None: ...

        @overload
        def __getitem__(self, key: Literal[b"nodes"]) -> list[bytes] | None: ...

        @overload
        def __getitem__(self, key: Literal[b"urllist"]) -> list[bytes] | None: ...

        def __getitem__(self, key: bytes) -> Any: ...  # noqa: D105

    class TorrentParameters(dict):  # noqa: D101

        @overload  # type: ignore[override]
        def __getitem__(self, key: Literal[b"announce"]) -> bytes | None: ...

        @overload
        def __getitem__(self, key: Literal[b"announce-list"]) -> list[list[bytes]] | None: ...

        @overload
        def __getitem__(self, key: Literal[b"comment"]) -> bytes | None: ...

        @overload
        def __getitem__(self, key: Literal[b"created by"]) -> bytes | None: ...

        @overload
        def __getitem__(self, key: Literal[b"creation date"]) -> bytes | None: ...

        @overload
        def __getitem__(self, key: Literal[b"encoding"]) -> bytes | None: ...

        @overload
        def __getitem__(self, key: Literal[b"httpseeds"]) -> list[bytes] | None: ...

        @overload
        def __getitem__(self, key: Literal[b"name"]) -> bytes | None: ...

        @overload
        def __getitem__(self, key: Literal[b"name.utf-8"]) -> bytes | None: ...

        @overload
        def __getitem__(self, key: Literal[b"nodes"]) -> list[bytes] | None: ...

        @overload
        def __getitem__(self, key: Literal[b"urllist"]) -> list[bytes] | None: ...

        def __getitem__(self, key: bytes) -> Any: ...  # noqa: D105
else:
    # Runtime aliases: plain dicts, without the overload-based key typing.
    FileDict = dict
    InfoDict = dict
    MetainfoDict = dict[bytes, Any]
    TorrentParameters = dict
def escape_as_utf8(string: bytes, encoding: str = "utf8") -> str:
    """
    Make a string UTF-8 compliant, destroying characters if necessary.

    :param string: the string to convert
    :param encoding: the string encoding to use
    :return: the utf-8 string derivative
    """
    try:
        # Happy path: the claimed encoding is correct and round-trips to utf-8.
        return string.decode(encoding).encode('utf8').decode('utf8')
    except (LookupError, TypeError, ValueError):
        pass
    try:
        # The claimed encoding was wrong: fall back to latin1 and accept
        # minor corruption.
        return string.decode('latin1').encode('utf8', 'ignore').decode('utf8')
    except (TypeError, ValueError):
        # Truly nasty input (e.g. '\u266b'): drop the illegal entries.
        return string.decode('utf8', 'ignore')
def pathlist2filename(pathlist: Iterable[bytes]) -> Path:
    """
    Join the byte segments of a multi-file torrent 'path' entry into a Path.
    """
    segments = [segment.decode() for segment in pathlist]
    return Path(*segments)
def get_length_from_metainfo(metainfo: MetainfoDict, selectedfiles: set[Path] | None) -> int:
    """
    Loop through all files in a torrent and calculate the total size.

    :param metainfo: a bencoded-style metainfo dict (bytes keys)
    :param selectedfiles: when given (non-empty), only files in this set are counted
    :return: the total size in bytes
    """
    info = metainfo[b"info"]
    if b"files" not in info:
        # single-file torrent
        return info[b"length"]

    # multi-file torrent: iterate the file dicts directly instead of indexing
    # by position.
    total = 0
    for file_dict in info[b"files"]:
        length = file_dict[b"length"]
        # The path is only decoded when a file selection is active
        # (short-circuit keeps the no-selection case cheap).
        if length > 0 and (not selectedfiles or pathlist2filename(file_dict[b"path"]) in selectedfiles):
            total += length
    return total
class TorrentDef:
"""
This object acts as a wrapper around some libtorrent metadata.
It can be used to create new torrents, or analyze existing ones.
"""
def __init__(self, metainfo: MetainfoDict | None = None,
             torrent_parameters: TorrentParameters | None = None,
             ignore_validation: bool = True) -> None:
    """
    Create a new TorrentDef object, possibly based on existing data.

    :param metainfo: A dictionary with metainfo, i.e. from a .torrent file.
    :param torrent_parameters: User-defined parameters for the new TorrentDef.
    :param ignore_validation: Whether we ignore the libtorrent validation.
    :raises ValueError: when the given metainfo is empty or rejected.
    """
    self._logger = logging.getLogger(self.__class__.__name__)
    self.torrent_parameters: TorrentParameters = cast(TorrentParameters, {})
    self.metainfo: MetainfoDict | None = metainfo
    self.infohash: bytes | None = None
    self._torrent_info: lt.torrent_info | None = None

    if self.metainfo is not None:
        # First, make sure the passed metainfo is valid
        if not ignore_validation:
            try:
                self._torrent_info = lt.torrent_info(self.metainfo)
                raw_infohash = self._torrent_info.info_hash()  # LT1.X: bytes, LT2.X: sha1_hash
                self.infohash = raw_infohash if isinstance(raw_infohash, bytes) else raw_infohash.to_bytes()
            except RuntimeError as exc:
                # libtorrent rejected the metainfo: surface this as a ValueError.
                raise ValueError from exc
        else:
            try:
                if not self.metainfo[b'info']:
                    msg = "Empty metainfo!"
                    raise ValueError(msg)
                # Without libtorrent validation, derive the infohash ourselves
                # from the bencoded info dict.
                self.infohash = sha1(lt.bencode(self.metainfo[b'info'])).digest()
            except (KeyError, RuntimeError) as exc:
                raise ValueError from exc
        self.copy_metainfo_to_torrent_parameters()

    elif torrent_parameters is not None:
        self.torrent_parameters.update(torrent_parameters)
def copy_metainfo_to_torrent_parameters(self) -> None: # noqa: C901
"""
Populate the torrent_parameters dictionary with information from the metainfo.
"""
if self.metainfo is not None:
if b"comment" in self.metainfo:
self.torrent_parameters[b"comment"] = self.metainfo[b"comment"]
if b"created by" in self.metainfo:
self.torrent_parameters[b"created by"] = self.metainfo[b"created by"]
if b"creation date" in self.metainfo:
self.torrent_parameters[b"creation date"] = self.metainfo[b"creation date"]
if b"announce" in self.metainfo:
self.torrent_parameters[b"announce"] = self.metainfo[b"announce"]
if b"announce-list" in self.metainfo:
self.torrent_parameters[b"announce-list"] = self.metainfo[b"announce-list"]
if b"nodes" in self.metainfo:
self.torrent_parameters[b"nodes"] = self.metainfo[b"nodes"]
if b"httpseeds" in self.metainfo:
self.torrent_parameters[b"httpseeds"] = self.metainfo[b"httpseeds"]
if b"urllist" in self.metainfo:
self.torrent_parameters[b"urllist"] = self.metainfo[b"urllist"]
if b"name" in self.metainfo[b"info"]:
self.torrent_parameters[b"name"] = self.metainfo[b"info"][b"name"]
if b"piece length" in self.metainfo[b"info"]:
self.torrent_parameters[b"piece length"] = self.metainfo[b"info"][b"piece length"]
@property
def torrent_info(self) -> lt.torrent_info | None:
    """
    Get the libtorrent torrent info instance or load it from our metainfo.
    """
    # Lazily (re)build the lt.torrent_info from the metainfo when needed.
    self.load_torrent_info()
    return self._torrent_info
def invalidate_torrent_info(self) -> None:
"""
Invalidate the torrent info.
"""
self._torrent_info = None
def load_torrent_info(self) -> None:
    """
    Load the torrent info into memory from our metainfo if it does not exist.

    NOTE(review): assumes self.metainfo is a valid metainfo dict here —
    libtorrent would fail on None/invalid input; confirm callers guarantee this.
    """
    if self._torrent_info is None:
        self._torrent_info = lt.torrent_info(cast(dict[bytes, Any], self.metainfo))
def torrent_info_loaded(self) -> bool:
"""
Check if the libtorrent torrent info is loaded.
"""
return self._torrent_info is not None
@cached_property
def torrent_file_tree(self) -> TorrentFileTree:
    """
    Construct a file tree from this torrent definition.

    Cached: the tree is only built once per TorrentDef instance.
    """
    return TorrentFileTree.from_lt_file_storage(self.torrent_info.files())  # type: ignore[union-attr]
@staticmethod
def _threaded_load_job(filepath: str | bytes | PathLike) -> TorrentDef:
    """
    Perform the actual loading of the torrent.

    Called from a thread: don't call this directly!

    :param filepath: The path of the .torrent file to read.
    """
    with open(filepath, "rb") as torrent_file:
        file_content = torrent_file.read()
    return TorrentDef.load_from_memory(file_content)
@staticmethod
async def load(filepath: str | bytes | PathLike) -> TorrentDef:
    """
    Create a TorrentDef object from a .torrent file.

    The file is read and parsed on the default executor to avoid blocking
    the event loop.

    :param filepath: The path to the .torrent file
    """
    return await get_running_loop().run_in_executor(None, TorrentDef._threaded_load_job, filepath)
@staticmethod
def load_from_memory(bencoded_data: bytes) -> TorrentDef:
    """
    Load some bencoded data into a TorrentDef.

    :param bencoded_data: The bencoded data to decode and use as metainfo
    :raises ValueError: if the data is not a valid bencoded string
    """
    metainfo = lt.bdecode(bencoded_data)
    # Some versions of libtorrent will not raise an exception when providing invalid data.
    # This issue is present in 1.0.8 (included with Tribler 7.3.0), but has been fixed since at least 1.2.1.
    if metainfo is None:
        msg = "Data is not a bencoded string"
        raise ValueError(msg)
    return TorrentDef.load_from_dict(cast(MetainfoDict, metainfo))
@staticmethod
def load_from_dict(metainfo: MetainfoDict) -> TorrentDef:
    """
    Load a metainfo dictionary into a TorrentDef object.

    :param metainfo: The metainfo dictionary
    :return: a TorrentDef built from the given metainfo
    """
    return TorrentDef(metainfo=metainfo)
@staticmethod
async def load_from_url(url: str) -> TorrentDef:
    """
    Create a TorrentDef with information from a remote source.

    :param url: The HTTP/HTTPS url where to fetch the torrent info from.
    :raises aiohttp.ClientResponseError: when the server answers with an error status.
    """
    # Use context managers so the session and the response are always closed;
    # the previous implementation leaked the ClientSession.
    async with aiohttp.ClientSession(raise_for_status=True) as session, session.get(url) as response:
        body = await response.read()
    return TorrentDef.load_from_memory(body)
def _filter_characters(self, name: bytes) -> str:
"""
Sanitize the names in path to unicode by replacing out all
characters that may -even remotely- cause problems with the '?'
character.
:param name: the name to sanitize
:return: the sanitized string
"""
def filter_character(char: int) -> str:
if 0 < char < 128:
return chr(char)
self._logger.debug("Bad character 0x%X", char)
return "?"
return "".join(map(filter_character, name))
def set_encoding(self, enc: bytes) -> None:
    """
    Set the character encoding for e.g. the 'name' field.

    :param enc: The new encoding of the file, as raw bytes (e.g. b"utf-8").
    """
    self.torrent_parameters[b"encoding"] = enc
def get_encoding(self) -> str:
"""
Returns the used encoding of the TorrentDef.
"""
return self.torrent_parameters.get(b"encoding", b"utf-8").decode()
def set_tracker(self, url: str) -> None:
    """
    Set the announce tracker of this torrent, according to a given URL.

    :param url: The tracker url.
    :raises ValueError: when the URL is not a valid tracker URL.
    """
    if not is_valid_url(url):
        raise ValueError("Invalid URL")

    # Some tracker code can't deal with / at end: drop a single trailing slash.
    normalized = url[:-1] if url.endswith("/") else url
    self.torrent_parameters[b"announce"] = normalized
def get_tracker(self) -> bytes | None:
"""
Returns the torrent announce URL.
"""
return self.torrent_parameters.get(b"announce", None)
def get_tracker_hierarchy(self) -> list[list[bytes]]:
    """
    Returns the hierarchy of trackers (the "announce-list" tiers, [] when unset).
    """
    return self.torrent_parameters.get(b"announce-list", [])
def get_trackers(self) -> set[bytes]:
"""
Returns a flat set of all known trackers.
:return: all known trackers
"""
if self.get_tracker_hierarchy():
trackers = itertools.chain.from_iterable(self.get_tracker_hierarchy())
return set(filter(None, trackers))
tracker = self.get_tracker()
if tracker:
return {tracker}
return set()
def set_piece_length(self, piece_length: int) -> None:
"""
Set the size of the pieces in which the content is traded.
The piece size must be a multiple of the chunk size, the unit in which
it is transmitted, which is 16K by default. The default is automatic (value 0).
:param piece_length: The piece length.
"""
if not isinstance(piece_length, int):
msg = "Piece length not an int/long"
raise ValueError(msg) # noqa: TRY004
self.torrent_parameters[b"piece length"] = piece_length
def get_piece_length(self) -> int:
"""
Returns the piece size.
"""
return self.torrent_parameters.get(b"piece length", 0)
def get_nr_pieces(self) -> int:
"""
Returns the number of pieces.
"""
if not self.metainfo:
return 0
return len(self.metainfo[b"info"][b"pieces"]) // 20
def get_infohash(self) -> bytes | None:
    """
    Returns the infohash of the torrent, if metainfo is provided. Might be None if no metainfo is provided.

    :return: the 20-byte infohash, or None
    """
    return self.infohash
def get_metainfo(self) -> MetainfoDict | None:
    """
    Returns the metainfo of the torrent. Might be None if no metainfo is provided.

    :return: the metainfo dictionary, or None
    """
    return self.metainfo
def get_name(self) -> bytes | None:
    """
    Returns the name as raw string of bytes.

    NOTE(review): unlike the other getters this indexes torrent_parameters
    directly, so it raises KeyError when no name was set — confirm whether
    returning None (as the annotation suggests) was intended.
    """
    return self.torrent_parameters[b"name"]
def get_name_utf8(self) -> str:
    """
    Not all names are utf-8, attempt to construct it as utf-8 anyway.

    Falsy names (None/empty) are coerced to b"" before conversion.
    """
    return escape_as_utf8(self.get_name() or b"", self.get_encoding())
def set_name(self, name: bytes) -> None:
    """
    Set the name of this torrent.

    :param name: The new name of the torrent
    """
    # Stored as raw bytes; see get_name_utf8 for a unicode-safe view.
    self.torrent_parameters[b"name"] = name
def get_name_as_unicode(self) -> str:
    """
    Returns the info['name'] field as Unicode string.

    If there is an utf-8 encoded name, we assume that it is correctly encoded and use it normally.
    Otherwise, if there is an encoding[1], we attempt to decode the (bytes) name.
    Otherwise, we attempt to decode the (bytes) name as UTF-8.
    Otherwise, we attempt to replace non-UTF-8 characters from the (bytes) name with "?".
    If all of the above fails, this returns an empty string.

    [1] Some encodings are not supported by python. For instance, the MBCS codec which is used by Windows is not
    supported (Jan 2010).
    """
    if self.metainfo is not None:
        if b"name.utf-8" in self.metainfo[b"info"]:
            # Explicit utf-8 variant: trust it, fall through on decode errors.
            with suppress(UnicodeError):
                return self.metainfo[b"info"][b"name.utf-8"].decode()

        if (name := self.metainfo[b"info"].get(b"name")) is not None:
            if (encoding := self.metainfo.get(b"encoding")) is not None:
                with suppress(UnicodeError), suppress(LookupError):
                    return name.decode(encoding.decode())
            with suppress(UnicodeError):
                return name.decode()
            with suppress(UnicodeError):
                # Last resort: replace all problematic bytes with '?'.
                return self._filter_characters(name)
    return ""
def _get_all_files_as_unicode_with_length(self) -> Generator[tuple[Path, int], None, None]:  # noqa: C901
    """
    Get a generator for files in the torrent def. No filtering is possible and all tricks are allowed to obtain
    a unicode list of filenames.

    :return: A unicode filename generator.
    """
    if self.metainfo and b"files" in self.metainfo[b"info"]:
        # Multi-file torrent
        files = cast(FileDict, self.metainfo[b"info"][b"files"])

        # For each file, fall through the decoding strategies below until one succeeds.
        for file_dict in files:
            if b"path.utf-8" in file_dict:
                # This file has an utf-8 encoded list of elements.
                # We assume that it is correctly encoded and use it normally.
                try:
                    yield (Path(*(element.decode() for element in file_dict[b"path.utf-8"])),
                           file_dict[b"length"])
                    continue
                except UnicodeError:
                    pass

            if b"path" in file_dict:
                # Try to use the 'encoding' field. If it exists, it should contain something like 'utf-8'.
                if (encoding := self.metainfo.get(b"encoding")) is not None:
                    try:
                        yield (Path(*(element.decode(encoding.decode()) for element in file_dict[b"path"])),
                               file_dict[b"length"])
                        continue
                    except UnicodeError:
                        pass
                    except LookupError:
                        # Some encodings are not supported by Python. For instance, the MBCS codec which is used
                        # by Windows is not supported (Jan 2010).
                        pass

                # Try to convert the names in path to unicode, assuming that it was encoded as utf-8.
                try:
                    yield (Path(*(element.decode() for element in file_dict[b"path"])),
                           file_dict[b"length"])
                    continue
                except UnicodeError:
                    pass

                # Convert the names in path to unicode by replacing out all characters that may - even remotely -
                # cause problems with the '?' character.
                try:
                    yield Path(*map(self._filter_characters, file_dict[b"path"])), file_dict[b"length"]
                    continue
                except UnicodeError:
                    pass
    elif self.metainfo:
        # Single-file torrent
        yield Path(self.get_name_as_unicode()), self.metainfo[b"info"][b"length"]
def get_files_with_length(self, exts: set[str] | None = None) -> list[tuple[Path, int]]:
"""
The list of files in the torrent def.
:param exts: (Optional) list of filename extensions (without leading .)
to search for.
:return: A list of filenames.
"""
videofiles = []
for filename, length in self._get_all_files_as_unicode_with_length():
ext = Path(filename).suffix
if ext != "" and ext[0] == ".":
ext = ext[1:]
if exts is None or ext.lower() in exts:
videofiles.append((filename, length))
return videofiles
def get_files(self, exts: set[str] | None = None) -> list[Path]:
"""
Return the list of file paths in this torrent.
"""
return [filename for filename, _ in self.get_files_with_length(exts)]
def get_length(self, selectedfiles: set[Path] | None = None) -> int:
    """
    Returns the total size of the content in the torrent. If the optional selectedfiles argument is specified, the
    method returns the total size of only those files.

    :param selectedfiles: an optional set of file paths to restrict the count to
    :return: A length (long)
    """
    if self.metainfo:
        return get_length_from_metainfo(self.metainfo, selectedfiles)
    return 0
def get_creation_date(self) -> int:
"""
Returns the creation date of the torrent.
"""
return self.metainfo.get(b"creation date", 0) if self.metainfo else 0
def is_multifile_torrent(self) -> bool:
"""
Returns whether this TorrentDef is a multi-file torrent.
"""
if self.metainfo:
return b"files" in self.metainfo[b"info"]
return False
def is_private(self) -> bool:
"""
Returns whether this TorrentDef is a private torrent (and is not announced in the DHT).
"""
try:
private = int(self.metainfo[b"info"].get(b"private", 0)) if self.metainfo else 0
except (ValueError, KeyError) as e:
self._logger.warning("%s: %s", e.__class__.__name__, str(e))
private = 0
return private == 1
def get_index_of_file_in_files(self, file: str | None) -> int:
    """
    Get the index of the given file path in the torrent.

    Raises a ValueError if the path is not found, if there is no metainfo,
    or if this is a single-file torrent.
    """
    if not self.metainfo:
        msg = "TorrentDef does not have metainfo"
        raise ValueError(msg)
    info = self.metainfo[b"info"]

    if file is not None and b"files" in info:
        for i in range(len(info[b"files"])):
            file_dict = info[b"files"][i]

            # Prefer the utf-8 encoded path when present.
            intorrentpath = pathlist2filename(file_dict.get(b"path.utf-8", file_dict[b"path"]))
            if intorrentpath == Path(file):
                return i
        msg = "File not found in torrent"
        raise ValueError(msg)

    msg = "File not found in single-file torrent"
    raise ValueError(msg)
class TorrentDefNoMetainfo(TorrentDef):
    """
    Instances of this class are used when working with a torrent def that contains no metainfo (yet), for instance,
    when starting a download with only an infohash. Other methods that are using this class do not distinguish between
    a TorrentDef with and without data and may still expect this class to have various methods in TorrentDef
    implemented.
    """

    def __init__(self, infohash: bytes, name: bytes, url: bytes | str | None = None) -> None:
        """
        Create a new valid torrent def without metainfo.

        :param infohash: the (known) infohash of the torrent
        :param name: the display name of the torrent
        :param url: an optional source URL, stored in the "urllist" parameter
        """
        torrent_parameters: TorrentParameters = cast(TorrentParameters, {b"name": name})
        if url is not None:
            torrent_parameters[b"urllist"] = [url if isinstance(url, bytes) else url.encode()]
        super().__init__(torrent_parameters=torrent_parameters)
        self.infohash = infohash

    def get_url(self) -> bytes | str | None:
        """
        Get the URL belonging to this torrent, or None when no URL was given.
        """
        if urllist := self.torrent_parameters.get(b"urllist"):
            return urllist[0]
        return None

    @property
    def torrent_info(self) -> lt.torrent_info | None:
        """
        A torrent def without metainfo has no libtorrent torrent_info.
        """
        return None

    def load_torrent_info(self) -> None:
        """
        If there cannot be torrent info, we don't need to try and load it.
        """

    def get_name_as_unicode(self) -> str:
        """
        Get the name of this torrent.
        """
        return self.get_name_utf8()
| 25,588 | Python | .py | 556 | 35.606115 | 118 | 0.591862 | Tribler/tribler | 4,768 | 443 | 131 | GPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,025 | uris.py | Tribler_tribler/src/tribler/core/libtorrent/uris.py | import logging
import os
from pathlib import Path
from aiohttp import ClientSession
from aiohttp.hdrs import LOCATION
from yarl import URL
logger = logging.getLogger(__name__)
def url_to_path(file_url: str) -> str:
    """
    Convert a file URL to a local filesystem path.

    Example:
    -------
    'file:///path/to/file' -> '/path/to/file'
    """
    url = URL(file_url)

    if os.name == "nt" and url.host:
        # UNC file path, \\server\share\path...
        # ref: https://docs.microsoft.com/en-us/dotnet/standard/io/file-path-formats
        _, share, *segments = url.parts
        path = (rf"\\{url.host}\{share}", *segments)
    elif os.name == "nt":
        # Windows drive path: strip the leading '/' from '/C:/...'.
        path = (url.path.lstrip("/"), )
    else:
        path = (url.path, )
    return str(Path(*path))
async def unshorten(uri: str) -> str:
    """
    Unshorten a URI if it is a short URI. Return the original URI if it is not a short URI.

    :param uri: A string representing the shortened URL that needs to be unshortened.
    :return: The unshortened URL. If the original URL does not redirect to another URL, the original URL is returned.
    """
    if URL(uri).scheme not in ("http", "https"):
        # Only HTTP(S) URLs can redirect; anything else is returned untouched.
        return uri
    logger.info("Unshortening URI: %s", uri)
    redirect_statuses = (301, 302, 303, 307, 308)
    async with ClientSession() as session:
        try:
            async with await session.get(uri, allow_redirects=False) as response:
                if response.status in redirect_statuses:
                    uri = response.headers.get(LOCATION, uri)
        except Exception as e:
            # Best effort: on any failure we fall back to the original URI.
            logger.warning("Error while unshortening a URI: %s: %s", e.__class__.__name__, str(e), exc_info=e)
    logger.info("Unshorted URI: %s", uri)
    return uri
| 1,715 | Python | .py | 44 | 32.568182 | 117 | 0.628244 | Tribler/tribler | 4,768 | 443 | 131 | GPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,026 | torrent_file_tree.py | Tribler_tribler/src/tribler/core/libtorrent/torrent_file_tree.py | from __future__ import annotations
import os
import re
from bisect import bisect
from dataclasses import dataclass, field
from pathlib import Path
from typing import TYPE_CHECKING, Dict, Generator, ItemsView, cast
if TYPE_CHECKING:
import libtorrent
class TorrentFileTree:
    """
    A tree of directories that contain other directories and files.
    """
    @dataclass
    class Directory:
        """
        A directory that contains other directories and files.
        """
        directories: dict[str, TorrentFileTree.Directory] = field(default_factory=dict)
        files: list[TorrentFileTree.File] = field(default_factory=list)
        collapsed: bool = True
        size: int = 0
        def calc_size(self) -> None:
            """
            Calculate the size of this Directory, assuming all subdirectories already have their size calculated.
            """
            self.size = sum(d.size for d in self.directories.values()) + sum(f.size for f in self.files)
        def iter_dirs(self) -> Generator[TorrentFileTree.Directory, None, None]:
            """
            Iterate through the subdirectories in this directory and then this directory itself.
            We do it this way (post-order) so that calc_size() can be easily/efficiently executed!
            """
            for directory in self.directories.values():
                yield from directory.iter_dirs()
            yield self
        def tostr(self, depth: int = 0, name: str = "") -> str:
            """
            Create a beautifully formatted string representation of this directory.

            :param depth: the indentation level (one tab per level).
            :param name: the name of this directory within its parent.
            """
            tab = "\t"
            if self.collapsed:
                return "\n" + "\t" * depth + f"CollapsedDirectory({name!r}, {self.size} bytes)"
            # Pretty directories
            has_no_directories = len(self.directories) == 0
            pretty_directories = ",".join(v.tostr(depth + 2, k) for k, v in self.directories.items())
            dir_closure = "" if has_no_directories else "\n" + tab * (depth + 1)
            pretty_directories = f"\n{tab * (depth + 1)}directories=[{pretty_directories}{dir_closure}]"
            # Pretty files
            pretty_files = "".join("\n" + v.tostr(depth + 2) for v in self.files)
            pretty_files = f"\n{tab * (depth + 1)}files=[{pretty_files}]"
            return "\n" + "\t" * depth + f"Directory({name!r},{pretty_directories},{pretty_files}, {self.size} bytes)"
    @dataclass(unsafe_hash=True)
    class File:
        """
        A File object that has a name (relative to its parent directory) and a file index in the torrent's file list.
        """
        name: str
        index: int
        size: int = 0
        selected: bool = True
        _sort_pattern = re.compile('([0-9]+)')  # We use this for natural sorting (see sort_key())
        def tostr(self, depth: int = 0) -> str:
            """
            Create a beautifully formatted string representation of this File.
            """
            return "\t" * depth + f"File({self.index}, {self.name}, {self.size} bytes)"
        def sort_key(self) -> tuple[int | str, ...]:
            """
            Sort File instances using natural sort based on their names, which SHOULD be unique.
            """
            return tuple(int(part) if part.isdigit() else part for part in self._sort_pattern.split(self.name))
        def __lt__(self, other: TorrentFileTree.File) -> bool:
            """
            Python 3.8 quirk/shortcoming is that File needs to be a SupportsRichComparisonT (instead of using a key).
            """
            return self.sort_key() < other.sort_key()
        def __le__(self, other: TorrentFileTree.File) -> bool:
            """
            Python 3.8 quirk/shortcoming is that File needs to be a SupportsRichComparisonT (instead of using a key).
            """
            return self.sort_key() <= other.sort_key()
        def __gt__(self, other: TorrentFileTree.File) -> bool:
            """
            Python 3.8 quirk/shortcoming is that File needs to be a SupportsRichComparisonT (instead of using a key).
            """
            return self.sort_key() > other.sort_key()
        def __ge__(self, other: TorrentFileTree.File) -> bool:
            """
            Python 3.8 quirk/shortcoming is that File needs to be a SupportsRichComparisonT (instead of using a key).
            """
            return self.sort_key() >= other.sort_key()
        def __eq__(self, other: object) -> bool:
            """
            Python 3.8 quirk/shortcoming is that File needs to be a SupportsRichComparisonT (instead of using a key).
            """
            if isinstance(other, TorrentFileTree.File):
                return self.sort_key() == other.sort_key()
            return False
        def __ne__(self, other: object) -> bool:
            """
            Python 3.8 quirk/shortcoming is that File needs to be a SupportsRichComparisonT (instead of using a key).
            """
            if isinstance(other, TorrentFileTree.File):
                return self.sort_key() != other.sort_key()
            return True
    def __init__(self, file_storage: libtorrent.file_storage) -> None:
        """
        Construct an empty tree data structure belonging to the given file storage.
        Note that the file storage contents are not loaded in yet at this point.
        """
        self.root = TorrentFileTree.Directory()
        self.root.collapsed = False
        self.file_storage = file_storage
        # Cache mapping each registered path to its node, filled by from_lt_file_storage() for O(1) lookups.
        self.paths: Dict[Path, TorrentFileTree.Directory | TorrentFileTree.File] = {}
    def __str__(self) -> str:
        """
        Represent the tree as a string, which is actually just the tostr() of its root directory.
        """
        return f"TorrentFileTree({self.root.tostr()}\n)"
    @classmethod
    def from_lt_file_storage(cls: type[TorrentFileTree], file_storage: libtorrent.file_storage) -> TorrentFileTree:
        """
        Load in the tree contents from the given file storage, sorting the files in each directory.
        """
        tree = cls(file_storage)
        # Map libtorrent's flat list to a tree structure.
        for i in range(file_storage.num_files()):
            full_file_path = Path(file_storage.file_path(i))
            *subdirs, fname = full_file_path.parts
            # Register all directories on each file's path.
            current_dir = tree.root
            full_path = Path("")
            for subdir in subdirs:
                d = current_dir.directories.get(subdir, TorrentFileTree.Directory())
                current_dir.directories[subdir] = d
                current_dir = d
                # Register the current path in `tree.paths` to enable later searches with O(1) complexity.
                full_path = full_path / subdir
                tree.paths[full_path] = d
            # After the 'for' loop iteration, `current_dir` points to the rightmost directory in the current file path.
            file_instance = cls.File(fname, i, file_storage.file_size(i))
            current_dir.files.append(file_instance)
            tree.paths[full_file_path] = file_instance  # As with directories, register the file path for searching
        # Sorting afterward is faster than sorting during insertion (roughly 4x speedup)
        for directory in tree.root.iter_dirs():
            directory.files.sort()
            directory.calc_size()
        return tree
    def expand(self, path: Path) -> None:
        """
        Expand all directories that are necessary to view the given path.
        """
        # Un-collapse every directory on the path; an unknown component ends the walk early,
        # leaving the deepest existing directory expanded.
        current_dir = self.root
        for directory in path.parts:
            if directory not in current_dir.directories:
                break
            current_dir = current_dir.directories[directory]
            current_dir.collapsed = False
    def collapse(self, path: Path) -> None:
        """
        Collapse ONLY the specific given directory.
        """
        element = self.find(path)
        if isinstance(element, TorrentFileTree.Directory) and element != self.root:
            element.collapsed = True
    def set_selected(self, path: Path, selected: bool) -> list[int]:
        """
        Set the selected status for a File or entire Directory (recursively).

        :returns: the list of modified file indices.
        """
        item = self.find(path)
        if item is None:
            return []
        if isinstance(item, TorrentFileTree.File):
            item.selected = selected
            return [item.index]
        out = []
        for key in item.directories:
            out += self.set_selected(path / key, selected)
        for file in item.files:
            file.selected = selected
            out.append(file.index)
        return out
    def find(self, path: Path) -> Directory | File | None:
        """
        Get the Directory or File object at the given path, or None if it does not exist.
        Searching for files is "expensive" (use libtorrent instead).
        """
        if path == Path(""):
            return self.root
        current_dir = self.root
        for directory in path.parts:
            if directory not in current_dir.directories:
                # Not a directory but a file?
                if len(current_dir.files) == 0:
                    return None
                search = self.File(directory, 0)
                # bisect returns the insertion point, so the candidate match (if any) sits just before it.
                found_at = bisect(current_dir.files, search)
                element = current_dir.files[found_at - 1]
                return element if element == search else None
            current_dir = current_dir.directories[directory]
        return current_dir
    def path_is_dir(self, path: Path) -> bool:
        """
        Check if the given path points to a Directory (instead of a File).
        """
        if path == Path(""):  # Note that Path("") == Path(".") but "" != "."
            return True
        current_dir = self.root
        for directory in path.parts:
            if directory not in current_dir.directories:
                # We ended up at a File (or a Path that does not exist, which is also not a directory)
                return False
            current_dir = current_dir.directories[directory]
        return True
    def find_next_directory(self, from_path: Path) -> tuple[Directory, Path] | None:
        """
        Get the next unvisited directory from a given path.
        When we ran out of files, we have to go up in the tree. However, when we go up, we may immediately be at the
        end of the list of that parent directory and we may have to go up again. If we are at the end of the list all
        the way up to the root of the tree, we return None.
        """
        from_parts = from_path.parts
        for i in range(1, len(from_parts) + 1):
            parent_path = Path(os.sep.join(from_parts[:-i]))
            parent = cast(TorrentFileTree.Directory, self.find(parent_path))
            dir_in_parent = from_parts[-i]
            dir_indices = list(parent.directories.keys())  # Python 3 "quirk": dict keys() order is stable
            index_in_parent = dir_indices.index(dir_in_parent)
            if index_in_parent != len(dir_indices) - 1:
                # We did not run through all available directories in the parent yet
                dirname = dir_indices[index_in_parent + 1]
                return parent.directories[dirname], parent_path / dirname
            if len(parent.files) > 0:
                # We did not run through all available files in the parent yet
                return parent, parent_path / parent.files[0].name
        return None
    def _view_get_fetch_path_and_dir(self, start_path: tuple[Directory, Path] | Path) -> tuple[Directory, Path, Path]:
        """
        Given a start path, which may be a file, get the containing Directory object and directory path.
        In the case that we start from a given Directory object and a file path, we only correct the file path to
        start at the given Directory's path.
        """
        if isinstance(start_path, Path):
            fetch_path = start_path if self.path_is_dir(start_path) else start_path.parent
            fetch_directory = cast(TorrentFileTree.Directory, self.find(fetch_path))
            return fetch_directory, fetch_path, start_path
        fetch_directory, fetch_path = start_path
        requested_fetch_path = fetch_path
        if not self.path_is_dir(fetch_path):
            fetch_path = fetch_path.parent
        return fetch_directory, fetch_path, requested_fetch_path
    def _view_up_after_files(self, number: int, fetch_path: Path) -> list[str]:
        """
        Run up the tree to the next available directory (if it exists) and continue building a view.
        """
        next_dir_desc = self.find_next_directory(fetch_path)
        view: list[str] = []
        if next_dir_desc is None:
            return view
        next_dir, next_dir_path = next_dir_desc
        view.append(str(next_dir_path))
        number -= 1
        if number == 0:
            return view
        return view + self.view((next_dir, next_dir_path), number)
    def _view_process_directories(self, number: int, directory_items: ItemsView[str, Directory],
                                  fetch_path: Path) -> tuple[list[str], int]:
        """
        Process the directories dictionary of a given (parent directory) path.
        Note that we only need to process the first directory and the remainder is visited through recursion.
        """
        view = []
        for dirname, dirobj in directory_items:
            full_path = fetch_path / dirname
            # The subdirectory is an item of the tree itself.
            view.append(str(full_path))
            number -= 1
            if number == 0:  # Exit early if we don't need anymore items
                return view, number
            # If the elements of the subdirectory are not collapsed, recurse into a view of those elements.
            if not dirobj.collapsed:
                elems = self.view((dirobj, full_path), number)
                view += elems
                number -= len(elems)
                break
        # We exhausted all subdirectories (note that the number may still be larger than 0)
        return view, number
    def view(self, start_path: tuple[Directory, Path] | Path, number: int) -> list[str]:
        """
        Construct a view of a given number of path names (directories and files) in the tree.
        The view is constructed AFTER the given starting path. To view the root folder contents, simply call this
        method with Path("") or Path(".").
        """
        fetch_directory, fetch_path, element_path = self._view_get_fetch_path_and_dir(start_path)
        # This is a collapsed directory, it has no elements.
        if fetch_directory.collapsed:
            return self._view_up_after_files(number, fetch_path)
        view: list[str] = []
        if self.path_is_dir(element_path):
            # This is a directory: loop through its directories, then process its files.
            view, number = self._view_process_directories(number, fetch_directory.directories.items(), fetch_path)
            if number == 0:
                return view
            # We either landed here when:
            # 1. We went up to a file to continue processing from this fetch_directory (added to the view).
            #    > This is the case if (and only if) the last file in the view is the currently requested view. The
            #    > last file in the view is not necessarily equal the first file in the fetch_directory though!
            #    > In this case, we ignore the first file in the fetch_directory and then pull the "number" of files
            #    > that was requested of us.
            # 2. We went up and discovered a folder to evaluate.
            #    > We simply fetch the first "number" of files starting at the first file in the fetch_directory.
            if (len(view) > 0 and len(fetch_directory.files) > 0
                    and view[-1] == self.file_storage.file_path(fetch_directory.files[0].index)):  # O(1) index lookup
                files = [str(element_path / f.name) for f in fetch_directory.files[1:number + 1]]
            else:
                files = [str(element_path / f.name) for f in fetch_directory.files[:number]]
        else:
            # Starting in the middle of the files of a directory. Because the file list is presorted, we can efficiently
            # search for our starting index by using bisect.
            fetch_index = bisect(fetch_directory.files, self.File(element_path.parts[-1], 0))
            files = [str(fetch_path / f.name) for f in fetch_directory.files[fetch_index:fetch_index + number]]
        # At this point we run through the given files ("files") and go up one folder in the tree if we need more.
        view += files
        number -= len(files)
        return view if number == 0 else view + self._view_up_after_files(number, fetch_path)
| 16,984 | Python | .py | 326 | 40.947853 | 120 | 0.605699 | Tribler/tribler | 4,768 | 443 | 131 | GPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,027 | torrents.py | Tribler_tribler/src/tribler/core/libtorrent/torrents.py | from __future__ import annotations
import logging
from asyncio import CancelledError, Future
from contextlib import suppress
from hashlib import sha1
from os.path import getsize
from typing import TYPE_CHECKING, Callable, Iterable, TypedDict, TypeVar
import libtorrent as lt
from typing_extensions import ParamSpec
if TYPE_CHECKING:
from pathlib import Path
from tribler.core.libtorrent.download_manager.download import Download
from tribler.core.libtorrent.download_manager.stream import Stream
from tribler.core.libtorrent.torrentdef import InfoDict
logger = logging.getLogger(__name__)
# Typing helpers used by the decorators below so that wrapped functions
# keep their original parameter and return types.
WrappedParams = ParamSpec("WrappedParams")
WrappedReturn = TypeVar("WrappedReturn")
Wrapped = Callable[WrappedParams, WrappedReturn]
def check_handle(default: WrappedReturn) -> Wrapped:
    """
    Decorator factory: run the wrapped method only when the download has a valid libtorrent
    handle, otherwise return ``default``.

    Author(s): Egbert Bouman
    """
    def decorator(func: Wrapped) -> Wrapped:
        def wrapper(self: Download, *args: WrappedParams.args, **kwargs: WrappedParams.kwargs) -> WrappedReturn:
            handle = self.handle
            if not handle or not handle.is_valid():
                return default
            return func(self, *args, **kwargs)
        return wrapper
    return decorator
def require_handle(func: Wrapped) -> Wrapped:
    """
    Invoke the function once the handle is available. Returns a future that will fire once the function has completed.

    The wrapped call resolves to None (instead of raising) when the handle turns out to be
    invalid, replaced, or cancelled by the time it becomes available.

    Author(s): Egbert Bouman
    """
    def invoke_func(self: Download, *args: WrappedParams.args,
                    **kwargs: WrappedParams.kwargs) -> Future[WrappedReturn | None]:
        result_future: Future[WrappedReturn | None] = Future()
        # Runs on the event loop once get_handle() resolves; forwards the outcome to result_future.
        def done_cb(fut: Future[lt.torrent_handle]) -> None:
            with suppress(CancelledError):
                handle = fut.result()
                # Bail out with a None result if the handle no longer matches this download.
                if fut.cancelled() or result_future.done() or handle != self.handle or not handle.is_valid():
                    logger.warning('Can not invoke function, handle is not valid or future is cancelled')
                    result_future.set_result(None)
                    return
                try:
                    result = func(self, *args, **kwargs)
                except RuntimeError as e:
                    # ignore runtime errors, for more info see: https://github.com/Tribler/tribler/pull/7783
                    logger.exception(e)
                    result_future.set_result(None)
                except Exception as e:
                    # Any other exception is propagated to the awaiting caller.
                    logger.exception(e)
                    result_future.set_exception(e)
                else:
                    result_future.set_result(result)
        handle_future = self.get_handle()
        handle_future.add_done_callback(done_cb)
        return result_future
    return invoke_func
def check_vod(default: WrappedReturn) -> Wrapped:
    """
    Decorator factory: run the wrapped method only when the stream is in VOD mode
    (``self.enabled``), otherwise return ``default``.
    """
    def decorator(func: Wrapped) -> Wrapped:
        def wrapper(self: Stream, *args: WrappedParams.args, **kwargs: WrappedParams.kwargs) -> WrappedReturn:
            if not self.enabled:
                return default
            return func(self, *args, **kwargs)
        return wrapper
    return decorator
def common_prefix(paths_list: list[Path]) -> Path:
    """
    Get the deepest directory that is a common ancestor of all the given paths.

    :param paths_list: a non-empty list of paths (an empty list raises IndexError).
    :return: the longest shared parent path.
    """
    base_set = set(paths_list[0].parents)
    for path in paths_list[1:]:
        # intersection_update accepts any iterable; no need to build an intermediate set.
        base_set.intersection_update(path.parents)
    # The surviving parents form a nested chain, so the lexicographic maximum is the deepest
    # one. max() is O(n), replacing the original O(n log n) sorted(...)[0] formulation.
    return max(base_set)
def _existing_files(path_list: list[Path]) -> Iterable[Path]:
for path in path_list:
if not path.exists():
msg = f"Path does not exist: {path}"
raise OSError(msg)
elif path.is_file():
yield path
class TorrentFileResult(TypedDict):
    """
    A dictionary to describe a newly-created torrent.
    """
    success: bool  # whether torrent creation succeeded
    base_dir: Path  # the common base directory of the torrent's files
    torrent_file_path: str | None  # where the .torrent file was written, or None if not exported
    metainfo: bytes  # the bencoded torrent metainfo
    infohash: bytes  # SHA-1 digest of the bencoded b"info" dictionary
def create_torrent_file(file_path_list: list[Path], params: InfoDict,  # noqa: C901
                        torrent_filepath: str | None = None) -> TorrentFileResult:
    """
    Create a torrent file from the given paths and parameters.
    If an output file path is omitted, no file will be written to disk.

    :param file_path_list: the files (and directories, which are skipped) to include.
    :param params: bytes-keyed torrent parameters (b"comment", b"announce", ...).
    :param torrent_filepath: optional output path for the .torrent file.
    :raises OSError: when one of the given paths does not exist (via _existing_files).
    """
    fs = lt.file_storage()
    # filter all non-files
    path_list = list(_existing_files(file_path_list))
    # ACHTUNG!
    # In the case of a multi-file torrent, the torrent name plays the role of the toplevel dir name.
    # get the directory where these files are in. If there are multiple files, take the common directory they are in
    base_dir = (common_prefix(path_list).parent if len(path_list) > 1 else path_list[0].parent).absolute()
    for path in path_list:
        relative = path.relative_to(base_dir)
        fs.add_file(str(relative), getsize(str(path)))
    piece_size = params[b"piece length"] if params.get(b"piece length") else 0
    flag_v1_only = 2**6  # Backward compatibility for libtorrent < 2.x
    flags = lt.create_torrent_flags_t.optimize | flag_v1_only
    # NOTE(review): only top-level bytes *values* are decoded here; the keys stay bytes and
    # nested lists (e.g. b"announce-list") keep their bytes elements — confirm downstream expectations.
    params = {k: (v.decode() if isinstance(v, bytes) else v) for k, v in params.items()}
    torrent = lt.create_torrent(fs, piece_size=piece_size, flags=flags)
    if params.get(b"comment"):
        torrent.set_comment(params[b"comment"])
    if params.get(b"created by"):
        torrent.set_creator(params[b"created by"])
    # main tracker
    if params.get(b"announce"):
        torrent.add_tracker(params[b"announce"])
    # tracker list
    if params.get(b"announce-list"):
        tier = 1
        for tracker in params[b"announce-list"]:
            torrent.add_tracker(tracker[0], tier=tier)
            tier += 1
    # DHT nodes
    # http://www.bittorrent.org/beps/bep_0005.html
    if params.get(b"nodes"):
        for node in params[b"nodes"]:
            torrent.add_node(*node)
    # HTTP seeding
    # http://www.bittorrent.org/beps/bep_0017.html
    if params.get(b"httpseeds"):
        # NOTE(review): the stored value is passed as-is; if it is a list this hands a list to
        # add_http_seed, which expects a single URL — verify against callers.
        torrent.add_http_seed(params[b"httpseeds"])
    # Web seeding
    # http://www.bittorrent.org/beps/bep_0019.html
    if len(file_path_list) == 1 and params.get(b"urllist", False):
        # NOTE(review): same caveat as add_http_seed above — b"urllist" is conventionally a list.
        torrent.add_url_seed(params[b"urllist"])
    # read the files and calculate the hashes
    lt.set_piece_hashes(torrent, str(base_dir))
    t1 = torrent.generate()
    torrent_bytes = lt.bencode(t1)
    if torrent_filepath:
        with open(torrent_filepath, "wb") as f:
            f.write(torrent_bytes)
    return {
        "success": True,
        "base_dir": base_dir,
        "torrent_file_path": torrent_filepath,
        "metainfo": torrent_bytes,
        "infohash": sha1(lt.bencode(t1[b"info"])).digest()
    }
def get_info_from_handle(handle: lt.torrent_handle) -> lt.torrent_info | None:
    """
    Safely fetch the torrent info from a libtorrent handle.

    :return: the handle's torrent_file() result, or None when the handle lacks the method
        or has become invalid.
    """
    try:
        return handle.torrent_file()
    except AttributeError as attr_err:
        logger.warning("No torrent info found from handle: %s", str(attr_err))
        return None
    except RuntimeError as runtime_err:
        # This can happen when the torrent handle is invalid.
        logger.warning("Got exception when fetching info from handle: %s", str(runtime_err))
        return None
23,028 | torrentinfo_endpoint.py | Tribler_tribler/src/tribler/core/libtorrent/restapi/torrentinfo_endpoint.py | from __future__ import annotations
import json
import logging
from asyncio.exceptions import TimeoutError as AsyncTimeoutError
from binascii import hexlify, unhexlify
from copy import deepcopy
from ssl import SSLError
from typing import TYPE_CHECKING, Iterable
import libtorrent as lt
from aiohttp import (
BaseConnector,
ClientConnectorCertificateError,
ClientConnectorError,
ClientResponseError,
ClientSession,
ClientTimeout,
ServerConnectionError,
web,
)
from aiohttp_apispec import docs
from ipv8.REST.schema import schema
from marshmallow.fields import Boolean, String
from yarl import URL
from tribler.core.database.orm_bindings.torrent_metadata import tdef_to_metadata_dict
from tribler.core.libtorrent.torrentdef import TorrentDef
from tribler.core.libtorrent.uris import unshorten, url_to_path
from tribler.core.notifier import Notification
from tribler.core.restapi.rest_endpoint import (
HTTP_BAD_REQUEST,
HTTP_INTERNAL_SERVER_ERROR,
RESTEndpoint,
RESTResponse,
)
if TYPE_CHECKING:
from aiohttp.abc import Request
from aiohttp.typedefs import LooseHeaders
from tribler.core.libtorrent.download_manager.download_manager import DownloadManager
logger = logging.getLogger(__name__)
def recursive_unicode(obj: Iterable, ignore_errors: bool = False) -> Iterable:
    """
    Convert any bytes within a data structure to unicode strings. Bytes are assumed to be UTF-8 encoded text.

    :param obj: object comprised of lists/dicts/strings/bytes
    :param ignore_errors: when True, undecodable bytes are mapped byte-for-byte to code points
    :return: obj: object comprised of lists/dicts/strings
    """
    if isinstance(obj, bytes):
        try:
            return obj.decode()
        except UnicodeDecodeError:
            if not ignore_errors:
                raise
            # Fall back to a 1:1 byte-to-codepoint mapping (latin-1 style).
            return "".join(map(chr, obj))
    if isinstance(obj, dict):
        return {recursive_unicode(key, ignore_errors): recursive_unicode(value, ignore_errors)
                for key, value in obj.items()}
    if isinstance(obj, list):
        return [recursive_unicode(item, ignore_errors) for item in obj]
    return obj
async def query_uri(uri: str, connector: BaseConnector | None = None, headers: LooseHeaders | None = None,
                    timeout: ClientTimeout | None = None, return_json: bool = False,
                    valid_cert: bool = True) -> bytes | dict:
    """
    Perform a GET request for the given URI and return the raw body bytes (or parsed JSON).
    """
    request_kwargs: dict = {"headers": headers}
    if timeout:
        # ClientSession uses a sentinel object for the default timeout. Therefore, it should only be specified if an
        # actual value has been passed to this function.
        request_kwargs["timeout"] = timeout
    async with ClientSession(connector=connector, raise_for_status=True) as session:
        async with await session.get(uri, ssl=valid_cert, **request_kwargs) as response:
            if return_json:
                return await response.json(content_type=None)
            return await response.read()
class TorrentInfoEndpoint(RESTEndpoint):
    """
    This endpoint is responsible for handing all requests regarding torrent info in Tribler.
    """
    path = "/api/torrentinfo"
    def __init__(self, download_manager: DownloadManager) -> None:
        """
        Create a new torrent info endpoint.
        """
        super().__init__()
        self.download_manager = download_manager
        self.app.add_routes([web.get("", self.get_torrent_info),
                             web.put("", self.get_torrent_info_from_file)])
    @docs(
        tags=["Libtorrent"],
        summary="Return metainfo from a torrent found at a provided URI.",
        parameters=[{
            "in": "query",
            "name": "torrent",
            "description": "URI for which to return torrent information. This URI can either represent "
                           "a file location, a magnet link or a HTTP(S) url.",
            "type": "string",
            "required": True
        }],
        responses={
            200: {
                "description": "Return a hex-encoded json-encoded string with torrent metainfo",
                "schema": schema(GetMetainfoResponse={"metainfo": String, "download_exists": Boolean,
                                                      "valid_certificate": Boolean})
            }
        }
    )
    async def get_torrent_info(self, request: Request) -> RESTResponse:  # noqa: C901, PLR0911, PLR0912, PLR0915
        """
        Return metainfo from a torrent found at a provided URI.

        Supported schemes are file://, http(s):// (pointing at a .torrent or a magnet link)
        and magnet:. Any other scheme yields HTTP 400.
        """
        params = request.query
        hops = params.get("hops")
        i_hops = 0
        p_uri = params.get("uri")
        self._logger.info("URI: %s", p_uri)
        if hops:
            try:
                i_hops = int(hops)
            except ValueError:
                return RESTResponse({"error": f"wrong value of 'hops' parameter: {hops}"}, status=HTTP_BAD_REQUEST)
        if not p_uri:
            return RESTResponse({"error": "uri parameter missing"}, status=HTTP_BAD_REQUEST)
        # Resolve a single HTTP redirect first, so shortened links work.
        uri = await unshorten(p_uri)
        scheme = URL(uri).scheme
        valid_cert = True
        if scheme == "file":
            file_path = url_to_path(uri)
            try:
                tdef = await TorrentDef.load(file_path)
                metainfo = tdef.metainfo
            except (OSError, TypeError, ValueError, RuntimeError):
                return RESTResponse({"error": f"error while decoding torrent file: {file_path}"},
                                    status=HTTP_INTERNAL_SERVER_ERROR)
        elif scheme in ("http", "https"):
            try:
                try:
                    response = await query_uri(uri)
                except ClientConnectorCertificateError:
                    # Retry once without certificate validation and report that to the caller.
                    response = await query_uri(uri, valid_cert=False)
                    valid_cert = False
            except (ServerConnectionError, ClientResponseError, SSLError, ClientConnectorError,
                    AsyncTimeoutError, ValueError) as e:
                self._logger.warning("Error while querying http uri: %s", str(e))
                return RESTResponse({"error": str(e)}, status=HTTP_INTERNAL_SERVER_ERROR)
            if not isinstance(response, bytes):
                self._logger.warning("Error while reading response from http uri: %s", repr(response))
                return RESTResponse({"error": "Error while reading response from http uri"},
                                    status=HTTP_INTERNAL_SERVER_ERROR)
            if response.startswith(b'magnet'):
                # NOTE(review): the magnet link is in `response`, yet `uri` (the HTTP URL) is what
                # gets parsed below — looks suspicious; verify which value should feed parse_magnet_uri.
                try:
                    try:
                        # libtorrent 1.2.19
                        infohash = lt.parse_magnet_uri(uri)["info_hash"]  # type: ignore[index]  # (checker uses 2.X)
                    except TypeError:
                        # libtorrent 2.0.9
                        infohash = unhexlify(str(lt.parse_magnet_uri(uri).info_hash))
                except RuntimeError as e:
                    return RESTResponse(
                        {"error": f'Error while getting an infohash from magnet: {e.__class__.__name__}: {e}'},
                        status=HTTP_INTERNAL_SERVER_ERROR
                    )
                metainfo = await self.download_manager.get_metainfo(infohash, timeout=60, hops=i_hops,
                                                                    url=response.decode())
            else:
                metainfo = lt.bdecode(response)
        elif scheme == "magnet":
            self._logger.info("magnet scheme detected")
            try:
                try:
                    # libtorrent 1.2.19
                    infohash = lt.parse_magnet_uri(uri)["info_hash"]  # type: ignore[index]  # (checker uses 2.X)
                except TypeError:
                    # libtorrent 2.0.9
                    infohash = unhexlify(str(lt.parse_magnet_uri(uri).info_hash))
            except RuntimeError as e:
                return RESTResponse(
                    {"error": f'Error while getting an infohash from magnet: {e.__class__.__name__}: {e}'},
                    status=HTTP_BAD_REQUEST
                )
            metainfo = await self.download_manager.get_metainfo(infohash, timeout=60, hops=i_hops, url=uri)
        else:
            return RESTResponse({"error": "invalid uri"}, status=HTTP_BAD_REQUEST)
        if not metainfo:
            return RESTResponse({"error": "metainfo error"}, status=HTTP_INTERNAL_SERVER_ERROR)
        if not isinstance(metainfo, dict) or b"info" not in metainfo:
            self._logger.warning("Received metainfo is not a valid dictionary")
            return RESTResponse({"error": "invalid response"}, status=HTTP_INTERNAL_SERVER_ERROR)
        # Add the torrent to metadata.db
        torrent_def = TorrentDef.load_from_dict(metainfo)
        metadata_dict = tdef_to_metadata_dict(torrent_def)
        self.download_manager.notifier.notify(Notification.torrent_metadata_added, metadata=metadata_dict)
        download = self.download_manager.downloads.get(metadata_dict["infohash"])
        metainfo_lookup = self.download_manager.metainfo_requests.get(metadata_dict["infohash"])
        metainfo_download = metainfo_lookup.download if metainfo_lookup else None
        download_is_metainfo_request = download == metainfo_download
        # Check if the torrent is already in the downloads
        encoded_metainfo = deepcopy(metainfo)
        ready_for_unicode = recursive_unicode(encoded_metainfo, ignore_errors=True)
        json_dump = json.dumps(ready_for_unicode, ensure_ascii=False)
        # NOTE(review): "download_exists" serializes as a Download object (truthy) or None rather
        # than a strict bool when a real download exists — confirm the GUI handles that.
        return RESTResponse({"metainfo": hexlify(json_dump.encode()).decode(),
                             "download_exists": download and not download_is_metainfo_request,
                             "valid_certificate": valid_cert})
    @docs(
        tags=["Libtorrent"],
        summary="Return metainfo from a torrent found at a provided .torrent file.",
        responses={
            200: {
                "description": "Return a hex-encoded json-encoded string with torrent metainfo",
                "schema": schema(GetMetainfoResponse={"metainfo": String})
            }
        }
    )
    async def get_torrent_info_from_file(self, request: web.Request) -> RESTResponse:
        """
        Return metainfo from a torrent found at a provided .torrent file (the request body).
        """
        tdef = TorrentDef.load_from_memory(await request.read())
        infohash = tdef.get_infohash()
        # Check if the torrent is already in the downloads
        download = self.download_manager.downloads.get(infohash)
        metainfo_lookup = self.download_manager.metainfo_requests.get(infohash)
        metainfo_download = metainfo_lookup.download if metainfo_lookup else None
        requesting_metainfo = download == metainfo_download
        metainfo_unicode = recursive_unicode(deepcopy(tdef.get_metainfo()), ignore_errors=True)
        metainfo_json = json.dumps(metainfo_unicode, ensure_ascii=False)
        return RESTResponse({"infohash": hexlify(infohash).decode(),
                             "metainfo": hexlify(metainfo_json.encode('utf-8')).decode(),
                             "download_exists": download and not requesting_metainfo})
| 11,147 | Python | .py | 226 | 37.845133 | 116 | 0.618077 | Tribler/tribler | 4,768 | 443 | 131 | GPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,029 | create_torrent_endpoint.py | Tribler_tribler/src/tribler/core/libtorrent/restapi/create_torrent_endpoint.py | import asyncio
import base64
import json
from importlib.metadata import PackageNotFoundError, version
from pathlib import Path
import libtorrent as lt
from aiohttp import web
from aiohttp.abc import Request
from aiohttp_apispec import docs, json_schema
from ipv8.REST.schema import schema
from marshmallow.fields import String
from tribler.core.knowledge.restapi.knowledge_endpoint import HandledErrorSchema
from tribler.core.libtorrent.download_manager.download_config import DownloadConfig
from tribler.core.libtorrent.download_manager.download_manager import DownloadManager
from tribler.core.libtorrent.torrentdef import TorrentDef
from tribler.core.libtorrent.torrents import create_torrent_file
from tribler.core.restapi.rest_endpoint import (
HTTP_BAD_REQUEST,
MAX_REQUEST_SIZE,
RESTEndpoint,
RESTResponse,
return_handled_exception,
)
def recursive_bytes(obj):  # noqa: ANN001, ANN201
    """
    Convert any unicode strings within a Python data structure to bytes. Strings will be encoded using UTF-8.

    :param obj: object comprised of lists/dicts/strings/bytes
    :return: obj: object comprised of lists/dicts/bytes
    """
    if isinstance(obj, str):
        return obj.encode('utf8')
    if isinstance(obj, list):
        return [recursive_bytes(element) for element in obj]
    if isinstance(obj, dict):
        return {recursive_bytes(key): recursive_bytes(value) for key, value in obj.items()}
    return obj
class CreateTorrentEndpoint(RESTEndpoint):
    """
    Create a torrent file from local files.

    See: http://www.bittorrent.org/beps/bep_0012.html
    """

    path = "/api/createtorrent"

    def __init__(self, download_manager: DownloadManager, client_max_size: int = MAX_REQUEST_SIZE) -> None:
        """
        Create a new endpoint to create torrents.

        :param download_manager: the download manager used to optionally start seeding the created torrent.
        :param client_max_size: the maximum allowed request body size (the files are referenced by path, not
                                uploaded, so the default is normally sufficient).
        """
        super().__init__(client_max_size=client_max_size)
        self.download_manager = download_manager
        self.app.add_routes([web.post("", self.create_torrent)])

    @docs(
        tags=["Libtorrent"],
        summary="Create a torrent from local files and return it in base64 encoding.",
        parameters=[{
            "in": "query",
            "name": "download",
            "description": "Flag indicating whether or not to start downloading",
            "type": "boolean",
            "required": False
        }],
        responses={
            200: {
                "schema": schema(CreateTorrentResponse={"torrent": "base64 encoded torrent file"}),
                "examples": {"Success": {"success": True}}
            },
            HTTP_BAD_REQUEST: {
                "schema": HandledErrorSchema,
                "examples": {"Error": {"error": "files parameter missing"}}
            }
        }
    )
    @json_schema(schema(CreateTorrentRequest={
        "files": [String],
        "name": String,
        "description": String,
        "trackers": [String],
        "export_dir": String
    }))
    async def create_torrent(self, request: Request) -> RESTResponse:
        """
        Create a torrent from local files and return it in base64 encoding.
        """
        parameters = await request.json()
        params = {}

        # The "files" parameter is mandatory: without input paths there is nothing to create a torrent from.
        if parameters.get("files"):
            file_path_list = [Path(p) for p in parameters["files"]]
        else:
            return RESTResponse({"error": "files parameter missing"}, status=HTTP_BAD_REQUEST)

        if parameters.get("description"):
            params["comment"] = parameters["description"]

        if parameters.get("trackers"):
            tracker_url_list = parameters["trackers"]
            # BEP 12: "announce" holds the primary tracker, "announce-list" the full tier list.
            params["announce"] = tracker_url_list[0]
            params["announce-list"] = tracker_url_list

        name = "unknown"
        if parameters.get("name"):
            name = parameters["name"]
            params["name"] = name

        export_dir = None
        if parameters.get("export_dir"):
            export_dir = Path(parameters["export_dir"])

        try:
            v = version("tribler")
        except PackageNotFoundError:
            # Running from a git checkout instead of an installed package.
            v = "git"
        params["created by"] = f"Tribler version: {v}"

        params["nodes"] = False
        params["httpseeds"] = False
        params["encoding"] = False
        params["piece length"] = 0  # auto

        # Only write a .torrent file to disk if a valid, existing export directory was supplied.
        save_path = export_dir / (f"{name}.torrent") if export_dir and export_dir.exists() else None
        try:
            # Torrent creation hashes all input files, so run it off the event loop.
            result = await asyncio.get_event_loop().run_in_executor(None, create_torrent_file,
                                                                    file_path_list, recursive_bytes(params),
                                                                    save_path)
        except (OSError, UnicodeDecodeError, RuntimeError) as e:
            self._logger.exception(e)
            return return_handled_exception(e)

        metainfo_dict = lt.bdecode(result["metainfo"])

        # Download this torrent if specified (simplified from a redundant triple check of the same query value)
        if request.query.get("download") == "1":
            download_config = DownloadConfig.from_defaults(self.download_manager.config)
            download_config.set_dest_dir(result["base_dir"])
            download_config.set_hops(self.download_manager.config.get("libtorrent/download_defaults/number_hops"))
            await self.download_manager.start_download(save_path, TorrentDef(metainfo_dict), download_config)

        return RESTResponse(json.dumps({"torrent": base64.b64encode(result["metainfo"]).decode()}))
| 5,483 | Python | .py | 125 | 34.688 | 114 | 0.633621 | Tribler/tribler | 4,768 | 443 | 131 | GPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,030 | downloads_endpoint.py | Tribler_tribler/src/tribler/core/libtorrent/restapi/downloads_endpoint.py | from __future__ import annotations
import mimetypes
from asyncio import get_event_loop, shield, wait_for
from binascii import hexlify, unhexlify
from pathlib import Path, PurePosixPath
from typing import TYPE_CHECKING, Any, Optional, TypedDict, cast
import libtorrent as lt
from aiohttp import web
from aiohttp.web_exceptions import HTTPPartialContent, HTTPRequestRangeNotSatisfiable
from aiohttp.web_response import StreamResponse
from aiohttp_apispec import docs, json_schema
from ipv8.messaging.anonymization.tunnel import PEER_FLAG_EXIT_BT
from ipv8.REST.schema import schema
from marshmallow.fields import Boolean, Float, Integer, List, String
from tribler.core.libtorrent.download_manager.download import Download, IllegalFileIndex
from tribler.core.libtorrent.download_manager.download_config import DownloadConfig
from tribler.core.libtorrent.download_manager.download_manager import DownloadManager
from tribler.core.libtorrent.download_manager.download_state import DOWNLOAD, UPLOAD, DownloadStatus
from tribler.core.libtorrent.download_manager.stream import Stream, StreamChunk
from tribler.core.libtorrent.torrentdef import TorrentDef
from tribler.core.restapi.rest_endpoint import (
HTTP_BAD_REQUEST,
HTTP_INTERNAL_SERVER_ERROR,
HTTP_NOT_FOUND,
RESTEndpoint,
RESTResponse,
return_handled_exception,
)
if TYPE_CHECKING:
from aiohttp.abc import AbstractStreamWriter, BaseRequest, Request
from tribler.core.database.store import MetadataStore
from tribler.core.tunnel.community import TriblerTunnelCommunity
# Keys used in the "checkpoints" section of the get_downloads response
# (counts of checkpoints known/loaded by the DownloadManager).
TOTAL = "total"
LOADED = "loaded"
ALL_LOADED = "all_loaded"
class JSONFilesInfo(TypedDict):
    """
    A JSON dict to describe file info.
    """

    index: int  # file index within the torrent, or an IllegalFileIndex sentinel value
    name: str  # file path, always rendered in Posix format
    size: int  # file size in bytes
    included: bool  # whether the file is selected for download
    progress: float  # download completion of this file, in the range [0.0, 1.0]
class DownloadsEndpoint(RESTEndpoint):
    """
    This endpoint is responsible for all requests regarding downloads. Examples include getting all downloads,
    starting, pausing and stopping downloads.
    """

    path = "/api/downloads"

    def __init__(self, download_manager: DownloadManager, metadata_store: MetadataStore | None = None,
                 tunnel_community: TriblerTunnelCommunity | None = None) -> None:
        """
        Create a new endpoint to query the status of downloads.

        :param download_manager: the manager holding all known downloads.
        :param metadata_store: optional metadata store (unused directly here, kept for endpoint wiring).
        :param tunnel_community: optional tunnel community, used to enrich download status with tunnel info.
        """
        super().__init__()
        self.download_manager = download_manager
        self.mds = metadata_store
        self.tunnel_community = tunnel_community
        self.app.add_routes([
            web.get("", self.get_downloads),
            web.put("", self.add_download),
            web.delete("/{infohash}", self.delete_download),
            web.patch("/{infohash}", self.update_download),
            web.get("/{infohash}/torrent", self.get_torrent),
            web.put("/{infohash}/trackers", self.add_tracker),
            web.delete("/{infohash}/trackers", self.remove_tracker),
            web.put("/{infohash}/tracker_force_announce", self.tracker_force_announce),
            web.get("/{infohash}/files", self.get_files),
            web.get("/{infohash}/files/expand", self.expand_tree_directory),
            web.get("/{infohash}/files/collapse", self.collapse_tree_directory),
            web.get("/{infohash}/files/select", self.select_tree_path),
            web.get("/{infohash}/files/deselect", self.deselect_tree_path),
            web.get("/{infohash}/stream/{fileindex}", self.stream, allow_head=False)
        ])

    @staticmethod
    def return_404(message: str = "this download does not exist") -> RESTResponse:
        """
        Returns a 404 response code if your channel has not been created.
        """
        return RESTResponse({"error": message}, status=HTTP_NOT_FOUND)

    def create_dconfig_from_params(self, parameters: dict) -> tuple[DownloadConfig, None] | tuple[None, str]:
        """
        Create a download configuration based on some given parameters.

        Possible parameters are:
        - anon_hops: the number of hops for the anonymous download. 0 hops is equivalent to a plain download
        - safe_seeding: whether the seeding of the download should be anonymous or not (0 = off, 1 = on)
        - destination: the destination path of the torrent (where it is saved on disk)

        :return: (config, None) on success or (None, error message) on invalid input.
        """
        download_config = DownloadConfig.from_defaults(self.download_manager.config)

        anon_hops = parameters.get('anon_hops')
        safe_seeding = bool(parameters.get('safe_seeding', 0))

        # Anonymous downloads must also seed anonymously, otherwise the anonymity is void.
        if anon_hops is not None:
            if anon_hops > 0 and not safe_seeding:
                return None, "Cannot set anonymous download without safe seeding enabled"

            if anon_hops >= 0:
                download_config.set_hops(anon_hops)

        if safe_seeding:
            download_config.set_safe_seeding(True)

        if 'destination' in parameters:
            download_config.set_dest_dir(parameters['destination'])

        if 'selected_files' in parameters:
            download_config.set_selected_files(parameters['selected_files'])

        return download_config, None

    @staticmethod
    def get_files_info_json(download: Download) -> list[JSONFilesInfo]:
        """
        Return file information as JSON from a specified download.
        """
        files_json = []
        files_completion = dict(download.get_state().get_files_completion())
        selected_files = download.config.get_selected_files()
        for file_index, (fn, size) in enumerate(download.get_def().get_files_with_length()):
            files_json.append(cast(JSONFilesInfo, {
                "index": file_index,
                # We always return files in Posix format to make GUI independent of Core and simplify testing
                "name": str(PurePosixPath(fn)),
                "size": size,
                # An empty selection means "all files included".
                "included": (file_index in selected_files or not selected_files),
                "progress": files_completion.get(fn, 0.0)
            }))
        return files_json

    @staticmethod
    def get_files_info_json_paged(download: Download, view_start: Path, view_size: int) -> list[JSONFilesInfo]:
        """
        Return file info, similar to get_files_info_json() but paged (based on view_start and view_size).

        Note that the view_start path is not included in the return value.

        :param view_start: The last-known path from which to fetch new paths.
        :param view_size: The requested number of elements (though only less may be available).
        """
        if not download.tdef.torrent_info_loaded():
            # The torrent info is not available yet: kick off loading and return a placeholder entry.
            download.tdef.load_torrent_info()
            return [{
                "index": IllegalFileIndex.unloaded.value,
                "name": "loading...",
                "size": 0,
                "included": False,
                "progress": 0.0
            }]
        return [
            {
                "index": download.get_file_index(path),
                "name": str(PurePosixPath(path_str)),
                "size": download.get_file_length(path),
                "included": download.is_file_selected(path),
                "progress": download.get_file_completion(path)
            }
            for path_str in download.tdef.torrent_file_tree.view(view_start, view_size)
            if (path := Path(path_str))
        ]

    @docs(
        tags=["Libtorrent"],
        summary="Return all downloads, both active and inactive",
        parameters=[{
            "in": "query",
            "name": "get_peers",
            "description": "Flag indicating whether or not to include peers",
            "type": "boolean",
            "required": False
        },
            {
                "in": "query",
                "name": "get_pieces",
                "description": "Flag indicating whether or not to include pieces",
                "type": "boolean",
                "required": False
            },
            {
                "in": "query",
                "name": "get_availability",
                "description": "Flag indicating whether or not to include availability",
                "type": "boolean",
                "required": False
            },
            {
                "in": "query",
                "name": "infohash",
                "description": "Limit fetching of files, peers, and pieces to a specific infohash",
                "type": "str",
                "required": False
            },
            {
                "in": "query",
                "name": "excluded",
                "description": "If specified, only return downloads excluding this one",
                "type": "str",
                "required": False
            }
        ],
        responses={
            200: {
                "schema": schema(DownloadsResponse={
                    "downloads": schema(Download={
                        "name": String,
                        "progress": Float,
                        "infohash": String,
                        "speed_down": Float,
                        "speed_up": Float,
                        "status": String,
                        "status_code": Integer,
                        "size": Integer,
                        "eta": Integer,
                        "num_peers": Integer,
                        "num_seeds": Integer,
                        "all_time_upload": Integer,
                        "all_time_download": Integer,
                        "all_time_ratio": Float,
                        "files": String,
                        "trackers": String,
                        "hops": Integer,
                        "anon_download": Boolean,
                        "safe_seeding": Boolean,
                        "max_upload_speed": Integer,
                        "max_download_speed": Integer,
                        "destination": String,
                        "availability": Float,
                        "peers": String,
                        "total_pieces": Integer,
                        "vod_prebuffering_progress": Float,
                        "vod_prebuffering_progress_consec": Float,
                        "error": String,
                        "time_added": Integer
                    }),
                    "checkpoints": schema(Checkpoints={
                        TOTAL: Integer,
                        LOADED: Integer,
                        ALL_LOADED: Boolean,
                    })
                }),
            }
        },
        description="This endpoint returns all downloads in Tribler, both active and inactive. The progress "
                    "is a number ranging from 0 to 1, indicating the progress of the specific state (downloading, "
                    "checking etc). The download speeds have the unit bytes/sec. The size of the torrent is given "
                    "in bytes. The estimated time assumed is given in seconds.\n\n"
                    "Detailed information about peers and pieces is only requested when the get_peers and/or "
                    "get_pieces flag is set. Note that setting this flag has a negative impact on performance "
                    "and should only be used in situations where this data is required. "
    )
    async def get_downloads(self, request: Request) -> RESTResponse:  # noqa: C901
        """
        Return all downloads, both active and inactive.
        """
        params = request.query
        get_peers = params.get('get_peers', '0') == '1'
        get_pieces = params.get('get_pieces', '0') == '1'
        get_availability = params.get('get_availability', '0') == '1'
        unfiltered = not params.get('infohash')

        checkpoints = {
            TOTAL: self.download_manager.checkpoints_count,
            LOADED: self.download_manager.checkpoints_loaded,
            ALL_LOADED: self.download_manager.all_checkpoints_are_loaded,
        }

        result = []
        downloads = self.download_manager.get_downloads()
        for download in downloads:
            if download.hidden:
                continue
            state = download.get_state()
            tdef = download.get_def()
            hex_infohash = hexlify(tdef.get_infohash()).decode()
            if params.get("excluded") == hex_infohash:
                continue

            # Create tracker information of the download
            tracker_info = []
            for url, url_info in download.get_tracker_status().items():
                tracker_info.append({"url": url, "peers": url_info[0], "status": url_info[1]})

            num_seeds, num_peers = state.get_num_seeds_peers()
            num_connected_seeds, num_connected_peers = download.get_num_connected_seeds_peers()

            name = tdef.get_name_utf8()
            status = self._get_extended_status(download)

            info = {
                "name": name,
                "progress": state.get_progress(),
                "infohash": hex_infohash,
                "speed_down": state.get_current_payload_speed(DOWNLOAD),
                "speed_up": state.get_current_payload_speed(UPLOAD),
                "status": status.name,
                "status_code": status.value,
                "size": tdef.get_length(),
                "eta": state.get_eta(),
                "num_peers": num_peers,
                "num_seeds": num_seeds,
                "num_connected_peers": num_connected_peers,
                "num_connected_seeds": num_connected_seeds,
                "all_time_upload": state.all_time_upload,
                "all_time_download": state.all_time_download,
                "all_time_ratio": state.get_all_time_ratio(),
                "trackers": tracker_info,
                "hops": download.config.get_hops(),
                "anon_download": download.get_anon_mode(),
                "safe_seeding": download.config.get_safe_seeding(),
                # Maximum upload/download rates are set for entire sessions
                "max_upload_speed": DownloadManager.get_libtorrent_max_upload_rate(self.download_manager.config),
                "max_download_speed": DownloadManager.get_libtorrent_max_download_rate(self.download_manager.config),
                "destination": str(download.config.get_dest_dir()),
                "total_pieces": tdef.get_nr_pieces(),
                "error": repr(state.get_error()) if state.get_error() else "",
                "time_added": download.config.get_time_added()
            }

            # Stream (VOD) progress is only available when a stream object is attached to the download.
            if download.stream:
                info.update({
                    "vod_prebuffering_progress": download.stream.prebuffprogress,
                    "vod_prebuffering_progress_consec": download.stream.prebuffprogress_consec,
                    "vod_header_progress": download.stream.headerprogress,
                    "vod_footer_progress": download.stream.footerprogress,
                })

            if unfiltered or params.get("infohash") == info["infohash"]:
                # Add peers information if requested
                if get_peers:
                    peer_list = state.get_peer_list(include_have=False)
                    for peer_info in peer_list:
                        if "extended_version" in peer_info:
                            peer_info["extended_version"] = self._safe_extended_peer_info(peer_info["extended_version"])
                    info["peers"] = peer_list

                # Add piece information if requested
                if get_pieces:
                    info["pieces"] = download.get_pieces_base64().decode()

                # Add availability if requested
                if get_availability:
                    info["availability"] = state.get_availability()

            result.append(info)
        return RESTResponse({"downloads": result, "checkpoints": checkpoints})

    @docs(
        tags=["Libtorrent"],
        summary="Start a download from a provided URI.",
        parameters=[{
            "in": "query",
            "name": "get_peers",
            "description": "Flag indicating whether or not to include peers",
            "type": "boolean",
            "required": False
        },
            {
                "in": "query",
                "name": "get_pieces",
                "description": "Flag indicating whether or not to include pieces",
                "type": "boolean",
                "required": False
            },
            {
                "in": "query",
                "name": "get_files",
                "description": "Flag indicating whether or not to include files",
                "type": "boolean",
                "required": False
            }],
        responses={
            200: {
                "schema": schema(AddDownloadResponse={"started": Boolean, "infohash": String}),
                "examples": {"started": True, "infohash": "4344503b7e797ebf31582327a5baae35b11bda01"}
            }
        },
    )
    @json_schema(schema(AddDownloadRequest={
        "anon_hops": (Integer, "Number of hops for the anonymous download. No hops is equivalent to a plain download"),
        "safe_seeding": (Boolean, "Whether the seeding of the download should be anonymous or not"),
        "destination": (String, "the download destination path of the torrent"),
        "uri*": (String, "The URI of the torrent file that should be downloaded. This URI can either represent a file "
                         "location, a magnet link or a HTTP(S) url."),
    }))
    async def add_download(self, request: Request) -> RESTResponse:  # noqa: C901
        """
        Start a download from a provided URI.
        """
        tdef = uri = None
        if request.content_type == 'applications/x-bittorrent':
            # Raw torrent file in the body: config values come from the query string instead of a JSON body.
            params: dict[str, str | int | list[int]] = {}
            for k, v in request.query.items():
                if k == "anon_hops":
                    params[k] = int(v)
                elif k == "safe_seeding":
                    params[k] = v != "false"
                else:
                    params[k] = v
            body = await request.read()
            metainfo = cast(dict[bytes, Any], lt.bdecode(body))
            # "selected_files" is smuggled into the metainfo dict by the caller; pop it before creating the tdef.
            packed_selected_files = cast(Optional[list[int]], metainfo.pop(b"selected_files", None))
            if packed_selected_files is not None:
                params["selected_files"] = packed_selected_files
            tdef = TorrentDef.load_from_dict(metainfo)
        else:
            params = await request.json()
            uri = params.get("uri")
            if not uri:
                return RESTResponse({"error": "uri parameter missing"}, status=HTTP_BAD_REQUEST)

        download_config, error = self.create_dconfig_from_params(params)
        if error:
            return RESTResponse({"error": error}, status=HTTP_BAD_REQUEST)

        try:
            if tdef:
                download = await self.download_manager.start_download(tdef=tdef, config=download_config)
            elif uri:
                download = await self.download_manager.start_download_from_uri(uri, config=download_config)
        except Exception as e:
            return RESTResponse({"error": str(e)}, status=HTTP_INTERNAL_SERVER_ERROR)

        return RESTResponse({"started": True, "infohash": hexlify(download.get_def().get_infohash()).decode()})

    @docs(
        tags=["Libtorrent"],
        summary="Remove a specific download.",
        parameters=[{
            "in": "path",
            "name": "infohash",
            "description": "Infohash of the download to remove",
            "type": "string",
            "required": True
        }],
        responses={
            200: {
                "schema": schema(DeleteDownloadResponse={"removed": Boolean, "infohash": String}),
                "examples": {"removed": True, "infohash": "4344503b7e797ebf31582327a5baae35b11bda01"}
            }
        },
    )
    @json_schema(schema(RemoveDownloadRequest={
        'remove_data': (Boolean, 'Whether or not to remove the associated data'),
    }))
    async def delete_download(self, request: Request) -> RESTResponse:
        """
        Remove a specific download.
        """
        parameters = await request.json()
        if "remove_data" not in parameters:
            return RESTResponse({"error": "remove_data parameter missing"}, status=HTTP_BAD_REQUEST)

        infohash = unhexlify(request.match_info["infohash"])
        download = self.download_manager.get_download(infohash)
        if not download:
            return DownloadsEndpoint.return_404()

        try:
            await self.download_manager.remove_download(download, remove_content=parameters["remove_data"])
        except Exception as e:
            self._logger.exception(e)
            return return_handled_exception(e)

        return RESTResponse({"removed": True, "infohash": hexlify(download.get_def().get_infohash()).decode()})

    @docs(
        tags=["Libtorrent"],
        summary="Update a specific download.",
        parameters=[{
            "in": "path",
            "name": "infohash",
            "description": "Infohash of the download to update",
            "type": "string",
            "required": True
        }],
        responses={
            200: {
                "schema": schema(UpdateDownloadResponse={"modified": Boolean, "infohash": String}),
                "examples": {"modified": True, "infohash": "4344503b7e797ebf31582327a5baae35b11bda01"}
            }
        },
    )
    @json_schema(schema(UpdateDownloadRequest={
        "state": (String, "State parameter to be passed to modify the state of the download (resume/stop/recheck)"),
        "selected_files": (List(Integer), "File indexes to be included in the download"),
        "anon_hops": (Integer, "The anonymity of a download can be changed at runtime by passing the anon_hops "
                               "parameter, however, this must be the only parameter in this request.")
    }))
    async def update_download(self, request: Request) -> RESTResponse:  # noqa: C901, PLR0912
        """
        Update a specific download.
        """
        infohash = unhexlify(request.match_info["infohash"])
        download = self.download_manager.get_download(infohash)
        if not download:
            return DownloadsEndpoint.return_404()

        parameters = await request.json()
        if len(parameters) > 1 and "anon_hops" in parameters:
            return RESTResponse({"error": "anon_hops must be the only parameter in this request"},
                                status=HTTP_BAD_REQUEST)
        if 'anon_hops' in parameters:
            anon_hops = int(parameters['anon_hops'])
            try:
                await self.download_manager.update_hops(download, anon_hops)
            except Exception as e:
                self._logger.exception(e)
                return return_handled_exception(e)
            return RESTResponse({"modified": True, "infohash": hexlify(download.get_def().get_infohash()).decode()})

        if "selected_files" in parameters:
            selected_files_list = parameters["selected_files"]
            num_files = len(download.tdef.get_files())
            if not all(0 <= index < num_files for index in selected_files_list):
                return RESTResponse({"error": "index out of range"}, status=HTTP_BAD_REQUEST)
            download.set_selected_files(selected_files_list)

        if state := parameters.get("state"):
            if state == "resume":
                download.resume()
            elif state == "stop":
                await download.stop(user_stopped=True)
            elif state == "recheck":
                download.force_recheck()
            elif state == "move_storage":
                # Guard against a missing "dest_dir" (previously raised an unhandled KeyError -> HTTP 500).
                if "dest_dir" not in parameters:
                    return RESTResponse({"error": "dest_dir parameter missing"}, status=HTTP_BAD_REQUEST)
                dest_dir = Path(parameters["dest_dir"])
                if not dest_dir.exists():
                    return RESTResponse({"error": f"Target directory ({dest_dir}) does not exist"},
                                        status=HTTP_BAD_REQUEST)
                download.move_storage(dest_dir)
                download.checkpoint()
            else:
                return RESTResponse({"error": "unknown state parameter"}, status=HTTP_BAD_REQUEST)

        return RESTResponse({"modified": True, "infohash": hexlify(download.get_def().get_infohash()).decode()})

    @docs(
        tags=["Libtorrent"],
        summary="Return the .torrent file associated with the specified download.",
        parameters=[{
            "in": "path",
            "name": "infohash",
            "description": "Infohash of the download from which to get the .torrent file",
            "type": "string",
            "required": True
        }],
        responses={
            200: {'description': 'The torrent'}
        }
    )
    async def get_torrent(self, request: Request) -> RESTResponse:
        """
        Return the .torrent file associated with the specified download.
        """
        infohash = unhexlify(request.match_info["infohash"])
        download = self.download_manager.get_download(infohash)
        if not download:
            return DownloadsEndpoint.return_404()

        torrent = download.get_torrent_data()
        if not torrent:
            return DownloadsEndpoint.return_404()

        return RESTResponse(lt.bencode(torrent), headers={
            "content-type": "application/x-bittorrent",
            "Content-Disposition": f"attachment; filename={hexlify(infohash).decode()}.torrent"
        })

    @docs(
        tags=["Libtorrent"],
        summary="Add a tracker to the specified torrent.",
        parameters=[{
            "in": "path",
            "name": "infohash",
            "description": "Infohash of the download to add the given tracker to",
            "type": "string",
            "required": True
        }],
        responses={
            200: {
                "schema": schema(AddTrackerResponse={"added": Boolean}),
                "examples": {"added": True}
            }
        }
    )
    @json_schema(schema(AddTrackerRequest={
        "url": (String, "The tracker URL to insert"),
    }))
    async def add_tracker(self, request: Request) -> RESTResponse:
        """
        Add a tracker to the specified download.
        """
        infohash = unhexlify(request.match_info["infohash"])
        download = self.download_manager.get_download(infohash)
        if not download:
            return DownloadsEndpoint.return_404()

        parameters = await request.json()
        url = parameters.get("url")
        if not url:
            return RESTResponse({"error": "url parameter missing"}, status=HTTP_BAD_REQUEST)

        try:
            download.add_trackers([url])
            # Immediately announce to the newly-added (last) tracker.
            download.handle.force_reannounce(0, len(download.handle.trackers()) - 1)
        except RuntimeError as e:
            return RESTResponse({"error": str(e)}, status=HTTP_INTERNAL_SERVER_ERROR)

        return RESTResponse({"added": True})

    @docs(
        tags=["Libtorrent"],
        summary="Remove a tracker from the specified torrent.",
        parameters=[{
            "in": "path",
            "name": "infohash",
            "description": "Infohash of the download to remove the given tracker from",
            "type": "string",
            "required": True
        }],
        responses={
            200: {
                "schema": schema(AddTrackerResponse={"removed": Boolean}),
                "examples": {"removed": True}
            }
        }
    )
    @json_schema(schema(AddTrackerRequest={
        "url": (String, "The tracker URL to remove"),
    }))
    async def remove_tracker(self, request: Request) -> RESTResponse:
        """
        Remove a tracker from the specified download.
        """
        infohash = unhexlify(request.match_info["infohash"])
        download = self.download_manager.get_download(infohash)
        if not download:
            return DownloadsEndpoint.return_404()

        parameters = await request.json()
        url = parameters.get("url")
        if not url:
            return RESTResponse({"error": "url parameter missing"}, status=HTTP_BAD_REQUEST)

        try:
            download.handle.replace_trackers([tracker for tracker in download.handle.trackers()
                                              if tracker["url"] != url])
        except RuntimeError as e:
            return RESTResponse({"error": str(e)}, status=HTTP_INTERNAL_SERVER_ERROR)

        return RESTResponse({"removed": True})

    @docs(
        tags=["Libtorrent"],
        summary="Forcefully announce to the given tracker.",
        parameters=[{
            "in": "path",
            "name": "infohash",
            "description": "Infohash of the download to force the tracker announce for",
            "type": "string",
            "required": True
        }],
        responses={
            200: {
                "schema": schema(AddTrackerResponse={"forced": Boolean}),
                "examples": {"forced": True}
            }
        }
    )
    @json_schema(schema(AddTrackerRequest={
        "url": (String, "The tracker URL to query"),
    }))
    async def tracker_force_announce(self, request: Request) -> RESTResponse:
        """
        Forcefully announce to the given tracker.
        """
        infohash = unhexlify(request.match_info["infohash"])
        download = self.download_manager.get_download(infohash)
        if not download:
            return DownloadsEndpoint.return_404()

        parameters = await request.json()
        url = parameters.get("url")
        if not url:
            return RESTResponse({"error": "url parameter missing"}, status=HTTP_BAD_REQUEST)

        try:
            for i, tracker in enumerate(download.handle.trackers()):
                if tracker["url"] == url:
                    download.handle.force_reannounce(0, i)
                    break
        except RuntimeError as e:
            return RESTResponse({"error": str(e)}, status=HTTP_INTERNAL_SERVER_ERROR)

        return RESTResponse({"forced": True})

    @docs(
        tags=["Libtorrent"],
        summary="Return file information of a specific download.",
        parameters=[{
            "in": "path",
            "name": "infohash",
            "description": "Infohash of the download to from which to get file information",
            "type": "string",
            "required": True
        },
            {
                "in": "query",
                "name": "view_start_path",
                "description": "Path of the file or directory to form a view for",
                "type": "string",
                "required": False
            },
            {
                "in": "query",
                "name": "view_size",
                "description": "Number of files to include in the view",
                "type": "number",
                "required": False
            }],
        responses={
            200: {
                "schema": schema(GetFilesResponse={"files": [schema(File={"index": Integer,
                                                                          "name": String,
                                                                          "size": Integer,
                                                                          "included": Boolean,
                                                                          "progress": Float})]})
            }
        }
    )
    async def get_files(self, request: Request) -> RESTResponse:
        """
        Return file information of a specific download.
        """
        infohash = unhexlify(request.match_info["infohash"])
        download = self.download_manager.get_download(infohash)
        if not download:
            return DownloadsEndpoint.return_404()

        params = request.query
        view_start_path = params.get("view_start_path")
        if view_start_path is None:
            # No paging requested: return the flat, full file list.
            return RESTResponse({
                "infohash": request.match_info["infohash"],
                "files": self.get_files_info_json(download)
            })

        view_size = int(params.get("view_size", "100"))
        return RESTResponse({
            "infohash": request.match_info["infohash"],
            "query": view_start_path,
            "files": self.get_files_info_json_paged(download, Path(view_start_path), view_size)
        })

    @docs(
        tags=["Libtorrent"],
        summary="Collapse a tree directory.",
        parameters=[{
            "in": "path",
            "name": "infohash",
            "description": "Infohash of the download",
            "type": "string",
            "required": True
        },
            {
                "in": "query",
                "name": "path",
                "description": "Path of the directory to collapse",
                "type": "string",
                "required": True
            }],
        responses={
            200: {
                # Fixed: this previously referenced the class attribute ``path`` (the endpoint path string)
                # instead of the marshmallow ``String`` field, cf. the expand_tree_directory docs below.
                "schema": schema(File={"path": String})
            }
        }
    )
    async def collapse_tree_directory(self, request: Request) -> RESTResponse:
        """
        Collapse a tree directory.
        """
        infohash = unhexlify(request.match_info["infohash"])
        download = self.download_manager.get_download(infohash)
        if not download:
            return DownloadsEndpoint.return_404()

        params = request.query
        path = params.get("path")
        if not path:
            return RESTResponse({"error": "path parameter missing"}, status=HTTP_BAD_REQUEST)

        download.tdef.torrent_file_tree.collapse(Path(path))
        return RESTResponse({"path": path})

    @docs(
        tags=["Libtorrent"],
        summary="Expand a tree directory.",
        parameters=[{
            "in": "path",
            "name": "infohash",
            "description": "Infohash of the download",
            "type": "string",
            "required": True
        },
            {
                "in": "query",
                "name": "path",
                "description": "Path of the directory to expand",
                "type": "string",
                "required": True
            }],
        responses={
            200: {
                "schema": schema(File={"path": String})
            }
        }
    )
    async def expand_tree_directory(self, request: Request) -> RESTResponse:
        """
        Expand a tree directory.
        """
        infohash = unhexlify(request.match_info["infohash"])
        download = self.download_manager.get_download(infohash)
        if not download:
            return DownloadsEndpoint.return_404()

        params = request.query
        path = params.get("path")
        if not path:
            return RESTResponse({"error": "path parameter missing"}, status=HTTP_BAD_REQUEST)

        download.tdef.torrent_file_tree.expand(Path(path))
        return RESTResponse({"path": path})

    @docs(
        tags=["Libtorrent"],
        summary="Select a tree path.",
        parameters=[{
            "in": "path",
            "name": "infohash",
            "description": "Infohash of the download",
            "type": "string",
            "required": True
        },
            {
                "in": "query",
                "name": "path",
                "description": "Path of the directory to select",
                "type": "string",
                "required": True
            }],
        responses={
            200: {}
        }
    )
    async def select_tree_path(self, request: Request) -> RESTResponse:
        """
        Select a tree path.
        """
        infohash = unhexlify(request.match_info["infohash"])
        download = self.download_manager.get_download(infohash)
        if not download:
            return DownloadsEndpoint.return_404()

        params = request.query
        path = params.get("path")
        if not path:
            return RESTResponse({"error": "path parameter missing"}, status=HTTP_BAD_REQUEST)

        download.set_selected_file_or_dir(Path(path), True)
        return RESTResponse({})

    @docs(
        tags=["Libtorrent"],
        summary="Deselect a tree path.",
        parameters=[{
            "in": "path",
            "name": "infohash",
            "description": "Infohash of the download",
            "type": "string",
            "required": True
        },
            {
                "in": "query",
                "name": "path",
                "description": "Path of the directory to deselect",
                "type": "string",
                "required": True
            }],
        responses={
            200: {}
        }
    )
    async def deselect_tree_path(self, request: Request) -> RESTResponse:
        """
        Deselect a tree path.
        """
        infohash = unhexlify(request.match_info["infohash"])
        download = self.download_manager.get_download(infohash)
        if not download:
            return DownloadsEndpoint.return_404()

        params = request.query
        path = params.get("path")
        if not path:
            return RESTResponse({"error": "path parameter missing"}, status=HTTP_BAD_REQUEST)

        download.set_selected_file_or_dir(Path(path), False)
        return RESTResponse({})

    def _get_extended_status(self, download: Download) -> DownloadStatus:
        """
        This function filters the original download status to possibly add tunnel-related status.
        Extracted from DownloadState to remove coupling between DownloadState and Tunnels.
        """
        state = download.get_state()
        status = state.get_status()

        # Nothing to do with tunnels. If stopped - it happened by the user or libtorrent-only reason
        stopped_by_user = state.lt_status and state.lt_status.paused

        if status == DownloadStatus.STOPPED and not stopped_by_user:
            if download.config.get_hops() == 0:
                return DownloadStatus.STOPPED
            if self.tunnel_community and self.tunnel_community.get_candidates(PEER_FLAG_EXIT_BT):
                return DownloadStatus.CIRCUITS
            return DownloadStatus.EXIT_NODES
        return status

    def _safe_extended_peer_info(self, ext_peer_info: bytes) -> str:
        """
        Given a string describing peer info, return a json.dumps() safe representation.

        :param ext_peer_info: the string to convert to a dumpable format
        :return: the safe string
        """
        # First see if we can use this as-is
        if not ext_peer_info:
            return ""

        try:
            return ext_peer_info.decode()
        except UnicodeDecodeError as e:
            # We might have some special unicode characters in here
            self._logger.warning("Error while decoding peer info: %s. %s: %s",
                                 str(ext_peer_info), e.__class__.__name__, str(e))
        # Fall back to a lossy latin-1-style per-byte mapping, which is always json-safe.
        return ''.join(map(chr, ext_peer_info))

    @docs(
        tags=["Libtorrent"],
        summary="Stream the contents of a file that is being downloaded.",
        parameters=[{
            "in": "path",
            "name": "infohash",
            "description": "Infohash of the download to stream",
            "type": "string",
            "required": True
        },
            {
                "in": "path",
                "name": "fileindex",
                "description": "The fileindex to stream",
                "type": "string",
                "required": True
            }],
        responses={
            206: {"description": "Contents of the stream"}
        }
    )
    async def stream(self, request: Request) -> web.StreamResponse:
        """
        Stream the contents of a file that is being downloaded.
        """
        infohash = unhexlify(request.match_info["infohash"])
        download = self.download_manager.get_download(infohash)
        if not download:
            return DownloadsEndpoint.return_404()

        file_index = int(request.match_info["fileindex"])
        if not 0 <= file_index < len(download.get_def().get_files()):
            return DownloadsEndpoint.return_404()

        return TorrentStreamResponse(download, file_index)
class TorrentStreamResponse(StreamResponse):
    """A response object to stream the contents of a download."""

    def __init__(self, download: Download, file_index: int, **kwargs) -> None:
        """
        Create a new TorrentStreamResponse.

        :param download: the download that contains the requested file.
        :param file_index: index of the file (within the torrent) to stream.
        """
        super().__init__(**kwargs)
        self._download = download
        self._file_index = file_index

    async def prepare(self, request: BaseRequest) -> AbstractStreamWriter | None:
        """
        Prepare the response.

        Parses the HTTP Range header, sets up (or reuses) the download's Stream, and
        pumps the requested byte range to the client.
        """
        file_name, file_size = self._download.get_def().get_files_with_length()[self._file_index]
        try:
            start = request.http_range.start
            stop = request.http_range.stop
        except ValueError:
            # Malformed Range header: answer 416 with the total size.
            self.headers["Content-Range"] = f"bytes */{file_size}"
            self.set_status(HTTPRequestRangeNotSatisfiable.status_code)
            return await super().prepare(request)

        todo = file_size
        if start is not None or stop is not None:
            # NOTE(review): assumes aiohttp never yields start=None with stop set — confirm,
            # otherwise "start < 0" would raise a TypeError here.
            if start < 0:
                # Suffix range ("bytes=-N"): serve the last N bytes.
                start += file_size
                start = max(start, 0)
            stop = min(stop if stop is not None else file_size, file_size)
            todo = stop - start

            if start >= file_size:
                # Range begins past the end of the file: unsatisfiable.
                self.headers["Content-Range"] = f"bytes */{file_size}"
                self.set_status(HTTPRequestRangeNotSatisfiable.status_code)
                return await super().prepare(request)

            self.headers["Content-Range"] = f"bytes {start}-{start + todo - 1}/{file_size}"
            self.set_status(HTTPPartialContent.status_code)

        content_type, _ = mimetypes.guess_type(str(file_name))
        self.content_type = content_type or "application/octet-stream"
        self.content_length = todo
        self.headers["Accept-Ranges"] = "bytes"

        if self._download.stream is None:
            self._download.add_stream()
            self._download.stream = cast(Stream, self._download.stream)
        stream = self._download.stream

        start = start or 0
        if not stream.enabled or stream.fileindex != self._file_index:
            # (Re)target the stream at the requested file/offset; bounded wait of 10 seconds.
            await wait_for(stream.enable(self._file_index, start), 10)
            await stream.updateprios()

        reader = StreamChunk(self._download.stream, start)
        await reader.open()
        try:
            writer = await super().prepare(request)
            assert writer is not None
            await reader.seek(start)
            # Note that the chunk size is the same as the underlying torrent's piece length
            data = await reader.read()
            while data:
                await writer.write(data[:todo])
                todo -= len(data)
                if todo <= 0:
                    break
                data = await reader.read()
            await writer.drain()
            await writer.write_eof()
            return writer
        finally:
            # Close the reader off-loop; shield so cancellation cannot skip this cleanup.
            await shield(get_event_loop().run_in_executor(None, reader.close))
| 42,983 | Python | .py | 974 | 31.788501 | 120 | 0.562342 | Tribler/tribler | 4,768 | 443 | 131 | GPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,031 | libtorrent_endpoint.py | Tribler_tribler/src/tribler/core/libtorrent/restapi/libtorrent_endpoint.py | from __future__ import annotations
from asyncio import Future
from binascii import hexlify
from typing import TYPE_CHECKING
from aiohttp import web
from aiohttp_apispec import docs
from ipv8.REST.schema import schema
from marshmallow.fields import Integer
from tribler.core.restapi.rest_endpoint import RESTEndpoint, RESTResponse
if TYPE_CHECKING:
import libtorrent
from aiohttp.abc import Request
from tribler.core.libtorrent.download_manager.download_manager import DownloadManager
class LibTorrentEndpoint(RESTEndpoint):
    """
    Endpoint for getting information about libtorrent sessions and settings.
    """

    path = "/api/libtorrent"

    def __init__(self, download_manager: DownloadManager) -> None:
        """
        Create a new libtorrent endpoint.

        :param download_manager: the download manager whose libtorrent sessions are exposed.
        """
        super().__init__()
        self.download_manager = download_manager
        # Two read-only routes: session settings and session statistics.
        self.app.add_routes([web.get("/settings", self.get_libtorrent_settings),
                             web.get("/session", self.get_libtorrent_session_info)])

    @docs(
        tags=["Libtorrent"],
        summary="Return Libtorrent session settings.",
        parameters=[{
            "in": "query",
            "name": "hop",
            "description": "The hop count of the session for which to return settings",
            "type": "string",
            "required": False
        }],
        responses={
            200: {
                "description": "Return a dictonary with key-value pairs from the Libtorrent session settings",
                "schema": schema(LibtorrentSessionResponse={"hop": Integer,
                                                            "settings": schema(LibtorrentSettings={})})
            }
        }
    )
    async def get_libtorrent_settings(self, request: Request) -> RESTResponse:
        """
        Return Libtorrent session settings.

        Defaults to hop count 0; an unknown hop yields an empty settings dict, not an error.
        """
        args = request.query
        hop = 0
        if args.get("hop"):
            hop = int(args["hop"])

        if hop not in self.download_manager.ltsessions:
            return RESTResponse({"hop": hop, "settings": {}})
        lt_session = await self.download_manager.ltsessions[hop]
        if hop == 0:
            lt_settings = self.download_manager.get_session_settings(lt_session)
            # Hex-encode the fingerprint so the settings dict stays JSON-serializable.
            lt_settings["peer_fingerprint"] = hexlify(lt_settings["peer_fingerprint"].encode()).decode()
        else:
            lt_settings = lt_session.get_settings()

        return RESTResponse({"hop": hop, "settings": lt_settings})

    @docs(
        tags=["Libtorrent"],
        summary="Return Libtorrent session information.",
        parameters=[{
            "in": "query",
            "name": "hop",
            "description": "The hop count of the session for which to return information",
            "type": "string",
            "required": False
        }],
        responses={
            200: {
                "description": "Return a dictonary with key-value pairs from the Libtorrent session information",
                "schema": schema(LibtorrentinfoResponse={"hop": Integer,
                                                         "settings": schema(LibtorrentInfo={})})
            }
        }
    )
    async def get_libtorrent_session_info(self, request: Request) -> RESTResponse:
        """
        Return Libtorrent session information.

        Requests fresh statistics from the session and awaits the resulting
        session_stats_alert before responding.
        """
        session_stats: Future[dict[str, int]] = Future()

        def on_session_stats_alert_received(alert: libtorrent.session_stats_alert) -> None:
            # Guard against a second alert resolving an already-completed future.
            if not session_stats.done():
                session_stats.set_result(alert.values)

        args = request.query
        hop = 0
        if args.get("hop"):
            hop = int(args["hop"])

        # NOTE(review): ltsessions[hop].result() assumes the session future is already
        # done — confirm this holds on all call paths.
        if hop not in self.download_manager.ltsessions or \
                not hasattr(self.download_manager.ltsessions[hop].result(), "post_session_stats"):
            return RESTResponse({"hop": hop, "session": {}})

        self.download_manager.session_stats_callback = on_session_stats_alert_received
        (await self.download_manager.ltsessions[hop]).post_session_stats()
        stats = await session_stats
        return RESTResponse({"hop": hop, "session": stats})
| 4,195 | Python | .py | 98 | 32.459184 | 113 | 0.603824 | Tribler/tribler | 4,768 | 443 | 131 | GPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,032 | download.py | Tribler_tribler/src/tribler/core/libtorrent/download_manager/download.py | """
A wrapper around a libtorrent download.
Author(s): Arno Bakker, Egbert Bouman
"""
from __future__ import annotations
import asyncio
import base64
import itertools
import logging
from asyncio import CancelledError, Future, get_running_loop, sleep, wait_for
from binascii import hexlify
from collections import defaultdict
from contextlib import suppress
from enum import Enum
from pathlib import Path
from typing import TYPE_CHECKING, Any, Awaitable, Callable, List, Tuple, TypedDict, cast
import libtorrent as lt
from bitarray import bitarray
from ipv8.taskmanager import TaskManager, task
from ipv8.util import succeed
from tribler.core.libtorrent.download_manager.download_config import DownloadConfig
from tribler.core.libtorrent.download_manager.download_state import DownloadState, DownloadStatus
from tribler.core.libtorrent.download_manager.stream import Stream
from tribler.core.libtorrent.torrent_file_tree import TorrentFileTree
from tribler.core.libtorrent.torrentdef import MetainfoDict, TorrentDef, TorrentDefNoMetainfo
from tribler.core.libtorrent.torrents import check_handle, get_info_from_handle, require_handle
from tribler.core.notifier import Notification, Notifier
from tribler.tribler_config import TriblerConfigManager
if TYPE_CHECKING:
from tribler.core.libtorrent.download_manager.download_manager import DownloadManager
Getter = Callable[[Any], Any]
class SaveResumeDataError(Exception):
    """Raised when the resume data of a download cannot be saved."""
class IllegalFileIndex(Enum):
    """
    Error codes for Download.get_file_index(). These are used by the GUI to render directories.
    """

    collapsed_dir = -1  # presumably: a directory rendered collapsed in the GUI tree — confirm
    expanded_dir = -2  # presumably: a directory rendered expanded in the GUI tree — confirm
    unloaded = -3  # presumably: file information not loaded yet — confirm
class PeerDict(TypedDict):
    """
    Information of another peer, connected through libtorrent.

    Populated from libtorrent's per-peer statistics (see Download.get_peer_list).
    """

    id: str  # PeerID or 'http seed'
    extended_version: str  # Peer client version, as received during the extend handshake message
    ip: str  # IP address as string or URL of httpseed
    port: int
    pex_received: bool
    optimistic: bool
    direction: str  # 'L'/'R' (outgoing/incoming)
    uprate: float  # Upload rate in KB/s
    uinterested: bool  # Upload Interested: True/False
    uchoked: bool  # Upload Choked: True/False
    uhasqueries: bool  # Upload has requests in buffer and not choked
    uflushed: bool  # Upload is not flushed
    downrate: float  # Download rate in KB/s
    dinterested: bool  # Download interested: True/False
    dchoked: bool  # Download choked: True/False
    snubbed: bool  # Download snubbed: True/False
    utotal: float  # Total uploaded from peer in KB
    dtotal: float  # Total downloaded from peer in KB
    completed: float  # Fraction of download completed by peer (0-1.0)
    speed: float  # The peer's current total download speed (estimated)
class PeerDictHave(PeerDict):
    """
    Extended peer info that includes the "have" field.
    """

    have: list[bool]  # Bitfield object for this peer if not completed
class Download(TaskManager):
"""
Download subclass that represents a libtorrent download.
"""
def __init__(self,  # noqa: PLR0913
             tdef: TorrentDef,
             download_manager: DownloadManager,
             config: DownloadConfig | None = None,
             notifier: Notifier | None = None,
             state_dir: Path | None = None,
             checkpoint_disabled: bool = False,
             hidden: bool = False) -> None:
    """
    Create a new download.

    :param tdef: the torrent definition this download is based on.
    :param download_manager: the manager that owns this download.
    :param config: per-download configuration; derived from defaults when None.
    :param notifier: optional notifier used for GUI-facing events.
    :param state_dir: the Tribler state directory.
    :param checkpoint_disabled: when True, resume data is never written to disk.
    :param hidden: when True, this download is excluded from GET /downloads.
    """
    super().__init__()

    self._logger = logging.getLogger(self.__class__.__name__)

    self.tdef = tdef
    self.handle: lt.torrent_handle | None = None
    self.state_dir = state_dir
    self.download_manager = download_manager
    self.notifier = notifier

    # Libtorrent status
    self.lt_status: lt.torrent_status | None = None
    self.error = None
    # Flags consumed by on_torrent_checked_alert after a (re)check completes.
    self.pause_after_next_hashcheck = False
    self.checkpoint_after_next_hashcheck = False
    self.tracker_status: dict[str, tuple[int, str]] = {}  # {url: (num_peers, status_str)}

    # One-shot futures and persistent handlers, both keyed by libtorrent alert type name.
    self.futures: dict[str, list[tuple[Future, Callable, Getter | None]]] = defaultdict(list)
    self.alert_handlers: dict[str, list[Callable[[lt.torrent_alert], None]]] = defaultdict(list)

    self.future_added = self.wait_for_alert("add_torrent_alert", lambda a: a.handle)
    self.future_removed = self.wait_for_alert("torrent_removed_alert")
    self.future_finished = self.wait_for_alert("torrent_finished_alert")
    self.future_metainfo = self.wait_for_alert("metadata_received_alert", lambda a: self.tdef.get_metainfo())

    alert_handlers = {"tracker_reply_alert": self.on_tracker_reply_alert,
                      "tracker_error_alert": self.on_tracker_error_alert,
                      "tracker_warning_alert": self.on_tracker_warning_alert,
                      "metadata_received_alert": self.on_metadata_received_alert,
                      "performance_alert": self.on_performance_alert,
                      "torrent_checked_alert": self.on_torrent_checked_alert,
                      "torrent_finished_alert": self.on_torrent_finished_alert,
                      "save_resume_data_alert": self.on_save_resume_data_alert,
                      "state_changed_alert": self.on_state_changed_alert,
                      "torrent_error_alert": self.on_torrent_error_alert,
                      "add_torrent_alert": self.on_add_torrent_alert,
                      "torrent_removed_alert": self.on_torrent_removed_alert}

    for alert_type, alert_handler in alert_handlers.items():
        self.register_alert_handler(alert_type, alert_handler)
    self.stream: Stream | None = None

    # With hidden True download will not be in GET/downloads set, as a result will not be shown in GUI
    self.hidden = hidden
    self.checkpoint_disabled = checkpoint_disabled
    # NOTE(review): self.config briefly holds None when config is None, despite the
    # non-optional annotation; it is always replaced by the branches below.
    self.config: DownloadConfig = config
    if config is None and self.download_manager is not None:
        self.config = DownloadConfig.from_defaults(self.download_manager.config)
    elif config is None:
        self.config = DownloadConfig.from_defaults(TriblerConfigManager())

    self._logger.debug("Setup: %s", hexlify(self.tdef.get_infohash()).decode())

    self.checkpoint()
def __str__(self) -> str:
    """
    Convert this download to a human-readable string.
    """
    return (f"Download(name={self.tdef.get_name()}, "
            f"hops={self.config.get_hops():d}, "
            f"checkpoint_disabled={self.checkpoint_disabled:d})")
def __repr__(self) -> str:
    """
    Convert this download to a print-safe human-readable string.
    """
    # Reuse the __str__ representation via the built-in.
    return str(self)
def add_stream(self) -> None:
    """
    Initialize a stream for this download.

    Must only be called when no stream exists yet; the invariant is asserted.
    """
    assert self.stream is None
    self.stream = Stream(self)
def get_torrent_data(self) -> dict[bytes, Any] | None:
    """
    Return torrent data, if the handle is valid and metadata is available.

    :returns: the raw (bencodable) torrent dictionary, or None when no valid
              handle with metadata exists.
    """
    # This method duplicated get_torrent() verbatim; delegate to keep a single
    # implementation of the handle/metadata checks and torrent generation.
    return self.get_torrent()
def register_alert_handler(self, alert_type: str, handler: Callable[[lt.torrent_alert], None]) -> None:
    """
    Add (no replace) a callback for a given alert type.
    """
    # The handler list is created on first access (defaultdict); append keeps earlier handlers.
    handlers_for_type = self.alert_handlers[alert_type]
    handlers_for_type.append(handler)
def wait_for_alert(self, success_type: str, success_getter: Getter | None = None,
                   fail_type: str | None = None, fail_getter: Getter | None = None) -> Future:
    """
    Create a future that fires when a certain alert is received.

    The success alert resolves the future; the (optional) failure alert rejects it.
    """
    result_future: Future[Any] = Future()
    registrations = ((success_type, result_future.set_result, success_getter),
                     (fail_type, result_future.set_exception, fail_getter))
    for alert_type, setter, getter in registrations:
        if alert_type:
            self.futures[alert_type].append((result_future, setter, getter))
    return result_future
async def wait_for_status(self, *status: DownloadStatus) -> None:
    """
    Wait for a given download status to occur.

    :param status: one or more statuses; returns once the current status is any of them.
    """
    while self.get_state().get_status() not in status:
        # Yield control once, then block until the next state change before re-checking.
        await sleep(0)
        await self.wait_for_alert("state_changed_alert")
def get_def(self) -> TorrentDef:
    """
    Get the torrent def belonging to this download.

    :returns: the current (possibly metainfo-less) torrent definition.
    """
    return self.tdef
def get_handle(self) -> Future[lt.torrent_handle]:
    """
    Returns a deferred that fires with a valid libtorrent download handle.
    """
    if self.handle:
        # This block could be safely omitted because `self.future_added` does the same thing.
        # However, it is used in tests, therefore it is better to keep it for now.
        return succeed(self.handle)

    return self.future_added
def get_atp(self) -> dict:
    """
    Get the libtorrent "add torrent parameters" instantiation dictionary.

    :returns: a dict suitable for adding this torrent to a libtorrent session,
              either with full torrent info or (without metainfo) a magnet/URL.
    """
    save_path = self.config.get_dest_dir()
    atp = {"save_path": str(save_path),
           "storage_mode": lt.storage_mode_t.storage_mode_sparse,
           # Start paused; explicit resume happens in on_add_torrent_alert.
           "flags": lt.add_torrent_params_flags_t.flag_paused
                    | lt.add_torrent_params_flags_t.flag_duplicate_is_error
                    | lt.add_torrent_params_flags_t.flag_update_subscribe}

    if self.config.get_share_mode():
        atp["flags"] = cast(int, atp["flags"]) | lt.add_torrent_params_flags_t.flag_share_mode
    if self.config.get_upload_mode():
        atp["flags"] = cast(int, atp["flags"]) | lt.add_torrent_params_flags_t.flag_upload_mode

    resume_data = self.config.get_engineresumedata()
    if not isinstance(self.tdef, TorrentDefNoMetainfo):
        metainfo = self.tdef.get_metainfo()
        torrentinfo = lt.torrent_info(metainfo)

        atp["ti"] = torrentinfo
        if resume_data and isinstance(resume_data, dict):
            # Rewrite save_path as a global path, if it is given as a relative path
            save_path = (resume_data[b"save_path"].decode() if b"save_path" in resume_data
                         else None)
            if save_path and not Path(save_path).is_absolute():
                resume_data[b"save_path"] = str(self.state_dir / save_path)
            atp["resume_data"] = lt.bencode(resume_data)
    else:
        # No metainfo: fall back to the torrent URL or a magnet link built from the infohash.
        atp["url"] = self.tdef.get_url() or "magnet:?xt=urn:btih:" + hexlify(self.tdef.get_infohash()).decode()
        atp["name"] = self.tdef.get_name_as_unicode()

    return atp
def on_add_torrent_alert(self, alert: lt.add_torrent_alert) -> None:
    """
    Handle an add torrent alert.

    Stores the valid handle, restores pause/resume state and connection limits,
    and writes a first checkpoint.

    :raises RuntimeError: when libtorrent reports an error for the add operation.
    """
    self._logger.info("On add torrent alert: %s", repr(alert))

    if hasattr(alert, "error") and alert.error.value():
        self._logger.error("Failed to add torrent (%s)", self.tdef.get_name_as_unicode())
        raise RuntimeError(alert.error.message())
    if not alert.handle.is_valid():
        self._logger.error("Received invalid torrent handle")
        return

    self.handle = alert.handle
    self._logger.debug("Added torrent %s", str(self.handle.info_hash()))

    # In LibTorrent auto_managed flag is now on by default, and as a result
    # any torrent"s state can change from Stopped to Downloading at any time.
    # Here we unset this flag to prevent auto-resuming of stopped torrents.
    if hasattr(self.handle, "unset_flags"):
        self.handle.unset_flags(lt.add_torrent_params_flags_t.flag_auto_managed)

    self.set_selected_files()

    user_stopped = self.config.get_user_stopped()

    # If we lost resume_data always resume download in order to force checking
    if not user_stopped or not self.config.get_engineresumedata():
        self.handle.resume()

        # If we only needed to perform checking, pause download after it is complete
        self.pause_after_next_hashcheck = user_stopped

    # Limit the amount of connections if we have specified that
    self.handle.set_max_connections(self.download_manager.config.get("libtorrent/max_connections_download"))

    # By default don't apply the IP filter
    self.apply_ip_filter(False)

    self.checkpoint()
def get_anon_mode(self) -> bool:
    """
    Get whether this torrent is anonymized (i.e. uses one or more hops).
    """
    hop_count = self.config.get_hops()
    return hop_count > 0
@check_handle(b"")
def get_pieces_base64(self) -> bytes:
    """
    Returns a base64 encoded bitmask of the pieces that we have.

    Returns b"" (via the decorator) when no valid handle exists, or when the
    piece sequence cannot be turned into a bitarray.
    """
    binary_gen = (int(boolean) for boolean in cast(lt.torrent_handle, self.handle).status().pieces)
    try:
        bits = bitarray(binary_gen)
    except ValueError:
        # bitarray rejected the generated sequence — fall back to an empty bitmask.
        return b""
    return base64.b64encode(bits.tobytes())
def post_alert(self, alert_type: str, alert_dict: dict | None = None) -> None:
    """
    Manually post an alert.

    Builds an anonymous object carrying the given attributes and feeds it through
    process_alert() as if libtorrent had produced it.
    """
    alert_dict = alert_dict or {}
    # process_alert() calls alert.category(); provide a stub method returning None.
    alert_dict["category"] = lambda _: None
    alert = type("anonymous_alert", (object,), alert_dict)()
    self.process_alert(alert, alert_type)
def process_alert(self, alert: lt.torrent_alert, alert_type: str) -> None:
    """
    Dispatch an alert to the appropriate registered handlers.

    Also resolves (and removes) any one-shot futures waiting on this alert type.
    """
    try:
        if alert.category() in [lt.alert.category_t.error_notification, lt.alert.category_t.performance_warning]:
            self._logger.debug("Got alert: %s", repr(alert))

        for handler in self.alert_handlers.get(alert_type, []):
            try:
                handler(alert)
            except UnicodeDecodeError as e:
                self._logger.warning("UnicodeDecodeError in %s: %s", handler.__name__, str(e))

        # Futures for this alert type fire once: pop them so they are not reused.
        for future, future_setter, getter in self.futures.pop(alert_type, []):
            if not future.done():
                future_setter(getter(alert) if getter else alert)
    except Exception as e:
        # Never let alert processing propagate; log the failure and carry on.
        self._logger.exception("process_alert failed with %s: %s for alert %s",
                               e.__class__.__name__, str(e), repr(alert))
def on_torrent_error_alert(self, alert: lt.torrent_error_alert) -> None:
    """
    Log a torrent error alert; no further action is taken here.
    """
    alert_repr = repr(alert)
    self._logger.error("On torrent error alert: %s", alert_repr)
def on_state_changed_alert(self, alert: lt.state_changed_alert) -> None:
    """
    Handle a state change alert.

    Refreshes the cached libtorrent status, toggles the IP filter (enabled only
    while seeding with hops > 0), and synthesizes a missing metadata alert.
    """
    self._logger.info("On state changed alert: %s", repr(alert))

    if not self.handle:
        return
    self.update_lt_status(self.handle.status())

    enable = alert.state == lt.torrent_status.seeding and self.config.get_hops() > 0
    self._logger.debug("Setting IP filter for %s to %s", hexlify(self.tdef.get_infohash()), enable)
    self.apply_ip_filter(enable)

    # On a rare occasion we don't get a metadata_received_alert. If this is the case, post an alert manually.
    if alert.state == lt.torrent_status.downloading and isinstance(self.tdef, TorrentDefNoMetainfo):
        self.post_alert("metadata_received_alert")
def on_save_resume_data_alert(self, alert: lt.save_resume_data_alert) -> None:
    """
    Callback for the alert that contains the resume data of a specific download.

    This resume data will be written to a file on disk.
    """
    self._logger.debug("On save resume data alert: %s", repr(alert))
    if self.checkpoint_disabled:
        return

    resume_data = (cast(dict[bytes, Any], lt.bdecode(alert.resume_data))
                   if isinstance(alert.resume_data, bytes)  # Libtorrent 2.X
                   else alert.resume_data)  # Libtorrent 1.X
    # Make save_path relative if the torrent is saved in the Tribler state directory
    # NOTE(review): despite the comment above, the code below stores an *absolute*
    # path — confirm whether relativizing was dropped on purpose.
    if self.state_dir and b"save_path" in resume_data:
        save_path = Path(resume_data[b"save_path"].decode()).absolute()
        resume_data[b"save_path"] = str(save_path)

    if not isinstance(self.tdef, TorrentDefNoMetainfo):
        self.config.set_metainfo(self.tdef.get_metainfo())
    else:
        # No metainfo yet: persist the minimal information needed to restart this download.
        self.config.set_metainfo({
            "infohash": self.tdef.get_infohash(),
            "name": self.tdef.get_name_as_unicode(),
            "url": self.tdef.get_url()
        })
    self.config.set_engineresumedata(resume_data)

    # Save it to file
    basename = hexlify(resume_data[b"info-hash"]).decode() + ".conf"
    Path(self.download_manager.get_checkpoint_dir()).mkdir(parents=True, exist_ok=True)
    filename = self.download_manager.get_checkpoint_dir() / basename
    self.config.config["download_defaults"]["name"] = self.tdef.get_name_as_unicode()  # store name (for debugging)
    try:
        self.config.write(filename)
    except OSError as e:
        self._logger.warning("%s: %s", e.__class__.__name__, str(e))
    else:
        self._logger.debug("Resume data has been saved to: %s", filename)
def on_tracker_reply_alert(self, alert: lt.tracker_reply_alert) -> None:
    """
    Record the peer count a tracker reported and mark that tracker as working.
    """
    self._logger.info("On tracker reply alert: %s", repr(alert))
    reported_peers = alert.num_peers
    self.tracker_status[alert.url] = (reported_peers, 'Working')
def on_tracker_error_alert(self, alert: lt.tracker_error_alert) -> None:
    """
    This alert is generated on tracker timeouts, premature disconnects, invalid response
    or an HTTP response other than "200 OK". - From Libtorrent documentation.
    """
    # The try-except block is added as a workaround to suppress UnicodeDecodeError in `repr(alert)`,
    # `alert.url` and `alert.msg`. See https://github.com/arvidn/libtorrent/issues/143
    # NOTE(review): no try-except is present in this method — confirm whether the
    # workaround was removed deliberately or the comment above is stale.
    self._logger.error("On tracker error alert: %s", repr(alert))
    url = alert.url

    # Map the alert contents to a human-readable status string.
    if alert.msg:
        status = "Error: " + alert.msg
    elif alert.status_code > 0:
        status = "HTTP status code %d" % alert.status_code
    elif alert.status_code == 0:
        status = "Timeout"
    else:
        status = "Not working"

    peers = 0  # If there is a tracker error, alert.num_peers is not available. So resetting peer count to zero.
    self.tracker_status[url] = (peers, status)
def on_tracker_warning_alert(self, alert: lt.tracker_warning_alert) -> None:
    """
    Record a tracker warning, keeping the previously-known peer count for that tracker.
    """
    self._logger.warning("On tracker warning alert: %s", repr(alert))
    previous_peers, _ = self.tracker_status.get(alert.url, (0, ""))
    status = "Warning: " + str(alert.message())
    self.tracker_status[alert.url] = (previous_peers, status)
@check_handle(None)
def on_metadata_received_alert(self, alert: lt.metadata_received_alert) -> None:
    """
    Handle a metadata received alert.

    Rebuilds the TorrentDef from the received metadata (plus known trackers),
    then re-applies the file selection and checkpoints.
    """
    self._logger.info("On metadata received alert: %s", repr(alert))

    self.handle = cast(lt.torrent_handle, self.handle)
    torrent_info = get_info_from_handle(self.handle)
    if not torrent_info:
        return

    try:
        metadata = cast(MetainfoDict, {b"info": lt.bdecode(torrent_info.metadata())})
    except (RuntimeError, ValueError) as e:
        self._logger.warning(e)
        return

    tracker_urls = []
    trackers = []
    try:
        trackers = self.handle.trackers()
    except UnicodeDecodeError as e:
        self._logger.warning(e)
    for tracker in trackers:
        url = tracker["url"]
        try:
            tracker_urls.append(url.encode())
        except UnicodeEncodeError as e:
            self._logger.warning(e)

    # One tracker goes into b"announce"; several become b"announce-list" tiers.
    if len(tracker_urls) > 1:
        metadata[b"announce-list"] = [[tracker] for tracker in tracker_urls]
    elif tracker_urls:
        metadata[b"announce"] = tracker_urls[0]

    try:
        self.tdef = TorrentDef.load_from_dict(metadata)
        with suppress(RuntimeError):
            # Try to load the torrent info in the background if we have a loop.
            get_running_loop().run_in_executor(None, self.tdef.load_torrent_info)
    except ValueError as ve:
        self._logger.exception(ve)
        return

    self.set_selected_files()
    self.checkpoint()
def on_performance_alert(self, alert: lt.performance_alert) -> None:
    """
    Handle a performance alert.

    Grows session buffers in response to libtorrent performance warnings;
    skipped entirely for anonymous downloads.
    """
    self._logger.info("On performance alert: %s", repr(alert))

    if self.get_anon_mode() or self.download_manager.ltsessions is None:
        return

    # When the send buffer watermark is too low, double the buffer size to a
    # maximum of 50MiB. This is the same mechanism as Deluge uses.
    lt_session = self.download_manager.get_session(self.config.get_hops()).result()
    settings = self.download_manager.get_session_settings(lt_session)
    if alert.message().endswith("send buffer watermark too low (upload rate will suffer)"):
        if settings["send_buffer_watermark"] <= 26214400:
            self._logger.info("Setting send_buffer_watermark to %s", 2 * settings["send_buffer_watermark"])
            settings["send_buffer_watermark"] *= 2
            # NOTE(review): settings are read from get_session(hops) but applied to
            # get_session() with default hops — confirm this asymmetry is intended.
            self.download_manager.set_session_settings(self.download_manager.get_session().result(), settings)
    # When the write cache is too small, double the buffer size to a maximum
    # of 64MiB. Again, this is the same mechanism as Deluge uses.
    elif (alert.message().endswith("max outstanding disk writes reached")
            and settings["max_queued_disk_bytes"] <= 33554432):
        self._logger.info("Setting max_queued_disk_bytes to %s", 2 * settings["max_queued_disk_bytes"])
        settings["max_queued_disk_bytes"] *= 2
        self.download_manager.set_session_settings(self.download_manager.get_session().result(), settings)
def on_torrent_removed_alert(self, alert: lt.torrent_removed_alert) -> None:
    """
    Handle a torrent removed alert.

    Clears the stored handle; handle-dependent methods become no-ops afterwards.
    """
    self._logger.info("On torrent remove alert: %s", repr(alert))

    self._logger.debug("Removing %s", self.tdef.get_name())
    self.handle = None
def on_torrent_checked_alert(self, alert: lt.torrent_checked_alert) -> None:
    """
    After a (re)check finishes, apply any deferred pause/checkpoint requests.
    """
    self._logger.info("On torrent checked alert: %s", repr(alert))

    if self.handle and self.pause_after_next_hashcheck:
        # The check was only needed to validate files; return to the paused state.
        self.pause_after_next_hashcheck = False
        self.handle.pause()
    if self.checkpoint_after_next_hashcheck:
        self.checkpoint_after_next_hashcheck = False
        self.checkpoint()
@check_handle(None)
def on_torrent_finished_alert(self, alert: lt.torrent_finished_alert) -> None:
    """
    Handle a torrent finished alert.

    Refreshes status, checkpoints, and notifies listeners when data was downloaded.
    """
    self._logger.info("On torrent finished alert: %s", repr(alert))
    self.handle = cast(lt.torrent_handle, self.handle)
    self.update_lt_status(self.handle.status())
    self.checkpoint()
    downloaded = self.get_state().total_download
    # Only notify when bytes actually came in and a notifier is attached.
    if downloaded > 0 and self.notifier is not None:
        name = self.tdef.get_name_as_unicode()
        infohash = self.tdef.get_infohash().hex()
        self.notifier.notify(Notification.torrent_finished, infohash=infohash, name=name, hidden=self.hidden)
def update_lt_status(self, lt_status: lt.torrent_status) -> None:
    """
    Update libtorrent stats and check if the download should be stopped.
    """
    old_status = self.get_state().get_status()

    self.lt_status = lt_status
    state = self.get_state()

    # Notify the GUI if the status has changed
    if self.notifier and not self.hidden and state.get_status() != old_status:
        self.notifier.notify(Notification.torrent_status_changed,
                             infohash=hexlify(self.tdef.get_infohash()).decode(),
                             status=state.get_status().name)

    if state.get_status() == DownloadStatus.SEEDING:
        mode = self.download_manager.config.get("libtorrent/download_defaults/seeding_mode")
        seeding_ratio = self.download_manager.config.get("libtorrent/download_defaults/seeding_ratio")
        seeding_time = self.download_manager.config.get("libtorrent/download_defaults/seeding_time")
        # Stop seeding once the configured policy ("never"/"ratio"/"time") says we are done.
        if (mode == "never" or
                (mode == "ratio" and state.get_all_time_ratio() >= seeding_ratio) or
                (mode == "time" and state.get_seeding_time() >= seeding_time)):
            self.stop()
@check_handle(None)
def set_selected_files(self, selected_files: list[int] | None = None, prio: int = 4,
                       force: bool = False) -> int | None:
    """
    Set the selected files. If the selected files is None or empty, all files will be selected.

    :param selected_files: file indices to select; falls back to the stored config selection.
    :param prio: libtorrent priority for selected files (deselected files get 0).
    :param force: apply even when a stream is active (streams manage priorities themselves).
    """
    if not force and self.stream is not None:
        return None
    if not isinstance(self.tdef, TorrentDefNoMetainfo) and not self.get_share_mode():
        if selected_files is None:
            selected_files = self.config.get_selected_files()
        else:
            self.config.set_selected_files(selected_files)

        tree = self.tdef.torrent_file_tree
        total_files = self.tdef.torrent_info.num_files()

        if not selected_files:
            selected_files = list(range(total_files))

        def map_selected(index: int) -> int:
            # Mark the file-tree node and translate selection into a per-file priority.
            file_instance = tree.find(Path(tree.file_storage.file_path(index)))
            if index in selected_files:
                file_instance.selected = True
                return prio
            file_instance.selected = False
            return 0

        self.set_file_priorities(list(map(map_selected, range(total_files))))
    return None
@check_handle(False)
def move_storage(self, new_dir: Path) -> bool:
    """
    Move the output files to a different location.

    :param new_dir: the new destination directory.
    :returns: always True (the libtorrent move is skipped for metainfo-less downloads).
    """
    if isinstance(self.tdef, TorrentDefNoMetainfo):
        # No storage exists yet; only remember the new destination.
        self.config.set_dest_dir(new_dir)
        return True
    handle = cast(lt.torrent_handle, self.handle)
    self.handle = handle
    handle.move_storage(str(new_dir))
    self.config.set_dest_dir(new_dir)
    return True
@check_handle(None)
def force_recheck(self) -> None:
    """
    Force libtorrent to validate the files.
    """
    if not isinstance(self.tdef, TorrentDefNoMetainfo):
        self.handle = cast(lt.torrent_handle, self.handle)
        if self.get_state().get_status() == DownloadStatus.STOPPED:
            # The user had this download stopped: re-pause once the check is done.
            self.pause_after_next_hashcheck = True
        # Always write a checkpoint after the check completes.
        self.checkpoint_after_next_hashcheck = True
        self.handle.resume()
        self.handle.force_recheck()
def get_state(self) -> DownloadState:
    """
    Returns a snapshot of the current state of the download.

    :returns: a fresh DownloadState built from the cached libtorrent status and error.
    """
    return DownloadState(self, self.lt_status, self.error)
@task
async def save_resume_data(self, timeout: int = 10) -> None:
    """
    Save the resume data of a download. This method returns when the resume data is available.
    Note that this method only calls save_resume_data once on subsequent calls.

    :param timeout: seconds to wait for the save_resume_data_alert before giving up.
    """
    # NOTE(review): self.futures is keyed by alert type names (e.g.
    # "save_resume_data_alert"); this membership test on "save_resume_data" looks
    # like it always passes — confirm the intended key.
    if "save_resume_data" not in self.futures:
        handle = await self.get_handle()
        handle.save_resume_data()
    try:
        await wait_for(self.wait_for_alert("save_resume_data_alert", None,
                                           "save_resume_data_failed_alert",
                                           lambda a: SaveResumeDataError(a.error.message())), timeout=timeout)
    except (CancelledError, SaveResumeDataError, TimeoutError, asyncio.exceptions.TimeoutError) as e:
        self._logger.exception("Resume data failed to save: %s", e)
def get_peer_list(self, include_have: bool = True) -> List[PeerDict | PeerDictHave]:
    """
    Returns a list of dictionaries, one for each connected peer containing the statistics for that peer.
    In particular, the dictionary contains the keys.

    :param include_have: when True, add each peer's piece bitfield under "have".
    :returns: an empty list when no valid handle exists.
    """
    peers = []
    peer_infos = self.handle.get_peer_info() if self.handle and self.handle.is_valid() else []
    for peer_info in peer_infos:
        try:
            extended_version = peer_info.client
        except UnicodeDecodeError:
            extended_version = b"unknown"
        peer_dict: PeerDict | PeerDictHave = cast(PeerDict, {
            "id": hexlify(peer_info.pid.to_bytes()).decode(),
            "extended_version": extended_version,
            "ip": peer_info.ip[0],
            "port": peer_info.ip[1],
            # optimistic_unchoke = 0x800 seems unavailable in python bindings
            "optimistic": bool(peer_info.flags & 0x800),
            "direction": "L" if bool(peer_info.flags & peer_info.local_connection) else "R",
            "uprate": peer_info.payload_up_speed,
            "uinterested": bool(peer_info.flags & peer_info.remote_interested),
            "uchoked": bool(peer_info.flags & peer_info.remote_choked),
            "uhasqueries": peer_info.upload_queue_length > 0,
            "uflushed": peer_info.used_send_buffer > 0,
            "downrate": peer_info.payload_down_speed,
            "dinterested": bool(peer_info.flags & peer_info.interesting),
            "dchoked": bool(peer_info.flags & peer_info.choked),
            "snubbed": bool(peer_info.flags & 0x1000),
            "utotal": peer_info.total_upload,
            "dtotal": peer_info.total_download,
            "completed": peer_info.progress,
            "speed": peer_info.remote_dl_rate,
            "connection_type": peer_info.connection_type,  # type: ignore[attr-defined] # shortcoming of stubs
            "seed": bool(peer_info.flags & peer_info.seed),
            "upload_only": bool(peer_info.flags & peer_info.upload_only)
        })
        if include_have:
            peer_dict = cast(PeerDictHave, peer_dict)
            peer_dict["have"] = peer_info.pieces
        peers.append(peer_dict)
    return peers
def get_num_connected_seeds_peers(self) -> Tuple[int, int]:
    """
    Return the number of connected seeders and leechers.
    """
    if not self.handle or not self.handle.is_valid():
        return 0, 0

    infos = self.handle.get_peer_info()
    seed_count = sum(1 for info in infos if info.flags & info.seed)
    # Everything that is not a seed counts as a leecher.
    return seed_count, len(infos) - seed_count
def get_torrent(self) -> dict[bytes, Any] | None:
    """
    Create the raw (bencode-ready) torrent dictionary from this download.

    Returns None when there is no valid handle with metadata available.
    """
    handle = self.handle
    if not handle or not handle.is_valid() or not handle.has_metadata():
        return None
    torrent = lt.create_torrent(get_info_from_handle(handle))
    return torrent.generate()
@check_handle(default={})
def get_tracker_status(self) -> dict[str, tuple[int, str]]:
    """
    Retrieve an overview of the trackers and their statuses.

    :returns: a mapping of tracker URL to a ``(peer count, status message)`` tuple. Two
              pseudo-trackers, ``"[DHT]"`` and ``"[PeX]"``, are always included.
    """
    self.handle = cast(lt.torrent_handle, self.handle)
    # Make sure all trackers are in the tracker_status dict
    try:
        tracker_urls = {tracker["url"] for tracker in self.handle.trackers()}
        # Drop cached statuses for trackers no longer registered with the handle.
        for removed in (set(self.tracker_status.keys()) - tracker_urls):
            self.tracker_status.pop(removed)
        for tracker_url in tracker_urls:
            if tracker_url not in self.tracker_status:
                self.tracker_status[tracker_url] = (0, "Not contacted yet")
    except UnicodeDecodeError:
        # Tracker URLs come from libtorrent and may fail to decode for malformed torrents.
        self._logger.warning("UnicodeDecodeError in get_tracker_status")
    # Count DHT and PeX peers
    dht_peers = pex_peers = 0
    peer_info = []
    try:
        peer_info = self.handle.get_peer_info()
    except Exception as e:
        # NOTE(review): broad catch — presumably get_peer_info() can fail on invalidated
        # handles; verify whether a narrower exception type would suffice here.
        self._logger.exception(e)
    for info in peer_info:
        if info.source & info.dht:
            dht_peers += 1
        if info.source & info.pex:
            pex_peers += 1
    # NOTE(review): ``.result()`` assumes the session future is already resolved —
    # confirm this cannot block the calling thread.
    ltsession = self.download_manager.get_session(self.config.get_hops()).result()
    public = self.tdef and not self.tdef.is_private()
    result = self.tracker_status.copy()
    result["[DHT]"] = (dht_peers, "Working" if ltsession.is_dht_running() and public else "Disabled")
    result["[PeX]"] = (pex_peers, "Working")
    return result
async def shutdown(self) -> None:
    """
    Shut down the download: drop alert handlers, close any active stream, cancel all
    pending futures and stop the task manager.
    """
    self._logger.info("Shutting down...")
    self.alert_handlers.clear()
    if self.stream is not None:
        self.stream.close()
    pending = [future for future, _, _ in itertools.chain(*self.futures.values()) if not future.done()]
    for pending_future in pending:
        pending_future.cancel()
    with suppress(CancelledError):
        # Wait until the cancellations have actually been processed.
        await asyncio.gather(*pending)
    self.futures.clear()
    await self.shutdown_task_manager()
def stop(self, user_stopped: bool | None = None) -> Awaitable[None]:
    """
    Stop downloading: disable any active stream, optionally record the user-stop flag
    in the config, and pause the libtorrent handle (checkpointing the download state).
    """
    self._logger.debug("Stopping %s", self.tdef.get_name())
    if self.stream is not None:
        self.stream.disable()
    if user_stopped is not None:
        self.config.set_user_stopped(user_stopped)
    if not self.handle or not self.handle.is_valid():
        return succeed(None)
    self.handle.pause()
    return self.checkpoint()
def resume(self) -> None:
    """
    Resume downloading: clear the user-stop flag and resume the libtorrent handle,
    restoring its configured upload mode.
    """
    self._logger.debug("Resuming %s", self.tdef.get_name())
    self.config.set_user_stopped(False)
    handle = self.handle
    if handle and handle.is_valid():
        handle.set_upload_mode(self.get_upload_mode())
        handle.resume()
def get_content_dest(self) -> Path:
    """
    Return the path that the downloaded content is saved to: the configured destination
    directory joined with the torrent name.
    """
    dest_dir = self.config.get_dest_dir()
    return dest_dir / self.tdef.get_name_as_unicode()
def checkpoint(self) -> Awaitable[None]:
    """
    Checkpoint this download. Returns when the checkpointing is completed.

    Persists the libtorrent resume data (through the ``save_resume_data_alert`` machinery)
    so the download can be restored after a restart. Skipped when checkpointing is
    disabled or the handle reports no new resume data.
    """
    if self.checkpoint_disabled:
        self._logger.debug("Ignoring checkpoint() call as checkpointing is disabled for this download")
        return succeed(None)
    if self.handle and self.handle.is_valid() and not self.handle.need_save_resume_data():
        self._logger.debug("Ignoring checkpoint() call as checkpointing is not needed")
        return succeed(None)
    if not self.handle or not self.handle.is_valid():
        # Libtorrent hasn't received or initialized this download yet
        # 1. Check if we have data for this infohash already (don't overwrite it if we do!)
        basename = hexlify(self.tdef.get_infohash()).decode() + ".conf"
        filename = Path(self.download_manager.get_checkpoint_dir() / basename)
        if not filename.is_file():
            # 2. If there is no saved data for this infohash, checkpoint it without data so we do not
            # lose it when we crash or restart before the download becomes known.
            resume_data = self.config.get_engineresumedata() or {
                b"file-format": b"libtorrent resume file",
                b"file-version": 1,
                b"info-hash": self.tdef.get_infohash()
            }
            # Fake the libtorrent alert so the regular save path persists this minimal resume data.
            self.post_alert("save_resume_data_alert", {"resume_data": resume_data})
        return succeed(None)
    return self.save_resume_data()
def set_def(self, tdef: TorrentDef) -> None:
    """
    Replace the torrent definition backing this download.
    """
    self.tdef = tdef
@check_handle(None)
def add_trackers(self, trackers: list[bytes]) -> None:
    """
    Register the given tracker URLs with the libtorrent handle, when supported.
    """
    handle = cast(lt.torrent_handle, self.handle)
    self.handle = handle
    if not hasattr(handle, "add_tracker"):
        return
    for tracker_url in trackers:
        handle.add_tracker({"url": tracker_url, "verified": False})
@check_handle(None)
def get_magnet_link(self) -> str:
    """
    Generate a magnet URI for this download.
    """
    handle = cast(lt.torrent_handle, self.handle)  # Ensured by ``check_handle``
    return lt.make_magnet_uri(handle)
@require_handle
def add_peer(self, addr: tuple[str, int]) -> None:
    """
    Ask the handle to connect to a peer learned from a third-party source (not a tracker, not DHT).

    :param addr: The (hostname_ip,port) tuple to connect to
    """
    handle = cast(lt.torrent_handle, self.handle)
    self.handle = handle
    handle.connect_peer(addr, 0)
@require_handle
def set_priority(self, priority: int) -> None:
    """
    Assign the given priority value to this download.
    """
    handle = cast(lt.torrent_handle, self.handle)
    self.handle = handle
    handle.set_priority(priority)
@require_handle
def set_max_upload_rate(self, value: int) -> None:
    """
    Cap the upload rate of this download at the given value (in KiB/s).
    """
    handle = cast(lt.torrent_handle, self.handle)
    self.handle = handle
    handle.set_upload_limit(value * 1024)
@require_handle
def set_max_download_rate(self, value: int) -> None:
    """
    Cap the download rate of this download at the given value (in KiB/s).
    """
    handle = cast(lt.torrent_handle, self.handle)
    self.handle = handle
    handle.set_download_limit(value * 1024)
@require_handle
def apply_ip_filter(self, enable: bool) -> None:
    """
    Toggle the IP filter on this download.
    """
    handle = cast(lt.torrent_handle, self.handle)
    self.handle = handle
    handle.apply_ip_filter(enable)
def get_share_mode(self) -> bool:
    """
    Return whether this download is currently in sharing mode (read from the config).
    """
    share_mode = self.config.get_share_mode()
    return share_mode
@require_handle
def set_share_mode(self, share_mode: bool) -> None:
    """
    Toggle sharing mode for this download, persisting the choice to the config first.
    """
    handle = cast(lt.torrent_handle, self.handle)
    self.handle = handle
    self.config.set_share_mode(share_mode)
    handle.set_share_mode(share_mode)
def get_upload_mode(self) -> bool:
    """
    Return whether this download is currently in upload mode (read from the config).
    """
    upload_mode = self.config.get_upload_mode()
    return upload_mode
@require_handle
def set_upload_mode(self, upload_mode: bool) -> None:
    """
    Toggle upload mode for this download, persisting the choice to the config first.
    """
    handle = cast(lt.torrent_handle, self.handle)
    self.handle = handle
    self.config.set_upload_mode(upload_mode)
    handle.set_upload_mode(upload_mode)
@require_handle
def force_dht_announce(self) -> None:
    """
    Force an announce of this download on the DHT.
    """
    handle = cast(lt.torrent_handle, self.handle)
    self.handle = handle
    handle.force_dht_announce()
@require_handle
def set_sequential_download(self, enable: bool) -> None:
    """
    Toggle sequential download mode for this download.
    """
    handle = cast(lt.torrent_handle, self.handle)
    self.handle = handle
    handle.set_sequential_download(enable)
@check_handle(None)
def set_piece_priorities(self, piece_priorities: list[int]) -> None:
    """
    Assign a priority to every piece in the download.
    """
    handle = cast(lt.torrent_handle, self.handle)
    self.handle = handle
    handle.prioritize_pieces(piece_priorities)
@check_handle([])
def get_piece_priorities(self) -> list[int]:
    """
    Retrieve the current priority of every piece in the download.
    """
    handle = cast(lt.torrent_handle, self.handle)
    self.handle = handle
    return handle.piece_priorities()
@check_handle(None)
def set_file_priorities(self, file_priorities: list[int]) -> None:
    """
    Assign a priority to every file in the download.
    """
    handle = cast(lt.torrent_handle, self.handle)
    self.handle = handle
    handle.prioritize_files(file_priorities)
@check_handle(None)
def set_file_priority(self, file_index: int, prio: int = 4) -> None:
    """
    Assign a priority to a single file in the download.
    """
    handle = cast(lt.torrent_handle, self.handle)
    self.handle = handle
    handle.file_priority(file_index, prio)
@check_handle(None)
def reset_piece_deadline(self, piece: int) -> None:
    """
    Clear the deadline for the given piece.
    """
    handle = cast(lt.torrent_handle, self.handle)
    self.handle = handle
    handle.reset_piece_deadline(piece)
@check_handle(None)
def set_piece_deadline(self, piece: int, deadline: int, flags: int = 0) -> None:
    """
    Set the deadline for a given piece.
    """
    handle = cast(lt.torrent_handle, self.handle)
    self.handle = handle
    handle.set_piece_deadline(piece, deadline, flags)
@check_handle([])
def get_file_priorities(self) -> list[int]:
    """
    Retrieve the current priority of every file in the download.
    """
    handle = cast(lt.torrent_handle, self.handle)
    self.handle = handle
    return handle.file_priorities()
def file_piece_range(self, file_path: Path) -> list[int]:
    """
    Return the list of piece indices spanned by the given file.

    Calling this method with anything but a file path will return an empty list.
    """
    file_index = self.get_file_index(file_path)
    if file_index < 0:
        return []
    info = self.tdef.torrent_info
    first = info.map_file(file_index, 0, 1).piece
    # ``end`` is the first piece that is NOT part of this file.
    if file_index == info.num_files() - 1:
        # There is no next file, so the end is the last piece index + 1 (num_pieces()).
        end = info.num_pieces()
    else:
        end = info.map_file(file_index + 1, 0, 1).piece
    # A single piece may be shared by multiple files; report that one piece.
    return [first] if first == end else list(range(first, end))
@check_handle(0.0)
def get_file_completion(self, path: Path) -> float:
    """
    Calculate the fraction of downloaded pieces for a given file or directory.

    Returns 1.0 for paths that map to zero pieces.
    """
    self.handle = cast(lt.torrent_handle, self.handle)
    pieces = self.file_piece_range(path)
    if not pieces:
        return 1.0
    downloaded = sum(self.handle.have_piece(piece_index) for piece_index in pieces)
    return downloaded / len(pieces)
def get_file_length(self, path: Path) -> int:
    """
    Get the length of a file or directory in bytes.

    Returns 0 if the given path does not point to an existing path in the torrent.
    """
    node = self.tdef.torrent_file_tree.find(path)
    return node.size if node is not None else 0
def get_file_index(self, path: Path) -> int:
    """
    Get the index of a file or directory in a torrent. Note that directories do not have real file indices.

    Special cases ("error codes"):

    - ``-1`` (IllegalFileIndex.collapsed_dir): the given path is not a file but a collapsed directory.
    - ``-2`` (IllegalFileIndex.expanded_dir): the given path is not a file but an expanded directory.
    - ``-3`` (IllegalFileIndex.unloaded): the data structure is not loaded or the path is not found.
    """
    result = self.tdef.torrent_file_tree.find(path)
    if isinstance(result, TorrentFileTree.File):
        # Fix: reuse the already-found node instead of performing a second tree lookup.
        return result.index
    if isinstance(result, TorrentFileTree.Directory):
        return (IllegalFileIndex.collapsed_dir.value if result.collapsed
                else IllegalFileIndex.expanded_dir.value)
    return IllegalFileIndex.unloaded.value
@check_handle(None)
def set_selected_file_or_dir(self, path: Path, selected: bool) -> None:
    """
    Set a single file or directory to be selected or not.

    Updates both the libtorrent file priorities (4 when selected, 0 when deselected) and
    the list of selected file indices in the download config.
    """
    self.handle = cast(lt.torrent_handle, self.handle)
    tree = self.tdef.torrent_file_tree
    prio = 4 if selected else 0
    for index in tree.set_selected(Path(path), selected):
        self.set_file_priority(index, prio)
        if not selected:
            with suppress(ValueError):
                self.config.get_selected_files().remove(index)
        elif index not in self.config.get_selected_files():
            # Fix: guard against duplicate entries when the same file is selected repeatedly.
            self.config.get_selected_files().append(index)
def is_file_selected(self, file_path: Path) -> bool:
    """
    Check whether the given file path is selected.

    Calling this method with anything but a file path will return False.
    """
    node = self.tdef.torrent_file_tree.find(file_path)
    return node.selected if isinstance(node, TorrentFileTree.File) else False
| 46,001 | Python | .py | 955 | 37.882723 | 119 | 0.617129 | Tribler/tribler | 4,768 | 443 | 131 | GPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,033 | stream.py | Tribler_tribler/src/tribler/core/libtorrent/download_manager/stream.py | """
There are 2 types of prioritisation implemented:
1-STATIC PRIORITISATION: Header + Footer + Prebuffer Prioritisation:
When a file in a torrent is set to stream, a predefined size of header + footer + prebuffer is
set to prio:7 deadline:0 and the rest of the files and pieces are set to prio:0 (which means: do not download at all)
to focus only on the required pieces to finish before the file starts to stream. The client is expected not to start
playing the file in this state. The state of this static buffering can be observed over the REST API.
2-DYNAMIC PRIORITISATION: When the static prioritisation is finished, the client can start playing the file.
When a client starts playing the file over HTTP, it will request a chunk; each requested chunk will initiate a
dynamic buffer according to the current read position of the file, mapped to the related piece of the torrent file.
The undownloaded pieces starting from the current read position, up to the length of prebuffsize, will be prioritised
with the DEADLINE_PRIO_MAP sequence and will be deadlined with the indexes of the same map.
The rest of the pieces are set to prio:1 and no deadline. Note that the prio, deadline and the actual pieces impacted
will be dynamically updated each time more chunks are read, until EOF.
Each chunk will have its own prio applied, and there can be multiple concurrent chunks
for a given fileindex of a torrent.
"""
from __future__ import annotations
import logging
from asyncio import sleep
from io import BufferedReader
from pathlib import Path
from typing import TYPE_CHECKING, Callable, Generator, cast
import libtorrent
from typing_extensions import Self
from tribler.core.libtorrent.download_manager.download_state import DownloadStatus
from tribler.core.libtorrent.torrents import check_vod
if TYPE_CHECKING:
from types import TracebackType
from tribler.core.libtorrent.download_manager.download import Download
# Header and footer sizes are necessary for the video client to detect file codecs and muxer metadata.
# The streamer should not start before the pieces below are ready.
HEADER_SIZE = 5 * 1024 * 1024
FOOTER_SIZE = 1 * 1024 * 1024
# The percentage of the file size to be used as the moving or static prebuffering size.
PREBUFF_PERCENT = 0.05
# The map below defines the priority/deadline sequence for pieces. The list index represents the deadline, and
# the actual value at that index represents the priority of the piece sequence in the torrent file.
# The minimum prio must be 2, because prio 1 is used for non-relevant pieces in streaming.
# The max prio is 7 and must have only 1 deadline attached to it, because lt selects prio 7 regardless of the
# picker decision.
# The narrower this map, the better the prioritization but the worse the throughput, ie: 7,6,5,3,2,1
# The wider this map, the worse the prioritization but the better the throughput, ie: 7,6,6,6,6,6,6,6,6,5,5,5,...
# The logarithmic map below is based on 2^n and achieves 3~4MB/s with a moving 4~5MB window prioritization.
# There is no prio 5 in libtorrent priorities:
# https://www.libtorrent.org/manual.html#piece-priority-prioritize-pieces-piece-priorities
DEADLINE_PRIO_MAP = [7, 6, 6, 4, 4, 4, 4, 3, 3, 3, 3, 3, 3, 3, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2]
# Time used to detect that the client is no longer requesting stream chunks from the REST API.
# Basically this means the video is either paused, or seeked to another position,
# but the player still wants the old tcp session alive.
# This setting can be reduced to as low as 1 sec:
# the lower this value, the better the seek responsiveness.
STREAM_PAUSE_TIME = 1
# Never use 0 priority, because when streams are paused
# we still want lt to download the pieces that are not important for the stream.
MIN_PIECE_PRIO = 1
class NotStreamingError(Exception):
    """
    Raised when a chunk is created for a stream that is not enabled.
    """
class NoAvailableStreamError(Exception):
    """
    Raised when a stream is requested but no (matching) file is available.
    """
class Stream:
    """
    Holds the streaming status of a specific download.
    """

    def __init__(self, download: Download) -> None:
        """
        Create a stream for the given download.
        """
        self._logger = logging.getLogger(self.__class__.__name__)
        # Stream state below is populated by ``__prepare`` and ``enable``; None means "not ready yet".
        self.infohash: bytes | None = None
        self.filename: Path | None = None
        self.filesize: int | None = None
        self.enabledfiles: list[int] | None = None
        self.firstpiece: int | None = None
        self.lastpiece: int | None = None
        self.prebuffsize: int | None = None
        self.destdir: Path | None = None
        self.piecelen: int | None = None
        self.files: list[tuple[Path, int]] | None = None
        self.mapfile: Callable[[int, int, int], libtorrent.peer_request] | None = None
        self.prebuffpieces: list[int] = []
        self.headerpieces: list[int] = []
        self.footerpieces: list[int] = []
        # cursorpiecemap represents the pieces maintained by all available chunks.
        # Each chunk is identified by its startbyte.
        # The structure for cursorpieces is
        # <-------------------- dynamic buffer pieces -------------------->
        # {int:startbyte: (bool:ispaused, list:piecestobuffer 'according to the cursor of the related chunk')
        self.cursorpiecemap: dict[int, tuple[bool, list[int]]] = {}
        self.fileindex: int | None = None
        # When first initiated, this instance does not have the related callbacks ready.
        # This coro will be awaited when the stream is enabled. If never enabled,
        # this coro will be closed.
        self.__prepare_coro = self.__prepare(download)
        # Required callbacks used in this class but defined in the download class.
        # Another approach would be using self.__download = download, but
        # the method below looks cleaner.
        self.__lt_state = download.get_state
        self.__getpieceprios = download.get_piece_priorities
        self.__setpieceprios = download.set_piece_priorities
        self.__getfileprios = download.get_file_priorities
        self.__setselectedfiles = download.set_selected_files
        self.__setdeadline = download.set_piece_deadline
        self.__resetdeadline = download.reset_piece_deadline
        self.__resumedownload = download.resume

    async def __prepare(self, download: Download) -> None:
        # Wait for a handle first.
        await download.get_handle()
        self.destdir = download.get_content_dest()
        metainfo = None
        while not metainfo:
            # Wait until an actual tdef with actual metadata is available.
            metainfo = download.get_def().get_metainfo()
            if not metainfo:
                await sleep(1)
        tdef = download.get_def()
        self.piecelen = tdef.get_piece_length()
        self.files = tdef.get_files_with_length()
        # We also use self.infohash as a flag to detect that the stream class is prepared.
        self.infohash = tdef.get_infohash()
        self.mapfile = tdef.torrent_info.map_file

    async def enable(self, fileindex: int = 0, prebufpos: int | None = None) -> None:
        """
        Enable streaming mode for a given fileindex.

        :param fileindex: the index of the file (in the torrent) to stream.
        :param prebufpos: optional byte offset whose surrounding pieces are added to the static prebuffer.
        :raises NoAvailableStreamError: when the fileindex does not exist in this torrent.
        """
        # If not prepared, prepare the callbacks.
        if not self.infohash:
            await self.__prepare_coro
            self.destdir = cast(Path, self.destdir)
            self.piecelen = cast(int, self.piecelen)
            self.files = cast(list[tuple[Path, int]], self.files)
            self.infohash = cast(bytes, self.infohash)
            self.mapfile = cast(Callable[[int, int, int], libtorrent.peer_request], self.mapfile)
        # If the fileindex is not available for the torrent, raise an exception.
        if fileindex >= len(self.files):
            raise NoAvailableStreamError
        # If the download is stopped for some reason, resume it.
        self.__resumedownload()
        # Wait until dlstate is downloading or seeding.
        while True:
            status = self.__lt_state().get_status()
            if status in [DownloadStatus.DOWNLOADING, DownloadStatus.SEEDING]:
                break
            await sleep(1)
        # The streaming status is tracked based on the infohash. If there is no streaming yet,
        # or there already is streaming for a fileindex but we need a new one, reinitialize the
        # status map. When there is a state for a given infohash, the stream is accepted as streaming,
        # which means after the line below, stream.enabled = True.
        if fileindex != self.fileindex:
            self.fileindex = fileindex
        elif self.enabled:
            # If there already is a state with the same file index, do nothing.
            if prebufpos is not None:
                # If the prebuffer position is updated, update the static prebuff pieces.
                currrent_prebuf = list(self.prebuffpieces)
                currrent_prebuf.extend(self.bytestopieces(prebufpos, self.prebuffsize))
                self.prebuffpieces = sorted(set(currrent_prebuf))
            return
        # Update the file name and size with the file index.
        filename, self.filesize = self.files[fileindex]
        if len(self.files) > 1:
            # Multi-file torrent: destdir is the content directory, join with the inner file name.
            self.filename = self.destdir / filename
        else:
            # Single-file torrent: destdir (the content dest) already points at the file itself.
            self.filename = self.destdir
        # Back up the file prios, and set the streaming file to max prio.
        if not self.enabledfiles:
            self.enabledfiles = [x[0] for x in enumerate(self.__getfileprios()) if x[1]]
        self.__setselectedfiles([fileindex], 7, True)
        # Get the piece map of the file.
        self.firstpiece = self.bytetopiece(0)  # inclusive
        self.lastpiece = min(self.bytetopiece(self.filesize), len(self.pieceshave) - 1)  # inclusive
        # Prebuffer size is PREBUFF_PERCENT of the file size.
        self.prebuffsize = int(self.filesize * PREBUFF_PERCENT)
        # Calculate the static buffer pieces.
        self.headerpieces = self.bytestopieces(0, HEADER_SIZE)
        self.footerpieces = self.bytestopieces(-FOOTER_SIZE, 0)
        self.prebuffpieces = [] if prebufpos is None else self.bytestopieces(prebufpos, self.prebuffsize)

    @property
    def enabled(self) -> bool:
        """
        Check if the stream is enabled.
        """
        return self.infohash is not None and self.fileindex is not None

    @property
    @check_vod(0)
    def headerprogress(self) -> float:
        """
        Get the current progress of downloaded header pieces of the enabled stream, if not enabled returns 0.
        """
        return self.calculateprogress(self.headerpieces, False)

    @property
    @check_vod(0)
    def footerprogress(self) -> float:
        """
        Get the current progress of downloaded footer pieces of the enabled stream, if not enabled returns 0.
        """
        return self.calculateprogress(self.footerpieces, False)

    @property
    @check_vod(0)
    def prebuffprogress(self) -> float:
        """
        Get the current progress of downloaded prebuff pieces of the enabled stream, if not enabled returns 0.
        """
        return self.calculateprogress(self.prebuffpieces, False)

    @property
    @check_vod(0)
    def prebuffprogress_consec(self) -> float:
        """
        Get the current progress of consecutively downloaded prebuff pieces of the enabled stream,
        if not enabled returns 0.
        """
        return self.calculateprogress(self.prebuffpieces, True)

    @property
    @check_vod([])
    def pieceshave(self) -> list[int]:
        """
        Get a list of Booleans indicating whether individual pieces of the selected fileindex have been
        downloaded or not.
        """
        return self.__lt_state().get_pieces_complete()

    @check_vod(True)
    def disable(self) -> None:
        """
        Stop streaming: clear all buffer state and restore the original file selection.
        """
        self.fileindex = None
        self.headerpieces = []
        self.footerpieces = []
        self.prebuffpieces = []
        self.cursorpiecemap = {}
        self.resetprios()
        self.__setselectedfiles(self.enabledfiles)

    def close(self) -> None:
        """
        Close this class gracefully.
        """
        # Close the coroutine. Unnecessary calls should be harmless.
        self.__prepare_coro.close()
        self.disable()

    @check_vod([])
    def bytestopieces(self, bytes_begin: int, bytes_end: int) -> list[int]:
        """
        Return the pieces that represent the given byte range.

        Negative offsets are interpreted relative to the end of the file.
        """
        self.filesize = cast(int, self.filesize)  # Ensured by ``check_vod``
        bytes_begin = min(self.filesize, bytes_begin) if bytes_begin >= 0 else self.filesize + bytes_begin
        bytes_end = min(self.filesize, bytes_end) if bytes_end > 0 else self.filesize + bytes_end
        startpiece = self.bytetopiece(bytes_begin)
        endpiece = self.bytetopiece(bytes_end)
        startpiece = max(startpiece, self.firstpiece)
        endpiece = min(endpiece, self.lastpiece)
        return list(range(startpiece, endpiece + 1))

    @check_vod(-1)
    def bytetopiece(self, byte_begin: int) -> int:
        """
        Find the piece position that begin_bytes is mapped to.

        ``check_vod`` ensures the types of ``self.mapfile`` and ``self.fileindex``.
        """
        self.mapfile = cast(Callable[[int, int, int], libtorrent.peer_request], self.mapfile)
        return self.mapfile(cast(int, self.fileindex), byte_begin, 0).piece

    @check_vod(0)
    def calculateprogress(self, pieces: list[int], consec: bool) -> float:
        """
        Calculate the download progress of a given piece list.

        If consec is True, the calculation is based only on the pieces downloaded sequentially.
        """
        if not pieces:
            return 1.0
        have = 0.0
        for piece in self.iterpieces(have=True, consec=consec):
            if have >= len(pieces):
                break
            if piece in pieces:
                have += 1
        return have / len(pieces) if pieces else 0.0

    @check_vod([])
    def iterpieces(self, have: bool | None = None, consec: bool = False,
                   startfrom: int | None = None) -> Generator[int, None, None]:
        """
        Generator function that yields the pieces for the active fileindex.

        :param have: None: no filter, True: only pieces we have, False: only pieces we don't have
        :param consec: True: sequentially, False: all pieces
        :param startfrom: int: start from index, None: start from first piece
        """
        self.firstpiece = cast(int, self.firstpiece)  # Ensured by ``check_vod``
        self.lastpiece = cast(int, self.lastpiece)  # Ensured by ``check_vod``
        if have is not None:
            pieces_have = self.pieceshave
        for piece in range(self.firstpiece, self.lastpiece + 1):
            if startfrom is not None and piece < startfrom:
                continue
            if have is None or have and pieces_have[piece] or not have and not pieces_have[piece]:
                yield piece
            elif consec:
                # In sequential mode, stop at the first piece that fails the filter.
                break

    async def updateprios(self) -> None:  # noqa: C901, PLR0912, PLR0915
        """
        This async function controls how the individual piece priority and deadline is configured.
        This method is called when a stream is enabled, and each time a chunk reads the stream.
        The performance of this method is crucial since it gets called quite frequently.
        """
        if not self.enabled:
            return

        def _updateprio(piece: int, prio: int, deadline: int | None = None) -> None:
            """
            Utility function to update piece priorities.
            """
            if curr_prio != prio:
                piecepriorities[piece] = prio
                if deadline is not None:
                    # It is cool to step deadlines with a 10ms interval but in reality there is no need.
                    self.__setdeadline(piece, deadline * 10)
                    diffmap[piece] = f"{piece}:{deadline * 10}:{curr_prio}->{prio}"
                else:
                    self.__resetdeadline(piece)
                    diffmap[piece] = f"{piece}:-:{curr_prio}->{prio}"

        def _find_deadline(piece: int) -> tuple[int, int] | tuple[None, None]:
            """
            Find the cursor which has this piece closest to its start.
            Returns the deadline for the piece and the cursor startbyte.
            """
            # If the piece is not in the piecemaps, then there is no deadline.
            # If the piece is in the piecemaps, then the deadline is the index in the related piecemap.
            deadline = None
            cursor = None
            for startbyte in self.cursorpiecemap:
                paused, cursorpieces = self.cursorpiecemap[startbyte]
                if not paused and piece in cursorpieces and \
                        (deadline is None or cursorpieces.index(piece) < deadline):
                    deadline = cursorpieces.index(piece)
                    cursor = startbyte
            if cursor is not None and deadline is not None:
                return deadline, cursor
            return None, None

        # Current priorities.
        piecepriorities = self.__getpieceprios()
        if not piecepriorities:
            # This case might happen when the hop count is changing.
            return
        # A map that holds the changes, used only for logging purposes.
        diffmap: dict[int, str] = {}
        # Flag that holds whether we are in the static buffering phase of dynamic buffering.
        staticbuff = False
        for piece in self.iterpieces(have=False):
            # Current piece prio.
            curr_prio = piecepriorities[piece]
            if piece in self.footerpieces:
                _updateprio(piece, 7, 0)
                staticbuff = True
            elif piece in self.headerpieces:
                _updateprio(piece, 7, 1)
                staticbuff = True
            elif piece in self.prebuffpieces:
                _updateprio(piece, 7, 2)
                staticbuff = True
            elif staticbuff:
                # While static buffering, do not download anything beyond header/footer/prebuffer.
                _updateprio(piece, 0)
            else:
                # Dynamic buffering.
                deadline, cursor = _find_deadline(piece)
                if cursor is not None and deadline is not None:
                    if deadline < len(DEADLINE_PRIO_MAP):
                        # Get the prio according to the deadline.
                        _updateprio(piece, DEADLINE_PRIO_MAP[deadline], deadline)
                    else:
                        # The deadline is outside of the map; set piece prio 1 with the deadline.
                        # The buffer size is bigger than the prio map.
                        _updateprio(piece, 1, deadline)
                else:
                    # The piece is not in a buffer zone; set to min prio without a deadline.
                    _updateprio(piece, MIN_PIECE_PRIO)
        if diffmap:
            # Log stuff.
            self._logger.info("Piece Piority changed: %s", repr(diffmap))
            self._logger.debug("Header Pieces: %s", repr(self.headerpieces))
            self._logger.debug("Footer Pieces: %s", repr(self.footerpieces))
            self._logger.debug("Prebuff Pieces: %s", repr(self.prebuffpieces))
            for startbyte in self.cursorpiecemap:
                self._logger.debug("Cursor '%s' Pieces: %s", startbyte, repr(self.cursorpiecemap[startbyte]))
            # BELOW LINE WILL BE REMOVED, Most of the above are for debugging to be cleaned
            self._logger.debug("Current Prios: %s", [(x, piecepriorities[x]) for x in self.iterpieces(have=False)])
            self.__setpieceprios(piecepriorities)

    def resetprios(self, pieces: list[int] | None = None, prio: int | None = None) -> None:
        """
        Reset the prios and deadlines of the pieces of the active fileindex.
        If no pieces are provided, resets every piece for the fileindex.

        NOTE(review): the priority list passed to the setter has length ``len(pieces)`` and thus
        applies to the torrent's first pieces rather than the given indices — verify intent.
        """
        prio = prio if prio is not None else 4
        piecepriorities = self.__getpieceprios()
        if pieces is None:
            pieces = list(range(len(piecepriorities)))
        for piece in pieces:
            self.__resetdeadline(piece)
        self.__setpieceprios([prio] * len(pieces))
class StreamChunk:
"""
This class represents the chunk to be read for the torrent file, and controls the dynamic buffer of the
stream instance according to read position.
"""
def __init__(self, stream: Stream, startpos: int = 0) -> None:
"""
Create a new StreamChunk.
:param stream: the stream to be read
:param startpos: the position offset the the chunk should read from.
"""
self._logger = logging.getLogger(self.__class__.__name__)
if not stream.enabled:
raise NotStreamingError
self.stream = stream
self.file: BufferedReader | None = None
self.startpos = startpos
self.__seekpos = self.startpos
@property
def seekpos(self) -> int:
"""
Current seek position of the actual file on the filesystem.
"""
return self.__seekpos
async def __aenter__(self) -> Self:
"""
Open the chunk.
"""
await self.open()
return self
async def __aexit__(self, exc_type: type[BaseException] | None, exc_value: BaseException | None,
traceback: TracebackType | None) -> None:
"""
Close the chunk.
"""
self._logger.info("Stream %s closed due to %s", self.startpos, exc_type)
self.close()
async def open(self) -> None:
"""
Opens the file in the filesystem until its ready and seeks to the seekpos position.
"""
filename = cast(Path, self.stream.filename) # Ensured by ``NotStreamingError`` (in ``__init__``)
while not filename.exists():
await sleep(1)
self.file = open(filename, "rb") # noqa: ASYNC101, SIM115
self.file.seek(self.seekpos)
@property
def isclosed(self) -> bool:
"""
Check if the file (if it exists) belonging to this chunk is closed.
"""
return self.file is None or self.file.closed
@property
def isstarted(self) -> bool:
"""
Checks if the this chunk has already registered itself to stream instance.
"""
return self.startpos in self.stream.cursorpiecemap
@property
def ispaused(self) -> bool:
"""
Checks if the chunk is in paused state.
"""
if self.isstarted and self.stream.cursorpiecemap[self.startpos][0]:
return True
return False
@property
def shouldpause(self) -> bool:
"""
Checks if this chunk should pause, based on the desicion that
any other chunks also is streaming the same torrent or not.
"""
for spos in self.stream.cursorpiecemap:
if spos == self.startpos:
continue
paused, pieces = self.stream.cursorpiecemap[spos]
if not paused and pieces:
return True
return False
def pause(self, force: bool = False) -> bool:
"""
Sets the chunk pieces to pause, if not forced, chunk is only paused if other chunks are not paused.
"""
if not self.ispaused and (self.shouldpause or force):
self.stream.cursorpiecemap[self.startpos] = True, self.stream.cursorpiecemap[self.startpos][1]
return True
return False
def resume(self, force: bool = False) -> bool:
"""
Sets the chunk pieces to resume, if not forced, chunk is only resume if other chunks are paused.
"""
if self.ispaused and (not self.shouldpause or force):
self.stream.cursorpiecemap[self.startpos] = False, self.stream.cursorpiecemap[self.startpos][1]
return True
return False
    async def seek(self, positionbyte: int) -> list[int]:
        """
        Seek the stream to the piece that contains the given position byte and rebuild
        the dynamic piece buffer from that point.

        :param positionbyte: the byte offset to seek to.
        :returns: the not-yet-downloaded pieces that now form this chunk's buffer.
        """
        self.stream.prebuffsize = cast(int, self.stream.prebuffsize) # Ensured by ``NotStreamingError``
        self.stream.piecelen = cast(int, self.stream.piecelen) # Ensured by ``NotStreamingError``
        buffersize = 0
        pospiece = self.stream.bytetopiece(positionbyte)
        pieces = []
        # Collect undownloaded pieces, starting at the seek position, until the buffer
        # reaches prebuffsize (buffersize counts whole pieces, not exact bytes).
        for piece in self.stream.iterpieces(have=False, startfrom=pospiece):
            if buffersize < self.stream.prebuffsize:
                pieces.append(piece)
                buffersize += self.stream.piecelen
            else:
                break
        # update cursor piece that represents this chunk (preserving the paused flag)
        self.stream.cursorpiecemap[self.startpos] = (self.ispaused, pieces)
        # update the torrent prios
        await self.stream.updateprios()
        # update the file cursor also (``__seekpos`` is name-mangled; it backs ``self.seekpos``)
        if self.file:
            self.__seekpos = positionbyte
            self.file.seek(self.seekpos)
        return pieces
def close(self) -> None:
"""
Closes the chunk gracefully, also unregisters the cursor pieces from the stream instance
and resets the relevant piece prios.
"""
if self.file:
self.file.close()
self.file = None
if self.isstarted:
pieces = self.stream.cursorpiecemap.pop(self.startpos)
self.stream.resetprios(pieces[1], MIN_PIECE_PRIO)
    async def read(self) -> bytes:
        """
        Read one piece worth of data at the current seek position, waiting (polling once
        per second) until that piece has been downloaded. Returns ``b''`` on EOF/close.
        """
        # Lazily (re)open the file the first time we read after being started.
        if not self.file and self.isstarted:
            await self.open()
            await self.seek(self.seekpos)
        piece = self.stream.bytetopiece(self.seekpos)
        self._logger.debug('Chunk %s: Get piece %s', self.startpos, piece)
        # Past the last piece, already closed, or unregistered: nothing more to serve.
        if self.isclosed or piece > self.stream.lastpiece or not self.isstarted:
            self.close()
            self._logger.debug('Chunk %s: Got no bytes, file is closed', self.startpos)
            return b''
        self.file = cast(BufferedReader, self.file) # Ensured by ``self.isclosed``
        # wait until we download what we want, then read the localfile
        # experiment a garbage write mechanism here if the torrent read is too slow
        piece = self.stream.bytetopiece(self.seekpos)
        while True:
            pieces_have = self.stream.pieceshave
            if piece == -1:
                # Position does not map to any piece: treat as EOF.
                self.close()
                return b''
            if (0 <= piece < len(pieces_have) and pieces_have[piece]) or not self.isstarted:
                break
            self._logger.debug('Chunk %s, Waiting piece %s', self.startpos, piece)
            await sleep(1)
        result = self.file.read(self.stream.piecelen)
        self._logger.debug('Chunk %s: Got bytes %s-%s, %s bytes, piecelen: %s',
                           self.startpos, self.seekpos, self.seekpos + len(result), len(result), self.stream.piecelen)
        # Advance the private seek position to wherever the file cursor now is.
        self.__seekpos = self.file.tell()
        return result
| 26,995 | Python | .py | 558 | 38.919355 | 118 | 0.637236 | Tribler/tribler | 4,768 | 443 | 131 | GPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,034 | download_manager.py | Tribler_tribler/src/tribler/core/libtorrent/download_manager/download_manager.py | """
A wrapper around libtorrent.
Author(s): Egbert Bouman
"""
from __future__ import annotations
import asyncio
import dataclasses
import logging
import os
import time
from asyncio import CancelledError, Future, gather, iscoroutine, shield, sleep, wait_for
from binascii import hexlify, unhexlify
from collections import defaultdict
from copy import deepcopy
from pathlib import Path
from tempfile import TemporaryDirectory
from typing import TYPE_CHECKING, Any, Awaitable, Callable, Dict, Iterable, List, cast
import libtorrent as lt
from configobj import ConfigObj
from ipv8.taskmanager import TaskManager
from validate import Validator
from yarl import URL
from tribler.core.libtorrent.download_manager.download import Download
from tribler.core.libtorrent.download_manager.download_config import DownloadConfig
from tribler.core.libtorrent.download_manager.download_state import DownloadState, DownloadStatus
from tribler.core.libtorrent.torrentdef import MetainfoDict, TorrentDef, TorrentDefNoMetainfo
from tribler.core.libtorrent.uris import unshorten, url_to_path
from tribler.core.notifier import Notification, Notifier
from tribler.tribler_config import VERSION_SUBDIR
if TYPE_CHECKING:
from tribler.core.libtorrent.download_manager.dht_health_manager import DHTHealthManager
from tribler.tribler_config import TriblerConfigManager
# libtorrent ``proxy_type`` value used for SOCKS5 proxies (see ``set_proxy_settings``).
SOCKS5_PROXY_DEF = 2
# File name (inside the state dir) that persists libtorrent session state between runs.
LTSTATE_FILENAME = "lt.state"
# How long (seconds) fetched metainfo stays in the in-memory cache (see ``get_metainfo``).
METAINFO_CACHE_PERIOD = 5 * 60
# Well-known bootstrap routers added to the DHT when a session starts it.
DEFAULT_DHT_ROUTERS = [
    ("dht.aelitis.com", 6881),
    ("dht.libtorrent.org", 6881),
    ("dht.libtorrent.org", 25401),
    ("dht.transmissionbt.com", 6881),
    ("router.bitcomet.com", 6881),
    ("router.bittorrent.com", 6881),
    ("router.utorrent.com", 6881),
]
# Plugins registered on every libtorrent session (metadata exchange, PEX, smart ban).
DEFAULT_LT_EXTENSIONS = [
    lt.create_ut_metadata_plugin,
    lt.create_ut_pex_plugin,
    lt.create_smart_ban_plugin
]
logger = logging.getLogger(__name__)
@dataclasses.dataclass
class MetainfoLookup:
    """
    A metainfo lookup download and the number of times it has been invoked.
    """
    # The hidden download used to fetch metainfo from the swarm.
    download: Download
    # Number of concurrent ``get_metainfo`` callers still waiting on this download.
    pending: int
def encode_atp(atp: dict) -> dict:
    """
    Normalize an "Add Torrent Params" dict in place for libtorrent: ``str`` values are
    encoded to ``bytes`` and ``Path`` values are flattened to ``str``.

    :param atp: the add-torrent-params dict to normalize (mutated in place).
    :returns: the same dict instance.
    """
    replacements = {}
    for key, value in atp.items():
        if isinstance(value, str):
            replacements[key] = value.encode()
        elif isinstance(value, Path):
            replacements[key] = str(value)
    atp.update(replacements)
    return atp
class DownloadManager(TaskManager):
"""
The manager of all downloads.
"""
    def __init__(self, config: TriblerConfigManager, notifier: Notifier,
                 metadata_tmpdir: TemporaryDirectory | None = None) -> None:
        """
        Create a new download manager.

        :param config: the Tribler configuration to read libtorrent settings from.
        :param notifier: used to broadcast events (shutdown progress, metadata, peer disconnects).
        :param metadata_tmpdir: optional temporary directory for metainfo-only downloads
                                (a fresh one is created when not given).
        """
        super().__init__()
        self.config = config
        self.state_dir = Path(config.get_version_state_dir())
        self.ltsettings: dict[lt.session, dict] = {} # Stores a copy of the settings dict for each libtorrent session
        self.ltsessions: dict[int, Future[lt.session]] = {} # One session (future) per anonymization hop count
        self.dht_health_manager: DHTHealthManager | None = None
        self.listen_ports: dict[int, dict[str, int]] = defaultdict(dict) # hops -> {address: port}
        self.socks_listen_ports = config.get("libtorrent/socks_listen_ports")
        self.notifier = notifier
        # Apply the default (unlimited) rate limits as soon as the event loop runs.
        self.register_task("Set default upload rate limit", self.set_upload_rate_limit, 0)
        self.register_task("Set default download rate limit", self.set_download_rate_limit, 0)
        self.downloads: Dict[bytes, Download] = {}
        self.checkpoint_directory = (self.state_dir / "dlcheckpoints")
        self.checkpoints_count = 0
        self.checkpoints_loaded = 0
        self.all_checkpoints_are_loaded = False
        self.metadata_tmpdir: TemporaryDirectory | None = (metadata_tmpdir or
                                                           TemporaryDirectory(suffix="tribler_metainfo_tmpdir"))
        # Dictionary that maps infohashes to download instances. These include only downloads that have
        # been made specifically for fetching metainfo, and will be removed afterwards.
        self.metainfo_requests: dict[bytes, MetainfoLookup] = {}
        self.metainfo_cache: dict[bytes, MetainfoDict] = {} # Dictionary that maps infohashes to cached metainfo items
        self.default_alert_mask = lt.alert.category_t.error_notification | lt.alert.category_t.status_notification | \
            lt.alert.category_t.storage_notification | lt.alert.category_t.performance_warning | \
            lt.alert.category_t.tracker_notification | lt.alert.category_t.debug_notification
        self.session_stats_callback: Callable | None = None
        self.state_cb_count = 0
        self.queued_write_bytes = -1 # -1 until the first session_stats_alert arrives
        # Status of libtorrent session to indicate if it can safely close and no pending writes to disk exists.
        self.lt_session_shutdown_ready: dict[int, bool] = {}
        self.dht_ready_task = None
        self.dht_readiness_timeout = config.get("libtorrent/dht_readiness_timeout")
        self._last_states_list: list[DownloadState] = []
def is_shutting_down(self) -> bool:
"""
Whether the download manager is currently shutting down.
"""
return self._shutdown
@staticmethod
def convert_rate(rate: int) -> int:
"""
Rate conversion due to the fact that we had a different system with Swift
and the old python BitTorrent core: unlimited == 0, stop == -1, else rate in kbytes.
"""
if rate == 0:
return -1
if rate == -1:
return 1
return rate * 1024
@staticmethod
def reverse_convert_rate(rate: int) -> int:
"""
Rate conversion due to the fact that we had a different system with Swift
and the old python BitTorrent core: unlimited == 0, stop == -1, else rate in kbytes.
"""
if rate == -1:
return 0
if rate == 1:
return -1
return rate // 1024
async def _check_dht_ready(self, min_dht_peers: int = 60) -> None:
"""
Checks whether we got enough DHT peers. If the number of DHT peers is low,
checking for a bunch of torrents in a short period of time may result in several consecutive requests
sent to the same peers. This can trigger those peers' flood protection mechanism,
which results in DHT checks stuck for hours.
See https://github.com/Tribler/tribler/issues/5319
"""
while (await self.get_session()).status().dht_nodes < min_dht_peers:
await asyncio.sleep(1)
    async def initialize(self) -> None:
        """
        Initialize the directory structure, launch the periodic tasks and start libtorrent background processes.
        """
        # Create the checkpoints directory
        self.checkpoint_directory.mkdir(exist_ok=True, parents=True)
        # Register tasks
        self.register_task("process_alerts", self._task_process_alerts, interval=1, ignore=(Exception, ))
        # Only gate torrent-adding on DHT readiness when the DHT is enabled and a timeout is configured.
        if self.dht_readiness_timeout > 0 and self.config.get("libtorrent/dht"):
            self.dht_ready_task = self.register_task("check_dht_ready", self._check_dht_ready)
        self.register_task("request_torrent_updates", self._request_torrent_updates, interval=1)
        self.register_task("task_cleanup_metacache", self._task_cleanup_metainfo_cache, interval=60, delay=0)
        self.set_download_states_callback(self.sesscb_states_callback)
        # Start upnp
        if self.config.get("libtorrent/upnp"):
            (await self.get_session()).start_upnp()
    def start(self) -> None:
        """
        Start loading the checkpoints from disk.
        """
        # Run as a named task so ``shutdown`` can cancel it via ``cancel_pending_task("start")``.
        self.register_task("start", self.load_checkpoints)
    def notify_shutdown_state(self, state: str) -> None:
        """
        Call the notifier to signal a shutdown state update.

        :param state: a human-readable description of the current shutdown phase.
        """
        logger.info("Notify shutdown state: %s", state)
        self.notifier.notify(Notification.tribler_shutdown_state, state=state)
    async def shutdown(self, timeout: int = 30) -> None:
        """
        Shut down all pending tasks and background tasks.

        :param timeout: seconds to wait for libtorrent to finish pending disk writes
                        before forcing the shutdown.
        """
        logger.info("Shutting down...")
        self.cancel_pending_task("start")
        self.cancel_pending_task("download_states_lc")
        if self.downloads:
            logger.info("Stopping downloads...")
            self.notify_shutdown_state("Checkpointing Downloads...")
            await gather(*[download.stop() for download in self.downloads.values()], return_exceptions=True)
            self.notify_shutdown_state("Shutting down Downloads...")
            await gather(*[download.shutdown() for download in self.downloads.values()], return_exceptions=True)
        self.notify_shutdown_state("Shutting down LibTorrent Manager...")
        # If libtorrent session has pending disk io, wait until timeout (default: 30 seconds) to let it finish.
        # In between ask for session stats to check if state is clean for shutdown.
        end_time = time.time() + timeout
        force_quit = end_time - time.time()
        while not self.is_shutdown_ready() and force_quit > 0:
            not_ready = list(self.lt_session_shutdown_ready.values()).count(False)
            self.notify_shutdown_state(
                f"Waiting for {not_ready} downloads to finish."
                + (".." if self.queued_write_bytes == -1 else f" {self.queued_write_bytes} bytes left to write.")
                + f" {force_quit:.2f} seconds left until forced shutdown."
            )
            # Posting session stats triggers session_stats_alerts, which update
            # ``lt_session_shutdown_ready`` and ``queued_write_bytes`` in ``process_alert``.
            self.post_session_stats()
            await asyncio.sleep(max(1.0, not_ready * 0.01)) # at least 1 second (10 ms per unfinished session, if more)
            force_quit = end_time - time.time()
        logger.info("Awaiting shutdown task manager...")
        await self.shutdown_task_manager()
        if self.dht_health_manager:
            await self.dht_health_manager.shutdown_task_manager()
        # Save libtorrent state
        if self.has_session():
            logger.info("Saving state...")
            self.notify_shutdown_state("Writing session state to disk.")
            session = await self.get_session()
            with open(self.state_dir / LTSTATE_FILENAME, "wb") as ltstate_file: # noqa: ASYNC230
                ltstate_file.write(lt.bencode(session.save_state()))
        if self.has_session() and self.config.get("libtorrent/upnp"):
            logger.info("Stopping upnp...")
            self.notify_shutdown_state("Stopping UPnP.")
            (await self.get_session()).stop_upnp()
        # Remove metadata temporary directory
        if self.metadata_tmpdir:
            logger.info("Removing temp directory...")
            self.notify_shutdown_state("Removing temporary download files.")
            self.metadata_tmpdir.cleanup()
            self.metadata_tmpdir = None
        logger.info("Shutdown completed")
        self.notify_shutdown_state("Finished shutting down download manager.")
def is_shutdown_ready(self) -> bool:
"""
Check if the libtorrent shutdown is complete.
"""
return all(self.lt_session_shutdown_ready.values())
    def create_session(self, hops: int = 0) -> lt.session: # noqa: PLR0915
        """
        Construct a libtorrent session for the given number of anonymization hops.

        :param hops: 0 creates the regular session; >0 creates an anonymous session whose
                     traffic is forced through the matching SOCKS5 port.
        """
        # Due to a bug in Libtorrent 0.16.18, the outgoing_port and num_outgoing_ports value should be set in
        # the settings dictionary
        logger.info("Creating a session")
        settings: dict[str, str | float] = {"outgoing_port": 0,
                                            "num_outgoing_ports": 1,
                                            "allow_multiple_connections_per_ip": 0,
                                            "enable_upnp": int(self.config.get("libtorrent/upnp")),
                                            "enable_dht": int(self.config.get("libtorrent/dht")),
                                            "enable_lsd": int(self.config.get("libtorrent/lsd")),
                                            "enable_natpmp": int(self.config.get("libtorrent/natpmp"))}
        # Copy construct so we don't modify the default list
        extensions = list(DEFAULT_LT_EXTENSIONS)
        logger.info("Hops: %d.", hops)
        # Elric: Strip out the -rcX, -beta, -whatever tail on the version string.
        ltsession = lt.session(lt.fingerprint("TL", 0, 0, 0, 0), flags=0) if hops == 0 else lt.session(flags=0)
        libtorrent_port = self.config.get("libtorrent/port")
        logger.info("Libtorrent port: %d", libtorrent_port)
        if hops == 0:
            settings["user_agent"] = 'Tribler/' + VERSION_SUBDIR
            enable_utp = self.config.get("libtorrent/utp")
            settings["enable_outgoing_utp"] = enable_utp
            settings["enable_incoming_utp"] = enable_utp
            settings["prefer_rc4"] = True
            settings["listen_interfaces"] = f"0.0.0.0:{libtorrent_port or 6881}"
        else:
            # Anonymous sessions only talk uTP through the SOCKS5 tunnel; plain TCP is disabled.
            settings["enable_outgoing_utp"] = True
            settings["enable_incoming_utp"] = True
            settings["enable_outgoing_tcp"] = False
            settings["enable_incoming_tcp"] = False
            settings["anonymous_mode"] = True
            settings["force_proxy"] = True
        self.set_session_settings(ltsession, settings)
        ltsession.set_alert_mask(self.default_alert_mask)
        if hops == 0:
            self.set_proxy_settings(ltsession, *self.get_libtorrent_proxy_settings())
        else:
            # Route the anonymous session through the SOCKS5 listener for this hop count.
            self.set_proxy_settings(ltsession, SOCKS5_PROXY_DEF, ("127.0.0.1", self.socks_listen_ports[hops - 1]))
        for extension in extensions:
            ltsession.add_extension(extension)
        # Set listen port & start the DHT
        if hops == 0:
            ltsession.listen_on(libtorrent_port, libtorrent_port + 10)
            try:
                with open(self.state_dir / LTSTATE_FILENAME, "rb") as fp:
                    lt_state = lt.bdecode(fp.read())
                if lt_state is not None:
                    ltsession.load_state(lt_state)
                else:
                    logger.warning("the lt.state appears to be corrupt, writing new data on shutdown")
            except Exception as exc:
                logger.info("could not load libtorrent state, got exception: %s. starting from scratch", repr(exc))
        else:
            # Anonymous sessions get the configured rate limits applied directly.
            rate = DownloadManager.get_libtorrent_max_upload_rate(self.config)
            download_rate = DownloadManager.get_libtorrent_max_download_rate(self.config)
            settings = {"upload_rate_limit": rate,
                        "download_rate_limit": download_rate}
            self.set_session_settings(ltsession, settings)
        if self.config.get("libtorrent/dht"):
            ltsession.start_dht()
            for router in DEFAULT_DHT_ROUTERS:
                ltsession.add_dht_router(*router)
            ltsession.start_lsd()
        logger.info("Started libtorrent session for %d hops on port %d", hops, ltsession.listen_port())
        # Marked True again once a session_stats_alert reports no pending disk jobs (see ``process_alert``).
        self.lt_session_shutdown_ready[hops] = False
        return ltsession
def has_session(self, hops: int = 0) -> bool:
"""
Check if we have a session for the given number of anonymization hops.
"""
return hops in self.ltsessions
def get_session(self, hops: int = 0) -> Future[lt.session]:
"""
Get the session for the given number of anonymization hops.
"""
if hops not in self.ltsessions:
self.ltsessions[hops] = self.register_executor_task(f"Create session {hops}", self.create_session, hops)
return self.ltsessions[hops]
def set_proxy_settings(self, ltsession: lt.session, ptype: int, server: tuple[str, str | int] | None = None,
auth: tuple[str, str] | None = None) -> None:
"""
Apply the proxy settings to a libtorrent session. This mechanism changed significantly in libtorrent 1.1.0.
"""
settings: dict[str, str | float] = {"proxy_type": ptype, "proxy_hostnames": True,
"proxy_peer_connections": True}
if server is not None:
proxy_host = server[0]
if proxy_host:
settings["proxy_hostname"] = proxy_host
settings["proxy_port"] = int(server[1]) if server[1] else 0
if auth is not None:
settings["proxy_username"] = auth[0]
settings["proxy_password"] = auth[1]
self.set_session_settings(ltsession, settings)
    def set_max_connections(self, conns: int, hops: int | None = None) -> None:
        """
        Set the maximum number of connections for the given hop count
        (or for all sessions when ``hops`` is None).
        """
        self._map_call_on_ltsessions(hops, "set_max_connections", conns)
async def set_upload_rate_limit(self, rate: int) -> None:
"""
Set the upload rate limit for the given session.
"""
# Rate conversion due to the fact that we had a different system with Swift
# and the old python BitTorrent core: unlimited == 0, stop == -1, else rate in kbytes
libtorrent_rate = self.convert_rate(rate=rate)
# Pass outgoing_port and num_outgoing_ports to dict due to bug in libtorrent 0.16.18
settings_dict = {"upload_rate_limit": libtorrent_rate, "outgoing_port": 0, "num_outgoing_ports": 1}
for session in self.ltsessions.values():
self.set_session_settings(await session, settings_dict)
async def get_upload_rate_limit(self, hops: int = 0) -> int:
"""
Get the upload rate limit for the session with the given hop count.
"""
# Rate conversion due to the fact that we had a different system with Swift
# and the old python BitTorrent core: unlimited == 0, stop == -1, else rate in kbytes
session = await self.get_session(hops)
libtorrent_rate = session.upload_rate_limit()
return self.reverse_convert_rate(rate=libtorrent_rate)
async def set_download_rate_limit(self, rate: int) -> None:
"""
Set the download rate limit for the given session.
"""
libtorrent_rate = self.convert_rate(rate=rate)
# Pass outgoing_port and num_outgoing_ports to dict due to bug in libtorrent 0.16.18
settings_dict = {"download_rate_limit": libtorrent_rate}
for session in self.ltsessions.values():
self.set_session_settings(await session, settings_dict)
async def get_download_rate_limit(self, hops: int = 0) -> int:
"""
Get the download rate limit for the session with the given hop count.
"""
session = await self.get_session(hops)
libtorrent_rate = session.download_rate_limit()
return self.reverse_convert_rate(rate=libtorrent_rate)
    def process_alert(self, alert: lt.alert, hops: int = 0) -> None: # noqa: C901, PLR0912
        """
        Process a libtorrent alert.

        :param alert: the alert popped from one of our libtorrent sessions.
        :param hops: the hop count of the session that produced the alert.
        """
        alert_type = alert.__class__.__name__
        # Periodically, libtorrent will send us a state_update_alert, which contains the torrent status of
        # all torrents changed since the last time we received this alert.
        if alert_type == "state_update_alert":
            for status in cast(lt.state_update_alert, alert).status:
                infohash = status.info_hash.to_bytes()
                if infohash not in self.downloads:
                    logger.debug("Got state_update for unknown torrent %s", hexlify(infohash))
                    continue
                self.downloads[infohash].update_lt_status(status)
        if alert_type == "state_changed_alert":
            handle = cast(lt.state_changed_alert, alert).handle
            infohash = handle.info_hash().to_bytes()
            if infohash not in self.downloads:
                logger.debug("Got state_change for unknown torrent %s", hexlify(infohash))
            else:
                self.downloads[infohash].update_lt_status(handle.status())
        # Resolve which download this alert belongs to (torrent alerts carry a handle).
        infohash = (alert.handle.info_hash().to_bytes() if hasattr(alert, "handle") and alert.handle.is_valid()
                    else getattr(alert, "info_hash", b""))
        download = self.downloads.get(infohash)
        if download:
            # Forward only when the download has a usable handle, or when the alert itself
            # creates ("add_torrent_alert") or removes ("torrent_removed_alert") that handle.
            is_process_alert = (download.handle and download.handle.is_valid()) \
                               or (not download.handle and alert_type == "add_torrent_alert") \
                               or (download.handle and alert_type == "torrent_removed_alert")
            if is_process_alert:
                download.process_alert(cast(lt.torrent_alert, alert), alert_type)
            else:
                logger.debug("Got alert for download without handle %s: %s", infohash, alert)
        elif infohash:
            logger.debug("Got alert for unknown download %s: %s", infohash, alert)
        if alert_type == "listen_succeeded_alert":
            ls_alert = cast(lt.listen_succeeded_alert, alert)
            self.listen_ports[hops][ls_alert.address] = ls_alert.port
        elif alert_type == "peer_disconnected_alert":
            self.notifier.notify(Notification.peer_disconnected,
                                 peer_id=cast(lt.peer_disconnected_alert, alert).pid.to_bytes())
        elif alert_type == "session_stats_alert":
            ss_alert = cast(lt.session_stats_alert, alert)
            queued_disk_jobs = ss_alert.values["disk.queued_disk_jobs"]
            self.queued_write_bytes = ss_alert.values["disk.queued_write_bytes"]
            num_write_jobs = ss_alert.values["disk.num_write_jobs"]
            # No pending disk activity: this session is safe to shut down (see ``shutdown``).
            if queued_disk_jobs == self.queued_write_bytes == num_write_jobs == 0:
                self.lt_session_shutdown_ready[hops] = True
            if self.session_stats_callback:
                self.session_stats_callback(ss_alert)
        elif alert_type == "dht_pkt_alert" and self.dht_health_manager is not None:
            # Unfortunately, the Python bindings don't have a direction attribute.
            # So, we'll have to resort to using the string representation of the alert instead.
            incoming = str(alert).startswith("<==")
            decoded = cast(dict[bytes, Any], lt.bdecode(cast(lt.dht_pkt_alert, alert).pkt_buf))
            if not decoded:
                return
            # We are sending a raw DHT message - notify the DHTHealthManager of the outstanding request.
            if not incoming and decoded.get(b"y") == b"q" \
                    and decoded.get(b"q") == b"get_peers" and decoded[b"a"].get(b"scrape") == 1:
                self.dht_health_manager.requesting_bloomfilters(decoded[b"t"],
                                                                decoded[b"a"][b"info_hash"])
            # We received a raw DHT message - decode it and check whether it is a BEP33 message.
            if incoming and b"r" in decoded and b"BFsd" in decoded[b"r"] and b"BFpe" in decoded[b"r"]:
                self.dht_health_manager.received_bloomfilters(decoded[b"t"],
                                                              bytearray(decoded[b"r"][b"BFsd"]),
                                                              bytearray(decoded[b"r"][b"BFpe"]))
def update_ip_filter(self, lt_session: lt.session, ip_addresses: Iterable[str]) -> None:
"""
Add illegal IPs to libtorrent.
"""
logger.debug("Updating IP filter %s", ip_addresses)
ip_filter = lt.ip_filter()
ip_filter.add_rule("0.0.0.0", "255.255.255.255", 1)
for ip in ip_addresses:
ip_filter.add_rule(ip, ip, 0)
lt_session.set_ip_filter(ip_filter)
    async def get_metainfo(self, infohash: bytes, timeout: float = 7, hops: int | None = None, # noqa: C901
                           url: str | None = None, raise_errors: bool = False) -> dict | None:
        """
        Lookup metainfo for a given infohash. The mechanism works by joining the swarm for the infohash connecting
        to a few peers, and downloading the metadata for the torrent.

        :param infohash: The (binary) infohash to lookup metainfo for.
        :param timeout: A timeout in seconds.
        :param hops: the number of tunnel hops to use for this lookup. If None, use config default.
        :param url: Optional URL. Can contain trackers info, etc.
        :param raise_errors: re-raise lookup failures instead of returning None.
        :return: The metainfo, or None when the lookup failed (and ``raise_errors`` is False).
        """
        infohash_hex = hexlify(infohash)
        if infohash in self.metainfo_cache:
            logger.info("Returning metainfo from cache for %s", infohash_hex)
            return self.metainfo_cache[infohash]["meta_info"]
        logger.info("Trying to fetch metainfo for %s", infohash_hex)
        if infohash in self.metainfo_requests:
            # Piggy-back on an already-running lookup for the same infohash.
            download = self.metainfo_requests[infohash].download
            self.metainfo_requests[infohash].pending += 1
        elif infohash in self.downloads:
            download = self.downloads[infohash]
        else:
            # Spin up a hidden, checkpoint-less download purely to fetch the metadata.
            tdef = TorrentDefNoMetainfo(infohash, b"metainfo request", url=url)
            dcfg = DownloadConfig.from_defaults(self.config)
            dcfg.set_hops(hops or self.config.get("libtorrent/download_defaults/number_hops"))
            dcfg.set_upload_mode(True) # Upload mode should prevent libtorrent from creating files
            if self.metadata_tmpdir is not None:
                dcfg.set_dest_dir(self.metadata_tmpdir.name)
            try:
                download = await self.start_download(tdef=tdef, config=dcfg, hidden=True, checkpoint_disabled=True)
            except TypeError as e:
                logger.warning(e)
                if raise_errors:
                    raise
                return None
            self.metainfo_requests[infohash] = MetainfoLookup(download, 1)
        try:
            metainfo = download.tdef.get_metainfo() or await wait_for(shield(download.future_metainfo), timeout)
        except (CancelledError, asyncio.TimeoutError) as e:
            logger.warning("%s: %s (timeout=%f)", type(e).__name__, str(e), timeout)
            logger.info("Failed to retrieve metainfo for %s", infohash_hex)
            if raise_errors:
                raise
            return None
        logger.info("Successfully retrieved metainfo for %s", infohash_hex)
        self.metainfo_cache[infohash] = {"time": time.time(), "meta_info": metainfo}
        self.notifier.notify(Notification.torrent_metadata_added, metadata={
            "infohash": infohash,
            "size": download.tdef.get_length(),
            "title": download.tdef.get_name_utf8(),
            "metadata_type": 300,
            "tracker_info": (list(download.tdef.get_trackers()) or [""])[0]
        })
        seeders, leechers = download.get_state().get_num_seeds_peers()
        metainfo[b"seeders"] = seeders
        metainfo[b"leechers"] = leechers
        # Tear down the helper download once the last pending caller is done with it.
        if infohash in self.metainfo_requests:
            self.metainfo_requests[infohash].pending -= 1
            if self.metainfo_requests[infohash].pending <= 0:
                await self.remove_download(download, remove_content=True)
                self.metainfo_requests.pop(infohash, None)
        return metainfo
def _task_cleanup_metainfo_cache(self) -> None:
oldest_time = time.time() - METAINFO_CACHE_PERIOD
for info_hash, cache_entry in list(self.metainfo_cache.items()):
last_time = cache_entry["time"]
if last_time < oldest_time:
del self.metainfo_cache[info_hash]
    async def _request_torrent_updates(self) -> None:
        # Periodic task: ask every session to post a state_update_alert for all changed
        # torrents (0xffffffff enables all status flags); consumed in ``process_alert``.
        for ltsession in self.ltsessions.values():
            (await ltsession).post_torrent_updates(0xffffffff)
    async def _task_process_alerts(self) -> None:
        # Periodic task: drain the alert queue of every session and dispatch each alert.
        # ``list`` snapshots the dict so a session added mid-iteration cannot break the loop.
        for hops, ltsession in list(self.ltsessions.items()):
            for alert in (await ltsession).pop_alerts():
                self.process_alert(alert, hops=hops)
    def _map_call_on_ltsessions(self, hops: int | None, funcname: str, *args: Any, **kwargs) -> None: # noqa: ANN401
        # Invoke ``funcname(*args, **kwargs)`` on the session for the given hop count, or on
        # every session when ``hops`` is None, once the corresponding session future resolves.
        if hops is None:
            for session in self.ltsessions.values():
                session.add_done_callback(lambda s: getattr(s.result(), funcname)(*args, **kwargs))
        else:
            self.get_session(hops).add_done_callback(lambda s: getattr(s.result(), funcname)(*args, **kwargs))
    async def start_download_from_uri(self, uri: str, config: DownloadConfig | None = None) -> Download:
        """
        Start a download from the given uri.

        Supported schemes: http(s) (remote .torrent), magnet, and file (local .torrent).
        :raises Exception: when the URI scheme is not one of the above.
        """
        logger.info("Start download from URI: %s", uri)
        uri = await unshorten(uri)
        scheme = URL(uri).scheme
        if scheme in ("http", "https"):
            logger.info("Http(s) scheme detected")
            tdef = await TorrentDef.load_from_url(uri)
            return await self.start_download(tdef=tdef, config=config)
        if scheme == "magnet":
            logger.info("Magnet scheme detected")
            params = lt.parse_magnet_uri(uri)
            # ``parse_magnet_uri`` returns a dict in libtorrent 1.x and an object in 2.x.
            try:
                # libtorrent 1.2.19
                name, infohash = params["name"].encode(), params["info_hash"] # type: ignore[index] # (checker is 2.X)
            except TypeError:
                # libtorrent 2.0.9
                name = params.name.encode()
                infohash = unhexlify(str(params.info_hash))
            logger.info("Name: %s. Infohash: %s", name, infohash)
            if infohash in self.metainfo_cache:
                logger.info("Metainfo found in cache")
                tdef = TorrentDef.load_from_dict(self.metainfo_cache[infohash]["meta_info"])
            else:
                logger.info("Metainfo not found in cache")
                tdef = TorrentDefNoMetainfo(infohash, name if name else b"Unknown name", url=uri)
            return await self.start_download(tdef=tdef, config=config)
        if scheme == "file":
            logger.info("File scheme detected")
            file = url_to_path(uri)
            return await self.start_download(torrent_file=file, config=config)
        msg = "invalid uri"
        raise Exception(msg)
    async def start_download(self, torrent_file: str | None = None, tdef: TorrentDef | None = None,
                             config: DownloadConfig | None = None,
                             checkpoint_disabled: bool = False, hidden: bool = False) -> Download:
        """
        Start a download from the given information.

        :param torrent_file: path of a .torrent file (only used when ``tdef`` is not given).
        :param tdef: the torrent definition to download (takes precedence over ``torrent_file``).
        :param config: the download configuration; defaults from the Tribler config when not given.
        :param checkpoint_disabled: do not write checkpoints for this download.
        :param hidden: do not expose this download to the user.
        :raises ValueError: when neither ``tdef`` nor ``torrent_file`` is supplied.
        """
        logger.info("Starting download: filename: %s, torrent def: %s", str(torrent_file), str(tdef))
        if config is None:
            config = DownloadConfig.from_defaults(self.config)
            logger.info("Use a default config.")
        # the priority of the parameters is: (1) tdef, (2) torrent_file.
        # so if we have tdef, and torrent_file will be ignored, and so on.
        if tdef is None:
            logger.info("Torrent def is None. Trying to load it from torrent file.")
            if torrent_file is None:
                msg = "Torrent file must be provided if tdef is not given"
                raise ValueError(msg)
            # try to get the torrent from the given torrent file
            tdef = await TorrentDef.load(torrent_file)
        assert tdef is not None, "tdef MUST not be None after loading torrent"
        infohash = tdef.get_infohash()
        download = self.get_download(infohash)
        # An existing, non-metainfo download for this infohash: just merge any new trackers.
        if download and infohash not in self.metainfo_requests:
            logger.info("Download exists and metainfo is not requested.")
            new_trackers = list(tdef.get_trackers() - download.get_def().get_trackers())
            if new_trackers:
                logger.info("New trackers: %s", str(new_trackers))
                self.update_trackers(tdef.get_infohash(), new_trackers)
            return download
        # Create the destination directory if it does not exist yet
        try:
            destination_directory = config.get_dest_dir()
            if not destination_directory.is_dir():
                logger.info("Destination directory does not exist. Creating it: %s", str(destination_directory))
                os.makedirs(destination_directory)
        except OSError:
            logger.exception("Unable to create the download destination directory.")
        if config.get_time_added() == 0:
            config.set_time_added(int(time.time()))
        # Create the download
        download = Download(tdef=tdef,
                            config=config,
                            checkpoint_disabled=checkpoint_disabled,
                            hidden=hidden or config.get_bootstrap_download(),
                            notifier=self.notifier,
                            state_dir=self.state_dir,
                            download_manager=self)
        logger.info("Download created: %s", str(download))
        atp = download.get_atp()
        logger.info("ATP: %s", str({k: v for k, v in atp.items() if k not in ["resume_data"]}))
        # Keep metainfo downloads in self.downloads for now because we will need to remove it later,
        # and removing the download at this point will stop us from receiving any further alerts.
        if infohash not in self.metainfo_requests or self.metainfo_requests[infohash].download == download:
            logger.info("Metainfo is not requested or download is the first in the queue.")
            self.downloads[infohash] = download
        logger.info("Starting handle.")
        await self.start_handle(download, atp)
        return download
    async def start_handle(self, download: Download, atp: dict) -> None:
        """
        Create and start the libtorrent handle for the given download.

        :param download: the download to create a handle for.
        :param atp: the "add torrent params" dict that is handed to libtorrent.
        """
        # Log the atp without the (potentially huge) resume_data blob.
        atp_resume_data_skipped = atp.copy()
        resume_data = atp.get("resume_data")
        if resume_data:
            atp_resume_data_skipped["resume_data"] = "<skipped in log>"
        logger.info("Start handle. Download: %s. Atp: %s", str(download), str(atp_resume_data_skipped))
        if resume_data:
            logger.debug("Download resume data: %s", str(atp["resume_data"]))
        ltsession = await self.get_session(download.config.get_hops())
        infohash = download.get_def().get_infohash()
        # A real download supersedes any in-flight metainfo-only download for the same torrent.
        if infohash in self.metainfo_requests and self.metainfo_requests[infohash].download != download:
            logger.info("Cancelling metainfo request(s) for infohash:%s", hexlify(infohash))
            # Leave the checkpoint. Any checkpoint that exists will belong to the download we are currently starting.
            await self.remove_download(self.metainfo_requests.pop(infohash).download,
                                       remove_content=True, remove_checkpoint=False)
            self.downloads[infohash] = download
        known = {h.info_hash().to_bytes(): h for h in ltsession.get_torrents()}
        existing_handle = known.get(infohash)
        if existing_handle:
            # Reuse existing handle
            logger.debug("Reusing handle %s", hexlify(infohash))
            download.post_alert("add_torrent_alert", {"handle": existing_handle})
        else:
            # Otherwise, add it anew
            _ = self.replace_task(f"AddTorrent{infohash}", self._async_add_torrent, ltsession, infohash, atp,
                                  ignore=(Exception,))
async def _async_add_torrent(self, ltsession: lt.session, infohash: bytes , atp: dict) -> None:
self._logger.debug("Adding handle %s", hexlify(infohash))
# To prevent flooding the DHT with a short burst of queries and triggering
# flood protection, we postpone adding torrents until we get enough DHT peers.
# The asynchronous wait should be done as close as possible to the actual
# Libtorrent calls, so the higher-level download-adding logic does not block.
# Otherwise, e.g. if added to the Session init sequence, this results in startup
# time increasing by 10-20 seconds.
# See https://github.com/Tribler/tribler/issues/5319
if self.dht_readiness_timeout > 0 and self.dht_ready_task is not None:
try:
await wait_for(shield(self.dht_ready_task), timeout=self.dht_readiness_timeout)
except asyncio.TimeoutError:
self._logger.warning("Timeout waiting for libtorrent DHT getting enough peers")
ltsession.async_add_torrent(encode_atp(atp))
def get_libtorrent_version(self) -> str:
    """
    Return the version string of the libtorrent library in use.
    """
    # Newer python bindings expose __version__; older ones only have ``version``.
    if hasattr(lt, "__version__"):
        return lt.__version__
    return lt.version
def set_session_settings(self, lt_session: lt.session, new_settings: dict) -> None:
    """
    Apply new settings to a libtorrent session.

    :param lt_session: The libtorrent session to apply the settings to.
    :param new_settings: The new settings to apply.
    """
    # Keep our own merged copy of the settings, because subsequent calls to
    # get_settings are likely to fail when libtorrent tries to decode
    # peer_fingerprint to unicode.
    cached = self.ltsettings.get(lt_session)
    if cached is None:
        cached = lt_session.get_settings()
        self.ltsettings[lt_session] = cached
    cached.update(new_settings)

    apply_func = getattr(lt_session, "apply_settings", None)
    if apply_func is None:
        apply_func = lt_session.set_settings  # type: ignore[attr-defined] # (checker uses 2.X)
    try:
        apply_func(new_settings)
    except OverflowError as e:
        msg = f"Overflow error when setting libtorrent sessions with settings: {new_settings}"
        raise OverflowError(msg) from e
def get_session_settings(self, lt_session: lt.session) -> dict:
    """
    Return a deep copy of the cached libtorrent settings for the given session.
    """
    cached = self.ltsettings.get(lt_session, {})
    return deepcopy(cached)
def update_max_rates_from_config(self) -> None:
    """
    Set the maximum download and maximum upload rate limits with the value in the config.

    This is the extra step necessary to apply a new maximum download/upload rate setting.
    """
    limits = {
        "download_rate_limit": DownloadManager.get_libtorrent_max_download_rate(self.config),
        "upload_rate_limit": DownloadManager.get_libtorrent_max_upload_rate(self.config),
    }
    for session_future in self.ltsessions.values():
        # Sessions are wrapped in futures: apply the limits once each one resolves.
        session_future.add_done_callback(lambda fut: self.set_session_settings(fut.result(), limits))
def post_session_stats(self) -> None:
    """
    Gather statistics and cause a ``session_stats_alert``.
    """
    logger.info("Post session stats")
    for session_future in self.ltsessions.values():
        session_future.add_done_callback(lambda fut: fut.result().post_session_stats())
async def remove_download(self, download: Download, remove_content: bool = False,
                          remove_checkpoint: bool = True) -> None:
    """
    Remove a download and optionally also remove the downloaded file(s) and checkpoint.

    :param download: The download to remove.
    :param remove_content: Also delete the downloaded data from disk (passed to libtorrent).
    :param remove_checkpoint: Also delete the on-disk checkpoint (.conf) file.
    """
    infohash = download.get_def().get_infohash()
    handle = download.handle
    # Note that the following block of code needs to be able to deal with multiple simultaneous
    # calls using the same download object. We need to make sure that we don't return without
    # the removal having finished.
    if handle:
        if handle.is_valid():
            if download.stream is not None:
                download.stream.disable()
            logger.debug("Removing handle %s", hexlify(infohash))
            (await self.get_session(download.config.get_hops())).remove_torrent(handle, int(remove_content))
        else:
            logger.debug("Cannot remove handle %s because it does not exists", hexlify(infohash))
        await download.shutdown()
        # Only unregister when the registered download is still this exact object:
        # a concurrent re-add may have replaced it in the meantime.
        if infohash in self.downloads and self.downloads[infohash] == download:
            self.downloads.pop(infohash)
        if remove_checkpoint:
            self.remove_config(infohash)
    else:
        logger.debug("Cannot remove unknown download")
def get_download(self, infohash: bytes) -> Download | None:
    """
    Return the download belonging to the given infohash, or None when unknown.
    """
    return self.downloads.get(infohash)
def get_downloads(self) -> list[Download]:
    """
    Get a list of all known downloads.

    :returns: The registered downloads, in insertion order.
    """
    # Use the lowercase built-in generic (enabled by ``from __future__ import annotations``)
    # for consistency with the other annotations in this file, e.g. ``list[DownloadState]``.
    return list(self.downloads.values())
def download_exists(self, infohash: bytes) -> bool:
    """
    Check whether a download with the given infohash is registered.
    """
    known_downloads = self.downloads
    return infohash in known_downloads
async def update_hops(self, download: Download, new_hops: int) -> None:
    """
    Update the amount of hops for a specified download. This can be done on runtime.

    The download is checkpointed, removed, and restarted with a copied config
    that carries the new hop count.
    """
    infohash = hexlify(download.tdef.get_infohash())
    logger.info("Updating the amount of hops of download %s", infohash)
    await download.save_resume_data()
    await self.remove_download(download)
    # copy the old download_config and change the hop count
    config = download.config.copy()
    config.set_hops(new_hops)
    # If the user wants to change the hop count to 0, don't automatically bump this up to 1 anymore
    config.set_safe_seeding(False)
    await self.start_download(tdef=download.tdef, config=config)
def update_trackers(self, infohash: bytes, trackers: list[bytes]) -> None:
    """
    Update the trackers for a download.

    :param infohash: infohash of the torrent that needs to be updated
    :param trackers: A list of tracker urls.
    """
    download = self.get_download(infohash)
    if download:
        old_def = download.get_def()
        old_trackers = old_def.get_trackers()
        # Only keep the trackers we do not already know about.
        new_trackers = list(set(trackers) - old_trackers)
        all_trackers = [*old_trackers, *new_trackers]
        if new_trackers:
            # Add new trackers to the download
            download.add_trackers(new_trackers)
            # Create a new TorrentDef
            if isinstance(old_def, TorrentDefNoMetainfo):
                new_def = TorrentDefNoMetainfo(old_def.get_infohash(), old_def.get_name(),
                                               download.get_magnet_link())
            else:
                metainfo = old_def.get_metainfo()
                if len(all_trackers) > 1:
                    # Multiple trackers go into announce-list (BEP 12); the single
                    # announce key is dropped in favour of the tiered list.
                    metainfo[b"announce-list"] = [[tracker] for tracker in all_trackers]
                    metainfo.pop(b"announce", None)
                else:
                    metainfo[b"announce"] = all_trackers[0]
                new_def = TorrentDef.load_from_dict(metainfo)
            # Set TorrentDef + checkpoint
            download.set_def(new_def)
            download.checkpoint()
def set_download_states_callback(self, user_callback: Callable[[list[DownloadState]], Awaitable[None] | None],
                                 interval: float = 1.0) -> None:
    """
    Set the download state callback. Remove any old callback if it's present.

    The callback is invoked repeatedly with a list of DownloadStates, one for
    each Download in the Session, as its only argument. It may be a plain
    function or return an awaitable; a returned awaitable is awaited before the
    task finishes (see ``_invoke_states_cb``). The callback's return value is
    otherwise ignored — the invocation interval is fixed by ``interval``.

    :param user_callback: a function adhering to the above spec
    :param interval: time in seconds between invocations of the callback
    """
    logger.debug("Starting the download state callback with interval %f", interval)
    self.replace_task("download_states_lc", self._invoke_states_cb, user_callback, interval=interval)
async def _invoke_states_cb(self, callback: Callable[[list[DownloadState]], Awaitable[None] | None]) -> None:
    """
    Invoke the download states callback with a list of the download states.

    Synchronous callbacks are called directly; a coroutine returned by an
    async callback is awaited before this method returns.
    """
    result = callback([download.get_state() for download in self.downloads.values()])
    if iscoroutine(result):
        await result
async def sesscb_states_callback(self, states_list: list[DownloadState]) -> None:
    """
    This method is periodically (every second) called with a list of the download states of the active downloads.

    Responsibilities: re-add safe-seeding downloads with anonymity once they
    start seeding, periodically report Tribler peers to the notifier, and keep
    a recent snapshot of the states list.
    """
    self.state_cb_count += 1
    for ds in states_list:
        download = ds.get_download()
        infohash = download.get_def().get_infohash()
        if (ds.get_status() == DownloadStatus.SEEDING and download.config.get_hops() == 0
                and download.config.get_safe_seeding()):
            # Re-add the download with anonymity enabled
            hops = self.config.get("libtorrent/download_defaults/number_hops")
            await self.update_hops(download, hops)
        # Check the peers of this download every five seconds and add them to the payout manager when
        # this peer runs a Tribler instance
        if self.state_cb_count % 5 == 0 and download.config.get_hops() == 0 and self.notifier:
            for peer in download.get_peer_list():
                if str(peer["extended_version"]).startswith("Tribler"):
                    self.notifier.notify(Notification.tribler_torrent_peer_update,
                                         peer_id=unhexlify(peer["id"]), infohash=infohash, balance=peer["dtotal"])
    # Keep a snapshot of the states (refreshed every 4th invocation) for get_last_download_states().
    if self.state_cb_count % 4 == 0:
        self._last_states_list = states_list
def get_last_download_states(self) -> list[DownloadState]:
    """
    Return the most recently captured list of download states.
    """
    return self._last_states_list
async def load_checkpoints(self) -> None:
    """
    Load the checkpoint files in the checkpoint directory.

    Progress is exposed through ``checkpoints_count``/``checkpoints_loaded``
    and ``all_checkpoints_are_loaded`` so other components can track startup.
    """
    self._logger.info("Load checkpoints...")
    checkpoint_filenames = list(self.get_checkpoint_dir().glob("*.conf"))
    self.checkpoints_count = len(checkpoint_filenames)
    for i, filename in enumerate(checkpoint_filenames, start=1):
        await self.load_checkpoint(filename)
        self.checkpoints_loaded = i
        # Yield to the event loop so a long checkpoint list does not starve other tasks.
        await sleep(0)
    self.all_checkpoints_are_loaded = True
    self._logger.info("Checkpoints are loaded")
async def load_checkpoint(self, filename: Path | str) -> bool:
    """
    Load a checkpoint from a given file name.

    :param filename: Path of the .conf checkpoint file.
    :returns: True when a download was started from the checkpoint, False otherwise.
    """
    try:
        conf_obj = ConfigObj(str(filename), configspec=DownloadConfig.get_spec_file_name(self.config))
        conf_obj.validate(Validator())
        config = DownloadConfig(conf_obj)
    except Exception:
        self._logger.exception("Could not open checkpoint file %s", filename)
        return False
    metainfo = config.get_metainfo()
    if not metainfo:
        self._logger.error("Could not resume checkpoint %s; metainfo not found", filename)
        return False
    if not isinstance(metainfo, dict):
        self._logger.error("Could not resume checkpoint %s; metainfo is not dict %s %s",
                           filename, type(metainfo), repr(metainfo))
        return False
    try:
        url = metainfo.get(b"url")
        url = url.decode() if url is not None else url
        # A metainfo dict with an explicit b"infohash" key has no torrent file data yet.
        tdef = (TorrentDefNoMetainfo(metainfo[b"infohash"], metainfo[b"name"], url)
                if b"infohash" in metainfo else TorrentDef.load_from_dict(metainfo))
    except (KeyError, ValueError) as e:
        self._logger.exception("Could not restore tdef from metainfo dict: %s %s ", e, metainfo)
        return False
    config.state_dir = self.state_dir
    if config.get_dest_dir() == "":  # removed torrent ignoring
        self._logger.info("Removing checkpoint %s destdir is %s", filename, config.get_dest_dir())
        os.remove(filename)
        return False
    try:
        if self.download_exists(tdef.get_infohash()):
            self._logger.info("Not resuming checkpoint because download has already been added")
        else:
            await self.start_download(tdef=tdef, config=config)
            return True
    except Exception:
        self._logger.exception("Not resuming checkpoint due to exception while adding download")
    return False
def remove_config(self, infohash: bytes) -> None:
    """
    Remove the on-disk checkpoint (.conf) file for the download with the given infohash.

    Does nothing when the download is (still, or again) registered, e.g. when it
    got re-added while removal was pending.

    :param infohash: The 20-byte infohash whose checkpoint should be removed.
    """
    if infohash in self.downloads:
        self._logger.warning("Download is back, restarted? Cancelling removal! %s", hexlify(infohash))
        return
    try:
        basename = hexlify(infohash).decode() + ".conf"
        filename = self.get_checkpoint_dir() / basename
        self._logger.debug("Removing download checkpoint %s", filename)
        if os.access(filename, os.F_OK):
            os.remove(filename)
    except OSError:
        # Narrowed from a bare ``except:`` that also swallowed KeyboardInterrupt/SystemExit.
        # The show must go on: a stale checkpoint file is not fatal.
        self._logger.exception("Could not remove state")
def get_checkpoint_dir(self) -> Path:
    """
    Returns the directory in which to checkpoint the Downloads in this Session.

    The directory is a fixed "dlcheckpoints" folder under the state directory.
    """
    return self.state_dir / "dlcheckpoints"
def get_downloads_by_name(self, torrent_name: str) -> list[Download]:
    """
    Get all downloads for which the UTF-8 name equals the given string.
    """
    candidates = self.get_downloads()
    return [candidate for candidate in candidates
            if candidate.get_def().get_name_utf8() == torrent_name]
@staticmethod
def set_libtorrent_proxy_settings(config: TriblerConfigManager, proxy_type: int,
                                  server: tuple[str, int] | None = None,
                                  auth: tuple[str, str] | None = None) -> None:
    """
    Set which proxy LibTorrent should use (default = 0).

    :param config: libtorrent config
    :param proxy_type: int (0 = no proxy server,
                            1 = SOCKS4,
                            2 = SOCKS5,
                            3 = SOCKS5 + auth,
                            4 = HTTP,
                            5 = HTTP + auth)
    :param server: (host, port) tuple or None
    :param auth: (username, password) tuple or None
    """
    uses_proxy = bool(proxy_type)
    uses_auth = proxy_type in (3, 5)
    config.set("libtorrent/proxy_type", proxy_type)
    config.set("libtorrent/proxy_server", server if uses_proxy else ":")
    config.set("libtorrent/proxy_auth", auth if uses_auth else ":")
def get_libtorrent_proxy_settings(self) -> tuple[int, tuple[str, str] | None, tuple[str, str] | None]:
"""
Get the settings for the libtorrent proxy.
"""
setting_proxy_server = str(self.config.get("libtorrent/proxy_server")).split(":")
proxy_server = ((setting_proxy_server[0], setting_proxy_server[1])
if setting_proxy_server and len(setting_proxy_server) == 2 else None)
setting_proxy_auth = str(self.config.get("libtorrent/proxy_auth")).split(":")
proxy_auth = ((setting_proxy_auth[0], setting_proxy_auth[1])
if setting_proxy_auth and len(setting_proxy_auth) == 2 else None)
return self.config.get("libtorrent/proxy_type"), proxy_server, proxy_auth
@staticmethod
def get_libtorrent_max_upload_rate(config: TriblerConfigManager) -> float:
    """
    Gets the maximum upload rate (kB / s).

    :return: the maximum upload rate in kB / s
    """
    configured_rate = config.get("libtorrent/max_upload_rate")
    # Clamp to a 32-bit signed int, which is what libtorrent accepts.
    return min(configured_rate, 2147483647)
@staticmethod
def get_libtorrent_max_download_rate(config: TriblerConfigManager) -> float:
    """
    Gets the maximum download rate (kB / s).

    :return: the maximum download rate in kB / s
    """
    configured_rate = config.get("libtorrent/max_download_rate")
    # Clamp to a 32-bit signed int, which is what libtorrent accepts.
    return min(configured_rate, 2147483647)
| 52,290 | Python | .py | 970 | 42.314433 | 120 | 0.619183 | Tribler/tribler | 4,768 | 443 | 131 | GPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,035 | download_config.py | Tribler_tribler/src/tribler/core/libtorrent/download_manager/download_config.py | from __future__ import annotations
import base64
from io import StringIO
from pathlib import Path
from typing import TYPE_CHECKING, Any, Dict, Literal, TypedDict, cast, overload
import libtorrent as lt
from configobj import ConfigObj
from validate import Validator
if TYPE_CHECKING:
    from tribler.tribler_config import TriblerConfigManager

    class DownloadConfigDefaultsSection(TypedDict):
        """
        The default config settings for a download.
        """

        hops: int  # number of anonymization hops (see DownloadConfig.set_hops)
        selected_files: list[str]  # legacy: selection by file path
        selected_file_indexes: list[int]  # selection by index in the torrent
        safe_seeding: bool
        user_stopped: bool
        share_mode: bool
        upload_mode: bool
        time_added: int  # UNIX timestamp of when the download was added
        bootstrap_download: bool
        channel_download: bool
        add_download_to_channel: bool
        saveas: str | None  # destination directory

    class StateConfigSection(TypedDict):
        """
        The runtime state info of a download.
        """

        metainfo: str  # base64-encoded bencoded metainfo dict (see _from_dict)
        engineresumedata: str  # base64-encoded bencoded libtorrent resume data

    class DownloadConfigDict(dict):
        """
        All config settings in the config file.

        This typed facade only exists for the type checker; at runtime it is a ConfigObj.
        """

        @overload  # type: ignore[override]
        def __getitem__(self, key: Literal["filename"]) -> str: ...

        @overload
        def __getitem__(self, key: Literal["download_defaults"]) -> DownloadConfigDefaultsSection: ...

        @overload
        def __getitem__(self, key: Literal["state"]) -> StateConfigSection: ...

        def __getitem__(self, key: str) -> Any: ...  # noqa: D105

        def write(self) -> None: ...  # noqa: D102
else:
    # At runtime the typed facade above is simply a ConfigObj.
    DownloadConfigDict = ConfigObj
# Name of the ConfigObj validation spec file, written under the versioned state directory.
SPEC_FILENAME = 'download_config.spec'
# The validation spec itself. Note: 'ZGU=' is base64 for b'de', the bencoded empty dict
# (matching the _from_dict/_to_dict encoding below).
SPEC_CONTENT = """[download_defaults]
hops = integer(default=0)
selected_files = string_list(default=list())
selected_file_indexes = int_list(default=list())
safe_seeding = boolean(default=False)
user_stopped = boolean(default=False)
share_mode = boolean(default=False)
upload_mode = boolean(default=False)
time_added = integer(default=0)
bootstrap_download = boolean(default=False)
channel_download = boolean(default=False)
add_download_to_channel = boolean(default=False)
saveas = string(default=None)
[state]
metainfo = string(default='ZGU=')
engineresumedata = string(default='ZGU=')
"""
def _from_dict(value: Dict) -> str:
    """
    Bencode the given dict and return the result as a base64 string.
    """
    bencoded = lt.bencode(value)
    return base64.b64encode(bencoded).decode()
def _to_dict(value: str) -> dict[bytes, Any]:
    """
    Convert a base64-encoded bencoded string back to a libtorrent dict.

    :raises RuntimeError: if the value could not be converted.
    """
    # Appending b'==' guarantees enough base64 padding; extra padding is ignored.
    raw = base64.b64decode(value.encode() + b"==")
    return cast(dict[bytes, Any], lt.bdecode(raw))
class DownloadConfig:
    """
    A configuration belonging to a specific download.

    Backed by a ConfigObj (see DownloadConfigDict) with two sections:
    ``download_defaults`` for user settings and ``state`` for runtime data.
    """

    def __init__(self, config: ConfigObj) -> None:
        """
        Create a download config from the given ConfigObj.
        """
        self.config: DownloadConfigDict = cast(DownloadConfigDict, config)

    @staticmethod
    def get_spec_file_name(settings: TriblerConfigManager) -> str:
        """
        Get the file name of the download spec.
        """
        return str(Path(settings.get_version_state_dir()) / SPEC_FILENAME)

    @staticmethod
    def from_defaults(settings: TriblerConfigManager) -> DownloadConfig:
        """
        Create a new download config from the given Tribler configuration.
        """
        spec_file_name = DownloadConfig.get_spec_file_name(settings)
        # ConfigObj validates against a spec *file*, so write the spec to disk first.
        defaults = ConfigObj(StringIO(SPEC_CONTENT))
        defaults["filename"] = spec_file_name
        Path(spec_file_name).parent.mkdir(parents=True, exist_ok=True)  # Required for the next write
        with open(spec_file_name, "wb") as spec_file:
            defaults.write(spec_file)
        # Re-load an empty config against the spec so every default gets filled in.
        defaults = ConfigObj(StringIO(), configspec=spec_file_name)
        defaults.validate(Validator())
        config = DownloadConfig(defaults)
        if settings.get("libtorrent/download_defaults/anonymity_enabled"):
            config.set_hops(int(settings.get("libtorrent/download_defaults/number_hops")))
        else:
            config.set_hops(0)
        config.set_safe_seeding(settings.get("libtorrent/download_defaults/safeseeding_enabled"))
        config.set_dest_dir(settings.get("libtorrent/download_defaults/saveas"))
        return config

    def copy(self) -> DownloadConfig:
        """
        Create a copy of this config.
        """
        return DownloadConfig(ConfigObj(self.config))

    def write(self, filename: Path) -> None:
        """
        Write the contents of this config to a file.

        :param filename: The file to (over)write the config to.
        """
        config_obj = cast(ConfigObj, self.config)
        config_obj.filename = str(filename)
        config_obj.write()

    def set_dest_dir(self, path: Path | str) -> None:
        """
        Sets the directory where to save this Download.

        :param path: A path of a directory.
        """
        self.config["download_defaults"]["saveas"] = str(path)

    def get_dest_dir(self) -> Path:
        """
        Gets the directory where to save this Download.
        """
        dest_dir = self.config["download_defaults"]["saveas"] or ""
        return Path(dest_dir)

    def set_hops(self, hops: int) -> None:
        """
        Set the number of hops for the download.
        """
        self.config["download_defaults"]["hops"] = hops

    def get_hops(self) -> int:
        """
        Get the set number of hops for the download.
        """
        return self.config["download_defaults"]["hops"]

    def set_safe_seeding(self, value: bool) -> None:
        """
        Set the safe seeding mode of the download.
        """
        self.config["download_defaults"]["safe_seeding"] = value

    def get_safe_seeding(self) -> bool:
        """
        Get the safe seeding mode of the download.
        """
        return self.config["download_defaults"]["safe_seeding"]

    def set_user_stopped(self, value: bool) -> None:
        """
        Set whether the download has been stopped by the user.
        """
        self.config["download_defaults"]["user_stopped"] = value

    def get_user_stopped(self) -> bool:
        """
        Get whether the download has been stopped by the user.
        """
        return self.config["download_defaults"]["user_stopped"]

    def set_share_mode(self, value: bool) -> None:
        """
        Set whether the download is in sharing mode.
        """
        self.config["download_defaults"]["share_mode"] = value

    def get_share_mode(self) -> bool:
        """
        Get whether the download is in sharing mode.
        """
        return self.config["download_defaults"]["share_mode"]

    def set_upload_mode(self, value: bool) -> None:
        """
        Set whether the download is in upload-only mode.
        """
        self.config["download_defaults"]["upload_mode"] = value

    def get_upload_mode(self) -> bool:
        """
        Get whether the download is in upload-only mode.
        """
        return self.config["download_defaults"]["upload_mode"]

    def set_time_added(self, value: int) -> None:
        """
        Set the UNIX timestamp for when this download was added.
        """
        self.config["download_defaults"]["time_added"] = value

    def get_time_added(self) -> int:
        """
        Get the UNIX timestamp for when this download was added.
        """
        return self.config["download_defaults"]["time_added"]

    def set_selected_files(self, file_indexes: list[int]) -> None:
        """
        Select which files in the torrent to download.

        :param file_indexes: List of file indexes as ordered in the torrent (e.g. [0,1])
        """
        self.config["download_defaults"]["selected_file_indexes"] = file_indexes

    def get_selected_files(self) -> list[int]:
        """
        Returns the list of files selected for download.

        :return: A list of file indexes.
        """
        return self.config["download_defaults"]["selected_file_indexes"]

    def set_bootstrap_download(self, value: bool) -> None:
        """
        Mark this download as a bootstrap download.
        """
        self.config["download_defaults"]["bootstrap_download"] = value

    def get_bootstrap_download(self) -> bool:
        """
        Get whether this download is a bootstrap download.
        """
        return self.config["download_defaults"]["bootstrap_download"]

    def set_metainfo(self, metainfo: dict) -> None:
        """
        Set the metainfo dict for this download.
        """
        # Stored as a base64-encoded bencoded blob (see _from_dict).
        self.config["state"]["metainfo"] = _from_dict(metainfo)

    def get_metainfo(self) -> dict | None:
        """
        Get the metainfo dict for this download or None if it cannot be decoded.
        """
        return _to_dict(self.config["state"]["metainfo"])

    def set_engineresumedata(self, engineresumedata: dict) -> None:
        """
        Set the engine resume data dict for this download.
        """
        self.config["state"]["engineresumedata"] = _from_dict(engineresumedata)

    def get_engineresumedata(self) -> dict | None:
        """
        Get the engine resume data dict for this download or None if it cannot be decoded.
        """
        return _to_dict(self.config["state"]["engineresumedata"])
| 9,338 | Python | .py | 239 | 31.619247 | 102 | 0.634443 | Tribler/tribler | 4,768 | 443 | 131 | GPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,036 | dht_health_manager.py | Tribler_tribler/src/tribler/core/libtorrent/download_manager/dht_health_manager.py | from __future__ import annotations
import math
from asyncio import Future
from binascii import hexlify
from typing import Awaitable
import libtorrent as lt
from ipv8.taskmanager import TaskManager
from tribler.core.torrent_checker.dataclasses import HealthInfo
class DHTHealthManager(TaskManager):
    """
    This class manages BEP33 health requests to the libtorrent DHT.
    """

    def __init__(self, lt_session: lt.session) -> None:
        """
        Initialize the DHT health manager.

        :param lt_session: The session used to perform health lookups.
        """
        TaskManager.__init__(self)
        self.lookup_futures: dict[bytes, Future[HealthInfo]] = {}  # Map from binary infohash to future
        self.bf_seeders: dict[bytes, bytearray] = {}  # Map from infohash to (final) seeders bloomfilter
        self.bf_peers: dict[bytes, bytearray] = {}  # Map from infohash to (final) peers bloomfilter
        self.outstanding: dict[str, bytes] = {}  # Map from transaction_id to infohash
        self.lt_session = lt_session

    def get_health(self, infohash: bytes, timeout: float = 15) -> Awaitable[HealthInfo]:
        """
        Lookup the health of a given infohash.

        :param infohash: The 20-byte infohash to lookup.
        :param timeout: The timeout of the lookup.
        :returns: A future that fires with the HealthInfo once the lookup finalizes.
        """
        if infohash in self.lookup_futures:
            # A lookup for this infohash is already in flight; share its future.
            return self.lookup_futures[infohash]

        lookup_future: Future[HealthInfo] = Future()
        self.lookup_futures[infohash] = lookup_future
        self.bf_seeders[infohash] = bytearray(256)
        self.bf_peers[infohash] = bytearray(256)

        # Perform a get_peers request. This should result in get_peers responses with the BEP33 bloom filters.
        self.lt_session.dht_get_peers(lt.sha1_hash(bytes(infohash)))

        # Fix: ``.decode`` was missing its call parentheses, which embedded the repr of the
        # bound method (instead of the hex digest) in the task name.
        self.register_task(f"lookup_{hexlify(infohash).decode()}", self.finalize_lookup, infohash, delay=timeout)

        return lookup_future

    def finalize_lookup(self, infohash: bytes) -> None:
        """
        Finalize the lookup of the provided infohash and invoke the appropriate deferred.

        :param infohash: The infohash of the lookup we finalize.
        """
        # Drop any transactions that still point to this infohash.
        for transaction_id in [key for key, value in self.outstanding.items() if value == infohash]:
            self.outstanding.pop(transaction_id, None)

        if infohash not in self.lookup_futures:
            return

        # Determine the seeders/peers
        bf_seeders = self.bf_seeders.pop(infohash)
        bf_peers = self.bf_peers.pop(infohash)
        seeders = DHTHealthManager.get_size_from_bloomfilter(bf_seeders)
        peers = DHTHealthManager.get_size_from_bloomfilter(bf_peers)
        if not self.lookup_futures[infohash].done():
            health = HealthInfo(infohash, seeders=seeders, leechers=peers)
            self.lookup_futures[infohash].set_result(health)
        self.lookup_futures.pop(infohash, None)

    @staticmethod
    def combine_bloomfilters(bf1: bytearray, bf2: bytearray) -> bytearray:
        """
        Combine two given bloom filters by ORing the bits.

        :param bf1: The first bloom filter to combine.
        :param bf2: The second bloom filter to combine.
        :return: A bytearray with the combined bloomfilter.
        """
        final_bf_len = min(len(bf1), len(bf2))
        final_bf = bytearray(final_bf_len)
        for bf_index in range(final_bf_len):
            final_bf[bf_index] = bf1[bf_index] | bf2[bf_index]
        return final_bf

    @staticmethod
    def get_size_from_bloomfilter(bf: bytearray) -> int:
        """
        Return the estimated number of items in the bloom filter.

        Uses the BEP33 estimation formula based on the number of zero bits.

        :param bf: The bloom filter of which we estimate the size.
        :return: A rounded integer, approximating the number of items in the filter.
        """
        def tobits(s: bytes) -> list[int]:
            # Expand each byte into its 8 bits (most-significant first).
            result = []
            for num in s:
                bits = bin(num)[2:]
                bits = "00000000"[len(bits):] + bits
                result.extend([int(b) for b in bits])
            return result

        bits_array = tobits(bytes(bf))
        total_zeros = bits_array.count(0)

        if total_zeros == 0:
            return 6000  # The maximum capacity of the bloom filter used in BEP33

        m = 256 * 8
        c = min(m - 1, total_zeros)
        return int(math.log(c / float(m)) / (2 * math.log(1 - 1 / float(m))))

    def requesting_bloomfilters(self, transaction_id: str, infohash: bytes) -> None:
        """
        The libtorrent DHT has sent a get_peers query for an infohash we may be interested in.
        If so, keep track of the transaction and node IDs.

        :param transaction_id: The ID of the query
        :param infohash: The infohash for which the query was sent.
        """
        if infohash in self.lookup_futures:
            self.outstanding[transaction_id] = infohash
        elif transaction_id in self.outstanding:
            # Libtorrent is reusing the transaction_id, and is now using it for a infohash that we're not interested in.
            self.outstanding.pop(transaction_id, None)

    def received_bloomfilters(self, transaction_id: str, bf_seeds: bytearray = bytearray(256),  # noqa: B008
                              bf_peers: bytearray = bytearray(256)) -> None:  # noqa: B008
        """
        We have received bloom filters from the libtorrent DHT. Register the bloom filters and process them.

        Note: the mutable bytearray defaults are deliberate (noqa: B008); they are only
        read, never mutated, by combine_bloomfilters.

        :param transaction_id: The ID of the query for which we are receiving the bloom filter.
        :param bf_seeds: The bloom filter indicating the IP addresses of the seeders.
        :param bf_peers: The bloom filter indicating the IP addresses of the peers (leechers).
        """
        infohash = self.outstanding.get(transaction_id)
        if not infohash:
            self._logger.info("Could not find lookup infohash for incoming BEP33 bloomfilters")
            return

        self.bf_seeders[infohash] = DHTHealthManager.combine_bloomfilters(self.bf_seeders[infohash], bf_seeds)
        self.bf_peers[infohash] = DHTHealthManager.combine_bloomfilters(self.bf_peers[infohash], bf_peers)
23,037 | download_state.py | Tribler_tribler/src/tribler/core/libtorrent/download_manager/download_state.py | """
Contains a snapshot of the state of the Download at a specific point in time.
Author(s): Arno Bakker
"""
from __future__ import annotations
import logging
from enum import Enum
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from pathlib import Path
import libtorrent
from tribler.core.libtorrent.download_manager.download import Download, PeerDict, PeerDictHave
class DownloadStatus(Enum):
    """
    The libtorrent status for a download.
    """

    ALLOCATING_DISKSPACE = 0
    WAITING_FOR_HASHCHECK = 1
    HASHCHECKING = 2
    DOWNLOADING = 3
    SEEDING = 4
    STOPPED = 5
    STOPPED_ON_ERROR = 6
    METADATA = 7
    # The following two statuses are never produced by DOWNLOAD_STATUS_MAP below.
    CIRCUITS = 8
    EXIT_NODES = 9
# Map used to convert libtorrent -> Tribler download status.
# Indexed by libtorrent's torrent_status.state value; entry order follows
# libtorrent's state enum (checking, downloading_metadata, downloading, finished,
# seeding, allocating, checking_resume_data) -- see the libtorrent documentation.
DOWNLOAD_STATUS_MAP = [
    DownloadStatus.WAITING_FOR_HASHCHECK,
    DownloadStatus.HASHCHECKING,
    DownloadStatus.METADATA,
    DownloadStatus.DOWNLOADING,
    DownloadStatus.SEEDING,
    DownloadStatus.SEEDING,
    DownloadStatus.ALLOCATING_DISKSPACE,
    DownloadStatus.HASHCHECKING,
]

# Direction selectors accepted by DownloadState.get_current_speed() and
# DownloadState.get_current_payload_speed().
UPLOAD = 'up'
DOWNLOAD = 'down'
class DownloadState:
"""
Contains a snapshot of the state of the Download at a specific
point in time. Using a snapshot instead of providing live data and
protecting access via locking should be faster.
cf. libtorrent torrent_status
"""
def __init__(self, download: Download, lt_status: libtorrent.torrent_status, error: str | None) -> None:
    """
    Internal constructor.

    :param download: The download this state belongs to.
    :param lt_status: The libtorrent status object. May be falsy; all accessors guard against that.
    :param error: An optional error message that takes precedence over the libtorrent error.
    """
    self._logger = logging.getLogger(self.__class__.__name__)
    self.download = download
    self.lt_status = lt_status
    self.error = error
def __str__(self) -> str:
    """
    Create a pretty printed string with the infohash, raw status and error of this state.
    """
    return f"DownloadState(infohash={self.download.tdef.infohash}, lt_status={self.lt_status}, error={self.error})"
def get_download(self) -> Download:
    """
    Return the Download of which this is a state snapshot.
    """
    return self.download
def get_progress(self) -> float:
    """
    The general progress of the Download as a fraction. When status is
    * HASHCHECKING it is the fraction of already downloaded
      content checked for integrity.
    * DOWNLOADING/SEEDING it is the fraction downloaded.

    :return: Progress as a float (0..1).
    """
    status = self.lt_status
    if not status:
        return 0
    return status.progress
def get_status(self) -> DownloadStatus:
    """
    Return the current status of the torrent.
    """
    status = self.lt_status
    if status:
        return DownloadStatus.STOPPED if status.paused else DOWNLOAD_STATUS_MAP[status.state]
    return DownloadStatus.STOPPED_ON_ERROR if self.get_error() else DownloadStatus.STOPPED
def get_error(self) -> str | None:
    """
    Returns the error that caused the download to be moved to STOPPED_ON_ERROR status.

    :return: An error message, or None when there is no error.
    """
    if self.error:
        return self.error
    if self.lt_status and self.lt_status.error:
        return self.lt_status.error
    return None
def get_current_speed(self, direct: str) -> int:
    """
    Returns the current up or download speed for the given direction (UPLOAD/DOWNLOAD).

    :return: The speed in bytes/s (0 when not actively downloading or seeding).
    """
    active = self.lt_status and self.get_status() in (DownloadStatus.DOWNLOADING, DownloadStatus.SEEDING)
    if not active:
        return 0
    return self.lt_status.upload_rate if direct == UPLOAD else self.lt_status.download_rate
def get_current_payload_speed(self, direct: str) -> int:
    """
    Returns the current up or download payload speed for the given direction (UPLOAD/DOWNLOAD).

    :return: The speed in bytes/s (0 when not actively downloading or seeding).
    """
    active = self.lt_status and self.get_status() in (DownloadStatus.DOWNLOADING, DownloadStatus.SEEDING)
    if not active:
        return 0
    return self.lt_status.upload_payload_rate if direct == UPLOAD else self.lt_status.download_payload_rate
@property
def all_time_upload(self) -> int:
    """
    Accumulated upload byte counter. It is persisted in the resume data to keep totals across sessions.

    :return: The amount in bytes.
    """
    status = self.lt_status
    return status.all_time_upload if status else 0
@property
def all_time_download(self) -> int:
    """
    Accumulated download byte counter. It is persisted in the resume data to keep totals across sessions.

    :return: The amount in bytes.
    """
    status = self.lt_status
    return status.all_time_download if status else 0
@property
def total_upload(self) -> int:
    """
    Number of bytes uploaded to all peers, accumulated, this session only.

    :return: The amount in bytes.
    """
    status = self.lt_status
    return status.total_upload if status else 0
@property
def total_download(self) -> int:
    """
    Number of bytes downloaded from all peers, accumulated, this session only.

    :return: The amount in bytes.
    """
    status = self.lt_status
    return status.total_download if status else 0
@property
def total_payload_upload(self) -> int:
    """
    Amount of bytes sent this session, counting only the actual payload data.

    :return: The amount in bytes.
    """
    status = self.lt_status
    return status.total_payload_upload if status else 0
@property
def total_payload_download(self) -> int:
    """
    Amount of bytes received this session, counting only the actual payload data.

    :return: The amount in bytes.
    """
    status = self.lt_status
    return status.total_payload_download if status else 0
def get_all_time_ratio(self) -> float:
    """
    Returns the accumulated seeding ratio of the download across multiple sessions.

    Returns -1 (instead of infinity, which would break JSON encoding) when bytes
    were uploaded but nothing was ever downloaded.
    """
    if not self.lt_status:
        return 0
    downloaded = self.all_time_download
    if not downloaded:
        return -1 if self.all_time_upload else 0
    return self.all_time_upload / downloaded
def get_seeding_time(self) -> int:
    """
    The active time (not paused), while finished and while being a seed, in seconds.
    """
    status = self.lt_status
    return status.finished_time if status else 0
def get_eta(self) -> float:
    """
    Estimate the remaining download time.

    :return: The estimated time to completion in seconds (0.0 when no status is available).
    """
    if not self.lt_status:
        return 0.0
    total_length = float(self.download.get_def().get_length())
    # Clamp the rate away from zero to avoid division by zero.
    rate = max(0.000001, self.lt_status.download_rate)
    return (1.0 - self.get_progress()) * (total_length / rate)
def get_num_seeds_peers(self) -> tuple[int, int]:
    """
    Count the connected seeds and non-seed peers.

    :return: A tuple (num seeds, num peers), (0, 0) when there is no status or
             the download is neither downloading nor seeding.
    """
    if not self.lt_status:
        return 0, 0
    if self.get_status() not in [DownloadStatus.DOWNLOADING, DownloadStatus.SEEDING]:
        return 0, 0
    num_seeds = self.lt_status.list_seeds
    num_connected = self.lt_status.list_peers
    return num_seeds, num_connected - num_seeds
def get_pieces_complete(self) -> list[bool]:
    """
    Returns a list of booleans indicating whether we have completely
    received that piece of the content. The list of pieces for which
    we provide this info depends on which files were selected for download
    using DownloadConfig.set_selected_files().

    :return: A list of booleans (empty when no status is available).
    """
    if not self.lt_status:
        return []
    return self.lt_status.pieces
def get_pieces_total_complete(self) -> tuple[int, int]:
    """
    Count the total and the completed number of pieces.

    :return: A tuple containing two integers, total and completed nr of pieces.
    """
    if not self.lt_status:
        return 0, 0
    piece_flags = self.lt_status.pieces
    return len(piece_flags), sum(piece_flags)
def get_files_completion(self) -> list[tuple[Path, float]]:
    """
    Compute the per-file download progress for the files selected with
    set_selected_files.

    :return: a list of (path, progress) tuples, with progress a float between
             0 and 1. Empty when no status or no valid handle is available.
    """
    handle = self.download.handle
    if not (self.lt_status and handle and handle.is_valid()):
        return []
    files = self.download.get_def().get_files_with_length()
    try:
        progress = handle.file_progress(flags=1)
    except RuntimeError:
        # For large torrents, the handle can be invalid at this point.
        # See https://github.com/Tribler/tribler/issues/6454
        progress = None
    if not progress or len(progress) != len(files):
        return []
    # Zero-length files count as fully complete.
    return [(path, (float(progress[index]) / size) if size > 0 else 1)
            for index, (path, size) in enumerate(files)]
def get_selected_files(self) -> list[int] | None:
"""
Get the selection status of the download's files, or None if it is not available.
"""
selected_files = self.download.config.get_selected_files()
return selected_files if len(selected_files) > 0 else None
def get_availability(self) -> float:
    """
    Return the overall availability of all pieces, using connected peers.

    Availability is defined as the number of complete copies of a piece: each
    seeder contributes a full copy. Leecher bitfields are merged per piece;
    the minimum merged count gives additional complete copies, and pieces
    available beyond that minimum contribute their average fraction.
    """
    if not self.lt_status:
        # Without libtorrent status we cannot accurately determine availability.
        return 0
    num_complete_seeders = 0
    copies_per_piece = [0] * len(self.lt_status.pieces)
    for peer_info in self.get_peer_list():
        finished = peer_info.get('completed', 0)
        bitfield = peer_info.get('have', [])
        if finished == 1 or (bitfield and all(bitfield)):
            num_complete_seeders += 1
        elif bitfield and len(bitfield) == len(copies_per_piece):
            for piece_index, has_piece in enumerate(bitfield):
                if has_piece:
                    copies_per_piece[piece_index] += 1
    if not copies_per_piece:
        return num_complete_seeders
    # Count the number of complete copies due to overlapping leecher bitfields.
    min_copies = min(copies_per_piece)
    # Fraction of pieces that are available beyond the minimum copy count.
    extra_fraction = len([copies for copies in copies_per_piece if copies > min_copies]) / len(copies_per_piece)
    return num_complete_seeders + min_copies + extra_fraction
def get_peer_list(self, include_have: bool = True) -> list[PeerDict | PeerDictHave]:
    """
    Collect the per-peer statistics of the underlying download.

    :param include_have: whether the peers' piece information is included.
    :return: one statistics dictionary per connected peer.
    """
    download = self.download
    return download.get_peer_list(include_have)
| 11,648 | Python | .py | 275 | 33.396364 | 119 | 0.63359 | Tribler/tribler | 4,768 | 443 | 131 | GPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,038 | community.py | Tribler_tribler/src/tribler/core/rendezvous/community.py | from __future__ import annotations
from typing import TYPE_CHECKING
from ipv8.community import Community, CommunitySettings
from ipv8.lazy_community import lazy_wrapper
from tribler.core.rendezvous.payload import PullRecordPayload, RecordPayload
if TYPE_CHECKING:
from ipv8.types import Peer
from tribler.core.rendezvous.database import RendezvousDatabase
class RendezvousSettings(CommunitySettings):
    """
    Settings for the RendezvousCommunity.
    """

    # The database that stores and serves rendezvous certificates.
    database: RendezvousDatabase
    # SHA-1 mid of the only peer that is allowed to pull records from us.
    crawler_mid: bytes = b"Jy\xa9\x90G\x86\xec[\xde\xda\xf8(\xe6\x81l\xa2\xe0\xba\xaf\xac"
class RendezvousCommunity(Community):
    """
    A community that shares preferred infohashes.
    """

    community_id = b"RendezvousCommunity\x00"
    settings_class = RendezvousSettings

    def __init__(self, settings: RendezvousSettings) -> None:
        """
        Create a new rendezvous community and register its message handlers.
        """
        super().__init__(settings)

        self.composition = settings

        self.add_message_handler(RecordPayload, self.on_record)
        self.add_message_handler(PullRecordPayload, self.on_pull_record)

    @lazy_wrapper(RecordPayload)
    def on_record(self, peer: Peer, payload: RecordPayload) -> None:
        """
        Records are only of interest to crawlers, which we are not: ignore them.
        """

    @lazy_wrapper(PullRecordPayload)
    def on_pull_record(self, peer: Peer, payload: PullRecordPayload) -> None:
        """
        Answer a pull request with a random certificate, but only when the
        requester is the designated crawler and the request is addressed to us.
        """
        if peer.mid != self.composition.crawler_mid:
            self.logger.warning("Refusing to serve a pull from %s, not a crawler!", str(peer))
            return
        if payload.mid != self.my_peer.mid:
            self.logger.warning("Refusing to serve a pull from %s, replay attack?!", str(peer))
            return

        record = self.composition.database.random()
        if record is not None:
            self.ez_send(peer, RecordPayload(record.public_key, record.ip, record.port,
                                             record.ping, record.start, record.stop))
| 2,092 | Python | .py | 49 | 35.612245 | 118 | 0.686914 | Tribler/tribler | 4,768 | 443 | 131 | GPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,039 | rendezvous_hook.py | Tribler_tribler/src/tribler/core/rendezvous/rendezvous_hook.py | import logging
import time
from ipv8.peerdiscovery.network import Network, PeerObserver
from ipv8.types import Peer
from tribler.core.rendezvous.database import RendezvousDatabase
class RendezvousHook(PeerObserver):
    """
    Keep track of peers that we have seen and persist their session times.
    """

    def __init__(self, rendezvous_db: RendezvousDatabase) -> None:
        """
        Write rendezvous info to the given database.

        :param rendezvous_db: the database that stores peer session times.
        """
        self.rendezvous_db = rendezvous_db

    def shutdown(self, network: Network) -> None:
        """
        Write the session times of all still-connected peers to disk and shut
        down the database.
        """
        for peer in network.verified_peers:
            self.on_peer_removed(peer)
        if self.rendezvous_db:
            self.rendezvous_db.shutdown()

    @property
    def current_time(self) -> float:
        """
        Get the current time, in seconds since the UNIX epoch.
        """
        return time.time()

    def on_peer_added(self, peer: Peer) -> None:
        """
        Callback for when a peer comes online. We do nothing with this info.
        """

    def on_peer_removed(self, peer: Peer) -> None:
        """
        Callback for when a peer is removed: write its online time to the database.
        """
        # Sample the clock once, so the sanity check and the stored value agree.
        now = self.current_time
        if now >= peer.creation_time:
            self.rendezvous_db.add(peer, peer.creation_time, now)
        else:
            # No exception is active here, so use error() instead of exception():
            # exception() would log a bogus "NoneType: None" traceback.
            logging.error("%s was first seen in the future! Something is seriously wrong!", peer)
| 1,422 | Python | .py | 40 | 28.025 | 101 | 0.632192 | Tribler/tribler | 4,768 | 443 | 131 | GPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,040 | payload.py | Tribler_tribler/src/tribler/core/rendezvous/payload.py | from __future__ import annotations
from ipv8.messaging.lazy_payload import VariablePayloadWID, vp_compile
@vp_compile
class RecordPayload(VariablePayloadWID):
    """
    A network payload containing a single rendezvous record: the public key and
    address of an observed peer, its measured ping, and the start/stop
    timestamps of the session during which we saw it.
    """

    msg_id = 1
    names = ["public_key", "ip", "port", "ping", "start", "stop"]
    format_list = ["varlenH", "varlenH", "H", "d", "d", "d"]

    public_key: bytes  # serialized public key of the observed peer
    ip: bytes  # packed binary IP address (presumably inet_pton form — see the database writer)
    port: int
    ping: float  # median ping of the peer (negative when unknown — TODO confirm against writer)
    start: float  # session start timestamp
    stop: float  # session stop timestamp
@vp_compile
class PullRecordPayload(VariablePayloadWID):
    """
    Ask for a random record.
    """

    msg_id = 2
    names = ["mid"]
    format_list = ["varlenH"]

    mid: bytes  # the mid of the peer the request is addressed to (checked against my_peer.mid)
| 712 | Python | .py | 25 | 24 | 102 | 0.662722 | Tribler/tribler | 4,768 | 443 | 131 | GPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,041 | database.py | Tribler_tribler/src/tribler/core/rendezvous/database.py | from __future__ import annotations
import socket
from pathlib import Path
from typing import TYPE_CHECKING
from ipv8.messaging.interfaces.udp.endpoint import UDPv6Address
from pony.orm import Database, db_session, select
from tribler.core.rendezvous.orm_bindings import certificate
if TYPE_CHECKING:
from os import PathLike
from ipv8.peer import Peer
from tribler.core.rendezvous.orm_bindings.certificate import RendezvousCertificate
class RendezvousDatabase:
    """
    The database to keep track of rendezvous info.
    """

    def __init__(self, db_path: PathLike) -> None:
        """
        Create a new database.

        :param db_path: the file to store the database in, or ":memory:" for a RAM-only database.
        """
        create_db = db_path == ":memory:" or not Path(db_path).exists()
        db_path_string = ":memory:" if db_path == ":memory:" else str(db_path)
        self.database = Database()
        self.Certificate = certificate.define_binding(self.database)
        # The generous timeout (seconds) reduces "database is locked" errors under concurrent access.
        self.database.bind(provider="sqlite", filename=db_path_string, create_db=create_db, timeout=120.0)
        self.database.generate_mapping(create_tables=create_db)

    def add(self, peer: Peer, start_timestamp: float, stop_timestamp: float) -> None:
        """
        Write a peer's session time to the database.

        :param peer: the peer that this session belongs to.
        :param start_timestamp: when the session started.
        :param stop_timestamp: when the session ended.
        """
        with db_session(immediate=True):
            address = peer.address
            family = socket.AF_INET6 if isinstance(address, UDPv6Address) else socket.AF_INET
            self.Certificate(public_key=peer.public_key.key_to_bin(),
                             ip=socket.inet_pton(family, address[0]),  # packed binary form
                             port=address[1],
                             ping=peer.get_median_ping() or -1.0,  # -1.0 when no ping is known
                             start=start_timestamp,
                             stop=stop_timestamp)

    def get(self, peer: Peer) -> list[RendezvousCertificate]:
        """
        Get the certificates for the given peer.
        """
        with db_session():
            # NOTE: the query variable shadows the imported "certificate" module inside this
            # generator expression (Pony requires the generator-expression query style).
            return select(certificate for certificate in self.Certificate
                          if certificate.public_key == peer.public_key.key_to_bin()).fetch()

    def random(self) -> RendezvousCertificate | None:
        """
        Get a random certificate, or None if the database is empty.
        """
        with db_session():
            results = self.Certificate.select_random(limit=1)
            if not results:
                return None
            return results[0]

    def shutdown(self) -> None:
        """
        Disconnect from the database.
        """
        self.database.disconnect()
| 2,517 | Python | .py | 59 | 32.694915 | 106 | 0.619067 | Tribler/tribler | 4,768 | 443 | 131 | GPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,042 | certificate.py | Tribler_tribler/src/tribler/core/rendezvous/orm_bindings/certificate.py | from __future__ import annotations
from typing import TYPE_CHECKING, Iterator
from pony.orm import Database, Required
from typing_extensions import Self
if TYPE_CHECKING:
import dataclasses
# Typing-only metaclass stub: declares that iterating over the entity *class* yields
# certificates, mirroring Pony's class-level iteration for static type checkers.
class IterRendezvousCertificate(type):  # noqa: D101
    def __iter__(cls) -> Iterator[RendezvousCertificate]: ...  # noqa: D105
# Typing-only stub describing the Pony entity generated by define_binding() below
# (presumably evaluated under TYPE_CHECKING only, since "dataclasses" is imported
# in that guard — confirm against the unmangled file layout).
@dataclasses.dataclass
class RendezvousCertificate(metaclass=IterRendezvousCertificate):
    """
    The database type for rendezvous certificates.
    """

    public_key: bytes  # serialized public key of the observed peer
    ip: bytes  # packed binary IP address
    port: int
    ping: float  # median ping (-1.0 when unknown — see RendezvousDatabase.add)
    start: float  # session start timestamp
    stop: float  # session stop timestamp

    def __init__(self, public_key: bytes, ip: bytes, port: int, ping: float,  # noqa: D107, PLR0913
                 start: float, stop: float) -> None: ...

    @classmethod
    def select_random(cls: type[Self], limit: int) -> list[RendezvousCertificate]: ...  # noqa: D102
def define_binding(db: Database) -> type[RendezvousCertificate]:
    """
    Define the certificate binding for the given database.

    :param db: the Pony database to attach the entity to.
    :return: the generated entity class.
    """

    class RendezvousCertificate(db.Entity):
        public_key = Required(bytes, index=True)  # indexed: certificates are looked up per peer
        ip = Required(bytes)
        port = Required(int)
        ping = Required(float)
        start = Required(float)
        stop = Required(float)

    return RendezvousCertificate
| 1,353 | Python | .py | 35 | 31.285714 | 104 | 0.657209 | Tribler/tribler | 4,768 | 443 | 131 | GPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,043 | test_hidden_services.py | Tribler_tribler/src/tribler/test_integration/test_hidden_services.py | from __future__ import annotations
from asyncio import sleep
from typing import TYPE_CHECKING, cast
from unittest.mock import Mock
import libtorrent
from ipv8.community import CommunitySettings
from ipv8.keyvault.crypto import default_eccrypto
from ipv8.messaging.anonymization.tunnel import PEER_FLAG_EXIT_BT
from ipv8.peer import Peer
from ipv8.test.base import TestBase
from ipv8.test.messaging.anonymization.mock import MockDHTProvider
from ipv8.test.mocking.exit_socket import MockTunnelExitSocket
from ipv8.test.mocking.ipv8 import MockIPv8
from tribler.core.libtorrent.download_manager.download_config import DownloadConfig
from tribler.core.libtorrent.download_manager.download_manager import DownloadManager
from tribler.core.libtorrent.download_manager.download_state import DownloadStatus
from tribler.core.libtorrent.torrentdef import TorrentDef, TorrentDefNoMetainfo
from tribler.core.libtorrent.torrents import create_torrent_file
from tribler.core.notifier import Notifier
from tribler.core.socks5.server import Socks5Server
from tribler.core.tunnel.community import TriblerTunnelCommunity, TriblerTunnelSettings
from tribler.tribler_config import TriblerConfigManager
if TYPE_CHECKING:
from tribler.core.libtorrent.download_manager.download import Download
# Node ids (indices into the test's node list) per role in the hidden-services setup.
DOWNLOADER = 0  # Node 0 tries to download through the tunnels.
SEEDER = 1  # Node 1 tries to seed through the tunnels.
RELAYS = [2, 3, 4]  # Nodes 2, 3, and 4, act as tunnel relays.
EXITS = [5, 6]  # Nodes 5 and 6 act as exit nodes.
class GlobalTestSettings(CommunitySettings):
    """
    Keep track of a global node identifier.
    """

    # Incremented by create_node() to hand each created node a unique role id.
    node_id: int = 0
class MockTriblerConfigManager(TriblerConfigManager):
    """
    A memory-based TriblerConfigManager.
    """

    def write(self) -> None:
        """
        Don't actually write to any file (keep all settings in memory).
        """
class MockDownloadManager(DownloadManager):
    """
    Mocked manager that always allows shutdown (regardless of the libtorrent state).
    """

    def is_shutdown_ready(self) -> bool:
        """
        Always ready: don't wait for libtorrent during test teardown.
        """
        return True
class MockExitDict(dict):
    """
    Wrap all exit sockets in a mock.
    """

    def __getitem__(self, key: int) -> MockTunnelExitSocket:
        """
        Get the exit socket belonging to a given circuit id, wrapping it in a
        MockTunnelExitSocket when it is not one already.
        """
        exit_socket = dict.__getitem__(self, key)
        if isinstance(exit_socket, MockTunnelExitSocket):
            return exit_socket
        return MockTunnelExitSocket(exit_socket)
class TestHiddenServicesDownload(TestBase[TriblerTunnelCommunity]):
    """
    An integration test for anonymous downloads.
    """

    # Hidden-service setup and tunneled transfers are slow: allow ample time.
    MAX_TEST_TIME = 90

    def setUp(self) -> None:
        """
        Create the necessary tunnel communities.
        """
        super().setUp()

        downloader_config = MockTriblerConfigManager()
        downloader_config.set("libtorrent/dht", False)
        self.download_manager_downloader = MockDownloadManager(downloader_config, Notifier())

        # The seeder must not be reachable through anything but the tunnels.
        seeder_config = MockTriblerConfigManager()
        seeder_config.set("libtorrent/dht", False)
        seeder_config.set("libtorrent/upnp", False)
        seeder_config.set("libtorrent/natpmp", False)
        seeder_config.set("libtorrent/lsd", False)
        self.download_manager_seeder = MockDownloadManager(seeder_config, Notifier())

        # Six SOCKS5 servers (1, 2 and 3 hops): the first three for the downloader,
        # the last three for the seeder.
        self.socks_servers: list[Socks5Server] = [Socks5Server(hops % 3 + 1) for hops in range(6)]

        self.initialize(TriblerTunnelCommunity, 7, GlobalTestSettings())

    async def tearDown(self) -> None:
        """
        Tear down all communities and the download managers.
        """
        await self.download_manager_downloader.shutdown()
        await self.download_manager_seeder.shutdown()
        await super().tearDown()

    def create_node(self, settings: CommunitySettings | None = None, create_dht: bool = False,
                    enable_statistics: bool = False) -> MockIPv8:
        """
        Create a downloader, relay, or exit node.

        The seeder does not have an overlay!
        """
        # Hand out role ids in creation order (see the module-level role constants).
        global_settings = cast(GlobalTestSettings, settings)
        my_node_id = global_settings.node_id
        global_settings.node_id += 1

        node_peer = Peer(default_eccrypto.generate_key("curve25519"))
        tunnel_settings = TriblerTunnelSettings(remove_tunnel_delay=0, socks_servers=[], download_manager=None,
                                                notifier=Notifier())
        if my_node_id == DOWNLOADER:
            self.download_manager_downloader.peer_mid = node_peer.mid
            tunnel_settings.download_manager = self.download_manager_downloader
            tunnel_settings.notifier = self.download_manager_downloader.notifier
        elif my_node_id == SEEDER:
            self.download_manager_seeder.peer_mid = node_peer.mid
            tunnel_settings.download_manager = self.download_manager_seeder
            tunnel_settings.notifier = self.download_manager_seeder.notifier
        elif my_node_id in RELAYS:
            tunnel_settings.download_manager = MockDownloadManager(MockTriblerConfigManager(),
                                                                   tunnel_settings.notifier)
            tunnel_settings.download_manager.checkpoint_directory = Mock()
        elif my_node_id in EXITS:
            tunnel_settings.exitnode_enabled = True
            tunnel_settings.peer_flags = {PEER_FLAG_EXIT_BT}
            tunnel_settings.download_manager = MockDownloadManager(MockTriblerConfigManager(),
                                                                   tunnel_settings.notifier)
            tunnel_settings.download_manager.checkpoint_directory = Mock()

        out = MockIPv8(node_peer, TriblerTunnelCommunity, tunnel_settings)
        out.overlay.dht_provider = MockDHTProvider(node_peer)
        out.overlay.settings.remove_tunnel_delay = 0
        # Route exit traffic through mocked exit sockets instead of real UDP.
        out.overlay.exit_sockets = MockExitDict(out.overlay.crypto_endpoint.exit_sockets)
        out.overlay.crypto_endpoint.exit_sockets = out.overlay.exit_sockets
        out.overlay.min_dht_lookup_interval = 0
        return out

    async def start_socks_servers(self) -> None:
        """
        Start the socks servers and attach them to the downloader and seeder.
        """
        ports = []
        for server in self.socks_servers:
            await server.start()
            ports.append(server.port)
        downloader_ports = ports[:3]
        seeder_ports = ports[3:]

        self.download_manager_downloader.config.set("libtorrent/socks_listen_ports", downloader_ports)
        self.download_manager_downloader.socks_listen_ports = downloader_ports
        self.overlay(0).settings.socks_servers = self.socks_servers[:3]
        self.overlay(0).dispatcher.set_socks_servers(self.socks_servers[:3])
        for server in self.socks_servers[:3]:
            server.output_stream = self.overlay(0).dispatcher

        self.download_manager_seeder.config.set("libtorrent/socks_listen_ports", seeder_ports)
        self.download_manager_seeder.socks_listen_ports = seeder_ports
        self.overlay(1).settings.socks_servers = self.socks_servers[3:]
        self.overlay(1).dispatcher.set_socks_servers(self.socks_servers[3:])
        for server in self.socks_servers[3:]:
            server.output_stream = self.overlay(1).dispatcher

    async def add_mock_download_config(self, manager: DownloadManager, hops: int) -> DownloadConfig:
        """
        Create a mocked DownloadConfig and initialize the given manager with it.
        """
        dest_dir = self.temporary_directory()
        defaults = MockTriblerConfigManager()
        defaults.set("libtorrent/download_defaults/saveas", dest_dir)
        defaults.set("state_dir", dest_dir)
        config = DownloadConfig.from_defaults(defaults)
        config.set_hops(hops)
        manager.state_dir = config.get_dest_dir()
        manager.metadata_tmpdir = Mock(name=config.get_dest_dir())
        manager.checkpoint_directory = config.get_dest_dir()
        manager.peer_mid = b"0000"
        await manager.initialize()
        manager.start()
        await sleep(0)
        return config

    async def start_seeding(self) -> bytes:
        """
        Create a file and start the (hidden, 1 hop) seeding.

        :return: the infohash of the seeded torrent.
        """
        config = await self.add_mock_download_config(self.download_manager_seeder, 1)
        with open(config.get_dest_dir() / "ubuntu-15.04-desktop-amd64.iso", "wb") as f:  # noqa: ASYNC230
            f.write(bytes([0] * 524288))
        metainfo = create_torrent_file([config.get_dest_dir() / "ubuntu-15.04-desktop-amd64.iso"], {})["metainfo"]
        tdef = TorrentDef(metainfo=libtorrent.bdecode(metainfo))

        download = await self.download_manager_seeder.start_download(tdef=tdef, config=config)
        await download.wait_for_status(DownloadStatus.SEEDING)
        return tdef.infohash

    async def start_anon_download(self, infohash: bytes) -> Download:
        """
        Start an anonymous download in the main Tribler session.
        """
        config = await self.add_mock_download_config(self.download_manager_downloader, 1)
        return await self.download_manager_downloader.start_download(tdef=TorrentDefNoMetainfo(infohash, b"test"),
                                                                     config=config)

    async def test_hidden_services(self) -> None:
        """
        Test an e2e anonymous download.
        """
        await self.start_socks_servers()
        await self.introduce_nodes()

        infohash = await self.start_seeding()
        await self.overlay(1).do_peer_discovery()

        download = await self.start_anon_download(infohash)
        await self.overlay(0).do_peer_discovery()
        # The download is complete once the downloader itself becomes a seed.
        await download.wait_for_status(DownloadStatus.SEEDING)
| 9,536 | Python | .py | 193 | 41.150259 | 120 | 0.690048 | Tribler/tribler | 4,768 | 443 | 131 | GPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,044 | test_anon_download.py | Tribler_tribler/src/tribler/test_integration/test_anon_download.py | from __future__ import annotations
from asyncio import sleep
from typing import TYPE_CHECKING, cast
from unittest.mock import Mock
import libtorrent
from ipv8.community import CommunitySettings
from ipv8.keyvault.crypto import default_eccrypto
from ipv8.messaging.anonymization.tunnel import PEER_FLAG_EXIT_BT
from ipv8.peer import Peer
from ipv8.test.base import TestBase
from ipv8.test.messaging.anonymization.mock import MockDHTProvider
from ipv8.test.mocking.ipv8 import MockIPv8
from tribler.core.libtorrent.download_manager.download_config import DownloadConfig
from tribler.core.libtorrent.download_manager.download_manager import DownloadManager
from tribler.core.libtorrent.download_manager.download_state import DownloadStatus
from tribler.core.libtorrent.torrentdef import TorrentDef, TorrentDefNoMetainfo
from tribler.core.libtorrent.torrents import create_torrent_file
from tribler.core.notifier import Notifier
from tribler.core.socks5.server import Socks5Server
from tribler.core.tunnel.community import TriblerTunnelCommunity, TriblerTunnelSettings
from tribler.tribler_config import TriblerConfigManager
if TYPE_CHECKING:
from tribler.core.libtorrent.download_manager.download import Download
# Node ids (indices into the test's node list) per role in the tunnel setup.
DOWNLOADER = 0  # Node 0 tries to download through the tunnels.
RELAY = 1  # Node 1 acts as a tunnel relay.
EXIT = 2  # Node 2 acts as an exit node.
class GlobalTestSettings(CommunitySettings):
    """
    Keep track of a global node identifier.
    """

    # Incremented by create_node() to hand each created node a unique role id.
    node_id: int = 0
class MockTriblerConfigManager(TriblerConfigManager):
    """
    A memory-based TriblerConfigManager.
    """

    def write(self) -> None:
        """
        Don't actually write to any file (keep all settings in memory).
        """
class MockDownloadManager(DownloadManager):
    """
    Mocked manager that always allows shutdown (regardless of the libtorrent state).
    """

    def is_shutdown_ready(self) -> bool:
        """
        Always ready: don't wait for libtorrent during test teardown.
        """
        return True
class TestAnonymousDownload(TestBase[TriblerTunnelCommunity]):
    """
    An integration test for anonymous downloads.
    """

    MAX_TEST_TIME = 30

    def setUp(self) -> None:
        """
        Create the necessary tunnel communities.
        """
        super().setUp()

        downloader_config = MockTriblerConfigManager()
        downloader_config.set("libtorrent/dht", False)
        self.download_manager_downloader = MockDownloadManager(downloader_config, Notifier())

        # The seeder is contacted directly by the exit node: keep its setup minimal.
        seeder_config = MockTriblerConfigManager()
        seeder_config.set("libtorrent/dht", False)
        seeder_config.set("libtorrent/upnp", False)
        seeder_config.set("libtorrent/natpmp", False)
        seeder_config.set("libtorrent/lsd", False)
        self.download_manager_seeder = MockDownloadManager(seeder_config, Notifier())

        # Three SOCKS5 servers for the downloader, one per hop count (1, 2 and 3).
        self.socks_servers: list[Socks5Server] = [Socks5Server(hops+1) for hops in range(3)]

        self.initialize(TriblerTunnelCommunity, 3, GlobalTestSettings())

    async def tearDown(self) -> None:
        """
        Tear down all communities and the download managers.
        """
        await self.download_manager_downloader.shutdown()
        await self.download_manager_seeder.shutdown()
        await super().tearDown()

    def create_node(self, settings: CommunitySettings | None = None, create_dht: bool = False,
                    enable_statistics: bool = False) -> MockIPv8:
        """
        Create a downloader, relay, or exit node.

        The seeder does not have an overlay!
        """
        # Hand out role ids in creation order (see the module-level role constants).
        global_settings = cast(GlobalTestSettings, settings)
        my_node_id = global_settings.node_id
        global_settings.node_id += 1

        node_peer = Peer(default_eccrypto.generate_key("curve25519"))
        tunnel_settings = TriblerTunnelSettings(remove_tunnel_delay=0, socks_servers=[], download_manager=None,
                                                notifier=Notifier())
        if my_node_id == DOWNLOADER:
            self.download_manager_downloader.peer_mid = node_peer.mid
            tunnel_settings.download_manager = self.download_manager_downloader
            tunnel_settings.notifier = self.download_manager_downloader.notifier
        elif my_node_id == RELAY:
            tunnel_settings.download_manager = MockDownloadManager(MockTriblerConfigManager(),
                                                                   tunnel_settings.notifier)
            tunnel_settings.download_manager.checkpoint_directory = Mock()
        elif my_node_id == EXIT:
            tunnel_settings.exitnode_enabled = True
            tunnel_settings.peer_flags = {PEER_FLAG_EXIT_BT}
            tunnel_settings.download_manager = MockDownloadManager(MockTriblerConfigManager(),
                                                                   tunnel_settings.notifier)
            tunnel_settings.download_manager.checkpoint_directory = Mock()

        out = MockIPv8(node_peer, TriblerTunnelCommunity, tunnel_settings)
        out.overlay.dht_provider = MockDHTProvider(node_peer)
        out.overlay.settings.remove_tunnel_delay = 0
        return out

    async def start_socks_servers(self) -> None:
        """
        Start the socks servers and attach them to the downloader.
        """
        ports = []
        for server in self.socks_servers:
            await server.start()
            ports.append(server.port)
        self.download_manager_downloader.config.set("libtorrent/socks_listen_ports", ports)
        self.download_manager_downloader.socks_listen_ports = ports
        self.overlay(0).dispatcher.set_socks_servers(self.socks_servers)
        for server in self.socks_servers:
            server.output_stream = self.overlay(0).dispatcher

    async def add_mock_download_config(self, manager: DownloadManager, hops: int) -> DownloadConfig:
        """
        Create a mocked DownloadConfig and initialize the given manager with it.
        """
        dest_dir = self.temporary_directory()
        defaults = MockTriblerConfigManager()
        defaults.set("libtorrent/download_defaults/saveas", dest_dir)
        defaults.set("state_dir", dest_dir)
        config = DownloadConfig.from_defaults(defaults)
        config.set_hops(hops)
        manager.state_dir = config.get_dest_dir()
        manager.metadata_tmpdir = Mock(name=config.get_dest_dir())
        manager.checkpoint_directory = config.get_dest_dir()
        manager.peer_mid = b"0000"
        await manager.initialize()
        manager.start()
        await sleep(0)
        return config

    async def start_seeding(self) -> bytes:
        """
        Create a file and start the (plain, 0 hop) seeding.

        :return: the infohash of the seeded torrent.
        """
        config = await self.add_mock_download_config(self.download_manager_seeder, 0)
        config.set_safe_seeding(False)
        with open(config.get_dest_dir() / "ubuntu-15.04-desktop-amd64.iso", "wb") as f:  # noqa: ASYNC230
            f.write(bytes([1] * 524288))
        metainfo = create_torrent_file([config.get_dest_dir() / "ubuntu-15.04-desktop-amd64.iso"], {})["metainfo"]
        tdef = TorrentDef(metainfo=libtorrent.bdecode(metainfo))

        download = await self.download_manager_seeder.start_download(tdef=tdef, config=config)
        await download.wait_for_status(DownloadStatus.SEEDING)
        return tdef.infohash

    async def start_anon_download(self, infohash: bytes) -> Download:
        """
        Start an anonymous download in the main Tribler session.
        """
        config = await self.add_mock_download_config(self.download_manager_downloader, 2)
        download = await self.download_manager_downloader.start_download(tdef=TorrentDefNoMetainfo(infohash, b"test"),
                                                                         config=config)
        # Wait for the seeder to have a bound listen port, then feed its address
        # to the downloader's community so the exit node can reach it.
        while not self.download_manager_seeder.listen_ports[0]:
            await sleep(0.1)
        self.overlay(DOWNLOADER).bittorrent_peers[download] = {
            ("127.0.0.1", self.download_manager_seeder.listen_ports[0]["127.0.0.1"])
        }
        return download

    async def test_anon_download(self) -> None:
        """
        Test a plain anonymous download with an exit node.
        """
        await self.start_socks_servers()
        await self.introduce_nodes()

        infohash = await self.start_seeding()

        download = await self.start_anon_download(infohash)
        # The download is complete once the downloader itself becomes a seed.
        await download.wait_for_status(DownloadStatus.SEEDING)
| 8,188 | Python | .py | 170 | 39.8 | 120 | 0.687618 | Tribler/tribler | 4,768 | 443 | 131 | GPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,045 | mypy.yml | Tribler_tribler/.github/workflows/mypy.yml | name: Mypy
# Run on every pull request, or manually from the Actions tab.
on: [pull_request, workflow_dispatch]

jobs:
  mypy:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          submodules: 'true'

      - name: Setup Python 3.9
        uses: actions/setup-python@v5
        with:
          python-version: 3.9
          cache: 'pip'

      - run: python -m pip install -r requirements.txt

      - name: Install mypy
        run: pip install mypy

      - name: Run mypy
        run: |
          # Fetch the upstream libtorrent type stubs, then type-check the core package.
          wget -O libtorrent.pyi https://github.com/arvidn/libtorrent/raw/master/bindings/python/libtorrent/__init__.pyi
          mypy -p src.tribler.core
| 641 | Python | .py | 21 | 22.333333 | 121 | 0.587097 | Tribler/tribler | 4,768 | 443 | 131 | GPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,046 | conf.py | Tribler_tribler/doc/conf.py | #
# Configuration file for the Sphinx documentation builder.
#
# This file does only contain a selection of the most common options. For a
# full list see the documentation:
# http://www.sphinx-doc.org/en/master/config
# ruff: noqa: A001
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- Project information -----------------------------------------------------
project = 'Tribler'
copyright = '2004-2024, Tribler' # Do not change manually! Handled by github_increment_version.py
author = 'Tribler'
# The short X.Y version
version = '8.0' # Do not change manually! Handled by github_increment_version.py
# The full version, including alpha/beta/rc tags
release = '8.0.0' # Do not change manually! Handled by github_increment_version.py
# -- General configuration ---------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'autoapi.extension',
    'sphinx.ext.autodoc',
    'sphinx.ext.doctest',
    'sphinx.ext.intersphinx',
    'sphinx.ext.todo',
    'sphinx.ext.mathjax',
    'sphinx.ext.ifconfig',
    'sphinx.ext.viewcode',
    'sphinx.ext.githubpages',
]
# Configure sphinx-autoapi: generate API documentation from the Python
# sources under ../src, without adding its own toctree entry.
autoapi_type = 'python'
autoapi_add_toctree_entry = False
autoapi_dirs = ['../src']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = "en"
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = []
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = None
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# (Empty: these docs ship no custom static files.)
html_static_path = []
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# The default sidebars (for documents that don't match any pattern) are
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# html_sidebars = {}
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'Triblerdoc'
# -- Options for LaTeX output ------------------------------------------------
latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',
    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'Tribler.tex', 'Tribler Documentation',
     'Tribler', 'manual'),
]
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'tribler', 'Tribler Documentation',
     [author], 1)
]
# -- Options for Texinfo output ----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'Tribler', 'Tribler Documentation',
     author, 'Tribler', 'One line description of project.',
     'Miscellaneous'),
]
# -- Options for Epub output -------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#
# epub_identifier = ''
# A unique identification for the text.
#
# epub_uid = ''
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
# -- Extension configuration -------------------------------------------------
# -- Options for intersphinx extension ---------------------------------------
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'python': ('https://docs.python.org/3', None)}
# -- Options for todo extension ----------------------------------------------
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
| 6,104 | Python | .py | 148 | 39.108108 | 98 | 0.662771 | Tribler/tribler | 4,768 | 443 | 131 | GPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,047 | locate-python.py | Tribler_tribler/build/win/locate-python.py | """
Print the directory where Python is installed.
Author(s): Lipu Fei
"""
import os
import sys
if __name__ == "__main__":
    # Print the absolute directory of the running Python interpreter.
    interpreter_dir = os.path.dirname(sys.executable)
    print(os.path.abspath(interpreter_dir))
| 185 | Python | .pyt | 8 | 21.375 | 59 | 0.708571 | Tribler/tribler | 4,768 | 443 | 131 | GPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,048 | test_dataclasses.py | Tribler_tribler/src/tribler/test_unit/core/torrent_checker/test_dataclasses.py | import sys
from ipv8.test.base import TestBase
from tribler.core.torrent_checker.dataclasses import HealthInfo
class TestHealthInfo(TestBase):
    """
    Unit tests covering the HealthInfo predicates.
    """

    def test_is_valid_no_seeders(self) -> None:
        """
        A negative seeder count must make the health info invalid.
        """
        info = HealthInfo(b"\x00" * 20, seeders=-1, leechers=200)
        self.assertFalse(info.is_valid())

    def test_is_valid_no_leechers(self) -> None:
        """
        A negative leecher count must make the health info invalid.
        """
        info = HealthInfo(b"\x00" * 20, seeders=200, leechers=-1)
        self.assertFalse(info.is_valid())

    def test_is_valid_old(self) -> None:
        """
        A check timestamp far in the future must make the health info invalid.
        """
        info = HealthInfo(b"\x00" * 20, seeders=200, leechers=200, last_check=sys.maxsize)
        self.assertFalse(info.is_valid())

    def test_is_valid_healthy(self) -> None:
        """
        A recent check with healthy seeders and leechers must be valid.
        """
        info = HealthInfo(b"\x00" * 20, seeders=200, leechers=200)
        self.assertTrue(info.is_valid())

    def test_old_old(self) -> None:
        """
        A check performed at timestamp 0 must be flagged as old.
        """
        info = HealthInfo(b"\x00" * 20, seeders=200, leechers=200, last_check=0)
        self.assertTrue(info.old())

    def test_old_new(self) -> None:
        """
        A freshly performed check must not be flagged as old.
        """
        info = HealthInfo(b"\x00" * 20, seeders=200, leechers=200)
        self.assertFalse(info.old())

    def test_older_than(self) -> None:
        """
        A check at timestamp 0 must be older than a fresh check.
        """
        info = HealthInfo(b"\x00" * 20, seeders=200, leechers=200, last_check=0)
        self.assertTrue(info.older_than(HealthInfo(b"\x00" * 20, seeders=200, leechers=200)))

    def test_much_older_than(self) -> None:
        """
        A check at timestamp 0 must be much older than a fresh check.
        """
        info = HealthInfo(b"\x00" * 20, seeders=200, leechers=200, last_check=0)
        self.assertTrue(info.much_older_than(HealthInfo(b"\x00" * 20, seeders=200, leechers=200)))

    def test_should_replace_unrelated(self) -> None:
        """
        Comparing health infos of different infohashes must raise a ValueError.
        """
        info = HealthInfo(b"\x00" * 20, seeders=200, leechers=200)
        with self.assertRaises(ValueError):
            info.should_replace(HealthInfo(b"\x01" * 20, seeders=200, leechers=200))

    def test_should_replace_invalid(self) -> None:
        """
        Invalid health info must never replace existing health info.
        """
        info = HealthInfo(b"\x00" * 20, seeders=-1, leechers=200)
        self.assertFalse(info.should_replace(HealthInfo(b"\x00" * 20, seeders=200, leechers=200)))

    def test_should_replace_own_self_checked(self) -> None:
        """
        Self-checked health info must not replace equal self-checked health info.
        """
        info = HealthInfo(b"\x00" * 20, seeders=200, leechers=200, self_checked=True)
        self.assertFalse(info.should_replace(
            HealthInfo(b"\x00" * 20, seeders=200, leechers=200, self_checked=True)))

    def test_should_replace_not_self_checked_old(self) -> None:
        """
        Self-checked health info must replace old non-self-checked health info.
        """
        info = HealthInfo(b"\x00" * 20, seeders=200, leechers=200, self_checked=True)
        self.assertTrue(info.should_replace(
            HealthInfo(b"\x00" * 20, seeders=200, leechers=200, last_check=0)))

    def test_should_replace_not_self_checked_lower(self) -> None:
        """
        Self-checked health info must replace non-self-checked info with a smaller swarm.
        """
        info = HealthInfo(b"\x00" * 20, seeders=200, leechers=200, self_checked=True)
        self.assertTrue(info.should_replace(HealthInfo(b"\x00" * 20, seeders=100, leechers=200)))

    def test_should_replace_old(self) -> None:
        """
        Newer health info must replace older health info.
        """
        info = HealthInfo(b"\x00" * 20, seeders=200, leechers=200)
        self.assertTrue(info.should_replace(
            HealthInfo(b"\x00" * 20, seeders=200, leechers=200, last_check=0)))

    def test_should_replace_self_checked(self) -> None:
        """
        Remote health info must not replace fresh self-checked health info.
        """
        info = HealthInfo(b"\x00" * 20, seeders=200, leechers=200)
        self.assertFalse(info.should_replace(
            HealthInfo(b"\x00" * 20, seeders=200, leechers=200, self_checked=True)))

    def test_should_replace_equivalent_lower(self) -> None:
        """
        Health info must replace equally fresh health info with a smaller swarm.
        """
        info = HealthInfo(b"\x00" * 20, seeders=200, leechers=200)
        self.assertTrue(info.should_replace(HealthInfo(b"\x00" * 20, seeders=100, leechers=200)))
| 4,690 | Python | .tac | 104 | 36.75 | 112 | 0.618242 | Tribler/tribler | 4,768 | 443 | 131 | GPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,049 | dataclasses.py | Tribler_tribler/src/tribler/core/torrent_checker/dataclasses.py | from __future__ import annotations
import time
from dataclasses import dataclass, field
from enum import IntEnum
MINUTE = 60
HOUR = 60 * MINUTE
# When receiving health from another peer: how far its timestamp may lie in the future.
TOLERABLE_TIME_DRIFT = MINUTE
# When asking multiple trackers in parallel, responses within this window count as equally fresh.
TORRENT_CHECK_WINDOW = MINUTE
# Number of seconds before a torrent health is considered stale. Default: 4 hours.
HEALTH_FRESHNESS_SECONDS = 4 * HOUR


class Source(IntEnum):
    """
    Source of the Torrent Health information.
    """

    UNKNOWN = 0
    DHT = 1
    TRACKER = 2
    POPULARITY_COMMUNITY = 3


@dataclass(order=True)
class HealthInfo:
    """
    A snapshot of the swarm health (seeders/leechers) of a torrent at one moment in time.
    """

    infohash: bytes = field(repr=False)
    seeders: int = 0
    leechers: int = 0
    last_check: int = field(default_factory=lambda: int(time.time()))
    self_checked: bool = False
    source: Source = Source.UNKNOWN
    tracker: str = ''

    def is_valid(self) -> bool:
        """
        Whether seeders and leechers are non-negative and the check time is at
        most TOLERABLE_TIME_DRIFT seconds in the future.
        """
        if self.seeders < 0 or self.leechers < 0:
            return False
        return self.last_check < int(time.time()) + TOLERABLE_TIME_DRIFT

    def old(self) -> bool:
        """
        Whether this check is more than HEALTH_FRESHNESS_SECONDS (4 hours) old.
        """
        return self.last_check < int(time.time()) - HEALTH_FRESHNESS_SECONDS

    def older_than(self, other: HealthInfo) -> bool:
        """
        Whether this check predates ``other`` by more than the check window.
        """
        return self.last_check < other.last_check - TORRENT_CHECK_WINDOW

    def much_older_than(self, other: HealthInfo) -> bool:
        """
        Whether this check predates ``other`` by more than 4 hours.
        """
        return self.last_check + HEALTH_FRESHNESS_SECONDS < other.last_check

    def should_replace(self, prev: HealthInfo) -> bool:
        """
        Whether this health info should replace ``prev`` (same infohash required).
        """
        if self.infohash != prev.infohash:
            msg = "An attempt to compare health for different infohashes"
            raise ValueError(msg)

        if not self.is_valid():
            return False  # Health info with future last_check time is ignored

        swarm = (self.seeders, self.leechers)
        prev_swarm = (prev.seeders, prev.leechers)

        if self.self_checked:
            # A self-check beats any remote info, any older self-check,
            # and an equal-age self-check with a smaller swarm.
            return not prev.self_checked or prev.older_than(self) or swarm > prev_swarm

        if self.older_than(prev):
            # Always ignore a new health info if it is older than the previous health info
            return False
        if prev.self_checked and not prev.old():
            # The previous self-checked health info is fresh enough, do not replace it with a remote health info
            return False
        if prev.much_older_than(self):
            # The previous health info (that can be self-checked ot not) is very old,
            # let's replace it with a more recent remote health info
            return True
        # self is a remote health info that isn't older than previous health info, but isn't much fresher as well
        return swarm > prev_swarm
@dataclass
class TrackerResponse:
    """
    A list of health responses for the given url.
    """

    # The tracker URL these health responses came from.
    url: str
    # One HealthInfo entry per torrent reported for this tracker.
    torrent_health_list: list[HealthInfo]
| 3,497 | Python | .tac | 82 | 35.329268 | 119 | 0.65812 | Tribler/tribler | 4,768 | 443 | 131 | GPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,050 | client.py | python-gitlab_python-gitlab/gitlab/client.py | """Wrapper for the GitLab API."""
from __future__ import annotations
import os
import re
from typing import (
Any,
BinaryIO,
cast,
Dict,
List,
Optional,
Tuple,
Type,
TYPE_CHECKING,
Union,
)
from urllib import parse
import requests
import gitlab
import gitlab.config
import gitlab.const
import gitlab.exceptions
from gitlab import _backends, utils
try:
import gql
import gql.transport.exceptions
import graphql
import httpx
from ._backends.graphql import GitlabTransport
_GQL_INSTALLED = True
except ImportError: # pragma: no cover
_GQL_INSTALLED = False
REDIRECT_MSG = (
"python-gitlab detected a {status_code} ({reason!r}) redirection. You must update "
"your GitLab URL to the correct URL to avoid issues. The redirection was from: "
"{source!r} to {target!r}"
)
# https://docs.gitlab.com/ee/api/#offset-based-pagination
_PAGINATION_URL = (
f"https://python-gitlab.readthedocs.io/en/v{gitlab.__version__}/"
f"api-usage.html#pagination"
)
class Gitlab:
"""Represents a GitLab server connection.
Args:
url: The URL of the GitLab server (defaults to https://gitlab.com).
private_token: The user private token
oauth_token: An oauth token
job_token: A CI job token
ssl_verify: Whether SSL certificates should be validated. If
the value is a string, it is the path to a CA file used for
certificate validation.
timeout: Timeout to use for requests to the GitLab server.
http_username: Username for HTTP authentication
http_password: Password for HTTP authentication
api_version: Gitlab API version to use (support for 4 only)
pagination: Can be set to 'keyset' to use keyset pagination
order_by: Set order_by globally
user_agent: A custom user agent to use for making HTTP requests.
retry_transient_errors: Whether to retry after 500, 502, 503, 504
or 52x responses. Defaults to False.
keep_base_url: keep user-provided base URL for pagination if it
differs from response headers
Keyword Args:
requests.Session session: HTTP Requests Session
RequestsBackend backend: Backend that will be used to make http requests
"""
def __init__(
    self,
    url: Optional[str] = None,
    private_token: Optional[str] = None,
    oauth_token: Optional[str] = None,
    job_token: Optional[str] = None,
    ssl_verify: Union[bool, str] = True,
    http_username: Optional[str] = None,
    http_password: Optional[str] = None,
    timeout: Optional[float] = None,
    api_version: str = "4",
    per_page: Optional[int] = None,
    pagination: Optional[str] = None,
    order_by: Optional[str] = None,
    user_agent: str = gitlab.const.USER_AGENT,
    retry_transient_errors: bool = False,
    keep_base_url: bool = False,
    **kwargs: Any,
) -> None:
    """Initialize the connection state, authentication, HTTP backend and
    the per-resource API managers. See the class docstring for parameters.
    """
    self._api_version = str(api_version)
    self._server_version: Optional[str] = None
    self._server_revision: Optional[str] = None
    self._base_url = utils.get_base_url(url)
    self._url = f"{self._base_url}/api/v{api_version}"
    #: Timeout to use for requests to gitlab server
    self.timeout = timeout
    self.retry_transient_errors = retry_transient_errors
    self.keep_base_url = keep_base_url
    #: Headers that will be used in request to GitLab
    self.headers = {"User-Agent": user_agent}

    #: Whether SSL certificates should be validated
    self.ssl_verify = ssl_verify

    self.private_token = private_token
    self.http_username = http_username
    self.http_password = http_password
    self.oauth_token = oauth_token
    self.job_token = job_token
    # Validates that at most one auth mechanism is configured and builds
    # the corresponding requests auth object (self._auth).
    self._set_auth_info()

    #: Create a session object for requests
    _backend: Type[_backends.DefaultBackend] = kwargs.pop(
        "backend", _backends.DefaultBackend
    )
    self._backend = _backend(**kwargs)
    self.session = self._backend.client

    self.per_page = per_page
    self.pagination = pagination
    self.order_by = order_by

    # We only support v4 API at this time
    if self._api_version not in ("4",):
        raise ModuleNotFoundError(f"gitlab.v{self._api_version}.objects")
    # NOTE: We must delay import of gitlab.v4.objects until now or
    # otherwise it will cause circular import errors
    from gitlab.v4 import objects

    self._objects = objects
    self.user: Optional[objects.CurrentUser] = None

    # One manager attribute per top-level GitLab API resource. The bare
    # string under each assignment is its attribute docstring.
    self.broadcastmessages = objects.BroadcastMessageManager(self)
    """See :class:`~gitlab.v4.objects.BroadcastMessageManager`"""
    self.bulk_imports = objects.BulkImportManager(self)
    """See :class:`~gitlab.v4.objects.BulkImportManager`"""
    self.bulk_import_entities = objects.BulkImportAllEntityManager(self)
    """See :class:`~gitlab.v4.objects.BulkImportAllEntityManager`"""
    self.ci_lint = objects.CiLintManager(self)
    """See :class:`~gitlab.v4.objects.CiLintManager`"""
    self.deploykeys = objects.DeployKeyManager(self)
    """See :class:`~gitlab.v4.objects.DeployKeyManager`"""
    self.deploytokens = objects.DeployTokenManager(self)
    """See :class:`~gitlab.v4.objects.DeployTokenManager`"""
    self.geonodes = objects.GeoNodeManager(self)
    """See :class:`~gitlab.v4.objects.GeoNodeManager`"""
    self.gitlabciymls = objects.GitlabciymlManager(self)
    """See :class:`~gitlab.v4.objects.GitlabciymlManager`"""
    self.gitignores = objects.GitignoreManager(self)
    """See :class:`~gitlab.v4.objects.GitignoreManager`"""
    self.groups = objects.GroupManager(self)
    """See :class:`~gitlab.v4.objects.GroupManager`"""
    self.hooks = objects.HookManager(self)
    """See :class:`~gitlab.v4.objects.HookManager`"""
    self.issues = objects.IssueManager(self)
    """See :class:`~gitlab.v4.objects.IssueManager`"""
    self.issues_statistics = objects.IssuesStatisticsManager(self)
    """See :class:`~gitlab.v4.objects.IssuesStatisticsManager`"""
    self.keys = objects.KeyManager(self)
    """See :class:`~gitlab.v4.objects.KeyManager`"""
    self.ldapgroups = objects.LDAPGroupManager(self)
    """See :class:`~gitlab.v4.objects.LDAPGroupManager`"""
    self.licenses = objects.LicenseManager(self)
    """See :class:`~gitlab.v4.objects.LicenseManager`"""
    self.namespaces = objects.NamespaceManager(self)
    """See :class:`~gitlab.v4.objects.NamespaceManager`"""
    self.mergerequests = objects.MergeRequestManager(self)
    """See :class:`~gitlab.v4.objects.MergeRequestManager`"""
    self.notificationsettings = objects.NotificationSettingsManager(self)
    """See :class:`~gitlab.v4.objects.NotificationSettingsManager`"""
    self.projects = objects.ProjectManager(self)
    """See :class:`~gitlab.v4.objects.ProjectManager`"""
    self.registry_repositories = objects.RegistryRepositoryManager(self)
    """See :class:`~gitlab.v4.objects.RegistryRepositoryManager`"""
    self.runners = objects.RunnerManager(self)
    """See :class:`~gitlab.v4.objects.RunnerManager`"""
    self.runners_all = objects.RunnerAllManager(self)
    """See :class:`~gitlab.v4.objects.RunnerAllManager`"""
    self.settings = objects.ApplicationSettingsManager(self)
    """See :class:`~gitlab.v4.objects.ApplicationSettingsManager`"""
    self.appearance = objects.ApplicationAppearanceManager(self)
    """See :class:`~gitlab.v4.objects.ApplicationAppearanceManager`"""
    self.sidekiq = objects.SidekiqManager(self)
    """See :class:`~gitlab.v4.objects.SidekiqManager`"""
    self.snippets = objects.SnippetManager(self)
    """See :class:`~gitlab.v4.objects.SnippetManager`"""
    self.users = objects.UserManager(self)
    """See :class:`~gitlab.v4.objects.UserManager`"""
    self.todos = objects.TodoManager(self)
    """See :class:`~gitlab.v4.objects.TodoManager`"""
    self.dockerfiles = objects.DockerfileManager(self)
    """See :class:`~gitlab.v4.objects.DockerfileManager`"""
    self.events = objects.EventManager(self)
    """See :class:`~gitlab.v4.objects.EventManager`"""
    self.audit_events = objects.AuditEventManager(self)
    """See :class:`~gitlab.v4.objects.AuditEventManager`"""
    self.features = objects.FeatureManager(self)
    """See :class:`~gitlab.v4.objects.FeatureManager`"""
    self.pagesdomains = objects.PagesDomainManager(self)
    """See :class:`~gitlab.v4.objects.PagesDomainManager`"""
    self.user_activities = objects.UserActivitiesManager(self)
    """See :class:`~gitlab.v4.objects.UserActivitiesManager`"""
    self.applications = objects.ApplicationManager(self)
    """See :class:`~gitlab.v4.objects.ApplicationManager`"""
    self.variables = objects.VariableManager(self)
    """See :class:`~gitlab.v4.objects.VariableManager`"""
    self.personal_access_tokens = objects.PersonalAccessTokenManager(self)
    """See :class:`~gitlab.v4.objects.PersonalAccessTokenManager`"""
    self.topics = objects.TopicManager(self)
    """See :class:`~gitlab.v4.objects.TopicManager`"""
    self.statistics = objects.ApplicationStatisticsManager(self)
    """See :class:`~gitlab.v4.objects.ApplicationStatisticsManager`"""
def __enter__(self) -> "Gitlab":
    """Enter the runtime context (``with Gitlab(...) as gl:``); returns self."""
    return self
def __exit__(self, *args: Any) -> None:
    """Close the underlying HTTP session when leaving the ``with`` block."""
    self.session.close()
def __getstate__(self) -> Dict[str, Any]:
    """Return picklable state without the lazily-imported objects module."""
    state = dict(self.__dict__)
    del state["_objects"]
    return state
def __setstate__(self, state: Dict[str, Any]) -> None:
    """Restore pickled state and re-import the objects module that
    ``__getstate__`` dropped from the state dict.
    """
    self.__dict__.update(state)
    # We only support v4 API at this time
    if self._api_version not in ("4",):
        raise ModuleNotFoundError(
            f"gitlab.v{self._api_version}.objects"
        )  # pragma: no cover, dead code currently
    # NOTE: We must delay import of gitlab.v4.objects until now or
    # otherwise it will cause circular import errors
    from gitlab.v4 import objects

    self._objects = objects
@property
def url(self) -> str:
    """The user-provided server URL (base URL, without the ``/api/v4`` path)."""
    return self._base_url
@property
def api_url(self) -> str:
    """The computed API base URL (base URL plus ``/api/v<api_version>``)."""
    return self._url
@property
def api_version(self) -> str:
    """The API version used ("4" is the only supported value)."""
    return self._api_version
@classmethod
def from_config(
    cls,
    gitlab_id: Optional[str] = None,
    config_files: Optional[List[str]] = None,
    **kwargs: Any,
) -> "Gitlab":
    """Create a Gitlab connection from configuration files.

    Args:
        gitlab_id: ID of the configuration section.
        config_files list[str]: List of paths to configuration files.
        kwargs:
            session requests.Session: Custom requests Session

    Returns:
        A Gitlab connection.

    Raises:
        gitlab.config.GitlabDataError: If the configuration is not correct.
    """
    config = gitlab.config.GitlabConfigParser(
        gitlab_id=gitlab_id, config_files=config_files
    )
    # Forward every parsed config value to the regular constructor;
    # extra kwargs (e.g. a custom session) pass through unchanged.
    return cls(
        config.url,
        private_token=config.private_token,
        oauth_token=config.oauth_token,
        job_token=config.job_token,
        ssl_verify=config.ssl_verify,
        timeout=config.timeout,
        http_username=config.http_username,
        http_password=config.http_password,
        api_version=config.api_version,
        per_page=config.per_page,
        pagination=config.pagination,
        order_by=config.order_by,
        user_agent=config.user_agent,
        retry_transient_errors=config.retry_transient_errors,
        **kwargs,
    )
@classmethod
def merge_config(
    cls,
    options: Dict[str, Any],
    gitlab_id: Optional[str] = None,
    config_files: Optional[List[str]] = None,
) -> "Gitlab":
    """Create a Gitlab connection by merging configuration with
    the following precedence:

    1. Explicitly provided CLI arguments,
    2. Environment variables,
    3. Configuration files:
        a. explicitly defined config files:
            i. via the `--config-file` CLI argument,
            ii. via the `PYTHON_GITLAB_CFG` environment variable,
        b. user-specific config file,
        c. system-level config file,
    4. Environment variables always present in CI (CI_SERVER_URL, CI_JOB_TOKEN).

    Args:
        options: A dictionary of explicitly provided key-value options.
        gitlab_id: ID of the configuration section.
        config_files: List of paths to configuration files.

    Returns:
        (gitlab.Gitlab): A Gitlab connection.

    Raises:
        gitlab.config.GitlabDataError: If the configuration is not correct.
    """
    config = gitlab.config.GitlabConfigParser(
        gitlab_id=gitlab_id, config_files=config_files
    )
    # URL precedence: CLI option > config file > CI env var > library default.
    url = (
        options.get("server_url")
        or config.url
        or os.getenv("CI_SERVER_URL")
        or gitlab.const.DEFAULT_URL
    )
    private_token, oauth_token, job_token = cls._merge_auth(options, config)

    return cls(
        url=url,
        private_token=private_token,
        oauth_token=oauth_token,
        job_token=job_token,
        ssl_verify=options.get("ssl_verify") or config.ssl_verify,
        timeout=options.get("timeout") or config.timeout,
        api_version=options.get("api_version") or config.api_version,
        per_page=options.get("per_page") or config.per_page,
        pagination=options.get("pagination") or config.pagination,
        order_by=options.get("order_by") or config.order_by,
        user_agent=options.get("user_agent") or config.user_agent,
    )
@staticmethod
def _merge_auth(
    options: Dict[str, Any], config: gitlab.config.GitlabConfigParser
) -> Tuple[Optional[str], Optional[str], Optional[str]]:
    """
    Return a ``(private_token, oauth_token, job_token)`` tuple in which at
    most one slot is filled.

    Several token types may be present at once (CLI options, config files,
    environment). Keeping only the highest-precedence one avoids client
    initialization errors — especially in CI, where both user-provided and
    CI-provided credentials can be available.
    """
    # Candidates in precedence order: private > oauth > job token.
    candidates: Tuple[Optional[str], ...] = (
        options.get("private_token") or config.private_token,
        options.get("oauth_token") or config.oauth_token,
        options.get("job_token") or config.job_token or os.getenv("CI_JOB_TOKEN"),
    )
    merged: List[Optional[str]] = [None, None, None]
    for slot, token in enumerate(candidates):
        if token:
            merged[slot] = token
            break
    return (merged[0], merged[1], merged[2])
def auth(self) -> None:
    """Performs an authentication using private token. Warns the user if a
    potentially misconfigured URL is detected on the client or server side.

    The `user` attribute will hold a `gitlab.objects.CurrentUser` object on
    success.
    """
    self.user = self._objects.CurrentUserManager(self).get()
    # Compare the server-reported profile URL against the configured base
    # URL to surface base-URL misconfiguration early.
    if hasattr(self.user, "web_url") and hasattr(self.user, "username"):
        self._check_url(self.user.web_url, path=self.user.username)
def version(self) -> Tuple[str, str]:
    """Return the version and revision of the GitLab server.

    The result is cached on the instance (self._server_version /
    self._server_revision), so the server is queried at most once.

    Returns:
        The server version and server revision, or
        ('unknown', 'unknown') if the server doesn't perform as expected.
    """
    if self._server_version is None:
        try:
            data = self.http_get("/version")
            if isinstance(data, dict):
                self._server_version = data["version"]
                self._server_revision = data["revision"]
            else:
                self._server_version = self._server_revision = "unknown"
        except Exception:
            # Any failure (network, auth, malformed payload) degrades to "unknown".
            self._server_version = self._server_revision = "unknown"

    return cast(str, self._server_version), cast(str, self._server_revision)
@gitlab.exceptions.on_http_error(gitlab.exceptions.GitlabMarkdownError)
def markdown(
    self, text: str, gfm: bool = False, project: Optional[str] = None, **kwargs: Any
) -> str:
    """Render an arbitrary Markdown document.

    Args:
        text: The markdown text to render
        gfm: Render text using GitLab Flavored Markdown. Default is False
        project: Full path of a project used a context when `gfm` is True
        **kwargs: Extra options to send to the server (e.g. sudo)

    Raises:
        GitlabAuthenticationError: If authentication is not correct
        GitlabMarkdownError: If the server cannot perform the request

    Returns:
        The HTML rendering of the markdown text.
    """
    # Only include "project" in the payload when one was provided.
    if project is None:
        payload = {"text": text, "gfm": gfm}
    else:
        payload = {"text": text, "gfm": gfm, "project": project}
    data = self.http_post("/markdown", post_data=payload, **kwargs)
    if TYPE_CHECKING:
        assert not isinstance(data, requests.Response)
        assert isinstance(data["html"], str)
    return data["html"]
@gitlab.exceptions.on_http_error(gitlab.exceptions.GitlabLicenseError)
def get_license(self, **kwargs: Any) -> Dict[str, Union[str, Dict[str, str]]]:
    """Retrieve information about the current license.

    Args:
        **kwargs: Extra options to send to the server (e.g. sudo)

    Raises:
        GitlabAuthenticationError: If authentication is not correct
        GitlabGetError: If the server cannot perform the request

    Returns:
        The current license information
    """
    result = self.http_get("/license", **kwargs)
    # A non-dict result (e.g. a raw Response) degrades to an empty dict.
    return result if isinstance(result, dict) else {}
@gitlab.exceptions.on_http_error(gitlab.exceptions.GitlabLicenseError)
def set_license(self, license: str, **kwargs: Any) -> Dict[str, Any]:
    """Add a new license.

    Args:
        license: The license string
        **kwargs: Extra options to send to the server (e.g. sudo)

    Raises:
        GitlabAuthenticationError: If authentication is not correct
        GitlabPostError: If the server cannot perform the request

    Returns:
        The new license information
    """
    result = self.http_post("/license", post_data={"license": license}, **kwargs)
    if TYPE_CHECKING:
        assert not isinstance(result, requests.Response)
    return result
def _set_auth_info(self) -> None:
    """Validate the configured credentials and build ``self._auth``.

    Exactly one authentication mechanism may be configured: one of the
    three token types, or HTTP basic auth (username + password).
    """
    tokens = [
        token
        for token in [self.private_token, self.oauth_token, self.job_token]
        if token
    ]

    if len(tokens) > 1:
        raise ValueError(
            "Only one of private_token, oauth_token or job_token should "
            "be defined"
        )

    # Basic auth requires both halves of the credential pair.
    if (self.http_username and not self.http_password) or (
        not self.http_username and self.http_password
    ):
        raise ValueError("Both http_username and http_password should be defined")

    if tokens and self.http_username:
        raise ValueError(
            "Only one of token authentications or http "
            "authentication should be defined"
        )

    self._auth: Optional[requests.auth.AuthBase] = None
    # At most one branch below fires, thanks to the checks above.
    if self.private_token:
        self._auth = _backends.PrivateTokenAuth(self.private_token)

    if self.oauth_token:
        self._auth = _backends.OAuthTokenAuth(self.oauth_token)

    if self.job_token:
        self._auth = _backends.JobTokenAuth(self.job_token)

    if self.http_username and self.http_password:
        self._auth = requests.auth.HTTPBasicAuth(
            self.http_username, self.http_password
        )
def enable_debug(self, mask_credentials: bool = True) -> None:
    """Turn on verbose HTTP debug logging (process-wide side effects).

    Args:
        mask_credentials: When True (default), install a log formatter
            that masks the configured token in log output.
    """
    import logging
    from http import client

    client.HTTPConnection.debuglevel = 1
    logging.basicConfig()
    logger = logging.getLogger()
    logger.setLevel(logging.DEBUG)

    httpclient_log = logging.getLogger("http.client")
    httpclient_log.propagate = True
    httpclient_log.setLevel(logging.DEBUG)

    requests_log = logging.getLogger("requests.packages.urllib3")
    requests_log.setLevel(logging.DEBUG)
    requests_log.propagate = True

    # shadow http.client prints to log()
    # https://stackoverflow.com/a/16337639
    def print_as_log(*args: Any) -> None:
        httpclient_log.log(logging.DEBUG, " ".join(args))

    setattr(client, "print", print_as_log)

    if not mask_credentials:
        return

    # Replace the root logger's handlers with one that masks the token.
    token = self.private_token or self.oauth_token or self.job_token
    handler = logging.StreamHandler()
    handler.setFormatter(utils.MaskingFormatter(masked=token))
    logger.handlers.clear()
    logger.addHandler(handler)
def _get_session_opts(self) -> Dict[str, Any]:
    """Assemble the per-request options shared by outgoing HTTP calls."""
    opts = dict(
        headers=self.headers.copy(),
        auth=self._auth,
        timeout=self.timeout,
        verify=self.ssl_verify,
    )
    return opts
def _build_url(self, path: str) -> str:
    """Return the full URL for *path*.

    A path that is already an absolute http(s) URL is returned unchanged;
    anything else is appended to the stored API base URL.
    """
    if path.startswith(("http://", "https://")):
        return path
    return f"{self._url}{path}"
def _check_url(self, url: Optional[str], *, path: str = "api") -> Optional[str]:
    """
    Checks if ``url`` starts with a different base URL from the user-provided base
    URL and warns the user before returning it. If ``keep_base_url`` is set to
    ``True``, instead returns the URL massaged to match the user-provided base URL.
    """
    if not url or url.startswith(self.url):
        return url

    # Extract everything before the first "/<path>" segment as the
    # server-reported base URL.
    match = re.match(rf"(^.*?)/{path}", url)
    if not match:
        return url

    base_url = match.group(1)
    if self.keep_base_url:
        # Rewrite the server-reported base to the user-provided one.
        return url.replace(base_url, f"{self._base_url}")

    utils.warn(
        message=(
            f"The base URL in the server response differs from the user-provided "
            f"base URL ({self.url} -> {base_url}).\nThis is usually caused by a "
            f"misconfigured base URL on your side or a misconfigured external_url "
            f"on the server side, and can lead to broken pagination and unexpected "
            f"behavior. If this is intentional, use `keep_base_url=True` when "
            f"initializing the Gitlab instance to keep the user-provided base URL."
        ),
        category=UserWarning,
    )
    return url
@staticmethod
def _check_redirects(result: requests.Response) -> None:
    """Raise RedirectError when a non-GET/HEAD request was 301/302-redirected.

    A redirected POST or PUT is re-issued as a GET, which silently changes
    the request semantics — so we fail loudly with a helpful message.
    An empty ``result.history`` means no redirection happened.
    """
    for item in result.history:
        # GET and HEAD methods can be redirected without issue.
        if item.status_code not in (301, 302) or item.request.method in ("GET", "HEAD"):
            continue
        raise gitlab.exceptions.RedirectError(
            REDIRECT_MSG.format(
                status_code=item.status_code,
                reason=item.reason,
                source=item.url,
                target=item.headers.get("location"),
            )
        )
    def http_request(
        self,
        verb: str,
        path: str,
        query_data: Optional[Dict[str, Any]] = None,
        post_data: Optional[Union[Dict[str, Any], bytes, BinaryIO]] = None,
        raw: bool = False,
        streamed: bool = False,
        files: Optional[Dict[str, Any]] = None,
        timeout: Optional[float] = None,
        obey_rate_limit: bool = True,
        retry_transient_errors: Optional[bool] = None,
        max_retries: int = 10,
        **kwargs: Any,
    ) -> requests.Response:
        """Make an HTTP request to the Gitlab server.

        Args:
            verb: The HTTP method to call ('get', 'post', 'put', 'delete')
            path: Path or full URL to query ('/projects' or
                  'http://whatever/v4/api/projects')
            query_data: Data to send as query parameters
            post_data: Data to send in the body (will be converted to
                       json by default)
            raw: If True, do not convert post_data to json
            streamed: Whether the data should be streamed
            files: The files to send to the server
            timeout: The timeout, in seconds, for the request
            obey_rate_limit: Whether to obey 429 Too Many Request
                             responses. Defaults to True.
            retry_transient_errors: Whether to retry after 500, 502, 503, 504
                                    or 52x responses. Defaults to False.
            max_retries: Max retries after 429 or transient errors,
                         set to -1 to retry forever. Defaults to 10.
            **kwargs: Extra options to send to the server (e.g. sudo)

        Returns:
            A requests result object.

        Raises:
            GitlabHttpError: When the return code is not 2xx
        """
        query_data = query_data or {}
        # `path` may already be a full URL; _build_url handles both cases.
        raw_url = self._build_url(path)

        # parse user-provided URL params to ensure we don't add our own duplicates
        parsed = parse.urlparse(raw_url)
        params = parse.parse_qs(parsed.query)
        utils.copy_dict(src=query_data, dest=params)
        # Strip the query string: all parameters are passed via `params` below.
        url = parse.urlunparse(parsed._replace(query=""))

        # Deal with kwargs: by default a user uses kwargs to send data to the
        # gitlab server, but this generates problems (python keyword conflicts
        # and python-gitlab/gitlab conflicts).
        # So we provide a `query_parameters` key: if it's there we use its dict
        # value as arguments for the gitlab server, and ignore the other
        # arguments, except pagination ones (per_page and page)
        if "query_parameters" in kwargs:
            utils.copy_dict(src=kwargs["query_parameters"], dest=params)
            for arg in ("per_page", "page"):
                if arg in kwargs:
                    params[arg] = kwargs[arg]
        else:
            utils.copy_dict(src=kwargs, dest=params)

        opts = self._get_session_opts()

        # `verify` and `timeout` are passed explicitly to the backend,
        # so pop them out of the session options to avoid duplicates.
        verify = opts.pop("verify")
        opts_timeout = opts.pop("timeout")
        # If timeout was passed into kwargs, allow it to override the default
        if timeout is None:
            timeout = opts_timeout
        if retry_transient_errors is None:
            # Fall back to the instance-wide setting.
            retry_transient_errors = self.retry_transient_errors

        # We need to deal with json vs. data when uploading files
        send_data = self._backend.prepare_send_data(files, post_data, raw)
        opts["headers"]["Content-type"] = send_data.content_type

        retry = utils.Retry(
            max_retries=max_retries,
            obey_rate_limit=obey_rate_limit,
            retry_transient_errors=retry_transient_errors,
        )

        # Retry loop: exits by returning a 2xx response or raising.
        while True:
            try:
                result = self._backend.http_request(
                    method=verb,
                    url=url,
                    json=send_data.json,
                    data=send_data.data,
                    params=params,
                    timeout=timeout,
                    verify=verify,
                    stream=streamed,
                    **opts,
                )
            except (requests.ConnectionError, requests.exceptions.ChunkedEncodingError):
                # Connection-level failures may be retried (transient errors).
                if retry.handle_retry():
                    continue
                raise

            self._check_redirects(result.response)

            if 200 <= result.status_code < 300:
                return result.response

            # Rate-limit (429) and transient server errors may be retried.
            if retry.handle_retry_on_status(
                result.status_code, result.headers, result.reason
            ):
                continue

            # Prefer the server-provided "message"/"error" JSON field, but
            # fall back to the raw body if the payload is not JSON.
            error_message = result.content
            try:
                error_json = result.json()
                for k in ("message", "error"):
                    if k in error_json:
                        error_message = error_json[k]
            except (KeyError, ValueError, TypeError):
                pass

            if result.status_code == 401:
                raise gitlab.exceptions.GitlabAuthenticationError(
                    response_code=result.status_code,
                    error_message=error_message,
                    response_body=result.content,
                )

            raise gitlab.exceptions.GitlabHttpError(
                response_code=result.status_code,
                error_message=error_message,
                response_body=result.content,
            )
def http_get(
self,
path: str,
query_data: Optional[Dict[str, Any]] = None,
streamed: bool = False,
raw: bool = False,
**kwargs: Any,
) -> Union[Dict[str, Any], requests.Response]:
"""Make a GET request to the Gitlab server.
Args:
path: Path or full URL to query ('/projects' or
'http://whatever/v4/api/projecs')
query_data: Data to send as query parameters
streamed: Whether the data should be streamed
raw: If True do not try to parse the output as json
**kwargs: Extra options to send to the server (e.g. sudo)
Returns:
A requests result object is streamed is True or the content type is
not json.
The parsed json data otherwise.
Raises:
GitlabHttpError: When the return code is not 2xx
GitlabParsingError: If the json data could not be parsed
"""
query_data = query_data or {}
result = self.http_request(
"get", path, query_data=query_data, streamed=streamed, **kwargs
)
content_type = utils.get_content_type(result.headers.get("Content-Type"))
if content_type == "application/json" and not streamed and not raw:
try:
json_result = result.json()
if TYPE_CHECKING:
assert isinstance(json_result, dict)
return json_result
except Exception as e:
raise gitlab.exceptions.GitlabParsingError(
error_message="Failed to parse the server message"
) from e
else:
return result
def http_head(
self, path: str, query_data: Optional[Dict[str, Any]] = None, **kwargs: Any
) -> "requests.structures.CaseInsensitiveDict[Any]":
"""Make a HEAD request to the Gitlab server.
Args:
path: Path or full URL to query ('/projects' or
'http://whatever/v4/api/projecs')
query_data: Data to send as query parameters
**kwargs: Extra options to send to the server (e.g. sudo, page,
per_page)
Returns:
A requests.header object
Raises:
GitlabHttpError: When the return code is not 2xx
"""
query_data = query_data or {}
result = self.http_request("head", path, query_data=query_data, **kwargs)
return result.headers
def http_list(
self,
path: str,
query_data: Optional[Dict[str, Any]] = None,
*,
iterator: Optional[bool] = None,
message_details: Optional[utils.WarnMessageData] = None,
**kwargs: Any,
) -> Union["GitlabList", List[Dict[str, Any]]]:
"""Make a GET request to the Gitlab server for list-oriented queries.
Args:
path: Path or full URL to query ('/projects' or
'http://whatever/v4/api/projects')
query_data: Data to send as query parameters
iterator: Indicate if should return a generator (True)
**kwargs: Extra options to send to the server (e.g. sudo, page,
per_page)
Returns:
A list of the objects returned by the server. If `iterator` is
True and no pagination-related arguments (`page`, `per_page`,
`get_all`) are defined then a GitlabList object (generator) is returned
instead. This object will make API calls when needed to fetch the
next items from the server.
Raises:
GitlabHttpError: When the return code is not 2xx
GitlabParsingError: If the json data could not be parsed
"""
query_data = query_data or {}
# Provide a `get_all`` param to avoid clashes with `all` API attributes.
get_all = kwargs.pop("get_all", None)
if get_all is None:
# For now, keep `all` without deprecation.
get_all = kwargs.pop("all", None)
url = self._build_url(path)
page = kwargs.get("page")
if iterator and page is not None:
arg_used_message = f"iterator={iterator}"
utils.warn(
message=(
f"`{arg_used_message}` and `page={page}` were both specified. "
f"`{arg_used_message}` will be ignored and a `list` will be "
f"returned."
),
category=UserWarning,
)
if iterator and page is None:
# Generator requested
return GitlabList(self, url, query_data, **kwargs)
if get_all is True:
return list(GitlabList(self, url, query_data, **kwargs))
# pagination requested, we return a list
gl_list = GitlabList(self, url, query_data, get_next=False, **kwargs)
items = list(gl_list)
def should_emit_warning() -> bool:
# No warning is emitted if any of the following conditions apply:
# * `get_all=False` was set in the `list()` call.
# * `page` was set in the `list()` call.
# * GitLab did not return the `x-per-page` header.
# * Number of items received is less than per-page value.
# * Number of items received is >= total available.
if get_all is False:
return False
if page is not None:
return False
if gl_list.per_page is None:
return False
if len(items) < gl_list.per_page:
return False
if gl_list.total is not None and len(items) >= gl_list.total:
return False
return True
if not should_emit_warning():
return items
# Warn the user that they are only going to retrieve `per_page`
# maximum items. This is a common cause of issues filed.
total_items = "many" if gl_list.total is None else gl_list.total
if message_details is not None:
message = message_details.message.format_map(
{
"len_items": len(items),
"per_page": gl_list.per_page,
"total_items": total_items,
}
)
show_caller = message_details.show_caller
else:
message = (
f"Calling a `list()` method without specifying `get_all=True` or "
f"`iterator=True` will return a maximum of {gl_list.per_page} items. "
f"Your query returned {len(items)} of {total_items} items. See "
f"{_PAGINATION_URL} for more details. If this was done intentionally, "
f"then this warning can be supressed by adding the argument "
f"`get_all=False` to the `list()` call."
)
show_caller = True
utils.warn(
message=message,
category=UserWarning,
show_caller=show_caller,
)
return items
def http_post(
self,
path: str,
query_data: Optional[Dict[str, Any]] = None,
post_data: Optional[Dict[str, Any]] = None,
raw: bool = False,
files: Optional[Dict[str, Any]] = None,
**kwargs: Any,
) -> Union[Dict[str, Any], requests.Response]:
"""Make a POST request to the Gitlab server.
Args:
path: Path or full URL to query ('/projects' or
'http://whatever/v4/api/projecs')
query_data: Data to send as query parameters
post_data: Data to send in the body (will be converted to
json by default)
raw: If True, do not convert post_data to json
files: The files to send to the server
**kwargs: Extra options to send to the server (e.g. sudo)
Returns:
The parsed json returned by the server if json is return, else the
raw content
Raises:
GitlabHttpError: When the return code is not 2xx
GitlabParsingError: If the json data could not be parsed
"""
query_data = query_data or {}
post_data = post_data or {}
result = self.http_request(
"post",
path,
query_data=query_data,
post_data=post_data,
files=files,
raw=raw,
**kwargs,
)
content_type = utils.get_content_type(result.headers.get("Content-Type"))
try:
if content_type == "application/json":
json_result = result.json()
if TYPE_CHECKING:
assert isinstance(json_result, dict)
return json_result
except Exception as e:
raise gitlab.exceptions.GitlabParsingError(
error_message="Failed to parse the server message"
) from e
return result
def http_put(
self,
path: str,
query_data: Optional[Dict[str, Any]] = None,
post_data: Optional[Union[Dict[str, Any], bytes, BinaryIO]] = None,
raw: bool = False,
files: Optional[Dict[str, Any]] = None,
**kwargs: Any,
) -> Union[Dict[str, Any], requests.Response]:
"""Make a PUT request to the Gitlab server.
Args:
path: Path or full URL to query ('/projects' or
'http://whatever/v4/api/projecs')
query_data: Data to send as query parameters
post_data: Data to send in the body (will be converted to
json by default)
raw: If True, do not convert post_data to json
files: The files to send to the server
**kwargs: Extra options to send to the server (e.g. sudo)
Returns:
The parsed json returned by the server.
Raises:
GitlabHttpError: When the return code is not 2xx
GitlabParsingError: If the json data could not be parsed
"""
query_data = query_data or {}
post_data = post_data or {}
result = self.http_request(
"put",
path,
query_data=query_data,
post_data=post_data,
files=files,
raw=raw,
**kwargs,
)
if result.status_code in gitlab.const.NO_JSON_RESPONSE_CODES:
return result
try:
json_result = result.json()
if TYPE_CHECKING:
assert isinstance(json_result, dict)
return json_result
except Exception as e:
raise gitlab.exceptions.GitlabParsingError(
error_message="Failed to parse the server message"
) from e
def http_patch(
self,
path: str,
*,
query_data: Optional[Dict[str, Any]] = None,
post_data: Optional[Union[Dict[str, Any], bytes]] = None,
raw: bool = False,
**kwargs: Any,
) -> Union[Dict[str, Any], requests.Response]:
"""Make a PATCH request to the Gitlab server.
Args:
path: Path or full URL to query ('/projects' or
'http://whatever/v4/api/projecs')
query_data: Data to send as query parameters
post_data: Data to send in the body (will be converted to
json by default)
raw: If True, do not convert post_data to json
**kwargs: Extra options to send to the server (e.g. sudo)
Returns:
The parsed json returned by the server.
Raises:
GitlabHttpError: When the return code is not 2xx
GitlabParsingError: If the json data could not be parsed
"""
query_data = query_data or {}
post_data = post_data or {}
result = self.http_request(
"patch",
path,
query_data=query_data,
post_data=post_data,
raw=raw,
**kwargs,
)
if result.status_code in gitlab.const.NO_JSON_RESPONSE_CODES:
return result
try:
json_result = result.json()
if TYPE_CHECKING:
assert isinstance(json_result, dict)
return json_result
except Exception as e:
raise gitlab.exceptions.GitlabParsingError(
error_message="Failed to parse the server message"
) from e
def http_delete(self, path: str, **kwargs: Any) -> requests.Response:
"""Make a DELETE request to the Gitlab server.
Args:
path: Path or full URL to query ('/projects' or
'http://whatever/v4/api/projecs')
**kwargs: Extra options to send to the server (e.g. sudo)
Returns:
The requests object.
Raises:
GitlabHttpError: When the return code is not 2xx
"""
return self.http_request("delete", path, **kwargs)
@gitlab.exceptions.on_http_error(gitlab.exceptions.GitlabSearchError)
def search(
self, scope: str, search: str, **kwargs: Any
) -> Union["GitlabList", List[Dict[str, Any]]]:
"""Search GitLab resources matching the provided string.'
Args:
scope: Scope of the search
search: Search string
**kwargs: Extra options to send to the server (e.g. sudo)
Raises:
GitlabAuthenticationError: If authentication is not correct
GitlabSearchError: If the server failed to perform the request
Returns:
A list of dicts describing the resources found.
"""
data = {"scope": scope, "search": search}
return self.http_list("/search", query_data=data, **kwargs)
class GitlabList:
    """Generator representing a list of remote objects.

    The object handles the links returned by a query to the API, and will call
    the API again when needed.
    """

    def __init__(
        self,
        gl: Gitlab,
        url: str,
        query_data: Dict[str, Any],
        get_next: bool = True,
        **kwargs: Any,
    ) -> None:
        """Run the initial query and remember the options needed to fetch
        subsequent pages.

        Args:
            gl: The Gitlab instance used to issue requests.
            url: URL of the first page.
            query_data: Query parameters for the first request.
            get_next: If False, iteration stops at the end of the first page.
            **kwargs: Extra options forwarded to every page request.
        """
        self._gl = gl

        # Preserve kwargs for subsequent queries
        self._kwargs = kwargs.copy()

        self._query(url, query_data, **self._kwargs)
        self._get_next = get_next

        # Remove query_parameters from kwargs, which are saved via the `next` URL
        self._kwargs.pop("query_parameters", None)

    def _query(
        self, url: str, query_data: Optional[Dict[str, Any]] = None, **kwargs: Any
    ) -> None:
        """Fetch one page and update pagination state from the response.

        Stores the parsed items in ``self._data`` and resets the iteration
        cursor.

        Raises:
            GitlabParsingError: If the response body is not valid JSON.
        """
        query_data = query_data or {}
        result = self._gl.http_request("get", url, query_data=query_data, **kwargs)
        try:
            # RFC 5988 "Link" header: URL of the next page, if any.
            next_url = result.links["next"]["url"]
        except KeyError:
            next_url = None

        # NOTE(review): sanity-check the next URL against the configured base.
        self._next_url = self._gl._check_url(next_url)
        # Pagination metadata from GitLab's response headers (all strings).
        self._current_page: Optional[str] = result.headers.get("X-Page")
        self._prev_page: Optional[str] = result.headers.get("X-Prev-Page")
        self._next_page: Optional[str] = result.headers.get("X-Next-Page")
        self._per_page: Optional[str] = result.headers.get("X-Per-Page")
        self._total_pages: Optional[str] = result.headers.get("X-Total-Pages")
        self._total: Optional[str] = result.headers.get("X-Total")

        try:
            self._data: List[Dict[str, Any]] = result.json()
        except Exception as e:
            raise gitlab.exceptions.GitlabParsingError(
                error_message="Failed to parse the server message"
            ) from e

        self._current = 0

    @property
    def current_page(self) -> int:
        """The current page number."""
        if TYPE_CHECKING:
            assert self._current_page is not None
        return int(self._current_page)

    @property
    def prev_page(self) -> Optional[int]:
        """The previous page number.

        If None, the current page is the first.
        """
        return int(self._prev_page) if self._prev_page else None

    @property
    def next_page(self) -> Optional[int]:
        """The next page number.

        If None, the current page is the last.
        """
        return int(self._next_page) if self._next_page else None

    @property
    def per_page(self) -> Optional[int]:
        """The number of items per page."""
        return int(self._per_page) if self._per_page is not None else None

    # NOTE(jlvillal): When a query returns more than 10,000 items, GitLab doesn't return
    # the headers 'x-total-pages' and 'x-total'. In those cases we return None.
    # https://docs.gitlab.com/ee/user/gitlab_com/index.html#pagination-response-headers
    @property
    def total_pages(self) -> Optional[int]:
        """The total number of pages."""
        if self._total_pages is not None:
            return int(self._total_pages)
        return None

    @property
    def total(self) -> Optional[int]:
        """The total number of items."""
        if self._total is not None:
            return int(self._total)
        return None

    def __iter__(self) -> "GitlabList":
        return self

    def __len__(self) -> int:
        # NOTE(review): returns 0 when GitLab omitted the X-Total header
        # (queries over 10,000 items), which can under-report the length.
        if self._total is None:
            return 0
        return int(self._total)

    def __next__(self) -> Dict[str, Any]:
        return self.next()

    def next(self) -> Dict[str, Any]:
        """Return the next item, transparently fetching the next page when
        the current one is exhausted (unless ``get_next`` was False)."""
        try:
            item = self._data[self._current]
            self._current += 1
            return item
        except IndexError:
            pass

        if self._next_url and self._get_next is True:
            self._query(self._next_url, **self._kwargs)
            return self.next()

        raise StopIteration
class GraphQL:
    """Client for GitLab's GraphQL API, backed by ``gql`` over ``httpx``.

    Requires the optional ``python-gitlab[graphql]`` dependencies.
    """

    def __init__(
        self,
        url: Optional[str] = None,
        *,
        token: Optional[str] = None,
        ssl_verify: Union[bool, str] = True,
        client: Optional[httpx.Client] = None,
        timeout: Optional[float] = None,
        user_agent: str = gitlab.const.USER_AGENT,
        fetch_schema_from_transport: bool = False,
        max_retries: int = 10,
        obey_rate_limit: bool = True,
        retry_transient_errors: bool = False,
    ) -> None:
        """Create a GraphQL client.

        Args:
            url: Base GitLab URL (defaults to gitlab.com when omitted).
            token: Personal/OAuth token sent as a Bearer token.
            ssl_verify: Whether to verify SSL certs, or path to a CA bundle.
            client: Pre-configured httpx.Client to use instead of a new one.
            timeout: Request timeout in seconds.
            user_agent: User-Agent header value.
            fetch_schema_from_transport: Have gql introspect the schema.
            max_retries: Max retries after 429/transient errors (-1: forever).
            obey_rate_limit: Whether to honor 429 responses.
            retry_transient_errors: Whether to retry 50x responses.

        Raises:
            ImportError: If the optional gql dependencies are missing.
        """
        if not _GQL_INSTALLED:
            raise ImportError(
                "The GraphQL client could not be initialized because "
                "the gql dependencies are not installed. "
                "Install them with 'pip install python-gitlab[graphql]'"
            )
        self._base_url = utils.get_base_url(url)
        self._timeout = timeout
        self._token = token
        self._url = f"{self._base_url}/api/graphql"
        self._user_agent = user_agent
        self._ssl_verify = ssl_verify
        self._max_retries = max_retries
        self._obey_rate_limit = obey_rate_limit
        self._retry_transient_errors = retry_transient_errors

        opts = self._get_client_opts()
        # Caller-provided client wins; otherwise build our own httpx client.
        self._http_client = client or httpx.Client(**opts)
        self._transport = GitlabTransport(self._url, client=self._http_client)
        self._client = gql.Client(
            transport=self._transport,
            fetch_schema_from_transport=fetch_schema_from_transport,
        )
        self._gql = gql.gql

    def __enter__(self) -> "GraphQL":
        return self

    def __exit__(self, *args: Any) -> None:
        # Close the underlying HTTP client (even if it was caller-provided).
        self._http_client.close()

    def _get_client_opts(self) -> Dict[str, Any]:
        """Build the keyword arguments used to construct the httpx client."""
        headers = {"User-Agent": self._user_agent}

        if self._token:
            headers["Authorization"] = f"Bearer {self._token}"

        return {
            "headers": headers,
            "timeout": self._timeout,
            "verify": self._ssl_verify,
        }

    def execute(
        self, request: Union[str, graphql.Source], *args: Any, **kwargs: Any
    ) -> Any:
        """Execute a GraphQL request, retrying on rate limits and transient
        server errors per the constructor settings.

        Raises:
            GitlabAuthenticationError: On a 401 response.
            GitlabHttpError: On any other non-retryable server error.
        """
        parsed_document = self._gql(request)
        retry = utils.Retry(
            max_retries=self._max_retries,
            obey_rate_limit=self._obey_rate_limit,
            retry_transient_errors=self._retry_transient_errors,
        )

        while True:
            try:
                result = self._client.execute(parsed_document, *args, **kwargs)
            except gql.transport.exceptions.TransportServerError as e:
                if retry.handle_retry_on_status(
                    status_code=e.code, headers=self._transport.response_headers
                ):
                    continue

                if e.code == 401:
                    raise gitlab.exceptions.GitlabAuthenticationError(
                        response_code=e.code,
                        error_message=str(e),
                    )

                raise gitlab.exceptions.GitlabHttpError(
                    response_code=e.code,
                    error_message=str(e),
                )

            return result
| 51,217 | Python | .py | 1,179 | 32.547074 | 88 | 0.594066 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,051 | config.py | python-gitlab_python-gitlab/gitlab/config.py | import configparser
import os
import shlex
import subprocess
from os.path import expanduser, expandvars
from pathlib import Path
from typing import List, Optional, Union
from gitlab.const import USER_AGENT
_DEFAULT_FILES: List[str] = [
"/etc/python-gitlab.cfg",
str(Path.home() / ".python-gitlab.cfg"),
]
HELPER_PREFIX = "helper:"
HELPER_ATTRIBUTES = ["job_token", "http_password", "private_token", "oauth_token"]
_CONFIG_PARSER_ERRORS = (configparser.NoOptionError, configparser.NoSectionError)
def _resolve_file(filepath: Union[Path, str]) -> str:
resolved = Path(filepath).resolve(strict=True)
return str(resolved)
def _get_config_files(
    config_files: Optional[List[str]] = None,
) -> Union[str, List[str]]:
    """
    Return resolved path(s) to config files if they exist, with precedence:
    1. Files passed in config_files
    2. File defined in PYTHON_GITLAB_CFG
    3. User- and system-wide config files
    """
    # Explicitly requested files must all exist; fail on the first that
    # cannot be resolved.
    if config_files:
        resolved_files = []
        for config_file in config_files:
            try:
                resolved_files.append(_resolve_file(config_file))
            except OSError as e:
                raise GitlabConfigMissingError(
                    f"Cannot read config from file: {e}"
                ) from e
        return resolved_files

    # A file named via the environment must exist when the variable is set.
    env_config = os.environ.get("PYTHON_GITLAB_CFG")
    if env_config is not None:
        try:
            return _resolve_file(env_config)
        except OSError as e:
            raise GitlabConfigMissingError(
                f"Cannot read config from PYTHON_GITLAB_CFG: {e}"
            ) from e

    # Default locations are best-effort: silently skip missing files.
    existing_defaults = []
    for config_file in _DEFAULT_FILES:
        try:
            existing_defaults.append(_resolve_file(config_file))
        except OSError:
            continue
    return existing_defaults
class ConfigError(Exception):
    """Base exception for errors raised while loading python-gitlab config."""

    pass
class GitlabIDError(ConfigError):
    """No gitlab id was provided and none could be found in the config."""

    pass
class GitlabDataError(ConfigError):
    """The configuration data is missing or invalid for the given gitlab id."""

    pass
class GitlabConfigMissingError(ConfigError):
    """A referenced configuration file could not be found or read."""

    pass
class GitlabConfigHelperError(ConfigError):
    """An external credential helper program failed to produce a value."""

    pass
class GitlabConfigParser:
    """Parses python-gitlab configuration files for a given gitlab id.

    Values defined in the ``[global]`` section act as defaults and are
    overridden by the ``[<gitlab_id>]`` section where both are read.
    """

    def __init__(
        self, gitlab_id: Optional[str] = None, config_files: Optional[List[str]] = None
    ) -> None:
        """Initialize defaults, locate config files and parse them.

        Args:
            gitlab_id: Name of the config section to use; falls back to the
                ``default`` key of the ``[global]`` section when omitted.
            config_files: Explicit list of config file paths to read.

        Raises:
            GitlabConfigMissingError: If a gitlab id was given but no config
                file could be found.
        """
        self.gitlab_id = gitlab_id
        # Credential and connection defaults; overridden by parsed config.
        self.http_username: Optional[str] = None
        self.http_password: Optional[str] = None
        self.job_token: Optional[str] = None
        self.oauth_token: Optional[str] = None
        self.private_token: Optional[str] = None

        self.api_version: str = "4"
        self.order_by: Optional[str] = None
        self.pagination: Optional[str] = None
        self.per_page: Optional[int] = None
        self.retry_transient_errors: bool = False
        self.ssl_verify: Union[bool, str] = True
        self.timeout: int = 60
        self.url: Optional[str] = None
        self.user_agent: str = USER_AGENT
        self.keep_base_url: bool = False

        self._files = _get_config_files(config_files)
        if self._files:
            self._parse_config()

        if self.gitlab_id and not self._files:
            raise GitlabConfigMissingError(
                f"A gitlab id was provided ({self.gitlab_id}) but no config file found"
            )

    def _parse_config(self) -> None:
        """Read all located config files and populate instance attributes.

        For each option, the ``[global]`` value is read first and then
        overridden by the id-specific section if present. Missing options
        are silently skipped (``_CONFIG_PARSER_ERRORS``).

        Raises:
            GitlabIDError: If no gitlab id could be determined.
            GitlabDataError: If the section or its ``url`` is missing, or a
                value is out of range.
        """
        _config = configparser.ConfigParser()
        _config.read(self._files, encoding="utf-8")

        if self.gitlab_id and not _config.has_section(self.gitlab_id):
            raise GitlabDataError(
                f"A gitlab id was provided ({self.gitlab_id}) "
                "but no config section found"
            )

        if self.gitlab_id is None:
            try:
                self.gitlab_id = _config.get("global", "default")
            except Exception as e:
                raise GitlabIDError(
                    "Impossible to get the gitlab id (not specified in config file)"
                ) from e

        try:
            self.url = _config.get(self.gitlab_id, "url")
        except Exception as e:
            raise GitlabDataError(
                "Impossible to get gitlab details from "
                f"configuration ({self.gitlab_id})"
            ) from e

        try:
            self.ssl_verify = _config.getboolean("global", "ssl_verify")
        except ValueError:
            # Value Error means the option exists but isn't a boolean.
            # Get as a string instead as it should then be a local path to a
            # CA bundle.
            self.ssl_verify = _config.get("global", "ssl_verify")
        except _CONFIG_PARSER_ERRORS:
            pass
        try:
            self.ssl_verify = _config.getboolean(self.gitlab_id, "ssl_verify")
        except ValueError:
            # Value Error means the option exists but isn't a boolean.
            # Get as a string instead as it should then be a local path to a
            # CA bundle.
            self.ssl_verify = _config.get(self.gitlab_id, "ssl_verify")
        except _CONFIG_PARSER_ERRORS:
            pass

        try:
            self.timeout = _config.getint("global", "timeout")
        except _CONFIG_PARSER_ERRORS:
            pass
        try:
            self.timeout = _config.getint(self.gitlab_id, "timeout")
        except _CONFIG_PARSER_ERRORS:
            pass

        # Tokens are only read from the id-specific section, never [global].
        try:
            self.private_token = _config.get(self.gitlab_id, "private_token")
        except _CONFIG_PARSER_ERRORS:
            pass

        try:
            self.oauth_token = _config.get(self.gitlab_id, "oauth_token")
        except _CONFIG_PARSER_ERRORS:
            pass

        try:
            self.job_token = _config.get(self.gitlab_id, "job_token")
        except _CONFIG_PARSER_ERRORS:
            pass

        try:
            self.http_username = _config.get(self.gitlab_id, "http_username")
            self.http_password = _config.get(
                self.gitlab_id, "http_password"
            )  # pragma: no cover
        except _CONFIG_PARSER_ERRORS:
            pass

        # Credential values may point at external helper programs.
        self._get_values_from_helper()

        try:
            self.api_version = _config.get("global", "api_version")
        except _CONFIG_PARSER_ERRORS:
            pass
        try:
            self.api_version = _config.get(self.gitlab_id, "api_version")
        except _CONFIG_PARSER_ERRORS:
            pass
        if self.api_version not in ("4",):
            raise GitlabDataError(f"Unsupported API version: {self.api_version}")

        for section in ["global", self.gitlab_id]:
            try:
                self.per_page = _config.getint(section, "per_page")
            except _CONFIG_PARSER_ERRORS:
                pass
        if self.per_page is not None and not 0 <= self.per_page <= 100:
            raise GitlabDataError(f"Unsupported per_page number: {self.per_page}")

        try:
            self.pagination = _config.get(self.gitlab_id, "pagination")
        except _CONFIG_PARSER_ERRORS:
            pass

        try:
            self.order_by = _config.get(self.gitlab_id, "order_by")
        except _CONFIG_PARSER_ERRORS:
            pass

        try:
            self.user_agent = _config.get("global", "user_agent")
        except _CONFIG_PARSER_ERRORS:
            pass
        try:
            self.user_agent = _config.get(self.gitlab_id, "user_agent")
        except _CONFIG_PARSER_ERRORS:
            pass

        try:
            self.keep_base_url = _config.getboolean("global", "keep_base_url")
        except _CONFIG_PARSER_ERRORS:
            pass
        try:
            self.keep_base_url = _config.getboolean(self.gitlab_id, "keep_base_url")
        except _CONFIG_PARSER_ERRORS:
            pass

        try:
            self.retry_transient_errors = _config.getboolean(
                "global", "retry_transient_errors"
            )
        except _CONFIG_PARSER_ERRORS:
            pass
        try:
            self.retry_transient_errors = _config.getboolean(
                self.gitlab_id, "retry_transient_errors"
            )
        except _CONFIG_PARSER_ERRORS:
            pass

    def _get_values_from_helper(self) -> None:
        """Update attributes that may get values from an external helper program

        Attributes listed in ``HELPER_ATTRIBUTES`` whose value starts with
        ``helper:`` are replaced by the stripped stdout of the named command.

        Raises:
            GitlabConfigHelperError: If the helper program exits non-zero.
        """
        for attr in HELPER_ATTRIBUTES:
            value = getattr(self, attr)
            if not isinstance(value, str):
                continue

            if not value.lower().strip().startswith(HELPER_PREFIX):
                continue

            helper = value[len(HELPER_PREFIX) :].strip()
            # Expand ~ and environment variables in each command token.
            commmand = [expanduser(expandvars(token)) for token in shlex.split(helper)]

            try:
                value = (
                    subprocess.check_output(commmand, stderr=subprocess.PIPE)
                    .decode("utf-8")
                    .strip()
                )
            except subprocess.CalledProcessError as e:
                stderr = e.stderr.decode().strip()
                raise GitlabConfigHelperError(
                    f"Failed to read {attr} value from helper "
                    f"for {self.gitlab_id}:\n{stderr}"
                ) from e

            setattr(self, attr, value)
| 9,088 | Python | .py | 235 | 28.225532 | 87 | 0.584934 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,052 | mixins.py | python-gitlab_python-gitlab/gitlab/mixins.py | import enum
from types import ModuleType
from typing import (
Any,
Callable,
Dict,
Iterator,
List,
Optional,
Tuple,
Type,
TYPE_CHECKING,
Union,
)
import requests
import gitlab
from gitlab import base, cli
from gitlab import exceptions as exc
from gitlab import utils
# Public names re-exported by ``from gitlab.mixins import *``.
__all__ = [
    "GetMixin",
    "GetWithoutIdMixin",
    "RefreshMixin",
    "ListMixin",
    "RetrieveMixin",
    "CreateMixin",
    "UpdateMixin",
    "SetMixin",
    "DeleteMixin",
    "CRUDMixin",
    "NoUpdateMixin",
    "SaveMixin",
    "ObjectDeleteMixin",
    "UserAgentDetailMixin",
    "AccessRequestMixin",
    "DownloadMixin",
    "SubscribableMixin",
    "TodoMixin",
    "TimeTrackingMixin",
    "ParticipantsMixin",
    "BadgeRenderMixin",
]

if TYPE_CHECKING:
    # When running mypy we use these as the base classes
    _RestManagerBase = base.RESTManager
    _RestObjectBase = base.RESTObject
else:
    # At runtime the mixins derive from plain ``object`` so they can be
    # combined freely with the real REST base classes via the MRO.
    _RestManagerBase = object
    _RestObjectBase = object
class HeadMixin(_RestManagerBase):
    @exc.on_http_error(exc.GitlabHeadError)
    def head(
        self, id: Optional[Union[str, int]] = None, **kwargs: Any
    ) -> "requests.structures.CaseInsensitiveDict[Any]":
        """Retrieve only the headers of an endpoint.

        Args:
            id: ID of the object to retrieve
            **kwargs: Extra options to send to the server (e.g. sudo)

        Returns:
            A requests header object.

        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabHeadError: If the server cannot perform the request
        """
        if TYPE_CHECKING:
            assert self.path is not None

        # Point at a specific object when an id is given; URL-encode it to
        # support ids containing slashes (e.g. project paths).
        endpoint = self.path
        if id is not None:
            endpoint = f"{endpoint}/{utils.EncodedId(id)}"

        return self.gitlab.http_head(endpoint, **kwargs)
class GetMixin(HeadMixin, _RestManagerBase):
    _computed_path: Optional[str]
    _from_parent_attrs: Dict[str, Any]
    _obj_cls: Optional[Type[base.RESTObject]]
    _optional_get_attrs: Tuple[str, ...] = ()
    _parent: Optional[base.RESTObject]
    _parent_attrs: Dict[str, Any]
    _path: Optional[str]
    gitlab: gitlab.Gitlab

    @exc.on_http_error(exc.GitlabGetError)
    def get(
        self, id: Union[str, int], lazy: bool = False, **kwargs: Any
    ) -> base.RESTObject:
        """Retrieve a single object.

        Args:
            id: ID of the object to retrieve
            lazy: If True, don't request the server, but create a
                  shallow object giving access to the managers. This is
                  useful if you want to avoid useless calls to the API.
            **kwargs: Extra options to send to the server (e.g. sudo)

        Returns:
            The generated RESTObject.

        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabGetError: If the server cannot perform the request
        """
        # URL-encode string ids (they may contain slashes, e.g. "group/proj").
        if isinstance(id, str):
            id = utils.EncodedId(id)
        path = f"{self.path}/{id}"
        if TYPE_CHECKING:
            assert self._obj_cls is not None
        if lazy is True:
            # Build a shallow object without touching the API: only the id
            # attribute is populated, enough to reach sub-managers.
            if TYPE_CHECKING:
                assert self._obj_cls._id_attr is not None
            return self._obj_cls(self, {self._obj_cls._id_attr: id}, lazy=lazy)
        server_response = self.gitlab.http_get(path, **kwargs)
        if TYPE_CHECKING:
            assert not isinstance(server_response, requests.Response)
        return self._obj_cls(self, server_response, lazy=lazy)
class GetWithoutIdMixin(HeadMixin, _RestManagerBase):
    _computed_path: Optional[str]
    _from_parent_attrs: Dict[str, Any]
    _obj_cls: Optional[Type[base.RESTObject]]
    _optional_get_attrs: Tuple[str, ...] = ()
    _parent: Optional[base.RESTObject]
    _parent_attrs: Dict[str, Any]
    _path: Optional[str]
    gitlab: gitlab.Gitlab

    @exc.on_http_error(exc.GitlabGetError)
    def get(self, **kwargs: Any) -> base.RESTObject:
        """Retrieve the single object held at this manager's path.

        Args:
            **kwargs: Extra options to send to the server (e.g. sudo)

        Returns:
            The generated RESTObject

        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabGetError: If the server cannot perform the request
        """
        if TYPE_CHECKING:
            assert self.path is not None
        server_response = self.gitlab.http_get(self.path, **kwargs)
        if TYPE_CHECKING:
            assert not isinstance(server_response, requests.Response)
            assert self._obj_cls is not None
        return self._obj_cls(self, server_response)
class RefreshMixin(_RestObjectBase):
    _id_attr: Optional[str]
    _attrs: Dict[str, Any]
    _module: ModuleType
    _parent_attrs: Dict[str, Any]
    _updated_attrs: Dict[str, Any]
    manager: base.RESTManager

    @exc.on_http_error(exc.GitlabGetError)
    def refresh(self, **kwargs: Any) -> None:
        """Refresh a single object from server.

        Args:
            **kwargs: Extra options to send to the server (e.g. sudo)

        Returns None (updates the object)

        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabGetError: If the server cannot perform the request
        """
        # Objects without an id attribute live directly at the manager path.
        if not self._id_attr:
            if TYPE_CHECKING:
                assert self.manager.path is not None
            path = self.manager.path
        else:
            path = f"{self.manager.path}/{self.encoded_id}"
        fresh_attrs = self.manager.gitlab.http_get(path, **kwargs)
        if TYPE_CHECKING:
            assert not isinstance(fresh_attrs, requests.Response)
        self._update_attrs(fresh_attrs)
class ListMixin(HeadMixin, _RestManagerBase):
    _computed_path: Optional[str]
    _from_parent_attrs: Dict[str, Any]
    _list_filters: Tuple[str, ...] = ()
    _obj_cls: Optional[Type[base.RESTObject]]
    _parent: Optional[base.RESTObject]
    _parent_attrs: Dict[str, Any]
    _path: Optional[str]
    gitlab: gitlab.Gitlab

    @exc.on_http_error(exc.GitlabListError)
    def list(self, **kwargs: Any) -> Union[base.RESTObjectList, List[base.RESTObject]]:
        """Retrieve a list of objects.

        Args:
            all: If True, return all the items, without pagination
            per_page: Number of items to retrieve per request
            page: ID of the page to return (starts with page 1)
            iterator: If set to True and no pagination option is
                      defined, return a generator instead of a list
            **kwargs: Extra options to send to the server (e.g. sudo)

        Returns:
            The list of objects, or a generator if `iterator` is True

        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabListError: If the server cannot perform the request
        """
        # Convert custom attribute types into their wire representation.
        query, _ = utils._transform_types(
            data=kwargs,
            custom_types=self._types,
            transform_data=True,
            transform_files=False,
        )

        # Fall back to instance-wide pagination defaults when the caller
        # did not specify them explicitly.
        if self.gitlab.per_page:
            query.setdefault("per_page", self.gitlab.per_page)

        # global keyset pagination
        if self.gitlab.pagination:
            query.setdefault("pagination", self.gitlab.pagination)

        if self.gitlab.order_by:
            query.setdefault("order_by", self.gitlab.order_by)

        # Allow to overwrite the path, handy for custom listings
        path = query.pop("path", self.path)

        if TYPE_CHECKING:
            assert self._obj_cls is not None
        raw_result = self.gitlab.http_list(path, **query)
        if isinstance(raw_result, list):
            return [
                self._obj_cls(self, item, created_from_list=True)
                for item in raw_result
            ]
        return base.RESTObjectList(self, self._obj_cls, raw_result)
class RetrieveMixin(ListMixin, GetMixin):
    """Composite mixin for managers that support both ``list()`` and ``get()``."""

    _computed_path: Optional[str]
    _from_parent_attrs: Dict[str, Any]
    _obj_cls: Optional[Type[base.RESTObject]]
    _parent: Optional[base.RESTObject]
    _parent_attrs: Dict[str, Any]
    _path: Optional[str]
    gitlab: gitlab.Gitlab
class CreateMixin(_RestManagerBase):
    _computed_path: Optional[str]
    _from_parent_attrs: Dict[str, Any]
    _obj_cls: Optional[Type[base.RESTObject]]
    _parent: Optional[base.RESTObject]
    _parent_attrs: Dict[str, Any]
    _path: Optional[str]
    gitlab: gitlab.Gitlab

    @exc.on_http_error(exc.GitlabCreateError)
    def create(
        self, data: Optional[Dict[str, Any]] = None, **kwargs: Any
    ) -> base.RESTObject:
        """Create a new object.

        Args:
            data: parameters to send to the server to create the
                resource
            **kwargs: Extra options to send to the server (e.g. sudo)

        Returns:
            A new instance of the managed object class built with
            the data sent by the server

        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabCreateError: If the server cannot perform the request
        """
        payload = {} if data is None else data
        self._create_attrs.validate_attrs(data=payload)
        payload, files = utils._transform_types(
            data=payload, custom_types=self._types, transform_data=False
        )
        # Handle specific URL for creation
        url = kwargs.pop("path", self.path)
        response = self.gitlab.http_post(url, post_data=payload, files=files, **kwargs)
        if TYPE_CHECKING:
            assert not isinstance(response, requests.Response)
            assert self._obj_cls is not None
        return self._obj_cls(self, response)
@enum.unique
class UpdateMethod(enum.IntEnum):
    """HTTP verb a manager uses for update operations.

    Selected via the ``_update_method`` class attribute on UpdateMixin /
    PromoteMixin subclasses.
    """

    PUT = 1
    POST = 2
    PATCH = 3
class UpdateMixin(_RestManagerBase):
    _computed_path: Optional[str]
    _from_parent_attrs: Dict[str, Any]
    _obj_cls: Optional[Type[base.RESTObject]]
    _parent: Optional[base.RESTObject]
    _parent_attrs: Dict[str, Any]
    _path: Optional[str]
    _update_method: UpdateMethod = UpdateMethod.PUT
    gitlab: gitlab.Gitlab

    def _get_update_method(
        self,
    ) -> Callable[..., Union[Dict[str, Any], requests.Response]]:
        """Return the HTTP method to use.

        Returns:
            http_put (default) or http_post
        """
        chosen: Callable[..., Union[Dict[str, Any], requests.Response]]
        if self._update_method is UpdateMethod.PATCH:
            # only patch uses required kwargs, so our types are a bit misaligned
            chosen = self.gitlab.http_patch  # type: ignore[assignment]
        elif self._update_method is UpdateMethod.POST:
            chosen = self.gitlab.http_post
        else:
            chosen = self.gitlab.http_put
        return chosen

    @exc.on_http_error(exc.GitlabUpdateError)
    def update(
        self,
        id: Optional[Union[str, int]] = None,
        new_data: Optional[Dict[str, Any]] = None,
        **kwargs: Any,
    ) -> Dict[str, Any]:
        """Update an object on the server.

        Args:
            id: ID of the object to update (can be None if not required)
            new_data: the update data for the object
            **kwargs: Extra options to send to the server (e.g. sudo)

        Returns:
            The new object data (*not* a RESTObject)

        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabUpdateError: If the server cannot perform the request
        """
        payload = new_data or {}
        path = self.path if id is None else f"{self.path}/{utils.EncodedId(id)}"
        # The id attribute never needs to be part of the update payload.
        excludes = []
        if self._obj_cls is not None and self._obj_cls._id_attr is not None:
            excludes = [self._obj_cls._id_attr]
        self._update_attrs.validate_attrs(data=payload, excludes=excludes)
        payload, files = utils._transform_types(
            data=payload, custom_types=self._types, transform_data=False
        )
        http_method = self._get_update_method()
        result = http_method(path, post_data=payload, files=files, **kwargs)
        if TYPE_CHECKING:
            assert not isinstance(result, requests.Response)
        return result
class SetMixin(_RestManagerBase):
    _computed_path: Optional[str]
    _from_parent_attrs: Dict[str, Any]
    _obj_cls: Optional[Type[base.RESTObject]]
    _parent: Optional[base.RESTObject]
    _parent_attrs: Dict[str, Any]
    _path: Optional[str]
    gitlab: gitlab.Gitlab

    @exc.on_http_error(exc.GitlabSetError)
    def set(self, key: str, value: str, **kwargs: Any) -> base.RESTObject:
        """Create or update the object.

        Args:
            key: The key of the object to create/update
            value: The value to set for the object
            **kwargs: Extra options to send to the server (e.g. sudo)

        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabSetError: If an error occurred

        Returns:
            The created/updated attribute
        """
        endpoint = f"{self.path}/{utils.EncodedId(key)}"
        response = self.gitlab.http_put(endpoint, post_data={"value": value}, **kwargs)
        if TYPE_CHECKING:
            assert not isinstance(response, requests.Response)
            assert self._obj_cls is not None
        return self._obj_cls(self, response)
class DeleteMixin(_RestManagerBase):
    _computed_path: Optional[str]
    _from_parent_attrs: Dict[str, Any]
    _obj_cls: Optional[Type[base.RESTObject]]
    _parent: Optional[base.RESTObject]
    _parent_attrs: Dict[str, Any]
    _path: Optional[str]
    gitlab: gitlab.Gitlab

    @exc.on_http_error(exc.GitlabDeleteError)
    def delete(self, id: Optional[Union[str, int]] = None, **kwargs: Any) -> None:
        """Delete an object on the server.

        Args:
            id: ID of the object to delete
            **kwargs: Extra options to send to the server (e.g. sudo)

        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabDeleteError: If the server cannot perform the request
        """
        # Without an id the manager path itself is the delete target.
        target = self.path if id is None else f"{self.path}/{utils.EncodedId(id)}"
        if TYPE_CHECKING:
            assert target is not None
        self.gitlab.http_delete(target, **kwargs)
class CRUDMixin(GetMixin, ListMixin, CreateMixin, UpdateMixin, DeleteMixin):
    """Composite mixin providing the full create/read/update/delete surface."""

    _computed_path: Optional[str]
    _from_parent_attrs: Dict[str, Any]
    _obj_cls: Optional[Type[base.RESTObject]]
    _parent: Optional[base.RESTObject]
    _parent_attrs: Dict[str, Any]
    _path: Optional[str]
    gitlab: gitlab.Gitlab
class NoUpdateMixin(GetMixin, ListMixin, CreateMixin, DeleteMixin):
    """Composite mixin like CRUDMixin but for resources that cannot be updated."""

    _computed_path: Optional[str]
    _from_parent_attrs: Dict[str, Any]
    _obj_cls: Optional[Type[base.RESTObject]]
    _parent: Optional[base.RESTObject]
    _parent_attrs: Dict[str, Any]
    _path: Optional[str]
    gitlab: gitlab.Gitlab
class SaveMixin(_RestObjectBase):
    """Mixin for RESTObject's that can be updated."""

    _id_attr: Optional[str]
    _attrs: Dict[str, Any]
    _module: ModuleType
    _parent_attrs: Dict[str, Any]
    _updated_attrs: Dict[str, Any]
    manager: base.RESTManager

    def _get_updated_data(self) -> Dict[str, Any]:
        # Required attributes are always sent, no matter if they changed;
        # explicitly updated attributes override them.
        data = {
            attr: getattr(self, attr) for attr in self.manager._update_attrs.required
        }
        data.update(self._updated_attrs)
        return data

    def save(self, **kwargs: Any) -> Optional[Dict[str, Any]]:
        """Save the changes made to the object to the server.

        The object is updated to match what the server returns.

        Args:
            **kwargs: Extra options to send to the server (e.g. sudo)

        Returns:
            The new object data (*not* a RESTObject)

        Raise:
            GitlabAuthenticationError: If authentication is not correct
            GitlabUpdateError: If the server cannot perform the request
        """
        changes = self._get_updated_data()
        # Nothing to update. Server fails if sent an empty dict.
        if not changes:
            return None
        # call the manager
        if TYPE_CHECKING:
            assert isinstance(self.manager, UpdateMixin)
        server_data = self.manager.update(self.encoded_id, changes, **kwargs)
        self._update_attrs(server_data)
        return server_data
class ObjectDeleteMixin(_RestObjectBase):
    """Mixin for RESTObject's that can be deleted."""

    _id_attr: Optional[str]
    _attrs: Dict[str, Any]
    _module: ModuleType
    _parent_attrs: Dict[str, Any]
    _updated_attrs: Dict[str, Any]
    manager: base.RESTManager

    def delete(self, **kwargs: Any) -> None:
        """Delete the object from the server.

        Args:
            **kwargs: Extra options to send to the server (e.g. sudo)

        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabDeleteError: If the server cannot perform the request
        """
        if TYPE_CHECKING:
            assert isinstance(self.manager, DeleteMixin)
            assert self.encoded_id is not None
        # Delegate to the manager, which owns the endpoint path.
        self.manager.delete(self.encoded_id, **kwargs)
class UserAgentDetailMixin(_RestObjectBase):
    _id_attr: Optional[str]
    _attrs: Dict[str, Any]
    _module: ModuleType
    _parent_attrs: Dict[str, Any]
    _updated_attrs: Dict[str, Any]
    manager: base.RESTManager

    @cli.register_custom_action(cls_names=("Snippet", "ProjectSnippet", "ProjectIssue"))
    @exc.on_http_error(exc.GitlabGetError)
    def user_agent_detail(self, **kwargs: Any) -> Dict[str, Any]:
        """Get the user agent detail.

        Args:
            **kwargs: Extra options to send to the server (e.g. sudo)

        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabGetError: If the server cannot perform the request
        """
        detail = self.manager.gitlab.http_get(
            f"{self.manager.path}/{self.encoded_id}/user_agent_detail", **kwargs
        )
        if TYPE_CHECKING:
            assert not isinstance(detail, requests.Response)
        return detail
class AccessRequestMixin(_RestObjectBase):
    _id_attr: Optional[str]
    _attrs: Dict[str, Any]
    _module: ModuleType
    _parent_attrs: Dict[str, Any]
    _updated_attrs: Dict[str, Any]
    manager: base.RESTManager

    @cli.register_custom_action(
        cls_names=("ProjectAccessRequest", "GroupAccessRequest"),
        optional=("access_level",),
    )
    @exc.on_http_error(exc.GitlabUpdateError)
    def approve(
        self, access_level: int = gitlab.const.DEVELOPER_ACCESS, **kwargs: Any
    ) -> None:
        """Approve an access request.

        Args:
            access_level: The access level for the user
            **kwargs: Extra options to send to the server (e.g. sudo)

        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabUpdateError: If the server fails to perform the request
        """
        endpoint = f"{self.manager.path}/{self.encoded_id}/approve"
        response = self.manager.gitlab.http_put(
            endpoint, post_data={"access_level": access_level}, **kwargs
        )
        if TYPE_CHECKING:
            assert not isinstance(response, requests.Response)
        self._update_attrs(response)
class DownloadMixin(_RestObjectBase):
    _id_attr: Optional[str]
    _attrs: Dict[str, Any]
    _module: ModuleType
    _parent_attrs: Dict[str, Any]
    _updated_attrs: Dict[str, Any]
    manager: base.RESTManager

    @cli.register_custom_action(cls_names=("GroupExport", "ProjectExport"))
    @exc.on_http_error(exc.GitlabGetError)
    def download(
        self,
        streamed: bool = False,
        action: Optional[Callable[[bytes], None]] = None,
        chunk_size: int = 1024,
        *,
        iterator: bool = False,
        **kwargs: Any,
    ) -> Optional[Union[bytes, Iterator[Any]]]:
        """Download the archive of a resource export.

        Args:
            streamed: If True the data will be processed by chunks of
                `chunk_size` and each chunk is passed to `action` for
                treatment
            iterator: If True directly return the underlying response
                iterator
            action: Callable responsible of dealing with chunk of
                data
            chunk_size: Size of each chunk
            **kwargs: Extra options to send to the server (e.g. sudo)

        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabGetError: If the server failed to perform the request

        Returns:
            The blob content if streamed is False, None otherwise
        """
        # raw=True: we want the undecoded response body here.
        response = self.manager.gitlab.http_get(
            f"{self.manager.path}/download", streamed=streamed, raw=True, **kwargs
        )
        if TYPE_CHECKING:
            assert isinstance(response, requests.Response)
        return utils.response_content(
            response, streamed, action, chunk_size, iterator=iterator
        )
class RotateMixin(_RestManagerBase):
    _computed_path: Optional[str]
    _from_parent_attrs: Dict[str, Any]
    _obj_cls: Optional[Type[base.RESTObject]]
    _parent: Optional[base.RESTObject]
    _parent_attrs: Dict[str, Any]
    _path: Optional[str]
    gitlab: gitlab.Gitlab

    @exc.on_http_error(exc.GitlabRotateError)
    def rotate(
        self, id: Union[str, int], expires_at: Optional[str] = None, **kwargs: Any
    ) -> Dict[str, Any]:
        """Rotate an access token.

        Args:
            id: ID of the token to rotate
            expires_at: Expiration to set on the rotated token; the value is
                sent as-is to the API. When omitted, the server applies its
                default expiry.
            **kwargs: Extra options to send to the server (e.g. sudo)

        Returns:
            The attributes of the rotated token (*not* a RESTObject)

        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabRotateError: If the server cannot perform the request
        """
        path = f"{self.path}/{utils.EncodedId(id)}/rotate"
        # Only send expires_at when explicitly requested so the server can
        # fall back to its default expiry.
        data: Dict[str, Any] = {} if expires_at is None else {"expires_at": expires_at}
        server_data = self.gitlab.http_post(path, post_data=data, **kwargs)
        if TYPE_CHECKING:
            assert not isinstance(server_data, requests.Response)
        return server_data
class ObjectRotateMixin(_RestObjectBase):
    _id_attr: Optional[str]
    _attrs: Dict[str, Any]
    _module: ModuleType
    _parent_attrs: Dict[str, Any]
    _updated_attrs: Dict[str, Any]
    manager: base.RESTManager

    def rotate(self, **kwargs: Any) -> None:
        """Rotate the current access token object.

        Args:
            **kwargs: Extra options to send to the server (e.g. sudo)

        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabRotateError: If the server cannot perform the request
        """
        if TYPE_CHECKING:
            assert isinstance(self.manager, RotateMixin)
            assert self.encoded_id is not None
        # Rotate through the manager, then sync our attributes with the
        # replacement token returned by the server.
        self._update_attrs(self.manager.rotate(self.encoded_id, **kwargs))
class SubscribableMixin(_RestObjectBase):
    _id_attr: Optional[str]
    _attrs: Dict[str, Any]
    _module: ModuleType
    _parent_attrs: Dict[str, Any]
    _updated_attrs: Dict[str, Any]
    manager: base.RESTManager

    @cli.register_custom_action(
        cls_names=("ProjectIssue", "ProjectMergeRequest", "ProjectLabel", "GroupLabel")
    )
    @exc.on_http_error(exc.GitlabSubscribeError)
    def subscribe(self, **kwargs: Any) -> None:
        """Subscribe to the object notifications.

        Args:
            **kwargs: Extra options to send to the server (e.g. sudo)

        raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabSubscribeError: If the subscription cannot be done
        """
        endpoint = f"{self.manager.path}/{self.encoded_id}/subscribe"
        response = self.manager.gitlab.http_post(endpoint, **kwargs)
        if TYPE_CHECKING:
            assert not isinstance(response, requests.Response)
        self._update_attrs(response)

    @cli.register_custom_action(
        cls_names=("ProjectIssue", "ProjectMergeRequest", "ProjectLabel", "GroupLabel")
    )
    @exc.on_http_error(exc.GitlabUnsubscribeError)
    def unsubscribe(self, **kwargs: Any) -> None:
        """Unsubscribe from the object notifications.

        Args:
            **kwargs: Extra options to send to the server (e.g. sudo)

        raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabUnsubscribeError: If the unsubscription cannot be done
        """
        endpoint = f"{self.manager.path}/{self.encoded_id}/unsubscribe"
        response = self.manager.gitlab.http_post(endpoint, **kwargs)
        if TYPE_CHECKING:
            assert not isinstance(response, requests.Response)
        self._update_attrs(response)
class TodoMixin(_RestObjectBase):
    _id_attr: Optional[str]
    _attrs: Dict[str, Any]
    _module: ModuleType
    _parent_attrs: Dict[str, Any]
    _updated_attrs: Dict[str, Any]
    manager: base.RESTManager

    @cli.register_custom_action(cls_names=("ProjectIssue", "ProjectMergeRequest"))
    @exc.on_http_error(exc.GitlabTodoError)
    def todo(self, **kwargs: Any) -> None:
        """Create a todo associated to the object.

        Args:
            **kwargs: Extra options to send to the server (e.g. sudo)

        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabTodoError: If the todo cannot be set
        """
        # Fire-and-forget: the server response carries nothing we keep.
        self.manager.gitlab.http_post(
            f"{self.manager.path}/{self.encoded_id}/todo", **kwargs
        )
class TimeTrackingMixin(_RestObjectBase):
    """Mixin adding the time-tracking endpoints (estimates and spent time)."""

    _id_attr: Optional[str]
    _attrs: Dict[str, Any]
    _module: ModuleType
    _parent_attrs: Dict[str, Any]
    _updated_attrs: Dict[str, Any]
    manager: base.RESTManager

    @cli.register_custom_action(cls_names=("ProjectIssue", "ProjectMergeRequest"))
    @exc.on_http_error(exc.GitlabTimeTrackingError)
    def time_stats(self, **kwargs: Any) -> Dict[str, Any]:
        """Get time stats for the object.

        Args:
            **kwargs: Extra options to send to the server (e.g. sudo)

        Returns:
            The time stats of the object (*not* a RESTObject)

        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabTimeTrackingError: If the time tracking update cannot be done
        """
        # Use the existing time_stats attribute if it exist, otherwise make an
        # API call
        if "time_stats" in self.attributes:
            time_stats = self.attributes["time_stats"]
            if TYPE_CHECKING:
                assert isinstance(time_stats, dict)
            return time_stats
        path = f"{self.manager.path}/{self.encoded_id}/time_stats"
        result = self.manager.gitlab.http_get(path, **kwargs)
        if TYPE_CHECKING:
            assert not isinstance(result, requests.Response)
        return result

    @cli.register_custom_action(
        cls_names=("ProjectIssue", "ProjectMergeRequest"), required=("duration",)
    )
    @exc.on_http_error(exc.GitlabTimeTrackingError)
    def time_estimate(self, duration: str, **kwargs: Any) -> Dict[str, Any]:
        """Set an estimated time of work for the object.

        Args:
            duration: Duration in human format (e.g. 3h30)
            **kwargs: Extra options to send to the server (e.g. sudo)

        Returns:
            The updated time stats (*not* a RESTObject)

        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabTimeTrackingError: If the time tracking update cannot be done
        """
        path = f"{self.manager.path}/{self.encoded_id}/time_estimate"
        data = {"duration": duration}
        result = self.manager.gitlab.http_post(path, post_data=data, **kwargs)
        if TYPE_CHECKING:
            assert not isinstance(result, requests.Response)
        return result

    @cli.register_custom_action(cls_names=("ProjectIssue", "ProjectMergeRequest"))
    @exc.on_http_error(exc.GitlabTimeTrackingError)
    def reset_time_estimate(self, **kwargs: Any) -> Dict[str, Any]:
        """Resets estimated time for the object to 0 seconds.

        Args:
            **kwargs: Extra options to send to the server (e.g. sudo)

        Returns:
            The updated time stats (*not* a RESTObject)

        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabTimeTrackingError: If the time tracking update cannot be done
        """
        path = f"{self.manager.path}/{self.encoded_id}/reset_time_estimate"
        result = self.manager.gitlab.http_post(path, **kwargs)
        if TYPE_CHECKING:
            assert not isinstance(result, requests.Response)
        return result

    @cli.register_custom_action(
        cls_names=("ProjectIssue", "ProjectMergeRequest"), required=("duration",)
    )
    @exc.on_http_error(exc.GitlabTimeTrackingError)
    def add_spent_time(self, duration: str, **kwargs: Any) -> Dict[str, Any]:
        """Add time spent working on the object.

        Args:
            duration: Duration in human format (e.g. 3h30)
            **kwargs: Extra options to send to the server (e.g. sudo)

        Returns:
            The updated time stats (*not* a RESTObject)

        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabTimeTrackingError: If the time tracking update cannot be done
        """
        path = f"{self.manager.path}/{self.encoded_id}/add_spent_time"
        data = {"duration": duration}
        result = self.manager.gitlab.http_post(path, post_data=data, **kwargs)
        if TYPE_CHECKING:
            assert not isinstance(result, requests.Response)
        return result

    @cli.register_custom_action(cls_names=("ProjectIssue", "ProjectMergeRequest"))
    @exc.on_http_error(exc.GitlabTimeTrackingError)
    def reset_spent_time(self, **kwargs: Any) -> Dict[str, Any]:
        """Resets the time spent working on the object.

        Args:
            **kwargs: Extra options to send to the server (e.g. sudo)

        Returns:
            The updated time stats (*not* a RESTObject)

        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabTimeTrackingError: If the time tracking update cannot be done
        """
        path = f"{self.manager.path}/{self.encoded_id}/reset_spent_time"
        result = self.manager.gitlab.http_post(path, **kwargs)
        if TYPE_CHECKING:
            assert not isinstance(result, requests.Response)
        return result
class ParticipantsMixin(_RestObjectBase):
    _id_attr: Optional[str]
    _attrs: Dict[str, Any]
    _module: ModuleType
    _parent_attrs: Dict[str, Any]
    _updated_attrs: Dict[str, Any]
    manager: base.RESTManager

    @cli.register_custom_action(cls_names=("ProjectMergeRequest", "ProjectIssue"))
    @exc.on_http_error(exc.GitlabListError)
    def participants(
        self, **kwargs: Any
    ) -> Union[gitlab.client.GitlabList, List[Dict[str, Any]]]:
        """List the participants.

        Args:
            all: If True, return all the items, without pagination
            per_page: Number of items to retrieve per request
            page: ID of the page to return (starts with page 1)
            **kwargs: Extra options to send to the server (e.g. sudo)

        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabListError: If the list could not be retrieved

        Returns:
            The list of participants
        """
        people = self.manager.gitlab.http_list(
            f"{self.manager.path}/{self.encoded_id}/participants", **kwargs
        )
        if TYPE_CHECKING:
            assert not isinstance(people, requests.Response)
        return people
class BadgeRenderMixin(_RestManagerBase):
    @cli.register_custom_action(
        cls_names=("GroupBadgeManager", "ProjectBadgeManager"),
        required=("link_url", "image_url"),
    )
    @exc.on_http_error(exc.GitlabRenderError)
    def render(self, link_url: str, image_url: str, **kwargs: Any) -> Dict[str, Any]:
        """Preview link_url and image_url after interpolation.

        Args:
            link_url: URL of the badge link
            image_url: URL of the badge image
            **kwargs: Extra options to send to the server (e.g. sudo)

        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabRenderError: If the rendering failed

        Returns:
            The rendering properties
        """
        params = {"link_url": link_url, "image_url": image_url}
        rendered = self.gitlab.http_get(f"{self.path}/render", params, **kwargs)
        if TYPE_CHECKING:
            assert not isinstance(rendered, requests.Response)
        return rendered
class PromoteMixin(_RestObjectBase):
    _id_attr: Optional[str]
    _attrs: Dict[str, Any]
    _module: ModuleType
    _parent_attrs: Dict[str, Any]
    _updated_attrs: Dict[str, Any]
    _update_method: UpdateMethod = UpdateMethod.PUT
    manager: base.RESTManager

    def _get_update_method(
        self,
    ) -> Callable[..., Union[Dict[str, Any], requests.Response]]:
        """Return the HTTP method to use.

        Returns:
            http_put (default) or http_post
        """
        if self._update_method is UpdateMethod.POST:
            return self.manager.gitlab.http_post
        return self.manager.gitlab.http_put

    @exc.on_http_error(exc.GitlabPromoteError)
    def promote(self, **kwargs: Any) -> Dict[str, Any]:
        """Promote the item.

        Args:
            **kwargs: Extra options to send to the server (e.g. sudo)

        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabPromoteError: If the item could not be promoted
            GitlabParsingError: If the json data could not be parsed

        Returns:
            The updated object data (*not* a RESTObject)
        """
        endpoint = f"{self.manager.path}/{self.encoded_id}/promote"
        promoted = self._get_update_method()(endpoint, **kwargs)
        if TYPE_CHECKING:
            assert not isinstance(promoted, requests.Response)
        return promoted
class UploadMixin(_RestObjectBase):
    _id_attr: Optional[str]
    _attrs: Dict[str, Any]
    _module: ModuleType
    _parent_attrs: Dict[str, Any]
    _updated_attrs: Dict[str, Any]
    _upload_path: str
    manager: base.RESTManager

    def _get_upload_path(self) -> str:
        """Formats _upload_path with object attributes.

        Returns:
            The upload path
        """
        if TYPE_CHECKING:
            assert isinstance(self._upload_path, str)
        return self._upload_path.format(**self.attributes)

    @cli.register_custom_action(
        cls_names=("Project", "ProjectWiki"), required=("filename", "filepath")
    )
    @exc.on_http_error(exc.GitlabUploadError)
    def upload(
        self,
        filename: str,
        filedata: Optional[bytes] = None,
        filepath: Optional[str] = None,
        **kwargs: Any,
    ) -> Dict[str, Any]:
        """Upload the specified file.

        .. note::
            Either ``filedata`` or ``filepath`` *MUST* be specified.

        Args:
            filename: The name of the file being uploaded
            filedata: The raw data of the file being uploaded
            filepath: The path to a local file to upload (optional)

        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabUploadError: If the file upload fails
            GitlabUploadError: If ``filedata`` and ``filepath`` are not
                specified
            GitlabUploadError: If both ``filedata`` and ``filepath`` are
                specified

        Returns:
            A ``dict`` with info on the uploaded file
        """
        if filedata is not None and filepath is not None:
            raise exc.GitlabUploadError("File contents and file path specified")
        if filedata is None:
            if filepath is None:
                raise exc.GitlabUploadError("No file contents or path specified")
            # Load the contents from disk when only a path was given.
            with open(filepath, "rb") as src:
                filedata = src.read()
        server_data = self.manager.gitlab.http_post(
            self._get_upload_path(), files={"file": (filename, filedata)}, **kwargs
        )
        if TYPE_CHECKING:
            assert isinstance(server_data, dict)
        return server_data
| 36,645 | Python | .py | 894 | 32.350112 | 88 | 0.636699 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,053 | _version.py | python-gitlab_python-gitlab/gitlab/_version.py | __author__ = "Gauvain Pocentek, python-gitlab team"
# Package metadata for python-gitlab, consumed by gitlab/__init__.py and the CLI.
__copyright__ = "Copyright 2013-2019 Gauvain Pocentek, 2019-2023 python-gitlab team"
__email__ = "gauvainpocentek@gmail.com"
__license__ = "LGPL3"
__title__ = "python-gitlab"
__version__ = "4.13.0"
| 250 | Python | .py | 6 | 40.666667 | 84 | 0.696721 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,054 | cli.py | python-gitlab_python-gitlab/gitlab/cli.py | import argparse
import dataclasses
import functools
import os
import pathlib
import re
import sys
from types import ModuleType
from typing import (
Any,
Callable,
cast,
Dict,
NoReturn,
Optional,
Tuple,
Type,
TYPE_CHECKING,
TypeVar,
Union,
)
from requests.structures import CaseInsensitiveDict
import gitlab.config
from gitlab.base import RESTObject
# This regex is based on:
# https://github.com/jpvanhal/inflection/blob/master/inflection/__init__.py
# Used by cls_to_gitlab_resource() to dasherize CamelCase class names:
# acronym boundary ("HTTPServer" -> "HTTP-Server"), then lower/upper boundary.
camel_upperlower_regex = re.compile(r"([A-Z]+)([A-Z][a-z])")
camel_lowerupper_regex = re.compile(r"([a-z\d])([A-Z])")
@dataclasses.dataclass
class CustomAction:
    """Metadata describing a custom CLI action registered on a resource class."""

    required: Tuple[str, ...]  # argument names the action requires
    optional: Tuple[str, ...]  # argument names the action accepts
    in_object: bool  # True if the method lives on the object, False on the manager
    requires_id: bool  # if the `_id_attr` value should be a required argument
    help: Optional[str]  # help text for the custom action
# Global registry populated at import time by @register_custom_action.
# Layout:
# custom_actions = {
#    cls: {
#        action: CustomAction,
#    },
# }
custom_actions: Dict[str, Dict[str, CustomAction]] = {}
# For an explanation of how these type-hints work see:
# https://mypy.readthedocs.io/en/stable/generics.html#declaring-decorators
#
# The goal here is that functions which get decorated will retain their types.
__F = TypeVar("__F", bound=Callable[..., Any])
def register_custom_action(
    *,
    cls_names: Union[str, Tuple[str, ...]],
    required: Tuple[str, ...] = (),
    optional: Tuple[str, ...] = (),
    custom_action: Optional[str] = None,
    requires_id: bool = True,  # if the `_id_attr` value should be a required argument
    help: Optional[str] = None,  # help text for the action
) -> Callable[[__F], __F]:
    """Register the decorated method as a custom CLI action for *cls_names*."""

    def wrap(f: __F) -> __F:
        @functools.wraps(f)
        def wrapped_f(*args: Any, **kwargs: Any) -> Any:
            return f(*args, **kwargs)

        # in_obj defines whether the method belongs to the obj or the manager
        in_obj = True
        classes = cls_names if isinstance(cls_names, tuple) else (cls_names,)
        for cls_name in classes:
            final_name = cls_name
            if cls_name.endswith("Manager"):
                final_name = cls_name.replace("Manager", "")
                in_obj = False
            per_cls = custom_actions.setdefault(final_name, {})
            action = custom_action or f.__name__.replace("_", "-")
            per_cls[action] = CustomAction(
                required=required,
                optional=optional,
                in_object=in_obj,
                requires_id=requires_id,
                help=help,
            )
        return cast(__F, wrapped_f)

    return wrap
def die(msg: str, e: Optional[Exception] = None) -> NoReturn:
    """Write *msg* (plus the exception, when given) to stderr and exit with status 1."""
    details = f"{msg} ({e})" if e else msg
    sys.stderr.write(f"{details}\n")
    sys.exit(1)
def gitlab_resource_to_cls(
    gitlab_resource: str, namespace: ModuleType
) -> Type[RESTObject]:
    """Resolve a dashed CLI resource name to the RESTObject class in *namespace*."""
    candidates = CaseInsensitiveDict(namespace.__dict__)
    # "project-issue" -> "projectissue"; the dict lookup is case-insensitive.
    resolved = candidates[gitlab_resource.replace("-", "")]
    if TYPE_CHECKING:
        assert isinstance(resolved, type)
        assert issubclass(resolved, RESTObject)
    return resolved
def cls_to_gitlab_resource(cls: RESTObject) -> str:
    """Convert a RESTObject class name (CamelCase) to its dashed CLI resource name."""
    dashed = camel_upperlower_regex.sub(r"\1-\2", cls.__name__)
    dashed = camel_lowerupper_regex.sub(r"\1-\2", dashed)
    return dashed.lower()
def _get_base_parser(add_help: bool = True) -> argparse.ArgumentParser:
    """Build the argument parser for global CLI options.

    Args:
        add_help: Whether argparse should add the -h/--help option; disabled
            for the first, partial parsing pass in main().

    Returns:
        The configured ArgumentParser (without resource subcommands).
    """
    parser = argparse.ArgumentParser(
        add_help=add_help,
        description="GitLab API Command Line Interface",
        allow_abbrev=False,
    )
    parser.add_argument("--version", help="Display the version.", action="store_true")
    # Output / debugging options.
    parser.add_argument(
        "-v",
        "--verbose",
        "--fancy",
        help="Verbose mode (legacy format only) [env var: GITLAB_VERBOSE]",
        action="store_true",
        default=os.getenv("GITLAB_VERBOSE"),
    )
    parser.add_argument(
        "-d",
        "--debug",
        help="Debug mode (display HTTP requests) [env var: GITLAB_DEBUG]",
        action="store_true",
        default=os.getenv("GITLAB_DEBUG"),
    )
    # Configuration-file selection.
    parser.add_argument(
        "-c",
        "--config-file",
        action="append",
        help=(
            "Configuration file to use. Can be used multiple times. "
            "[env var: PYTHON_GITLAB_CFG]"
        ),
    )
    parser.add_argument(
        "-g",
        "--gitlab",
        help=(
            "Which configuration section should "
            "be used. If not defined, the default selection "
            "will be used."
        ),
        required=False,
    )
    parser.add_argument(
        "-o",
        "--output",
        help="Output format (v4 only): json|legacy|yaml",
        required=False,
        choices=["json", "legacy", "yaml"],
        default="legacy",
    )
    parser.add_argument(
        "-f",
        "--fields",
        help=(
            "Fields to display in the output (comma "
            "separated). Not used with legacy output"
        ),
        required=False,
    )
    # Connection options; env vars provide the defaults.
    parser.add_argument(
        "--server-url",
        help=("GitLab server URL [env var: GITLAB_URL]"),
        required=False,
        default=os.getenv("GITLAB_URL"),
    )
    # --ssl-verify and --no-ssl-verify are mutually exclusive and share the
    # "ssl_verify" destination.
    ssl_verify_group = parser.add_mutually_exclusive_group()
    ssl_verify_group.add_argument(
        "--ssl-verify",
        help=(
            "Path to a CA_BUNDLE file or directory with certificates of trusted CAs. "
            "[env var: GITLAB_SSL_VERIFY]"
        ),
        required=False,
        default=os.getenv("GITLAB_SSL_VERIFY"),
    )
    ssl_verify_group.add_argument(
        "--no-ssl-verify",
        help="Disable SSL verification",
        required=False,
        dest="ssl_verify",
        action="store_false",
    )
    parser.add_argument(
        "--timeout",
        help=(
            "Timeout to use for requests to the GitLab server. "
            "[env var: GITLAB_TIMEOUT]"
        ),
        required=False,
        type=int,
        default=os.getenv("GITLAB_TIMEOUT"),
    )
    parser.add_argument(
        "--api-version",
        help=("GitLab API version [env var: GITLAB_API_VERSION]"),
        required=False,
        default=os.getenv("GITLAB_API_VERSION"),
    )
    # Pagination / ordering defaults applied to list requests.
    parser.add_argument(
        "--per-page",
        help=(
            "Number of entries to return per page in the response. "
            "[env var: GITLAB_PER_PAGE]"
        ),
        required=False,
        type=int,
        default=os.getenv("GITLAB_PER_PAGE"),
    )
    parser.add_argument(
        "--pagination",
        help=(
            "Whether to use keyset or offset pagination [env var: GITLAB_PAGINATION]"
        ),
        required=False,
        default=os.getenv("GITLAB_PAGINATION"),
    )
    parser.add_argument(
        "--order-by",
        help=("Set order_by globally [env var: GITLAB_ORDER_BY]"),
        required=False,
        default=os.getenv("GITLAB_ORDER_BY"),
    )
    parser.add_argument(
        "--user-agent",
        help=(
            "The user agent to send to GitLab with the HTTP request. "
            "[env var: GITLAB_USER_AGENT]"
        ),
        required=False,
        default=os.getenv("GITLAB_USER_AGENT"),
    )
    # Authentication: the three token kinds are mutually exclusive.
    tokens = parser.add_mutually_exclusive_group()
    tokens.add_argument(
        "--private-token",
        help=("GitLab private access token [env var: GITLAB_PRIVATE_TOKEN]"),
        required=False,
        default=os.getenv("GITLAB_PRIVATE_TOKEN"),
    )
    tokens.add_argument(
        "--oauth-token",
        help=("GitLab OAuth token [env var: GITLAB_OAUTH_TOKEN]"),
        required=False,
        default=os.getenv("GITLAB_OAUTH_TOKEN"),
    )
    tokens.add_argument(
        "--job-token",
        help=("GitLab CI job token [env var: CI_JOB_TOKEN]"),
        required=False,
    )
    parser.add_argument(
        "--skip-login",
        help=(
            "Skip initial authenticated API call to the current user endpoint. "
            "This may be useful when invoking the CLI in scripts. "
            "[env var: GITLAB_SKIP_LOGIN]"
        ),
        action="store_true",
        default=os.getenv("GITLAB_SKIP_LOGIN"),
    )
    parser.add_argument(
        "--no-mask-credentials",
        help="Don't mask credentials in debug mode",
        dest="mask_credentials",
        action="store_false",
    )
    return parser
def _get_parser() -> argparse.ArgumentParser:
    """Build the complete CLI parser, extended with the v4 API subcommands."""
    # Deferred import: pulling in gitlab.v4.cli at module import time would
    # create a circular import error.
    from gitlab.v4 import cli as v4_cli

    return v4_cli.extend_parser(_get_base_parser())
def _parse_value(v: Any) -> Any:
    """Resolve a CLI-provided value, expanding ``@file`` references.

    ``@@literal`` escapes a leading ``@`` (one ``@`` is stripped and the rest
    is returned verbatim); ``@path`` substitutes the file's content, decoded
    as UTF-8 text when possible and as raw bytes otherwise. Any other value
    is returned unchanged.
    """
    if not isinstance(v, str) or not v.startswith("@"):
        return v
    if v.startswith("@@"):
        # Escaped literal: "@@x" means the user wants the value "@x".
        return v[1:]
    # "@path": read the file at the given path as the real value.
    filepath = pathlib.Path(v[1:]).expanduser().resolve()
    try:
        with open(filepath, encoding="utf-8") as fh:
            return fh.read()
    except UnicodeDecodeError:
        # Not valid UTF-8 text: fall back to raw bytes.
        with open(filepath, "rb") as fh:
            return fh.read()
    except OSError as exc:
        sys.stderr.write(f"{type(exc).__name__}: {exc}\n")
        sys.exit(1)
def docs() -> argparse.ArgumentParser:  # pragma: no cover
    """Build a statically generated parser for the Sphinx docs build.

    Exists so readthedocs can render the CLI reference without a dummy
    gitlab config; refuses to run outside a Sphinx build.
    """
    if "sphinx" not in sys.modules:
        sys.exit("Docs parser is only intended for build_sphinx")
    return _get_parser()
def main() -> None:
    """CLI entry point: parse arguments, build the client, and run the action."""
    # Fast path: print the version and exit before any parsing/config work.
    if "--version" in sys.argv:
        print(gitlab.__version__)
        sys.exit(0)

    parser = _get_base_parser(add_help=False)

    # This first parsing step is only used to find the gitlab config
    # file/section to use. At that point we don't have any subparser setup.
    (options, _) = parser.parse_known_args(sys.argv)
    try:
        config = gitlab.config.GitlabConfigParser(options.gitlab, options.config_file)
    except gitlab.config.ConfigError as e:
        # If the user only asked for help, show it even when config is broken.
        if "--help" in sys.argv or "-h" in sys.argv:
            parser.print_help()
            sys.exit(0)
        sys.exit(str(e))
    # We only support v4 API at this time
    if config.api_version not in ("4",):  # dead code  # pragma: no cover
        raise ModuleNotFoundError(f"gitlab.v{config.api_version}.cli")

    # Now we build the entire set of subcommands and do the complete parsing
    parser = _get_parser()
    try:
        # Optional shell completion support; ignore if argcomplete is absent
        # or fails for any reason.
        import argcomplete  # type: ignore

        argcomplete.autocomplete(parser)  # pragma: no cover
    except Exception:
        pass
    args = parser.parse_args()

    config_files = args.config_file
    gitlab_id = args.gitlab
    verbose = args.verbose
    output = args.output
    fields = []
    if args.fields:
        fields = [x.strip() for x in args.fields.split(",")]
    debug = args.debug
    gitlab_resource = args.gitlab_resource
    resource_action = args.resource_action
    skip_login = args.skip_login
    mask_credentials = args.mask_credentials

    args_dict = vars(args)
    # Remove CLI behavior-related args so only resource attributes remain.
    for item in (
        "api_version",
        "config_file",
        "debug",
        "fields",
        "gitlab",
        "gitlab_resource",
        "job_token",
        "mask_credentials",
        "oauth_token",
        "output",
        "pagination",
        "private_token",
        "resource_action",
        "server_url",
        "skip_login",
        "ssl_verify",
        "timeout",
        "user_agent",
        "verbose",
        "version",
    ):
        args_dict.pop(item)

    # Drop unset options and expand @file references in the remaining values.
    args_dict = {k: _parse_value(v) for k, v in args_dict.items() if v is not None}

    try:
        gl = gitlab.Gitlab.merge_config(vars(options), gitlab_id, config_files)
        if debug:
            gl.enable_debug(mask_credentials=mask_credentials)
        # Authenticate eagerly unless skipped; only token-based auth needs it.
        if not skip_login and (gl.private_token or gl.oauth_token):
            gl.auth()
    except Exception as e:
        die(str(e))

    gitlab.v4.cli.run(
        gl, gitlab_resource, resource_action, args_dict, verbose, output, fields
    )
| 12,416 | Python | .py | 374 | 25.754011 | 86 | 0.599867 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,055 | const.py | python-gitlab_python-gitlab/gitlab/const.py | from enum import Enum, IntEnum
from gitlab._version import __title__, __version__
class GitlabEnum(str, Enum):
"""An enum mixed in with str to make it JSON-serializable."""
# https://gitlab.com/gitlab-org/gitlab/-/blob/e97357824bedf007e75f8782259fe07435b64fbb/lib/gitlab/access.rb#L12-18
class AccessLevel(IntEnum):
NO_ACCESS: int = 0
MINIMAL_ACCESS: int = 5
GUEST: int = 10
REPORTER: int = 20
DEVELOPER: int = 30
MAINTAINER: int = 40
OWNER: int = 50
ADMIN: int = 60
# https://gitlab.com/gitlab-org/gitlab/-/blob/e97357824bedf007e75f8782259fe07435b64fbb/lib/gitlab/visibility_level.rb#L23-25
class Visibility(GitlabEnum):
PRIVATE: str = "private"
INTERNAL: str = "internal"
PUBLIC: str = "public"
class NotificationLevel(GitlabEnum):
DISABLED: str = "disabled"
PARTICIPATING: str = "participating"
WATCH: str = "watch"
GLOBAL: str = "global"
MENTION: str = "mention"
CUSTOM: str = "custom"
# https://gitlab.com/gitlab-org/gitlab/-/blob/e97357824bedf007e75f8782259fe07435b64fbb/app/views/search/_category.html.haml#L10-37
class SearchScope(GitlabEnum):
# all scopes (global, group and project)
PROJECTS: str = "projects"
ISSUES: str = "issues"
MERGE_REQUESTS: str = "merge_requests"
MILESTONES: str = "milestones"
WIKI_BLOBS: str = "wiki_blobs"
COMMITS: str = "commits"
BLOBS: str = "blobs"
USERS: str = "users"
# specific global scope
GLOBAL_SNIPPET_TITLES: str = "snippet_titles"
# specific project scope
PROJECT_NOTES: str = "notes"
# https://docs.gitlab.com/ee/api/merge_requests.html#merge-status
class DetailedMergeStatus(GitlabEnum):
# possible values for the detailed_merge_status field of Merge Requests
BLOCKED_STATUS: str = "blocked_status"
BROKEN_STATUS: str = "broken_status"
CHECKING: str = "checking"
UNCHECKED: str = "unchecked"
CI_MUST_PASS: str = "ci_must_pass"
CI_STILL_RUNNING: str = "ci_still_running"
DISCUSSIONS_NOT_RESOLVED: str = "discussions_not_resolved"
DRAFT_STATUS: str = "draft_status"
EXTERNAL_STATUS_CHECKS: str = "external_status_checks"
MERGEABLE: str = "mergeable"
NOT_APPROVED: str = "not_approved"
NOT_OPEN: str = "not_open"
POLICIES_DENIED: str = "policies_denied"
# https://docs.gitlab.com/ee/api/pipelines.html
class PipelineStatus(GitlabEnum):
CREATED: str = "created"
WAITING_FOR_RESOURCE: str = "waiting_for_resource"
PREPARING: str = "preparing"
PENDING: str = "pending"
RUNNING: str = "running"
SUCCESS: str = "success"
FAILED: str = "failed"
CANCELED: str = "canceled"
SKIPPED: str = "skipped"
MANUAL: str = "manual"
SCHEDULED: str = "scheduled"
DEFAULT_URL: str = "https://gitlab.com"
NO_ACCESS = AccessLevel.NO_ACCESS.value
MINIMAL_ACCESS = AccessLevel.MINIMAL_ACCESS.value
GUEST_ACCESS = AccessLevel.GUEST.value
REPORTER_ACCESS = AccessLevel.REPORTER.value
DEVELOPER_ACCESS = AccessLevel.DEVELOPER.value
MAINTAINER_ACCESS = AccessLevel.MAINTAINER.value
OWNER_ACCESS = AccessLevel.OWNER.value
ADMIN_ACCESS = AccessLevel.ADMIN.value
VISIBILITY_PRIVATE = Visibility.PRIVATE.value
VISIBILITY_INTERNAL = Visibility.INTERNAL.value
VISIBILITY_PUBLIC = Visibility.PUBLIC.value
NOTIFICATION_LEVEL_DISABLED = NotificationLevel.DISABLED.value
NOTIFICATION_LEVEL_PARTICIPATING = NotificationLevel.PARTICIPATING.value
NOTIFICATION_LEVEL_WATCH = NotificationLevel.WATCH.value
NOTIFICATION_LEVEL_GLOBAL = NotificationLevel.GLOBAL.value
NOTIFICATION_LEVEL_MENTION = NotificationLevel.MENTION.value
NOTIFICATION_LEVEL_CUSTOM = NotificationLevel.CUSTOM.value
# Search scopes
# all scopes (global, group and project)
SEARCH_SCOPE_PROJECTS = SearchScope.PROJECTS.value
SEARCH_SCOPE_ISSUES = SearchScope.ISSUES.value
SEARCH_SCOPE_MERGE_REQUESTS = SearchScope.MERGE_REQUESTS.value
SEARCH_SCOPE_MILESTONES = SearchScope.MILESTONES.value
SEARCH_SCOPE_WIKI_BLOBS = SearchScope.WIKI_BLOBS.value
SEARCH_SCOPE_COMMITS = SearchScope.COMMITS.value
SEARCH_SCOPE_BLOBS = SearchScope.BLOBS.value
SEARCH_SCOPE_USERS = SearchScope.USERS.value
# specific global scope
SEARCH_SCOPE_GLOBAL_SNIPPET_TITLES = SearchScope.GLOBAL_SNIPPET_TITLES.value
# specific project scope
SEARCH_SCOPE_PROJECT_NOTES = SearchScope.PROJECT_NOTES.value
USER_AGENT: str = f"{__title__}/{__version__}"
NO_JSON_RESPONSE_CODES = [204]
RETRYABLE_TRANSIENT_ERROR_CODES = [500, 502, 503, 504] + list(range(520, 531))
__all__ = [
"AccessLevel",
"Visibility",
"NotificationLevel",
"SearchScope",
"ADMIN_ACCESS",
"DEFAULT_URL",
"DEVELOPER_ACCESS",
"GUEST_ACCESS",
"MAINTAINER_ACCESS",
"MINIMAL_ACCESS",
"NO_ACCESS",
"NOTIFICATION_LEVEL_CUSTOM",
"NOTIFICATION_LEVEL_DISABLED",
"NOTIFICATION_LEVEL_GLOBAL",
"NOTIFICATION_LEVEL_MENTION",
"NOTIFICATION_LEVEL_PARTICIPATING",
"NOTIFICATION_LEVEL_WATCH",
"OWNER_ACCESS",
"REPORTER_ACCESS",
"SEARCH_SCOPE_BLOBS",
"SEARCH_SCOPE_COMMITS",
"SEARCH_SCOPE_GLOBAL_SNIPPET_TITLES",
"SEARCH_SCOPE_ISSUES",
"SEARCH_SCOPE_MERGE_REQUESTS",
"SEARCH_SCOPE_MILESTONES",
"SEARCH_SCOPE_PROJECT_NOTES",
"SEARCH_SCOPE_PROJECTS",
"SEARCH_SCOPE_USERS",
"SEARCH_SCOPE_WIKI_BLOBS",
"USER_AGENT",
"VISIBILITY_INTERNAL",
"VISIBILITY_PRIVATE",
"VISIBILITY_PUBLIC",
]
| 5,352 | Python | .py | 140 | 34.485714 | 130 | 0.738233 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,056 | utils.py | python-gitlab_python-gitlab/gitlab/utils.py | import dataclasses
import email.message
import logging
import pathlib
import time
import traceback
import urllib.parse
import warnings
from typing import (
Any,
Callable,
Dict,
Iterator,
Literal,
MutableMapping,
Optional,
Tuple,
Type,
Union,
)
import requests
from gitlab import const, types
class _StdoutStream:
    """Default streaming callback: prints every received chunk to stdout."""

    def __call__(self, chunk: Any) -> None:
        print(chunk)
def get_base_url(url: Optional[str] = None) -> str:
    """Return the base URL with any trailing slash removed.

    Falls back to the library default URL when *url* is empty or ``None``.

    Returns:
        The base URL
    """
    if url:
        return url.rstrip("/")
    return const.DEFAULT_URL
def get_content_type(content_type: Optional[str]) -> str:
    """Extract the bare MIME type from a Content-Type header value.

    Parameters such as ``charset`` are discarded; ``None`` yields the
    email module's default content type.
    """
    msg = email.message.Message()
    if content_type is not None:
        msg["content-type"] = content_type
    return msg.get_content_type()
class MaskingFormatter(logging.Formatter):
    """A logging formatter that can mask a configured credential string.

    Every occurrence of ``masked`` in the formatted record is replaced with
    ``[MASKED]`` before the message is emitted.
    """

    def __init__(
        self,
        fmt: Optional[str] = logging.BASIC_FORMAT,
        datefmt: Optional[str] = None,
        style: Literal["%", "{", "$"] = "%",
        validate: bool = True,
        masked: Optional[str] = None,
    ) -> None:
        super().__init__(fmt, datefmt, style, validate)
        # The sensitive substring to redact, or None for no redaction.
        self.masked = masked

    def _filter(self, entry: str) -> str:
        # Nothing to hide when no credential was configured.
        if self.masked:
            return entry.replace(self.masked, "[MASKED]")
        return entry

    def format(self, record: logging.LogRecord) -> str:
        return self._filter(super().format(record))
def response_content(
    response: requests.Response,
    streamed: bool,
    action: Optional[Callable[[bytes], None]],
    chunk_size: int,
    *,
    iterator: bool,
) -> Optional[Union[bytes, Iterator[Any]]]:
    """Return, stream, or iterate a response body.

    With ``iterator=True`` a lazy chunk iterator is returned. With
    ``streamed=False`` the whole body is returned at once. Otherwise each
    chunk is fed to *action* (defaulting to printing to stdout) and ``None``
    is returned.
    """
    if iterator:
        return response.iter_content(chunk_size=chunk_size)

    if streamed is False:
        return response.content

    sink = _StdoutStream() if action is None else action
    for piece in response.iter_content(chunk_size=chunk_size):
        # Skip keep-alive chunks, which arrive as empty bytes.
        if piece:
            sink(piece)
    return None
class Retry:
    """Tracks retry state for HTTP requests and applies backoff sleeps.

    Args:
        max_retries: Maximum number of retries; ``-1`` means retry forever.
        obey_rate_limit: Honor HTTP 429 rate-limit responses.
        retry_transient_errors: Also retry transient server-side errors.
    """

    def __init__(
        self,
        max_retries: int,
        obey_rate_limit: Optional[bool] = True,
        retry_transient_errors: Optional[bool] = False,
    ) -> None:
        self.cur_retries = 0
        self.max_retries = max_retries
        self.obey_rate_limit = obey_rate_limit
        self.retry_transient_errors = retry_transient_errors

    def _retryable_status_code(
        self, status_code: Optional[int], reason: str = ""
    ) -> bool:
        """Return True when the status code warrants a retry under the config."""
        if status_code == 429 and self.obey_rate_limit:
            return True

        if not self.retry_transient_errors:
            return False
        if status_code in const.RETRYABLE_TRANSIENT_ERROR_CODES:
            return True
        # GitLab returns 409 with "Resource lock" for temporarily locked
        # resources; treat it as transient.
        if status_code == 409 and "Resource lock" in reason:
            return True

        return False

    def handle_retry_on_status(
        self,
        status_code: Optional[int],
        headers: Optional[MutableMapping[str, str]] = None,
        reason: str = "",
    ) -> bool:
        """Sleep and return True when the response should be retried.

        Returns False when the status is not retryable or the retry budget
        is exhausted.
        """
        if not self._retryable_status_code(status_code, reason):
            return False

        if headers is None:
            headers = {}

        # Response headers documentation:
        # https://docs.gitlab.com/ee/user/admin_area/settings/user_and_ip_rate_limits.html#response-headers
        if self.max_retries == -1 or self.cur_retries < self.max_retries:
            # Exponential backoff default, overridden by server hints.
            wait_time = 2**self.cur_retries * 0.1
            if "Retry-After" in headers:
                wait_time = int(headers["Retry-After"])
            elif "RateLimit-Reset" in headers:
                wait_time = int(headers["RateLimit-Reset"]) - time.time()
            self.cur_retries += 1
            # Fix: a RateLimit-Reset timestamp in the past would produce a
            # negative wait_time, and time.sleep() raises ValueError for
            # negative values. Clamp to zero.
            time.sleep(max(0, wait_time))
            return True

        return False

    def handle_retry(self) -> bool:
        """Sleep with exponential backoff for a transient (non-HTTP) error.

        Returns True when a retry should be attempted, False when the retry
        budget is exhausted or transient-error retries are disabled.
        """
        if self.retry_transient_errors and (
            self.max_retries == -1 or self.cur_retries < self.max_retries
        ):
            wait_time = 2**self.cur_retries * 0.1
            self.cur_retries += 1
            time.sleep(wait_time)
            return True

        return False
def _transform_types(
    data: Dict[str, Any],
    custom_types: Dict[str, Any],
    *,
    transform_data: bool,
    transform_files: Optional[bool] = True,
) -> Tuple[Dict[str, Any], Dict[str, Any]]:
    """Copy the data dict with attributes that have custom types and transform them
    before being sent to the server.

    ``transform_files``: If ``True`` (default), also populates the ``files`` dict for
    FileAttribute types with tuples to prepare fields for requests' MultipartEncoder:
    https://toolbelt.readthedocs.io/en/latest/user.html#multipart-form-data-encoder

    ``transform_data``: If ``True`` transforms the ``data`` dict with fields
    suitable for encoding as query parameters for GitLab's API:
    https://docs.gitlab.com/ee/api/#encoding-api-parameters-of-array-and-hash-types

    Returns:
        A tuple of the transformed data dict and files dict"""
    # Duplicate data to avoid messing with what the user sent us
    data = data.copy()
    if not transform_files and not transform_data:
        # Nothing to do beyond protecting the caller's dict.
        return data, {}
    files = {}
    for attr_name, attr_class in custom_types.items():
        if attr_name not in data:
            continue

        gitlab_attribute = attr_class(data[attr_name])

        # if the type is FileAttribute we need to pass the data as file
        if isinstance(gitlab_attribute, types.FileAttribute) and transform_files:
            key = gitlab_attribute.get_file_name(attr_name)
            # Move the value out of `data` and into `files` as a
            # (filename, content) tuple for multipart encoding.
            files[attr_name] = (key, data.pop(attr_name))
            continue

        if not transform_data:
            continue

        if isinstance(gitlab_attribute, types.GitlabAttribute):
            # The attribute may rename its key for the API (e.g. array
            # syntax); drop the old key when that happens.
            key, value = gitlab_attribute.get_for_api(key=attr_name)
            if key != attr_name:
                del data[attr_name]
            data[key] = value

    return data, files
def copy_dict(
    *,
    src: Dict[str, Any],
    dest: Dict[str, Any],
) -> None:
    """Copy entries from *src* into *dest*, flattening one level of dicts.

    A nested dict value is expanded into GitLab "hash" style keys, e.g.
    ``{"custom_attributes": {"foo": "bar"}}`` becomes
    ``{"custom_attributes[foo]": "bar"}``.
    See https://docs.gitlab.com/ee/api/#hash
    """
    for key, value in src.items():
        if not isinstance(value, dict):
            dest[key] = value
            continue
        # Expand nested dict values into "parent[child]" keys.
        for sub_key, sub_value in value.items():
            dest[f"{key}[{sub_key}]"] = sub_value
class EncodedId(str):
    """A ``str`` subclass whose value is URL-encoded exactly once.

    * Re-wrapping an ``EncodedId`` returns the same object (never encodes
      twice).
    * Accepts either ``str`` or ``int`` input.
    * Usable directly in f-strings and URL path interpolation.

    Reference to documentation on why this is necessary.

    See::

        https://docs.gitlab.com/ee/api/index.html#namespaced-path-encoding
        https://docs.gitlab.com/ee/api/index.html#path-parameters
    """

    def __new__(cls, value: Union[str, int, "EncodedId"]) -> "EncodedId":
        # Already encoded: hand back the very same instance unchanged.
        if isinstance(value, EncodedId):
            return value
        if isinstance(value, str):
            # Encode everything, including "/" (safe="").
            return super().__new__(cls, urllib.parse.quote(value, safe=""))
        if isinstance(value, int):
            return super().__new__(cls, value)
        raise TypeError(f"Unsupported type received: {type(value)}")
def remove_none_from_dict(data: Dict[str, Any]) -> Dict[str, Any]:
    """Return a copy of *data* without the keys whose value is ``None``."""
    return {key: value for key, value in data.items() if value is not None}
def warn(
    message: str,
    *,
    category: Optional[Type[Warning]] = None,
    source: Optional[Any] = None,
    show_caller: bool = True,
) -> None:
    """Emit a warning attributed to the user's code rather than the library.

    Walks the current stack outward until it leaves the ``gitlab/`` package
    directory and uses that depth as ``stacklevel``, so the warning points
    at the caller's code. When *show_caller* is true, the python-gitlab
    frame that triggered the warning is appended to the message.
    """
    # Get `stacklevel` for user code so we indicate where the issue is in
    # their code, not ours.
    library_dir = pathlib.Path(__file__).parent.resolve()
    frames = traceback.extract_stack()
    depth = 1
    caller_note = ""
    for depth, frame in enumerate(reversed(frames), start=1):
        caller_note = f" (python-gitlab: {frame.filename}:{frame.lineno})"
        frame_dir = str(pathlib.Path(frame.filename).parent.resolve())
        if not frame_dir.startswith(str(library_dir)):
            break
    if show_caller:
        message += caller_note
    warnings.warn(
        message=message,
        category=category,
        stacklevel=depth,
        source=source,
    )
@dataclasses.dataclass
class WarnMessageData:
    """Container pairing a warning message with its display option."""

    # The pre-built warning text.
    message: str
    # Whether the caller location should be appended to the message.
    show_caller: bool
| 9,027 | Python | .py | 241 | 30.016598 | 107 | 0.632737 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,057 | __init__.py | python-gitlab_python-gitlab/gitlab/__init__.py | # -*- coding: utf-8 -*-
#
# Copyright (C) 2013-2019 Gauvain Pocentek, 2019-2023 python-gitlab team
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Wrapper for the GitLab API."""
import warnings
import gitlab.config # noqa: F401
from gitlab._version import ( # noqa: F401
__author__,
__copyright__,
__email__,
__license__,
__title__,
__version__,
)
from gitlab.client import Gitlab, GitlabList, GraphQL # noqa: F401
from gitlab.exceptions import * # noqa: F401,F403
# Re-enable DeprecationWarnings emitted by this package so users see them
# (Python hides DeprecationWarning by default outside __main__).
warnings.filterwarnings("default", category=DeprecationWarning, module="^gitlab")

__all__ = [
    "__author__",
    "__copyright__",
    "__email__",
    "__license__",
    "__title__",
    "__version__",
    "Gitlab",
    "GitlabList",
    "GraphQL",
]
# Also re-export every exception class declared by gitlab.exceptions.
__all__.extend(gitlab.exceptions.__all__)
| 1,406 | Python | .py | 42 | 30.928571 | 81 | 0.701987 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,058 | base.py | python-gitlab_python-gitlab/gitlab/base.py | import copy
import importlib
import json
import pprint
import textwrap
from types import ModuleType
from typing import Any, Dict, Iterable, Optional, Type, TYPE_CHECKING, Union
import gitlab
from gitlab import types as g_types
from gitlab.exceptions import GitlabParsingError
from .client import Gitlab, GitlabList
__all__ = [
"RESTObject",
"RESTObjectList",
"RESTManager",
]
_URL_ATTRIBUTE_ERROR = (
f"https://python-gitlab.readthedocs.io/en/v{gitlab.__version__}/"
f"faq.html#attribute-error-list"
)
class RESTObject:
    """Represents an object built from server data.

    It holds the attributes known from the server, and the updated attributes
    in another dict. This allows smart updates, if the object allows it.

    You can redefine ``_id_attr`` in child classes to specify which attribute
    must be used as the unique ID. ``None`` means that the object can be updated
    without ID in the url.

    Likewise, you can define a ``_repr_attr`` in subclasses to specify which
    attribute should be added as a human-readable identifier when called in the
    object's ``__repr__()`` method.
    """

    _id_attr: Optional[str] = "id"
    _attrs: Dict[str, Any]
    _created_from_list: bool  # Indicates if object was created from a list() action
    _module: ModuleType
    _parent_attrs: Dict[str, Any]
    _repr_attr: Optional[str] = None
    _updated_attrs: Dict[str, Any]
    _lazy: bool
    manager: "RESTManager"

    def __init__(
        self,
        manager: "RESTManager",
        attrs: Dict[str, Any],
        *,
        created_from_list: bool = False,
        lazy: bool = False,
    ) -> None:
        if not isinstance(attrs, dict):
            raise GitlabParsingError(
                f"Attempted to initialize RESTObject with a non-dictionary value: "
                f"{attrs!r}\nThis likely indicates an incorrect or malformed server "
                f"response."
            )
        # Write bookkeeping fields through __dict__ directly so our own
        # __setattr__ does not record them as user updates.
        self.__dict__.update(
            {
                "manager": manager,
                "_attrs": attrs,
                "_updated_attrs": {},
                "_module": importlib.import_module(self.__module__),
                "_created_from_list": created_from_list,
                "_lazy": lazy,
            }
        )
        self.__dict__["_parent_attrs"] = self.manager.parent_attrs
        self._create_managers()

    def __getstate__(self) -> Dict[str, Any]:
        # Module objects are not picklable: store the module name instead.
        state = self.__dict__.copy()
        module = state.pop("_module")
        state["_module_name"] = module.__name__
        return state

    def __setstate__(self, state: Dict[str, Any]) -> None:
        # Restore the module object from the name saved in __getstate__.
        module_name = state.pop("_module_name")
        self.__dict__.update(state)
        self.__dict__["_module"] = importlib.import_module(module_name)

    def __getattr__(self, name: str) -> Any:
        # Lookup order: user updates, then server data, then parent attrs.
        if name in self.__dict__["_updated_attrs"]:
            return self.__dict__["_updated_attrs"][name]

        if name in self.__dict__["_attrs"]:
            value = self.__dict__["_attrs"][name]
            # If the value is a list, we copy it in the _updated_attrs dict
            # because we are not able to detect changes made on the object
            # (append, insert, pop, ...). Without forcing the attr
            # creation __setattr__ is never called, the list never ends up
            # in the _updated_attrs dict, and the update() and save()
            # method never push the new data to the server.
            # See https://github.com/python-gitlab/python-gitlab/issues/306
            #
            # note: _parent_attrs will only store simple values (int) so we
            # don't make this check in the next block.
            if isinstance(value, list):
                self.__dict__["_updated_attrs"][name] = value[:]
                return self.__dict__["_updated_attrs"][name]

            return value

        if name in self.__dict__["_parent_attrs"]:
            return self.__dict__["_parent_attrs"][name]

        message = f"{type(self).__name__!r} object has no attribute {name!r}"
        # Add a hint when the missing attribute is likely explained by how
        # the object was created (partial list data, or a lazy stub).
        if self._created_from_list:
            message = (
                f"{message}\n\n"
                + textwrap.fill(
                    f"{self.__class__!r} was created via a list() call and "
                    f"only a subset of the data may be present. To ensure "
                    f"all data is present get the object using a "
                    f"get(object.id) call. For more details, see:"
                )
                + f"\n\n{_URL_ATTRIBUTE_ERROR}"
            )
        elif self._lazy:
            message = f"{message}\n\n" + textwrap.fill(
                f"If you tried to access object attributes returned from the server, "
                f"note that {self.__class__!r} was created as a `lazy` object and was "
                f"not initialized with any data."
            )
        raise AttributeError(message)

    def __setattr__(self, name: str, value: Any) -> None:
        # Every attribute set by the user is recorded as a pending update.
        self.__dict__["_updated_attrs"][name] = value

    def asdict(self, *, with_parent_attrs: bool = False) -> Dict[str, Any]:
        """Return a deep-copied dict of the object's attributes.

        Updated attributes override server attributes; parent attributes are
        included only when ``with_parent_attrs`` is true.
        """
        data = {}
        if with_parent_attrs:
            data.update(copy.deepcopy(self._parent_attrs))
        data.update(copy.deepcopy(self._attrs))
        data.update(copy.deepcopy(self._updated_attrs))
        return data

    @property
    def attributes(self) -> Dict[str, Any]:
        # Full attribute view, including attributes inherited from the parent.
        return self.asdict(with_parent_attrs=True)

    def to_json(self, *, with_parent_attrs: bool = False, **kwargs: Any) -> str:
        """Serialize the object's attributes to a JSON string."""
        return json.dumps(self.asdict(with_parent_attrs=with_parent_attrs), **kwargs)

    def __str__(self) -> str:
        return f"{type(self)} => {self.asdict()}"

    def pformat(self) -> str:
        """Return a pretty-printed string of the object's attributes."""
        return f"{type(self)} => \n{pprint.pformat(self.asdict())}"

    def pprint(self) -> None:
        """Pretty-print the object's attributes to stdout."""
        print(self.pformat())

    def __repr__(self) -> str:
        name = self.__class__.__name__

        if (self._id_attr and self._repr_value) and (self._id_attr != self._repr_attr):
            return (
                f"<{name} {self._id_attr}:{self.get_id()} "
                f"{self._repr_attr}:{self._repr_value}>"
            )
        if self._id_attr:
            return f"<{name} {self._id_attr}:{self.get_id()}>"
        if self._repr_value:
            return f"<{name} {self._repr_attr}:{self._repr_value}>"

        return f"<{name}>"

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, RESTObject):
            return NotImplemented
        if self.get_id() and other.get_id():
            return self.get_id() == other.get_id()
        # NOTE(review): `super() == other` compares the super proxy object
        # itself, not the underlying object — this looks like it always
        # evaluates to False; confirm intended fallback semantics.
        return super() == other

    def __ne__(self, other: object) -> bool:
        if not isinstance(other, RESTObject):
            return NotImplemented
        if self.get_id() and other.get_id():
            return self.get_id() != other.get_id()
        return super() != other

    def __dir__(self) -> Iterable[str]:
        # Expose the dynamic attributes alongside the regular ones.
        return set(self.attributes).union(super().__dir__())

    def __hash__(self) -> int:
        if not self.get_id():
            return super().__hash__()
        return hash(self.get_id())

    def _create_managers(self) -> None:
        # NOTE(jlvillal): We are creating our managers by looking at the class
        # annotations. If an attribute is annotated as being a *Manager type
        # then we create the manager and assign it to the attribute.
        for attr, annotation in sorted(self.__class__.__annotations__.items()):
            # We ignore creating a manager for the 'manager' attribute as that
            # is done in the self.__init__() method
            if attr in ("manager",):
                continue
            if not isinstance(annotation, (type, str)):  # pragma: no cover
                continue
            if isinstance(annotation, type):
                cls_name = annotation.__name__
            else:
                cls_name = annotation
            # All *Manager classes are used except for the base "RESTManager" class
            if cls_name == "RESTManager" or not cls_name.endswith("Manager"):
                continue
            cls = getattr(self._module, cls_name)
            manager = cls(self.manager.gitlab, parent=self)
            # Since we have our own __setattr__ method, we can't use setattr()
            self.__dict__[attr] = manager

    def _update_attrs(self, new_attrs: Dict[str, Any]) -> None:
        # Replace the server data wholesale and drop pending updates.
        self.__dict__["_updated_attrs"] = {}
        self.__dict__["_attrs"] = new_attrs

    def get_id(self) -> Optional[Union[int, str]]:
        """Returns the id of the resource."""
        if self._id_attr is None or not hasattr(self, self._id_attr):
            return None
        id_val = getattr(self, self._id_attr)
        if TYPE_CHECKING:
            assert id_val is None or isinstance(id_val, (int, str))
        return id_val

    @property
    def _repr_value(self) -> Optional[str]:
        """Safely returns the human-readable resource name if present."""
        if self._repr_attr is None or not hasattr(self, self._repr_attr):
            return None
        repr_val = getattr(self, self._repr_attr)
        if TYPE_CHECKING:
            assert isinstance(repr_val, str)
        return repr_val

    @property
    def encoded_id(self) -> Optional[Union[int, str]]:
        """Ensure that the ID is url-encoded so that it can be safely used in a URL
        path"""
        obj_id = self.get_id()
        if isinstance(obj_id, str):
            obj_id = gitlab.utils.EncodedId(obj_id)
        return obj_id
class RESTObjectList:
    """Generator object wrapping a :class:`GitlabList` that yields RESTObjects.

    This generator uses the Gitlab pagination system: the underlying list
    fetches new pages on demand as iteration advances.

    Note: you should not instantiate such objects, they are returned by calls
    to RESTManager.list()

    Args:
        manager: Manager to attach to the created objects
        obj_cls: Type of objects to create from the json data
        _list: A GitlabList object
    """

    def __init__(
        self, manager: "RESTManager", obj_cls: Type[RESTObject], _list: GitlabList
    ) -> None:
        """Creates an objects list from a GitlabList.

        You should not create objects of this type, but use managers list()
        methods instead.

        Args:
            manager: the RESTManager to attach to the objects
            obj_cls: the class of the created objects
            _list: the GitlabList holding the data
        """
        self.manager = manager
        self._obj_cls = obj_cls
        self._list = _list

    def __iter__(self) -> "RESTObjectList":
        return self

    def __len__(self) -> int:
        return len(self._list)

    def __next__(self) -> RESTObject:
        # Pull the next raw item and wrap it in the target REST object class.
        raw = self._list.next()
        return self._obj_cls(self.manager, raw, created_from_list=True)

    def next(self) -> RESTObject:
        return self.__next__()

    @property
    def current_page(self) -> int:
        """The current page number."""
        return self._list.current_page

    @property
    def prev_page(self) -> Optional[int]:
        """The previous page number, or None when on the first page."""
        return self._list.prev_page

    @property
    def next_page(self) -> Optional[int]:
        """The next page number, or None when on the last page."""
        return self._list.next_page

    @property
    def per_page(self) -> Optional[int]:
        """The number of items per page."""
        return self._list.per_page

    @property
    def total_pages(self) -> Optional[int]:
        """The total number of pages."""
        return self._list.total_pages

    @property
    def total(self) -> Optional[int]:
        """The total number of items."""
        return self._list.total
class RESTManager:
    """Base class for CRUD operations on objects.

    Derived class must define ``_path`` and ``_obj_cls``.

    ``_path``: Base URL path on which requests will be sent (e.g. '/projects')
    ``_obj_cls``: The class of objects that will be created
    """

    # Attribute specs used by create()/update() in derived managers.
    _create_attrs: g_types.RequiredOptional = g_types.RequiredOptional()
    _update_attrs: g_types.RequiredOptional = g_types.RequiredOptional()
    _path: Optional[str] = None
    _obj_cls: Optional[Type[RESTObject]] = None
    # Maps placeholder names in `_path` to attributes of the parent object.
    _from_parent_attrs: Dict[str, Any] = {}
    _types: Dict[str, Type[g_types.GitlabAttribute]] = {}

    _computed_path: Optional[str]
    _parent: Optional[RESTObject]
    _parent_attrs: Dict[str, Any]
    gitlab: Gitlab

    def __init__(self, gl: Gitlab, parent: Optional[RESTObject] = None) -> None:
        """REST manager constructor.

        Args:
            gl: :class:`~gitlab.Gitlab` connection to use to make requests.
            parent: REST object to which the manager is attached.
        """
        self.gitlab = gl
        self._parent = parent  # for nested managers
        self._computed_path = self._compute_path()

    @property
    def parent_attrs(self) -> Optional[Dict[str, Any]]:
        # Attributes extracted from the parent object by _compute_path().
        return self._parent_attrs

    def _compute_path(self, path: Optional[str] = None) -> Optional[str]:
        """Fill the path template with url-encoded parent attributes.

        Also records the extracted parent attributes in ``_parent_attrs``
        as a side effect. Returns None when no path is defined.
        """
        self._parent_attrs = {}
        if path is None:
            path = self._path
        if path is None:
            return None
        if self._parent is None or not self._from_parent_attrs:
            return path

        data: Dict[str, Optional[gitlab.utils.EncodedId]] = {}
        for self_attr, parent_attr in self._from_parent_attrs.items():
            if not hasattr(self._parent, parent_attr):
                # Missing attributes become None placeholders rather than
                # raising, so partial parents can still compute a path.
                data[self_attr] = None
                continue
            data[self_attr] = gitlab.utils.EncodedId(getattr(self._parent, parent_attr))
        self._parent_attrs = data
        return path.format(**data)

    @property
    def path(self) -> Optional[str]:
        # The fully expanded URL path for this manager (or None).
        return self._computed_path
| 13,810 | Python | .py | 324 | 33.469136 | 88 | 0.592203 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,059 | exceptions.py | python-gitlab_python-gitlab/gitlab/exceptions.py | import functools
from typing import Any, Callable, cast, Optional, Type, TYPE_CHECKING, TypeVar, Union
class GitlabError(Exception):
    """Base exception for all errors reported by the GitLab API.

    Attributes:
        response_code: HTTP status code of the failed request, if any.
        response_body: Full raw HTTP response body, if any.
        error_message: Parsed, human-readable error text.
    """

    def __init__(
        self,
        error_message: Union[str, bytes] = "",
        response_code: Optional[int] = None,
        response_body: Optional[bytes] = None,
    ) -> None:
        super().__init__(error_message)
        # Http status code
        self.response_code = response_code
        # Full http response
        self.response_body = response_body
        # GitLab may hand us bytes; normalize to str so message types are
        # consistent (see #616). On decode failure keep the value as-is.
        if isinstance(error_message, bytes):
            try:
                self.error_message = error_message.decode()
            except Exception:
                self.error_message = error_message
        else:
            self.error_message = error_message

    def __str__(self) -> str:
        if self.response_code is None:
            return f"{self.error_message}"
        return f"{self.response_code}: {self.error_message}"
# --- General errors derived directly from GitlabError -----------------------


class GitlabAuthenticationError(GitlabError):
    pass


class RedirectError(GitlabError):
    pass


class GitlabParsingError(GitlabError):
    pass


class GitlabCiLintError(GitlabError):
    pass


class GitlabConnectionError(GitlabError):
    pass


class GitlabOperationError(GitlabError):
    pass


class GitlabHttpError(GitlabError):
    pass


# --- CRUD operation errors ---------------------------------------------------


class GitlabListError(GitlabOperationError):
    pass


class GitlabGetError(GitlabOperationError):
    pass


class GitlabHeadError(GitlabOperationError):
    pass


class GitlabCreateError(GitlabOperationError):
    pass


class GitlabUpdateError(GitlabOperationError):
    pass


class GitlabDeleteError(GitlabOperationError):
    pass


class GitlabSetError(GitlabOperationError):
    pass


class GitlabProtectError(GitlabOperationError):
    pass


class GitlabTransferProjectError(GitlabOperationError):
    pass


class GitlabGroupTransferError(GitlabOperationError):
    pass


class GitlabProjectDeployKeyError(GitlabOperationError):
    pass


class GitlabPromoteError(GitlabOperationError):
    pass


# --- Job/pipeline cancel, retry and play errors ------------------------------


class GitlabCancelError(GitlabOperationError):
    pass


class GitlabPipelineCancelError(GitlabCancelError):
    pass


class GitlabRetryError(GitlabOperationError):
    pass


class GitlabBuildCancelError(GitlabCancelError):
    pass


class GitlabBuildRetryError(GitlabRetryError):
    pass


class GitlabBuildPlayError(GitlabRetryError):
    pass


class GitlabBuildEraseError(GitlabRetryError):
    pass


class GitlabJobCancelError(GitlabCancelError):
    pass


class GitlabJobRetryError(GitlabRetryError):
    pass


class GitlabJobPlayError(GitlabRetryError):
    pass


class GitlabJobEraseError(GitlabRetryError):
    pass


class GitlabPipelinePlayError(GitlabRetryError):
    pass


class GitlabPipelineRetryError(GitlabRetryError):
    pass


# --- User state errors --------------------------------------------------------


class GitlabBlockError(GitlabOperationError):
    pass


class GitlabUnblockError(GitlabOperationError):
    pass


class GitlabDeactivateError(GitlabOperationError):
    pass


class GitlabActivateError(GitlabOperationError):
    pass


class GitlabBanError(GitlabOperationError):
    pass


class GitlabUnbanError(GitlabOperationError):
    pass


class GitlabSubscribeError(GitlabOperationError):
    pass


class GitlabUnsubscribeError(GitlabOperationError):
    pass


# --- Merge request errors -----------------------------------------------------


class GitlabMRForbiddenError(GitlabOperationError):
    pass


class GitlabMRApprovalError(GitlabOperationError):
    pass


class GitlabMRRebaseError(GitlabOperationError):
    pass


class GitlabMRResetApprovalError(GitlabOperationError):
    pass


class GitlabMRClosedError(GitlabOperationError):
    pass


class GitlabMROnBuildSuccessError(GitlabOperationError):
    pass


# --- Miscellaneous resource-specific operation errors -------------------------


class GitlabTodoError(GitlabOperationError):
    pass


class GitlabTopicMergeError(GitlabOperationError):
    pass


class GitlabTimeTrackingError(GitlabOperationError):
    pass


class GitlabUploadError(GitlabOperationError):
    pass


class GitlabAttachFileError(GitlabOperationError):
    pass


class GitlabImportError(GitlabOperationError):
    pass


class GitlabInvitationError(GitlabOperationError):
    pass


class GitlabCherryPickError(GitlabOperationError):
    pass


class GitlabHousekeepingError(GitlabOperationError):
    pass


class GitlabOwnershipError(GitlabOperationError):
    pass


class GitlabSearchError(GitlabOperationError):
    pass


class GitlabStopError(GitlabOperationError):
    pass


class GitlabMarkdownError(GitlabOperationError):
    pass


class GitlabVerifyError(GitlabOperationError):
    pass


class GitlabRenderError(GitlabOperationError):
    pass


class GitlabRepairError(GitlabOperationError):
    pass


class GitlabRestoreError(GitlabOperationError):
    pass


class GitlabRevertError(GitlabOperationError):
    pass


class GitlabRotateError(GitlabOperationError):
    pass


class GitlabLicenseError(GitlabOperationError):
    pass


class GitlabFollowError(GitlabOperationError):
    pass


class GitlabUnfollowError(GitlabOperationError):
    pass


class GitlabUserApproveError(GitlabOperationError):
    pass


class GitlabUserRejectError(GitlabOperationError):
    pass
class GitlabDeploymentApprovalError(GitlabOperationError):
pass
class GitlabHookTestError(GitlabOperationError):
pass
# For an explanation of how these type-hints work see:
# https://mypy.readthedocs.io/en/stable/generics.html#declaring-decorators
#
# The goal here is that functions which get decorated will retain their types.
__F = TypeVar("__F", bound=Callable[..., Any])


def on_http_error(error: Type[Exception]) -> Callable[[__F], __F]:
    """Manage GitlabHttpError exceptions.

    This decorator function can be used to catch GitlabHttpError exceptions
    and raise specialized exceptions instead.

    Args:
        error: The exception type to raise -- must inherit from GitlabError

    Returns:
        A decorator preserving the wrapped function's signature; any
        GitlabHttpError raised inside is re-raised as *error* with the
        original exception chained as ``__cause__``.
    """

    def wrap(f: __F) -> __F:
        @functools.wraps(f)
        def wrapped_f(*args: Any, **kwargs: Any) -> Any:
            try:
                return f(*args, **kwargs)
            except GitlabHttpError as e:
                # Re-raise under the caller-requested type, keeping the
                # HTTP details (message, status code, body) intact.
                raise error(e.error_message, e.response_code, e.response_body) from e

        return cast(__F, wrapped_f)

    return wrap
# Export manually to keep mypy happy: an explicit, alphabetically sorted
# __all__ makes the public exception surface checkable and star-import safe.
__all__ = [
    "GitlabActivateError",
    "GitlabAttachFileError",
    "GitlabAuthenticationError",
    "GitlabBanError",
    "GitlabBlockError",
    "GitlabBuildCancelError",
    "GitlabBuildEraseError",
    "GitlabBuildPlayError",
    "GitlabBuildRetryError",
    "GitlabCancelError",
    "GitlabCherryPickError",
    "GitlabCiLintError",
    "GitlabConnectionError",
    "GitlabCreateError",
    "GitlabDeactivateError",
    "GitlabDeleteError",
    "GitlabDeploymentApprovalError",
    "GitlabError",
    "GitlabFollowError",
    "GitlabGetError",
    "GitlabGroupTransferError",
    "GitlabHeadError",
    "GitlabHookTestError",
    "GitlabHousekeepingError",
    "GitlabHttpError",
    "GitlabImportError",
    "GitlabInvitationError",
    "GitlabJobCancelError",
    "GitlabJobEraseError",
    "GitlabJobPlayError",
    "GitlabJobRetryError",
    "GitlabLicenseError",
    "GitlabListError",
    "GitlabMRApprovalError",
    "GitlabMRClosedError",
    "GitlabMRForbiddenError",
    "GitlabMROnBuildSuccessError",
    "GitlabMRRebaseError",
    "GitlabMRResetApprovalError",
    "GitlabMarkdownError",
    "GitlabOperationError",
    "GitlabOwnershipError",
    "GitlabParsingError",
    "GitlabPipelineCancelError",
    "GitlabPipelinePlayError",
    "GitlabPipelineRetryError",
    "GitlabProjectDeployKeyError",
    "GitlabPromoteError",
    "GitlabProtectError",
    "GitlabRenderError",
    "GitlabRepairError",
    "GitlabRestoreError",
    "GitlabRetryError",
    "GitlabRevertError",
    "GitlabRotateError",
    "GitlabSearchError",
    "GitlabSetError",
    "GitlabStopError",
    "GitlabSubscribeError",
    "GitlabTimeTrackingError",
    "GitlabTodoError",
    "GitlabTopicMergeError",
    "GitlabTransferProjectError",
    "GitlabUnbanError",
    "GitlabUnblockError",
    "GitlabUnfollowError",
    "GitlabUnsubscribeError",
    "GitlabUpdateError",
    "GitlabUploadError",
    "GitlabUserApproveError",
    "GitlabUserRejectError",
    "GitlabVerifyError",
    "RedirectError",
]
| 8,390 | Python | .py | 270 | 26.022222 | 85 | 0.767144 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,060 | types.py | python-gitlab_python-gitlab/gitlab/types.py | import dataclasses
from typing import Any, Dict, List, Optional, Tuple, TYPE_CHECKING
@dataclasses.dataclass(frozen=True)
class RequiredOptional:
    """Declares which attributes an API call requires, accepts, or treats as
    mutually exclusive, and validates a payload against that contract."""

    required: Tuple[str, ...] = ()
    optional: Tuple[str, ...] = ()
    exclusive: Tuple[str, ...] = ()

    def validate_attrs(
        self,
        *,
        data: Dict[str, Any],
        excludes: Optional[List[str]] = None,
    ) -> None:
        """Raise AttributeError if *data* violates this attribute contract.

        Args:
            data: The attribute payload to check.
            excludes: Attribute names exempted from the required check.
        """
        excluded = [] if excludes is None else excludes

        if self.required:
            # Required attributes, minus the exempted ones, must all be present.
            needed = [attr for attr in self.required if attr not in excluded]
            absent = [attr for attr in needed if attr not in data]
            if absent:
                raise AttributeError(f"Missing attributes: {', '.join(absent)}")

        if self.exclusive:
            # Exactly one of the exclusive attributes must appear in the data.
            present = [attr for attr in data if attr in self.exclusive]
            if len(present) > 1:
                raise AttributeError(
                    f"Provide only one of these attributes: {', '.join(present)}"
                )
            if not present:
                raise AttributeError(
                    f"Must provide one of these attributes: "
                    f"{', '.join(self.exclusive)}"
                )
class GitlabAttribute:
    """Base wrapper for one attribute value, providing the conversions used
    when reading the value from the CLI and when sending it to the API.

    Subclasses override :meth:`set_from_cli` / :meth:`get_for_api` to apply
    type-specific (de)serialization; they read ``self._value`` directly.
    """

    def __init__(self, value: Any = None) -> None:
        self._value = value

    def get(self) -> Any:
        # Return the stored value unchanged.
        return self._value

    def set_from_cli(self, cli_value: Any) -> None:
        # Base behavior: store the CLI value verbatim.
        self._value = cli_value

    def get_for_api(self, *, key: str) -> Tuple[str, Any]:
        # Base behavior: send the value as-is under the given key.
        return (key, self._value)
class _ListArrayAttribute(GitlabAttribute):
    """Helper class to support `list` / `array` types."""

    def set_from_cli(self, cli_value: str) -> None:
        """Parse a comma-separated CLI string into a list of stripped items."""
        stripped = cli_value.strip()
        if not stripped:
            self._value = []
        else:
            self._value = [chunk.strip() for chunk in stripped.split(",")]

    def get_for_api(self, *, key: str) -> Tuple[str, str]:
        """Serialize the stored list back into a CSV string for the API."""
        # A single value already passed as a string is sent through untouched.
        if isinstance(self._value, str):
            return (key, self._value)

        if TYPE_CHECKING:
            assert isinstance(self._value, list)
        return (key, ",".join(str(item) for item in self._value))
class ArrayAttribute(_ListArrayAttribute):
    """To support `array` types as documented in
    https://docs.gitlab.com/ee/api/#array"""

    def get_for_api(self, *, key: str) -> Tuple[str, Any]:
        """Return the value unchanged under GitLab's ``key[]`` array name."""
        if TYPE_CHECKING and not isinstance(self._value, str):
            assert isinstance(self._value, list)
        # Strings and lists alike are sent as-is; only the key gains the
        # ``[]`` suffix that GitLab's array parameter syntax requires.
        return (f"{key}[]", self._value)
class CommaSeparatedListAttribute(_ListArrayAttribute):
    """For values which are sent to the server as a Comma Separated Values
    (CSV) string. We allow them to be specified as a list and we convert it
    into a CSV. All behavior is inherited from ``_ListArrayAttribute``."""
class LowercaseStringAttribute(GitlabAttribute):
    """Attribute that is always serialized as a lowercase string."""

    def get_for_api(self, *, key: str) -> Tuple[str, str]:
        # Coerce to str first so non-string values lowercase safely.
        return (key, str(self._value).lower())
class FileAttribute(GitlabAttribute):
    """Attribute representing an uploaded file."""

    @staticmethod
    def get_file_name(attr_name: Optional[str] = None) -> Optional[str]:
        # Base implementation: use the attribute name itself as the file name.
        return attr_name
class ImageAttribute(FileAttribute):
    """File attribute for images; always reports a ``.png`` file name."""

    @staticmethod
    def get_file_name(attr_name: Optional[str] = None) -> str:
        # Fall back to a generic name when no attribute name is given.
        return f"{attr_name}.png" if attr_name else "image.png"
| 3,312 | Python | .py | 78 | 33.782051 | 84 | 0.603056 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,061 | protocol.py | python-gitlab_python-gitlab/gitlab/_backends/protocol.py | import abc
import sys
from typing import Any, Dict, Optional, Union
import requests
from requests_toolbelt.multipart.encoder import MultipartEncoder # type: ignore
if sys.version_info >= (3, 8):
from typing import Protocol
else:
from typing_extensions import Protocol
class BackendResponse(Protocol):
    """Structural interface for a backend's response wrapper: anything
    constructible from a ``requests.Response`` qualifies."""

    @abc.abstractmethod
    def __init__(self, response: requests.Response) -> None: ...
class Backend(Protocol):
    """Structural interface an HTTP backend must implement.

    ``http_request`` mirrors the keyword surface that ``RequestsBackend``
    provides; alternative backends must accept the same arguments and
    return a :class:`BackendResponse`.
    """

    @abc.abstractmethod
    def http_request(
        self,
        method: str,
        url: str,
        json: Optional[Union[Dict[str, Any], bytes]],
        data: Optional[Union[Dict[str, Any], MultipartEncoder]],
        params: Optional[Any],
        timeout: Optional[float],
        verify: Optional[Union[bool, str]],
        stream: Optional[bool],
        **kwargs: Any,
    ) -> BackendResponse: ...
| 842 | Python | .py | 26 | 27 | 80 | 0.679012 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,062 | __init__.py | python-gitlab_python-gitlab/gitlab/_backends/__init__.py | """
Defines http backends for processing http requests
"""
from .requests_backend import (
JobTokenAuth,
OAuthTokenAuth,
PrivateTokenAuth,
RequestsBackend,
RequestsResponse,
)
# requests is the default (and currently the only bundled) HTTP backend.
DefaultBackend = RequestsBackend
DefaultResponse = RequestsResponse

__all__ = [
    "DefaultBackend",
    "DefaultResponse",
    "JobTokenAuth",
    "OAuthTokenAuth",
    "PrivateTokenAuth",
]
| 392 | Python | .py | 19 | 17.368421 | 50 | 0.745946 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,063 | graphql.py | python-gitlab_python-gitlab/gitlab/_backends/graphql.py | from typing import Any
import httpx
from gql.transport.httpx import HTTPXTransport
class GitlabTransport(HTTPXTransport):
    """A gql httpx transport that reuses an existing httpx.Client.

    By default, gql's transports do not have a keep-alive session
    and do not enable providing your own session that's kept open.
    This transport lets us provide and close our session on our own
    and provide additional auth.
    For details, see https://github.com/graphql-python/gql/issues/91.
    """

    def __init__(self, *args: Any, client: httpx.Client, **kwargs: Any):
        super().__init__(*args, **kwargs)
        self.client = client

    def connect(self) -> None:
        # Deliberate no-op: the injected client is already connected and
        # its lifecycle is managed by the caller, not by gql.
        pass

    def close(self) -> None:
        # Deliberate no-op: closing the shared client is the caller's job.
        pass
| 737 | Python | .py | 18 | 35.611111 | 72 | 0.702665 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,064 | requests_backend.py | python-gitlab_python-gitlab/gitlab/_backends/requests_backend.py | from __future__ import annotations
import dataclasses
from typing import Any, BinaryIO, Dict, Optional, TYPE_CHECKING, Union
import requests
from requests import PreparedRequest
from requests.auth import AuthBase
from requests.structures import CaseInsensitiveDict
from requests_toolbelt.multipart.encoder import MultipartEncoder # type: ignore
from . import protocol
class TokenAuth:
    """Base holder for a GitLab token, shared by the requests auth classes."""

    def __init__(self, token: str):
        self.token = token
class OAuthTokenAuth(TokenAuth, AuthBase):
    """Requests auth hook sending the token as an OAuth Bearer header."""

    def __call__(self, r: PreparedRequest) -> PreparedRequest:
        r.headers["Authorization"] = f"Bearer {self.token}"
        # Remove the competing auth headers so only one scheme is sent.
        r.headers.pop("PRIVATE-TOKEN", None)
        r.headers.pop("JOB-TOKEN", None)
        return r
class PrivateTokenAuth(TokenAuth, AuthBase):
    """Requests auth hook sending the token as a PRIVATE-TOKEN header."""

    def __call__(self, r: PreparedRequest) -> PreparedRequest:
        r.headers["PRIVATE-TOKEN"] = self.token
        # Remove the competing auth headers so only one scheme is sent.
        r.headers.pop("JOB-TOKEN", None)
        r.headers.pop("Authorization", None)
        return r
class JobTokenAuth(TokenAuth, AuthBase):
    """Requests auth hook sending the token as a JOB-TOKEN header."""

    def __call__(self, r: PreparedRequest) -> PreparedRequest:
        r.headers["JOB-TOKEN"] = self.token
        # Remove the competing auth headers so only one scheme is sent.
        r.headers.pop("PRIVATE-TOKEN", None)
        r.headers.pop("Authorization", None)
        return r
@dataclasses.dataclass
class SendData:
    """Bundle of a request body (``data`` XOR ``json``) plus its content type.

    Exactly one of ``data`` / ``json`` may carry the payload; supplying both
    is rejected at construction time.
    """

    content_type: str
    data: Optional[Union[Dict[str, Any], MultipartEncoder]] = None
    json: Optional[Union[Dict[str, Any], bytes]] = None

    def __post_init__(self) -> None:
        # Guard the invariant: only one body representation may be set.
        if self.data is not None and self.json is not None:
            raise ValueError(
                f"`json` and `data` are mutually exclusive. Only one can be set. "
                f"json={self.json!r} data={self.data!r}"
            )
class RequestsResponse(protocol.BackendResponse):
    """Thin adapter exposing a ``requests.Response`` through the backend
    response protocol; every property simply delegates."""

    def __init__(self, response: requests.Response) -> None:
        self._response: requests.Response = response

    @property
    def response(self) -> requests.Response:
        # Escape hatch: access to the underlying requests object.
        return self._response

    @property
    def status_code(self) -> int:
        return self._response.status_code

    @property
    def headers(self) -> CaseInsensitiveDict[str]:
        return self._response.headers

    @property
    def content(self) -> bytes:
        return self._response.content

    @property
    def reason(self) -> str:
        return self._response.reason

    def json(self) -> Any:
        return self._response.json()
class RequestsBackend(protocol.Backend):
    """HTTP backend implemented on top of a ``requests.Session``."""

    def __init__(self, session: Optional[requests.Session] = None) -> None:
        # Reuse the caller's session (keep-alive, shared auth/cookies) or
        # create a fresh one.
        self._client: requests.Session = session or requests.Session()

    @property
    def client(self) -> requests.Session:
        return self._client

    @staticmethod
    def prepare_send_data(
        files: Optional[Dict[str, Any]] = None,
        post_data: Optional[Union[Dict[str, Any], bytes, BinaryIO]] = None,
        raw: bool = False,
    ) -> SendData:
        """Build the SendData describing how the request body is encoded.

        With *files* present a multipart encoder is used; with *raw* the
        payload is sent as an opaque octet-stream; otherwise it is sent
        as JSON.
        """
        if files:
            if post_data is None:
                post_data = {}
            else:
                # When creating a `MultipartEncoder` instance with data-types
                # which don't have an `encode` method it will cause an error:
                #        object has no attribute 'encode'
                # So convert common non-string types into strings.
                if TYPE_CHECKING:
                    assert isinstance(post_data, dict)
                for k, v in post_data.items():
                    if isinstance(v, bool):
                        v = int(v)
                    if isinstance(v, (complex, float, int)):
                        post_data[k] = str(v)
            post_data["file"] = files.get("file")
            post_data["avatar"] = files.get("avatar")

            data = MultipartEncoder(fields=post_data)
            return SendData(data=data, content_type=data.content_type)

        if raw and post_data:
            return SendData(data=post_data, content_type="application/octet-stream")

        if TYPE_CHECKING:
            assert not isinstance(post_data, BinaryIO)
        return SendData(json=post_data, content_type="application/json")

    def http_request(
        self,
        method: str,
        url: str,
        json: Optional[Union[Dict[str, Any], bytes]] = None,
        data: Optional[Union[Dict[str, Any], MultipartEncoder]] = None,
        params: Optional[Any] = None,
        timeout: Optional[float] = None,
        verify: Optional[Union[bool, str]] = True,
        stream: Optional[bool] = False,
        **kwargs: Any,
    ) -> RequestsResponse:
        """Make HTTP request

        Args:
            method: The HTTP method to call ('get', 'post', 'put', 'delete', etc.)
            url: The full URL
            data: The data to send to the server in the body of the request
            json: Data to send in the body in json by default
            params: URL query parameters
            timeout: The timeout, in seconds, for the request
            verify: Whether SSL certificates should be validated. If
                the value is a string, it is the path to a CA file used for
                certificate validation.
            stream: Whether the data should be streamed

        Returns:
            A requests Response object.
        """
        response: requests.Response = self._client.request(
            method=method,
            url=url,
            params=params,
            data=data,
            timeout=timeout,
            stream=stream,
            verify=verify,
            json=json,
            **kwargs,
        )
        return RequestsResponse(response=response)
| 5,534 | Python | .py | 135 | 31.62963 | 84 | 0.615356 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,065 | cli.py | python-gitlab_python-gitlab/gitlab/v4/cli.py | import argparse
import json
import operator
import sys
from typing import Any, Dict, List, Optional, Type, TYPE_CHECKING, Union
import gitlab
import gitlab.base
import gitlab.v4.objects
from gitlab import cli
from gitlab.exceptions import GitlabCiLintError
class GitlabCLI:
    """Resolves a CLI (resource, action, args) triple to the matching manager
    class and executes the requested operation against the GitLab server."""

    def __init__(
        self,
        gl: gitlab.Gitlab,
        gitlab_resource: str,
        resource_action: str,
        args: Dict[str, str],
    ) -> None:
        self.cls: Type[gitlab.base.RESTObject] = cli.gitlab_resource_to_cls(
            gitlab_resource, namespace=gitlab.v4.objects
        )
        self.cls_name = self.cls.__name__
        self.gitlab_resource = gitlab_resource.replace("-", "_")
        self.resource_action = resource_action.lower()
        self.gl = gl
        self.args = args
        self.parent_args: Dict[str, Any] = {}
        self.mgr_cls: Union[
            Type[gitlab.mixins.CreateMixin],
            Type[gitlab.mixins.DeleteMixin],
            Type[gitlab.mixins.GetMixin],
            Type[gitlab.mixins.GetWithoutIdMixin],
            Type[gitlab.mixins.ListMixin],
            Type[gitlab.mixins.UpdateMixin],
        ] = getattr(gitlab.v4.objects, f"{self.cls.__name__}Manager")
        # We could do something smart, like splitting the manager name to find
        # parents, build the chain of managers to get to the final object.
        # Instead we do something ugly and efficient: interpolate variables in
        # the class _path attribute, and replace the value with the result.
        if TYPE_CHECKING:
            assert self.mgr_cls._path is not None

        self._process_from_parent_attrs()

        self.mgr_cls._path = self.mgr_cls._path.format(**self.parent_args)
        self.mgr = self.mgr_cls(gl)
        self.mgr._from_parent_attrs = self.parent_args
        if self.mgr_cls._types:
            # Convert CLI string arguments through their declared attribute
            # types (e.g. CSV strings into lists) before sending them on.
            for attr_name, type_cls in self.mgr_cls._types.items():
                if attr_name in self.args.keys():
                    obj = type_cls()
                    obj.set_from_cli(self.args[attr_name])
                    self.args[attr_name] = obj.get()

    def _process_from_parent_attrs(self) -> None:
        """Items in the path need to be url-encoded. There is a 1:1 mapping from
        mgr_cls._from_parent_attrs <--> mgr_cls._path. Those values must be url-encoded
        as they may contain a slash '/'."""
        for key in self.mgr_cls._from_parent_attrs:
            if key not in self.args:
                continue

            self.parent_args[key] = gitlab.utils.EncodedId(self.args[key])
            # If we don't delete it then it will be added to the URL as a query-string
            del self.args[key]

    def run(self) -> Any:
        """Dispatch to the most specific handler method available."""
        # Check for a method that matches gitlab_resource + action
        method = f"do_{self.gitlab_resource}_{self.resource_action}"
        if hasattr(self, method):
            return getattr(self, method)()

        # Fallback to standard actions (get, list, create, ...)
        method = f"do_{self.resource_action}"
        if hasattr(self, method):
            return getattr(self, method)()

        # Finally try to find custom methods
        return self.do_custom()

    def do_custom(self) -> Any:
        """Invoke a registered custom action on the object or its manager."""
        class_instance: Union[gitlab.base.RESTManager, gitlab.base.RESTObject]
        in_obj = cli.custom_actions[self.cls_name][self.resource_action].in_object

        # Get the object (lazy), then act
        if in_obj:
            data = {}
            if self.mgr._from_parent_attrs:
                for k in self.mgr._from_parent_attrs:
                    data[k] = self.parent_args[k]
            if not issubclass(self.cls, gitlab.mixins.GetWithoutIdMixin):
                if TYPE_CHECKING:
                    assert isinstance(self.cls._id_attr, str)
                data[self.cls._id_attr] = self.args.pop(self.cls._id_attr)
            class_instance = self.cls(self.mgr, data)
        else:
            class_instance = self.mgr

        method_name = self.resource_action.replace("-", "_")
        return getattr(class_instance, method_name)(**self.args)

    def do_project_export_download(self) -> None:
        """Stream a finished project export archive to stdout."""
        try:
            project = self.gl.projects.get(self.parent_args["project_id"], lazy=True)
            export_status = project.exports.get()
            if TYPE_CHECKING:
                assert export_status is not None
            data = export_status.download()
            if TYPE_CHECKING:
                assert data is not None
                assert isinstance(data, bytes)
            # Binary payload: bypass the text layer of stdout.
            sys.stdout.buffer.write(data)
        except Exception as e:  # pragma: no cover, cli.die is unit-tested
            cli.die("Impossible to download the export", e)

    def do_validate(self) -> None:
        """Run CI lint validation, translating failures into CLI errors."""
        if TYPE_CHECKING:
            assert isinstance(self.mgr, gitlab.v4.objects.CiLintManager)
        try:
            self.mgr.validate(self.args)
        except GitlabCiLintError as e:  # pragma: no cover, cli.die is unit-tested
            cli.die("CI YAML Lint failed", e)
        except Exception as e:  # pragma: no cover, cli.die is unit-tested
            cli.die("Cannot validate CI YAML", e)

    def do_create(self) -> gitlab.base.RESTObject:
        """Create a resource from the CLI arguments."""
        if TYPE_CHECKING:
            assert isinstance(self.mgr, gitlab.mixins.CreateMixin)
        try:
            result = self.mgr.create(self.args)
        except Exception as e:  # pragma: no cover, cli.die is unit-tested
            cli.die("Impossible to create object", e)
        return result

    def do_list(
        self,
    ) -> Union[gitlab.base.RESTObjectList, List[gitlab.base.RESTObject]]:
        """List resources, warning when pagination truncates the result."""
        if TYPE_CHECKING:
            assert isinstance(self.mgr, gitlab.mixins.ListMixin)
        message_details = gitlab.utils.WarnMessageData(
            message=(
                "Your query returned {len_items} of {total_items} items. To return all "
                "items use `--get-all`. To silence this warning use `--no-get-all`."
            ),
            show_caller=False,
        )

        try:
            result = self.mgr.list(**self.args, message_details=message_details)
        except Exception as e:  # pragma: no cover, cli.die is unit-tested
            cli.die("Impossible to list objects", e)
        return result

    def do_get(self) -> Optional[gitlab.base.RESTObject]:
        """Fetch one resource, with or without an id depending on the mixin."""
        if isinstance(self.mgr, gitlab.mixins.GetWithoutIdMixin):
            try:
                result = self.mgr.get(id=None, **self.args)
            except Exception as e:  # pragma: no cover, cli.die is unit-tested
                cli.die("Impossible to get object", e)
            return result

        if TYPE_CHECKING:
            assert isinstance(self.mgr, gitlab.mixins.GetMixin)
            assert isinstance(self.cls._id_attr, str)

        id = self.args.pop(self.cls._id_attr)
        try:
            result = self.mgr.get(id, lazy=False, **self.args)
        except Exception as e:  # pragma: no cover, cli.die is unit-tested
            cli.die("Impossible to get object", e)
        return result

    def do_delete(self) -> None:
        """Delete the resource identified by its id attribute."""
        if TYPE_CHECKING:
            assert isinstance(self.mgr, gitlab.mixins.DeleteMixin)
            assert isinstance(self.cls._id_attr, str)
        id = self.args.pop(self.cls._id_attr)
        try:
            self.mgr.delete(id, **self.args)
        except Exception as e:  # pragma: no cover, cli.die is unit-tested
            cli.die("Impossible to destroy object", e)

    def do_update(self) -> Dict[str, Any]:
        """Update a resource; id-less managers pass ``None`` as the id."""
        if TYPE_CHECKING:
            assert isinstance(self.mgr, gitlab.mixins.UpdateMixin)
        if issubclass(self.mgr_cls, gitlab.mixins.GetWithoutIdMixin):
            id = None
        else:
            if TYPE_CHECKING:
                assert isinstance(self.cls._id_attr, str)
            id = self.args.pop(self.cls._id_attr)

        try:
            result = self.mgr.update(id, self.args)
        except Exception as e:  # pragma: no cover, cli.die is unit-tested
            cli.die("Impossible to update object", e)
        return result
# argparse._SubParsersAction is only subscriptable in typeshed stubs, not at
# runtime, so the precise alias is limited to type-checking time.
# https://github.com/python/typeshed/issues/7539#issuecomment-1076581049
if TYPE_CHECKING:
    _SubparserType = argparse._SubParsersAction[argparse.ArgumentParser]
else:
    _SubparserType = Any
def _populate_sub_parser_by_class(
    cls: Type[gitlab.base.RESTObject],
    sub_parser: _SubparserType,
) -> None:
    """Register the standard and custom action sub-parsers for one resource.

    Args:
        cls: The RESTObject subclass the sub-parsers are built for.
        sub_parser: The argparse sub-parsers container to populate.
    """
    mgr_cls_name = f"{cls.__name__}Manager"
    mgr_cls = getattr(gitlab.v4.objects, mgr_cls_name)

    action_parsers: Dict[str, argparse.ArgumentParser] = {}
    # Standard CRUD actions, added only if the manager mixes in the matching
    # capability (hasattr check below).
    for action_name, help_text in [
        ("list", "List the GitLab resources"),
        ("get", "Get a GitLab resource"),
        ("create", "Create a GitLab resource"),
        ("update", "Update a GitLab resource"),
        ("delete", "Delete a GitLab resource"),
    ]:
        if not hasattr(mgr_cls, action_name):
            continue

        sub_parser_action = sub_parser.add_parser(
            action_name,
            conflict_handler="resolve",
            help=help_text,
        )
        action_parsers[action_name] = sub_parser_action
        sub_parser_action.add_argument("--sudo", required=False)
        if mgr_cls._from_parent_attrs:
            # Parent ids (e.g. --project-id) are needed to build the URL path.
            for x in mgr_cls._from_parent_attrs:
                sub_parser_action.add_argument(
                    f"--{x.replace('_', '-')}", required=True
                )

        if action_name == "list":
            for x in mgr_cls._list_filters:
                sub_parser_action.add_argument(
                    f"--{x.replace('_', '-')}", required=False
                )

            sub_parser_action.add_argument("--page", required=False, type=int)
            sub_parser_action.add_argument("--per-page", required=False, type=int)
            # --get-all / --no-get-all are tri-state: None means "unspecified".
            get_all_group = sub_parser_action.add_mutually_exclusive_group()
            get_all_group.add_argument(
                "--get-all",
                required=False,
                action="store_const",
                const=True,
                default=None,
                dest="get_all",
                help="Return all items from the server, without pagination.",
            )
            get_all_group.add_argument(
                "--no-get-all",
                required=False,
                action="store_const",
                const=False,
                default=None,
                dest="get_all",
                help="Don't return all items from the server.",
            )

        if action_name == "delete":
            if cls._id_attr is not None:
                id_attr = cls._id_attr.replace("_", "-")
                sub_parser_action.add_argument(f"--{id_attr}", required=True)

        if action_name == "get":
            if not issubclass(cls, gitlab.mixins.GetWithoutIdMixin):
                if cls._id_attr is not None:
                    id_attr = cls._id_attr.replace("_", "-")
                    sub_parser_action.add_argument(f"--{id_attr}", required=True)

            for x in mgr_cls._optional_get_attrs:
                sub_parser_action.add_argument(
                    f"--{x.replace('_', '-')}", required=False
                )

        if action_name == "create":
            for x in mgr_cls._create_attrs.required:
                sub_parser_action.add_argument(
                    f"--{x.replace('_', '-')}", required=True
                )
            for x in mgr_cls._create_attrs.optional:
                sub_parser_action.add_argument(
                    f"--{x.replace('_', '-')}", required=False
                )
            if mgr_cls._create_attrs.exclusive:
                group = sub_parser_action.add_mutually_exclusive_group()
                for x in mgr_cls._create_attrs.exclusive:
                    group.add_argument(f"--{x.replace('_', '-')}")

        if action_name == "update":
            if cls._id_attr is not None:
                id_attr = cls._id_attr.replace("_", "-")
                sub_parser_action.add_argument(f"--{id_attr}", required=True)

            for x in mgr_cls._update_attrs.required:
                if x != cls._id_attr:
                    sub_parser_action.add_argument(
                        f"--{x.replace('_', '-')}", required=True
                    )

            for x in mgr_cls._update_attrs.optional:
                if x != cls._id_attr:
                    sub_parser_action.add_argument(
                        f"--{x.replace('_', '-')}", required=False
                    )

            if mgr_cls._update_attrs.exclusive:
                group = sub_parser_action.add_mutually_exclusive_group()
                for x in mgr_cls._update_attrs.exclusive:
                    group.add_argument(f"--{x.replace('_', '-')}")

    # Custom actions registered on the object class itself.
    if cls.__name__ in cli.custom_actions:
        name = cls.__name__
        for action_name in cli.custom_actions[name]:
            custom_action = cli.custom_actions[name][action_name]
            # NOTE(jlvillal): If we put a function for the `default` value of
            # the `get` it will always get called, which will break things.
            action_parser = action_parsers.get(action_name)
            if action_parser is None:
                sub_parser_action = sub_parser.add_parser(
                    action_name, help=custom_action.help
                )
            else:
                sub_parser_action = action_parser

            # Get the attributes for URL/path construction
            if mgr_cls._from_parent_attrs:
                for x in mgr_cls._from_parent_attrs:
                    sub_parser_action.add_argument(
                        f"--{x.replace('_', '-')}", required=True
                    )
            sub_parser_action.add_argument("--sudo", required=False)

            # We need to get the object somehow
            if not issubclass(cls, gitlab.mixins.GetWithoutIdMixin):
                if cls._id_attr is not None and custom_action.requires_id:
                    id_attr = cls._id_attr.replace("_", "-")
                    sub_parser_action.add_argument(f"--{id_attr}", required=True)

            for x in custom_action.required:
                if x != cls._id_attr:
                    sub_parser_action.add_argument(
                        f"--{x.replace('_', '-')}", required=True
                    )
            for x in custom_action.optional:
                if x != cls._id_attr:
                    sub_parser_action.add_argument(
                        f"--{x.replace('_', '-')}", required=False
                    )

    # Custom actions registered on the manager class.
    if mgr_cls.__name__ in cli.custom_actions:
        name = mgr_cls.__name__
        for action_name in cli.custom_actions[name]:
            # NOTE(jlvillal): If we put a function for the `default` value of
            # the `get` it will always get called, which will break things.
            action_parser = action_parsers.get(action_name)
            if action_parser is None:
                sub_parser_action = sub_parser.add_parser(action_name)
            else:
                sub_parser_action = action_parser

            if mgr_cls._from_parent_attrs:
                for x in mgr_cls._from_parent_attrs:
                    sub_parser_action.add_argument(
                        f"--{x.replace('_', '-')}", required=True
                    )
                sub_parser_action.add_argument("--sudo", required=False)

            custom_action = cli.custom_actions[name][action_name]
            for x in custom_action.required:
                if x != cls._id_attr:
                    sub_parser_action.add_argument(
                        f"--{x.replace('_', '-')}", required=True
                    )
            for x in custom_action.optional:
                if x != cls._id_attr:
                    sub_parser_action.add_argument(
                        f"--{x.replace('_', '-')}", required=False
                    )
def extend_parser(parser: argparse.ArgumentParser) -> argparse.ArgumentParser:
    """Add one sub-parser per GitLab resource class to the CLI parser.

    Args:
        parser: The root argparse parser to extend.

    Returns:
        The same parser, with resource/action sub-parsers attached.
    """
    subparsers = parser.add_subparsers(
        title="resource",
        dest="gitlab_resource",
        help="The GitLab resource to manipulate.",
    )
    subparsers.required = True

    # populate argparse for all Gitlab Object
    classes = set()
    for cls in gitlab.v4.objects.__dict__.values():
        if not isinstance(cls, type):
            continue
        if issubclass(cls, gitlab.base.RESTManager):
            if cls._obj_cls is not None:
                classes.add(cls._obj_cls)

    for cls in sorted(classes, key=operator.attrgetter("__name__")):
        arg_name = cli.cls_to_gitlab_resource(cls)
        mgr_cls_name = f"{cls.__name__}Manager"
        mgr_cls = getattr(gitlab.v4.objects, mgr_cls_name)
        object_group = subparsers.add_parser(
            arg_name,
            help=f"API endpoint: {mgr_cls._path}",
        )

        object_subparsers = object_group.add_subparsers(
            title="action",
            dest="resource_action",
            help="Action to execute on the GitLab resource.",
        )
        _populate_sub_parser_by_class(cls, object_subparsers)
        object_subparsers.required = True

    return parser
def get_dict(
    obj: Union[str, Dict[str, Any], gitlab.base.RESTObject], fields: List[str]
) -> Union[str, Dict[str, Any]]:
    """Reduce a REST object to a plain dict, optionally limited to *fields*.

    Non-RESTObject inputs (strings, dicts) are returned unchanged.
    """
    if not isinstance(obj, gitlab.base.RESTObject):
        return obj

    if fields:
        return {k: v for k, v in obj.attributes.items() if k in fields}
    return obj.attributes
class JSONPrinter:
    """Print CLI results to stdout as compact JSON."""

    @staticmethod
    def display(d: Union[str, Dict[str, Any]], **_kwargs: Any) -> None:
        """Dump a single result as one JSON document."""
        print(json.dumps(d))

    @staticmethod
    def display_list(
        data: List[Union[str, Dict[str, Any], gitlab.base.RESTObject]],
        fields: List[str],
        **_kwargs: Any,
    ) -> None:
        """Dump a list of results as one JSON array, reduced to *fields*."""
        serializable = [get_dict(entry, fields) for entry in data]
        print(json.dumps(serializable))
class YAMLPrinter:
    """Print CLI results to stdout as YAML; exits with a hint if PyYaml is
    not installed (the import is deferred so it stays an optional extra)."""

    @staticmethod
    def display(d: Union[str, Dict[str, Any]], **_kwargs: Any) -> None:
        try:
            import yaml  # noqa

            print(yaml.safe_dump(d, default_flow_style=False))
        except ImportError:
            sys.exit(
                "PyYaml is not installed.\n"
                "Install it with `pip install PyYaml` "
                "to use the yaml output feature"
            )

    @staticmethod
    def display_list(
        data: List[Union[str, Dict[str, Any], gitlab.base.RESTObject]],
        fields: List[str],
        **_kwargs: Any,
    ) -> None:
        try:
            import yaml  # noqa

            print(
                yaml.safe_dump(
                    # Reduce each entry to a plain dict limited to *fields*.
                    [get_dict(obj, fields) for obj in data], default_flow_style=False
                )
            )
        except ImportError:
            sys.exit(
                "PyYaml is not installed.\n"
                "Install it with `pip install PyYaml` "
                "to use the yaml output feature"
            )
class LegacyPrinter:
    """Human-readable printer: key/value lines with indentation for nesting.

    In verbose mode the whole attribute tree is printed; otherwise only the
    id attribute and a short representative attribute are shown.
    """

    def display(self, _d: Union[str, Dict[str, Any]], **kwargs: Any) -> None:
        # The actual payload is passed via the `obj` kwarg; `_d` is unused
        # (kept for printer-interface compatibility).
        verbose = kwargs.get("verbose", False)
        padding = kwargs.get("padding", 0)
        obj: Optional[Union[Dict[str, Any], gitlab.base.RESTObject]] = kwargs.get("obj")
        if TYPE_CHECKING:
            assert obj is not None

        def display_dict(d: Dict[str, Any], padding: int) -> None:
            # Print keys sorted, recursing (via self.display) into nested dicts
            # with two extra spaces of indentation.
            for k in sorted(d.keys()):
                v = d[k]
                if isinstance(v, dict):
                    print(f"{' ' * padding}{k.replace('_', '-')}:")
                    new_padding = padding + 2

                    self.display(v, verbose=True, padding=new_padding, obj=v)
                    continue
                print(f"{' ' * padding}{k.replace('_', '-')}: {v}")

        if verbose:
            if isinstance(obj, dict):
                display_dict(obj, padding)
                return

            # not a dict, we assume it's a RESTObject
            if obj._id_attr:
                id = getattr(obj, obj._id_attr, None)
                print(f"{obj._id_attr}: {id}")
            attrs = obj.attributes
            if obj._id_attr:
                # The id was already printed above; drop it from the dump.
                attrs.pop(obj._id_attr)
            display_dict(attrs, padding)
            return

        lines = []

        if TYPE_CHECKING:
            assert isinstance(obj, gitlab.base.RESTObject)

        if obj._id_attr:
            id = getattr(obj, obj._id_attr)
            lines.append(f"{obj._id_attr.replace('_', '-')}: {id}")
        if obj._repr_attr:
            value = getattr(obj, obj._repr_attr, "None") or "None"
            value = value.replace("\r", "").replace("\n", " ")
            # If the attribute is a note (ProjectCommitComment) then we do
            # some modifications to fit everything on one line
            line = f"{obj._repr_attr}: {value}"
            # ellipsize long lines (comments)
            if len(line) > 79:
                line = f"{line[:76]}..."
            lines.append(line)

        if lines:
            print("\n".join(lines))
            return

        print(
            f"No default fields to show for {obj!r}. "
            f"Please use '--verbose' or the JSON/YAML formatters."
        )

    def display_list(
        self,
        data: List[Union[str, gitlab.base.RESTObject]],
        fields: List[str],
        **kwargs: Any,
    ) -> None:
        # Print each entry followed by a blank separator line.
        verbose = kwargs.get("verbose", False)
        for obj in data:
            if isinstance(obj, gitlab.base.RESTObject):
                self.display(get_dict(obj, fields), verbose=verbose, obj=obj)
            else:
                print(obj)
            print("")
# Maps the --output CLI choice to its printer implementation.
PRINTERS: Dict[
    str, Union[Type[JSONPrinter], Type[LegacyPrinter], Type[YAMLPrinter]]
] = {
    "json": JSONPrinter,
    "legacy": LegacyPrinter,
    "yaml": YAMLPrinter,
}
def run(
    gl: gitlab.Gitlab,
    gitlab_resource: str,
    resource_action: str,
    args: Dict[str, Any],
    verbose: bool,
    output: str,
    fields: List[str],
) -> None:
    """Execute one CLI action and print its result with the chosen printer.

    Args:
        gl: The authenticated Gitlab client.
        gitlab_resource: Resource name as given on the command line.
        resource_action: Action name (get, list, create, custom, ...).
        args: Remaining parsed CLI arguments for the action.
        verbose: Whether the printer should show full details.
        output: Printer key ("json", "legacy" or "yaml").
        fields: Attribute names to restrict the output to.
    """
    g_cli = GitlabCLI(
        gl=gl,
        gitlab_resource=gitlab_resource,
        resource_action=resource_action,
        args=args,
    )
    data = g_cli.run()

    printer: Union[JSONPrinter, LegacyPrinter, YAMLPrinter] = PRINTERS[output]()

    # Dispatch on the shape of the result; bytes bypass the text layer.
    if isinstance(data, dict):
        printer.display(data, verbose=True, obj=data)
    elif isinstance(data, list):
        printer.display_list(data, fields, verbose=verbose)
    elif isinstance(data, gitlab.base.RESTObjectList):
        printer.display_list(list(data), fields, verbose=verbose)
    elif isinstance(data, gitlab.base.RESTObject):
        printer.display(get_dict(data, fields), verbose=verbose, obj=data)
    elif isinstance(data, str):
        print(data)
    elif isinstance(data, bytes):
        sys.stdout.buffer.write(data)
    elif hasattr(data, "decode"):
        print(data.decode())
| 22,721 | Python | .py | 525 | 31.619048 | 88 | 0.560997 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,066 | artifacts.py | python-gitlab_python-gitlab/gitlab/v4/objects/artifacts.py | """
GitLab API:
https://docs.gitlab.com/ee/api/job_artifacts.html
"""
from typing import Any, Callable, Iterator, Optional, TYPE_CHECKING, Union
import requests
from gitlab import cli
from gitlab import exceptions as exc
from gitlab import utils
from gitlab.base import RESTManager, RESTObject
__all__ = ["ProjectArtifact", "ProjectArtifactManager"]
class ProjectArtifact(RESTObject):
    """Dummy object to manage custom actions on artifacts"""
    # Artifacts are addressed by the ref (branch/tag) they were built from.
    _id_attr = "ref_name"
class ProjectArtifactManager(RESTManager):
    """Manager for a project's job artifacts: archive download, single-file
    access, and deletion."""
    _obj_cls = ProjectArtifact
    _path = "/projects/{project_id}/jobs/artifacts"
    _from_parent_attrs = {"project_id": "id"}
    @exc.on_http_error(exc.GitlabDeleteError)
    def delete(self, **kwargs: Any) -> None:
        """Delete the project's artifacts on the server.
        Args:
            **kwargs: Extra options to send to the server (e.g. sudo)
        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabDeleteError: If the server cannot perform the request
        """
        # Deletion uses a different endpoint than this manager's _path.
        path = self._compute_path("/projects/{project_id}/artifacts")
        if TYPE_CHECKING:
            assert path is not None
        self.gitlab.http_delete(path, **kwargs)
    @cli.register_custom_action(
        cls_names="ProjectArtifactManager",
        required=("ref_name", "job"),
        optional=("job_token",),
    )
    @exc.on_http_error(exc.GitlabGetError)
    def download(
        self,
        ref_name: str,
        job: str,
        streamed: bool = False,
        action: Optional[Callable[[bytes], None]] = None,
        chunk_size: int = 1024,
        *,
        iterator: bool = False,
        **kwargs: Any,
    ) -> Optional[Union[bytes, Iterator[Any]]]:
        """Get the job artifacts archive from a specific tag or branch.
        Args:
            ref_name: Branch or tag name in repository. HEAD or SHA references
                are not supported.
            job: The name of the job.
            job_token: Job token for multi-project pipeline triggers.
            streamed: If True the data will be processed by chunks of
                `chunk_size` and each chunk is passed to `action` for
                treatment
            iterator: If True directly return the underlying response
                iterator
            action: Callable responsible of dealing with chunk of
                data
            chunk_size: Size of each chunk
            **kwargs: Extra options to send to the server (e.g. sudo)
        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabGetError: If the artifacts could not be retrieved
        Returns:
            The artifacts if `streamed` is False, None otherwise.
        """
        path = f"{self.path}/{ref_name}/download"
        # raw=True returns the Response so the body can be streamed/chunked.
        result = self.gitlab.http_get(
            path, job=job, streamed=streamed, raw=True, **kwargs
        )
        if TYPE_CHECKING:
            assert isinstance(result, requests.Response)
        return utils.response_content(
            result, streamed, action, chunk_size, iterator=iterator
        )
    @cli.register_custom_action(
        cls_names="ProjectArtifactManager",
        required=("ref_name", "artifact_path", "job"),
    )
    @exc.on_http_error(exc.GitlabGetError)
    def raw(
        self,
        ref_name: str,
        artifact_path: str,
        job: str,
        streamed: bool = False,
        action: Optional[Callable[[bytes], None]] = None,
        chunk_size: int = 1024,
        *,
        iterator: bool = False,
        **kwargs: Any,
    ) -> Optional[Union[bytes, Iterator[Any]]]:
        """Download a single artifact file from a specific tag or branch from
        within the job's artifacts archive.
        Args:
            ref_name: Branch or tag name in repository. HEAD or SHA references
                are not supported.
            artifact_path: Path to a file inside the artifacts archive.
            job: The name of the job.
            streamed: If True the data will be processed by chunks of
                `chunk_size` and each chunk is passed to `action` for
                treatment
            iterator: If True directly return the underlying response
                iterator
            action: Callable responsible of dealing with chunk of
                data
            chunk_size: Size of each chunk
            **kwargs: Extra options to send to the server (e.g. sudo)
        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabGetError: If the artifacts could not be retrieved
        Returns:
            The artifact if `streamed` is False, None otherwise.
        """
        path = f"{self.path}/{ref_name}/raw/{artifact_path}"
        result = self.gitlab.http_get(
            path, streamed=streamed, raw=True, job=job, **kwargs
        )
        if TYPE_CHECKING:
            assert isinstance(result, requests.Response)
        return utils.response_content(
            result, streamed, action, chunk_size, iterator=iterator
        )
| 5,098 | Python | .py | 126 | 31.007937 | 78 | 0.620279 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,067 | users.py | python-gitlab_python-gitlab/gitlab/v4/objects/users.py | """
GitLab API:
https://docs.gitlab.com/ee/api/users.html
https://docs.gitlab.com/ee/api/projects.html#list-projects-starred-by-a-user
"""
from typing import Any, cast, Dict, List, Optional, Union
import requests
from gitlab import cli
from gitlab import exceptions as exc
from gitlab import types
from gitlab.base import RESTManager, RESTObject, RESTObjectList
from gitlab.mixins import (
CreateMixin,
CRUDMixin,
DeleteMixin,
GetWithoutIdMixin,
ListMixin,
NoUpdateMixin,
ObjectDeleteMixin,
RetrieveMixin,
SaveMixin,
UpdateMixin,
)
from gitlab.types import ArrayAttribute, RequiredOptional
from .custom_attributes import UserCustomAttributeManager # noqa: F401
from .events import UserEventManager # noqa: F401
from .personal_access_tokens import UserPersonalAccessTokenManager # noqa: F401
__all__ = [
"CurrentUserEmail",
"CurrentUserEmailManager",
"CurrentUserGPGKey",
"CurrentUserGPGKeyManager",
"CurrentUserKey",
"CurrentUserKeyManager",
"CurrentUserRunner",
"CurrentUserRunnerManager",
"CurrentUserStatus",
"CurrentUserStatusManager",
"CurrentUser",
"CurrentUserManager",
"User",
"UserManager",
"ProjectUser",
"ProjectUserManager",
"StarredProject",
"StarredProjectManager",
"UserEmail",
"UserEmailManager",
"UserActivities",
"UserStatus",
"UserStatusManager",
"UserActivitiesManager",
"UserGPGKey",
"UserGPGKeyManager",
"UserKey",
"UserKeyManager",
"UserIdentityProviderManager",
"UserImpersonationToken",
"UserImpersonationTokenManager",
"UserMembership",
"UserMembershipManager",
"UserProject",
"UserProjectManager",
]
class CurrentUserEmail(ObjectDeleteMixin, RESTObject):
    """An email address on the authenticated user's account."""
    _repr_attr = "email"
class CurrentUserEmailManager(RetrieveMixin, CreateMixin, DeleteMixin, RESTManager):
    """Manager for the authenticated user's emails (``/user/emails``)."""
    _path = "/user/emails"
    _obj_cls = CurrentUserEmail
    _create_attrs = RequiredOptional(required=("email",))
    def get(
        self, id: Union[str, int], lazy: bool = False, **kwargs: Any
    ) -> CurrentUserEmail:
        # Override only narrows the return type for static checkers.
        return cast(CurrentUserEmail, super().get(id=id, lazy=lazy, **kwargs))
class CurrentUserGPGKey(ObjectDeleteMixin, RESTObject):
    """A GPG key on the authenticated user's account."""
    pass
class CurrentUserGPGKeyManager(RetrieveMixin, CreateMixin, DeleteMixin, RESTManager):
    """Manager for the authenticated user's GPG keys (``/user/gpg_keys``)."""
    _path = "/user/gpg_keys"
    _obj_cls = CurrentUserGPGKey
    _create_attrs = RequiredOptional(required=("key",))
    def get(
        self, id: Union[str, int], lazy: bool = False, **kwargs: Any
    ) -> CurrentUserGPGKey:
        # Override only narrows the return type for static checkers.
        return cast(CurrentUserGPGKey, super().get(id=id, lazy=lazy, **kwargs))
class CurrentUserKey(ObjectDeleteMixin, RESTObject):
    """An SSH key on the authenticated user's account."""
    _repr_attr = "title"
class CurrentUserKeyManager(RetrieveMixin, CreateMixin, DeleteMixin, RESTManager):
    """Manager for the authenticated user's SSH keys (``/user/keys``)."""
    _path = "/user/keys"
    _obj_cls = CurrentUserKey
    _create_attrs = RequiredOptional(required=("title", "key"))
    def get(
        self, id: Union[str, int], lazy: bool = False, **kwargs: Any
    ) -> CurrentUserKey:
        # Override only narrows the return type for static checkers.
        return cast(CurrentUserKey, super().get(id=id, lazy=lazy, **kwargs))
class CurrentUserRunner(RESTObject):
    """A CI runner created by the authenticated user."""
    pass
class CurrentUserRunnerManager(CreateMixin, RESTManager):
    """Create-only manager for user-owned runners (``POST /user/runners``)."""
    _path = "/user/runners"
    _obj_cls = CurrentUserRunner
    _types = {"tag_list": types.CommaSeparatedListAttribute}
    _create_attrs = RequiredOptional(
        required=("runner_type",),
        optional=(
            "group_id",
            "project_id",
            "description",
            "paused",
            "locked",
            "run_untagged",
            "tag_list",
            "access_level",
            "maximum_timeout",
            "maintenance_note",
        ),
    )
class CurrentUserStatus(SaveMixin, RESTObject):
    """The authenticated user's status message; singleton, hence no id."""
    _id_attr = None
    _repr_attr = "message"
class CurrentUserStatusManager(GetWithoutIdMixin, UpdateMixin, RESTManager):
    """Manager for the authenticated user's status (``/user/status``)."""
    _path = "/user/status"
    _obj_cls = CurrentUserStatus
    _update_attrs = RequiredOptional(optional=("emoji", "message"))
    def get(self, **kwargs: Any) -> CurrentUserStatus:
        # Override only narrows the return type for static checkers.
        return cast(CurrentUserStatus, super().get(**kwargs))
class CurrentUser(RESTObject):
    """The authenticated user (``/user``); singleton, hence no id attribute."""
    _id_attr = None
    _repr_attr = "username"
    emails: CurrentUserEmailManager
    gpgkeys: CurrentUserGPGKeyManager
    keys: CurrentUserKeyManager
    runners: CurrentUserRunnerManager
    status: CurrentUserStatusManager
class CurrentUserManager(GetWithoutIdMixin, RESTManager):
    """Manager for the authenticated user endpoint (``GET /user``)."""
    _path = "/user"
    _obj_cls = CurrentUser
    def get(self, **kwargs: Any) -> CurrentUser:
        # Override only narrows the return type for static checkers.
        return cast(CurrentUser, super().get(**kwargs))
class User(SaveMixin, ObjectDeleteMixin, RESTObject):
    """A GitLab user as seen through the admin-level ``/users`` API.

    Custom actions mirror the REST state transitions (block/unblock,
    activate/deactivate, approve/reject, ban/unban) and update the cached
    ``state`` attribute when the server confirms the change.
    """
    _repr_attr = "username"
    customattributes: UserCustomAttributeManager
    emails: "UserEmailManager"
    events: UserEventManager
    followers_users: "UserFollowersManager"
    following_users: "UserFollowingManager"
    gpgkeys: "UserGPGKeyManager"
    identityproviders: "UserIdentityProviderManager"
    impersonationtokens: "UserImpersonationTokenManager"
    keys: "UserKeyManager"
    memberships: "UserMembershipManager"
    personal_access_tokens: UserPersonalAccessTokenManager
    projects: "UserProjectManager"
    starred_projects: "StarredProjectManager"
    status: "UserStatusManager"
    @cli.register_custom_action(cls_names="User")
    @exc.on_http_error(exc.GitlabBlockError)
    def block(self, **kwargs: Any) -> Optional[bool]:
        """Block the user.
        Args:
            **kwargs: Extra options to send to the server (e.g. sudo)
        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabBlockError: If the user could not be blocked
        Returns:
            Whether the user status has been changed
        """
        path = f"/users/{self.encoded_id}/block"
        # NOTE: Undocumented behavior of the GitLab API is that it returns a
        # boolean or None
        server_data = cast(
            Optional[bool], self.manager.gitlab.http_post(path, **kwargs)
        )
        if server_data is True:
            self._attrs["state"] = "blocked"
        return server_data
    @cli.register_custom_action(cls_names="User")
    @exc.on_http_error(exc.GitlabFollowError)
    def follow(self, **kwargs: Any) -> Union[Dict[str, Any], requests.Response]:
        """Follow the user.
        Args:
            **kwargs: Extra options to send to the server (e.g. sudo)
        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabFollowError: If the user could not be followed
        Returns:
            The new object data (*not* a RESTObject)
        """
        path = f"/users/{self.encoded_id}/follow"
        return self.manager.gitlab.http_post(path, **kwargs)
    @cli.register_custom_action(cls_names="User")
    @exc.on_http_error(exc.GitlabUnfollowError)
    def unfollow(self, **kwargs: Any) -> Union[Dict[str, Any], requests.Response]:
        """Unfollow the user.
        Args:
            **kwargs: Extra options to send to the server (e.g. sudo)
        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabUnfollowError: If the user could not be unfollowed
        Returns:
            The new object data (*not* a RESTObject)
        """
        path = f"/users/{self.encoded_id}/unfollow"
        return self.manager.gitlab.http_post(path, **kwargs)
    @cli.register_custom_action(cls_names="User")
    @exc.on_http_error(exc.GitlabUnblockError)
    def unblock(self, **kwargs: Any) -> Optional[bool]:
        """Unblock the user.
        Args:
            **kwargs: Extra options to send to the server (e.g. sudo)
        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabUnblockError: If the user could not be unblocked
        Returns:
            Whether the user status has been changed
        """
        path = f"/users/{self.encoded_id}/unblock"
        # NOTE: Undocumented behavior of the GitLab API is that it returns a
        # boolean or None
        server_data = cast(
            Optional[bool], self.manager.gitlab.http_post(path, **kwargs)
        )
        if server_data is True:
            self._attrs["state"] = "active"
        return server_data
    @cli.register_custom_action(cls_names="User")
    @exc.on_http_error(exc.GitlabDeactivateError)
    def deactivate(self, **kwargs: Any) -> Union[Dict[str, Any], requests.Response]:
        """Deactivate the user.
        Args:
            **kwargs: Extra options to send to the server (e.g. sudo)
        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabDeactivateError: If the user could not be deactivated
        Returns:
            Whether the user status has been changed
        """
        path = f"/users/{self.encoded_id}/deactivate"
        server_data = self.manager.gitlab.http_post(path, **kwargs)
        if server_data:
            self._attrs["state"] = "deactivated"
        return server_data
    @cli.register_custom_action(cls_names="User")
    @exc.on_http_error(exc.GitlabActivateError)
    def activate(self, **kwargs: Any) -> Union[Dict[str, Any], requests.Response]:
        """Activate the user.
        Args:
            **kwargs: Extra options to send to the server (e.g. sudo)
        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabActivateError: If the user could not be activated
        Returns:
            Whether the user status has been changed
        """
        path = f"/users/{self.encoded_id}/activate"
        server_data = self.manager.gitlab.http_post(path, **kwargs)
        if server_data:
            self._attrs["state"] = "active"
        return server_data
    @cli.register_custom_action(cls_names="User")
    @exc.on_http_error(exc.GitlabUserApproveError)
    def approve(self, **kwargs: Any) -> Union[Dict[str, Any], requests.Response]:
        """Approve a user creation request.
        Args:
            **kwargs: Extra options to send to the server (e.g. sudo)
        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabUserApproveError: If the user could not be approved
        Returns:
            The new object data (*not* a RESTObject)
        """
        path = f"/users/{self.encoded_id}/approve"
        return self.manager.gitlab.http_post(path, **kwargs)
    @cli.register_custom_action(cls_names="User")
    @exc.on_http_error(exc.GitlabUserRejectError)
    def reject(self, **kwargs: Any) -> Union[Dict[str, Any], requests.Response]:
        """Reject a user creation request.
        Args:
            **kwargs: Extra options to send to the server (e.g. sudo)
        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabUserRejectError: If the user could not be rejected
        Returns:
            The new object data (*not* a RESTObject)
        """
        path = f"/users/{self.encoded_id}/reject"
        return self.manager.gitlab.http_post(path, **kwargs)
    @cli.register_custom_action(cls_names="User")
    @exc.on_http_error(exc.GitlabBanError)
    def ban(self, **kwargs: Any) -> Union[Dict[str, Any], requests.Response]:
        """Ban the user.
        Args:
            **kwargs: Extra options to send to the server (e.g. sudo)
        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabBanError: If the user could not be banned
        Returns:
            Whether the user has been banned
        """
        path = f"/users/{self.encoded_id}/ban"
        server_data = self.manager.gitlab.http_post(path, **kwargs)
        if server_data:
            self._attrs["state"] = "banned"
        return server_data
    @cli.register_custom_action(cls_names="User")
    @exc.on_http_error(exc.GitlabUnbanError)
    def unban(self, **kwargs: Any) -> Union[Dict[str, Any], requests.Response]:
        """Unban the user.
        Args:
            **kwargs: Extra options to send to the server (e.g. sudo)
        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabUnbanError: If the user could not be unbanned
        Returns:
            Whether the user has been unbanned
        """
        path = f"/users/{self.encoded_id}/unban"
        server_data = self.manager.gitlab.http_post(path, **kwargs)
        if server_data:
            self._attrs["state"] = "active"
        return server_data
class UserManager(CRUDMixin, RESTManager):
    """Manager for ``/users``: full CRUD plus the list filters documented by
    the GitLab users API."""
    _path = "/users"
    _obj_cls = User
    _list_filters = (
        "active",
        "blocked",
        "username",
        "extern_uid",
        "provider",
        "external",
        "search",
        "custom_attributes",
        "status",
        "two_factor",
    )
    _create_attrs = RequiredOptional(
        optional=(
            "email",
            "username",
            "name",
            "password",
            "reset_password",
            "skype",
            "linkedin",
            "twitter",
            "projects_limit",
            "extern_uid",
            "provider",
            "bio",
            "admin",
            "can_create_group",
            "website_url",
            "skip_confirmation",
            "external",
            "organization",
            "location",
            "avatar",
            "public_email",
            "private_profile",
            "color_scheme_id",
            "theme_id",
        ),
    )
    _update_attrs = RequiredOptional(
        required=("email", "username", "name"),
        optional=(
            "password",
            "skype",
            "linkedin",
            "twitter",
            "projects_limit",
            "extern_uid",
            "provider",
            "bio",
            "admin",
            "can_create_group",
            "website_url",
            "skip_reconfirmation",
            "external",
            "organization",
            "location",
            "avatar",
            "public_email",
            "private_profile",
            "color_scheme_id",
            "theme_id",
        ),
    )
    # "confirm" is sent lowercased; "avatar" is uploaded as an image file.
    _types = {"confirm": types.LowercaseStringAttribute, "avatar": types.ImageAttribute}
    def get(self, id: Union[str, int], lazy: bool = False, **kwargs: Any) -> User:
        # Override only narrows the return type for static checkers.
        return cast(User, super().get(id=id, lazy=lazy, **kwargs))
class ProjectUser(RESTObject):
    """A user as listed through a project's ``/users`` endpoint."""
    pass
class ProjectUserManager(ListMixin, RESTManager):
    """List-only manager for a project's users."""
    _path = "/projects/{project_id}/users"
    _obj_cls = ProjectUser
    _from_parent_attrs = {"project_id": "id"}
    _list_filters = ("search", "skip_users")
    _types = {"skip_users": types.ArrayAttribute}
class UserEmail(ObjectDeleteMixin, RESTObject):
    """An email address on an arbitrary user's account (admin API)."""
    _repr_attr = "email"
class UserEmailManager(RetrieveMixin, CreateMixin, DeleteMixin, RESTManager):
    """Manager for a user's email addresses (``/users/:id/emails``)."""
    _path = "/users/{user_id}/emails"
    _obj_cls = UserEmail
    _from_parent_attrs = {"user_id": "id"}
    _create_attrs = RequiredOptional(required=("email",))
    def get(self, id: Union[str, int], lazy: bool = False, **kwargs: Any) -> UserEmail:
        # Override only narrows the return type for static checkers.
        return cast(UserEmail, super().get(id=id, lazy=lazy, **kwargs))
class UserActivities(RESTObject):
    """A user-activity record; keyed by username rather than a numeric id."""
    _id_attr = "username"
class UserStatus(RESTObject):
    """A user's status message; singleton per user, hence no id."""
    _id_attr = None
    _repr_attr = "message"
class UserStatusManager(GetWithoutIdMixin, RESTManager):
    """Read-only manager for a user's status (``/users/:id/status``)."""
    _path = "/users/{user_id}/status"
    _obj_cls = UserStatus
    _from_parent_attrs = {"user_id": "id"}
    def get(self, **kwargs: Any) -> UserStatus:
        # Override only narrows the return type for static checkers.
        return cast(UserStatus, super().get(**kwargs))
class UserActivitiesManager(ListMixin, RESTManager):
    """List-only manager for user activities (``/user/activities``)."""
    _path = "/user/activities"
    _obj_cls = UserActivities
class UserGPGKey(ObjectDeleteMixin, RESTObject):
    """A GPG key on an arbitrary user's account (admin API)."""
    pass
class UserGPGKeyManager(RetrieveMixin, CreateMixin, DeleteMixin, RESTManager):
    """Manager for a user's GPG keys (``/users/:id/gpg_keys``)."""
    _path = "/users/{user_id}/gpg_keys"
    _obj_cls = UserGPGKey
    _from_parent_attrs = {"user_id": "id"}
    _create_attrs = RequiredOptional(required=("key",))
    def get(self, id: Union[str, int], lazy: bool = False, **kwargs: Any) -> UserGPGKey:
        # Override only narrows the return type for static checkers.
        return cast(UserGPGKey, super().get(id=id, lazy=lazy, **kwargs))
class UserKey(ObjectDeleteMixin, RESTObject):
    """An SSH key on an arbitrary user's account (admin API)."""
    pass
class UserKeyManager(RetrieveMixin, CreateMixin, DeleteMixin, RESTManager):
    """Manager for a user's SSH keys (``/users/:id/keys``)."""
    _path = "/users/{user_id}/keys"
    _obj_cls = UserKey
    _from_parent_attrs = {"user_id": "id"}
    _create_attrs = RequiredOptional(required=("title", "key"))
    def get(self, id: Union[str, int], lazy: bool = False, **kwargs: Any) -> UserKey:
        # Override only narrows the return type for static checkers.
        return cast(UserKey, super().get(id=id, lazy=lazy, **kwargs))
class UserIdentityProviderManager(DeleteMixin, RESTManager):
    """Manager for user identities.
    This manager does not actually manage objects but enables
    functionality for deletion of user identities by provider.
    """
    # DELETE /users/:id/identities/:provider — the "id" passed to delete()
    # is the provider name.
    _path = "/users/{user_id}/identities"
    _from_parent_attrs = {"user_id": "id"}
class UserImpersonationToken(ObjectDeleteMixin, RESTObject):
    """An impersonation token issued for a user (admin API)."""
    pass
class UserImpersonationTokenManager(NoUpdateMixin, RESTManager):
    """Manager for a user's impersonation tokens (create/list/get/delete,
    no update)."""
    _path = "/users/{user_id}/impersonation_tokens"
    _obj_cls = UserImpersonationToken
    _from_parent_attrs = {"user_id": "id"}
    _create_attrs = RequiredOptional(
        required=("name", "scopes"), optional=("expires_at",)
    )
    _list_filters = ("state",)
    _types = {"scopes": ArrayAttribute}
    def get(
        self, id: Union[str, int], lazy: bool = False, **kwargs: Any
    ) -> UserImpersonationToken:
        # Override only narrows the return type for static checkers.
        return cast(UserImpersonationToken, super().get(id=id, lazy=lazy, **kwargs))
class UserMembership(RESTObject):
    """A group/project membership of a user; identified by the source id."""
    _id_attr = "source_id"
class UserMembershipManager(RetrieveMixin, RESTManager):
    """Read-only manager for a user's memberships (``/users/:id/memberships``)."""
    _path = "/users/{user_id}/memberships"
    _obj_cls = UserMembership
    _from_parent_attrs = {"user_id": "id"}
    _list_filters = ("type",)
    def get(
        self, id: Union[str, int], lazy: bool = False, **kwargs: Any
    ) -> UserMembership:
        # Override only narrows the return type for static checkers.
        return cast(UserMembership, super().get(id=id, lazy=lazy, **kwargs))
# Having this outside projects avoids circular imports due to ProjectUser
class UserProject(RESTObject):
    """A project owned by a specific user."""
    pass
class UserProjectManager(ListMixin, CreateMixin, RESTManager):
    """Manager for projects owned by a user: create via
    ``POST /projects/user/:id``, list via ``GET /users/:id/projects``."""
    _path = "/projects/user/{user_id}"
    _obj_cls = UserProject
    _from_parent_attrs = {"user_id": "id"}
    _create_attrs = RequiredOptional(
        required=("name",),
        optional=(
            "default_branch",
            "issues_enabled",
            "wall_enabled",
            "merge_requests_enabled",
            "wiki_enabled",
            "snippets_enabled",
            "squash_option",
            "public",
            "visibility",
            "description",
            "builds_enabled",
            "public_builds",
            "import_url",
            "only_allow_merge_if_build_succeeds",
        ),
    )
    _list_filters = (
        "archived",
        "visibility",
        "order_by",
        "sort",
        "search",
        "simple",
        "owned",
        "membership",
        "starred",
        "statistics",
        "with_issues_enabled",
        "with_merge_requests_enabled",
        "with_custom_attributes",
        "with_programming_language",
        "wiki_checksum_failed",
        "repository_checksum_failed",
        "min_access_level",
        "id_after",
        "id_before",
    )
    def list(self, **kwargs: Any) -> Union[RESTObjectList, List[RESTObject]]:
        """Retrieve a list of objects.
        Args:
            all: If True, return all the items, without pagination
            per_page: Number of items to retrieve per request
            page: ID of the page to return (starts with page 1)
            iterator: If set to True and no pagination option is
                defined, return a generator instead of a list
            **kwargs: Extra options to send to the server (e.g. sudo)
        Returns:
            The list of objects, or a generator if `iterator` is True
        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabListError: If the server cannot perform the request
        """
        if self._parent:
            path = f"/users/{self._parent.id}/projects"
        else:
            # NOTE(review): without a parent, _from_parent_attrs["user_id"] is
            # the literal string "id", producing "/users/id/projects" — verify
            # this fallback is ever exercised / intended.
            path = f"/users/{self._from_parent_attrs['user_id']}/projects"
        return ListMixin.list(self, path=path, **kwargs)
class StarredProject(RESTObject):
    """A project starred by a user."""
    pass
class StarredProjectManager(ListMixin, RESTManager):
    """List-only manager for a user's starred projects."""
    _path = "/users/{user_id}/starred_projects"
    _obj_cls = StarredProject
    _from_parent_attrs = {"user_id": "id"}
    _list_filters = (
        "archived",
        "membership",
        "min_access_level",
        "order_by",
        "owned",
        "search",
        "simple",
        "sort",
        "starred",
        "statistics",
        "visibility",
        "with_custom_attributes",
        "with_issues_enabled",
        "with_merge_requests_enabled",
    )
class UserFollowersManager(ListMixin, RESTManager):
    """List the users who follow a given user; items are full User objects."""
    _path = "/users/{user_id}/followers"
    _obj_cls = User
    _from_parent_attrs = {"user_id": "id"}
class UserFollowingManager(ListMixin, RESTManager):
    """List the users a given user follows; items are full User objects."""
    _path = "/users/{user_id}/following"
    _obj_cls = User
    _from_parent_attrs = {"user_id": "id"}
| 21,381 | Python | .py | 569 | 29.757469 | 88 | 0.634868 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,068 | project_access_tokens.py | python-gitlab_python-gitlab/gitlab/v4/objects/project_access_tokens.py | from typing import Any, cast, Union
from gitlab.base import RESTManager, RESTObject
from gitlab.mixins import (
CreateMixin,
DeleteMixin,
ObjectDeleteMixin,
ObjectRotateMixin,
RetrieveMixin,
RotateMixin,
)
from gitlab.types import ArrayAttribute, RequiredOptional
__all__ = [
"ProjectAccessToken",
"ProjectAccessTokenManager",
]
class ProjectAccessToken(ObjectDeleteMixin, ObjectRotateMixin, RESTObject):
    """A project access token; supports deletion and rotation."""
    pass
class ProjectAccessTokenManager(
    CreateMixin, DeleteMixin, RetrieveMixin, RotateMixin, RESTManager
):
    """Manager for project access tokens (create/get/list/delete/rotate)."""
    _path = "/projects/{project_id}/access_tokens"
    _obj_cls = ProjectAccessToken
    _from_parent_attrs = {"project_id": "id"}
    _create_attrs = RequiredOptional(
        required=("name", "scopes"), optional=("access_level", "expires_at")
    )
    _types = {"scopes": ArrayAttribute}
    def get(
        self, id: Union[str, int], lazy: bool = False, **kwargs: Any
    ) -> ProjectAccessToken:
        # Override only narrows the return type for static checkers.
        return cast(ProjectAccessToken, super().get(id=id, lazy=lazy, **kwargs))
| 1,043 | Python | .py | 31 | 29.322581 | 80 | 0.720398 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,069 | templates.py | python-gitlab_python-gitlab/gitlab/v4/objects/templates.py | from typing import Any, cast, Union
from gitlab.base import RESTManager, RESTObject
from gitlab.mixins import RetrieveMixin
__all__ = [
"Dockerfile",
"DockerfileManager",
"Gitignore",
"GitignoreManager",
"Gitlabciyml",
"GitlabciymlManager",
"License",
"LicenseManager",
]
class Dockerfile(RESTObject):
    """A Dockerfile template; identified by its template name."""
    _id_attr = "name"
class DockerfileManager(RetrieveMixin, RESTManager):
    """Read-only manager for Dockerfile templates."""
    _path = "/templates/dockerfiles"
    _obj_cls = Dockerfile
    def get(self, id: Union[str, int], lazy: bool = False, **kwargs: Any) -> Dockerfile:
        # Override only narrows the return type for static checkers.
        return cast(Dockerfile, super().get(id=id, lazy=lazy, **kwargs))
class Gitignore(RESTObject):
    """A .gitignore template; identified by its template name."""
    _id_attr = "name"
class GitignoreManager(RetrieveMixin, RESTManager):
    """Read-only manager for .gitignore templates."""
    _path = "/templates/gitignores"
    _obj_cls = Gitignore
    def get(self, id: Union[str, int], lazy: bool = False, **kwargs: Any) -> Gitignore:
        # Override only narrows the return type for static checkers.
        return cast(Gitignore, super().get(id=id, lazy=lazy, **kwargs))
class Gitlabciyml(RESTObject):
    """A .gitlab-ci.yml template; identified by its template name."""
    _id_attr = "name"
class GitlabciymlManager(RetrieveMixin, RESTManager):
    """Read-only manager for .gitlab-ci.yml templates."""
    _path = "/templates/gitlab_ci_ymls"
    _obj_cls = Gitlabciyml
    def get(
        self, id: Union[str, int], lazy: bool = False, **kwargs: Any
    ) -> Gitlabciyml:
        # Override only narrows the return type for static checkers.
        return cast(Gitlabciyml, super().get(id=id, lazy=lazy, **kwargs))
class License(RESTObject):
    """An open-source license template; identified by its license key."""
    _id_attr = "key"
class LicenseManager(RetrieveMixin, RESTManager):
    """Read-only manager for license templates; ``project``/``fullname``
    fill in the template placeholders on get()."""
    _path = "/templates/licenses"
    _obj_cls = License
    _list_filters = ("popular",)
    _optional_get_attrs = ("project", "fullname")
    def get(self, id: Union[str, int], lazy: bool = False, **kwargs: Any) -> License:
        # Override only narrows the return type for static checkers.
        return cast(License, super().get(id=id, lazy=lazy, **kwargs))
| 1,726 | Python | .py | 45 | 33.577778 | 88 | 0.679325 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,070 | ldap.py | python-gitlab_python-gitlab/gitlab/v4/objects/ldap.py | from typing import Any, List, Union
from gitlab import exceptions as exc
from gitlab.base import RESTManager, RESTObject, RESTObjectList
__all__ = [
"LDAPGroup",
"LDAPGroupManager",
]
class LDAPGroup(RESTObject):
    """An LDAP group search result; has no server-side id."""
    _id_attr = None
class LDAPGroupManager(RESTManager):
    """Read-only manager for LDAP group queries (``/ldap/groups``)."""
    _path = "/ldap/groups"
    _obj_cls = LDAPGroup
    _list_filters = ("search", "provider")
    @exc.on_http_error(exc.GitlabListError)
    def list(self, **kwargs: Any) -> Union[List[LDAPGroup], RESTObjectList]:
        """Retrieve a list of LDAP groups.

        Args:
            all: If True, return all the items, without pagination
            per_page: Number of items to retrieve per request
            page: ID of the page to return (starts with page 1)
            iterator: If set to True and no pagination option is
                defined, return a generator instead of a list
            **kwargs: Extra options to send to the server (e.g. sudo)

        Returns:
            The list of objects, or a generator if `iterator` is True

        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabListError: If the server cannot perform the request
        """
        query = dict(kwargs)
        if self.gitlab.per_page:
            query.setdefault("per_page", self.gitlab.per_page)
        # When a provider is given, query the provider-scoped endpoint.
        if "provider" in query:
            endpoint = f"/ldap/{query['provider']}/groups"
        else:
            endpoint = self._path
        response = self.gitlab.http_list(endpoint, **query)
        if not isinstance(response, list):
            return RESTObjectList(self, self._obj_cls, response)
        return [self._obj_cls(self, entry) for entry in response]
| 1,662 | Python | .py | 40 | 33.05 | 76 | 0.635404 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,071 | issues.py | python-gitlab_python-gitlab/gitlab/v4/objects/issues.py | from typing import Any, cast, Dict, List, Optional, Tuple, TYPE_CHECKING, Union
import requests
from gitlab import cli, client
from gitlab import exceptions as exc
from gitlab import types
from gitlab.base import RESTManager, RESTObject
from gitlab.mixins import (
CreateMixin,
CRUDMixin,
DeleteMixin,
ListMixin,
ObjectDeleteMixin,
ParticipantsMixin,
RetrieveMixin,
SaveMixin,
SubscribableMixin,
TimeTrackingMixin,
TodoMixin,
UserAgentDetailMixin,
)
from gitlab.types import RequiredOptional
from .award_emojis import ProjectIssueAwardEmojiManager # noqa: F401
from .discussions import ProjectIssueDiscussionManager # noqa: F401
from .events import ( # noqa: F401
ProjectIssueResourceIterationEventManager,
ProjectIssueResourceLabelEventManager,
ProjectIssueResourceMilestoneEventManager,
ProjectIssueResourceStateEventManager,
ProjectIssueResourceWeightEventManager,
)
from .notes import ProjectIssueNoteManager # noqa: F401
__all__ = [
"Issue",
"IssueManager",
"GroupIssue",
"GroupIssueManager",
"ProjectIssue",
"ProjectIssueManager",
"ProjectIssueLink",
"ProjectIssueLinkManager",
]
class Issue(RESTObject):
    """An issue as returned by the instance-wide ``/issues`` endpoint."""
    _url = "/issues"
    _repr_attr = "title"
class IssueManager(RetrieveMixin, RESTManager):
    """Read-only manager for instance-wide issues (``/issues``)."""
    _path = "/issues"
    _obj_cls = Issue
    _list_filters = (
        "state",
        "labels",
        "milestone",
        "scope",
        "author_id",
        "iteration_id",
        "assignee_id",
        "my_reaction_emoji",
        "iids",
        "order_by",
        "sort",
        "search",
        "created_after",
        "created_before",
        "updated_after",
        "updated_before",
    )
    _types = {"iids": types.ArrayAttribute, "labels": types.CommaSeparatedListAttribute}
    def get(self, id: Union[str, int], lazy: bool = False, **kwargs: Any) -> Issue:
        # Override only narrows the return type for static checkers.
        return cast(Issue, super().get(id=id, lazy=lazy, **kwargs))
class GroupIssue(RESTObject):
    """An issue as listed through a group's ``/issues`` endpoint."""
    pass
class GroupIssueManager(ListMixin, RESTManager):
    """List-only manager for a group's issues."""
    _path = "/groups/{group_id}/issues"
    _obj_cls = GroupIssue
    _from_parent_attrs = {"group_id": "id"}
    _list_filters = (
        "state",
        "labels",
        "milestone",
        "order_by",
        "sort",
        "iids",
        "author_id",
        "iteration_id",
        "assignee_id",
        "my_reaction_emoji",
        "search",
        "created_after",
        "created_before",
        "updated_after",
        "updated_before",
    )
    _types = {"iids": types.ArrayAttribute, "labels": types.CommaSeparatedListAttribute}
class ProjectIssue(
    UserAgentDetailMixin,
    SubscribableMixin,
    TodoMixin,
    TimeTrackingMixin,
    ParticipantsMixin,
    SaveMixin,
    ObjectDeleteMixin,
    RESTObject,
):
    """A project issue, addressed by its project-scoped ``iid``.

    Custom actions cover moving/reordering the issue and querying the
    merge requests related to it or closing it.
    """
    _repr_attr = "title"
    _id_attr = "iid"
    awardemojis: ProjectIssueAwardEmojiManager
    discussions: ProjectIssueDiscussionManager
    links: "ProjectIssueLinkManager"
    notes: ProjectIssueNoteManager
    resourcelabelevents: ProjectIssueResourceLabelEventManager
    resourcemilestoneevents: ProjectIssueResourceMilestoneEventManager
    resourcestateevents: ProjectIssueResourceStateEventManager
    resource_iteration_events: ProjectIssueResourceIterationEventManager
    resource_weight_events: ProjectIssueResourceWeightEventManager
    @cli.register_custom_action(cls_names="ProjectIssue", required=("to_project_id",))
    @exc.on_http_error(exc.GitlabUpdateError)
    def move(self, to_project_id: int, **kwargs: Any) -> None:
        """Move the issue to another project.
        Args:
            to_project_id: ID of the target project
            **kwargs: Extra options to send to the server (e.g. sudo)
        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabUpdateError: If the issue could not be moved
        """
        path = f"{self.manager.path}/{self.encoded_id}/move"
        data = {"to_project_id": to_project_id}
        server_data = self.manager.gitlab.http_post(path, post_data=data, **kwargs)
        if TYPE_CHECKING:
            assert isinstance(server_data, dict)
        # The server returns the issue as it exists in the target project.
        self._update_attrs(server_data)
    # NOTE(review): the CLI registration marks both ids as required, while the
    # method itself accepts either one — verify intended.
    @cli.register_custom_action(
        cls_names="ProjectIssue", required=("move_after_id", "move_before_id")
    )
    @exc.on_http_error(exc.GitlabUpdateError)
    def reorder(
        self,
        move_after_id: Optional[int] = None,
        move_before_id: Optional[int] = None,
        **kwargs: Any,
    ) -> None:
        """Reorder an issue on a board.
        Args:
            move_after_id: ID of an issue that should be placed after this issue
            move_before_id: ID of an issue that should be placed before this issue
            **kwargs: Extra options to send to the server (e.g. sudo)
        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabUpdateError: If the issue could not be reordered
        """
        path = f"{self.manager.path}/{self.encoded_id}/reorder"
        data: Dict[str, Any] = {}
        if move_after_id is not None:
            data["move_after_id"] = move_after_id
        if move_before_id is not None:
            data["move_before_id"] = move_before_id
        server_data = self.manager.gitlab.http_put(path, post_data=data, **kwargs)
        if TYPE_CHECKING:
            assert isinstance(server_data, dict)
        self._update_attrs(server_data)
    @cli.register_custom_action(cls_names="ProjectIssue")
    @exc.on_http_error(exc.GitlabGetError)
    def related_merge_requests(
        self, **kwargs: Any
    ) -> Union[client.GitlabList, List[Dict[str, Any]]]:
        """List merge requests related to the issue.
        Args:
            **kwargs: Extra options to send to the server (e.g. sudo)
        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabGetError: If the merge requests could not be retrieved
        Returns:
            The list of merge requests.
        """
        path = f"{self.manager.path}/{self.encoded_id}/related_merge_requests"
        result = self.manager.gitlab.http_list(path, **kwargs)
        if TYPE_CHECKING:
            assert not isinstance(result, requests.Response)
        return result
    @cli.register_custom_action(cls_names="ProjectIssue")
    @exc.on_http_error(exc.GitlabGetError)
    def closed_by(
        self, **kwargs: Any
    ) -> Union[client.GitlabList, List[Dict[str, Any]]]:
        """List merge requests that will close the issue when merged.
        Args:
            **kwargs: Extra options to send to the server (e.g. sudo)
        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabGetError: If the merge requests could not be retrieved
        Returns:
            The list of merge requests.
        """
        path = f"{self.manager.path}/{self.encoded_id}/closed_by"
        result = self.manager.gitlab.http_list(path, **kwargs)
        if TYPE_CHECKING:
            assert not isinstance(result, requests.Response)
        return result
class ProjectIssueManager(CRUDMixin, RESTManager):
    """Manager for the issues of a single project (full CRUD)."""

    _path = "/projects/{project_id}/issues"
    _obj_cls = ProjectIssue
    # project_id in the path comes from the parent Project's id.
    _from_parent_attrs = {"project_id": "id"}
    # Query-string filters accepted by list().
    _list_filters = (
        "iids",
        "state",
        "labels",
        "milestone",
        "scope",
        "author_id",
        "iteration_id",
        "assignee_id",
        "my_reaction_emoji",
        "order_by",
        "sort",
        "search",
        "created_after",
        "created_before",
        "updated_after",
        "updated_before",
    )
    _create_attrs = RequiredOptional(
        required=("title",),
        optional=(
            "description",
            "confidential",
            "assignee_ids",
            "assignee_id",
            "milestone_id",
            "labels",
            "created_at",
            "due_date",
            "merge_request_to_resolve_discussions_of",
            "discussion_to_resolve",
        ),
    )
    _update_attrs = RequiredOptional(
        optional=(
            "title",
            "description",
            "confidential",
            "assignee_ids",
            "assignee_id",
            "milestone_id",
            "labels",
            "state_event",
            "updated_at",
            "due_date",
            "discussion_locked",
        ),
    )
    # How list/array attributes are serialized on the wire.
    _types = {"iids": types.ArrayAttribute, "labels": types.CommaSeparatedListAttribute}

    def get(
        self, id: Union[str, int], lazy: bool = False, **kwargs: Any
    ) -> ProjectIssue:
        """Typed wrapper so callers get a ProjectIssue, not a bare RESTObject."""
        return cast(ProjectIssue, super().get(id=id, lazy=lazy, **kwargs))
class ProjectIssueLink(ObjectDeleteMixin, RESTObject):
    """A link between two issues, addressed by its issue_link_id; supports delete()."""

    _id_attr = "issue_link_id"
class ProjectIssueLinkManager(ListMixin, CreateMixin, DeleteMixin, RESTManager):
    """Manager for the links of a single issue (list/create/delete, no update)."""

    _path = "/projects/{project_id}/issues/{issue_iid}/links"
    _obj_cls = ProjectIssueLink
    _from_parent_attrs = {"project_id": "project_id", "issue_iid": "iid"}
    _create_attrs = RequiredOptional(required=("target_project_id", "target_issue_iid"))

    @exc.on_http_error(exc.GitlabCreateError)
    # NOTE(jlvillal): Signature doesn't match CreateMixin.create() so ignore
    # type error
    def create(  # type: ignore
        self, data: Dict[str, Any], **kwargs: Any
    ) -> Tuple[RESTObject, RESTObject]:
        """Create a new object.

        Args:
            data: parameters to send to the server to create the
                resource
            **kwargs: Extra options to send to the server (e.g. sudo)

        Returns:
            The source and target issues

        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabCreateError: If the server cannot perform the request
        """
        self._create_attrs.validate_attrs(data=data)
        if TYPE_CHECKING:
            assert self.path is not None
        server_data = self.gitlab.http_post(self.path, post_data=data, **kwargs)
        if TYPE_CHECKING:
            assert isinstance(server_data, dict)
            assert self._parent is not None
        # The server returns both ends of the new link; wrap each payload in a
        # ProjectIssue bound to the parent issue's own manager.
        source_issue = ProjectIssue(self._parent.manager, server_data["source_issue"])
        target_issue = ProjectIssue(self._parent.manager, server_data["target_issue"])
        return source_issue, target_issue
| 10,393 | Python | .py | 288 | 28.197917 | 88 | 0.636156 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,072 | badges.py | python-gitlab_python-gitlab/gitlab/v4/objects/badges.py | from typing import Any, cast, Union
from gitlab.base import RESTManager, RESTObject
from gitlab.mixins import BadgeRenderMixin, CRUDMixin, ObjectDeleteMixin, SaveMixin
from gitlab.types import RequiredOptional
__all__ = [
"GroupBadge",
"GroupBadgeManager",
"ProjectBadge",
"ProjectBadgeManager",
]
class GroupBadge(SaveMixin, ObjectDeleteMixin, RESTObject):
    """A single group badge; supports save() and delete()."""

    pass
class GroupBadgeManager(BadgeRenderMixin, CRUDMixin, RESTManager):
    """Manager for the badges of a group (full CRUD plus render preview)."""

    _path = "/groups/{group_id}/badges"
    _obj_cls = GroupBadge
    _from_parent_attrs = {"group_id": "id"}
    _create_attrs = RequiredOptional(required=("link_url", "image_url"))
    _update_attrs = RequiredOptional(optional=("link_url", "image_url"))

    def get(self, id: Union[str, int], lazy: bool = False, **kwargs: Any) -> GroupBadge:
        """Typed wrapper so callers get a GroupBadge, not a bare RESTObject."""
        return cast(GroupBadge, super().get(id=id, lazy=lazy, **kwargs))
class ProjectBadge(SaveMixin, ObjectDeleteMixin, RESTObject):
    """A single project badge; supports save() and delete()."""

    pass
class ProjectBadgeManager(BadgeRenderMixin, CRUDMixin, RESTManager):
    """Manager for the badges of a project (full CRUD plus render preview)."""

    _path = "/projects/{project_id}/badges"
    _obj_cls = ProjectBadge
    _from_parent_attrs = {"project_id": "id"}
    _create_attrs = RequiredOptional(required=("link_url", "image_url"))
    _update_attrs = RequiredOptional(optional=("link_url", "image_url"))

    def get(
        self, id: Union[str, int], lazy: bool = False, **kwargs: Any
    ) -> ProjectBadge:
        """Typed wrapper so callers get a ProjectBadge, not a bare RESTObject."""
        return cast(ProjectBadge, super().get(id=id, lazy=lazy, **kwargs))
| 1,464 | Python | .py | 32 | 41.25 | 88 | 0.711268 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,073 | merge_requests.py | python-gitlab_python-gitlab/gitlab/v4/objects/merge_requests.py | """
GitLab API:
https://docs.gitlab.com/ee/api/merge_requests.html
https://docs.gitlab.com/ee/api/merge_request_approvals.html
"""
from typing import Any, cast, Dict, Optional, TYPE_CHECKING, Union
import requests
import gitlab
from gitlab import cli
from gitlab import exceptions as exc
from gitlab import types
from gitlab.base import RESTManager, RESTObject, RESTObjectList
from gitlab.mixins import (
CRUDMixin,
ListMixin,
ObjectDeleteMixin,
ParticipantsMixin,
RetrieveMixin,
SaveMixin,
SubscribableMixin,
TimeTrackingMixin,
TodoMixin,
)
from gitlab.types import RequiredOptional
from .award_emojis import ProjectMergeRequestAwardEmojiManager # noqa: F401
from .commits import ProjectCommit, ProjectCommitManager
from .discussions import ProjectMergeRequestDiscussionManager # noqa: F401
from .draft_notes import ProjectMergeRequestDraftNoteManager
from .events import ( # noqa: F401
ProjectMergeRequestResourceLabelEventManager,
ProjectMergeRequestResourceMilestoneEventManager,
ProjectMergeRequestResourceStateEventManager,
)
from .issues import ProjectIssue, ProjectIssueManager
from .merge_request_approvals import ( # noqa: F401
ProjectMergeRequestApprovalManager,
ProjectMergeRequestApprovalRuleManager,
ProjectMergeRequestApprovalStateManager,
)
from .notes import ProjectMergeRequestNoteManager # noqa: F401
from .pipelines import ProjectMergeRequestPipelineManager # noqa: F401
from .reviewers import ProjectMergeRequestReviewerDetailManager
__all__ = [
"MergeRequest",
"MergeRequestManager",
"GroupMergeRequest",
"GroupMergeRequestManager",
"ProjectMergeRequest",
"ProjectMergeRequestManager",
"ProjectDeploymentMergeRequest",
"ProjectDeploymentMergeRequestManager",
"ProjectMergeRequestDiff",
"ProjectMergeRequestDiffManager",
]
class MergeRequest(RESTObject):
    """A merge request as returned by the instance-wide listing endpoint (read-only)."""

    pass
class MergeRequestManager(ListMixin, RESTManager):
    """Manager for the instance-wide merge-request listing (list only)."""

    _path = "/merge_requests"
    _obj_cls = MergeRequest
    # Query-string filters accepted by list().
    _list_filters = (
        "state",
        "order_by",
        "sort",
        "milestone",
        "view",
        "labels",
        "with_labels_details",
        "with_merge_status_recheck",
        "created_after",
        "created_before",
        "updated_after",
        "updated_before",
        "scope",
        "author_id",
        "author_username",
        "assignee_id",
        "approver_ids",
        "approved_by_ids",
        "reviewer_id",
        "reviewer_username",
        "my_reaction_emoji",
        "source_branch",
        "target_branch",
        "search",
        "in",
        "wip",
        "not",
        "environment",
        "deployed_before",
        "deployed_after",
    )
    # How list/array attributes are serialized on the wire.
    _types = {
        "approver_ids": types.ArrayAttribute,
        "approved_by_ids": types.ArrayAttribute,
        "in": types.CommaSeparatedListAttribute,
        "labels": types.CommaSeparatedListAttribute,
    }
class GroupMergeRequest(RESTObject):
    """A merge request as returned by the group listing endpoint (read-only)."""

    pass
class GroupMergeRequestManager(ListMixin, RESTManager):
    """Manager for the merge requests of a group (list only)."""

    _path = "/groups/{group_id}/merge_requests"
    _obj_cls = GroupMergeRequest
    _from_parent_attrs = {"group_id": "id"}
    # Query-string filters accepted by list().
    _list_filters = (
        "state",
        "order_by",
        "sort",
        "milestone",
        "view",
        "labels",
        "created_after",
        "created_before",
        "updated_after",
        "updated_before",
        "scope",
        "author_id",
        "assignee_id",
        "approver_ids",
        "approved_by_ids",
        "my_reaction_emoji",
        "source_branch",
        "target_branch",
        "search",
        "wip",
    )
    # How list/array attributes are serialized on the wire.
    _types = {
        "approver_ids": types.ArrayAttribute,
        "approved_by_ids": types.ArrayAttribute,
        "labels": types.CommaSeparatedListAttribute,
    }
class ProjectMergeRequest(
    SubscribableMixin,
    TodoMixin,
    TimeTrackingMixin,
    ParticipantsMixin,
    SaveMixin,
    ObjectDeleteMixin,
    RESTObject,
):
    """A single project merge request, addressed by its ``iid``."""

    _id_attr = "iid"

    # Sub-resource managers, populated by the RESTObject machinery.
    approval_rules: ProjectMergeRequestApprovalRuleManager
    approval_state: ProjectMergeRequestApprovalStateManager
    approvals: ProjectMergeRequestApprovalManager
    awardemojis: ProjectMergeRequestAwardEmojiManager
    diffs: "ProjectMergeRequestDiffManager"
    discussions: ProjectMergeRequestDiscussionManager
    draft_notes: ProjectMergeRequestDraftNoteManager
    notes: ProjectMergeRequestNoteManager
    pipelines: ProjectMergeRequestPipelineManager
    resourcelabelevents: ProjectMergeRequestResourceLabelEventManager
    resourcemilestoneevents: ProjectMergeRequestResourceMilestoneEventManager
    resourcestateevents: ProjectMergeRequestResourceStateEventManager
    reviewer_details: ProjectMergeRequestReviewerDetailManager

    @cli.register_custom_action(cls_names="ProjectMergeRequest")
    @exc.on_http_error(exc.GitlabMROnBuildSuccessError)
    def cancel_merge_when_pipeline_succeeds(self, **kwargs: Any) -> Dict[str, str]:
        """Cancel merge when the pipeline succeeds.

        Args:
            **kwargs: Extra options to send to the server (e.g. sudo)

        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabMROnBuildSuccessError: If the server could not handle the
                request

        Returns:
            dict of the parsed json returned by the server
        """
        path = (
            f"{self.manager.path}/{self.encoded_id}/cancel_merge_when_pipeline_succeeds"
        )
        server_data = self.manager.gitlab.http_post(path, **kwargs)
        # 2022-10-30: The docs at
        # https://docs.gitlab.com/ee/api/merge_requests.html#cancel-merge-when-pipeline-succeeds
        # are incorrect in that the return value is actually just:
        # {'status': 'success'} for a successful cancel.
        if TYPE_CHECKING:
            assert isinstance(server_data, dict)
        return server_data

    @cli.register_custom_action(cls_names="ProjectMergeRequest")
    @exc.on_http_error(exc.GitlabListError)
    def related_issues(self, **kwargs: Any) -> RESTObjectList:
        """List issues related to this merge request.

        Args:
            all: If True, return all the items, without pagination
            per_page: Number of items to retrieve per request
            page: ID of the page to return (starts with page 1)
            **kwargs: Extra options to send to the server (e.g. sudo)

        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabListError: If the list could not be retrieved

        Returns:
            List of issues
        """
        path = f"{self.manager.path}/{self.encoded_id}/related_issues"
        data_list = self.manager.gitlab.http_list(path, iterator=True, **kwargs)
        if TYPE_CHECKING:
            assert isinstance(data_list, gitlab.GitlabList)
        manager = ProjectIssueManager(self.manager.gitlab, parent=self.manager._parent)
        return RESTObjectList(manager, ProjectIssue, data_list)

    @cli.register_custom_action(cls_names="ProjectMergeRequest")
    @exc.on_http_error(exc.GitlabListError)
    def closes_issues(self, **kwargs: Any) -> RESTObjectList:
        """List issues that will close on merge.

        Args:
            all: If True, return all the items, without pagination
            per_page: Number of items to retrieve per request
            page: ID of the page to return (starts with page 1)
            **kwargs: Extra options to send to the server (e.g. sudo)

        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabListError: If the list could not be retrieved

        Returns:
            List of issues
        """
        path = f"{self.manager.path}/{self.encoded_id}/closes_issues"
        data_list = self.manager.gitlab.http_list(path, iterator=True, **kwargs)
        if TYPE_CHECKING:
            assert isinstance(data_list, gitlab.GitlabList)
        manager = ProjectIssueManager(self.manager.gitlab, parent=self.manager._parent)
        return RESTObjectList(manager, ProjectIssue, data_list)

    @cli.register_custom_action(cls_names="ProjectMergeRequest")
    @exc.on_http_error(exc.GitlabListError)
    def commits(self, **kwargs: Any) -> RESTObjectList:
        """List the merge request commits.

        Args:
            all: If True, return all the items, without pagination
            per_page: Number of items to retrieve per request
            page: ID of the page to return (starts with page 1)
            **kwargs: Extra options to send to the server (e.g. sudo)

        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabListError: If the list could not be retrieved

        Returns:
            The list of commits
        """
        path = f"{self.manager.path}/{self.encoded_id}/commits"
        data_list = self.manager.gitlab.http_list(path, iterator=True, **kwargs)
        if TYPE_CHECKING:
            assert isinstance(data_list, gitlab.GitlabList)
        manager = ProjectCommitManager(self.manager.gitlab, parent=self.manager._parent)
        return RESTObjectList(manager, ProjectCommit, data_list)

    @cli.register_custom_action(
        cls_names="ProjectMergeRequest", optional=("access_raw_diffs",)
    )
    @exc.on_http_error(exc.GitlabListError)
    def changes(self, **kwargs: Any) -> Union[Dict[str, Any], requests.Response]:
        """List the merge request changes.

        Args:
            **kwargs: Extra options to send to the server (e.g. sudo)

        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabListError: If the list could not be retrieved

        Returns:
            List of changes
        """
        path = f"{self.manager.path}/{self.encoded_id}/changes"
        return self.manager.gitlab.http_get(path, **kwargs)

    @cli.register_custom_action(cls_names="ProjectMergeRequest", optional=("sha",))
    @exc.on_http_error(exc.GitlabMRApprovalError)
    def approve(self, sha: Optional[str] = None, **kwargs: Any) -> Dict[str, Any]:
        """Approve the merge request.

        Args:
            sha: Head SHA of MR
            **kwargs: Extra options to send to the server (e.g. sudo)

        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabMRApprovalError: If the approval failed

        Returns:
            A dict containing the result.

        https://docs.gitlab.com/ee/api/merge_request_approvals.html#approve-merge-request
        """
        path = f"{self.manager.path}/{self.encoded_id}/approve"
        data: Dict[str, Any] = {}
        if sha:
            data["sha"] = sha
        server_data = self.manager.gitlab.http_post(path, post_data=data, **kwargs)
        if TYPE_CHECKING:
            assert isinstance(server_data, dict)
        self._update_attrs(server_data)
        return server_data

    @cli.register_custom_action(cls_names="ProjectMergeRequest")
    @exc.on_http_error(exc.GitlabMRApprovalError)
    def unapprove(self, **kwargs: Any) -> None:
        """Unapprove the merge request.

        Args:
            **kwargs: Extra options to send to the server (e.g. sudo)

        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabMRApprovalError: If the unapproval failed

        https://docs.gitlab.com/ee/api/merge_request_approvals.html#unapprove-merge-request
        """
        path = f"{self.manager.path}/{self.encoded_id}/unapprove"
        data: Dict[str, Any] = {}
        server_data = self.manager.gitlab.http_post(path, post_data=data, **kwargs)
        if TYPE_CHECKING:
            assert isinstance(server_data, dict)
        self._update_attrs(server_data)

    @cli.register_custom_action(cls_names="ProjectMergeRequest")
    @exc.on_http_error(exc.GitlabMRRebaseError)
    def rebase(self, **kwargs: Any) -> Union[Dict[str, Any], requests.Response]:
        """Attempt to rebase the source branch onto the target branch

        Args:
            **kwargs: Extra options to send to the server (e.g. sudo)

        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabMRRebaseError: If rebasing failed
        """
        path = f"{self.manager.path}/{self.encoded_id}/rebase"
        data: Dict[str, Any] = {}
        return self.manager.gitlab.http_put(path, post_data=data, **kwargs)

    @cli.register_custom_action(cls_names="ProjectMergeRequest")
    @exc.on_http_error(exc.GitlabMRResetApprovalError)
    def reset_approvals(
        self, **kwargs: Any
    ) -> Union[Dict[str, Any], requests.Response]:
        """Clear all approvals of the merge request.

        Args:
            **kwargs: Extra options to send to the server (e.g. sudo)

        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabMRResetApprovalError: If reset approval failed
        """
        path = f"{self.manager.path}/{self.encoded_id}/reset_approvals"
        data: Dict[str, Any] = {}
        return self.manager.gitlab.http_put(path, post_data=data, **kwargs)

    @cli.register_custom_action(cls_names="ProjectMergeRequest")
    @exc.on_http_error(exc.GitlabGetError)
    def merge_ref(self, **kwargs: Any) -> Union[Dict[str, Any], requests.Response]:
        """Attempt to merge changes between source and target branches into
        `refs/merge-requests/:iid/merge`.

        Args:
            **kwargs: Extra options to send to the server (e.g. sudo)

        Raises:
            GitlabGetError: If cannot be merged
        """
        path = f"{self.manager.path}/{self.encoded_id}/merge_ref"
        return self.manager.gitlab.http_get(path, **kwargs)

    @cli.register_custom_action(
        cls_names="ProjectMergeRequest",
        optional=(
            "merge_commit_message",
            "should_remove_source_branch",
            "merge_when_pipeline_succeeds",
        ),
    )
    @exc.on_http_error(exc.GitlabMRClosedError)
    def merge(
        self,
        merge_commit_message: Optional[str] = None,
        should_remove_source_branch: Optional[bool] = None,
        merge_when_pipeline_succeeds: Optional[bool] = None,
        **kwargs: Any,
    ) -> Dict[str, Any]:
        """Accept the merge request.

        Args:
            merge_commit_message: Commit message
            should_remove_source_branch: If True, removes the source
                branch
            merge_when_pipeline_succeeds: Wait for the build to succeed,
                then merge
            **kwargs: Extra options to send to the server (e.g. sudo)

        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabMRClosedError: If the merge failed
        """
        path = f"{self.manager.path}/{self.encoded_id}/merge"
        data: Dict[str, Any] = {}
        if merge_commit_message:
            data["merge_commit_message"] = merge_commit_message
        if should_remove_source_branch is not None:
            data["should_remove_source_branch"] = should_remove_source_branch
        if merge_when_pipeline_succeeds is not None:
            data["merge_when_pipeline_succeeds"] = merge_when_pipeline_succeeds
        server_data = self.manager.gitlab.http_put(path, post_data=data, **kwargs)
        if TYPE_CHECKING:
            assert isinstance(server_data, dict)
        self._update_attrs(server_data)
        return server_data
class ProjectMergeRequestManager(CRUDMixin, RESTManager):
    """Manager for the merge requests of a single project (full CRUD)."""

    _path = "/projects/{project_id}/merge_requests"
    _obj_cls = ProjectMergeRequest
    _from_parent_attrs = {"project_id": "id"}
    # Extra query-string options accepted by get().
    _optional_get_attrs = (
        "render_html",
        "include_diverged_commits_count",
        "include_rebase_in_progress",
    )
    _create_attrs = RequiredOptional(
        required=("source_branch", "target_branch", "title"),
        optional=(
            "allow_collaboration",
            "allow_maintainer_to_push",
            "approvals_before_merge",
            "assignee_id",
            "assignee_ids",
            "description",
            "labels",
            "milestone_id",
            "remove_source_branch",
            "reviewer_ids",
            "squash",
            "target_project_id",
        ),
    )
    _update_attrs = RequiredOptional(
        optional=(
            "target_branch",
            "assignee_id",
            "title",
            "description",
            "state_event",
            "labels",
            "milestone_id",
            "remove_source_branch",
            "discussion_locked",
            "allow_maintainer_to_push",
            "squash",
            "reviewer_ids",
        ),
    )
    # Query-string filters accepted by list().
    _list_filters = (
        "state",
        "order_by",
        "sort",
        "milestone",
        "view",
        "labels",
        "created_after",
        "created_before",
        "updated_after",
        "updated_before",
        "scope",
        "iids",
        "author_id",
        "assignee_id",
        "approver_ids",
        "approved_by_ids",
        "my_reaction_emoji",
        "source_branch",
        "target_branch",
        "search",
        "wip",
    )
    # How list/array attributes are serialized on the wire.
    _types = {
        "approver_ids": types.ArrayAttribute,
        "approved_by_ids": types.ArrayAttribute,
        "iids": types.ArrayAttribute,
        "labels": types.CommaSeparatedListAttribute,
    }

    def get(
        self, id: Union[str, int], lazy: bool = False, **kwargs: Any
    ) -> ProjectMergeRequest:
        """Typed wrapper so callers get a ProjectMergeRequest, not a bare RESTObject."""
        return cast(ProjectMergeRequest, super().get(id=id, lazy=lazy, **kwargs))
class ProjectDeploymentMergeRequest(MergeRequest):
    """A merge request shipped with a given deployment (read-only)."""

    pass
class ProjectDeploymentMergeRequestManager(MergeRequestManager):
    """Manager for merge requests associated with a deployment (list only)."""

    _path = "/projects/{project_id}/deployments/{deployment_id}/merge_requests"
    _obj_cls = ProjectDeploymentMergeRequest
    _from_parent_attrs = {"deployment_id": "id", "project_id": "project_id"}
class ProjectMergeRequestDiff(RESTObject):
    """A single version (diff) of a merge request (read-only)."""

    pass
class ProjectMergeRequestDiffManager(RetrieveMixin, RESTManager):
    """Manager for the diff versions of a merge request (list/get only)."""

    _path = "/projects/{project_id}/merge_requests/{mr_iid}/versions"
    _obj_cls = ProjectMergeRequestDiff
    _from_parent_attrs = {"project_id": "project_id", "mr_iid": "iid"}

    def get(
        self, id: Union[str, int], lazy: bool = False, **kwargs: Any
    ) -> ProjectMergeRequestDiff:
        """Typed wrapper so callers get a ProjectMergeRequestDiff."""
        return cast(ProjectMergeRequestDiff, super().get(id=id, lazy=lazy, **kwargs))
| 18,532 | Python | .py | 468 | 31.200855 | 96 | 0.64945 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,074 | award_emojis.py | python-gitlab_python-gitlab/gitlab/v4/objects/award_emojis.py | from typing import Any, cast, Union
from gitlab.base import RESTManager, RESTObject
from gitlab.mixins import NoUpdateMixin, ObjectDeleteMixin
from gitlab.types import RequiredOptional
__all__ = [
"GroupEpicAwardEmoji",
"GroupEpicAwardEmojiManager",
"GroupEpicNoteAwardEmoji",
"GroupEpicNoteAwardEmojiManager",
"ProjectIssueAwardEmoji",
"ProjectIssueAwardEmojiManager",
"ProjectIssueNoteAwardEmoji",
"ProjectIssueNoteAwardEmojiManager",
"ProjectMergeRequestAwardEmoji",
"ProjectMergeRequestAwardEmojiManager",
"ProjectMergeRequestNoteAwardEmoji",
"ProjectMergeRequestNoteAwardEmojiManager",
"ProjectSnippetAwardEmoji",
"ProjectSnippetAwardEmojiManager",
"ProjectSnippetNoteAwardEmoji",
"ProjectSnippetNoteAwardEmojiManager",
]
class GroupEpicAwardEmoji(ObjectDeleteMixin, RESTObject):
    """An award emoji on a group epic; supports delete()."""

    pass
class GroupEpicAwardEmojiManager(NoUpdateMixin, RESTManager):
    """Manager for award emoji on a group epic (no update support)."""

    _path = "/groups/{group_id}/epics/{epic_iid}/award_emoji"
    _obj_cls = GroupEpicAwardEmoji
    _from_parent_attrs = {"group_id": "group_id", "epic_iid": "iid"}
    _create_attrs = RequiredOptional(required=("name",))

    def get(
        self, id: Union[str, int], lazy: bool = False, **kwargs: Any
    ) -> GroupEpicAwardEmoji:
        """Typed wrapper so callers get a GroupEpicAwardEmoji."""
        return cast(GroupEpicAwardEmoji, super().get(id=id, lazy=lazy, **kwargs))
class GroupEpicNoteAwardEmoji(ObjectDeleteMixin, RESTObject):
    """An award emoji on a note of a group epic; supports delete()."""

    pass
class GroupEpicNoteAwardEmojiManager(NoUpdateMixin, RESTManager):
    """Manager for award emoji on a group epic note (no update support)."""

    _path = "/groups/{group_id}/epics/{epic_iid}/notes/{note_id}/award_emoji"
    _obj_cls = GroupEpicNoteAwardEmoji
    _from_parent_attrs = {
        "group_id": "group_id",
        "epic_iid": "epic_iid",
        "note_id": "id",
    }
    _create_attrs = RequiredOptional(required=("name",))

    def get(
        self, id: Union[str, int], lazy: bool = False, **kwargs: Any
    ) -> GroupEpicNoteAwardEmoji:
        """Typed wrapper so callers get a GroupEpicNoteAwardEmoji."""
        return cast(GroupEpicNoteAwardEmoji, super().get(id=id, lazy=lazy, **kwargs))
class ProjectIssueAwardEmoji(ObjectDeleteMixin, RESTObject):
    """An award emoji on a project issue; supports delete()."""

    pass
class ProjectIssueAwardEmojiManager(NoUpdateMixin, RESTManager):
    """Manager for award emoji on a project issue (no update support)."""

    _path = "/projects/{project_id}/issues/{issue_iid}/award_emoji"
    _obj_cls = ProjectIssueAwardEmoji
    _from_parent_attrs = {"project_id": "project_id", "issue_iid": "iid"}
    _create_attrs = RequiredOptional(required=("name",))

    def get(
        self, id: Union[str, int], lazy: bool = False, **kwargs: Any
    ) -> ProjectIssueAwardEmoji:
        """Typed wrapper so callers get a ProjectIssueAwardEmoji."""
        return cast(ProjectIssueAwardEmoji, super().get(id=id, lazy=lazy, **kwargs))
class ProjectIssueNoteAwardEmoji(ObjectDeleteMixin, RESTObject):
    """An award emoji on a note of a project issue; supports delete()."""

    pass
class ProjectIssueNoteAwardEmojiManager(NoUpdateMixin, RESTManager):
    """Manager for award emoji on a project issue note (no update support)."""

    _path = "/projects/{project_id}/issues/{issue_iid}/notes/{note_id}/award_emoji"
    _obj_cls = ProjectIssueNoteAwardEmoji
    _from_parent_attrs = {
        "project_id": "project_id",
        "issue_iid": "issue_iid",
        "note_id": "id",
    }
    _create_attrs = RequiredOptional(required=("name",))

    def get(
        self, id: Union[str, int], lazy: bool = False, **kwargs: Any
    ) -> ProjectIssueNoteAwardEmoji:
        """Typed wrapper so callers get a ProjectIssueNoteAwardEmoji."""
        return cast(ProjectIssueNoteAwardEmoji, super().get(id=id, lazy=lazy, **kwargs))
class ProjectMergeRequestAwardEmoji(ObjectDeleteMixin, RESTObject):
    """An award emoji on a merge request; supports delete()."""

    pass
class ProjectMergeRequestAwardEmojiManager(NoUpdateMixin, RESTManager):
    """Manager for award emoji on a merge request (no update support)."""

    _path = "/projects/{project_id}/merge_requests/{mr_iid}/award_emoji"
    _obj_cls = ProjectMergeRequestAwardEmoji
    _from_parent_attrs = {"project_id": "project_id", "mr_iid": "iid"}
    _create_attrs = RequiredOptional(required=("name",))

    def get(
        self, id: Union[str, int], lazy: bool = False, **kwargs: Any
    ) -> ProjectMergeRequestAwardEmoji:
        """Typed wrapper so callers get a ProjectMergeRequestAwardEmoji."""
        return cast(
            ProjectMergeRequestAwardEmoji, super().get(id=id, lazy=lazy, **kwargs)
        )
class ProjectMergeRequestNoteAwardEmoji(ObjectDeleteMixin, RESTObject):
    """An award emoji on a note of a merge request; supports delete()."""

    pass
class ProjectMergeRequestNoteAwardEmojiManager(NoUpdateMixin, RESTManager):
    """Manager for award emoji on a merge request note (no update support)."""

    _path = "/projects/{project_id}/merge_requests/{mr_iid}/notes/{note_id}/award_emoji"
    _obj_cls = ProjectMergeRequestNoteAwardEmoji
    _from_parent_attrs = {
        "project_id": "project_id",
        "mr_iid": "mr_iid",
        "note_id": "id",
    }
    _create_attrs = RequiredOptional(required=("name",))

    def get(
        self, id: Union[str, int], lazy: bool = False, **kwargs: Any
    ) -> ProjectMergeRequestNoteAwardEmoji:
        """Typed wrapper so callers get a ProjectMergeRequestNoteAwardEmoji."""
        return cast(
            ProjectMergeRequestNoteAwardEmoji, super().get(id=id, lazy=lazy, **kwargs)
        )
class ProjectSnippetAwardEmoji(ObjectDeleteMixin, RESTObject):
    """An award emoji on a project snippet; supports delete()."""

    pass
class ProjectSnippetAwardEmojiManager(NoUpdateMixin, RESTManager):
    """Manager for award emoji on a project snippet (no update support)."""

    _path = "/projects/{project_id}/snippets/{snippet_id}/award_emoji"
    _obj_cls = ProjectSnippetAwardEmoji
    _from_parent_attrs = {"project_id": "project_id", "snippet_id": "id"}
    _create_attrs = RequiredOptional(required=("name",))

    def get(
        self, id: Union[str, int], lazy: bool = False, **kwargs: Any
    ) -> ProjectSnippetAwardEmoji:
        """Typed wrapper so callers get a ProjectSnippetAwardEmoji."""
        return cast(ProjectSnippetAwardEmoji, super().get(id=id, lazy=lazy, **kwargs))
class ProjectSnippetNoteAwardEmoji(ObjectDeleteMixin, RESTObject):
    """An award emoji on a note of a project snippet; supports delete()."""

    pass
class ProjectSnippetNoteAwardEmojiManager(NoUpdateMixin, RESTManager):
    """Manager for award emoji on a project snippet note (no update support)."""

    _path = "/projects/{project_id}/snippets/{snippet_id}/notes/{note_id}/award_emoji"
    _obj_cls = ProjectSnippetNoteAwardEmoji
    _from_parent_attrs = {
        "project_id": "project_id",
        "snippet_id": "snippet_id",
        "note_id": "id",
    }
    _create_attrs = RequiredOptional(required=("name",))

    def get(
        self, id: Union[str, int], lazy: bool = False, **kwargs: Any
    ) -> ProjectSnippetNoteAwardEmoji:
        """Typed wrapper so callers get a ProjectSnippetNoteAwardEmoji."""
        return cast(
            ProjectSnippetNoteAwardEmoji, super().get(id=id, lazy=lazy, **kwargs)
        )
| 5,881 | Python | .py | 132 | 38.780303 | 88 | 0.704573 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,075 | broadcast_messages.py | python-gitlab_python-gitlab/gitlab/v4/objects/broadcast_messages.py | from typing import Any, cast, Union
from gitlab.base import RESTManager, RESTObject
from gitlab.mixins import CRUDMixin, ObjectDeleteMixin, SaveMixin
from gitlab.types import ArrayAttribute, RequiredOptional
__all__ = [
"BroadcastMessage",
"BroadcastMessageManager",
]
class BroadcastMessage(SaveMixin, ObjectDeleteMixin, RESTObject):
    """An instance-wide broadcast message; supports save() and delete()."""

    pass
class BroadcastMessageManager(CRUDMixin, RESTManager):
    """Manager for instance-wide broadcast messages (full CRUD)."""

    _path = "/broadcast_messages"
    _obj_cls = BroadcastMessage
    _create_attrs = RequiredOptional(
        required=("message",),
        optional=("starts_at", "ends_at", "color", "font", "target_access_levels"),
    )
    _update_attrs = RequiredOptional(
        optional=(
            "message",
            "starts_at",
            "ends_at",
            "color",
            "font",
            "target_access_levels",
        )
    )
    # target_access_levels is sent as an array on the wire.
    _types = {"target_access_levels": ArrayAttribute}

    def get(
        self, id: Union[str, int], lazy: bool = False, **kwargs: Any
    ) -> BroadcastMessage:
        """Typed wrapper so callers get a BroadcastMessage, not a bare RESTObject."""
        return cast(BroadcastMessage, super().get(id=id, lazy=lazy, **kwargs))
| 1,103 | Python | .py | 32 | 27.96875 | 83 | 0.65381 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,076 | geo_nodes.py | python-gitlab_python-gitlab/gitlab/v4/objects/geo_nodes.py | from typing import Any, cast, Dict, List, TYPE_CHECKING, Union
from gitlab import cli
from gitlab import exceptions as exc
from gitlab.base import RESTManager, RESTObject
from gitlab.mixins import (
DeleteMixin,
ObjectDeleteMixin,
RetrieveMixin,
SaveMixin,
UpdateMixin,
)
from gitlab.types import RequiredOptional
__all__ = [
"GeoNode",
"GeoNodeManager",
]
class GeoNode(SaveMixin, ObjectDeleteMixin, RESTObject):
    """A single Geo node; supports save(), delete() and the custom actions below."""

    @cli.register_custom_action(cls_names="GeoNode")
    @exc.on_http_error(exc.GitlabRepairError)
    def repair(self, **kwargs: Any) -> None:
        """Repair the OAuth authentication of the geo node.

        Args:
            **kwargs: Extra options to send to the server (e.g. sudo)

        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabRepairError: If the server failed to perform the request
        """
        response = self.manager.gitlab.http_post(
            f"/geo_nodes/{self.encoded_id}/repair", **kwargs
        )
        if TYPE_CHECKING:
            assert isinstance(response, dict)
        # Refresh local attributes from the server's response.
        self._update_attrs(response)

    @cli.register_custom_action(cls_names="GeoNode")
    @exc.on_http_error(exc.GitlabGetError)
    def status(self, **kwargs: Any) -> Dict[str, Any]:
        """Get the status of the geo node.

        Args:
            **kwargs: Extra options to send to the server (e.g. sudo)

        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabGetError: If the server failed to perform the request

        Returns:
            The status of the geo node
        """
        status_data = self.manager.gitlab.http_get(
            f"/geo_nodes/{self.encoded_id}/status", **kwargs
        )
        if TYPE_CHECKING:
            assert isinstance(status_data, dict)
        return status_data
class GeoNodeManager(RetrieveMixin, UpdateMixin, DeleteMixin, RESTManager):
    """Manager for Geo nodes (list/get/update/delete, no create)."""

    _path = "/geo_nodes"
    _obj_cls = GeoNode
    _update_attrs = RequiredOptional(
        optional=("enabled", "url", "files_max_capacity", "repos_max_capacity"),
    )

    def get(self, id: Union[str, int], lazy: bool = False, **kwargs: Any) -> GeoNode:
        """Typed wrapper so callers get a GeoNode, not a bare RESTObject."""
        return cast(GeoNode, super().get(id=id, lazy=lazy, **kwargs))

    @cli.register_custom_action(cls_names="GeoNodeManager")
    @exc.on_http_error(exc.GitlabGetError)
    def status(self, **kwargs: Any) -> List[Dict[str, Any]]:
        """Get the status of all the geo nodes.

        Args:
            **kwargs: Extra options to send to the server (e.g. sudo)

        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabGetError: If the server failed to perform the request

        Returns:
            The status of all the geo nodes
        """
        result = self.gitlab.http_list("/geo_nodes/status", **kwargs)
        if TYPE_CHECKING:
            assert isinstance(result, list)
        return result

    @cli.register_custom_action(cls_names="GeoNodeManager")
    @exc.on_http_error(exc.GitlabGetError)
    def current_failures(self, **kwargs: Any) -> List[Dict[str, Any]]:
        """Get the list of failures on the current geo node.

        Args:
            **kwargs: Extra options to send to the server (e.g. sudo)

        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabGetError: If the server failed to perform the request

        Returns:
            The list of failures
        """
        result = self.gitlab.http_list("/geo_nodes/current/failures", **kwargs)
        if TYPE_CHECKING:
            assert isinstance(result, list)
        return result
| 3,686 | Python | .py | 89 | 33.393258 | 85 | 0.651566 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,077 | epics.py | python-gitlab_python-gitlab/gitlab/v4/objects/epics.py | from typing import Any, cast, Dict, Optional, TYPE_CHECKING, Union
from gitlab import exceptions as exc
from gitlab import types
from gitlab.base import RESTManager, RESTObject
from gitlab.mixins import (
CreateMixin,
CRUDMixin,
DeleteMixin,
ListMixin,
ObjectDeleteMixin,
SaveMixin,
UpdateMixin,
)
from gitlab.types import RequiredOptional
from .events import GroupEpicResourceLabelEventManager # noqa: F401
from .notes import GroupEpicNoteManager # noqa: F401
__all__ = [
"GroupEpic",
"GroupEpicManager",
"GroupEpicIssue",
"GroupEpicIssueManager",
]
class GroupEpic(ObjectDeleteMixin, SaveMixin, RESTObject):
    """A single group epic, addressed by its ``iid``; supports save() and delete()."""

    _id_attr = "iid"

    # Sub-resource managers, populated by the RESTObject machinery.
    issues: "GroupEpicIssueManager"
    resourcelabelevents: GroupEpicResourceLabelEventManager
    notes: GroupEpicNoteManager
class GroupEpicManager(CRUDMixin, RESTManager):
    """Manager for the epics of a group (full CRUD)."""

    _path = "/groups/{group_id}/epics"
    _obj_cls = GroupEpic
    _from_parent_attrs = {"group_id": "id"}
    _list_filters = ("author_id", "labels", "order_by", "sort", "search")
    _create_attrs = RequiredOptional(
        required=("title",),
        optional=("labels", "description", "start_date", "end_date"),
    )
    _update_attrs = RequiredOptional(
        optional=("title", "labels", "description", "start_date", "end_date"),
    )
    # labels are sent as a comma-separated list on the wire.
    _types = {"labels": types.CommaSeparatedListAttribute}

    def get(self, id: Union[str, int], lazy: bool = False, **kwargs: Any) -> GroupEpic:
        """Typed wrapper so callers get a GroupEpic, not a bare RESTObject."""
        return cast(GroupEpic, super().get(id=id, lazy=lazy, **kwargs))
class GroupEpicIssue(ObjectDeleteMixin, SaveMixin, RESTObject):
    """An issue attached to an epic, addressed by its ``epic_issue_id``."""

    _id_attr = "epic_issue_id"
    # Typing 'manager' explicitly keeps mypy happy with the
    # 'self.manager.update()' call inside save().
    manager: "GroupEpicIssueManager"

    def save(self, **kwargs: Any) -> None:
        """Save the changes made to the object to the server.

        The object is updated to match what the server returns.

        Args:
            **kwargs: Extra options to send to the server (e.g. sudo)

        Raise:
            GitlabAuthenticationError: If authentication is not correct
            GitlabUpdateError: If the server cannot perform the request
        """
        changes = self._get_updated_data()
        # The server rejects an empty payload, so skip the request entirely
        # when nothing changed.
        if not changes:
            return
        self.manager.update(self.encoded_id, changes, **kwargs)
class GroupEpicIssueManager(
ListMixin, CreateMixin, UpdateMixin, DeleteMixin, RESTManager
):
_path = "/groups/{group_id}/epics/{epic_iid}/issues"
_obj_cls = GroupEpicIssue
_from_parent_attrs = {"group_id": "group_id", "epic_iid": "iid"}
_create_attrs = RequiredOptional(required=("issue_id",))
_update_attrs = RequiredOptional(optional=("move_before_id", "move_after_id"))
@exc.on_http_error(exc.GitlabCreateError)
def create(
self, data: Optional[Dict[str, Any]] = None, **kwargs: Any
) -> GroupEpicIssue:
"""Create a new object.
Args:
data: Parameters to send to the server to create the
resource
**kwargs: Extra options to send to the server (e.g. sudo)
Raises:
GitlabAuthenticationError: If authentication is not correct
GitlabCreateError: If the server cannot perform the request
Returns:
A new instance of the manage object class build with
the data sent by the server
"""
if TYPE_CHECKING:
assert data is not None
self._create_attrs.validate_attrs(data=data)
path = f"{self.path}/{data.pop('issue_id')}"
server_data = self.gitlab.http_post(path, **kwargs)
if TYPE_CHECKING:
assert isinstance(server_data, dict)
# The epic_issue_id attribute doesn't exist when creating the resource,
# but is used everywhere elese. Let's create it to be consistent client
# side
server_data["epic_issue_id"] = server_data["id"]
return self._obj_cls(self, server_data)
| 4,150 | Python | .py | 99 | 34.868687 | 87 | 0.665177 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,078 | settings.py | python-gitlab_python-gitlab/gitlab/v4/objects/settings.py | from typing import Any, cast, Dict, Optional, Union
from gitlab import exceptions as exc
from gitlab import types
from gitlab.base import RESTManager, RESTObject
from gitlab.mixins import GetWithoutIdMixin, SaveMixin, UpdateMixin
from gitlab.types import RequiredOptional
__all__ = [
"ApplicationSettings",
"ApplicationSettingsManager",
]
class ApplicationSettings(SaveMixin, RESTObject):
_id_attr = None
class ApplicationSettingsManager(GetWithoutIdMixin, UpdateMixin, RESTManager):
_path = "/application/settings"
_obj_cls = ApplicationSettings
_update_attrs = RequiredOptional(
optional=(
"id",
"default_projects_limit",
"signup_enabled",
"password_authentication_enabled_for_web",
"gravatar_enabled",
"sign_in_text",
"created_at",
"updated_at",
"home_page_url",
"default_branch_protection",
"restricted_visibility_levels",
"max_attachment_size",
"session_expire_delay",
"default_project_visibility",
"default_snippet_visibility",
"default_group_visibility",
"outbound_local_requests_whitelist",
"disabled_oauth_sign_in_sources",
"domain_whitelist",
"domain_blacklist_enabled",
"domain_blacklist",
"domain_allowlist",
"domain_denylist_enabled",
"domain_denylist",
"external_authorization_service_enabled",
"external_authorization_service_url",
"external_authorization_service_default_label",
"external_authorization_service_timeout",
"import_sources",
"user_oauth_applications",
"after_sign_out_path",
"container_registry_token_expire_delay",
"repository_storages",
"plantuml_enabled",
"plantuml_url",
"terminal_max_session_time",
"polling_interval_multiplier",
"rsa_key_restriction",
"dsa_key_restriction",
"ecdsa_key_restriction",
"ed25519_key_restriction",
"first_day_of_week",
"enforce_terms",
"terms",
"performance_bar_allowed_group_id",
"instance_statistics_visibility_private",
"user_show_add_ssh_key_message",
"file_template_project_id",
"local_markdown_version",
"asset_proxy_enabled",
"asset_proxy_url",
"asset_proxy_whitelist",
"asset_proxy_allowlist",
"geo_node_allowed_ips",
"allow_local_requests_from_hooks_and_services",
"allow_local_requests_from_web_hooks_and_services",
"allow_local_requests_from_system_hooks",
),
)
_types = {
"asset_proxy_allowlist": types.ArrayAttribute,
"disabled_oauth_sign_in_sources": types.ArrayAttribute,
"domain_allowlist": types.ArrayAttribute,
"domain_denylist": types.ArrayAttribute,
"import_sources": types.ArrayAttribute,
"restricted_visibility_levels": types.ArrayAttribute,
}
@exc.on_http_error(exc.GitlabUpdateError)
def update(
self,
id: Optional[Union[str, int]] = None,
new_data: Optional[Dict[str, Any]] = None,
**kwargs: Any,
) -> Dict[str, Any]:
"""Update an object on the server.
Args:
id: ID of the object to update (can be None if not required)
new_data: the update data for the object
**kwargs: Extra options to send to the server (e.g. sudo)
Returns:
The new object data (*not* a RESTObject)
Raises:
GitlabAuthenticationError: If authentication is not correct
GitlabUpdateError: If the server cannot perform the request
"""
new_data = new_data or {}
data = new_data.copy()
if "domain_whitelist" in data and data["domain_whitelist"] is None:
data.pop("domain_whitelist")
return super().update(id, data, **kwargs)
def get(self, **kwargs: Any) -> ApplicationSettings:
return cast(ApplicationSettings, super().get(**kwargs))
| 4,295 | Python | .py | 109 | 29.165138 | 78 | 0.604551 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,079 | merge_request_approvals.py | python-gitlab_python-gitlab/gitlab/v4/objects/merge_request_approvals.py | from typing import Any, cast, List, Optional, TYPE_CHECKING, Union
from gitlab import exceptions as exc
from gitlab.base import RESTManager, RESTObject
from gitlab.mixins import (
CreateMixin,
CRUDMixin,
DeleteMixin,
GetWithoutIdMixin,
ListMixin,
ObjectDeleteMixin,
SaveMixin,
UpdateMethod,
UpdateMixin,
)
from gitlab.types import RequiredOptional
__all__ = [
"ProjectApproval",
"ProjectApprovalManager",
"ProjectApprovalRule",
"ProjectApprovalRuleManager",
"ProjectMergeRequestApproval",
"ProjectMergeRequestApprovalManager",
"ProjectMergeRequestApprovalRule",
"ProjectMergeRequestApprovalRuleManager",
"ProjectMergeRequestApprovalState",
"ProjectMergeRequestApprovalStateManager",
]
class ProjectApproval(SaveMixin, RESTObject):
_id_attr = None
class ProjectApprovalManager(GetWithoutIdMixin, UpdateMixin, RESTManager):
_path = "/projects/{project_id}/approvals"
_obj_cls = ProjectApproval
_from_parent_attrs = {"project_id": "id"}
_update_attrs = RequiredOptional(
optional=(
"approvals_before_merge",
"reset_approvals_on_push",
"disable_overriding_approvers_per_merge_request",
"merge_requests_author_approval",
"merge_requests_disable_committers_approval",
),
)
_update_method = UpdateMethod.POST
def get(self, **kwargs: Any) -> ProjectApproval:
return cast(ProjectApproval, super().get(**kwargs))
class ProjectApprovalRule(SaveMixin, ObjectDeleteMixin, RESTObject):
_id_attr = "id"
_repr_attr = "name"
class ProjectApprovalRuleManager(
ListMixin, CreateMixin, UpdateMixin, DeleteMixin, RESTManager
):
_path = "/projects/{project_id}/approval_rules"
_obj_cls = ProjectApprovalRule
_from_parent_attrs = {"project_id": "id"}
_create_attrs = RequiredOptional(
required=("name", "approvals_required"),
optional=("user_ids", "group_ids", "protected_branch_ids", "usernames"),
)
class ProjectMergeRequestApproval(SaveMixin, RESTObject):
_id_attr = None
class ProjectMergeRequestApprovalManager(GetWithoutIdMixin, UpdateMixin, RESTManager):
_path = "/projects/{project_id}/merge_requests/{mr_iid}/approvals"
_obj_cls = ProjectMergeRequestApproval
_from_parent_attrs = {"project_id": "project_id", "mr_iid": "iid"}
_update_attrs = RequiredOptional(required=("approvals_required",))
_update_method = UpdateMethod.POST
def get(self, **kwargs: Any) -> ProjectMergeRequestApproval:
return cast(ProjectMergeRequestApproval, super().get(**kwargs))
@exc.on_http_error(exc.GitlabUpdateError)
def set_approvers(
self,
approvals_required: int,
approver_ids: Optional[List[int]] = None,
approver_group_ids: Optional[List[int]] = None,
approval_rule_name: str = "name",
*,
approver_usernames: Optional[List[str]] = None,
**kwargs: Any,
) -> RESTObject:
"""Change MR-level allowed approvers and approver groups.
Args:
approvals_required: The number of required approvals for this rule
approver_ids: User IDs that can approve MRs
approver_group_ids: Group IDs whose members can approve MRs
Raises:
GitlabAuthenticationError: If authentication is not correct
GitlabUpdateError: If the server failed to perform the request
"""
approver_ids = approver_ids or []
approver_group_ids = approver_group_ids or []
approver_usernames = approver_usernames or []
data = {
"name": approval_rule_name,
"approvals_required": approvals_required,
"rule_type": "regular",
"user_ids": approver_ids,
"group_ids": approver_group_ids,
"usernames": approver_usernames,
}
if TYPE_CHECKING:
assert self._parent is not None
approval_rules: ProjectMergeRequestApprovalRuleManager = (
self._parent.approval_rules
)
# update any existing approval rule matching the name
existing_approval_rules = approval_rules.list(iterator=True)
for ar in existing_approval_rules:
if ar.name == approval_rule_name:
ar.user_ids = data["user_ids"]
ar.approvals_required = data["approvals_required"]
ar.group_ids = data["group_ids"]
ar.usernames = data["usernames"]
ar.save()
return ar
# if there was no rule matching the rule name, create a new one
return approval_rules.create(data=data, **kwargs)
class ProjectMergeRequestApprovalRule(SaveMixin, ObjectDeleteMixin, RESTObject):
_repr_attr = "name"
class ProjectMergeRequestApprovalRuleManager(CRUDMixin, RESTManager):
_path = "/projects/{project_id}/merge_requests/{merge_request_iid}/approval_rules"
_obj_cls = ProjectMergeRequestApprovalRule
_from_parent_attrs = {"project_id": "project_id", "merge_request_iid": "iid"}
_update_attrs = RequiredOptional(
required=(
"id",
"merge_request_iid",
"name",
"approvals_required",
),
optional=("user_ids", "group_ids", "usernames"),
)
# Important: When approval_project_rule_id is set, the name, users and
# groups of project-level rule will be copied. The approvals_required
# specified will be used.
_create_attrs = RequiredOptional(
required=("name", "approvals_required"),
optional=("approval_project_rule_id", "user_ids", "group_ids", "usernames"),
)
def get(
self, id: Union[str, int], lazy: bool = False, **kwargs: Any
) -> ProjectMergeRequestApprovalRule:
return cast(
ProjectMergeRequestApprovalRule, super().get(id=id, lazy=lazy, **kwargs)
)
class ProjectMergeRequestApprovalState(RESTObject):
pass
class ProjectMergeRequestApprovalStateManager(GetWithoutIdMixin, RESTManager):
_path = "/projects/{project_id}/merge_requests/{mr_iid}/approval_state"
_obj_cls = ProjectMergeRequestApprovalState
_from_parent_attrs = {"project_id": "project_id", "mr_iid": "iid"}
def get(self, **kwargs: Any) -> ProjectMergeRequestApprovalState:
return cast(ProjectMergeRequestApprovalState, super().get(**kwargs))
| 6,424 | Python | .py | 152 | 34.776316 | 86 | 0.673662 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,080 | notification_settings.py | python-gitlab_python-gitlab/gitlab/v4/objects/notification_settings.py | from typing import Any, cast
from gitlab.base import RESTManager, RESTObject
from gitlab.mixins import GetWithoutIdMixin, SaveMixin, UpdateMixin
from gitlab.types import RequiredOptional
__all__ = [
"NotificationSettings",
"NotificationSettingsManager",
"GroupNotificationSettings",
"GroupNotificationSettingsManager",
"ProjectNotificationSettings",
"ProjectNotificationSettingsManager",
]
class NotificationSettings(SaveMixin, RESTObject):
_id_attr = None
class NotificationSettingsManager(GetWithoutIdMixin, UpdateMixin, RESTManager):
_path = "/notification_settings"
_obj_cls = NotificationSettings
_update_attrs = RequiredOptional(
optional=(
"level",
"notification_email",
"new_note",
"new_issue",
"reopen_issue",
"close_issue",
"reassign_issue",
"new_merge_request",
"reopen_merge_request",
"close_merge_request",
"reassign_merge_request",
"merge_merge_request",
),
)
def get(self, **kwargs: Any) -> NotificationSettings:
return cast(NotificationSettings, super().get(**kwargs))
class GroupNotificationSettings(NotificationSettings):
pass
class GroupNotificationSettingsManager(NotificationSettingsManager):
_path = "/groups/{group_id}/notification_settings"
_obj_cls = GroupNotificationSettings
_from_parent_attrs = {"group_id": "id"}
def get(self, **kwargs: Any) -> GroupNotificationSettings:
return cast(GroupNotificationSettings, super().get(id=id, **kwargs))
class ProjectNotificationSettings(NotificationSettings):
pass
class ProjectNotificationSettingsManager(NotificationSettingsManager):
_path = "/projects/{project_id}/notification_settings"
_obj_cls = ProjectNotificationSettings
_from_parent_attrs = {"project_id": "id"}
def get(self, **kwargs: Any) -> ProjectNotificationSettings:
return cast(ProjectNotificationSettings, super().get(id=id, **kwargs))
| 2,061 | Python | .py | 51 | 33.72549 | 79 | 0.707329 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,081 | invitations.py | python-gitlab_python-gitlab/gitlab/v4/objects/invitations.py | from typing import Any, cast, Union
from gitlab.base import RESTManager, RESTObject
from gitlab.exceptions import GitlabInvitationError
from gitlab.mixins import CRUDMixin, ObjectDeleteMixin, SaveMixin
from gitlab.types import ArrayAttribute, CommaSeparatedListAttribute, RequiredOptional
__all__ = [
"ProjectInvitation",
"ProjectInvitationManager",
"GroupInvitation",
"GroupInvitationManager",
]
class InvitationMixin(CRUDMixin):
def create(self, *args: Any, **kwargs: Any) -> RESTObject:
invitation = super().create(*args, **kwargs)
if invitation.status == "error":
raise GitlabInvitationError(invitation.message)
return invitation
class ProjectInvitation(SaveMixin, ObjectDeleteMixin, RESTObject):
_id_attr = "email"
class ProjectInvitationManager(InvitationMixin, RESTManager):
_path = "/projects/{project_id}/invitations"
_obj_cls = ProjectInvitation
_from_parent_attrs = {"project_id": "id"}
_create_attrs = RequiredOptional(
required=("access_level",),
optional=(
"expires_at",
"invite_source",
"tasks_to_be_done",
"tasks_project_id",
),
exclusive=("email", "user_id"),
)
_update_attrs = RequiredOptional(
optional=("access_level", "expires_at"),
)
_list_filters = ("query",)
_types = {
"email": CommaSeparatedListAttribute,
"user_id": CommaSeparatedListAttribute,
"tasks_to_be_done": ArrayAttribute,
}
def get(
self, id: Union[str, int], lazy: bool = False, **kwargs: Any
) -> ProjectInvitation:
return cast(ProjectInvitation, super().get(id=id, lazy=lazy, **kwargs))
class GroupInvitation(SaveMixin, ObjectDeleteMixin, RESTObject):
_id_attr = "email"
class GroupInvitationManager(InvitationMixin, RESTManager):
_path = "/groups/{group_id}/invitations"
_obj_cls = GroupInvitation
_from_parent_attrs = {"group_id": "id"}
_create_attrs = RequiredOptional(
required=("access_level",),
optional=(
"expires_at",
"invite_source",
"tasks_to_be_done",
"tasks_project_id",
),
exclusive=("email", "user_id"),
)
_update_attrs = RequiredOptional(
optional=("access_level", "expires_at"),
)
_list_filters = ("query",)
_types = {
"email": CommaSeparatedListAttribute,
"user_id": CommaSeparatedListAttribute,
"tasks_to_be_done": ArrayAttribute,
}
def get(
self, id: Union[str, int], lazy: bool = False, **kwargs: Any
) -> GroupInvitation:
return cast(GroupInvitation, super().get(id=id, lazy=lazy, **kwargs))
| 2,734 | Python | .py | 75 | 29.693333 | 86 | 0.649641 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,082 | namespaces.py | python-gitlab_python-gitlab/gitlab/v4/objects/namespaces.py | from typing import Any, cast, TYPE_CHECKING, Union
from gitlab import cli
from gitlab import exceptions as exc
from gitlab.base import RESTManager, RESTObject
from gitlab.mixins import RetrieveMixin
from gitlab.utils import EncodedId
__all__ = [
"Namespace",
"NamespaceManager",
]
class Namespace(RESTObject):
pass
class NamespaceManager(RetrieveMixin, RESTManager):
_path = "/namespaces"
_obj_cls = Namespace
_list_filters = ("search",)
def get(self, id: Union[str, int], lazy: bool = False, **kwargs: Any) -> Namespace:
return cast(Namespace, super().get(id=id, lazy=lazy, **kwargs))
@cli.register_custom_action(
cls_names="NamespaceManager", required=("namespace", "parent_id")
)
@exc.on_http_error(exc.GitlabGetError)
def exists(self, namespace: str, **kwargs: Any) -> Namespace:
"""Get existence of a namespace by path.
Args:
namespace: The path to the namespace.
**kwargs: Extra options to send to the server (e.g. sudo)
Raises:
GitlabAuthenticationError: If authentication is not correct
GitlabGetError: If the server failed to perform the request
Returns:
Data on namespace existence returned from the server.
"""
path = f"{self.path}/{EncodedId(namespace)}/exists"
server_data = self.gitlab.http_get(path, **kwargs)
if TYPE_CHECKING:
assert isinstance(server_data, dict)
return self._obj_cls(self, server_data)
| 1,535 | Python | .py | 38 | 33.736842 | 87 | 0.674293 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,083 | wikis.py | python-gitlab_python-gitlab/gitlab/v4/objects/wikis.py | from typing import Any, cast, Union
from gitlab.base import RESTManager, RESTObject
from gitlab.mixins import CRUDMixin, ObjectDeleteMixin, SaveMixin, UploadMixin
from gitlab.types import RequiredOptional
__all__ = [
"ProjectWiki",
"ProjectWikiManager",
"GroupWiki",
"GroupWikiManager",
]
class ProjectWiki(SaveMixin, ObjectDeleteMixin, UploadMixin, RESTObject):
_id_attr = "slug"
_repr_attr = "slug"
_upload_path = "/projects/{project_id}/wikis/attachments"
class ProjectWikiManager(CRUDMixin, RESTManager):
_path = "/projects/{project_id}/wikis"
_obj_cls = ProjectWiki
_from_parent_attrs = {"project_id": "id"}
_create_attrs = RequiredOptional(
required=("title", "content"), optional=("format",)
)
_update_attrs = RequiredOptional(optional=("title", "content", "format"))
_list_filters = ("with_content",)
def get(
self, id: Union[str, int], lazy: bool = False, **kwargs: Any
) -> ProjectWiki:
return cast(ProjectWiki, super().get(id=id, lazy=lazy, **kwargs))
class GroupWiki(SaveMixin, ObjectDeleteMixin, UploadMixin, RESTObject):
_id_attr = "slug"
_repr_attr = "slug"
_upload_path = "/groups/{group_id}/wikis/attachments"
class GroupWikiManager(CRUDMixin, RESTManager):
_path = "/groups/{group_id}/wikis"
_obj_cls = GroupWiki
_from_parent_attrs = {"group_id": "id"}
_create_attrs = RequiredOptional(
required=("title", "content"), optional=("format",)
)
_update_attrs = RequiredOptional(optional=("title", "content", "format"))
_list_filters = ("with_content",)
def get(self, id: Union[str, int], lazy: bool = False, **kwargs: Any) -> GroupWiki:
return cast(GroupWiki, super().get(id=id, lazy=lazy, **kwargs))
| 1,775 | Python | .py | 42 | 37.452381 | 87 | 0.679256 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,084 | features.py | python-gitlab_python-gitlab/gitlab/v4/objects/features.py | """
GitLab API:
https://docs.gitlab.com/ee/api/features.html
"""
from typing import Any, Optional, TYPE_CHECKING, Union
from gitlab import exceptions as exc
from gitlab import utils
from gitlab.base import RESTManager, RESTObject
from gitlab.mixins import DeleteMixin, ListMixin, ObjectDeleteMixin
__all__ = [
"Feature",
"FeatureManager",
]
class Feature(ObjectDeleteMixin, RESTObject):
_id_attr = "name"
class FeatureManager(ListMixin, DeleteMixin, RESTManager):
_path = "/features/"
_obj_cls = Feature
@exc.on_http_error(exc.GitlabSetError)
def set(
self,
name: str,
value: Union[bool, int],
feature_group: Optional[str] = None,
user: Optional[str] = None,
group: Optional[str] = None,
project: Optional[str] = None,
**kwargs: Any,
) -> Feature:
"""Create or update the object.
Args:
name: The value to set for the object
value: The value to set for the object
feature_group: A feature group name
user: A GitLab username
group: A GitLab group
project: A GitLab project in form group/project
**kwargs: Extra options to send to the server (e.g. sudo)
Raises:
GitlabAuthenticationError: If authentication is not correct
GitlabSetError: If an error occurred
Returns:
The created/updated attribute
"""
name = utils.EncodedId(name)
path = f"{self.path}/{name}"
data = {
"value": value,
"feature_group": feature_group,
"user": user,
"group": group,
"project": project,
}
data = utils.remove_none_from_dict(data)
server_data = self.gitlab.http_post(path, post_data=data, **kwargs)
if TYPE_CHECKING:
assert isinstance(server_data, dict)
return self._obj_cls(self, server_data)
| 1,973 | Python | .py | 58 | 26.068966 | 75 | 0.617122 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,085 | statistics.py | python-gitlab_python-gitlab/gitlab/v4/objects/statistics.py | from typing import Any, cast
from gitlab.base import RESTManager, RESTObject
from gitlab.mixins import GetWithoutIdMixin, RefreshMixin
from gitlab.types import ArrayAttribute
__all__ = [
"GroupIssuesStatistics",
"GroupIssuesStatisticsManager",
"ProjectAdditionalStatistics",
"ProjectAdditionalStatisticsManager",
"IssuesStatistics",
"IssuesStatisticsManager",
"ProjectIssuesStatistics",
"ProjectIssuesStatisticsManager",
"ApplicationStatistics",
"ApplicationStatisticsManager",
]
class ProjectAdditionalStatistics(RefreshMixin, RESTObject):
_id_attr = None
class ProjectAdditionalStatisticsManager(GetWithoutIdMixin, RESTManager):
_path = "/projects/{project_id}/statistics"
_obj_cls = ProjectAdditionalStatistics
_from_parent_attrs = {"project_id": "id"}
def get(self, **kwargs: Any) -> ProjectAdditionalStatistics:
return cast(ProjectAdditionalStatistics, super().get(**kwargs))
class IssuesStatistics(RefreshMixin, RESTObject):
_id_attr = None
class IssuesStatisticsManager(GetWithoutIdMixin, RESTManager):
_path = "/issues_statistics"
_obj_cls = IssuesStatistics
_list_filters = ("iids",)
_types = {"iids": ArrayAttribute}
def get(self, **kwargs: Any) -> IssuesStatistics:
return cast(IssuesStatistics, super().get(**kwargs))
class GroupIssuesStatistics(RefreshMixin, RESTObject):
_id_attr = None
class GroupIssuesStatisticsManager(GetWithoutIdMixin, RESTManager):
_path = "/groups/{group_id}/issues_statistics"
_obj_cls = GroupIssuesStatistics
_from_parent_attrs = {"group_id": "id"}
_list_filters = ("iids",)
_types = {"iids": ArrayAttribute}
def get(self, **kwargs: Any) -> GroupIssuesStatistics:
return cast(GroupIssuesStatistics, super().get(**kwargs))
class ProjectIssuesStatistics(RefreshMixin, RESTObject):
_id_attr = None
class ProjectIssuesStatisticsManager(GetWithoutIdMixin, RESTManager):
_path = "/projects/{project_id}/issues_statistics"
_obj_cls = ProjectIssuesStatistics
_from_parent_attrs = {"project_id": "id"}
_list_filters = ("iids",)
_types = {"iids": ArrayAttribute}
def get(self, **kwargs: Any) -> ProjectIssuesStatistics:
return cast(ProjectIssuesStatistics, super().get(**kwargs))
class ApplicationStatistics(RESTObject):
_id_attr = None
class ApplicationStatisticsManager(GetWithoutIdMixin, RESTManager):
_path = "/application/statistics"
_obj_cls = ApplicationStatistics
def get(self, **kwargs: Any) -> ApplicationStatistics:
return cast(ApplicationStatistics, super().get(**kwargs))
| 2,638 | Python | .py | 60 | 39.25 | 73 | 0.744022 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,086 | ci_lint.py | python-gitlab_python-gitlab/gitlab/v4/objects/ci_lint.py | """
GitLab API:
https://docs.gitlab.com/ee/api/lint.html
"""
from typing import Any, cast
from gitlab.base import RESTManager, RESTObject
from gitlab.cli import register_custom_action
from gitlab.exceptions import GitlabCiLintError
from gitlab.mixins import CreateMixin, GetWithoutIdMixin
from gitlab.types import RequiredOptional
__all__ = [
"CiLint",
"CiLintManager",
"ProjectCiLint",
"ProjectCiLintManager",
]
class CiLint(RESTObject):
_id_attr = None
class CiLintManager(CreateMixin, RESTManager):
_path = "/ci/lint"
_obj_cls = CiLint
_create_attrs = RequiredOptional(
required=("content",), optional=("include_merged_yaml", "include_jobs")
)
@register_custom_action(
cls_names="CiLintManager",
required=("content",),
optional=("include_merged_yaml", "include_jobs"),
)
def validate(self, *args: Any, **kwargs: Any) -> None:
"""Raise an error if the CI Lint results are not valid.
This is a custom python-gitlab method to wrap lint endpoints."""
result = self.create(*args, **kwargs)
if result.status != "valid":
message = ",\n".join(result.errors)
raise GitlabCiLintError(message)
class ProjectCiLint(RESTObject):
_id_attr = None
class ProjectCiLintManager(GetWithoutIdMixin, CreateMixin, RESTManager):
_path = "/projects/{project_id}/ci/lint"
_obj_cls = ProjectCiLint
_from_parent_attrs = {"project_id": "id"}
_optional_get_attrs = ("dry_run", "include_jobs", "ref")
_create_attrs = RequiredOptional(
required=("content",), optional=("dry_run", "include_jobs", "ref")
)
def get(self, **kwargs: Any) -> ProjectCiLint:
return cast(ProjectCiLint, super().get(**kwargs))
@register_custom_action(
cls_names="ProjectCiLintManager",
required=("content",),
optional=("dry_run", "include_jobs", "ref"),
)
def validate(self, *args: Any, **kwargs: Any) -> None:
"""Raise an error if the Project CI Lint results are not valid.
This is a custom python-gitlab method to wrap lint endpoints."""
result = self.create(*args, **kwargs)
if not result.valid:
message = ",\n".join(result.errors)
raise GitlabCiLintError(message)
| 2,308 | Python | .py | 60 | 32.566667 | 79 | 0.665022 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,087 | audit_events.py | python-gitlab_python-gitlab/gitlab/v4/objects/audit_events.py | """
GitLab API:
https://docs.gitlab.com/ee/api/audit_events.html
"""
from typing import Any, cast, Union
from gitlab.base import RESTManager, RESTObject
from gitlab.mixins import RetrieveMixin
__all__ = [
"AuditEvent",
"AuditEventManager",
"GroupAuditEvent",
"GroupAuditEventManager",
"ProjectAuditEvent",
"ProjectAuditEventManager",
"ProjectAudit",
"ProjectAuditManager",
]
class AuditEvent(RESTObject):
_id_attr = "id"
class AuditEventManager(RetrieveMixin, RESTManager):
_path = "/audit_events"
_obj_cls = AuditEvent
_list_filters = ("created_after", "created_before", "entity_type", "entity_id")
def get(self, id: Union[str, int], lazy: bool = False, **kwargs: Any) -> AuditEvent:
return cast(AuditEvent, super().get(id=id, lazy=lazy, **kwargs))
class GroupAuditEvent(RESTObject):
_id_attr = "id"
class GroupAuditEventManager(RetrieveMixin, RESTManager):
_path = "/groups/{group_id}/audit_events"
_obj_cls = GroupAuditEvent
_from_parent_attrs = {"group_id": "id"}
_list_filters = ("created_after", "created_before")
def get(
self, id: Union[str, int], lazy: bool = False, **kwargs: Any
) -> GroupAuditEvent:
return cast(GroupAuditEvent, super().get(id=id, lazy=lazy, **kwargs))
class ProjectAuditEvent(RESTObject):
_id_attr = "id"
class ProjectAuditEventManager(RetrieveMixin, RESTManager):
_path = "/projects/{project_id}/audit_events"
_obj_cls = ProjectAuditEvent
_from_parent_attrs = {"project_id": "id"}
_list_filters = ("created_after", "created_before")
def get(
self, id: Union[str, int], lazy: bool = False, **kwargs: Any
) -> ProjectAuditEvent:
return cast(ProjectAuditEvent, super().get(id=id, lazy=lazy, **kwargs))
class ProjectAudit(ProjectAuditEvent):
pass
class ProjectAuditManager(ProjectAuditEventManager):
pass
| 1,910 | Python | .py | 51 | 32.960784 | 88 | 0.696244 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,088 | tags.py | python-gitlab_python-gitlab/gitlab/v4/objects/tags.py | from typing import Any, cast, Union
from gitlab.base import RESTManager, RESTObject
from gitlab.mixins import NoUpdateMixin, ObjectDeleteMixin
from gitlab.types import RequiredOptional
__all__ = [
"ProjectTag",
"ProjectTagManager",
"ProjectProtectedTag",
"ProjectProtectedTagManager",
]
class ProjectTag(ObjectDeleteMixin, RESTObject):
_id_attr = "name"
_repr_attr = "name"
class ProjectTagManager(NoUpdateMixin, RESTManager):
_path = "/projects/{project_id}/repository/tags"
_obj_cls = ProjectTag
_from_parent_attrs = {"project_id": "id"}
_create_attrs = RequiredOptional(
required=("tag_name", "ref"), optional=("message",)
)
def get(self, id: Union[str, int], lazy: bool = False, **kwargs: Any) -> ProjectTag:
return cast(ProjectTag, super().get(id=id, lazy=lazy, **kwargs))
class ProjectProtectedTag(ObjectDeleteMixin, RESTObject):
_id_attr = "name"
_repr_attr = "name"
class ProjectProtectedTagManager(NoUpdateMixin, RESTManager):
_path = "/projects/{project_id}/protected_tags"
_obj_cls = ProjectProtectedTag
_from_parent_attrs = {"project_id": "id"}
_create_attrs = RequiredOptional(
required=("name",), optional=("create_access_level",)
)
def get(
self, id: Union[str, int], lazy: bool = False, **kwargs: Any
) -> ProjectProtectedTag:
return cast(ProjectProtectedTag, super().get(id=id, lazy=lazy, **kwargs))
| 1,453 | Python | .py | 36 | 35.583333 | 88 | 0.695374 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,089 | deployments.py | python-gitlab_python-gitlab/gitlab/v4/objects/deployments.py | """
GitLab API:
https://docs.gitlab.com/ee/api/deployments.html
"""
from typing import Any, cast, Dict, Optional, TYPE_CHECKING, Union
from gitlab import cli
from gitlab import exceptions as exc
from gitlab.base import RESTManager, RESTObject
from gitlab.mixins import CreateMixin, RetrieveMixin, SaveMixin, UpdateMixin
from gitlab.types import RequiredOptional
from .merge_requests import ProjectDeploymentMergeRequestManager # noqa: F401
__all__ = [
"ProjectDeployment",
"ProjectDeploymentManager",
]
class ProjectDeployment(SaveMixin, RESTObject):
mergerequests: ProjectDeploymentMergeRequestManager
@cli.register_custom_action(
cls_names="ProjectDeployment",
required=("status",),
optional=("comment", "represented_as"),
)
@exc.on_http_error(exc.GitlabDeploymentApprovalError)
def approval(
self,
status: str,
comment: Optional[str] = None,
represented_as: Optional[str] = None,
**kwargs: Any,
) -> Dict[str, Any]:
"""Approve or reject a blocked deployment.
Args:
status: Either "approved" or "rejected"
comment: A comment to go with the approval
represented_as: The name of the User/Group/Role to use for the
approval, when the user belongs to multiple
approval rules.
**kwargs: Extra options to send to the server (e.g. sudo)
Raises:
GitlabAuthenticationError: If authentication is not correct
GitlabMRApprovalError: If the approval failed
Returns:
A dict containing the result.
https://docs.gitlab.com/ee/api/deployments.html#approve-or-reject-a-blocked-deployment
"""
path = f"{self.manager.path}/{self.encoded_id}/approval"
data = {"status": status}
if comment is not None:
data["comment"] = comment
if represented_as is not None:
data["represented_as"] = represented_as
server_data = self.manager.gitlab.http_post(path, post_data=data, **kwargs)
if TYPE_CHECKING:
assert isinstance(server_data, dict)
return server_data
class ProjectDeploymentManager(RetrieveMixin, CreateMixin, UpdateMixin, RESTManager):
    """Manager for the deployments of a project."""

    _path = "/projects/{project_id}/deployments"
    _obj_cls = ProjectDeployment
    _from_parent_attrs = {"project_id": "id"}
    _list_filters = (
        "order_by",
        "sort",
        "updated_after",
        "updated_before",
        "environment",
        "status",
    )
    _create_attrs = RequiredOptional(
        required=("sha", "ref", "tag", "status", "environment")
    )

    def get(
        self, id: Union[str, int], lazy: bool = False, **kwargs: Any
    ) -> ProjectDeployment:
        """Retrieve a single deployment by its id."""
        deployment = super().get(id=id, lazy=lazy, **kwargs)
        return cast(ProjectDeployment, deployment)
| 2,889 | Python | .py | 74 | 31.310811 | 94 | 0.651429 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,090 | cluster_agents.py | python-gitlab_python-gitlab/gitlab/v4/objects/cluster_agents.py | from typing import Any, cast, Union
from gitlab.base import RESTManager, RESTObject
from gitlab.mixins import NoUpdateMixin, ObjectDeleteMixin, SaveMixin
from gitlab.types import RequiredOptional
# Public API of this module.
__all__ = [
    "ProjectClusterAgent",
    "ProjectClusterAgentManager",
]
class ProjectClusterAgent(SaveMixin, ObjectDeleteMixin, RESTObject):
    """A GitLab agent (cluster agent) registered in a project."""

    # Show the agent name, not the numeric id, in repr() output.
    _repr_attr = "name"
class ProjectClusterAgentManager(NoUpdateMixin, RESTManager):
    """Manager for the cluster agents of a project."""

    _path = "/projects/{project_id}/cluster_agents"
    _obj_cls = ProjectClusterAgent
    _from_parent_attrs = {"project_id": "id"}
    _create_attrs = RequiredOptional(required=("name",))

    def get(
        self, id: Union[str, int], lazy: bool = False, **kwargs: Any
    ) -> ProjectClusterAgent:
        """Retrieve a single cluster agent by its id."""
        agent = super().get(id=id, lazy=lazy, **kwargs)
        return cast(ProjectClusterAgent, agent)
| 817 | Python | .py | 19 | 38.894737 | 81 | 0.73957 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,091 | job_token_scope.py | python-gitlab_python-gitlab/gitlab/v4/objects/job_token_scope.py | from typing import Any, cast
from gitlab.base import RESTManager, RESTObject
from gitlab.mixins import (
CreateMixin,
DeleteMixin,
GetWithoutIdMixin,
ListMixin,
ObjectDeleteMixin,
RefreshMixin,
SaveMixin,
UpdateMethod,
UpdateMixin,
)
from gitlab.types import RequiredOptional
# Public API of this module.
__all__ = [
    "ProjectJobTokenScope",
    "ProjectJobTokenScopeManager",
]
class ProjectJobTokenScope(RefreshMixin, SaveMixin, RESTObject):
    """The CI/CD job token scope of a project."""

    # Singleton resource: exactly one scope object exists per project.
    _id_attr = None
    allowlist: "AllowlistProjectManager"
    groups_allowlist: "AllowlistGroupManager"
class ProjectJobTokenScopeManager(GetWithoutIdMixin, UpdateMixin, RESTManager):
    """Manager for a project's job token scope (a singleton resource)."""

    _path = "/projects/{project_id}/job_token_scope"
    _obj_cls = ProjectJobTokenScope
    _from_parent_attrs = {"project_id": "id"}
    _update_method = UpdateMethod.PATCH

    def get(self, **kwargs: Any) -> ProjectJobTokenScope:
        """Return the job token scope of the parent project."""
        scope = super().get(**kwargs)
        return cast(ProjectJobTokenScope, scope)
class AllowlistProject(ObjectDeleteMixin, RESTObject):
    """A project entry in a job token scope allowlist."""

    _id_attr = "target_project_id"  # only present on the create endpoint

    def get_id(self) -> int:
        """Return the id of the resource.

        The server returns either an ``id`` or a ``target_project_id``
        attribute depending on the endpoint called, so fall back to
        ``id`` when ``target_project_id`` is absent.
        """
        resolved = super().get_id()
        if resolved is None:
            return cast(int, self.id)
        return cast(int, resolved)
class AllowlistProjectManager(ListMixin, CreateMixin, DeleteMixin, RESTManager):
    """Manager for the projects allowed to use this project's CI/CD job token."""

    _path = "/projects/{project_id}/job_token_scope/allowlist"
    _obj_cls = AllowlistProject
    _from_parent_attrs = {"project_id": "project_id"}
    _create_attrs = RequiredOptional(required=("target_project_id",))
class AllowlistGroup(ObjectDeleteMixin, RESTObject):
    """A group entry in a job token scope groups allowlist."""

    _id_attr = "target_group_id"  # only present on the create endpoint

    def get_id(self) -> int:
        """Return the id of the resource.

        The server returns either an ``id`` or a ``target_group_id``
        attribute depending on the endpoint called, so fall back to
        ``id`` when ``target_group_id`` is absent.
        """
        resolved = super().get_id()
        if resolved is None:
            return cast(int, self.id)
        return cast(int, resolved)
class AllowlistGroupManager(ListMixin, CreateMixin, DeleteMixin, RESTManager):
    """Manager for the groups allowed to use this project's CI/CD job token."""

    _path = "/projects/{project_id}/job_token_scope/groups_allowlist"
    _obj_cls = AllowlistGroup
    _from_parent_attrs = {"project_id": "project_id"}
    _create_attrs = RequiredOptional(required=("target_group_id",))
| 2,622 | Python | .py | 59 | 38.864407 | 80 | 0.713949 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,092 | draft_notes.py | python-gitlab_python-gitlab/gitlab/v4/objects/draft_notes.py | from typing import Any, cast, Union
from gitlab.base import RESTManager, RESTObject
from gitlab.mixins import CRUDMixin, ObjectDeleteMixin, SaveMixin
from gitlab.types import RequiredOptional
# Public API of this module.
__all__ = [
    "ProjectMergeRequestDraftNote",
    "ProjectMergeRequestDraftNoteManager",
]
class ProjectMergeRequestDraftNote(ObjectDeleteMixin, SaveMixin, RESTObject):
    """A single draft note on a merge request."""

    def publish(self, **kwargs: Any) -> None:
        """Publish this draft note as a regular note."""
        endpoint = f"{self.manager.path}/{self.encoded_id}/publish"
        self.manager.gitlab.http_put(endpoint, **kwargs)
class ProjectMergeRequestDraftNoteManager(CRUDMixin, RESTManager):
    """Manager for the draft notes of a merge request."""

    _path = "/projects/{project_id}/merge_requests/{mr_iid}/draft_notes"
    _obj_cls = ProjectMergeRequestDraftNote
    _from_parent_attrs = {"project_id": "project_id", "mr_iid": "iid"}
    _create_attrs = RequiredOptional(
        required=("note",),
        optional=(
            "commit_id",
            "in_reply_to_discussion_id",
            "position",
            "resolve_discussion",
        ),
    )
    _update_attrs = RequiredOptional(optional=("position",))

    def get(
        self, id: Union[str, int], lazy: bool = False, **kwargs: Any
    ) -> ProjectMergeRequestDraftNote:
        """Retrieve a single draft note by its id."""
        note = super().get(id=id, lazy=lazy, **kwargs)
        return cast(ProjectMergeRequestDraftNote, note)

    def bulk_publish(self, **kwargs: Any) -> None:
        """Publish all draft notes of the merge request at once."""
        self.gitlab.http_post(f"{self.path}/bulk_publish", **kwargs)
| 1,450 | Python | .py | 35 | 34.828571 | 81 | 0.673063 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,093 | pipelines.py | python-gitlab_python-gitlab/gitlab/v4/objects/pipelines.py | from typing import Any, cast, Dict, Optional, TYPE_CHECKING, Union
import requests
from gitlab import cli
from gitlab import exceptions as exc
from gitlab.base import RESTManager, RESTObject
from gitlab.mixins import (
CreateMixin,
CRUDMixin,
DeleteMixin,
GetWithoutIdMixin,
ListMixin,
ObjectDeleteMixin,
RefreshMixin,
RetrieveMixin,
SaveMixin,
UpdateMixin,
)
from gitlab.types import ArrayAttribute, RequiredOptional
# Public API of this module.
__all__ = [
    "ProjectMergeRequestPipeline",
    "ProjectMergeRequestPipelineManager",
    "ProjectPipeline",
    "ProjectPipelineManager",
    "ProjectPipelineJob",
    "ProjectPipelineJobManager",
    "ProjectPipelineBridge",
    "ProjectPipelineBridgeManager",
    "ProjectPipelineVariable",
    "ProjectPipelineVariableManager",
    "ProjectPipelineScheduleVariable",
    "ProjectPipelineScheduleVariableManager",
    "ProjectPipelineSchedulePipeline",
    "ProjectPipelineSchedulePipelineManager",
    "ProjectPipelineSchedule",
    "ProjectPipelineScheduleManager",
    "ProjectPipelineTestReport",
    "ProjectPipelineTestReportManager",
    "ProjectPipelineTestReportSummary",
    "ProjectPipelineTestReportSummaryManager",
]
class ProjectMergeRequestPipeline(RESTObject):
    """A pipeline run for a merge request."""

    pass
class ProjectMergeRequestPipelineManager(CreateMixin, ListMixin, RESTManager):
    """Manager for the pipelines attached to a merge request."""

    _path = "/projects/{project_id}/merge_requests/{mr_iid}/pipelines"
    _obj_cls = ProjectMergeRequestPipeline
    _from_parent_attrs = {"project_id": "project_id", "mr_iid": "iid"}
class ProjectPipeline(RefreshMixin, ObjectDeleteMixin, RESTObject):
    """A single CI/CD pipeline of a project."""

    bridges: "ProjectPipelineBridgeManager"
    jobs: "ProjectPipelineJobManager"
    test_report: "ProjectPipelineTestReportManager"
    test_report_summary: "ProjectPipelineTestReportSummaryManager"
    variables: "ProjectPipelineVariableManager"

    @cli.register_custom_action(cls_names="ProjectPipeline")
    @exc.on_http_error(exc.GitlabPipelineCancelError)
    def cancel(self, **kwargs: Any) -> Union[Dict[str, Any], requests.Response]:
        """Cancel the pipeline.

        Args:
            **kwargs: Extra options to send to the server (e.g. sudo)

        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabPipelineCancelError: If the request failed

        Returns:
            The server response.
        """
        path = f"{self.manager.path}/{self.encoded_id}/cancel"
        return self.manager.gitlab.http_post(path, **kwargs)

    @cli.register_custom_action(cls_names="ProjectPipeline")
    @exc.on_http_error(exc.GitlabPipelineRetryError)
    def retry(self, **kwargs: Any) -> Union[Dict[str, Any], requests.Response]:
        """Retry the pipeline.

        Args:
            **kwargs: Extra options to send to the server (e.g. sudo)

        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabPipelineRetryError: If the request failed

        Returns:
            The server response.
        """
        path = f"{self.manager.path}/{self.encoded_id}/retry"
        return self.manager.gitlab.http_post(path, **kwargs)
class ProjectPipelineManager(RetrieveMixin, CreateMixin, DeleteMixin, RESTManager):
    """Manager for the pipelines of a project."""

    _path = "/projects/{project_id}/pipelines"
    _obj_cls = ProjectPipeline
    _from_parent_attrs = {"project_id": "id"}
    _list_filters = (
        "scope",
        "status",
        "source",
        "ref",
        "sha",
        "yaml_errors",
        "name",
        "username",
        "order_by",
        "sort",
    )
    _create_attrs = RequiredOptional(required=("ref",))

    def get(
        self, id: Union[str, int], lazy: bool = False, **kwargs: Any
    ) -> ProjectPipeline:
        """Retrieve a single pipeline by its id.

        Args:
            id: ID of the pipeline
            lazy: If True, don't request the server, but create a
                shallow object giving access to the managers
            **kwargs: Extra options to send to the server (e.g. sudo)

        Returns:
            The pipeline object.
        """
        return cast(ProjectPipeline, super().get(id=id, lazy=lazy, **kwargs))

    def create(
        self, data: Optional[Dict[str, Any]] = None, **kwargs: Any
    ) -> ProjectPipeline:
        """Creates a new object.

        Args:
            data: Parameters to send to the server to create the
                resource
            **kwargs: Extra options to send to the server (e.g. sudo)

        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabCreateError: If the server cannot perform the request

        Returns:
            A new instance of the managed object class build with
            the data sent by the server
        """
        if TYPE_CHECKING:
            assert self.path is not None
        # The create endpoint is singular ("/pipeline"), unlike the
        # collection path used by every other call in this manager.
        path = self.path[:-1]  # drop the 's'
        return cast(
            ProjectPipeline, CreateMixin.create(self, data, path=path, **kwargs)
        )

    def latest(
        self, ref: Optional[str] = None, lazy: bool = False, **kwargs: Any
    ) -> ProjectPipeline:
        """Get the latest pipeline for the most recent commit
        on a specific ref in a project

        Args:
            ref: The branch or tag to check for the latest pipeline.
                Defaults to the default branch when not specified.
            lazy: If True, create a shallow object from the returned
                attributes
            **kwargs: Extra options to send to the server (e.g. sudo)

        Returns:
            A Pipeline instance
        """
        data = {}
        if ref:
            data = {"ref": ref}
        if TYPE_CHECKING:
            assert self._obj_cls is not None
            assert self.path is not None
        # Pass **kwargs through so server options (e.g. sudo, retries)
        # work here just like in every other manager method.
        server_data = self.gitlab.http_get(
            self.path + "/latest", query_data=data, **kwargs
        )
        if TYPE_CHECKING:
            assert not isinstance(server_data, requests.Response)
        return self._obj_cls(self, server_data, lazy=lazy)
class ProjectPipelineJob(RESTObject):
    """A job that ran as part of a pipeline."""

    pass
class ProjectPipelineJobManager(ListMixin, RESTManager):
    """Read-only manager for the jobs of a single pipeline."""

    _path = "/projects/{project_id}/pipelines/{pipeline_id}/jobs"
    _obj_cls = ProjectPipelineJob
    _from_parent_attrs = {"project_id": "project_id", "pipeline_id": "id"}
    _list_filters = ("scope", "include_retried")
    # "scope" may be passed several times, hence the array type.
    _types = {"scope": ArrayAttribute}
class ProjectPipelineBridge(RESTObject):
    """A bridge (trigger) job of a pipeline."""

    pass
class ProjectPipelineBridgeManager(ListMixin, RESTManager):
    """Read-only manager for the bridge jobs of a single pipeline."""

    _path = "/projects/{project_id}/pipelines/{pipeline_id}/bridges"
    _obj_cls = ProjectPipelineBridge
    _from_parent_attrs = {"project_id": "project_id", "pipeline_id": "id"}
    _list_filters = ("scope",)
class ProjectPipelineVariable(RESTObject):
    """A variable used by a pipeline run; identified by its key."""

    _id_attr = "key"
class ProjectPipelineVariableManager(ListMixin, RESTManager):
    """Read-only manager for the variables of a single pipeline."""

    _path = "/projects/{project_id}/pipelines/{pipeline_id}/variables"
    _obj_cls = ProjectPipelineVariable
    _from_parent_attrs = {"project_id": "project_id", "pipeline_id": "id"}
class ProjectPipelineScheduleVariable(SaveMixin, ObjectDeleteMixin, RESTObject):
    """A variable of a pipeline schedule; identified by its key."""

    _id_attr = "key"
class ProjectPipelineScheduleVariableManager(
    CreateMixin, UpdateMixin, DeleteMixin, RESTManager
):
    """Manager for the variables of a pipeline schedule."""

    _path = "/projects/{project_id}/pipeline_schedules/{pipeline_schedule_id}/variables"
    _obj_cls = ProjectPipelineScheduleVariable
    _from_parent_attrs = {"project_id": "project_id", "pipeline_schedule_id": "id"}
    _create_attrs = RequiredOptional(required=("key", "value"))
    _update_attrs = RequiredOptional(required=("key", "value"))
class ProjectPipelineSchedulePipeline(RESTObject):
    """A pipeline triggered by a pipeline schedule."""

    pass
class ProjectPipelineSchedulePipelineManager(ListMixin, RESTManager):
    """Read-only manager for the pipelines triggered by a schedule."""

    _path = "/projects/{project_id}/pipeline_schedules/{pipeline_schedule_id}/pipelines"
    _obj_cls = ProjectPipelineSchedulePipeline
    _from_parent_attrs = {"project_id": "project_id", "pipeline_schedule_id": "id"}
class ProjectPipelineSchedule(SaveMixin, ObjectDeleteMixin, RESTObject):
    """A scheduled pipeline of a project."""

    variables: ProjectPipelineScheduleVariableManager
    pipelines: ProjectPipelineSchedulePipelineManager

    @cli.register_custom_action(cls_names="ProjectPipelineSchedule")
    @exc.on_http_error(exc.GitlabOwnershipError)
    def take_ownership(self, **kwargs: Any) -> None:
        """Update the owner of a pipeline schedule.

        Args:
            **kwargs: Extra options to send to the server (e.g. sudo)

        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabOwnershipError: If the request failed
        """
        url = f"{self.manager.path}/{self.encoded_id}/take_ownership"
        response = self.manager.gitlab.http_post(url, **kwargs)
        if TYPE_CHECKING:
            assert isinstance(response, dict)
        self._update_attrs(response)

    @cli.register_custom_action(cls_names="ProjectPipelineSchedule")
    @exc.on_http_error(exc.GitlabPipelinePlayError)
    def play(self, **kwargs: Any) -> Dict[str, Any]:
        """Trigger a new scheduled pipeline, which runs immediately.
        The next scheduled run of this pipeline is not affected.

        Args:
            **kwargs: Extra options to send to the server (e.g. sudo)

        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabPipelinePlayError: If the request failed

        Returns:
            The server response.
        """
        url = f"{self.manager.path}/{self.encoded_id}/play"
        response = self.manager.gitlab.http_post(url, **kwargs)
        if TYPE_CHECKING:
            assert isinstance(response, dict)
        self._update_attrs(response)
        return response
class ProjectPipelineScheduleManager(CRUDMixin, RESTManager):
    """Manager for the pipeline schedules of a project."""

    _path = "/projects/{project_id}/pipeline_schedules"
    _obj_cls = ProjectPipelineSchedule
    _from_parent_attrs = {"project_id": "id"}
    _create_attrs = RequiredOptional(
        required=("description", "ref", "cron"),
        optional=("cron_timezone", "active"),
    )
    _update_attrs = RequiredOptional(
        optional=("description", "ref", "cron", "cron_timezone", "active"),
    )

    def get(
        self, id: Union[str, int], lazy: bool = False, **kwargs: Any
    ) -> ProjectPipelineSchedule:
        """Retrieve a single pipeline schedule by its id."""
        schedule = super().get(id=id, lazy=lazy, **kwargs)
        return cast(ProjectPipelineSchedule, schedule)
class ProjectPipelineTestReport(RESTObject):
    """The test report of a pipeline (a singleton resource)."""

    _id_attr = None
class ProjectPipelineTestReportManager(GetWithoutIdMixin, RESTManager):
    """Manager for the test report of a single pipeline."""

    _path = "/projects/{project_id}/pipelines/{pipeline_id}/test_report"
    _obj_cls = ProjectPipelineTestReport
    _from_parent_attrs = {"project_id": "project_id", "pipeline_id": "id"}

    def get(self, **kwargs: Any) -> ProjectPipelineTestReport:
        """Return the test report of the parent pipeline."""
        report = super().get(**kwargs)
        return cast(ProjectPipelineTestReport, report)
class ProjectPipelineTestReportSummary(RESTObject):
    """The test report summary of a pipeline (a singleton resource)."""

    _id_attr = None
class ProjectPipelineTestReportSummaryManager(GetWithoutIdMixin, RESTManager):
    """Manager for the test report summary of a single pipeline."""

    _path = "/projects/{project_id}/pipelines/{pipeline_id}/test_report_summary"
    _obj_cls = ProjectPipelineTestReportSummary
    _from_parent_attrs = {"project_id": "project_id", "pipeline_id": "id"}

    def get(self, **kwargs: Any) -> ProjectPipelineTestReportSummary:
        """Return the test report summary of the parent pipeline."""
        summary = super().get(**kwargs)
        return cast(ProjectPipelineTestReportSummary, summary)
| 10,664 | Python | .py | 239 | 37.485356 | 88 | 0.690154 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,094 | variables.py | python-gitlab_python-gitlab/gitlab/v4/objects/variables.py | """
GitLab API:
https://docs.gitlab.com/ee/api/instance_level_ci_variables.html
https://docs.gitlab.com/ee/api/project_level_variables.html
https://docs.gitlab.com/ee/api/group_level_variables.html
"""
from typing import Any, cast, Union
from gitlab.base import RESTManager, RESTObject
from gitlab.mixins import CRUDMixin, ObjectDeleteMixin, SaveMixin
from gitlab.types import RequiredOptional
# Public API of this module.
__all__ = [
    "Variable",
    "VariableManager",
    "GroupVariable",
    "GroupVariableManager",
    "ProjectVariable",
    "ProjectVariableManager",
]
class Variable(SaveMixin, ObjectDeleteMixin, RESTObject):
    """An instance-level CI/CD variable; identified by its key."""

    _id_attr = "key"
class VariableManager(CRUDMixin, RESTManager):
    """Manager for instance-level CI/CD variables."""

    _path = "/admin/ci/variables"
    _obj_cls = Variable
    _create_attrs = RequiredOptional(
        required=("key", "value"),
        optional=("protected", "variable_type", "masked"),
    )
    _update_attrs = RequiredOptional(
        required=("key", "value"),
        optional=("protected", "variable_type", "masked"),
    )

    def get(self, id: Union[str, int], lazy: bool = False, **kwargs: Any) -> Variable:
        """Retrieve a single instance-level variable by its key."""
        variable = super().get(id=id, lazy=lazy, **kwargs)
        return cast(Variable, variable)
class GroupVariable(SaveMixin, ObjectDeleteMixin, RESTObject):
    """A group-level CI/CD variable; identified by its key."""

    _id_attr = "key"
class GroupVariableManager(CRUDMixin, RESTManager):
    """Manager for the CI/CD variables of a group."""

    _path = "/groups/{group_id}/variables"
    _obj_cls = GroupVariable
    _from_parent_attrs = {"group_id": "id"}
    _create_attrs = RequiredOptional(
        required=("key", "value"),
        optional=("protected", "variable_type", "masked"),
    )
    _update_attrs = RequiredOptional(
        required=("key", "value"),
        optional=("protected", "variable_type", "masked"),
    )

    def get(
        self, id: Union[str, int], lazy: bool = False, **kwargs: Any
    ) -> GroupVariable:
        """Retrieve a single group variable by its key."""
        variable = super().get(id=id, lazy=lazy, **kwargs)
        return cast(GroupVariable, variable)
class ProjectVariable(SaveMixin, ObjectDeleteMixin, RESTObject):
    """A project-level CI/CD variable; identified by its key."""

    _id_attr = "key"
class ProjectVariableManager(CRUDMixin, RESTManager):
    """Manager for the CI/CD variables of a project."""

    _path = "/projects/{project_id}/variables"
    _obj_cls = ProjectVariable
    _from_parent_attrs = {"project_id": "id"}
    _create_attrs = RequiredOptional(
        required=("key", "value"),
        optional=("protected", "variable_type", "masked", "environment_scope"),
    )
    _update_attrs = RequiredOptional(
        required=("key", "value"),
        optional=("protected", "variable_type", "masked", "environment_scope"),
    )

    def get(
        self, id: Union[str, int], lazy: bool = False, **kwargs: Any
    ) -> ProjectVariable:
        """Retrieve a single project variable by its key."""
        variable = super().get(id=id, lazy=lazy, **kwargs)
        return cast(ProjectVariable, variable)
| 2,627 | Python | .py | 65 | 35.446154 | 86 | 0.678459 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,095 | push_rules.py | python-gitlab_python-gitlab/gitlab/v4/objects/push_rules.py | from typing import Any, cast
from gitlab.base import RESTManager, RESTObject
from gitlab.mixins import (
CreateMixin,
DeleteMixin,
GetWithoutIdMixin,
ObjectDeleteMixin,
SaveMixin,
UpdateMixin,
)
from gitlab.types import RequiredOptional
# Public API of this module.
__all__ = [
    "GroupPushRules",
    "GroupPushRulesManager",
    "ProjectPushRules",
    "ProjectPushRulesManager",
]
class ProjectPushRules(SaveMixin, ObjectDeleteMixin, RESTObject):
    """The push rules of a project (a singleton resource)."""

    _id_attr = None
class ProjectPushRulesManager(
    GetWithoutIdMixin, CreateMixin, UpdateMixin, DeleteMixin, RESTManager
):
    """Manager for the (single) push-rules object of a project."""

    _path = "/projects/{project_id}/push_rule"
    _obj_cls = ProjectPushRules
    _from_parent_attrs = {"project_id": "id"}
    # Create and update accept the same optional fields.
    _create_attrs = RequiredOptional(
        optional=(
            "author_email_regex", "branch_name_regex", "commit_committer_check",
            "commit_message_negative_regex", "commit_message_regex",
            "deny_delete_tag", "file_name_regex", "max_file_size",
            "member_check", "prevent_secrets", "reject_unsigned_commits",
        ),
    )
    _update_attrs = RequiredOptional(
        optional=(
            "author_email_regex", "branch_name_regex", "commit_committer_check",
            "commit_message_negative_regex", "commit_message_regex",
            "deny_delete_tag", "file_name_regex", "max_file_size",
            "member_check", "prevent_secrets", "reject_unsigned_commits",
        ),
    )

    def get(self, **kwargs: Any) -> ProjectPushRules:
        """Return the push rules of the parent project."""
        rules = super().get(**kwargs)
        return cast(ProjectPushRules, rules)
class GroupPushRules(SaveMixin, ObjectDeleteMixin, RESTObject):
    """The push rules of a group (a singleton resource)."""

    _id_attr = None
class GroupPushRulesManager(
    GetWithoutIdMixin, CreateMixin, UpdateMixin, DeleteMixin, RESTManager
):
    """Manager for the (single) push-rules object of a group."""

    _path = "/groups/{group_id}/push_rule"
    _obj_cls = GroupPushRules
    _from_parent_attrs = {"group_id": "id"}
    # Create and update accept the same optional fields.
    _create_attrs = RequiredOptional(
        optional=(
            "deny_delete_tag", "member_check", "prevent_secrets",
            "commit_message_regex", "commit_message_negative_regex",
            "branch_name_regex", "author_email_regex", "file_name_regex",
            "max_file_size", "commit_committer_check", "reject_unsigned_commits",
        ),
    )
    _update_attrs = RequiredOptional(
        optional=(
            "deny_delete_tag", "member_check", "prevent_secrets",
            "commit_message_regex", "commit_message_negative_regex",
            "branch_name_regex", "author_email_regex", "file_name_regex",
            "max_file_size", "commit_committer_check", "reject_unsigned_commits",
        ),
    )

    def get(self, **kwargs: Any) -> GroupPushRules:
        """Return the push rules of the parent group."""
        rules = super().get(**kwargs)
        return cast(GroupPushRules, rules)
| 3,041 | Python | .py | 97 | 22.721649 | 73 | 0.589359 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,096 | sidekiq.py | python-gitlab_python-gitlab/gitlab/v4/objects/sidekiq.py | from typing import Any, Dict, Union
import requests
from gitlab import cli
from gitlab import exceptions as exc
from gitlab.base import RESTManager
# Public API of this module.
__all__ = [
    "SidekiqManager",
]
class SidekiqManager(RESTManager):
    """Manager for the Sidekiq methods.

    This manager doesn't actually manage objects but provides helper function
    for the sidekiq metrics API.
    """

    @cli.register_custom_action(cls_names="SidekiqManager")
    @exc.on_http_error(exc.GitlabGetError)
    def queue_metrics(self, **kwargs: Any) -> Union[Dict[str, Any], requests.Response]:
        """Return the registered queues information.

        Args:
            **kwargs: Extra options to send to the server (e.g. sudo)

        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabGetError: If the information couldn't be retrieved

        Returns:
            Information about the Sidekiq queues
        """
        endpoint = "/sidekiq/queue_metrics"
        return self.gitlab.http_get(endpoint, **kwargs)

    @cli.register_custom_action(cls_names="SidekiqManager")
    @exc.on_http_error(exc.GitlabGetError)
    def process_metrics(
        self, **kwargs: Any
    ) -> Union[Dict[str, Any], requests.Response]:
        """Return the registered sidekiq workers.

        Args:
            **kwargs: Extra options to send to the server (e.g. sudo)

        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabGetError: If the information couldn't be retrieved

        Returns:
            Information about the register Sidekiq worker
        """
        endpoint = "/sidekiq/process_metrics"
        return self.gitlab.http_get(endpoint, **kwargs)

    @cli.register_custom_action(cls_names="SidekiqManager")
    @exc.on_http_error(exc.GitlabGetError)
    def job_stats(self, **kwargs: Any) -> Union[Dict[str, Any], requests.Response]:
        """Return statistics about the jobs performed.

        Args:
            **kwargs: Extra options to send to the server (e.g. sudo)

        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabGetError: If the information couldn't be retrieved

        Returns:
            Statistics about the Sidekiq jobs performed
        """
        endpoint = "/sidekiq/job_stats"
        return self.gitlab.http_get(endpoint, **kwargs)

    @cli.register_custom_action(cls_names="SidekiqManager")
    @exc.on_http_error(exc.GitlabGetError)
    def compound_metrics(
        self, **kwargs: Any
    ) -> Union[Dict[str, Any], requests.Response]:
        """Return all available metrics and statistics.

        Args:
            **kwargs: Extra options to send to the server (e.g. sudo)

        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabGetError: If the information couldn't be retrieved

        Returns:
            All available Sidekiq metrics and statistics
        """
        endpoint = "/sidekiq/compound_metrics"
        return self.gitlab.http_get(endpoint, **kwargs)
| 2,996 | Python | .py | 69 | 35.202899 | 87 | 0.66988 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,097 | runners.py | python-gitlab_python-gitlab/gitlab/v4/objects/runners.py | from typing import Any, cast, List, Optional, Union
from gitlab import cli
from gitlab import exceptions as exc
from gitlab import types
from gitlab.base import RESTManager, RESTObject
from gitlab.mixins import (
CreateMixin,
CRUDMixin,
DeleteMixin,
ListMixin,
ObjectDeleteMixin,
SaveMixin,
)
from gitlab.types import RequiredOptional
# Public API of this module.
__all__ = [
    "RunnerJob",
    "RunnerJobManager",
    "Runner",
    "RunnerManager",
    "RunnerAll",
    "RunnerAllManager",
    "GroupRunner",
    "GroupRunnerManager",
    "ProjectRunner",
    "ProjectRunnerManager",
]
class RunnerJob(RESTObject):
    """A job processed by a runner."""

    pass
class RunnerJobManager(ListMixin, RESTManager):
    """Read-only manager for the jobs processed by a runner."""

    _path = "/runners/{runner_id}/jobs"
    _obj_cls = RunnerJob
    _from_parent_attrs = {"runner_id": "id"}
    _list_filters = ("status",)
class Runner(SaveMixin, ObjectDeleteMixin, RESTObject):
    """A single runner."""

    jobs: RunnerJobManager
    # Show the description, not the numeric id, in repr() output.
    _repr_attr = "description"
class RunnerManager(CRUDMixin, RESTManager):
    """Manager for the runners registered on the GitLab instance."""

    _path = "/runners"
    _obj_cls = Runner
    _create_attrs = RequiredOptional(
        required=("token",),
        optional=(
            "description", "info", "active", "locked", "run_untagged",
            "tag_list", "access_level", "maximum_timeout",
        ),
    )
    _update_attrs = RequiredOptional(
        optional=(
            "description", "active", "tag_list", "run_untagged", "locked",
            "access_level", "maximum_timeout",
        ),
    )
    _list_filters = ("scope", "type", "status", "paused", "tag_list")
    _types = {"tag_list": types.CommaSeparatedListAttribute}

    @cli.register_custom_action(cls_names="RunnerManager", optional=("scope",))
    @exc.on_http_error(exc.GitlabListError)
    def all(self, scope: Optional[str] = None, **kwargs: Any) -> List[Runner]:
        """List all the runners.

        Args:
            scope: The scope of runners to show, one of: specific,
                shared, active, paused, online
            all: If True, return all the items, without pagination
            per_page: Number of items to retrieve per request
            page: ID of the page to return (starts with page 1)
            iterator: If set to True and no pagination option is
                defined, return a generator instead of a list
            **kwargs: Extra options to send to the server (e.g. sudo)

        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabListError: If the server failed to perform the request

        Returns:
            A list of runners matching the scope.
        """
        query = {} if scope is None else {"scope": scope}
        items = self.gitlab.http_list("/runners/all", query, **kwargs)
        return [self._obj_cls(self, attrs) for attrs in items]

    @cli.register_custom_action(cls_names="RunnerManager", required=("token",))
    @exc.on_http_error(exc.GitlabVerifyError)
    def verify(self, token: str, **kwargs: Any) -> None:
        """Validates authentication credentials for a registered Runner.

        Args:
            token: The runner's authentication token
            **kwargs: Extra options to send to the server (e.g. sudo)

        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabVerifyError: If the server failed to verify the token
        """
        self.gitlab.http_post("/runners/verify", post_data={"token": token}, **kwargs)

    def get(self, id: Union[str, int], lazy: bool = False, **kwargs: Any) -> Runner:
        """Retrieve a single runner by its id."""
        runner = super().get(id=id, lazy=lazy, **kwargs)
        return cast(Runner, runner)
class RunnerAll(RESTObject):
    """A runner as returned by the instance-wide ``/runners/all`` listing."""

    # Show the description, not the numeric id, in repr() output.
    _repr_attr = "description"
class RunnerAllManager(ListMixin, RESTManager):
    """Read-only manager for all runners of the GitLab instance."""

    _path = "/runners/all"
    _obj_cls = RunnerAll
    _list_filters = ("scope", "type", "status", "paused", "tag_list")
    # tag_list is sent as a comma-separated string.
    _types = {"tag_list": types.CommaSeparatedListAttribute}
class GroupRunner(RESTObject):
    """A runner available to a group."""

    pass
class GroupRunnerManager(ListMixin, RESTManager):
    """Read-only manager for the runners of a group."""

    _path = "/groups/{group_id}/runners"
    _obj_cls = GroupRunner
    _from_parent_attrs = {"group_id": "id"}
    # NOTE(review): _create_attrs is declared but there is no CreateMixin on
    # this manager, so it appears unused — confirm whether it can be dropped.
    _create_attrs = RequiredOptional(required=("runner_id",))
    _list_filters = ("scope", "tag_list")
    _types = {"tag_list": types.CommaSeparatedListAttribute}
class ProjectRunner(ObjectDeleteMixin, RESTObject):
    """A runner enabled for a project."""

    pass
class ProjectRunnerManager(CreateMixin, DeleteMixin, ListMixin, RESTManager):
    """Manager for the runners enabled for a project."""

    _path = "/projects/{project_id}/runners"
    _obj_cls = ProjectRunner
    _from_parent_attrs = {"project_id": "id"}
    _create_attrs = RequiredOptional(required=("runner_id",))
    _list_filters = ("scope", "tag_list")
    # tag_list is sent as a comma-separated string.
    _types = {"tag_list": types.CommaSeparatedListAttribute}
| 4,917 | Python | .py | 131 | 30.259542 | 84 | 0.638562 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,098 | group_access_tokens.py | python-gitlab_python-gitlab/gitlab/v4/objects/group_access_tokens.py | from typing import Any, cast, Union
from gitlab.base import RESTManager, RESTObject
from gitlab.mixins import (
CreateMixin,
DeleteMixin,
ObjectDeleteMixin,
ObjectRotateMixin,
RetrieveMixin,
RotateMixin,
)
from gitlab.types import ArrayAttribute, RequiredOptional
# Public API of this module.
__all__ = [
    "GroupAccessToken",
    "GroupAccessTokenManager",
]
class GroupAccessToken(ObjectDeleteMixin, ObjectRotateMixin, RESTObject):
    """A group access token."""

    pass
class GroupAccessTokenManager(
    CreateMixin, DeleteMixin, RetrieveMixin, RotateMixin, RESTManager
):
    """Manager for the access tokens of a group."""

    _path = "/groups/{group_id}/access_tokens"
    _obj_cls = GroupAccessToken
    _from_parent_attrs = {"group_id": "id"}
    _create_attrs = RequiredOptional(
        required=("name", "scopes"),
        optional=("access_level", "expires_at"),
    )
    _types = {"scopes": ArrayAttribute}

    def get(
        self, id: Union[str, int], lazy: bool = False, **kwargs: Any
    ) -> GroupAccessToken:
        """Retrieve a single group access token by its id."""
        token = super().get(id=id, lazy=lazy, **kwargs)
        return cast(GroupAccessToken, token)
| 1,023 | Python | .py | 31 | 28.677419 | 78 | 0.714721 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,099 | package_protection_rules.py | python-gitlab_python-gitlab/gitlab/v4/objects/package_protection_rules.py | from gitlab.base import RESTManager, RESTObject
from gitlab.mixins import (
CreateMixin,
DeleteMixin,
ListMixin,
ObjectDeleteMixin,
SaveMixin,
UpdateMethod,
UpdateMixin,
)
from gitlab.types import RequiredOptional
# Public API of this module.
__all__ = [
    "ProjectPackageProtectionRule",
    "ProjectPackageProtectionRuleManager",
]
class ProjectPackageProtectionRule(ObjectDeleteMixin, SaveMixin, RESTObject):
    """A package protection rule of a project."""

    # Show the protected name pattern, not the numeric id, in repr() output.
    _repr_attr = "package_name_pattern"
class ProjectPackageProtectionRuleManager(
    ListMixin, CreateMixin, DeleteMixin, UpdateMixin, RESTManager
):
    """Manager for the package protection rules of a project."""

    _path = "/projects/{project_id}/packages/protection/rules"
    _obj_cls = ProjectPackageProtectionRule
    _from_parent_attrs = {"project_id": "id"}
    _create_attrs = RequiredOptional(
        required=(
            "package_name_pattern",
            "package_type",
            "minimum_access_level_for_push",
        ),
    )
    _update_attrs = RequiredOptional(
        optional=(
            "package_name_pattern",
            "package_type",
            "minimum_access_level_for_push",
        ),
    )
    # The API updates protection rules with PATCH rather than PUT.
    _update_method = UpdateMethod.PATCH
| 1,129 | Python | .py | 38 | 23.842105 | 77 | 0.686004 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |