_id
stringlengths 2
7
| title
stringlengths 1
88
| partition
stringclasses 3
values | text
stringlengths 75
19.8k
| language
stringclasses 1
value | meta_information
dict |
|---|---|---|---|---|---|
q6300
|
_get_element_attr_or_none
|
train
|
def _get_element_attr_or_none(document, selector, attribute):
    """
    Using a CSS selector, get the element and return the given attribute value, or None if no element.

    Args:
        document (HTMLElement) - HTMLElement document
        selector (str) - CSS selector
        attribute (str) - The attribute to get from the element
    """
    matches = document.cssselect(selector)
    if not matches:
        return None
    return matches[0].get(attribute)
|
python
|
{
"resource": ""
}
|
q6301
|
parse_profile_from_hcard
|
train
|
def parse_profile_from_hcard(hcard: str, handle: str):
    """
    Parse all the fields we can from a hCard document to get a Profile.

    :arg hcard: HTML hcard document (str)
    :arg handle: User handle in username@domain.tld format
    :returns: ``federation.entities.diaspora.entities.DiasporaProfile`` instance
    """
    from federation.entities.diaspora.entities import DiasporaProfile  # Circulars
    doc = html.fromstring(hcard)
    profile = DiasporaProfile(
        name=_get_element_text_or_none(doc, ".fn"),
        image_urls={
            "small": _get_element_attr_or_none(doc, ".entity_photo_small .photo", "src"),
            "medium": _get_element_attr_or_none(doc, ".entity_photo_medium .photo", "src"),
            "large": _get_element_attr_or_none(doc, ".entity_photo .photo", "src"),
        },
        # The comparison already yields a bool — no need for "True if ... else False".
        public=_get_element_text_or_none(doc, ".searchable") == "true",
        id=handle,
        handle=handle,
        guid=_get_element_text_or_none(doc, ".uid"),
        public_key=_get_element_text_or_none(doc, ".key"),
    )
    return profile
|
python
|
{
"resource": ""
}
|
q6302
|
retrieve_and_parse_content
|
train
|
def retrieve_and_parse_content(
        guid: str, handle: str, entity_type: str, sender_key_fetcher: Callable[[str], str]=None,
):
    """Retrieve remote content and return an Entity class instance.

    This is basically the inverse of receiving an entity. Instead, we fetch it, then call "handle_receive".

    :param guid: GUID of the remote content to fetch.
    :param handle: Author handle in username@domain.tld format; used to resolve the remote server.
    :param entity_type: Entity type name used to build the fetch endpoint (lowercased).
    :param sender_key_fetcher: Function to use to fetch sender public key. If not given, network will be used
        to fetch the profile and the key. Function must take handle as only parameter and return a public key.
    :returns: Entity object instance or ``None``
    """
    if not validate_handle(handle):
        return
    _username, domain = handle.split("@")
    url = get_fetch_content_endpoint(domain, entity_type.lower(), guid)
    document, status_code, error = fetch_document(url)
    if status_code == 200:
        request = RequestType(body=document)
        _sender, _protocol, entities = handle_receive(request, sender_key_fetcher=sender_key_fetcher)
        if len(entities) > 1:
            # Fixed: the two string fragments previously concatenated to "weexpected".
            logger.warning("retrieve_and_parse_content - more than one entity parsed from remote even though we "
                           "expected only one! ID %s", guid)
        if entities:
            return entities[0]
        return
    elif status_code == 404:
        logger.warning("retrieve_and_parse_content - remote content %s not found", guid)
        return
    if error:
        raise error
    raise Exception("retrieve_and_parse_content - unknown problem when fetching document: %s, %s, %s" % (
        document, status_code, error,
    ))
|
python
|
{
"resource": ""
}
|
q6303
|
retrieve_and_parse_profile
|
train
|
def retrieve_and_parse_profile(handle):
    """
    Retrieve the remote user and return a Profile object.

    :arg handle: User handle in username@domain.tld format
    :returns: ``federation.entities.Profile`` instance or None
    """
    hcard = retrieve_diaspora_hcard(handle)
    if not hcard:
        return None
    profile = parse_profile_from_hcard(hcard, handle)
    try:
        profile.validate()
        return profile
    except ValueError as ex:
        # Invalid profiles are dropped rather than surfaced to the caller.
        logger.warning("retrieve_and_parse_profile - found profile %s but it didn't validate: %s",
                       profile, ex)
        return None
|
python
|
{
"resource": ""
}
|
q6304
|
get_private_endpoint
|
train
|
def get_private_endpoint(id: str, guid: str) -> str:
    """Get remote endpoint for delivering private payloads."""
    _user, domain = id.split("@")
    return f"https://{domain}/receive/users/{guid}"
|
python
|
{
"resource": ""
}
|
q6305
|
struct_to_xml
|
train
|
def struct_to_xml(node, struct):
    """
    Turn a list of dicts into XML nodes with tag names taken from the dict
    keys and element text taken from dict values. This is a list of dicts
    so that the XML nodes can be ordered in the XML output.
    """
    for mapping in struct:
        for tag, text in mapping.items():
            child = etree.SubElement(node, tag)
            child.text = text
|
python
|
{
"resource": ""
}
|
q6306
|
get_full_xml_representation
|
train
|
def get_full_xml_representation(entity, private_key):
    """Get full XML representation of an entity.

    This contains the <XML><post>..</post></XML> wrapper.
    Accepts either a Base entity or a Diaspora entity.
    Author `private_key` must be given so that certain entities can be signed.
    """
    from federation.entities.diaspora.mappers import get_outbound_entity
    outbound = get_outbound_entity(entity, private_key)
    rendered = etree.tostring(outbound.to_xml()).decode("utf-8")
    return "<XML><post>%s</post></XML>" % rendered
|
python
|
{
"resource": ""
}
|
q6307
|
add_element_to_doc
|
train
|
def add_element_to_doc(doc, tag, value):
    """Set text value of an etree.Element of tag, appending a new element with given tag if it doesn't exist."""
    target = doc.find(".//%s" % tag)
    if target is None:
        # No such element yet — create it as a direct child of doc.
        target = etree.SubElement(doc, tag)
    target.text = value
|
python
|
{
"resource": ""
}
|
q6308
|
generate_host_meta
|
train
|
def generate_host_meta(template=None, *args, **kwargs):
    """Generate a host-meta XRD document.

    Template specific key-value pairs need to be passed as ``kwargs``, see classes.

    :arg template: Ready template to fill with args, for example "diaspora" (optional)
    :returns: Rendered XRD document (str)
    """
    hostmeta_class = DiasporaHostMeta if template == "diaspora" else BaseHostMeta
    return hostmeta_class(*args, **kwargs).render()
|
python
|
{
"resource": ""
}
|
q6309
|
generate_legacy_webfinger
|
train
|
def generate_legacy_webfinger(template=None, *args, **kwargs):
    """Generate a legacy webfinger XRD document.

    Template specific key-value pairs need to be passed as ``kwargs``, see classes.

    :arg template: Ready template to fill with args, for example "diaspora" (optional)
    :returns: Rendered XRD document (str)
    """
    webfinger_class = DiasporaWebFinger if template == "diaspora" else BaseLegacyWebFinger
    return webfinger_class(*args, **kwargs).render()
|
python
|
{
"resource": ""
}
|
q6310
|
generate_nodeinfo2_document
|
train
|
def generate_nodeinfo2_document(**kwargs):
    """
    Generate a NodeInfo2 document.

    Pass in a dictionary as per NodeInfo2 1.0 schema:
    https://github.com/jaywink/nodeinfo2/blob/master/schemas/1.0/schema.json

    Minimum required schema:

        {server:
            baseUrl
            name
            software
            version
        }
        openRegistrations

    Protocols default will match what this library supports, ie "diaspora" currently.

    :return: dict
    :raises: KeyError on missing required items
    """
    # Hoist the nested .get() chains once instead of re-evaluating them per field.
    server = kwargs['server']  # required — raises KeyError when missing
    organization = kwargs.get('organization', {})
    service = kwargs.get('service', {})
    usage = kwargs.get('usage', {})
    users = usage.get('users', {})
    return {
        "version": "1.0",
        "server": {
            "baseUrl": server['baseUrl'],
            "name": server['name'],
            "software": server['software'],
            "version": server['version'],
        },
        "organization": {
            "name": organization.get('name', None),
            "contact": organization.get('contact', None),
            "account": organization.get('account', None),
        },
        "protocols": kwargs.get('protocols', ["diaspora"]),
        "relay": kwargs.get('relay', ''),
        "services": {
            "inbound": service.get('inbound', []),
            "outbound": service.get('outbound', []),
        },
        "openRegistrations": kwargs['openRegistrations'],  # required — raises KeyError when missing
        "usage": {
            "users": {
                "total": users.get('total'),
                "activeHalfyear": users.get('activeHalfyear'),
                "activeMonth": users.get('activeMonth'),
                "activeWeek": users.get('activeWeek'),
            },
            "localPosts": usage.get('localPosts'),
            "localComments": usage.get('localComments'),
        }
    }
|
python
|
{
"resource": ""
}
|
q6311
|
generate_hcard
|
train
|
def generate_hcard(template=None, **kwargs):
    """Generate a hCard document.

    Template specific key-value pairs need to be passed as ``kwargs``, see classes.

    :arg template: Ready template to fill with args, for example "diaspora" (optional)
    :returns: HTML document (str)
    """
    if template != "diaspora":
        # Only the Diaspora hCard template is implemented.
        raise NotImplementedError()
    return DiasporaHCard(**kwargs).render()
|
python
|
{
"resource": ""
}
|
q6312
|
retrieve_remote_content
|
train
|
def retrieve_remote_content(
        id: str, guid: str=None, handle: str=None, entity_type: str=None, sender_key_fetcher: Callable[[str], str]=None,
):
    """Retrieve remote content and return an Entity object.

    Currently, due to no other protocols supported, always use the Diaspora protocol.

    :param sender_key_fetcher: Function to use to fetch sender public key. If not given, network will be used
        to fetch the profile and the key. Function must take handle as only parameter and return a public key.
    :returns: Entity class instance or ``None``
    """
    # TODO add support for AP
    protocol_name = "diaspora"
    utils = importlib.import_module("federation.utils.%s" % protocol_name)
    return utils.retrieve_and_parse_content(
        # Fall back to the full id when no guid was given.
        guid=guid or id, handle=handle, entity_type=entity_type, sender_key_fetcher=sender_key_fetcher,
    )
|
python
|
{
"resource": ""
}
|
q6313
|
retrieve_remote_profile
|
train
|
def retrieve_remote_profile(id: str) -> Optional[Profile]:
    """High level retrieve profile method.

    Retrieve the profile from a remote location, using protocol based on the given ID.
    """
    protocol = identify_protocol_by_id(id)
    module_path = "federation.utils.%s" % protocol.PROTOCOL_NAME
    utils = importlib.import_module(module_path)
    return utils.retrieve_and_parse_profile(id)
|
python
|
{
"resource": ""
}
|
q6314
|
handle_receive
|
train
|
def handle_receive(
        request: RequestType,
        user: UserType = None,
        sender_key_fetcher: Callable[[str], str] = None,
        skip_author_verification: bool = False
) -> Tuple[str, str, List]:
    """Takes a request and passes it to the correct protocol.

    Returns a tuple of:
      - sender id
      - protocol name
      - list of entities

    NOTE! The returned sender is NOT necessarily the *author* of the entity. By sender here we're
    talking about the sender of the *request*. If this object is being relayed by the sender, the author
    could actually be a different identity.

    :arg request: Request object of type RequestType - note not a HTTP request even though the structure is similar
    :arg user: User that will be passed to `protocol.receive` (only required on private encrypted content)
        MUST have a `private_key` and `id` if given.
    :arg sender_key_fetcher: Function that accepts sender handle and returns public key (optional)
    :arg skip_author_verification: Don't verify sender (test purposes, false default)
    :returns: Tuple of sender id, protocol name and list of entity objects
    """
    logger.debug("handle_receive: processing request: %s", request)
    protocol_module = identify_protocol_by_request(request)
    protocol_name = protocol_module.PROTOCOL_NAME
    logger.debug("handle_receive: using protocol %s", protocol_name)
    protocol = protocol_module.Protocol()
    sender, message = protocol.receive(
        request, user, sender_key_fetcher, skip_author_verification=skip_author_verification)
    logger.debug("handle_receive: sender %s, message %s", sender, message)
    # The mapper module mirrors the protocol name.
    mappers = importlib.import_module("federation.entities.%s.mappers" % protocol_name)
    entities = mappers.message_to_objects(message, sender, sender_key_fetcher, user)
    logger.debug("handle_receive: entities %s", entities)
    return sender, protocol_name, entities
|
python
|
{
"resource": ""
}
|
q6315
|
identify_id
|
train
|
def identify_id(id: str) -> bool:
    """
    Try to identify whether this is an ActivityPub ID.
    """
    # ActivityPub IDs are URLs; scheme match is case-insensitive.
    return bool(re.match(r'^https?://', id, flags=re.IGNORECASE))
|
python
|
{
"resource": ""
}
|
q6316
|
identify_request
|
train
|
def identify_request(request: RequestType) -> bool:
    """
    Try to identify whether this is an ActivityPub request.
    """
    # noinspection PyBroadException
    try:
        # ActivityPub payloads are JSON-LD and carry an @context key.
        return "@context" in json.loads(decode_if_bytes(request.body))
    except Exception:
        return False
|
python
|
{
"resource": ""
}
|
q6317
|
Protocol.receive
|
train
|
def receive(
        self,
        request: RequestType,
        user: UserType = None,
        sender_key_fetcher: Callable[[str], str] = None,
        skip_author_verification: bool = False) -> Tuple[str, dict]:
    """
    Receive a request.

    For testing purposes, `skip_author_verification` can be passed. Authorship will not be verified.
    """
    self.request = request
    self.user = user
    self.get_contact_key = sender_key_fetcher
    self.payload = json.loads(decode_if_bytes(request.body))
    self.extract_actor()
    if not skip_author_verification:
        # Verify the message is from who it claims to be
        self.verify_signature()
    return self.actor, self.payload
|
python
|
{
"resource": ""
}
|
q6318
|
get_configuration
|
train
|
def get_configuration():
    """
    Combine defaults with the Django configuration.
    """
    configuration = {
        "get_object_function": None,
        "hcard_path": "/hcard/users/",
        "nodeinfo2_function": None,
        "process_payload_function": None,
        "search_path": None,
        # TODO remove or default to True once AP support is more ready
        "activitypub": False,
    }
    configuration.update(settings.FEDERATION)
    required_keys = ("get_private_key_function", "get_profile_function", "base_url")
    if any(key not in configuration for key in required_keys):
        raise ImproperlyConfigured("Missing required FEDERATION settings, please check documentation.")
    return configuration
|
python
|
{
"resource": ""
}
|
q6319
|
get_function_from_config
|
train
|
def get_function_from_config(item):
    """
    Import the function to get profile by handle.
    """
    func_path = get_configuration().get(item)
    module_path, func_name = func_path.rsplit(".", 1)
    # Resolve the dotted path to the actual callable.
    return getattr(importlib.import_module(module_path), func_name)
|
python
|
{
"resource": ""
}
|
q6320
|
ActivitypubFollow.post_receive
|
train
|
def post_receive(self) -> None:
    """
    Post receive hook - send back follow ack.

    Builds an ``ActivitypubAccept`` for the received Follow and delivers it to the
    follower's private inbox via ``handle_send``. Logs a warning and aborts (never
    raises) when any prerequisite is missing.
    """
    from federation.utils.activitypub import retrieve_and_parse_profile  # Circulars
    try:
        from federation.utils.django import get_function_from_config
    except ImportError:
        # Config lookup is Django-only; without it we cannot sign or send the Accept.
        logger.warning("ActivitypubFollow.post_receive - Unable to send automatic Accept back, only supported on "
                       "Django currently")
        return
    # The Accept is signed with the private key of the followed profile (target_id).
    get_private_key_function = get_function_from_config("get_private_key_function")
    key = get_private_key_function(self.target_id)
    if not key:
        logger.warning("ActivitypubFollow.post_receive - Failed to send automatic Accept back: could not find "
                       "profile to sign it with")
        return
    accept = ActivitypubAccept(
        # Unique per-acknowledgement activity ID anchored on the followed profile.
        activity_id=f"{self.target_id}#accept-{uuid.uuid4()}",
        actor_id=self.target_id,
        target_id=self.activity_id,
    )
    try:
        profile = retrieve_and_parse_profile(self.actor_id)
    except Exception:
        # Best-effort fetch: any failure is treated the same as "profile not found".
        profile = None
    if not profile:
        logger.warning("ActivitypubFollow.post_receive - Failed to fetch remote profile for sending back Accept")
        return
    try:
        handle_send(
            accept,
            UserType(id=self.target_id, private_key=key),
            recipients=[{
                # Deliver only to the follower's private inbox.
                "fid": profile.inboxes["private"],
                "protocol": "activitypub",
                "public": False,
            }],
        )
    except Exception:
        logger.exception("ActivitypubFollow.post_receive - Failed to send Accept back")
|
python
|
{
"resource": ""
}
|
q6321
|
retrieve_and_parse_document
|
train
|
def retrieve_and_parse_document(fid: str) -> Optional[Any]:
    """
    Retrieve remote document by ID and return the entity.
    """
    document, status_code, ex = fetch_document(fid, extra_headers={'accept': 'application/activity+json'})
    if not document:
        return None
    parsed = json.loads(decode_if_bytes(document))
    entities = message_to_objects(parsed, fid)
    # Only the first parsed entity is returned; None when nothing parsed.
    return entities[0] if entities else None
|
python
|
{
"resource": ""
}
|
q6322
|
retrieve_and_parse_profile
|
train
|
def retrieve_and_parse_profile(fid: str) -> Optional[ActivitypubProfile]:
    """
    Retrieve the remote fid and return a Profile object.
    """
    profile = retrieve_and_parse_document(fid)
    if not profile:
        return None
    try:
        profile.validate()
        return profile
    except ValueError as ex:
        # Drop invalid profiles instead of propagating the error.
        logger.warning("retrieve_and_parse_profile - found profile %s but it didn't validate: %s",
                       profile, ex)
        return None
|
python
|
{
"resource": ""
}
|
q6323
|
identify_protocol
|
train
|
def identify_protocol(method, value):
    # type: (str, Union[str, RequestType]) -> str
    """
    Loop through protocols, import the protocol module and try to identify the id or request.
    """
    for protocol_name in PROTOCOLS:
        module = importlib.import_module(f"federation.protocols.{protocol_name}.protocol")
        identifier = getattr(module, f"identify_{method}")
        if identifier(value):
            return module
    # No protocol recognised the given value.
    raise NoSuitableProtocolFoundError()
|
python
|
{
"resource": ""
}
|
q6324
|
identify_request
|
train
|
def identify_request(request: RequestType):
    """Try to identify whether this is a Diaspora request.

    Try first public message. Then private message. The check if this is a legacy payload.
    """
    # Private encrypted JSON payload
    try:
        if "encrypted_magic_envelope" in json.loads(decode_if_bytes(request.body)):
            return True
    except Exception:
        pass
    # Public XML payload
    try:
        document = etree.fromstring(encode_if_text(request.body))
        return document.tag == MAGIC_ENV_TAG
    except Exception:
        pass
    return False
|
python
|
{
"resource": ""
}
|
q6325
|
Protocol.get_json_payload_magic_envelope
|
train
|
def get_json_payload_magic_envelope(self, payload):
    """Encrypted JSON payload"""
    # Decrypt using the receiving user's private key.
    return EncryptedPayload.decrypt(payload=payload, private_key=self._get_user_key())
|
python
|
{
"resource": ""
}
|
q6326
|
Protocol.store_magic_envelope_doc
|
train
|
def store_magic_envelope_doc(self, payload):
    """Get the Magic Envelope, trying JSON first."""
    decoded = decode_if_bytes(payload)
    try:
        json_payload = json.loads(decoded)
    except ValueError:
        # XML payload
        xml = unquote(decoded).lstrip().encode("utf-8")
        logger.debug("diaspora.protocol.store_magic_envelope_doc: xml payload: %s", xml)
        self.doc = etree.fromstring(xml)
    else:
        logger.debug("diaspora.protocol.store_magic_envelope_doc: json payload: %s", json_payload)
        self.doc = self.get_json_payload_magic_envelope(json_payload)
|
python
|
{
"resource": ""
}
|
q6327
|
Protocol.receive
|
train
|
def receive(
        self,
        request: RequestType,
        user: UserType = None,
        sender_key_fetcher: Callable[[str], str] = None,
        skip_author_verification: bool = False) -> Tuple[str, str]:
    """Receive a payload.

    For testing purposes, `skip_author_verification` can be passed. Authorship will not be verified."""
    self.user = user
    self.get_contact_key = sender_key_fetcher
    # Parse the magic envelope out of the raw request body.
    self.store_magic_envelope_doc(request.body)
    # Open payload and get actual message
    self.content = self.get_message_content()
    # Get sender handle
    self.sender_handle = self.get_sender()
    if not skip_author_verification:
        # Verify the message is from who it claims to be
        self.verify_signature()
    return self.sender_handle, self.content
|
python
|
{
"resource": ""
}
|
q6328
|
Protocol.get_message_content
|
train
|
def get_message_content(self):
    """
    Given the Slap XML, extract out the payload.
    """
    data_node = self.doc.find(".//{http://salmon-protocol.org/ns/magic-env}data")
    content = urlsafe_b64decode(data_node.text.encode("ascii"))
    logger.debug("diaspora.protocol.get_message_content: %s", content)
    return content
|
python
|
{
"resource": ""
}
|
q6329
|
Reaction.validate_reaction
|
train
|
def validate_reaction(self):
    """Ensure reaction is of a certain type.

    Mainly for future expansion.
    """
    if self.reaction in self._reaction_valid_values:
        return
    raise ValueError("reaction should be one of: {valid}".format(
        valid=", ".join(self._reaction_valid_values)
    ))
|
python
|
{
"resource": ""
}
|
q6330
|
Relationship.validate_relationship
|
train
|
def validate_relationship(self):
    """Ensure relationship is of a certain type."""
    if self.relationship in self._relationship_valid_values:
        return
    raise ValueError("relationship should be one of: {valid}".format(
        valid=", ".join(self._relationship_valid_values)
    ))
|
python
|
{
"resource": ""
}
|
q6331
|
activitypub_object_view
|
train
|
def activitypub_object_view(func):
    """
    Generic ActivityPub object view decorator.

    Takes an ID and fetches it using the provided function. Renders the ActivityPub object
    in JSON if the object is found. Falls back to decorated view, if the content
    type doesn't match.
    """
    def inner(request, *args, **kwargs):
        def get(request, *args, **kwargs):
            # TODO remove once AP support is more ready
            config = get_configuration()
            if not config.get("activitypub"):
                return func(request, *args, **kwargs)
            accept = request.META.get('HTTP_ACCEPT', '')
            ap_content_types = (
                'application/json', 'application/activity+json', 'application/ld+json',
            )
            # Serve AP JSON only when the client explicitly accepts one of the AP types.
            if not any(content_type in accept for content_type in ap_content_types):
                return func(request, *args, **kwargs)
            get_object_function = get_function_from_config('get_object_function')
            obj = get_object_function(request)
            if not obj:
                return HttpResponseNotFound()
            as2_obj = obj.as_protocol('activitypub')
            return JsonResponse(as2_obj.to_as2(), content_type='application/activity+json')

        def post(request, *args, **kwargs):
            process_payload_function = get_function_from_config('process_payload_function')
            if process_payload_function(request):
                return JsonResponse({}, content_type='application/json', status=202)
            return JsonResponse({"result": "error"}, content_type='application/json', status=400)

        if request.method == 'GET':
            return get(request, *args, **kwargs)
        if request.method == 'POST' and request.path.endswith('/inbox/'):
            return post(request, *args, **kwargs)
        return HttpResponse(status=405)
    return inner
|
python
|
{
"resource": ""
}
|
q6332
|
BaseEntity.validate
|
train
|
def validate(self):
    """Do validation.

    1) Check `_required` have been given
    2) Make sure all attrs in required have a non-empty value
    3) Loop through attributes and call their `validate_<attr>` methods, if any.
    4) Validate allowed children
    5) Validate signatures
    """
    attributes = []
    validates = []
    # Collect attributes and validation methods
    for attr in dir(self):
        if attr.startswith("_"):
            continue
        # Bug fix: the original compared a type object to the string "method",
        # which is always unequal — bound methods were never filtered out.
        if type(getattr(self, attr)).__name__ == "method":
            continue
        validate_method = getattr(self, "validate_{attr}".format(attr=attr), None)
        if validate_method:
            validates.append(validate_method)
        attributes.append(attr)
    self._validate_empty_attributes(attributes)
    self._validate_required(attributes)
    self._validate_attributes(validates)
    self._validate_children()
    self._validate_signatures()
|
python
|
{
"resource": ""
}
|
q6333
|
BaseEntity._validate_required
|
train
|
def _validate_required(self, attributes):
"""Ensure required attributes are present."""
required_fulfilled = set(self._required).issubset(set(attributes))
if not required_fulfilled:
raise ValueError(
"Not all required attributes fulfilled. Required: {required}".format(required=set(self._required))
)
|
python
|
{
"resource": ""
}
|
q6334
|
BaseEntity._validate_empty_attributes
|
train
|
def _validate_empty_attributes(self, attributes):
"""Check that required attributes are not empty."""
attrs_to_check = set(self._required) & set(attributes)
for attr in attrs_to_check:
value = getattr(self, attr) # We should always have a value here
if value is None or value == "":
raise ValueError(
"Attribute %s cannot be None or an empty string since it is required." % attr
)
|
python
|
{
"resource": ""
}
|
q6335
|
BaseEntity._validate_children
|
train
|
def _validate_children(self):
"""Check that the children we have are allowed here."""
for child in self._children:
if child.__class__ not in self._allowed_children:
raise ValueError(
"Child %s is not allowed as a children for this %s type entity." % (
child, self.__class__
)
)
|
python
|
{
"resource": ""
}
|
q6336
|
ParticipationMixin.validate_participation
|
train
|
def validate_participation(self):
    """Ensure participation is of a certain type."""
    if self.participation in self._participation_valid_values:
        return
    raise ValueError("participation should be one of: {valid}".format(
        valid=", ".join(self._participation_valid_values)
    ))
|
python
|
{
"resource": ""
}
|
q6337
|
RawContentMixin.tags
|
train
|
def tags(self):
    """Returns a `set` of unique tags contained in `raw_content`."""
    content = self.raw_content
    if not content:
        return set()
    found = set()
    for word in content.split():
        # A tag is a word starting with "#" and containing more than the hash.
        if word.startswith("#") and len(word) > 1:
            found.add(word.strip("#").lower())
    return found
|
python
|
{
"resource": ""
}
|
q6338
|
handle_create_payload
|
train
|
def handle_create_payload(
        entity: BaseEntity,
        author_user: UserType,
        protocol_name: str,
        to_user_key: RsaKey = None,
        parent_user: UserType = None,
) -> str:
    """Create a payload with the given protocol.

    Any given user arguments must have ``private_key`` and ``handle`` attributes.

    :arg entity: Entity object to send. Can be a base entity or a protocol specific one.
    :arg author_user: User authoring the object.
    :arg protocol_name: Protocol to create payload for.
    :arg to_user_key: Public key of user private payload is being sent to, required for private payloads.
    :arg parent_user: (Optional) User object of the parent object, if there is one. This must be given for the
        Diaspora protocol if a parent object exists, so that a proper ``parent_author_signature`` can
        be generated. If given, the payload will be sent as this user.
    :returns: Built payload message (str)
    """
    mappers = importlib.import_module(f"federation.entities.{protocol_name}.mappers")
    protocol_module = importlib.import_module(f"federation.protocols.{protocol_name}.protocol")
    protocol = protocol_module.Protocol()
    outbound_entity = mappers.get_outbound_entity(entity, author_user.private_key)
    if parent_user:
        outbound_entity.sign_with_parent(parent_user.private_key)
    # When a parent user is given, the payload is sent as that user.
    send_as_user = parent_user or author_user
    return protocol.build_send(entity=outbound_entity, from_user=send_as_user, to_user_key=to_user_key)
|
python
|
{
"resource": ""
}
|
q6339
|
MagicEnvelope.get_sender
|
train
|
def get_sender(doc):
    """Get the key_id from the `sig` element which contains urlsafe_b64encoded Diaspora handle.

    :param doc: ElementTree document
    :returns: Diaspora handle
    """
    sig_element = doc.find(".//{%s}sig" % NAMESPACE)
    encoded_handle = sig_element.get("key_id")
    return urlsafe_b64decode(encoded_handle).decode("utf-8")
|
python
|
{
"resource": ""
}
|
q6340
|
MagicEnvelope.create_payload
|
train
|
def create_payload(self):
"""Create the payload doc.
Returns:
str
"""
doc = etree.fromstring(self.message)
self.payload = etree.tostring(doc, encoding="utf-8")
self.payload = urlsafe_b64encode(self.payload).decode("ascii")
return self.payload
|
python
|
{
"resource": ""
}
|
q6341
|
MagicEnvelope._build_signature
|
train
|
def _build_signature(self):
    """Create the signature using the private key."""
    # Signature base string per the Salmon magic envelope spec:
    # payload, then base64 encoded data type, encoding and algorithm, dot separated.
    parts = [
        self.payload,
        b64encode(b"application/xml").decode("ascii"),
        b64encode(b"base64url").decode("ascii"),
        b64encode(b"RSA-SHA256").decode("ascii"),
    ]
    sig_hash = SHA256.new(".".join(parts).encode("ascii"))
    sig = urlsafe_b64encode(PKCS1_v1_5.new(self.private_key).sign(sig_hash))
    key_id = urlsafe_b64encode(bytes(self.author_handle, encoding="utf-8"))
    return sig, key_id
|
python
|
{
"resource": ""
}
|
q6342
|
MagicEnvelope.verify
|
train
|
def verify(self):
    """Verify Magic Envelope document against public key."""
    if not self.public_key:
        self.fetch_public_key()
    env_ns = "{http://salmon-protocol.org/ns/magic-env}"
    data = self.doc.find(".//%sdata" % env_ns).text
    sig = self.doc.find(".//%ssig" % env_ns).text
    # Rebuild the signature base string exactly as it was signed.
    sig_contents = '.'.join([
        data,
        b64encode(b"application/xml").decode("ascii"),
        b64encode(b"base64url").decode("ascii"),
        b64encode(b"RSA-SHA256").decode("ascii")
    ])
    sig_hash = SHA256.new(sig_contents.encode("ascii"))
    verifier = PKCS1_v1_5.new(RSA.importKey(self.public_key))
    if not verifier.verify(sig_hash, urlsafe_b64decode(sig)):
        raise SignatureVerificationError("Signature cannot be verified using the given public key")
|
python
|
{
"resource": ""
}
|
q6343
|
DiasporaEntityMixin.extract_mentions
|
train
|
def extract_mentions(self):
    """
    Extract mentions from an entity with ``raw_content``.

    Mentions use the Diaspora syntax ``@{Name; handle@domain.tld}``; the handle
    part is returned.

    :return: set
    """
    if not hasattr(self, "raw_content"):
        return set()
    matches = re.findall(r'@{[^;]+; [\w.-]+@[^}]+}', self.raw_content)
    # Removed a redundant re-wrap ({s for s in mentions}) present in the original.
    return {match.split(';')[1].strip(' }') for match in matches}
|
python
|
{
"resource": ""
}
|
q6344
|
fetch_country_by_ip
|
train
|
def fetch_country_by_ip(ip):
    """
    Fetches country code by IP

    Returns empty string if the request fails in non-200 code.

    Uses the ipdata.co service which has the following rules:

    * Max 1500 requests per day

    See: https://ipdata.co/docs.html#python-library
    """
    data = ipdata.ipdata().lookup(ip)
    if data.get('status') == 200:
        return data.get('response', {}).get('country_code', '')
    return ''
|
python
|
{
"resource": ""
}
|
q6345
|
fetch_document
|
train
|
def fetch_document(url=None, host=None, path="/", timeout=10, raise_ssl_errors=True, extra_headers=None):
    """Helper method to fetch remote document.

    Must be given either the ``url`` or ``host``.
    If ``url`` is given, only that will be tried without falling back to http from https.
    If ``host`` given, `path` will be added to it. Will fall back to http on non-success status code.

    :arg url: Full url to fetch, including protocol
    :arg host: Domain part only without path or protocol
    :arg path: Path without domain (defaults to "/")
    :arg timeout: Seconds to wait for response (defaults to 10)
    :arg raise_ssl_errors: Pass False if you want to try HTTP even for sites with SSL errors (default True)
    :arg extra_headers: Optional dict of additional headers merged over the default user-agent header
    :returns: Tuple of document (str or None), status code (int or None) and error (an exception class instance or None)
    :raises ValueError: If neither url nor host are given as parameters
    """
    if not url and not host:
        raise ValueError("Need url or host.")
    logger.debug("fetch_document: url=%s, host=%s, path=%s, timeout=%s, raise_ssl_errors=%s",
                 url, host, path, timeout, raise_ssl_errors)
    headers = {'user-agent': USER_AGENT}
    if extra_headers:
        headers.update(extra_headers)
    if url:
        # Use url since it was given
        logger.debug("fetch_document: trying %s", url)
        try:
            response = requests.get(url, timeout=timeout, headers=headers)
            logger.debug("fetch_document: found document, code %s", response.status_code)
            # NOTE: no raise_for_status() here — explicit url fetches return the body
            # for any status code; only transport errors become the error return.
            return response.text, response.status_code, None
        except RequestException as ex:
            logger.debug("fetch_document: exception %s", ex)
            return None, None, ex
    # Build url with some little sanitizing
    host_string = host.replace("http://", "").replace("https://", "").strip("/")
    path_string = path if path.startswith("/") else "/%s" % path
    url = "https://%s%s" % (host_string, path_string)
    logger.debug("fetch_document: trying %s", url)
    try:
        response = requests.get(url, timeout=timeout, headers=headers)
        logger.debug("fetch_document: found document, code %s", response.status_code)
        # Non-2xx raises HTTPError, which enters the fallback handler below.
        response.raise_for_status()
        return response.text, response.status_code, None
    except (HTTPError, SSLError, ConnectionError) as ex:
        if isinstance(ex, SSLError) and raise_ssl_errors:
            # Caller asked not to downgrade on SSL problems — surface the error.
            logger.debug("fetch_document: exception %s", ex)
            return None, None, ex
        # Try http then
        url = url.replace("https://", "http://")
        logger.debug("fetch_document: trying %s", url)
        try:
            response = requests.get(url, timeout=timeout, headers=headers)
            logger.debug("fetch_document: found document, code %s", response.status_code)
            response.raise_for_status()
            return response.text, response.status_code, None
        except RequestException as ex:
            logger.debug("fetch_document: exception %s", ex)
            return None, None, ex
    except RequestException as ex:
        # Any other request failure (e.g. timeout) on the https attempt.
        logger.debug("fetch_document: exception %s", ex)
        return None, None, ex
|
python
|
{
"resource": ""
}
|
q6346
|
fetch_host_ip
|
train
|
def fetch_host_ip(host: str) -> str:
    """
    Fetch ip by host
    """
    try:
        return socket.gethostbyname(host)
    except socket.gaierror:
        # Resolution failed — signal with an empty string.
        return ''
|
python
|
{
"resource": ""
}
|
q6347
|
fetch_host_ip_and_country
|
train
|
def fetch_host_ip_and_country(host: str) -> Tuple:
    """
    Fetch ip and country by host

    :returns: Tuple of (ip, country code); both empty strings when resolution fails.
    """
    ip = fetch_host_ip(host)
    # Bug fix: the original checked ``if not host`` after the lookup, so a failed
    # resolution (empty ip) still triggered a country lookup for ''.
    if not ip:
        return '', ''
    country = fetch_country_by_ip(ip)
    return ip, country
|
python
|
{
"resource": ""
}
|
q6348
|
parse_http_date
|
train
|
def parse_http_date(date):
    """
    Parse a date format as specified by HTTP RFC7231 section 7.1.1.1.

    The three formats allowed by the RFC (RFC1123, RFC850, asctime) are all
    accepted, even if only the first one is still in widespread use.

    Return an integer expressed in seconds since the epoch, in UTC.

    Implementation copied from Django.
    https://github.com/django/django/blob/master/django/utils/http.py#L157
    License: BSD 3-clause
    """
    month_names = 'jan feb mar apr may jun jul aug sep oct nov dec'.split()
    day_re = r'(?P<day>\d{2})'
    day2_re = r'(?P<day>[ \d]\d)'          # asctime pads single digits with a space
    mon_re = r'(?P<mon>\w{3})'
    year4_re = r'(?P<year>\d{4})'
    year2_re = r'(?P<year>\d{2})'
    time_re = r'(?P<hour>\d{2}):(?P<min>\d{2}):(?P<sec>\d{2})'
    # email.utils.parsedate() does the job for RFC1123 dates; unfortunately
    # RFC7231 makes it mandatory to support RFC850 dates too. So we roll
    # our own RFC-compliant parsing.
    patterns = (
        re.compile(r'^\w{3}, %s %s %s %s GMT$' % (day_re, mon_re, year4_re, time_re)),   # RFC1123
        re.compile(r'^\w{6,9}, %s-%s-%s %s GMT$' % (day_re, mon_re, year2_re, time_re)), # RFC850
        re.compile(r'^\w{3} %s %s %s %s$' % (mon_re, day2_re, time_re, year4_re)),       # asctime
    )
    match = next((m for m in (p.match(date) for p in patterns) if m), None)
    if match is None:
        raise ValueError("%r is not in a valid HTTP date format" % date)
    try:
        yy = int(match.group('year'))
        if yy < 100:
            # Two-digit years: 00-69 -> 2000s, 70-99 -> 1900s.
            yy += 2000 if yy < 70 else 1900
        parsed = datetime.datetime(
            yy,
            month_names.index(match.group('mon').lower()) + 1,
            int(match.group('day')),
            int(match.group('hour')),
            int(match.group('min')),
            int(match.group('sec')),
        )
        return calendar.timegm(parsed.utctimetuple())
    except Exception as exc:
        raise ValueError("%r is not a valid date" % date) from exc
|
python
|
{
"resource": ""
}
|
q6349
|
send_document
|
train
|
def send_document(url, data, timeout=10, *args, **kwargs):
    """Helper method to send a document via POST.

    Additional ``*args`` and ``**kwargs`` will be passed on to ``requests.post``.

    :arg url: Full url to send to, including protocol
    :arg data: Dictionary (will be form-encoded), bytes, or file-like object to send in the body
    :arg timeout: Seconds to wait for response (defaults to 10)
    :returns: Tuple of status code (int or None) and error (exception class instance or None)
    """
    logger.debug("send_document: url=%s, data=%s, timeout=%s", url, data, timeout)
    # Always send our user agent; caller-supplied headers may add/override.
    headers = CaseInsensitiveDict({'User-Agent': USER_AGENT})
    if "headers" in kwargs:
        headers.update(kwargs["headers"])
    kwargs["data"] = data
    kwargs["timeout"] = timeout
    kwargs["headers"] = headers
    try:
        response = requests.post(url, *args, **kwargs)
    except RequestException as ex:
        logger.debug("send_document: exception %s", ex)
        return None, ex
    logger.debug("send_document: response status code %s", response.status_code)
    return response.status_code, None
|
python
|
{
"resource": ""
}
|
q6350
|
get_http_authentication
|
train
|
def get_http_authentication(private_key: RsaKey, private_key_id: str) -> HTTPSignatureHeaderAuth:
    """
    Build an HTTP signature authentication object for signing a request.

    :param private_key: RSA private key used to sign.
    :param private_key_id: Key id advertised in the signature header.
    """
    signed_headers = ["(request-target)", "user-agent", "host", "date"]
    return HTTPSignatureHeaderAuth(
        headers=signed_headers,
        algorithm="rsa-sha256",
        key=private_key.exportKey(),
        key_id=private_key_id,
    )
|
python
|
{
"resource": ""
}
|
q6351
|
verify_request_signature
|
train
|
def verify_request_signature(request: RequestType, public_key: Union[str, bytes]):
    """
    Verify HTTP signature in request against a public key.

    :param request: Request object with ``headers`` mapping.
    :param public_key: PEM public key as str or bytes.
    :raises ValueError: if the Date header is missing or outside the
        accepted 30 second clock-skew window.
    """
    key = encode_if_text(public_key)
    date_header = request.headers.get("Date")
    if not date_header:
        # Bug fix: corrected typo in the error message ("Rquest" -> "Request").
        raise ValueError("Request Date header is missing")
    ts = parse_http_date(date_header)
    dt = datetime.datetime.utcfromtimestamp(ts).replace(tzinfo=pytz.utc)
    # Allow only a small clock skew between peers to limit replay.
    delta = datetime.timedelta(seconds=30)
    now = datetime.datetime.utcnow().replace(tzinfo=pytz.utc)
    if dt < now - delta or dt > now + delta:
        raise ValueError("Request Date is too far in future or past")
    HTTPSignatureHeaderAuth.verify(request, key_resolver=lambda **kwargs: key)
|
python
|
{
"resource": ""
}
|
q6352
|
DiasporaRetraction.entity_type_to_remote
|
train
|
def entity_type_to_remote(value):
    """Convert entity type between our Entity names and Diaspora names."""
    # Invert the mapping: return the Diaspora key whose mapped value matches.
    for remote_name, local_name in DiasporaRetraction.mapped.items():
        if local_name == value:
            return remote_name
    # Unknown types pass through unchanged.
    return value
|
python
|
{
"resource": ""
}
|
q6353
|
get_base_attributes
|
train
|
def get_base_attributes(entity):
    """Build a dict of attributes of an entity.

    Returns attributes and their values, ignoring any properties, functions and anything that starts
    with an underscore.
    """
    klass = entity.__class__

    def _is_plain_attr(obj):
        # Exclude properties and any callables (methods, functions).
        return not isinstance(obj, property) and not inspect.isroutine(obj)

    return {
        name: getattr(entity, name)
        for name, _ in inspect.getmembers(klass, _is_plain_attr)
        if not name.startswith("_")
    }
|
python
|
{
"resource": ""
}
|
q6354
|
pkcs7_unpad
|
train
|
def pkcs7_unpad(data):
    """
    Remove the padding bytes that were added at point of encryption.

    Implementation copied from pyaspora:
    https://github.com/mjnovice/pyaspora/blob/master/pyaspora/diaspora/protocol.py#L209
    """
    # The last element encodes the pad length; str items need ord().
    pad_len = ord(data[-1]) if isinstance(data, str) else data[-1]
    return data[0:-pad_len]
|
python
|
{
"resource": ""
}
|
q6355
|
EncryptedPayload.decrypt
|
train
|
def decrypt(payload, private_key):
    """Decrypt an encrypted JSON payload and return the Magic Envelope document inside."""
    # Unwrap the AES key material with the recipient's RSA private key.
    rsa = PKCS1_v1_5.new(private_key)
    aes_params_raw = rsa.decrypt(b64decode(payload.get("aes_key")), sentinel=None)
    aes_params = json.loads(aes_params_raw.decode("utf-8"))
    # Decrypt the envelope itself with AES-CBC using the unwrapped key/iv.
    aes = AES.new(
        b64decode(aes_params.get("key")),
        AES.MODE_CBC,
        b64decode(aes_params.get("iv")),
    )
    envelope = aes.decrypt(b64decode(payload.get("encrypted_magic_envelope")))
    return etree.fromstring(pkcs7_unpad(envelope))
|
python
|
{
"resource": ""
}
|
q6356
|
EncryptedPayload.encrypt
|
train
|
def encrypt(payload, public_key):
    """
    Encrypt a payload using an encrypted JSON wrapper.

    See: https://diaspora.github.io/diaspora_federation/federation/encryption.html

    :param payload: Payload document as a string.
    :param public_key: Public key of recipient as an RSA object.
    :return: Encrypted JSON wrapper as dict.
    """
    iv, key, encrypter = EncryptedPayload.get_iv_key_encrypter()
    # Wrap the AES key material with the recipient's RSA public key.
    rsa = PKCS1_v1_5.new(public_key)
    wrapped_key = b64encode(rsa.encrypt(EncryptedPayload.get_aes_key_json(iv, key)))
    # Pad to the AES block size, then encrypt the payload itself.
    padded = pkcs7_pad(payload.encode("utf-8"), AES.block_size)
    encrypted_me = b64encode(encrypter.encrypt(padded))
    return {
        "aes_key": wrapped_key.decode("utf-8"),
        "encrypted_magic_envelope": encrypted_me.decode("utf8"),
    }
|
python
|
{
"resource": ""
}
|
q6357
|
ProsaicArgParser.template
|
train
|
def template(self):
    """Returns the template in JSON form, parsing and caching it on first access."""
    if not self._template:
        raw = self.read_template(self.args.tmplname)
        self._template = loads(raw)
    return self._template
|
python
|
{
"resource": ""
}
|
q6358
|
BaseFormatter.override_level_names
|
train
|
def override_level_names(self, mapping):
    """Rename level names.

    :param mapping: Mapping level names to new ones
    :type mapping: dict
    """
    if not isinstance(mapping, dict):
        return
    # Only replace names that already exist; unknown keys are ignored.
    for name in self._level_names:
        if name in mapping:
            self._level_names[name] = mapping[name]
|
python
|
{
"resource": ""
}
|
q6359
|
TextFormatter.override_colors
|
train
|
def override_colors(self, colors):
    """Override default color of elements.

    :param colors: New color value for given elements
    :type colors: dict
    """
    if not isinstance(colors, dict):
        return
    palette = self._color[True]
    # Only known elements can be recolored; unknown keys are ignored.
    palette.update({name: value for name, value in colors.items() if name in palette})
|
python
|
{
"resource": ""
}
|
q6360
|
JsonFormatter.__prepare_record
|
train
|
def __prepare_record(self, record, enabled_fields):
    """Prepare log record with given fields.

    Builds a flat dict of standard LogRecord attributes, then filters and
    optionally renames keys according to *enabled_fields*, which may be a
    single field name, a list of names, or (source_name, output_name) tuples.
    """
    message = record.getMessage()
    # Prepend an optional record.prefix (if present and truthy) to the message.
    if hasattr(record, 'prefix'):
        message = "{}{}".format((str(record.prefix) + ' ') if record.prefix else '', message)
    obj = {
        'name': record.name,
        'asctime': self.formatTime(record, self.datefmt),
        'created': record.created,
        'msecs': record.msecs,
        'relativeCreated': record.relativeCreated,
        'levelno': record.levelno,
        # Level name is mapped through the formatter's override table.
        'levelname': self._level_names[record.levelname],
        'thread': record.thread,
        'threadName': record.threadName,
        'process': record.process,
        'pathname': record.pathname,
        'filename': record.filename,
        'module': record.module,
        'lineno': record.lineno,
        'funcName': record.funcName,
        'message': message,
        'exception': record.exc_info[0].__name__ if record.exc_info else None,
        'stacktrace': record.exc_text,
    }
    # Normalize enabled_fields into a list so a single name also works.
    if not isinstance(enabled_fields, list):
        enabled_fields = [str(enabled_fields)]
    # ef maps source field name -> output field name.
    ef = {}
    for item in enabled_fields:
        if not isinstance(item, (str, tuple)):
            continue  # silently skip unsupported entry types
        if not isinstance(item, tuple):
            ef[item] = item  # plain name: keep as-is
        else:
            ef[item[0]] = item[1]  # tuple: rename source -> alias
    # Emit only the enabled fields, under their (possibly renamed) keys.
    result = {}
    for key, val in obj.items():
        if key in ef:
            result[ef[key]] = val
    return result
|
python
|
{
"resource": ""
}
|
q6361
|
JsonFormatter.__obj2json
|
train
|
def __obj2json(self, obj):
    """Serialize obj to a JSON formatted string.

    This is useful for pretty printing log records in the console.
    """
    formatted = json.dumps(
        obj,
        indent=self._indent,
        sort_keys=self._sort_keys,
    )
    return formatted
|
python
|
{
"resource": ""
}
|
q6362
|
validate_host
|
train
|
def validate_host(cert, name):
    """
    Validates host name against certificate
    @param cert: Certificate returned by host
    @param name: Actual host name used for connection
    @return: Returns true if host name matches certificate
    """
    # First check the subject common name (first CN component only).
    for field, value in cert.get_subject().get_components():
        if field == b'CN':
            if value == name:
                return True
            break
    # Fall back to subjectAltName entries, which usually look like "DNS:hostname".
    wanted = 'DNS:' + name.decode('ascii')
    for idx in range(cert.get_extension_count()):
        extension = cert.get_extension(idx)
        if extension.get_short_name() == b'subjectAltName':
            if str(extension) == wanted:
                return True
    # TODO handle wildcards
    return False
|
python
|
{
"resource": ""
}
|
q6363
|
revert_to_clear
|
train
|
def revert_to_clear(tds_sock):
    """
    Reverts connection back to non-encrypted mode
    Used when client sent ENCRYPT_OFF flag
    @param tds_sock:
    @return:
    """
    encrypted = tds_sock.conn.sock
    plain = encrypted._transport
    # Tear down the TLS layer, then re-point connection, writer and reader
    # at the underlying raw transport.
    encrypted.shutdown()
    tds_sock.conn.sock = plain
    tds_sock._writer._transport = plain
    tds_sock._reader._transport = plain
|
python
|
{
"resource": ""
}
|
q6364
|
tds7_crypt_pass
|
train
|
def tds7_crypt_pass(password):
    """ Mangle password according to tds rules

    :param password: Password str
    :returns: Byte-string with encoded password
    """
    buf = bytearray(ucs2_codec.encode(password)[0])
    # Swap the two nibbles of each byte, then XOR with the magic 0xA5.
    for idx in range(len(buf)):
        swapped = ((buf[idx] << 4) & 0xff) | (buf[idx] >> 4)
        buf[idx] = swapped ^ 0xA5
    return buf
|
python
|
{
"resource": ""
}
|
q6365
|
_TdsReader.unpack
|
train
|
def unpack(self, struc):
    """ Unpacks given structure from stream

    :param struc: A struct.Struct instance
    :returns: Result of unpacking
    """
    # readall_fast returns (buffer, offset) which unpack_from consumes directly.
    return struc.unpack_from(*readall_fast(self, struc.size))
|
python
|
{
"resource": ""
}
|
q6366
|
_TdsReader.read_ucs2
|
train
|
def read_ucs2(self, num_chars):
    """ Reads num_chars UCS2 string from the stream """
    # Each UCS-2 character occupies two bytes on the wire.
    raw = readall(self, 2 * num_chars)
    text, _ = ucs2_codec.decode(raw)
    return text
|
python
|
{
"resource": ""
}
|
q6367
|
_TdsReader._read_packet
|
train
|
def _read_packet(self):
    """ Reads next TDS packet from the underlying transport

    If timeout is happened during reading of packet's header will
    cancel current request.

    Can only be called when transport's read pointer is at the begining
    of the packet.
    """
    try:
        pos = 0
        while pos < _header.size:
            # Bug fix: the slice end must stay at _header.size. The previous
            # code sliced [pos:_header.size - pos], which shrank the window
            # from the end after a short read, so remaining header bytes
            # could never be received.
            received = self._transport.recv_into(self._bufview[pos:_header.size])
            if received == 0:
                raise tds_base.ClosedConnectionError()
            pos += received
    except tds_base.TimeoutError:
        # Header read timed out: cancel the in-flight request on the server.
        self._session.put_cancel()
        raise
    self._pos = _header.size
    # Header layout: type, status, size, spid, packet number.
    self._type, self._status, self._size, self._session._spid, _ = _header.unpack_from(self._bufview, 0)
    self._have = pos
    # Read the rest of the packet payload (self._size includes the header).
    while pos < self._size:
        received = self._transport.recv_into(self._bufview[pos:], self._size - pos)
        if received == 0:
            raise tds_base.ClosedConnectionError()
        pos += received
        self._have += received
|
python
|
{
"resource": ""
}
|
q6368
|
_TdsReader.read_whole_packet
|
train
|
def read_whole_packet(self):
    """ Reads single packet and returns bytes payload of the packet

    Can only be called when transport's read pointer is at the beginning
    of the packet.
    """
    self._read_packet()
    # Packet size includes the header; return only the payload bytes.
    payload_size = self._size - _header.size
    return readall(self, payload_size)
|
python
|
{
"resource": ""
}
|
q6369
|
_TdsWriter.write
|
train
|
def write(self, data):
    """ Writes given bytes buffer into the stream

    Function returns only when entire buffer is written
    """
    written = 0
    total = len(data)
    while written < total:
        room = len(self._buf) - self._pos
        if room <= 0:
            # Internal buffer is full: emit a non-final packet and retry.
            self._write_packet(final=False)
            continue
        chunk = min(room, total - written)
        self._buf[self._pos:self._pos + chunk] = data[written:written + chunk]
        self._pos += chunk
        written += chunk
|
python
|
{
"resource": ""
}
|
q6370
|
_TdsWriter.write_string
|
train
|
def write_string(self, s, codec):
    """ Write string encoding it with codec into stream """
    # Encode and write in bufsize-character slices to bound memory use.
    pos = 0
    while pos < len(s):
        piece = s[pos:pos + self.bufsize]
        encoded, consumed = codec.encode(piece)
        assert consumed == len(piece)
        self.write(encoded)
        pos += self.bufsize
|
python
|
{
"resource": ""
}
|
q6371
|
_TdsWriter._write_packet
|
train
|
def _write_packet(self, final):
    """ Writes single TDS packet into underlying transport.

    Data for the packet is taken from internal buffer.

    :param final: True means this is the final packet in substream.
    """
    # Status byte 1 marks the last packet of the substream.
    status = int(bool(final))
    packet_len = self._pos
    _header.pack_into(self._buf, 0, self._type, status, packet_len, 0, self._packet_no)
    self._packet_no = (self._packet_no + 1) % 256
    self._transport.sendall(self._buf[:packet_len])
    # Reset the write position to just past the 8-byte packet header.
    self._pos = 8
|
python
|
{
"resource": ""
}
|
q6372
|
_TdsSession.raise_db_exception
|
train
|
def raise_db_exception(self):
    """ Raises exception from last server message

    This function will skip messages: The statement has been terminated

    :raises tds_base.Error: when the server sent no usable error message.
    """
    if not self.messages:
        raise tds_base.Error("Request failed, server didn't send error message")
    # Drop trailing "the statement has been terminated" (msgno 3621) notices
    # so the raised exception reflects the real error that preceded them.
    while self.messages and self.messages[-1]['msgno'] == 3621:
        self.messages = self.messages[:-1]
    # Bug fix: if *every* message was 3621 the old loop indexed an empty
    # list and raised IndexError; raise a proper error instead.
    if not self.messages:
        raise tds_base.Error("Request failed, server didn't send error message")
    msg = self.messages[-1]
    error_msg = ' '.join(m['message'] for m in self.messages)
    ex = _create_exception_by_message(msg, error_msg)
    raise ex
|
python
|
{
"resource": ""
}
|
q6373
|
_TdsSession.tds7_process_result
|
train
|
def tds7_process_result(self):
    """ Reads and processes COLMETADATA stream

    This stream contains a list of returned columns.
    Stream format link: http://msdn.microsoft.com/en-us/library/dd357363.aspx

    Resets per-resultset session state, populates ``self.res_info`` with a
    new ``_Results`` instance and returns it; returns ``None`` for the
    dummy -1 column count.
    """
    self.log_response_message('got COLMETADATA')
    r = self._reader

    # read number of columns and allocate the columns structure
    num_cols = r.get_smallint()

    # This can be a DUMMY results token from a cursor fetch
    if num_cols == -1:
        return

    # Reset per-resultset state before reading the new metadata.
    self.param_info = None
    self.has_status = False
    self.ret_status = None
    self.skipped_to_status = False
    self.rows_affected = tds_base.TDS_NO_COUNT
    self.more_rows = True
    self.row = [None] * num_cols
    self.res_info = info = _Results()

    #
    # loop through the columns populating COLINFO struct from
    # server response
    #
    header_tuple = []
    for col in range(num_cols):
        curcol = tds_base.Column()
        info.columns.append(curcol)
        self.get_type_info(curcol)

        # Column name: one length byte followed by UCS-2 characters.
        curcol.column_name = r.read_ucs2(r.get_byte())

        precision = curcol.serializer.precision
        scale = curcol.serializer.scale
        size = curcol.serializer.size

        # DB-API style 7-tuple entry for this column's description.
        header_tuple.append(
            (curcol.column_name,
             curcol.serializer.get_typeid(),
             None,
             size,
             precision,
             scale,
             curcol.flags & tds_base.Column.fNullable))
    info.description = tuple(header_tuple)
    return info
|
python
|
{
"resource": ""
}
|
q6374
|
_TdsSession.process_param
|
train
|
def process_param(self):
    """ Reads and processes RETURNVALUE stream.

    This stream is used to send OUTPUT parameters from RPC to client.
    Stream format url: http://msdn.microsoft.com/en-us/library/dd303881.aspx

    Stores the decoded parameter in ``self.output_params`` keyed by its
    ordinal and advances ``self.return_value_index``.
    """
    self.log_response_message('got RETURNVALUE message')
    r = self._reader
    if tds_base.IS_TDS72_PLUS(self):
        # TDS 7.2+ sends the parameter ordinal explicitly.
        ordinal = r.get_usmallint()
    else:
        # Older TDS: map sequentially onto the by-ref params we sent out.
        r.get_usmallint()  # ignore size
        ordinal = self._out_params_indexes[self.return_value_index]
    name = r.read_ucs2(r.get_byte())
    r.get_byte()  # 1 - OUTPUT of sp, 2 - result of udf
    param = tds_base.Column()
    param.column_name = name
    self.get_type_info(param)
    param.value = param.serializer.read(r)
    self.output_params[ordinal] = param
    self.return_value_index += 1
|
python
|
{
"resource": ""
}
|
q6375
|
_TdsSession.process_cancel
|
train
|
def process_cancel(self):
    """
    Process the incoming token stream until it finds
    an end token DONE with the cancel flag set.
    At that point the connection should be ready to handle a new query.
    In case when no cancel request is pending this function does nothing.
    """
    self.log_response_message('got CANCEL message')
    # Nothing pending -> nothing to do.
    if not self.in_cancel:
        return
    # Drain tokens until a token handler clears the in_cancel flag.
    while self.in_cancel:
        self.process_token(self.get_token_id())
|
python
|
{
"resource": ""
}
|
q6376
|
_TdsSession.process_row
|
train
|
def process_row(self):
    """ Reads and handles ROW stream.

    This stream contains list of values of one returned row.
    Stream format url: http://msdn.microsoft.com/en-us/library/dd357254.aspx
    """
    self.log_response_message("got ROW message")
    reader = self._reader
    results = self.res_info
    results.row_count += 1
    # Every column value is present in a plain ROW token; decode in order.
    for idx, column in enumerate(results.columns):
        value = column.serializer.read(reader)
        column.value = value
        self.row[idx] = value
|
python
|
{
"resource": ""
}
|
q6377
|
_TdsSession.process_nbcrow
|
train
|
def process_nbcrow(self):
    """ Reads and handles NBCROW stream.

    This stream contains list of values of one returned row in a compressed way,
    introduced in TDS 7.3.B
    Stream format url: http://msdn.microsoft.com/en-us/library/dd304783.aspx
    """
    self.log_response_message("got NBCROW message")
    r = self._reader
    info = self.res_info
    if not info:
        self.bad_stream('got row without info')
    assert len(info.columns) > 0
    info.row_count += 1
    # Null bitmap: one bit per column; a set bit means NULL and the value
    # is not present in the stream at all.
    null_bitmap = readall(r, (len(info.columns) + 7) // 8)
    for idx, column in enumerate(info.columns):
        is_null = tds_base.my_ord(null_bitmap[idx // 8]) & (1 << (idx % 8))
        self.row[idx] = None if is_null else column.serializer.read(r)
|
python
|
{
"resource": ""
}
|
q6378
|
_TdsSession.process_auth
|
train
|
def process_auth(self):
    """ Reads and processes SSPI stream.

    Stream info: http://msdn.microsoft.com/en-us/library/dd302844.aspx
    """
    reader = self._reader
    writer = self._writer
    pdu_size = reader.get_smallint()
    if not self.authentication:
        raise tds_base.Error('Got unexpected token')
    # Feed the server challenge to the auth provider; send its reply, if any.
    response = self.authentication.handle_next(readall(reader, pdu_size))
    if response:
        writer.write(response)
        writer.flush()
|
python
|
{
"resource": ""
}
|
q6379
|
_TdsSession.set_state
|
train
|
def set_state(self, state):
    """ Switches state of the TDS session.

    It also does state transitions checks.

    :param state: New state, one of TDS_PENDING/TDS_READING/TDS_IDLE/TDS_DEAD/TDS_QUERING
    :returns: The session state after the transition.
    :raises tds_base.InterfaceError: on an invalid state transition.
    """
    prior_state = self.state
    if state == prior_state:
        return state
    if state == tds_base.TDS_PENDING:
        # PENDING is only reachable after a request was read or written.
        if prior_state in (tds_base.TDS_READING, tds_base.TDS_QUERYING):
            self.state = tds_base.TDS_PENDING
        else:
            # Fixed typo in the error message ("chage" -> "change").
            raise tds_base.InterfaceError('logic error: cannot change query state from {0} to {1}'.
                                          format(tds_base.state_names[prior_state], tds_base.state_names[state]))
    elif state == tds_base.TDS_READING:
        # transition to READING are valid only from PENDING
        if self.state != tds_base.TDS_PENDING:
            raise tds_base.InterfaceError('logic error: cannot change query state from {0} to {1}'.
                                          format(tds_base.state_names[prior_state], tds_base.state_names[state]))
        else:
            self.state = state
    elif state == tds_base.TDS_IDLE:
        if prior_state == tds_base.TDS_DEAD:
            raise tds_base.InterfaceError('logic error: cannot change query state from {0} to {1}'.
                                          format(tds_base.state_names[prior_state], tds_base.state_names[state]))
        self.state = state
    elif state == tds_base.TDS_DEAD:
        # Connection loss may happen from any state.
        self.state = state
    elif state == tds_base.TDS_QUERYING:
        if self.state == tds_base.TDS_DEAD:
            raise tds_base.InterfaceError('logic error: cannot change query state from {0} to {1}'.
                                          format(tds_base.state_names[prior_state], tds_base.state_names[state]))
        elif self.state != tds_base.TDS_IDLE:
            raise tds_base.InterfaceError('logic error: cannot change query state from {0} to {1}'.
                                          format(tds_base.state_names[prior_state], tds_base.state_names[state]))
        else:
            # Starting a new request: reset per-request counters.
            self.rows_affected = tds_base.TDS_NO_COUNT
            self.internal_sp_called = 0
            self.state = state
    else:
        assert False
    return self.state
|
python
|
{
"resource": ""
}
|
q6380
|
_TdsSession.querying_context
|
train
|
def querying_context(self, packet_type):
    """ Context manager for querying.

    Sets state to TDS_QUERYING, and reverts it to TDS_IDLE if exception happens inside managed block,
    and to TDS_PENDING if managed block succeeds and flushes buffer.

    :param packet_type: TDS packet type used to begin the outgoing packet.
    """
    if self.set_state(tds_base.TDS_QUERYING) != tds_base.TDS_QUERYING:
        raise tds_base.Error("Couldn't switch to state")
    self._writer.begin_packet(packet_type)
    try:
        yield
    except:
        # Bare except on purpose: revert session state on *any* error
        # (including BaseException) before re-raising, unless the
        # connection is already dead.
        if self.state != tds_base.TDS_DEAD:
            self.set_state(tds_base.TDS_IDLE)
        raise
    else:
        # Request body written successfully: mark response pending and
        # flush the final packet to the server.
        self.set_state(tds_base.TDS_PENDING)
        self._writer.flush()
|
python
|
{
"resource": ""
}
|
q6381
|
_TdsSession.cancel_if_pending
|
train
|
def cancel_if_pending(self):
    """ Cancels current pending request.

    Does nothing if no request is pending, otherwise sends cancel request,
    and waits for response.
    """
    if self.state != tds_base.TDS_IDLE:
        # Send the cancel packet only once per pending request.
        if not self.in_cancel:
            self.put_cancel()
        self.process_cancel()
|
python
|
{
"resource": ""
}
|
q6382
|
_TdsSession.submit_rpc
|
train
|
def submit_rpc(self, rpc_name, params, flags=0):
    """ Sends an RPC request.

    This call will transition session into pending state.
    If some operation is currently pending on the session, it will be
    cancelled before sending this request.

    Spec: http://msdn.microsoft.com/en-us/library/dd357576.aspx

    :param rpc_name: Name of the RPC to call, can be an instance of :class:`InternalProc`
    :param params: Stored proc parameters, should be a list of :class:`Column` instances.
    :param flags: See spec for possible flags.
    """
    logger.info('Sending RPC %s flags=%d', rpc_name, flags)
    self.messages = []
    self.output_params = {}
    self.cancel_if_pending()
    self.res_info = None
    w = self._writer
    with self.querying_context(tds_base.PacketType.RPC):
        if tds_base.IS_TDS72_PLUS(self):
            self._start_query()
        if tds_base.IS_TDS71_PLUS(self) and isinstance(rpc_name, tds_base.InternalProc):
            # Well-known internal procs are sent by numeric id
            # (name length of -1 signals the id form).
            w.put_smallint(-1)
            w.put_smallint(rpc_name.proc_id)
        else:
            if isinstance(rpc_name, tds_base.InternalProc):
                rpc_name = rpc_name.name
            w.put_smallint(len(rpc_name))
            w.write_ucs2(rpc_name)
        #
        # TODO support flags
        # bit 0 (1 as flag) in TDS7/TDS5 is "recompile"
        # bit 1 (2 as flag) in TDS7+ is "no metadata" bit this will prevent sending of column infos
        #
        w.put_usmallint(flags)
        self._out_params_indexes = []
        for i, param in enumerate(params):
            if param.flags & tds_base.fByRefValue:
                # Remember positions of output params so RETURNVALUE tokens
                # can be mapped back later (see process_param).
                self._out_params_indexes.append(i)
            w.put_byte(len(param.column_name))
            w.write_ucs2(param.column_name)
            #
            # TODO support other flags (use defaul null/no metadata)
            # bit 1 (2 as flag) in TDS7+ is "default value" bit
            # (what's the meaning of "default value" ?)
            #
            w.put_byte(param.flags)
            # TYPE_INFO structure: https://msdn.microsoft.com/en-us/library/dd358284.aspx
            serializer = param.choose_serializer(
                type_factory=self._tds.type_factory,
                collation=self._tds.collation or raw_collation
            )
            type_id = serializer.type
            w.put_byte(type_id)
            serializer.write_info(w)
            serializer.write(w, param.value)
|
python
|
{
"resource": ""
}
|
q6383
|
_TdsSession.submit_plain_query
|
train
|
def submit_plain_query(self, operation):
    """ Sends a plain query to server.

    This call will transition session into pending state.
    If some operation is currently pending on the session, it will be
    cancelled before sending this request.

    Spec: http://msdn.microsoft.com/en-us/library/dd358575.aspx

    :param operation: A string representing sql statement.
    """
    self.messages = []
    self.cancel_if_pending()
    self.res_info = None
    # Only log a prefix of the statement to keep log lines bounded.
    logger.info("Sending query %s", operation[:100])
    writer = self._writer
    with self.querying_context(tds_base.PacketType.QUERY):
        if tds_base.IS_TDS72_PLUS(self):
            self._start_query()
        writer.write_ucs2(operation)
|
python
|
{
"resource": ""
}
|
q6384
|
_TdsSession.submit_bulk
|
train
|
def submit_bulk(self, metadata, rows):
    """ Sends insert bulk command.

    Spec: http://msdn.microsoft.com/en-us/library/dd358082.aspx

    :param metadata: A list of :class:`Column` instances.
    :param rows: A collection of rows, each row is a collection of values.
    :return:
    """
    logger.info('Sending INSERT BULK')
    num_cols = len(metadata)
    w = self._writer
    serializers = []
    with self.querying_context(tds_base.PacketType.BULK):
        # COLMETADATA token describing the target columns.
        w.put_byte(tds_base.TDS7_RESULT_TOKEN)
        w.put_usmallint(num_cols)
        for col in metadata:
            if tds_base.IS_TDS72_PLUS(self):
                w.put_uint(col.column_usertype)
            else:
                w.put_usmallint(col.column_usertype)
            w.put_usmallint(col.flags)
            serializer = col.choose_serializer(
                type_factory=self._tds.type_factory,
                collation=self._tds.collation,
            )
            type_id = serializer.type
            w.put_byte(type_id)
            serializers.append(serializer)
            serializer.write_info(w)
            w.put_byte(len(col.column_name))
            w.write_ucs2(col.column_name)
        # One ROW token per row; values are serialized in column order.
        for row in rows:
            w.put_byte(tds_base.TDS_ROW_TOKEN)
            for i, col in enumerate(metadata):
                serializers[i].write(w, row[i])
        # Final DONE token terminating the bulk stream.
        # https://msdn.microsoft.com/en-us/library/dd340421.aspx
        w.put_byte(tds_base.TDS_DONE_TOKEN)
        w.put_usmallint(tds_base.TDS_DONE_FINAL)
        w.put_usmallint(0)  # curcmd
        # row count
        if tds_base.IS_TDS72_PLUS(self):
            w.put_int8(0)
        else:
            w.put_int(0)
|
python
|
{
"resource": ""
}
|
q6385
|
_TdsSession.put_cancel
|
train
|
def put_cancel(self):
    """ Sends a cancel request to the server.

    Switches connection to IN_CANCEL state.
    """
    logger.info('Sending CANCEL')
    writer = self._writer
    writer.begin_packet(tds_base.PacketType.CANCEL)
    writer.flush()
    self.in_cancel = 1
|
python
|
{
"resource": ""
}
|
q6386
|
iterdecode
|
train
|
def iterdecode(iterable, codec):
    """ Uses an incremental decoder to decode each chunk in iterable.
    This function is a generator.

    :param iterable: Iterable object which yields raw data to be decoded
    :param codec: An instance of codec
    """
    decode = codec.incrementaldecoder().decode
    for piece in iterable:
        yield decode(piece)
    # Flush the decoder to emit anything still buffered at end of stream.
    yield decode(b'', True)
|
python
|
{
"resource": ""
}
|
q6387
|
skipall
|
train
|
def skipall(stm, size):
    """ Skips exactly size bytes in stm

    If EOF is reached before size bytes are skipped
    will raise :class:`ClosedConnectionError`

    :param stm: Stream to skip bytes in, should have read method
                this read method can return less than requested
                number of bytes.
    :param size: Number of bytes to skip.
    """
    first = stm.recv(size)
    if len(first) == size:
        # Common case: everything skipped in a single read.
        return
    if not first:
        raise ClosedConnectionError()
    remaining = size - len(first)
    while remaining:
        chunk = stm.recv(remaining)
        if not chunk:
            raise ClosedConnectionError()
        remaining -= len(chunk)
|
python
|
{
"resource": ""
}
|
q6388
|
read_chunks
|
train
|
def read_chunks(stm, size):
    """ Reads exactly size bytes from stm and produces chunks

    May call stm.read multiple times until required
    number of bytes is read.

    If EOF is reached before size bytes are read
    will raise :class:`ClosedConnectionError`

    :param stm: Stream to read bytes from, should have read method,
                this read method can return less than requested
                number of bytes.
    :param size: Number of bytes to read.
    """
    if size == 0:
        # Preserve the contract of always yielding at least one chunk.
        yield b''
        return
    remaining = size
    while remaining:
        chunk = stm.recv(remaining)
        if not chunk:
            raise ClosedConnectionError()
        yield chunk
        remaining -= len(chunk)
|
python
|
{
"resource": ""
}
|
q6389
|
readall_fast
|
train
|
def readall_fast(stm, size):
    """
    Slightly faster version of readall, it reads no more than two chunks.
    Meaning that it can only be used to read small data that doesn't span
    more that two packets.

    :param stm: Stream to read from, should have read method.
    :param size: Number of bytes to read.
    :return:
    """
    buf, offset = stm.read_fast(size)
    if len(buf) - offset >= size:
        # Fast path: everything is already in the current packet.
        return buf, offset
    # Slow path: stitch the tail of this packet with the start of the next.
    head = buf[offset:]
    return head + stm.recv(size - len(head)), 0
|
python
|
{
"resource": ""
}
|
q6390
|
_decode_num
|
train
|
def _decode_num(buf):
    """ Decodes little-endian unsigned integer from buffer

    Buffer can be of any size; an empty buffer decodes to 0.
    """
    # Replaced the manual reduce()/my_ord loop with the stdlib equivalent;
    # the file is Python 3 only (it uses `raise ... from exc` elsewhere),
    # so indexing bytes already yields ints and int.from_bytes applies.
    return int.from_bytes(buf, 'little')
|
python
|
{
"resource": ""
}
|
q6391
|
PlpReader.chunks
|
train
|
def chunks(self):
    """ Generates chunks from stream, each chunk is an instace of bytes.
    """
    if self.is_null():
        return
    consumed = 0
    while True:
        chunk_len = self._rdr.get_uint()
        if chunk_len == 0:
            # A zero-length chunk terminates the PLP stream; validate the
            # running total against the declared size when it is known.
            if not self.is_unknown_len() and consumed != self._size:
                msg = "PLP actual length (%d) doesn't match reported length (%d)" % (consumed, self._size)
                self._rdr.session.bad_stream(msg)
            return
        consumed += chunk_len
        remaining = chunk_len
        while remaining:
            piece = self._rdr.recv(remaining)
            yield piece
            remaining -= len(piece)
|
python
|
{
"resource": ""
}
|
q6392
|
Date.from_pydate
|
train
|
def from_pydate(cls, pydate):
    """
    Creates sql date object from Python date object.
    @param pydate: Python date
    @return: sql date
    """
    # Express the date as a day offset from the datetime2 base date.
    midnight = datetime.datetime.combine(pydate, datetime.time(0, 0, 0))
    return cls(days=(midnight - _datetime2_base_date).days)
|
python
|
{
"resource": ""
}
|
q6393
|
Time.to_pytime
|
train
|
def to_pytime(self):
    """
    Converts sql time object into Python's time object
    this will truncate nanoseconds to microseconds
    @return: naive time
    """
    # Split the nanosecond count into h/m/s plus leftover nanoseconds.
    total_seconds, leftover_ns = divmod(self._nsec, 10 ** 9)
    total_minutes, seconds = divmod(total_seconds, 60)
    hours, minutes = divmod(total_minutes, 60)
    # Python's time only holds microseconds, so nanoseconds are truncated.
    return datetime.time(hours, minutes, seconds, leftover_ns // 1000)
|
python
|
{
"resource": ""
}
|
q6394
|
Time.from_pytime
|
train
|
def from_pytime(cls, pytime):
    """
    Converts Python time object to sql time object
    ignoring timezone
    @param pytime: Python time object
    @return: sql time object
    """
    total_seconds = (pytime.hour * 60 + pytime.minute) * 60 + pytime.second
    # Seconds to nanoseconds, plus the microsecond part scaled to ns.
    return cls(nsec=total_seconds * 10 ** 9 + pytime.microsecond * 1000)
|
python
|
{
"resource": ""
}
|
q6395
|
DateTime2.to_pydatetime
|
train
|
def to_pydatetime(self):
    """
    Converts datetime2 object into Python's datetime.datetime object
    @return: naive datetime.datetime
    """
    date_part = self._date.to_pydate()
    time_part = self._time.to_pytime()
    return datetime.datetime.combine(date_part, time_part)
|
python
|
{
"resource": ""
}
|
q6396
|
DateTime2.from_pydatetime
|
train
|
def from_pydatetime(cls, pydatetime):
    """
    Creates sql datetime2 object from Python datetime object
    ignoring timezone
    @param pydatetime: Python datetime object
    @return: sql datetime2 object
    """
    # Bug fix: date() and time() are *methods* on datetime instances and
    # must be called. Previously the bound methods themselves were passed,
    # making Date.from_pydate / Time.from_pytime fail (combine() rejects a
    # method object; a method has no .hour attribute).
    return cls(date=Date.from_pydate(pydatetime.date()),
               time=Time.from_pytime(pydatetime.time()))
|
python
|
{
"resource": ""
}
|
q6397
|
DateTimeOffset.to_pydatetime
|
train
|
def to_pydatetime(self):
    """
    Converts datetimeoffset object into Python's datetime.datetime object
    @return: time zone aware datetime.datetime
    """
    from .tz import FixedOffsetTimezone
    naive = datetime.datetime.combine(self._date.to_pydate(), self._time.to_pytime())
    # Interpret the stored value as UTC, then shift into the stored offset.
    return naive.replace(tzinfo=_utc).astimezone(FixedOffsetTimezone(self._offset))
|
python
|
{
"resource": ""
}
|
q6398
|
TableSerializer.write_info
|
train
|
def write_info(self, w):
    """
    Writes TVP_TYPENAME structure
    spec: https://msdn.microsoft.com/en-us/library/dd302994.aspx
    @param w: TdsWriter
    @return:
    """
    # Per spec: empty db name, then the type's schema and name.
    for part in ("", self._table_type.typ_schema, self._table_type.typ_name):
        w.write_b_varchar(part)
|
python
|
{
"resource": ""
}
|
q6399
|
TableSerializer.write
|
train
|
def write(self, w, val):
    """
    Writes remaining part of TVP_TYPE_INFO structure, resuming from TVP_COLMETADATA

    specs:
    https://msdn.microsoft.com/en-us/library/dd302994.aspx
    https://msdn.microsoft.com/en-us/library/dd305261.aspx
    https://msdn.microsoft.com/en-us/library/dd303230.aspx

    @param w: TdsWriter
    @param val: TableValuedParam or None
    @return:
    """
    if val.is_null():
        # NULL TVP is encoded as a single token; no metadata or rows follow.
        w.put_usmallint(tds_base.TVP_NULL_TOKEN)
    else:
        # TVP_COLMETADATA: column count followed by per-column metadata.
        columns = self._table_type.columns
        w.put_usmallint(len(columns))
        for i, column in enumerate(columns):
            w.put_uint(column.column_usertype)
            w.put_usmallint(column.flags)
            # TYPE_INFO structure: https://msdn.microsoft.com/en-us/library/dd358284.aspx
            serializer = self._columns_serializers[i]
            type_id = serializer.type
            w.put_byte(type_id)
            serializer.write_info(w)
            w.write_b_varchar('')  # ColName, must be empty in TVP according to spec
    # here can optionally send TVP_ORDER_UNIQUE and TVP_COLUMN_ORDERING
    # https://msdn.microsoft.com/en-us/library/dd305261.aspx
    # terminating optional metadata
    w.put_byte(tds_base.TVP_END_TOKEN)
    # now sending rows using TVP_ROW
    # https://msdn.microsoft.com/en-us/library/dd305261.aspx
    if val.rows:
        for row in val.rows:
            w.put_byte(tds_base.TVP_ROW_TOKEN)
            for i, col in enumerate(self._table_type.columns):
                # Columns flagged as "default" are omitted from the row data.
                if not col.flags & tds_base.TVP_COLUMN_DEFAULT_FLAG:
                    self._columns_serializers[i].write(w, row[i])
    # terminating rows
    w.put_byte(tds_base.TVP_END_TOKEN)
|
python
|
{
"resource": ""
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.