| code (string) | signature (string) | docstring (string) | loss_without_docstring (float64) | loss_with_docstring (float64) | factor (float64) |
|---|---|---|---|---|---|
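Judging from the numbers, the factor column looks like the ratio loss_without_docstring / loss_with_docstring; a minimal sketch checking this against the first row (this interpretation is an assumption, not a documented definition):

# Hypothetical check of the factor column against the first row's values
loss_without_docstring = 19.62063
loss_with_docstring = 13.7043
print(round(loss_without_docstring / loss_with_docstring, 6))  # 1.431713, matching the factor column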
return cls(Lnk.TOKENS, tuple(map(int, tokens)))
|
def tokens(cls, tokens)
|
Create a Lnk object for a token range.
Args:
tokens: a list of token identifiers
| 19.62063
| 13.7043
| 1.431713
|
cfrom = -1
try:
if self.lnk.type == Lnk.CHARSPAN:
cfrom = self.lnk.data[0]
except AttributeError:
pass # use default cfrom of -1
return cfrom
|
def cfrom(self)
|
The initial character position in the surface string.
Defaults to -1 if there is no valid cfrom value.
| 8.111328
| 6.787434
| 1.195051
|
cto = -1
try:
if self.lnk.type == Lnk.CHARSPAN:
cto = self.lnk.data[1]
except AttributeError:
pass # use default cto of -1
return cto
|
def cto(self)
|
The final character position in the surface string.
Defaults to -1 if there is no valid cto value.
| 8.579534
| 6.662775
| 1.287682
|
lemma, pos, sense, _ = split_pred_string(predstr)
return cls(Pred.SURFACE, lemma, pos, sense, predstr)
|
def surface(cls, predstr)
|
Instantiate a Pred from its quoted string representation.
| 7.150123
| 6.529386
| 1.095068
|
lemma, pos, sense, _ = split_pred_string(predstr)
return cls(Pred.ABSTRACT, lemma, pos, sense, predstr)
|
def abstract(cls, predstr)
|
Instantiate a Pred from its symbol string.
| 6.885175
| 6.456147
| 1.066453
|
if predstr.strip('"').lstrip("'").startswith('_'):
return cls.surface(predstr)
else:
return cls.abstract(predstr)
|
def surface_or_abstract(cls, predstr)
|
Instantiate a Pred from either its surface or abstract symbol.
| 4.398977
| 4.063365
| 1.082595
|
string_tokens = [lemma]
if pos is not None:
string_tokens.append(pos)
if sense is not None:
sense = str(sense)
string_tokens.append(sense)
predstr = '_'.join([''] + string_tokens + ['rel'])
return cls(Pred.REALPRED, lemma, pos, sense, predstr)
|
def realpred(cls, lemma, pos, sense=None)
|
Instantiate a Pred from its components.
| 3.685817
| 3.614161
| 1.019826
|
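A tiny standalone sketch of the predicate string that realpred() assembles, reusing only the join logic shown above (the example lemma/pos/sense values are made up):

# Reproduce the join from realpred() for lemma='dog', pos='n', sense=1
string_tokens = ['dog', 'n', str(1)]
print('_'.join([''] + string_tokens + ['rel']))  # _dog_n_1_rel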
d = dict(self.sortinfo)
if CVARSORT in d:
del d[CVARSORT]
return d
|
def properties(self)
|
Morphosemantic property mapping.
Unlike :attr:`sortinfo`, this does not include `cvarsort`.
| 13.457416
| 4.541759
| 2.963041
|
if isinstance(self._fields, (tuple, list)): # tuples & lists > x,y,z
self.get_params["fields"] = ",".join([str(_) for _ in self._fields])
elif isinstance(self._fields, str):
self.get_params["fields"] = self._fields
|
def update_get_params(self)
|
Update HTTP GET params with the fields the user wants to fetch.
| 3.907832
| 3.403267
| 1.148259
|
is_success, meta_data = AtlasRequest(
url_path=self.API_META_URL.format(self.id),
key=self.api_key,
server=self.server,
verify=self.verify,
user_agent=self._user_agent
).get(**self.get_params)
self.meta_data = meta_data
if not is_success:
return False
return True
|
def _fetch_meta_data(self)
|
Makes an API call to fetch meta data for the given probe and stores the raw data.
| 4.626628
| 4.147817
| 1.115437
|
if self.id is None:
self.id = self.meta_data.get("id")
self.is_anchor = self.meta_data.get("is_anchor")
self.country_code = self.meta_data.get("country_code")
self.description = self.meta_data.get("description")
self.is_public = self.meta_data.get("is_public")
self.asn_v4 = self.meta_data.get("asn_v4")
self.asn_v6 = self.meta_data.get("asn_v6")
self.address_v4 = self.meta_data.get("address_v4")
self.address_v6 = self.meta_data.get("address_v6")
self.prefix_v4 = self.meta_data.get("prefix_v4")
self.prefix_v6 = self.meta_data.get("prefix_v6")
self.geometry = self.meta_data.get("geometry")
self.tags = self.meta_data.get("tags")
self.status = self.meta_data.get("status", {}).get("name")
|
def _populate_data(self)
|
Assign some of the probe's raw meta data from the API response to instance properties.
| 1.725891
| 1.631167
| 1.058071
|
if self.id is None:
self.id = self.meta_data.get("id")
self.stop_time = None
self.creation_time = None
self.start_time = None
self.populate_times()
self.protocol = self.meta_data.get("af")
self.target_ip = self.meta_data.get("target_ip")
self.target_asn = self.meta_data.get("target_asn")
self.target = self.meta_data.get("target")
self.description = self.meta_data.get("description")
self.is_oneoff = self.meta_data.get("is_oneoff")
self.is_public = self.meta_data.get("is_public")
self.interval = self.meta_data.get("interval")
self.resolve_on_probe = self.meta_data.get("resolve_on_probe")
self.status_id = self.meta_data.get("status", {}).get("id")
self.status = self.meta_data.get("status", {}).get("name")
self.type = self.get_type()
self.result_url = self.meta_data.get("result")
|
def _populate_data(self)
|
Assign some of the measurement's raw meta data from the API response to instance properties.
| 2.405807
| 2.285454
| 1.052661
|
mtype = None
if "type" not in self.meta_data:
return mtype
mtype = self.meta_data["type"]
if isinstance(mtype, dict):
mtype = self.meta_data.get("type", {}).get("name", "").upper()
elif isinstance(mtype, str):
mtype = mtype
return mtype
|
def get_type(self)
|
Getting type of measurement keeping backwards compatibility for
v2 API output changes.
| 3.088571
| 2.839467
| 1.087729
|
stop_time = self.meta_data.get("stop_time")
if stop_time:
stop_naive = datetime.utcfromtimestamp(stop_time)
self.stop_time = stop_naive.replace(tzinfo=tzutc())
creation_time = self.meta_data.get("creation_time")
if creation_time:
creation_naive = datetime.utcfromtimestamp(creation_time)
self.creation_time = creation_naive.replace(tzinfo=tzutc())
start_time = self.meta_data.get("start_time")
if start_time:
start_naive = datetime.utcfromtimestamp(start_time)
self.start_time = start_naive.replace(tzinfo=tzutc())
|
def populate_times(self)
|
Populates the various meta data times that come with a measurement, if
they are present.
| 1.654524
| 1.578258
| 1.048323
|
if value not in self.types_available:
log = "Sources field 'type' should be in one of %s" % (
self.types_available
)
raise MalFormattedSource(log)
self._type = value
|
def set_type(self, value)
|
Setter for type attribute
| 8.80267
| 8.63006
| 1.020001
|
log = (
'Sources fields "tags" should be a dict in the format '
'{"include": [ "tag1", "tag2", "tagN" ],'
'"exclude": [ "tag1", "tag2", "tagN" ] }'
)
if not isinstance(value, dict):
raise MalFormattedSource(log)
if not set(value.keys()).issubset(set(["include", "exclude"])):
raise MalFormattedSource(log)
for tag_list in value.values():
if not isinstance(tag_list, list):
raise MalFormattedSource(log)
if [tag for tag in tag_list if not isinstance(tag, str)]:
raise MalFormattedSource(log)
self._tags = value
|
def set_tags(self, value)
|
Setter for tags attribute
| 2.929949
| 2.944769
| 0.994967
|
self.clean()
r = {
"type": self._type,
"requested": self._requested,
"value": self._value
}
if self._tags:
r["tags"] = self._tags
return r
|
def build_api_struct(self)
|
Calls the clean method of the class and returns the info in a structure
that the Atlas API accepts.
| 5.036486
| 3.721359
| 1.3534
|
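For reference, a hedged example of the kind of dictionary build_api_struct() returns; the keys come from the code above, while the values are plausible RIPE Atlas placeholders, not taken from the source:

source_struct = {
    "type": "area",          # probe selection type
    "requested": 5,          # number of probes requested
    "value": "WW",           # value matching the chosen type
    "tags": {"include": ["system-ipv4-works"], "exclude": []},  # only present when tags were set
}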
if self.action == "remove" and value != "probes":
log = "Sources field 'type' when action is remove should always be 'probes'."
raise MalFormattedSource(log)
self._type = value
|
def set_type(self, value)
|
Setter for type attribute
| 15.065809
| 15.18946
| 0.991859
|
if self.action == "remove":
log = (
"Tag-based filtering can only be used when adding "
"participant probes for a measurement."
)
raise MalFormattedSource(log)
super(AtlasChangeSource, self).set_tags(value)
|
def set_tags(self, value)
|
Setter for tags attribute
| 17.86038
| 18.236629
| 0.979368
|
if value not in ("remove", "add"):
log = "Sources field 'action' should be 'remove' or 'add'."
raise MalFormattedSource(log)
self._action = value
|
def set_action(self, value)
|
Setter for action attribute
| 9.544115
| 9.249624
| 1.031838
|
if not all([self._type, self._requested, self._value, self._action]):
raise MalFormattedSource(
"<type, requested, value, action> fields are required."
)
|
def clean(self)
|
Checks that the user has entered all required attributes. This might save
some queries from being sent to the server if they are totally wrong.
| 14.08461
| 11.200204
| 1.257532
|
data = super(AtlasChangeSource, self).build_api_struct()
data.update({"action": self._action})
return data
|
def build_api_struct(self)
|
Calls the parent's method and adds the additional field 'action' that is
required to form the structure the Atlas API accepts.
| 9.209903
| 4.815289
| 1.912637
|
for option, value in options.items():
setattr(self, option, value)
self._store_option(option)
|
def add_option(self, **options)
|
Adds an option and its value to the class as an attribute and stores it
to the used options set.
| 4.419557
| 3.719727
| 1.18814
|
for field in self.required_options:
setattr(self, field, kwargs.get(field))
self._store_option(field)
|
def _init_required_options(self, **kwargs)
|
Initialize the required options as class members. The value will be
either None or the value specified in the kwargs of __init__. The logic
here is to make the required options accessible for editing after a class
instance has been created.
| 4.324001
| 4.462869
| 0.968884
|
# make sure the correct measurement type is set.
if not self.measurement_type:
log = "Please define a valid measurement type."
raise MalFormattedMeasurement(log)
# make sure the required fields are set.
for roption in self.required_options:
if getattr(self, roption, None) is None:
log = "%s Measurement field: <%s> is required" % (
self.__class__.__name__, roption
)
raise MalFormattedMeasurement(log)
|
def clean(self)
|
Checks user-entered data, making sure the required options are at
least present. This might save some queries from being sent if
they are totally wrong.
| 5.168334
| 4.959947
| 1.042014
|
new_option = option
new_value = getattr(self, option)
renaming_pairs = {
"dontfrag": "dont_fragment",
"maxhops": "max_hops",
"firsthop": "first_hop",
"use_NSID": "set_nsid_bit",
"cd": "set_cd_bit",
"do": "set_do_bit",
"qbuf": "include_qbuf",
"recursion_desired": "set_rd_bit",
"noabuf": "include_abuf"
}
if option in renaming_pairs.keys():
warninglog = (
"DeprecationWarning: {0} option has been deprecated and "
"renamed to {1}."
).format(option, renaming_pairs[option])
print(warninglog)
new_option = renaming_pairs[option]
# noabuf was changed to include_abuf so we need a double-negative
if option == "noabuf":
new_value = not new_value
return new_option, new_value
|
def v2_translator(self, option)
|
This is a temporary function that helps move from the v1 API to v2 without
breaking already-running scripts, keeping backwards compatibility.
It translates an option name from API v1 to its renamed v2 equivalent.
| 4.666062
| 4.777592
| 0.976656
|
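A standalone sketch of the renaming and the noabuf double-negative described above (the mapping entries are copied from the function; the option value is made up):

renaming_pairs = {"noabuf": "include_abuf", "maxhops": "max_hops"}
option, value = "noabuf", True
new_option = renaming_pairs.get(option, option)
new_value = (not value) if option == "noabuf" else value
print(new_option, new_value)  # include_abuf False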
self.clean()
data = {"type": self.measurement_type}
# add all options
for option in self.used_options:
option_key, option_value = self.v2_translator(option)
data.update({option_key: option_value})
return data
|
def build_api_struct(self)
|
Calls the clean method of the class and returns the info in a
structure that the Atlas API accepts.
| 7.079497
| 5.519007
| 1.282748
|
self.socketIO = SocketIO(
host=self.iosocket_server,
port=80,
resource=self.iosocket_resource,
proxies=self.proxies,
headers=self.headers,
transports=["websocket"],
Namespace=AtlasNamespace,
)
self.socketIO.on(self.EVENT_NAME_ERROR, self.handle_error)
|
def connect(self)
|
Initiate the channel we want to start streams from.
| 5.488478
| 5.220843
| 1.051263
|
# Remove the following list when deprecation time expires
if channel in self.CHANNELS:
warning = (
"The event name '{}' will soon be deprecated. Use "
"the real event name '{}' instead."
).format(channel, self.CHANNELS[channel])
self.handle_error(warning)
channel = self.CHANNELS[channel]
# -------------------------------------------------------
if channel == self.EVENT_NAME_ERROR:
self.error_callback = callback
elif channel == self.EVENT_NAME_RESULTS:
self.socketIO.on(channel, partial(self.unpack_results, callback))
else:
self.socketIO.on(channel, callback)
|
def bind_channel(self, channel, callback)
|
Bind the given channel to the given callback.
| 5.065621
| 5.17024
| 0.979765
|
if stream_type:
self.subscribe(stream_type, **stream_parameters)
else:
self.handle_error("You need to set a stream type")
|
def start_stream(self, stream_type, **stream_parameters)
|
Starts a new stream for the given type with the given parameters.
| 4.738581
| 4.434487
| 1.068575
|
parameters["stream_type"] = stream_type
if (stream_type == "result") and ("buffering" not in parameters):
parameters["buffering"] = True
self.socketIO.emit(self.EVENT_NAME_SUBSCRIBE, parameters)
|
def subscribe(self, stream_type, **parameters)
|
Subscribe to a stream with the given parameters.
| 4.545642
| 4.017692
| 1.131406
|
if seconds is None:
self.socketIO.wait()
else:
self.socketIO.wait(seconds=seconds)
|
def timeout(self, seconds=None)
|
Times out all streams after n seconds, or waits forever if seconds is
None.
| 3.744085
| 3.591796
| 1.042399
|
if not self.api_filters:
return self.url
# Reduce complex objects to simpler strings
for k, v in self.api_filters.items():
if isinstance(v, datetime): # datetime > UNIX timestamp
self.api_filters[k] = int(calendar.timegm(v.timetuple()))
if isinstance(v, (tuple, list)): # tuples & lists > x,y,z
self.api_filters[k] = ",".join([str(_) for _ in v])
if (
self.id_filter in self.api_filters and
len(str(self.api_filters[self.id_filter])) > self.URL_LENGTH_LIMIT
):
self.build_url_chunks()
return self.split_urls.pop(0)
filters = '&'.join("%s=%s" % (k, v) for (k, v) in self.api_filters.items())
return "%s?%s" % (self.url, filters)
|
def build_url(self)
|
Build the url path based on the filter options.
| 3.438411
| 3.25587
| 1.056065
|
CHUNK_SIZE = 500
id_filter = str(self.api_filters.pop(self.id_filter)).split(',')
chunks = list(self.chunks(id_filter, CHUNK_SIZE))
filters = '&'.join("%s=%s" % (k, v) for (k, v) in self.api_filters.items())
for chunk in chunks:
if filters:
url = "{0}?{1}&{2}={3}".format(self.url, filters, self.id_filter, ','.join(chunk))
else:
url = "{0}?{1}={2}".format(self.url, self.id_filter, ','.join(chunk))
self.split_urls.append(url)
|
def build_url_chunks(self)
|
If the url is too long because the id filter is huge, break the ids up and
construct several urls to call in order, abstracting this complexity from the user.
| 2.732472
| 2.436943
| 1.12127
|
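A self-contained sketch of the chunking idea; the helper below stands in for the class's chunks() method (not shown here), and the URL path and filter name are placeholders:

def chunks(seq, size):
    """Yield successive size-sized slices of seq."""
    for i in range(0, len(seq), size):
        yield seq[i:i + size]

CHUNK_SIZE = 3  # the real code uses 500
ids = [str(i) for i in range(1, 8)]
split_urls = ["/api/v2/measurements/?msm_id={0}".format(",".join(chunk))
              for chunk in chunks(ids, CHUNK_SIZE)]
print(split_urls)  # three URLs covering ids 1-3, 4-6 and 7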
is_success, results = AtlasRequest(
url_path=self.atlas_url,
user_agent=self._user_agent,
server=self.server,
verify=self.verify,
).get()
if not is_success:
raise APIResponseError(results)
self.total_count = results.get("count")
self.atlas_url = self.build_next_url(results.get("next"))
self.current_batch = results.get("results", [])
|
def next_batch(self)
|
Queries the API for the next batch of objects and stores the next url and
the batch of objects.
| 4.717932
| 4.365951
| 1.080619
|
if not url:
if self.split_urls: # If we had a long request give the next part
self.total_count_flag = False # Reset flag for count
return self.split_urls.pop(0)
else:
return None
parsed_url = urlparse(url)
return "{0}?{1}".format(parsed_url.path, parsed_url.query)
|
def build_next_url(self, url)
|
Builds the next url in a format compatible with cousteau (path + query).
| 6.270384
| 6.014937
| 1.042469
|
if not self.total_count_flag and value:
self._count.append(int(value))
self.total_count_flag = True
|
def set_total_count(self, value)
|
Setter for the count attribute. It should append only one count per split url.
| 5.321268
| 4.237707
| 1.255695
|
headers = {
"User-Agent": self.http_agent,
"Content-Type": "application/json",
"Accept": "application/json"
}
if self.headers:
headers.update(self.headers)
return headers
|
def get_headers(self)
|
Return the headers for the HTTP request.
| 2.490443
| 2.229055
| 1.117264
|
self.build_url()
try:
response = self.get_http_method(method)
is_success = response.ok
try:
response_message = response.json()
except ValueError:
response_message = response.text
except requests.exceptions.RequestException as exc:
is_success = False
response_message = exc.args
return is_success, response_message
|
def http_method(self, method)
|
Execute the given HTTP method and return whether it succeeded, along with
the response: the raw text if it failed, or a Python object decoded from
JSON if it succeeded.
| 3.17945
| 2.981255
| 1.06648
|
return self.http_methods[method](self.url, **self.http_method_args)
|
def get_http_method(self, method)
|
Gets the http method that will be called from the requests library
| 5.681156
| 4.81922
| 1.178854
|
if url_params:
self.http_method_args["params"].update(url_params)
return self.http_method("GET")
|
def get(self, **url_params)
|
Makes the HTTP GET to the url.
| 5.902634
| 4.704324
| 1.254725
|
self._construct_post_data()
post_args = {"json": self.post_data}
self.http_method_args.update(post_args)
return self.http_method("POST")
|
def post(self)
|
Makes the HTTP POST to the url sending post_data.
| 6.889374
| 5.997571
| 1.148694
|
if isinstance(time, int):
time = datetime.utcfromtimestamp(time)
elif isinstance(time, str):
time = parser.parse(time)
return time
|
def clean_time(self, time)
|
Transform the time field into a datetime object if one is present.
| 2.865424
| 2.28492
| 1.254059
|
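A minimal sketch of the two conversions performed above, assuming python-dateutil is installed (the use of parser in the original implies it):

from datetime import datetime
from dateutil import parser

print(datetime.utcfromtimestamp(1500000000))  # 2017-07-14 02:40:00
print(parser.parse("2017-07-14T02:40:00"))    # 2017-07-14 02:40:00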
definitions = [msm.build_api_struct() for msm in self.measurements]
probes = [source.build_api_struct() for source in self.sources]
self.post_data = {
"definitions": definitions,
"probes": probes,
"is_oneoff": self.is_oneoff
}
if self.is_oneoff:
self.post_data.update({"is_oneoff": self.is_oneoff})
if self.start_time:
self.post_data.update(
{"start_time": int(calendar.timegm(self.start_time.timetuple()))}
)
if self.stop_time:
self.post_data.update(
{"stop_time": int(calendar.timegm(self.stop_time.timetuple()))}
)
if self.bill_to:
self.post_data.update({"bill_to": self.bill_to})
|
def _construct_post_data(self)
|
Constructs the data structure required by the Atlas API based on the
measurements, sources and times the user has specified.
| 2.389925
| 2.1941
| 1.089251
|
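The start/stop times end up in the POST body as UNIX timestamps; a standalone sketch of that conversion:

import calendar
from datetime import datetime

start_time = datetime(2020, 1, 1)
print(int(calendar.timegm(start_time.timetuple())))  # 1577836800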
if isinstance(probe_ids, (tuple, list)): # tuples & lists > x,y,z
probe_ids = ",".join([str(_) for _ in probe_ids])
return probe_ids
|
def clean_probes(self, probe_ids)
|
Checks the format of the probe ids and transforms them into something the
API understands.
| 5.670562
| 5.382979
| 1.053424
|
url_params = {}
if self.start:
url_params.update(
{"start": int(calendar.timegm(self.start.timetuple()))}
)
if self.stop:
url_params.update(
{"stop": int(calendar.timegm(self.stop.timetuple()))}
)
if self.probe_ids:
url_params.update({"probe_ids": self.probe_ids})
self.http_method_args["params"].update(url_params)
|
def update_http_method_params(self)
|
Update HTTP url parameters based on msm_id and query filters if
there are any.
| 2.527469
| 2.392236
| 1.05653
|
# Expand tuple `id`
if type(id) is tuple:
if len(id) != 2:
raise ValueError()
id, service = id
# Validate parameters
if not service:
raise ValueError('Invalid value provided for the "service" parameter')
# Build query
query = {}
if isinstance(media, six.string_types):
query['type'] = media
elif isinstance(media, list):
query['type'] = ','.join(media)
if extended:
query['extended'] = extended
# Send request
response = self.http.get(
params=[service, id],
query=query
)
# Parse response
items = self.get_data(response, **kwargs)
if isinstance(items, requests.Response):
return items
if not items:
return None
count = len(items)
if count > 1:
return SearchMapper.process_many(self.client, items)
elif count == 1:
return SearchMapper.process(self.client, items[0])
return None
|
def lookup(self, id, service=None, media=None, extended=None, **kwargs)
|
Lookup items by their Trakt, IMDB, TMDB, TVDB, or TVRage ID.
**Note:** If you lookup an identifier without a :code:`media` type specified it
might return multiple items if the :code:`service` is not globally unique.
:param id: Identifier value to lookup
:type id: :class:`~python:str` or :class:`~python:int`
:param service: Identifier service
**Possible values:**
- :code:`trakt`
- :code:`imdb`
- :code:`tmdb`
- :code:`tvdb`
- :code:`tvrage`
:type service: :class:`~python:str`
:param media: Desired media type (or :code:`None` to return all matching items)
**Possible values:**
- :code:`movie`
- :code:`show`
- :code:`episode`
- :code:`person`
- :code:`list`
:type media: :class:`~python:str` or :class:`~python:list` of :class:`~python:str`
:param extended: Level of information to include in response
**Possible values:**
- :code:`None`: Minimal (e.g. title, year, ids) **(default)**
- :code:`full`: Complete
:type extended: :class:`~python:str`
:param kwargs: Extra request options
:type kwargs: :class:`~python:dict`
:return: Results
:rtype: :class:`trakt.objects.media.Media` or :class:`~python:list` of :class:`trakt.objects.media.Media`
| 3.254342
| 3.23434
| 1.006184
|
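A hedged usage sketch, assuming this method is exposed through a configured trakt.py client as Trakt['search'] (client configuration and network access are required and not shown):

from trakt import Trakt

item = Trakt['search'].lookup('tt2015381', service='imdb', media='movie')
print(item)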
# Validate parameters
if not media:
warnings.warn(
"\"media\" parameter is now required on the Trakt['search'].query() method",
DeprecationWarning, stacklevel=2
)
if fields and not media:
raise ValueError('"fields" can only be used when the "media" parameter is defined')
# Build query
query = {
'query': query
}
if year:
query['year'] = year
if fields:
query['fields'] = fields
if extended:
query['extended'] = extended
# Serialize media items
if isinstance(media, list):
media = ','.join(media)
# Send request
response = self.http.get(
params=[media],
query=query
)
# Parse response
items = self.get_data(response, **kwargs)
if isinstance(items, requests.Response):
return items
if items is not None:
return SearchMapper.process_many(self.client, items)
return None
|
def query(self, query, media=None, year=None, fields=None, extended=None, **kwargs)
|
Search by titles, descriptions, translated titles, aliases, and people.
**Note:** Results are ordered by the most relevant score.
:param query: Search title or description
:type query: :class:`~python:str`
:param media: Desired media type (or :code:`None` to return all matching items)
**Possible values:**
- :code:`movie`
- :code:`show`
- :code:`episode`
- :code:`person`
- :code:`list`
:type media: :class:`~python:str` or :class:`~python:list` of :class:`~python:str`
:param year: Desired media year (or :code:`None` to return all matching items)
:type year: :class:`~python:str` or :class:`~python:int`
:param fields: Fields to search for :code:`query` (or :code:`None` to search all fields)
:type fields: :class:`~python:str` or :class:`~python:list`
:param extended: Level of information to include in response
**Possible values:**
- :code:`None`: Minimal (e.g. title, year, ids) **(default)**
- :code:`full`: Complete
:type extended: :class:`~python:str`
:param kwargs: Extra request options
:type kwargs: :class:`~python:dict`
:return: Results
:rtype: :class:`~python:list` of :class:`trakt.objects.media.Media`
| 3.779272
| 3.792075
| 0.996624
|
return {
'number': self.pk,
'episodes': [
episode.to_dict()
for episode in self.episodes.values()
]
}
|
def to_identifier(self)
|
Return the season identifier which is compatible with requests that require season definitions.
:return: Season identifier/definition
:rtype: :class:`~python:dict`
| 7.322082
| 5.666889
| 1.292081
|
result = self.to_identifier()
result.update({
'ids': dict([
(key, value) for (key, value) in self.keys[1:] # NOTE: keys[0] is the season identifier
])
})
if self.rating:
result['rating'] = self.rating.value
result['rated_at'] = to_iso8601_datetime(self.rating.timestamp)
result['in_watchlist'] = self.in_watchlist if self.in_watchlist is not None else 0
# Extended Info
if self.first_aired:
result['first_aired'] = to_iso8601_datetime(self.first_aired)
if self.episode_count:
result['episode_count'] = self.episode_count
if self.aired_episodes:
result['aired_episodes'] = self.aired_episodes
return result
|
def to_dict(self)
|
Dump season to a dictionary.
:return: Season dictionary
:rtype: :class:`~python:dict`
| 3.070529
| 3.044004
| 1.008714
|
result = self.to_identifier()
result.update({
'title': self.title,
'watched': 1 if self.is_watched else 0,
'collected': 1 if self.is_collected else 0,
'plays': self.plays if self.plays is not None else 0,
'in_watchlist': self.in_watchlist if self.in_watchlist is not None else 0,
'progress': self.progress,
'last_watched_at': to_iso8601_datetime(self.last_watched_at),
'collected_at': to_iso8601_datetime(self.collected_at),
'paused_at': to_iso8601_datetime(self.paused_at),
'ids': dict([
(key, value) for (key, value) in self.keys[1:] # NOTE: keys[0] is the (<season>, <episode>) identifier
])
})
if self.rating:
result['rating'] = self.rating.value
result['rated_at'] = to_iso8601_datetime(self.rating.timestamp)
# Extended Info
if self.first_aired:
result['first_aired'] = to_iso8601_datetime(self.first_aired)
if self.updated_at:
result['updated_at'] = to_iso8601_datetime(self.updated_at)
if self.overview:
result['overview'] = self.overview
if self.available_translations:
result['available_translations'] = self.available_translations
return result
|
def to_dict(self)
|
Dump episode to a dictionary.
:return: Episode dictionary
:rtype: :class:`~python:dict`
| 2.339957
| 2.275805
| 1.028188
|
print('Authentication aborted')
# Authentication aborted
self.is_authenticating.acquire()
self.is_authenticating.notify_all()
self.is_authenticating.release()
|
def on_aborted(self)
|
Device authentication aborted.
Triggered when device authentication was aborted (either with `DeviceOAuthPoller.stop()`
or via the "poll" event)
| 5.368611
| 4.288243
| 1.251937
|
# Acquire condition
self.is_authenticating.acquire()
# Store authorization for future calls
self.authorization = authorization
print('Authentication successful - authorization: %r' % self.authorization)
# Authentication complete
self.is_authenticating.notify_all()
self.is_authenticating.release()
|
def on_authenticated(self, authorization)
|
Device authenticated.
:param authorization: Authentication token details
:type authorization: dict
| 5.222447
| 6.071575
| 0.860147
|
print('Authentication expired')
# Authentication expired
self.is_authenticating.acquire()
self.is_authenticating.notify_all()
self.is_authenticating.release()
|
def on_expired(self)
|
Device authentication expired.
| 5.272637
| 4.136285
| 1.274728
|
return DeviceOAuthPoller(self.client, device_code, expires_in, interval)
|
def poll(self, device_code, expires_in, interval, **kwargs)
|
Construct the device authentication poller.
:param device_code: Device authentication code
:type device_code: str
:param expires_in: Device authentication code expiry (in seconds)
:type expires_in: int
:param interval: Device authentication poll interval
:type interval: int
:rtype: DeviceOAuthPoller
| 8.224335
| 5.129593
| 1.603311
|
result = self.to_identifier()
result.update({
'watched': 1 if self.is_watched else 0,
'collected': 1 if self.is_collected else 0,
'plays': self.plays if self.plays is not None else 0,
'in_watchlist': self.in_watchlist if self.in_watchlist is not None else 0,
'progress': self.progress,
'last_watched_at': to_iso8601_datetime(self.last_watched_at),
'collected_at': to_iso8601_datetime(self.collected_at),
'paused_at': to_iso8601_datetime(self.paused_at)
})
if self.rating:
result['rating'] = self.rating.value
result['rated_at'] = to_iso8601_datetime(self.rating.timestamp)
# Extended Info
if self.released:
result['released'] = to_iso8601_date(self.released)
if self.updated_at:
result['updated_at'] = to_iso8601_datetime(self.updated_at)
if self.overview:
result['overview'] = self.overview
if self.tagline:
result['tagline'] = self.tagline
if self.runtime:
result['runtime'] = self.runtime
if self.certification:
result['certification'] = self.certification
if self.homepage:
result['homepage'] = self.homepage
if self.trailer:
result['trailer'] = self.trailer
if self.language:
result['language'] = self.language
if self.available_translations:
result['available_translations'] = self.available_translations
if self.genres:
result['genres'] = self.genres
return result
|
def to_dict(self)
|
Dump movie to a dictionary.
:return: Movie dictionary
:rtype: :class:`~python:dict`
| 1.872801
| 1.823606
| 1.026977
|
result = super(Progress, self).to_dict()
label = LABELS['last_progress_change'][self.progress_type]
result[label] = to_iso8601_datetime(self.last_progress_change)
if self.progress_type == 'watched':
result['reset_at'] = self.reset_at
result['seasons'] = [
season.to_dict()
for season in self.seasons.values()
]
if self.hidden_seasons:
result['hidden_seasons'] = [
popitems(season.to_dict(), ['number', 'ids'])
for season in self.hidden_seasons.values()
]
if self.next_episode:
result['next_episode'] = popitems(self.next_episode.to_dict(), ['season', 'number', 'title', 'ids'])
result['next_episode']['season'] = self.next_episode.keys[0][0]
if self.last_episode:
result['last_episode'] = popitems(self.last_episode.to_dict(), ['season', 'number', 'title', 'ids'])
result['last_episode']['season'] = self.last_episode.keys[0][0]
return result
|
def to_dict(self)
|
Dump progress to a dictionary.
:return: Progress dictionary
:rtype: :class:`~python:dict`
| 2.591719
| 2.537966
| 1.02118
|
if movie and (show or episode):
raise ValueError('Only one media type should be provided')
if not movie and not episode:
raise ValueError('Missing media item')
data = {
'progress': progress,
'app_version': kwargs.pop('app_version', self.client.version),
'app_date': kwargs.pop('app_date', None)
}
if movie:
# TODO validate
data['movie'] = movie
elif episode:
if show:
data['show'] = show
# TODO validate
data['episode'] = episode
response = self.http.post(
action,
data=data,
**popitems(kwargs, [
'authenticated',
'validate_token'
])
)
return self.get_data(response, **kwargs)
|
def action(self, action, movie=None, show=None, episode=None, progress=0.0, **kwargs)
|
Perform scrobble action.
:param action: Action to perform (either :code:`start`, :code:`pause` or :code:`stop`)
:type action: :class:`~python:str`
:param movie: Movie definition (or `None`)
**Example:**
.. code-block:: python
{
'title': 'Guardians of the Galaxy',
'year': 2014,
'ids': {
'tmdb': 118340
}
}
:type movie: :class:`~python:dict`
:param show: Show definition (or `None`)
**Example:**
.. code-block:: python
{
'title': 'Breaking Bad',
'year': 2008,
'ids': {
'tvdb': 81189
}
}
:type show: :class:`~python:dict`
:param episode: Episode definition (or `None`)
**Example:**
.. code-block:: python
{
"season": 3,
"number": 11
}
:type episode: :class:`~python:dict`
:param progress: Current movie/episode progress percentage
:type progress: :class:`~python:float`
:param kwargs: Extra request options
:type kwargs: :class:`~python:dict`
:return: Response (or `None`)
**Example:**
.. code-block:: python
{
'action': 'start',
'progress': 1.25,
'sharing': {
'facebook': true,
'twitter': true,
'tumblr': false
},
'movie': {
'title': 'Guardians of the Galaxy',
'year': 2014,
'ids': {
'trakt': 28,
'slug': 'guardians-of-the-galaxy-2014',
'imdb': 'tt2015381',
'tmdb': 118340
}
}
}
:rtype: :class:`~python:dict`
| 3.916425
| 4.059943
| 0.96465
|
return self.action(
'start',
movie, show, episode,
progress,
**kwargs
)
|
def start(self, movie=None, show=None, episode=None, progress=0.0, **kwargs)
|
Send the scrobble "start" action.
Use this method when the video initially starts playing or is un-paused. This will
remove any playback progress if it exists.
**Note:** A watching status will auto expire after the remaining runtime has elapsed.
There is no need to re-send every 15 minutes.
:param movie: Movie definition (or `None`)
**Example:**
.. code-block:: python
{
'title': 'Guardians of the Galaxy',
'year': 2014,
'ids': {
'tmdb': 118340
}
}
:type movie: :class:`~python:dict`
:param show: Show definition (or `None`)
**Example:**
.. code-block:: python
{
'title': 'Breaking Bad',
'year': 2008,
'ids': {
'tvdb': 81189
}
}
:type show: :class:`~python:dict`
:param episode: Episode definition (or `None`)
**Example:**
.. code-block:: python
{
"season": 3,
"number": 11
}
:type episode: :class:`~python:dict`
:param progress: Current movie/episode progress percentage
:type progress: :class:`~python:float`
:param kwargs: Extra request options
:type kwargs: :class:`~python:dict`
:return: Response (or `None`)
**Example:**
.. code-block:: python
{
'action': 'start',
'progress': 1.25,
'sharing': {
'facebook': true,
'twitter': true,
'tumblr': false
},
'movie': {
'title': 'Guardians of the Galaxy',
'year': 2014,
'ids': {
'trakt': 28,
'slug': 'guardians-of-the-galaxy-2014',
'imdb': 'tt2015381',
'tmdb': 118340
}
}
}
:rtype: :class:`~python:dict`
| 6.396092
| 7.429073
| 0.860954
|
return self.action(
'pause',
movie, show, episode,
progress,
**kwargs
)
|
def pause(self, movie=None, show=None, episode=None, progress=0.0, **kwargs)
|
Send the scrobble "pause' action.
Use this method when the video is paused. The playback progress will be saved and
:code:`Trakt['sync/playback'].get()` can be used to resume the video from this exact
position. Un-pause a video by calling the :code:`Trakt['scrobble'].start()` method again.
:param movie: Movie definition (or `None`)
**Example:**
.. code-block:: python
{
'title': 'Guardians of the Galaxy',
'year': 2014,
'ids': {
'tmdb': 118340
}
}
:type movie: :class:`~python:dict`
:param show: Show definition (or `None`)
**Example:**
.. code-block:: python
{
'title': 'Breaking Bad',
'year': 2008,
'ids': {
'tvdb': 81189
}
}
:type show: :class:`~python:dict`
:param episode: Episode definition (or `None`)
**Example:**
.. code-block:: python
{
"season": 3,
"number": 11
}
:type episode: :class:`~python:dict`
:param progress: Current movie/episode progress percentage
:type progress: :class:`~python:float`
:param kwargs: Extra request options
:type kwargs: :class:`~python:dict`
:return: Response (or `None`)
**Example:**
.. code-block:: python
{
'action': 'pause',
'progress': 75,
'sharing': {
'facebook': true,
'twitter': true,
'tumblr': false
},
'movie': {
'title': 'Guardians of the Galaxy',
'year': 2014,
'ids': {
'trakt': 28,
'slug': 'guardians-of-the-galaxy-2014',
'imdb': 'tt2015381',
'tmdb': 118340
}
}
}
:rtype: :class:`~python:dict`
| 6.419295
| 8.215443
| 0.781369
|
return self.action(
'stop',
movie, show, episode,
progress,
**kwargs
)
|
def stop(self, movie=None, show=None, episode=None, progress=0.0, **kwargs)
|
Send the scrobble "stop" action.
Use this method when the video is stopped or finishes playing on its own. If the
progress is above 80%, the video will be scrobbled and the :code:`action` will be set
to **scrobble**.
If the progress is less than 80%, it will be treated as a *pause* and the :code:`action`
will be set to **pause**. The playback progress will be saved and :code:`Trakt['sync/playback'].get()`
can be used to resume the video from this exact position.
**Note:** If you prefer to use a threshold higher than 80%, you should use :code:`Trakt['scrobble'].pause()`
yourself so it doesn't create duplicate scrobbles.
:param movie: Movie definition (or `None`)
**Example:**
.. code-block:: python
{
'title': 'Guardians of the Galaxy',
'year': 2014,
'ids': {
'tmdb': 118340
}
}
:type movie: :class:`~python:dict`
:param show: Show definition (or `None`)
**Example:**
.. code-block:: python
{
'title': 'Breaking Bad',
'year': 2008,
'ids': {
'tvdb': 81189
}
}
:type show: :class:`~python:dict`
:param episode: Episode definition (or `None`)
**Example:**
.. code-block:: python
{
"season": 3,
"number": 11
}
:type episode: :class:`~python:dict`
:param progress: Current movie/episode progress percentage
:type progress: :class:`~python:float`
:param kwargs: Extra request options
:type kwargs: :class:`~python:dict`
:return: Response (or `None`)
**Example:**
.. code-block:: python
{
'action': 'scrobble',
'progress': 99.9,
'sharing': {
'facebook': true,
'twitter': true,
'tumblr': false
},
'movie': {
'title': 'Guardians of the Galaxy',
'year': 2014,
'ids': {
'trakt': 28,
'slug': 'guardians-of-the-galaxy-2014',
'imdb': 'tt2015381',
'tmdb': 118340
}
}
}
:rtype: :class:`~python:dict`
| 6.401034
| 8.751034
| 0.73146
|
return self._client['users/*/lists/*'].delete(self.username, self.id, **kwargs)
|
def delete(self, **kwargs)
|
Delete the list.
:param kwargs: Extra request options
:type kwargs: :class:`~python:dict`
:return: Boolean to indicate if the request was successful
:rtype: :class:`~python:bool`
| 19.336355
| 17.910671
| 1.0796
|
item = self._client['users/*/lists/*'].update(self.username, self.id, return_type='data', **kwargs)
if not item:
return False
self._update(item)
return True
|
def update(self, **kwargs)
|
Update the list with the current object attributes.
:param kwargs: Extra request options
:type kwargs: :class:`~python:dict`
:return: Boolean to indicate if the request was successful
:rtype: :class:`~python:bool`
| 9.125752
| 9.941902
| 0.917908
|
return self._client['users/*/lists/*'].remove(self.username, self.id, items, **kwargs)
|
def remove(self, items, **kwargs)
|
Remove specified items from the list.
:param items: Items that should be removed from the list
:type items: :class:`~python:list`
:param kwargs: Extra request options
:type kwargs: :class:`~python:dict`
:return: Response
:rtype: :class:`~python:dict`
| 15.815752
| 16.174093
| 0.977845
|
return self._client['users/*/lists/*'].like(self.username, self.id, **kwargs)
|
def like(self, **kwargs)
|
Like the list.
:param kwargs: Extra request options
:type kwargs: :class:`~python:dict`
:return: Boolean to indicate if the request was successful
:rtype: :class:`~python:bool`
| 17.296215
| 16.275059
| 1.062744
|
return self._client['users/*/lists/*'].unlike(self.username, self.id, **kwargs)
|
def unlike(self, **kwargs)
|
Un-like the list.
:param kwargs: Extra request options
:type kwargs: :class:`~python:dict`
:return: Boolean to indicate if the request was successful
:rtype: :class:`~python:bool`
| 15.640798
| 14.271692
| 1.095932
|
if source not in ['all', 'my']:
raise ValueError('Unknown collection type: %s' % (source,))
if media not in ['dvd', 'movies', 'shows']:
raise ValueError('Unknown media type: %s' % (media,))
# Default `start_date` to today when only `days` is provided
if start_date is None and days:
start_date = datetime.utcnow()
# Request calendar collection
response = self.http.get(
'/calendars/%s/%s%s' % (
source, media,
('/' + collection) if collection else ''
),
params=[
start_date.strftime('%Y-%m-%d') if start_date else None,
days
],
query={
'query': query,
'years': years,
'genres': genres,
'languages': languages,
'countries': countries,
'runtimes': runtimes,
'ratings': ratings,
'certifications': certifications,
# TV
'networks': networks,
'status': status
},
**popitems(kwargs, [
'authenticated',
'validate_token'
])
)
# Parse response
items = self.get_data(response, **kwargs)
if isinstance(items, requests.Response):
return items
# Map items
if media == 'shows':
return SummaryMapper.episodes(
self.client, items,
parse_show=True
)
return SummaryMapper.movies(self.client, items)
|
def get(self, source, media, collection=None, start_date=None, days=None, query=None, years=None, genres=None,
languages=None, countries=None, runtimes=None, ratings=None, certifications=None, networks=None,
status=None, **kwargs)
|
Retrieve calendar items.
The `all` calendar displays info for all shows airing during the specified period. The `my` calendar displays
episodes for all shows that have been watched, collected, or watchlisted.
:param source: Calendar source (`all` or `my`)
:type source: str
:param media: Media type (`dvd`, `movies` or `shows`)
:type media: str
:param collection: Collection type (`new`, `premieres`)
:type collection: str or None
:param start_date: Start date (defaults to today)
:type start_date: datetime or None
:param days: Number of days to display (defaults to `7`)
:type days: int or None
:param query: Search title or description.
:type query: str or None
:param years: Year or range of years (e.g. `2014`, or `2014-2016`)
:type years: int or str or tuple or None
:param genres: Genre slugs (e.g. `action`)
:type genres: str or list of str or None
:param languages: Language codes (e.g. `en`)
:type languages: str or list of str or None
:param countries: Country codes (e.g. `us`)
:type countries: str or list of str or None
:param runtimes: Runtime range in minutes (e.g. `30-90`)
:type runtimes: str or tuple or None
:param ratings: Rating range between `0` and `100` (e.g. `75-100`)
:type ratings: str or tuple or None
:param certifications: US Content Certification (e.g. `pg-13`, `tv-pg`)
:type certifications: str or list of str or None
:param networks: (TV) Network name (e.g. `HBO`)
:type networks: str or list of str or None
:param status: (TV) Show status (e.g. `returning series`, `in production`, `ended`)
:type status: str or list of str or None
:return: Items
:rtype: list of trakt.objects.video.Video
| 3.45672
| 3.141077
| 1.100489
|
for sk, season in iteritems(self.seasons):
# Yield each episode in season
for ek, episode in iteritems(season.episodes):
yield (sk, ek), episode
|
def episodes(self)
|
Return a flat episode iterator.
:returns: Iterator :code:`((season_num, episode_num), Episode)`
:rtype: iterator
| 6.947012
| 5.667383
| 1.225788
|
result = self.to_identifier()
result['seasons'] = [
season.to_dict()
for season in self.seasons.values()
]
result['in_watchlist'] = self.in_watchlist if self.in_watchlist is not None else 0
if self.rating:
result['rating'] = self.rating.value
result['rated_at'] = to_iso8601_datetime(self.rating.timestamp)
# Extended Info
if self.first_aired:
result['first_aired'] = to_iso8601_datetime(self.first_aired)
if self.updated_at:
result['updated_at'] = to_iso8601_datetime(self.updated_at)
if self.overview:
result['overview'] = self.overview
if self.airs:
result['airs'] = self.airs
if self.runtime:
result['runtime'] = self.runtime
if self.certification:
result['certification'] = self.certification
if self.network:
result['network'] = self.network
if self.country:
result['country'] = self.country
if self.status:
result['status'] = self.status
if self.homepage:
result['homepage'] = self.homepage
if self.language:
result['language'] = self.language
if self.available_translations:
result['available_translations'] = self.available_translations
if self.genres:
result['genres'] = self.genres
if self.aired_episodes:
result['aired_episodes'] = self.aired_episodes
return result
|
def to_dict(self)
|
Dump show to a dictionary.
:return: Show dictionary
:rtype: :class:`~python:dict`
| 1.883059
| 1.854316
| 1.015501
|
path = [self.path]
path.extend(self.params)
# Build URL
url = self.client.base_url + '/'.join(
str(value) for value in path
if value
)
# Append query parameters (if defined)
query = self.encode_query(self.query)
if query:
url += '?' + query
return url
|
def construct_url(self)
|
Construct a full trakt request URI, with `params` and `query`.
| 4.272203
| 3.67931
| 1.161142
|
search_type = ('officers' if not disqualified else
'disqualified-officers')
params = kwargs
params['q'] = term
baseuri = self._BASE_URI + 'search/{}'.format(search_type)
res = self.session.get(baseuri, params=params)
self.handle_http_error(res)
return res
|
def search_officers(self, term, disqualified=False, **kwargs)
|
Search for officers by name.
Args:
term (str): Officer name to search on.
disqualified (Optional[bool]): True to search for disqualified
officers
kwargs (dict): additional keywords passed into
requests.session.get params keyword.
| 3.329506
| 3.848781
| 0.865081
|
url_root = "company/{}/registered-office-address"
baseuri = self._BASE_URI + url_root.format(num)
res = self.session.get(baseuri)
self.handle_http_error(res)
return res
|
def address(self, num)
|
Search for company addresses by company number.
Args:
num (str): Company number to search on.
| 7.276281
| 6.572797
| 1.10703
|
baseuri = self._BASE_URI + "company/{}".format(num)
res = self.session.get(baseuri)
self.handle_http_error(res)
return res
|
def profile(self, num)
|
Search for company profile by company number.
Args:
num (str): Company number to search on.
| 5.92638
| 6.359854
| 0.931842
|
baseuri = self._BASE_URI + "company/{}/filing-history".format(num)
if transaction is not None:
baseuri += "/{}".format(transaction)
res = self.session.get(baseuri, params=kwargs)
self.handle_http_error(res)
return res
|
def filing_history(self, num, transaction=None, **kwargs)
|
Search for a company's filing history by company number.
Args:
num (str): Company number to search on.
transaction (Optional[str]): Filing record number.
kwargs (dict): additional keywords passed into
requests.session.get params keyword.
| 3.257395
| 3.835094
| 0.849365
|
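A standalone sketch of the request this method builds, assuming the public Companies House REST API base URI and its basic-auth scheme (API key as the username); both are assumptions, not taken from the code above:

import requests

BASE_URI = "https://api.company-information.service.gov.uk/"
num, transaction = "00000006", None
uri = BASE_URI + "company/{}/filing-history".format(num)
if transaction is not None:
    uri += "/{}".format(transaction)
res = requests.get(uri, auth=("YOUR_API_KEY", ""))  # placeholder key
print(res.status_code)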
baseuri = self._BASE_URI + "company/{}/charges".format(num)
if charge_id is not None:
baseuri += "/{}".format(charge_id)
res = self.session.get(baseuri, params=kwargs)
else:
res = self.session.get(baseuri, params=kwargs)
self.handle_http_error(res)
return res
|
def charges(self, num, charge_id=None, **kwargs)
|
Search for charges against a company by company number.
Args:
num (str): Company number to search on.
charge_id (Optional[str]): Charge record number.
kwargs (dict): additional keywords passed into
requests.session.get params keyword.
| 2.654265
| 2.984305
| 0.889408
|
baseuri = self._BASE_URI + "company/{}/officers".format(num)
res = self.session.get(baseuri, params=kwargs)
self.handle_http_error(res)
return res
|
def officers(self, num, **kwargs)
|
Search for a company's registered officers by company number.
Args:
num (str): Company number to search on.
kwargs (dict): additional keywords passed into
requests.session.get *params* keyword.
| 4.319647
| 4.860064
| 0.888805
|
search_type = 'natural' if natural else 'corporate'
baseuri = (self._BASE_URI +
'disqualified-officers/{}/{}'.format(search_type, num))
res = self.session.get(baseuri, params=kwargs)
self.handle_http_error(res)
return res
|
def disqualified(self, num, natural=True, **kwargs)
|
Search for disqualified officers by officer ID.
Searches for natural disqualifications by default. Specify
natural=False to search for corporate disqualifications.
Args:
num (str): Officer ID to search on.
natural (Optional[bool]): Natural or corporate search
kwargs (dict): additional keywords passed into
requests.session.get *params* keyword.
| 4.659471
| 3.896141
| 1.195919
|
baseuri = (self._BASE_URI +
'company/{}/persons-with-significant-control'.format(num))
# Only append statements to the URL if statements is True
if statements is True:
baseuri += '-statements'
res = self.session.get(baseuri, params=kwargs)
self.handle_http_error(res)
return res
|
def persons_significant_control(self, num, statements=False, **kwargs)
|
Search for a list of persons with significant control.
Searches for persons of significant control based on company number for
a specified company. Specify statements=True to only search for
officers with statements.
Args:
num (str, int): Company number to search on.
statements (Optional[bool]): Search only for persons with
statements. Default is False.
kwargs (dict): additional keywords passed into requests.session.get
*params* keyword.
| 4.944697
| 5.730784
| 0.862831
|
# Dict mapping entity_type strings to url strings
entities = {'individual': 'individual',
'corporate': 'corporate-entity',
'legal': 'legal-person',
'statements': 'persons-with-significant-control-statements',
'secure': 'super-secure'}
# Make sure correct entity_type supplied
try:
entity = entities[entity_type]
except KeyError as e:
msg = ("Wrong entity_type supplied. Please choose from " +
"individual, corporate, legal, statements or secure")
raise Exception(msg) from e
# Construct the request and return the result
baseuri = (self._BASE_URI +
'company/{}/persons-with-significant-control/'.format(num) +
'{}/{}'.format(entity, entity_id))
res = self.session.get(baseuri, params=kwargs)
self.handle_http_error(res)
return res
|
def significant_control(self,
num,
entity_id,
entity_type='individual',
**kwargs)
|
Get details of a specific entity with significant control.
Args:
num (str, int): Company number to search on.
entity_id (str, int): Entity id to request details for
entity_type (str, int): What type of entity to search for. Defaults
to 'individual'. Other possible options are
'corporate' (for corporate entities), 'legal' (for legal
persons), 'statements' (for a person with significant control
statement) and 'secure' (for a super secure person).
kwargs (dict): additional keywords passed into requests.session.get
*params* keyword.
| 4.521476
| 3.293795
| 1.372725
|
baseuri = '{}document/{}/content'.format(self._DOCUMENT_URI,
document_id)
res = self.session.get(baseuri, params=kwargs)
self.handle_http_error(res)
return res
|
def document(self, document_id, **kwargs)
|
Request a document by its document id.
Normally the response.content can be saved as a pdf file
Args:
document_id (str): The id of the document retrieved.
kwargs (dict): additional keywords passed into
requests.session.get *params* keyword.
| 5.310379
| 6.304672
| 0.842293
|
self.files['_{}'.format(filename)] = (filename, file)
|
def add_file(self, file, filename)
|
add file to webhook
:param file: file content
:param filename: filename
:return:
| 12.149422
| 14.104712
| 0.861373
|
self.embeds.append(embed.__dict__ if isinstance(embed, DiscordEmbed) else embed)
|
def add_embed(self, embed)
|
add embedded rich content
:param embed: embed object or dict
| 5.649657
| 4.820271
| 1.172062
|
data = dict()
embeds = self.embeds
self.embeds = list()
# convert DiscordEmbed to dict
for embed in embeds:
self.add_embed(embed)
for key, value in self.__dict__.items():
if value and key not in ['url', 'files', 'filename']:
data[key] = value
embeds_empty = all(not embed for embed in data["embeds"]) if 'embeds' in data else True
if embeds_empty and 'content' not in data and bool(self.files) is False:
logger.error('webhook message is empty! set content or embed data')
return data
|
def json(self)
|
convert webhook data to json
:return webhook data as json:
| 4.962892
| 4.502987
| 1.102133
|
if bool(self.files) is False:
response = requests.post(self.url, json=self.json, proxies=self.proxies)
else:
self.files['payload_json'] = (None, json.dumps(self.json))
response = requests.post(self.url, files=self.files, proxies=self.proxies)
if response.status_code in [200, 204]:
logger.debug("Webhook executed")
else:
logger.error('status code %s: %s' % (response.status_code, response.content.decode("utf-8")))
|
def execute(self)
|
execute Webhook
:return:
| 2.947848
| 2.595422
| 1.135788
|
self.timestamp = timestamp or str(datetime.datetime.utcfromtimestamp(time.time()))
|
def set_timestamp(self, timestamp=None)
|
set timestamp of embed content
:param timestamp: (optional) timestamp of embed content
| 18.597803
| 28.235472
| 0.658668
|
self.footer = {
'text': kwargs.get('text'),
'icon_url': kwargs.get('icon_url'),
'proxy_icon_url': kwargs.get('proxy_icon_url')
}
|
def set_footer(self, **kwargs)
|
set footer information of embed
:keyword text: footer text
:keyword icon_url: url of footer icon (only supports http(s) and attachments)
:keyword proxy_icon_url: a proxied url of footer icon
| 1.968919
| 1.857605
| 1.059923
|
self.image = {
'url': kwargs.get('url'),
'proxy_url': kwargs.get('proxy_url'),
'height': kwargs.get('height'),
'width': kwargs.get('width'),
}
|
def set_image(self, **kwargs)
|
set image of embed
:keyword url: source url of image (only supports http(s) and attachments)
:keyword proxy_url: a proxied url of the image
:keyword height: height of image
:keyword width: width of image
| 2.38491
| 1.813862
| 1.314824
|
self.thumbnail = {
'url': kwargs.get('url'),
'proxy_url': kwargs.get('proxy_url'),
'height': kwargs.get('height'),
'width': kwargs.get('width'),
}
|
def set_thumbnail(self, **kwargs)
|
set thumbnail of embed
:keyword url: source url of thumbnail (only supports http(s) and attachments)
:keyword proxy_url: a proxied thumbnail of the image
:keyword height: height of thumbnail
:keyword width: width of thumbnail
| 2.148671
| 1.761064
| 1.220099
|
self.video = {
'url': kwargs.get('url'),
'height': kwargs.get('height'),
'width': kwargs.get('width'),
}
|
def set_video(self, **kwargs)
|
set video of embed
:keyword url: source url of video
:keyword height: height of video
:keyword width: width of video
| 2.715301
| 2.259626
| 1.20166
|
self.author = {
'name': kwargs.get('name'),
'url': kwargs.get('url'),
'icon_url': kwargs.get('icon_url'),
'proxy_icon_url': kwargs.get('proxy_icon_url'),
}
|
def set_author(self, **kwargs)
|
set author of embed
:keyword name: name of author
:keyword url: url of author
:keyword icon_url: url of author icon (only supports http(s) and attachments)
:keyword proxy_icon_url: a proxied url of author icon
| 1.955554
| 1.66183
| 1.176747
|
self.fields.append({
'name': kwargs.get('name'),
'value': kwargs.get('value'),
'inline': kwargs.get('inline', True)
})
|
def add_embed_field(self, **kwargs)
|
set field of embed
:keyword name: name of the field
:keyword value: value of the field
:keyword inline: (optional) whether or not this field should display inline
| 2.168451
| 2.430341
| 0.892242
|
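A hedged end-to-end sketch tying these setters together, assuming the surrounding classes are DiscordWebhook and DiscordEmbed from the discord-webhook package (the constructor arguments and webhook URL are assumptions):

from discord_webhook import DiscordWebhook, DiscordEmbed

webhook = DiscordWebhook(url='https://discord.com/api/webhooks/<id>/<token>')  # placeholder URL
embed = DiscordEmbed(title='Build finished', description='All tests passed', color=242424)
embed.set_footer(text='CI bot')
embed.add_embed_field(name='Duration', value='42s')
webhook.add_embed(embed)
webhook.execute()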
request = self.request
if self.settings is None:
graph_settings = deepcopy(getattr(settings, 'SPAGHETTI_SAUCE', {}))
graph_settings.update(self.override_settings)
else:
graph_settings = self.settings
apps = graph_settings.get('apps', [])
excludes = [
"%s__%s" % (app, model)
for app, models in graph_settings.get('exclude', {}).items()
for model in models
]
models = ContentType.objects.filter(app_label__in=apps)
nodes = []
edges = []
for model in models:
if (model.model_class() is None):
continue
model.is_proxy = model.model_class()._meta.proxy
if (model.is_proxy and not graph_settings.get('show_proxy', False)):
continue
model.doc = model.model_class().__doc__
_id = "%s__%s" % (model.app_label, model.model)
if _id in excludes:
continue
label = self.get_node_label(model)
fields = [f for f in model.model_class()._meta.fields]
many = [f for f in model.model_class()._meta.many_to_many]
if graph_settings.get('show_fields', True):
label += "\n%s\n" % ("-" * len(model.model))
label += "\n".join([str(f.name) for f in fields])
edge_color = {'inherit': 'from'}
for f in fields + many:
if f.remote_field is not None:
m = f.remote_field.model._meta
to_id = "%s__%s" % (m.app_label, m.model_name)
if to_id in excludes:
pass
elif _id == to_id and graph_settings.get('ignore_self_referential', False):
pass
else:
if m.app_label != model.app_label:
edge_color = {'inherit': 'both'}
edge = {'from': _id, 'to': to_id, 'color': edge_color}
if str(f.name).endswith('_ptr'):
# fields that end in _ptr are pointing to a parent object
edge.update({
'arrows': {'to': {'scaleFactor': 0.75}}, # needed to draw from-to
'font': {'align': 'middle'},
'label': 'is a',
'dashes': True
})
elif type(f) == related.ForeignKey:
edge.update({
'arrows': {'to': {'scaleFactor': 0.75}}
})
elif type(f) == related.OneToOneField:
edge.update({
'font': {'align': 'middle'},
'label': '|'
})
elif type(f) == related.ManyToManyField:
edge.update({
'color': {'color': 'gray'},
'arrows': {'to': {'scaleFactor': 1}, 'from': {'scaleFactor': 1}},
})
edges.append(edge)
if model.is_proxy:
proxy = model.model_class()._meta.proxy_for_model._meta
model.proxy = proxy
edge = {
'to': _id,
'from': "%s__%s" % (proxy.app_label, proxy.model_name),
'color': edge_color,
}
edges.append(edge)
all_node_fields = fields
if graph_settings.get('show_m2m_field_detail', False):
all_node_fields = fields + many
nodes.append(
{
'id': _id,
'label': label,
'shape': 'box',
'group': model.app_label,
'title': get_template(self.meatball_template_name).render(
{'model': model, 'fields': all_node_fields}
)
}
)
data = {
'meatballs': json.dumps(nodes),
'spaghetti': json.dumps(edges)
}
return render(request, self.plate_template_name, data)
|
def plate(self)
|
Serves up a delicious plate with your models
| 2.662978
| 2.664647
| 0.999374
|
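A hedged example of the settings dictionary this view consults; only keys actually read in the code above are shown, and the app/model names are placeholders:

# settings.py
SPAGHETTI_SAUCE = {
    'apps': ['auth', 'myapp'],            # apps whose models are graphed
    'exclude': {'auth': ['permission']},  # per-app models to hide
    'show_proxy': False,
    'show_fields': True,
    'ignore_self_referential': False,
    'show_m2m_field_detail': False,
}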
if model.is_proxy:
label = "(P) %s" % (model.name.title())
else:
label = "%s" % (model.name.title())
line = ""
new_label = []
for w in label.split(" "):
if len(line + w) > 15:
new_label.append(line)
line = w
else:
line += " "
line += w
new_label.append(line)
return "\n".join(new_label)
|
def get_node_label(self, model)
|
Defines how labels are constructed from models.
Default - uses verbose name, line breaks where sensible
| 2.833642
| 2.782774
| 1.018279
|
vers = ["%(major)i.%(minor)i.%(micro)i" % __version_info__]
if release_level and __version_info__['releaselevel'] != 'final':
vers.append('%(releaselevel)s%(serial)i' % __version_info__)
return ''.join(vers)
|
def get_version(release_level=True)
|
Return the formatted version information
| 2.498693
| 2.430746
| 1.027953
|
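A standalone sketch of the version formatting, assuming a __version_info__ dictionary of the shape the format strings above expect:

__version_info__ = {'major': 1, 'minor': 2, 'micro': 0,
                    'releaselevel': 'beta', 'serial': 1}
vers = ["%(major)i.%(minor)i.%(micro)i" % __version_info__]
if __version_info__['releaselevel'] != 'final':
    vers.append('%(releaselevel)s%(serial)i' % __version_info__)
print(''.join(vers))  # 1.2.0beta1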
if ssl_id not in _callbacks:
return ("", "")
else:
res = _callbacks[ssl_id](hint)
return res if isinstance(res, tuple) else (res, "")
|
def _python_psk_client_callback(ssl_id, hint)
|
Called by _sslpsk.c to return the (psk, identity) tuple for the socket with
the specified ssl id.
| 3.690169
| 3.62961
| 1.016685
|
pass
if isinstance(sock._sslobj, _ssl._SSLSocket):
return sock._sslobj
else:
return sock._sslobj._sslobj
|
def _sslobj(sock)
|
Returns the underlying PySSLSocket object with which the C extension
functions interface.
| 3.882879
| 3.737118
| 1.039004
|
if termcolor is not None:
val = termcolor.colored(val, color)
elif colorama is not None:
val = TERMCOLOR2COLORAMA[color] + val + colorama.Style.RESET_ALL
return val
|
def _colorize(val, color)
|
Colorize a string using termcolor or colorama, if either of them is
available.
| 3.899948
| 3.017948
| 1.292252
|
try:
# Default time unit is a second, we should convert it to milliseconds.
return int(value) * 1000
except ValueError:
# Try to parse if we are unlucky to cast value into int.
m = self.time_format.match(value)
if not m:
raise ValueError("Could not parse time represented by '{t}'".format(t=value))
time = int(m.group('time'))
if m.group('units') != 'ms':
time *= 1000
return time
|
def _parse_time(self, value)
|
Parse a string time representation to get the number of milliseconds.
Raises ``ValueError`` for an invalid format.
| 4.384387
| 4.187819
| 1.046938
|
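A self-contained sketch of the parsing logic, with an assumed time_format regex (the real pattern is defined elsewhere in the plugin):

import re

time_format = re.compile(r'^(?P<time>\d+)(?P<units>m?s)?$')  # assumed pattern

def parse_time(value):
    try:
        return int(value) * 1000  # a plain number is treated as seconds
    except ValueError:
        m = time_format.match(value)
        if not m:
            raise ValueError("Could not parse time represented by '{t}'".format(t=value))
        time = int(m.group('time'))
        if m.group('units') != 'ms':
            time *= 1000
        return time

print(parse_time('2'), parse_time('2s'), parse_time('150ms'))  # 2000 2000 150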
super(TimerPlugin, self).configure(options, config)
self.config = config
if self.enabled:
self.timer_top_n = int(options.timer_top_n)
self.timer_ok = self._parse_time(options.timer_ok)
self.timer_warning = self._parse_time(options.timer_warning)
self.timer_filter = self._parse_filter(options.timer_filter)
self.timer_fail = options.timer_fail
self.timer_no_color = True
self.json_file = options.json_file
# Windows + nosetests does not support colors (even with colorama).
if not IS_NT:
self.timer_no_color = options.timer_no_color
# determine if multiprocessing plugin enabled
self.multiprocessing_enabled = bool(getattr(options, 'multiprocess_workers', False))
|
def configure(self, options, config)
|
Configures the test timer plugin.
| 4.071554
| 3.804677
| 1.070144
|
if not self.enabled:
return
# if multiprocessing plugin enabled - get items from results queue
if self.multiprocessing_enabled:
for i in range(_results_queue.qsize()):
try:
k, v, s = _results_queue.get(False)
self._timed_tests[k] = {
'time': v,
'status': s,
}
except Queue.Empty:
pass
d = sorted(self._timed_tests.items(), key=lambda item: item[1]['time'], reverse=True)
if self.json_file:
dict_type = OrderedDict if self.timer_top_n else dict
with open(self.json_file, 'w') as f:
json.dump({'tests': dict_type((k, v) for k, v in d)}, f)
total_time = sum([vv['time'] for kk, vv in d])
for i, (test, time_and_status) in enumerate(d):
time_taken = time_and_status['time']
status = time_and_status['status']
if i < self.timer_top_n or self.timer_top_n == -1:
color = self._get_result_color(time_taken)
percent = 0 if total_time == 0 else time_taken / total_time * 100
line = self._format_report_line(
test=test,
time_taken=time_taken,
color=color,
status=status,
percent=percent,
)
_filter = self._COLOR_TO_FILTER.get(color)
if self.timer_filter is None or _filter is None or _filter in self.timer_filter:
stream.writeln(line)
|
def report(self, stream)
|
Report the test times.
| 3.003199
| 2.90174
| 1.034965
|