sentence1 stringlengths 52 3.87M | sentence2 stringlengths 1 47.2k | label stringclasses 1 value |
|---|---|---|
def _expand_endpoint_name(endpoint_name, flags):
"""
Populate any ``{endpoint_name}`` tags in the flag names for the given
handler, based on the handlers module / file name.
"""
return tuple(flag.format(endpoint_name=endpoint_name) for flag in flags) | Populate any ``{endpoint_name}`` tags in the flag names for the given
handler, based on the handlers module / file name. | entailment |
def _is_endpoint_method(handler):
    """Heuristically decide whether ``handler`` is an Endpoint method.

    We can't directly detect whether a handler is an Endpoint method,
    because at the time of decoration the class doesn't actually exist yet,
    so it's impossible to get a reference to it.  Instead we use the
    heuristic of the handler taking only a single ``self`` parameter while
    an ``Endpoint`` subclass is visible in the handler's module globals.
    """
    param_names = list(signature(handler).parameters)
    takes_only_self = param_names == ['self']
    endpoint_in_scope = any(
        isclass(obj) and issubclass(obj, Endpoint)
        for obj in handler.__globals__.values()
    )
    return takes_only_self and endpoint_in_scope
def any_hook(*hook_patterns):
    """Report whether the currently executing hook matches one of the
    given patterns.

    Each pattern can match one or more hooks, using the following special
    syntax:

    * ``db-relation-{joined,changed}`` matches multiple hooks (here,
      ``db-relation-joined`` and ``db-relation-changed``).
    * ``{provides:mysql}-relation-joined`` matches a relation hook by role
      and interface instead of relation name.  The role must be one of
      ``provides``, ``requires``, or ``peer``.
    * Both forms can be combined:
      ``{provides:mysql}-relation-{joined,changed}``.
    """
    current_hook = hookenv.hook_name()
    # expand {role:interface} references into concrete relation names
    role_iface_pat = re.compile(r'{([^:}]+):([^}]+)}')
    hook_patterns = _expand_replacements(
        role_iface_pat, hookenv.role_and_interface_to_relations, hook_patterns)
    # expand {A,B,C,...} alternation lists
    alternation_pat = re.compile(r'{((?:[^:,}]+,?)+)}')
    hook_patterns = _expand_replacements(
        alternation_pat, lambda v: v.split(','), hook_patterns)
    return current_hook in hook_patterns
def any_file_changed(filenames, hash_type='md5'):
    """Check whether any of the given files changed since the last call.

    :param list filenames: Names of files to check.  Callables returning a
        filename are also accepted.
    :param str hash_type: Hash algorithm used to fingerprint the files.
    :return: True if at least one file's hash differs from the stored one.
    """
    changed = False
    for entry in filenames:
        filename = str(entry() if callable(entry) else entry)
        storage_key = 'reactive.files_changed.%s' % filename
        old_hash = unitdata.kv().get(storage_key)
        new_hash = host.file_hash(filename, hash_type=hash_type)
        if old_hash != new_hash:
            # record the new hash but keep scanning so every file's
            # stored hash gets refreshed
            unitdata.kv().set(storage_key, new_hash)
            changed = True
    return changed
def load_hook_files(pathname):
    """Load hook files matching a glob pattern (or a single file path),
    sorted by filename, and register every callable they define that
    carries a ``dredd_hooks`` attribute.

    :param str pathname: Glob pattern or single file path of hook files.
    """
    global hooks
    # recursive globbing ("**") is only supported on Python >= 3.5.
    # BUGFIX: compare version_info as a tuple; the old check
    # (major > 2 and minor > 4) would misbehave for e.g. a 4.0 release.
    if sys.version_info >= (3, 5):
        fsglob = sorted(glob.iglob(pathname, recursive=True))
    else:
        fsglob = sorted(glob.iglob(pathname))
    for path in fsglob:
        real_path = os.path.realpath(path)
        # Append the hook file's directory to sys.path so submodules can be
        # loaded too.
        if os.path.dirname(real_path) not in sys.path:
            sys.path.append(os.path.dirname(real_path))
        module = imp.load_source(os.path.basename(path), real_path)
        for attr_name in dir(module):
            obj = getattr(module, attr_name)
            if hasattr(obj, 'dredd_hooks') and callable(obj):
                # renamed loop variable so it no longer shadows attr_name
                for hook, hook_name in getattr(obj, 'dredd_hooks'):
                    if hook == BEFORE_ALL:
                        hooks._before_all.append(obj)
                    if hook == AFTER_ALL:
                        hooks._after_all.append(obj)
                    if hook == BEFORE_EACH:
                        hooks._before_each.append(obj)
                    if hook == AFTER_EACH:
                        hooks._after_each.append(obj)
                    if hook == BEFORE_EACH_VALIDATION:
                        hooks._before_each_validation.append(obj)
                    if hook == BEFORE_VALIDATION:
                        add_named_hook(hooks._before_validation, obj, hook_name)
                    if hook == BEFORE:
                        add_named_hook(hooks._before, obj, hook_name)
                    if hook == AFTER:
                        add_named_hook(hooks._after, obj, hook_name)
def main(relation_name=None):
    """Main entry point for the reactive framework.

    Calls :func:`~bus.discover` to find and load all reactive handlers
    (e.g., :func:`@when <decorators.when>` decorated blocks), then
    :func:`~bus.dispatch` to trigger handlers until the queue settles out.
    Finally, :meth:`unitdata.kv().flush
    <charmhelpers.core.unitdata.Storage.flush>` is called to persist the
    flags and other data.

    :param str relation_name: Optional name of the relation being handled.
    """
    hook_name = hookenv.hook_name()
    restricted_mode = hook_name in ('meter-status-changed', 'collect-metrics')
    hookenv.log('Reactive main running for hook %s' % hookenv.hook_name(),
                level=hookenv.INFO)
    if restricted_mode:
        hookenv.log('Restricted mode.', level=hookenv.INFO)
    # work-around for https://bugs.launchpad.net/juju-core/+bug/1503039
    # ensure that external handlers can tell what hook they're running in
    os.environ.setdefault('JUJU_HOOK_NAME', hook_name)
    try:
        bus.discover()
        if not restricted_mode:  # limit what gets run in restricted mode
            hookenv._run_atstart()
        bus.dispatch(restricted=restricted_mode)
    except SystemExit as exc:
        # a zero / None exit code is treated as a normal completion
        if exc.code not in (None, 0):
            raise
    except Exception:
        hookenv.log('Hook error:\n{}'.format(traceback.format_exc()),
                    level=hookenv.ERROR)
        raise
    if not restricted_mode:  # limit what gets run in restricted mode
        hookenv._run_atexit()
    unitdata._KV.flush()
def from_flag(cls, flag):
    """Return an Endpoint subclass instance based on the given flag.

    Which instance is returned depends on the endpoint name embedded in
    the flag.  Flags should be of the form ``endpoint.{name}.extra...``,
    though for legacy purposes the ``endpoint.`` prefix can be omitted.
    The ``{name}`` portion is passed to
    :meth:`~charms.reactive.endpoints.Endpoint.from_name`.

    If the flag is not set, an appropriate Endpoint subclass cannot be
    found, or the flag name can't be parsed, ``None`` is returned.
    """
    if not is_flag_set(flag) or '.' not in flag:
        return None
    first, second = flag.split('.')[:2]
    if first == 'endpoint':
        return cls.from_name(second)
    # some older handlers might not use the 'endpoint' prefix
    return cls.from_name(first)
def _startup(cls):
    """Create Endpoint instances and manage automatic flags."""
    for endpoint_name in sorted(hookenv.relation_types()):
        # populate context based on attached relations
        relf = relation_factory(endpoint_name)
        if not relf or not issubclass(relf, cls):
            continue
        # ensure that relation IDs have the endpoint name prefix, in case
        # juju decides to drop it at some point
        rids = [rid if ':' in rid else '{}:{}'.format(endpoint_name, rid)
                for rid in sorted(hookenv.relation_ids(endpoint_name))]
        endpoint = relf(endpoint_name, rids)
        cls._endpoints[endpoint_name] = endpoint
        endpoint.register_triggers()
        endpoint._manage_departed()
        endpoint._manage_flags()
        for relation in endpoint.relations:
            hookenv.atexit(relation._flush_data)
def expand_name(self, flag):
    """Complete a flag for this endpoint by expanding the endpoint name.

    If the flag does not already contain ``{endpoint_name}``, it is first
    prefixed with ``endpoint.{endpoint_name}.``.  Then every occurrence of
    ``{endpoint_name}`` is replaced with ``self.endpoint_name``.
    """
    placeholder = '{endpoint_name}'
    if placeholder not in flag:
        flag = 'endpoint.' + placeholder + '.' + flag
    # use replace rather than format to prevent any other braces or braced
    # strings from being touched
    return flag.replace(placeholder, self.endpoint_name)
def _manage_flags(self):
    """Manage automatic relation flags."""
    joined_flag = self.expand_name('joined')
    departed_flag = self.expand_name('departed')
    already_joined = is_flag_set(joined_flag)
    hook_name = hookenv.hook_name()
    rel_hook = hook_name.startswith(self.endpoint_name + '-relation-')
    departed_hook = rel_hook and hook_name.endswith('-departed')
    toggle_flag(joined_flag, self.is_joined)
    if departed_hook:
        set_flag(departed_flag)
    elif self.is_joined:
        clear_flag(departed_flag)
    if already_joined and not rel_hook:
        # skip checking relation data outside hooks for this relation
        # to save on API calls to the controller (unless we didn't have
        # the joined flag before, since then we might be migrating to
        # Endpoints)
        return
    for unit in self.all_units:
        for key, value in unit.received.items():
            data_key = 'endpoint.{}.{}.{}.{}'.format(
                self.endpoint_name,
                unit.relation.relation_id,
                unit.unit_name,
                key)
            if data_changed(data_key, value):
                set_flag(self.expand_name('changed'))
                set_flag(self.expand_name('changed.{}'.format(key)))
def all_joined_units(self):
    """A list view of all the units of all relations attached to this
    :class:`~charms.reactive.endpoints.Endpoint`.

    This is actually a
    :class:`~charms.reactive.endpoints.CombinedUnitsView`, so the units
    will be in order by relation ID and then unit name, and you can access
    a merged view of all the units' data as a single mapping.  Be very
    careful when using the merged data collections, and consider what
    happens when the endpoint has multiple relations with multiple remote
    units each; it is probably better to iterate over each unit and handle
    its data individually.  See
    :class:`~charms.reactive.endpoints.CombinedUnitsView` for an
    explanation of how the merged data collections work.

    Note that, because a given application might be related multiple times
    on a given endpoint, units may show up in this collection more than
    once.
    """
    # lazily built and cached on first access
    if self._all_joined_units is None:
        every_unit = chain.from_iterable(rel.units for rel in self.relations)
        self._all_joined_units = CombinedUnitsView(every_unit)
    return self._all_joined_units
def all_departed_units(self):
    """Collection of all units that were previously part of any relation
    on this endpoint but have since departed.

    This collection is persistent and mutable.  The departed units will be
    kept until they are explicitly removed, to allow for reasonable
    cleanup of units that have left.

    Example: you need to run a command each time a unit departs the
    relation.

    .. code-block:: python

        @when('endpoint.{endpoint_name}.departed')
        def handle_departed_unit(self):
            for name, unit in self.all_departed_units.items():
                # run the command to remove `unit` from the cluster
                # ..
            self.all_departed_units.clear()
            clear_flag(self.expand_name('departed'))

    Once a unit is departed, it will no longer show up in
    :attr:`all_joined_units`.  Note that units are considered departed as
    soon as the departed hook is entered, which differs slightly from how
    the Juju primitives behave (departing units are still returned from
    ``related-units`` until after the departed hook is complete).

    This collection is a :class:`KeyList`, so it can be used as a mapping
    to look up units by their unit name, or iterated or accessed by index.
    """
    # lazily loaded from persistent storage and cached on first access
    if self._all_departed_units is None:
        storage_key = 'reactive.endpoints.departed.{}'.format(
            self.endpoint_name)
        self._all_departed_units = CachedKeyList.load(
            storage_key, RelatedUnit._deserialize, 'unit_name')
    return self._all_departed_units
def application_name(self):
    """The name of the remote application for this relation, or ``None``.

    This is equivalent to::

        relation.units[0].unit_name.split('/')[0]
    """
    if self._application_name is None and self.units:
        # unit names look like "app/0"; the application is the part
        # before the slash
        first_unit = self.units[0]
        self._application_name = first_unit.unit_name.partition('/')[0]
    return self._application_name
def joined_units(self):
    """A list view of all the units joined on this relation.

    This is actually a
    :class:`~charms.reactive.endpoints.CombinedUnitsView`, so the units
    will be in order by unit name, and you can access a merged view of all
    of the units' data with ``self.units.received``.  Be very careful when
    using the merged data collections, and consider what happens when
    there are multiple remote units; it is probably better to iterate over
    each unit and handle its data individually.  See
    :class:`~charms.reactive.endpoints.CombinedUnitsView` for an
    explanation of how the merged data collections work.

    The view can be iterated and indexed as a list, or you can look up
    units by their unit name.  For example::

        by_index = relation.units[0]
        by_name = relation.units['unit/0']
        assert by_index is by_name
        assert all(unit is relation.units[unit.unit_name]
                   for unit in relation.units)
        print(', '.join(relation.units.keys()))
    """
    # lazily built and cached on first access
    if self._units is None:
        unit_names = sorted(hookenv.related_units(self.relation_id))
        self._units = CombinedUnitsView(
            [RelatedUnit(self, unit_name) for unit_name in unit_names])
    return self._units
def to_publish(self):
    """Relation data that the local unit publishes, visible to all related
    units.  Use this to communicate with related units.  It is a writeable
    :class:`~charms.reactive.endpoints.JSONUnitDataView`.

    All values stored in this collection are automatically JSON encoded
    when published, so they need to be JSON serializable!  Mappings stored
    here are encoded with sorted keys, so the encoded representation only
    changes if the actual data changes.

    Changes to this data are published at the end of a successful hook.
    The data is reset when a hook fails.
    """
    # lazily built and cached on first access
    if self._data is None:
        local_data = hookenv.relation_get(unit=hookenv.local_unit(),
                                          rid=self.relation_id)
        self._data = JSONUnitDataView(local_data, writeable=True)
    return self._data
def _flush_data(self):
    """Publish this relation's local unit data if it has been modified.

    This should be called automatically.
    """
    if not (self._data and self._data.modified):
        return
    hookenv.relation_set(self.relation_id, dict(self.to_publish.data))
def received(self):
    """A :class:`~charms.reactive.endpoints.JSONUnitDataView` of the data
    received from this remote unit over the relation, with values being
    automatically decoded as JSON.
    """
    # lazily built and cached on first access
    if self._data is None:
        raw = hookenv.relation_get(unit=self.unit_name,
                                   rid=self.relation.relation_id)
        self._data = JSONUnitDataView(raw)
    return self._data
def load(cls, cache_key, deserializer, key_attr):
    """Load the persisted cache and return a new instance of this class.

    :param str cache_key: Key under which the items were persisted.
    :param deserializer: Callable turning a stored item back into an object.
    :param str key_attr: Attribute name used to key the list.
    """
    stored_items = unitdata.kv().get(cache_key) or []
    restored = [deserializer(item) for item in stored_items]
    return cls(cache_key, restored, key_attr)
def received(self):
    """Combined :class:`~charms.reactive.endpoints.JSONUnitDataView` of
    the data of all units in this list, with automatic JSON decoding.
    """
    if not hasattr(self, '_data'):
        # NB: iterate in reverse so the lowest numbered unit's values
        # take precedence
        merged = {}
        for unit in reversed(self):
            merged.update(unit.received_raw)
        self._data = JSONUnitDataView(merged)
    return self._data
def AuthorizingClient(domain, auth, user_agent=None):
    """Create a Podio client using an auth object.

    :param domain: API domain the transport should talk to.
    :param auth: Auth object used to build the request headers.
    :param user_agent: Optional User-Agent string for the headers.
    """
    headers = build_headers(auth, user_agent)
    return client.Client(transport.HttpTransport(domain, headers))
def from_image(cls, image):
    """Create a PrintableImage from a PIL Image.

    The image is resized to the 512-pixel paper width if necessary,
    converted to 1-bit, padded with white rows to a multiple of 24, and
    encoded as ESC * double-density stripes.

    :param image: a PIL Image
    :return: a new PrintableImage
    """
    (w, h) = image.size
    # Thermal paper is 512 pixels wide
    if w > 512:
        ratio = 512. / w
        h = int(h * ratio)
        image = image.resize((512, h), Image.ANTIALIAS)
        # BUGFIX: keep the width in sync with the resized image;
        # otherwise the reshape below fails for images wider than 512.
        w = 512
    if image.mode != '1':
        image = image.convert('1')
    pixels = np.array(list(image.getdata())).reshape(h, w)
    # Add white pixels so that the image height fits into 24-row stripes.
    # BUGFIX: pure integer arithmetic; math.ceil(h / 24) floor-divides on
    # Python 2 and yields a float stripe count on Python 3.
    extra_rows = (24 - h % 24) % 24
    if extra_rows:
        extra_pixels = np.ones((extra_rows, w), dtype=bool)
        pixels = np.vstack((pixels, extra_pixels))
    h += extra_rows
    nb_stripes = h // 24
    pixels = pixels.reshape(nb_stripes, 24, w).swapaxes(1, 2).reshape(-1, 8)
    nh, nl = divmod(w, 256)
    data = []
    pixels = np.invert(np.packbits(pixels))
    stripes = np.split(pixels, nb_stripes)
    for stripe in stripes:
        data.extend([
            ESC,
            42,  # *
            33,  # double density mode
            nl,
            nh])
        data.extend(stripe)
        data.extend([
            27,  # ESC
            74,  # J
            48])
    # account for double density mode
    height = h * 2
    return cls(data, height)
def append(self, other):
    """Append another PrintableImage's data to this one, in place.

    :param other: another PrintableImage
    :return: this instance, now containing data from both images
    """
    self.data.extend(other.data)
    self.height += other.height
    return self
def write_this(func):
    """Decorator that sends the decorated method's returned bytes to the
    wire via ``self.write_bytes``.
    """
    @wraps(func)
    def wrapper(self, *args, **kwargs):
        self.write_bytes(func(self, *args, **kwargs))
    return wrapper
def print_images(self, *printable_images):
    """Print several images in one shot.

    Useful when the client code does not want the printer to pause
    between images.
    """
    combined = reduce(lambda acc, img: acc.append(img),
                      list(printable_images))
    self.print_image(combined)
def encode_and_quote(data):
    """If ``data`` is unicode, return
    ``urllib.quote_plus(data.encode("utf-8"))``; otherwise return
    ``urllib.quote_plus(data)``.  ``None`` passes through unchanged.
    """
    if data is None:
        return None
    encoded = data.encode("utf-8") if isinstance(data, unicode) else data
    return urllib.quote_plus(encoded)
def _strify(s):
    """Return a UTF-8 ``str`` version of *s*.

    Unicode strings are encoded to UTF-8; other values go through
    ``str()``; ``None`` is returned unchanged.
    """
    if s is None:
        return None
    return s.encode("utf-8") if isinstance(s, unicode) else str(s)
def encode_file_header(boundary, paramname, filesize, filename=None,
                       filetype=None):
    """Return the leading data for a multipart/form-data field that
    contains file data.

    :param boundary: Boundary string used throughout a single request to
        separate variables.
    :param paramname: Name of the variable in this request.
    :param filesize: Size of the file data.
    :param filename: Optional filename to give this field; only useful to
        the server for determining the original filename.
    :param filetype: Optional MIME type of the file.

    The actual file data should be sent after this header has been sent.
    """
    param = MultipartParam(paramname, filesize=filesize, filename=filename,
                           filetype=filetype)
    return param.encode_hdr(boundary)
def get_body_size(params, boundary):
    """Return the number of bytes that the multipart/form-data encoding of
    ``params`` will be.
    """
    params_size = sum(p.get_size(boundary)
                      for p in MultipartParam.from_params(params))
    # plus the closing "--boundary--\r\n" terminator
    return params_size + len(boundary) + 6
def get_headers(params, boundary):
    """Return a dictionary with Content-Type and Content-Length headers
    for the multipart/form-data encoding of ``params``.
    """
    quoted = urllib.quote_plus(boundary)
    return {
        'Content-Type': "multipart/form-data; boundary=%s" % quoted,
        'Content-Length': str(get_body_size(params, quoted)),
    }
def multipart_encode(params, boundary=None, cb=None):
    """Encode ``params`` as multipart/form-data.

    ``params`` should be a sequence of (name, value) pairs or
    MultipartParam objects, or a mapping of names to values.

    Values are either string parameter values, or file-like objects to use
    as the parameter value.  The file-like objects must support ``.read()``
    and either ``.fileno()`` or both ``.seek()`` and ``.tell()``.

    If ``boundary`` is set, it is used as the MIME boundary; otherwise a
    randomly generated boundary is used.  In either case, if the boundary
    string appears in the parameter values a ValueError will be raised.

    If ``cb`` is set, it should be a callback which will get called as
    blocks of data are encoded.  It will be called with
    ``(param, current, total)``, indicating the current parameter being
    encoded, the current amount encoded, and the total amount to encode.

    Returns a tuple of ``datagen, headers``, where ``datagen`` is a
    generator that will yield blocks of data that make up the encoded
    parameters, and ``headers`` is a dictionary with the associated
    Content-Type and Content-Length headers.

    Examples:

    >>> datagen, headers = multipart_encode( [("key", "value1"), ("key", "value2")] )
    >>> s = "".join(datagen)
    >>> assert "value2" in s and "value1" in s

    >>> p = MultipartParam("key", "value2")
    >>> datagen, headers = multipart_encode( [("key", "value1"), p] )
    >>> s = "".join(datagen)
    >>> assert "value2" in s and "value1" in s

    >>> datagen, headers = multipart_encode( {"key": "value1"} )
    >>> s = "".join(datagen)
    >>> assert "value2" not in s and "value1" in s
    """
    if boundary is None:
        boundary = gen_boundary()
    else:
        boundary = urllib.quote_plus(boundary)
    headers = get_headers(params, boundary)
    yielder = MultipartYielder(MultipartParam.from_params(params),
                               boundary, cb)
    return yielder, headers
def from_file(cls, paramname, filename):
    """Return a new MultipartParam object constructed from the local file
    at ``filename``.

    ``filesize`` is determined by ``os.path.getsize(filename)``,
    ``filetype`` by ``mimetypes.guess_type(filename)[0]``, and
    ``filename`` is reduced to ``os.path.basename(filename)``.
    """
    guessed_type = mimetypes.guess_type(filename)[0]
    return cls(paramname,
               filename=os.path.basename(filename),
               filetype=guessed_type,
               filesize=os.path.getsize(filename),
               fileobj=open(filename, "rb"))
def from_params(cls, params):
    """Return a list of MultipartParam objects built from a sequence of
    (name, value) pairs, MultipartParam instances, or a mapping of names
    to values.

    The values may be strings, file-like objects, or MultipartParam
    objects.  MultipartParam object names must match the given names in
    the name/value pairs or mapping, if applicable.
    """
    if hasattr(params, 'items'):
        params = params.items()
    result = []
    for item in params:
        if isinstance(item, cls):
            result.append(item)
            continue
        name, value = item
        if isinstance(value, cls):
            assert value.name == name
            result.append(value)
        elif hasattr(value, 'read'):
            # Looks like a file object
            filename = getattr(value, 'name', None)
            if filename is not None:
                filetype = mimetypes.guess_type(filename)[0]
            else:
                filetype = None
            result.append(cls(name=name, filename=filename,
                              filetype=filetype, fileobj=value))
        else:
            result.append(cls(name, value))
    return result
def encode_hdr(self, boundary):
    """Return the MIME header block of this parameter's encoding."""
    boundary = encode_and_quote(boundary)
    if self.filename:
        disposition = 'form-data; name="%s"; filename="%s"' % (self.name,
                                                               self.filename)
    else:
        disposition = 'form-data; name="%s"' % self.name
    content_type = self.filetype if self.filetype else "text/plain; charset=utf-8"
    # The two trailing empty entries yield the blank line (CRLF CRLF)
    # that separates headers from the body.
    lines = [
        "--%s" % boundary,
        "Content-Disposition: %s" % disposition,
        "Content-Type: %s" % content_type,
        "",
        "",
    ]
    return "\r\n".join(lines)
def encode(self, boundary):
    """Return the full encoding (header + body + CRLF) of this parameter."""
    value = self.fileobj.read() if self.value is None else self.value
    # Refuse to emit data that a parser would mistake for a part delimiter.
    if re.search("^--%s$" % re.escape(boundary), value, re.M):
        raise ValueError("boundary found in encoded string")
    return "%s%s\r\n" % (self.encode_hdr(boundary), value)
def iter_encode(self, boundary, blocksize=4096):
    """Yield the multipart encoding of this parameter chunk by chunk.

    If ``self.fileobj`` is set, blocks of ``blocksize`` bytes are read
    from it and yielded; otherwise the in-memory value is emitted as a
    single chunk.  The progress callback ``self.cb`` (if any) is invoked
    after every yielded chunk with (param, bytes_so_far, total_bytes).
    """
    total = self.get_size(boundary)
    current = 0
    if self.value is not None:
        # In-memory value: one chunk containing header + body + CRLF.
        block = self.encode(boundary)
        current += len(block)
        yield block
        if self.cb:
            self.cb(self, current, total)
    else:
        # File-backed value: emit the header, then stream the file.
        block = self.encode_hdr(boundary)
        current += len(block)
        yield block
        if self.cb:
            self.cb(self, current, total)
        last_block = ""
        encoded_boundary = "--%s" % encode_and_quote(boundary)
        boundary_exp = re.compile("^%s$" % re.escape(encoded_boundary),
                                  re.M)
        while True:
            block = self.fileobj.read(blocksize)
            if not block:
                # End of file: terminate the part with a bare CRLF.
                current += 2
                yield "\r\n"
                if self.cb:
                    self.cb(self, current, total)
                break
            last_block += block
            # Keep a sliding window spanning chunk edges so a boundary
            # split across two reads is still detected.
            if boundary_exp.search(last_block):
                raise ValueError("boundary found in file data")
            last_block = last_block[-len(encoded_boundary) - 2:]
            current += len(block)
            yield block
            if self.cb:
                self.cb(self, current, total)
If self.fileobj is set, then blocks of ``blocksize`` bytes are read and
yielded. | entailment |
def get_size(self, boundary):
    """Return the encoded size in bytes of this param for ``boundary``."""
    if self.filesize is not None:
        body_size = self.filesize
    else:
        body_size = len(self.value)
    # +2 accounts for the trailing CRLF after the body.
    return len(self.encode_hdr(boundary)) + 2 + body_size
with the given boundary. | entailment |
def next(self):
    """Return the next chunk of the multipart/form-data representation
    of the parameters.

    NOTE(review): this implements the Python 2 iterator protocol
    (``.next()``); under Python 3 a ``__next__`` alias would be needed.
    """
    if self.param_iter is not None:
        try:
            block = self.param_iter.next()
            self.current += len(block)
            if self.cb:
                self.cb(self.p, self.current, self.total)
            return block
        except StopIteration:
            # Current parameter exhausted; fall through to advance.
            self.p = None
            self.param_iter = None
    if self.i is None:
        # Terminal boundary already emitted: iteration is finished.
        raise StopIteration
    elif self.i >= len(self.params):
        # All parameters done: emit the closing boundary exactly once.
        self.param_iter = None
        self.p = None
        self.i = None
        block = "--%s--\r\n" % self.boundary
        self.current += len(block)
        if self.cb:
            self.cb(self.p, self.current, self.total)
        return block
    # Start encoding the next parameter and recurse for its first chunk.
    self.p = self.params[self.i]
    self.param_iter = self.p.iter_encode(self.boundary)
    self.i += 1
    return self.next()
of parameters | entailment |
def get_options(silent=False, hook=True):
    """
    Generate a query string with the appropriate options.
    :param silent: If set to true, the object will not be bumped up in the
        stream and notifications will not be generated.
    :type silent: bool
    :param hook: True if hooks should be executed for the change.
    :type hook: bool
    :return: The generated query string ('' when no option applies)
    :rtype: str
    """
    options = {}
    if silent:
        options['silent'] = silent
    if not hook:
        options['hook'] = hook
    if not options:
        return ''
    # Lower-case the encoded string so booleans render as 'true'/'false'.
    return '?' + urlencode(options).lower()
:param silent: If set to true, the object will not be bumped up in the stream and
notifications will not be generated.
:type silent: bool
:param hook: True if hooks should be executed for the change, false otherwise.
:type hook: bool
:return: The generated query string
:rtype: str | entailment |
def find(self, item_id, basic=False, **kwargs):
    """
    Get item
    :param item_id: Item ID
    :param basic: when true, fetch the reduced "basic" representation
    :type item_id: int
    :return: Item info
    :rtype: dict
    """
    base_url = '/item/%d' % item_id
    if basic:
        return self.transport.GET(url=base_url + '/basic')
    return self.transport.GET(kwargs, url=base_url)
:param item_id: Item ID
:param basic: ?
:type item_id: int
:return: Item info
:rtype: dict | entailment |
def update(self, item_id, attributes, silent=False, hook=True):
    """Update the item using the supplied attributes.

    If 'silent' is true, Podio will send no notifications to subscribed
    users and not post updates to the stream.
    Important: webhooks will still be called.
    """
    if not isinstance(attributes, dict):
        raise TypeError('Must be of type dict')
    url = '/item/%d%s' % (item_id, self.get_options(silent=silent, hook=hook))
    return self.transport.PUT(body=json.dumps(attributes),
                              type='application/json',
                              url=url)
no notifications to subscribed users and not post updates to the stream.
Important: webhooks will still be called. | entailment |
def create(self, attributes, silent=False, hook=True):
    """
    https://developers.podio.com/doc/tasks/create-task-22419
    Creates the task using the supplied attributes. If 'silent' is true,
    Podio will send no notifications to subscribed users and not post
    updates to the stream. If 'hook' is false webhooks will not be called.
    """
    url = '/task/%s' % self.get_options(silent=silent, hook=hook)
    return self.transport.POST(url=url,
                               body=json.dumps(attributes),
                               type='application/json')
Creates the task using the supplied attributes. If 'silent' is true,
Podio will send no notifications to subscribed users and not post
updates to the stream. If 'hook' is false webhooks will not be called. | entailment |
def create_for(self, ref_type, ref_id, attributes, silent=False, hook=True):
    """
    https://developers.podio.com/doc/tasks/create-task-with-reference-22420
    If 'silent' is true, Podio will send no notifications and not post
    updates to the stream. If 'hook' is false webhooks will not be called.
    """
    url = '/task/%s/%s/%s' % (ref_type, ref_id,
                              self.get_options(silent=silent, hook=hook))
    return self.transport.POST(body=json.dumps(attributes),
                               type='application/json',
                               url=url)
If 'silent' is true, Podio will send no notifications and not post
updates to the stream. If 'hook' is false webhooks will not be called. | entailment |
def find_by_url(self, space_url, id_only=True):
    """Look up a space by its URL.

    :param space_url: URL of the Space
    :param id_only: when true, return only the space ID
    :return: the space ID (or the full response dict when ``id_only``
        is false)
    """
    query = urlencode({'url': space_url})
    resp = self.transport.GET(url='/space/url?%s' % query)
    return resp['space_id'] if id_only else resp
:param space_url: URL of the Space
:param id_only: ?
:return: space_id: Space url
:rtype: str | entailment |
def create(self, attributes):
    """Create a new space.

    :param attributes: space fields per the Podio API
    :type attributes: dict
    :return: details of the newly created space
    :rtype: dict
    """
    if not isinstance(attributes, dict):
        raise TypeError('Dictionary of values expected')
    return self.transport.POST(url='/space/',
                               body=json.dumps(attributes),
                               type='application/json')
:param attributes: Refer to API. Pass in argument as dictionary
:type attributes: dict
:return: Details of newly created space
:rtype: dict | entailment |
def find_by_ref(self, ref_type, ref_id):
    """Return an "item", "status" or "task" object as a stream object.

    Useful when a new status has been posted and should be rendered
    directly in the stream without reloading the entire stream.
    For details, see:
    https://developers.podio.com/doc/stream/get-stream-object-80054
    """
    url = '/stream/%s/%s' % (ref_type, ref_id)
    return self.transport.GET(url=url)
stream object. This is useful when a new status has been
posted and should be rendered directly in the stream without
reloading the entire stream.
For details, see: https://developers.podio.com/doc/stream/get-stream-object-80054 | entailment |
def find_raw(self, file_id):
    """Return the raw file contents as a string.

    :param file_id: ID of the file to download
    :return: raw response body, suitable for writing to a file object
    """
    # PEP 8 (E731): use a def instead of assigning a lambda to a name.
    def raw_handler(resp, data):
        # Bypass deserialization and hand back the raw payload.
        return data
    return self.transport.GET(url='/file/%d/raw' % file_id, handler=raw_handler)
def create(self, filename, filedata):
    """Upload a new file from raw data."""
    body = {'filename': filename, 'source': filedata}
    return self.transport.POST(url='/file/v2/', body=body,
                               type='multipart/form-data')
def get(self, app_id, view_specifier):
    """Retrieve the definition of a view, given an app id and a view
    specifier.

    :param app_id: the app id
    :param view_specifier: one of: the view ID, the view's name, or
        "last" to look up the last view used
    """
    url = '/view/app/{}/{}'.format(app_id, view_specifier)
    return self.transport.GET(url=url)
:param app_id: the app id
:param view_specifier:
Can be one of the following:
1. The view ID
2. The view's name
3. "last" to look up the last view used | entailment |
def get_views(self, app_id, include_standard_views=False):
    """Get all of the views for the specified app.

    :param app_id: the app containing the views
    :param include_standard_views: set to True to include standard views
    """
    # NOTE(review): only the literal True enables standard views — the
    # identity check intentionally rejects truthy non-bool values.
    flag = "true" if include_standard_views is True else "false"
    return self.transport.GET(
        url='/view/app/{}/?include_standard_views={}'.format(app_id, flag))
:param app_id: the app containing the views
:param include_standard_views: defaults to false. Set to true if you wish to include standard views. | entailment |
def update_last_view(self, app_id, attributes):
    """Update the last view for the active user.

    :param app_id: the app id
    :param attributes: the body of the request
    :type attributes: dict
    """
    if not isinstance(attributes, dict):
        raise TypeError('Must be of type dict')
    return self.transport.PUT(url='/view/app/{}/last'.format(app_id),
                              body=json.dumps(attributes),
                              type='application/json')
:param app_id: the app id
:param attributes: the body of the request in dictionary format | entailment |
def update_view(self, view_id, attributes):
    """Update an existing view with the supplied modifications.

    :param view_id: the view's id
    :param attributes: dict of modifications to apply to the view
    """
    if not isinstance(attributes, dict):
        raise TypeError('Must be of type dict')
    return self.transport.PUT(url='/view/{}'.format(view_id),
                              body=json.dumps(attributes),
                              type='application/json')
:param view_id: the view's id
:param attributes: a dictionary containing the modifications to be made to the view
:return: | entailment |
def is_active(self, timeout=2):
    """Return True when the Drill server answers a ``HEAD /`` probe.

    :param timeout: request timeout in seconds
    :type timeout: int
    :rtype: bool
    """
    try:
        result = Result(*self.perform_request(
            'HEAD', '/', params={'request_timeout': timeout}))
    except (ConnectionError, TransportError):
        # An unreachable or erroring server counts as inactive.
        return False
    # Idiom: return the comparison directly instead of if/return True/False.
    return result.response.status_code == 200
:return: boolean | entailment |
def query(self, sql, timeout=10):
    """Submit a SQL query and return its results.

    :param sql: string
    :param timeout: int
    :return: pydrill.client.ResultQuery
    :raises QueryError: when ``sql`` is empty
    """
    if not sql:
        raise QueryError('No query passed to drill.')
    response = self.perform_request(
        method='POST',
        url='/query.json',
        body={"queryType": "SQL", "query": sql},
        params={'request_timeout': timeout},
    )
    return ResultQuery(*response)
:param sql: string
:param timeout: int
:return: pydrill.client.ResultQuery | entailment |
def plan(self, sql, timeout=10):
    """Return the execution plan for ``sql``.

    :param sql: string
    :param timeout: int
    :return: pydrill.client.ResultQuery
    """
    return self.query('explain plan for ' + sql, timeout)
:param timeout: int
:return: pydrill.client.ResultQuery | entailment |
def storage_detail(self, name, timeout=10):
    """Get the definition of the named storage plugin.

    :param name: The assigned name in the storage plugin definition.
    :param timeout: int
    :return: pydrill.client.Result
    """
    return Result(*self.perform_request(
        method='GET',
        url='/storage/{0}.json'.format(name),
        params={'request_timeout': timeout},
    ))
:param name: The assigned name in the storage plugin definition.
:param timeout: int
:return: pydrill.client.Result | entailment |
def storage_enable(self, name, value=True, timeout=10):
    """Enable or disable the named storage plugin.

    :param name: The assigned name in the storage plugin definition.
    :param value: True to enable, False to disable.
    :param timeout: int
    :return: pydrill.client.Result
    """
    flag = 'true' if value else 'false'
    return Result(*self.perform_request(
        method='GET',
        url='/storage/{0}/enable/{1}'.format(name, flag),
        params={'request_timeout': timeout},
    ))
:param name: The assigned name in the storage plugin definition.
:param value: Either True (to enable) or False (to disable).
:param timeout: int
:return: pydrill.client.Result | entailment |
def storage_update(self, name, config, timeout=10):
    """Create or update a storage plugin configuration.

    :param name: The name of the configuration to create or update.
    :param config: Full replacement configuration — it overwrites any
        existing one, so it must include all required attributes and
        definitions.
    :param timeout: int
    :return: pydrill.client.Result
    """
    return Result(*self.perform_request(
        method='POST',
        url='/storage/{0}.json'.format(name),
        body=config,
        params={'request_timeout': timeout},
    ))
:param name: The name of the storage plugin configuration to create or update.
:param config: Overwrites the existing configuration if there is any, and therefore, must include all
required attributes and definitions.
:param timeout: int
:return: pydrill.client.Result | entailment |
def storage_delete(self, name, timeout=10):
    """Delete a storage plugin configuration.

    :param name: The name of the configuration to delete.
    :param timeout: int
    :return: pydrill.client.Result
    """
    return Result(*self.perform_request(
        method='DELETE',
        url='/storage/{0}.json'.format(name),
        params={'request_timeout': timeout},
    ))
:param name: The name of the storage plugin configuration to delete.
:param timeout: int
:return: pydrill.client.Result | entailment |
def profile(self, query_id, timeout=10):
    """Get the profile of the query with the given ``query_id``.

    :param query_id: The UUID that Drill assigns to each query.
    :param timeout: int
    :return: pydrill.client.Result
    """
    return Result(*self.perform_request(
        method='GET',
        url='/profiles/{0}.json'.format(query_id),
        params={'request_timeout': timeout},
    ))
:param query_id: The UUID of the query in standard UUID format that Drill assigns to each query.
:param timeout: int
:return: pydrill.client.Result | entailment |
def profile_cancel(self, query_id, timeout=10):
    """Cancel the query with the given ``query_id``.

    :param query_id: The UUID that Drill assigns to each query.
    :param timeout: int
    :return: pydrill.client.Result
    """
    return Result(*self.perform_request(
        method='GET',
        url='/profiles/cancel/{0}'.format(query_id),
        params={'request_timeout': timeout},
    ))
:param query_id: The UUID of the query in standard UUID format that Drill assigns to each query.
:param timeout: int
:return: pydrill.client.Result | entailment |
def log_request_success(self, method, full_url, path, body, status_code, response, duration):
    """Log a successful API call: INFO summary, DEBUG payloads, and a
    replayable ``curl`` command on the trace logger."""
    if body and not isinstance(body, dict):
        # Bodies travel as UTF-8 bytes; decode for readable log output.
        body = body.decode('utf-8')
    logger.info(
        '%s %s [status:%s request:%.3fs]', method, full_url,
        status_code, duration
    )
    logger.debug('> %s', body)
    logger.debug('< %s', response)
    if tracer.isEnabledFor(logging.INFO):
        if self.url_prefix:
            # Strip the prefix once so the curl line targets the bare path.
            path = path.replace(self.url_prefix, '', 1)
        tracer.info("curl -X%s 'http://localhost:8047%s' -d '%s'", method, path,
                    self._pretty_json(body) if body else '')
    if tracer.isEnabledFor(logging.DEBUG):
        # Prefix each response line with '#' so the trace stays a valid script.
        tracer.debug('#[%s] (%.3fs)\n#%s', status_code, duration,
                     self._pretty_json(response).replace('\n', '\n#') if response else '')
def log_request_fail(self, method, full_url, body, duration, status_code=None, exception=None):
    """
    Log an unsuccessful API call.
    """
    status_display = status_code or 'N/A'
    logger.warning('%s %s [status:%s request:%.3fs]',
                   method, full_url, status_display, duration,
                   exc_info=exception is not None)
    if body and not isinstance(body, dict):
        # Bodies travel as UTF-8 bytes; decode them for the debug log.
        body = body.decode('utf-8')
    logger.debug('> %s', body)
def _raise_error(self, status_code, raw_data):
    """Locate the appropriate exception for ``status_code`` and raise it.

    :param status_code: HTTP status code of the failed request
    :param raw_data: response body; parsed as JSON when possible to
        extract a more precise error message
    :raises TransportError: or a more specific registered subclass
    """
    error_message = raw_data
    additional_info = None
    try:
        additional_info = json.loads(raw_data)
        error_message = additional_info.get('error', error_message)
        if isinstance(error_message, dict) and 'type' in error_message:
            error_message = error_message['type']
    except (ValueError, TypeError, AttributeError):
        # Fix: the original bare `except:` also swallowed
        # KeyboardInterrupt/SystemExit. Only non-JSON bodies (or bodies
        # that parse to a non-mapping) should fall back to the raw text.
        pass
    raise HTTP_EXCEPTIONS.get(status_code, TransportError)(
        status_code, error_message, additional_info)
def perform_request(self, method, url, params=None, body=None):
    """
    Perform the actual request.
    Retrieve a connection, pass all the information to its
    perform_request method and return the data.
    :arg method: HTTP method to use
    :arg url: absolute url (without host) to target
    :arg params: dictionary of query parameters, will be handed over to the
        underlying :class:`~pydrill.Connection` class for serialization
    :arg body: body of the request, will be serialized using serializer and
        passed to the connection
    """
    if body is not None:
        body = self.serializer.dumps(body)
    # some clients or environments don't support sending GET with body
    if method in ('HEAD', 'GET') and self.send_get_body_as != 'GET':
        # send it as post instead
        if self.send_get_body_as == 'POST':
            method = 'POST'
        # or as source parameter
        elif self.send_get_body_as == 'source':
            if params is None:
                params = {}
            params['source'] = body
            body = None
    if body is not None:
        try:
            body = body.encode('utf-8')
        except (UnicodeDecodeError, AttributeError):
            # bytes/str - no need to re-encode
            pass
    ignore = ()
    timeout = None
    if params:
        # Transport-level options, not query parameters: pop them so
        # they are not serialized into the URL.
        timeout = params.pop('request_timeout', None)
        ignore = params.pop('ignore', ())
        if isinstance(ignore, int):
            # Normalize a single status code to a tuple.
            ignore = (ignore,)
    for attempt in range(self.max_retries + 1):
        connection = self.get_connection()
        try:
            response, data, duration = connection.perform_request(method, url, params, body, ignore=ignore,
                                                                  timeout=timeout)
        except TransportError as e:
            # Decide whether this failure class is retryable.
            retry = False
            if isinstance(e, ConnectionTimeout):
                retry = self.retry_on_timeout
            elif isinstance(e, ConnectionError):
                retry = True
            elif e.status_code in self.retry_on_status:
                retry = True
            if retry:
                # Re-raise only once the retry budget is exhausted.
                if attempt == self.max_retries:
                    raise
            else:
                raise
        else:
            if data:
                data = self.deserializer.loads(data, mimetype=response.headers.get('Content-Type'))
            else:
                data = {}
            return response, data, duration
Retrieve a connection.
Pass all the information to it's perform_request method and return the data.
:arg method: HTTP method to use
:arg url: absolute url (without host) to target
:arg params: dictionary of query parameters, will be handed over to the
underlying :class:`~pydrill.Connection` class for serialization
:arg body: body of the request, will be serializes using serializer and
passed to the connection | entailment |
def release(ctx, type_, repo, prebump=PREBUMP, config_file=None, commit=True, yes=False):
    """Make a new release.

    Bumps the version, renders the changelog via Towncrier, tags the
    release, builds sdist/wheel artifacts, uploads them with twine, and
    finally pre-bumps the version for the next development cycle.
    """
    if prebump not in REL_TYPES:
        raise ValueError(f"{type_} not in {REL_TYPES}")
    prebump = REL_TYPES.index(prebump)
    version = _read_version()
    version = _bump_release(version, type_)
    _write_version(version)
    # Needs to happen before Towncrier deletes fragment files.
    tag_content = _render_log()
    ctx.run("towncrier")
    if commit:
        ctx.run(f'git commit -am "Release {version}"')
    # Escape double quotes so the shell-quoted tag message stays intact.
    tag_content = tag_content.replace('"', '\\"')
    ctx.run(f'git tag -a {version} -m "Version {version}\n\n{tag_content}"')
    ctx.run(f"python setup.py sdist bdist_wheel")
    dist_pattern = f'{PACKAGE_NAME.replace("-", "[-_]")}-*'
    artifacts = list(ROOT.joinpath("dist").glob(dist_pattern))
    filename_display = "\n".join(f" {a}" for a in artifacts)
    print(f"[release] Will upload:\n{filename_display}")
    if not yes:
        # Interactive confirmation gate before anything irreversible.
        try:
            input("[release] Release ready. ENTER to upload, CTRL-C to abort: ")
        except KeyboardInterrupt:
            print("\nAborted!")
            return
    arg_display = " ".join(f'"{n}"' for n in artifacts)
    cmd = f'twine upload --repository="{repo}"'
    if config_file:
        cmd = f'{cmd} --config-file="{config_file}"'
    cmd = f"{cmd} {arg_display}"
    ctx.run(cmd)
    # Pre-bump so the working tree immediately reflects the next dev version.
    version = _prebump(version, prebump)
    _write_version(version)
    if commit:
        ctx.run(f'git commit -am "Prebump to {version}"')
def init_menu():
    """Initialize the settings menu and breadcrumbs before first request."""
    # Register breadcrumb root
    item = current_menu.submenu('breadcrumbs.settings')
    item.register('', _('Account'))
    item = current_menu.submenu('breadcrumbs.{0}'.format(
        current_app.config['SECURITY_BLUEPRINT_NAME']))
    if current_app.config.get('SECURITY_CHANGEABLE', True):
        item.register('', _('Change password'))
    # Register settings menu
    item = current_menu.submenu('settings.change_password')
    item.register(
        "{0}.change_password".format(
            current_app.config['SECURITY_BLUEPRINT_NAME']),
        # NOTE: Menu item text (icon replaced by a user icon).
        _('%(icon)s Change password',
          icon='<i class="fa fa-key fa-fw"></i>'),
        order=1)
    # Register breadcrumb
    item = current_menu.submenu('breadcrumbs.{0}.change_password'.format(
        current_app.config['SECURITY_BLUEPRINT_NAME']))
    item.register(
        "{0}.change_password".format(
            current_app.config['SECURITY_BLUEPRINT_NAME']),
        _("Change password"),
        order=0,
    )
def check_security_settings():
    """Warn if the session cookie is not secure in production."""
    production = not (current_app.debug or current_app.testing)
    if production and not current_app.config.get('SESSION_COOKIE_SECURE'):
        current_app.logger.warning(
            "SESSION_COOKIE_SECURE setting must be set to True to prevent the "
            "session cookie from being leaked over an insecure channel."
        )
def jwt_proccessor():
    """Context processor for jwt.

    NOTE(review): the public name is misspelled ('proccessor') but is
    kept for backward compatibility.
    """
    def jwt():
        """Render a DOM element carrying a freshly created JWT."""
        token = current_accounts.jwt_creation_factory()
        template = current_app.config['ACCOUNTS_JWT_DOM_TOKEN_TEMPLATE']
        return Markup(render_template(template, token=token))

    def jwt_token():
        """Return a freshly created JWT string."""
        return current_accounts.jwt_creation_factory()

    return {'jwt': jwt, 'jwt_token': jwt_token}
def _to_binary(val):
    """Coerce ``val`` to bytes, UTF-8 encoding text input."""
    if not isinstance(val, text_type):
        assert isinstance(val, binary_type)
        return val
    return val.encode('utf-8')
def _to_string(val):
    """Coerce ``val`` to text, UTF-8 decoding byte input."""
    if not isinstance(val, binary_type):
        assert isinstance(val, text_type)
        return val
    return val.decode('utf-8')
def _mysql_aes_key(key):
    """Fold ``key`` into MySQL's 16-byte AES key format."""
    folded = bytearray(16)
    for i, c in enumerate(key):
        # XOR each key byte into the 16-byte buffer, wrapping around.
        folded[i % 16] ^= c if PY3 else ord(c)
    return bytes(folded)
def _mysql_aes_pad(val):
    """Pad ``val`` so its length is a multiple of 16, MySQL-AES style:
    the pad character encodes the pad length."""
    text = _to_string(val)
    pad_len = 16 - (len(text) % 16)
    return _to_binary(text + chr(pad_len) * pad_len)
def _mysql_aes_unpad(val):
    """Strip the padding added by ``_mysql_aes_pad``: the last character
    encodes how many characters to drop."""
    text = _to_string(val)
    return text[:-ord(text[-1])]
def mysql_aes_encrypt(val, key):
    """Mysql AES encrypt value with secret key.

    :param val: Plain text value.
    :param key: The AES key.
    :returns: The encrypted AES value.
    """
    assert isinstance(val, (binary_type, text_type))
    assert isinstance(key, (binary_type, text_type))
    encryptor = _mysql_aes_engine(_mysql_aes_key(_to_binary(key))).encryptor()
    padded = _mysql_aes_pad(_to_binary(val))
    return encryptor.update(padded) + encryptor.finalize()
:param val: Plain text value.
:param key: The AES key.
:returns: The encrypted AES value. | entailment |
def mysql_aes_decrypt(encrypted_val, key):
    """Mysql AES decrypt value with secret key.

    :param encrypted_val: Encrypted value.
    :param key: The AES key.
    :returns: The AES value decrypted.
    """
    assert isinstance(encrypted_val, (binary_type, text_type))
    assert isinstance(key, (binary_type, text_type))
    k = _mysql_aes_key(_to_binary(key))
    # _mysql_aes_key already returns bytes, so the redundant second
    # _to_binary(k) call is dropped (mirrors mysql_aes_encrypt).
    d = _mysql_aes_engine(k).decryptor()
    return _mysql_aes_unpad(d.update(_to_binary(encrypted_val)) + d.finalize())
:param encrypted_val: Encrypted value.
:param key: The AES key.
:returns: The AES value decrypted. | entailment |
def from_string(cls, hash, **context):
    """Parse an instance from a Modular-Crypt-Format configuration string.

    ``**context`` is accepted for passlib handler API compatibility and
    is unused here.
    """
    parsed_salt, parsed_checksum = parse_mc2(hash, cls.ident, handler=cls)
    return cls(salt=parsed_salt, checksum=parsed_checksum)
def _calc_checksum(self, secret):
    """Compute the checksum for ``secret``.

    :param secret: The secret key.
    :returns: The checksum (hex digest of the AES-encrypted salt).
    """
    digest = hashlib.sha256(mysql_aes_encrypt(self.salt, secret)).hexdigest()
    return str_to_uascii(digest)
:param secret: The secret key.
:returns: The checksum. | entailment |
def _ip2country(ip):
    """Resolve ``ip`` to an ISO country code, or None when unknown."""
    if not ip:
        return None
    match = geolite2.reader().get(ip)
    if match:
        return match.get('country', {}).get('iso_code')
    return None
def _extract_info_from_useragent(user_agent):
    """Extract OS/browser/device details from a User-Agent string."""
    parsed = user_agent_parser.Parse(user_agent)
    ua = parsed.get('user_agent', {})
    return {
        'os': parsed.get('os', {}).get('family'),
        'browser': ua.get('family'),
        'browser_version': ua.get('major'),
        'device': parsed.get('device', {}).get('family'),
    }
def add_session(session=None):
    r"""Add a session to the SessionActivity table.

    :param session: Flask Session object to add. If None, the current
        Flask ``session`` is used. The object is expected to have a
        dictionary entry named ``"user_id"`` and a field ``sid_s``.
    """
    if session is None:
        # Fix: the docstring promises a fallback to the current session,
        # but the parameter shadows the flask global, so passing nothing
        # used to raise TypeError on ``session['user_id']``.
        from flask import session as flask_session
        session = flask_session
    user_id, sid_s = session['user_id'], session.sid_s
    with db.session.begin_nested():
        session_activity = SessionActivity(
            user_id=user_id,
            sid_s=sid_s,
            ip=request.remote_addr,
            country=_ip2country(request.remote_addr),
            **_extract_info_from_useragent(
                request.headers.get('User-Agent', '')
            )
        )
        db.session.merge(session_activity)
:param session: Flask Session object to add. If None, ``session``
is used. The object is expected to have a dictionary entry named
``"user_id"`` and a field ``sid_s`` | entailment |
def login_listener(app, user):
    """Connect to the user_logged_in signal for table population.

    :param app: The Flask application.
    :param user: The :class:`invenio_accounts.models.User` instance.
    """
    @after_this_request
    def add_user_session(response):
        """Regenerate current session and add to the SessionActivity table.

        .. note:: `flask.session.regenerate()` actually calls Flask-KVSession's
            `flask_kvsession.KVSession.regenerate`.
        """
        # Regenerate the session to avoid session fixation vulnerabilities.
        session.regenerate()
        # Save the session first so that the sid_s gets generated.
        app.session_interface.save_session(app, session, response)
        add_session(session)
        current_accounts.datastore.commit()
        return response
:param app: The Flask application.
:param user: The :class:`invenio_accounts.models.User` instance. | entailment |
def logout_listener(app, user):
    """Connect to the user_logged_out signal.

    :param app: The Flask application.
    :param user: The :class:`invenio_accounts.models.User` instance.
    """
    @after_this_request
    def _commit(response=None):
        # Remove the activity record of the session being terminated.
        if hasattr(session, 'sid_s'):
            delete_session(session.sid_s)
        # Regenerate the session to avoid session fixation vulnerabilities.
        session.regenerate()
        current_accounts.datastore.commit()
        return response
:param app: The Flask application.
:param user: The :class:`invenio_accounts.models.User` instance. | entailment |
def delete_session(sid_s):
    """Delete the entries for ``sid_s`` in both session stores.

    On a successful deletion, the flask-kvsession store returns 1 while
    the sqlalchemy datastore returns None.

    :param sid_s: The session ID.
    :returns: ``1`` if deletion was successful.
    """
    # Drop the server-side session payload first...
    _sessionstore.delete(sid_s)
    # ...then the matching SessionActivity row.
    with db.session.begin_nested():
        SessionActivity.query.filter_by(sid_s=sid_s).delete()
    return 1
On a successful deletion, the flask-kvsession store returns 1 while the
sqlalchemy datastore returns None.
:param sid_s: The session ID.
:returns: ``1`` if deletion was successful. | entailment |
def delete_user_sessions(user):
    """Delete all active sessions of ``user``.

    :param user: User instance.
    :returns: ``True`` when the sessions were deleted.
    """
    with db.session.begin_nested():
        for activity in user.active_sessions:
            _sessionstore.delete(activity.sid_s)
        SessionActivity.query.filter_by(user=user).delete()
    return True
:param user: User instance.
:returns: If ``True`` then the session is successfully deleted. | entailment |
def confirm_register_form_factory(Form, app):
    """Return confirmation for extended registration form."""
    recaptcha_configured = (app.config.get('RECAPTCHA_PUBLIC_KEY') and
                            app.config.get('RECAPTCHA_PRIVATE_KEY'))
    if not recaptcha_configured:
        return Form

    class ConfirmRegisterForm(Form):
        # Recaptcha sub-form is added only when both keys are configured.
        recaptcha = FormField(RegistrationFormRecaptcha, separator='.')

    return ConfirmRegisterForm
def register_form_factory(Form, app):
    """Return extended registration form."""
    recaptcha_configured = (app.config.get('RECAPTCHA_PUBLIC_KEY') and
                            app.config.get('RECAPTCHA_PRIVATE_KEY'))
    if not recaptcha_configured:
        return Form

    class RegisterForm(Form):
        # Recaptcha sub-form is added only when both keys are configured.
        recaptcha = FormField(RegistrationFormRecaptcha, separator='.')

    return RegisterForm
def login_form_factory(Form, app):
    """Return extended login form."""
    class LoginForm(Form):
        """Login form with the 'remember me' option force-disabled."""

        def __init__(self, *args, **kwargs):
            super(LoginForm, self).__init__(*args, **kwargs)
            # Disable 'remember me' regardless of submitted data.
            self.remember.data = False

    return LoginForm
def on_model_change(self, form, User, is_created):
    """Hash the submitted password before the model is saved."""
    raw_password = form.password.data
    if raw_password is None:
        return
    pwd_ctx = current_app.extensions['security'].pwd_context
    # Only hash values that are not already a recognised password hash,
    # so re-saving an unchanged (hashed) password is a no-op.
    if pwd_ctx.identify(raw_password) is None:
        User.password = hash_password(raw_password)
def after_model_change(self, form, User, is_created):
    """Send password instructions if desired."""
    # Only notify on creation, and only when explicitly requested.
    should_notify = is_created and form.notification.data is True
    if should_notify:
        send_reset_password_instructions(User)
def action_inactivate(self, ids):
    """Inactivate the selected users.

    :param ids: Iterable of user ids to deactivate.
    """
    try:
        deactivated = 0
        for user_id in ids:
            user = _datastore.get_user(user_id)
            if user is None:
                # Caught by the handler below and reported via flash.
                raise ValueError(_("Cannot find user."))
            if _datastore.deactivate_user(user):
                deactivated += 1
        if deactivated:
            flash(_('User(s) were successfully inactivated.'), 'success')
    except Exception as exc:
        if not self.handle_view_exception(exc):
            raise
        current_app.logger.exception(str(exc))  # pragma: no cover
        flash(_('Failed to inactivate users.'), 'error')
def delete_model(self, model):
    """Delete a specific session, refusing to delete the current one."""
    sid = model.sid_s
    if not SessionActivity.is_current(sid_s=sid):
        delete_session(sid_s=sid)
        db.session.commit()
        return
    # Deleting your own live session would log you out mid-request.
    flash('You could not remove your current session', 'error')
def action_delete(self, ids):
    """Delete the selected sessions.

    Refuses the whole batch if it contains the current session.
    """
    for sid in ids:
        if SessionActivity.is_current(sid_s=sid):
            flash('You could not remove your current session', 'error')
            return
    for sid in ids:
        delete_session(sid_s=sid)
    db.session.commit()
def users_create(email, password, active):
    """Create a user.

    Validates the input through the registration form, hashes the
    password and persists the new account via the datastore.
    """
    data = dict(email=email, password=password, active='y' if active else '')
    form = ConfirmRegisterForm(MultiDict(data), csrf_enabled=False)
    if not form.validate():
        raise click.UsageError('Error creating user. %s' % form.errors)
    data['password'] = hash_password(data['password'])
    data['active'] = active
    _datastore.create_user(**data)
    click.secho('User created successfully.', fg='green')
    # Never echo the (hashed) password back to the terminal.
    data['password'] = '****'
    click.echo(data)
def send_security_email(data):
    """Celery task to send security email.

    :param data: Contains the email data.
    """
    message = Message()
    # Restore the serialized message attributes onto a fresh Message
    # instance (the task payload is the message's ``__dict__``).
    message.__dict__.update(data)
    current_app.extensions['mail'].send(message)
def clean_session_table():
    """Automatically clean the session table.

    Deletes every expired session (both the stored payload and its
    ``SessionActivity`` record) and commits once at the end.

    To clean the session table periodically, register this task with
    celery beat, e.g.:

    .. code-block:: python

        from datetime import timedelta
        CELERYBEAT_SCHEDULE = {
            'session_cleaner': {
                'task': 'invenio_accounts.tasks.clean_session_table',
                'schedule': timedelta(days=1),
            },
        }

    See `Invenio-Celery <https://invenio-celery.readthedocs.io/>`_
    documentation for further details.
    """
    expired = SessionActivity.query_by_expired().all()
    for activity in expired:
        delete_session(sid_s=activity.sid_s)
    db.session.commit()
def upgrade():
    """Upgrade database.

    Creates the Invenio-Accounts tables (roles, users, session activity
    and the user<->role association) and links the versioning
    ``transaction`` table to ``accounts_user``.
    """
    # Role definitions; role names must be unique.
    op.create_table(
        'accounts_role',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=80), nullable=True),
        sa.Column('description', sa.String(length=255), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('name')
    )
    # User accounts, including the login-tracking columns used by
    # Flask-Security (last/current login time, IPs, login count).
    op.create_table(
        'accounts_user',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('email', sa.String(length=255), nullable=True),
        sa.Column('password', sa.String(length=255), nullable=True),
        sa.Column('active', sa.Boolean(name='active'), nullable=True),
        sa.Column('confirmed_at', sa.DateTime(), nullable=True),
        sa.Column('last_login_at', sa.DateTime(), nullable=True),
        sa.Column('current_login_at', sa.DateTime(), nullable=True),
        sa.Column('last_login_ip',
                  sqlalchemy_utils.types.ip_address.IPAddressType(),
                  nullable=True),
        sa.Column('current_login_ip',
                  sqlalchemy_utils.types.ip_address.IPAddressType(),
                  nullable=True),
        sa.Column('login_count', sa.Integer(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('email')
    )
    # One row per active user session, keyed by the session id string.
    op.create_table(
        'accounts_user_session_activity',
        sa.Column('created', sa.DateTime(), nullable=False),
        sa.Column('updated', sa.DateTime(), nullable=False),
        sa.Column('sid_s', sa.String(length=255), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(
            ['user_id'], [u'accounts_user.id'],
            name='fk_accounts_session_activity_user_id',
        ),
        sa.PrimaryKeyConstraint('sid_s')
    )
    # Many-to-many association between users and roles.
    op.create_table(
        'accounts_userrole',
        sa.Column('user_id', sa.Integer(), nullable=True),
        sa.Column('role_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(
            ['role_id'], ['accounts_role.id'],
            name='fk_accounts_userrole_role_id',
        ),
        sa.ForeignKeyConstraint(
            ['user_id'], ['accounts_user.id'],
            name='fk_accounts_userrole_user_id',
        ),
    )
    # Attach transactions (from SQLAlchemy-Continuum versioning) to the
    # user that made them; batch mode keeps this SQLite-compatible.
    with op.batch_alter_table('transaction') as batch_op:
        batch_op.add_column(sa.Column(
            'user_id',
            sa.Integer(),
            sa.ForeignKey('accounts_user.id'),
            nullable=True,
        ))
        batch_op.create_index(
            op.f('ix_transaction_user_id'), ['user_id'], unique=False
) | Upgrade database. | entailment |
def downgrade():
    """Downgrade database."""
    ctx = op.get_context()
    inspector = Inspector.from_engine(ctx.connection.engine)
    # The FK from ``transaction`` to ``accounts_user`` must go before the
    # column/table drops; look its (possibly auto-generated) name up.
    for foreign_key in inspector.get_foreign_keys('transaction'):
        if foreign_key['referred_table'] != 'accounts_user':
            continue
        op.drop_constraint(
            op.f(foreign_key['name']), 'transaction', type_='foreignkey'
        )
    with op.batch_alter_table('transaction') as batch_op:
        batch_op.drop_index(op.f('ix_transaction_user_id'))
        batch_op.drop_column('user_id')
    # Drop tables in reverse dependency order.
    op.drop_table('accounts_userrole')
    op.drop_table('accounts_user_session_activity')
    op.drop_table('accounts_user')
    op.drop_table('accounts_role')
def jwt_create_token(user_id=None, additional_data=None):
    """Encode and return a JWT.

    :param int user_id: Subject of the token; defaults to the current
        user's id when not given.
    :param dict additional_data: Extra claims merged into the token.
    :returns: The encoded token.
    :rtype: str

    .. note::
        Claims used:

        * exp: (Expiration Time) expiration time of the JWT.
        * sub: (subject) the principal that is the subject of the JWT.
        * jti: (JWT ID) UID for the JWT.
    """
    now = datetime.utcnow()
    claims = {
        'exp': now + current_app.config['ACCOUNTS_JWT_EXPIRATION_DELTA'],
        'sub': user_id or current_user.get_id(),
        'jti': str(uuid.uuid4()),
    }
    if additional_data is not None:
        claims.update(additional_data)
    # NOTE(review): 'ACCOUNTS_JWT_ALOGORITHM' looks misspelled, but the key
    # must match the application's configuration (and jwt_decode_token) —
    # do not rename it here in isolation.
    return encode(
        claims,
        current_app.config['ACCOUNTS_JWT_SECRET_KEY'],
        current_app.config['ACCOUNTS_JWT_ALOGORITHM']
    ).decode('utf-8')
def jwt_decode_token(token):
    """Decode a JWT and return its claims.

    :param str token: The encoded token.
    :returns: The token data.
    :rtype: dict
    :raises JWTDecodeError: If the token cannot be decoded.
    :raises JWTExpiredToken: If the token's signature has expired.
    """
    secret = current_app.config['ACCOUNTS_JWT_SECRET_KEY']
    algorithm = current_app.config['ACCOUNTS_JWT_ALOGORITHM']
    try:
        return decode(token, secret, algorithms=[algorithm])
    except DecodeError as exc:
        # Re-raise as a package exception while keeping the cause chained.
        raise_from(JWTDecodeError(), exc)
    except ExpiredSignatureError as exc:
        raise_from(JWTExpiredToken(), exc)
def set_session_info(app, response, **extra):
    """Add X-Session-ID and X-User-ID to the HTTP response."""
    headers = response.headers
    sid_s = getattr(session, 'sid_s', None)
    # Only annotate responses that actually carry a server-side session.
    if sid_s:
        headers['X-Session-ID'] = sid_s
        if current_user.is_authenticated:
            headers['X-User-ID'] = current_user.get_id()
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.