_id
stringlengths 2
7
| title
stringlengths 1
88
| partition
stringclasses 3
values | text
stringlengths 75
19.8k
| language
stringclasses 1
value | meta_information
dict |
|---|---|---|---|---|---|
q8000
|
crud_for_app
|
train
|
def crud_for_app(app_label, urlprefix=None):
    """Build the ``url`` entries covering CRUD views for every model in an app.

    Args:
        app_label (str): Label of the app whose models should get CRUD urls.

    Keyword Args:
        urlprefix (str, optional): Prefix placed before each generated URL.
            Defaults to ``"<app_label>/"`` when omitted.

    Returns:
        list: The combined ``url`` items for all models in the app.
    """
    if urlprefix is None:
        urlprefix = '{}/'.format(app_label)

    config = apps.get_app_config(app_label)

    collected = []
    for model_cls in config.get_models():
        collected.extend(crud_for_model(model_cls, urlprefix))

    return collected
|
python
|
{
"resource": ""
}
|
q8001
|
CRUDMixin.get_context_data
|
train
|
def get_context_data(self, **kwargs):
    """Extend the template context with model names, fields, and CRUD urls.

    Adds ``model_verbose_name``, ``model_verbose_name_plural``, ``fields``,
    and a ``url_<action>`` entry for every instance/list action (``None``
    when the url cannot be reversed).
    """
    context = super(CRUDMixin, self).get_context_data(**kwargs)

    opts = self.model._meta
    context['model_verbose_name'] = opts.verbose_name
    context['model_verbose_name_plural'] = opts.verbose_name_plural
    context['fields'] = utils.get_fields(self.model)

    instance = getattr(self, 'object', None)
    if instance:
        for action in utils.INSTANCE_ACTIONS:
            try:
                resolved = reverse(
                    utils.crud_url_name(self.model, action),
                    kwargs={'pk': instance.pk})
            except NoReverseMatch:  # pragma: no cover
                resolved = None
            context['url_%s' % action] = resolved

    for action in utils.LIST_ACTIONS:
        try:
            resolved = reverse(utils.crud_url_name(self.model, action))
        except NoReverseMatch:  # pragma: no cover
            resolved = None
        context['url_%s' % action] = resolved

    return context
|
python
|
{
"resource": ""
}
|
q8002
|
CRUDMixin.get_template_names
|
train
|
def get_template_names(self):
    """Return the default template names plus ``crud_template_name``.

    ``crud_template_name`` is only appended when it is set (truthy).
    """
    names = super(CRUDMixin, self).get_template_names()
    extra = self.crud_template_name
    if extra:
        names.append(extra)
    return names
|
python
|
{
"resource": ""
}
|
q8003
|
install
|
train
|
def install():
    """Install Fleaker.

    Wrapped in a function so this setup logic only runs when explicitly
    invoked — and not when, say, py.test collects all Python modules.
    """
    # Pull __version__ straight out of the package source so setup.py
    # never has to import the (possibly uninstallable) package itself.
    version_pattern = re.compile(r"__version__\s+=\s+(.*)")  # pylint: disable=invalid-name
    with open('./fleaker/__init__.py', 'rb') as init_file:
        source = init_file.read().decode('utf-8')
        version = ast.literal_eval(version_pattern.search(source).group(1))

    download_url = ('https://github.com/croscon/fleaker/archive/'
                    'v{}.tar.gz'.format(version))

    setup(
        name='fleaker',
        version=version,
        download_url=download_url,
        description='Tools and extensions to make Flask development easier.',
        url='https://github.com/croscon/fleaker',
        author='Croscon Consulting',
        author_email='open.source@croscon.com',
        license='BSD',
        packages=[
            'fleaker',
            'fleaker.marshmallow',
            'fleaker.marshmallow.fields',
            'fleaker.peewee',
            'fleaker.peewee.fields',
            'fleaker.peewee.mixins',
            'fleaker.peewee.mixins.time',
        ],
        zip_safe=False,
        long_description=__doc__,
        include_package_data=True,
        platforms='any',
        install_requires=[
            'Flask',
            'Flask-Classful',
            'Flask-Login',
            'Flask-Marshmallow',
            'arrow',
            'bcrypt',
            'blinker',
            'marshmallow',
            'marshmallow-jsonschema',
            'peewee',
            'pendulum',
            'phonenumbers',
            'simplejson',
        ],
        classifiers=[
            'Development Status :: 2 - Pre-Alpha',
            'Environment :: Web Environment',
            'Framework :: Flask',
            'Intended Audience :: Developers',
            'License :: OSI Approved :: BSD License',
            'Operating System :: OS Independent',
            'Programming Language :: Python',
            # @TODO: Pick specific Python versions; out of the gate flask does 2.6,
            # 2.7, 3.3, 3.4, and 3.5
            'Programming Language :: Python :: 2',
            'Programming Language :: Python :: 3',
            'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
            'Topic :: Software Development :: Libraries :: Application Frameworks',
            'Topic :: Software Development :: Libraries :: Python Modules',
        ],
        keywords=['flask', 'web development', 'flask extension']
    )
|
python
|
{
"resource": ""
}
|
q8004
|
JSONField.python_value
|
train
|
def python_value(self, value):
    """Decode the stored JSON string back into a Python object.

    Returns:
        dict: The stored value run through ``json.loads``; ``None`` is
        passed through untouched (NULL column).
    """
    raw = super(JSONField, self).python_value(value)
    if raw is None:
        return None
    return flask.json.loads(raw, **self._load_kwargs)
|
python
|
{
"resource": ""
}
|
q8005
|
JSONField.db_value
|
train
|
def db_value(self, value):
    """Serialize ``value`` to a JSON string before handing it to the db.

    Everything is JSON-encoded prior to storage, dict-like or not.
    """
    encoded = flask.json.dumps(value)
    return super(JSONField, self).db_value(encoded)
|
python
|
{
"resource": ""
}
|
q8006
|
SearchMixin.search
|
train
|
def search(cls, term, fields=()):
    """Generic SQL ``LIKE`` search, ordered by relevance to ``term``.

    For a target string ``exactly``, matches are ranked in this order:
    1. Straight equality (``x = 'exactly'``)
    2. Prefix ``LIKE`` (``x LIKE 'exact%'``)
    3. Substring ``LIKE`` (``x LIKE '%act%'``)

    Args:
        term (str): The search term to apply to the query.

    Keyword Args:
        fields (list|tuple|None): Optional fields to search over. Falls
            back to ``Meta.search_fields`` when not provided.

    Returns:
        peewee.SelectQuery: An unexecuted query for the matching records.

    Raises:
        AttributeError: If neither ``Meta.search_fields`` nor the
            ``fields`` kwarg supplies any searchable fields.
    """
    if not (fields or cls._meta.search_fields):
        raise AttributeError(
            "A list of searchable fields must be provided in the class's "
            "search_fields or provided to this function in the `fields` "
            "kwarg."
        )

    # Explicitly-passed fields win over the class-level default.
    fields = fields or cls._meta.search_fields

    # Precompute the LIKE patterns once, outside the loop.
    prefix_pattern = term + '%'
    substring_pattern = '%' + term + '%'

    # @TODO Peewee's order_by supports an `extend` kwarg that would allow
    # updating the order-by part of the query, but it's only supported in
    # Peewee 2.8.5 and newer. Determine if we can require that first.
    # http://docs.peewee-orm.com/en/stable/peewee/api.html#SelectQuery.order_by
    ordering = []

    # Clauses are collected separately because they must be OR-ed together
    # as one expression, which the query builder can't do incrementally.
    clauses = []

    for name in fields:
        # getattr raises AttributeError if the model lacks the field —
        # intentional, surfaces a bad field name immediately.
        field = getattr(cls, name)

        clauses.append(
            (field == term) |
            (field ** prefix_pattern) |
            (field ** substring_pattern)
        )

        # Rank: exact match first, prefix second, substring third,
        # everything else last.
        ordering.append(case(None, (
            (field == term, 0),
            (field ** prefix_pattern, 1),
            (field ** substring_pattern, 2),
        ), default=3).asc())

    query = cls.select().where(reduce(operator.or_, clauses))
    return query.order_by(*ordering)
|
python
|
{
"resource": ""
}
|
q8007
|
ForeignKeyField._add_to_schema
|
train
|
def _add_to_schema(self, field_name, schema):
    """Point ``attribute`` at the field name with its ``_id`` suffix removed.

    This makes the field always deserialize into the model attribute
    without the trailing ``_id`` (e.g. ``user_id`` -> ``user``).

    Args:
        field_name (str): The name of the field (the attribute name being
            set in the schema).
        schema (marshmallow.Schema): The actual parent schema this field
            belongs to.
    """
    super(ForeignKeyField, self)._add_to_schema(field_name, schema)

    if self.get_field_value('convert_fks', default=True):
        # BUG FIX: the old ``field_name.replace('_id', '')`` stripped EVERY
        # occurrence of '_id' (e.g. 'post_id_backup_id' -> 'postbackup');
        # only a trailing '_id' suffix should be removed.
        if field_name.endswith('_id'):
            self.attribute = field_name[:-len('_id')]
        else:
            self.attribute = field_name
|
python
|
{
"resource": ""
}
|
q8008
|
ForeignKeyField._serialize
|
train
|
def _serialize(self, value, attr, obj):
    """Serialize the related Peewee model down to its ``id``.

    ``value`` may be falsy when the foreign key is optional; in that case
    it is forwarded untouched.
    """
    pk = value.id if value else value
    return super(ForeignKeyField, self)._serialize(pk, attr, obj)
|
python
|
{
"resource": ""
}
|
q8009
|
_discover_ideal_backend
|
train
|
def _discover_ideal_backend(orm_backend):
"""Auto-discover the ideal backend based on what is installed.
Right now, handles discovery of:
* PeeWee
* SQLAlchemy
Args:
orm_backend (str): The ``orm_backend`` value that was passed to the
``create_app`` function. That is, the ORM Backend the User
indicated they wanted to use.
Returns:
str|fleaker.missing.MissingSentinel: Returns a string for the ideal
backend if it found one, or :obj:`fleaker.MISSING` if we couldn't
find one.
Raises:
RuntimeError: Raised if no user provided ORM Backend is given and BOTH
PeeWee and SQLAlchemy are installed.
"""
if orm_backend:
return orm_backend
if peewee is not MISSING and sqlalchemy is not MISSING:
raise RuntimeError('Both PeeWee and SQLAlchemy detected as installed, '
'but no explicit backend provided! Please specify '
'one!')
if peewee is not MISSING:
return _PEEWEE_BACKEND
elif sqlalchemy is not MISSING:
return _SQLALCHEMY_BACKEND
else:
return MISSING
|
python
|
{
"resource": ""
}
|
q8010
|
ORMAwareApp._init_peewee_ext
|
train
|
def _init_peewee_ext(cls, app, dummy_configuration=None,
dummy_configure_args=None):
"""Init the actual PeeWee extension with the app that was created.
Since PeeWee requires the ``DATABASE`` config parameter to be present
IMMEDIATELY upon initializing the application, we need to delay this
construction. This is because, in standard use, we will create the app
and attempt to init this extension BEFORE we configure the app, which
is totally fine. To fix this, we just need to set this up to try and
run after every call to configure. If there is not ``DATABASE`` config
parameter present when run, this method does nothing other than
reschedule itself to run in the future.
In all cases, this is a Post Configure Hook that should RUN ONCE!
Args:
app (flask.Flask): The application you want to init the PeeWee
Flask extension for. Hint: if you need to use this as
a callback, use a partial to provide this.
dummy_configuration (dict): The resulting application configuration
that the post_configure hook provides to all of it's callbacks.
We will NEVER use this, but since we utilize the post_configure
system to register this for complicated apps, we gotta accept
it.
dummy_configure_args (list[object]): The args passed to the
:meth:`configure` function that triggered this callback. Just
like the above arg, we'll never use it, but we must accept it.
"""
# the database still isn't present, go ahead and register the callback
# again, so we can try later.
if 'DATABASE' not in app.config:
app.add_post_configure_callback(partial(cls._init_peewee_ext, app),
run_once=True)
return
_PEEWEE_EXT.init_app(app)
|
python
|
{
"resource": ""
}
|
q8011
|
MultiStageConfigurableApp.configure
|
train
|
def configure(self, *args, **kwargs):
    """Configure the application from any number of heterogeneous sources.

    Each positional argument is tried in order, later sources overriding
    earlier ones when keys collide:

    .. code:: python

        from application import default_config
        app.configure(default_config, os.environ, '.secrets')

    Sources are dispatched on type:

    * ``str`` — treated as a file or module to load. ``.json`` files go
      through :meth:`flask.Config.from_json`; ``.py``/``.cfg`` files go
      through :meth:`flask.Config.from_pyfile`; anything else is treated
      as an import path, imported, and fed to
      :meth:`flask.Config.from_object`. A leading ``.`` on an import path
      makes it relative to the current package; otherwise it is absolute.
    * dict-like (anything with an ``items`` method yielding
      ``(key, val)``) — loaded via :meth:`flask.Config.from_mapping`.
    * uninstantiated ``class`` or ``module`` —
      :meth:`flask.Config.from_object`.

    As with standard Flask configuration, only uppercased keys are loaded.

    Args:
        *args (object): Any object you want us to try to configure from.

    Keyword Args:
        whitelist_keys_from_mappings (bool): Whitelist the keys pulled
            from mappings? Handy when passing an entire OS ``environ``
            and you want to omit things like ``LESSPIPE``. With no
            explicit whitelist, pre-existing config keys are used.
        whitelist (list[str]): An explicit list of allowed keys, used
            instead of the pre-existing config keys when whitelisting is
            on.

    Raises:
        TypeError: If a source's type cannot be dispatched.
    """
    whitelist_keys_from_mappings = kwargs.get(
        'whitelist_keys_from_mappings', False
    )
    whitelist = kwargs.get('whitelist')

    for source in args:
        if isinstance(source, string_types):
            ext = splitext(source)[1]

            if ext == '.json':
                self._configure_from_json(source)
            elif ext in ('.cfg', '.py'):
                self._configure_from_pyfile(source)
            else:
                self._configure_from_module(source)

        elif isinstance(source, (types.ModuleType, type)):
            self._configure_from_object(source)

        elif hasattr(source, 'items'):
            # assume everything else is mapping-like; ``.items()`` is what
            # Flask itself keys on for this
            # @TODO: This doesn't handle the edge case of using a tuple of
            # two element tuples to config; but Flask does that. IMO, if
            # you do that, you're a monster.
            self._configure_from_mapping(
                source,
                whitelist_keys=whitelist_keys_from_mappings,
                whitelist=whitelist,
            )

        else:
            raise TypeError("Could not determine a valid type for this"
                            " configuration object: `{}`!".format(source))

    # all sources loaded — fire the post configure callbacks
    self._run_post_configure_callbacks(args)
|
python
|
{
"resource": ""
}
|
q8012
|
MultiStageConfigurableApp._configure_from_module
|
train
|
def _configure_from_module(self, item):
"""Configure from a module by import path.
Effectively, you give this an absolute or relative import path, it will
import it, and then pass the resulting object to
``_configure_from_object``.
Args:
item (str):
A string pointing to a valid import path.
Returns:
fleaker.App:
Returns itself.
"""
package = None
if item[0] == '.':
package = self.import_name
obj = importlib.import_module(item, package=package)
self.config.from_object(obj)
return self
|
python
|
{
"resource": ""
}
|
q8013
|
MultiStageConfigurableApp._configure_from_mapping
|
train
|
def _configure_from_mapping(self, item, whitelist_keys=False,
whitelist=None):
"""Configure from a mapping, or dict, like object.
Args:
item (dict):
A dict-like object that we can pluck values from.
Keyword Args:
whitelist_keys (bool):
Should we whitelist the keys before adding them to the
configuration? If no whitelist is provided, we use the
pre-existing config keys as a whitelist.
whitelist (list[str]):
An explicit list of keys that should be allowed. If provided
and ``whitelist_keys`` is true, we will use that as our
whitelist instead of pre-existing app config keys.
Returns:
fleaker.App:
Returns itself.
"""
if whitelist is None:
whitelist = self.config.keys()
if whitelist_keys:
item = {k: v for k, v in item.items() if k in whitelist}
self.config.from_mapping(item)
return self
|
python
|
{
"resource": ""
}
|
q8014
|
MultiStageConfigurableApp.configure_from_environment
|
train
|
def configure_from_environment(self, whitelist_keys=False, whitelist=None):
    """Configure from the entire set of available environment variables.

    Shorthand for handing ``os.environ`` to
    :meth:`_configure_from_mapping`. As always, only uppercase keys are
    loaded.

    Keyword Args:
        whitelist_keys (bool): Only pull keys already present in the
            config? Useful for keeping things like ``LESSPIPE`` out of
            your app config. With no explicit whitelist, current config
            keys are the whitelist.
        whitelist (list[str]): An explicit list of allowed keys, used
            instead of the pre-existing config keys when
            ``whitelist_keys`` is true.

    Returns:
        fleaker.base.BaseApplication: Returns itself for chaining.
    """
    self._configure_from_mapping(os.environ,
                                 whitelist_keys=whitelist_keys,
                                 whitelist=whitelist)
    return self
|
python
|
{
"resource": ""
}
|
q8015
|
MultiStageConfigurableApp._run_post_configure_callbacks
|
train
|
def _run_post_configure_callbacks(self, configure_args):
    """Invoke every stored post-configure callback.

    Each callback receives the configuration that resulted from the
    :meth:`configure` call (in immutable form) as its first argument and
    the arguments passed to :meth:`configure` as its second. Return
    values from callbacks are ignored in all fashion.

    Args:
        configure_args (list[object]): The full list of arguments passed
            to :meth:`configure`.

    Returns:
        None: Does not return anything.
    """
    frozen_config = ImmutableDict(self.config)

    # snapshot both queues so callbacks added/removed mid-run can't
    # perturb this iteration
    recurring = copy.copy(self._post_configure_callbacks['multiple'])
    one_shot = copy.copy(self._post_configure_callbacks['single'])

    # single-run callbacks fire exactly once — drop them before running
    self._post_configure_callbacks['single'] = []

    for callback in recurring:
        callback(frozen_config, configure_args)

    for callback in one_shot:
        callback(frozen_config, configure_args)
|
python
|
{
"resource": ""
}
|
q8016
|
Schema.make_instance
|
train
|
def make_instance(cls, data):
    """Validate ``data`` and construct a model instance from it.

    Args:
        data (dict): The unserialized data to feed into the new model
            instance through its constructor.

    Returns:
        peewee.Model|sqlalchemy.Model: The model instance populated with
        the loaded data.

    Raises:
        AttributeError: If ``Meta.model`` isn't set on the schema's
            definition.
    """
    schema = cls()

    if not hasattr(schema.Meta, 'model'):
        raise AttributeError("In order to make an instance, a model for "
                             "the schema must be defined in the Meta "
                             "class.")

    loaded = schema.load(data).data
    return cls.Meta.model(**loaded)
|
python
|
{
"resource": ""
}
|
q8017
|
Schema.invalid_fields
|
train
|
def invalid_fields(self, data, original_data):
    """Reject any keys in the payload that the schema doesn't declare.

    If the schema supports keys ``a`` and ``b`` but the provided data also
    carries ``c``, loading raises so the caller learns that excess keys
    were supplied.

    Raises:
        marshmallow.ValidationError: If extra keys exist in the passed in
            data.
    """
    unknown = [
        key for key in original_data
        # nested container "keys" would recurse forever — skip them
        if not isinstance(key, (set, list, tuple, dict))
        and key not in self.fields
    ]

    if unknown:
        raise ValidationError("Invalid field", field_names=unknown)
|
python
|
{
"resource": ""
}
|
q8018
|
ArrowDateTimeField.python_value
|
train
|
def python_value(self, value):
    """Surface the database value as an :class:`arrow.Arrow` instance.

    Returns:
        arrow.Arrow: The parsed value, when the db returned a date,
        datetime, or string; anything else comes back untouched.
    """
    raw = super(ArrowDateTimeField, self).python_value(value)

    convertible = (datetime.datetime, datetime.date, string_types)
    if isinstance(raw, convertible):
        return arrow.get(raw)

    return raw
|
python
|
{
"resource": ""
}
|
q8019
|
ArrowDateTimeField.db_value
|
train
|
def db_value(self, value):
    """Convert the value to a plain datetime for saving in the db.

    Strings are first parsed by arrow; Arrow instances are unwrapped to
    their underlying datetime.
    """
    coerced = arrow.get(value) if isinstance(value, string_types) else value

    if isinstance(coerced, arrow.Arrow):
        coerced = coerced.datetime

    return super(ArrowDateTimeField, self).db_value(coerced)
|
python
|
{
"resource": ""
}
|
q8020
|
Component._context_callbacks
|
train
|
def _context_callbacks(app, key, original_context=_CONTEXT_MISSING):
    """Register the push/pop callbacks that manage a component's context.

    Args:
        app (flask.Flask): The app this context belongs to; it is the only
            sender our Blinker signals will listen to.
        key (str): The key on ``_CONTEXT_LOCALS`` that this app's context
            listens to.

    Kwargs:
        original_context (dict): The context present when these callbacks
            were registered; the context is restored to this value
            whenever the app context gets popped.

    Returns:
        (function, function): ``(push_callback, pop_callback)`` — the
        first (for ``appcontext_pushed``) points the proxy at the current
        context, the second (for ``appcontext_popped``) restores the
        context to its original value.
    """
    def _get_context(dummy_app):
        """Point the context proxy at this app's specific context."""
        _CONTEXT_LOCALS.context = _CONTEXT_LOCALS(key)  # pylint: disable=assigning-non-slot

    def _clear_context(dummy_app):
        """Drop the context proxy and restore the original context, if
        there was one.
        """
        try:
            del _CONTEXT_LOCALS.context
        except AttributeError:
            pass

        if original_context is not _CONTEXT_MISSING:
            setattr(_CONTEXT_LOCALS, key, original_context)

    # keep hard references so Blinker doesn't drop these listeners, and so
    # we never register the same pair twice
    _CONTEXT_CALLBACK_MAP[app] = (_get_context, _clear_context)

    # now listen for any app context changes on this app
    appcontext_pushed.connect(_get_context, app)
    appcontext_popped.connect(_clear_context, app)

    return (_get_context, _clear_context)
|
python
|
{
"resource": ""
}
|
q8021
|
Component.update_context
|
train
|
def update_context(self, context, app=None):
    """Replace this component's context with a new one.

    Args:
        context (dict): The new context to set this component's context
            to.

    Keyword Args:
        app (flask.Flask, optional): The app to update this context for.
            If not provided, the result of ``Component.app`` will be
            used.

    Raises:
        RuntimeError: If no app was passed, no eager app is set, and no
            app context is bound.
    """
    cannot_resolve_app = (app is None
                          and self._context is _CONTEXT_MISSING
                          and not in_app_context())
    if cannot_resolve_app:
        raise RuntimeError("Attempted to update component context without"
                           " a bound app context or eager app set! Please"
                           " pass the related app you want to update the"
                           " context for!")

    if self._context is not _CONTEXT_MISSING:
        self._context = ImmutableDict(context)
    else:
        setattr(_CONTEXT_LOCALS, self._get_context_name(app=app),
                ImmutableDict(context))
|
python
|
{
"resource": ""
}
|
q8022
|
Component.clear_context
|
train
|
def clear_context(self, app=None):
    """Reset this component's context to the default, empty dict.

    Keyword Args:
        app (flask.Flask, optional): The app to clear this component's
            context for. If omitted, the value from ``Component.app`` is
            used.

    Raises:
        RuntimeError: If no app was passed, no eager app is set, and no
            app context is bound.
    """
    cannot_resolve_app = (app is None
                          and self._context is _CONTEXT_MISSING
                          and not in_app_context())
    if cannot_resolve_app:
        raise RuntimeError("Attempted to clear component context without"
                           " a bound app context or eager app set! Please"
                           " pass the related app you want to update the"
                           " context for!")

    if self._context is not _CONTEXT_MISSING:
        self._context = DEFAULT_DICT
    else:
        setattr(_CONTEXT_LOCALS, self._get_context_name(app=app),
                DEFAULT_DICT)
|
python
|
{
"resource": ""
}
|
q8023
|
Component.app
|
train
|
def app(self):
    """Resolve the app this component should operate on.

    Returns:
        flask.Flask: The eagerly-bound app when one was provided,
        otherwise the app from the active application context.

    Raises:
        RuntimeError: If no app was provided to the component and this is
            called outside of an application context.
    """
    candidate = self._app or current_app

    if not in_app_context(candidate):
        raise RuntimeError("This component hasn't been initialized yet "
                           "and an app context doesn't exist.")

    # current_app wraps the real app in a proxy; unwrap it so identity
    # (id()) comparisons against the actual app hold
    if hasattr(candidate, '_get_current_object'):
        candidate = candidate._get_current_object()

    return candidate
|
python
|
{
"resource": ""
}
|
q8024
|
Component._get_context_name
|
train
|
def _get_context_name(self, app=None):
    """Build the unique key this component & app's context is stored under.

    The ``context`` lives in a shared Local (so a single component can
    serve multiple apps), which means it cannot be stored on the instance
    itself. This derives a unique, predictable key per (component, app)
    pair instead.

    Returns:
        str: The name of the context variable to set and get the context
        from.
    """
    parts = [
        self.__class__.__name__,
        'context',
        text_type(id(self)),
    ]

    if app:
        parts.append(text_type(id(app)))
    else:
        try:
            parts.append(text_type(id(self.app)))
        except RuntimeError:
            # no resolvable app — fall back to an app-less key
            pass

    return '_'.join(parts)
|
python
|
{
"resource": ""
}
|
q8025
|
BaseApplication.create_app
|
train
|
def create_app(cls, import_name, **settings):
    """Create a standard Fleaker web application.

    This is the main entrypoint for creating your Fleaker application.
    Rather than defining your own app factory, prefer :meth:`create_app`,
    which automatically configures extensions (such as your ORM), parses
    setup code for mixins, and calls relevant hooks (such as to set up
    logging).

    Usage is easy:

    .. code:: python

        from fleaker import App

        def my_create_app():
            app = App.create_app(__name__)
            return app

    and everything else works like a normal Flask app with application
    factories set up!

    .. versionadded:: 0.1.0
        This has always been the preferred way to create Fleaker
        Applications.
    """
    settings = cls.pre_create_app(**settings)

    # only Flask's own constructor kwargs may reach __init__
    flask_kwargs = cls._whitelist_standard_flask_kwargs(settings)
    app = cls(import_name, **flask_kwargs)

    return cls.post_create_app(app, **settings)
|
python
|
{
"resource": ""
}
|
q8026
|
FleakerLogFormatter.format
|
train
|
def format(self, record):
    """Format the log record, injecting level-color start/end attributes.

    ``record.levelcolor``/``record.endlevelcolor`` default to empty
    strings so format strings referencing them never fail.
    """
    record.levelcolor = ''
    record.endlevelcolor = ''

    levelname = getattr(record, 'levelname', None)
    if levelname:
        color = getattr(self.TermColors, levelname, '')
        record.levelcolor = color
        # only emit a reset sequence when a color was actually applied
        record.endlevelcolor = self.TermColors.ENDC if color else ''

    return super(FleakerLogFormatter, self).format(record)
|
python
|
{
"resource": ""
}
|
q8027
|
FleakerBaseException.errorhandler_callback
|
train
|
def errorhandler_callback(cls, exc):
    """Handle a Fleaker exception that bubbled to the top of the stack.

    This should be called in the global error handlers, consolidating
    cleanup tasks (e.g., eventually, rolling back the database session)
    when the exception escapes everything else. This is the method that
    :meth:`register_errorhandler` adds as an errorhandler; see the
    documentation there for more info.

    Args:
        exc (FleakerBaseException): The exception that was thrown that we
            are to handle.
    """
    # @TODO (orm, exc): Implement this when the ORM/DB stuff is done
    # if not exc.prevent_rollback:
    #     db.session.rollback()
    if exc.flash_message:
        flash(exc.flash_message, exc.flash_level)

    if exc.redirect is not MISSING:
        return redirect(url_for(exc.redirect, **exc.redirect_args))

    error_result = exc.error_page()
    if error_result is not None:
        return error_result, exc.status_code or 500
|
python
|
{
"resource": ""
}
|
q8028
|
ErrorAwareApp.post_create_app
|
train
|
def post_create_app(cls, app, **settings):
    """Register the ``AppException`` errorhandler on the newly-created app.

    Args:
        app (fleaker.base.BaseApplication): A Flask application that
            extends the Fleaker Base Application, such that the hooks are
            implemented.

    Kwargs:
        register_errorhandler (bool): Automatically register an
            errorhandler for :class:`AppException` after creating this
            app? Pass ``False`` to prevent registration. Default
            ``True``.

    Returns:
        fleaker.base.BaseApplication: Returns the app it was given.
    """
    if settings.pop('register_errorhandler', True):
        AppException.register_errorhandler(app)

    return app
|
python
|
{
"resource": ""
}
|
q8029
|
create_app
|
train
|
def create_app():
    """Create the standard app for ``fleaker_config`` and register the two
    routes required.
    """
    app = App.create_app(__name__)
    app.configure('.configs.settings')

    # yes, I should use blueprints; but I don't really care for such a small
    # toy app
    @app.route('/config')
    def get_config():
        """Get the current configuration of the app."""
        return jsonify(app.config)

    @app.route('/put_config', methods=['PUT'])
    def put_config():
        """Add to the current configuration of the app.

        Takes any JSON body and adds all keys to the configs with the
        provided values.
        """
        # BUG FIX: ``request.json`` is a property, not a method — the old
        # ``request.json()`` raised ``TypeError: 'dict' object is not
        # callable`` on every PUT. ``get_json()`` is the explicit
        # accessor.
        data = request.get_json()
        for key, val in data.items():
            app.config[key] = val

        return jsonify({'message': 'Config updated!'})

    return app
|
python
|
{
"resource": ""
}
|
q8030
|
FieldSignatureMixin.update_signature
|
train
|
def update_signature(self):
    """Recompute ``signature`` as the SHA1 of the ``signature_fields``.

    Archived records get their signature unset so future records can
    reuse the value.

    Raises:
        AttributeError: If ``Meta.signature_fields`` has no values in it,
            or (via ``getattr``) if a listed field is not on the model.
    """
    field_names = self._meta.signature_fields
    if not field_names:
        raise AttributeError(
            "No fields defined in {}.Meta.signature_fields. Please define "
            "at least one.".format(type(self).__name__)
        )

    # Archived records release their signature for future records.
    if getattr(self, 'archived', False):
        self.signature = None
        return

    # Concatenate the field values (blank/None values fall back to ' ')
    # and SHA1 the result.
    combined = ''.join(
        text_type(getattr(self, name) or ' ') for name in field_names
    )

    # Guard against an entirely empty concatenation — nothing worth
    # hashing there.
    if combined:
        self.signature = sha1(combined.encode('utf-8')).hexdigest()
|
python
|
{
"resource": ""
}
|
q8031
|
connect
|
train
|
def connect(service_account, credentials_file_path, api_version='v2'):
    """Connect to the Google Play (androidpublisher) interface.

    Creates an httplib2.Http object to handle our HTTP requests and
    authorizes it with the service account's P12 credentials. Note that
    ``service_account`` is the email address created for the Service
    account — it must be the email address associated with the key that
    was created.
    """
    scope = 'https://www.googleapis.com/auth/androidpublisher'
    credentials = ServiceAccountCredentials.from_p12_keyfile(
        service_account, credentials_file_path, scopes=scope)

    authorized_http = credentials.authorize(httplib2.Http())

    return build('androidpublisher', api_version, http=authorized_http,
                 cache_discovery=False)
|
python
|
{
"resource": ""
}
|
q8032
|
PendulumField._deserialize
|
train
|
def _deserialize(self, value, attr, obj):
    """Deserialize a string into a Pendulum object.

    Honors the schema's ``convert_dates`` context flag (empty/falsy
    values and disabled conversion pass straight through). When the
    ``timezone`` field option is set, the parsed datetime must already be
    in that timezone.

    Raises:
        ValidationError: If a ``timezone`` option is set and the provided
            datetime is not in that timezone.
    """
    if not self.context.get('convert_dates', True) or not value:
        return value

    # BUG FIX: the original forwarded ``value`` where the parent field
    # expects the surrounding data/object argument; pass ``obj`` through.
    value = super(PendulumField, self)._deserialize(value, attr, obj)

    timezone = self.get_field_value('timezone')
    target = pendulum.instance(value)

    if (timezone and (text_type(target) !=
                      text_type(target.in_timezone(timezone)))):
        raise ValidationError(
            "The provided datetime is not in the "
            "{} timezone.".format(timezone)
        )

    return target
|
python
|
{
"resource": ""
}
|
q8033
|
PhoneNumberField._format_phone_number
|
train
|
def _format_phone_number(self, value, attr):
    """Parse, validate, and format a raw phone-number string.

    Args:
        value (str): The raw phone number input.
        attr (str): The field name, used only in error messages.

    Returns:
        str|None: The formatted number, or ``None`` when parsing fails
        and neither strict option is enabled (the final ``except`` falls
        through without returning).

    Raises:
        ValidationError: If the number fails validation, or fails to
            parse while strict validation/region is configured.
    """
    strict_validation = self.get_field_value(
        'strict_phone_validation',
        default=False
    )
    # The strict-region default tracks strict_validation when unset.
    strict_region = self.get_field_value(
        'strict_phone_region',
        default=strict_validation
    )
    region = self.get_field_value('region', 'US')
    phone_number_format = self.get_field_value(
        'phone_number_format',
        default=phonenumbers.PhoneNumberFormat.INTERNATIONAL
    )
    # Remove excess special chars, except for the plus sign
    stripped_value = re.sub(r'[^\w+]', '', value)
    try:
        # Numbers without a leading '+' are parsed relative to the
        # configured default region, unless a strict region is required.
        if not stripped_value.startswith('+') and not strict_region:
            phone = phonenumbers.parse(stripped_value, region)
        else:
            phone = phonenumbers.parse(stripped_value)
        # NOTE(review): 'and' binds tighter than 'or' here, so any
        # "impossible" number raises regardless of strict_validation,
        # while merely "invalid" numbers raise only when
        # strict_validation is on. Confirm this asymmetry is intended.
        if (not phonenumbers.is_possible_number(phone) or
                not phonenumbers.is_valid_number(phone) and
                strict_validation):
            raise ValidationError(
                "The value for {} ({}) is not a valid phone "
                "number.".format(attr, value)
            )
        return phonenumbers.format_number(phone, phone_number_format)
    except phonenumbers.phonenumberutil.NumberParseException as exc:
        if strict_validation or strict_region:
            raise ValidationError(exc)
|
python
|
{
"resource": ""
}
|
q8034
|
PhoneNumberField._deserialize
|
train
|
def _deserialize(self, value, attr, data):
    """Format and validate the phone number using libphonenumber."""
    formatted = self._format_phone_number(value, attr) if value else value
    return super(PhoneNumberField, self)._deserialize(formatted, attr, data)
|
python
|
{
"resource": ""
}
|
q8035
|
PhoneNumberField._serialize
|
train
|
def _serialize(self, value, attr, obj):
    """Format and validate the phone number user libphonenumber."""
    serialized = super(PhoneNumberField, self)._serialize(value, attr, obj)
    if not serialized:
        return serialized
    return self._format_phone_number(serialized, attr)
|
python
|
{
"resource": ""
}
|
q8036
|
FleakerJSONEncoder.default
|
train
|
def default(self, obj):
    """Encode individual objects into their JSON representation.
    This method is used by :class:`flask.json.JSONEncoder` to encode
    individual items in the JSON object.
    Args:
        obj (object): Any Python object we wish to convert to JSON.
    Returns:
        str: The stringified, valid JSON representation of our provided
            object.
    """
    if isinstance(obj, decimal.Decimal):
        # Render as fixed-point, then strip trailing zeros (and a bare
        # trailing dot) so e.g. Decimal('1.50') serializes as '1.5'.
        obj = format(obj, 'f')
        str_digit = text_type(obj)
        return (str_digit.rstrip('0').rstrip('.')
                if '.' in str_digit
                else str_digit)
    elif isinstance(obj, phonenumbers.PhoneNumber):
        return phonenumbers.format_number(
            obj,
            phonenumbers.PhoneNumberFormat.E164
        )
    elif isinstance(obj, pendulum.Pendulum):
        # NOTE(review): checked before the plain datetime branch --
        # presumably because Pendulum subclasses datetime; confirm.
        return text_type(obj)
    elif isinstance(obj, arrow.Arrow):
        return text_type(obj)
    elif isinstance(obj, (datetime.datetime, datetime.date)):
        return obj.isoformat()
    # Fall back to serializing any iterable as a JSON array.
    try:
        return list(iter(obj))
    except TypeError:
        pass
    # Let the base encoder raise the standard TypeError otherwise.
    return super(FleakerJSONEncoder, self).default(obj)
|
python
|
{
"resource": ""
}
|
q8037
|
ArrowField._serialize
|
train
|
def _serialize(self, value, attr, obj):
    """Convert the Arrow object into a string."""
    to_dump = value.datetime if isinstance(value, arrow.arrow.Arrow) else value
    return super(ArrowField, self)._serialize(to_dump, attr, obj)
|
python
|
{
"resource": ""
}
|
q8038
|
ArrowField._deserialize
|
train
|
def _deserialize(self, value, attr, data):
    """Deserializes a string into an Arrow object."""
    if not value or not self.context.get('convert_dates', True):
        return value
    parsed = super(ArrowField, self)._deserialize(value, attr, data)
    target = arrow.get(parsed)
    timezone = self.get_field_value('timezone')
    if timezone and text_type(target.to(timezone)) != text_type(target):
        raise ValidationError(
            "The provided datetime is not in the "
            "{} timezone.".format(timezone)
        )
    return target
|
python
|
{
"resource": ""
}
|
q8039
|
PendulumDateTimeField.python_value
|
train
|
def python_value(self, value):
    """Return the value in the database as a Pendulum object.

    Returns:
        pendulum.Pendulum:
            An instance of Pendulum with the field filled in.
    """
    raw = super(PendulumDateTimeField, self).python_value(value)
    # datetime must be tested before date, since datetime subclasses date.
    if isinstance(raw, datetime.datetime):
        return pendulum.instance(raw)
    if isinstance(raw, datetime.date):
        midnight = datetime.datetime.combine(raw, datetime.datetime.min.time())
        return pendulum.instance(midnight)
    if isinstance(raw, string_types):
        return pendulum.parse(raw)
    return raw
|
python
|
{
"resource": ""
}
|
q8040
|
PendulumDateTimeField.db_value
|
train
|
def db_value(self, value):
    """Convert the Pendulum instance to a datetime for saving in the db."""
    if isinstance(value, pendulum.Pendulum):
        fields = (value.year, value.month, value.day, value.hour,
                  value.minute, value.second, value.microsecond,
                  value.tzinfo)
        value = datetime.datetime(*fields)
    return super(PendulumDateTimeField, self).db_value(value)
|
python
|
{
"resource": ""
}
|
q8041
|
check_rollout
|
train
|
def check_rollout(edits_service, package_name, days):
    """Check if package_name has a release on staged rollout for too long.

    Args:
        edits_service: Google Play "edits" API resource used to query the
            production track.
        package_name (str): The package to inspect.
        days (int|float): Yield releases that have been in progress for
            longer than this many days.

    Yields:
        tuple: ``(release, age_seconds)`` for each stale staged rollout.
    """
    edit = edits_service.insert(body={}, packageName=package_name).execute()
    response = edits_service.tracks().get(editId=edit['id'], track='production', packageName=package_name).execute()
    releases = response['releases']
    for release in releases:
        # 'inProgress' marks a staged rollout that has not completed.
        if release['status'] == 'inProgress':
            url = 'https://archive.mozilla.org/pub/mobile/releases/{}/SHA512SUMS'.format(release['name'])
            resp = requests.head(url)
            if resp.status_code != 200:
                if resp.status_code != 404:  # 404 is expected for release candidates
                    logger.warning("Could not check %s: %s", url, resp.status_code)
                continue
            # Age = now minus the archive file's Last-Modified timestamp.
            # NOTE(review): ``eu`` is presumably email.utils and ``DAY``
            # a seconds-per-day constant -- confirm at module level.
            age = time.time() - calendar.timegm(eu.parsedate(resp.headers['Last-Modified']))
            if age >= days * DAY:
                yield release, age
|
python
|
{
"resource": ""
}
|
q8042
|
FleakerJSONSchema.generate_json_schema
|
train
|
def generate_json_schema(cls, schema, context=DEFAULT_DICT):
    """Generate a JSON Schema from a Marshmallow schema.
    Args:
        schema (marshmallow.Schema|str): The Marshmallow schema, or the
            Python path to one, to create the JSON schema for.
    Keyword Args:
        context (dict, optional): The Marshmallow context to be pushed to
            the schema while the JSON schema is generated.
    Returns:
        dict: The JSON schema in dictionary form.
    """
    schema = cls._get_schema(schema)
    # Generate the JSON Schema
    return cls(context=context).dump(schema).data
|
python
|
{
"resource": ""
}
|
q8043
|
FleakerJSONSchema.write_schema_to_file
|
train
|
def write_schema_to_file(cls, schema, file_pointer=stdout,
                         folder=MISSING, context=DEFAULT_DICT):
    """Given a Marshmallow schema, create a JSON Schema for it.
    Args:
        schema (marshmallow.Schema|str): The Marshmallow schema, or the
            Python path to one, to create the JSON schema for.
    Keyword Args:
        file_pointer (file, optional): The pointer to the file to write
            this schema to. If not provided, the schema will be dumped to
            ``sys.stdout``.
        folder (str, optional): The folder in which to save the JSON
            schema. The name of the schema file can be optionally
            controlled my the schema's ``Meta.json_schema_filename``. If
            that attribute is not set, the class's name will be used for
            the filename. If writing the schema to a specific file is
            desired, please pass in a ``file_pointer``.
        context (dict, optional): The Marshmallow context to be pushed to
            the schema generates the JSONSchema.
    Returns:
        dict: The JSON schema in dictionary form.
    """
    schema = cls._get_schema(schema)
    json_schema = cls.generate_json_schema(schema, context=context)
    if folder:
        schema_filename = getattr(
            schema.Meta,
            'json_schema_filename',
            '.'.join([schema.__class__.__name__, 'json'])
        )
        json_path = os.path.join(folder, schema_filename)
        # Bug fix: the file handle was previously opened and never
        # closed; use a context manager so it is always released.
        with open(json_path, 'w') as folder_fp:
            json.dump(json_schema, folder_fp, indent=2)
    else:
        json.dump(json_schema, file_pointer, indent=2)
    return json_schema
|
python
|
{
"resource": ""
}
|
q8044
|
FleakerJSONSchema._get_schema
|
train
|
def _get_schema(cls, schema):
    """Method that will fetch a Marshmallow schema flexibly.
    Args:
        schema (marshmallow.Schema|str): Either the schema class, an
            instance of a schema, or a Python path to a schema.
    Returns:
        marshmallow.Schema: The desired schema.
    Raises:
        TypeError: This is raised if the provided object isn't
            a Marshmallow schema.
    """
    target = schema
    # A string is treated as a dotted Python path to the schema.
    if isinstance(target, string_types):
        target = cls._get_object_from_python_path(target)
    # A class is instantiated with no arguments.
    if isclass(target):
        target = target()
    if isinstance(target, Schema):
        return target
    raise TypeError("The schema must be a path to a Marshmallow "
                    "schema or a Marshmallow schema.")
|
python
|
{
"resource": ""
}
|
q8045
|
FleakerJSONSchema._get_object_from_python_path
|
train
|
def _get_object_from_python_path(python_path):
"""Method that will fetch a Marshmallow schema from a path to it.
Args:
python_path (str): The string path to the Marshmallow schema.
Returns:
marshmallow.Schema: The schema matching the provided path.
Raises:
TypeError: This is raised if the specified object isn't
a Marshmallow schema.
"""
# Dissect the path
python_path = python_path.split('.')
module_path = python_path[:-1]
object_class = python_path[-1]
if isinstance(module_path, list):
module_path = '.'.join(module_path)
# Grab the object
module = import_module(module_path)
schema = getattr(module, object_class)
if isclass(schema):
schema = schema()
return schema
|
python
|
{
"resource": ""
}
|
q8046
|
MarshmallowAwareApp.post_create_app
|
train
|
def post_create_app(cls, app, **settings):
    """Automatically register and init the Flask Marshmallow extension.
    Args:
        app (flask.Flask): The application instance in which to initialize
            Flask Marshmallow upon.
    Kwargs:
        settings (dict): The settings passed to this method from the
            parent app.
    Returns:
        flask.Flask: The Flask application that was passed in.
    """
    # Let parent mixins run their own post-create hooks first.
    super(MarshmallowAwareApp, cls).post_create_app(app, **settings)
    # ``marsh`` is presumably the module-level flask-marshmallow
    # instance -- confirm at the top of this module.
    marsh.init_app(app)
    return app
|
python
|
{
"resource": ""
}
|
q8047
|
get_original_before_save
|
train
|
def get_original_before_save(sender, instance, created):
    """Event listener to get the original instance before it's saved."""
    # Only existing, event-enabled records have an "original" to snapshot.
    if instance._meta.event_ready and not created:
        instance.get_original()
|
python
|
{
"resource": ""
}
|
q8048
|
post_save_event_listener
|
train
|
def post_save_event_listener(sender, instance, created):
    """Event listener to create creation and update events."""
    if not instance._meta.event_ready:
        return
    # Dispatch to the right event factory for this save.
    make_event = (instance.create_creation_event if created
                  else instance.create_update_event)
    make_event()
    # Reset the original key
    instance._original = None
|
python
|
{
"resource": ""
}
|
q8049
|
validate_event_type
|
train
|
def validate_event_type(sender, event, created):
    """Verify that the Event's code is a valid one."""
    valid_codes = sender.event_codes()
    if event.code in valid_codes:
        return
    raise ValueError("The Event.code '{}' is not a valid Event "
                     "code.".format(event.code))
|
python
|
{
"resource": ""
}
|
q8050
|
EventMixin.get_original
|
train
|
def get_original(self):
    """Get the original instance of this instance before it's updated.

    The fetched row is memoized on ``self._original`` so repeated calls
    during one save cycle hit the database only once.

    Returns:
        fleaker.peewee.EventMixin:
            The original instance of the model.
    """
    pk_value = self._get_pk_value()
    # Query only when the record already exists (integer PK assigned)
    # and no original has been cached yet.
    if isinstance(pk_value, int) and not self._original:
        self._original = (
            self.select().where(self.__class__.id == pk_value).get()
        )
    return self._original
|
python
|
{
"resource": ""
}
|
q8051
|
EventMixin.create_creation_event
|
train
|
def create_creation_event(self):
    """Parse the create message DSL to insert the data into the Event.
    Returns:
        fleaker.peewee.EventStorageMixin:
            A new Event instance with data put in it
    """
    event = self.create_audit_event(code='AUDIT_CREATE')
    spec = self._meta.create_message
    if spec:
        event.body = spec['message']
        event.code = spec['code']
        event.meta = self.parse_meta(spec['meta'])
    self.create_event_callback(event)
    event.save()
    return event
|
python
|
{
"resource": ""
}
|
q8052
|
EventMixin.create_update_event
|
train
|
def create_update_event(self):
    """Parse the update messages DSL to insert the data into the Event.

    Each key of ``self._meta.update_messages`` is a field name (or group
    of field names); an event is created only for groups where at least
    one field changed relative to the pre-save original.

    Returns:
        list[fleaker.peewee.EventStorageMixin]:
            All the events that were created for the update.
    """
    events = []
    for fields, rules in iteritems(self._meta.update_messages):
        # Normalize a single field name into a one-element tuple.
        if not isinstance(fields, (list, tuple, set)):
            fields = (fields,)
        # Did any field in this group change from the original row?
        changed = any([
            getattr(self, field) != getattr(self.get_original(), field)
            for field in fields
        ])
        if changed:
            event = self.create_audit_event(code=rules['code'])
            event.body = rules['message']
            event.meta = self.parse_meta(rules['meta'])
            events.append(event)
    self.update_event_callback(events)
    # Persist all events in one transaction so a partial batch never
    # hits the database.
    with db.database.atomic():
        for event in events:
            event.save()
    return events
|
python
|
{
"resource": ""
}
|
q8053
|
EventMixin.create_deletion_event
|
train
|
def create_deletion_event(self):
    """Parse the delete message DSL to insert data into the Event.
    Return:
        Event: The Event with the relevant information put in it.
    """
    event = self.create_audit_event(code='AUDIT_DELETE')
    spec = self._meta.delete_message
    if spec:
        event.code = spec['code']
        event.body = spec['message']
        event.meta = self.parse_meta(spec['meta'])
    self.delete_event_callback(event)
    event.save()
    return event
|
python
|
{
"resource": ""
}
|
q8054
|
EventMixin.parse_meta
|
train
|
def parse_meta(self, meta):
    """Resolve a meta-DSL mapping into concrete values.

    Copies the keys of ``meta`` into a new dict, replacing each value
    (an attribute path) with the current value found at the end of that
    path. Nested dicts are resolved recursively; falsey paths are
    skipped.

    Args:
        meta (dict): Mapping of output keys to attribute paths.

    Returns:
        dict: The same keys with the resolved values.
    """
    parsed = {}
    for key, path in meta.items():
        if not path:
            continue
        if isinstance(path, dict):
            # Nested mapping: resolve it recursively.
            parsed[key] = self.parse_meta(path)
        elif path.startswith('current_user.'):
            parsed[key] = self.get_path_attribute(current_user, path)
        elif path.startswith('original.'):
            parsed[key] = self.get_path_attribute(self.get_original(), path)
        else:
            parsed[key] = self.get_path_attribute(self, path)
    return parsed
|
python
|
{
"resource": ""
}
|
q8055
|
EventMixin.get_path_attribute
|
train
|
def get_path_attribute(obj, path):
    """Follow a dotted attribute path and return its final value.

    Given a path like ``related_record.related_record2.id``, walk each
    segment starting from ``obj``. Returns None if any segment is
    missing along the way.

    Args:
        obj (fleaker.db.Model): The object to start the walk from.
        path (str): The dotted path to follow.

    Returns:
        (int|str|None): The value at the end of the path, or None.
    """
    # Strip the reserved prefixes that only select the starting object.
    cleaned = path.replace('original.', '').replace('current_user.', '')
    current = obj
    try:
        for segment in cleaned.split('.'):
            try:
                current = getattr(current, segment)
            except AttributeError:
                # Fall back to resolving a lazy/deferred relation.
                current = getattr(current.get(), segment)
    except (peewee.DoesNotExist, AttributeError):
        return None
    return current
|
python
|
{
"resource": ""
}
|
q8056
|
EventMixin.copy_foreign_keys
|
train
|
def copy_foreign_keys(self, event):
    """Copies possible foreign key values from this instance into the
    Event, skipping common keys like modified and created.

    Args:
        event (Event): The Event instance to copy the FKs into.
    """
    # Fields the Event and this model share by name.
    event_keys = set(event._meta.fields.keys())
    obj_keys = self._meta.fields.keys()
    matching_keys = event_keys.intersection(obj_keys)
    for key in matching_keys:
        # Skip created_by because that will always be the current_user
        # for the Event.
        if key == 'created_by':
            continue
        # Skip anything that isn't a FK
        if not isinstance(self._meta.fields[key], peewee.ForeignKeyField):
            continue
        setattr(event, key, getattr(self, key))
    # Attempt to set the obj's ID in the correct FK field on Event, if it
    # exists. If this conflicts with desired behavior, handle this in the
    # respective callback. This does rely on the FK matching the lower case
    # version of the class name and that the event isn't trying to delete
    # the current record, because that ends badly.
    possible_key = self.__class__.__name__.lower()
    if possible_key in event_keys and event.code != 'AUDIT_DELETE':
        setattr(event, possible_key, self)
|
python
|
{
"resource": ""
}
|
q8057
|
EventMixin.create_audit_event
|
train
|
def create_audit_event(self, code='AUDIT'):
    """Creates a generic auditing Event logging the changes between saves
    and the initial data in creates.
    Kwargs:
        code (str): The code to set the new Event to.
    Returns:
        Event: A new event with relevant info inserted into it
    """
    event = self._meta.event_model(
        code=code,
        model=self.__class__.__name__,
    )
    # Use the logged in User, if possible. ``current_user`` is
    # presumably flask-login's proxy, falsy outside a login context --
    # confirm.
    if current_user:
        event.created_by = current_user.get_id()
    # Mirror shared FK fields and raw before/after data onto the event.
    self.copy_foreign_keys(event)
    self.populate_audit_fields(event)
    return event
|
python
|
{
"resource": ""
}
|
q8058
|
EventMixin.populate_audit_fields
|
train
|
def populate_audit_fields(self, event):
    """Populates the audit JSON fields with raw data from the model, so
    all changes can be tracked and diffed.

    Args:
        event (Event): The Event instance to attach the data to.
    """
    # NOTE(review): ``_data`` is peewee's internal raw-field dict
    # (peewee 2.x era) -- verify before upgrading peewee.
    event.updated = self._data
    event.original = self.get_original()._data
|
python
|
{
"resource": ""
}
|
q8059
|
EventStorageMixin.formatted_message
|
train
|
def formatted_message(self):
    """Method that will return the formatted message for the event.
    This formatting is done with Jinja and the template text is stored in
    the ``body`` attribute. The template is supplied the following
    variables, as well as the built in Flask ones:
    - ``event``: This is the event instance that this method belongs to.
    - ``meta``: This is a dictionary of cached values that have been stored
      when the event was created based upon the event's DSL.
    - ``original``: This is a dump of the instance before the instance was
      updated.
    - ``updated``: This is a dump of the instance after it was updated.
    - ``version``: This is the version of the event DSL.
    Jinja rendering is slower than raw Python string formatting, so this
    is intended to be wrapped in a caching property decorator (declared
    above this method -- confirm).
    """
    return render_template_string(
        self.body,
        event=self,
        meta=self.meta,
        original=self.original,
        updated=self.updated,
        version=self.version,
    )
|
python
|
{
"resource": ""
}
|
q8060
|
CreatedMixin._get_cached_time
|
train
|
def _get_cached_time(self):
"""Method that will allow for consistent modified and archived
timestamps.
Returns:
self.Meta.datetime: This method will return a datetime that is
compatible with the current class's datetime library.
"""
if not self._cached_time:
self._cached_time = self._meta.datetime.utcnow()
return self._cached_time
|
python
|
{
"resource": ""
}
|
q8061
|
_get_list_of_completed_locales
|
train
|
def _get_list_of_completed_locales(product, channel):
    """ Get all the translated locales supported by Google play

    Locales that Google Play does not support, or that have not been
    translated yet, are excluded so they are never downloaded.

    Args:
        product (str): Product name interpolated into the locales URL.
        channel (str): Release channel interpolated into the locales URL.

    Returns:
        The decoded JSON payload from the locales endpoint (presumably a
        list of locale codes -- confirm against ``_ALL_LOCALES_URL``).
    """
    return utils.load_json_url(_ALL_LOCALES_URL.format(product=product, channel=channel))
|
python
|
{
"resource": ""
}
|
q8062
|
FleakerFieldMixin.get_field_value
|
train
|
def get_field_value(self, key, default=MISSING):
    """Method to fetch a value from either the fields metadata or the
    schemas context, in that order.
    Args:
        key (str): The name of the key to grab the value for.
    Keyword Args:
        default (object, optional): If the value doesn't exist in the
            schema's ``context`` or the field's ``metadata``, this value
            will be returned. By default this will be ``MISSING``.
    Returns:
        object: This will be the correct value to use given the parameters.
    """
    # Context takes precedence over the field's own metadata.
    from_context = self.context.get(key)
    if from_context is not None:
        return from_context
    from_metadata = self.metadata.get(key)
    if from_metadata is not None:
        return from_metadata
    return default
|
python
|
{
"resource": ""
}
|
q8063
|
Model.get_by_id
|
train
|
def get_by_id(cls, record_id, execute=True):
    """Return a single instance of the model queried by ID.
    Args:
        record_id (int): Integer representation of the ID to query on.
    Keyword Args:
        execute (bool, optional):
            Should this method execute the query or return a query object
            for further manipulation?
    Returns:
        cls | :py:class:`peewee.SelectQuery`:
            If ``execute`` is ``True``, the query is executed, otherwise
            a query is returned.
    Raises:
        :py:class:`peewee.DoesNotExist`:
            Raised if a record with that ID doesn't exist.
    """
    query = cls.base_query().where(cls.id == record_id)
    return query.get() if execute else query
|
python
|
{
"resource": ""
}
|
q8064
|
Model.update_instance
|
train
|
def update_instance(self, data):
    """Update this record in place with the provided data and save it.
    Args:
        data (dict): The new data to update the record with.
    Returns:
        self: This is an instance of itself with the updated data.
    Raises:
        AttributeError: This is raised if a key in the ``data`` isn't
            a field on the model.
    """
    # ``dict.items()`` works on both Python 2 and 3, so the six
    # ``iteritems`` helper is unnecessary here.
    for key, val in data.items():
        if not hasattr(self, key):
            raise AttributeError(
                "No field named {key} for model {model}".format(
                    key=key,
                    model=self.__class__.__name__
                )
            )
        setattr(self, key, val)
    self.save()
    return self
|
python
|
{
"resource": ""
}
|
q8065
|
ProcessStarter.wait
|
train
|
def wait(self, log_file):
    """Wait until the process is ready.

    Streams the process log through the subclass hooks and returns True
    as soon as a line matches ``self.pattern``.
    """
    # Feed each log line through the filtering/normalizing hooks.
    # NOTE(review): ``std`` is presumably a namespace re-exporting the
    # standard library (``std.re``) -- confirm at module level.
    lines = map(self.log_line, self.filter_lines(self.get_lines(log_file)))
    # any() short-circuits, so lines are consumed only until a match.
    return any(
        std.re.search(self.pattern, line)
        for line in lines
    )
|
python
|
{
"resource": ""
}
|
q8066
|
CompatStarter.prep
|
train
|
def prep(self, wait, args, env=None):
    """
    Given the return value of a preparefunc, prepare this
    CompatStarter.
    """
    self.pattern, self.env, self.args = wait, env, args
    # wait is a function, supersedes the default behavior
    if callable(wait):
        self.wait = lambda lines: wait()
|
python
|
{
"resource": ""
}
|
q8067
|
CompatStarter.wrap
|
train
|
def wrap(self, starter_cls):
    """
    If starter_cls is not a ProcessStarter, assume it's the legacy
    preparefunc and return it bound to a CompatStarter.
    """
    is_starter = (isinstance(starter_cls, type)
                  and issubclass(starter_cls, ProcessStarter))
    if is_starter:
        return starter_cls
    warnings.warn('Pass a ProcessStarter for preparefunc',
                  DeprecationWarning, stacklevel=3)
    return functools.partial(CompatStarter, starter_cls)
|
python
|
{
"resource": ""
}
|
q8068
|
BaseClient._construct_url
|
train
|
def _construct_url(self, url, base, quote):
"""
Adds the orderbook to the url if base and quote are specified.
"""
if not base and not quote:
return url
else:
url = url + base.lower() + quote.lower() + "/"
return url
|
python
|
{
"resource": ""
}
|
q8069
|
BaseClient._request
|
train
|
def _request(self, func, url, version=1, *args, **kwargs):
"""
Make a generic request, adding in any proxy defined by the instance.
Raises a ``requests.HTTPError`` if the response status isn't 200, and
raises a :class:`BitstampError` if the response contains a json encoded
error message.
"""
return_json = kwargs.pop('return_json', False)
url = self.api_url[version] + url
response = func(url, *args, **kwargs)
if 'proxies' not in kwargs:
kwargs['proxies'] = self.proxydict
# Check for error, raising an exception if appropriate.
response.raise_for_status()
try:
json_response = response.json()
except ValueError:
json_response = None
if isinstance(json_response, dict):
error = json_response.get('error')
if error:
raise BitstampError(error)
elif json_response.get('status') == "error":
raise BitstampError(json_response.get('reason'))
if return_json:
if json_response is None:
raise BitstampError(
"Could not decode json for: " + response.text)
return json_response
return response
|
python
|
{
"resource": ""
}
|
q8070
|
Public.ticker
|
train
|
def ticker(self, base="btc", quote="usd"):
    """
    Returns dictionary.
    """
    endpoint = self._construct_url("ticker/", base, quote)
    return self._get(endpoint, return_json=True, version=2)
|
python
|
{
"resource": ""
}
|
q8071
|
Public.order_book
|
train
|
def order_book(self, group=True, base="btc", quote="usd"):
    """
    Returns dictionary with "bids" and "asks".
    Each is a list of open orders and each order is represented as a list
    of price and amount.
    """
    endpoint = self._construct_url("order_book/", base, quote)
    return self._get(endpoint, params={'group': group},
                     return_json=True, version=2)
|
python
|
{
"resource": ""
}
|
q8072
|
Public.transactions
|
train
|
def transactions(self, time=TransRange.HOUR, base="btc", quote="usd"):
    """
    Returns transactions for the last 'timedelta' seconds.
    Parameter time is specified by one of two values of TransRange class.
    """
    endpoint = self._construct_url("transactions/", base, quote)
    return self._get(endpoint, params={'time': time},
                     return_json=True, version=2)
|
python
|
{
"resource": ""
}
|
q8073
|
Trading.get_nonce
|
train
|
def get_nonce(self):
    """
    Get a unique nonce for the bitstamp API.
    This integer must always be increasing, so use the current unix time.
    Every time this variable is requested, it automatically increments to
    allow for more than one API request per second.
    This isn't a thread-safe function however, so you should only rely on a
    single thread if you have a high level of concurrent API requests in
    your application.
    """
    previous = getattr(self, '_nonce', 0)
    candidate = previous + 1 if previous else 0
    # If the unix time is greater, use that instead (helps low
    # concurrency multi-threaded apps always call with the largest nonce).
    self._nonce = max(int(time.time()), candidate)
    return self._nonce
|
python
|
{
"resource": ""
}
|
q8074
|
Trading._default_data
|
train
|
def _default_data(self, *args, **kwargs):
    """
    Generate a one-time signature and other data required to send a secure
    POST request to the Bitstamp API.
    """
    data = super(Trading, self)._default_data(*args, **kwargs)
    nonce = self.get_nonce()
    message = (str(nonce) + self.username + self.key).encode('utf-8')
    digest = hmac.new(self.secret.encode('utf-8'), msg=message,
                      digestmod=hashlib.sha256)
    data.update({
        'key': self.key,
        'signature': digest.hexdigest().upper(),
        'nonce': nonce,
    })
    return data
|
python
|
{
"resource": ""
}
|
q8075
|
Trading.cancel_order
|
train
|
def cancel_order(self, order_id, version=1):
    """
    Cancel the order specified by order_id.
    Version 1 (default for backwards compatibility reasons):
        Returns True if order was successfully canceled, otherwise
        raise a BitstampError.
    Version 2:
        Returns dictionary of the canceled order, containing the keys:
        id, type, price, amount
    """
    payload = {'id': order_id}
    return self._post("cancel_order/", data=payload, return_json=True,
                      version=version)
|
python
|
{
"resource": ""
}
|
q8076
|
Trading.buy_limit_order
|
train
|
def buy_limit_order(self, amount, price, base="btc", quote="usd", limit_price=None):
    """
    Order to buy amount of bitcoins for specified price.
    """
    payload = {'amount': amount, 'price': price}
    if limit_price is not None:
        payload['limit_price'] = limit_price
    endpoint = self._construct_url("buy/", base, quote)
    return self._post(endpoint, data=payload, return_json=True, version=2)
|
python
|
{
"resource": ""
}
|
q8077
|
Trading.buy_market_order
|
train
|
def buy_market_order(self, amount, base="btc", quote="usd"):
    """
    Order to buy amount of bitcoins for market price.
    """
    endpoint = self._construct_url("buy/market/", base, quote)
    return self._post(endpoint, data={'amount': amount},
                      return_json=True, version=2)
|
python
|
{
"resource": ""
}
|
q8078
|
Trading.check_bitstamp_code
|
train
|
def check_bitstamp_code(self, code):
    """
    Returns JSON dictionary containing USD and BTC amount included in given
    bitstamp code.
    """
    return self._post("check_code/", data={'code': code},
                      return_json=True, version=1)
|
python
|
{
"resource": ""
}
|
q8079
|
Trading.redeem_bitstamp_code
|
train
|
def redeem_bitstamp_code(self, code):
    """
    Returns JSON dictionary containing USD and BTC amount added to user's
    account.
    """
    return self._post("redeem_code/", data={'code': code},
                      return_json=True, version=1)
|
python
|
{
"resource": ""
}
|
q8080
|
Trading.withdrawal_requests
|
train
|
def withdrawal_requests(self, timedelta=86400):
    """
    Returns list of withdrawal requests.
    Each request is represented as a dictionary.
    By default, the last 24 hours (86400 seconds) are returned.
    """
    payload = {'timedelta': timedelta}
    return self._post("withdrawal_requests/", data=payload,
                      return_json=True, version=1)
|
python
|
{
"resource": ""
}
|
q8081
|
Trading.litecoin_withdrawal
|
train
|
def litecoin_withdrawal(self, amount, address):
    """
    Send litecoins to another litecoin wallet specified by address.
    """
    payload = {'amount': amount, 'address': address}
    return self._post("ltc_withdrawal/", data=payload,
                      return_json=True, version=2)
|
python
|
{
"resource": ""
}
|
q8082
|
Trading.ripple_withdrawal
|
train
|
def ripple_withdrawal(self, amount, address, currency):
    """
    Returns true if successful.
    """
    payload = {'amount': amount, 'address': address, 'currency': currency}
    response = self._post("ripple_withdrawal/", data=payload,
                          return_json=True)
    return self._expect_true(response)
|
python
|
{
"resource": ""
}
|
q8083
|
Trading.xrp_withdrawal
|
train
|
def xrp_withdrawal(self, amount, address, destination_tag=None):
    """
    Sends xrps to another xrp wallet specified by address. Returns withdrawal id.
    """
    payload = {'amount': amount, 'address': address}
    if destination_tag:
        payload['destination_tag'] = destination_tag
    response = self._post("xrp_withdrawal/", data=payload,
                          return_json=True, version=2)
    return response["id"]
|
python
|
{
"resource": ""
}
|
q8084
|
Trading.transfer_to_main
|
train
|
def transfer_to_main(self, amount, currency, subaccount=None):
    """
    Returns dictionary with status.
    subaccount has to be the numerical id of the subaccount, not the name
    """
    payload = {'amount': amount, 'currency': currency}
    if subaccount is not None:
        payload['subAccount'] = subaccount
    return self._post("transfer-to-main/", data=payload,
                      return_json=True, version=2)
|
python
|
{
"resource": ""
}
|
q8085
|
Sound.resample
|
train
|
def resample(self, target_sr):
    """ Returns a new sound with a samplerate of target_sr.

    Args:
        target_sr (int): The sample rate to resample to.

    Returns:
        Sound: A new Sound instance; this instance is left untouched.
    """
    y_hat = librosa.core.resample(self.y, self.sr, target_sr)
    return Sound(y_hat, target_sr)
|
python
|
{
"resource": ""
}
|
q8086
|
Sound.as_ipywidget
|
train
|
def as_ipywidget(self):
    """ Provides an IPywidgets player that can be used in a notebook.

    Returns:
        IPython.display.Audio: An audio player widget for this sound.
    """
    # Imported lazily so IPython is only required inside notebooks.
    from IPython.display import Audio
    return Audio(data=self.y, rate=self.sr)
|
python
|
{
"resource": ""
}
|
q8087
|
Sound.from_file
|
train
|
def from_file(cls, filename, sr=22050):
    """ Loads an audiofile, uses sr=22050 by default.

    Args:
        filename (str): Path of the audio file to load.
        sr (int): Target samplerate handed to librosa (default 22050).

    Returns:
        Sound: A new instance wrapping the decoded samples.
    """
    y, sr = librosa.load(filename, sr=sr)
    return cls(y, sr)
|
python
|
{
"resource": ""
}
|
q8088
|
Sound.chunks
|
train
|
def chunks(self):
    """ Returns a chunk iterator over the sound.

    The iterator is built lazily on first access and memoized on
    ``self._it``. When ``self.loop`` is set, exhausting the sound resets
    the stretching state and iteration restarts from the beginning.
    """
    if not hasattr(self, '_it'):
        class ChunkIterator(object):
            # NOTE(review): the iterator methods name their own ``self``
            # argument ``iter`` (shadowing the builtin); the enclosing
            # Sound instance is reached through the closure as ``self``.
            def __iter__(iter):
                return iter
            def __next__(iter):
                try:
                    chunk = self._next_chunk()
                except StopIteration:
                    if self.loop:
                        # Rewind and continue producing chunks forever.
                        self._init_stretching()
                        return iter.__next__()
                    raise
                return chunk
            next = __next__  # Python 2 iterator-protocol alias
        self._it = ChunkIterator()
    return self._it
|
python
|
{
"resource": ""
}
|
q8089
|
Sound.pitch_shifter
|
train
|
def pitch_shifter(self, chunk, shift):
    """ Pitch-Shift the given chunk by shift semi-tones.

    Args:
        chunk (numpy.ndarray): 1-D block of audio samples.
        shift (int): Number of frequency bins to rotate the spectrum by;
            negative values shift downward.

    Returns:
        numpy.ndarray: The shifted chunk, same shape and dtype as the
        input.
    """
    freq = numpy.fft.rfft(chunk)
    N = len(freq)
    shifted_freq = numpy.zeros(N, freq.dtype)
    # Bug fix: numpy.round returns a float, which is not a valid slice
    # index on modern NumPy -- cast the split point to int explicitly.
    S = int(round(shift if shift > 0 else N + shift))
    s = N - S
    # Rotate the spectrum: bin k moves to bin (k + S) mod N.
    shifted_freq[:S] = freq[s:]
    shifted_freq[S:] = freq[:s]
    shifted_chunk = numpy.fft.irfft(shifted_freq)
    return shifted_chunk.astype(chunk.dtype)
|
python
|
{
"resource": ""
}
|
q8090
|
Sound._time_stretcher
|
train
|
def _time_stretcher(self, stretch_factor):
    """ Real time time-scale without pitch modification.

    :param float stretch_factor: audio scale factor (if > 1 speed up the sound else slow it down)

    .. warning:: This method needs to store the phase computed from the previous chunk. Thus, it can only be called chunk by chunk.
    """
    start = self._i2
    # Stop before the synthesis window would overrun the output buffer.
    end = min(self._i2 + self._N, len(self._sy) - (self._N + self._H))
    if start >= end:
        raise StopIteration
    # The not so clean code below basically implements a phase vocoder
    out = numpy.zeros(self._N, dtype=numpy.complex)
    while self._i2 < end:
        if self._i1 + self._N + self._H > len(self.y):
            raise StopIteration
        a, b = self._i1, self._i1 + self._N
        # Two overlapping analysis frames, H samples apart.
        S1 = numpy.fft.fft(self._win * self.y[a: b])
        S2 = numpy.fft.fft(self._win * self.y[a + self._H: b + self._H])
        # Accumulate the inter-frame phase advance and wrap into (-pi, pi].
        self._phi += (numpy.angle(S2) - numpy.angle(S1))
        self._phi = self._phi - 2.0 * numpy.pi * numpy.round(self._phi / (2.0 * numpy.pi))
        out.real, out.imag = numpy.cos(self._phi), numpy.sin(self._phi)
        # Overlap-add the resynthesized frame into the output buffer.
        self._sy[self._i2: self._i2 + self._N] += self._win * numpy.fft.ifft(numpy.abs(S2) * out).real
        # Analysis hop scales with the stretch factor; synthesis hop is
        # fixed. NOTE(review): this reads self.stretch_factor while the
        # parameter is used below -- confirm they stay in sync.
        self._i1 += int(self._H * self.stretch_factor)
        self._i2 += self._H
    chunk = self._sy[start:end]
    # Bypass the vocoder output entirely when no stretching is requested.
    if stretch_factor == 1.0:
        chunk = self.y[start:end]
    return chunk
|
python
|
{
"resource": ""
}
|
q8091
|
Parser.define
|
train
|
def define(self, p):
    """Install the parser *p* as the implementation wrapped by this object."""
    run_impl = getattr(p, 'run', p)
    # In debug mode the callable is stashed on ``_run`` so any tracing
    # wrapper on ``run`` stays in place; otherwise ``run`` is replaced
    # directly.
    attr = '_run' if debug else 'run'
    setattr(self, attr, run_impl)
    self.named(getattr(p, 'name', p.__doc__))
|
python
|
{
"resource": ""
}
|
q8092
|
Sampler.play
|
train
|
def play(self, sound):
    """ Add *sound* to the sampler and block until it has been played.

    :param sound: sound to play; its samplerate must match the sampler's.
    :raises ValueError: if the sound's samplerate differs from the
        sampler's.

    .. note:: If the sound is already playing, it will restart from the
        beginning.
    """
    self.is_done.clear()  # hold is_done until the sound is played
    if self.sr != sound.sr:
        # Bug fix: the message was passed unformatted with the values as
        # extra exception args; format it so the error is readable.
        raise ValueError(
            'You can only play sound with a samplerate of {} (here {}). '
            'Use the Sound.resample method for instance.'.format(
                self.sr, sound.sr))
    if sound in self.sounds:
        self.remove(sound)
    with self.chunk_available:
        self.sounds.append(sound)
        sound.playing = True
        self.chunk_available.notify()
    self.is_done.wait()
|
python
|
{
"resource": ""
}
|
q8093
|
Sampler.remove
|
train
|
def remove(self, sound):
    """ Stop a currently playing sound and discard it from the playlist. """
    # The playlist is shared with the mixing loop, so it may only be
    # mutated while holding the ``chunk_available`` lock.
    with self.chunk_available:
        sound.playing = False
        self.sounds.remove(sound)
|
python
|
{
"resource": ""
}
|
q8094
|
Sampler.next_chunks
|
train
|
def next_chunks(self):
    """ Mix the next chunk of every playing sound into a single chunk.

    Blocks on ``chunk_available`` until at least one sound delivers a
    chunk.  Sounds whose chunk iterator is exhausted are marked as not
    playing, dropped from the playlist, and ``is_done`` is set so
    ``play()`` waiters are released.
    """
    with self.chunk_available:
        chunks = []
        while not chunks:
            for sound in [s for s in self.sounds if s.playing]:
                try:
                    chunks.append(next(sound.chunks))
                except StopIteration:
                    # This sound is finished: drop it and release play().
                    sound.playing = False
                    self.sounds.remove(sound)
                    self.is_done.set()
            if not chunks:
                self.chunk_available.wait()
    return numpy.mean(chunks, axis=0)
|
python
|
{
"resource": ""
}
|
q8095
|
Sampler.run
|
train
|
def run(self):
    """ Playback loop: stream mixed chunks to the soundcard until starved. """
    self.running = True

    def _produce():
        # Producer thread: keeps feeding freshly mixed chunks into the
        # queue while the sampler is running.
        while self.running:
            self.chunks.put(self.next_chunks())

    Thread(target=_produce).start()
    with self.BackendStream(samplerate=self.sr, channels=1) as stream:
        while self.running:
            try:
                # timeout so the stream.write() thread can exit
                chunk = self.chunks.get(timeout=self.timeout)
            except Empty:
                self.running = False
            else:
                stream.write(chunk)
|
python
|
{
"resource": ""
}
|
q8096
|
dependency_sort
|
train
|
def dependency_sort(dependency_tree):
    """
    Sorts items 'dependencies first' in a given dependency tree.

    A dependency tree is a dictionary mapping an object to a collection of
    its dependency objects.

    Result is a properly sorted list of items, where each item is a 2-tuple
    containing an object and its dependency list, as given in the input
    dependency tree.

    If B is directly or indirectly dependent on A and they are not both a
    part of the same dependency cycle (i.e. then A is neither directly nor
    indirectly dependent on B) then A needs to come before B.

    If A and B are a part of the same dependency cycle, i.e. if they are
    both directly or indirectly dependent on each other, then it does not
    matter which comes first.

    Any entries found listed as dependencies, but that do not have their own
    dependencies listed as well, are logged & ignored.

    @return: The sorted items.
    @rtype: list
    """
    # Renamed from ``sorted`` to avoid shadowing the builtin.
    ordered = []
    processed = set()
    # Bug fix: dict.iteritems() is Python 2 only and raises AttributeError
    # on Python 3; dict.items() iterates identically on both.
    for key, deps in dependency_tree.items():
        _sort_r(ordered, processed, key, deps, dependency_tree)
    return ordered
|
python
|
{
"resource": ""
}
|
q8097
|
_sort_r
|
train
|
def _sort_r(sorted, processed, key, deps, dependency_tree):
    """Recursively emit *key*'s dependencies (depth first), then *key* itself."""
    if key in processed:
        return
    # Mark before recursing so dependency cycles terminate.
    processed.add(key)
    for dep_key in deps:
        dep_deps = dependency_tree.get(dep_key)
        if dep_deps is None:
            # Dependency with no entry of its own: log it and move on.
            log.debug('"%s" not found, skipped', Repr(dep_key))
        else:
            _sort_r(sorted, processed, dep_key, dep_deps, dependency_tree)
    sorted.append((key, deps))
|
python
|
{
"resource": ""
}
|
q8098
|
TypedContent.resolve
|
train
|
def resolve(self, nobuiltin=False):
    """
    Resolve the node's type reference and return the referenced type node.

    Returns self if the type is defined locally, e.g. as a <complexType>
    subnode.  Otherwise returns the referenced external node.  Results are
    memoized per *nobuiltin* flag in ``self.resolved_cache``.

    @param nobuiltin: Flag indicating whether resolving to XSD built-in
        types should not be allowed.
    @return: The resolved (true) type.
    @rtype: L{SchemaObject}
    """
    hit = self.resolved_cache.get(nobuiltin)
    if hit is None:
        hit = self.__resolve_type(nobuiltin)
        self.resolved_cache[nobuiltin] = hit
    return hit
|
python
|
{
"resource": ""
}
|
q8099
|
Element.namespace
|
train
|
def namespace(self, prefix=None):
    """
    Get this schema element's target namespace.

    In case of reference elements, the target namespace is defined by the
    referenced and not the referencing element node.

    @param prefix: The default prefix.
    @type prefix: str
    @return: The schema element's target namespace.
    @rtype: (I{prefix},I{URI})
    """
    referenced = self.__deref()
    if referenced is None:
        # NOTE(review): *prefix* is not forwarded to the base
        # implementation here -- confirm whether that is intentional.
        return super(Element, self).namespace()
    # A reference element takes its namespace from the target element.
    return referenced.namespace(prefix)
|
python
|
{
"resource": ""
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.