code: string
signature: string
docstring: string
loss_without_docstring: float64
loss_with_docstring: float64
factor: float64
app_label = model._meta.app_label model_lower = model.__name__.lower() if convert: action = MAP_PERMISSION_ACTIONS.get(action, action) return '%s.%s_%s' % ( app_label, action, model_lower )
def crud_permission_name(model, action, convert=True)
Returns the permission name using the Django naming convention: app_label.action_object. If `convert` is True, the `create` and `update` actions are renamed to `add` and `change`.
2.662078
3.073465
0.866149
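As a quick illustration of the helper above, here is a hypothetical call against a Django model; the ``Article`` model, its ``myapp`` app label, and the import path are assumptions, not part of the record.

.. code:: python

    # usage sketch -- model and import path are assumed
    from myapp.models import Article

    # 'create' is mapped to Django's conventional 'add' action
    crud_permission_name(Article, 'create')
    # -> 'myapp.add_article'

    # convert=False keeps the action name exactly as given
    crud_permission_name(Article, 'create', convert=False)
    # -> 'myapp.create_article'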
display_func = getattr(obj, 'get_%s_display' % field_name, None) if display_func: return display_func() value = getattr(obj, field_name) if isinstance(value, models.fields.files.FieldFile): if value: return mark_safe('<a href="%s">%s</a>' % ( value.url, os.path.basename(value.name), )) else: return '' if isinstance(value, models.Model): return format_value_instance(value) if isinstance(value, models.Manager): return mark_safe(', '.join( [format_value_instance(instance) for instance in value.all()] )) if value is None: value = "" return value
def format_value(obj, field_name)
Simple value formatting. If the value is a model instance, returns a link to its detail view, if one exists.
2.176942
2.258763
0.963777
if fields is None: fields = utils.get_fields(type(obj)) elif isinstance(fields, six.string_types): field_names = [f.strip() for f in fields.split(',')] fields = utils.get_fields(type(obj), include=field_names) return { 'object': obj, 'fields': fields, }
def crud_fields(obj, fields=None)
Display object fields in table rows:: <table> {% crud_fields object 'id, ...' %} </table> * ``fields`` fields to include. If ``fields`` is ``None``, all fields will be displayed. If ``fields`` is a ``string``, the comma-separated field names will be displayed. If ``fields`` is a dictionary, keys should be field names and values their verbose names.
2.489669
2.960073
0.841083
include = [f.strip() for f in fields.split(',')] if fields else None return utils.get_fields( model, include )
def get_fields(model, fields=None)
Returns the fields for a model, optionally restricted to a comma-separated string of field names.
5.781563
5.616192
1.029445
if url_prefix is None: url_prefix = r'^' urls = [] if list_view: urls.append(url( url_prefix + '$', list_view, name=utils.crud_url_name(model, utils.ACTION_LIST, name_prefix) )) if create_view: urls.append(url( url_prefix + r'new/$', create_view, name=utils.crud_url_name(model, utils.ACTION_CREATE, name_prefix) )) if detail_view: urls.append(url( url_prefix + r'(?P<pk>\d+)/$', detail_view, name=utils.crud_url_name(model, utils.ACTION_DETAIL, name_prefix) )) if update_view: urls.append(url( url_prefix + r'(?P<pk>\d+)/edit/$', update_view, name=utils.crud_url_name(model, utils.ACTION_UPDATE, name_prefix) )) if delete_view: urls.append(url( url_prefix + r'(?P<pk>\d+)/remove/$', delete_view, name=utils.crud_url_name(model, utils.ACTION_DELETE, name_prefix) )) if list_views is not None: for name, view in list_views.items(): urls.append(url( url_prefix + r'%s/$' % name, view, name=utils.crud_url_name(model, name, name_prefix) )) for name, view in kwargs.items(): urls.append(url( url_prefix + r'(?P<pk>\d+)/%s/$' % name, view, name=utils.crud_url_name(model, name, name_prefix) )) return urls
def crud_urls(model, list_view=None, create_view=None, update_view=None, detail_view=None, delete_view=None, url_prefix=None, name_prefix=None, list_views=None, **kwargs)
Returns a list of url patterns for a model. :param list_view: :param create_view: :param update_view: :param detail_view: :param delete_view: :param url_prefix: prefix to prepend, default is `'^'` :param name_prefix: prefix to prepend to the url name, default is an empty string :param list_views(dict): additional list views :param **kwargs: additional detail views :returns: urls
1.37173
1.393304
0.984516
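A hedged sketch of how ``crud_urls`` above could be wired into a ``urls.py``; the ``Article`` model, the view classes, and the module paths are illustrative, not taken from the record.

.. code:: python

    # urls.py -- illustrative only
    from .models import Article
    from .views import (ArticleListView, ArticleCreateView, ArticleDetailView,
                        ArticleUpdateView, ArticleDeleteView)

    urlpatterns = crud_urls(
        Article,
        list_view=ArticleListView.as_view(),
        create_view=ArticleCreateView.as_view(),
        detail_view=ArticleDetailView.as_view(),
        update_view=ArticleUpdateView.as_view(),
        delete_view=ArticleDeleteView.as_view(),
        url_prefix=r'^articles/',
    )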
model_lower = model.__name__.lower() if urlprefix is None: urlprefix = '' urlprefix += model_lower + '/' urls = crud_urls( model, list_view=CRUDListView.as_view(model=model), create_view=CRUDCreateView.as_view(model=model), detail_view=CRUDDetailView.as_view(model=model), update_view=CRUDUpdateView.as_view(model=model), delete_view=CRUDDeleteView.as_view(model=model), url_prefix=urlprefix, ) return urls
def crud_for_model(model, urlprefix=None)
Returns list of ``url`` items to CRUD a model.
2.012912
1.991118
1.010946
if urlprefix is None: urlprefix = app_label + '/' app = apps.get_app_config(app_label) urls = [] for model in app.get_models(): urls += crud_for_model(model, urlprefix) return urls
def crud_for_app(app_label, urlprefix=None)
Returns list of ``url`` items to CRUD an app.
2.117452
1.971929
1.073797
context = super(CRUDMixin, self).get_context_data(**kwargs) context.update({ 'model_verbose_name': self.model._meta.verbose_name, 'model_verbose_name_plural': self.model._meta.verbose_name_plural, }) context['fields'] = utils.get_fields(self.model) if hasattr(self, 'object') and self.object: for action in utils.INSTANCE_ACTIONS: try: url = reverse( utils.crud_url_name(self.model, action), kwargs={'pk': self.object.pk}) except NoReverseMatch: # pragma: no cover url = None context['url_%s' % action] = url for action in utils.LIST_ACTIONS: try: url = reverse(utils.crud_url_name(self.model, action)) except NoReverseMatch: # pragma: no cover url = None context['url_%s' % action] = url return context
def get_context_data(self, **kwargs)
Adds the model's verbose names, its fields, and the available CRUD urls to the context.
1.997279
1.936523
1.031374
names = super(CRUDMixin, self).get_template_names() if self.crud_template_name: names.append(self.crud_template_name) return names
def get_template_names(self)
Adds crud_template_name to default template names.
3.129158
1.896467
1.649993
_version_re = re.compile(r"__version__\s+=\s+(.*)") # pylint: disable=invalid-name with open('./fleaker/__init__.py', 'rb') as file_: version = ast.literal_eval(_version_re.search( # pylint: disable=invalid-name file_.read().decode('utf-8')).group(1)) download_url = ('https://github.com/croscon/fleaker/archive/' 'v{}.tar.gz'.format(version)) setup( name='fleaker', version=version, download_url=download_url, description='Tools and extensions to make Flask development easier.', url='https://github.com/croscon/fleaker', author='Croscon Consulting', author_email='open.source@croscon.com', license='BSD', packages=[ 'fleaker', 'fleaker.marshmallow', 'fleaker.marshmallow.fields', 'fleaker.peewee', 'fleaker.peewee.fields', 'fleaker.peewee.mixins', 'fleaker.peewee.mixins.time', ], zip_safe=False, long_description=__doc__, include_package_data=True, platforms='any', install_requires=[ 'Flask', 'Flask-Classful', 'Flask-Login', 'Flask-Marshmallow', 'arrow', 'bcrypt', 'blinker', 'marshmallow', 'marshmallow-jsonschema', 'peewee', 'pendulum', 'phonenumbers', 'simplejson', ], classifiers=[ 'Development Status :: 2 - Pre-Alpha', 'Environment :: Web Environment', 'Framework :: Flask', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', # @TODO: Pick specific Python versions; out of the gate flask does 2.6, # 2.7, 3.3, 3.4, and 3.5 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 3', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', 'Topic :: Software Development :: Libraries :: Application Frameworks', 'Topic :: Software Development :: Libraries :: Python Modules', ], keywords=['flask', 'web development', 'flask extension'] )
def install()
Install Fleaker. Wrapped in a function so this file only runs setup when we explicitly invoke it and not, say, when py.test collects all Python modules.
2.427759
2.349051
1.033506
value = super(JSONField, self).python_value(value) if value is not None: return flask.json.loads(value, **self._load_kwargs)
def python_value(self, value)
Return the JSON in the database as a ``dict``. Returns: dict: The field run through json.loads
4.902876
6.567343
0.746554
# Everything is encoded being before being surfaced value = flask.json.dumps(value) return super(JSONField, self).db_value(value)
def db_value(self, value)
Store the value in the database. If the value is a dict like object, it is converted to a string before storing.
18.574522
19.746979
0.940626
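A minimal sketch of the round trip the two ``JSONField`` methods above provide on a Peewee model; the ``Document`` model, the in-memory database, and the import location of ``JSONField`` are assumptions for illustration.

.. code:: python

    import peewee

    db = peewee.SqliteDatabase(':memory:')   # illustrative database

    class Document(peewee.Model):
        payload = JSONField()                 # the field described above

        class Meta:
            database = db

    db.create_tables([Document])
    # db_value() serializes the dict to a JSON string on the way in...
    doc = Document.create(payload={'tags': ['a', 'b']})
    # ...and python_value() loads it back into a dict on the way out.
    print(Document.get_by_id(doc.id).payload['tags'])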
if not any((cls._meta.search_fields, fields)): raise AttributeError( "A list of searchable fields must be provided in the class's " "search_fields or provided to this function in the `fields` " "kwarg." ) # If fields are provided, override the ones in the class if not fields: fields = cls._meta.search_fields query = cls.select() # Cache the LIKE terms like_term = ''.join((term, '%')) full_like_term = ''.join(('%', term, '%')) # Cache the order by terms # @TODO Peewee's order_by supports an `extend` kwarg will will allow # for updating of the order by part of the query, but it's only # supported in Peewee 2.8.5 and newer. Determine if we can support this # before switching. # http://docs.peewee-orm.com/en/stable/peewee/api.html#SelectQuery.order_by order_by = [] # Store the clauses seperately because it is needed to perform an OR on # them and that's somehow impossible with their query builder in # a loop. clauses = [] for field_name in fields: # Cache the field, raising an exception if the field doesn't # exist. field = getattr(cls, field_name) # Apply the search term case insensitively clauses.append( (field == term) | (field ** like_term) | (field ** full_like_term) ) order_by.append(case(None, ( # Straight matches should show up first (field == term, 0), # Similar terms should show up second (field ** like_term, 1), # Substring matches should show up third (field ** full_like_term, 2), ), default=3).asc()) # Apply the clauses to the query query = query.where(reduce(operator.or_, clauses)) # Apply the sort order so it's influenced by the search term relevance. query = query.order_by(*order_by) return query
def search(cls, term, fields=())
Generic SQL search function that uses SQL ``LIKE`` to search the database for matching records. The records are sorted by their relevance to the search term. The query searches and sorts on the following criteria, in order, where the target string is ``exactly``: 1. Straight equality (``x = 'exactly'``) 2. Right hand ``LIKE`` (``x LIKE 'exact%'``) 3. Substring ``LIKE`` (``x LIKE '%act%'``) Args: term (str): The search term to apply to the query. Keyword Args: fields (list|tuple|None): An optional list of fields to apply the search to. If not provided, the class variable ``Meta.search_fields`` will be used by default. Returns: peewee.SelectQuery: An unexecuted query for the records. Raises: AttributeError: Raised if `search_fields` isn't defined in the class and `fields` aren't provided for the function.
5.075643
4.573767
1.109729
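A hypothetical call to the ``search`` classmethod above, assuming a Peewee model that defines ``Meta.search_fields``; the ``User`` model, its fields, and the ``SearchMixin`` base name are invented placeholders.

.. code:: python

    import peewee

    # SearchMixin stands in for whatever class defines search() above;
    # its real name is not shown in the record.
    class User(SearchMixin, peewee.Model):
        name = peewee.CharField()
        email = peewee.CharField()

        class Meta:
            search_fields = ('name', 'email')  # used when `fields` is omitted

    # Returns an unexecuted SelectQuery, ordered by relevance to the term
    query = User.search('alice')

    # Or restrict the search to specific columns and execute it
    results = list(User.search('alice', fields=('email',)))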
super(ForeignKeyField, self)._add_to_schema(field_name, schema) if self.get_field_value('convert_fks', default=True): self.attribute = field_name.replace('_id', '')
def _add_to_schema(self, field_name, schema)
Set the ``attribute`` attr to the field in question so this always gets deserialized into the field name without ``_id``. Args: field_name (str): The name of the field (the attribute name being set in the schema). schema (marshmallow.Schema): The actual parent schema this field belongs to.
5.849725
5.483777
1.066733
# this might be an optional field if value: value = value.id return super(ForeignKeyField, self)._serialize(value, attr, obj)
def _serialize(self, value, attr, obj)
Grab the ID value off the Peewee model so we serialize an ID back.
7.507629
5.459179
1.375231
if orm_backend: return orm_backend if peewee is not MISSING and sqlalchemy is not MISSING: raise RuntimeError('Both PeeWee and SQLAlchemy detected as installed, ' 'but no explicit backend provided! Please specify ' 'one!') if peewee is not MISSING: return _PEEWEE_BACKEND elif sqlalchemy is not MISSING: return _SQLALCHEMY_BACKEND else: return MISSING
def _discover_ideal_backend(orm_backend)
Auto-discover the ideal backend based on what is installed. Right now, handles discovery of: * PeeWee * SQLAlchemy Args: orm_backend (str): The ``orm_backend`` value that was passed to the ``create_app`` function. That is, the ORM Backend the User indicated they wanted to use. Returns: str|fleaker.missing.MissingSentinel: Returns a string for the ideal backend if it found one, or :obj:`fleaker.MISSING` if we couldn't find one. Raises: RuntimeError: Raised if no user provided ORM Backend is given and BOTH PeeWee and SQLAlchemy are installed.
4.029334
3.931942
1.024769
global _SELECTED_BACKEND backend = settings.pop('orm_backend', None) backend = _discover_ideal_backend(backend) # did not specify a backend, bail early if backend is MISSING: return app _swap_backends_error = ('Cannot swap ORM backends after one is ' 'declared!') if backend == _PEEWEE_BACKEND: if (_SELECTED_BACKEND is not MISSING and _SELECTED_BACKEND != _PEEWEE_EXT): raise RuntimeError(_swap_backends_error) # @TODO (orm): Does this really need to be ``peewee_database``? can # it be ``orm_database``? database_uri = settings.pop('peewee_database', None) if database_uri: app.config['DATABASE'] = database_uri if 'DATABASE' not in app.config: # since there is no DATABASE in the config, we need to wait # until we init this; so we'll just do it after configure is # called. try: app.add_post_configure_callback( partial(cls._init_peewee_ext, app), run_once=True ) except NotImplementedError: # this composed app doesn't implement multi-stage # configuration, so there's no way we can proceed without # an explicit DB =/; yes it's possible this could swallow # another error, but if it does... the easiest fix is to do # the same # @TODO (docs): Multi Stage Configuration should be in # the docs err_msg = raise RuntimeError(err_msg) else: # the DATABASE is already present, go ahead and just init now cls._init_peewee_ext(app) _SELECTED_BACKEND = _PEEWEE_EXT elif backend == _SQLALCHEMY_BACKEND: # @TODO (orm): Finish SQLA implementation # do sqla bootstrap code if (_SELECTED_BACKEND is not MISSING and _SELECTED_BACKEND != _SQLA_EXT): raise RuntimeError(_swap_backends_error) _SELECTED_BACKEND = _SQLA_EXT _SQLA_EXT.init_app(app) else: err_msg = ("Explicit ORM backend provided, but could not recognize" " the value! Valid values are: '{}' and '{}';" " received: '{}' instead!") err_msg = err_msg.format(_PEEWEE_BACKEND, _SQLALCHEMY_BACKEND, backend) raise RuntimeError(err_msg) return app
def post_create_app(cls, app, **settings)
Init the extension for our chosen ORM Backend, if possible. This method will ensure that the ``db`` proxy is set to the right extension and that that extension is properly created and configured. Since it needs to call ``init_app`` it MUST be a Post Create Hook. If the chosen backend is PeeWee and no ``DATABASE`` config value is provided, we will delay initializing the extension until one is. Args: app (flask.Flask): The Flask application that was just made through the :meth:`create_app` factory that we should bind extensions to. Kwargs: orm_backend (str): If you want to explicitly specify an ORM Backend to use, you should send it in this kwarg. Valid values are either: ``'peewee'`` or ``'sqlalchemy'``. peewee_database (str): An explicit database connection URI we should immediately add to the configuration that should be used to configure the PeeWee ORM Backend. This will result in the ``DATABASE`` key being set to this value in the config and will result in the PeeWee Flask extension being initialized IMMEDIATELY and not delayed until the next call to :meth:`configure`. Returns: flask.Flask: Returns the app it was given once this is done. Raises: RuntimeError: This is raised if we are asked to create the PeeWee ORM, but are not given a database URI in either the ``DATABASE`` config value, or the explicit ``peewee_database`` setting.
6.196434
5.70543
1.086059
# the database still isn't present, go ahead and register the callback # again, so we can try later. if 'DATABASE' not in app.config: app.add_post_configure_callback(partial(cls._init_peewee_ext, app), run_once=True) return _PEEWEE_EXT.init_app(app)
def _init_peewee_ext(cls, app, dummy_configuration=None, dummy_configure_args=None)
Init the actual PeeWee extension with the app that was created. Since PeeWee requires the ``DATABASE`` config parameter to be present IMMEDIATELY upon initializing the application, we need to delay this construction. This is because, in standard use, we will create the app and attempt to init this extension BEFORE we configure the app, which is totally fine. To fix this, we just need to set this up to try and run after every call to configure. If there is no ``DATABASE`` config parameter present when run, this method does nothing other than reschedule itself to run in the future. In all cases, this is a Post Configure Hook that should RUN ONCE! Args: app (flask.Flask): The application you want to init the PeeWee Flask extension for. Hint: if you need to use this as a callback, use a partial to provide this. dummy_configuration (dict): The resulting application configuration that the post_configure hook provides to all of its callbacks. We will NEVER use this, but since we utilize the post_configure system to register this for complicated apps, we gotta accept it. dummy_configure_args (list[object]): The args passed to the :meth:`configure` function that triggered this callback. Just like the above arg, we'll never use it, but we must accept it.
7.898748
6.154811
1.283345
whitelist_keys_from_mappings = kwargs.get( 'whitelist_keys_from_mappings', False ) whitelist = kwargs.get('whitelist') for item in args: if isinstance(item, string_types): _, ext = splitext(item) if ext == '.json': self._configure_from_json(item) elif ext in ('.cfg', '.py'): self._configure_from_pyfile(item) else: self._configure_from_module(item) elif isinstance(item, (types.ModuleType, type)): self._configure_from_object(item) elif hasattr(item, 'items'): # assume everything else is a mapping like object; ``.items()`` # is what Flask uses under the hood for this method # @TODO: This doesn't handle the edge case of using a tuple of # two element tuples to config; but Flask does that. IMO, if # you do that, you're a monster. self._configure_from_mapping( item, whitelist_keys=whitelist_keys_from_mappings, whitelist=whitelist ) else: raise TypeError("Could not determine a valid type for this" " configuration object: `{}`!".format(item)) # we just finished here, run the post configure callbacks self._run_post_configure_callbacks(args)
def configure(self, *args, **kwargs)
Configure the Application through a varied number of sources of different types. This function chains multiple possible configuration methods together in order to just "make it work". You can pass multiple configuration sources in to the method and each one will be tried in a sane fashion. Later sources will override earlier sources if keys collide. For example: .. code:: python from application import default_config app.configure(default_config, os.environ, '.secrets') In the above example, values stored in ``default_config`` will be loaded first, then overwritten by those in ``os.environ``, and so on. An endless number of configuration sources may be passed. Configuration sources are type checked and processed according to the following rules: * ``string`` - if the source is a ``str``, we will assume it is a file or module that should be loaded. If the file ends in ``.json``, then :meth:`flask.Config.from_json` is used; if the file ends in ``.py`` or ``.cfg``, then :meth:`flask.Config.from_pyfile` is used; if the module has any other extension we assume it is an import path, import the module and pass that to :meth:`flask.Config.from_object`. See below for a few more semantics on module loading. * ``dict-like`` - if the source is ``dict-like``, then :meth:`flask.Config.from_mapping` will be used. ``dict-like`` is defined as anything implementing an ``items`` method that returns a tuple of ``key``, ``val``. * ``class`` or ``module`` - if the source is an uninstantiated ``class`` or ``module``, then :meth:`flask.Config.from_object` will be used. Just like Flask's standard configuration, only uppercased keys will be loaded into the config. If the item we are passed is a ``string`` and it is determined to be a possible Python module, then a leading ``.`` is relevant. If a leading ``.`` is provided, we assume that the module to import is located in the current package and operate as such; if it begins with anything else we assume the import path provided is absolute. This allows you to source configuration stored in a module in your package, or in another package. Args: *args (object): Any object you want us to try to configure from. Keyword Args: whitelist_keys_from_mappings (bool): Should we whitelist the keys we pull from mappings? Very useful if you're passing in an entire OS ``environ`` and you want to omit things like ``LESSPIPE``. If no whitelist is provided, we use the pre-existing config keys as a whitelist. whitelist (list[str]): An explicit list of keys that should be allowed. If provided and ``whitelist_keys`` is ``True``, we will use that as our whitelist instead of pre-existing app config keys.
4.908446
3.823874
1.283632
package = None if item[0] == '.': package = self.import_name obj = importlib.import_module(item, package=package) self.config.from_object(obj) return self
def _configure_from_module(self, item)
Configure from a module by import path. Effectively, you give this an absolute or relative import path, it will import it, and then pass the resulting object to ``_configure_from_object``. Args: item (str): A string pointing to a valid import path. Returns: fleaker.App: Returns itself.
5.292726
5.436705
0.973517
if whitelist is None: whitelist = self.config.keys() if whitelist_keys: item = {k: v for k, v in item.items() if k in whitelist} self.config.from_mapping(item) return self
def _configure_from_mapping(self, item, whitelist_keys=False, whitelist=None)
Configure from a mapping, or dict, like object. Args: item (dict): A dict-like object that we can pluck values from. Keyword Args: whitelist_keys (bool): Should we whitelist the keys before adding them to the configuration? If no whitelist is provided, we use the pre-existing config keys as a whitelist. whitelist (list[str]): An explicit list of keys that should be allowed. If provided and ``whitelist_keys`` is true, we will use that as our whitelist instead of pre-existing app config keys. Returns: fleaker.App: Returns itself.
2.656963
2.731826
0.972596
self._configure_from_mapping(os.environ, whitelist_keys=whitelist_keys, whitelist=whitelist) return self
def configure_from_environment(self, whitelist_keys=False, whitelist=None)
Configure from the entire set of available environment variables. This is really a shorthand for grabbing ``os.environ`` and passing to :meth:`_configure_from_mapping`. As always, only uppercase keys are loaded. Keyword Args: whitelist_keys (bool): Should we whitelist the keys by only pulling those that are already present in the config? Useful for avoiding adding things like ``LESSPIPE`` to your app config. If no whitelist is provided, we use the current config keys as our whitelist. whitelist (list[str]): An explicit list of keys that should be allowed. If provided and ``whitelist_keys`` is true, we will use that as our whitelist instead of pre-existing app config keys. Returns: fleaker.base.BaseApplication: Returns itself.
5.152677
4.941481
1.042739
if run_once: self._post_configure_callbacks['single'].append(callback) else: self._post_configure_callbacks['multiple'].append(callback) return self
def add_post_configure_callback(self, callback, run_once=False)
Add a new callback to be run after every call to :meth:`configure`. Functions run at the end of :meth:`configure` are given the application's resulting configuration and the arguments passed to :meth:`configure`, in that order. As a note, this first argument will be an immutable dictionary. The return value of all registered callbacks is entirely ignored. Callbacks are run in the order they are registered, but you should never depend on another callback having run first. .. admonition:: The "Resulting" Configuration The first argument to the callback is always the "resulting" configuration from the call to :meth:`configure`. What this means is you will get the Application's FROZEN configuration after the call to :meth:`configure` finished. Moreover, this resulting configuration will be an :class:`~werkzeug.datastructures.ImmutableDict`. The purpose of a Post Configure callback is not to further alter the configuration, but rather to do lazy initialization for anything that absolutely requires the configuration, so any attempt to alter the configuration of the app has been made intentionally difficult! Args: callback (function): The function you wish to run after :meth:`configure`. Will receive the application's current configuration as the first argument, and the same arguments passed to :meth:`configure` as the second. Keyword Args: run_once (bool): Should this callback run every time configure is called? Or just once and be deregistered? Pass ``True`` to only run it once. Returns: fleaker.base.BaseApplication: Returns itself for a fluent interface.
2.762003
3.644216
0.757914
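A small usage sketch of the callback registration described above; the callback body is hypothetical, while ``App.create_app`` mirrors the example shown later in this section.

.. code:: python

    from fleaker import App

    app = App.create_app(__name__)

    def announce(resulting_config, configure_args):
        # Receives the frozen (immutable) config and the args given to configure()
        print('configured with DEBUG =', resulting_config.get('DEBUG'))

    # Runs after every future call to configure(); pass run_once=True to fire only once
    app.add_post_configure_callback(announce)
    app.configure({'DEBUG': True})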
resulting_configuration = ImmutableDict(self.config) # copy callbacks in case people edit them while running multiple_callbacks = copy.copy( self._post_configure_callbacks['multiple'] ) single_callbacks = copy.copy(self._post_configure_callbacks['single']) # clear out the singles self._post_configure_callbacks['single'] = [] for callback in multiple_callbacks: callback(resulting_configuration, configure_args) # now do the single run callbacks for callback in single_callbacks: callback(resulting_configuration, configure_args)
def _run_post_configure_callbacks(self, configure_args)
Run all post configure callbacks we have stored. Functions are passed the configuration that resulted from the call to :meth:`configure` as the first argument, in an immutable form; and are given the arguments passed to :meth:`configure` for the second argument. Return values from callbacks are ignored entirely. Args: configure_args (list[object]): The full list of arguments passed to :meth:`configure`. Returns: None: Does not return anything.
4.197605
4.265694
0.984038
schema = cls() if not hasattr(schema.Meta, 'model'): raise AttributeError("In order to make an instance, a model for " "the schema must be defined in the Meta " "class.") serialized_data = schema.load(data).data return cls.Meta.model(**serialized_data)
def make_instance(cls, data)
Validate the data and create a model instance from the data. Args: data (dict): The unserialized data to insert into the new model instance through its constructor. Returns: peewee.Model|sqlalchemy.Model: The model instance with its data inserted into it. Raises: AttributeError: This is raised if ``Meta.model`` isn't set on the schema's definition.
4.891097
4.869215
1.004494
errors = [] for field in original_data: # Skip nested fields because they will loop infinitely if isinstance(field, (set, list, tuple, dict)): continue if field not in self.fields.keys(): errors.append(field) if errors: raise ValidationError("Invalid field", field_names=errors)
def invalid_fields(self, data, original_data)
Validator that checks if any keys provided aren't in the schema. Say your schema has support for keys ``a`` and ``b`` and the data provided has keys ``a``, ``b``, and ``c``. When the data is loaded into the schema, a :class:`marshmallow.ValidationError` will be raised informing the developer that excess keys have been provided. Raises: marshmallow.ValidationError: Raised if extra keys exist in the passed in data.
4.564576
4.994186
0.913978
value = super(ArrowDateTimeField, self).python_value(value) if (isinstance(value, (datetime.datetime, datetime.date, string_types))): return arrow.get(value) return value
def python_value(self, value)
Return the value in the database as an Arrow object. Returns: arrow.Arrow: An instance of Arrow with the field filled in.
4.462394
5.3684
0.831234
if isinstance(value, string_types): value = arrow.get(value) if isinstance(value, arrow.Arrow): value = value.datetime return super(ArrowDateTimeField, self).db_value(value)
def db_value(self, value)
Convert the Arrow instance to a datetime for saving in the db.
3.001997
2.427161
1.236835
if context is not _CONTEXT_MISSING: self.update_context(context, app=app) # do not readd callbacks if already present; and if there's no context # present, there's no real need to add callbacks if (app not in _CONTEXT_CALLBACK_MAP and context is not _CONTEXT_MISSING): key = self._get_context_name(app=app) self._context_callbacks(app, key, original_context=context)
def init_app(self, app, context=DEFAULT_DICT)
Lazy constructor for the :class:`Component` class. This method will allow the component to be used like a Flask extension/singleton. Args: app (flask.Flask): The Application to base this Component upon. Useful for app wide singletons. Keyword Args: context (dict, optional): The contextual information to supply to this component.
7.643116
10.223806
0.74758
def _get_context(dummy_app): _CONTEXT_LOCALS.context = _CONTEXT_LOCALS(key) # pylint: disable=assigning-non-slot def _clear_context(dummy_app): try: del _CONTEXT_LOCALS.context except AttributeError: pass if original_context is not _CONTEXT_MISSING: setattr(_CONTEXT_LOCALS, key, original_context) # store for later so Blinker doesn't remove these listeners and so we # don't add them twice _CONTEXT_CALLBACK_MAP[app] = (_get_context, _clear_context) # and listen for any app context changes appcontext_pushed.connect(_get_context, app) appcontext_popped.connect(_clear_context, app) return (_get_context, _clear_context)
def _context_callbacks(app, key, original_context=_CONTEXT_MISSING)
Register the callbacks we need to properly pop and push the app-local context for a component. Args: app (flask.Flask): The app this context belongs to. This is the only sender our Blinker signal will listen to. key (str): The key on ``_CONTEXT_LOCALS`` that this app's context listens to. Kwargs: original_context (dict): The original context present whenever these callbacks were registered. We will restore the context to this value whenever the app context gets popped. Returns: (function, function): A two-element tuple of the dynamic functions we generated as appcontext callbacks. The first element is the callback for ``appcontext_pushed`` (i.e., get and store the current context) and the second element is the callback for ``appcontext_popped`` (i.e., restore the current context to its original value).
4.377324
3.520534
1.243369
if (app is None and self._context is _CONTEXT_MISSING and not in_app_context()): raise RuntimeError("Attempted to update component context without" " a bound app context or eager app set! Please" " pass the related app you want to update the" " context for!") if self._context is not _CONTEXT_MISSING: self._context = ImmutableDict(context) else: key = self._get_context_name(app=app) setattr(_CONTEXT_LOCALS, key, ImmutableDict(context))
def update_context(self, context, app=None)
Replace the component's context with a new one. Args: context (dict): The new context to set this component's context to. Keyword Args: app (flask.Flask, optional): The app to update this context for. If not provided, the result of ``Component.app`` will be used.
6.944018
7.618564
0.91146
if (app is None and self._context is _CONTEXT_MISSING and not in_app_context()): raise RuntimeError("Attempted to clear component context without" " a bound app context or eager app set! Please" " pass the related app you want to update the" " context for!") if self._context is not _CONTEXT_MISSING: self._context = DEFAULT_DICT else: key = self._get_context_name(app=app) setattr(_CONTEXT_LOCALS, key, DEFAULT_DICT)
def clear_context(self, app=None)
Clear the component's context. Keyword Args: app (flask.Flask, optional): The app to clear this component's context for. If omitted, the value from ``Component.app`` is used.
8.106014
8.434924
0.961006
app = self._app or current_app if not in_app_context(app): raise RuntimeError("This component hasn't been initialized yet " "and an app context doesn't exist.") # If current_app is the app, this must be used in order for their IDs # to be the same, as current_app will wrap the app in a proxy. if hasattr(app, '_get_current_object'): app = app._get_current_object() return app
def app(self)
Internal method that will supply the app to use internally. Returns: flask.Flask: The app to use within the component. Raises: RuntimeError: This is raised if no app was provided to the component and the method is being called outside of an application context.
7.068629
6.989909
1.011262
elements = [ self.__class__.__name__, 'context', text_type(id(self)), ] if app: elements.append(text_type(id(app))) else: try: elements.append(text_type(id(self.app))) except RuntimeError: pass return '_'.join(elements)
def _get_context_name(self, app=None)
Generate the name of the context variable for this component & app. Because we store the ``context`` in a Local so the component can be used across multiple apps, we cannot store the context on the instance itself. This function will generate a unique and predictable key in which to store the context. Returns: str: The name of the context variable to set and get the context from.
3.156243
3.314031
0.952388
settings = cls.pre_create_app(**settings) # now whitelist the settings flask_kwargs = cls._whitelist_standard_flask_kwargs(settings) app = cls(import_name, **flask_kwargs) return cls.post_create_app(app, **settings)
def create_app(cls, import_name, **settings)
Create a standard Fleaker web application. This is the main entrypoint for creating your Fleaker application. Instead of defining your own app factory function, it's preferred that you use :meth:`create_app`, which is responsible for automatically configuring extensions (such as your ORM), parsing setup code for mixins, and calling relevant hooks (such as to setup logging). Usage is easy: .. code:: python from fleaker import App def my_create_app(): app = App.create_app(__name__) return app And the rest works like a normal Flask app with application factories setup! .. versionadded:: 0.1.0 This has always been the preferred way to create Fleaker Applications.
6.022463
8.678764
0.693931
# prevent any copy shenanigans from happening kwargs = deepcopy(kwargs) if not cls._flask_init_argspec_cache: cls._flask_init_argspec_cache = inspect.getargspec(Flask.__init__) return {key: val for key, val in iteritems(kwargs) if key in cls._flask_init_argspec_cache.args}
def _whitelist_standard_flask_kwargs(cls, kwargs)
Whitelist a dictionary of kwargs to remove any that are not valid for Flask's ``__init__`` constructor. Since many Fleaker app mixins define their own kwargs for use in construction and Flask itself does not accept ``**kwargs``, we need to whitelist anything unknown. Uses the proper argspec from the :meth:`flask.Flask.__init__` so it should handle all args. Args: kwargs (dict): The dictionary of kwargs you want to whitelist. Returns: dict: The whitelisted dictionary of kwargs.
4.08277
4.025585
1.014205
levelname = getattr(record, 'levelname', None) record.levelcolor = '' record.endlevelcolor = '' if levelname: level_color = getattr(self.TermColors, levelname, '') record.levelcolor = level_color record.endlevelcolor = self.TermColors.ENDC if level_color else '' return super(FleakerLogFormatter, self).format(record)
def format(self, record)
Format the log record.
4.381938
4.206008
1.041828
# @TODO (orm, exc): Implement this when the ORM/DB stuff is done # if not exc.prevent_rollback: # db.session.rollback() if exc.flash_message: flash(exc.flash_message, exc.flash_level) if exc.redirect is not MISSING: return redirect(url_for(exc.redirect, **exc.redirect_args)) error_result = exc.error_page() if error_result is not None: return error_result, exc.status_code or 500
def errorhandler_callback(cls, exc)
This function should be called in the global error handlers. This will allow for consolidating of cleanup tasks if the exception bubbles all the way to the top of the stack. For example, this method will automatically rollback the database session if the exception bubbles to the top. This is the method that :meth:`register_errorhandler` adds as an errorhandler. See the documentation there for more info. Args: exc (FleakerBaseException): The exception that was thrown that we are to handle.
5.657683
5.621089
1.00651
register_errorhandler = settings.pop('register_errorhandler', True) if register_errorhandler: AppException.register_errorhandler(app) return app
def post_create_app(cls, app, **settings)
Register the errorhandler for the AppException to the passed in App. Args: app (fleaker.base.BaseApplication): A Flask application that extends the Fleaker Base Application, such that the hooks are implemented. Kwargs: register_errorhandler (bool): A boolean indicating if we want to automatically register an errorhandler for the :class:`AppException` exception class after we create this App. Pass ``False`` to prevent registration. Default is ``True``. Returns: fleaker.base.BaseApplication: Returns the app it was given.
6.221341
4.677543
1.330045
app = App.create_app(__name__) app.configure('.configs.settings') # yes, I should use blueprints; but I don't really care for such a small # toy app @app.route('/config') def get_config(): return jsonify(app.config) @app.route('/put_config', methods=['PUT']) def put_config(): data = request.json() for key, val in data.items(): app.config[key] = val return jsonify({'message': 'Config updated!'}) return app
def create_app()
Create the standard app for ``fleaker_config`` and register the two routes required.
5.509187
5.526268
0.996909
if not self._meta.signature_fields: raise AttributeError( "No fields defined in {}.Meta.signature_fields. Please define " "at least one.".format(type(self).__name__) ) # If the field is archived, unset the signature so records in the # future can have this value. if getattr(self, 'archived', False): self.signature = None return # Otherwise, combine the values of the fields together and SHA1 them computed = [getattr(self, value) or ' ' for value in self._meta.signature_fields] computed = ''.join([text_type(value) for value in computed]) # If computed is a falsey value, that means all the fields were # None or blank and that will lead to some pain. if computed: self.signature = sha1(computed.encode('utf-8')).hexdigest()
def update_signature(self)
Update the signature field by hashing the ``signature_fields``. Raises: AttributeError: This is raised if ``Meta.signature_fields`` has no values in it or if a field in there is not a field on the model.
5.711108
4.938793
1.156377
# Create an httplib2.Http object to handle our HTTP requests an # authorize it with the Credentials. Note that the first parameter, # service_account_name, is the Email address created for the Service # account. It must be the email address associated with the key that # was created. scope = 'https://www.googleapis.com/auth/androidpublisher' credentials = ServiceAccountCredentials.from_p12_keyfile(service_account, credentials_file_path, scopes=scope) http = httplib2.Http() http = credentials.authorize(http) service = build('androidpublisher', api_version, http=http, cache_discovery=False) return service
def connect(service_account, credentials_file_path, api_version='v2')
Connect to the Google Play publishing interface.
2.997205
2.954935
1.014305
if not self.context.get('convert_dates', True) or not value: return value value = super(PendulumField, self)._deserialize(value, attr, value) timezone = self.get_field_value('timezone') target = pendulum.instance(value) if (timezone and (text_type(target) != text_type(target.in_timezone(timezone)))): raise ValidationError( "The provided datetime is not in the " "{} timezone.".format(timezone) ) return target
def _deserialize(self, value, attr, obj)
Deserializes a string into a Pendulum object.
4.8117
4.356035
1.104605
strict_validation = self.get_field_value( 'strict_phone_validation', default=False ) strict_region = self.get_field_value( 'strict_phone_region', default=strict_validation ) region = self.get_field_value('region', 'US') phone_number_format = self.get_field_value( 'phone_number_format', default=phonenumbers.PhoneNumberFormat.INTERNATIONAL ) # Remove excess special chars, except for the plus sign stripped_value = re.sub(r'[^\w+]', '', value) try: if not stripped_value.startswith('+') and not strict_region: phone = phonenumbers.parse(stripped_value, region) else: phone = phonenumbers.parse(stripped_value) if (not phonenumbers.is_possible_number(phone) or not phonenumbers.is_valid_number(phone) and strict_validation): raise ValidationError( "The value for {} ({}) is not a valid phone " "number.".format(attr, value) ) return phonenumbers.format_number(phone, phone_number_format) except phonenumbers.phonenumberutil.NumberParseException as exc: if strict_validation or strict_region: raise ValidationError(exc)
def _format_phone_number(self, value, attr)
Format and validate a phone number.
2.441501
2.405349
1.01503
if value: value = self._format_phone_number(value, attr) return super(PhoneNumberField, self)._deserialize(value, attr, data)
def _deserialize(self, value, attr, data)
Format and validate the phone number using libphonenumber.
4.319732
3.276127
1.318548
value = super(PhoneNumberField, self)._serialize(value, attr, obj) if value: value = self._format_phone_number(value, attr) return value
def _serialize(self, value, attr, obj)
Format and validate the phone number using libphonenumber.
3.717999
3.173973
1.171402
if isinstance(obj, decimal.Decimal): obj = format(obj, 'f') str_digit = text_type(obj) return (str_digit.rstrip('0').rstrip('.') if '.' in str_digit else str_digit) elif isinstance(obj, phonenumbers.PhoneNumber): return phonenumbers.format_number( obj, phonenumbers.PhoneNumberFormat.E164 ) elif isinstance(obj, pendulum.Pendulum): return text_type(obj) elif isinstance(obj, arrow.Arrow): return text_type(obj) elif isinstance(obj, (datetime.datetime, datetime.date)): return obj.isoformat() try: return list(iter(obj)) except TypeError: pass return super(FleakerJSONEncoder, self).default(obj)
def default(self, obj)
Encode individual objects into their JSON representation. This method is used by :class:`flask.json.JSONEncoder` to encode individual items in the JSON object. Args: obj (object): Any Python object we wish to convert to JSON. Returns: str: The stringified, valid JSON representation of our provided object.
2.889798
2.936093
0.984232
if isinstance(value, arrow.arrow.Arrow): value = value.datetime return super(ArrowField, self)._serialize(value, attr, obj)
def _serialize(self, value, attr, obj)
Convert the Arrow object into a string.
3.928347
3.897675
1.007869
if not self.context.get('convert_dates', True) or not value: return value value = super(ArrowField, self)._deserialize(value, attr, data) timezone = self.get_field_value('timezone') target = arrow.get(value) if timezone and text_type(target.to(timezone)) != text_type(target): raise ValidationError( "The provided datetime is not in the " "{} timezone.".format(timezone) ) return target
def _deserialize(self, value, attr, data)
Deserializes a string into an Arrow object.
4.240543
4.096843
1.035076
value = super(PendulumDateTimeField, self).python_value(value) if isinstance(value, datetime.datetime): value = pendulum.instance(value) elif isinstance(value, datetime.date): value = pendulum.instance( datetime.datetime.combine( value, datetime.datetime.min.time() ) ) elif isinstance(value, string_types): value = pendulum.parse(value) return value
def python_value(self, value)
Return the value in the database as a Pendulum object. Returns: pendulum.Pendulum: An instance of Pendulum with the field filled in.
2.189345
2.251736
0.972292
if isinstance(value, pendulum.Pendulum): value = datetime.datetime( value.year, value.month, value.day, value.hour, value.minute, value.second, value.microsecond, value.tzinfo ) return super(PendulumDateTimeField, self).db_value(value)
def db_value(self, value)
Convert the Pendulum instance to a datetime for saving in the db.
2.361167
1.81275
1.302533
edit = edits_service.insert(body={}, packageName=package_name).execute() response = edits_service.tracks().get(editId=edit['id'], track='production', packageName=package_name).execute() releases = response['releases'] for release in releases: if release['status'] == 'inProgress': url = 'https://archive.mozilla.org/pub/mobile/releases/{}/SHA512SUMS'.format(release['name']) resp = requests.head(url) if resp.status_code != 200: if resp.status_code != 404: # 404 is expected for release candidates logger.warning("Could not check %s: %s", url, resp.status_code) continue age = time.time() - calendar.timegm(eu.parsedate(resp.headers['Last-Modified'])) if age >= days * DAY: yield release, age
def check_rollout(edits_service, package_name, days)
Check whether ``package_name`` has had a release in staged rollout for too long.
4.024228
3.961437
1.015851
schema = cls._get_schema(schema) # Generate the JSON Schema return cls(context=context).dump(schema).data
def generate_json_schema(cls, schema, context=DEFAULT_DICT)
Generate a JSON Schema from a Marshmallow schema. Args: schema (marshmallow.Schema|str): The Marshmallow schema, or the Python path to one, to create the JSON schema for. Keyword Args: context (dict, optional): The Marshmallow context to be pushed to the schema when the JSON schema is generated. Returns: dict: The JSON schema in dictionary form.
8.309137
11.48087
0.723738
schema = cls._get_schema(schema) json_schema = cls.generate_json_schema(schema, context=context) if folder: schema_filename = getattr( schema.Meta, 'json_schema_filename', '.'.join([schema.__class__.__name__, 'json']) ) json_path = os.path.join(folder, schema_filename) file_pointer = open(json_path, 'w') json.dump(json_schema, file_pointer, indent=2) return json_schema
def write_schema_to_file(cls, schema, file_pointer=stdout, folder=MISSING, context=DEFAULT_DICT)
Given a Marshmallow schema, create a JSON Schema for it. Args: schema (marshmallow.Schema|str): The Marshmallow schema, or the Python path to one, to create the JSON schema for. Keyword Args: file_pointer (file, optional): The pointer to the file to write this schema to. If not provided, the schema will be dumped to ``sys.stdout``. folder (str, optional): The folder in which to save the JSON schema. The name of the schema file can be optionally controlled by the schema's ``Meta.json_schema_filename``. If that attribute is not set, the class's name will be used for the filename. If writing the schema to a specific file is desired, please pass in a ``file_pointer``. context (dict, optional): The Marshmallow context to be pushed to the schema when it generates the JSON Schema. Returns: dict: The JSON schema in dictionary form.
2.677068
2.53038
1.05797
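An illustrative invocation of the two JSON-schema helpers above; ``JSONSchema`` stands in for whatever class these classmethods live on, and the ``UserSchema`` class, dotted path, and folder are hypothetical.

.. code:: python

    # Pass a schema class or instance directly...
    schema_dict = JSONSchema.generate_json_schema(UserSchema)

    # ...or a Python path to one, optionally writing the result to a folder.
    # The filename falls back to '<SchemaClassName>.json' unless the schema's
    # Meta.json_schema_filename overrides it.
    JSONSchema.write_schema_to_file(
        'myapp.schemata.UserSchema',
        folder='docs/schemas',
    )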
if isinstance(schema, string_types): schema = cls._get_object_from_python_path(schema) if isclass(schema): schema = schema() if not isinstance(schema, Schema): raise TypeError("The schema must be a path to a Marshmallow " "schema or a Marshmallow schema.") return schema
def _get_schema(cls, schema)
Method that will fetch a Marshmallow schema flexibly. Args: schema (marshmallow.Schema|str): Either the schema class, an instance of a schema, or a Python path to a schema. Returns: marshmallow.Schema: The desired schema. Raises: TypeError: This is raised if the provided object isn't a Marshmallow schema.
3.976321
3.436062
1.157232
# Dissect the path python_path = python_path.split('.') module_path = python_path[:-1] object_class = python_path[-1] if isinstance(module_path, list): module_path = '.'.join(module_path) # Grab the object module = import_module(module_path) schema = getattr(module, object_class) if isclass(schema): schema = schema() return schema
def _get_object_from_python_path(python_path)
Method that will fetch a Marshmallow schema from a path to it. Args: python_path (str): The string path to the Marshmallow schema. Returns: marshmallow.Schema: The schema matching the provided path. Raises: TypeError: This is raised if the specified object isn't a Marshmallow schema.
3.122566
3.1991
0.976076
super(MarshmallowAwareApp, cls).post_create_app(app, **settings) marsh.init_app(app) return app
def post_create_app(cls, app, **settings)
Automatically register and init the Flask Marshmallow extension. Args: app (flask.Flask): The application instance to initialize Flask Marshmallow on. Kwargs: settings (dict): The settings passed to this method from the parent app. Returns: flask.Flask: The Flask application that was passed in.
6.19774
6.19967
0.999689
if not instance._meta.event_ready or created: return instance.get_original()
def get_original_before_save(sender, instance, created)
Event listener to get the original instance before it's saved.
16.206806
12.937128
1.252736
if not instance._meta.event_ready: return if created: instance.create_creation_event() else: instance.create_update_event() # Reset the original key instance._original = None
def post_save_event_listener(sender, instance, created)
Event listener to create creation and update events.
6.653052
5.633306
1.181021
if event.code not in sender.event_codes(): raise ValueError("The Event.code '{}' is not a valid Event " "code.".format(event.code))
def validate_event_type(sender, event, created)
Verify that the Event's code is a valid one.
6.747631
5.054575
1.334955
pk_value = self._get_pk_value() if isinstance(pk_value, int) and not self._original: self._original = ( self.select().where(self.__class__.id == pk_value).get() ) return self._original
def get_original(self)
Get the original instance of this instance before it's updated. Returns: fleaker.peewee.EventMixin: The original instance of the model.
4.51004
4.093209
1.101835
event = self.create_audit_event(code='AUDIT_CREATE') if self._meta.create_message: event.body = self._meta.create_message['message'] event.code = self._meta.create_message['code'] event.meta = self.parse_meta(self._meta.create_message['meta']) self.create_event_callback(event) event.save() return event
def create_creation_event(self)
Parse the create message DSL to insert the data into the Event. Returns: fleaker.peewee.EventStorageMixin: A new Event instance with data put in it
4.377306
4.631412
0.945134
events = [] for fields, rules in iteritems(self._meta.update_messages): if not isinstance(fields, (list, tuple, set)): fields = (fields,) changed = any([ getattr(self, field) != getattr(self.get_original(), field) for field in fields ]) if changed: event = self.create_audit_event(code=rules['code']) event.body = rules['message'] event.meta = self.parse_meta(rules['meta']) events.append(event) self.update_event_callback(events) with db.database.atomic(): for event in events: event.save() return events
def create_update_event(self)
Parse the update messages DSL to insert the data into the Event. Returns: list[fleaker.peewee.EventStorageMixin]: All the events that were created for the update.
4.248996
4.231498
1.004135
event = self.create_audit_event(code='AUDIT_DELETE') if self._meta.delete_message: event.code = self._meta.delete_message['code'] event.body = self._meta.delete_message['message'] event.meta = self.parse_meta(self._meta.delete_message['meta']) self.delete_event_callback(event) event.save() return event
def create_deletion_event(self)
Parse the delete message DSL to insert data into the Event. Return: Event: The Event with the relevant information put in it.
4.573052
4.71726
0.96943
res = {} for key, val in meta.items(): if not val: continue elif isinstance(val, dict): res[key] = self.parse_meta(val) elif val.startswith('current_user.'): res[key] = self.get_path_attribute(current_user, val) elif val.startswith('original.'): res[key] = self.get_path_attribute(self.get_original(), val) else: res[key] = self.get_path_attribute(self, val) return res
def parse_meta(self, meta)
Parses the meta field in the message, copies its keys into a new dict and replaces the values, which should be attribute paths relative to the passed in object, with the current value at the end of that path. This function will run recursively when it encounters other dicts inside the meta dict. Args: meta (dict): The dictionary of mappings to pull the structure of the meta from. Returns: dict: A copy of the keys from the meta dict with the values pulled from the paths.
2.52021
2.544248
0.990552
# Strip out ignored keys passed in path = path.replace('original.', '').replace('current_user.', '') attr_parts = path.split('.') res = obj try: for part in attr_parts: try: res = getattr(res, part) except AttributeError: res = getattr(res.get(), part) except (peewee.DoesNotExist, AttributeError): return None return res
def get_path_attribute(obj, path)
Given a path like `related_record.related_record2.id`, this method will be able to pull the value of ID from that object, returning None if it doesn't exist. Args: obj (fleaker.db.Model): The object to attempt to pull the value from path (str): The path to follow to pull the value from Returns: (int|str|None): The value at the end of the path. None if it doesn't exist at any point in the path.
5.344165
5.60202
0.953971
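A brief, hypothetical illustration of the path traversal described above; the ``order`` record and its related fields are invented for the example.

.. code:: python

    # Walks order.customer.address.zip_code, returning None if any hop is missing
    get_path_attribute(order, 'customer.address.zip_code')

    # The 'original.' and 'current_user.' prefixes used by the meta DSL are
    # stripped before traversal, so this is equivalent to 'customer.id'
    get_path_attribute(order, 'original.customer.id')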
event_keys = set(event._meta.fields.keys()) obj_keys = self._meta.fields.keys() matching_keys = event_keys.intersection(obj_keys) for key in matching_keys: # Skip created_by because that will always be the current_user # for the Event. if key == 'created_by': continue # Skip anything that isn't a FK if not isinstance(self._meta.fields[key], peewee.ForeignKeyField): continue setattr(event, key, getattr(self, key)) # Attempt to set the obj's ID in the correct FK field on Event, if it # exists. If this conflicts with desired behavior, handle this in the # respective callback. This does rely on the FK matching the lower case # version of the class name and that the event isn't trying to delete # the current record, becuase that ends badly. possible_key = self.__class__.__name__.lower() if possible_key in event_keys and event.code != 'AUDIT_DELETE': setattr(event, possible_key, self)
def copy_foreign_keys(self, event)
Copies possible foreign key values from the current instance into the Event, skipping common keys like modified and created. Args: event (Event): The Event instance to copy the FKs into.
6.422025
6.260762
1.025758
event = self._meta.event_model( code=code, model=self.__class__.__name__, ) # Use the logged in User, if possible if current_user: event.created_by = current_user.get_id() self.copy_foreign_keys(event) self.populate_audit_fields(event) return event
def create_audit_event(self, code='AUDIT')
Creates a generic auditing Event logging the changes between saves and the initial data in creates. Kwargs: code (str): The code to set the new Event to. Returns: Event: A new event with relevant info inserted into it
4.843187
6.007983
0.806125
event.updated = self._data event.original = self.get_original()._data
def populate_audit_fields(self, event)
Populates the audit JSON fields with raw data from the model, so all changes can be tracked and diffed. Args: event (Event): The Event instance to attach the data to.
20.045429
22.650028
0.885007
return render_template_string( self.body, event=self, meta=self.meta, original=self.original, updated=self.updated, version=self.version, )
def formatted_message(self)
Method that will return the formatted message for the event. This formatting is done with Jinja and the template text is stored in the ``body`` attribute. The template is supplied the following variables, as well as the built in Flask ones: - ``event``: This is the event instance that this method belongs to. - ``meta``: This is a dictionary of cached values that have been stored when the event was created based upon the event's DSL. - ``original``: This is a dump of the instance before the instance was updated. - ``updated``: This is a dump of the instance after it was updated. - ``version``: This is the version of the event DSL. This property is cached because Jinja rendering is slower than raw Python string formatting.
5.473231
2.274683
2.406151
if not self._cached_time: self._cached_time = self._meta.datetime.utcnow() return self._cached_time
def _get_cached_time(self)
Method that will allow for consistent modified and archived timestamps. Returns: self.Meta.datetime: This method will return a datetime that is compatible with the current class's datetime library.
5.113789
3.673822
1.391953
return utils.load_json_url(_ALL_LOCALES_URL.format(product=product, channel=channel))
def _get_list_of_completed_locales(product, channel)
Get all the translated locales supported by Google Play, so that locales Google Play does not support are not downloaded. The same goes for locales that have not been translated.
7.511588
6.70364
1.120524
meta_value = self.metadata.get(key)
context_value = self.context.get(key)

if context_value is not None:
    return context_value
elif meta_value is not None:
    return meta_value

return default
def get_field_value(self, key, default=MISSING)
Method to fetch a value from either the schema's context or the
field's metadata, in that order.

Args:
    key (str): The name of the key to grab the value for.

Keyword Args:
    default (object, optional): If the value doesn't exist in the
        schema's ``context`` or the field's ``metadata``, this value
        will be returned. By default this will be ``MISSING``.

Returns:
    object: This will be the correct value to use given the
        parameters.
2.914965
2.58848
1.12613
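A standalone sketch of the precedence above (context first, then field metadata, then the default); ``MISSING`` here is a local stand-in sentinel, not the library's own.

    MISSING = object()  # stand-in sentinel, purely for illustration

    def get_field_value(metadata, context, key, default=MISSING):
        # The schema context wins over the field metadata, which wins
        # over the default.
        if context.get(key) is not None:
            return context[key]
        if metadata.get(key) is not None:
            return metadata[key]
        return default

    assert get_field_value({'label': 'meta'}, {'label': 'ctx'}, 'label') == 'ctx'
    assert get_field_value({'label': 'meta'}, {}, 'label') == 'meta'
    assert get_field_value({}, {}, 'label') is MISSING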
query = cls.base_query().where(cls.id == record_id)

if execute:
    return query.get()

return query
def get_by_id(cls, record_id, execute=True)
Return a single instance of the model queried by ID.

Args:
    record_id (int): Integer representation of the ID to query on.

Keyword Args:
    execute (bool, optional): Should this method execute the query or
        return a query object for further manipulation?

Returns:
    cls | :py:class:`peewee.SelectQuery`: If ``execute`` is ``True``,
        the query is executed, otherwise a query is returned.

Raises:
    :py:class:`peewee.DoesNotExist`: Raised if a record with that ID
        doesn't exist.
3.869024
6.465184
0.59844
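A runnable sketch of the execute-or-return-query idea, using an in-memory SQLite database and a hypothetical ``User`` model; the classmethod is renamed here only to avoid shadowing peewee's own ``get_by_id``.

    import peewee

    db = peewee.SqliteDatabase(':memory:')

    class User(peewee.Model):
        name = peewee.CharField()

        class Meta:
            database = db

        @classmethod
        def base_query(cls):
            return cls.select()

        @classmethod
        def fetch_by_id(cls, record_id, execute=True):
            # Return the record when execute=True, otherwise hand back
            # the query so callers can keep chaining .where() clauses.
            query = cls.base_query().where(cls.id == record_id)
            return query.get() if execute else query

    db.create_tables([User])
    alice = User.create(name='alice')
    assert User.fetch_by_id(alice.id).name == 'alice'
    query = User.fetch_by_id(alice.id, execute=False)
    assert query.get().name == 'alice'  # still lazy until executed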
for key, val in iteritems(data):
    if not hasattr(self, key):
        raise AttributeError(
            "No field named {key} for model {model}".format(
                key=key, model=self.__class__.__name__
            )
        )

    setattr(self, key, val)

self.save()

return self
def update_instance(self, data)
Update a single record by id with the provided data.

Args:
    data (dict): The new data to update the record with.

Returns:
    self: This is an instance of itself with the updated data.

Raises:
    AttributeError: This is raised if a key in the ``data`` isn't a
        field on the model.
2.651794
2.814338
0.942244
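The guarded-setattr pattern above, reduced to plain Python so the error path is easy to see; the class and field names are made up for illustration.

    class Record(object):
        name = None
        email = None

        def update_instance(self, data):
            # Refuse unknown keys; known keys are applied one at a time.
            for key, val in data.items():
                if not hasattr(self, key):
                    raise AttributeError(
                        "No field named {key} for model {model}".format(
                            key=key, model=self.__class__.__name__))
                setattr(self, key, val)
            return self

    record = Record().update_instance({'name': 'alice'})
    assert record.name == 'alice'
    try:
        record.update_instance({'nope': 1})
    except AttributeError as exc:
        print(exc)  # No field named nope for model Record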
return "" if not self.is_enabled else (.format(SITE_KEY=self.site_key, THEME=self.theme, TYPE=self.type, SIZE=self.size, TABINDEX=self.tabindex))
def get_code(self)
Returns the new ReCaptcha code

:return:
9.50575
6.904636
1.37672
from subprocess import Popen, STDOUT

info = self.getinfo(name)

if not restart and not info.isrunning():
    restart = True

if restart:
    if info.pid is not None:
        info.terminate()
    controldir = info.controldir.ensure(dir=1)
    #controldir.remove()
    preparefunc = CompatStarter.wrap(preparefunc)
    starter = preparefunc(controldir, self)
    args = [str(x) for x in starter.args]
    self.log.debug("%s$ %s", controldir, " ".join(args))
    stdout = open(str(info.logpath), "wb", 0)
    kwargs = {'env': starter.env}
    if sys.platform == "win32":
        kwargs["startupinfo"] = sinfo = std.subprocess.STARTUPINFO()
        if sys.version_info >= (2, 7):
            sinfo.dwFlags |= std.subprocess.STARTF_USESHOWWINDOW
            sinfo.wShowWindow |= std.subprocess.SW_HIDE
    else:
        kwargs["close_fds"] = True
        kwargs["preexec_fn"] = os.setpgrp  # no CONTROL-C
    popen = Popen(args, cwd=str(controldir), stdout=stdout, stderr=STDOUT,
                  **kwargs)
    info.pid = pid = popen.pid
    info.pidpath.write(str(pid))
    self.log.debug("process %r started pid=%s", name, pid)
    stdout.close()

f = info.logpath.open()
if not restart:
    f.seek(0, 2)
else:
    if not starter.wait(f):
        raise RuntimeError("Could not start process %s" % name)
    self.log.debug("%s process startup detected", name)

logfiles = self.config.__dict__.setdefault("_extlogfiles", {})
logfiles[name] = f
self.getinfo(name)
return info.pid, info.logpath
def ensure(self, name, preparefunc, restart=False)
returns (PID, logfile) from a newly started or already running process.

@param name: name of the external process, used for caching info
    across test runs.

@param preparefunc: A subclass of ProcessStarter.

@param restart: force restarting the process if it is running.

@return: (PID, logfile) logfile will be seeked to the end if the
    server was running, otherwise seeked to the line after where the
    waitpattern matched.
3.579314
3.56816
1.003126
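A self-contained sketch of the launch pattern used above, assuming only the standard library: redirect the child's output to an unbuffered log file and detach it from the caller's process group so a Ctrl-C in the test run is not delivered to it (the helper name is invented for this sketch).

    import os
    import subprocess
    import sys

    def start_logged_process(args, log_path):
        """Start *args* with stdout/stderr sent to *log_path*; return the PID."""
        kwargs = {}
        if sys.platform == "win32":
            # Hide the console window on Windows.
            sinfo = subprocess.STARTUPINFO()
            sinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
            sinfo.wShowWindow |= subprocess.SW_HIDE
            kwargs["startupinfo"] = sinfo
        else:
            # New process group so the terminal's SIGINT is not inherited.
            kwargs["close_fds"] = True
            kwargs["preexec_fn"] = os.setpgrp
        with open(log_path, "wb", 0) as log:
            proc = subprocess.Popen(args, stdout=log, stderr=subprocess.STDOUT,
                                    **kwargs)
        return proc.pid

    # pid = start_logged_process([sys.executable, "-c", "print('hello')"], "child.log")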
"Wait until the process is ready." lines = map(self.log_line, self.filter_lines(self.get_lines(log_file))) return any( std.re.search(self.pattern, line) for line in lines )
def wait(self, log_file)
Wait until the process is ready.
7.013587
6.024477
1.164182
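The readiness check reduced to its core, assuming the same idea: scan log lines for a regex and report success as soon as one matches (names here are illustrative only).

    import io
    import re

    def wait_for_pattern(log_file, pattern):
        # The process counts as "ready" once any log line matches the pattern.
        return any(re.search(pattern, line) for line in log_file)

    log = io.StringIO("booting...\nlistening on 127.0.0.1:8080\n")
    assert wait_for_pattern(log, r"listening on .*:\d+")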
self.pattern = wait
self.env = env
self.args = args

# wait is a function, supersedes the default behavior
if callable(wait):
    self.wait = lambda lines: wait()
def prep(self, wait, args, env=None)
Given the return value of a preparefunc, prepare this CompatStarter.
9.864146
9.567836
1.030969
if isinstance(starter_cls, type) and issubclass(starter_cls, ProcessStarter):
    return starter_cls

depr_msg = 'Pass a ProcessStarter for preparefunc'
warnings.warn(depr_msg, DeprecationWarning, stacklevel=3)
return functools.partial(CompatStarter, starter_cls)
def wrap(self, starter_cls)
If starter_cls is not a ProcessStarter, assume it's the legacy preparefunc and return it bound to a CompatStarter.
5.45576
3.275433
1.665661
return self._connection.post('monitoring/start_date', OrderedDict((
    ('StartDay', start_day),
    ('DataLimit', data_limit),
    ('MonthThreshold', month_threshold),
    ('SetMonthData', 1),
)))
def set_start_date(self, start_day: int, data_limit: str, month_threshold: int)
Sets the network usage alarm for LTE.

:param start_day: number of the day on which monitoring starts
:param data_limit: maximum data limit as a string, e.g. 1000MB or 1GB
:param month_threshold: alarm threshold in percent as an int, e.g. 90
:return: dict
6.006033
5.011262
1.198507
return self._request(requests.get, *args, **kwargs)
def _get(self, *args, **kwargs)
Make a GET request.
6.053679
3.465037
1.747075
data = self._default_data()
data.update(kwargs.get('data') or {})
kwargs['data'] = data
return self._request(requests.post, *args, **kwargs)
def _post(self, *args, **kwargs)
Make a POST request.
3.534223
2.807323
1.25893
if not base and not quote:
    return url
else:
    url = url + base.lower() + quote.lower() + "/"
    return url
def _construct_url(self, url, base, quote)
Adds the orderbook to the url if base and quote are specified.
5.429379
3.296819
1.646853
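A standalone version of the same pair-suffix logic, just to make the two branches concrete; the free-function name is invented for this sketch.

    def construct_url(url, base, quote):
        # Append the lower-cased currency pair only when both parts are given.
        if not base and not quote:
            return url
        return url + base.lower() + quote.lower() + "/"

    assert construct_url("ticker/", "BTC", "USD") == "ticker/btcusd/"
    assert construct_url("ticker/", None, None) == "ticker/"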
return_json = kwargs.pop('return_json', False)
url = self.api_url[version] + url

# Add in any proxy defined by the instance before making the request.
if 'proxies' not in kwargs:
    kwargs['proxies'] = self.proxydict

response = func(url, *args, **kwargs)

# Check for error, raising an exception if appropriate.
response.raise_for_status()

try:
    json_response = response.json()
except ValueError:
    json_response = None
if isinstance(json_response, dict):
    error = json_response.get('error')
    if error:
        raise BitstampError(error)
    elif json_response.get('status') == "error":
        raise BitstampError(json_response.get('reason'))

if return_json:
    if json_response is None:
        raise BitstampError(
            "Could not decode json for: " + response.text)
    return json_response

return response
def _request(self, func, url, version=1, *args, **kwargs)
Make a generic request, adding in any proxy defined by the instance.

Raises a ``requests.HTTPError`` if the response status isn't 200, and
raises a :class:`BitstampError` if the response contains a json encoded
error message.
2.767287
2.535127
1.091577
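A minimal sketch of just the JSON error handling described above, assuming a payload shaped like Bitstamp's (either an ``error`` field or a ``status``/``reason`` pair); ``BitstampError`` is stubbed locally.

    class BitstampError(Exception):
        """Local stand-in for the client's exception type."""

    def check_json_error(json_response):
        # A dict payload can signal failure two ways; surface either one.
        if isinstance(json_response, dict):
            if json_response.get('error'):
                raise BitstampError(json_response['error'])
            if json_response.get('status') == "error":
                raise BitstampError(json_response.get('reason'))
        return json_response

    assert check_json_error({'last': '42000'}) == {'last': '42000'}
    try:
        check_json_error({'status': 'error', 'reason': 'Invalid nonce'})
    except BitstampError as exc:
        print(exc)  # Invalid nonce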
url = self._construct_url("ticker/", base, quote) return self._get(url, return_json=True, version=2)
def ticker(self, base="btc", quote="usd")
Returns dictionary.
5.93607
5.524869
1.074427
params = {'group': group}
url = self._construct_url("order_book/", base, quote)
return self._get(url, params=params, return_json=True, version=2)
def order_book(self, group=True, base="btc", quote="usd")
Returns dictionary with "bids" and "asks". Each is a list of open orders and each order is represented as a list of price and amount.
4.512556
4.924284
0.916388
params = {'time': time}
url = self._construct_url("transactions/", base, quote)
return self._get(url, params=params, return_json=True, version=2)
def transactions(self, time=TransRange.HOUR, base="btc", quote="usd")
Returns transactions for the last 'timedelta' seconds. Parameter time is specified by one of two values of TransRange class.
5.036873
4.818932
1.045226
nonce = getattr(self, '_nonce', 0)
if nonce:
    nonce += 1

# If the unix time is greater though, use that instead (helps low
# concurrency multi-threaded apps always call with the largest nonce).
self._nonce = max(int(time.time()), nonce)
return self._nonce
def get_nonce(self)
Get a unique nonce for the bitstamp API.

This integer must always be increasing, so use the current unix time.
Every time this variable is requested, it automatically increments to
allow for more than one API request per second.

This isn't a thread-safe function however, so you should only rely on
a single thread if you have a high level of concurrent API requests in
your application.
9.470716
9.023917
1.049513
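A small sketch of the nonce rule above, assuming the same logic (prefer unix time, never go backwards), just to show it stays monotonic even across rapid calls.

    import time

    class NonceMixin(object):
        def get_nonce(self):
            nonce = getattr(self, '_nonce', 0)
            if nonce:
                nonce += 1
            # Whichever is larger wins: the clock or the previous nonce + 1.
            self._nonce = max(int(time.time()), nonce)
            return self._nonce

    client = NonceMixin()
    first = client.get_nonce()
    second = client.get_nonce()
    assert second >= first  # nonces never decrease, even within one second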
data = super(Trading, self)._default_data(*args, **kwargs)
data['key'] = self.key

nonce = self.get_nonce()
msg = str(nonce) + self.username + self.key

signature = hmac.new(
    self.secret.encode('utf-8'),
    msg=msg.encode('utf-8'),
    digestmod=hashlib.sha256).hexdigest().upper()
data['signature'] = signature
data['nonce'] = nonce
return data
def _default_data(self, *args, **kwargs)
Generate a one-time signature and other data required to send a secure POST request to the Bitstamp API.
2.608825
2.284977
1.141729
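The signing step in isolation, with obviously fake credentials, to show how the nonce, customer ID and API key are concatenated and signed with HMAC-SHA256.

    import hashlib
    import hmac

    # Fake credentials, purely for illustration.
    key = 'publicapikey'
    secret = 'topsecret'
    customer_id = '123456'   # the 'username' field above
    nonce = 1700000000

    msg = str(nonce) + customer_id + key
    signature = hmac.new(
        secret.encode('utf-8'),
        msg=msg.encode('utf-8'),
        digestmod=hashlib.sha256,
    ).hexdigest().upper()

    print(signature)  # 64-character upper-case hex digest sent as 'signature'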
url = self._construct_url("balance/", base, quote) return self._post(url, return_json=True, version=2)
def account_balance(self, base="btc", quote="usd")
Returns dictionary::

    {u'btc_reserved': u'0',
     u'fee': u'0.5000',
     u'btc_available': u'2.30856098',
     u'usd_reserved': u'0',
     u'btc_balance': u'2.30856098',
     u'usd_balance': u'114.64',
     u'usd_available': u'114.64',
     ---If base and quote were specified:
     u'fee': u'',
     ---If base and quote were not specified:
     u'btcusd_fee': u'0.25',
     u'btceur_fee': u'0.25',
     u'eurusd_fee': u'0.20',
     }

There could be reasons to set base and quote to None (or False),
because the result then will contain the fees for all currency pairs.
For backwards compatibility this can not be the default however.
6.561864
7.595578
0.863906
data = {
    'offset': offset,
    'limit': limit,
    'sort': 'desc' if descending else 'asc',
}
url = self._construct_url("user_transactions/", base, quote)
return self._post(url, data=data, return_json=True, version=2)
def user_transactions(self, offset=0, limit=100, descending=True, base=None, quote=None)
Returns descending list of transactions. Every transaction (dictionary)
contains::

    {u'usd': u'-39.25',
     u'datetime': u'2013-03-26 18:49:13',
     u'fee': u'0.20',
     u'btc': u'0.50000000',
     u'type': 2,
     u'id': 213642}

Instead of the keys btc and usd, it can contain other currency codes
3.127712
4.396718
0.711374
data = {'id': order_id}
return self._post("order_status/", data=data, return_json=True,
                  version=1)
def order_status(self, order_id)
Returns dictionary.

- status: 'Finished' or 'In Queue' or 'Open'
- transactions: list of transactions

Each transaction is a dictionary with the following keys:
    btc, usd, price, type, fee, datetime, tid
or
    btc, eur, ....
or
    eur, usd, ....
6.736786
8.196759
0.821884
data = {'id': order_id}
return self._post("cancel_order/", data=data, return_json=True,
                  version=version)
def cancel_order(self, order_id, version=1)
Cancel the order specified by order_id.

Version 1 (default for backwards compatibility reasons):
    Returns True if order was successfully canceled, otherwise raise a
    BitstampError.

Version 2:
    Returns dictionary of the canceled order, containing the keys:
    id, type, price, amount
5.102269
7.280703
0.700793
data = {'amount': amount, 'price': price}
if limit_price is not None:
    data['limit_price'] = limit_price
url = self._construct_url("buy/", base, quote)
return self._post(url, data=data, return_json=True, version=2)
def buy_limit_order(self, amount, price, base="btc", quote="usd", limit_price=None)
Order to buy amount of bitcoins for specified price.
2.782329
2.736427
1.016775
data = {'amount': amount}
url = self._construct_url("buy/market/", base, quote)
return self._post(url, data=data, return_json=True, version=2)
def buy_market_order(self, amount, base="btc", quote="usd")
Order to buy amount of bitcoins for market price.
4.55176
4.323431
1.052812
data = {'code': code}
return self._post("check_code/", data=data, return_json=True,
                  version=1)
def check_bitstamp_code(self, code)
Returns JSON dictionary containing USD and BTC amount included in given bitstamp code.
8.577513
7.617772
1.125987
data = {'code': code}
return self._post("redeem_code/", data=data, return_json=True,
                  version=1)
def redeem_bitstamp_code(self, code)
Returns JSON dictionary containing USD and BTC amount added to user's account.
6.196199
6.092347
1.017046
data = {'timedelta': timedelta}
return self._post("withdrawal_requests/", return_json=True, version=1,
                  data=data)
def withdrawal_requests(self, timedelta=86400)
Returns list of withdrawal requests. Each request is represented as a dictionary. By default, the last 24 hours (86400 seconds) are returned.
7.595359
10.771719
0.70512
data = {'amount': amount, 'address': address}
return self._post("ltc_withdrawal/", data=data, return_json=True,
                  version=2)
def litecoin_withdrawal(self, amount, address)
Send litecoins to another litecoin wallet specified by address.
4.781276
4.970049
0.962018