code
string
signature
string
docstring
string
loss_without_docstring
float64
loss_with_docstring
float64
factor
float64
if not is_flag_set(flag) or '.' not in flag: return None parts = flag.split('.') if parts[0] == 'endpoint': return cls.from_name(parts[1]) else: # some older handlers might not use the 'endpoint' prefix return cls.from_name(parts[0])
def from_flag(cls, flag)
Return an Endpoint subclass instance based on the given flag. The instance that is returned depends on the endpoint name embedded in the flag. Flags should be of the form ``endpoint.{name}.extra...``, though for legacy purposes, the ``endpoint.`` prefix can be omitted. The ``{name}}`` portion will be passed to :meth:`~charms.reactive.endpoints.Endpoint.from_name`. If the flag is not set, an appropriate Endpoint subclass cannot be found, or the flag name can't be parsed, ``None`` will be returned.
4.60495
3.346782
1.375934
for endpoint_name in sorted(hookenv.relation_types()): # populate context based on attached relations relf = relation_factory(endpoint_name) if not relf or not issubclass(relf, cls): continue rids = sorted(hookenv.relation_ids(endpoint_name)) # ensure that relation IDs have the endpoint name prefix, in case # juju decides to drop it at some point rids = ['{}:{}'.format(endpoint_name, rid) if ':' not in rid else rid for rid in rids] endpoint = relf(endpoint_name, rids) cls._endpoints[endpoint_name] = endpoint endpoint.register_triggers() endpoint._manage_departed() endpoint._manage_flags() for relation in endpoint.relations: hookenv.atexit(relation._flush_data)
def _startup(cls)
Create Endpoint instances and manage automatic flags.
5.785892
5.375533
1.076338
if '{endpoint_name}' not in flag: flag = 'endpoint.{endpoint_name}.' + flag # use replace rather than format to prevent any other braces or braced # strings from being touched return flag.replace('{endpoint_name}', self.endpoint_name)
def expand_name(self, flag)
Complete a flag for this endpoint by expanding the endpoint name. If the flag does not already contain ``{endpoint_name}``, it will be prefixed with ``endpoint.{endpoint_name}.``. Then, any occurance of ``{endpoint_name}`` will be replaced with ``self.endpoint_name``.
8.779333
5.786479
1.517215
already_joined = is_flag_set(self.expand_name('joined')) hook_name = hookenv.hook_name() rel_hook = hook_name.startswith(self.endpoint_name + '-relation-') departed_hook = rel_hook and hook_name.endswith('-departed') toggle_flag(self.expand_name('joined'), self.is_joined) if departed_hook: set_flag(self.expand_name('departed')) elif self.is_joined: clear_flag(self.expand_name('departed')) if already_joined and not rel_hook: # skip checking relation data outside hooks for this relation # to save on API calls to the controller (unless we didn't have # the joined flag before, since then we might migrating to Endpoints) return for unit in self.all_units: for key, value in unit.received.items(): data_key = 'endpoint.{}.{}.{}.{}'.format(self.endpoint_name, unit.relation.relation_id, unit.unit_name, key) if data_changed(data_key, value): set_flag(self.expand_name('changed')) set_flag(self.expand_name('changed.{}'.format(key)))
def _manage_flags(self)
Manage automatic relation flags.
4.678569
4.531033
1.032561
if self._all_joined_units is None: units = chain.from_iterable(rel.units for rel in self.relations) self._all_joined_units = CombinedUnitsView(units) return self._all_joined_units
def all_joined_units(self)
A list view of all the units of all relations attached to this :class:`~charms.reactive.endpoints.Endpoint`. This is actually a :class:`~charms.reactive.endpoints.CombinedUnitsView`, so the units will be in order by relation ID and then unit name, and you can access a merged view of all the units' data as a single mapping. You should be very careful when using the merged data collections, however, and consider carefully what will happen when the endpoint has multiple relations and multiple remote units on each. It is probably better to iterate over each unit and handle its data individually. See :class:`~charms.reactive.endpoints.CombinedUnitsView` for an explanation of how the merged data collections work. Note that, because a given application might be related multiple times on a given endpoint, units may show up in this collection more than once.
3.595505
2.87501
1.250606
if self._all_departed_units is None: self._all_departed_units = CachedKeyList.load( 'reactive.endpoints.departed.{}'.format(self.endpoint_name), RelatedUnit._deserialize, 'unit_name') return self._all_departed_units
def all_departed_units(self)
Collection of all units that were previously part of any relation on this endpoint but which have since departed. This collection is persistent and mutable. The departed units will be kept until they are explicitly removed, to allow for reasonable cleanup of units that have left. Example: You need to run a command each time a unit departs the relation. .. code-block:: python @when('endpoint.{endpoint_name}.departed') def handle_departed_unit(self): for name, unit in self.all_departed_units.items(): # run the command to remove `unit` from the cluster # .. self.all_departed_units.clear() clear_flag(self.expand_name('departed')) Once a unit is departed, it will no longer show up in :attr:`all_joined_units`. Note that units are considered departed as soon as the departed hook is entered, which differs slightly from how the Juju primitives behave (departing units are still returned from ``related-units`` until after the departed hook is complete). This collection is a :class:`KeyList`, so can be used as a mapping to look up units by their unit name, or iterated or accessed by index.
7.587205
5.260621
1.442264
if self._application_name is None and self.units: self._application_name = self.units[0].unit_name.split('/')[0] return self._application_name
def application_name(self)
The name of the remote application for this relation, or ``None``. This is equivalent to:: relation.units[0].unit_name.split('/')[0]
3.691688
2.761628
1.33678
if self._units is None: self._units = CombinedUnitsView([ RelatedUnit(self, unit_name) for unit_name in sorted(hookenv.related_units(self.relation_id)) ]) return self._units
def joined_units(self)
A list view of all the units joined on this relation. This is actually a :class:`~charms.reactive.endpoints.CombinedUnitsView`, so the units will be in order by unit name, and you can access a merged view of all of the units' data with ``self.units.received`` and ``self.units.received``. You should be very careful when using the merged data collections, however, and consider carefully what will happen when there are multiple remote units. It is probabaly better to iterate over each unit and handle its data individually. See :class:`~charms.reactive.endpoints.CombinedUnitsView` for an explanation of how the merged data collections work. The view can be iterated and indexed as a list, or you can look up units by their unit name. For example:: by_index = relation.units[0] by_name = relation.units['unit/0'] assert by_index is by_name assert all(unit is relation.units[unit.unit_name] for unit in relation.units) print(', '.join(relation.units.keys()))
6.063197
4.712393
1.286649
if self._data is None: self._data = JSONUnitDataView( hookenv.relation_get(unit=hookenv.local_unit(), rid=self.relation_id), writeable=True) return self._data
def to_publish(self)
This is the relation data that the local unit publishes so it is visible to all related units. Use this to communicate with related units. It is a writeable :class:`~charms.reactive.endpoints.JSONUnitDataView`. All values stored in this collection will be automatically JSON encoded when they are published. This means that they need to be JSON serializable! Mappings stored in this collection will be encoded with sorted keys, to ensure that the encoded representation will only change if the actual data changes. Changes to this data are published at the end of a succesfull hook. The data is reset when a hook fails.
7.360253
3.759358
1.957849
if self._data and self._data.modified: hookenv.relation_set(self.relation_id, dict(self.to_publish.data))
def _flush_data(self)
If this relation's local unit data has been modified, publish it on the relation. This should be automatically called.
9.675928
5.656691
1.710528
if self._data is None: self._data = JSONUnitDataView(hookenv.relation_get( unit=self.unit_name, rid=self.relation.relation_id)) return self._data
def received(self)
A :class:`~charms.reactive.endpoints.JSONUnitDataView` of the data received from this remote unit over the relation, with values being automatically decoded as JSON.
8.069991
3.769447
2.140895
items = unitdata.kv().get(cache_key) or [] return cls(cache_key, [deserializer(item) for item in items], key_attr)
def load(cls, cache_key, deserializer, key_attr)
Load the persisted cache and return a new instance of this class.
4.824339
4.434745
1.08785
if not hasattr(self, '_data'): # NB: units are reversed so that lowest numbered unit takes precedence self._data = JSONUnitDataView({key: value for unit in reversed(self) for key, value in unit.received_raw.items()}) return self._data
def received(self)
Combined :class:`~charms.reactive.endpoints.JSONUnitDataView` of the data of all units in this list, with automatic JSON decoding.
13.369301
5.85908
2.281809
http_transport = transport.HttpTransport(domain, build_headers(auth, user_agent)) return client.Client(http_transport)
def AuthorizingClient(domain, auth, user_agent=None)
Creates a Podio client using an auth object.
5.449992
5.897699
0.924088
(w, h) = image.size # Thermal paper is 512 pixels wide if w > 512: ratio = 512. / w h = int(h * ratio) image = image.resize((512, h), Image.ANTIALIAS) if image.mode != '1': image = image.convert('1') pixels = np.array(list(image.getdata())).reshape(h, w) # Add white pixels so that image fits into bytes extra_rows = int(math.ceil(h / 24)) * 24 - h extra_pixels = np.ones((extra_rows, w), dtype=bool) pixels = np.vstack((pixels, extra_pixels)) h += extra_rows nb_stripes = h / 24 pixels = pixels.reshape(nb_stripes, 24, w).swapaxes(1, 2).reshape(-1, 8) nh = int(w / 256) nl = w % 256 data = [] pixels = np.invert(np.packbits(pixels)) stripes = np.split(pixels, nb_stripes) for stripe in stripes: data.extend([ ESC, 42, # * 33, # double density mode nl, nh]) data.extend(stripe) data.extend([ 27, # ESC 74, # J 48]) # account for double density mode height = h * 2 return cls(data, height)
def from_image(cls, image)
Create a PrintableImage from a PIL Image :param image: a PIL Image :return:
4.087038
4.079084
1.00195
self.data.extend(other.data) self.height = self.height + other.height return self
def append(self, other)
Append a Printable Image at the end of the current instance. :param other: another PrintableImage :return: PrintableImage containing data from both self and other
4.133202
3.996556
1.034191
@wraps(func) def wrapper(self, *args, **kwargs): byte_array = func(self, *args, **kwargs) self.write_bytes(byte_array) return wrapper
def write_this(func)
Decorator that writes the bytes to the wire
2.557307
2.108843
1.212659
printable_image = reduce(lambda x, y: x.append(y), list(printable_images)) self.print_image(printable_image)
def print_images(self, *printable_images)
This method allows printing several images in one shot. This is useful if the client code does not want the printer to make pause during printing
3.537251
3.763838
0.939799
if data is None: return None if isinstance(data, unicode): data = data.encode("utf-8") return urllib.quote_plus(data)
def encode_and_quote(data)
If ``data`` is unicode, return urllib.quote_plus(data.encode("utf-8")) otherwise return urllib.quote_plus(data)
2.798136
2.111281
1.325326
if s is None: return None if isinstance(s, unicode): return s.encode("utf-8") return str(s)
def _strify(s)
If s is a unicode string, encode it to UTF-8 and return the results, otherwise return str(s), or None if s is None
2.49833
2.29508
1.088559
return MultipartParam(paramname, filesize=filesize, filename=filename, filetype=filetype).encode_hdr(boundary)
def encode_file_header(boundary, paramname, filesize, filename=None, filetype=None)
Returns the leading data for a multipart/form-data field that contains file data. ``boundary`` is the boundary string used throughout a single request to separate variables. ``paramname`` is the name of the variable in this request. ``filesize`` is the size of the file data. ``filename`` if specified is the filename to give to this field. This field is only useful to the server for determining the original filename. ``filetype`` if specified is the MIME type of this file. The actual file data should be sent after this header has been sent.
7.632334
9.494196
0.803895
size = sum(p.get_size(boundary) for p in MultipartParam.from_params(params)) return size + len(boundary) + 6
def get_body_size(params, boundary)
Returns the number of bytes that the multipart/form-data encoding of ``params`` will be.
6.689108
5.708531
1.171774
headers = {} boundary = urllib.quote_plus(boundary) headers['Content-Type'] = "multipart/form-data; boundary=%s" % boundary headers['Content-Length'] = str(get_body_size(params, boundary)) return headers
def get_headers(params, boundary)
Returns a dictionary with Content-Type and Content-Length headers for the multipart/form-data encoding of ``params``.
2.435441
2.429897
1.002282
if boundary is None: boundary = gen_boundary() else: boundary = urllib.quote_plus(boundary) headers = get_headers(params, boundary) params = MultipartParam.from_params(params) return MultipartYielder(params, boundary, cb), headers
def multipart_encode(params, boundary=None, cb=None)
Encode ``params`` as multipart/form-data. ``params`` should be a sequence of (name, value) pairs or MultipartParam objects, or a mapping of names to values. Values are either strings parameter values, or file-like objects to use as the parameter value. The file-like objects must support .read() and either .fileno() or both .seek() and .tell(). If ``boundary`` is set, then it as used as the MIME boundary. Otherwise a randomly generated boundary will be used. In either case, if the boundary string appears in the parameter values a ValueError will be raised. If ``cb`` is set, it should be a callback which will get called as blocks of data are encoded. It will be called with (param, current, total), indicating the current parameter being encoded, the current amount encoded, and the total amount to encode. Returns a tuple of `datagen`, `headers`, where `datagen` is a generator that will yield blocks of data that make up the encoded parameters, and `headers` is a dictionary with the assoicated Content-Type and Content-Length headers. Examples: >>> datagen, headers = multipart_encode( [("key", "value1"), ("key", "value2")] ) >>> s = "".join(datagen) >>> assert "value2" in s and "value1" in s >>> p = MultipartParam("key", "value2") >>> datagen, headers = multipart_encode( [("key", "value1"), p] ) >>> s = "".join(datagen) >>> assert "value2" in s and "value1" in s >>> datagen, headers = multipart_encode( {"key": "value1"} ) >>> s = "".join(datagen) >>> assert "value2" not in s and "value1" in s
4.476054
5.903381
0.758219
return cls(paramname, filename=os.path.basename(filename), filetype=mimetypes.guess_type(filename)[0], filesize=os.path.getsize(filename), fileobj=open(filename, "rb"))
def from_file(cls, paramname, filename)
Returns a new MultipartParam object constructed from the local file at ``filename``. ``filesize`` is determined by os.path.getsize(``filename``) ``filetype`` is determined by mimetypes.guess_type(``filename``)[0] ``filename`` is set to os.path.basename(``filename``)
3.330622
2.375017
1.402357
if hasattr(params, 'items'): params = params.items() retval = [] for item in params: if isinstance(item, cls): retval.append(item) continue name, value = item if isinstance(value, cls): assert value.name == name retval.append(value) continue if hasattr(value, 'read'): # Looks like a file object filename = getattr(value, 'name', None) if filename is not None: filetype = mimetypes.guess_type(filename)[0] else: filetype = None retval.append(cls(name=name, filename=filename, filetype=filetype, fileobj=value)) else: retval.append(cls(name, value)) return retval
def from_params(cls, params)
Returns a list of MultipartParam objects from a sequence of name, value pairs, MultipartParam instances, or from a mapping of names to values The values may be strings or file objects, or MultipartParam objects. MultipartParam object names must match the given names in the name,value pairs or mapping, if applicable.
2.310853
2.107651
1.096411
boundary = encode_and_quote(boundary) headers = ["--%s" % boundary] if self.filename: disposition = 'form-data; name="%s"; filename="%s"' % (self.name, self.filename) else: disposition = 'form-data; name="%s"' % self.name headers.append("Content-Disposition: %s" % disposition) if self.filetype: filetype = self.filetype else: filetype = "text/plain; charset=utf-8" headers.append("Content-Type: %s" % filetype) headers.append("") headers.append("") return "\r\n".join(headers)
def encode_hdr(self, boundary)
Returns the header of the encoding of this parameter
1.993963
2.020974
0.986635
if self.value is None: value = self.fileobj.read() else: value = self.value if re.search("^--%s$" % re.escape(boundary), value, re.M): raise ValueError("boundary found in encoded string") return "%s%s\r\n" % (self.encode_hdr(boundary), value)
def encode(self, boundary)
Returns the string encoding of this parameter
3.985901
3.776459
1.05546
total = self.get_size(boundary) current = 0 if self.value is not None: block = self.encode(boundary) current += len(block) yield block if self.cb: self.cb(self, current, total) else: block = self.encode_hdr(boundary) current += len(block) yield block if self.cb: self.cb(self, current, total) last_block = "" encoded_boundary = "--%s" % encode_and_quote(boundary) boundary_exp = re.compile("^%s$" % re.escape(encoded_boundary), re.M) while True: block = self.fileobj.read(blocksize) if not block: current += 2 yield "\r\n" if self.cb: self.cb(self, current, total) break last_block += block if boundary_exp.search(last_block): raise ValueError("boundary found in file data") last_block = last_block[-len(encoded_boundary) - 2:] current += len(block) yield block if self.cb: self.cb(self, current, total)
def iter_encode(self, boundary, blocksize=4096)
Yields the encoding of this parameter If self.fileobj is set, then blocks of ``blocksize`` bytes are read and yielded.
2.748856
2.695169
1.01992
if self.filesize is not None: valuesize = self.filesize else: valuesize = len(self.value) return len(self.encode_hdr(boundary)) + 2 + valuesize
def get_size(self, boundary)
Returns the size in bytes that this param will be when encoded with the given boundary.
6.453417
4.961617
1.300668
if self.param_iter is not None: try: block = self.param_iter.next() self.current += len(block) if self.cb: self.cb(self.p, self.current, self.total) return block except StopIteration: self.p = None self.param_iter = None if self.i is None: raise StopIteration elif self.i >= len(self.params): self.param_iter = None self.p = None self.i = None block = "--%s--\r\n" % self.boundary self.current += len(block) if self.cb: self.cb(self.p, self.current, self.total) return block self.p = self.params[self.i] self.param_iter = self.p.iter_encode(self.boundary) self.i += 1 return self.next()
def next(self)
generator function to yield multipart/form-data representation of parameters
2.473699
2.278236
1.085796
options_ = {} if silent: options_['silent'] = silent if not hook: options_['hook'] = hook if options_: return '?' + urlencode(options_).lower() else: return ''
def get_options(silent=False, hook=True)
Generate a query string with the appropriate options. :param silent: If set to true, the object will not be bumped up in the stream and notifications will not be generated. :type silent: bool :param hook: True if hooks should be executed for the change, false otherwise. :type hook: bool :return: The generated query string :rtype: str
3.817655
3.711566
1.028583
if basic: return self.transport.GET(url='/item/%d/basic' % item_id) return self.transport.GET(kwargs, url='/item/%d' % item_id)
def find(self, item_id, basic=False, **kwargs)
Get item :param item_id: Item ID :param basic: ? :type item_id: int :return: Item info :rtype: dict
4.117577
4.158949
0.990052
if not isinstance(attributes, dict): raise TypeError('Must be of type dict') attributes = json.dumps(attributes) return self.transport.PUT(body=attributes, type='application/json', url='/item/%d%s' % (item_id, self.get_options(silent=silent, hook=hook)))
def update(self, item_id, attributes, silent=False, hook=True)
Updates the item using the supplied attributes. If 'silent' is true, Podio will send no notifications to subscribed users and not post updates to the stream. Important: webhooks will still be called.
4.710938
5.213529
0.903599
# if not isinstance(attributes, dict): # raise TypeError('Must be of type dict') attributes = json.dumps(attributes) return self.transport.POST(url='/task/%s' % self.get_options(silent=silent, hook=hook), body=attributes, type='application/json')
def create(self, attributes, silent=False, hook=True)
https://developers.podio.com/doc/tasks/create-task-22419 Creates the task using the supplied attributes. If 'silent' is true, Podio will send no notifications to subscribed users and not post updates to the stream. If 'hook' is false webhooks will not be called.
5.245873
5.182841
1.012162
# if not isinstance(attributes, dict): # raise TypeError('Must be of type dict') attributes = json.dumps(attributes) return self.transport.POST(body=attributes, type='application/json', url='/task/%s/%s/%s' % (ref_type, ref_id, self.get_options(silent=silent, hook=hook)))
def create_for(self, ref_type, ref_id, attributes, silent=False, hook=True)
https://developers.podio.com/doc/tasks/create-task-with-reference-22420 If 'silent' is true, Podio will send no notifications and not post updates to the stream. If 'hook' is false webhooks will not be called.
4.789159
4.645075
1.031019
resp = self.transport.GET(url='/space/url?%s' % urlencode({'url': space_url})) if id_only: return resp['space_id'] return resp
def find_by_url(self, space_url, id_only=True)
Returns a space ID given the URL of the space. :param space_url: URL of the Space :param id_only: ? :return: space_id: Space url :rtype: str
4.637131
5.131639
0.903635
if not isinstance(attributes, dict): raise TypeError('Dictionary of values expected') attributes = json.dumps(attributes) return self.transport.POST(url='/space/', body=attributes, type='application/json')
def create(self, attributes)
Create a new space :param attributes: Refer to API. Pass in argument as dictionary :type attributes: dict :return: Details of newly created space :rtype: dict
6.718139
6.950589
0.966557
return self.transport.GET(url='/stream/%s/%s' % (ref_type, ref_id))
def find_by_ref(self, ref_type, ref_id)
Returns an object of type "item", "status" or "task" as a stream object. This is useful when a new status has been posted and should be rendered directly in the stream without reloading the entire stream. For details, see: https://developers.podio.com/doc/stream/get-stream-object-80054
7.222016
5.590748
1.29178
raw_handler = lambda resp, data: data return self.transport.GET(url='/file/%d/raw' % file_id, handler=raw_handler)
def find_raw(self, file_id)
Returns raw file as string. Pass to a file object
7.785974
7.254694
1.073233
attributes = {'filename': filename, 'source': filedata} return self.transport.POST(url='/file/v2/', body=attributes, type='multipart/form-data')
def create(self, filename, filedata)
Create a file from raw data
9.905498
8.821619
1.122866
return self.transport.GET(url='/view/app/{}/{}'.format(app_id, view_specifier))
def get(self, app_id, view_specifier)
Retrieve the definition of a given view, provided the app_id and the view_id :param app_id: the app id :param view_specifier: Can be one of the following: 1. The view ID 2. The view's name 3. "last" to look up the last view used
6.5591
8.69174
0.754636
include_standard = "true" if include_standard_views is True else "false" return self.transport.GET(url='/view/app/{}/?include_standard_views={}'.format(app_id, include_standard))
def get_views(self, app_id, include_standard_views=False)
Get all of the views for the specified app :param app_id: the app containing the views :param include_standard_views: defaults to false. Set to true if you wish to include standard views.
4.244465
7.065674
0.600716
if not isinstance(attributes, dict): raise TypeError('Must be of type dict') attribute_data = json.dumps(attributes) return self.transport.PUT(url='/view/app/{}/last'.format(app_id), body=attribute_data, type='application/json')
def update_last_view(self, app_id, attributes)
Updates the last view for the active user :param app_id: the app id :param attributes: the body of the request in dictionary format
4.593884
5.244324
0.875973
if not isinstance(attributes, dict): raise TypeError('Must be of type dict') attribute_data = json.dumps(attributes) return self.transport.PUT(url='/view/{}'.format(view_id), body=attribute_data, type='application/json')
def update_view(self, view_id, attributes)
Update an existing view using the details supplied via the attributes parameter :param view_id: the view's id :param attributes: a dictionary containing the modifications to be made to the view :return:
4.173009
4.954741
0.842226
try: result = Result(*self.perform_request('HEAD', '/', params={'request_timeout': timeout})) except ConnectionError: return False except TransportError: return False if result.response.status_code == 200: return True return False
def is_active(self, timeout=2)
:param timeout: int :return: boolean
4.53291
4.584225
0.988806
if not sql: raise QueryError('No query passed to drill.') result = ResultQuery(*self.perform_request(**{ 'method': 'POST', 'url': '/query.json', 'body': { "queryType": "SQL", "query": sql }, 'params': { 'request_timeout': timeout } })) return result
def query(self, sql, timeout=10)
Submit a query and return results. :param sql: string :param timeout: int :return: pydrill.client.ResultQuery
5.227281
4.173288
1.252557
sql = 'explain plan for ' + sql return self.query(sql, timeout)
def plan(self, sql, timeout=10)
:param sql: string :param timeout: int :return: pydrill.client.ResultQuery
5.869057
6.327452
0.927555
result = Result(*self.perform_request(**{ 'method': 'GET', 'url': '/storage/{0}.json'.format(name), 'params': { 'request_timeout': timeout } })) return result
def storage_detail(self, name, timeout=10)
Get the definition of the named storage plugin. :param name: The assigned name in the storage plugin definition. :param timeout: int :return: pydrill.client.Result
4.79603
4.980785
0.962906
value = 'true' if value else 'false' result = Result(*self.perform_request(**{ 'method': 'GET', 'url': '/storage/{0}/enable/{1}'.format(name, value), 'params': { 'request_timeout': timeout } })) return result
def storage_enable(self, name, value=True, timeout=10)
Enable or disable the named storage plugin. :param name: The assigned name in the storage plugin definition. :param value: Either True (to enable) or False (to disable). :param timeout: int :return: pydrill.client.Result
4.072359
4.564155
0.892248
result = Result(*self.perform_request(**{ 'method': 'POST', 'url': '/storage/{0}.json'.format(name), 'body': config, 'params': { 'request_timeout': timeout } })) return result
def storage_update(self, name, config, timeout=10)
Create or update a storage plugin configuration. :param name: The name of the storage plugin configuration to create or update. :param config: Overwrites the existing configuration if there is any, and therefore, must include all required attributes and definitions. :param timeout: int :return: pydrill.client.Result
4.427218
4.396521
1.006982
result = Result(*self.perform_request(**{ 'method': 'DELETE', 'url': '/storage/{0}.json'.format(name), 'params': { 'request_timeout': timeout } })) return result
def storage_delete(self, name, timeout=10)
Delete a storage plugin configuration. :param name: The name of the storage plugin configuration to delete. :param timeout: int :return: pydrill.client.Result
4.781444
4.73768
1.009237
result = Result(*self.perform_request(**{ 'method': 'GET', 'url': '/profiles/{0}.json'.format(query_id), 'params': { 'request_timeout': timeout } })) return result
def profile(self, query_id, timeout=10)
Get the profile of the query that has the given queryid. :param query_id: The UUID of the query in standard UUID format that Drill assigns to each query. :param timeout: int :return: pydrill.client.Result
4.643326
5.225423
0.888603
result = Result(*self.perform_request(**{ 'method': 'GET', 'url': '/profiles/cancel/{0}'.format(query_id), 'params': { 'request_timeout': timeout } })) return result
def profile_cancel(self, query_id, timeout=10)
Cancel the query that has the given queryid. :param query_id: The UUID of the query in standard UUID format that Drill assigns to each query. :param timeout: int :return: pydrill.client.Result
4.860689
5.306886
0.915921
if body and not isinstance(body, dict): body = body.decode('utf-8') logger.info( '%s %s [status:%s request:%.3fs]', method, full_url, status_code, duration ) logger.debug('> %s', body) logger.debug('< %s', response) if tracer.isEnabledFor(logging.INFO): if self.url_prefix: path = path.replace(self.url_prefix, '', 1) tracer.info("curl -X%s 'http://localhost:8047%s' -d '%s'", method, path, self._pretty_json(body) if body else '') if tracer.isEnabledFor(logging.DEBUG): tracer.debug('#[%s] (%.3fs)\n#%s', status_code, duration, self._pretty_json(response).replace('\n', '\n#') if response else '')
def log_request_success(self, method, full_url, path, body, status_code, response, duration)
Log a successful API call.
3.412171
3.393953
1.005368
logger.warning( '%s %s [status:%s request:%.3fs]', method, full_url, status_code or 'N/A', duration, exc_info=exception is not None ) if body and not isinstance(body, dict): body = body.decode('utf-8') logger.debug('> %s', body)
def log_request_fail(self, method, full_url, body, duration, status_code=None, exception=None)
Log an unsuccessful API call.
4.016573
3.915334
1.025857
error_message = raw_data additional_info = None try: additional_info = json.loads(raw_data) error_message = additional_info.get('error', error_message) if isinstance(error_message, dict) and 'type' in error_message: error_message = error_message['type'] except: pass raise HTTP_EXCEPTIONS.get(status_code, TransportError)(status_code, error_message, additional_info)
def _raise_error(self, status_code, raw_data)
Locate appropriate exception and raise it.
2.507479
2.370185
1.057926
if body is not None: body = self.serializer.dumps(body) # some clients or environments don't support sending GET with body if method in ('HEAD', 'GET') and self.send_get_body_as != 'GET': # send it as post instead if self.send_get_body_as == 'POST': method = 'POST' # or as source parameter elif self.send_get_body_as == 'source': if params is None: params = {} params['source'] = body body = None if body is not None: try: body = body.encode('utf-8') except (UnicodeDecodeError, AttributeError): # bytes/str - no need to re-encode pass ignore = () timeout = None if params: timeout = params.pop('request_timeout', None) ignore = params.pop('ignore', ()) if isinstance(ignore, int): ignore = (ignore,) for attempt in range(self.max_retries + 1): connection = self.get_connection() try: response, data, duration = connection.perform_request(method, url, params, body, ignore=ignore, timeout=timeout) except TransportError as e: retry = False if isinstance(e, ConnectionTimeout): retry = self.retry_on_timeout elif isinstance(e, ConnectionError): retry = True elif e.status_code in self.retry_on_status: retry = True if retry: if attempt == self.max_retries: raise else: raise else: if data: data = self.deserializer.loads(data, mimetype=response.headers.get('Content-Type')) else: data = {} return response, data, duration
def perform_request(self, method, url, params=None, body=None)
Perform the actual request. Retrieve a connection. Pass all the information to it's perform_request method and return the data. :arg method: HTTP method to use :arg url: absolute url (without host) to target :arg params: dictionary of query parameters, will be handed over to the underlying :class:`~pydrill.Connection` class for serialization :arg body: body of the request, will be serializes using serializer and passed to the connection
2.772356
2.747727
1.008963
if prebump not in REL_TYPES: raise ValueError(f"{type_} not in {REL_TYPES}") prebump = REL_TYPES.index(prebump) version = _read_version() version = _bump_release(version, type_) _write_version(version) # Needs to happen before Towncrier deletes fragment files. tag_content = _render_log() ctx.run("towncrier") if commit: ctx.run(f'git commit -am "Release {version}"') tag_content = tag_content.replace('"', '\\"') ctx.run(f'git tag -a {version} -m "Version {version}\n\n{tag_content}"') ctx.run(f"python setup.py sdist bdist_wheel") dist_pattern = f'{PACKAGE_NAME.replace("-", "[-_]")}-*' artifacts = list(ROOT.joinpath("dist").glob(dist_pattern)) filename_display = "\n".join(f" {a}" for a in artifacts) print(f"[release] Will upload:\n{filename_display}") if not yes: try: input("[release] Release ready. ENTER to upload, CTRL-C to abort: ") except KeyboardInterrupt: print("\nAborted!") return arg_display = " ".join(f'"{n}"' for n in artifacts) cmd = f'twine upload --repository="{repo}"' if config_file: cmd = f'{cmd} --config-file="{config_file}"' cmd = f"{cmd} {arg_display}" ctx.run(cmd) version = _prebump(version, prebump) _write_version(version) if commit: ctx.run(f'git commit -am "Prebump to {version}"')
def release(ctx, type_, repo, prebump=PREBUMP, config_file=None, commit=True, yes=False)
Make a new release.
3.598572
3.576672
1.006123
# Register breadcrumb root item = current_menu.submenu('breadcrumbs.settings') item.register('', _('Account')) item = current_menu.submenu('breadcrumbs.{0}'.format( current_app.config['SECURITY_BLUEPRINT_NAME'])) if current_app.config.get('SECURITY_CHANGEABLE', True): item.register('', _('Change password')) # Register settings menu item = current_menu.submenu('settings.change_password') item.register( "{0}.change_password".format( current_app.config['SECURITY_BLUEPRINT_NAME']), # NOTE: Menu item text (icon replaced by a user icon). _('%(icon)s Change password', icon='<i class="fa fa-key fa-fw"></i>'), order=1) # Register breadcrumb item = current_menu.submenu('breadcrumbs.{0}.change_password'.format( current_app.config['SECURITY_BLUEPRINT_NAME'])) item.register( "{0}.change_password".format( current_app.config['SECURITY_BLUEPRINT_NAME']), _("Change password"), order=0, )
def init_menu()
Initialize menu before first request.
3.716066
3.610713
1.029178
in_production = not (current_app.debug or current_app.testing) secure = current_app.config.get('SESSION_COOKIE_SECURE') if in_production and not secure: current_app.logger.warning( "SESSION_COOKIE_SECURE setting must be set to True to prevent the " "session cookie from being leaked over an insecure channel." )
def check_security_settings()
Warn if session cookie is not secure in production.
4.015264
3.348799
1.199016
def jwt(): token = current_accounts.jwt_creation_factory() return Markup( render_template( current_app.config['ACCOUNTS_JWT_DOM_TOKEN_TEMPLATE'], token=token ) ) def jwt_token(): return current_accounts.jwt_creation_factory() return { 'jwt': jwt, 'jwt_token': jwt_token, }
def jwt_proccessor()
Context processor for jwt.
4.930639
4.853578
1.015877
if isinstance(val, text_type): return val.encode('utf-8') assert isinstance(val, binary_type) return val
def _to_binary(val)
Convert to binary.
2.992292
2.952949
1.013323
if isinstance(val, binary_type): return val.decode('utf-8') assert isinstance(val, text_type) return val
def _to_string(val)
Convert to text.
3.236504
2.925861
1.106172
final_key = bytearray(16) for i, c in enumerate(key): final_key[i % 16] ^= key[i] if PY3 else ord(key[i]) return bytes(final_key)
def _mysql_aes_key(key)
Format key.
3.070192
2.897821
1.059483
val = _to_string(val) pad_value = 16 - (len(val) % 16) return _to_binary('{0}{1}'.format(val, chr(pad_value) * pad_value))
def _mysql_aes_pad(val)
Padding.
3.19035
3.268468
0.976099
val = _to_string(val) pad_value = ord(val[-1]) return val[:-pad_value]
def _mysql_aes_unpad(val)
Reverse padding.
4.92151
4.436374
1.109354
assert isinstance(val, binary_type) or isinstance(val, text_type) assert isinstance(key, binary_type) or isinstance(key, text_type) k = _mysql_aes_key(_to_binary(key)) v = _mysql_aes_pad(_to_binary(val)) e = _mysql_aes_engine(k).encryptor() return e.update(v) + e.finalize()
def mysql_aes_encrypt(val, key)
Mysql AES encrypt value with secret key. :param val: Plain text value. :param key: The AES key. :returns: The encrypted AES value.
2.909171
3.150092
0.923519
assert isinstance(encrypted_val, binary_type) \ or isinstance(encrypted_val, text_type) assert isinstance(key, binary_type) or isinstance(key, text_type) k = _mysql_aes_key(_to_binary(key)) d = _mysql_aes_engine(_to_binary(k)).decryptor() return _mysql_aes_unpad(d.update(_to_binary(encrypted_val)) + d.finalize())
def mysql_aes_decrypt(encrypted_val, key)
Mysql AES decrypt value with secret key. :param encrypted_val: Encrypted value. :param key: The AES key. :returns: The AES value decrypted.
2.930976
3.116244
0.940548
salt, checksum = parse_mc2(hash, cls.ident, handler=cls) return cls(salt=salt, checksum=checksum)
def from_string(cls, hash, **context)
Parse instance from configuration string in Modular Crypt Format.
14.992203
11.874853
1.262517
return str_to_uascii( hashlib.sha256(mysql_aes_encrypt(self.salt, secret)).hexdigest() )
def _calc_checksum(self, secret)
Calculate string. :param secret: The secret key. :returns: The checksum.
9.679361
11.837439
0.817691
if ip: match = geolite2.reader().get(ip) return match.get('country', {}).get('iso_code') if match else None
def _ip2country(ip)
Get user country.
4.546156
4.255741
1.068241
parsed_string = user_agent_parser.Parse(user_agent) return { 'os': parsed_string.get('os', {}).get('family'), 'browser': parsed_string.get('user_agent', {}).get('family'), 'browser_version': parsed_string.get('user_agent', {}).get('major'), 'device': parsed_string.get('device', {}).get('family'), }
def _extract_info_from_useragent(user_agent)
Extract extra informations from user.
1.935692
1.928828
1.003559
r user_id, sid_s = session['user_id'], session.sid_s with db.session.begin_nested(): session_activity = SessionActivity( user_id=user_id, sid_s=sid_s, ip=request.remote_addr, country=_ip2country(request.remote_addr), **_extract_info_from_useragent( request.headers.get('User-Agent', '') ) ) db.session.merge(session_activity)
def add_session(session=None)
r"""Add a session to the SessionActivity table. :param session: Flask Session object to add. If None, ``session`` is used. The object is expected to have a dictionary entry named ``"user_id"`` and a field ``sid_s``
3.907303
3.170888
1.232243
@after_this_request def add_user_session(response): # Regenerate the session to avoid session fixation vulnerabilities. session.regenerate() # Save the session first so that the sid_s gets generated. app.session_interface.save_session(app, session, response) add_session(session) current_accounts.datastore.commit() return response
def login_listener(app, user)
Connect to the user_logged_in signal for table population. :param app: The Flask application. :param user: The :class:`invenio_accounts.models.User` instance.
8.434688
8.779072
0.960772
@after_this_request def _commit(response=None): if hasattr(session, 'sid_s'): delete_session(session.sid_s) # Regenerate the session to avoid session fixation vulnerabilities. session.regenerate() current_accounts.datastore.commit() return response
def logout_listener(app, user)
Connect to the user_logged_out signal. :param app: The Flask application. :param user: The :class:`invenio_accounts.models.User` instance.
7.873902
8.455322
0.931236
# Remove entries from sessionstore _sessionstore.delete(sid_s) # Find and remove the corresponding SessionActivity entry with db.session.begin_nested(): SessionActivity.query.filter_by(sid_s=sid_s).delete() return 1
def delete_session(sid_s)
Delete entries in the data- and kvsessionstore with the given sid_s. On a successful deletion, the flask-kvsession store returns 1 while the sqlalchemy datastore returns None. :param sid_s: The session ID. :returns: ``1`` if deletion was successful.
6.578903
5.88689
1.117551
with db.session.begin_nested(): for s in user.active_sessions: _sessionstore.delete(s.sid_s) SessionActivity.query.filter_by(user=user).delete() return True
def delete_user_sessions(user)
Delete all active user sessions. :param user: User instance. :returns: If ``True`` then the session is successfully deleted.
6.462956
7.71587
0.837619
if app.config.get('RECAPTCHA_PUBLIC_KEY') and \ app.config.get('RECAPTCHA_PRIVATE_KEY'): class ConfirmRegisterForm(Form): recaptcha = FormField(RegistrationFormRecaptcha, separator='.') return ConfirmRegisterForm return Form
def confirm_register_form_factory(Form, app)
Return confirmation for extended registration form.
3.594279
3.438788
1.045217
if app.config.get('RECAPTCHA_PUBLIC_KEY') and \ app.config.get('RECAPTCHA_PRIVATE_KEY'): class RegisterForm(Form): recaptcha = FormField(RegistrationFormRecaptcha, separator='.') return RegisterForm return Form
def register_form_factory(Form, app)
Return extended registration form.
4.127737
3.75593
1.098992
class LoginForm(Form): def __init__(self, *args, **kwargs): super(LoginForm, self).__init__(*args, **kwargs) self.remember.data = False return LoginForm
def login_form_factory(Form, app)
Return extended login form.
2.888049
2.781996
1.038121
if form.password.data is not None: pwd_ctx = current_app.extensions['security'].pwd_context if pwd_ctx.identify(form.password.data) is None: User.password = hash_password(form.password.data)
def on_model_change(self, form, User, is_created)
Hash password when saving.
3.832567
3.262139
1.174863
if is_created and form.notification.data is True: send_reset_password_instructions(User)
def after_model_change(self, form, User, is_created)
Send password instructions if desired.
8.792593
5.735867
1.532914
try: count = 0 for user_id in ids: user = _datastore.get_user(user_id) if user is None: raise ValueError(_("Cannot find user.")) if _datastore.deactivate_user(user): count += 1 if count > 0: flash(_('User(s) were successfully inactivated.'), 'success') except Exception as exc: if not self.handle_view_exception(exc): raise current_app.logger.exception(str(exc)) # pragma: no cover flash(_('Failed to inactivate users.'), 'error')
def action_inactivate(self, ids)
Inactivate users.
3.000631
2.851774
1.052198
if SessionActivity.is_current(sid_s=model.sid_s): flash('You could not remove your current session', 'error') return delete_session(sid_s=model.sid_s) db.session.commit()
def delete_model(self, model)
Delete a specific session.
8.612946
7.557204
1.1397
is_current = any(SessionActivity.is_current(sid_s=id_) for id_ in ids) if is_current: flash('You could not remove your current session', 'error') return for id_ in ids: delete_session(sid_s=id_) db.session.commit()
def action_delete(self, ids)
Delete selected sessions.
6.176412
5.454924
1.132264
kwargs = dict(email=email, password=password, active='y' if active else '') form = ConfirmRegisterForm(MultiDict(kwargs), csrf_enabled=False) if form.validate(): kwargs['password'] = hash_password(kwargs['password']) kwargs['active'] = active _datastore.create_user(**kwargs) click.secho('User created successfully.', fg='green') kwargs['password'] = '****' click.echo(kwargs) else: raise click.UsageError('Error creating user. %s' % form.errors)
def users_create(email, password, active)
Create a user.
3.706342
3.676259
1.008183
msg = Message() msg.__dict__.update(data) current_app.extensions['mail'].send(msg)
def send_security_email(data)
Celery task to send security email. :param data: Contains the email data.
4.97227
6.058494
0.820711
sessions = SessionActivity.query_by_expired().all() for session in sessions: delete_session(sid_s=session.sid_s) db.session.commit()
def clean_session_table()
Automatically clean session table. To enable a periodically clean of the session table, you should configure the task as a celery periodic task. .. code-block:: python from datetime import timedelta CELERYBEAT_SCHEDULE = { 'session_cleaner': { 'task': 'invenio_accounts.tasks.clean_session_table', 'schedule': timedelta(days=1), }, } See `Invenio-Celery <https://invenio-celery.readthedocs.io/>`_ documentation for further details.
7.149331
9.046233
0.79031
op.create_table( 'accounts_role', sa.Column('id', sa.Integer(), nullable=False), sa.Column('name', sa.String(length=80), nullable=True), sa.Column('description', sa.String(length=255), nullable=True), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('name') ) op.create_table( 'accounts_user', sa.Column('id', sa.Integer(), nullable=False), sa.Column('email', sa.String(length=255), nullable=True), sa.Column('password', sa.String(length=255), nullable=True), sa.Column('active', sa.Boolean(name='active'), nullable=True), sa.Column('confirmed_at', sa.DateTime(), nullable=True), sa.Column('last_login_at', sa.DateTime(), nullable=True), sa.Column('current_login_at', sa.DateTime(), nullable=True), sa.Column('last_login_ip', sqlalchemy_utils.types.ip_address.IPAddressType(), nullable=True), sa.Column('current_login_ip', sqlalchemy_utils.types.ip_address.IPAddressType(), nullable=True), sa.Column('login_count', sa.Integer(), nullable=True), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('email') ) op.create_table( 'accounts_user_session_activity', sa.Column('created', sa.DateTime(), nullable=False), sa.Column('updated', sa.DateTime(), nullable=False), sa.Column('sid_s', sa.String(length=255), nullable=False), sa.Column('user_id', sa.Integer(), nullable=True), sa.ForeignKeyConstraint( ['user_id'], [u'accounts_user.id'], name='fk_accounts_session_activity_user_id', ), sa.PrimaryKeyConstraint('sid_s') ) op.create_table( 'accounts_userrole', sa.Column('user_id', sa.Integer(), nullable=True), sa.Column('role_id', sa.Integer(), nullable=True), sa.ForeignKeyConstraint( ['role_id'], ['accounts_role.id'], name='fk_accounts_userrole_role_id', ), sa.ForeignKeyConstraint( ['user_id'], ['accounts_user.id'], name='fk_accounts_userrole_user_id', ), ) with op.batch_alter_table('transaction') as batch_op: batch_op.add_column(sa.Column( 'user_id', sa.Integer(), sa.ForeignKey('accounts_user.id'), nullable=True, )) batch_op.create_index( op.f('ix_transaction_user_id'), ['user_id'], unique=False )
def upgrade()
Upgrade database.
1.398619
1.39508
1.002536
ctx = op.get_context() insp = Inspector.from_engine(ctx.connection.engine) for fk in insp.get_foreign_keys('transaction'): if fk['referred_table'] == 'accounts_user': op.drop_constraint( op.f(fk['name']), 'transaction', type_='foreignkey' ) with op.batch_alter_table('transaction') as batch_op: batch_op.drop_index(op.f('ix_transaction_user_id')) batch_op.drop_column('user_id') op.drop_table('accounts_userrole') op.drop_table('accounts_user_session_activity') op.drop_table('accounts_user') op.drop_table('accounts_role')
def downgrade()
Downgrade database.
2.409164
2.336812
1.030962
# Create an ID uid = str(uuid.uuid4()) # The time in UTC now now = datetime.utcnow() # Build the token data token_data = { 'exp': now + current_app.config['ACCOUNTS_JWT_EXPIRATION_DELTA'], 'sub': user_id or current_user.get_id(), 'jti': uid, } # Add any additional data to the token if additional_data is not None: token_data.update(additional_data) # Encode the token and send it back encoded_token = encode( token_data, current_app.config['ACCOUNTS_JWT_SECRET_KEY'], current_app.config['ACCOUNTS_JWT_ALOGORITHM'] ).decode('utf-8') return encoded_token
def jwt_create_token(user_id=None, additional_data=None)
Encode the JWT token. :param int user_id: Addition of user_id. :param dict additional_data: Additional information for the token. :returns: The encoded token. :rtype: str .. note:: Definition of the JWT claims: * exp: ((Expiration Time) expiration time of the JWT. * sub: (subject) the principal that is the subject of the JWT. * jti: (JWT ID) UID for the JWT.
2.621327
2.739392
0.956901
try: return decode( token, current_app.config['ACCOUNTS_JWT_SECRET_KEY'], algorithms=[ current_app.config['ACCOUNTS_JWT_ALOGORITHM'] ] ) except DecodeError as exc: raise_from(JWTDecodeError(), exc) except ExpiredSignatureError as exc: raise_from(JWTExpiredToken(), exc)
def jwt_decode_token(token)
Decode the JWT token. :param str token: Additional information for the token. :returns: The token data. :rtype: dict
3.115422
3.550906
0.87736
session_id = getattr(session, 'sid_s', None) if session_id: response.headers['X-Session-ID'] = session_id if current_user.is_authenticated: response.headers['X-User-ID'] = current_user.get_id()
def set_session_info(app, response, **extra)
Add X-Session-ID and X-User-ID to http response.
2.7305
2.263755
1.206181
sessions = SessionActivity.query_by_user( user_id=current_user.get_id() ).all() master_session = None for index, session in enumerate(sessions): if SessionActivity.is_current(session.sid_s): master_session = session del sessions[index] return render_template( current_app.config['ACCOUNTS_SETTINGS_SECURITY_TEMPLATE'], formclass=RevokeForm, sessions=[master_session] + sessions, is_current=SessionActivity.is_current )
def security()
View for security page.
5.15315
5.153803
0.999873
form = RevokeForm(request.form) if not form.validate_on_submit(): abort(403) sid_s = form.data['sid_s'] if SessionActivity.query.filter_by( user_id=current_user.get_id(), sid_s=sid_s).count() == 1: delete_session(sid_s=sid_s) db.session.commit() if not SessionActivity.is_current(sid_s=sid_s): # if it's the same session doesn't show the message, otherwise # the session will be still open without the database record flash('Session {0} successfully removed.'.format(sid_s), 'success') else: flash('Unable to remove the session {0}.'.format(sid_s), 'error') return redirect(url_for('invenio_accounts.security'))
def revoke_session()
Revoke a session.
4.002703
3.922279
1.020504
with op.batch_alter_table('accounts_user_session_activity') as batch_op: batch_op.add_column(sa.Column('browser', sa.String(80), nullable=True)) batch_op.add_column( sa.Column('browser_version', sa.String(30), nullable=True)) batch_op.add_column( sa.Column('country', sa.String(3), nullable=True)) batch_op.add_column( sa.Column('device', sa.String(80), nullable=True)) batch_op.add_column( sa.Column('ip', sa.String(80), nullable=True)) batch_op.add_column( sa.Column('os', sa.String(80), nullable=True))
def upgrade()
Upgrade database.
1.65955
1.671211
0.993022
lifetime = current_app.permanent_session_lifetime expired_moment = datetime.utcnow() - lifetime return cls.query.filter(cls.created < expired_moment)
def query_by_expired(cls)
Query to select all expired sessions.
4.995847
4.288373
1.164975
if utils.get_hmac != get_hmac: utils.get_hmac = get_hmac if utils.hash_password != hash_password: utils.hash_password = hash_password changeable.hash_password = hash_password recoverable.hash_password = hash_password registerable.hash_password = hash_password # Disable remember me cookie generation as it does not work with # session activity tracking (a remember me token will bypass revoking # of a session). def patch_do_nothing(*args, **kwargs): pass LoginManager._set_cookie = patch_do_nothing # Disable loading user from headers and object because we want to be # sure we can load user only through the login form. def patch_reload_anonym(self, *args, **kwargs): self.reload_user() LoginManager._load_from_header = patch_reload_anonym LoginManager._load_from_request = patch_reload_anonym
def monkey_patch_flask_security()
Monkey-patch Flask-Security.
6.403735
6.255756
1.023655
self.init_config(app) # Monkey-patch Flask-Security InvenioAccounts.monkey_patch_flask_security() # Create user datastore if not self.datastore: self.datastore = SessionAwareSQLAlchemyUserDatastore( db, User, Role) if app.config['ACCOUNTS_SESSION_ACTIVITY_ENABLED']: self._enable_session_activity(app=app) # Initialize extension. _register_blueprint = app.config.get('ACCOUNTS_REGISTER_BLUEPRINT') if _register_blueprint is not None: register_blueprint = _register_blueprint state = self.security.init_app(app, datastore=self.datastore, register_blueprint=register_blueprint) self.register_anonymous_identity_loader(state) app.extensions['security'].register_form = register_form_factory( app.extensions['security'].register_form, app) app.extensions['security'].confirm_register_form = \ confirm_register_form_factory( app.extensions['security'].confirm_register_form, app ) app.extensions['security'].login_form = login_form_factory( app.extensions['security'].login_form, app) if app.config['ACCOUNTS_USE_CELERY']: from invenio_accounts.tasks import send_security_email @state.send_mail_task def delay_security_email(msg): send_security_email.delay(msg.__dict__) # Register context processor if app.config['ACCOUNTS_JWT_DOM_TOKEN']: from invenio_accounts.context_processors.jwt import \ jwt_proccessor app.context_processor(jwt_proccessor) # Register signal receiver if app.config.get('ACCOUNTS_USERINFO_HEADERS'): request_finished.connect(set_session_info, app) app.extensions['invenio-accounts'] = self
def init_app(self, app, sessionstore=None, register_blueprint=True)
Flask application initialization. The following actions are executed: #. Initialize the configuration. #. Monkey-patch Flask-Security. #. Create the user datastore. #. Create the sessionstore. #. Initialize the extension, the forms to register users and confirms their emails, the CLI and, if ``ACCOUNTS_USE_CELERY`` is ``True``, register a celery task to send emails. :param app: The Flask application. :param sessionstore: store for sessions. Passed to ``flask-kvsession``. If ``None`` then Redis is configured. (Default: ``None``) :param register_blueprint: If ``True``, the application registers the blueprints. (Default: ``True``)
3.419684
3.287631
1.040167