INSTRUCTION
stringlengths
1
8.43k
RESPONSE
stringlengths
75
104k
The list of compiler extensions. Example::
def compiler_extensions(self):
    """Extensions of this asset that have a registered compiler.

    Example::

        >>> attrs = AssetAttributes(environment, 'js/lib/external.min.js.coffee')
        >>> attrs.compiler_extensions
        ['.coffee']
    """
    try:
        start = self.extensions.index(self.format_extension)
    except ValueError:
        start = 0
    registry = self.environment.compilers
    return [ext for ext in self.extensions[start:] if registry.get(ext)]
The list of compilers used to build asset.
def compilers(self):
    """Compilers participating in this asset's build, one per compiler extension."""
    registry = self.environment.compilers
    return [registry.get(ext) for ext in self.compiler_extensions]
The list of all processors ( preprocessors compilers postprocessors ) used to build asset.
def processors(self):
    """All processors (preprocessors, compilers in reverse order,
    postprocessors) used to build the asset.
    """
    ordered = list(self.preprocessors)
    ordered.extend(reversed(self.compilers))
    ordered.extend(self.postprocessors)
    return ordered
MIME type of the asset.
def mimetype(self):
    """MIME type of the asset: the one registered for the format extension,
    else the one implied by compilers, else ``application/octet-stream``.
    """
    registered = self.environment.mimetypes.get(self.format_extension)
    if registered:
        return registered
    return self.compiler_mimetype or 'application/octet-stream'
Implicit MIME type of the asset by its compilers.
def compiler_mimetype(self):
    """MIME type implied by this asset's compilers (last compiler wins),
    or ``None`` if no compiler declares a result MIME type.
    """
    candidates = (c.result_mimetype for c in reversed(self.compilers)
                  if c.result_mimetype)
    return next(candidates, None)
Implicit format extension on the asset by its compilers.
def compiler_format_extension(self):
    """Extension registered for the compilers' implicit MIME type, if any."""
    for ext, mime in self.environment.mimetypes.items():
        if mime == self.compiler_mimetype:
            return ext
    return None
Register passed processor for passed mimetype.
def register(self, mimetype, processor):
    """Add ``processor`` under ``mimetype``, ignoring exact duplicates."""
    handlers = self.setdefault(mimetype, [])
    if processor not in handlers:
        handlers.append(processor)
Remove passed processor for passed mimetype. If processor for this MIME type does not found in the registry nothing happens.
def unregister(self, mimetype, processor):
    """Remove ``processor`` from ``mimetype``'s handlers.

    A no-op when the MIME type or the processor is not registered.
    """
    handlers = self.get(mimetype, ())
    if processor in handlers:
        handlers.remove(processor)
Register :class:`~gears.processors.DirectivesProcessor` as a preprocessor for text/css and application/javascript MIME types.
def register_defaults(self):
    """Register :class:`~gears.processors.DirectivesProcessor` as a
    preprocessor for the `text/css` and `application/javascript` MIME types.
    """
    # One fresh handler per MIME type, matching the original behavior of
    # calling as_handler() once per registration.
    for mimetype in ('text/css', 'application/javascript'):
        self.register(mimetype, DirectivesProcessor.as_handler())
The registry for supported suffixes of assets. It is built from MIME types and compilers registries and is cached at the first call. See: class: ~gears. environment. Suffixes for more information.
def suffixes(self):
    """The registry of supported asset suffixes, built from the MIME type
    and compiler registries and cached on first access.

    See :class:`~gears.environment.Suffixes` for details.
    """
    try:
        return self._suffixes
    except AttributeError:
        pass
    registry = Suffixes()
    for extension, mime in self.mimetypes.items():
        registry.register(extension, root=True, mimetype=mime)
    for extension, compiler in self.compilers.items():
        registry.register(extension, to=compiler.result_mimetype)
    self._suffixes = registry
    return registry
The list of search paths. It is built from registered finders which has paths property. Can be useful for compilers to resolve internal dependencies.
def paths(self):
    """Search paths collected from finders that expose a ``paths``
    attribute; cached after the first call. Useful for compilers that
    resolve internal dependencies.
    """
    if not hasattr(self, '_paths'):
        self._paths = [p
                       for finder in self.finders if hasattr(finder, 'paths')
                       for p in finder.paths]
    return self._paths
Register default compilers preprocessors and MIME types.
def register_defaults(self):
    """Register default compilers, preprocessors and MIME types."""
    for registry in (self.mimetypes, self.preprocessors, self.postprocessors):
        registry.register_defaults()
Allow Gears plugins to inject themselves to the environment. For example if your plugin s package contains such entry_points definition in setup. py gears_plugin. register function will be called with current environment during register_entry_points call::
def register_entry_points(self, exclude=()):
    """Let Gears plugins inject themselves into this environment.

    Plugins declare ``register = package:function`` under the ``gears``
    entry-point group in their ``setup.py``; each such function is loaded
    and called with the current environment, e.g.::

        def register(environment):
            assets_dir = os.path.join(os.path.dirname(__file__), 'assets')
            environment.register(FileSystemFinder([assets_dir]))

    Packages listed in ``exclude`` are skipped::

        environment.register_entry_points(exclude=['plugin'])
    """
    for entry_point in iter_entry_points('gears', 'register'):
        if entry_point.module_name in exclude:
            continue
        entry_point.load()(self)
Find files using: attr: finders registry. The item parameter can be an instance of: class: ~gears. asset_attributes. AssetAttributes class a path to the asset or a logical path to the asset. If item is a logical path logical parameter must be set to True.
def find(self, item, logical=False):
    """Find files using :attr:`finders` registry.

    The ``item`` parameter can be an instance of
    :class:`~gears.asset_attributes.AssetAttributes` class, a path to the
    asset or a logical path to the asset. If ``item`` is a logical path,
    `logical` parameter must be set to ``True``.

    Returns a tuple with :class:`~gears.asset_attributes.AssetAttributes`
    instance for found file path as first item, and absolute path to this
    file as second item. If nothing is found,
    :class:`gears.exceptions.FileNotFound` exception is raised.
    """
    if isinstance(item, AssetAttributes):
        # Attributes object: probe each of its candidate search paths in
        # order, recursing with the same `logical` flag.
        for path in item.search_paths:
            try:
                return self.find(path, logical)
            except FileNotFound:
                continue
        raise FileNotFound(item.path)
    if logical:
        # Logical path: look up the suffixes registered for the asset's
        # MIME type and probe each suffixed candidate.
        asset_attributes = AssetAttributes(self, item)
        suffixes = self.suffixes.find(asset_attributes.mimetype)
        if not suffixes:
            # No suffixes for this MIME type: fall back to a plain lookup.
            return self.find(item)
        path = asset_attributes.path_without_suffix
        for suffix in suffixes:
            try:
                return self.find(path + suffix)
            except FileNotFound:
                continue
    else:
        # Plain path: ask each registered finder until one resolves it.
        for finder in self.finders:
            try:
                absolute_path = finder.find(item)
            except FileNotFound:
                continue
            return AssetAttributes(self, item), absolute_path
    # Reached when all suffixed candidates or all finders failed.
    raise FileNotFound(item)
Yield two - tuples for all files found in the directory given by path parameter. Result can be filtered by the second parameter mimetype that must be a MIME type of assets compiled source code. Each tuple has: class: ~gears. asset_attributes. AssetAttributes instance for found file path as first item and absolute path to this file as second item.
def list(self, path, mimetype=None):
    """Yield two-tuples for all files found in the directory given by
    ``path`` parameter. Result can be filtered by the second parameter,
    ``mimetype``, that must be a MIME type of assets compiled source code.
    Each tuple has :class:`~gears.asset_attributes.AssetAttributes`
    instance for found file path as first item, and absolute path to this
    file as second item.

    Usage example::

        # Yield all files from 'js/templates' directory.
        environment.list('js/templates/*')

        # Yield only files that are in 'js/templates' directory and have
        # 'application/javascript' MIME type of compiled source code.
        environment.list('js/templates/*', mimetype='application/javascript')
    """
    basename_pattern = os.path.basename(path)
    if path.endswith('**'):
        # Recursive glob: use the pattern as-is.
        paths = [path]
    else:
        paths = AssetAttributes(self, path).search_paths
    # Make sure every candidate pattern ends with a wildcard.
    paths = map(lambda p: p if p.endswith('*') else p + '*', paths)
    # Deduplicate hits by logical path (first occurrence wins).
    results = unique(self._list_paths(paths), lambda x: x[0])
    for logical_path, absolute_path in results:
        asset_attributes = AssetAttributes(self, logical_path)
        if mimetype is not None and asset_attributes.mimetype != mimetype:
            continue
        basename = os.path.basename(asset_attributes.path_without_suffix)
        # 'index' assets are yielded even when they do not match the pattern.
        if not fnmatch(basename, basename_pattern) and basename != 'index':
            continue
        yield asset_attributes, absolute_path
Save handled public assets to: attr: root directory.
def save(self):
    """Save handled public assets to :attr:`root` directory.

    Also records fingerprinted paths in the manifest when fingerprinting
    is enabled, and dumps the manifest at the end.
    """
    for asset_attributes, absolute_path in self.list('**'):
        logical_path = os.path.normpath(asset_attributes.logical_path)
        # Build in check mode first to decide whether the asset is public
        # without doing the full (expensive) build.
        check_asset = build_asset(self, logical_path, check=True)
        if check_asset.is_public:
            asset = build_asset(self, logical_path)
            source = bytes(asset)
            self.save_file(logical_path, source, asset.gzippable)
            if self.fingerprinting:
                # Store a fingerprinted copy and map it in the manifest.
                self.save_file(asset.hexdigest_path, source, asset.gzippable)
                self.manifest.files[logical_path] = asset.hexdigest_path
    # NOTE(review): collapsed source is ambiguous here; dump() is placed
    # after the loop so the manifest is written once — confirm upstream.
    self.manifest.dump()
+ ----------------------------------------------------------------------- + | + --- splitter ------------------------------------------------------ + | | | + -- list widget -------------- + + - IdaSettingsView ------------- + | | | | | | | | | | | | | - plugin name | | | | | | | | - plugin name | | | | | | | | - plugin name | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | + ---------------------------- + + ------------------------------- + | | | + ------------------------------------------------------------------- + | + ----------------------------------------------------------------------- +
def PopulateForm(self):
    """Build the form UI: a horizontal splitter that holds a plugin-name
    list widget (the right-hand settings view is added elsewhere)::

        +-----------------------------------------------------------+
        | +--- splitter ------------------------------------------+ |
        | | +-- list widget ------+  +- IdaSettingsView --------+ | |
        | | |  - plugin name      |  |                          | | |
        | | |  - plugin name      |  |                          | | |
        | | +---------------------+  +--------------------------+ | |
        | +-------------------------------------------------------+ |
        +-----------------------------------------------------------+
    """
    hbox = QtWidgets.QHBoxLayout(self.parent)
    self._splitter = QtWidgets.QSplitter(QtCore.Qt.Horizontal)
    self._plugin_list = QtWidgets.QListWidget()
    # Collect plugin names from every settings scope, deduplicated.
    # NOTE(review): the `scope` label is unused in this loop body.
    plugin_names = set([])
    for scope, fn in (("idb", ida_settings.IDASettings.get_idb_plugin_names),
                      ("directory", ida_settings.IDASettings.get_directory_plugin_names),
                      ("user", ida_settings.IDASettings.get_user_plugin_names),
                      ("system", ida_settings.IDASettings.get_system_plugin_names)):
        for plugin_name in fn():
            plugin_names.add(plugin_name)
    for plugin_name in plugin_names:
        self._plugin_list.addItem(plugin_name)
    self._splitter.addWidget(self._plugin_list)
    hbox.addWidget(self._splitter)
    self.parent.setLayout(hbox)
    # Refresh the settings view whenever a different plugin is selected.
    self._plugin_list.currentItemChanged.connect(self._handle_plugin_changed)
Converts the class into an actual handler function that can be used when registering different types of processors in: class: ~gears. environment. Environment class instance.
def as_handler(cls, **initkwargs):
    """Convert the class into a plain handler function suitable for
    registration in :class:`~gears.environment.Environment` registries.

    ``initkwargs`` are forwarded to the class constructor on every call.
    """
    @wraps(cls, updated=())
    def handler(asset, *args, **kwargs):
        instance = handler.handler_class(**initkwargs)
        return instance(asset, *args, **kwargs)
    handler.handler_class = cls
    handler.supports_check_mode = cls.supports_check_mode
    return handler
Runs: attr: executable with input as stdin.: class: AssetHandlerError exception is raised if execution is failed otherwise stdout is returned.
def run(self, input):
    """Feed ``input`` to :attr:`executable` via stdin and return its stdout.

    Raises :class:`AssetHandlerError` with the process's stderr when the
    process exits with a non-zero status.
    """
    process = self.get_process()
    stdout, stderr = process.communicate(input=input.encode('utf-8'))
    if process.returncode:
        raise AssetHandlerError(stderr)
    return stdout.decode('utf-8')
Returns: class: subprocess. Popen instance with args from: meth: get_args result and piped stdin stdout and stderr.
def get_process(self):
    """Spawn the command from :meth:`get_args` as a subprocess with
    stdin, stdout and stderr all piped, and return the Popen object.
    """
    args = self.get_args()
    return Popen(args, stdin=PIPE, stdout=PIPE, stderr=PIPE)
This nasty piece of code is here to force the loading of IDA's Qt bindings. Without it, Python attempts to load PySide from the site-packages directory, and fails, as it does not play nicely with IDA.
def import_qtcore():
    """
    This nasty piece of code is here to force the loading of IDA's
    Qt bindings. Without it, Python attempts to load PySide from the
    site-packages directory, and failing, as it does not play nicely
    with IDA.

    via: github.com/tmr232/Cute

    Returns the QtCore module appropriate for the current environment
    (IDA's bundled bindings when running under IDA, otherwise whichever
    of PyQt5/PySide is installed). Raises ImportError when none is found.
    """
    has_ida = False
    try:
        # if we're running under IDA,
        # then we'll use IDA's Qt bindings
        import idaapi
        has_ida = True
    except ImportError:
        # not running under IDA,
        # so use default Qt installation
        has_ida = False
    if has_ida:
        # Temporarily put IDA's own python directory first on sys.path so
        # the Qt bindings bundled with IDA shadow any system-wide install.
        old_path = sys.path[:]
        try:
            ida_python_path = os.path.dirname(idaapi.__file__)
            sys.path.insert(0, ida_python_path)
            # IDA 6.9+ ships PyQt5; older versions ship PySide.
            if idaapi.IDA_SDK_VERSION >= 690:
                from PyQt5 import QtCore
                return QtCore
            else:
                from PySide import QtCore
                return QtCore
        finally:
            # Always restore the original import path.
            sys.path = old_path
    else:
        # Outside IDA: prefer PyQt5, fall back to PySide.
        try:
            from PyQt5 import QtCore
            return QtCore
        except ImportError:
            pass
        try:
            from PySide import QtCore
            return QtCore
        except ImportError:
            pass
    raise ImportError("No module named PySide or PyQt")
Get the netnode used to store settings metadata in the current IDB. Note that this implicitly uses the open IDB via the idc interface.
def get_meta_netnode():
    """Return the netnode used to store settings metadata in the current IDB.

    Note: implicitly uses the open IDB via the idc interface.
    """
    name = "$ %s.%s" % (IDA_SETTINGS_ORGANIZATION, IDA_SETTINGS_APPLICATION)
    return netnode.Netnode(name)
Add the given plugin name to the list of plugin names registered in the current IDB. Note that this implicitly uses the open IDB via the idc interface.
def add_netnode_plugin_name(plugin_name):
    """Record ``plugin_name`` in the plugin names stored in the current IDB.

    A no-op when the name is already registered. Note: implicitly uses
    the open IDB via the idc interface.
    """
    names = set(get_netnode_plugin_names())
    if plugin_name not in names:
        names.add(plugin_name)
        get_meta_netnode()[PLUGIN_NAMES_KEY] = json.dumps(list(names))
Remove the given plugin name from the list of plugin names registered in the current IDB. Note that this implicitly uses the open IDB via the idc interface.
def del_netnode_plugin_name(plugin_name):
    """Remove the given plugin name from the list of plugin names
    registered in the current IDB.

    A no-op when the name is not registered. Note: implicitly uses the
    open IDB via the idc interface.
    """
    current_names = set(get_netnode_plugin_names())
    if plugin_name not in current_names:
        return
    # Membership was just verified, so remove() cannot raise KeyError here;
    # the previous try/except KeyError guard around it was dead code.
    current_names.remove(plugin_name)
    get_meta_netnode()[PLUGIN_NAMES_KEY] = json.dumps(list(current_names))
Import settings from the given file system path to given settings instance.
def import_settings(settings, config_path):
    """Copy every key from the INI file at ``config_path`` into ``settings``.

    type settings: IDASettingsInterface
    type config_path: str
    """
    source = QtCore.QSettings(config_path, QtCore.QSettings.IniFormat)
    for key in source.allKeys():
        settings[key] = source.value(key)
Export the given settings instance to the given file system path.
def export_settings(settings, config_path):
    """Write every key/value pair of ``settings`` to an INI file at
    ``config_path``.

    type settings: IDASettingsInterface
    type config_path: str
    """
    target = QtCore.QSettings(config_path, QtCore.QSettings.IniFormat)
    for key, value in settings.iteritems():
        target.setValue(key, value)
Fetch the IDASettings instance for the current plugin with directory scope.
def directory(self):
    """Fetch the IDASettings instance for the current plugin with
    directory scope.

    rtype: IDASettingsInterface
    """
    config_dir = self._config_directory
    if config_dir is None:
        # No explicit directory configured: resolving one needs a live IDA.
        ensure_ida_loaded()
    return DirectoryIDASettings(self._plugin_name, directory=config_dir)
Fetch the settings value with the highest precedence for the given key or raise KeyError. Precedence: - IDB scope - directory scope - user scope - system scope
def get_value(self, key):
    """Return the settings value for ``key`` from the highest-precedence
    scope, or raise KeyError.

    Precedence order: IDB, directory, user, system. IDB and directory
    scopes may also be unavailable (EnvironmentError), which is treated
    the same as a missing key.

    type key: basestring
    rtype value: Union[basestring, int, float, List, Dict]
    """
    scope_specs = (('idb', (KeyError, EnvironmentError)),
                   ('directory', (KeyError, EnvironmentError)),
                   ('user', (KeyError,)),
                   ('system', (KeyError,)))
    for scope_name, ignored in scope_specs:
        try:
            # Attribute access stays inside the try: scope properties may
            # themselves raise EnvironmentError.
            return getattr(self, scope_name).get_value(key)
        except ignored:
            pass
    raise KeyError("key not found")
Enumerate the keys found at any scope for the current plugin.
def iterkeys(self):
    """Yield each key found at any scope for the current plugin, once.

    Scope order: idb, directory, user, system. Scopes that cannot be
    read (PermissionError/EnvironmentError) are skipped silently.

    rtype: Generator[str]
    """
    seen = set()
    for scope_name in ('idb', 'directory', 'user', 'system'):
        try:
            for key in getattr(self, scope_name).iterkeys():
                if key in seen:
                    continue
                yield key
                seen.add(key)
        except (PermissionError, EnvironmentError):
            pass
Get the names of all plugins at the directory scope. Provide a config directory path to use this method outside of IDA. As this is a static method you can call the directly on IDASettings:
def get_directory_plugin_names(config_directory=None):
    """Return the names of all plugins registered at directory scope.

    Provide ``config_directory`` to use this outside of IDA. As a static
    method it can be called directly on IDASettings::

        import ida_settings
        print(ida_settings.IDASettings.get_directory_plugin_names("/tmp/ida/1/"))

    type config_directory: str
    rtype: Sequence[str]
    """
    ensure_ida_loaded()
    config_path = get_directory_config_path(directory=config_directory)
    store = QtCore.QSettings(config_path, QtCore.QSettings.IniFormat)
    return store.childGroups()[:]
Returns the response that should be used for any given exception.
def simple_error_handler(exc, *args):
    """Map an exception to the Response that should be returned for it.

    Handles REST framework's `APIException` plus Django's builtin
    `Http404` and `PermissionDenied`. Returns `None` for anything else,
    which causes a 500 error to be raised.
    """
    if isinstance(exc, exceptions.APIException):
        headers = {}
        auth_header = getattr(exc, 'auth_header', None)
        if auth_header:
            headers['WWW-Authenticate'] = auth_header
        wait = getattr(exc, 'wait', None)
        if wait:
            headers['X-Throttle-Wait-Seconds'] = '%d' % wait
        return Response({'error': exc.detail},
                        status=exc.status_code,
                        headers=headers)
    if isinstance(exc, Http404):
        return Response({'error': 'Not found'},
                        status=status.HTTP_404_NOT_FOUND)
    if isinstance(exc, PermissionDenied):
        return Response({'error': 'Permission denied'},
                        status=status.HTTP_403_FORBIDDEN)
    # Note: Unhandled exceptions will raise a 500 error.
    return None
Returns a given table for the given user.
def table(name, auth=None, eager=True):
    """Return the named DynamoDB table, wrapped in a Table, for the given user."""
    credentials = auth or []
    connection = boto.connect_dynamodb(*credentials)
    return Table(table=connection.get_table(name), eager=eager)
Returns a list of tables for the given user.
def tables(auth=None, eager=True):
    """Return all DynamoDB tables for the given user as Table instances."""
    credentials = auth or []
    connection = boto.connect_dynamodb(*credentials)
    return [table(name, auth, eager=eager) for name in connection.list_tables()]
Fetch packages and summary from Crates.io
def fetch_items(self, category, **kwargs):
    """Fetch packages or the summary from Crates.io.

    :param category: the category of items to fetch
    :param kwargs: backend arguments
    :returns: a generator of items
    """
    # 'from_date' is read unconditionally, matching the original contract.
    from_date = kwargs['from_date']
    if category == CATEGORY_CRATES:
        return self.__fetch_crates(from_date)
    return self.__fetch_summary()
Extracts the identifier from an item depending on its type.
def metadata_id(item):
    """Return the unique identifier of ``item`` based on its category:
    the crate id for crates, else the fetch timestamp.
    """
    if Crates.metadata_category(item) == CATEGORY_CRATES:
        return str(item['id'])
    fetched_on = str_to_datetime(item['fetched_on'])
    return str(fetched_on.timestamp())
Extracts the update time from an item.
def metadata_updated_on(item):
    """Return the update time of ``item`` as a UNIX timestamp.

    Read from 'updated_at' for crates, otherwise from 'fetched_on'.

    :param item: item generated by the backend
    :returns: a UNIX timestamp
    """
    if Crates.metadata_category(item) == CATEGORY_CRATES:
        raw = item['updated_at']
    else:
        raw = item['fetched_on']
    return str_to_datetime(raw).timestamp()
Init client
def _init_client(self, from_archive=False):
    """Build and return the CratesClient used by this backend."""
    return CratesClient(self.sleep_time, self.archive, from_archive)
Fetch summary
def __fetch_summary(self):
    """Yield the Crates.io summary, stamped with the fetch time."""
    summary = json.loads(self.client.summary())
    summary['fetched_on'] = str(datetime_utcnow())
    yield summary
Fetch crates
def __fetch_crates(self, from_date):
    """Yield crates updated on or after ``from_date``, each enriched
    with owner, download and version data.
    """
    since = datetime_to_utc(from_date)
    for raw_page in self.client.crates():
        page = json.loads(raw_page)
        for entry in page['crates']:
            if str_to_datetime(entry['updated_at']) < since:
                continue
            crate_id = entry['id']
            crate = self.__fetch_crate_data(crate_id)
            crate['owner_team_data'] = self.__fetch_crate_owner_team(crate_id)
            crate['owner_user_data'] = self.__fetch_crate_owner_user(crate_id)
            crate['version_downloads_data'] = self.__fetch_crate_version_downloads(crate_id)
            crate['versions_data'] = self.__fetch_crate_versions(crate_id)
            yield crate
Get crate team owner
def __fetch_crate_owner_team(self, crate_id):
    """Return the owner-team data for the given crate."""
    raw = self.client.crate_attribute(crate_id, 'owner_team')
    return json.loads(raw)
Get crate user owners
def __fetch_crate_owner_user(self, crate_id):
    """Return the owner-user data for the given crate."""
    raw = self.client.crate_attribute(crate_id, 'owner_user')
    return json.loads(raw)
Get crate versions data
def __fetch_crate_versions(self, crate_id):
    """Return the versions data for the given crate."""
    raw = self.client.crate_attribute(crate_id, "versions")
    return json.loads(raw)
Get crate version downloads
def __fetch_crate_version_downloads(self, crate_id):
    """Return the per-version download data for the given crate."""
    raw = self.client.crate_attribute(crate_id, "downloads")
    return json.loads(raw)
Get crate data
def __fetch_crate_data(self, crate_id):
    """Return the crate payload (the 'crate' object) for ``crate_id``."""
    payload = json.loads(self.client.crate(crate_id))
    return payload['crate']
Get Crates. io summary
def summary(self):
    """Fetch and return the raw Crates.io summary."""
    return self.fetch(urijoin(CRATES_API_URL, CATEGORY_SUMMARY))
Get crates in alphabetical order
def crates(self, from_page=1):
    """Return raw crate pages in alphabetical order, starting at ``from_page``."""
    path = urijoin(CRATES_API_URL, CATEGORY_CRATES)
    return self.__fetch_items(path, from_page)
Get a crate by its ID
def crate(self, crate_id):
    """Fetch and return the raw data of a single crate by its ID."""
    return self.fetch(urijoin(CRATES_API_URL, CATEGORY_CRATES, crate_id))
Get crate attribute
def crate_attribute(self, crate_id, attribute):
    """Fetch and return one raw attribute endpoint of a crate."""
    path = urijoin(CRATES_API_URL, CATEGORY_CRATES, crate_id, attribute)
    return self.fetch(path)
Return the items from Crates. io API using pagination
def __fetch_items(self, path, page=1):
    """Return the items from Crates.io API using pagination.

    Yields each raw page body; stops once the number of crates seen
    reaches the total reported by the API metadata.
    """
    fetch_data = True
    parsed_crates = 0  # crates seen so far across all pages
    total_crates = 0   # total reported by the API ('meta.total')
    while fetch_data:
        logger.debug("Fetching page: %i", page)
        try:
            payload = {'sort': 'alphabetical', 'page': page}
            raw_content = self.fetch(path, payload=payload)
            content = json.loads(raw_content)
            parsed_crates += len(content['crates'])
            if not total_crates:
                # Capture the total once, from the first page.
                total_crates = content['meta']['total']
        except requests.exceptions.HTTPError as e:
            logger.error("HTTP exception raised - %s", e.response.text)
            raise e
        yield raw_content
        page += 1
        if parsed_crates >= total_crates:
            # All reported crates have been yielded.
            fetch_data = False
Return the textual content associated to the Response object
def fetch(self, url, payload=None):
    """Return the body text of the HTTP response for ``url``."""
    return super().fetch(url, payload=payload).text
Fetch questions from the Kitsune url.
def fetch(self, category=CATEGORY_QUESTION, offset=DEFAULT_OFFSET):
    """Fetch questions from the Kitsune url.

    :param category: the category of items to fetch
    :param offset: obtain questions after this offset
    :returns: a generator of questions
    """
    kwargs = {"offset": offset or DEFAULT_OFFSET}
    return super().fetch(category, **kwargs)
Fetch questions from the Kitsune url
def fetch_items(self, category, **kwargs):
    """Fetch questions from the Kitsune url.

    :param category: the category of items to fetch
    :param kwargs: backend arguments
    :returns: a generator of items
    """
    offset = kwargs['offset']
    logger.info("Looking for questions at url '%s' using offset %s",
                self.url, str(offset))
    nquestions = 0  # number of questions processed
    tquestions = 0  # number of questions from API data
    equestions = 0  # number of questions dropped by errors
    # Always get complete pages so the first item is always
    # the first one in the page
    page = int(offset / KitsuneClient.ITEMS_PER_PAGE)
    page_offset = page * KitsuneClient.ITEMS_PER_PAGE
    # drop questions from page before the offset
    drop_questions = offset - page_offset
    current_offset = offset
    questions_page = self.client.get_questions(offset)
    while True:
        try:
            raw_questions = next(questions_page)
        except StopIteration:
            break
        except requests.exceptions.HTTPError as e:
            # Continue with the next page if it is a 500 error
            if e.response.status_code == 500:
                logger.exception(e)
                # NOTE(review): "Loosing" typo kept intact — runtime log
                # string left unchanged.
                logger.error("Problem getting Kitsune questions. "
                             "Loosing %i questions. Going to the next page.",
                             KitsuneClient.ITEMS_PER_PAGE)
                # Skip the failed page entirely and restart the generator
                # at the following page's offset.
                equestions += KitsuneClient.ITEMS_PER_PAGE
                current_offset += KitsuneClient.ITEMS_PER_PAGE
                questions_page = self.client.get_questions(current_offset)
                continue
            else:
                # If it is another error just propagate the exception
                raise e
        try:
            questions_data = json.loads(raw_questions)
            tquestions = questions_data['count']
            questions = questions_data['results']
        except (ValueError, KeyError) as ex:
            logger.error(ex)
            cause = ("Bad JSON format for mozilla_questions: %s" % (raw_questions))
            raise ParseError(cause=cause)
        for question in questions:
            if drop_questions > 0:
                # Remove extra questions due to page base retrieval
                drop_questions -= 1
                continue
            question['offset'] = current_offset
            current_offset += 1
            # Attach all answer pages before yielding the question.
            question['answers_data'] = []
            for raw_answers in self.client.get_question_answers(question['id']):
                answers = json.loads(raw_answers)['results']
                question['answers_data'] += answers
            yield question
            nquestions += 1
            logger.debug("Questions: %i/%i", nquestions + offset, tquestions)
    logger.info("Total number of questions: %i (%i total)", nquestions, tquestions)
    logger.info("Questions with errors dropped: %i", equestions)
Init client
def _init_client(self, from_archive=False):
    """Build and return the KitsuneClient used by this backend."""
    return KitsuneClient(self.url, self.archive, from_archive)
Retrieve questions from older to newer updated starting offset
def get_questions(self, offset=None):
    """Yield raw question pages ordered oldest-updated first, starting
    at the page that contains ``offset``.
    """
    page = KitsuneClient.FIRST_PAGE
    if offset:
        page += int(offset / KitsuneClient.ITEMS_PER_PAGE)
    api_questions_url = urijoin(self.base_url, '/question') + '/'
    while True:
        params = {
            "page": page,
            "ordering": "updated",
        }
        raw_page = self.fetch(api_questions_url, params)
        yield raw_page
        if not json.loads(raw_page)['next']:
            break
        page += 1
Retrieve all answers for a question from older to newer ( updated )
def get_question_answers(self, question_id):
    """Yield raw answer pages for a question, oldest-updated first."""
    page = KitsuneClient.FIRST_PAGE
    api_answers_url = urijoin(self.base_url, '/answer') + '/'
    while True:
        params = {
            "page": page,
            "question": question_id,
            "ordering": "updated",
        }
        raw_page = self.fetch(api_answers_url, params)
        yield raw_page
        if not json.loads(raw_page)['next']:
            break
        page += 1
Return the textual content associated to the Response object
def fetch(self, url, params):
    """Return the response body text for ``url`` with ``params``."""
    logger.debug("Kitsune client calls API: %s params: %s", url, str(params))
    return super().fetch(url, payload=params).text
Fetch items from the ReMo url.
def fetch(self, category=CATEGORY_EVENT, offset=REMO_DEFAULT_OFFSET):
    """Fetch items of ``category`` from the ReMo url, starting at ``offset``.

    :param category: the category of items to fetch
    :param offset: obtain items after offset
    :returns: a generator of items
    """
    kwargs = {"offset": offset or REMO_DEFAULT_OFFSET}
    return super().fetch(category, **kwargs)
Fetch items
def fetch_items(self, category, **kwargs):
    """Fetch items.

    :param category: the category of items to fetch
    :param kwargs: backend arguments
    :returns: a generator of items
    """
    offset = kwargs['offset']
    logger.info("Looking for events at url '%s' of %s category and %i offset",
                self.url, category, offset)
    nitems = 0  # number of items processed
    titems = 0  # number of items from API data
    # Always get complete pages so the first item is always
    # the first one in the page
    page = int(offset / ReMoClient.ITEMS_PER_PAGE)
    page_offset = page * ReMoClient.ITEMS_PER_PAGE
    # drop items from page before the offset
    drop_items = offset - page_offset
    logger.debug("%i items dropped to get %i offset starting in page %i (%i page offset)",
                 drop_items, offset, page, page_offset)
    current_offset = offset
    for raw_items in self.client.get_items(category, offset):
        items_data = json.loads(raw_items)
        titems = items_data['count']
        logger.info("Pending items to retrieve: %i, %i current offset",
                    titems - current_offset, current_offset)
        items = items_data['results']
        for item in items:
            if drop_items > 0:
                # Remove extra items due to page base retrieval
                drop_items -= 1
                continue
            # List entries only carry a summary; fetch the full details.
            raw_item_details = self.client.fetch(item['_url'])
            item_details = json.loads(raw_item_details)
            item_details['offset'] = current_offset
            current_offset += 1
            yield item_details
            nitems += 1
    logger.info("Total number of events: %i (%i total, %i offset)",
                nitems, titems, offset)
Extracts the update time from a ReMo item.
def metadata_updated_on(item):
    """Extracts the update time from a ReMo item.

    The timestamp is taken from the first field present among 'end'
    (events), 'date_joined_program' (users) and 'report_date'
    (activities), converted to the perceval float format.

    :param item: item generated by the backend

    :returns: a UNIX timestamp
    """
    # Check the candidate fields in priority order; the first match wins.
    for field in ('end', 'date_joined_program', 'report_date'):
        if field in item:
            return float(str_to_datetime(item[field]).timestamp())

    raise ValueError("Can't find updated field for item " + str(item))
Extracts the category from a ReMo item.
def metadata_category(item):
    """Extracts the category from a ReMo item.

    This backend generates items types 'event', 'activity' or 'user'.
    To guess the type of item, the code will look for unique fields.
    """
    # Each category exposes a field the others never have.
    if 'estimated_attendance' in item:
        return CATEGORY_EVENT
    if 'activity' in item:
        return CATEGORY_ACTIVITY
    if 'first_name' in item:
        return CATEGORY_USER
    raise TypeError("Could not define the category of item " + str(item))
Init client
def _init_client(self, from_archive=False):
    """Instantiate the ReMo API client for this backend."""
    client = ReMoClient(self.url, self.archive, from_archive)
    return client
Retrieve all items for category using pagination
def get_items(self, category=CATEGORY_EVENT, offset=REMO_DEFAULT_OFFSET):
    """Retrieve all items for category using pagination.

    :param category: category of items to retrieve
    :param offset: item offset used to compute the starting page

    :returns: a generator of raw JSON page payloads
    """

    more = True  # There are more items to be processed
    next_uri = None  # URI for the next items page query

    # Translate the item offset into the API page it falls in.
    page = ReMoClient.FIRST_PAGE
    page += int(offset / ReMoClient.ITEMS_PER_PAGE)

    # Pick the endpoint matching the requested category.
    if category == CATEGORY_EVENT:
        api = self.api_events_url
    elif category == CATEGORY_ACTIVITY:
        api = self.api_activities_url
    elif category == CATEGORY_USER:
        api = self.api_users_url
    else:
        raise ValueError(category + ' not supported in ReMo')

    while more:
        params = {
            "page": page,
            "orderby": "ASC"
        }
        logger.debug("ReMo client calls APIv2: %s params: %s",
                     api, str(params))

        raw_items = self.fetch(api, payload=params)
        yield raw_items

        items_data = json.loads(raw_items)
        # 'next' is None on the last page, which terminates the loop.
        next_uri = items_data['next']

        if not next_uri:
            more = False
        else:
            # https://reps.mozilla.org/remo/api/remo/v1/events/?orderby=ASC&page=269
            # Extract the next page number from the 'next' URI query.
            # NOTE(review): parse_qs yields strings, so 'page' becomes a
            # string after the first iteration — harmless in a query param.
            parsed_uri = urllib.parse.urlparse(next_uri)
            parsed_params = urllib.parse.parse_qs(parsed_uri.query)
            page = parsed_params['page'][0]
The buffer list this instance operates on.
def buffer_list(self):
    """
    The buffer list this instance operates on.

    Only available in mode != AIOBLOCK_MODE_POLL.
    Changes on a submitted transfer are not fully applied until its next
    submission: kernel will still be using original buffer list.
    """
    # Poll-mode blocks carry no buffers; behave as a missing attribute.
    if self._iocb.aio_lio_opcode != libaio.IO_CMD_POLL:
        return self._buffer_list
    raise AttributeError
IO priority for this instance.
def io_priority(self):
    """
    IO priority for this instance, or None when no explicit priority
    flag is set on the control block.
    """
    if self._iocb.u.c.flags & libaio.IOCB_FLAG_IOPRIO:
        return self._iocb.aio_reqprio
    return None
Cancels all pending IO blocks. Waits until all non - cancellable IO blocks finish. De - initialises AIO context.
def close(self):
    """
    Cancels all pending IO blocks.
    Waits until all non-cancellable IO blocks finish.
    De-initialises AIO context.
    """
    if self._ctx is None:
        return
    # Note: same as io_destroy
    self._io_queue_release(self._ctx)
    # Drop the instance attribute; presumably a class-level default
    # makes later accesses see None — TODO confirm.
    del self._ctx
Submits transfers.
def submit(self, block_list): """ Submits transfers. block_list (list of AIOBlock) The IO blocks to hand off to kernel. Returns the number of successfully submitted blocks. """ # io_submit ioctl will only return an error for issues with the first # transfer block. If there are issues with a later block, it will stop # submission and return the number of submitted blocks. So it is safe # to only update self._submitted once io_submit returned. submitted_count = libaio.io_submit( self._ctx, len(block_list), (libaio.iocb_p * len(block_list))(*[ # pylint: disable=protected-access pointer(x._iocb) # pylint: enable=protected-access for x in block_list ]), ) submitted = self._submitted for block in block_list[:submitted_count]: # pylint: disable=protected-access submitted[addressof(block._iocb)] = (block, block._getSubmissionState()) # pylint: enable=protected-access return submitted_count
Cancel an IO block.
def cancel(self, block):
    """
    Cancel an IO block.

    block (AIOBlock)
        The IO block to cancel.

    Returns cancelled block's event data (see getEvents), or None if the
    kernel returned EINPROGRESS. In the latter case, event completion will
    happen on a later getEvents call.
    """
    event = libaio.io_event()
    try:
        # pylint: disable=protected-access
        libaio.io_cancel(self._ctx, byref(block._iocb), byref(event))
        # pylint: enable=protected-access
    except OSError as exc:
        # EINPROGRESS means the kernel will deliver the completion event
        # later through io_getevents instead of synchronously here.
        if exc.errno == errno.EINPROGRESS:
            return None
        raise
    return self._eventToPython(event)
Cancel all submitted IO blocks.
def cancelAll(self):
    """
    Cancel all submitted IO blocks.

    Blocks until all submitted transfers have been finalised.
    Submitting more transfers or processing completion events while this
    method is running produces undefined behaviour.
    Returns the list of values returned by individual cancellations.
    See "cancel" documentation.
    """
    cancel = self.cancel
    result = []
    # NOTE(review): dict.itervalues is the Python 2 API — on Python 3
    # this raises AttributeError; confirm the interpreter versions this
    # module still targets.
    for block, _ in self._submitted.itervalues():
        try:
            result.append(cancel(block))
        except OSError as exc:
            # EINVAL should mean we requested to cancel a not-in-flight
            # transfer - maybe it was just completed and we just did
            # not process its completion event yet.
            if exc.errno != errno.EINVAL:
                raise
    return result
Returns a list of event data from submitted IO blocks.
def getEvents(self, min_nr=1, nr=None, timeout=None):
    """
    Returns a list of event data from submitted IO blocks.

    min_nr (int, None)
        Minimum number of events to collect before returning.
        If None, waits for all currently submitted events.
    nr (int, None)
        Maximum number of events to return.
        If None, set to maxevents given at construction or to the number
        of currently submitted events, whichever is larger.
    timeout (float, None):
        Time to wait for events.
        If None, become blocking.

    Returns a list of 3-tuples, containing:
    - completed AIOBlock instance
    - res, file-object-type-dependent value
    - res2, another file-object-type-dependent value
    """
    if min_nr is None:
        min_nr = len(self._submitted)
    if nr is None:
        nr = max(len(self._submitted), self._maxevents)
    if timeout is None:
        timeoutp = None
    else:
        # Split the float timeout into whole seconds plus nanoseconds
        # for the kernel timespec structure.
        sec = int(timeout)
        timeout = libaio.timespec(sec, int((timeout - sec) * 1e9))
        timeoutp = byref(timeout)
    event_buffer = (libaio.io_event * nr)()
    actual_nr = libaio.io_getevents(
        self._ctx,
        min_nr,
        nr,
        event_buffer,
        timeoutp,
    )
    # NOTE(review): xrange is the Python 2 spelling — presumably aliased
    # for compatibility elsewhere in this module; confirm.
    return [
        self._eventToPython(event_buffer[x])
        for x in xrange(actual_nr)
    ]
Fetch events from the MozillaClub URL.
def fetch(self, category=CATEGORY_EVENT):
    """Fetch events from the MozillaClub URL.

    The method retrieves, from a MozillaClub URL, the events.
    The data is a Google spreadsheet retrieved using the feed API REST.

    :param category: the category of items to fetch

    :returns: a generator of events
    """
    return super().fetch(category)
Fetch events
def fetch_items(self, category, **kwargs):
    """Fetch events from the spreadsheet feed.

    :param category: the category of items to fetch
    :param kwargs: backend arguments

    :returns: a generator of items
    """
    logger.info("Looking for events at url '%s'", self.url)

    parser = MozillaClubParser(self.client.get_cells())
    nevents = 0  # number of events processed
    for event in parser.parse():
        yield event
        nevents += 1

    logger.info("Total number of events: %i", nevents)
Init client
def _init_client(self, from_archive=False):
    """Instantiate the MozillaClub spreadsheet client."""
    client = MozillaClubClient(self.url, self.archive, from_archive)
    return client
Retrieve all cells from the spreadsheet.
def get_cells(self):
    """Retrieve all cells from the spreadsheet."""
    logger.info("Retrieving all cells spreadsheet data ...")
    logger.debug("MozillaClub client calls API: %s", self.base_url)
    return self.fetch(self.base_url).text
Parse the MozillaClub spreadsheet feed cells json.
def parse(self):
    """Parse the MozillaClub spreadsheet feed cells json.

    :returns: a generator of event dicts; rows missing the mandatory
        'Date of Event' or 'Club Name' fields are skipped and counted
    """

    nevents_wrong = 0

    feed_json = json.loads(self.feed)

    if 'entry' not in feed_json['feed']:
        # Empty spreadsheet: nothing to parse.
        return

    self.cells = feed_json['feed']['entry']
    # self.ncell is the cursor into the flat cell list, advanced by the
    # row-reading helpers below.
    self.ncell = 0
    event_fields = self.__get_event_fields()

    # Process all events reading the rows according to the event template
    # The only way to detect the end of row is looking to the
    # number of column. When the max number is reached (cell_cols) the next
    # cell is from the next row.
    while self.ncell < len(self.cells):
        # Process the next row (event) getting all cols to build the event
        event = self.__get_next_event(event_fields)
        if event['Date of Event'] is None or event['Club Name'] is None:
            # Mandatory fields missing: count the row and skip it.
            logger.warning("Wrong event data: %s", event)
            nevents_wrong += 1
            continue
        yield event

    logger.info("Total number of wrong events: %i", nevents_wrong)
Get the events fields ( columns ) from the cells received.
def __get_event_fields(self):
    """Get the events fields (columns) from the cells received.

    Consumes the first spreadsheet row (the header) from self.cells,
    advancing self.ncell past it, and maps column number -> column name.
    Mismatches against EVENT_TEMPLATE are only logged, not fatal.
    """
    event_fields = {}
    # The cells in the first row are the column names
    # Check that the columns names are the same we have as template
    # Create the event template from the data retrieved
    while self.ncell < len(self.cells):
        cell = self.cells[self.ncell]
        row = cell['gs$cell']['row']
        if int(row) > 1:
            # When the row number >1 the column row is finished
            break
        ncol = int(cell['gs$cell']['col'])
        name = cell['content']['$t']
        event_fields[ncol] = name
        if ncol in EVENT_TEMPLATE:
            if event_fields[ncol] != EVENT_TEMPLATE[ncol]:
                # Known column whose header text drifted from the template.
                logger.warning("Event template changed in spreadsheet %s vs %s",
                               name, EVENT_TEMPLATE[ncol])
        else:
            # Column not present in the template at all.
            logger.warning("Event template changed in spreadsheet. New column: %s", name)
        self.ncell += 1

    return event_fields
Return data files in directory * dirname *
def get_data_files(dirname):
    """Return data files in directory *dirname*"""
    return [osp.join(dirpath, fname)
            for dirpath, _dirnames, filenames in os.walk(dirname)
            for fname in filenames]
Calculate the md5-hash of a file. :param file_path: full path to the file.
def md5(file_path):
    """Calculate the MD5 hash of a file.

    The original implementation nested a second read loop inside the
    outer one; both loops did the same chunked read, so the inner loop
    was redundant. A single chunked loop produces the same digest.

    :param file_path: full path to the file.
    :returns: upper-case hexadecimal MD5 digest of the file contents.
    """
    hasher = hashlib.md5()
    with open(file_path, 'rb') as f:
        # Read in BLOCKSIZE chunks so large files never load fully
        # into memory.
        while True:
            buf = f.read(BLOCKSIZE)
            if not buf:
                break
            hasher.update(buf)
    return hasher.hexdigest().upper()
Show the file size. :param full_path: full path to the file.
def size(full_path):
    """Shows file size.

    :param full_path: full path to the file.
    """
    file_size = os.path.getsize(full_path)
    str_file_size = str(file_size)
    print(str_file_size, 'b')
    # Show size in b, kb, mb or gb depending on the dimension:
    # the digit count of the byte size selects the first matching unit.
    for digits, divisor, unit in ((10, 1073741824, 'gb'),
                                  (7, 1048576, 'mb'),
                                  (4, 1024, 'kb')):
        if len(str_file_size) >= digits:
            print('{0:.2f}'.format(file_size / divisor), unit)
            break
Split the tuple ( obtained from scan ) to separate files. Alternately send full paths to the files in md5 and call it.: param directory: tuple of files in the directory.
def calculate(directory):
    """Print every file in the directory with its size and MD5 hash.

    Bug fix: the original built paths by sniffing ``sys.platform`` and
    compared it against ``'windows'``, a value ``sys.platform`` never
    takes (Windows reports ``'win32'``), so the function bailed out with
    an error on Windows, macOS and every non-Linux OS. ``os.path.join``
    always uses the correct separator, so the platform check is gone.

    :param directory: tuple ``(dirpath, dirnames, filenames)`` as
        produced by ``os.walk``.
    """
    print('Files in the current directory and their md5-hashes:\n')
    for fname in directory[2]:  # Go through the files of the directory
        full_path = os.path.join(directory[0], fname)
        print(full_path)
        size(full_path)
        print(md5(full_path))
Scan the directory and send the obtained tuple to calculate.: param tree: path to file or directory
def scan(tree):
    """Scan the directory and send the obtained tuple to calculate.

    :param tree: path to file or directory
    """
    tree = os.path.normpath(tree)
    assert os.path.exists(tree), "#Error. The path '{}' is" \
                                 " invalid or doesn't exist.".format(str(tree))
    if os.path.isfile(tree):
        # A single file: just hash it.
        return md5(tree)
    if os.path.isdir(tree):
        # Walk the tree, reporting each directory before hashing it.
        for directory in os.walk(tree):
            print('...................')
            print('Current directory:')
            print(directory[0])
            if not directory[2]:
                print('An empty directory.')
                continue
            print('List of the files in the current directory:')
            print(directory[2])
            print()
            calculate(directory)
List of export formats.
def export_formats(self, pid_type):
    """List of export formats.

    Lazily builds and caches, per PID type, the enabled export formats
    from the ``RECORDS_UI_EXPORT_FORMATS`` application config, sorted by
    their ``order`` key.
    """
    if pid_type not in self._export_formats:
        config = self.app.config.get('RECORDS_UI_EXPORT_FORMATS', {})
        # Keep only truthy format definitions (False/None disables one).
        enabled = [(key, fmt)
                   for key, fmt in config.get(pid_type, {}).items()
                   if fmt]
        enabled.sort(key=lambda entry: entry[1]['order'])
        self._export_formats[pid_type] = enabled
    return self._export_formats[pid_type]
Load default permission factory.
def permission_factory(self):
    """Load default permission factory.

    Resolved lazily from the application config on first access and
    cached afterwards.
    """
    if self._permission_factory is None:
        self._permission_factory = obj_or_import_string(
            self.app.config['RECORDS_UI_DEFAULT_PERMISSION_FACTORY'])
    return self._permission_factory
Flask application initialization.
def init_app(self, app):
    """Flask application initialization.

    :param app: The Flask application.
    """
    self.init_config(app)
    state = _RecordUIState(app)
    app.extensions['invenio-records-ui'] = state
Create Invenio - Records - UI blueprint.
def create_blueprint(endpoints):
    """Create Invenio-Records-UI blueprint.

    The factory installs one URL route per endpoint defined, and adds an
    error handler for rendering tombstones.

    :param endpoints: Dictionary of endpoints to be installed. See usage
        documentation for further details.
    :returns: The initialized blueprint.
    """
    blueprint = Blueprint(
        'invenio_records_ui',
        __name__,
        url_prefix='',
        template_folder='templates',
        static_folder='static',
    )

    # Deleted PIDs render a tombstone page with HTTP 410 Gone.
    @blueprint.errorhandler(PIDDeletedError)
    def tombstone_errorhandler(error):
        return render_template(
            current_app.config['RECORDS_UI_TOMBSTONE_TEMPLATE'],
            pid=error.pid,
            record=error.record or {},
        ), 410

    # Expose the configured export formats to every template rendered
    # from this blueprint.
    @blueprint.context_processor
    def inject_export_formats():
        return dict(
            export_formats=(
                current_app.extensions['invenio-records-ui'].export_formats)
        )

    # One URL rule per configured endpoint.
    for endpoint, options in (endpoints or {}).items():
        blueprint.add_url_rule(**create_url_rule(endpoint, **options))

    return blueprint
Create Werkzeug URL rule for a specific endpoint.
def create_url_rule(endpoint, route=None, pid_type=None, template=None,
                    permission_factory_imp=None, view_imp=None,
                    record_class=None, methods=None):
    """Create Werkzeug URL rule for a specific endpoint.

    The method takes care of creating a persistent identifier resolver
    for the given persistent identifier type.

    :param endpoint: Name of endpoint.
    :param route: URL route (must include ``<pid_value>`` pattern). Required.
    :param pid_type: Persistent identifier type for endpoint. Required.
    :param template: Template to render.
        (Default: ``invenio_records_ui/detail.html``)
    :param permission_factory_imp: Import path to factory that creates a
        permission object for a given record.
    :param view_imp: Import path to view function. (Default: ``None``)
    :param record_class: Name of the record API class.
    :param methods: Method allowed for the endpoint.
    :returns: A dictionary that can be passed as keywords arguments to
        ``Blueprint.add_url_rule``.
    """
    assert route
    assert pid_type

    # Resolve the dotted import paths (if given) into callables/classes.
    permission_factory = import_string(permission_factory_imp) if \
        permission_factory_imp else None
    view_method = import_string(view_imp) if view_imp else default_view_method
    record_class = import_string(record_class) if record_class else Record
    methods = methods or ['GET']

    # Bind the per-endpoint configuration into the shared view function.
    view_func = partial(
        record_view,
        resolver=Resolver(pid_type=pid_type, object_type='rec',
                          getter=record_class.get_record),
        template=template or 'invenio_records_ui/detail.html',
        permission_factory=permission_factory,
        view_method=view_method)
    # Make view well-behaved for Flask-DebugToolbar
    view_func.__module__ = record_view.__module__
    view_func.__name__ = record_view.__name__

    return dict(
        endpoint=endpoint,
        rule=route,
        view_func=view_func,
        methods=methods,
    )
Display record view.
def record_view(pid_value=None, resolver=None, template=None,
                permission_factory=None, view_method=None, **kwargs):
    """Display record view.

    The two parameters ``resolver`` and ``template`` should not be included
    in the URL rule, but instead set by creating a partially evaluated
    function of the view.

    The template being rendered is passed two variables in the template
    context:

    - ``pid``
    - ``record``.

    Procedure followed:

    #. PID and record are resolved.

    #. Permission are checked.

    #. ``view_method`` is called.

    :param pid_value: Persistent identifier value.
    :param resolver: An instance of a persistent identifier resolver. A
        persistent identifier resolver takes care of resolving persistent
        identifiers into internal objects.
    :param template: Template to render.
    :param permission_factory: Permission factory called to check if user has
        enough power to execute the action.
    :param view_method: Function that is called.
    :returns: Tuple (pid object, record object).
    """
    try:
        pid, record = resolver.resolve(pid_value)
    except (PIDDoesNotExistError, PIDUnregistered):
        # Unknown or never-registered PID: plain 404.
        abort(404)
    except PIDMissingObjectError as e:
        # A registered PID with no record behind it is a data error.
        current_app.logger.exception(
            "No object assigned to {0}.".format(e.pid),
            extra={'pid': e.pid})
        abort(500)
    except PIDRedirectedError as e:
        # The PID was redirected; send the client to the target endpoint.
        try:
            return redirect(url_for(
                '.{0}'.format(e.destination_pid.pid_type),
                pid_value=e.destination_pid.pid_value))
        except BuildError:
            # No endpoint registered for the destination PID type.
            current_app.logger.exception(
                "Invalid redirect - pid_type '{0}' endpoint missing.".format(
                    e.destination_pid.pid_type),
                extra={
                    'pid': e.pid,
                    'destination_pid': e.destination_pid,
                })
            abort(500)

    # Check permissions
    permission_factory = permission_factory or current_permission_factory
    if permission_factory:
        # Note, cannot be done in one line due to overloading of boolean
        # operations in permission object.
        if not permission_factory(record).can():
            from flask_login import current_user
            if not current_user.is_authenticated:
                # Anonymous user: offer login, then return here.
                return redirect(url_for(
                    current_app.config['RECORDS_UI_LOGIN_ENDPOINT'],
                    next=request.url))
            abort(403)

    return view_method(pid, record, template=template, **kwargs)
Display default view.
def default_view_method(pid, record, template=None, **kwargs):
    r"""Display default view.

    Sends record_viewed signal and renders template.

    :param pid: PID object.
    :param record: Record object.
    :param template: Template to render.
    :param \*\*kwargs: Additional view arguments based on URL rule.
    :returns: The rendered template.
    """
    record_viewed.send(
        current_app._get_current_object(), pid=pid, record=record)
    return render_template(template, pid=pid, record=record)
Record serialization view.
def export(pid, record, template=None, **kwargs):
    r"""Record serialization view.

    Serializes record with given format and renders record export template.

    :param pid: PID object.
    :param record: Record object.
    :param template: Template to render.
    :param \*\*kwargs: Additional view arguments based on URL rule.
    :return: The rendered template.
    """
    # NOTE(review): if no formats are configured for this pid_type,
    # ``formats`` is None and the next line raises AttributeError —
    # confirm whether that can occur for registered endpoints.
    formats = current_app.config.get('RECORDS_UI_EXPORT_FORMATS', {}).get(
        pid.pid_type)
    fmt = formats.get(request.view_args.get('format'))

    if fmt is False:
        # If value is set to False, it means it was deprecated.
        abort(410)
    elif fmt is None:
        # Unknown format for this PID type.
        abort(404)
    else:
        serializer = obj_or_import_string(fmt['serializer'])
        data = serializer.serialize(pid, record)
        # Serializers may return bytes; templates need text.
        if isinstance(data, six.binary_type):
            data = data.decode('utf8')

        return render_template(
            template,
            pid=pid,
            record=record,
            data=data,
            format_title=fmt['title'],
        )
Load test data fixture.
def records():
    """Load test data fixture."""
    import uuid

    from invenio_records.api import Record
    from invenio_pidstore.models import PersistentIdentifier, PIDStatus

    # NOTE(review): rec1_uuid and rec2_uuid are not defined in this
    # function; presumably module-level values — confirm.
    # Record 1 - Live record
    with db.session.begin_nested():
        pid1 = PersistentIdentifier.create(
            'recid', '1', object_type='rec', object_uuid=rec1_uuid,
            status=PIDStatus.REGISTERED)
        Record.create({
            'title': 'Registered ',
            'authors': [
                {'name': 'Ellis Jonathan'},
                {'name': 'Higgs Peter'},
            ],
            'access': 'open',
            'keywords': ['CERN', 'higgs'],
        }, id_=rec1_uuid)

        # Record 2 - Live record with closed access
        PersistentIdentifier.create(
            'recid', '2', object_type='rec', object_uuid=rec2_uuid,
            status=PIDStatus.REGISTERED)
        Record.create({
            'title': 'Registered ',
            'authors': [
                {'name': 'Ellis Jonathan'},
                {'name': 'Higgs Peter'},
            ],
            'access': 'closed',
            'keywords': ['CERN', 'higgs'],
        }, id_=rec2_uuid)

        # Record 3 - Deleted PID with record
        rec3_uuid = uuid.uuid4()
        pid = PersistentIdentifier.create(
            'recid', '3', object_type='rec', object_uuid=rec3_uuid,
            status=PIDStatus.REGISTERED)
        pid.delete()
        Record.create({'title': 'Live '}, id_=rec3_uuid)

        # Record 4 - Deleted PID without a record
        PersistentIdentifier.create(
            'recid', '4', status=PIDStatus.DELETED)

        # Record 5 - Registered PID without a record
        PersistentIdentifier.create(
            'recid', '5', status=PIDStatus.REGISTERED)

        # Record 6 - Redirected PID
        pid = PersistentIdentifier.create(
            'recid', '6', status=PIDStatus.REGISTERED)
        pid.redirect(pid1)

        # Record 7 - Redirected non existing endpoint
        doi = PersistentIdentifier.create(
            'doi', '10.1234/foo', status=PIDStatus.REGISTERED)
        pid = PersistentIdentifier.create(
            'recid', '7', status=PIDStatus.REGISTERED)
        pid.redirect(doi)

        # Record 8 - Unregistered PID
        PersistentIdentifier.create(
            'recid', '8', status=PIDStatus.RESERVED)
    db.session.commit()
Send a Timer metric calculating duration of execution of the provided callable
def time_callable(self, name, target, rate=None, args=(), kwargs=None):
    # type: (str, Callable, float, Tuple, Dict) -> Any
    """Call *target* and send a Timer metric for its execution duration.

    Fixes two defects: the mutable default argument ``kwargs={}`` (a
    single dict shared across all calls) is replaced by ``None``, and
    the return type comment claimed ``Chronometer`` while the method in
    fact returns whatever *target* returns.

    :param name: metric name.
    :param target: callable to execute and time.
    :param rate: sample rate; defaults to the client's configured rate.
    :param args: positional arguments passed to *target*.
    :param kwargs: keyword arguments passed to *target*.
    :returns: the value returned by *target*.
    """
    assert callable(target)
    if rate is None:
        rate = self._rate
    else:
        assert_sample_rate(rate)
    start_time = time()  # type: float
    result = target(*args, **(kwargs or {}))
    self.since(name, start_time, rate)
    return result
Close the socket to free system resources.
def close(self):
    # type: () -> None
    """Close the socket to free system resources.

    After the socket is closed, further operations with socket
    will fail. Multiple calls to close will have no effect.
    """
    if not self._closed:
        self._socket.close()
        self._closed = True
Remove the client from the users of the socket.
def remove_client(self, client):
    # type: (object) -> None
    """Remove the client from the users of the socket.

    If there are no more clients for the socket, it will close
    automatically.
    """
    client_id = id(client)
    if client_id in self._clients:
        self._clients.remove(client_id)
    if not self._clients:
        self.close()
Increment a Counter metric
def increment(self, name, count=1, rate=1):
    # type: (str, int, float) -> None
    """Increment a Counter metric"""
    if not self._should_send_metric(name, rate):
        return
    metric = Counter(
        self._create_metric_name_for_request(name), int(count), rate)
    self._request(metric.to_request())
Send a Timer metric with the specified duration in milliseconds
def timing(self, name, milliseconds, rate=1):
    # type: (str, float, float) -> None
    """Send a Timer metric with the specified duration in milliseconds"""
    if not self._should_send_metric(name, rate):
        return
    # Durations are sent as whole milliseconds.
    metric = Timer(
        self._create_metric_name_for_request(name), int(milliseconds), rate)
    self._request(metric.to_request())
Send a Timer metric calculating the duration from the start time
def timing_since(self, name, start_time, rate=1):
    # type: (str, Union[float, datetime], float) -> None
    """Send a Timer metric calculating the duration from the start time"""
    if isinstance(start_time, datetime):
        delta = datetime.now(start_time.tzinfo) - start_time
        duration = delta.total_seconds() * 1000  # type: float
    elif is_numeric(start_time):
        assert start_time > 0
        duration = (time() - start_time) * 1000
    else:
        raise ValueError("start time should be a timestamp or a datetime")
    self.timing(name, duration, rate)
Send a Gauge metric with the specified value
def gauge(self, name, value, rate=1):
    # type: (str, float, float) -> None
    """Send a Gauge metric with the specified value"""
    if not self._should_send_metric(name, rate):
        return
    # Coerce non-numeric values so the metric payload is well-formed.
    if not is_numeric(value):
        value = float(value)
    metric = Gauge(
        self._create_metric_name_for_request(name), value, rate)
    self._request(metric.to_request())
Send a GaugeDelta metric to change a Gauge by the specified value
def gauge_delta(self, name, delta, rate=1):
    # type: (str, float, float) -> None
    """Send a GaugeDelta metric to change a Gauge by the specified value"""
    if not self._should_send_metric(name, rate):
        return
    # Coerce non-numeric deltas so the metric payload is well-formed.
    if not is_numeric(delta):
        delta = float(delta)
    metric = GaugeDelta(
        self._create_metric_name_for_request(name), delta, rate)
    self._request(metric.to_request())
Send a Set metric with the specified unique value
def set(self, name, value, rate=1):
    # type: (str, str, float) -> None
    """Send a Set metric with the specified unique value"""
    if not self._should_send_metric(name, rate):
        return
    # Set members are always transmitted as strings.
    metric = Set(
        self._create_metric_name_for_request(name), str(value), rate)
    self._request(metric.to_request())
Override parent by buffering the metric instead of sending now
def _request(self, data): # type: (str) -> None """Override parent by buffering the metric instead of sending now""" data = bytearray("{}\n".format(data).encode()) self._prepare_batches_for_storage(len(data)) self._batches[-1].extend(data)
Return a batch client with same settings of the client
def batch_client(self, size=512):
    # type: (int) -> BatchClient
    """Return a batch client with same settings of the client"""
    client = BatchClient(self.host, self.port, self.prefix, size)
    self._configure_client(client)
    return client