sentence1
stringlengths
52
3.87M
sentence2
stringlengths
1
47.2k
label
stringclasses
1 value
def enable_loggly(graph):
    """
    Decide whether loggly logging should be enabled.

    Loggly is only enabled outside debug/testing mode and when both a
    token and an environment are present in the configuration.
    """
    metadata = graph.metadata
    if metadata.debug or metadata.testing:
        return False
    try:
        loggly = graph.config.logging.loggly
        # `and` short-circuits, so environment is only read when a token exists
        return bool(loggly.token) and bool(loggly.environment)
    except AttributeError:
        # loggly is simply not configured
        return False
Enable loggly if it is configured and not debug/testing.
entailment
def make_dict_config(graph):
    """
    Assemble the logging dictConfig dictionary from conventions and
    configuration.
    """
    formatters = {
        "ExtraFormatter": make_extra_console_formatter(graph),
    }
    handlers = {
        "console": make_stream_handler(graph, formatter="ExtraFormatter"),
    }

    # the loggly handler is optional and only added when fully configured
    if enable_loggly(graph):
        formatters["JSONFormatter"] = make_json_formatter(graph)
        handlers["LogglyHTTPSHandler"] = make_loggly_handler(graph, formatter="JSONFormatter")

    # the root logger fans out to every configured handler
    loggers = {
        "": {
            "handlers": handlers.keys(),
            "level": graph.config.logging.level,
        },
    }
    # quiet down (or turn up) commonly used third party libraries
    loggers.update(make_library_levels(graph))

    return dict(
        version=1,
        disable_existing_loggers=False,
        formatters=formatters,
        handlers=handlers,
        loggers=loggers,
    )
Build a dictionary configuration from conventions and configuration.
entailment
def make_json_formatter(graph):
    """
    Build the default JSON formatter configuration.
    """
    logging_config = graph.config.logging
    return {
        # logging.config instantiates the class named by the "()" key
        "()": logging_config.json_formatter.formatter,
        "fmt": logging_config.json_required_keys,
    }
Create the default json formatter.
entailment
def make_stream_handler(graph, formatter):
    """
    Build the stream handler configuration. Used for console/debug output.

    :param formatter: name of the formatter entry this handler should use
    """
    logging_config = graph.config.logging
    return {
        "class": logging_config.stream_handler.class_,
        "formatter": formatter,
        "level": logging_config.level,
        "stream": logging_config.stream_handler.stream,
    }
Create the stream handler. Used for console/debug output.
entailment
def make_loggly_handler(graph, formatter):
    """
    Build the loggly handler configuration. Used for searchable aggregation.

    :param formatter: name of the formatter entry this handler should use
    """
    loggly = graph.config.logging.loggly
    # loggly tags the stream with the service name and its environment
    tags = ",".join([graph.metadata.name, loggly.environment])
    loggly_url = "{}/inputs/{}/tag/{}".format(loggly.base_url, loggly.token, tags)
    return {
        "class": graph.config.logging.https_handler.class_,
        "formatter": formatter,
        "level": graph.config.logging.level,
        "url": loggly_url,
    }
Create the loggly handler. Used for searchable aggregation.
entailment
def make_library_levels(graph):
    """
    Create third party library logging level configurations.

    Tunes down overly verbose logs in commonly used libraries. Defaults are
    applied first; per-application overrides win because they are applied
    second over the same mapping.
    """
    levels = {}
    sources = (
        graph.config.logging.levels.default,
        graph.config.logging.levels.override,
    )
    for source in sources:
        for level in ("DEBUG", "INFO", "WARN", "ERROR"):
            for component in source[level.lower()]:
                levels[component] = {"level": level}
    return levels
Create third party library logging level configurations. Tunes down overly verbose logs in commonly used libraries.
entailment
def process_request(self, request):
    """
    Redirects the current request if there is a matching Redirect model
    with the current request URL as the old_path field.
    """
    site = request.site
    cache_key = '{prefix}-{site}'.format(prefix=settings.REDIRECT_CACHE_KEY_PREFIX,
                                         site=site.domain)
    redirects = cache.get(cache_key)
    if redirects is None:
        # cache miss: build the old_path -> new_path map for this site
        redirects = {entry.old_path: entry.new_path
                     for entry in Redirect.objects.filter(site=site)}
        cache.set(cache_key, redirects, settings.REDIRECT_CACHE_TIMEOUT)
    target = redirects.get(request.path)
    if target:
        return redirect(target, permanent=True)
Redirects the current request if there is a matching Redirect model with the current request URL as the old_path field.
entailment
def context_logger(context_func, func, parent=None):
    """
    Wrap *func* so that, for the duration of each call, ``parent.logger``
    is a ContextLogger carrying the context produced by *context_func*.

    :param context_func: callable which provides dictionary-like context information
    :param func: the function to wrap
    :param parent: object to attach the context logger to, if None, defaults
        to func.__self__
    """
    target = func.__self__ if parent is None else parent

    def wrapped(*args, **kwargs):
        base_logger = getattr(target, 'logger', getLogger(target.__class__.__name__))
        target.logger = ContextLogger(base_logger, context_func(*args, **kwargs) or dict())
        try:
            return func(*args, **kwargs)
        finally:
            # unwrap: restore the logger the ContextLogger was built around
            target.logger = target.logger.logger

    return wrapped
The results of context_func will be executed and applied to a ContextLogger instance for the execution of func. The resulting ContextLogger instance will be available on parent.logger for the duration of func. :param context_func: callable which provides dictionary-like context information :param func: the function to wrap :param parent: object to attach the context logger to, if None, defaults to func.__self__
entailment
def patched_get_current(self, request=None): """ Monkey patched version of Django's SiteManager.get_current() function. Returns the current Site based on a given request or the SITE_ID in the project's settings. If a request is given attempts to match a site with domain matching request.get_host(). If a request is not given or a site cannot be found based on the host of the request, we return the site which matches the configured SITE_ID setting. """ # Imported here to avoid circular import from django.conf import settings if request: try: return self._get_site_by_request(request) # pylint: disable=protected-access except Site.DoesNotExist: pass if getattr(settings, 'SITE_ID', ''): return self._get_site_by_id(settings.SITE_ID) # pylint: disable=protected-access raise ImproperlyConfigured( "You're using the Django \"sites framework\" without having " "set the SITE_ID setting. Create a site in your database and " "set the SITE_ID setting or pass a request to " "Site.objects.get_current() to fix this error." )
Monkey patched version of Django's SiteManager.get_current() function. Returns the current Site based on a given request or the SITE_ID in the project's settings. If a request is given attempts to match a site with domain matching request.get_host(). If a request is not given or a site cannot be found based on the host of the request, we return the site which matches the configured SITE_ID setting.
entailment
def patched_get_site_by_id(self, site_id):
    """
    Monkey patched version of Django's SiteManager._get_site_by_id() function.

    Adds a configurable timeout to the in-memory SITE_CACHE for each cached
    Site. The in-memory cache still avoids DB hits on most requests, but
    changes to models reachable through the Site model's relationship
    accessors take effect without recycling all Django worker processes.
    """
    now = datetime.datetime.utcnow()
    cached = models.SITE_CACHE.get(site_id)
    expires_at = SITE_CACHE_TIMEOUTS.get(site_id, now)
    # refresh when the entry is missing or its timeout has elapsed
    if not cached or expires_at <= now:
        models.SITE_CACHE[site_id] = self.get(pk=site_id)
        SITE_CACHE_TIMEOUTS[site_id] = now + get_site_cache_ttl()
    return models.SITE_CACHE[site_id]
Monkey patched version of Django's SiteManager._get_site_by_id() function. Adds a configurable timeout to the in-memory SITE_CACHE for each cached Site. This allows for the use of an in-memory cache for Site models, avoiding one or more DB hits on every request made to the Django application, but also allows for changes made to models associated with the Site model and accessed via the Site model's relationship accessors to take effect without having to manual recycle all Django worker processes active in an application environment.
entailment
def patched_get_site_by_request(self, request):
    """
    Monkey patched version of Django's SiteManager._get_site_by_request()
    function.

    Adds a configurable timeout to the in-memory SITE_CACHE for each cached
    Site, keyed by request host. The in-memory cache still avoids DB hits
    on most requests, but changes to models reachable through the Site
    model's relationship accessors take effect without recycling all Django
    worker processes.
    """
    host = request.get_host()
    now = datetime.datetime.utcnow()
    cached = models.SITE_CACHE.get(host)
    expires_at = SITE_CACHE_TIMEOUTS.get(host, now)
    # refresh when the entry is missing or its timeout has elapsed
    if not cached or expires_at <= now:
        models.SITE_CACHE[host] = self.get(domain__iexact=host)
        SITE_CACHE_TIMEOUTS[host] = now + get_site_cache_ttl()
    return models.SITE_CACHE[host]
Monkey patched version of Django's SiteManager._get_site_by_request() function. Adds a configurable timeout to the in-memory SITE_CACHE for each cached Site. This allows for the use of an in-memory cache for Site models, avoiding one or more DB hits on every request made to the Django application, but also allows for changes made to models associated with the Site model and accessed via the Site model's relationship accessors to take effect without having to manual recycle all Django worker processes active in an application environment.
entailment
def str_brief(obj, lim=20, dots='...', use_repr=True):
    """Truncates a string, starting from 'lim' chars.
    The given object can be a string, or something that can be casted
    to a string.
    >>> import string
    >>> str_brief(string.uppercase)
    'ABCDEFGHIJKLMNOPQRST...'
    >>> str_brief(2 ** 50, lim=10, dots='0')
    '11258999060'
    """
    # NOTE(review): `basestring` exists only in Python 2 -- confirm this
    # module still targets Python 2 before reusing it elsewhere.
    if isinstance(obj, basestring) or not use_repr:
        full = str(obj)
    else:
        full = repr(obj)
    # stack of closing characters for every opener/quote seen so far; a
    # truncated result is padded with these so brackets stay balanced
    postfix = []
    CLOSERS = {'(': ')', '{': '}', '[': ']', '"': '"', "'": "'", '<': '>'}
    for i, c in enumerate(full):
        if i >= lim + len(postfix):
            # budget (lim plus room for pending closers) exhausted:
            # truncate, append the ellipsis, then close open brackets in
            # reverse order of opening
            return full[:i] + dots + ''.join(reversed(postfix))
        if postfix and postfix[-1] == c:
            # this character closes the most recent opener
            postfix.pop(-1)
            continue
        closer = CLOSERS.get(c, None)
        if closer is not None:
            postfix.append(closer)
    # short enough: return unchanged
    return full
Truncates a string, starting from 'lim' chars. The given object can be a string, or something that can be casted to a string. >>> import string >>> str_brief(string.uppercase) 'ABCDEFGHIJKLMNOPQRST...' >>> str_brief(2 ** 50, lim=10, dots='0') '11258999060'
entailment
def get_mirror_resources_by_name_map(self, scope=None):
    """
    Return a Bunch with two maps: volume_name -> mirror and cg_name -> mirror.

    :param scope: None (collect both), or the string 'Volume' or 'CG'
        (compared case-insensitively)
    """
    volumes_mirrors_by_name = dict()
    cgs_mirrors_by_name = dict()
    if scope is None or scope.lower() == 'volume':
        for xcli_mirror in self.xcli_client.cmd.mirror_list(scope='Volume'):
            name = MirroredEntities.get_mirrored_object_name(xcli_mirror)
            volumes_mirrors_by_name[name] = xcli_mirror
    # FIX: the CG branch compared scope.lower() against the module constant
    # CG rather than the lowercase literal used by the Volume branch; use
    # 'cg' so a 'CG'/'cg' scope reliably selects this branch.
    if scope is None or scope.lower() == 'cg':
        for xcli_mirror in self.xcli_client.cmd.mirror_list(scope='CG'):
            name = MirroredEntities.get_mirrored_object_name(xcli_mirror)
            cgs_mirrors_by_name[name] = xcli_mirror
    return Bunch(volumes=volumes_mirrors_by_name, cgs=cgs_mirrors_by_name)
returns a map volume_name -> volume, cg_name->cg scope is either None or CG or Volume
entailment
def get_host_port_names(self, host_name):
    """Return a list of the port names (FC and iSCSI) of an XIV host."""
    host = self.get_hosts_by_name(host_name)
    names = []
    # both attributes are comma separated strings; an empty string means
    # the host has no ports of that kind
    for ports in (host.fc_ports, host.iscsi_ports):
        if ports != '':
            names.extend(ports.split(','))
    return names
return a list of the port names of XIV host
entailment
def get_cluster_port_names(self, cluster_name):
    """
    Return a list of the port names under an XIV cluster.

    :param cluster_name: name of the cluster whose hosts' ports are listed
    """
    port_names = list()
    for host_name in self.get_hosts_by_clusters()[cluster_name]:
        # BUG FIX: previously extended with get_hosts_by_name(host_name),
        # which returns host objects, not port names; delegate to the
        # sibling get_host_port_names() instead.
        port_names.extend(self.get_host_port_names(host_name))
    return port_names
return a list of the port names under XIV CLuster
entailment
def flush(self):
    """
    Remove all stale clients from the pool, closing their connections.

    An entry is stale once its time-to-live has elapsed -- the same rule
    that get() applies when reusing pooled clients.
    """
    now = time.time()
    stale_keys = []
    for key, entry in self.pool.items():
        # BUG FIX: the old check `entry.timestamp < now` was always true
        # (timestamp is the creation time), so flush() evicted every entry;
        # compare against the entry's time-to-live like get() does.
        if entry.timestamp + self.time_to_live < now:
            entry.client.close()
            stale_keys.append(key)
    for key in stale_keys:
        del self.pool[key]
remove all stale clients from pool
entailment
def get(self, user, password, endpoints):
    """Gets an existing connection or opens a new one

    :param user: user name for the per-user client layered on the connection
    :param password: password for the per-user client
    :param endpoints: a single endpoint (str) or a list of endpoints
    :returns: a connected per-user client
    """
    now = time.time()
    # endpoints can either be str or list
    if isinstance(endpoints, str):
        endpoints = [endpoints]
    for ep in endpoints:
        if ep not in self.pool:
            continue
        entry = self.pool[ep]
        # evict entries whose client dropped or whose time-to-live elapsed
        if (not entry.client.is_connected() or
                entry.timestamp + self.time_to_live < now):
            xlog.debug("XCLIClientPool: clearing stale client %s", ep)
            # NOTE(review): only this endpoint's entry is removed before the
            # shared client is closed; other endpoints registered with the
            # same client keep a now-closed client -- confirm intended.
            del self.pool[ep]
            entry.client.close()
            continue
        user_client = entry.user_clients.get(user, None)
        if not user_client or not user_client.is_connected():
            # (re)build the per-user client on top of the pooled connection
            user_client = entry.client.get_user_client(user, password)
            entry.user_clients[user] = user_client
        return user_client
    # no usable pooled connection: open a new shared client and register it
    # under every requested endpoint (they share the same user_clients dict)
    xlog.debug("XCLIClientPool: connecting to %s", endpoints)
    client = self.connector(None, None, endpoints)
    user_client = {user: client.get_user_client(user, password)}
    for ep in endpoints:
        self.pool[ep] = PoolEntry(client, now, user_client)
    return user_client[user]
Gets an existing connection or opens a new one
entailment
def size(value: int, suffixes: list = None) -> str:
    """
    Render a byte count as a human readable string.

    >>> size(1024)
    '1 KB'
    """
    units = suffixes or [
        'bytes', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB']
    # each unit step is 2**10; a value of 0 stays in the 'bytes' bucket
    magnitude = int(log2(value) / 10) if value else 0
    scaled = value / (1 << (magnitude * 10))
    return '{:.4g} {}'.format(scaled, units[magnitude])
>>> size(1024) '1 KB'
entailment
def parse_size(value):
    """
    Parse a human readable size expression into a number of bytes.

    >>> parse_size('1M')
    1048576
    >>> parse_size('512K512K')
    1048576
    >>> parse_size('512K 512K')
    1048576
    >>> parse_size('512K 512K 4')
    1048580
    """
    # numbers pass through unchanged
    if isinstance(value, (int, float)):
        return value
    if not pattern_valid.fullmatch(value):
        raise ValueError(value)
    total = 0
    for match in pattern.finditer(value):
        groups = match.groupdict()
        number = int(groups['n'])
        unit = groups['unit']
        if not unit:
            # bare number: plain bytes
            total += number
        elif unit in size_levels:
            # size_levels maps a unit letter to its power-of-two bit shift
            total += number << size_levels[unit]
        else:
            raise ValueError(value)
    return total
>>> parse_size('1M') 1048576 >>> parse_size('512K512K') 1048576 >>> parse_size('512K 512K') 1048576 >>> parse_size('512K 512K 4') 1048580
entailment
def parse_duration(value):
    """
    Parse a human readable duration expression into seconds.

    >>> parse_duration('1h')
    3600
    >>> parse_duration('1m')
    60
    >>> parse_duration('1m 2s')
    62
    >>> parse_duration('1')
    1
    """
    # numbers pass through unchanged
    if isinstance(value, (int, float)):
        return value
    if not pattern_valid.fullmatch(value):
        raise ValueError(value)
    total = 0
    for match in pattern.finditer(value):
        groups = match.groupdict()
        number = int(groups['n'])
        unit = groups['unit']
        if not unit:
            # bare number: plain seconds
            total += number
        elif unit in durations:
            # durations maps a unit letter to its length in seconds
            total += number * durations[unit]
        else:
            raise ValueError(value)
    return total
>>> parse_duration('1h') 3600 >>> parse_duration('1m') 60 >>> parse_duration('1m 2s') 62 >>> parse_duration('1') 1
entailment
def send_event(self, action, properties, event_severity=EVENT_SEVERITY): """ send css_event and if fails send custom_event instead Args: action (ACTIONS): the action causing the event properties (dict): the action additional properties event_severity (string): the event severity Raises: XCLIError: if the xcli.cmd.custom_event failed KeyError: if action wasn't predefined TypeError: if properties is not None or dict """ # verify properties event_properties = dict() if (properties is None) else properties if type(event_properties) is not dict: raise TypeError('properties is not dict') # prepare event event_bunch = Bunch( Product=self.product_name, Version=self.product_version, Server=self.server_name, Platform=self.platform, Action=action, Properties=event_properties) event_description = self._get_description_prefix() + \ json.dumps(event_bunch) use_custom_event = True if CSS_PRODUCT_EVENT in dir(self.xcli.cmd): try: # send css product event log.debug("sending css_product_event " "description=%s severity=%s", event_description, event_severity) self.xcli.cmd.css_product_event(severity=event_severity, product=self.product_name, version=self.product_version, server=self.server_name, platform=self.platform, action=action, properties=event_properties) use_custom_event = False except (UnrecognizedCommandError, OperationForbiddenForUserCategoryError): log.warning("failed css_product_event " "description=%s severity=%s", event_description, event_severity) if use_custom_event: # send custom event log.debug("sending custom_event description=%s severity=%s", event_description, event_severity) self.xcli.cmd.custom_event( description=event_description, severity=event_severity)
send css_event and if fails send custom_event instead Args: action (ACTIONS): the action causing the event properties (dict): the action additional properties event_severity (string): the event severity Raises: XCLIError: if the xcli.cmd.custom_event failed KeyError: if action wasn't predefined TypeError: if properties is not None or dict
entailment
def _create_mirror(self, resource_type, resource_name, target_name,
                   mirror_type, slave_resource_name, create_slave='no',
                   remote_pool=None, rpo=None, remote_rpo=None,
                   schedule=None, remote_schedule=None,
                   activate_mirror='no'):
    '''creates a mirror and returns a mirror object.
    resource_type must be 'vol' or 'cg', target name must be a valid
    target from target_list, mirror type must be 'sync' or 'async',
    slave_resource_name would be the slave_vol or slave_cg name'''
    kwargs = {
        resource_type: resource_name,
        'target': target_name,
        'type': mirror_type,
        'slave_' + resource_type: slave_resource_name,
        'create_slave': create_slave,
        'remote_pool': remote_pool,
        'rpo': rpo,
        'remote_rpo': remote_rpo,
        'schedule': schedule,
        'remote_schedule': remote_schedule
    }
    if mirror_type == 'sync':
        # sync mirrors take no RPO
        kwargs['type'] = 'sync_best_effort'
        kwargs['rpo'] = None
    else:
        kwargs['type'] = 'async_interval'
        # the remote schedule defaults to the local one
        if kwargs['remote_schedule'] is None:
            kwargs['remote_schedule'] = kwargs['schedule']
    # drop unset arguments; iterate over a snapshot of the keys because the
    # dict is mutated during the loop (avoids a python3 RuntimeError).
    # FIX: list(kwargs) replaces the redundant set(kwargs.keys()).copy()
    for key in list(kwargs):
        if kwargs[key] is None:
            kwargs.pop(key)
    logger.info('creating mirror with arguments: %s', kwargs)
    self.xcli_client.cmd.mirror_create(**kwargs)
    if activate_mirror == 'yes':
        logger.info('Activating mirror %s', resource_name)
        self.activate_mirror(resource_name)
    return self.get_mirror_resources()[resource_name]
creates a mirror and returns a mirror object. resource_type must be 'vol' or 'cg', target name must be a valid target from target_list, mirror type must be 'sync' or 'async', slave_resource_name would be the slave_vol or slave_cg name
entailment
def chained(wrapping_exc):  # pylint: disable=W0212
    """
    Embeds the current exception information into the given one (which
    will replace the current one). For example::

        try:
            ...
        except OSError as ex:
            raise chained(MyError("database not found!"))
    """
    exc_type, exc_value, exc_tb = sys.exc_info()
    if exc_type is None:
        # no exception is currently being handled; nothing to embed
        return wrapping_exc
    wrapping_exc._inner_exc = exc_value
    formatted = traceback.format_exception(exc_type, exc_value, exc_tb)
    # drop the leading "Traceback (most recent call last):" line
    wrapping_exc._inner_tb = "".join(formatted[1:])
    return wrapping_exc
Embeds the current exception information into the given one (which will replace the current one). For example:: try: ... except OSError as ex: raise chained(MyError("database not found!"))
entailment
def get_user(self, username, *, mode=OsuMode.osu, event_days=31):
    """Get a user profile.

    Parameters
    ----------
    username : str or int
        A `str` representing the user's username, or an `int` representing the user's id.
    mode : :class:`osuapi.enums.OsuMode`
        The osu! game mode for which to look up. Defaults to osu!standard.
    event_days : int
        The number of days in the past to look for events. Defaults to 31 (the maximum).
    """
    params = dict(
        k=self.key,
        u=username,
        type=_username_type(username),
        m=mode.value,
        event_days=event_days,
    )
    return self._make_req(endpoints.USER, params, JsonList(User))
Get a user profile. Parameters ---------- username : str or int A `str` representing the user's username, or an `int` representing the user's id. mode : :class:`osuapi.enums.OsuMode` The osu! game mode for which to look up. Defaults to osu!standard. event_days : int The number of days in the past to look for events. Defaults to 31 (the maximum).
entailment
def get_user_best(self, username, *, mode=OsuMode.osu, limit=50):
    """Get a user's best scores.

    Parameters
    ----------
    username : str or int
        A `str` representing the user's username, or an `int` representing the user's id.
    mode : :class:`osuapi.enums.OsuMode`
        The osu! game mode for which to look up. Defaults to osu!standard.
    limit
        The maximum number of results to return. Defaults to 50, maximum 100.
    """
    params = dict(
        k=self.key,
        u=username,
        type=_username_type(username),
        m=mode.value,
        limit=limit,
    )
    return self._make_req(endpoints.USER_BEST, params, JsonList(SoloScore))
Get a user's best scores. Parameters ---------- username : str or int A `str` representing the user's username, or an `int` representing the user's id. mode : :class:`osuapi.enums.OsuMode` The osu! game mode for which to look up. Defaults to osu!standard. limit The maximum number of results to return. Defaults to 50, maximum 100.
entailment
def get_user_recent(self, username, *, mode=OsuMode.osu, limit=10):
    """Get a user's most recent scores, within the last 24 hours.

    Parameters
    ----------
    username : str or int
        A `str` representing the user's username, or an `int` representing the user's id.
    mode : :class:`osuapi.enums.OsuMode`
        The osu! game mode for which to look up. Defaults to osu!standard.
    limit
        The maximum number of results to return. Defaults to 10, maximum 50.
    """
    params = dict(
        k=self.key,
        u=username,
        type=_username_type(username),
        m=mode.value,
        limit=limit,
    )
    return self._make_req(endpoints.USER_RECENT, params, JsonList(RecentScore))
Get a user's most recent scores, within the last 24 hours. Parameters ---------- username : str or int A `str` representing the user's username, or an `int` representing the user's id. mode : :class:`osuapi.enums.OsuMode` The osu! game mode for which to look up. Defaults to osu!standard. limit The maximum number of results to return. Defaults to 10, maximum 50.
entailment
def get_scores(self, beatmap_id, *, username=None, mode=OsuMode.osu, mods=None, limit=50):
    """Get the top scores for a given beatmap.

    Parameters
    ----------
    beatmap_id
        Individual Beatmap ID to lookup.
    username : str or int
        A `str` representing the user's username, or an `int` representing the user's id.
        If specified, restricts returned scores to the specified user.
    mode : :class:`osuapi.enums.OsuMode`
        The osu! game mode for which to look up. Defaults to osu!standard.
    mods : :class:`osuapi.enums.OsuMod`
        If specified, restricts returned scores to the specified mods.
    limit
        Number of results to return. Defaults to 50, maximum 100.
    """
    params = dict(
        k=self.key,
        b=beatmap_id,
        u=username,
        type=_username_type(username),
        m=mode.value,
        # mods is optional; send its raw flag value only when given
        mods=mods.value if mods else None,
        limit=limit,
    )
    return self._make_req(endpoints.SCORES, params, JsonList(BeatmapScore))
Get the top scores for a given beatmap. Parameters ---------- beatmap_id Individual Beatmap ID to lookup. username : str or int A `str` representing the user's username, or an `int` representing the user's id. If specified, restricts returned scores to the specified user. mode : :class:`osuapi.enums.OsuMode` The osu! game mode for which to look up. Defaults to osu!standard. mods : :class:`osuapi.enums.OsuMod` If specified, restricts returned scores to the specified mods. limit Number of results to return. Defaults to 50, maximum 100.
entailment
def get_beatmaps(self, *, since=None, beatmapset_id=None, beatmap_id=None,
                 username=None, mode=None, include_converted=False,
                 beatmap_hash=None, limit=500):
    """Get beatmaps.

    Parameters
    ----------
    since : datetime
        If specified, restrict results to beatmaps *ranked* after this date.
    beatmapset_id
        If specified, restrict results to a specific beatmap set.
    beatmap_id
        If specified, restrict results to a specific beatmap.
    username : str or int
        A `str` representing the user's username, or an `int` representing the user's id.
        If specified, restrict results to a specific user.
    mode : :class:`osuapi.enums.OsuMode`
        If specified, restrict results to a specific osu! game mode.
    include_converted : bool
        Whether or not to include autoconverts. Defaults to false.
    beatmap_hash
        If specified, restricts results to a specific beatmap hash.
    limit
        Number of results to return. Defaults to 500, maximum 500.
    """
    # the API expects the ranked-after bound as "YYYY-MM-DD HH:MM:SS"
    since_str = "{:%Y-%m-%d %H:%M:%S}".format(since) if since is not None else None
    params = dict(
        k=self.key,
        s=beatmapset_id,
        b=beatmap_id,
        u=username,
        since=since_str,
        type=_username_type(username),
        m=mode.value if mode else None,
        a=int(include_converted),
        h=beatmap_hash,
        limit=limit,
    )
    return self._make_req(endpoints.BEATMAPS, params, JsonList(Beatmap))
Get beatmaps. Parameters ---------- since : datetime If specified, restrict results to beatmaps *ranked* after this date. beatmapset_id If specified, restrict results to a specific beatmap set. beatmap_id If specified, restrict results to a specific beatmap. username : str or int A `str` representing the user's username, or an `int` representing the user's id. If specified, restrict results to a specific user. mode : :class:`osuapi.enums.OsuMode` If specified, restrict results to a specific osu! game mode. include_converted : bool Whether or not to include autoconverts. Defaults to false. beatmap_hash If specified, restricts results to a specific beatmap hash. limit Number of results to return. Defaults to 500, maximum 500.
entailment
def get_match(self, match_id):
    """Get a multiplayer match.

    Parameters
    ----------
    match_id
        The ID of the match to retrieve. This is the ID that you see in a
        online multiplayer match summary. This does not correspond the
        in-game game ID.
    """
    params = dict(k=self.key, mp=match_id)
    return self._make_req(endpoints.MATCH, params, Match)
Get a multiplayer match. Parameters ---------- match_id The ID of the match to retrieve. This is the ID that you see in a online multiplayer match summary. This does not correspond the in-game game ID.
entailment
async def copy(self, key_source, storage_dest, key_dest):
    """
    Return True if data are copied
    * optimized for http->fs copy
    * not supported return_status
    """
    from aioworkers.storage.filesystem import FileSystemStorage
    # only the http -> filesystem pair is optimized here; everything else
    # is delegated to the base implementation
    if not isinstance(storage_dest, FileSystemStorage):
        # NOTE(review): the base-class result is returned without `await` --
        # confirm super().copy is not a coroutine function, otherwise the
        # caller receives an unawaited coroutine object.
        return super().copy(key_source, storage_dest, key_dest)
    url = self.raw_key(key_source)
    logger = self.context.logger
    async with self._semaphore:
        async with self.session.get(url) as response:
            if response.status == 404:
                # missing source: nothing copied (implicitly returns None)
                return
            elif response.status >= 400:
                # other errors: optionally log the body, then give up
                if logger.getEffectiveLevel() == logging.DEBUG:
                    logger.debug(
                        'HttpStorage request to %s '
                        'returned code %s:\n%s' % (
                            url, response.status,
                            (await response.read()).decode()))
                return
            # stream the response body straight into the destination file
            async with storage_dest.raw_key(key_dest).open('wb') as f:
                async for chunk in response.content.iter_any():
                    await f.write(chunk)
    return True
Return True if data are copied * optimized for http->fs copy * not supported return_status
entailment
def import_name(stref: str):
    """Import an object by dotted path, descending into attributes as needed.

    >>> import_name('datetime.datetime.utcnow') is not None
    True
    >>> import_name('aioworkers.utils.import_name') is not None
    True
    """
    h = stref  # candidate module path, shortened from the right
    p = []     # attribute names stripped off the end (in reverse order)
    m = None
    try:
        r = importlib.util.find_spec(stref)
    except (AttributeError, ImportError):
        # find_spec raises for paths whose prefix is not a package
        r = None
    if r is not None:
        # the whole string names an importable module
        return importlib.import_module(stref)
    # peel trailing components until a prefix names an already-imported module
    while '.' in h:
        h, t = h.rsplit('.', 1)
        p.append(t)
        if h in sys.modules:
            m = sys.modules[h]
            break
    if m is None:
        m = importlib.import_module(h)
    # walk back down: prefer attribute access, fall back to submodule import
    for i in reversed(p):
        if hasattr(m, i):
            m = getattr(m, i)
        else:
            h += '.' + i
            m = importlib.import_module(h)
    logger.debug('Imported "%s" as %r', stref, m)
    return m
>>> import_name('datetime.datetime.utcnow') is not None True >>> import_name('aioworkers.utils.import_name') is not None True
entailment
def options(self, **options):
    """A context-manager for setting connection options; the original
    values of the options will be restored when the context-manager exits.
    For example::

        with c.options(gui_mode = False):
            c.cmd.vol_list()
    """
    # NOTE(review): generator-style context manager -- presumably wrapped by
    # contextlib.contextmanager at the (not visible) decorator; confirm.
    # push a copy of the current option context so changes stay local
    self._contexts.append(self._contexts[-1].copy())
    self.set_options(**options)
    try:
        yield
    finally:
        # discard the temporary context, restoring the previous options
        self._contexts.pop(-1)
A context-manager for setting connection options; the original values of the options will be restored when the context-manager exits. For example:: with c.options(gui_mode = False): c.cmd.vol_list()
entailment
def set_options(self, **options):
    """Sets the value of the given options (as keyword arguments).

    Underscores in an option's name are replaced with hyphens (i.e.,
    ``c.set_options(gui_mode = True)`` sets the option ``gui-mode``).
    Passing ``None`` removes the option instead of setting it.
    """
    current = self._contexts[-1]
    for name, value in options.items():
        key = name.replace("_", "-")
        if value is None:
            # None means "unset"; missing keys are ignored
            current.pop(key, None)
        else:
            current[key] = value
Sets the value of the given options (as keyword arguments). Note that underscored in the option's name will be replaced with hyphens (i.e., ``c.set_options(gui_mode = True)`` will set the option ``gui-mode``)
entailment
def connect_ssl(cls, user, password, endpoints, ca_certs=None, validate=None):
    """
    Creates an SSL transport to the first endpoint (aserver) to which we
    successfully connect

    :param endpoints: a single endpoint (str) or a list of endpoints
    :param ca_certs: CA certificates forwarded to the SSL transport
    :param validate: validation setting forwarded to the SSL transport
    """
    # NOTE(review): `basestring` is Python 2 only -- confirm target version
    if isinstance(endpoints, basestring):
        endpoints = [endpoints]
    transport = SingleEndpointTransport(
        SocketTransport.connect_ssl, endpoints,
        ca_certs=ca_certs, validate=validate)
    return cls(transport, user, password)
Creates an SSL transport to the first endpoint (aserver) to which we successfully connect
entailment
def connect_multiendpoint_ssl(cls, user, password, endpoints, auto_discover=True,
                              ca_certs=None, validate=None):
    """
    Creates a MultiEndpointTransport, so that if the current endpoint
    (aserver) fails, it would automatically move to the next available
    endpoint.

    If ``auto_discover`` is ``True``, we will execute ipinterface_list on
    the system to discover all management IP interfaces and add them to
    the list of endpoints.
    """
    if isinstance(endpoints, basestring):
        # accept a single endpoint given as a bare string
        endpoints = [endpoints]
    client, transport = cls._initiate_client_for_multi_endpoint(
        user, password, endpoints, ca_certs, validate)
    if auto_discover and user:
        discovered = [
            ipif.address
            for ipif in client.cmd.ipinterface_list()
            if ipif.type.lower() == "management"
        ]
        transport.add_endpoints(discovered)
    return client
Creates a MultiEndpointTransport, so that if the current endpoint (aserver) fails, it would automatically move to the next available endpoint. If ``auto_discover`` is ``True``, we will execute ipinterface_list on the system to discover all management IP interfaces and add them to the list of endpoints
entailment
def execute_remote(self, remote_target, cmd, **kwargs):
    """
    Executes the given command (with the given arguments) on the given
    remote target of the connected machine.

    :raises CorruptResponse: when the response XML lacks expected elements
    """
    data = self._build_command(cmd, kwargs, self._contexts[-1], remote_target)
    # the transport is shared; serialize request/response exchanges
    with self._lock:
        rootelem = self.transport.send(data)
        try:
            return self._build_response(rootelem)
        except ElementNotFoundException:
            xlog.exception("XCLIClient.execute")
            raise chained(CorruptResponse(rootelem))
        except Exception:
            xlog.exception("XCLIClient.execute")
            # FIX: bare `raise` re-raises with the original traceback intact
            # (the old `raise e` was redundant and loses it on Python 2)
            raise
Executes the given command (with the given arguments) on the given remote target of the connected machine
entailment
def get_user_client(self, user, password, populate=True):
    """
    Returns a new client for the given user.

    This is a lightweight client that only uses different credentials and
    shares the transport with the underlying client.
    """
    # a weak proxy avoids keeping the parent client alive via the child
    base = weakproxy(self)
    return XCLIClientForUser(base, user, password, populate=populate)
Returns a new client for the given user. This is a lightweight client that only uses different credentials and shares the transport with the underlying client
entailment
def get_remote_client(self, target_name, user=None, password=None): """ Returns a new client for the remote target. This is a lightweight client that only uses different credentials and shares the transport with the underlying client """ if user: base = self.get_user_client(user, password, populate=False) else: base = weakproxy(self) return RemoteXCLIClient(base, target_name, populate=True)
Returns a new client for the remote target. This is a lightweight client that only uses different credentials and shares the transport with the underlying client
entailment
def as_user(self, user, password): """ A context-manager for ``get_user_client``. Allows the execution of commands as a different user with ease. Example: >>> c.cmd.vol_list() >>> with c.as_user("user", "password"): ... c.cmd.vol_list() """ with self.options(user=user, password=password): yield self
A context-manager for ``get_user_client``. Allows the execution of commands as a different user with ease. Example: >>> c.cmd.vol_list() >>> with c.as_user("user", "password"): ... c.cmd.vol_list()
entailment
def accuracy(self, mode: OsuMode): """Calculated accuracy. See Also -------- <https://osu.ppy.sh/help/wiki/Accuracy> """ if mode is OsuMode.osu: return ( (6 * self.count300 + 2 * self.count100 + self.count50) / (6 * (self.count300 + self.count100 + self.count50 + self.countmiss))) if mode is OsuMode.taiko: return ( (self.count300 + self.countgeki + (0.5*(self.count100 + self.countkatu))) / (self.count300 + self.countgeki + self.count100 + self.countkatu + self.countmiss)) if mode is OsuMode.mania: return ( (6 * (self.countgeki + self.count300) + 4 * self.countkatu + 2 * self.count100 + self.count50) / (6 * (self.countgeki + self.count300 + self.countkatu + self.count100 + self.count50 + self.countmiss))) if mode is OsuMode.ctb: return ( (self.count50 + self.count100 + self.count300) / (self.count50 + self.count100 + self.count300 + self.countmiss + self.countkatu))
Calculated accuracy. See Also -------- <https://osu.ppy.sh/help/wiki/Accuracy>
entailment
def create_mirror(self, resource_name, target_name, mirror_type, slave_resource_name, rpo=None, remote_rpo=None, schedule=None, remote_schedule=None, activate_mirror='no'): '''creates a mirror and returns a mirror object. target name must be a valid target from target_list, mirror type must be 'sync' or 'async', slave_resource_name would be the slave_cg name''' return self._create_mirror('cg', resource_name, target_name, mirror_type, slave_resource_name, rpo=rpo, remote_rpo=remote_rpo, schedule=schedule, remote_schedule=remote_schedule, activate_mirror=activate_mirror)
creates a mirror and returns a mirror object. target name must be a valid target from target_list, mirror type must be 'sync' or 'async', slave_resource_name would be the slave_cg name
entailment
def get_cg_volumes(self, group_id): """ return all non snapshots volumes in cg """ for volume in self.xcli_client.cmd.vol_list(cg=group_id): if volume.snapshot_of == '': yield volume.name
return all non snapshots volumes in cg
entailment
def _certificate_required(cls, hostname, port=XCLI_DEFAULT_PORT, ca_certs=None, validate=None): ''' returns true if connection should verify certificate ''' if not ca_certs: return False xlog.debug("CONNECT SSL %s:%s, cert_file=%s", hostname, port, ca_certs) certificate = ssl.get_server_certificate((hostname, port), ca_certs=None) # handle XIV pre-defined certifications # if a validation function was given - we let the user check # the certificate himself, with the user's own validate function. # if the validate returned True - the user checked the cert # and we don't need check it, so we return false. if validate: return not validate(certificate) return True
returns true if connection should verify certificate
entailment
def add_converter(cls, klass, conv, score=0): """ Add converter :param klass: class or str :param conv: callable :param score: :return: """ if isinstance(klass, str): klass = import_name(klass) item = klass, conv, score cls.converters.append(item) cls.converters.sort(key=lambda x: x[0]) return cls
Add converter :param klass: class or str :param conv: callable :param score: :return:
entailment
def _populate_bunch_with_element(element): """ Helper function to recursively populates a Bunch from an XML tree. Returns leaf XML elements as a simple value, branch elements are returned as Bunches containing their subelements as value or recursively generated Bunch members. """ if 'value' in element.attrib: return element.get('value') current_bunch = Bunch() if element.get('id'): current_bunch['nextra_element_id'] = element.get('id') for subelement in element.getchildren(): current_bunch[subelement.tag] = _populate_bunch_with_element( subelement) return current_bunch
Helper function to recursively populates a Bunch from an XML tree. Returns leaf XML elements as a simple value, branch elements are returned as Bunches containing their subelements as value or recursively generated Bunch members.
entailment
def all(self, element_type=None, response_path=None): """ Generates Bunches, each representing a single subelement of the response. If an element_type is requested, only elements whose tag matches the element_type are returned. If the response has no subelements (for example, in a <return>-less command), yields None. """ path = self.RETURN_PATH if response_path is not None: path += "/" + response_path response_element = self.response_etree.find(path) if response_element is None: return for subelement in self.response_etree.find(path).getchildren(): if element_type is None or subelement.tag == element_type: yield _populate_bunch_with_element(subelement)
Generates Bunches, each representing a single subelement of the response. If an element_type is requested, only elements whose tag matches the element_type are returned. If the response has no subelements (for example, in a <return>-less command), yields None.
entailment
def as_single_element(self): """ Processes the response as a single-element response, like config_get or system_counters_get. If there is more then one element in the response or no elements this raises a ResponseError """ if self.as_return_etree is None: return None if len(self.as_return_etree.getchildren()) == 1: return _populate_bunch_with_element(self.as_return_etree. getchildren()[0]) return _populate_bunch_with_element(self.as_return_etree)
Processes the response as a single-element response, like config_get or system_counters_get. If there is more then one element in the response or no elements this raises a ResponseError
entailment
def enabled_flags(self): """Return the objects for each individual set flag.""" if not self.value: yield self.__flags_members__[0] return val = self.value while val: lowest_bit = val & -val val ^= lowest_bit yield self.__flags_members__[lowest_bit]
Return the objects for each individual set flag.
entailment
def contains_any(self, other): """Check if any flags are set. (OsuMod.Hidden | OsuMod.HardRock) in flags # Check if either hidden or hardrock are enabled. OsuMod.keyMod in flags # Check if any keymod is enabled. """ return self.value == other.value or self.value & other.value
Check if any flags are set. (OsuMod.Hidden | OsuMod.HardRock) in flags # Check if either hidden or hardrock are enabled. OsuMod.keyMod in flags # Check if any keymod is enabled.
entailment
def get_percentage(self): """Get the cumulative time percentage of each stopwatch (including the current split). Returns ------- cumulative_elapsed_time_percentage : List[float] """ cumulative_elapsed_time = self.get_cumulative_elapsed_time() sum_elapsed_time = sum(cumulative_elapsed_time, datetime.timedelta()) if not sum_elapsed_time: raise ValueError("cannot get percentage if there is no any elapsed time") return [div_timedelta(t, sum_elapsed_time) for t in cumulative_elapsed_time]
Get the cumulative time percentage of each stopwatch (including the current split). Returns ------- cumulative_elapsed_time_percentage : List[float]
entailment
def get_mean_per_split(self): """Get the mean elapsed time per split of each stopwatch (excluding the current split). Returns ------- mean_elapsed_time_per_split : List[datetime.timedelta] """ return [div_timedelta(sum(stopwatch.split_elapsed_time, datetime.timedelta()), len(stopwatch.split_elapsed_time)) if stopwatch.split_elapsed_time else datetime.timedelta() for stopwatch in self]
Get the mean elapsed time per split of each stopwatch (excluding the current split). Returns ------- mean_elapsed_time_per_split : List[datetime.timedelta]
entailment
def get_statistics(self): """Get all statistics as a dictionary. Returns ------- statistics : Dict[str, List] """ return { 'cumulative_elapsed_time': self.get_cumulative_elapsed_time(), 'percentage': self.get_percentage(), 'n_splits': self.get_n_splits(), 'mean_per_split': self.get_mean_per_split(), }
Get all statistics as a dictionary. Returns ------- statistics : Dict[str, List]
entailment
def conform_query(cls, query): """Converts the query string from a target uri, uses cls.default_query to populate default arguments. :param query: Unparsed query string :type query: urllib.parse.unsplit(uri).query :returns: Dictionary of parsed values, everything in cls.default_query will be set if not passed. """ query = parse_qs(query, keep_blank_values=True) # Load yaml of passed values for key, vals in query.items(): # Multiple values of the same name could be passed use first # Also params without strings will be treated as true values query[key] = yaml.load(vals[0] or 'true', Loader=yaml.FullLoader) # If expected, populate with defaults for key, val in cls.default_query.items(): if key not in query: query[key] = val return query
Converts the query string from a target uri, uses cls.default_query to populate default arguments. :param query: Unparsed query string :type query: urllib.parse.unsplit(uri).query :returns: Dictionary of parsed values, everything in cls.default_query will be set if not passed.
entailment
def load_target(cls, scheme, path, fragment, username, password, hostname, port, query, load_method, **kwargs): """Override this method to use values from the parsed uri to initialize the expected target. """ raise NotImplementedError("load_target must be overridden")
Override this method to use values from the parsed uri to initialize the expected target.
entailment
def hog(concurrency, requests, limit, timeout, params, paramfile, headers, headerfile, method, url): '''Sending multiple `HTTP` requests `ON` `GREEN` thread''' params = parse_from_list_and_file(params, paramfile) headers = parse_from_list_and_file(headers, headerfile) # Running information click.echo(HR) click.echo("Hog is running with {} threads, ".format(concurrency) + "{} requests ".format(requests) + "and timeout in {} second(s).".format(timeout)) if limit != 0: click.echo(">>> Limit: {} request(s) per second.".format(limit)) click.echo(HR) # Let's begin! result = Hog(callback).run(url, params, headers, method, timeout, concurrency, requests, limit) sys.stdout.write("\n") print_result(result)
Sending multiple `HTTP` requests `ON` `GREEN` thread
entailment
def _append_value(self, value, _file, _name): """Call this function to write contents. Keyword arguments: * value - dict, content to be dumped * _file - FileIO, output file * _name - str, name of current content dict """ _tabs = '\t' * self._tctr _keys = '{tabs}<key>{name}</key>\n'.format(tabs=_tabs, name=_name) _file.seek(self._sptr, os.SEEK_SET) _file.write(_keys) self._append_dict(value, _file)
Call this function to write contents. Keyword arguments: * value - dict, content to be dumped * _file - FileIO, output file * _name - str, name of current content dict
entailment
def _append_array(self, value, _file): """Call this function to write array contents. Keyword arguments: * value - dict, content to be dumped * _file - FileIO, output file """ _tabs = '\t' * self._tctr _labs = '{tabs}<array>\n'.format(tabs=_tabs) _file.write(_labs) self._tctr += 1 for _item in value: if _item is None: continue _item = self.object_hook(_item) _type = type(_item).__name__ _MAGIC_TYPES[_type](self, _item, _file) self._tctr -= 1 _tabs = '\t' * self._tctr _labs = '{tabs}</array>\n'.format(tabs=_tabs) _file.write(_labs)
Call this function to write array contents. Keyword arguments: * value - dict, content to be dumped * _file - FileIO, output file
entailment
def _append_dict(self, value, _file): """Call this function to write dict contents. Keyword arguments: * value - dict, content to be dumped * _file - FileIO, output file """ _tabs = '\t' * self._tctr _labs = '{tabs}<dict>\n'.format(tabs=_tabs) _file.write(_labs) self._tctr += 1 for (_item, _text) in value.items(): if _text is None: continue _tabs = '\t' * self._tctr _keys = '{tabs}<key>{item}</key>\n'.format(tabs=_tabs, item=_item) _file.write(_keys) _text = self.object_hook(_text) _type = type(_text).__name__ _MAGIC_TYPES[_type](self, _text, _file) self._tctr -= 1 _tabs = '\t' * self._tctr _labs = '{tabs}</dict>\n'.format(tabs=_tabs) _file.write(_labs)
Call this function to write dict contents. Keyword arguments: * value - dict, content to be dumped * _file - FileIO, output file
entailment
def _append_data(self, value, _file): """Call this function to write data contents. Keyword arguments: * value - dict, content to be dumped * _file - FileIO, output file """ # binascii.b2a_base64(value) -> plistlib.Data # binascii.a2b_base64(Data) -> value(bytes) _tabs = '\t' * self._tctr _text = base64.b64encode(value).decode() # value.hex() # str(value)[2:-1] _labs = '{tabs}<data>{text}</data>\n'.format(tabs=_tabs, text=_text) # _labs = '{tabs}<data>\n'.format(tabs=_tabs) # _list = [] # for _item in textwrap.wrap(value.hex(), 32): # _text = ' '.join(textwrap.wrap(_item, 2)) # _item = '{tabs}\t{text}'.format(tabs=_tabs, text=_text) # _list.append(_item) # _labs += '\n'.join(_list) # _data = [H for H in iter( # functools.partial(io.StringIO(value.hex()).read, 2), '') # ] # to split bytes string into length-2 hex string list # _labs += '\n{tabs}</data>\n'.format(tabs=_tabs) _file.write(_labs)
Call this function to write data contents. Keyword arguments: * value - dict, content to be dumped * _file - FileIO, output file
entailment
def _append_date(self, value, _file): """Call this function to write date contents. Keyword arguments: * value - dict, content to be dumped * _file - FileIO, output file """ _tabs = '\t' * self._tctr _text = value.strftime('%Y-%m-%dT%H:%M:%S.%fZ') _labs = '{tabs}<date>{text}</date>\n'.format(tabs=_tabs, text=_text) _file.write(_labs)
Call this function to write date contents. Keyword arguments: * value - dict, content to be dumped * _file - FileIO, output file
entailment
def _append_integer(self, value, _file): """Call this function to write integer contents. Keyword arguments: * value - dict, content to be dumped * _file - FileIO, output file """ _tabs = '\t' * self._tctr _text = value _labs = '{tabs}<integer>{text}</integer>\n'.format(tabs=_tabs, text=_text) _file.write(_labs)
Call this function to write integer contents. Keyword arguments: * value - dict, content to be dumped * _file - FileIO, output file
entailment
def register_output_name(self, input_folder, rel_name, rel_output_name): """Register proper and improper file mappings.""" self.improper_input_file_mapping[rel_name].add(rel_output_name) self.proper_input_file_mapping[os.path.join(input_folder, rel_name)] = rel_output_name self.proper_input_file_mapping[rel_output_name] = rel_output_name
Register proper and improper file mappings.
entailment
def _discover(self): """Find and install all extensions""" for ep in pkg_resources.iter_entry_points('yamlsettings10'): ext = ep.load() if callable(ext): ext = ext() self.add(ext)
Find and install all extensions
entailment
def get_extension(self, protocol): """Retrieve extension for the given protocol :param protocol: name of the protocol :type protocol: string :raises NoProtocolError: no extension registered for protocol """ if protocol not in self.registry: raise NoProtocolError("No protocol for %s" % protocol) index = self.registry[protocol] return self.extensions[index]
Retrieve extension for the given protocol :param protocol: name of the protocol :type protocol: string :raises NoProtocolError: no extension registered for protocol
entailment
def add(self, extension): """Adds an extension to the registry :param extension: Extension object :type extension: yamlsettings.extensions.base.YamlSettingsExtension """ index = len(self.extensions) self.extensions[index] = extension for protocol in extension.protocols: self.registry[protocol] = index
Adds an extension to the registry :param extension: Extension object :type extension: yamlsettings.extensions.base.YamlSettingsExtension
entailment
def _load_first(self, target_uris, load_method, **kwargs): """Load first yamldict target found in uri list. :param target_uris: Uris to try and open :param load_method: load callback :type target_uri: list or string :type load_method: callback :returns: yamldict """ if isinstance(target_uris, string_types): target_uris = [target_uris] # TODO: Move the list logic into the extension, otherwise a # load will always try all missing files first. # TODO: How would multiple protocols work, should the registry hold # persist copies? for target_uri in target_uris: target = urlsplit(target_uri, scheme=self.default_protocol) extension = self.get_extension(target.scheme) query = extension.conform_query(target.query) try: yaml_dict = extension.load_target( target.scheme, target.path, target.fragment, target.username, target.password, target.hostname, target.port, query, load_method, **kwargs ) return yaml_dict except extension.not_found_exception: pass raise IOError("unable to load: {0}".format(target_uris))
Load first yamldict target found in uri list. :param target_uris: Uris to try and open :param load_method: load callback :type target_uri: list or string :type load_method: callback :returns: yamldict
entailment
def load(self, target_uris, fields=None, **kwargs): """Load first yamldict target found in uri. :param target_uris: Uris to try and open :param fields: Fields to filter. Default: None :type target_uri: list or string :type fields: list :returns: yamldict """ yaml_dict = self._load_first( target_uris, yamlsettings.yamldict.load, **kwargs ) # TODO: Move this into the extension, otherwise every load from # a persistant location will refilter fields. if fields: yaml_dict.limit(fields) return yaml_dict
Load first yamldict target found in uri. :param target_uris: Uris to try and open :param fields: Fields to filter. Default: None :type target_uri: list or string :type fields: list :returns: yamldict
entailment
def load_all(self, target_uris, **kwargs): ''' Load *all* YAML settings from a list of file paths given. - File paths in the list gets the priority by their orders of the list. ''' yaml_series = self._load_first( target_uris, yamlsettings.yamldict.load_all, **kwargs ) yaml_dicts = [] for yaml_dict in yaml_series: yaml_dicts.append(yaml_dict) # return YAMLDict objects return yaml_dicts
Load *all* YAML settings from a list of file paths given. - File paths in the list gets the priority by their orders of the list.
entailment
def load_all(stream): """ Parse all YAML documents in a stream and produce corresponding YAMLDict objects. """ loader = YAMLDictLoader(stream) try: while loader.check_data(): yield loader.get_data() finally: loader.dispose()
Parse all YAML documents in a stream and produce corresponding YAMLDict objects.
entailment
def dump(data, stream=None, **kwargs): """ Serialize YAMLDict into a YAML stream. If stream is None, return the produced string instead. """ return yaml.dump_all( [data], stream=stream, Dumper=YAMLDictDumper, **kwargs )
Serialize YAMLDict into a YAML stream. If stream is None, return the produced string instead.
entailment
def dump_all(data_list, stream=None, **kwargs): """ Serialize YAMLDict into a YAML stream. If stream is None, return the produced string instead. """ return yaml.dump_all( data_list, stream=stream, Dumper=YAMLDictDumper, **kwargs )
Serialize YAMLDict into a YAML stream. If stream is None, return the produced string instead.
entailment
def traverse(self, callback): ''' Traverse through all keys and values (in-order) and replace keys and values with the return values from the callback function. ''' def _traverse_node(path, node, callback): ret_val = callback(path, node) if ret_val is not None: # replace node with the return value node = ret_val else: # traverse deep into the hierarchy if isinstance(node, YAMLDict): for k, v in node.items(): node[k] = _traverse_node(path + [k], v, callback) elif isinstance(node, list): for i, v in enumerate(node): node[i] = _traverse_node(path + ['[{0}]'.format(i)], v, callback) else: pass return node _traverse_node([], self, callback)
Traverse through all keys and values (in-order) and replace keys and values with the return values from the callback function.
entailment
def update(self, yaml_dict): ''' Update the content (i.e. keys and values) with yaml_dict. ''' def _update_node(base_node, update_node): if isinstance(update_node, YAMLDict) or \ isinstance(update_node, dict): if not (isinstance(base_node, YAMLDict)): # NOTE: A regular dictionary is replaced by a new # YAMLDict object. new_node = YAMLDict() else: new_node = base_node for k, v in update_node.items(): new_node[k] = _update_node(new_node.get(k), v) elif isinstance(update_node, list) or isinstance( update_node, tuple ): # NOTE: A list/tuple is replaced by a new list/tuple. new_node = [] for v in update_node: new_node.append(_update_node(None, v)) if isinstance(update_node, tuple): new_node = tuple(new_node) else: new_node = update_node return new_node # Convert non-YAMLDict objects to a YAMLDict if not (isinstance(yaml_dict, YAMLDict) or isinstance(yaml_dict, dict)): yaml_dict = YAMLDict(yaml_dict) _update_node(self, yaml_dict)
Update the content (i.e. keys and values) with yaml_dict.
entailment
def rebase(self, yaml_dict): ''' Use yaml_dict as self's new base and update with existing reverse of update. ''' base = yaml_dict.clone() base.update(self) self.clear() self.update(base)
Use yaml_dict as self's new base and update with existing reverse of update.
entailment
def limit(self, keys): ''' Remove all keys other than the keys specified. ''' if not isinstance(keys, list) and not isinstance(keys, tuple): keys = [keys] remove_keys = [k for k in self.keys() if k not in keys] for k in remove_keys: self.pop(k)
Remove all keys other than the keys specified.
entailment
def save(yaml_dict, filepath): ''' Save YAML settings to the specified file path. ''' yamldict.dump(yaml_dict, open(filepath, 'w'), default_flow_style=False)
Save YAML settings to the specified file path.
entailment
def save_all(yaml_dicts, filepath): ''' Save *all* YAML settings to the specified file path. ''' yamldict.dump_all(yaml_dicts, open(filepath, 'w'), default_flow_style=False)
Save *all* YAML settings to the specified file path.
entailment
def update_from_file(yaml_dict, filepaths): ''' Override YAML settings with loaded values from filepaths. - File paths in the list gets the priority by their orders of the list. ''' # load YAML settings with only fields in yaml_dict yaml_dict.update(registry.load(filepaths, list(yaml_dict)))
Override YAML settings with loaded values from filepaths. - File paths in the list gets the priority by their orders of the list.
entailment
def update_from_env(yaml_dict, prefix=None): ''' Override YAML settings with values from the environment variables. - The letter '_' is delimit the hierarchy of the YAML settings such that the value of 'config.databases.local' will be overridden by CONFIG_DATABASES_LOCAL. ''' prefix = prefix or "" def _set_env_var(path, node): env_path = "{0}{1}{2}".format( prefix.upper(), '_' if prefix else '', '_'.join([str(key).upper() for key in path]) ) env_val = os.environ.get(env_path, None) if env_val is not None: # convert the value to a YAML-defined type env_dict = yamldict.load('val: {0}'.format(env_val)) return env_dict.val else: return None # traverse yaml_dict with the callback function yaml_dict.traverse(_set_env_var)
Override YAML settings with values from the environment variables. - The letter '_' is delimit the hierarchy of the YAML settings such that the value of 'config.databases.local' will be overridden by CONFIG_DATABASES_LOCAL.
entailment
def circle(rad=0.5): """Draw a circle""" _ctx = _state["ctx"] _ctx.arc(0, 0, rad, 0, 2 * math.pi) _ctx.set_line_width(0) _ctx.stroke_preserve() # _ctx.set_source_rgb(0.3, 0.4, 0.6) _ctx.fill()
Draw a circle
entailment
def line(x, y, width=0.1): """Draw a line""" ctx = _state["ctx"] ctx.move_to(0, 0) ctx.line_to(x, y) ctx.close_path() # _ctx.set_source_rgb (0.3, 0.2, 0.5) ctx.set_line_width(width) ctx.stroke()
Draw a line
entailment
def triangle(rad=0.5): """Draw a triangle""" # half_height = math.sqrt(3) * side / 6 # half_height = side / 2 ctx = _state["ctx"] side = 3 * rad / math.sqrt(3) ctx.move_to(0, -rad / 2) ctx.line_to(-side / 2, -rad / 2) ctx.line_to(0, rad) ctx.line_to(side / 2, -rad / 2) ctx.close_path() ctx.fill()
Draw a triangle
entailment
def box(side=1): """Draw a box""" half_side = side / 2 _state["ctx"].rectangle(-half_side, -half_side, side, side) _state["ctx"].fill()
Draw a box
entailment
def _dump_header(self): """Initially dump file heads and tails.""" with open(self._file, 'w') as _file: _file.write(self._hsrt) self._sptr = _file.tell() _file.write(self._hend)
Initially dump file heads and tails.
entailment
def find_version( package_name: str, version_module_name: str = '_version', version_variable_name: str = 'VERSION') -> str: """Simulate behaviour of "from package_name._version import VERSION", and return VERSION.""" version_module = importlib.import_module( '{}.{}'.format(package_name.replace('-', '_'), version_module_name)) return getattr(version_module, version_variable_name)
Simulate behaviour of "from package_name._version import VERSION", and return VERSION.
entailment
def find_packages(root_directory: str = '.') -> t.List[str]: """Find packages to pack.""" exclude = ['test*', 'test.*'] if ('bdist_wheel' in sys.argv or 'bdist' in sys.argv) else [] packages_list = setuptools.find_packages(root_directory, exclude=exclude) return packages_list
Find packages to pack.
entailment
def parse_requirements( requirements_path: str = 'requirements.txt') -> t.List[str]: """Read contents of requirements.txt file and return data from its relevant lines. Only non-empty and non-comment lines are relevant. """ requirements = [] with HERE.joinpath(requirements_path).open() as reqs_file: for requirement in [line.strip() for line in reqs_file.read().splitlines()]: if not requirement or requirement.startswith('#'): continue requirements.append(requirement) return requirements
Read contents of requirements.txt file and return data from its relevant lines. Only non-empty and non-comment lines are relevant.
entailment
def partition_version_classifiers( classifiers: t.Sequence[str], version_prefix: str = 'Programming Language :: Python :: ', only_suffix: str = ' :: Only') -> t.Tuple[t.List[str], t.List[str]]: """Find version number classifiers in given list and partition them into 2 groups.""" versions_min, versions_only = [], [] for classifier in classifiers: version = classifier.replace(version_prefix, '') versions = versions_min if version.endswith(only_suffix): version = version.replace(only_suffix, '') versions = versions_only try: versions.append(tuple([int(_) for _ in version.split('.')])) except ValueError: pass return versions_min, versions_only
Find version number classifiers in given list and partition them into 2 groups.
entailment
def find_required_python_version( classifiers: t.Sequence[str], version_prefix: str = 'Programming Language :: Python :: ', only_suffix: str = ' :: Only') -> t.Optional[str]: """Determine the minimum required Python version.""" versions_min, versions_only = partition_version_classifiers( classifiers, version_prefix, only_suffix) if len(versions_only) > 1: raise ValueError( 'more than one "{}" version encountered in {}'.format(only_suffix, versions_only)) only_version = None if len(versions_only) == 1: only_version = versions_only[0] for version in versions_min: if version[:len(only_version)] != only_version: raise ValueError( 'the "{}" version {} is inconsistent with version {}' .format(only_suffix, only_version, version)) min_supported_version = None for version in versions_min: if min_supported_version is None or \ (len(version) >= len(min_supported_version) and version < min_supported_version): min_supported_version = version if min_supported_version is None: if only_version is not None: return '.'.join([str(_) for _ in only_version]) else: return '>=' + '.'.join([str(_) for _ in min_supported_version]) return None
Determine the minimum required Python version.
entailment
def parse_rst(text: str) -> docutils.nodes.document: """Parse text assuming it's an RST markup.""" parser = docutils.parsers.rst.Parser() components = (docutils.parsers.rst.Parser,) settings = docutils.frontend.OptionParser(components=components).get_default_values() document = docutils.utils.new_document('<rst-doc>', settings=settings) parser.parse(text, document) return document
Parse text assuming it's an RST markup.
entailment
def resolve_relative_rst_links(text: str, base_link: str): """Resolve all relative links in a given RST document. All links of form `link`_ become `link <base_link/link>`_. """ document = parse_rst(text) visitor = SimpleRefCounter(document) document.walk(visitor) for target in visitor.references: name = target.attributes['name'] uri = target.attributes['refuri'] new_link = '`{} <{}{}>`_'.format(name, base_link, uri) if name == uri: text = text.replace('`<{}>`_'.format(uri), new_link) else: text = text.replace('`{} <{}>`_'.format(name, uri), new_link) return text
Resolve all relative links in a given RST document. All links of form `link`_ become `link <base_link/link>`_.
entailment
def visit_reference(self, node: docutils.nodes.reference) -> None: """Called for "reference" nodes.""" # if len(node.children) != 1 or not isinstance(node.children[0], docutils.nodes.Text) \ # or not all(_ in node.attributes for _ in ('name', 'refuri')): # return path = pathlib.Path(node.attributes['refuri']) try: if path.is_absolute(): return resolved_path = path.resolve() except FileNotFoundError: # in resolve(), prior to Python 3.6 return # except OSError: # in is_absolute() and resolve(), on URLs in Windows # return try: resolved_path.relative_to(HERE) except ValueError: return if not path.is_file(): return assert node.attributes['name'] == node.children[0].astext() self.references.append(node)
Called for "reference" nodes.
entailment
def try_fields(cls, *names) -> t.Optional[t.Any]: """Return first existing of given class field names.""" for name in names: if hasattr(cls, name): return getattr(cls, name) raise AttributeError((cls, names))
Return first existing of given class field names.
entailment
def parse_readme(cls, readme_path: str = 'README.rst', encoding: str = 'utf-8') -> str: """Parse readme and resolve relative links in it if it is feasible. Links are resolved if readme is in rst format and the package is hosted on GitHub. """ with HERE.joinpath(readme_path).open(encoding=encoding) as readme_file: long_description = readme_file.read() # type: str if readme_path.endswith('.rst') and cls.download_url.startswith('https://github.com/'): base_url = '{}/blob/v{}/'.format(cls.download_url, cls.version) long_description = resolve_relative_rst_links(long_description, base_url) return long_description
Parse readme and resolve relative links in it if it is feasible. Links are resolved if readme is in rst format and the package is hosted on GitHub.
entailment
def prepare(cls) -> None:
    """Fill in any package metadata fields that are still unset.

    Each None-valued field is computed lazily; insertion order matters
    because parse_readme reads cls.version.
    """
    fillers = collections.OrderedDict([
        ('version', lambda: find_version(cls.name)),
        ('long_description', cls.parse_readme),
        ('packages', lambda: find_packages(cls.root_directory)),
        ('install_requires', parse_requirements),
        ('python_requires', lambda: find_required_python_version(cls.classifiers)),
    ])
    for field, filler in fillers.items():
        if getattr(cls, field) is None:
            setattr(cls, field, filler())
Fill in possibly missing package metadata.
entailment
def setup(cls) -> None:
    """Run setuptools.setup() with arguments assembled from class metadata."""
    cls.prepare()
    setup_kwargs = {
        'name': cls.name,
        'version': cls.version,
        'description': cls.description,
        'long_description': cls.long_description,
        'url': cls.url,
        'download_url': cls.download_url,
        'author': cls.author,
        'author_email': cls.author_email,
        # fall back to author/author_email when no maintainer is declared
        'maintainer': cls.try_fields('maintainer', 'author'),
        'maintainer_email': cls.try_fields('maintainer_email', 'author_email'),
        'license': cls.license_str,
        'classifiers': cls.classifiers,
        'keywords': cls.keywords,
        'packages': cls.packages,
        'package_dir': {'': cls.root_directory},
        'include_package_data': True,
        'package_data': cls.package_data,
        'exclude_package_data': cls.exclude_package_data,
        'install_requires': cls.install_requires,
        'extras_require': cls.extras_require,
        'python_requires': cls.python_requires,
        'entry_points': cls.entry_points,
        'test_suite': cls.test_suite,
    }
    setuptools.setup(**setup_kwargs)
Run setuptools.setup() with correct arguments.
entailment
def surface_to_image(surface):
    """Render the given cairo surface as an IPython image."""
    # imported lazily so the module works without IPython installed
    from IPython.display import Image
    with BytesIO() as buffer:
        surface.write_to_png(buffer)
        png_bytes = buffer.getvalue()
    return Image(data=png_bytes)
Renders current buffer surface to IPython image
entailment
def get_npimage(transparent=False, y_origin="top"):
    """Return a WxHx[3-4] numpy array representing the RGB picture.

    If `transparent` is True the image is WxHx4 and represents a RGBA
    picture, i.e. array[i,j] is the [r,g,b,a] value of the pixel at
    position [i,j].  If `transparent` is false, a RGB array is returned.

    Parameter y_origin ("top" or "bottom") decides whether point (0,0)
    lies in the top-left or bottom-left corner of the screen.
    """
    image_surface = render_record_surface()
    # copy so the array is writable and independent of the surface buffer
    pixels = np.frombuffer(image_surface.get_data(), np.uint8).copy()
    pixels = pixels.reshape((HEIGHT, WIDTH, 4))
    pixels = pixels[:, :, [2, 1, 0, 3]]  # cairo stores BGRA; reorder to RGBA
    if y_origin == "bottom":
        pixels = pixels[::-1]
    if transparent:
        return pixels
    return pixels[:, :, :3]
Returns a WxHx[3-4] numpy array representing the RGB picture. If `transparent` is True the image is WxHx4 and represents a RGBA picture, i.e. array[i,j] is the [r,g,b,a] value of the pixel at position [i,j]. If `transparent` is false, a RGB array is returned. Parameter y_origin ("top" or "bottom") decides whether point (0,0) lies in the top-left or bottom-left corner of the screen.
entailment
def check_limits(user_rule):
    """Stop recursion when resolution gets too low or the number of
    components gets too high.

    Decorator for user-defined drawing rules: the wrapped rule is only
    invoked while recursion depth, minimum feature size, and total
    element count stay within the configured limits.
    """
    import functools

    # preserve the user rule's __name__/__doc__ for logging and debugging
    @functools.wraps(user_rule)
    def wrapper(*args, **kwargs):
        """Guard around the user rule enforcing the recursion limits."""
        global _state
        _state["cnt_elements"] += 1
        _state["depth"] += 1
        matrix = _state["ctx"].get_matrix()
        # TODO: add check of transparency = 0
        if _state["depth"] >= MAX_DEPTH:
            logger.info("stop recursion by reaching max depth {}".format(MAX_DEPTH))
        else:
            min_size_scaled = SIZE_MIN_FEATURE / min(WIDTH, HEIGHT)
            # the scale factors live in the first two entries of the cairo matrix
            current_scale = max(abs(matrix[i]) for i in range(2))
            if current_scale < min_size_scaled:
                logger.info("stop recursion by reaching min feature size")
                # TODO: check feature size with respect to current ink size
            elif _state["cnt_elements"] > MAX_ELEMENTS:
                logger.info("stop recursion by reaching max elements")
            else:
                user_rule(*args, **kwargs)
        _state["depth"] -= 1
    return wrapper
Stop recursion if resolution is too low on number of components is too high
entailment
def init(canvas_size=(512, 512), max_depth=12, face_color=None, background_color=None):
    """Initializes global state.

    Args:
        canvas_size: (width, height) of the canvas in pixels.
        max_depth: maximum recursion depth for rule expansion.
        face_color: optional HTML color string for the initial ink; also
            seeds the HSV color state.
        background_color: optional HTML color string for the background.
    """
    # removed dead `global _ctx` / `global cnt_elements` declarations:
    # neither name was ever assigned in this function
    global _background_color
    _background_color = background_color
    global MAX_DEPTH
    global WIDTH
    global HEIGHT
    _init_state()
    # deep rule recursion easily exceeds the default interpreter limit
    sys.setrecursionlimit(20000)
    MAX_DEPTH = max_depth
    WIDTH, HEIGHT = canvas_size
    if face_color is not None:
        r, g, b = htmlcolor_to_rgb(face_color)
        _state["ctx"].set_source_rgb(r, g, b)
        hue, saturation, brightness = colorsys.rgb_to_hsv(r, g, b)
        # alpha defaults to fully opaque
        _state["color"] = (hue, saturation, brightness, 1)
    logger.debug("Init done")
Initializes global state
entailment