def raise_for_api_error(headers: MutableMapping, data: MutableMapping) -> None:
    """
    Check request response for Slack API error

    Args:
        headers: Response headers
        data: Response data

    Raises:
        :class:`slack.exceptions.SlackAPIError`
    """
    if not data["ok"]:
        raise exceptions.SlackAPIError(data.get("error", "unknown_error"), headers, data)

    if "warning" in data:
        LOG.warning("Slack API WARNING: %s", data["warning"])
Check request response for Slack API error Args: headers: Response headers data: Response data Raises: :class:`slack.exceptions.SlackAPIError`
entailment
def decode_body(headers: MutableMapping, body: bytes) -> dict:
    """
    Decode the response body

    For 'application/json' content-type load the body as a dictionary

    Args:
        headers: Response headers
        body: Response body

    Returns:
        decoded body
    """
    type_, encoding = parse_content_type(headers)
    decoded_body = body.decode(encoding)

    # One API endpoint returns the bare string `ok` instead of JSON. To keep a
    # consistent return type, such payloads are wrapped in a dict.
    if type_ == "application/json":
        payload = json.loads(decoded_body)
    elif decoded_body == "ok":
        payload = {"ok": True}
    else:
        payload = {"ok": False, "data": decoded_body}

    return payload
Decode the response body For 'application/json' content-type load the body as a dictionary Args: headers: Response headers body: Response body Returns: decoded body
entailment
def parse_content_type(headers: MutableMapping) -> Tuple[Optional[str], str]:
    """
    Find content-type and encoding of the response

    Args:
        headers: Response headers

    Returns:
        :py:class:`tuple` (content-type, encoding)
    """
    content_type = headers.get("content-type")
    if not content_type:
        return None, "utf-8"

    type_, parameters = cgi.parse_header(content_type)
    encoding = parameters.get("charset", "utf-8")
    return type_, encoding
Find content-type and encoding of the response Args: headers: Response headers Returns: :py:class:`tuple` (content-type, encoding)
entailment
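For reference, a quick sanity check of the two decoding helpers above (a sketch; nothing beyond the standard library and the functions already shown is assumed):

headers = {"content-type": "application/json; charset=utf-8"}
assert parse_content_type(headers) == ("application/json", "utf-8")
assert decode_body(headers, b'{"ok": true}') == {"ok": True}
assert decode_body({}, b"ok") == {"ok": True}   # the bare `ok` body is normalized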
def prepare_request(
    url: Union[str, methods],
    data: Optional[MutableMapping],
    headers: Optional[MutableMapping],
    global_headers: MutableMapping,
    token: str,
    as_json: Optional[bool] = None,
) -> Tuple[str, Union[str, MutableMapping], MutableMapping]:
    """
    Prepare outgoing request

    Create url, headers, add token to the body and if needed json encode it

    Args:
        url: :class:`slack.methods` item or string of url
        data: Outgoing data
        headers: Custom headers
        global_headers: Global headers
        token: Slack API token
        as_json: Post JSON to the slack API

    Returns:
        :py:class:`tuple` (url, body, headers)
    """
    if isinstance(url, methods):
        as_json = as_json or url.value[3]
        real_url = url.value[0]
    else:
        real_url = url
        as_json = False

    if not headers:
        headers = {**global_headers}
    else:
        headers = {**global_headers, **headers}

    payload: Optional[Union[str, MutableMapping]] = None
    if real_url.startswith(HOOK_URL) or (real_url.startswith(ROOT_URL) and as_json):
        payload, headers = _prepare_json_request(data, token, headers)
    elif real_url.startswith(ROOT_URL) and not as_json:
        payload = _prepare_form_encoded_request(data, token)
    else:
        real_url = ROOT_URL + real_url
        payload = _prepare_form_encoded_request(data, token)

    return real_url, payload, headers
Prepare outgoing request Create url, headers, add token to the body and if needed json encode it Args: url: :class:`slack.methods` item or string of url data: Outgoing data headers: Custom headers global_headers: Global headers token: Slack API token as_json: Post JSON to the slack API Returns: :py:class:`tuple` (url, body, headers)
entailment
def decode_response(status: int, headers: MutableMapping, body: bytes) -> dict:
    """
    Decode incoming response

    Args:
        status: Response status
        headers: Response headers
        body: Response body

    Returns:
        Response data
    """
    data = decode_body(headers, body)
    raise_for_status(status, headers, data)
    raise_for_api_error(headers, data)
    return data
Decode incoming response Args: status: Response status headers: Response headers body: Response body Returns: Response data
entailment
def find_iteration(
    url: Union[methods, str],
    itermode: Optional[str] = None,
    iterkey: Optional[str] = None,
) -> Tuple[str, str]:
    """
    Find iteration mode and iteration key for a given :class:`slack.methods`

    Args:
        url: :class:`slack.methods` or string url
        itermode: Custom iteration mode
        iterkey: Custom iteration key

    Returns:
        :py:class:`tuple` (itermode, iterkey)
    """
    if isinstance(url, methods):
        if not itermode:
            itermode = url.value[1]
        if not iterkey:
            iterkey = url.value[2]

    if not iterkey or not itermode:
        raise ValueError("Iteration not supported for: {}".format(url))
    elif itermode not in ITERMODE:
        raise ValueError("Iteration not supported for: {}".format(itermode))

    return itermode, iterkey
Find iteration mode and iteration key for a given :class:`slack.methods` Args: url: :class:`slack.methods` or string url itermode: Custom iteration mode iterkey: Custom iteration key Returns: :py:class:`tuple` (itermode, iterkey)
entailment
def prepare_iter_request(
    url: Union[methods, str],
    data: MutableMapping,
    *,
    iterkey: Optional[str] = None,
    itermode: Optional[str] = None,
    limit: int = 200,
    itervalue: Optional[Union[str, int]] = None,
) -> Tuple[MutableMapping, str, str]:
    """
    Prepare outgoing iteration request

    Args:
        url: :class:`slack.methods` item or string of url
        data: Outgoing data
        limit: Maximum number of results to return per call.
        iterkey: Key in response data to iterate over (required for url string).
        itermode: Iteration mode (required for url string)
            (one of `cursor`, `page` or `timeline`)
        itervalue: Value for current iteration (cursor hash, page or timestamp
            depending on the itermode)

    Returns:
        :py:class:`tuple` (data, iterkey, itermode)
    """
    itermode, iterkey = find_iteration(url, itermode, iterkey)

    if itermode == "cursor":
        data["limit"] = limit
        if itervalue:
            data["cursor"] = itervalue
    elif itermode == "page":
        data["count"] = limit
        if itervalue:
            data["page"] = itervalue
    elif itermode == "timeline":
        data["count"] = limit
        if itervalue:
            data["latest"] = itervalue

    return data, iterkey, itermode
Prepare outgoing iteration request Args: url: :class:`slack.methods` item or string of url data: Outgoing data limit: Maximum number of results to return per call. iterkey: Key in response data to iterate over (required for url string). itermode: Iteration mode (required for url string) (one of `cursor`, `page` or `timeline`) itervalue: Value for current iteration (cursor hash, page or timestamp depending on the itermode) Returns: :py:class:`tuple` (data, iterkey, itermode)
entailment
def decode_iter_request(data: dict) -> Optional[Union[str, int]]:
    """
    Decode incoming response from an iteration request

    Args:
        data: Response data

    Returns:
        Next itervalue
    """
    if "response_metadata" in data:
        return data["response_metadata"].get("next_cursor")
    elif "paging" in data:
        current_page = int(data["paging"].get("page", 1))
        max_page = int(data["paging"].get("pages", 1))

        if current_page < max_page:
            return current_page + 1
    elif "has_more" in data and data["has_more"] and "latest" in data:
        return data["messages"][-1]["ts"]

    return None
Decode incoming response from an iteration request Args: data: Response data Returns: Next itervalue
entailment
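Taken together, the two iteration helpers support a pagination loop along these lines (a sketch; `do_request` is a hypothetical stand-in for whatever transport call the client actually makes, not part of the code shown here):

def fetch_all(url, data):
    itervalue = None
    while True:
        data, iterkey, itermode = prepare_iter_request(url, data, itervalue=itervalue)
        response = do_request(url, data)   # hypothetical HTTP call
        yield from response[iterkey]
        itervalue = decode_iter_request(response)
        if not itervalue:
            break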
def discard_event(event: events.Event, bot_id: Optional[str] = None) -> bool:
    """
    Check if the incoming event needs to be discarded

    Args:
        event: Incoming :class:`slack.events.Event`
        bot_id: Id of connected bot

    Returns:
        boolean
    """
    if event["type"] in SKIP_EVENTS:
        return True
    elif bot_id and isinstance(event, events.Message):
        if event.get("bot_id") == bot_id:
            LOG.debug("Ignoring event: %s", event)
            return True
        elif "message" in event and event["message"].get("bot_id") == bot_id:
            LOG.debug("Ignoring event: %s", event)
            return True

    return False
Check if the incoming event needs to be discarded Args: event: Incoming :class:`slack.events.Event` bot_id: Id of connected bot Returns: boolean
entailment
def validate_request_signature(
    body: str, headers: MutableMapping, signing_secret: str
) -> None:
    """
    Validate incoming request signature using the application signing secret.

    Contrary to the ``team_id`` and ``verification_token`` verification, this
    method is not called by ``slack-sansio`` when creating objects from incoming
    HTTP requests, because the body of the request needs to be provided as text
    and not decoded as json beforehand.

    Args:
        body: Raw request body
        headers: Request headers
        signing_secret: Application signing_secret

    Raises:
        :class:`slack.exceptions.InvalidSlackSignature`: when provided and
            calculated signatures do not match
        :class:`slack.exceptions.InvalidTimestamp`: when the incoming request
            timestamp is more than 5 minutes old
    """
    request_timestamp = int(headers["X-Slack-Request-Timestamp"])
    if (int(time.time()) - request_timestamp) > (60 * 5):
        raise exceptions.InvalidTimestamp(timestamp=request_timestamp)

    slack_signature = headers["X-Slack-Signature"]
    calculated_signature = (
        "v0="
        + hmac.new(
            signing_secret.encode("utf-8"),
            f"""v0:{headers["X-Slack-Request-Timestamp"]}:{body}""".encode("utf-8"),
            digestmod=hashlib.sha256,
        ).hexdigest()
    )
    if not hmac.compare_digest(slack_signature, calculated_signature):
        raise exceptions.InvalidSlackSignature(slack_signature, calculated_signature)
Validate incoming request signature using the application signing secret. Contrary to the ``team_id`` and ``verification_token`` verification, this method is not called by ``slack-sansio`` when creating objects from incoming HTTP requests, because the body of the request needs to be provided as text and not decoded as json beforehand. Args: body: Raw request body headers: Request headers signing_secret: Application signing_secret Raises: :class:`slack.exceptions.InvalidSlackSignature`: when provided and calculated signatures do not match :class:`slack.exceptions.InvalidTimestamp`: when the incoming request timestamp is more than 5 minutes old
entailment
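A sketch of how a test could forge a request that passes the check above, mirroring the v0 signing scheme the function implements (the secret and body here are made-up placeholders):

import hashlib
import hmac
import time

secret = "my-test-signing-secret"            # placeholder, not a real secret
body = "token=xyz&team_id=T1234"
ts = str(int(time.time()))
sig = "v0=" + hmac.new(secret.encode("utf-8"),
                       f"v0:{ts}:{body}".encode("utf-8"),
                       hashlib.sha256).hexdigest()
headers = {"X-Slack-Request-Timestamp": ts, "X-Slack-Signature": sig}
validate_request_signature(body, headers, secret)  # should not raise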
def backup(mongo_username, mongo_password, local_backup_directory_path,
           database=None, attached_directory_path=None, custom_prefix="backup",
           mongo_backup_directory_path="/tmp/mongo_dump", s3_bucket=None,
           s3_access_key_id=None, s3_secret_key=None, purge_local=None,
           purge_attached=None, cleanup=True, silent=False):
    """
    Runs a backup operation to at least a local directory. You must provide
    mongodb credentials along with a directory for the dump operation and a
    directory to contain your compressed backup.

    custom_prefix: optionally provide a prefix to be prepended to your backups;
        by default the prefix is "backup".
    database: optionally provide the name of one specific database to back up
        (instead of backing up all databases on the MongoDB server).
    attached_directory_path: makes a second copy of the backup to a different
        directory. This directory is checked before other operations and will
        raise an error if it cannot be found.
    s3_bucket: if you have an Amazon Web Services S3 account you can
        automatically upload the backup to an S3 bucket you provide; requires
        s3_access_key_id and s3_secret_key to be passed as well.
    s3_access_key_id, s3_secret_key: credentials for your AWS account.
    purge_local: an integer value, the number of days of backups to purge from
        local_backup_directory_path after operations have completed.
    purge_attached: an integer value, the number of days of backups to purge
        from attached_directory_path after operations have completed.
    cleanup: set to False to leave the mongo_backup_directory_path after
        operations have completed.
    """
    if attached_directory_path:
        if not path.exists(attached_directory_path):
            raise Exception("ERROR. Would have to create %s for your attached "
                            "storage; make sure that file paths already exist "
                            "and re-run." % (attached_directory_path))

    # Dump mongo, tar-bzip it, copy to attached storage, upload to S3, purge, clean.
    full_file_name_path = local_backup_directory_path + custom_prefix + time_string()
    mongodump(mongo_username, mongo_password, mongo_backup_directory_path,
              database, silent=silent)
    local_backup_file = tarbz(mongo_backup_directory_path, full_file_name_path,
                              silent=silent)

    if attached_directory_path:
        copy(local_backup_file,
             attached_directory_path + local_backup_file.split("/")[-1])

    if s3_bucket:
        s3_upload(local_backup_file, s3_bucket, s3_access_key_id, s3_secret_key)

    if purge_local:
        purge_date = (datetime.utcnow().replace(second=0, microsecond=0)
                      - timedelta(days=purge_local))
        purge_old_files(purge_date, local_backup_directory_path,
                        custom_prefix=custom_prefix)

    if purge_attached and attached_directory_path:
        purge_date = (datetime.utcnow().replace(second=0, microsecond=0)
                      - timedelta(days=purge_attached))
        purge_old_files(purge_date, attached_directory_path,
                        custom_prefix=custom_prefix)

    if cleanup:
        rmtree(mongo_backup_directory_path)
Runs a backup operation to at least a local directory. You must provide mongodb credentials along with a directory for a dump operation and a directory to contain your compressed backup. custom_prefix: optionally provide a prefix to be prepended to your backups; by default the prefix is "backup". database: optionally provide the name of one specific database to back up (instead of backing up all databases on the MongoDB server) attached_directory_path: makes a second copy of the backup to a different directory. This directory is checked before other operations and will raise an error if it cannot be found. s3_bucket: if you have an Amazon Web Services S3 account you can automatically upload the backup to an S3 Bucket you provide; requires s3_access_key_id and s3_secret_key to be passed as well s3_access_key_id, s3_secret_key: credentials for your AWS account. purge_local: An integer value, the number of days of backups to purge from local_backup_directory_path after operations have completed. purge_attached: An integer value, the number of days of backups to purge from attached_directory_path after operations have completed. cleanup: set to False to leave the mongo_backup_directory_path after operations have completed.
entailment
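A hypothetical invocation; every path and credential below is a placeholder. Note that local_backup_directory_path is concatenated directly with the backup prefix, so it must end in a trailing slash:

backup(
    "backup_user", "backup_password",                   # placeholder credentials
    local_backup_directory_path="/var/backups/mongo/",  # trailing slash required
    database="mydb",
    s3_bucket="my-backup-bucket",
    s3_access_key_id="AKIA...",
    s3_secret_key="...",
    purge_local=30,                                     # keep 30 days locally
)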
def restore(mongo_user, mongo_password, backup_tbz_path,
            backup_directory_output_path="/tmp/mongo_dump", drop_database=False,
            cleanup=True, silent=False, skip_system_and_user_files=False):
    """
    Runs mongorestore with source data from the provided .tbz backup, using the
    provided username and password. The contents of the .tbz will be dumped into
    the provided backup directory, and that folder will be deleted after a
    successful mongodb restore unless cleanup is set to False.

    Note: skip_system_and_user_files is intended for use with the changes in
    user architecture introduced in mongodb version 2.6.

    Warning: Setting drop_database to True will drop the ENTIRE CURRENTLY
    RUNNING DATABASE before restoring.

    Mongorestore requires a running mongod process; in addition, the provided
    user must have restore permissions for the database. A mongolia superuser
    will have more than adequate permissions, but a regular user may not.

    By default this function will clean up the output of the untar operation.
    """
    if not path.exists(backup_tbz_path):
        raise Exception("the provided tar file %s does not exist." % (backup_tbz_path))

    untarbz(backup_tbz_path, backup_directory_output_path, silent=silent)

    if skip_system_and_user_files:
        system_and_users_path = "%s/admin" % backup_directory_output_path
        if path.exists(system_and_users_path):
            rmtree(system_and_users_path)

    mongorestore(mongo_user, mongo_password, backup_directory_output_path,
                 drop_database=drop_database, silent=silent)
    if cleanup:
        rmtree(backup_directory_output_path)
Runs mongorestore with source data from the provided .tbz backup, using the provided username and password. The contents of the .tbz will be dumped into the provided backup directory, and that folder will be deleted after a successful mongodb restore unless cleanup is set to False. Note: skip_system_and_user_files is intended for use with the changes in user architecture introduced in mongodb version 2.6. Warning: Setting drop_database to True will drop the ENTIRE CURRENTLY RUNNING DATABASE before restoring. Mongorestore requires a running mongod process; in addition, the provided user must have restore permissions for the database. A mongolia superuser will have more than adequate permissions, but a regular user may not. By default this function will clean up the output of the untar operation.
entailment
def mongodump(mongo_user, mongo_password, mongo_dump_directory_path,
              database=None, silent=False):
    """
    Runs mongodump using the provided credentials on the running mongod
    process.

    WARNING: This function will delete the contents of the provided directory
    before it runs.
    """
    if path.exists(mongo_dump_directory_path):
        # If a backup dump already exists, delete it
        rmtree(mongo_dump_directory_path)
    if silent:
        dump_command = ("mongodump --quiet -u %s -p %s -o %s"
                        % (mongo_user, mongo_password, mongo_dump_directory_path))
    else:
        dump_command = ("mongodump -u %s -p %s -o %s"
                        % (mongo_user, mongo_password, mongo_dump_directory_path))
    if database:
        dump_command += (" --db %s" % database)
    call(dump_command, silent=silent)
Runs mongodump using the provided credentials on the running mongod process. WARNING: This function will delete the contents of the provided directory before it runs.
entailment
def mongorestore(mongo_user, mongo_password, backup_directory_path,
                 drop_database=False, silent=False):
    """
    Warning: Setting drop_database to True will drop the ENTIRE CURRENTLY
    RUNNING DATABASE before restoring.

    Mongorestore requires a running mongod process; in addition, the provided
    user must have restore permissions for the database. A mongolia superuser
    will have more than adequate permissions, but a regular user may not.
    """
    if not path.exists(backup_directory_path):
        raise Exception("the provided tar directory %s does not exist."
                        % (backup_directory_path))
    if silent:
        mongorestore_command = ("mongorestore --quiet -u %s -p %s %s"
                                % (mongo_user, mongo_password, backup_directory_path))
    else:
        mongorestore_command = ("mongorestore -v -u %s -p %s %s"
                                % (mongo_user, mongo_password, backup_directory_path))
    if drop_database:
        mongorestore_command = mongorestore_command + " --drop"
    call(mongorestore_command, silent=silent)
Warning: Setting drop_database to True will drop the ENTIRE CURRENTLY RUNNING DATABASE before restoring. Mongorestore requires a running mongod process; in addition, the provided user must have restore permissions for the database. A mongolia superuser will have more than adequate permissions, but a regular user may not.
entailment
def get_backup_file_time_tag(file_name, custom_prefix="backup"):
    """
    Returns a datetime object computed from a file name string, with
    formatting based on DATETIME_FORMAT.
    """
    name_string = file_name[len(custom_prefix):]
    time_tag = name_string.split(".", 1)[0]
    return datetime.strptime(time_tag, DATETIME_FORMAT)
Returns a datetime object computed from a file name string, with formatting based on DATETIME_FORMAT.
entailment
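Assuming DATETIME_FORMAT is something like "%Y-%m-%d_%H-%M" (the constant itself is not shown in this excerpt), the prefix is stripped and everything before the first "." is parsed:

tag = get_backup_file_time_tag("backup2024-05-01_12-30.tbz")
# -> datetime.datetime(2024, 5, 1, 12, 30), under the assumed format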
def purge_old_files(date_time, directory_path, custom_prefix="backup"):
    """
    Takes a datetime object and a directory path, runs through files in the
    directory and deletes those tagged with a date from before the provided
    datetime.

    If your backups have a custom_prefix that is not the default ("backup"),
    provide it with the "custom_prefix" kwarg.
    """
    for file_name in listdir(directory_path):
        try:
            file_date_time = get_backup_file_time_tag(file_name,
                                                      custom_prefix=custom_prefix)
        except ValueError as e:
            # Python 3: ValueError has no .message attribute, so inspect str(e)
            if "does not match format" in str(e):
                print("WARNING. file(s) in %s do not match naming convention."
                      % (directory_path))
                continue
            raise
        if file_date_time < date_time:
            remove(directory_path + file_name)
Takes a datetime object and a directory path, runs through files in the directory and deletes those tagged with a date from before the provided datetime. If your backups have a custom_prefix that is not the default ("backup"), provide it with the "custom_prefix" kwarg.
entailment
def get_download_uri(package_name, version, source, index_url=None):
    """
    Use setuptools to search for a package's URI

    @returns: URI string
    """
    tmpdir = None
    force_scan = True
    develop_ok = False
    if not index_url:
        index_url = 'http://cheeseshop.python.org/pypi'
    if version:
        pkg_spec = "%s==%s" % (package_name, version)
    else:
        pkg_spec = package_name
    req = pkg_resources.Requirement.parse(pkg_spec)

    pkg_index = MyPackageIndex(index_url)
    try:
        pkg_index.fetch_distribution(req, tmpdir, force_scan, source, develop_ok)
    except DownloadURI as url:
        # Remove #egg=pkg-dev
        clean_url = url.value.split("#")[0]
        # If setuptools is asked for an egg and there isn't one, it will
        # return source if available, which we don't want.
        if not source and not clean_url.endswith(".egg") and \
                not clean_url.endswith(".EGG"):
            return
        else:
            return clean_url
Use setuptools to search for a package's URI @returns: URI string
entailment
def get_pkglist():
    """
    Return list of all installed packages

    Note: It returns one project name per pkg no matter how many versions of
    a particular package is installed

    @returns: list of project name strings for every installed pkg
    """
    dists = Distributions()
    projects = []
    for (dist, _active) in dists.get_distributions("all"):
        if dist.project_name not in projects:
            projects.append(dist.project_name)
    return projects
Return list of all installed packages Note: It returns one project name per pkg no matter how many versions of a particular package is installed @returns: list of project name strings for every installed pkg
entailment
def register(self, command: str, handler: Any):
    """
    Register a new handler for a specific slash command

    Args:
        command: Slash command
        handler: Callback
    """
    if not command.startswith("/"):
        command = f"/{command}"
    LOG.info("Registering %s to %s", command, handler)
    self._routes[command].append(handler)
Register a new handler for a specific slash command Args: command: Slash command handler: Callback
entailment
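Registration is symmetrical with or without the leading slash; `router` below is a hypothetical instance of the class this method belongs to:

def on_hello(command):
    print("got", command)

router.register("hello", on_hello)    # normalized and stored under "/hello"
router.register("/hello", on_hello)   # identical effect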
def dispatch(self, command: Command) -> Iterator[Any]:
    """
    Yields handlers matching the incoming :class:`slack.actions.Command`.

    Args:
        command: :class:`slack.actions.Command`

    Yields:
        handler
    """
    LOG.debug("Dispatching command %s", command["command"])
    for callback in self._routes[command["command"]]:
        yield callback
Yields handlers matching the incoming :class:`slack.actions.Command`. Args: command: :class:`slack.actions.Command` Yields: handler
entailment
def setpreferredapi(api):
    """
    Set the preferred Qt API.

    Will raise a RuntimeError if a Qt API was already selected.

    Note that QT_API environment variable (if set) will take precedence.
    """
    global __PREFERRED_API
    if __SELECTED_API is not None:
        raise RuntimeError("A Qt api {} was already selected"
                           .format(__SELECTED_API))
    if api.lower() not in {"pyqt4", "pyqt5", "pyside", "pyside2"}:
        raise ValueError(api)
    __PREFERRED_API = api.lower()
Set the preferred Qt API. Will raise a RuntimeError if a Qt API was already selected. Note that QT_API environment variable (if set) will take precedence.
entailment
def selectapi(api):
    """
    Select a Qt API to use.

    This can only be set once and before any of the Qt modules are
    explicitly imported.
    """
    global __SELECTED_API, USED_API
    if api.lower() not in {"pyqt4", "pyqt5", "pyside", "pyside2"}:
        raise ValueError(api)

    if __SELECTED_API is not None and __SELECTED_API.lower() != api.lower():
        raise RuntimeError("A Qt API {} was already selected"
                           .format(__SELECTED_API))
    elif __SELECTED_API is None:
        __SELECTED_API = api.lower()
        from . import _api
        USED_API = _api.USED_API
Select a Qt API to use. This can only be set once and before any of the Qt modules are explicitly imported.
entailment
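Typical start-up usage, before any Qt module is imported (a sketch; `qt_shim` is a hypothetical name for the module containing the two functions above):

import qt_shim  # hypothetical module name

qt_shim.setpreferredapi("pyqt5")  # soft preference; the QT_API env var wins
qt_shim.selectapi("pyqt5")        # hard selection; can only happen once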
def get_highest_version(versions):
    """
    Returns highest available version for a package in a list of versions

    Uses pkg_resources to parse the versions

    @param versions: List of PyPI package versions
    @type versions: List of strings

    @returns: string of a PyPI package version
    """
    sorted_versions = []
    for ver in versions:
        sorted_versions.append((pkg_resources.parse_version(ver), ver))
    sorted_versions = sorted(sorted_versions)
    sorted_versions.reverse()
    return sorted_versions[0][1]
Returns highest available version for a package in a list of versions Uses pkg_resources to parse the versions @param versions: List of PyPI package versions @type versions: List of strings @returns: string of a PyPI package version
entailment
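A plain string sort would rank "0.9" above "0.10"; parsing through pkg_resources gets the ordering right, including pre-releases:

assert get_highest_version(["0.9", "0.10", "0.10rc1"]) == "0.10"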
def get_distributions(self, show, pkg_name="", version=""):
    """
    Yield installed packages

    @param show: Type of package(s) to show; active, non-active or all
    @type show: string: "active", "nonactive", "all"

    @param pkg_name: PyPI project name
    @type pkg_name: string

    @param version: project's PyPI version
    @type version: string

    @returns: yields tuples of distribution and True or False depending on
              active state, e.g. (dist, True)
    """
    # pylint: disable-msg=W0612
    # 'name' is a placeholder for the sorted list
    for name, dist in self.get_alpha(show, pkg_name, version):
        ver = dist.version
        for package in self.environment[dist.project_name]:
            if ver == package.version:
                if show == "nonactive" and dist not in self.working_set:
                    yield (dist, self.query_activated(dist))
                elif show == "active" and dist in self.working_set:
                    yield (dist, self.query_activated(dist))
                elif show == "all":
                    yield (dist, self.query_activated(dist))
Yield installed packages @param show: Type of package(s) to show; active, non-active or all @type show: string: "active", "nonactive", "all" @param pkg_name: PyPI project name @type pkg_name: string @param version: project's PyPI version @type version: string @returns: yields tuples of distribution and True or False depending on active state. e.g. (dist, True)
entailment
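A sketch of how the generator is consumed; get_pkglist above uses the same pattern:

dists = Distributions()
for dist, active in dists.get_distributions("active"):
    print(dist.project_name, dist.version, active)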
def get_alpha(self, show, pkg_name="", version=""):
    """
    Return list of alphabetized packages

    @param pkg_name: PyPI project name
    @type pkg_name: string

    @param version: project's PyPI version
    @type version: string

    @returns: Alphabetized list of tuples. Each tuple contains a string and a
              pkg_resources Distribution object. The string is the project
              name + version.
    """
    alpha_list = []
    for dist in self.get_packages(show):
        if pkg_name and dist.project_name != pkg_name:
            # Only checking for a single package name
            pass
        elif version and dist.version != version:
            # Only checking for a single version of a package
            pass
        else:
            alpha_list.append((dist.project_name + dist.version, dist))
    alpha_list.sort()
    return alpha_list
Return list of alphabetized packages @param pkg_name: PyPI project name @type pkg_name: string @param version: project's PyPI version @type version: string @returns: Alphabetized list of tuples. Each tuple contains a string and a pkg_resources Distribution object. The string is the project name + version.
entailment
def get_packages(self, show):
    """
    Return list of Distributions filtered by active status or all

    @param show: Type of package(s) to show; active, non-active or all
    @type show: string: "active", "nonactive", "all"

    @returns: list of pkg_resources Distribution objects
    """
    if show == 'nonactive' or show == "all":
        all_packages = []
        for package in self.environment:
            # There may be multiple versions of the same package
            for i in range(len(self.environment[package])):
                if self.environment[package][i]:
                    all_packages.append(self.environment[package][i])
        return all_packages
    else:
        # Only activated packages
        return self.working_set
Return list of Distributions filtered by active status or all @param show: Type of package(s) to show; active, non-active or all @type show: string: "active", "nonactive", "all" @returns: list of pkg_resources Distribution objects
entailment
def case_sensitive_name(self, package_name):
    """
    Return case-sensitive package name given any-case package name

    @param package_name: PyPI project name
    @type package_name: string
    """
    if len(self.environment[package_name]):
        return self.environment[package_name][0].project_name
Return case-sensitive package name given any-case package name @param package_name: PyPI project name @type package_name: string
entailment
def cache_incr(self, key):
    """
    Non-atomic cache increment operation. Not optimal but consistent across
    different cache backends.
    """
    cache.set(key, cache.get(key, 0) + 1, self.expire_after())
Non-atomic cache increment operation. Not optimal but consistent across different cache backends.
entailment
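Backends with native atomic increments can avoid the read-modify-write race. A sketch of an atomic variant using Django's cache API, assuming a backend such as memcached or Redis that supports incr (a small race remains between the two calls, but each call itself is atomic):

def cache_incr_atomic(self, key):
    try:
        cache.incr(key)                          # atomic on supporting backends
    except ValueError:                           # raised when the key is absent
        cache.add(key, 1, self.expire_after())   # set-if-missing, also atomic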
def call_plugins(plugins, method, *arg, **kw):
    """Call all method on plugins in list, that define it, with provided
    arguments. The first response that is not None is returned.
    """
    for plug in plugins:
        func = getattr(plug, method, None)
        if func is None:
            continue
        # LOG.debug("call plugin %s: %s", plug.name, method)
        result = func(*arg, **kw)
        if result is not None:
            return result
    return None
Call all method on plugins in list, that define it, with provided arguments. The first response that is not None is returned.
entailment
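A toy illustration of the first-non-None contract (hypothetical plugin classes):

class EnglishPlugin:
    def greet(self):
        return None          # declines to answer; the loop moves on

class FrenchPlugin:
    def greet(self):
        return "bonjour"

assert call_plugins([EnglishPlugin(), FrenchPlugin()], "greet") == "bonjour"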
def load_plugins(builtin=True, others=True):
    """Load plugins, either builtin, others, or both.
    """
    for entry_point in pkg_resources.iter_entry_points('yolk.plugins'):
        # LOG.debug("load plugin %s" % entry_point)
        try:
            plugin = entry_point.load()
        except KeyboardInterrupt:
            raise
        except Exception as err_msg:
            # never want a plugin load to exit yolk
            # but we can't log here because the logger is not yet configured
            warn("Unable to load plugin %s: %s" % (entry_point, err_msg),
                 RuntimeWarning)
            continue
        if plugin.__module__.startswith('yolk.plugins'):
            if builtin:
                yield plugin
        elif others:
            yield plugin
Load plugins, either builtin, others, or both.
entailment
def s3_connect(bucket_name, s3_access_key_id, s3_secret_key):
    """
    Returns a Boto connection to the provided S3 bucket.
    """
    conn = connect_s3(s3_access_key_id, s3_secret_key)
    try:
        return conn.get_bucket(bucket_name)
    except S3ResponseError as e:
        if e.status == 403:
            raise Exception("Bad Amazon S3 credentials.")
        raise
Returns a Boto connection to the provided S3 bucket.
entailment
def s3_list(s3_bucket, s3_access_key_id, s3_secret_key, prefix=None):
    """
    Lists the contents of the S3 bucket that end in .tbz and match the passed
    prefix, if any.
    """
    bucket = s3_connect(s3_bucket, s3_access_key_id, s3_secret_key)
    return sorted([key.name for key in bucket.list()
                   if key.name.endswith(".tbz")
                   and (prefix is None or key.name.startswith(prefix))])
Lists the contents of the S3 bucket that end in .tbz and match the passed prefix, if any.
entailment
def s3_download(output_file_path, s3_bucket, s3_access_key_id, s3_secret_key,
                s3_file_key=None, prefix=None):
    """
    Downloads the file matching the provided key, in the provided bucket,
    from Amazon S3.

    If s3_file_key is None, it downloads the last file from the provided
    bucket with the .tbz extension, filtering by prefix if it is provided.
    """
    bucket = s3_connect(s3_bucket, s3_access_key_id, s3_secret_key)
    if not s3_file_key:
        keys = s3_list(s3_bucket, s3_access_key_id, s3_secret_key, prefix)
        if not keys:
            raise Exception("Target S3 bucket is empty")
        s3_file_key = keys[-1]
    key = Key(bucket, s3_file_key)
    # Open in binary mode: the backup is a compressed tarball, not text
    with open(output_file_path, "wb") as f:
        f.write(key.read())
Downloads the file matching the provided key, in the provided bucket, from Amazon S3. If s3_file_key is None, it downloads the last file from the provided bucket with the .tbz extension, filtering by prefix if it is provided.
entailment
def s3_upload(source_file_path, bucket_name, s3_access_key_id, s3_secret_key):
    """
    Uploads to Amazon S3 the contents of the provided file, keyed with the
    name of the file.
    """
    key = s3_key(bucket_name, s3_access_key_id, s3_secret_key)
    file_name = source_file_path.split("/")[-1]
    key.key = file_name
    if key.exists():
        raise Exception("s3 key %s already exists for current period."
                        % (file_name))
    key.set_contents_from_filename(source_file_path)
Uploads to Amazon S3 the contents of the provided file, keyed with the name of the file.
entailment
def fix_pyqt5_QGraphicsItem_itemChange():
    """
    Attempt to remedy:
    https://www.riverbankcomputing.com/pipermail/pyqt/2016-February/037015.html
    """
    from PyQt5.QtWidgets import QGraphicsObject, QGraphicsItem

    class Obj(QGraphicsObject):
        def itemChange(self, change, value):
            return QGraphicsObject.itemChange(self, change, value)

    obj = Obj()
    parent = Obj()
    obj.setParentItem(parent)

    if obj.parentItem() is None:
        # There was probably already some signal defined using QObject's
        # subclass from QtWidgets.
        # We will monkey patch the QGraphicsItem.itemChange and explicitly
        # sip.cast all input and output QGraphicsItem instances
        import sip
        QGraphicsItem_itemChange_old = QGraphicsItem.itemChange

        # All the QGraphicsItem.ItemChange flags which accept/return
        # a QGraphicsItem
        changeset = {
            QGraphicsItem.ItemParentChange,
            QGraphicsItem.ItemParentHasChanged,
            QGraphicsItem.ItemChildAddedChange,
            QGraphicsItem.ItemChildRemovedChange,
        }

        def QGraphicsItem_itemChange(self, change, value):
            if change in changeset:
                if isinstance(value, QGraphicsItem):
                    value = sip.cast(value, QGraphicsItem)
                rval = QGraphicsItem_itemChange_old(self, change, value)
                if isinstance(rval, QGraphicsItem):
                    rval = sip.cast(rval, QGraphicsItem)
                return rval
            else:
                return QGraphicsItem_itemChange_old(self, change, value)

        QGraphicsItem.itemChange = QGraphicsItem_itemChange
        warnings.warn("Monkey patching QGraphicsItem.itemChange",
                      RuntimeWarning)
Attempt to remedy: https://www.riverbankcomputing.com/pipermail/pyqt/2016-February/037015.html
entailment
def setup_opt_parser():
    """
    Setup the optparser

    @returns: optparse.OptionParser
    """
    # pylint: disable-msg=C0301
    # line too long
    usage = "usage: %prog [options]"
    opt_parser = optparse.OptionParser(usage=usage)

    opt_parser.add_option("--version", action='store_true',
                          dest="yolk_version", default=False,
                          help="Show yolk version and exit.")
    opt_parser.add_option("--debug", action='store_true',
                          dest="debug", default=False,
                          help="Show debugging information.")
    opt_parser.add_option("-q", "--quiet", action='store_true',
                          dest="quiet", default=False,
                          help="Show less output.")

    group_local = optparse.OptionGroup(opt_parser,
            "Query installed Python packages",
            "The following options show information about installed Python "
            "packages. Activated packages are normal packages on sys.path "
            "that can be imported. Non-activated packages need "
            "'pkg_resources.require()' before they can be imported, such as "
            "packages installed with 'easy_install --multi-version'. "
            "PKG_SPEC can be either a package name or package name and "
            "version e.g. Paste==0.9")

    group_local.add_option("-l", "--list", action='store_true',
                           dest="show_all", default=False,
                           help="List all Python packages installed by distutils "
                                "or setuptools. Use PKG_SPEC to narrow results.")
    group_local.add_option("-a", "--activated", action='store_true',
                           dest="show_active", default=False,
                           help="List activated packages installed by distutils "
                                "or setuptools. Use PKG_SPEC to narrow results.")
    group_local.add_option("-n", "--non-activated", action='store_true',
                           dest="show_non_active", default=False,
                           help="List non-activated packages installed by distutils "
                                "or setuptools. Use PKG_SPEC to narrow results.")
    group_local.add_option("-m", "--metadata", action='store_true',
                           dest="metadata", default=False,
                           help="Show all metadata for packages installed by "
                                "setuptools (use with -l -a or -n)")
    group_local.add_option("-f", "--fields", action="store",
                           dest="fields", default=False,
                           help="Show specific metadata fields. (use with -m or -M)")
    group_local.add_option("-d", "--depends", action='store',
                           dest="show_deps", metavar='PKG_SPEC',
                           help="Show dependencies for a package installed by "
                                "setuptools if they are available.")
    group_local.add_option("--entry-points", action='store',
                           dest="show_entry_points", default=False,
                           help="List entry points for a module. "
                                "e.g. --entry-points nose.plugins",
                           metavar="MODULE")
    group_local.add_option("--entry-map", action='store',
                           dest="show_entry_map", default=False,
                           help="List entry map for a package. e.g. --entry-map yolk",
                           metavar="PACKAGE_NAME")

    group_pypi = optparse.OptionGroup(opt_parser,
            "PyPI (Cheese Shop) options",
            "The following options query the Python Package Index:")

    group_pypi.add_option("-C", "--changelog", action='store',
                          dest="show_pypi_changelog", metavar='HOURS',
                          default=False,
                          help="Show detailed ChangeLog for PyPI for last n hours.")
    group_pypi.add_option("-D", "--download-links", action='store',
                          metavar="PKG_SPEC", dest="show_download_links",
                          default=False,
                          help="Show download URL's for package listed on PyPI. "
                               "Use with -T to specify egg, source etc.")
    group_pypi.add_option("-F", "--fetch-package", action='store',
                          metavar="PKG_SPEC", dest="fetch", default=False,
                          help="Download package source or egg. "
                               "You can specify a file type with -T")
    group_pypi.add_option("-H", "--browse-homepage", action='store',
                          metavar="PKG_SPEC", dest="browse_website",
                          default=False,
                          help="Launch web browser at home page for package.")
    group_pypi.add_option("-I", "--pypi-index", action='store',
                          dest="pypi_index", default=False,
                          help="Specify PyPI mirror for package index.")
    group_pypi.add_option("-L", "--latest-releases", action='store',
                          dest="show_pypi_releases", metavar="HOURS",
                          default=False,
                          help="Show PyPI releases for last n hours.")
    group_pypi.add_option("-M", "--query-metadata", action='store',
                          dest="query_metadata_pypi", default=False,
                          metavar="PKG_SPEC",
                          help="Show metadata for a package listed on PyPI. "
                               "Use -f to show particular fields.")
    group_pypi.add_option("-S", "", action="store",
                          dest="pypi_search", default=False,
                          help="Search PyPI by spec and optional AND/OR operator.",
                          metavar='SEARCH_SPEC <AND/OR SEARCH_SPEC>')
    group_pypi.add_option("-T", "--file-type", action="store",
                          dest="file_type", default="all",
                          help="You may specify 'source', 'egg', 'svn' or 'all' "
                               "when using -D.")
    group_pypi.add_option("-U", "--show-updates", action='store_true',
                          dest="show_updates", metavar='<PKG_NAME>',
                          default=False,
                          help="Check PyPI for updates on package(s).")
    group_pypi.add_option("-V", "--versions-available", action='store',
                          dest="versions_available", default=False,
                          metavar='PKG_SPEC',
                          help="Show available versions for given package "
                               "listed on PyPI.")

    opt_parser.add_option_group(group_local)
    opt_parser.add_option_group(group_pypi)

    # add opts from plugins
    all_plugins = []
    for plugcls in load_plugins(others=True):
        plug = plugcls()
        try:
            plug.add_options(opt_parser)
        except AttributeError:
            pass
    return opt_parser
Setup the optparser @returns: optparse.OptionParser
entailment
def validate_pypi_opts(opt_parser):
    """
    Check parse options that require pkg_spec

    @returns: pkg_spec
    """
    (options, remaining_args) = opt_parser.parse_args()
    options_pkg_specs = [
        options.versions_available,
        options.query_metadata_pypi,
        options.show_download_links,
        options.browse_website,
        options.fetch,
        options.show_deps,
    ]
    for pkg_spec in options_pkg_specs:
        if pkg_spec:
            return pkg_spec
Check parse options that require pkg_spec @returns: pkg_spec
entailment
def write(self, inline):
    """
    Write a line to stdout if it isn't in a blacklist

    Try to get the name of the calling module to see if we want to filter it.
    If there is no calling module, use current frame in case there's a
    traceback before there is any calling module.
    """
    frame = inspect.currentframe().f_back
    if frame:
        mod = frame.f_globals.get('__name__')
    else:
        mod = sys._getframe(0).f_globals.get('__name__')
    if mod not in self.modulenames:
        self.stdout.write(inline)
Write a line to stdout if it isn't in a blacklist Try to get the name of the calling module to see if we want to filter it. If there is no calling module, use current frame in case there's a traceback before there is any calling module
entailment
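A heavily hedged sketch of installing the filter; the wrapper class name and constructor below are assumptions, since the excerpt only shows that instances carry `stdout` and `modulenames` attributes:

import inspect
import sys

class FilteredStdOut:
    """Hypothetical wrapper around the write() method shown above."""

    def __init__(self, stdout, modulenames):
        self.stdout = stdout            # the real stream to delegate to
        self.modulenames = modulenames  # module-name blacklist

    def write(self, inline):
        frame = inspect.currentframe().f_back
        mod = frame.f_globals.get('__name__') if frame else None
        if mod not in self.modulenames:
            self.stdout.write(inline)

sys.stdout = FilteredStdOut(sys.stdout, ["pkg_resources"])
print("visible")  # __main__ is not blacklisted, so this passes through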
def get_plugin(self, method):
    """
    Return plugin object if CLI option is activated and method exists

    @param method: name of plugin's method we're calling
    @type method: string

    @returns: list of plugins with `method`
    """
    all_plugins = []
    for entry_point in pkg_resources.iter_entry_points('yolk.plugins'):
        plugin_obj = entry_point.load()
        plugin = plugin_obj()
        plugin.configure(self.options, None)
        if plugin.enabled:
            if not hasattr(plugin, method):
                self.logger.warn("Error: plugin has no method: %s" % method)
                plugin = None
            else:
                all_plugins.append(plugin)
    return all_plugins
Return plugin object if CLI option is activated and method exists @param method: name of plugin's method we're calling @type method: string @returns: list of plugins with `method`
entailment
def set_log_level(self):
    """
    Set log level according to command-line options

    @returns: logger object
    """
    if self.options.debug:
        self.logger.setLevel(logging.DEBUG)
    elif self.options.quiet:
        self.logger.setLevel(logging.ERROR)
    else:
        self.logger.setLevel(logging.INFO)
    self.logger.addHandler(logging.StreamHandler())
    return self.logger
Set log level according to command-line options @returns: logger object
entailment
def run(self):
    """
    Perform actions based on CLI options

    @returns: status code
    """
    opt_parser = setup_opt_parser()
    (self.options, remaining_args) = opt_parser.parse_args()
    logger = self.set_log_level()
    pkg_spec = validate_pypi_opts(opt_parser)
    if not pkg_spec:
        pkg_spec = remaining_args
    self.pkg_spec = pkg_spec

    if not self.options.pypi_search and (len(sys.argv) == 1 or
                                         len(remaining_args) > 2):
        opt_parser.print_help()
        return 2

    # Options that depend on querying installed packages, not PyPI.
    # We find the proper case for package names if they are installed,
    # otherwise PyPI returns the correct case.
    if self.options.show_deps or self.options.show_all or \
            self.options.show_active or self.options.show_non_active or \
            (self.options.show_updates and pkg_spec):
        want_installed = True
    else:
        want_installed = False
    # show_updates may or may not have a pkg_spec
    if not want_installed or self.options.show_updates:
        self.pypi = CheeseShop(self.options.debug)
        # XXX: We should return 2 here if we couldn't create the xmlrpc server

    if pkg_spec:
        (self.project_name, self.version, self.all_versions) = \
            self.parse_pkg_ver(want_installed)
        if want_installed and not self.project_name:
            logger.error("%s is not installed." % pkg_spec[0])
            return 1

    # I could prefix all these with 'cmd_' and the methods also
    # and then iterate over the `options` dictionary keys...
    commands = ['show_deps', 'query_metadata_pypi', 'fetch',
                'versions_available', 'show_updates', 'browse_website',
                'show_download_links', 'pypi_search', 'show_pypi_changelog',
                'show_pypi_releases', 'yolk_version', 'show_all',
                'show_active', 'show_non_active', 'show_entry_map',
                'show_entry_points']

    # Run the first command it finds, and only the first command, then return
    # XXX: Check if more than one command was set in options and give error?
    for action in commands:
        if getattr(self.options, action):
            return getattr(self, action)()
    opt_parser.print_help()
Perform actions based on CLI options @returns: status code
entailment
def show_updates(self):
    """
    Check installed packages for available updates on PyPI

    @param project_name: optional package name to check; checks every
                         installed package if none specified
    @type project_name: string

    @returns: None
    """
    dists = Distributions()
    if self.project_name:
        # Check for a single package
        pkg_list = [self.project_name]
    else:
        # Check for every installed package
        pkg_list = get_pkglist()
    found = None
    for pkg in pkg_list:
        for (dist, active) in dists.get_distributions(
                "all", pkg, dists.get_highest_installed(pkg)):
            (project_name, versions) = \
                self.pypi.query_versions_pypi(dist.project_name)
            if versions:
                # PyPI returns them in chronological order,
                # but who knows if it's guaranteed in the API?
                # Make sure we grab the highest version:
                newest = get_highest_version(versions)
                if newest != dist.version:
                    # We may have newer than what PyPI knows about
                    if pkg_resources.parse_version(dist.version) < \
                            pkg_resources.parse_version(newest):
                        found = True
                        print(" %s %s (%s)" % (project_name, dist.version,
                                               newest))
    if not found and self.project_name:
        self.logger.info("You have the latest version installed.")
    elif not found:
        self.logger.info("No newer packages found at The Cheese Shop")
    return 0
Check installed packages for available updates on PyPI @param project_name: optional package name to check; checks every installed package if none specified @type project_name: string @returns: None
entailment
def show_distributions(self, show):
    """
    Show list of installed activated OR non-activated packages

    @param show: type of pkgs to show (all, active or nonactive)
    @type show: string

    @returns: None or 2 if error
    """
    show_metadata = self.options.metadata

    # Search for any plugins with active CLI options and an add_column() method
    plugins = self.get_plugin("add_column")

    # Some locations show false positive for 'development' packages:
    ignores = ["/UNIONFS", "/KNOPPIX.IMG"]

    # Check if we're in a workingenv
    # See http://cheeseshop.python.org/pypi/workingenv.py
    workingenv = os.environ.get('WORKING_ENV')
    if workingenv:
        ignores.append(workingenv)

    dists = Distributions()
    results = None
    for (dist, active) in dists.get_distributions(show, self.project_name,
                                                  self.version):
        metadata = get_metadata(dist)
        for prefix in ignores:
            if dist.location.startswith(prefix):
                dist.location = dist.location.replace(prefix, "")
        # Case-insensitive search because of Windows
        if dist.location.lower().startswith(get_python_lib().lower()):
            develop = ""
        else:
            develop = dist.location
        if metadata:
            add_column_text = ""
            for my_plugin in plugins:
                # See if package is 'owned' by a package manager such as
                # portage, apt, rpm etc.
                # add_column_text += my_plugin.add_column(filename) + " "
                add_column_text += my_plugin.add_column(dist) + " "
            self.print_metadata(metadata, develop, active, add_column_text)
        else:
            print(str(dist) + " has no metadata")
        results = True

    if not results and self.project_name:
        if self.version:
            pkg_spec = "%s==%s" % (self.project_name, self.version)
        else:
            pkg_spec = "%s" % self.project_name
        if show == "all":
            self.logger.error("There are no versions of %s installed."
                              % pkg_spec)
        else:
            self.logger.error("There are no %s versions of %s installed."
                              % (show, pkg_spec))
        return 2
    elif show == "all" and results and self.options.fields:
        print("Versions with '*' are non-active.")
        print("Versions with '!' are deployed in development mode.")
Show list of installed activated OR non-activated packages @param show: type of pkgs to show (all, active or nonactive) @type show: string @returns: None or 2 if error
entailment
def print_metadata(self, metadata, develop, active, installed_by):
    """
    Print out formatted metadata

    @param metadata: package's metadata
    @type metadata: pkg_resources Distribution obj

    @param develop: path to pkg if it's deployed in development mode
    @type develop: string

    @param active: show if package is activated or not
    @type active: boolean

    @param installed_by: Shows if pkg was installed by a package manager
                         other than setuptools
    @type installed_by: string

    @returns: None
    """
    show_metadata = self.options.metadata
    if self.options.fields:
        fields = self.options.fields.split(',')
        # list() so the result survives repeated membership tests on Python 3
        fields = list(map(str.strip, fields))
    else:
        fields = []

    version = metadata['Version']

    # When showing all packages, note which are not active:
    if active:
        if fields:
            active_status = ""
        else:
            active_status = "active"
    else:
        if fields:
            active_status = "*"
        else:
            active_status = "non-active"

    if develop:
        if fields:
            development_status = "! (%s)" % develop
        else:
            development_status = "development (%s)" % develop
    else:
        development_status = installed_by
    status = "%s %s" % (active_status, development_status)

    if fields:
        print('%s (%s)%s %s' % (metadata['Name'], version, active_status,
                                development_status))
    else:
        # Need intelligent justification
        print(metadata['Name'].ljust(15) + " - " + version.ljust(12) +
              " - " + status)
    if fields:
        # Only show specific fields, using case-insensitive search
        fields = list(map(str.lower, fields))
        for field in metadata.keys():
            if field.lower() in fields:
                print('    %s: %s' % (field, metadata[field]))
        print()
    elif show_metadata:
        # Print all available metadata fields
        for field in metadata.keys():
            if field != 'Name' and field != 'Summary':
                print('    %s: %s' % (field, metadata[field]))
Print out formatted metadata @param metadata: package's metadata @type metadata: pkg_resources Distribution obj @param develop: path to pkg if its deployed in development mode @type develop: string @param active: show if package is activated or not @type active: boolean @param installed_by: Shows if pkg was installed by a package manager other than setuptools @type installed_by: string @returns: None
entailment
def show_deps(self):
    """
    Show dependencies for package(s)

    @returns: 0 - success
              1 - No dependency info supplied
    """
    pkgs = pkg_resources.Environment()
    for pkg in pkgs[self.project_name]:
        if not self.version:
            print(pkg.project_name, pkg.version)
        # dict.values() is a view on Python 3, so materialize it first
        dep_map_values = list(pkg._dep_map.values())
        i = len(dep_map_values[0])
        if i:
            while i:
                if not self.version or self.version and \
                        pkg.version == self.version:
                    if self.version and i == len(dep_map_values[0]):
                        print(pkg.project_name, pkg.version)
                    print("  " + str(dep_map_values[0][i - 1]))
                i -= 1
        else:
            self.logger.info(
                "No dependency information was supplied with the package.")
            return 1
    return 0
Show dependencies for package(s) @returns: 0 - success 1 - No dependency info supplied
entailment
def show_pypi_changelog(self):
    """
    Show detailed PyPI ChangeLog for the last `hours`

    @returns: 0 = success or 1 if failed to retrieve from XML-RPC server
    """
    hours = self.options.show_pypi_changelog
    if not hours.isdigit():
        self.logger.error("Error: You must supply an integer.")
        return 1

    try:
        changelog = self.pypi.changelog(int(hours))
    except XMLRPCFault as err_msg:
        self.logger.error(err_msg)
        self.logger.error("ERROR: Couldn't retrieve changelog.")
        return 1

    last_pkg = ''
    for entry in changelog:
        pkg = entry[0]
        if pkg != last_pkg:
            print("%s %s\n\t%s" % (entry[0], entry[1], entry[3]))
            last_pkg = pkg
        else:
            print("\t%s" % entry[3])
    return 0
Show detailed PyPI ChangeLog for the last `hours` @returns: 0 = success or 1 if failed to retrieve from XML-RPC server
entailment
def show_pypi_releases(self):
    """
    Show PyPI releases for the last number of `hours`

    @returns: 0 = success or 1 if failed to retrieve from XML-RPC server
    """
    try:
        hours = int(self.options.show_pypi_releases)
    except ValueError:
        self.logger.error("ERROR: You must supply an integer.")
        return 1

    try:
        latest_releases = self.pypi.updated_releases(hours)
    except XMLRPCFault as err_msg:
        self.logger.error(err_msg)
        self.logger.error("ERROR: Couldn't retrieve latest releases.")
        return 1

    for release in latest_releases:
        print("%s %s" % (release[0], release[1]))
    return 0
Show PyPI releases for the last number of `hours` @returns: 0 = success or 1 if failed to retrieve from XML-RPC server
entailment
def show_download_links(self):
    """
    Query PyPI for pkg download URI for a package

    @returns: 0
    """
    # In case they specify version as 'dev' instead of using -T svn,
    # don't show three svn URIs
    if self.options.file_type == "all" and self.version == "dev":
        self.options.file_type = "svn"

    if self.options.file_type == "svn":
        version = "dev"
    else:
        if self.version:
            version = self.version
        else:
            version = self.all_versions[0]
    if self.options.file_type == "all":
        # Search for source, egg, and svn
        self.print_download_uri(version, True)
        self.print_download_uri(version, False)
        self.print_download_uri("dev", True)
    else:
        if self.options.file_type == "source":
            source = True
        else:
            source = False
        self.print_download_uri(version, source)
    return 0
Query PyPI for pkg download URI for a package @returns: 0
entailment
def print_download_uri(self, version, source):
    """
    @param version: version number or 'dev' for svn
    @type version: string

    @param source: download source or egg
    @type source: boolean

    @returns: None
    """
    if version == "dev":
        pkg_type = "subversion"
        source = True
    elif source:
        pkg_type = "source"
    else:
        pkg_type = "egg"

    # Use setuptools monkey-patch to grab url
    url = get_download_uri(self.project_name, version, source,
                           self.options.pypi_index)
    if url:
        print("%s" % url)
    else:
        self.logger.info("No download URL found for %s" % pkg_type)
@param version: version number or 'dev' for svn @type version: string @param source: download source or egg @type source: boolean @returns: None
entailment
def fetch(self):
    """
    Download a package

    @returns: 0 = success or 1 if failed download
    """
    # Default type to download
    source = True
    directory = "."
    if self.options.file_type == "svn":
        version = "dev"
        svn_uri = get_download_uri(self.project_name, "dev", True)
        if svn_uri:
            directory = self.project_name + "_svn"
            return self.fetch_svn(svn_uri, directory)
        else:
            self.logger.error(
                "ERROR: No subversion repository found for %s"
                % self.project_name)
            return 1
    elif self.options.file_type == "source":
        source = True
    elif self.options.file_type == "egg":
        source = False

    uri = get_download_uri(self.project_name, self.version, source)
    if uri:
        return self.fetch_uri(directory, uri)
    else:
        self.logger.error("No %s URI found for package: %s"
                          % (self.options.file_type, self.project_name))
        return 1
Download a package @returns: 0 = success or 1 if failed download
entailment
def fetch_uri(self, directory, uri):
    """
    Use ``urllib.urlretrieve`` to download package to file in sandbox dir.

    @param directory: directory to download to
    @type directory: string

    @param uri: uri to download
    @type uri: string

    @returns: 0 = success or 1 for failed download
    """
    filename = os.path.basename(urlparse(uri)[2])
    if os.path.exists(filename):
        self.logger.error("ERROR: File exists: " + filename)
        return 1

    try:
        downloaded_filename, headers = urlretrieve(uri, filename)
        self.logger.info("Downloaded ./" + filename)
    except IOError as err_msg:
        self.logger.error("Error downloading package %s from URL %s"
                          % (filename, uri))
        self.logger.error(str(err_msg))
        return 1

    # On Python 3 `headers` is an email.message.Message, hence get_content_type()
    if headers.get_content_type() in ["text/html"]:
        dfile = open(downloaded_filename)
        if re.search("404 Not Found", "".join(dfile.readlines())):
            dfile.close()
            self.logger.error("'404 Not Found' error")
            return 1
        dfile.close()
    return 0
Use ``urllib.urlretrieve`` to download package to file in sandbox dir. @param directory: directory to download to @type directory: string @param uri: uri to download @type uri: string @returns: 0 = success or 1 for failed download
entailment
def fetch_svn(self, svn_uri, directory):
    """
    Fetch subversion repository

    @param svn_uri: subversion repository uri to check out
    @type svn_uri: string

    @param directory: directory to download to
    @type directory: string

    @returns: 0 = success or 1 for failed download
    """
    if not command_successful("svn --version"):
        self.logger.error("ERROR: Do you have subversion installed?")
        return 1
    if os.path.exists(directory):
        self.logger.error("ERROR: Checkout directory exists - %s" % directory)
        return 1
    try:
        os.mkdir(directory)
    except OSError as err_msg:
        self.logger.error("ERROR: " + str(err_msg))
        return 1
    cwd = os.path.realpath(os.curdir)
    os.chdir(directory)
    self.logger.info("Doing subversion checkout for %s" % svn_uri)
    status, output = run_command("/usr/bin/svn co %s" % svn_uri)
    self.logger.info(output)
    os.chdir(cwd)
    self.logger.info("subversion checkout is in directory './%s'" % directory)
    return 0
Fetch subversion repository @param svn_uri: subversion repository uri to check out @type svn_uri: string @param directory: directory to download to @type directory: string @returns: 0 = success or 1 for failed download
entailment
def browse_website(self, browser=None):
    """
    Launch web browser at project's homepage

    @param browser: name of web browser to use
    @type browser: string

    @returns: 0 if homepage found, 1 if no homepage found
    """
    if len(self.all_versions):
        metadata = self.pypi.release_data(self.project_name,
                                          self.all_versions[0])
        self.logger.debug("DEBUG: browser: %s" % browser)
        # dict.has_key() is gone in Python 3; use `in` instead
        if "home_page" in metadata:
            self.logger.info("Launching browser: %s" % metadata["home_page"])
            if browser == 'konqueror':
                browser = webbrowser.Konqueror()
            else:
                browser = webbrowser.get()
            browser.open(metadata["home_page"], 2)
            return 0

    self.logger.error("No homepage URL found.")
    return 1
Launch web browser at project's homepage @param browser: name of web browser to use @type browser: string @returns: 0 if homepage found, 1 if no homepage found
entailment
def query_metadata_pypi(self):
    """
    Show pkg metadata queried from PyPI

    @returns: 0
    """
    if self.version and self.version in self.all_versions:
        metadata = self.pypi.release_data(self.project_name, self.version)
    else:
        # Give highest version
        metadata = self.pypi.release_data(self.project_name,
                                          self.all_versions[0])
    if metadata:
        for key in metadata.keys():
            if not self.options.fields or (self.options.fields and
                                           self.options.fields == key):
                print("%s: %s" % (key, metadata[key]))
    return 0
Show pkg metadata queried from PyPI @returns: 0
entailment
def versions_available(self):
    """
    Query PyPI for a particular version or all versions of a package

    @returns: 0 if version(s) found or 1 if none found
    """
    if self.version:
        spec = "%s==%s" % (self.project_name, self.version)
    else:
        spec = self.project_name
    if self.all_versions and self.version in self.all_versions:
        print_pkg_versions(self.project_name, [self.version])
    elif not self.version and self.all_versions:
        print_pkg_versions(self.project_name, self.all_versions)
    else:
        if self.version:
            self.logger.error("No package found for version %s"
                              % self.version)
        else:
            self.logger.error("No package found for %s" % self.project_name)
        return 1
    return 0
Query PyPI for a particular version or all versions of a package @returns: 0 if version(s) found or 1 if none found
entailment
def parse_search_spec(self, spec):
    """
    Parse search args and return spec dict for PyPI

    * Owwww, my eyes!. Re-write this.

    @param spec: Cheese Shop package search spec
                 e.g.
                    name=Cheetah
                    license=ZPL
                    license=ZPL AND name=Cheetah
    @type spec: string

    @returns: tuple with spec and operator
    """
    usage = \
        """You can search PyPI by the following:
 name
 version
 author
 author_email
 maintainer
 maintainer_email
 home_page
 license
 summary
 description
 keywords
 platform
 download_url

 e.g. yolk -S name=Cheetah
      yolk -S name=yolk AND license=PSF
      """
    if not spec:
        self.logger.error(usage)
        return (None, None)

    try:
        spec = " ".join(spec)
        operator = 'AND'
        first = second = ""
        if " AND " in spec:
            (first, second) = spec.split('AND')
        elif " OR " in spec:
            (first, second) = spec.split('OR')
            operator = 'OR'
        else:
            first = spec
        (key1, term1) = first.split('=')
        key1 = key1.strip()
        term1 = term1.strip()
        if second:
            (key2, term2) = second.split('=')
            key2 = key2.strip()
            term2 = term2.strip()

        spec = {}
        spec[key1] = term1
        if second:
            spec[key2] = term2
    except ValueError:
        # A failed split() means the spec was malformed
        self.logger.error(usage)
        spec = operator = None
    return (spec, operator)
Parse search args and return spec dict for PyPI * Owwww, my eyes!. Re-write this. @param spec: Cheese Shop package search spec e.g. name=Cheetah license=ZPL license=ZPL AND name=Cheetah @type spec: string @returns: tuple with spec and operator
entailment
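A short usage sketch of the parser, assuming a `cli` object exposing `parse_search_spec` (the variable name is hypothetical); the comments show the (spec, operator) tuples the method is expected to return:

# Hypothetical `cli` object exposing parse_search_spec.
spec, operator = cli.parse_search_spec(["name=yolk"])
# -> ({'name': 'yolk'}, 'AND')

spec, operator = cli.parse_search_spec(["name=yolk", "AND", "license=GPL"])
# -> ({'name': 'yolk', 'license': 'GPL'}, 'AND')

spec, operator = cli.parse_search_spec(["name=yolk", "OR", "license=GPL"])
# -> ({'name': 'yolk', 'license': 'GPL'}, 'OR')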
def pypi_search(self):
    """
    Search PyPI by metadata keyword
    e.g. yolk -S name=yolk AND license=GPL

    @param spec: Cheese Shop search spec
    @type spec: list of strings

    spec examples:
      ["name=yolk"]
      ["license=GPL"]
      ["name=yolk", "AND", "license=GPL"]

    @returns: 0 on success or 1 if mal-formed search spec
    """
    spec = self.pkg_spec
    # Add remaining CLI arguments to options.pypi_search
    search_arg = self.options.pypi_search
    spec.insert(0, search_arg.strip())

    (spec, operator) = self.parse_search_spec(spec)
    if not spec:
        return 1
    for pkg in self.pypi.search(spec, operator):
        summary = pkg['summary'] or ""
        print("""%s (%s):
    %s
""" % (pkg['name'], pkg["version"], summary))
    return 0
Search PyPI by metadata keyword e.g. yolk -S name=yolk AND license=GPL @param spec: Cheese Shop search spec @type spec: list of strings spec examples: ["name=yolk"] ["license=GPL"] ["name=yolk", "AND", "license=GPL"] @returns: 0 on success or 1 if mal-formed search spec
entailment
def show_entry_map(self):
    """
    Show entry map for a package

    @param dist: package
    @type dist: string

    @returns: 0 for success or 1 if error
    """
    pprinter = pprint.PrettyPrinter()
    try:
        entry_map = pkg_resources.get_entry_map(self.options.show_entry_map)
        if entry_map:
            pprinter.pprint(entry_map)
    except pkg_resources.DistributionNotFound:
        self.logger.error("Distribution not found: %s"
                          % self.options.show_entry_map)
        return 1
    return 0
Show entry map for a package @param dist: package @type dist: string @returns: 0 for success or 1 if error
entailment
def show_entry_points(self):
    """
    Show entry points for a module

    @returns: 0 for success or 1 if error
    """
    found = False
    for entry_point in \
            pkg_resources.iter_entry_points(self.options.show_entry_points):
        found = True
        try:
            plugin = entry_point.load()
            print(plugin.__module__)
            print("   %s" % entry_point)
            if plugin.__doc__:
                print(plugin.__doc__)
            print()
        except ImportError:
            pass
    if not found:
        self.logger.error("No entry points found for %s"
                          % self.options.show_entry_points)
        return 1
    return 0
Show entry points for a module @returns: 0 for success or 1 if error
entailment
def parse_pkg_ver(self, want_installed):
    """
    Return tuple with project_name and version from CLI args
    If the user gave the wrong case for the project name, this corrects it

    @param want_installed: whether package we want is installed or not
    @type want_installed: boolean

    @returns: tuple(project_name, version, all_versions)
    """
    all_versions = []
    arg_str = "".join(self.pkg_spec)
    if "==" not in arg_str:
        # No version specified
        project_name = arg_str
        version = None
    else:
        (project_name, version) = arg_str.split("==")
        project_name = project_name.strip()
        version = version.strip()
    # Find proper case for package name
    if want_installed:
        dists = Distributions()
        project_name = dists.case_sensitive_name(project_name)
    else:
        (project_name, all_versions) = \
            self.pypi.query_versions_pypi(project_name)
        if not len(all_versions):
            msg = "I'm afraid we have no '%s' at " % project_name
            msg += "The Cheese Shop. A little Red Leicester, perhaps?"
            self.logger.error(msg)
            sys.exit(2)
    return (project_name, version, all_versions)
Return tuple with project_name and version from CLI args If the user gave the wrong case for the project name, this corrects it @param want_installed: whether package we want is installed or not @type want_installed: boolean @returns: tuple(project_name, version, all_versions)
entailment
def install_backport_hook(api):
    """
    Install a backport import hook for Qt4 api

    Parameters
    ----------
    api : str
        The Qt4 api whose structure should be intercepted
        ('pyqt4' or 'pyside').

    Example
    -------
    >>> install_backport_hook("pyqt4")
    >>> import PyQt4
    Loaded module AnyQt._backport as a substitute for PyQt4
    """
    if api == USED_API:
        raise ValueError("cannot install a backport hook for the Qt api "
                         "currently in use")
    sys.meta_path.insert(0, ImportHookBackport(api))
Install a backport import hook for Qt4 api Parameters ---------- api : str The Qt4 api whose structure should be intercepted ('pyqt4' or 'pyside'). Example ------- >>> install_backport_hook("pyqt4") >>> import PyQt4 Loaded module AnyQt._backport as a substitute for PyQt4
entailment
def install_deny_hook(api):
    """
    Install a deny import hook for Qt api.

    Parameters
    ----------
    api : str
        The Qt api whose import should be prevented

    Example
    -------
    >>> install_deny_hook("pyqt4")
    >>> import PyQt4
    Traceback (most recent call last):...
    ImportError: Import of PyQt4 is denied.
    """
    if api == USED_API:
        raise ValueError("cannot deny the Qt api currently in use")
    sys.meta_path.insert(0, ImportHookDeny(api))
Install a deny import hook for Qt api. Parameters ---------- api : str The Qt api whose import should be prevented Example ------- >>> install_deny_hook("pyqt4") >>> import PyQt4 Traceback (most recent call last):... ImportError: Import of PyQt4 is denied.
entailment
def run_command(cmd, env=None, max_timeout=None):
    """
    Run command and return its return status code and its output
    """
    arglist = cmd.split()
    # os.tmpfile() was removed in Python 3; use the tempfile module instead
    output = tempfile.TemporaryFile()
    try:
        pipe = Popen(arglist, stdout=output, stderr=STDOUT, env=env)
    except Exception as errmsg:
        return 1, errmsg
    # Wait only max_timeout seconds.
    if max_timeout:
        start = time.time()
        while pipe.poll() is None:
            time.sleep(0.1)
            if time.time() - start > max_timeout:
                os.kill(pipe.pid, signal.SIGINT)
                pipe.wait()
                return 1, "Time exceeded"
    pipe.wait()
    output.seek(0)
    return pipe.returncode, output.read()
Run command and return its return status code and its output
entailment
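For comparison, a sketch of the same behaviour on top of `subprocess.run`, which handles the timeout and output capture itself; this is an alternative under the same (status, output) contract, not the code the module actually uses:

import subprocess

def run_command_modern(cmd, env=None, max_timeout=None):
    """Run a command; return (status, output) like run_command above."""
    try:
        completed = subprocess.run(
            cmd.split(),
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            env=env,
            timeout=max_timeout,  # None means wait forever
        )
    except subprocess.TimeoutExpired:
        return 1, "Time exceeded"
    except Exception as errmsg:
        return 1, errmsg
    return completed.returncode, completed.stdout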
async def iter( self, url: Union[str, methods], data: Optional[MutableMapping] = None, headers: Optional[MutableMapping] = None, *, limit: int = 200, iterkey: Optional[str] = None, itermode: Optional[str] = None, minimum_time: Optional[int] = None, as_json: Optional[bool] = None ) -> AsyncIterator[dict]: """ Iterate over a slack API method supporting pagination When using :class:`slack.methods` the request is made `as_json` if available Args: url: :class:`slack.methods` or url string data: JSON encodable MutableMapping headers: limit: Maximum number of results to return per call. iterkey: Key in response data to iterate over (required for url string). itermode: Iteration mode (required for url string) (one of `cursor`, `page` or `timeline`) minimum_time: Minimum elapsed time (in seconds) between two calls to the Slack API (default to 0). If not reached the client will sleep for the remaining time. as_json: Post JSON to the slack API Returns: Async iterator over `response_data[key]` """ itervalue = None if not data: data = {} last_request_time = None while True: current_time = time.time() if ( minimum_time and last_request_time and last_request_time + minimum_time > current_time ): await self.sleep(last_request_time + minimum_time - current_time) data, iterkey, itermode = sansio.prepare_iter_request( url, data, iterkey=iterkey, itermode=itermode, limit=limit, itervalue=itervalue, ) last_request_time = time.time() response_data = await self.query(url, data, headers, as_json) itervalue = sansio.decode_iter_request(response_data) for item in response_data[iterkey]: yield item if not itervalue: break
Iterate over a slack API method supporting pagination When using :class:`slack.methods` the request is made `as_json` if available Args: url: :class:`slack.methods` or url string data: JSON encodable MutableMapping headers: limit: Maximum number of results to return per call. iterkey: Key in response data to iterate over (required for url string). itermode: Iteration mode (required for url string) (one of `cursor`, `page` or `timeline`) minimum_time: Minimum elapsed time (in seconds) between two calls to the Slack API (default to 0). If not reached the client will sleep for the remaining time. as_json: Post JSON to the slack API Returns: Async iterator over `response_data[key]`
entailment
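A usage sketch of the paginated iterator, assuming an already constructed async client and that the `slack.methods` enum exposes a USERS_LIST entry (the function name here is made up):

import slack

async def list_all_users(client):
    # Iterates transparently across pages; `limit` bounds each API call
    # and `minimum_time` throttles successive calls to the Slack API.
    async for user in client.iter(slack.methods.USERS_LIST, limit=200,
                                  minimum_time=2):
        print(user["id"])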
async def _incoming_from_rtm( self, url: str, bot_id: str ) -> AsyncIterator[events.Event]: """ Connect and discard incoming RTM event if necessary. :param url: Websocket url :param bot_id: Bot ID :return: Incoming events """ async for data in self._rtm(url): event = events.Event.from_rtm(json.loads(data)) if sansio.need_reconnect(event): break elif sansio.discard_event(event, bot_id): continue else: yield event
Connect and discard incoming RTM event if necessary. :param url: Websocket url :param bot_id: Bot ID :return: Incoming events
entailment
def login(request, template_name='registration/login.html', redirect_field_name=REDIRECT_FIELD_NAME, authentication_form=AuthenticationForm, current_app=None, extra_context=None): """ Displays the login form and handles the login action. """ redirect_to = request.POST.get(redirect_field_name, request.GET.get(redirect_field_name, '')) if request.method == "POST": form = authentication_form(data=request.POST, request=request) if form.is_valid(): netloc = urlparse(redirect_to)[1] # Use default setting if redirect_to is empty if not redirect_to: redirect_to = settings.LOGIN_REDIRECT_URL # Heavier security check -- don't allow redirection to a different # host. elif netloc and netloc != request.get_host(): redirect_to = settings.LOGIN_REDIRECT_URL # Okay, security checks complete. Log the user in. auth_login(request, form.get_user()) return redirect(redirect_to) else: form = authentication_form(request) current_site = get_current_site(request) context = { 'form': form, redirect_field_name: redirect_to, 'site': current_site, 'site_name': current_site.name, } if extra_context is not None: context.update(extra_context) request.current_app = current_app return TemplateResponse(request, template_name, context)
Displays the login form and handles the login action.
entailment
def package_manager_owns(self, dist):
    """
    Returns the package manager's name if it 'owns' the file,
    or an empty string if it does not

    There is currently no way to determine if distutils or
    setuptools installed a package. A future feature of setuptools
    will make a package manifest which can be checked.

    'filename' must be the full path to file
    """
    # Installed by distutils/setuptools or external package manager?
    # If location is in site-packages dir, check for .egg-info file
    if dist.location.lower() == get_python_lib().lower():
        filename = os.path.join(dist.location, dist.egg_name() + ".egg-info")
    else:
        filename = dist.location

    status, output = getstatusoutput("/usr/bin/acmefile -q %s" % filename)
    # status == 0 (file was installed by Acme)
    # status == 256 (file was not installed by Acme)
    if status == 0:
        return self.name
    else:
        return ""
Returns the package manager's name if it 'owns' the file, or an empty string if it does not There is currently no way to determine if distutils or setuptools installed a package. A future feature of setuptools will make a package manifest which can be checked. 'filename' must be the full path to file
entailment
def check_proxy_setting():
    """
    If the environment variable 'HTTP_PROXY' is set, it will most likely be
    in one of these forms:

          proxyhost:8080
          http://proxyhost:8080

    urllib2 requires the proxy URL to start with 'http://'
    This routine does that, and returns the transport for xmlrpc.
    """
    try:
        http_proxy = os.environ['HTTP_PROXY']
    except KeyError:
        return

    if not http_proxy.startswith('http://'):
        match = re.match(r'(http://)?([-_\.A-Za-z]+):(\d+)', http_proxy)
        if not match:
            raise Exception('Proxy format not recognised: [%s]' % http_proxy)
        os.environ['HTTP_PROXY'] = 'http://%s:%s' % (match.group(2),
                                                     match.group(3))
    return
If the environment variable 'HTTP_PROXY' is set, it will most likely be in one of these forms: proxyhost:8080 http://proxyhost:8080 urllib2 requires the proxy URL to start with 'http://' This routine does that, and returns the transport for xmlrpc.
entailment
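The normalization itself is a one-line regex rewrite; a self-contained sketch of just that step (the helper name is made up):

import re

def normalize_proxy(http_proxy):
    """Return the proxy URL with an explicit http:// scheme."""
    if http_proxy.startswith('http://'):
        return http_proxy
    match = re.match(r'(http://)?([-_\.A-Za-z]+):(\d+)', http_proxy)
    if not match:
        raise ValueError('Proxy format not recognised: [%s]' % http_proxy)
    return 'http://%s:%s' % (match.group(2), match.group(3))

assert normalize_proxy('proxyhost:8080') == 'http://proxyhost:8080'
assert normalize_proxy('http://proxyhost:8080') == 'http://proxyhost:8080'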
def filter_url(pkg_type, url): """ Returns URL of specified file type 'source', 'egg', or 'all' """ bad_stuff = ["?modtime", "#md5="] for junk in bad_stuff: if junk in url: url = url.split(junk)[0] break #pkg_spec==dev (svn) if url.endswith("-dev"): url = url.split("#egg=")[0] if pkg_type == "all": return url elif pkg_type == "source": valid_source_types = [".tgz", ".tar.gz", ".zip", ".tbz2", ".tar.bz2"] for extension in valid_source_types: if url.lower().endswith(extension): return url elif pkg_type == "egg": if url.lower().endswith(".egg"): return url
Returns URL of specified file type 'source', 'egg', or 'all'
entailment
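A few illustrative calls against `filter_url` (the URLs are made up); they show the junk markers being stripped and the type filter falling through to an implicit None when nothing matches:

# Hypothetical URLs, for illustration only.
assert filter_url("all", "http://example.com/pkg-1.0.tar.gz#md5=abc123") == \
    "http://example.com/pkg-1.0.tar.gz"
assert filter_url("source", "http://example.com/pkg-1.0.zip") == \
    "http://example.com/pkg-1.0.zip"
# An egg URL does not survive the "source" filter (returns None).
assert filter_url("source", "http://example.com/pkg-1.0.egg") is None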
def request(self, host, handler, request_body, verbose):
    '''Send xml-rpc request using proxy'''

    # We get a traceback if we don't have this attribute:
    self.verbose = verbose
    url = 'http://' + host + handler
    request = urllib2.Request(url, data=request_body)
    # Note: 'Host' and 'Content-Length' are added automatically
    request.add_header('User-Agent', self.user_agent)
    request.add_header('Content-Type', 'text/xml')
    proxy_handler = urllib2.ProxyHandler()
    opener = urllib2.build_opener(proxy_handler)
    fhandle = opener.open(request)
    return self.parse_response(fhandle)
Send xml-rpc request using proxy
entailment
def get_cache(self):
    """
    Get a package name list from disk cache or PyPI
    """
    # This is used by external programs that import `CheeseShop` and don't
    # want a cache file written to ~/.pypi and query PyPI every time.
    if self.no_cache:
        self.pkg_list = self.list_packages()
        return

    if not os.path.exists(self.yolk_dir):
        os.mkdir(self.yolk_dir)
    if os.path.exists(self.pkg_cache_file):
        self.pkg_list = self.query_cached_package_list()
    else:
        self.logger.debug("DEBUG: Fetching package list cache from PyPI...")
        self.fetch_pkg_list()
Get a package name list from disk cache or PyPI
entailment
def get_xmlrpc_server(self):
    """
    Returns PyPI's XML-RPC server instance
    """
    check_proxy_setting()
    if 'XMLRPC_DEBUG' in os.environ:
        debug = 1
    else:
        debug = 0
    try:
        return xmlrpclib.Server(XML_RPC_SERVER, transport=ProxyTransport(),
                                verbose=debug)
    except IOError:
        self.logger.error("ERROR: Can't connect to XML-RPC server: %s"
                          % XML_RPC_SERVER)
Returns PyPI's XML-RPC server instance
entailment
def query_versions_pypi(self, package_name):
    """Fetch list of available versions for a package from The CheeseShop"""
    if package_name not in self.pkg_list:
        self.logger.debug("Package %s not in cache, querying PyPI..."
                          % package_name)
        self.fetch_pkg_list()
    # I have to set version=[] for edge cases like "Magic file extensions"
    # but I'm not sure why this happens. Is it included with Python or
    # because it has a space in its name?
    versions = []
    for pypi_pkg in self.pkg_list:
        if pypi_pkg.lower() == package_name.lower():
            if self.debug:
                self.logger.debug("DEBUG: %s" % package_name)
            versions = self.package_releases(pypi_pkg)
            package_name = pypi_pkg
            break
    return (package_name, versions)
Fetch list of available versions for a package from The CheeseShop
entailment
def query_cached_package_list(self):
    """Return list of pickled package names from PYPI"""
    if self.debug:
        self.logger.debug("DEBUG: reading pickled cache file")
    # Pickle files must be opened in binary mode
    with open(self.pkg_cache_file, "rb") as cache_file:
        return cPickle.load(cache_file)
Return list of pickled package names from PYPI
entailment
def fetch_pkg_list(self):
    """Fetch and cache master list of package names from PYPI"""
    self.logger.debug("DEBUG: Fetching package name list from PyPI")
    package_list = self.list_packages()
    # Pickle files must be opened in binary mode
    with open(self.pkg_cache_file, "wb") as cache_file:
        cPickle.dump(package_list, cache_file)
    self.pkg_list = package_list
Fetch and cache master list of package names from PYPI
entailment
def search(self, spec, operator): '''Query PYPI via XMLRPC interface using search spec''' return self.xmlrpc.search(spec, operator.lower())
Query PYPI via XMLRPC interface using search spec
entailment
def release_data(self, package_name, version):
    """Query PYPI via XMLRPC interface for a pkg's metadata"""
    try:
        return self.xmlrpc.release_data(package_name, version)
    except xmlrpclib.Fault:
        # XXX Raises xmlrpclib.Fault if you give a non-existent version
        # Could this be a server bug?
        return
Query PYPI via XMLRPC interface for a pkg's metadata
entailment
def package_releases(self, package_name): """Query PYPI via XMLRPC interface for a pkg's available versions""" if self.debug: self.logger.debug("DEBUG: querying PyPI for versions of " \ + package_name) return self.xmlrpc.package_releases(package_name)
Query PYPI via XMLRPC interface for a pkg's available versions
entailment
def get_download_urls(self, package_name, version="", pkg_type="all"):
    """Query PyPI for pkg download URI for a package"""
    if version:
        versions = [version]
    else:
        # If they don't specify version, show em all.
        (package_name, versions) = self.query_versions_pypi(package_name)

    all_urls = []
    for ver in versions:
        metadata = self.release_data(package_name, ver)
        for urls in self.release_urls(package_name, ver):
            if pkg_type == "source" and urls['packagetype'] == "sdist":
                all_urls.append(urls['url'])
            elif pkg_type == "egg" and \
                    urls['packagetype'].startswith("bdist"):
                all_urls.append(urls['url'])
            elif pkg_type == "all":
                # All
                all_urls.append(urls['url'])

        # Try the package's metadata directly in case there's nothing
        # returned by XML-RPC's release_urls()
        if metadata and 'download_url' in metadata and \
                metadata['download_url'] != "UNKNOWN" and \
                metadata['download_url'] is not None:
            if metadata['download_url'] not in all_urls:
                if pkg_type != "all":
                    url = filter_url(pkg_type, metadata['download_url'])
                    if url:
                        all_urls.append(url)
    return all_urls
Query PyPI for pkg download URI for a package
entailment
def clone(self) -> "Event": """ Clone the event Returns: :class:`slack.events.Event` """ return self.__class__(copy.deepcopy(self.event), copy.deepcopy(self.metadata))
Clone the event Returns: :class:`slack.events.Event`
entailment
def from_rtm(cls, raw_event: MutableMapping) -> "Event": """ Create an event with data coming from the RTM API. If the event type is a message a :class:`slack.events.Message` is returned. Args: raw_event: JSON decoded data from the RTM API Returns: :class:`slack.events.Event` or :class:`slack.events.Message` """ if raw_event["type"].startswith("message"): return Message(raw_event) else: return Event(raw_event)
Create an event with data coming from the RTM API. If the event type is a message a :class:`slack.events.Message` is returned. Args: raw_event: JSON decoded data from the RTM API Returns: :class:`slack.events.Event` or :class:`slack.events.Message`
entailment
def from_http( cls, raw_body: MutableMapping, verification_token: Optional[str] = None, team_id: Optional[str] = None, ) -> "Event": """ Create an event with data coming from the HTTP Event API. If the event type is a message a :class:`slack.events.Message` is returned. Args: raw_body: Decoded body of the Event API request verification_token: Slack verification token used to verify the request came from slack team_id: Verify the event is for the correct team Returns: :class:`slack.events.Event` or :class:`slack.events.Message` Raises: :class:`slack.exceptions.FailedVerification`: when `verification_token` or `team_id` does not match the incoming event's. """ if verification_token and raw_body["token"] != verification_token: raise exceptions.FailedVerification(raw_body["token"], raw_body["team_id"]) if team_id and raw_body["team_id"] != team_id: raise exceptions.FailedVerification(raw_body["token"], raw_body["team_id"]) if raw_body["event"]["type"].startswith("message"): return Message(raw_body["event"], metadata=raw_body) else: return Event(raw_body["event"], metadata=raw_body)
Create an event with data coming from the HTTP Event API. If the event type is a message a :class:`slack.events.Message` is returned. Args: raw_body: Decoded body of the Event API request verification_token: Slack verification token used to verify the request came from slack team_id: Verify the event is for the correct team Returns: :class:`slack.events.Event` or :class:`slack.events.Message` Raises: :class:`slack.exceptions.FailedVerification`: when `verification_token` or `team_id` does not match the incoming event's.
entailment
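A sketch of how an HTTP endpoint might hand the decoded request body to `from_http`, with both verifications enabled; the token and team id are placeholders, and the surrounding handler is made up:

from slack import events, exceptions

def handle_incoming(raw_body):
    try:
        event = events.Event.from_http(
            raw_body,
            verification_token="xxxx-placeholder-token",
            team_id="T00000000",
        )
    except exceptions.FailedVerification:
        return 401  # reject requests that are not really from Slack
    return event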
def response(self, in_thread: Optional[bool] = None) -> "Message":
    """
    Create a response message.

    Depending on the incoming message the response can be in a thread. By
    default the response follows where the incoming message was posted.

    Args:
        in_thread (boolean): Overwrite the `threading` behaviour

    Returns:
        a new :class:`slack.events.Message`
    """
    data = {"channel": self["channel"]}

    if in_thread:
        if "message" in self:
            data["thread_ts"] = (
                self["message"].get("thread_ts") or self["message"]["ts"]
            )
        else:
            data["thread_ts"] = self.get("thread_ts") or self["ts"]
    elif in_thread is None:
        if "message" in self and "thread_ts" in self["message"]:
            data["thread_ts"] = self["message"]["thread_ts"]
        elif "thread_ts" in self:
            data["thread_ts"] = self["thread_ts"]

    return Message(data)
Create a response message. Depending on the incoming message the response can be in a thread. By default the response follows where the incoming message was posted. Args: in_thread (boolean): Overwrite the `threading` behaviour Returns: a new :class:`slack.events.Message`
entailment
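A usage sketch: given an incoming message, build a threaded reply and send it. The client call assumes the `query` method shown elsewhere in this module and the `slack.methods.CHAT_POST_MESSAGE` enum entry; the reply text and function name are made up:

import slack

def reply_in_thread(client, incoming):
    """Reply to `incoming` (a slack.events.Message) inside its thread."""
    response = incoming.response(in_thread=True)
    response["text"] = "Got it, looking into this."
    # serialize() JSON-encodes attachments as the chat.postMessage API expects.
    return client.query(slack.methods.CHAT_POST_MESSAGE,
                        data=response.serialize())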
def serialize(self) -> dict: """ Serialize the message for sending to slack API Returns: serialized message """ data = {**self} if "attachments" in self: data["attachments"] = json.dumps(self["attachments"]) return data
Serialize the message for sending to slack API Returns: serialized message
entailment
def register(self, event_type: str, handler: Any, **detail: Any) -> None: """ Register a new handler for a specific :class:`slack.events.Event` `type` (See `slack event types documentation <https://api.slack.com/events>`_ for a list of event types). The arbitrary keyword argument is used as a key/value pair to compare against what is in the incoming :class:`slack.events.Event` Args: event_type: Event type the handler is interested in handler: Callback **detail: Additional key for routing """ LOG.info("Registering %s, %s to %s", event_type, detail, handler) if len(detail) > 1: raise ValueError("Only one detail can be provided for additional routing") elif not detail: detail_key, detail_value = "*", "*" else: detail_key, detail_value = detail.popitem() if detail_key not in self._routes[event_type]: self._routes[event_type][detail_key] = {} if detail_value not in self._routes[event_type][detail_key]: self._routes[event_type][detail_key][detail_value] = [] self._routes[event_type][detail_key][detail_value].append(handler)
Register a new handler for a specific :class:`slack.events.Event` `type` (See `slack event types documentation <https://api.slack.com/events>`_ for a list of event types). The arbitrary keyword argument is used as a key/value pair to compare against what is in the incoming :class:`slack.events.Event` Args: event_type: Event type the handler is interested in handler: Callback **detail: Additional key for routing
entailment
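A registration sketch: the keyword argument adds one extra routing key, so the second handler only fires for reactions whose `reaction` field matches. The router class name `EventRouter` and the handler names are assumptions for illustration:

router = EventRouter()

def on_any_reaction(event):
    print("someone reacted with", event["reaction"])

def on_thumbsup(event):
    print("thumbs up from", event["user"])

# No detail: matches every reaction_added event.
router.register("reaction_added", on_any_reaction)
# One detail pair: only fires when event["reaction"] == "+1".
router.register("reaction_added", on_thumbsup, reaction="+1")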
def dispatch(self, event: Event) -> Iterator[Any]: """ Yields handlers matching the routing of the incoming :class:`slack.events.Event`. Args: event: :class:`slack.events.Event` Yields: handler """ LOG.debug('Dispatching event "%s"', event.get("type")) if event["type"] in self._routes: for detail_key, detail_values in self._routes.get( event["type"], {} ).items(): event_value = event.get(detail_key, "*") yield from detail_values.get(event_value, []) else: return
Yields handlers matching the routing of the incoming :class:`slack.events.Event`. Args: event: :class:`slack.events.Event` Yields: handler
entailment
def register( self, pattern: str, handler: Any, flags: int = 0, channel: str = "*", subtype: Optional[str] = None, ) -> None: """ Register a new handler for a specific :class:`slack.events.Message`. The routing is based on regex pattern matching the message text and the incoming slack channel. Args: pattern: Regex pattern matching the message text. handler: Callback flags: Regex flags. channel: Slack channel ID. Use * for any. subtype: Message subtype """ LOG.debug('Registering message endpoint "%s: %s"', pattern, handler) match = re.compile(pattern, flags) if subtype not in self._routes[channel]: self._routes[channel][subtype] = dict() if match in self._routes[channel][subtype]: self._routes[channel][subtype][match].append(handler) else: self._routes[channel][subtype][match] = [handler]
Register a new handler for a specific :class:`slack.events.Message`. The routing is based on regex pattern matching the message text and the incoming slack channel. Args: pattern: Regex pattern matching the message text. handler: Callback flags: Regex flags. channel: Slack channel ID. Use * for any. subtype: Message subtype
entailment
def dispatch(self, message: Message) -> Iterator[Any]:
    """
    Yields handlers matching the routing of the incoming
    :class:`slack.events.Message`

    Args:
        message: :class:`slack.events.Message`

    Yields:
        handler
    """
    if "text" in message:
        text = message["text"] or ""
    elif "message" in message:
        text = message["message"].get("text", "")
    else:
        text = ""

    msg_subtype = message.get("subtype")
    for subtype, matches in itertools.chain(
        self._routes[message["channel"]].items(), self._routes["*"].items()
    ):
        if msg_subtype == subtype or subtype is None:
            for match, endpoints in matches.items():
                if match.search(text):
                    yield from endpoints
Yields handlers matching the routing of the incoming :class:`slack.events.Message` Args: message: :class:`slack.events.Message` Yields: handler
entailment
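Registration and dispatch together, as a sketch; the router class name `MessageRouter` and the channel ID are assumptions:

router = MessageRouter()

def on_help(message):
    print("help requested in", message["channel"])

# Match "help" anywhere in the text, but only in one channel.
router.register(r"\bhelp\b", on_help, channel="C00000001")

incoming = Message({"channel": "C00000001", "text": "help me please"})
for handler in router.dispatch(incoming):
    handler(incoming)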
def query(  # type: ignore
    self,
    url: Union[str, methods],
    data: Optional[MutableMapping] = None,
    headers: Optional[MutableMapping] = None,
    as_json: Optional[bool] = None,
) -> dict:
    """
    Query the slack API

    When using :class:`slack.methods` the request is made `as_json` if
    available

    Args:
        url: :class:`slack.methods` or url string
        data: JSON encodable MutableMapping
        headers: Custom headers
        as_json: Post JSON to the slack API

    Returns:
        dictionary of slack API response data
    """
    url, body, headers = sansio.prepare_request(
        url=url,
        data=data,
        headers=headers,
        global_headers=self._headers,
        token=self._token,
        as_json=as_json,
    )
    return self._make_query(url, body, headers)
Query the slack API When using :class:`slack.methods` the request is made `as_json` if available Args: url: :class:`slack.methods` or url string data: JSON encodable MutableMapping headers: Custom headers as_json: Post JSON to the slack API Returns: dictionary of slack API response data
entailment
def rtm(  # type: ignore
    self, url: Optional[str] = None, bot_id: Optional[str] = None
) -> Iterator[events.Event]:
    """
    Iterate over events from the RTM API

    Args:
        url: Websocket connection url
        bot_id: Connecting bot ID

    Returns:
        :class:`slack.events.Event` or :class:`slack.events.Message`
    """
    while True:
        bot_id = bot_id or self._find_bot_id()
        url = url or self._find_rtm_url()
        for event in self._incoming_from_rtm(url, bot_id):
            yield event
        url = None
Iterate over events from the RTM API Args: url: Websocket connection url bot_id: Connecting bot ID Returns: :class:`slack.events.Event` or :class:`slack.events.Message`
entailment
def login(self, request, extra_context=None): """ Displays the login form for the given HttpRequest. """ context = { 'title': _('Log in'), 'app_path': request.get_full_path(), } if (REDIRECT_FIELD_NAME not in request.GET and REDIRECT_FIELD_NAME not in request.POST): context[REDIRECT_FIELD_NAME] = request.get_full_path() context.update(extra_context or {}) defaults = { 'extra_context': context, 'current_app': self.name, 'authentication_form': self.login_form or AdminAuthenticationForm, 'template_name': self.login_template or 'admin/login.html', } return login(request, **defaults)
Displays the login form for the given HttpRequest.
entailment
def get_config(config_file): """Get configuration from a file.""" def load(fp): try: return yaml.safe_load(fp) except yaml.YAMLError as e: sys.stderr.write(text_type(e)) sys.exit(1) # TODO document exit codes if config_file == '-': return load(sys.stdin) if not os.path.exists(config_file): sys.stderr.write('ERROR: Must either run next to config.yaml or' ' specify a config file.\n' + __doc__) sys.exit(2) with open(config_file) as fp: return load(fp)
Get configuration from a file.
entailment
def get_options(config_options, local_options, cli_options): """ Figure out what options to use based on the four places it can come from. Order of precedence: * cli_options specified by the user at the command line * local_options specified in the config file for the metric * config_options specified in the config file at the base * DEFAULT_OPTIONS hard coded defaults """ options = DEFAULT_OPTIONS.copy() if config_options is not None: options.update(config_options) if local_options is not None: options.update(local_options) if cli_options is not None: options.update(cli_options) return options
Figure out what options to use based on the four places it can come from. Order of precedence: * cli_options specified by the user at the command line * local_options specified in the config file for the metric * config_options specified in the config file at the base * DEFAULT_OPTIONS hard coded defaults
entailment
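The precedence rule falls out of the update order; a tiny self-contained illustration (the default option names and values here are made up, not the module's real defaults):

DEFAULT_OPTIONS = {"Period": 60, "Formatter": "%(Namespace)s"}

def get_options(config_options, local_options, cli_options):
    options = DEFAULT_OPTIONS.copy()
    # Later updates win, so CLI beats local beats config beats defaults.
    for override in (config_options, local_options, cli_options):
        if override is not None:
            options.update(override)
    return options

merged = get_options({"Period": 300}, {"Period": 120}, None)
assert merged["Period"] == 120                  # local beats config
assert merged["Formatter"] == "%(Namespace)s"   # untouched default survives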
def output_results(results, metric, options): """ Output the results to stdout. TODO: add AMPQ support for efficiency """ formatter = options['Formatter'] context = metric.copy() # XXX might need to sanitize this try: context['dimension'] = list(metric['Dimensions'].values())[0] except AttributeError: context['dimension'] = '' for result in results: stat_keys = metric['Statistics'] if not isinstance(stat_keys, list): stat_keys = [stat_keys] for statistic in stat_keys: context['statistic'] = statistic # get and then sanitize metric name, first copy the unit name from the # result to the context to keep the default format happy context['Unit'] = result['Unit'] metric_name = (formatter % context).replace('/', '.').lower() line = '{0} {1} {2}\n'.format( metric_name, result[statistic], timegm(result['Timestamp'].timetuple()), ) sys.stdout.write(line)
Output the results to stdout. TODO: add AMPQ support for efficiency
entailment
def download_to_path(self, gsuri, localpath, binary_mode=False, tmpdir=None):
    """
    This method is analogous to "gsutil cp gsuri localpath",
    but in a programmatically accessible way.

    The only difference is that we have to make a guess about the encoding
    of the file to not upset downstream file operations. If you are
    downloading a VCF, then "False" is great. If this is a BAM file you are
    asking for, you should enable the "binary_mode" to make sure the file
    doesn't get corrupted.

    gsuri: full GS-based URI, e.g. gs://cohorts/rocks.txt
    localpath: the path for the downloaded file, e.g. /mnt/cohorts/yep.txt
    binary_mode: (logical) if yes, the binary file operations will be used;
      if not, standard ascii-based ones.
    """
    bucket_name, gs_rel_path = self.parse_uri(gsuri)

    # And now request the handles for bucket and the file
    bucket = self._client.get_bucket(bucket_name)
    # Just assignment, no downloading (yet)
    ablob = bucket.get_blob(gs_rel_path)

    if not ablob:
        raise GoogleStorageIOError(
            "No such file on Google Storage: '{}'".format(gs_rel_path))

    # A tmp file to serve the intermediate phase;
    # should be on the same filesystem as localpath
    tmp_fid, tmp_file_path = tempfile.mkstemp(text=(not binary_mode),
                                              dir=tmpdir)
    # mkstemp returns an open descriptor we don't use; close it
    os.close(tmp_fid)

    # Set chunk_size to a reasonable default, see
    # https://github.com/GoogleCloudPlatform/google-cloud-python/issues/2222
    ablob.chunk_size = 1 << 30

    # Download starts in a sec....
    ablob.download_to_filename(client=self._client, filename=tmp_file_path)
    # ... and the download ends. Let's move our finished file over.

    # Instead of writing directly to the target file we first write to a
    # temporary file and then move it to its final location, so that
    # corrupted/incomplete data is around for as short a time as possible.
    return os.rename(tmp_file_path, localpath)
This method is analogous to "gsutil cp gsuri localpath", but in a programmatically accessible way. The only difference is that we have to make a guess about the encoding of the file to not upset downstream file operations. If you are downloading a VCF, then "False" is great. If this is a BAM file you are asking for, you should enable the "binary_mode" to make sure the file doesn't get corrupted. gsuri: full GS-based URI, e.g. gs://cohorts/rocks.txt localpath: the path for the downloaded file, e.g. /mnt/cohorts/yep.txt binary_mode: (logical) if yes, the binary file operations will be used; if not, standard ascii-based ones.
entailment
def round_float(f, digits, rounding=ROUND_HALF_UP): """ Accurate float rounding from http://stackoverflow.com/a/15398691. """ return Decimal(str(f)).quantize(Decimal(10) ** (-1 * digits), rounding=rounding)
Accurate float rounding from http://stackoverflow.com/a/15398691.
entailment
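The Decimal detour avoids binary-float surprises such as the drift of the built-in round; a self-contained demonstration (the definition is repeated so the snippet runs standalone):

from decimal import Decimal, ROUND_HALF_UP

def round_float(f, digits, rounding=ROUND_HALF_UP):
    return Decimal(str(f)).quantize(Decimal(10) ** (-1 * digits),
                                    rounding=rounding)

assert str(round_float(2.675, 2)) == "2.68"   # half always rounds up
assert round(2.675, 2) == 2.67                # built-in round drifts down here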
def float_str(f, min_digits=2, max_digits=6): """ Returns a string representing a float, where the number of significant digits is min_digits unless it takes more digits to hit a non-zero digit (and the number is 0 < x < 1). We stop looking for a non-zero digit after max_digits. """ if f >= 1 or f <= 0: return str(round_float(f, min_digits)) start_str = str(round_float(f, max_digits)) digits = start_str.split(".")[1] non_zero_indices = [] for i, digit in enumerate(digits): if digit != "0": non_zero_indices.append(i + 1) # Only saw 0s. if len(non_zero_indices) == 0: num_digits = min_digits else: # Of the non-zero digits, pick the num_digit'th of those (including any zeros) min_non_zero_indices = range(non_zero_indices[0], non_zero_indices[-1] + 1)[:min_digits] num_digits = min_non_zero_indices[-1] return str(round_float(f, num_digits))
Returns a string representing a float, where the number of significant digits is min_digits unless it takes more digits to hit a non-zero digit (and the number is 0 < x < 1). We stop looking for a non-zero digit after max_digits.
entailment
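Worked examples of the digit-hunting behaviour, assuming `float_str` and `round_float` above are in scope; these follow directly from the definitions:

assert float_str(3.14159) == "3.14"      # >= 1: plain two digits
assert float_str(0.5) == "0.5"           # trailing zeros are not padded
assert float_str(0.000123) == "0.00012"  # digs until two significant digits
assert float_str(0.000000123) == "0.00"  # gives up past max_digits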
def default_format(self): """ Returns full name (first and last) if name is available. If not, returns username if available. If not available too, returns the user id as a string. """ user = self.user if user.first_name is not None: return self.full_name elif user.username is not None: return user.username else: return str(user.id)
Returns full name (first and last) if name is available. If not, returns username if available. If not available too, returns the user id as a string.
entailment
def full_name(self): """ Returns the first and last name of the user separated by a space. """ formatted_user = [] if self.user.first_name is not None: formatted_user.append(self.user.first_name) if self.user.last_name is not None: formatted_user.append(self.user.last_name) return " ".join(formatted_user)
Returns the first and last name of the user separated by a space.
entailment
def full_format(self):
    """
    Returns the full name (first and last parts), and the username
    between brackets if the user has it.
    If there is no info about the user, returns the user id
    between < and >.
    """
    parts = []
    if self.full_name:
        parts.append(self.full_name)
    if self.user.username is not None:
        parts.append("[" + self.user.username + "]")
    formatted_user = " ".join(parts)
    if not formatted_user:
        formatted_user = self._id()
    return formatted_user
Returns the full name (first and last parts), and the username between brackets if the user has it. If there is no info about the user, returns the user id between < and >.
entailment