Dataset columns (type and value/length range):
id: int32 (0 to 252k)
repo: stringlengths (7 to 55)
path: stringlengths (4 to 127)
func_name: stringlengths (1 to 88)
original_string: stringlengths (75 to 19.8k)
language: stringclasses (1 value)
code: stringlengths (75 to 19.8k)
code_tokens: list
docstring: stringlengths (3 to 17.3k)
docstring_tokens: list
sha: stringlengths (40 to 40)
url: stringlengths (87 to 242)
10,200
riptano/ccm
ccmlib/common.py
is_modern_windows_install
def is_modern_windows_install(version):
    """
    The 2.1 release line was when Cassandra received beta windows support.
    Many features are gated based on that added compatibility.

    Handles floats, strings, and LooseVersions by first converting
    all three types to a string, then to a LooseVersion.
    """
    version = LooseVersion(str(version))
    if is_win() and version >= LooseVersion('2.1'):
        return True
    else:
        return False
python
[ "def", "is_modern_windows_install", "(", "version", ")", ":", "version", "=", "LooseVersion", "(", "str", "(", "version", ")", ")", "if", "is_win", "(", ")", "and", "version", ">=", "LooseVersion", "(", "'2.1'", ")", ":", "return", "True", "else", ":", "return", "False" ]
The 2.1 release line was when Cassandra received beta windows support. Many features are gated based on that added compatibility. Handles floats, strings, and LooseVersions by first converting all three types to a string, then to a LooseVersion.
[ "The", "2", ".", "1", "release", "line", "was", "when", "Cassandra", "received", "beta", "windows", "support", ".", "Many", "features", "are", "gated", "based", "on", "that", "added", "compatibility", "." ]
275699f79d102b5039b79cc17fa6305dccf18412
https://github.com/riptano/ccm/blob/275699f79d102b5039b79cc17fa6305dccf18412/ccmlib/common.py#L354-L365
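A minimal usage sketch for the function above, assuming ccmlib is installed and importable; the sample version values are illustrative only:

from ccmlib import common

# Floats, strings, and LooseVersions are all accepted; each is coerced to a
# LooseVersion before the comparison. Returns True only on Windows for >= 2.1.
for candidate in (2.0, "2.1.3", "3.11"):
    print(candidate, common.is_modern_windows_install(candidate))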
10,201
riptano/ccm
ccmlib/common.py
get_jdk_version
def get_jdk_version():
    """
    Retrieve the Java version as reported in the quoted string returned
    by invoking 'java -version'.

    Works for Java 1.8, Java 9 and should also be fine for Java 10.
    """
    try:
        version = subprocess.check_output(['java', '-version'], stderr=subprocess.STDOUT)
    except OSError:
        print_("ERROR: Could not find java. Is it in your path?")
        exit(1)

    return _get_jdk_version(version)
python
[ "def", "get_jdk_version", "(", ")", ":", "try", ":", "version", "=", "subprocess", ".", "check_output", "(", "[", "'java'", ",", "'-version'", "]", ",", "stderr", "=", "subprocess", ".", "STDOUT", ")", "except", "OSError", ":", "print_", "(", "\"ERROR: Could not find java. Is it in your path?\"", ")", "exit", "(", "1", ")", "return", "_get_jdk_version", "(", "version", ")" ]
Retrieve the Java version as reported in the quoted string returned by invoking 'java -version'. Works for Java 1.8, Java 9 and should also be fine for Java 10.
[ "Retrieve", "the", "Java", "version", "as", "reported", "in", "the", "quoted", "string", "returned", "by", "invoking", "java", "-", "version", "." ]
275699f79d102b5039b79cc17fa6305dccf18412
https://github.com/riptano/ccm/blob/275699f79d102b5039b79cc17fa6305dccf18412/ccmlib/common.py#L706-L719
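A hedged usage sketch, assuming a java binary is on the PATH; the exact format of the returned string depends on the _get_jdk_version helper, which is not shown in this row:

from ccmlib import common

# Shells out to `java -version`; exits the process with status 1 if java
# cannot be found on the PATH.
jdk = common.get_jdk_version()
print("Detected JDK:", jdk)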
10,202
riptano/ccm
ccmlib/common.py
wait_for_any_log
def wait_for_any_log(nodes, pattern, timeout, filename='system.log', marks=None):
    """
    Look for a pattern in the system.log of any in a given list of nodes.

    @param nodes The list of nodes whose logs to scan
    @param pattern The target pattern
    @param timeout How long to wait for the pattern. Note that
        strictly speaking, timeout is not really a timeout,
        but a maximum number of attempts. This implies that
        the all the grepping takes no time at all, so it is
        somewhat inaccurate, but probably close enough.
    @param marks A dict of nodes to marks in the file. Keys must match the first param list.
    @return The first node in whose log the pattern was found
    """
    if marks is None:
        marks = {}
    for _ in range(timeout):
        for node in nodes:
            found = node.grep_log(pattern, filename=filename, from_mark=marks.get(node, None))
            if found:
                return node
        time.sleep(1)

    raise TimeoutError(time.strftime("%d %b %Y %H:%M:%S", time.gmtime()) +
                       " Unable to find: " + repr(pattern) +
                       " in any node log within " + str(timeout) + "s")
python
[ "def", "wait_for_any_log", "(", "nodes", ",", "pattern", ",", "timeout", ",", "filename", "=", "'system.log'", ",", "marks", "=", "None", ")", ":", "if", "marks", "is", "None", ":", "marks", "=", "{", "}", "for", "_", "in", "range", "(", "timeout", ")", ":", "for", "node", "in", "nodes", ":", "found", "=", "node", ".", "grep_log", "(", "pattern", ",", "filename", "=", "filename", ",", "from_mark", "=", "marks", ".", "get", "(", "node", ",", "None", ")", ")", "if", "found", ":", "return", "node", "time", ".", "sleep", "(", "1", ")", "raise", "TimeoutError", "(", "time", ".", "strftime", "(", "\"%d %b %Y %H:%M:%S\"", ",", "time", ".", "gmtime", "(", ")", ")", "+", "\" Unable to find: \"", "+", "repr", "(", "pattern", ")", "+", "\" in any node log within \"", "+", "str", "(", "timeout", ")", "+", "\"s\"", ")" ]
Look for a pattern in the system.log of any in a given list of nodes. @param nodes The list of nodes whose logs to scan @param pattern The target pattern @param timeout How long to wait for the pattern. Note that strictly speaking, timeout is not really a timeout, but a maximum number of attempts. This implies that the all the grepping takes no time at all, so it is somewhat inaccurate, but probably close enough. @param marks A dict of nodes to marks in the file. Keys must match the first param list. @return The first node in whose log the pattern was found
[ "Look", "for", "a", "pattern", "in", "the", "system", ".", "log", "of", "any", "in", "a", "given", "list", "of", "nodes", "." ]
275699f79d102b5039b79cc17fa6305dccf18412
https://github.com/riptano/ccm/blob/275699f79d102b5039b79cc17fa6305dccf18412/ccmlib/common.py#L769-L793
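An illustrative sketch of how the helper above might be called from a test, assuming an already-started ccm cluster object named `cluster` (not defined in this row) whose nodes provide grep_log() and mark_log(); the log pattern is only an example:

from ccmlib import common

nodes = cluster.nodelist()                          # hypothetical cluster built elsewhere
marks = {node: node.mark_log() for node in nodes}   # only match lines written from now on
node = common.wait_for_any_log(nodes, "Starting listening for CQL clients",
                               timeout=120, marks=marks)
print("Pattern first appeared on", node.name)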
10,203
riptano/ccm
ccmlib/repository.py
download_version
def download_version(version, url=None, verbose=False, binary=False):
    """Download, extract, and build Cassandra tarball.

    if binary == True, download precompiled tarball, otherwise build from source tarball.
    """
    assert_jdk_valid_for_cassandra_version(version)

    archive_url = ARCHIVE
    if CCM_CONFIG.has_option('repositories', 'cassandra'):
        archive_url = CCM_CONFIG.get('repositories', 'cassandra')
    if binary:
        archive_url = "%s/%s/apache-cassandra-%s-bin.tar.gz" % (archive_url, version.split('-')[0], version) if url is None else url
    else:
        archive_url = "%s/%s/apache-cassandra-%s-src.tar.gz" % (archive_url, version.split('-')[0], version) if url is None else url
    _, target = tempfile.mkstemp(suffix=".tar.gz", prefix="ccm-")
    try:
        __download(archive_url, target, show_progress=verbose)
        common.info("Extracting {} as version {} ...".format(target, version))
        tar = tarfile.open(target)
        dir = tar.next().name.split("/")[0]  # pylint: disable=all
        tar.extractall(path=__get_dir())
        tar.close()
        target_dir = os.path.join(__get_dir(), version)
        if os.path.exists(target_dir):
            rmdirs(target_dir)
        shutil.move(os.path.join(__get_dir(), dir), target_dir)

        if binary:
            # Binary installs don't have a build.xml that is needed
            # for pulling the version from. Write the version number
            # into a file to read later in common.get_version_from_build()
            with open(os.path.join(target_dir, '0.version.txt'), 'w') as f:
                f.write(version)
        else:
            compile_version(version, target_dir, verbose=verbose)

    except urllib.error.URLError as e:
        msg = "Invalid version {}".format(version) if url is None else "Invalid url {}".format(url)
        msg = msg + " (underlying error is: {})".format(str(e))
        raise ArgumentError(msg)
    except tarfile.ReadError as e:
        raise ArgumentError("Unable to uncompress downloaded file: {}".format(str(e)))
    except CCMError as e:
        # wipe out the directory if anything goes wrong. Otherwise we will assume it has been compiled the next time it runs.
        try:
            rmdirs(target_dir)
            common.error("Deleted {} due to error".format(target_dir))
        except:
            raise CCMError("Building C* version {} failed. Attempted to delete {} but failed. This will need to be manually deleted".format(version, target_dir))
        raise e
python
[ "def", "download_version", "(", "version", ",", "url", "=", "None", ",", "verbose", "=", "False", ",", "binary", "=", "False", ")", ":", "assert_jdk_valid_for_cassandra_version", "(", "version", ")", "archive_url", "=", "ARCHIVE", "if", "CCM_CONFIG", ".", "has_option", "(", "'repositories'", ",", "'cassandra'", ")", ":", "archive_url", "=", "CCM_CONFIG", ".", "get", "(", "'repositories'", ",", "'cassandra'", ")", "if", "binary", ":", "archive_url", "=", "\"%s/%s/apache-cassandra-%s-bin.tar.gz\"", "%", "(", "archive_url", ",", "version", ".", "split", "(", "'-'", ")", "[", "0", "]", ",", "version", ")", "if", "url", "is", "None", "else", "url", "else", ":", "archive_url", "=", "\"%s/%s/apache-cassandra-%s-src.tar.gz\"", "%", "(", "archive_url", ",", "version", ".", "split", "(", "'-'", ")", "[", "0", "]", ",", "version", ")", "if", "url", "is", "None", "else", "url", "_", ",", "target", "=", "tempfile", ".", "mkstemp", "(", "suffix", "=", "\".tar.gz\"", ",", "prefix", "=", "\"ccm-\"", ")", "try", ":", "__download", "(", "archive_url", ",", "target", ",", "show_progress", "=", "verbose", ")", "common", ".", "info", "(", "\"Extracting {} as version {} ...\"", ".", "format", "(", "target", ",", "version", ")", ")", "tar", "=", "tarfile", ".", "open", "(", "target", ")", "dir", "=", "tar", ".", "next", "(", ")", ".", "name", ".", "split", "(", "\"/\"", ")", "[", "0", "]", "# pylint: disable=all", "tar", ".", "extractall", "(", "path", "=", "__get_dir", "(", ")", ")", "tar", ".", "close", "(", ")", "target_dir", "=", "os", ".", "path", ".", "join", "(", "__get_dir", "(", ")", ",", "version", ")", "if", "os", ".", "path", ".", "exists", "(", "target_dir", ")", ":", "rmdirs", "(", "target_dir", ")", "shutil", ".", "move", "(", "os", ".", "path", ".", "join", "(", "__get_dir", "(", ")", ",", "dir", ")", ",", "target_dir", ")", "if", "binary", ":", "# Binary installs don't have a build.xml that is needed", "# for pulling the version from. Write the version number", "# into a file to read later in common.get_version_from_build()", "with", "open", "(", "os", ".", "path", ".", "join", "(", "target_dir", ",", "'0.version.txt'", ")", ",", "'w'", ")", "as", "f", ":", "f", ".", "write", "(", "version", ")", "else", ":", "compile_version", "(", "version", ",", "target_dir", ",", "verbose", "=", "verbose", ")", "except", "urllib", ".", "error", ".", "URLError", "as", "e", ":", "msg", "=", "\"Invalid version {}\"", ".", "format", "(", "version", ")", "if", "url", "is", "None", "else", "\"Invalid url {}\"", ".", "format", "(", "url", ")", "msg", "=", "msg", "+", "\" (underlying error is: {})\"", ".", "format", "(", "str", "(", "e", ")", ")", "raise", "ArgumentError", "(", "msg", ")", "except", "tarfile", ".", "ReadError", "as", "e", ":", "raise", "ArgumentError", "(", "\"Unable to uncompress downloaded file: {}\"", ".", "format", "(", "str", "(", "e", ")", ")", ")", "except", "CCMError", "as", "e", ":", "# wipe out the directory if anything goes wrong. Otherwise we will assume it has been compiled the next time it runs.", "try", ":", "rmdirs", "(", "target_dir", ")", "common", ".", "error", "(", "\"Deleted {} due to error\"", ".", "format", "(", "target_dir", ")", ")", "except", ":", "raise", "CCMError", "(", "\"Building C* version {} failed. Attempted to delete {} but failed. This will need to be manually deleted\"", ".", "format", "(", "version", ",", "target_dir", ")", ")", "raise", "e" ]
Download, extract, and build Cassandra tarball. if binary == True, download precompiled tarball, otherwise build from source tarball.
[ "Download", "extract", "and", "build", "Cassandra", "tarball", "." ]
275699f79d102b5039b79cc17fa6305dccf18412
https://github.com/riptano/ccm/blob/275699f79d102b5039b79cc17fa6305dccf18412/ccmlib/repository.py#L331-L381
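A usage sketch, assuming ccmlib's repository module is importable and network access is available; the version string is only an example:

from ccmlib import repository

# Downloads the precompiled 3.11.4 tarball into ccm's local repository
# directory; with binary=False the source tarball is fetched and compiled.
repository.download_version("3.11.4", verbose=True, binary=True)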
10,204
riptano/ccm
ccmlib/repository.py
get_tagged_version_numbers
def get_tagged_version_numbers(series='stable'):
    """Retrieve git tags and find version numbers for a release series

    series - 'stable', 'oldstable', or 'testing'"""
    releases = []
    if series == 'testing':
        # Testing releases always have a hyphen after the version number:
        tag_regex = re.compile('^refs/tags/cassandra-([0-9]+\.[0-9]+\.[0-9]+-.*$)')
    else:
        # Stable and oldstable releases are just a number:
        tag_regex = re.compile('^refs/tags/cassandra-([0-9]+\.[0-9]+\.[0-9]+$)')

    tag_url = urllib.request.urlopen(GITHUB_TAGS)
    for ref in (i.get('ref', '') for i in json.loads(tag_url.read())):
        m = tag_regex.match(ref)
        if m:
            releases.append(LooseVersion(m.groups()[0]))

    # Sort by semver:
    releases.sort(reverse=True)

    stable_major_version = LooseVersion(str(releases[0].version[0]) + "." + str(releases[0].version[1]))
    stable_releases = [r for r in releases if r >= stable_major_version]
    oldstable_releases = [r for r in releases if r not in stable_releases]
    oldstable_major_version = LooseVersion(str(oldstable_releases[0].version[0]) + "." + str(oldstable_releases[0].version[1]))
    oldstable_releases = [r for r in oldstable_releases if r >= oldstable_major_version]

    if series == 'testing':
        return [r.vstring for r in releases]
    elif series == 'stable':
        return [r.vstring for r in stable_releases]
    elif series == 'oldstable':
        return [r.vstring for r in oldstable_releases]
    else:
        raise AssertionError("unknown release series: {series}".format(series=series))
python
[ "def", "get_tagged_version_numbers", "(", "series", "=", "'stable'", ")", ":", "releases", "=", "[", "]", "if", "series", "==", "'testing'", ":", "# Testing releases always have a hyphen after the version number:", "tag_regex", "=", "re", ".", "compile", "(", "'^refs/tags/cassandra-([0-9]+\\.[0-9]+\\.[0-9]+-.*$)'", ")", "else", ":", "# Stable and oldstable releases are just a number:", "tag_regex", "=", "re", ".", "compile", "(", "'^refs/tags/cassandra-([0-9]+\\.[0-9]+\\.[0-9]+$)'", ")", "tag_url", "=", "urllib", ".", "request", ".", "urlopen", "(", "GITHUB_TAGS", ")", "for", "ref", "in", "(", "i", ".", "get", "(", "'ref'", ",", "''", ")", "for", "i", "in", "json", ".", "loads", "(", "tag_url", ".", "read", "(", ")", ")", ")", ":", "m", "=", "tag_regex", ".", "match", "(", "ref", ")", "if", "m", ":", "releases", ".", "append", "(", "LooseVersion", "(", "m", ".", "groups", "(", ")", "[", "0", "]", ")", ")", "# Sort by semver:", "releases", ".", "sort", "(", "reverse", "=", "True", ")", "stable_major_version", "=", "LooseVersion", "(", "str", "(", "releases", "[", "0", "]", ".", "version", "[", "0", "]", ")", "+", "\".\"", "+", "str", "(", "releases", "[", "0", "]", ".", "version", "[", "1", "]", ")", ")", "stable_releases", "=", "[", "r", "for", "r", "in", "releases", "if", "r", ">=", "stable_major_version", "]", "oldstable_releases", "=", "[", "r", "for", "r", "in", "releases", "if", "r", "not", "in", "stable_releases", "]", "oldstable_major_version", "=", "LooseVersion", "(", "str", "(", "oldstable_releases", "[", "0", "]", ".", "version", "[", "0", "]", ")", "+", "\".\"", "+", "str", "(", "oldstable_releases", "[", "0", "]", ".", "version", "[", "1", "]", ")", ")", "oldstable_releases", "=", "[", "r", "for", "r", "in", "oldstable_releases", "if", "r", ">=", "oldstable_major_version", "]", "if", "series", "==", "'testing'", ":", "return", "[", "r", ".", "vstring", "for", "r", "in", "releases", "]", "elif", "series", "==", "'stable'", ":", "return", "[", "r", ".", "vstring", "for", "r", "in", "stable_releases", "]", "elif", "series", "==", "'oldstable'", ":", "return", "[", "r", ".", "vstring", "for", "r", "in", "oldstable_releases", "]", "else", ":", "raise", "AssertionError", "(", "\"unknown release series: {series}\"", ".", "format", "(", "series", "=", "series", ")", ")" ]
Retrieve git tags and find version numbers for a release series series - 'stable', 'oldstable', or 'testing
[ "Retrieve", "git", "tags", "and", "find", "version", "numbers", "for", "a", "release", "series" ]
275699f79d102b5039b79cc17fa6305dccf18412
https://github.com/riptano/ccm/blob/275699f79d102b5039b79cc17fa6305dccf18412/ccmlib/repository.py#L477-L511
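A usage sketch; this hits the GitHub tags API, so network access is required and the printed versions change over time:

from ccmlib import repository

print(repository.get_tagged_version_numbers('stable')[:5])
print(repository.get_tagged_version_numbers('oldstable')[:5])
print(repository.get_tagged_version_numbers('testing')[:5])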
10,205
riptano/ccm
ccmlib/remote.py
SSHClient.__connect
def __connect(host, port, username, password, private_key):
    """
    Establish remote connection

    :param host: Hostname or IP address to connect to
    :param port: Port number to use for SSH
    :param username: Username credentials for SSH access
    :param password: Password credentials for SSH access (or private key passphrase)
    :param private_key: Private key to bypass clear text password
    :return: Paramiko SSH client instance if connection was established
    :raises Exception if connection was unsuccessful
    """
    # Initialize the SSH connection
    ssh = paramiko.SSHClient()
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    if private_key is not None and password is not None:
        private_key = paramiko.RSAKey.from_private_key_file(private_key, password)
    elif private_key is not None:
        private_key = paramiko.RSAKey.from_private_key_file(private_key, password)

    # Establish the SSH connection
    try:
        ssh.connect(host, port, username, password, private_key)
    except Exception as e:
        raise e

    # Return the established SSH connection
    return ssh
python
[ "def", "__connect", "(", "host", ",", "port", ",", "username", ",", "password", ",", "private_key", ")", ":", "# Initialize the SSH connection", "ssh", "=", "paramiko", ".", "SSHClient", "(", ")", "ssh", ".", "set_missing_host_key_policy", "(", "paramiko", ".", "AutoAddPolicy", "(", ")", ")", "if", "private_key", "is", "not", "None", "and", "password", "is", "not", "None", ":", "private_key", "=", "paramiko", ".", "RSAKey", ".", "from_private_key_file", "(", "private_key", ",", "password", ")", "elif", "private_key", "is", "not", "None", ":", "private_key", "=", "paramiko", ".", "RSAKey", ".", "from_private_key_file", "(", "private_key", ",", "password", ")", "# Establish the SSH connection", "try", ":", "ssh", ".", "connect", "(", "host", ",", "port", ",", "username", ",", "password", ",", "private_key", ")", "except", "Exception", "as", "e", ":", "raise", "e", "# Return the established SSH connection", "return", "ssh" ]
Establish remote connection :param host: Hostname or IP address to connect to :param port: Port number to use for SSH :param username: Username credentials for SSH access :param password: Password credentials for SSH access (or private key passphrase) :param private_key: Private key to bypass clear text password :return: Paramiko SSH client instance if connection was established :raises Exception if connection was unsuccessful
[ "Establish", "remote", "connection" ]
275699f79d102b5039b79cc17fa6305dccf18412
https://github.com/riptano/ccm/blob/275699f79d102b5039b79cc17fa6305dccf18412/ccmlib/remote.py#L142-L169
10,206
riptano/ccm
ccmlib/remote.py
SSHClient.execute_ccm_command
def execute_ccm_command(self, ccm_args, is_displayed=True):
    """
    Execute a CCM command on the remote server

    :param ccm_args: CCM arguments to execute remotely
    :param is_displayed: True if information should be display; false to return output (default: true)
    :return: A tuple defining the execution of the command
        * output - The output of the execution if the output was not displayed
        * exit_status - The exit status of remotely executed script
    """
    return self.execute(["ccm"] + ccm_args, profile=self.profile)
python
[ "def", "execute_ccm_command", "(", "self", ",", "ccm_args", ",", "is_displayed", "=", "True", ")", ":", "return", "self", ".", "execute", "(", "[", "\"ccm\"", "]", "+", "ccm_args", ",", "profile", "=", "self", ".", "profile", ")" ]
Execute a CCM command on the remote server :param ccm_args: CCM arguments to execute remotely :param is_displayed: True if information should be display; false to return output (default: true) :return: A tuple defining the execution of the command * output - The output of the execution if the output was not displayed * exit_status - The exit status of remotely executed script
[ "Execute", "a", "CCM", "command", "on", "the", "remote", "server" ]
275699f79d102b5039b79cc17fa6305dccf18412
https://github.com/riptano/ccm/blob/275699f79d102b5039b79cc17fa6305dccf18412/ccmlib/remote.py#L244-L255
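A sketch of calling this method, assuming `client` is an already-connected ccmlib.remote.SSHClient instance (its constructor is not shown in these rows); per the docstring, the call returns an (output, exit_status) tuple. The ccm arguments are placeholders:

# `client` is a connected ccmlib.remote.SSHClient built elsewhere.
output, exit_status = client.execute_ccm_command(["create", "test", "-v", "3.11.4"])
if exit_status != 0:
    print("remote ccm command failed:", output)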
10,207
riptano/ccm
ccmlib/remote.py
SSHClient.execute_python_script
def execute_python_script(self, script):
    """
    Execute a python script of the remote server

    :param script: Inline script to convert to a file and execute remotely
    :return: The output of the script execution
    """
    # Create the local file to copy to remote
    file_handle, filename = tempfile.mkstemp()
    temp_file = os.fdopen(file_handle, "wt")
    temp_file.write(script)
    temp_file.close()

    # Put the file into the remote user directory
    self.put(filename, "python_execute.py")
    command = ["python", "python_execute.py"]

    # Execute the python script on the remote system, clean up, and return the output
    output = self.execute(command, False)
    self.remove("python_execute.py")
    os.unlink(filename)
    return output
python
[ "def", "execute_python_script", "(", "self", ",", "script", ")", ":", "# Create the local file to copy to remote", "file_handle", ",", "filename", "=", "tempfile", ".", "mkstemp", "(", ")", "temp_file", "=", "os", ".", "fdopen", "(", "file_handle", ",", "\"wt\"", ")", "temp_file", ".", "write", "(", "script", ")", "temp_file", ".", "close", "(", ")", "# Put the file into the remote user directory", "self", ".", "put", "(", "filename", ",", "\"python_execute.py\"", ")", "command", "=", "[", "\"python\"", ",", "\"python_execute.py\"", "]", "# Execute the python script on the remote system, clean up, and return the output", "output", "=", "self", ".", "execute", "(", "command", ",", "False", ")", "self", ".", "remove", "(", "\"python_execute.py\"", ")", "os", ".", "unlink", "(", "filename", ")", "return", "output" ]
Execute a python script of the remote server :param script: Inline script to convert to a file and execute remotely :return: The output of the script execution
[ "Execute", "a", "python", "script", "of", "the", "remote", "server" ]
275699f79d102b5039b79cc17fa6305dccf18412
https://github.com/riptano/ccm/blob/275699f79d102b5039b79cc17fa6305dccf18412/ccmlib/remote.py#L257-L278
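A sketch of running an inline script remotely, again assuming a connected SSHClient instance named `client`; the inline script is only an example:

# The script is written to a local temp file, copied to the remote home
# directory as python_execute.py, executed with `python`, then cleaned up.
output = client.execute_python_script("import platform\nprint(platform.node())")
print(output)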
10,208
riptano/ccm
ccmlib/remote.py
SSHClient.__put_dir
def __put_dir(self, ftp, local_path, remote_path=None):
    """
    Helper function to perform copy operation to remote server

    :param ftp: SFTP handle to perform copy operation(s)
    :param local_path: Local path to copy to; can be file or directory
    :param remote_path: Remote path to copy to (default: None - Copies file or directory
        to home directory directory on the remote server)
    """
    # Determine if local_path should be put into remote user directory
    if remote_path is None:
        remote_path = os.path.basename(local_path)
    remote_path += self.separator

    # Iterate over the local path and perform copy operations to remote server
    for current_path, directories, files in os.walk(local_path):
        # Create the remote directory (if needed)
        try:
            ftp.listdir(remote_path)
        except IOError:
            ftp.mkdir(remote_path)

        # Copy the files in the current directory to the remote path
        for filename in files:
            ftp.put(os.path.join(current_path, filename), remote_path + filename)

        # Copy the directory in the current directory to the remote path
        for directory in directories:
            self.__put_dir(ftp, os.path.join(current_path, directory), remote_path + directory)
python
[ "def", "__put_dir", "(", "self", ",", "ftp", ",", "local_path", ",", "remote_path", "=", "None", ")", ":", "# Determine if local_path should be put into remote user directory", "if", "remote_path", "is", "None", ":", "remote_path", "=", "os", ".", "path", ".", "basename", "(", "local_path", ")", "remote_path", "+=", "self", ".", "separator", "# Iterate over the local path and perform copy operations to remote server", "for", "current_path", ",", "directories", ",", "files", "in", "os", ".", "walk", "(", "local_path", ")", ":", "# Create the remote directory (if needed)", "try", ":", "ftp", ".", "listdir", "(", "remote_path", ")", "except", "IOError", ":", "ftp", ".", "mkdir", "(", "remote_path", ")", "# Copy the files in the current directory to the remote path", "for", "filename", "in", "files", ":", "ftp", ".", "put", "(", "os", ".", "path", ".", "join", "(", "current_path", ",", "filename", ")", ",", "remote_path", "+", "filename", ")", "# Copy the directory in the current directory to the remote path", "for", "directory", "in", "directories", ":", "self", ".", "__put_dir", "(", "ftp", ",", "os", ".", "path", ".", "join", "(", "current_path", ",", "directory", ")", ",", "remote_path", "+", "directory", ")" ]
Helper function to perform copy operation to remote server :param ftp: SFTP handle to perform copy operation(s) :param local_path: Local path to copy to; can be file or directory :param remote_path: Remote path to copy to (default: None - Copies file or directory to home directory directory on the remote server)
[ "Helper", "function", "to", "perform", "copy", "operation", "to", "remote", "server" ]
275699f79d102b5039b79cc17fa6305dccf18412
https://github.com/riptano/ccm/blob/275699f79d102b5039b79cc17fa6305dccf18412/ccmlib/remote.py#L299-L326
10,209
riptano/ccm
ccmlib/remote.py
SSHClient.remove
def remove(self, remote_path):
    """
    Delete a file or directory recursively on the remote server

    :param remote_path: Remote path to remove
    """
    # Based on the remote file stats; remove a file or directory recursively
    ftp = self.ssh.open_sftp()
    if stat.S_ISDIR(ftp.stat(remote_path).st_mode):
        self.__remove_dir(ftp, remote_path)
    else:
        ftp.remove(remote_path)
    ftp.close()
python
[ "def", "remove", "(", "self", ",", "remote_path", ")", ":", "# Based on the remote file stats; remove a file or directory recursively", "ftp", "=", "self", ".", "ssh", ".", "open_sftp", "(", ")", "if", "stat", ".", "S_ISDIR", "(", "ftp", ".", "stat", "(", "remote_path", ")", ".", "st_mode", ")", ":", "self", ".", "__remove_dir", "(", "ftp", ",", "remote_path", ")", "else", ":", "ftp", ".", "remove", "(", "remote_path", ")", "ftp", ".", "close", "(", ")" ]
Delete a file or directory recursively on the remote server :param remote_path: Remote path to remove
[ "Delete", "a", "file", "or", "directory", "recursively", "on", "the", "remote", "server" ]
275699f79d102b5039b79cc17fa6305dccf18412
https://github.com/riptano/ccm/blob/275699f79d102b5039b79cc17fa6305dccf18412/ccmlib/remote.py#L328-L340
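A sketch of the remove() call, assuming a connected SSHClient instance `client`; the remote paths are placeholders:

client.remove("python_execute.py")   # single file: removed directly via SFTP
client.remove("old_run_logs")        # directory: removed recursively via __remove_dir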
10,210
riptano/ccm
ccmlib/remote.py
SSHClient.__remove_dir
def __remove_dir(self, ftp, remote_path):
    """
    Helper function to perform delete operation on the remote server

    :param ftp: SFTP handle to perform delete operation(s)
    :param remote_path: Remote path to remove
    """
    # Iterate over the remote path and perform remove operations
    files = ftp.listdir(remote_path)
    for filename in files:
        # Attempt to remove the file (if exception then path is directory)
        path = remote_path + self.separator + filename
        try:
            ftp.remove(path)
        except IOError:
            self.__remove_dir(ftp, path)

    # Remove the original directory requested
    ftp.rmdir(remote_path)
python
[ "def", "__remove_dir", "(", "self", ",", "ftp", ",", "remote_path", ")", ":", "# Iterate over the remote path and perform remove operations", "files", "=", "ftp", ".", "listdir", "(", "remote_path", ")", "for", "filename", "in", "files", ":", "# Attempt to remove the file (if exception then path is directory)", "path", "=", "remote_path", "+", "self", ".", "separator", "+", "filename", "try", ":", "ftp", ".", "remove", "(", "path", ")", "except", "IOError", ":", "self", ".", "__remove_dir", "(", "ftp", ",", "path", ")", "# Remove the original directory requested", "ftp", ".", "rmdir", "(", "remote_path", ")" ]
Helper function to perform delete operation on the remote server :param ftp: SFTP handle to perform delete operation(s) :param remote_path: Remote path to remove
[ "Helper", "function", "to", "perform", "delete", "operation", "on", "the", "remote", "server" ]
275699f79d102b5039b79cc17fa6305dccf18412
https://github.com/riptano/ccm/blob/275699f79d102b5039b79cc17fa6305dccf18412/ccmlib/remote.py#L342-L360
10,211
riptano/ccm
ccmlib/remote.py
RemoteOptionsParser.usage
def usage(self):
    """
    Get the usage for the remote exectuion options

    :return Usage for the remote execution options
    """
    # Retrieve the text for just the arguments
    usage = self.parser.format_help().split("optional arguments:")[1]

    # Remove any blank lines and return
    return "Remote Options:" + os.linesep + \
        os.linesep.join([s for s in usage.splitlines() if s])
python
[ "def", "usage", "(", "self", ")", ":", "# Retrieve the text for just the arguments", "usage", "=", "self", ".", "parser", ".", "format_help", "(", ")", ".", "split", "(", "\"optional arguments:\"", ")", "[", "1", "]", "# Remove any blank lines and return", "return", "\"Remote Options:\"", "+", "os", ".", "linesep", "+", "os", ".", "linesep", ".", "join", "(", "[", "s", "for", "s", "in", "usage", ".", "splitlines", "(", ")", "if", "s", "]", ")" ]
Get the usage for the remote exectuion options :return Usage for the remote execution options
[ "Get", "the", "usage", "for", "the", "remote", "exectuion", "options" ]
275699f79d102b5039b79cc17fa6305dccf18412
https://github.com/riptano/ccm/blob/275699f79d102b5039b79cc17fa6305dccf18412/ccmlib/remote.py#L490-L501
10,212
mattupstate/flask-jwt
flask_jwt/__init__.py
jwt_required
def jwt_required(realm=None):
    """View decorator that requires a valid JWT token to be present in the request

    :param realm: an optional realm
    """
    def wrapper(fn):
        @wraps(fn)
        def decorator(*args, **kwargs):
            _jwt_required(realm or current_app.config['JWT_DEFAULT_REALM'])
            return fn(*args, **kwargs)
        return decorator
    return wrapper
python
[ "def", "jwt_required", "(", "realm", "=", "None", ")", ":", "def", "wrapper", "(", "fn", ")", ":", "@", "wraps", "(", "fn", ")", "def", "decorator", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "_jwt_required", "(", "realm", "or", "current_app", ".", "config", "[", "'JWT_DEFAULT_REALM'", "]", ")", "return", "fn", "(", "*", "args", ",", "*", "*", "kwargs", ")", "return", "decorator", "return", "wrapper" ]
View decorator that requires a valid JWT token to be present in the request :param realm: an optional realm
[ "View", "decorator", "that", "requires", "a", "valid", "JWT", "token", "to", "be", "present", "in", "the", "request" ]
c27084114e258863b82753fc574a362cd6c62fcd
https://github.com/mattupstate/flask-jwt/blob/c27084114e258863b82753fc574a362cd6c62fcd/flask_jwt/__init__.py#L168-L179
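A minimal Flask sketch using the decorator above; it assumes the JWT extension has already been initialised against the application (authentication and identity handlers, secret key configuration, and so on are omitted here):

from flask import Flask, jsonify
from flask_jwt import jwt_required

app = Flask(__name__)

@app.route('/protected')
@jwt_required()   # note: called, since jwt_required takes an optional realm
def protected():
    return jsonify(message='only reachable with a valid JWT')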
10,213
mattupstate/flask-jwt
flask_jwt/__init__.py
JWT.auth_request_handler
def auth_request_handler(self, callback):
    """Specifies the authentication response handler function.

    :param callable callback: the auth request handler function

    .. deprecated
    """
    warnings.warn("This handler is deprecated. The recommended approach to have control over "
                  "the authentication resource is to disable the built-in resource by "
                  "setting JWT_AUTH_URL_RULE=None and registering your own authentication "
                  "resource directly on your application.", DeprecationWarning, stacklevel=2)
    self.auth_request_callback = callback
    return callback
python
[ "def", "auth_request_handler", "(", "self", ",", "callback", ")", ":", "warnings", ".", "warn", "(", "\"This handler is deprecated. The recommended approach to have control over \"", "\"the authentication resource is to disable the built-in resource by \"", "\"setting JWT_AUTH_URL_RULE=None and registering your own authentication \"", "\"resource directly on your application.\"", ",", "DeprecationWarning", ",", "stacklevel", "=", "2", ")", "self", ".", "auth_request_callback", "=", "callback", "return", "callback" ]
Specifies the authentication response handler function. :param callable callback: the auth request handler function .. deprecated
[ "Specifies", "the", "authentication", "response", "handler", "function", "." ]
c27084114e258863b82753fc574a362cd6c62fcd
https://github.com/mattupstate/flask-jwt/blob/c27084114e258863b82753fc574a362cd6c62fcd/flask_jwt/__init__.py#L294-L306
10,214
jwass/mplleaflet
mplleaflet/leaflet_renderer.py
LeafletRenderer._svg_path
def _svg_path(self, pathcodes, data):
    """
    Return the SVG path's 'd' element.
    """
    def gen_path_elements(pathcodes, data):
        counts = {'M': 1, 'L': 1, 'C': 3, 'Z': 0}
        it = iter(data)
        for code in pathcodes:
            yield code
            for _ in range(counts[code]):
                p = next(it)
                yield str(p[0])
                yield str(p[1])

    return ' '.join(gen_path_elements(pathcodes, data))
python
[ "def", "_svg_path", "(", "self", ",", "pathcodes", ",", "data", ")", ":", "def", "gen_path_elements", "(", "pathcodes", ",", "data", ")", ":", "counts", "=", "{", "'M'", ":", "1", ",", "'L'", ":", "1", ",", "'C'", ":", "3", ",", "'Z'", ":", "0", "}", "it", "=", "iter", "(", "data", ")", "for", "code", "in", "pathcodes", ":", "yield", "code", "for", "_", "in", "range", "(", "counts", "[", "code", "]", ")", ":", "p", "=", "next", "(", "it", ")", "yield", "str", "(", "p", "[", "0", "]", ")", "yield", "str", "(", "p", "[", "1", "]", ")", "return", "' '", ".", "join", "(", "gen_path_elements", "(", "pathcodes", ",", "data", ")", ")" ]
Return the SVG path's 'd' element.
[ "Return", "the", "SVG", "path", "s", "d", "element", "." ]
a83d7b69c56d5507dd7c17f5be377d23a31e84ab
https://github.com/jwass/mplleaflet/blob/a83d7b69c56d5507dd7c17f5be377d23a31e84ab/mplleaflet/leaflet_renderer.py#L69-L84
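To make the consumption counts concrete, here is a standalone copy of the generator logic above (renamed and lifted out of the class purely for illustration) with a worked example; 'M' and 'L' each consume one (x, y) pair, 'C' consumes three, and 'Z' consumes none:

def svg_path(pathcodes, data):
    counts = {'M': 1, 'L': 1, 'C': 3, 'Z': 0}
    it = iter(data)
    parts = []
    for code in pathcodes:
        parts.append(code)
        for _ in range(counts[code]):
            p = next(it)
            parts.append(str(p[0]))
            parts.append(str(p[1]))
    return ' '.join(parts)

print(svg_path(['M', 'L', 'L', 'Z'], [(0, 0), (10, 0), (10, 5)]))
# -> M 0 0 L 10 0 L 10 5 Z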
10,215
jwass/mplleaflet
mplleaflet/_display.py
fig_to_html
def fig_to_html(fig=None, template='base.html', tiles=None, crs=None, epsg=None, embed_links=False, float_precision=6):
    """
    Convert a Matplotlib Figure to a Leaflet map

    Parameters
    ----------
    fig : figure, default gcf()
        Figure used to convert to map
    template : string, default 'base.html'
        The Jinja2 template to use
    tiles : string or tuple
        The tiles argument is used to control the map tile source in the
        Leaflet map. Several simple shortcuts exist: 'osm', 'mapquest open',
        and 'mapbox bright' may be specified to use those tiles.
        The argument may be a tuple of two elements. The first element is the
        tile URL to use in the map's TileLayer, the second argument is the
        attribution to display. See
        http://leafletjs.com/reference.html#tilelayer for more information on
        formatting the URL.
        See also maptiles.mapbox() for specifying Mapbox tiles based on a
        Mapbox map ID.
    crs : dict, default assumes lon/lat
        pyproj definition of the current figure. If None, then it is assumed
        the plot is longitude, latitude in X, Y.
    epsg : int, default 4326
        The EPSG code of the current plot. This can be used in place of the
        'crs' parameter.
    embed_links : bool, default False
        Whether external links (except tiles) shall be explicitly embedded in
        the final html.
    float_precision : int, default 6
        The precision to be used for the floats in the embedded geojson.

    Note: only one of 'crs' or 'epsg' may be specified. Both may be None, in
    which case the plot is assumed to be longitude / latitude.

    Returns
    -------
    String of html of the resulting webpage
    """
    if tiles is None:
        tiles = maptiles.osm
    elif isinstance(tiles, six.string_types):
        if tiles not in maptiles.tiles:
            raise ValueError('Unknown tile source "{}"'.format(tiles))
        else:
            tiles = maptiles.tiles[tiles]

    template = env.get_template(template)

    if fig is None:
        fig = plt.gcf()
    dpi = fig.get_dpi()

    renderer = LeafletRenderer(crs=crs, epsg=epsg)
    exporter = Exporter(renderer)
    exporter.run(fig)

    attribution = _attribution + ' | ' + tiles[1]
    mapid = str(uuid.uuid4()).replace('-', '')

    FloatEncoder._formatter = ".{}f".format(float_precision)
    gjdata = json.dumps(renderer.geojson(), cls=FloatEncoder)
    params = {
        'geojson': gjdata,
        'width': fig.get_figwidth()*dpi,
        'height': fig.get_figheight()*dpi,
        'mapid': mapid,
        'tile_url': tiles[0],
        'attribution': attribution,
        'links': [_leaflet_js,_leaflet_css],
        'embed_links': embed_links,
    }

    html = template.render(params)

    return html
python
[ "def", "fig_to_html", "(", "fig", "=", "None", ",", "template", "=", "'base.html'", ",", "tiles", "=", "None", ",", "crs", "=", "None", ",", "epsg", "=", "None", ",", "embed_links", "=", "False", ",", "float_precision", "=", "6", ")", ":", "if", "tiles", "is", "None", ":", "tiles", "=", "maptiles", ".", "osm", "elif", "isinstance", "(", "tiles", ",", "six", ".", "string_types", ")", ":", "if", "tiles", "not", "in", "maptiles", ".", "tiles", ":", "raise", "ValueError", "(", "'Unknown tile source \"{}\"'", ".", "format", "(", "tiles", ")", ")", "else", ":", "tiles", "=", "maptiles", ".", "tiles", "[", "tiles", "]", "template", "=", "env", ".", "get_template", "(", "template", ")", "if", "fig", "is", "None", ":", "fig", "=", "plt", ".", "gcf", "(", ")", "dpi", "=", "fig", ".", "get_dpi", "(", ")", "renderer", "=", "LeafletRenderer", "(", "crs", "=", "crs", ",", "epsg", "=", "epsg", ")", "exporter", "=", "Exporter", "(", "renderer", ")", "exporter", ".", "run", "(", "fig", ")", "attribution", "=", "_attribution", "+", "' | '", "+", "tiles", "[", "1", "]", "mapid", "=", "str", "(", "uuid", ".", "uuid4", "(", ")", ")", ".", "replace", "(", "'-'", ",", "''", ")", "FloatEncoder", ".", "_formatter", "=", "\".{}f\"", ".", "format", "(", "float_precision", ")", "gjdata", "=", "json", ".", "dumps", "(", "renderer", ".", "geojson", "(", ")", ",", "cls", "=", "FloatEncoder", ")", "params", "=", "{", "'geojson'", ":", "gjdata", ",", "'width'", ":", "fig", ".", "get_figwidth", "(", ")", "*", "dpi", ",", "'height'", ":", "fig", ".", "get_figheight", "(", ")", "*", "dpi", ",", "'mapid'", ":", "mapid", ",", "'tile_url'", ":", "tiles", "[", "0", "]", ",", "'attribution'", ":", "attribution", ",", "'links'", ":", "[", "_leaflet_js", ",", "_leaflet_css", "]", ",", "'embed_links'", ":", "embed_links", ",", "}", "html", "=", "template", ".", "render", "(", "params", ")", "return", "html" ]
Convert a Matplotlib Figure to a Leaflet map Parameters ---------- fig : figure, default gcf() Figure used to convert to map template : string, default 'base.html' The Jinja2 template to use tiles : string or tuple The tiles argument is used to control the map tile source in the Leaflet map. Several simple shortcuts exist: 'osm', 'mapquest open', and 'mapbox bright' may be specified to use those tiles. The argument may be a tuple of two elements. The first element is the tile URL to use in the map's TileLayer, the second argument is the attribution to display. See http://leafletjs.com/reference.html#tilelayer for more information on formatting the URL. See also maptiles.mapbox() for specifying Mapbox tiles based on a Mapbox map ID. crs : dict, default assumes lon/lat pyproj definition of the current figure. If None, then it is assumed the plot is longitude, latitude in X, Y. epsg : int, default 4326 The EPSG code of the current plot. This can be used in place of the 'crs' parameter. embed_links : bool, default False Whether external links (except tiles) shall be explicitly embedded in the final html. float_precision : int, default 6 The precision to be used for the floats in the embedded geojson. Note: only one of 'crs' or 'epsg' may be specified. Both may be None, in which case the plot is assumed to be longitude / latitude. Returns ------- String of html of the resulting webpage
[ "Convert", "a", "Matplotlib", "Figure", "to", "a", "Leaflet", "map" ]
a83d7b69c56d5507dd7c17f5be377d23a31e84ab
https://github.com/jwass/mplleaflet/blob/a83d7b69c56d5507dd7c17f5be377d23a31e84ab/mplleaflet/_display.py#L27-L107
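A usage sketch, assuming mplleaflet re-exports fig_to_html at package level (as in released versions) and that the plotted coordinates are longitude/latitude; the coordinates and output filename are placeholders:

import matplotlib.pyplot as plt
import mplleaflet

# x is longitude, y is latitude when crs/epsg are left at their defaults.
plt.plot([-122.4194, -73.9857], [37.7749, 40.7484], 'r.-')
html = mplleaflet.fig_to_html(tiles='osm')
with open('map.html', 'w') as f:
    f.write(html)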
10,216
jwass/mplleaflet
mplleaflet/_display.py
fig_to_geojson
def fig_to_geojson(fig=None, **kwargs):
    """
    Returns a figure's GeoJSON representation as a dictionary

    All arguments passed to fig_to_html()

    Returns
    -------
    GeoJSON dictionary
    """
    if fig is None:
        fig = plt.gcf()
    renderer = LeafletRenderer(**kwargs)
    exporter = Exporter(renderer)
    exporter.run(fig)
    return renderer.geojson()
python
[ "def", "fig_to_geojson", "(", "fig", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "fig", "is", "None", ":", "fig", "=", "plt", ".", "gcf", "(", ")", "renderer", "=", "LeafletRenderer", "(", "*", "*", "kwargs", ")", "exporter", "=", "Exporter", "(", "renderer", ")", "exporter", ".", "run", "(", "fig", ")", "return", "renderer", ".", "geojson", "(", ")" ]
Returns a figure's GeoJSON representation as a dictionary All arguments passed to fig_to_html() Returns ------- GeoJSON dictionary
[ "Returns", "a", "figure", "s", "GeoJSON", "representation", "as", "a", "dictionary" ]
a83d7b69c56d5507dd7c17f5be377d23a31e84ab
https://github.com/jwass/mplleaflet/blob/a83d7b69c56d5507dd7c17f5be377d23a31e84ab/mplleaflet/_display.py#L110-L127
10,217
jwass/mplleaflet
mplleaflet/_display.py
display
def display(fig=None, closefig=True, **kwargs):
    """
    Convert a Matplotlib Figure to a Leaflet map. Embed in IPython notebook.

    Parameters
    ----------
    fig : figure, default gcf()
        Figure used to convert to map
    closefig : boolean, default True
        Close the current Figure
    """
    from IPython.display import HTML
    if fig is None:
        fig = plt.gcf()
    if closefig:
        plt.close(fig)

    html = fig_to_html(fig, **kwargs)

    # We embed everything in an iframe.
    iframe_html = '<iframe src="data:text/html;base64,{html}" width="{width}" height="{height}"></iframe>'\
        .format(html = base64.b64encode(html.encode('utf8')).decode('utf8'),
                width = '100%',
                height= int(60.*fig.get_figheight()),
               )

    return HTML(iframe_html)
python
[ "def", "display", "(", "fig", "=", "None", ",", "closefig", "=", "True", ",", "*", "*", "kwargs", ")", ":", "from", "IPython", ".", "display", "import", "HTML", "if", "fig", "is", "None", ":", "fig", "=", "plt", ".", "gcf", "(", ")", "if", "closefig", ":", "plt", ".", "close", "(", "fig", ")", "html", "=", "fig_to_html", "(", "fig", ",", "*", "*", "kwargs", ")", "# We embed everything in an iframe.", "iframe_html", "=", "'<iframe src=\"data:text/html;base64,{html}\" width=\"{width}\" height=\"{height}\"></iframe>'", ".", "format", "(", "html", "=", "base64", ".", "b64encode", "(", "html", ".", "encode", "(", "'utf8'", ")", ")", ".", "decode", "(", "'utf8'", ")", ",", "width", "=", "'100%'", ",", "height", "=", "int", "(", "60.", "*", "fig", ".", "get_figheight", "(", ")", ")", ",", ")", "return", "HTML", "(", "iframe_html", ")" ]
Convert a Matplotlib Figure to a Leaflet map. Embed in IPython notebook. Parameters ---------- fig : figure, default gcf() Figure used to convert to map closefig : boolean, default True Close the current Figure
[ "Convert", "a", "Matplotlib", "Figure", "to", "a", "Leaflet", "map", ".", "Embed", "in", "IPython", "notebook", "." ]
a83d7b69c56d5507dd7c17f5be377d23a31e84ab
https://github.com/jwass/mplleaflet/blob/a83d7b69c56d5507dd7c17f5be377d23a31e84ab/mplleaflet/_display.py#L140-L165
10,218
jwass/mplleaflet
mplleaflet/_display.py
show
def show(fig=None, path='_map.html', **kwargs): """ Convert a Matplotlib Figure to a Leaflet map. Open in a browser Parameters ---------- fig : figure, default gcf() Figure used to convert to map path : string, default '_map.html' Filename where output html will be saved See fig_to_html() for description of keyword args. """ import webbrowser fullpath = os.path.abspath(path) with open(fullpath, 'w') as f: save_html(fig, fileobj=f, **kwargs) webbrowser.open('file://' + fullpath)
python
def show(fig=None, path='_map.html', **kwargs): """ Convert a Matplotlib Figure to a Leaflet map. Open in a browser Parameters ---------- fig : figure, default gcf() Figure used to convert to map path : string, default '_map.html' Filename where output html will be saved See fig_to_html() for description of keyword args. """ import webbrowser fullpath = os.path.abspath(path) with open(fullpath, 'w') as f: save_html(fig, fileobj=f, **kwargs) webbrowser.open('file://' + fullpath)
[ "def", "show", "(", "fig", "=", "None", ",", "path", "=", "'_map.html'", ",", "*", "*", "kwargs", ")", ":", "import", "webbrowser", "fullpath", "=", "os", ".", "path", ".", "abspath", "(", "path", ")", "with", "open", "(", "fullpath", ",", "'w'", ")", "as", "f", ":", "save_html", "(", "fig", ",", "fileobj", "=", "f", ",", "*", "*", "kwargs", ")", "webbrowser", ".", "open", "(", "'file://'", "+", "fullpath", ")" ]
Convert a Matplotlib Figure to a Leaflet map. Open in a browser Parameters ---------- fig : figure, default gcf() Figure used to convert to map path : string, default '_map.html' Filename where output html will be saved See fig_to_html() for description of keyword args.
[ "Convert", "a", "Matplotlib", "Figure", "to", "a", "Leaflet", "map", ".", "Open", "in", "a", "browser" ]
a83d7b69c56d5507dd7c17f5be377d23a31e84ab
https://github.com/jwass/mplleaflet/blob/a83d7b69c56d5507dd7c17f5be377d23a31e84ab/mplleaflet/_display.py#L167-L185
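Outside a notebook, show() writes the HTML to a file and opens it in the default browser. A sketch; the output filename is arbitrary:

import matplotlib.pyplot as plt
import mplleaflet

plt.plot([13.38, 13.40], [52.51, 52.52], 'g-')   # lon/lat pairs

# Writes the map to the given file and opens file://<absolute path> in the default browser.
mplleaflet.show(path='route_map.html')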
10,219
dmsimard/python-cachetclient
contrib/sensu-cachet.py
create_incident
def create_incident(**kwargs): """ Creates an incident """ incidents = cachet.Incidents(endpoint=ENDPOINT, api_token=API_TOKEN) if 'component_id' in kwargs: return incidents.post(name=kwargs['name'], message=kwargs['message'], status=kwargs['status'], component_id=kwargs['component_id'], component_status=kwargs['component_status']) else: return incidents.post(name=kwargs['name'], message=kwargs['message'], status=kwargs['status'])
python
def create_incident(**kwargs): """ Creates an incident """ incidents = cachet.Incidents(endpoint=ENDPOINT, api_token=API_TOKEN) if 'component_id' in kwargs: return incidents.post(name=kwargs['name'], message=kwargs['message'], status=kwargs['status'], component_id=kwargs['component_id'], component_status=kwargs['component_status']) else: return incidents.post(name=kwargs['name'], message=kwargs['message'], status=kwargs['status'])
[ "def", "create_incident", "(", "*", "*", "kwargs", ")", ":", "incidents", "=", "cachet", ".", "Incidents", "(", "endpoint", "=", "ENDPOINT", ",", "api_token", "=", "API_TOKEN", ")", "if", "'component_id'", "in", "kwargs", ":", "return", "incidents", ".", "post", "(", "name", "=", "kwargs", "[", "'name'", "]", ",", "message", "=", "kwargs", "[", "'message'", "]", ",", "status", "=", "kwargs", "[", "'status'", "]", ",", "component_id", "=", "kwargs", "[", "'component_id'", "]", ",", "component_status", "=", "kwargs", "[", "'component_status'", "]", ")", "else", ":", "return", "incidents", ".", "post", "(", "name", "=", "kwargs", "[", "'name'", "]", ",", "message", "=", "kwargs", "[", "'message'", "]", ",", "status", "=", "kwargs", "[", "'status'", "]", ")" ]
Creates an incident
[ "Creates", "an", "incident" ]
31bbc6d17ba5de088846e1ffae259b6755e672a0
https://github.com/dmsimard/python-cachetclient/blob/31bbc6d17ba5de088846e1ffae259b6755e672a0/contrib/sensu-cachet.py#L110-L124
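A usage sketch for create_incident as the contrib script would call it; ENDPOINT and API_TOKEN are the module-level settings of sensu-cachet.py, the component id is hypothetical, and the numeric codes follow the Cachet API conventions:

# Incident attached to a component; also updates that component's status.
create_incident(name='API degraded',
                message='Sensu check output: connection timeouts',
                status=1,                # Cachet incident status (1 is "Investigating")
                component_id=3,          # hypothetical component id
                component_status=4)      # Cachet component status (4 is "Major Outage")

# Incident without a component.
create_incident(name='Scheduled maintenance',
                message='Rolling restarts in progress',
                status=1)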
10,220
dmsimard/python-cachetclient
contrib/sensu-cachet.py
incident_exists
def incident_exists(name, message, status): """ Check if an incident with these attributes already exists """ incidents = cachet.Incidents(endpoint=ENDPOINT) all_incidents = json.loads(incidents.get()) for incident in all_incidents['data']: if name == incident['name'] and \ status == incident['status'] and \ message.strip() == incident['message'].strip(): return True return False
python
def incident_exists(name, message, status): """ Check if an incident with these attributes already exists """ incidents = cachet.Incidents(endpoint=ENDPOINT) all_incidents = json.loads(incidents.get()) for incident in all_incidents['data']: if name == incident['name'] and \ status == incident['status'] and \ message.strip() == incident['message'].strip(): return True return False
[ "def", "incident_exists", "(", "name", ",", "message", ",", "status", ")", ":", "incidents", "=", "cachet", ".", "Incidents", "(", "endpoint", "=", "ENDPOINT", ")", "all_incidents", "=", "json", ".", "loads", "(", "incidents", ".", "get", "(", ")", ")", "for", "incident", "in", "all_incidents", "[", "'data'", "]", ":", "if", "name", "==", "incident", "[", "'name'", "]", "and", "status", "==", "incident", "[", "'status'", "]", "and", "message", ".", "strip", "(", ")", "==", "incident", "[", "'message'", "]", ".", "strip", "(", ")", ":", "return", "True", "return", "False" ]
Check if an incident with these attributes already exists
[ "Check", "if", "an", "incident", "with", "these", "attributes", "already", "exists" ]
31bbc6d17ba5de088846e1ffae259b6755e672a0
https://github.com/dmsimard/python-cachetclient/blob/31bbc6d17ba5de088846e1ffae259b6755e672a0/contrib/sensu-cachet.py#L127-L138
10,221
dmsimard/python-cachetclient
contrib/sensu-cachet.py
get_component
def get_component(id): """ Gets a Cachet component by id """ components = cachet.Components(endpoint=ENDPOINT) component = json.loads(components.get(id=id)) return component['data']
python
def get_component(id): """ Gets a Cachet component by id """ components = cachet.Components(endpoint=ENDPOINT) component = json.loads(components.get(id=id)) return component['data']
[ "def", "get_component", "(", "id", ")", ":", "components", "=", "cachet", ".", "Components", "(", "endpoint", "=", "ENDPOINT", ")", "component", "=", "json", ".", "loads", "(", "components", ".", "get", "(", "id", "=", "id", ")", ")", "return", "component", "[", "'data'", "]" ]
Gets a Cachet component by id
[ "Gets", "a", "Cachet", "component", "by", "id" ]
31bbc6d17ba5de088846e1ffae259b6755e672a0
https://github.com/dmsimard/python-cachetclient/blob/31bbc6d17ba5de088846e1ffae259b6755e672a0/contrib/sensu-cachet.py#L141-L147
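Taken together, these helpers support a create-only-if-new flow for the handler. A sketch of that pattern under the same module-level configuration; the component id, keys and texts are hypothetical:

component = get_component(3)             # hypothetical component id
name = 'Check failed: %s' % component['name']
message = 'Sensu reported a failure for this component.'

# Only file a new incident when an identical one is not already recorded.
if not incident_exists(name, message, status=1):
    create_incident(name=name, message=message, status=1,
                    component_id=component['id'], component_status=4)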
10,222
dmsimard/python-cachetclient
cachetclient/cachet.py
api_token_required
def api_token_required(f, *args, **kwargs): """ Decorator helper function to ensure some methods aren't needlessly called without an api_token configured. """ try: if args[0].api_token is None: raise AttributeError('Parameter api_token is required.') except AttributeError: raise AttributeError('Parameter api_token is required.') return f(*args, **kwargs)
python
def api_token_required(f, *args, **kwargs): """ Decorator helper function to ensure some methods aren't needlessly called without an api_token configured. """ try: if args[0].api_token is None: raise AttributeError('Parameter api_token is required.') except AttributeError: raise AttributeError('Parameter api_token is required.') return f(*args, **kwargs)
[ "def", "api_token_required", "(", "f", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "try", ":", "if", "args", "[", "0", "]", ".", "api_token", "is", "None", ":", "raise", "AttributeError", "(", "'Parameter api_token is required.'", ")", "except", "AttributeError", ":", "raise", "AttributeError", "(", "'Parameter api_token is required.'", ")", "return", "f", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
Decorator helper function to ensure some methods aren't needlessly called without an api_token configured.
[ "Decorator", "helper", "function", "to", "ensure", "some", "methods", "aren", "t", "needlessly", "called", "without", "an", "api_token", "configured", "." ]
31bbc6d17ba5de088846e1ffae259b6755e672a0
https://github.com/dmsimard/python-cachetclient/blob/31bbc6d17ba5de088846e1ffae259b6755e672a0/cachetclient/cachet.py#L23-L34
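The (f, *args, **kwargs) signature is the caller style used by the third-party "decorator" package, so args[0] is the bound instance at call time. An illustrative sketch under that assumption; the Demo class and the wiring via decorator() are hypothetical, not the library's own code:

from decorator import decorator               # third-party package, used here only for illustration
from cachetclient.cachet import api_token_required

guarded = decorator(api_token_required)       # turn the caller-style helper into a method decorator

class Demo(object):
    """Hypothetical stand-in for an API resource class."""
    def __init__(self, api_token=None):
        self.api_token = api_token

    @guarded
    def post(self, data):
        return data

Demo(api_token='secret').post({'name': 'x'})   # passes the guard
Demo().post({'name': 'x'})                     # raises AttributeError: api_token is required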
10,223
robdmc/behold
behold/logger.py
Behold.is_true
def is_true(self, item=None): """ If you are filtering on object values, you need to pass that object here. """ if item: values = [item] else: values = [] self._get_item_and_att_names(*values) return self._passes_all
python
def is_true(self, item=None): """ If you are filtering on object values, you need to pass that object here. """ if item: values = [item] else: values = [] self._get_item_and_att_names(*values) return self._passes_all
[ "def", "is_true", "(", "self", ",", "item", "=", "None", ")", ":", "if", "item", ":", "values", "=", "[", "item", "]", "else", ":", "values", "=", "[", "]", "self", ".", "_get_item_and_att_names", "(", "*", "values", ")", "return", "self", ".", "_passes_all" ]
If you are filtering on object values, you need to pass that object here.
[ "If", "you", "are", "filtering", "on", "object", "values", "you", "need", "to", "pass", "that", "object", "here", "." ]
ac1b7707e2d7472a50d837dda78be1e23af8fce5
https://github.com/robdmc/behold/blob/ac1b7707e2d7472a50d837dda78be1e23af8fce5/behold/logger.py#L519-L528
10,224
chorsley/python-Wappalyzer
Wappalyzer/Wappalyzer.py
WebPage.new_from_url
def new_from_url(cls, url, verify=True): """ Constructs a new WebPage object for the URL, using the `requests` module to fetch the HTML. Parameters ---------- url : str verify: bool """ response = requests.get(url, verify=verify, timeout=2.5) return cls.new_from_response(response)
python
def new_from_url(cls, url, verify=True): """ Constructs a new WebPage object for the URL, using the `requests` module to fetch the HTML. Parameters ---------- url : str verify: bool """ response = requests.get(url, verify=verify, timeout=2.5) return cls.new_from_response(response)
[ "def", "new_from_url", "(", "cls", ",", "url", ",", "verify", "=", "True", ")", ":", "response", "=", "requests", ".", "get", "(", "url", ",", "verify", "=", "verify", ",", "timeout", "=", "2.5", ")", "return", "cls", ".", "new_from_response", "(", "response", ")" ]
Constructs a new WebPage object for the URL, using the `requests` module to fetch the HTML. Parameters ---------- url : str verify: bool
[ "Constructs", "a", "new", "WebPage", "object", "for", "the", "URL", "using", "the", "requests", "module", "to", "fetch", "the", "HTML", "." ]
b785e29f12c8032c54279cfa9ce01ead702a386c
https://github.com/chorsley/python-Wappalyzer/blob/b785e29f12c8032c54279cfa9ce01ead702a386c/Wappalyzer/Wappalyzer.py#L66-L78
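A short sketch of fetching a page this way, assuming network access and that WebPage is importable from the Wappalyzer package as in the project's README:

from Wappalyzer import WebPage

# Fetch the page over HTTP (2.5 s timeout); set verify=False only for hosts with
# self-signed certificates.
webpage = WebPage.new_from_url('https://example.org', verify=True)
print(webpage.url)
print(webpage.headers.get('Content-Type'))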
10,225
chorsley/python-Wappalyzer
Wappalyzer/Wappalyzer.py
WebPage.new_from_response
def new_from_response(cls, response): """ Constructs a new WebPage object for the response, using the `BeautifulSoup` module to parse the HTML. Parameters ---------- response : requests.Response object """ return cls(response.url, html=response.text, headers=response.headers)
python
def new_from_response(cls, response): """ Constructs a new WebPage object for the response, using the `BeautifulSoup` module to parse the HTML. Parameters ---------- response : requests.Response object """ return cls(response.url, html=response.text, headers=response.headers)
[ "def", "new_from_response", "(", "cls", ",", "response", ")", ":", "return", "cls", "(", "response", ".", "url", ",", "html", "=", "response", ".", "text", ",", "headers", "=", "response", ".", "headers", ")" ]
Constructs a new WebPage object for the response, using the `BeautifulSoup` module to parse the HTML. Parameters ---------- response : requests.Response object
[ "Constructs", "a", "new", "WebPage", "object", "for", "the", "response", "using", "the", "BeautifulSoup", "module", "to", "parse", "the", "HTML", "." ]
b785e29f12c8032c54279cfa9ce01ead702a386c
https://github.com/chorsley/python-Wappalyzer/blob/b785e29f12c8032c54279cfa9ce01ead702a386c/Wappalyzer/Wappalyzer.py#L81-L91
10,226
chorsley/python-Wappalyzer
Wappalyzer/Wappalyzer.py
Wappalyzer._prepare_app
def _prepare_app(self, app): """ Normalize app data, preparing it for the detection phase. """ # Ensure these keys' values are lists for key in ['url', 'html', 'script', 'implies']: try: value = app[key] except KeyError: app[key] = [] else: if not isinstance(value, list): app[key] = [value] # Ensure these keys exist for key in ['headers', 'meta']: try: value = app[key] except KeyError: app[key] = {} # Ensure the 'meta' key is a dict obj = app['meta'] if not isinstance(obj, dict): app['meta'] = {'generator': obj} # Ensure keys are lowercase for key in ['headers', 'meta']: obj = app[key] app[key] = {k.lower(): v for k, v in obj.items()} # Prepare regular expression patterns for key in ['url', 'html', 'script']: app[key] = [self._prepare_pattern(pattern) for pattern in app[key]] for key in ['headers', 'meta']: obj = app[key] for name, pattern in obj.items(): obj[name] = self._prepare_pattern(obj[name])
python
def _prepare_app(self, app): """ Normalize app data, preparing it for the detection phase. """ # Ensure these keys' values are lists for key in ['url', 'html', 'script', 'implies']: try: value = app[key] except KeyError: app[key] = [] else: if not isinstance(value, list): app[key] = [value] # Ensure these keys exist for key in ['headers', 'meta']: try: value = app[key] except KeyError: app[key] = {} # Ensure the 'meta' key is a dict obj = app['meta'] if not isinstance(obj, dict): app['meta'] = {'generator': obj} # Ensure keys are lowercase for key in ['headers', 'meta']: obj = app[key] app[key] = {k.lower(): v for k, v in obj.items()} # Prepare regular expression patterns for key in ['url', 'html', 'script']: app[key] = [self._prepare_pattern(pattern) for pattern in app[key]] for key in ['headers', 'meta']: obj = app[key] for name, pattern in obj.items(): obj[name] = self._prepare_pattern(obj[name])
[ "def", "_prepare_app", "(", "self", ",", "app", ")", ":", "# Ensure these keys' values are lists", "for", "key", "in", "[", "'url'", ",", "'html'", ",", "'script'", ",", "'implies'", "]", ":", "try", ":", "value", "=", "app", "[", "key", "]", "except", "KeyError", ":", "app", "[", "key", "]", "=", "[", "]", "else", ":", "if", "not", "isinstance", "(", "value", ",", "list", ")", ":", "app", "[", "key", "]", "=", "[", "value", "]", "# Ensure these keys exist", "for", "key", "in", "[", "'headers'", ",", "'meta'", "]", ":", "try", ":", "value", "=", "app", "[", "key", "]", "except", "KeyError", ":", "app", "[", "key", "]", "=", "{", "}", "# Ensure the 'meta' key is a dict", "obj", "=", "app", "[", "'meta'", "]", "if", "not", "isinstance", "(", "obj", ",", "dict", ")", ":", "app", "[", "'meta'", "]", "=", "{", "'generator'", ":", "obj", "}", "# Ensure keys are lowercase", "for", "key", "in", "[", "'headers'", ",", "'meta'", "]", ":", "obj", "=", "app", "[", "key", "]", "app", "[", "key", "]", "=", "{", "k", ".", "lower", "(", ")", ":", "v", "for", "k", ",", "v", "in", "obj", ".", "items", "(", ")", "}", "# Prepare regular expression patterns", "for", "key", "in", "[", "'url'", ",", "'html'", ",", "'script'", "]", ":", "app", "[", "key", "]", "=", "[", "self", ".", "_prepare_pattern", "(", "pattern", ")", "for", "pattern", "in", "app", "[", "key", "]", "]", "for", "key", "in", "[", "'headers'", ",", "'meta'", "]", ":", "obj", "=", "app", "[", "key", "]", "for", "name", ",", "pattern", "in", "obj", ".", "items", "(", ")", ":", "obj", "[", "name", "]", "=", "self", ".", "_prepare_pattern", "(", "obj", "[", "name", "]", ")" ]
Normalize app data, preparing it for the detection phase.
[ "Normalize", "app", "data", "preparing", "it", "for", "the", "detection", "phase", "." ]
b785e29f12c8032c54279cfa9ce01ead702a386c
https://github.com/chorsley/python-Wappalyzer/blob/b785e29f12c8032c54279cfa9ce01ead702a386c/Wappalyzer/Wappalyzer.py#L131-L170
10,227
chorsley/python-Wappalyzer
Wappalyzer/Wappalyzer.py
Wappalyzer._has_app
def _has_app(self, app, webpage): """ Determine whether the web page matches the app signature. """ # Search the easiest things first and save the full-text search of the # HTML for last for regex in app['url']: if regex.search(webpage.url): return True for name, regex in app['headers'].items(): if name in webpage.headers: content = webpage.headers[name] if regex.search(content): return True for regex in app['script']: for script in webpage.scripts: if regex.search(script): return True for name, regex in app['meta'].items(): if name in webpage.meta: content = webpage.meta[name] if regex.search(content): return True for regex in app['html']: if regex.search(webpage.html): return True
python
def _has_app(self, app, webpage): """ Determine whether the web page matches the app signature. """ # Search the easiest things first and save the full-text search of the # HTML for last for regex in app['url']: if regex.search(webpage.url): return True for name, regex in app['headers'].items(): if name in webpage.headers: content = webpage.headers[name] if regex.search(content): return True for regex in app['script']: for script in webpage.scripts: if regex.search(script): return True for name, regex in app['meta'].items(): if name in webpage.meta: content = webpage.meta[name] if regex.search(content): return True for regex in app['html']: if regex.search(webpage.html): return True
[ "def", "_has_app", "(", "self", ",", "app", ",", "webpage", ")", ":", "# Search the easiest things first and save the full-text search of the", "# HTML for last", "for", "regex", "in", "app", "[", "'url'", "]", ":", "if", "regex", ".", "search", "(", "webpage", ".", "url", ")", ":", "return", "True", "for", "name", ",", "regex", "in", "app", "[", "'headers'", "]", ".", "items", "(", ")", ":", "if", "name", "in", "webpage", ".", "headers", ":", "content", "=", "webpage", ".", "headers", "[", "name", "]", "if", "regex", ".", "search", "(", "content", ")", ":", "return", "True", "for", "regex", "in", "app", "[", "'script'", "]", ":", "for", "script", "in", "webpage", ".", "scripts", ":", "if", "regex", ".", "search", "(", "script", ")", ":", "return", "True", "for", "name", ",", "regex", "in", "app", "[", "'meta'", "]", ".", "items", "(", ")", ":", "if", "name", "in", "webpage", ".", "meta", ":", "content", "=", "webpage", ".", "meta", "[", "name", "]", "if", "regex", ".", "search", "(", "content", ")", ":", "return", "True", "for", "regex", "in", "app", "[", "'html'", "]", ":", "if", "regex", ".", "search", "(", "webpage", ".", "html", ")", ":", "return", "True" ]
Determine whether the web page matches the app signature.
[ "Determine", "whether", "the", "web", "page", "matches", "the", "app", "signature", "." ]
b785e29f12c8032c54279cfa9ce01ead702a386c
https://github.com/chorsley/python-Wappalyzer/blob/b785e29f12c8032c54279cfa9ce01ead702a386c/Wappalyzer/Wappalyzer.py#L189-L219
10,228
chorsley/python-Wappalyzer
Wappalyzer/Wappalyzer.py
Wappalyzer._get_implied_apps
def _get_implied_apps(self, detected_apps): """ Get the set of apps implied by `detected_apps`. """ def __get_implied_apps(apps): _implied_apps = set() for app in apps: try: _implied_apps.update(set(self.apps[app]['implies'])) except KeyError: pass return _implied_apps implied_apps = __get_implied_apps(detected_apps) all_implied_apps = set() # Descend recursively until we've found all implied apps while not all_implied_apps.issuperset(implied_apps): all_implied_apps.update(implied_apps) implied_apps = __get_implied_apps(all_implied_apps) return all_implied_apps
python
def _get_implied_apps(self, detected_apps): """ Get the set of apps implied by `detected_apps`. """ def __get_implied_apps(apps): _implied_apps = set() for app in apps: try: _implied_apps.update(set(self.apps[app]['implies'])) except KeyError: pass return _implied_apps implied_apps = __get_implied_apps(detected_apps) all_implied_apps = set() # Descend recursively until we've found all implied apps while not all_implied_apps.issuperset(implied_apps): all_implied_apps.update(implied_apps) implied_apps = __get_implied_apps(all_implied_apps) return all_implied_apps
[ "def", "_get_implied_apps", "(", "self", ",", "detected_apps", ")", ":", "def", "__get_implied_apps", "(", "apps", ")", ":", "_implied_apps", "=", "set", "(", ")", "for", "app", "in", "apps", ":", "try", ":", "_implied_apps", ".", "update", "(", "set", "(", "self", ".", "apps", "[", "app", "]", "[", "'implies'", "]", ")", ")", "except", "KeyError", ":", "pass", "return", "_implied_apps", "implied_apps", "=", "__get_implied_apps", "(", "detected_apps", ")", "all_implied_apps", "=", "set", "(", ")", "# Descend recursively until we've found all implied apps", "while", "not", "all_implied_apps", ".", "issuperset", "(", "implied_apps", ")", ":", "all_implied_apps", ".", "update", "(", "implied_apps", ")", "implied_apps", "=", "__get_implied_apps", "(", "all_implied_apps", ")", "return", "all_implied_apps" ]
Get the set of apps implied by `detected_apps`.
[ "Get", "the", "set", "of", "apps", "implied", "by", "detected_apps", "." ]
b785e29f12c8032c54279cfa9ce01ead702a386c
https://github.com/chorsley/python-Wappalyzer/blob/b785e29f12c8032c54279cfa9ce01ead702a386c/Wappalyzer/Wappalyzer.py#L221-L242
10,229
chorsley/python-Wappalyzer
Wappalyzer/Wappalyzer.py
Wappalyzer.get_categories
def get_categories(self, app_name): """ Returns a list of the categories for an app name. """ cat_nums = self.apps.get(app_name, {}).get("cats", []) cat_names = [self.categories.get("%s" % cat_num, "") for cat_num in cat_nums] return cat_names
python
def get_categories(self, app_name): """ Returns a list of the categories for an app name. """ cat_nums = self.apps.get(app_name, {}).get("cats", []) cat_names = [self.categories.get("%s" % cat_num, "") for cat_num in cat_nums] return cat_names
[ "def", "get_categories", "(", "self", ",", "app_name", ")", ":", "cat_nums", "=", "self", ".", "apps", ".", "get", "(", "app_name", ",", "{", "}", ")", ".", "get", "(", "\"cats\"", ",", "[", "]", ")", "cat_names", "=", "[", "self", ".", "categories", ".", "get", "(", "\"%s\"", "%", "cat_num", ",", "\"\"", ")", "for", "cat_num", "in", "cat_nums", "]", "return", "cat_names" ]
Returns a list of the categories for an app name.
[ "Returns", "a", "list", "of", "the", "categories", "for", "an", "app", "name", "." ]
b785e29f12c8032c54279cfa9ce01ead702a386c
https://github.com/chorsley/python-Wappalyzer/blob/b785e29f12c8032c54279cfa9ce01ead702a386c/Wappalyzer/Wappalyzer.py#L244-L252
10,230
chorsley/python-Wappalyzer
Wappalyzer/Wappalyzer.py
Wappalyzer.analyze
def analyze(self, webpage): """ Return a list of applications that can be detected on the web page. """ detected_apps = set() for app_name, app in self.apps.items(): if self._has_app(app, webpage): detected_apps.add(app_name) detected_apps |= self._get_implied_apps(detected_apps) return detected_apps
python
def analyze(self, webpage): """ Return a list of applications that can be detected on the web page. """ detected_apps = set() for app_name, app in self.apps.items(): if self._has_app(app, webpage): detected_apps.add(app_name) detected_apps |= self._get_implied_apps(detected_apps) return detected_apps
[ "def", "analyze", "(", "self", ",", "webpage", ")", ":", "detected_apps", "=", "set", "(", ")", "for", "app_name", ",", "app", "in", "self", ".", "apps", ".", "items", "(", ")", ":", "if", "self", ".", "_has_app", "(", "app", ",", "webpage", ")", ":", "detected_apps", ".", "add", "(", "app_name", ")", "detected_apps", "|=", "self", ".", "_get_implied_apps", "(", "detected_apps", ")", "return", "detected_apps" ]
Return a list of applications that can be detected on the web page.
[ "Return", "a", "list", "of", "applications", "that", "can", "be", "detected", "on", "the", "web", "page", "." ]
b785e29f12c8032c54279cfa9ce01ead702a386c
https://github.com/chorsley/python-Wappalyzer/blob/b785e29f12c8032c54279cfa9ce01ead702a386c/Wappalyzer/Wappalyzer.py#L254-L266
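End to end, the public entry point pairs a Wappalyzer instance with a WebPage. A sketch, assuming Wappalyzer.latest() loads the bundled categories and app signatures as the project's README suggests:

from Wappalyzer import Wappalyzer, WebPage

wappalyzer = Wappalyzer.latest()                 # load bundled categories and app signatures
webpage = WebPage.new_from_url('https://example.org')

print(wappalyzer.analyze(webpage))                    # e.g. {'Nginx', 'jQuery'}
print(wappalyzer.analyze_with_categories(webpage))    # adds {'categories': [...]} per detected app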
10,231
chorsley/python-Wappalyzer
Wappalyzer/Wappalyzer.py
Wappalyzer.analyze_with_categories
def analyze_with_categories(self, webpage): """ Return a list of applications and categories that can be detected on the web page. """ detected_apps = self.analyze(webpage) categorised_apps = {} for app_name in detected_apps: cat_names = self.get_categories(app_name) categorised_apps[app_name] = {"categories": cat_names} return categorised_apps
python
def analyze_with_categories(self, webpage): """ Return a list of applications and categories that can be detected on the web page. """ detected_apps = self.analyze(webpage) categorised_apps = {} for app_name in detected_apps: cat_names = self.get_categories(app_name) categorised_apps[app_name] = {"categories": cat_names} return categorised_apps
[ "def", "analyze_with_categories", "(", "self", ",", "webpage", ")", ":", "detected_apps", "=", "self", ".", "analyze", "(", "webpage", ")", "categorised_apps", "=", "{", "}", "for", "app_name", "in", "detected_apps", ":", "cat_names", "=", "self", ".", "get_categories", "(", "app_name", ")", "categorised_apps", "[", "app_name", "]", "=", "{", "\"categories\"", ":", "cat_names", "}", "return", "categorised_apps" ]
Return a list of applications and categories that can be detected on the web page.
[ "Return", "a", "list", "of", "applications", "and", "categories", "that", "can", "be", "detected", "on", "the", "web", "page", "." ]
b785e29f12c8032c54279cfa9ce01ead702a386c
https://github.com/chorsley/python-Wappalyzer/blob/b785e29f12c8032c54279cfa9ce01ead702a386c/Wappalyzer/Wappalyzer.py#L268-L279
10,232
user-cont/conu
conu/utils/filesystem.py
Directory.clean
def clean(self): """ remove the directory we operated on :return: None """ if self._initialized: logger.info("brace yourselves, removing %r", self.path) shutil.rmtree(self.path)
python
def clean(self): """ remove the directory we operated on :return: None """ if self._initialized: logger.info("brace yourselves, removing %r", self.path) shutil.rmtree(self.path)
[ "def", "clean", "(", "self", ")", ":", "if", "self", ".", "_initialized", ":", "logger", ".", "info", "(", "\"brace yourselves, removing %r\"", ",", "self", ".", "path", ")", "shutil", ".", "rmtree", "(", "self", ".", "path", ")" ]
remove the directory we operated on :return: None
[ "remove", "the", "directory", "we", "operated", "on" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/utils/filesystem.py#L121-L129
10,233
user-cont/conu
conu/utils/filesystem.py
Directory.initialize
def initialize(self): """ create the directory if needed and configure it :return: None """ if not self._initialized: logger.info("initializing %r", self) if not os.path.exists(self.path): if self.mode is not None: os.makedirs(self.path, mode=self.mode) else: os.makedirs(self.path) self._set_mode() self._add_facl_rules() self._set_selinux_context() self._set_ownership() self._initialized = True logger.info("initialized") return logger.info("%r was already initialized", self)
python
def initialize(self): """ create the directory if needed and configure it :return: None """ if not self._initialized: logger.info("initializing %r", self) if not os.path.exists(self.path): if self.mode is not None: os.makedirs(self.path, mode=self.mode) else: os.makedirs(self.path) self._set_mode() self._add_facl_rules() self._set_selinux_context() self._set_ownership() self._initialized = True logger.info("initialized") return logger.info("%r was already initialized", self)
[ "def", "initialize", "(", "self", ")", ":", "if", "not", "self", ".", "_initialized", ":", "logger", ".", "info", "(", "\"initializing %r\"", ",", "self", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "self", ".", "path", ")", ":", "if", "self", ".", "mode", "is", "not", "None", ":", "os", ".", "makedirs", "(", "self", ".", "path", ",", "mode", "=", "self", ".", "mode", ")", "else", ":", "os", ".", "makedirs", "(", "self", ".", "path", ")", "self", ".", "_set_mode", "(", ")", "self", ".", "_add_facl_rules", "(", ")", "self", ".", "_set_selinux_context", "(", ")", "self", ".", "_set_ownership", "(", ")", "self", ".", "_initialized", "=", "True", "logger", ".", "info", "(", "\"initialized\"", ")", "return", "logger", ".", "info", "(", "\"%r was already initialized\"", ",", "self", ")" ]
create the directory if needed and configure it :return: None
[ "create", "the", "directory", "if", "needed", "and", "configure", "it" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/utils/filesystem.py#L131-L151
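A sketch of the typical lifecycle: construct, initialize, use, clean. The constructor keywords are assumed from the attributes the methods above consult (mode, facl_rules, selinux_type), and the path, uid and SELinux type are examples:

import os
from conu.utils.filesystem import Directory

d = Directory(os.path.join('/tmp', 'conu-shared'),
              mode=0o0700,
              facl_rules=['u:26:rwx'],            # e.g. let uid 26 write into the directory
              selinux_type='container_file_t')    # so containers may access it
try:
    d.initialize()   # creates the path, then applies mode, ACL rules and SELinux fields
    # ... bind-mount the directory into a container, write fixtures, run checks ...
finally:
    d.clean()        # removes the whole tree again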
10,234
user-cont/conu
conu/utils/filesystem.py
Directory._set_selinux_context
def _set_selinux_context(self): """ Set SELinux context or fields using chcon program. Raises CommandDoesNotExistException if the command is not present on the system. :return: None """ chcon_command_exists() # FIXME: do this using python API if possible if self.selinux_context: logger.debug("setting SELinux context of %s to %s", self.path, self.selinux_context) run_cmd(["chcon", self.selinux_context, self.path]) if any([self.selinux_user, self.selinux_role, self.selinux_type, self.selinux_range]): logger.debug("setting SELinux fields of %s", self.path, self.selinux_context) # chcon [OPTION]... [-u USER] [-r ROLE] [-l RANGE] [-t TYPE] FILE... pairs = [("-u", self.selinux_user), ("-r", self.selinux_role), ("-l", self.selinux_range), ("-t", self.selinux_type)] c = ["chcon"] for p in pairs: if p[1]: c += p c += [self.path] run_cmd(c)
python
def _set_selinux_context(self): """ Set SELinux context or fields using chcon program. Raises CommandDoesNotExistException if the command is not present on the system. :return: None """ chcon_command_exists() # FIXME: do this using python API if possible if self.selinux_context: logger.debug("setting SELinux context of %s to %s", self.path, self.selinux_context) run_cmd(["chcon", self.selinux_context, self.path]) if any([self.selinux_user, self.selinux_role, self.selinux_type, self.selinux_range]): logger.debug("setting SELinux fields of %s", self.path, self.selinux_context) # chcon [OPTION]... [-u USER] [-r ROLE] [-l RANGE] [-t TYPE] FILE... pairs = [("-u", self.selinux_user), ("-r", self.selinux_role), ("-l", self.selinux_range), ("-t", self.selinux_type)] c = ["chcon"] for p in pairs: if p[1]: c += p c += [self.path] run_cmd(c)
[ "def", "_set_selinux_context", "(", "self", ")", ":", "chcon_command_exists", "(", ")", "# FIXME: do this using python API if possible", "if", "self", ".", "selinux_context", ":", "logger", ".", "debug", "(", "\"setting SELinux context of %s to %s\"", ",", "self", ".", "path", ",", "self", ".", "selinux_context", ")", "run_cmd", "(", "[", "\"chcon\"", ",", "self", ".", "selinux_context", ",", "self", ".", "path", "]", ")", "if", "any", "(", "[", "self", ".", "selinux_user", ",", "self", ".", "selinux_role", ",", "self", ".", "selinux_type", ",", "self", ".", "selinux_range", "]", ")", ":", "logger", ".", "debug", "(", "\"setting SELinux fields of %s\"", ",", "self", ".", "path", ",", "self", ".", "selinux_context", ")", "# chcon [OPTION]... [-u USER] [-r ROLE] [-l RANGE] [-t TYPE] FILE...", "pairs", "=", "[", "(", "\"-u\"", ",", "self", ".", "selinux_user", ")", ",", "(", "\"-r\"", ",", "self", ".", "selinux_role", ")", ",", "(", "\"-l\"", ",", "self", ".", "selinux_range", ")", ",", "(", "\"-t\"", ",", "self", ".", "selinux_type", ")", "]", "c", "=", "[", "\"chcon\"", "]", "for", "p", "in", "pairs", ":", "if", "p", "[", "1", "]", ":", "c", "+=", "p", "c", "+=", "[", "self", ".", "path", "]", "run_cmd", "(", "c", ")" ]
Set SELinux context or fields using chcon program. Raises CommandDoesNotExistException if the command is not present on the system. :return: None
[ "Set", "SELinux", "context", "or", "fields", "using", "chcon", "program", ".", "Raises", "CommandDoesNotExistException", "if", "the", "command", "is", "not", "present", "on", "the", "system", "." ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/utils/filesystem.py#L153-L175
10,235
user-cont/conu
conu/utils/filesystem.py
Directory._set_mode
def _set_mode(self): """ set permission bits if needed using python API os.chmod :return: None """ if self.mode is not None: logger.debug("changing permission bits of %s to %s", self.path, oct(self.mode)) os.chmod(self.path, self.mode)
python
def _set_mode(self): """ set permission bits if needed using python API os.chmod :return: None """ if self.mode is not None: logger.debug("changing permission bits of %s to %s", self.path, oct(self.mode)) os.chmod(self.path, self.mode)
[ "def", "_set_mode", "(", "self", ")", ":", "if", "self", ".", "mode", "is", "not", "None", ":", "logger", ".", "debug", "(", "\"changing permission bits of %s to %s\"", ",", "self", ".", "path", ",", "oct", "(", "self", ".", "mode", ")", ")", "os", ".", "chmod", "(", "self", ".", "path", ",", "self", ".", "mode", ")" ]
set permission bits if needed using python API os.chmod :return: None
[ "set", "permission", "bits", "if", "needed", "using", "python", "API", "os", ".", "chmod" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/utils/filesystem.py#L192-L200
10,236
user-cont/conu
conu/utils/filesystem.py
Directory._add_facl_rules
def _add_facl_rules(self): """ Apply ACL rules on the directory using setfacl program. Raises CommandDoesNotExistException if the command is not present on the system. :return: None """ setfacl_command_exists() # we are not using pylibacl b/c it's only for python 2 if self.facl_rules: logger.debug("adding ACLs %s to %s", self.facl_rules, self.path) r = ",".join(self.facl_rules) run_cmd(["setfacl", "-m", r, self.path])
python
def _add_facl_rules(self): """ Apply ACL rules on the directory using setfacl program. Raises CommandDoesNotExistException if the command is not present on the system. :return: None """ setfacl_command_exists() # we are not using pylibacl b/c it's only for python 2 if self.facl_rules: logger.debug("adding ACLs %s to %s", self.facl_rules, self.path) r = ",".join(self.facl_rules) run_cmd(["setfacl", "-m", r, self.path])
[ "def", "_add_facl_rules", "(", "self", ")", ":", "setfacl_command_exists", "(", ")", "# we are not using pylibacl b/c it's only for python 2", "if", "self", ".", "facl_rules", ":", "logger", ".", "debug", "(", "\"adding ACLs %s to %s\"", ",", "self", ".", "facl_rules", ",", "self", ".", "path", ")", "r", "=", "\",\"", ".", "join", "(", "self", ".", "facl_rules", ")", "run_cmd", "(", "[", "\"setfacl\"", ",", "\"-m\"", ",", "r", ",", "self", ".", "path", "]", ")" ]
Apply ACL rules on the directory using setfacl program. Raises CommandDoesNotExistException if the command is not present on the system. :return: None
[ "Apply", "ACL", "rules", "on", "the", "directory", "using", "setfacl", "program", ".", "Raises", "CommandDoesNotExistException", "if", "the", "command", "is", "not", "present", "on", "the", "system", "." ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/utils/filesystem.py#L202-L214
10,237
user-cont/conu
conu/backend/podman/image.py
PodmanImage.get_volume_options
def get_volume_options(volumes): """ Generates volume options to run methods. :param volumes: tuple or list of tuples in form target x source,target x source,target,mode. :return: list of the form ["-v", "/source:/target", "-v", "/other/source:/destination:z", ...] """ if not isinstance(volumes, list): volumes = [volumes] volumes = [Volume.create_from_tuple(v) for v in volumes] result = [] for v in volumes: result += ["-v", str(v)] return result
python
def get_volume_options(volumes): """ Generates volume options to run methods. :param volumes: tuple or list of tuples in form target x source,target x source,target,mode. :return: list of the form ["-v", "/source:/target", "-v", "/other/source:/destination:z", ...] """ if not isinstance(volumes, list): volumes = [volumes] volumes = [Volume.create_from_tuple(v) for v in volumes] result = [] for v in volumes: result += ["-v", str(v)] return result
[ "def", "get_volume_options", "(", "volumes", ")", ":", "if", "not", "isinstance", "(", "volumes", ",", "list", ")", ":", "volumes", "=", "[", "volumes", "]", "volumes", "=", "[", "Volume", ".", "create_from_tuple", "(", "v", ")", "for", "v", "in", "volumes", "]", "result", "=", "[", "]", "for", "v", "in", "volumes", ":", "result", "+=", "[", "\"-v\"", ",", "str", "(", "v", ")", "]", "return", "result" ]
Generates volume options to run methods. :param volumes: tuple or list of tuples in form target x source,target x source,target,mode. :return: list of the form ["-v", "/source:/target", "-v", "/other/source:/destination:z", ...]
[ "Generates", "volume", "options", "to", "run", "methods", "." ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/podman/image.py#L367-L380
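A sketch of how the tuples map to -v options; the (source, target) and (source, target, mode) tuple forms and the resulting "source:target[:mode]" strings are assumed from the docstring, since Volume.create_from_tuple does the actual parsing:

opts = PodmanImage.get_volume_options(volumes=[
    ('/tmp/data', '/srv/data'),          # source, target
    ('/tmp/logs', '/var/log/app', 'Z'),  # source, target, mode (e.g. SELinux relabel)
])
# Expected shape: ['-v', '/tmp/data:/srv/data', '-v', '/tmp/logs:/var/log/app:Z']
print(opts)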
10,238
user-cont/conu
conu/backend/podman/image.py
PodmanImage.layers
def layers(self, rev=True): """ Get list of PodmanImage for every layer in image :param rev: get layers rev :return: list of :class:`conu.PodmanImage` """ image_layers = [ PodmanImage(None, identifier=x, pull_policy=PodmanImagePullPolicy.NEVER) for x in self.get_layer_ids() ] if not rev: image_layers.reverse() return image_layers
python
def layers(self, rev=True): """ Get list of PodmanImage for every layer in image :param rev: get layers rev :return: list of :class:`conu.PodmanImage` """ image_layers = [ PodmanImage(None, identifier=x, pull_policy=PodmanImagePullPolicy.NEVER) for x in self.get_layer_ids() ] if not rev: image_layers.reverse() return image_layers
[ "def", "layers", "(", "self", ",", "rev", "=", "True", ")", ":", "image_layers", "=", "[", "PodmanImage", "(", "None", ",", "identifier", "=", "x", ",", "pull_policy", "=", "PodmanImagePullPolicy", ".", "NEVER", ")", "for", "x", "in", "self", ".", "get_layer_ids", "(", ")", "]", "if", "not", "rev", ":", "image_layers", ".", "reverse", "(", ")", "return", "image_layers" ]
Get list of PodmanImage for every layer in image :param rev: get layers rev :return: list of :class:`conu.PodmanImage`
[ "Get", "list", "of", "PodmanImage", "for", "every", "layer", "in", "image" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/podman/image.py#L395-L408
10,239
user-cont/conu
conu/backend/podman/image.py
PodmanImage.get_metadata
def get_metadata(self): """ Provide metadata about this image. :return: ImageMetadata, Image metadata instance """ if self._metadata is None: self._metadata = ImageMetadata() inspect_to_metadata(self._metadata, self.inspect(refresh=True)) return self._metadata
python
def get_metadata(self): """ Provide metadata about this image. :return: ImageMetadata, Image metadata instance """ if self._metadata is None: self._metadata = ImageMetadata() inspect_to_metadata(self._metadata, self.inspect(refresh=True)) return self._metadata
[ "def", "get_metadata", "(", "self", ")", ":", "if", "self", ".", "_metadata", "is", "None", ":", "self", ".", "_metadata", "=", "ImageMetadata", "(", ")", "inspect_to_metadata", "(", "self", ".", "_metadata", ",", "self", ".", "inspect", "(", "refresh", "=", "True", ")", ")", "return", "self", ".", "_metadata" ]
Provide metadata about this image. :return: ImageMetadata, Image metadata instance
[ "Provide", "metadata", "about", "this", "image", "." ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/podman/image.py#L416-L425
10,240
user-cont/conu
conu/backend/podman/container.py
PodmanContainer.is_running
def is_running(self): """ returns True if the container is running :return: bool """ try: return graceful_get(self.inspect(refresh=True), "State", "Running") except subprocess.CalledProcessError: return False
python
def is_running(self): """ returns True if the container is running :return: bool """ try: return graceful_get(self.inspect(refresh=True), "State", "Running") except subprocess.CalledProcessError: return False
[ "def", "is_running", "(", "self", ")", ":", "try", ":", "return", "graceful_get", "(", "self", ".", "inspect", "(", "refresh", "=", "True", ")", ",", "\"State\"", ",", "\"Running\"", ")", "except", "subprocess", ".", "CalledProcessError", ":", "return", "False" ]
returns True if the container is running :return: bool
[ "returns", "True", "if", "the", "container", "is", "running" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/podman/container.py#L125-L134
10,241
user-cont/conu
conu/backend/podman/container.py
PodmanContainer.is_port_open
def is_port_open(self, port, timeout=2): """ check if given port is open and receiving connections on container ip_address :param port: int, container port :param timeout: int, how many seconds to wait for connection; defaults to 2 :return: True if the connection has been established inside timeout, False otherwise """ addresses = self.get_IPv4s() if not addresses: return False return check_port(port, host=addresses[0], timeout=timeout)
python
def is_port_open(self, port, timeout=2): """ check if given port is open and receiving connections on container ip_address :param port: int, container port :param timeout: int, how many seconds to wait for connection; defaults to 2 :return: True if the connection has been established inside timeout, False otherwise """ addresses = self.get_IPv4s() if not addresses: return False return check_port(port, host=addresses[0], timeout=timeout)
[ "def", "is_port_open", "(", "self", ",", "port", ",", "timeout", "=", "2", ")", ":", "addresses", "=", "self", ".", "get_IPv4s", "(", ")", "if", "not", "addresses", ":", "return", "False", "return", "check_port", "(", "port", ",", "host", "=", "addresses", "[", "0", "]", ",", "timeout", "=", "timeout", ")" ]
check if given port is open and receiving connections on container ip_address :param port: int, container port :param timeout: int, how many seconds to wait for connection; defaults to 2 :return: True if the connection has been established inside timeout, False otherwise
[ "check", "if", "given", "port", "is", "open", "and", "receiving", "connections", "on", "container", "ip_address" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/podman/container.py#L173-L184
10,242
user-cont/conu
conu/backend/podman/container.py
PodmanContainer.wait_for_port
def wait_for_port(self, port, timeout=10, **probe_kwargs): """ block until specified port starts accepting connections, raises an exc ProbeTimeout if timeout is reached :param port: int, port number :param timeout: int or float (seconds), time to wait for establishing the connection :param probe_kwargs: arguments passed to Probe constructor :return: None """ Probe(timeout=timeout, fnc=functools.partial(self.is_port_open, port), **probe_kwargs).run()
python
def wait_for_port(self, port, timeout=10, **probe_kwargs): """ block until specified port starts accepting connections, raises an exc ProbeTimeout if timeout is reached :param port: int, port number :param timeout: int or float (seconds), time to wait for establishing the connection :param probe_kwargs: arguments passed to Probe constructor :return: None """ Probe(timeout=timeout, fnc=functools.partial(self.is_port_open, port), **probe_kwargs).run()
[ "def", "wait_for_port", "(", "self", ",", "port", ",", "timeout", "=", "10", ",", "*", "*", "probe_kwargs", ")", ":", "Probe", "(", "timeout", "=", "timeout", ",", "fnc", "=", "functools", ".", "partial", "(", "self", ".", "is_port_open", ",", "port", ")", ",", "*", "*", "probe_kwargs", ")", ".", "run", "(", ")" ]
block until the specified port starts accepting connections; raises a ProbeTimeout exception if the timeout is reached :param port: int, port number :param timeout: int or float (seconds), time to wait for establishing the connection :param probe_kwargs: arguments passed to Probe constructor :return: None
[ "block", "until", "specified", "port", "starts", "accepting", "connections", "raises", "an", "exc", "ProbeTimeout", "if", "timeout", "is", "reached" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/podman/container.py#L221-L231
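A sketch of the readiness pattern these methods enable. The backend and image names below are assumptions for illustration (PodmanBackend, ImageClass and run_via_binary follow conu's usual naming, but check the installed version), and the image is a hypothetical web app listening on port 8080:

from conu import PodmanBackend   # name assumed; adjust to the installed conu version

with PodmanBackend() as backend:
    image = backend.ImageClass('registry.example.com/my-web-app', tag='latest')  # hypothetical image
    container = image.run_via_binary()
    try:
        # Block until the service inside the container accepts connections on 8080,
        # or raise ProbeTimeout after 15 seconds.
        container.wait_for_port(8080, timeout=15)
        assert container.is_port_open(8080)
    finally:
        container.stop()
        container.delete()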
10,243
user-cont/conu
conu/backend/podman/container.py
PodmanContainer.mount
def mount(self, mount_point=None): """ mount container filesystem :return: str, the location of the mounted file system """ cmd = ["podman", "mount", self._id or self.get_id()] output = run_cmd(cmd, return_output=True).rstrip("\n\r") return output
python
def mount(self, mount_point=None): """ mount container filesystem :return: str, the location of the mounted file system """ cmd = ["podman", "mount", self._id or self.get_id()] output = run_cmd(cmd, return_output=True).rstrip("\n\r") return output
[ "def", "mount", "(", "self", ",", "mount_point", "=", "None", ")", ":", "cmd", "=", "[", "\"podman\"", ",", "\"mount\"", ",", "self", ".", "_id", "or", "self", ".", "get_id", "(", ")", "]", "output", "=", "run_cmd", "(", "cmd", ",", "return_output", "=", "True", ")", ".", "rstrip", "(", "\"\\n\\r\"", ")", "return", "output" ]
mount container filesystem :return: str, the location of the mounted file system
[ "mount", "container", "filesystem" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/podman/container.py#L244-L252
10,244
user-cont/conu
conu/backend/podman/container.py
PodmanContainer.wait
def wait(self, timeout=None): """ Block until the container stops, then return its exit code. Similar to the ``podman wait`` command. :param timeout: int, microseconds to wait before polling for completion :return: int, exit code """ timeout = ["--interval=%s" % timeout] if timeout else [] cmdline = ["podman", "wait"] + timeout + [self._id or self.get_id()] return run_cmd(cmdline, return_output=True)
python
def wait(self, timeout=None): """ Block until the container stops, then return its exit code. Similar to the ``podman wait`` command. :param timeout: int, microseconds to wait before polling for completion :return: int, exit code """ timeout = ["--interval=%s" % timeout] if timeout else [] cmdline = ["podman", "wait"] + timeout + [self._id or self.get_id()] return run_cmd(cmdline, return_output=True)
[ "def", "wait", "(", "self", ",", "timeout", "=", "None", ")", ":", "timeout", "=", "[", "\"--interval=%s\"", "%", "timeout", "]", "if", "timeout", "else", "[", "]", "cmdline", "=", "[", "\"podman\"", ",", "\"wait\"", "]", "+", "timeout", "+", "[", "self", ".", "_id", "or", "self", ".", "get_id", "(", ")", "]", "return", "run_cmd", "(", "cmdline", ",", "return_output", "=", "True", ")" ]
Block until the container stops, then return its exit code. Similar to the ``podman wait`` command. :param timeout: int, microseconds to wait before polling for completion :return: int, exit code
[ "Block", "until", "the", "container", "stops", "then", "return", "its", "exit", "code", ".", "Similar", "to", "the", "podman", "wait", "command", "." ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/podman/container.py#L306-L316
10,245
user-cont/conu
conu/apidefs/filesystem.py
Filesystem.read_file
def read_file(self, file_path): """ read file specified via 'file_path' and return its content - raises an ConuException if there is an issue accessing the file :param file_path: str, path to the file to read :return: str (not bytes), content of the file """ try: with open(self.p(file_path)) as fd: return fd.read() except IOError as ex: logger.error("error while accessing file %s: %r", file_path, ex) raise ConuException("There was an error while accessing file %s: %r", file_path, ex)
python
def read_file(self, file_path): """ read file specified via 'file_path' and return its content - raises an ConuException if there is an issue accessing the file :param file_path: str, path to the file to read :return: str (not bytes), content of the file """ try: with open(self.p(file_path)) as fd: return fd.read() except IOError as ex: logger.error("error while accessing file %s: %r", file_path, ex) raise ConuException("There was an error while accessing file %s: %r", file_path, ex)
[ "def", "read_file", "(", "self", ",", "file_path", ")", ":", "try", ":", "with", "open", "(", "self", ".", "p", "(", "file_path", ")", ")", "as", "fd", ":", "return", "fd", ".", "read", "(", ")", "except", "IOError", "as", "ex", ":", "logger", ".", "error", "(", "\"error while accessing file %s: %r\"", ",", "file_path", ",", "ex", ")", "raise", "ConuException", "(", "\"There was an error while accessing file %s: %r\"", ",", "file_path", ",", "ex", ")" ]
read file specified via 'file_path' and return its content - raises a ConuException if there is an issue accessing the file :param file_path: str, path to the file to read :return: str (not bytes), content of the file
[ "read", "file", "specified", "via", "file_path", "and", "return", "its", "content", "-", "raises", "an", "ConuException", "if", "there", "is", "an", "issue", "accessing", "the", "file" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/apidefs/filesystem.py#L106-L119
10,246
user-cont/conu
conu/apidefs/filesystem.py
Filesystem.get_file
def get_file(self, file_path, mode="r"): """ provide File object specified via 'file_path' :param file_path: str, path to the file :param mode: str, mode used when opening the file :return: File instance """ return open(self.p(file_path), mode=mode)
python
def get_file(self, file_path, mode="r"): """ provide File object specified via 'file_path' :param file_path: str, path to the file :param mode: str, mode used when opening the file :return: File instance """ return open(self.p(file_path), mode=mode)
[ "def", "get_file", "(", "self", ",", "file_path", ",", "mode", "=", "\"r\"", ")", ":", "return", "open", "(", "self", ".", "p", "(", "file_path", ")", ",", "mode", "=", "mode", ")" ]
provide File object specified via 'file_path' :param file_path: str, path to the file :param mode: str, mode used when opening the file :return: File instance
[ "provide", "File", "object", "specified", "via", "file_path" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/apidefs/filesystem.py#L121-L129
10,247
user-cont/conu
conu/apidefs/filesystem.py
Filesystem.file_is_present
def file_is_present(self, file_path): """ check if file 'file_path' is present, raises IOError if file_path is not a file :param file_path: str, path to the file :return: True if file exists, False if file does not exist """ p = self.p(file_path) if not os.path.exists(p): return False if not os.path.isfile(p): raise IOError("%s is not a file" % file_path) return True
python
def file_is_present(self, file_path): """ check if file 'file_path' is present, raises IOError if file_path is not a file :param file_path: str, path to the file :return: True if file exists, False if file does not exist """ p = self.p(file_path) if not os.path.exists(p): return False if not os.path.isfile(p): raise IOError("%s is not a file" % file_path) return True
[ "def", "file_is_present", "(", "self", ",", "file_path", ")", ":", "p", "=", "self", ".", "p", "(", "file_path", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "p", ")", ":", "return", "False", "if", "not", "os", ".", "path", ".", "isfile", "(", "p", ")", ":", "raise", "IOError", "(", "\"%s is not a file\"", "%", "file_path", ")", "return", "True" ]
check if file 'file_path' is present, raises IOError if file_path is not a file :param file_path: str, path to the file :return: True if file exists, False if file does not exist
[ "check", "if", "file", "file_path", "is", "present", "raises", "IOError", "if", "file_path", "is", "not", "a", "file" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/apidefs/filesystem.py#L131-L144
10,248
user-cont/conu
conu/apidefs/filesystem.py
Filesystem.directory_is_present
def directory_is_present(self, directory_path): """ check if directory 'directory_path' is present, raise IOError if it's not a directory :param directory_path: str, directory to check :return: True if directory exists, False if directory does not exist """ p = self.p(directory_path) if not os.path.exists(p): return False if not os.path.isdir(p): raise IOError("%s is not a directory" % directory_path) return True
python
def directory_is_present(self, directory_path): """ check if directory 'directory_path' is present, raise IOError if it's not a directory :param directory_path: str, directory to check :return: True if directory exists, False if directory does not exist """ p = self.p(directory_path) if not os.path.exists(p): return False if not os.path.isdir(p): raise IOError("%s is not a directory" % directory_path) return True
[ "def", "directory_is_present", "(", "self", ",", "directory_path", ")", ":", "p", "=", "self", ".", "p", "(", "directory_path", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "p", ")", ":", "return", "False", "if", "not", "os", ".", "path", ".", "isdir", "(", "p", ")", ":", "raise", "IOError", "(", "\"%s is not a directory\"", "%", "directory_path", ")", "return", "True" ]
check if directory 'directory_path' is present, raise IOError if it's not a directory :param directory_path: str, directory to check :return: True if directory exists, False if directory does not exist
[ "check", "if", "directory", "directory_path", "is", "present", "raise", "IOError", "if", "it", "s", "not", "a", "directory" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/apidefs/filesystem.py#L146-L158
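These Filesystem helpers are usually reached through a mounted container or image filesystem. A sketch assuming 'container' is a running conu container whose mount() yields a Filesystem-backed context manager (as the docker backend does; the podman backend shown above returns a plain path instead):

# 'container' is a running conu container object obtained elsewhere.
with container.mount() as fs:
    if fs.directory_is_present('/etc'):
        print('config directory found')
    if fs.file_is_present('/etc/os-release'):
        print(fs.read_file('/etc/os-release'))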
10,249
user-cont/conu
conu/apidefs/filesystem.py
Filesystem.get_selinux_context
def get_selinux_context(self, file_path): """ Get SELinux file context of the selected file. :param file_path: str, path to the file :return: str, name of the SELinux file context """ # what if SELinux is not enabled? p = self.p(file_path) if not HAS_XATTR: raise RuntimeError("'xattr' python module is not available, hence we cannot " "determine the SELinux context for this file. " "In Fedora this module is available as python3-pyxattr -- " "other distributions may follow similar naming scheme.") return xattr.get(p, "security.selinux")
python
def get_selinux_context(self, file_path): """ Get SELinux file context of the selected file. :param file_path: str, path to the file :return: str, name of the SELinux file context """ # what if SELinux is not enabled? p = self.p(file_path) if not HAS_XATTR: raise RuntimeError("'xattr' python module is not available, hence we cannot " "determine the SELinux context for this file. " "In Fedora this module is available as python3-pyxattr -- " "other distributions may follow similar naming scheme.") return xattr.get(p, "security.selinux")
[ "def", "get_selinux_context", "(", "self", ",", "file_path", ")", ":", "# what if SELinux is not enabled?", "p", "=", "self", ".", "p", "(", "file_path", ")", "if", "not", "HAS_XATTR", ":", "raise", "RuntimeError", "(", "\"'xattr' python module is not available, hence we cannot \"", "\"determine the SELinux context for this file. \"", "\"In Fedora this module is available as python3-pyxattr -- \"", "\"other distributions may follow similar naming scheme.\"", ")", "return", "xattr", ".", "get", "(", "p", ",", "\"security.selinux\"", ")" ]
Get SELinux file context of the selected file. :param file_path: str, path to the file :return: str, name of the SELinux file context
[ "Get", "SELinux", "file", "context", "of", "the", "selected", "file", "." ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/apidefs/filesystem.py#L160-L174
10,250
user-cont/conu
conu/utils/probes.py
Probe._wrapper
def _wrapper(self, q, start): """ _wrapper checks return status of Probe.fnc and provides the result for process managing :param q: Queue for function results :param start: Time of function run (used for logging) :return: Return value or Exception """ try: func_name = self.fnc.__name__ except AttributeError: func_name = str(self.fnc) logger.debug("Running \"%s\" with parameters: \"%s\":\t%s/%s" % (func_name, str(self.kwargs), round(time.time() - start), self.timeout)) try: result = self.fnc(**self.kwargs) # let's log only first 50 characters of the response logger.debug("callback result = %s", str(result)[:50]) q.put(result) except self.expected_exceptions as ex: logger.debug("expected exception was caught: %s", ex) q.put(False) except Exception as ex: logger.debug("adding exception %s to queue", ex) q.put(ex)
python
def _wrapper(self, q, start): """ _wrapper checks return status of Probe.fnc and provides the result for process managing :param q: Queue for function results :param start: Time of function run (used for logging) :return: Return value or Exception """ try: func_name = self.fnc.__name__ except AttributeError: func_name = str(self.fnc) logger.debug("Running \"%s\" with parameters: \"%s\":\t%s/%s" % (func_name, str(self.kwargs), round(time.time() - start), self.timeout)) try: result = self.fnc(**self.kwargs) # let's log only first 50 characters of the response logger.debug("callback result = %s", str(result)[:50]) q.put(result) except self.expected_exceptions as ex: logger.debug("expected exception was caught: %s", ex) q.put(False) except Exception as ex: logger.debug("adding exception %s to queue", ex) q.put(ex)
[ "def", "_wrapper", "(", "self", ",", "q", ",", "start", ")", ":", "try", ":", "func_name", "=", "self", ".", "fnc", ".", "__name__", "except", "AttributeError", ":", "func_name", "=", "str", "(", "self", ".", "fnc", ")", "logger", ".", "debug", "(", "\"Running \\\"%s\\\" with parameters: \\\"%s\\\":\\t%s/%s\"", "%", "(", "func_name", ",", "str", "(", "self", ".", "kwargs", ")", ",", "round", "(", "time", ".", "time", "(", ")", "-", "start", ")", ",", "self", ".", "timeout", ")", ")", "try", ":", "result", "=", "self", ".", "fnc", "(", "*", "*", "self", ".", "kwargs", ")", "# let's log only first 50 characters of the response", "logger", ".", "debug", "(", "\"callback result = %s\"", ",", "str", "(", "result", ")", "[", ":", "50", "]", ")", "q", ".", "put", "(", "result", ")", "except", "self", ".", "expected_exceptions", "as", "ex", ":", "logger", ".", "debug", "(", "\"expected exception was caught: %s\"", ",", "ex", ")", "q", ".", "put", "(", "False", ")", "except", "Exception", "as", "ex", ":", "logger", ".", "debug", "(", "\"adding exception %s to queue\"", ",", "ex", ")", "q", ".", "put", "(", "ex", ")" ]
_wrapper checks the return status of Probe.fnc and provides the result to the managing process :param q: Queue for function results :param start: Time of function run (used for logging) :return: Return value or Exception
[ "_wrapper", "checks", "return", "status", "of", "Probe", ".", "fnc", "and", "provides", "the", "result", "for", "process", "managing" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/utils/probes.py#L92-L116
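_wrapper is driven by the attributes set on the Probe instance (fnc, kwargs, timeout, expected_exceptions). A minimal sketch of how such a probe might be constructed; the constructor signature and the run() entry point are assumptions, only the attribute names above are confirmed by this record:

from conu.utils.probes import Probe
import socket

def server_is_up(port):
    # hypothetical readiness callback; any callable works here
    with socket.socket() as s:
        return s.connect_ex(("127.0.0.1", port)) == 0

# timeout/fnc/expected_exceptions/extra kwargs mirror the attributes used by _wrapper;
# the exact constructor signature and run() are assumptions
probe = Probe(timeout=30, fnc=server_is_up, expected_exceptions=(OSError,), port=8080)
probe.run()  # assumed entry point that eventually invokes _wrapper in a subprocess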
10,251
user-cont/conu
conu/backend/docker/skopeo.py
transport_param
def transport_param(image): """ Parse DockerImage info into skopeo parameter :param image: DockerImage :return: string. skopeo parameter specifying image """ transports = {SkopeoTransport.CONTAINERS_STORAGE: "containers-storage:", SkopeoTransport.DIRECTORY: "dir:", SkopeoTransport.DOCKER: "docker://", SkopeoTransport.DOCKER_ARCHIVE: "docker-archive", SkopeoTransport.DOCKER_DAEMON: "docker-daemon:", SkopeoTransport.OCI: "oci:", SkopeoTransport.OSTREE: "ostree:"} transport = image.transport tag = image.tag repository = image.name path = image.path if not transport: transport = SkopeoTransport.DOCKER command = transports[transport] path_required = [SkopeoTransport.DIRECTORY, SkopeoTransport.DOCKER_ARCHIVE, SkopeoTransport.OCI] if transport in path_required and path is None: raise ValueError(transports[transport] + " path is required to be specified") if transport == SkopeoTransport.DIRECTORY: return command + path if transport == SkopeoTransport.DOCKER_ARCHIVE: command += path if repository is None: return command command += ":" if transport in [SkopeoTransport.CONTAINERS_STORAGE, SkopeoTransport.DOCKER, SkopeoTransport.DOCKER_ARCHIVE, transport.DOCKER_DAEMON]: return command + repository + ":" + tag if transport == SkopeoTransport.OCI: return command + path + ":" + tag if transport == SkopeoTransport.OSTREE: return command + repository + ("@" + path if path else "") raise ConuException("This transport is not supported")
python
def transport_param(image): """ Parse DockerImage info into skopeo parameter :param image: DockerImage :return: string. skopeo parameter specifying image """ transports = {SkopeoTransport.CONTAINERS_STORAGE: "containers-storage:", SkopeoTransport.DIRECTORY: "dir:", SkopeoTransport.DOCKER: "docker://", SkopeoTransport.DOCKER_ARCHIVE: "docker-archive", SkopeoTransport.DOCKER_DAEMON: "docker-daemon:", SkopeoTransport.OCI: "oci:", SkopeoTransport.OSTREE: "ostree:"} transport = image.transport tag = image.tag repository = image.name path = image.path if not transport: transport = SkopeoTransport.DOCKER command = transports[transport] path_required = [SkopeoTransport.DIRECTORY, SkopeoTransport.DOCKER_ARCHIVE, SkopeoTransport.OCI] if transport in path_required and path is None: raise ValueError(transports[transport] + " path is required to be specified") if transport == SkopeoTransport.DIRECTORY: return command + path if transport == SkopeoTransport.DOCKER_ARCHIVE: command += path if repository is None: return command command += ":" if transport in [SkopeoTransport.CONTAINERS_STORAGE, SkopeoTransport.DOCKER, SkopeoTransport.DOCKER_ARCHIVE, transport.DOCKER_DAEMON]: return command + repository + ":" + tag if transport == SkopeoTransport.OCI: return command + path + ":" + tag if transport == SkopeoTransport.OSTREE: return command + repository + ("@" + path if path else "") raise ConuException("This transport is not supported")
[ "def", "transport_param", "(", "image", ")", ":", "transports", "=", "{", "SkopeoTransport", ".", "CONTAINERS_STORAGE", ":", "\"containers-storage:\"", ",", "SkopeoTransport", ".", "DIRECTORY", ":", "\"dir:\"", ",", "SkopeoTransport", ".", "DOCKER", ":", "\"docker://\"", ",", "SkopeoTransport", ".", "DOCKER_ARCHIVE", ":", "\"docker-archive\"", ",", "SkopeoTransport", ".", "DOCKER_DAEMON", ":", "\"docker-daemon:\"", ",", "SkopeoTransport", ".", "OCI", ":", "\"oci:\"", ",", "SkopeoTransport", ".", "OSTREE", ":", "\"ostree:\"", "}", "transport", "=", "image", ".", "transport", "tag", "=", "image", ".", "tag", "repository", "=", "image", ".", "name", "path", "=", "image", ".", "path", "if", "not", "transport", ":", "transport", "=", "SkopeoTransport", ".", "DOCKER", "command", "=", "transports", "[", "transport", "]", "path_required", "=", "[", "SkopeoTransport", ".", "DIRECTORY", ",", "SkopeoTransport", ".", "DOCKER_ARCHIVE", ",", "SkopeoTransport", ".", "OCI", "]", "if", "transport", "in", "path_required", "and", "path", "is", "None", ":", "raise", "ValueError", "(", "transports", "[", "transport", "]", "+", "\" path is required to be specified\"", ")", "if", "transport", "==", "SkopeoTransport", ".", "DIRECTORY", ":", "return", "command", "+", "path", "if", "transport", "==", "SkopeoTransport", ".", "DOCKER_ARCHIVE", ":", "command", "+=", "path", "if", "repository", "is", "None", ":", "return", "command", "command", "+=", "\":\"", "if", "transport", "in", "[", "SkopeoTransport", ".", "CONTAINERS_STORAGE", ",", "SkopeoTransport", ".", "DOCKER", ",", "SkopeoTransport", ".", "DOCKER_ARCHIVE", ",", "transport", ".", "DOCKER_DAEMON", "]", ":", "return", "command", "+", "repository", "+", "\":\"", "+", "tag", "if", "transport", "==", "SkopeoTransport", ".", "OCI", ":", "return", "command", "+", "path", "+", "\":\"", "+", "tag", "if", "transport", "==", "SkopeoTransport", ".", "OSTREE", ":", "return", "command", "+", "repository", "+", "(", "\"@\"", "+", "path", "if", "path", "else", "\"\"", ")", "raise", "ConuException", "(", "\"This transport is not supported\"", ")" ]
Parse DockerImage info into skopeo parameter :param image: DockerImage :return: string. skopeo parameter specifying image
[ "Parse", "DockerImage", "info", "into", "skopeo", "parameter" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/docker/skopeo.py#L23-L65
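To illustrate the mapping, a hedged sketch: when no transport is set on the image, transport_param falls back to the DOCKER transport and builds a docker:// reference from name and tag (import paths follow the file layout shown in these records):

from conu.backend.docker.image import DockerImage, DockerImagePullPolicy
from conu.backend.docker.skopeo import transport_param

image = DockerImage("fedora", tag="30", pull_policy=DockerImagePullPolicy.NEVER)
# e.g. "docker://fedora:30" when the default docker transport applies
print(transport_param(image))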
10,252
user-cont/conu
conu/backend/nspawn/container.py
NspawnContainer.is_running
def is_running(self): """ return True when container is running, otherwise return False :return: bool """ cmd = ["machinectl", "--no-pager", "status", self.name] try: subprocess.check_call(cmd) return True except subprocess.CalledProcessError as ex: logger.info("nspawn container %s is not running probably: %s", self.name, ex.output) return False
python
def is_running(self): """ return True when container is running, otherwise return False :return: bool """ cmd = ["machinectl", "--no-pager", "status", self.name] try: subprocess.check_call(cmd) return True except subprocess.CalledProcessError as ex: logger.info("nspawn container %s is not running probably: %s", self.name, ex.output) return False
[ "def", "is_running", "(", "self", ")", ":", "cmd", "=", "[", "\"machinectl\"", ",", "\"--no-pager\"", ",", "\"status\"", ",", "self", ".", "name", "]", "try", ":", "subprocess", ".", "check_call", "(", "cmd", ")", "return", "True", "except", "subprocess", ".", "CalledProcessError", "as", "ex", ":", "logger", ".", "info", "(", "\"nspawn container %s is not running probably: %s\"", ",", "self", ".", "name", ",", "ex", ".", "output", ")", "return", "False" ]
return True when container is running, otherwise return False :return: bool
[ "return", "True", "when", "container", "is", "running", "otherwise", "return", "False" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/nspawn/container.py#L147-L160
10,253
user-cont/conu
conu/backend/nspawn/container.py
NspawnContainer.copy_from
def copy_from(self, src, dest): """ copy a file or a directory from container or image to host system. :param src: str, path to a file or a directory within container or image :param dest: str, path to a file or a directory on host system :return: None """ logger.debug("copying %s from host to container at %s", src, dest) cmd = ["machinectl", "--no-pager", "copy-from", self.name, src, dest] run_cmd(cmd)
python
def copy_from(self, src, dest): """ copy a file or a directory from container or image to host system. :param src: str, path to a file or a directory within container or image :param dest: str, path to a file or a directory on host system :return: None """ logger.debug("copying %s from host to container at %s", src, dest) cmd = ["machinectl", "--no-pager", "copy-from", self.name, src, dest] run_cmd(cmd)
[ "def", "copy_from", "(", "self", ",", "src", ",", "dest", ")", ":", "logger", ".", "debug", "(", "\"copying %s from host to container at %s\"", ",", "src", ",", "dest", ")", "cmd", "=", "[", "\"machinectl\"", ",", "\"--no-pager\"", ",", "\"copy-from\"", ",", "self", ".", "name", ",", "src", ",", "dest", "]", "run_cmd", "(", "cmd", ")" ]
copy a file or a directory from container or image to host system. :param src: str, path to a file or a directory within container or image :param dest: str, path to a file or a directory on host system :return: None
[ "copy", "a", "file", "or", "a", "directory", "from", "container", "or", "image", "to", "host", "system", "." ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/nspawn/container.py#L174-L184
10,254
user-cont/conu
conu/backend/nspawn/container.py
NspawnContainer.delete
def delete(self, force=False, volumes=False): """ delete underlying image :param force: bool - force delete, do not care about errors :param volumes: not used anyhow :return: None """ try: self.image.rmi() except ConuException as ime: if not force: raise ime else: pass
python
def delete(self, force=False, volumes=False): """ delete underlying image :param force: bool - force delete, do not care about errors :param volumes: not used anyhow :return: None """ try: self.image.rmi() except ConuException as ime: if not force: raise ime else: pass
[ "def", "delete", "(", "self", ",", "force", "=", "False", ",", "volumes", "=", "False", ")", ":", "try", ":", "self", ".", "image", ".", "rmi", "(", ")", "except", "ConuException", "as", "ime", ":", "if", "not", "force", ":", "raise", "ime", "else", ":", "pass" ]
delete underlying image :param force: bool - force delete, do not care about errors :param volumes: not used anyhow :return: None
[ "delete", "underlying", "image" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/nspawn/container.py#L220-L234
10,255
user-cont/conu
conu/backend/nspawn/container.py
NspawnContainer.cleanup
def cleanup(self, force=False, delete=False): """ Stop container and delete image if given param delete :param force: bool, force stop and delete, no errors raised :param delete: delete images :return: None """ # TODO: this method could be part of API, like: try: self.stop() except subprocess.CalledProcessError as stop: logger.debug("unable to stop container via stop", stop) if not force: raise stop try: self.kill() except subprocess.CalledProcessError as kill: logger.debug("unable to stop container via kill", kill) pass if delete: self.delete(force=force)
python
def cleanup(self, force=False, delete=False): """ Stop container and delete image if given param delete :param force: bool, force stop and delete, no errors raised :param delete: delete images :return: None """ # TODO: this method could be part of API, like: try: self.stop() except subprocess.CalledProcessError as stop: logger.debug("unable to stop container via stop", stop) if not force: raise stop try: self.kill() except subprocess.CalledProcessError as kill: logger.debug("unable to stop container via kill", kill) pass if delete: self.delete(force=force)
[ "def", "cleanup", "(", "self", ",", "force", "=", "False", ",", "delete", "=", "False", ")", ":", "# TODO: this method could be part of API, like:", "try", ":", "self", ".", "stop", "(", ")", "except", "subprocess", ".", "CalledProcessError", "as", "stop", ":", "logger", ".", "debug", "(", "\"unable to stop container via stop\"", ",", "stop", ")", "if", "not", "force", ":", "raise", "stop", "try", ":", "self", ".", "kill", "(", ")", "except", "subprocess", ".", "CalledProcessError", "as", "kill", ":", "logger", ".", "debug", "(", "\"unable to stop container via kill\"", ",", "kill", ")", "pass", "if", "delete", ":", "self", ".", "delete", "(", "force", "=", "force", ")" ]
Stop the container and, if the delete parameter is set, delete the underlying image :param force: bool, force stop and delete, no errors raised :param delete: bool, delete the image :return: None
[ "Stop", "container", "and", "delete", "image", "if", "given", "param", "delete" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/nspawn/container.py#L236-L257
10,256
user-cont/conu
conu/backend/nspawn/container.py
NspawnContainer.run_systemdrun
def run_systemdrun( self, command, internal_background=False, return_full_dict=False, **kwargs): """ execute command via systemd-run inside container :param command: list of command params :param internal_background: not used now :param kwargs: pass params to subprocess :return: dict with result """ internalkw = deepcopy(kwargs) or {} original_ignore_st = internalkw.get("ignore_status", False) original_return_st = internalkw.get("return_output", False) internalkw["ignore_status"] = True internalkw["return_output"] = False unit_name = constants.CONU_ARTIFACT_TAG + "unit_" + random_str() opts = ["-M", self.name, "--unit", unit_name] lpath = "/var/tmp/{}".format(unit_name) comout = {} if self._run_systemdrun_decide(): add_wait_var = "--wait" else: # keep service exist after it finish, to be able to read exit code add_wait_var = "-r" if internal_background: add_wait_var = "" if add_wait_var: opts.append(add_wait_var) # TODO: behave move similar to run_cmd function, unable to work with clean subprocess objects because systemd-run # does not support return stderr, stdout, and return code directly # find way how to do this in better way, machinectl shell is not possible # https://github.com/systemd/systemd/issues/5879 # https://github.com/systemd/systemd/issues/5878 bashworkaround = [ "/bin/bash", "-c", "({comm})>{path}.stdout 2>{path}.stderr".format( comm=" ".join(command), path=lpath)] whole_cmd = ["systemd-run"] + opts + bashworkaround comout['command'] = command comout['return_code'] = run_cmd(whole_cmd, **internalkw) or 0 if not internal_background: if not self._run_systemdrun_decide(): comout['return_code'] = self._systemctl_wait_until_finish( self.name, unit_name) if self.is_running(): self.copy_from( "{pin}.stdout".format( pin=lpath), "{pin}.stdout".format( pin=lpath)) with open("{pin}.stdout".format(pin=lpath)) as f: comout['stdout'] = f.read() self.copy_from( "{pin}.stderr".format( pin=lpath), "{pin}.stderr".format( pin=lpath)) with open("{pin}.stderr".format(pin=lpath)) as f: comout['stderr'] = f.read() logger.debug(comout) if not original_ignore_st and comout['return_code'] != 0: raise subprocess.CalledProcessError(comout['command'], comout) if return_full_dict: return comout if original_return_st: return comout['stdout'] else: return comout['return_code']
python
def run_systemdrun( self, command, internal_background=False, return_full_dict=False, **kwargs): """ execute command via systemd-run inside container :param command: list of command params :param internal_background: not used now :param kwargs: pass params to subprocess :return: dict with result """ internalkw = deepcopy(kwargs) or {} original_ignore_st = internalkw.get("ignore_status", False) original_return_st = internalkw.get("return_output", False) internalkw["ignore_status"] = True internalkw["return_output"] = False unit_name = constants.CONU_ARTIFACT_TAG + "unit_" + random_str() opts = ["-M", self.name, "--unit", unit_name] lpath = "/var/tmp/{}".format(unit_name) comout = {} if self._run_systemdrun_decide(): add_wait_var = "--wait" else: # keep service exist after it finish, to be able to read exit code add_wait_var = "-r" if internal_background: add_wait_var = "" if add_wait_var: opts.append(add_wait_var) # TODO: behave move similar to run_cmd function, unable to work with clean subprocess objects because systemd-run # does not support return stderr, stdout, and return code directly # find way how to do this in better way, machinectl shell is not possible # https://github.com/systemd/systemd/issues/5879 # https://github.com/systemd/systemd/issues/5878 bashworkaround = [ "/bin/bash", "-c", "({comm})>{path}.stdout 2>{path}.stderr".format( comm=" ".join(command), path=lpath)] whole_cmd = ["systemd-run"] + opts + bashworkaround comout['command'] = command comout['return_code'] = run_cmd(whole_cmd, **internalkw) or 0 if not internal_background: if not self._run_systemdrun_decide(): comout['return_code'] = self._systemctl_wait_until_finish( self.name, unit_name) if self.is_running(): self.copy_from( "{pin}.stdout".format( pin=lpath), "{pin}.stdout".format( pin=lpath)) with open("{pin}.stdout".format(pin=lpath)) as f: comout['stdout'] = f.read() self.copy_from( "{pin}.stderr".format( pin=lpath), "{pin}.stderr".format( pin=lpath)) with open("{pin}.stderr".format(pin=lpath)) as f: comout['stderr'] = f.read() logger.debug(comout) if not original_ignore_st and comout['return_code'] != 0: raise subprocess.CalledProcessError(comout['command'], comout) if return_full_dict: return comout if original_return_st: return comout['stdout'] else: return comout['return_code']
[ "def", "run_systemdrun", "(", "self", ",", "command", ",", "internal_background", "=", "False", ",", "return_full_dict", "=", "False", ",", "*", "*", "kwargs", ")", ":", "internalkw", "=", "deepcopy", "(", "kwargs", ")", "or", "{", "}", "original_ignore_st", "=", "internalkw", ".", "get", "(", "\"ignore_status\"", ",", "False", ")", "original_return_st", "=", "internalkw", ".", "get", "(", "\"return_output\"", ",", "False", ")", "internalkw", "[", "\"ignore_status\"", "]", "=", "True", "internalkw", "[", "\"return_output\"", "]", "=", "False", "unit_name", "=", "constants", ".", "CONU_ARTIFACT_TAG", "+", "\"unit_\"", "+", "random_str", "(", ")", "opts", "=", "[", "\"-M\"", ",", "self", ".", "name", ",", "\"--unit\"", ",", "unit_name", "]", "lpath", "=", "\"/var/tmp/{}\"", ".", "format", "(", "unit_name", ")", "comout", "=", "{", "}", "if", "self", ".", "_run_systemdrun_decide", "(", ")", ":", "add_wait_var", "=", "\"--wait\"", "else", ":", "# keep service exist after it finish, to be able to read exit code", "add_wait_var", "=", "\"-r\"", "if", "internal_background", ":", "add_wait_var", "=", "\"\"", "if", "add_wait_var", ":", "opts", ".", "append", "(", "add_wait_var", ")", "# TODO: behave move similar to run_cmd function, unable to work with clean subprocess objects because systemd-run", "# does not support return stderr, stdout, and return code directly", "# find way how to do this in better way, machinectl shell is not possible", "# https://github.com/systemd/systemd/issues/5879", "# https://github.com/systemd/systemd/issues/5878", "bashworkaround", "=", "[", "\"/bin/bash\"", ",", "\"-c\"", ",", "\"({comm})>{path}.stdout 2>{path}.stderr\"", ".", "format", "(", "comm", "=", "\" \"", ".", "join", "(", "command", ")", ",", "path", "=", "lpath", ")", "]", "whole_cmd", "=", "[", "\"systemd-run\"", "]", "+", "opts", "+", "bashworkaround", "comout", "[", "'command'", "]", "=", "command", "comout", "[", "'return_code'", "]", "=", "run_cmd", "(", "whole_cmd", ",", "*", "*", "internalkw", ")", "or", "0", "if", "not", "internal_background", ":", "if", "not", "self", ".", "_run_systemdrun_decide", "(", ")", ":", "comout", "[", "'return_code'", "]", "=", "self", ".", "_systemctl_wait_until_finish", "(", "self", ".", "name", ",", "unit_name", ")", "if", "self", ".", "is_running", "(", ")", ":", "self", ".", "copy_from", "(", "\"{pin}.stdout\"", ".", "format", "(", "pin", "=", "lpath", ")", ",", "\"{pin}.stdout\"", ".", "format", "(", "pin", "=", "lpath", ")", ")", "with", "open", "(", "\"{pin}.stdout\"", ".", "format", "(", "pin", "=", "lpath", ")", ")", "as", "f", ":", "comout", "[", "'stdout'", "]", "=", "f", ".", "read", "(", ")", "self", ".", "copy_from", "(", "\"{pin}.stderr\"", ".", "format", "(", "pin", "=", "lpath", ")", ",", "\"{pin}.stderr\"", ".", "format", "(", "pin", "=", "lpath", ")", ")", "with", "open", "(", "\"{pin}.stderr\"", ".", "format", "(", "pin", "=", "lpath", ")", ")", "as", "f", ":", "comout", "[", "'stderr'", "]", "=", "f", ".", "read", "(", ")", "logger", ".", "debug", "(", "comout", ")", "if", "not", "original_ignore_st", "and", "comout", "[", "'return_code'", "]", "!=", "0", ":", "raise", "subprocess", ".", "CalledProcessError", "(", "comout", "[", "'command'", "]", ",", "comout", ")", "if", "return_full_dict", ":", "return", "comout", "if", "original_return_st", ":", "return", "comout", "[", "'stdout'", "]", "else", ":", "return", "comout", "[", "'return_code'", "]" ]
execute command via systemd-run inside container :param command: list of command params :param internal_background: not used now :param kwargs: pass params to subprocess :return: dict with result
[ "execute", "command", "via", "systemd", "-", "run", "inside", "container" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/nspawn/container.py#L306-L374
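A hedged usage sketch: assuming `container` is an already-booted NspawnContainer obtained from conu's nspawn backend (its construction is not part of these records), run_systemdrun executes a command inside it through systemd-run and can return the full result dictionary:

# `container` is assumed to be a running NspawnContainer instance
result = container.run_systemdrun(["cat", "/etc/os-release"], return_full_dict=True)
print(result["return_code"])   # exit status of the command
print(result["stdout"])        # captured via the /var/tmp/<unit>.stdout workaround above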
10,257
user-cont/conu
conu/backend/nspawn/container.py
NspawnContainer._wait_for_machine_booted
def _wait_for_machine_booted(name, suffictinet_texts=None): """ Internal method wait until machine is ready, in common case means there is running systemd-logind :param name: str with machine name :param suffictinet_texts: alternative text to check in output :return: True or exception """ # TODO: rewrite it using probes module in utils suffictinet_texts = suffictinet_texts or ["systemd-logind"] # optionally use: "Unit: machine" for foo in range(constants.DEFAULT_RETRYTIMEOUT): time.sleep(constants.DEFAULT_SLEEP) out = run_cmd( ["machinectl", "--no-pager", "status", name], ignore_status=True, return_output=True) for restr in suffictinet_texts: if restr in out: time.sleep(constants.DEFAULT_SLEEP) return True raise ConuException( "Unable to start machine %s within %d (machinectl status command dos not contain %s)" % (name, constants.DEFAULT_RETRYTIMEOUT, suffictinet_texts))
python
def _wait_for_machine_booted(name, suffictinet_texts=None): """ Internal method wait until machine is ready, in common case means there is running systemd-logind :param name: str with machine name :param suffictinet_texts: alternative text to check in output :return: True or exception """ # TODO: rewrite it using probes module in utils suffictinet_texts = suffictinet_texts or ["systemd-logind"] # optionally use: "Unit: machine" for foo in range(constants.DEFAULT_RETRYTIMEOUT): time.sleep(constants.DEFAULT_SLEEP) out = run_cmd( ["machinectl", "--no-pager", "status", name], ignore_status=True, return_output=True) for restr in suffictinet_texts: if restr in out: time.sleep(constants.DEFAULT_SLEEP) return True raise ConuException( "Unable to start machine %s within %d (machinectl status command dos not contain %s)" % (name, constants.DEFAULT_RETRYTIMEOUT, suffictinet_texts))
[ "def", "_wait_for_machine_booted", "(", "name", ",", "suffictinet_texts", "=", "None", ")", ":", "# TODO: rewrite it using probes module in utils", "suffictinet_texts", "=", "suffictinet_texts", "or", "[", "\"systemd-logind\"", "]", "# optionally use: \"Unit: machine\"", "for", "foo", "in", "range", "(", "constants", ".", "DEFAULT_RETRYTIMEOUT", ")", ":", "time", ".", "sleep", "(", "constants", ".", "DEFAULT_SLEEP", ")", "out", "=", "run_cmd", "(", "[", "\"machinectl\"", ",", "\"--no-pager\"", ",", "\"status\"", ",", "name", "]", ",", "ignore_status", "=", "True", ",", "return_output", "=", "True", ")", "for", "restr", "in", "suffictinet_texts", ":", "if", "restr", "in", "out", ":", "time", ".", "sleep", "(", "constants", ".", "DEFAULT_SLEEP", ")", "return", "True", "raise", "ConuException", "(", "\"Unable to start machine %s within %d (machinectl status command dos not contain %s)\"", "%", "(", "name", ",", "constants", ".", "DEFAULT_RETRYTIMEOUT", ",", "suffictinet_texts", ")", ")" ]
Internal method that waits until the machine is ready; in the common case this means systemd-logind is running :param name: str with machine name :param suffictinet_texts: alternative texts to check for in the output :return: True or exception
[ "Internal", "method", "wait", "until", "machine", "is", "ready", "in", "common", "case", "means", "there", "is", "running", "systemd", "-", "logind" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/nspawn/container.py#L408-L431
10,258
user-cont/conu
conu/backend/nspawn/container.py
NspawnContainer._internal_reschedule
def _internal_reschedule(callback, retry=3, sleep_time=constants.DEFAULT_SLEEP): """ workaround method for internal_run_container method It sometimes fails because of Dbus or whatever, so try to start it moretimes :param callback: callback method list :param retry: how many times try to invoke command :param sleep_time: how long wait before subprocess.poll() to find if it failed :return: subprocess object """ for foo in range(retry): container_process = callback[0](callback[1], *callback[2], **callback[3]) time.sleep(sleep_time) container_process.poll() rcode = container_process.returncode if rcode is None: return container_process raise ConuException("Unable to start nspawn container - process failed for {}-times".format(retry))
python
def _internal_reschedule(callback, retry=3, sleep_time=constants.DEFAULT_SLEEP): """ workaround method for internal_run_container method It sometimes fails because of Dbus or whatever, so try to start it moretimes :param callback: callback method list :param retry: how many times try to invoke command :param sleep_time: how long wait before subprocess.poll() to find if it failed :return: subprocess object """ for foo in range(retry): container_process = callback[0](callback[1], *callback[2], **callback[3]) time.sleep(sleep_time) container_process.poll() rcode = container_process.returncode if rcode is None: return container_process raise ConuException("Unable to start nspawn container - process failed for {}-times".format(retry))
[ "def", "_internal_reschedule", "(", "callback", ",", "retry", "=", "3", ",", "sleep_time", "=", "constants", ".", "DEFAULT_SLEEP", ")", ":", "for", "foo", "in", "range", "(", "retry", ")", ":", "container_process", "=", "callback", "[", "0", "]", "(", "callback", "[", "1", "]", ",", "*", "callback", "[", "2", "]", ",", "*", "*", "callback", "[", "3", "]", ")", "time", ".", "sleep", "(", "sleep_time", ")", "container_process", ".", "poll", "(", ")", "rcode", "=", "container_process", ".", "returncode", "if", "rcode", "is", "None", ":", "return", "container_process", "raise", "ConuException", "(", "\"Unable to start nspawn container - process failed for {}-times\"", ".", "format", "(", "retry", ")", ")" ]
Workaround method for the internal_run_container method. It sometimes fails (e.g. because of DBus), so try to start it several times :param callback: callback method list :param retry: how many times to try to invoke the command :param sleep_time: how long to wait before subprocess.poll() to find out whether it failed :return: subprocess object
[ "workaround", "method", "for", "internal_run_container", "method", "It", "sometimes", "fails", "because", "of", "Dbus", "or", "whatever", "so", "try", "to", "start", "it", "moretimes" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/nspawn/container.py#L434-L451
10,259
user-cont/conu
conu/backend/nspawn/container.py
NspawnContainer.internal_run_container
def internal_run_container(name, callback_method, foreground=False): """ Internal method what runs container process :param name: str - name of container :param callback_method: list - how to invoke container :param foreground: bool run in background by default :return: suprocess instance """ if not foreground: logger.info("Stating machine (boot nspawn container) {}".format(name)) # wait until machine is booted when running at background, unable to execute commands without logind # in running container nspawn_process = NspawnContainer._internal_reschedule(callback_method) NspawnContainer._wait_for_machine_booted(name) logger.info("machine: %s starting finished" % name) return nspawn_process else: logger.info("Stating machine (return process) {}".format(name)) return callback_method[0](callback_method[1], *callback_method[2], **callback_method[3])
python
def internal_run_container(name, callback_method, foreground=False): """ Internal method what runs container process :param name: str - name of container :param callback_method: list - how to invoke container :param foreground: bool run in background by default :return: suprocess instance """ if not foreground: logger.info("Stating machine (boot nspawn container) {}".format(name)) # wait until machine is booted when running at background, unable to execute commands without logind # in running container nspawn_process = NspawnContainer._internal_reschedule(callback_method) NspawnContainer._wait_for_machine_booted(name) logger.info("machine: %s starting finished" % name) return nspawn_process else: logger.info("Stating machine (return process) {}".format(name)) return callback_method[0](callback_method[1], *callback_method[2], **callback_method[3])
[ "def", "internal_run_container", "(", "name", ",", "callback_method", ",", "foreground", "=", "False", ")", ":", "if", "not", "foreground", ":", "logger", ".", "info", "(", "\"Stating machine (boot nspawn container) {}\"", ".", "format", "(", "name", ")", ")", "# wait until machine is booted when running at background, unable to execute commands without logind", "# in running container", "nspawn_process", "=", "NspawnContainer", ".", "_internal_reschedule", "(", "callback_method", ")", "NspawnContainer", ".", "_wait_for_machine_booted", "(", "name", ")", "logger", ".", "info", "(", "\"machine: %s starting finished\"", "%", "name", ")", "return", "nspawn_process", "else", ":", "logger", ".", "info", "(", "\"Stating machine (return process) {}\"", ".", "format", "(", "name", ")", ")", "return", "callback_method", "[", "0", "]", "(", "callback_method", "[", "1", "]", ",", "*", "callback_method", "[", "2", "]", ",", "*", "*", "callback_method", "[", "3", "]", ")" ]
Internal method that runs the container process :param name: str - name of container :param callback_method: list - how to invoke the container :param foreground: bool, run in the background by default :return: subprocess instance
[ "Internal", "method", "what", "runs", "container", "process" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/nspawn/container.py#L455-L474
10,260
user-cont/conu
conu/helpers/docker_backend.py
get_container_output
def get_container_output(backend, image_name, command, image_tag="latest", additional_opts=None): """ Create a throw-away container based on provided image and tag, run the supplied command in it and return output. The container is stopped and removed after it exits. :param backend: instance of DockerBackend :param image_name: str, name of the container image :param command: list of str, command to run in the container :param image_tag: str, container image tag, defaults to "latest" :param additional_opts: list of str, by default this function creates the container using docker binary and run command; with this argument you can supply addition options to the "docker run" invocation :return: str (unicode), output of the container """ image = backend.ImageClass(image_name, tag=image_tag) # FIXME: use run_via_api and make this a generic function c = image.run_via_binary(DockerRunBuilder(command=command, additional_opts=additional_opts)) try: c.wait() return c.logs_unicode() finally: c.stop() c.wait() c.delete()
python
def get_container_output(backend, image_name, command, image_tag="latest", additional_opts=None): """ Create a throw-away container based on provided image and tag, run the supplied command in it and return output. The container is stopped and removed after it exits. :param backend: instance of DockerBackend :param image_name: str, name of the container image :param command: list of str, command to run in the container :param image_tag: str, container image tag, defaults to "latest" :param additional_opts: list of str, by default this function creates the container using docker binary and run command; with this argument you can supply addition options to the "docker run" invocation :return: str (unicode), output of the container """ image = backend.ImageClass(image_name, tag=image_tag) # FIXME: use run_via_api and make this a generic function c = image.run_via_binary(DockerRunBuilder(command=command, additional_opts=additional_opts)) try: c.wait() return c.logs_unicode() finally: c.stop() c.wait() c.delete()
[ "def", "get_container_output", "(", "backend", ",", "image_name", ",", "command", ",", "image_tag", "=", "\"latest\"", ",", "additional_opts", "=", "None", ")", ":", "image", "=", "backend", ".", "ImageClass", "(", "image_name", ",", "tag", "=", "image_tag", ")", "# FIXME: use run_via_api and make this a generic function", "c", "=", "image", ".", "run_via_binary", "(", "DockerRunBuilder", "(", "command", "=", "command", ",", "additional_opts", "=", "additional_opts", ")", ")", "try", ":", "c", ".", "wait", "(", ")", "return", "c", ".", "logs_unicode", "(", ")", "finally", ":", "c", ".", "stop", "(", ")", "c", ".", "wait", "(", ")", "c", ".", "delete", "(", ")" ]
Create a throw-away container based on provided image and tag, run the supplied command in it and return output. The container is stopped and removed after it exits. :param backend: instance of DockerBackend :param image_name: str, name of the container image :param command: list of str, command to run in the container :param image_tag: str, container image tag, defaults to "latest" :param additional_opts: list of str, by default this function creates the container using docker binary and run command; with this argument you can supply addition options to the "docker run" invocation :return: str (unicode), output of the container
[ "Create", "a", "throw", "-", "away", "container", "based", "on", "provided", "image", "and", "tag", "run", "the", "supplied", "command", "in", "it", "and", "return", "output", ".", "The", "container", "is", "stopped", "and", "removed", "after", "it", "exits", "." ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/helpers/docker_backend.py#L4-L28
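A usage sketch for get_container_output, assuming DockerBackend is importable from the top-level conu package and usable as a context manager (both are conventions of the project not shown in these records):

from conu import DockerBackend  # assumed public import path
from conu.helpers.docker_backend import get_container_output

with DockerBackend() as backend:
    # runs the command in a throw-away container and cleans it up afterwards
    out = get_container_output(backend, "registry.fedoraproject.org/fedora",
                               ["cat", "/etc/os-release"], image_tag="30")
    print(out)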
10,261
user-cont/conu
conu/backend/docker/image.py
DockerImage.pull
def pull(self): """ Pull this image from registry. Raises an exception if the image is not found in the registry. :return: None """ for json_e in self.d.pull(repository=self.name, tag=self.tag, stream=True, decode=True): logger.debug(json_e) status = graceful_get(json_e, "status") if status: logger.info(status) else: error = graceful_get(json_e, "error") logger.error(status) raise ConuException("There was an error while pulling the image %s: %s", self.name, error) self.using_transport(SkopeoTransport.DOCKER_DAEMON)
python
def pull(self): """ Pull this image from registry. Raises an exception if the image is not found in the registry. :return: None """ for json_e in self.d.pull(repository=self.name, tag=self.tag, stream=True, decode=True): logger.debug(json_e) status = graceful_get(json_e, "status") if status: logger.info(status) else: error = graceful_get(json_e, "error") logger.error(status) raise ConuException("There was an error while pulling the image %s: %s", self.name, error) self.using_transport(SkopeoTransport.DOCKER_DAEMON)
[ "def", "pull", "(", "self", ")", ":", "for", "json_e", "in", "self", ".", "d", ".", "pull", "(", "repository", "=", "self", ".", "name", ",", "tag", "=", "self", ".", "tag", ",", "stream", "=", "True", ",", "decode", "=", "True", ")", ":", "logger", ".", "debug", "(", "json_e", ")", "status", "=", "graceful_get", "(", "json_e", ",", "\"status\"", ")", "if", "status", ":", "logger", ".", "info", "(", "status", ")", "else", ":", "error", "=", "graceful_get", "(", "json_e", ",", "\"error\"", ")", "logger", ".", "error", "(", "status", ")", "raise", "ConuException", "(", "\"There was an error while pulling the image %s: %s\"", ",", "self", ".", "name", ",", "error", ")", "self", ".", "using_transport", "(", "SkopeoTransport", ".", "DOCKER_DAEMON", ")" ]
Pull this image from registry. Raises an exception if the image is not found in the registry. :return: None
[ "Pull", "this", "image", "from", "registry", ".", "Raises", "an", "exception", "if", "the", "image", "is", "not", "found", "in", "the", "registry", "." ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/docker/image.py#L185-L202
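A minimal sketch of pulling an image without pulling it at construction time (DockerImagePullPolicy.NEVER is the only policy value confirmed by these records):

from conu.backend.docker.image import DockerImage, DockerImagePullPolicy

image = DockerImage("busybox", tag="latest", pull_policy=DockerImagePullPolicy.NEVER)
image.pull()  # streams progress to the logger; raises ConuException on error
# after a successful pull the image transport is switched to docker-daemon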
10,262
user-cont/conu
conu/backend/docker/image.py
DockerImage.using_transport
def using_transport(self, transport=None, path=None, logs=True): """ change used transport :param transport: from where will be this image copied :param path in filesystem :param logs enable/disable :return: self """ if not transport: return self if self.transport == transport and self.path == path: return self path_required = [SkopeoTransport.DIRECTORY, SkopeoTransport.DOCKER_ARCHIVE, SkopeoTransport.OCI] if transport in path_required: if not path and logs: logging.debug("path not provided, temporary path was used") self.path = self.mount(path).mount_point elif transport == SkopeoTransport.OSTREE: if path and not os.path.isabs(path): raise ConuException("Path '", path, "' for OSTree transport is not absolute") if not path and logs: logging.debug("path not provided, default /ostree/repo path was used") self.path = path else: if path and logs: logging.warning("path %s was ignored!", path) self.path = None self.transport = transport return self
python
def using_transport(self, transport=None, path=None, logs=True): """ change used transport :param transport: from where will be this image copied :param path in filesystem :param logs enable/disable :return: self """ if not transport: return self if self.transport == transport and self.path == path: return self path_required = [SkopeoTransport.DIRECTORY, SkopeoTransport.DOCKER_ARCHIVE, SkopeoTransport.OCI] if transport in path_required: if not path and logs: logging.debug("path not provided, temporary path was used") self.path = self.mount(path).mount_point elif transport == SkopeoTransport.OSTREE: if path and not os.path.isabs(path): raise ConuException("Path '", path, "' for OSTree transport is not absolute") if not path and logs: logging.debug("path not provided, default /ostree/repo path was used") self.path = path else: if path and logs: logging.warning("path %s was ignored!", path) self.path = None self.transport = transport return self
[ "def", "using_transport", "(", "self", ",", "transport", "=", "None", ",", "path", "=", "None", ",", "logs", "=", "True", ")", ":", "if", "not", "transport", ":", "return", "self", "if", "self", ".", "transport", "==", "transport", "and", "self", ".", "path", "==", "path", ":", "return", "self", "path_required", "=", "[", "SkopeoTransport", ".", "DIRECTORY", ",", "SkopeoTransport", ".", "DOCKER_ARCHIVE", ",", "SkopeoTransport", ".", "OCI", "]", "if", "transport", "in", "path_required", ":", "if", "not", "path", "and", "logs", ":", "logging", ".", "debug", "(", "\"path not provided, temporary path was used\"", ")", "self", ".", "path", "=", "self", ".", "mount", "(", "path", ")", ".", "mount_point", "elif", "transport", "==", "SkopeoTransport", ".", "OSTREE", ":", "if", "path", "and", "not", "os", ".", "path", ".", "isabs", "(", "path", ")", ":", "raise", "ConuException", "(", "\"Path '\"", ",", "path", ",", "\"' for OSTree transport is not absolute\"", ")", "if", "not", "path", "and", "logs", ":", "logging", ".", "debug", "(", "\"path not provided, default /ostree/repo path was used\"", ")", "self", ".", "path", "=", "path", "else", ":", "if", "path", "and", "logs", ":", "logging", ".", "warning", "(", "\"path %s was ignored!\"", ",", "path", ")", "self", ".", "path", "=", "None", "self", ".", "transport", "=", "transport", "return", "self" ]
change the transport in use :param transport: transport from which this image will be copied :param path: path in the filesystem :param logs: enable/disable logging :return: self
[ "change", "used", "transport" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/docker/image.py#L230-L264
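A sketch of switching the transport; the import location of SkopeoTransport is an assumption (it is only referenced, not defined, in the files shown here):

from conu.backend.docker.image import DockerImage, DockerImagePullPolicy
# SkopeoTransport's module is assumed; adjust the import to the actual location
from conu.backend.docker.skopeo import SkopeoTransport

image = DockerImage("fedora", tag="30", pull_policy=DockerImagePullPolicy.NEVER)
image.using_transport(SkopeoTransport.DOCKER_DAEMON)   # returns self, no path needed
# OSTREE requires an absolute path, otherwise ConuException is raised:
image.using_transport(SkopeoTransport.OSTREE, path="/ostree/repo")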
10,263
user-cont/conu
conu/backend/docker/image.py
DockerImage.save_to
def save_to(self, image): """ Save this image to another DockerImage :param image: DockerImage :return: """ if not isinstance(image, self.__class__): raise ConuException("Invalid target image type", type(image)) self.copy(image.name, image.tag, target_transport=image.transport, target_path=image.path, logs=False)
python
def save_to(self, image): """ Save this image to another DockerImage :param image: DockerImage :return: """ if not isinstance(image, self.__class__): raise ConuException("Invalid target image type", type(image)) self.copy(image.name, image.tag, target_transport=image.transport, target_path=image.path, logs=False)
[ "def", "save_to", "(", "self", ",", "image", ")", ":", "if", "not", "isinstance", "(", "image", ",", "self", ".", "__class__", ")", ":", "raise", "ConuException", "(", "\"Invalid target image type\"", ",", "type", "(", "image", ")", ")", "self", ".", "copy", "(", "image", ".", "name", ",", "image", ".", "tag", ",", "target_transport", "=", "image", ".", "transport", ",", "target_path", "=", "image", ".", "path", ",", "logs", "=", "False", ")" ]
Save this image to another DockerImage :param image: DockerImage :return:
[ "Save", "this", "image", "to", "another", "DockerImage" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/docker/image.py#L266-L276
10,264
user-cont/conu
conu/backend/docker/image.py
DockerImage.load_from
def load_from(self, image): """ Load from another DockerImage to this one :param image: :return: """ if not isinstance(image, self.__class__): raise ConuException("Invalid source image type", type(image)) image.save_to(self)
python
def load_from(self, image): """ Load from another DockerImage to this one :param image: :return: """ if not isinstance(image, self.__class__): raise ConuException("Invalid source image type", type(image)) image.save_to(self)
[ "def", "load_from", "(", "self", ",", "image", ")", ":", "if", "not", "isinstance", "(", "image", ",", "self", ".", "__class__", ")", ":", "raise", "ConuException", "(", "\"Invalid source image type\"", ",", "type", "(", "image", ")", ")", "image", ".", "save_to", "(", "self", ")" ]
Load from another DockerImage to this one :param image: :return:
[ "Load", "from", "another", "DockerImage", "to", "this", "one" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/docker/image.py#L278-L286
10,265
user-cont/conu
conu/backend/docker/image.py
DockerImage.skopeo_pull
def skopeo_pull(self): """ Pull image from Docker to local Docker daemon using skopeo :return: pulled image """ return self.copy(self.name, self.tag, SkopeoTransport.DOCKER, SkopeoTransport.DOCKER_DAEMON)\ .using_transport(SkopeoTransport.DOCKER_DAEMON)
python
def skopeo_pull(self): """ Pull image from Docker to local Docker daemon using skopeo :return: pulled image """ return self.copy(self.name, self.tag, SkopeoTransport.DOCKER, SkopeoTransport.DOCKER_DAEMON)\ .using_transport(SkopeoTransport.DOCKER_DAEMON)
[ "def", "skopeo_pull", "(", "self", ")", ":", "return", "self", ".", "copy", "(", "self", ".", "name", ",", "self", ".", "tag", ",", "SkopeoTransport", ".", "DOCKER", ",", "SkopeoTransport", ".", "DOCKER_DAEMON", ")", ".", "using_transport", "(", "SkopeoTransport", ".", "DOCKER_DAEMON", ")" ]
Pull image from Docker to local Docker daemon using skopeo :return: pulled image
[ "Pull", "image", "from", "Docker", "to", "local", "Docker", "daemon", "using", "skopeo" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/docker/image.py#L288-L295
10,266
user-cont/conu
conu/backend/docker/image.py
DockerImage.skopeo_push
def skopeo_push(self, repository=None, tag=None): """ Push image from Docker daemon to Docker using skopeo :param repository: repository to be pushed to :param tag: tag :return: pushed image """ return self.copy(repository, tag, SkopeoTransport.DOCKER_DAEMON, SkopeoTransport.DOCKER)\ .using_transport(SkopeoTransport.DOCKER)
python
def skopeo_push(self, repository=None, tag=None): """ Push image from Docker daemon to Docker using skopeo :param repository: repository to be pushed to :param tag: tag :return: pushed image """ return self.copy(repository, tag, SkopeoTransport.DOCKER_DAEMON, SkopeoTransport.DOCKER)\ .using_transport(SkopeoTransport.DOCKER)
[ "def", "skopeo_push", "(", "self", ",", "repository", "=", "None", ",", "tag", "=", "None", ")", ":", "return", "self", ".", "copy", "(", "repository", ",", "tag", ",", "SkopeoTransport", ".", "DOCKER_DAEMON", ",", "SkopeoTransport", ".", "DOCKER", ")", ".", "using_transport", "(", "SkopeoTransport", ".", "DOCKER", ")" ]
Push image from Docker daemon to Docker using skopeo :param repository: repository to be pushed to :param tag: tag :return: pushed image
[ "Push", "image", "from", "Docker", "daemon", "to", "Docker", "using", "skopeo" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/docker/image.py#L297-L305
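A sketch of pushing a locally built image to a registry with skopeo; the target registry below is hypothetical and the image is assumed to already exist in the local docker daemon:

from conu.backend.docker.image import DockerImage, DockerImagePullPolicy

image = DockerImage("myorg/myimage", tag="1.0",
                    pull_policy=DockerImagePullPolicy.NEVER)
# copies docker-daemon:myorg/myimage:1.0 -> docker://registry.example.com/myorg/myimage:1.0
pushed = image.skopeo_push(repository="registry.example.com/myorg/myimage", tag="1.0")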
10,267
user-cont/conu
conu/backend/docker/image.py
DockerImage.copy
def copy(self, repository=None, tag=None, source_transport=None, target_transport=SkopeoTransport.DOCKER, source_path=None, target_path=None, logs=True): """ Copy this image :param repository to be copied to :param tag :param source_transport Transport :param target_transport Transport :param source_path needed to specify for dir, docker-archive or oci transport :param target_path needed to specify for dir, docker-archive or oci transport :param logs enable/disable logs :return: the new DockerImage """ if not repository: repository = self.name if not tag: tag = self.tag if self.tag else "latest" if target_transport == SkopeoTransport.OSTREE and tag and logs: logging.warning("tag was ignored") target = (DockerImage(repository, tag, pull_policy=DockerImagePullPolicy.NEVER) .using_transport(target_transport, target_path)) self.using_transport(source_transport, source_path) try: run_cmd(["skopeo", "copy", transport_param(self), transport_param(target)]) except subprocess.CalledProcessError: raise ConuException("There was an error while copying repository", self.name) return target
python
def copy(self, repository=None, tag=None, source_transport=None, target_transport=SkopeoTransport.DOCKER, source_path=None, target_path=None, logs=True): """ Copy this image :param repository to be copied to :param tag :param source_transport Transport :param target_transport Transport :param source_path needed to specify for dir, docker-archive or oci transport :param target_path needed to specify for dir, docker-archive or oci transport :param logs enable/disable logs :return: the new DockerImage """ if not repository: repository = self.name if not tag: tag = self.tag if self.tag else "latest" if target_transport == SkopeoTransport.OSTREE and tag and logs: logging.warning("tag was ignored") target = (DockerImage(repository, tag, pull_policy=DockerImagePullPolicy.NEVER) .using_transport(target_transport, target_path)) self.using_transport(source_transport, source_path) try: run_cmd(["skopeo", "copy", transport_param(self), transport_param(target)]) except subprocess.CalledProcessError: raise ConuException("There was an error while copying repository", self.name) return target
[ "def", "copy", "(", "self", ",", "repository", "=", "None", ",", "tag", "=", "None", ",", "source_transport", "=", "None", ",", "target_transport", "=", "SkopeoTransport", ".", "DOCKER", ",", "source_path", "=", "None", ",", "target_path", "=", "None", ",", "logs", "=", "True", ")", ":", "if", "not", "repository", ":", "repository", "=", "self", ".", "name", "if", "not", "tag", ":", "tag", "=", "self", ".", "tag", "if", "self", ".", "tag", "else", "\"latest\"", "if", "target_transport", "==", "SkopeoTransport", ".", "OSTREE", "and", "tag", "and", "logs", ":", "logging", ".", "warning", "(", "\"tag was ignored\"", ")", "target", "=", "(", "DockerImage", "(", "repository", ",", "tag", ",", "pull_policy", "=", "DockerImagePullPolicy", ".", "NEVER", ")", ".", "using_transport", "(", "target_transport", ",", "target_path", ")", ")", "self", ".", "using_transport", "(", "source_transport", ",", "source_path", ")", "try", ":", "run_cmd", "(", "[", "\"skopeo\"", ",", "\"copy\"", ",", "transport_param", "(", "self", ")", ",", "transport_param", "(", "target", ")", "]", ")", "except", "subprocess", ".", "CalledProcessError", ":", "raise", "ConuException", "(", "\"There was an error while copying repository\"", ",", "self", ".", "name", ")", "return", "target" ]
Copy this image :param repository: to be copied to :param tag: tag :param source_transport: Transport :param target_transport: Transport :param source_path: needed to specify for dir, docker-archive or oci transport :param target_path: needed to specify for dir, docker-archive or oci transport :param logs: enable/disable logs :return: the new DockerImage
[ "Copy", "this", "image" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/docker/image.py#L307-L340
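copy underpins both skopeo_pull and skopeo_push; a hedged sketch of copying an image to another repository using the default docker target transport (the registry name is hypothetical):

from conu.backend.docker.image import DockerImage, DockerImagePullPolicy

image = DockerImage("fedora", tag="30", pull_policy=DockerImagePullPolicy.NEVER)
# source transport defaults to the image's current one, target defaults to docker://
target = image.copy(repository="registry.example.com/mirror/fedora", tag="30")
# `target` is a new DockerImage pointing at the copied reference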
10,268
user-cont/conu
conu/backend/docker/image.py
DockerImage.tag_image
def tag_image(self, repository=None, tag=None): """ Apply additional tags to the image or even add a new name :param repository: str, see constructor :param tag: str, see constructor :return: instance of DockerImage """ if not (repository or tag): raise ValueError("You need to specify either repository or tag.") r = repository or self.name t = "latest" if not tag else tag self.d.tag(image=self.get_full_name(), repository=r, tag=t) return DockerImage(r, tag=t)
python
def tag_image(self, repository=None, tag=None): """ Apply additional tags to the image or even add a new name :param repository: str, see constructor :param tag: str, see constructor :return: instance of DockerImage """ if not (repository or tag): raise ValueError("You need to specify either repository or tag.") r = repository or self.name t = "latest" if not tag else tag self.d.tag(image=self.get_full_name(), repository=r, tag=t) return DockerImage(r, tag=t)
[ "def", "tag_image", "(", "self", ",", "repository", "=", "None", ",", "tag", "=", "None", ")", ":", "if", "not", "(", "repository", "or", "tag", ")", ":", "raise", "ValueError", "(", "\"You need to specify either repository or tag.\"", ")", "r", "=", "repository", "or", "self", ".", "name", "t", "=", "\"latest\"", "if", "not", "tag", "else", "tag", "self", ".", "d", ".", "tag", "(", "image", "=", "self", ".", "get_full_name", "(", ")", ",", "repository", "=", "r", ",", "tag", "=", "t", ")", "return", "DockerImage", "(", "r", ",", "tag", "=", "t", ")" ]
Apply additional tags to the image or even add a new name :param repository: str, see constructor :param tag: str, see constructor :return: instance of DockerImage
[ "Apply", "additional", "tags", "to", "the", "image", "or", "even", "add", "a", "new", "name" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/docker/image.py#L342-L355
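A short sketch of tag_image, which only talks to the local docker daemon; it assumes busybox:latest is already present locally:

from conu.backend.docker.image import DockerImage, DockerImagePullPolicy

image = DockerImage("busybox", tag="latest", pull_policy=DockerImagePullPolicy.NEVER)
copy_ref = image.tag_image(repository="busybox-backup", tag="v1")
print(copy_ref.name, copy_ref.tag)  # -> busybox-backup v1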
10,269
user-cont/conu
conu/backend/docker/image.py
DockerImage.inspect
def inspect(self, refresh=True): """ provide metadata about the image; flip refresh=True if cached metadata are enough :param refresh: bool, update the metadata with up to date content :return: dict """ if refresh or not self._inspect_data: identifier = self._id or self.get_full_name() if not identifier: raise ConuException("This image does not have a valid identifier.") self._inspect_data = self.d.inspect_image(identifier) return self._inspect_data
python
def inspect(self, refresh=True): """ provide metadata about the image; flip refresh=True if cached metadata are enough :param refresh: bool, update the metadata with up to date content :return: dict """ if refresh or not self._inspect_data: identifier = self._id or self.get_full_name() if not identifier: raise ConuException("This image does not have a valid identifier.") self._inspect_data = self.d.inspect_image(identifier) return self._inspect_data
[ "def", "inspect", "(", "self", ",", "refresh", "=", "True", ")", ":", "if", "refresh", "or", "not", "self", ".", "_inspect_data", ":", "identifier", "=", "self", ".", "_id", "or", "self", ".", "get_full_name", "(", ")", "if", "not", "identifier", ":", "raise", "ConuException", "(", "\"This image does not have a valid identifier.\"", ")", "self", ".", "_inspect_data", "=", "self", ".", "d", ".", "inspect_image", "(", "identifier", ")", "return", "self", ".", "_inspect_data" ]
provide metadata about the image; set refresh=False if cached metadata are enough :param refresh: bool, update the metadata with up to date content :return: dict
[ "provide", "metadata", "about", "the", "image", ";", "flip", "refresh", "=", "True", "if", "cached", "metadata", "are", "enough" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/docker/image.py#L357-L369
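inspect simply proxies docker inspect for the image; a minimal sketch assuming the image is present locally:

from conu.backend.docker.image import DockerImage, DockerImagePullPolicy

image = DockerImage("busybox", tag="latest", pull_policy=DockerImagePullPolicy.NEVER)
metadata = image.inspect(refresh=True)   # always queries the daemon
print(metadata["Id"])                    # standard `docker inspect` keys
cached = image.inspect(refresh=False)    # reuses the cached dict if available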
10,270
user-cont/conu
conu/backend/docker/image.py
DockerImage.has_pkgs_signed_with
def has_pkgs_signed_with(self, allowed_keys): """ Check signature of packages installed in image. Raises exception when * rpm binary is not installed in image * parsing of rpm fails * there are packages in image that are not signed with one of allowed keys :param allowed_keys: list of allowed keys :return: bool """ if not allowed_keys or not isinstance(allowed_keys, list): raise ConuException("allowed_keys must be a list") command = ['rpm', '-qa', '--qf', '%{name} %{SIGPGP:pgpsig}\n'] cont = self.run_via_binary(command=command) try: out = cont.logs_unicode()[:-1].split('\n') check_signatures(out, allowed_keys) finally: cont.stop() cont.delete() return True
python
def has_pkgs_signed_with(self, allowed_keys): """ Check signature of packages installed in image. Raises exception when * rpm binary is not installed in image * parsing of rpm fails * there are packages in image that are not signed with one of allowed keys :param allowed_keys: list of allowed keys :return: bool """ if not allowed_keys or not isinstance(allowed_keys, list): raise ConuException("allowed_keys must be a list") command = ['rpm', '-qa', '--qf', '%{name} %{SIGPGP:pgpsig}\n'] cont = self.run_via_binary(command=command) try: out = cont.logs_unicode()[:-1].split('\n') check_signatures(out, allowed_keys) finally: cont.stop() cont.delete() return True
[ "def", "has_pkgs_signed_with", "(", "self", ",", "allowed_keys", ")", ":", "if", "not", "allowed_keys", "or", "not", "isinstance", "(", "allowed_keys", ",", "list", ")", ":", "raise", "ConuException", "(", "\"allowed_keys must be a list\"", ")", "command", "=", "[", "'rpm'", ",", "'-qa'", ",", "'--qf'", ",", "'%{name} %{SIGPGP:pgpsig}\\n'", "]", "cont", "=", "self", ".", "run_via_binary", "(", "command", "=", "command", ")", "try", ":", "out", "=", "cont", ".", "logs_unicode", "(", ")", "[", ":", "-", "1", "]", ".", "split", "(", "'\\n'", ")", "check_signatures", "(", "out", ",", "allowed_keys", ")", "finally", ":", "cont", ".", "stop", "(", ")", "cont", ".", "delete", "(", ")", "return", "True" ]
Check signature of packages installed in image. Raises exception when * rpm binary is not installed in image * parsing of rpm fails * there are packages in image that are not signed with one of allowed keys :param allowed_keys: list of allowed keys :return: bool
[ "Check", "signature", "of", "packages", "installed", "in", "image", ".", "Raises", "exception", "when" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/docker/image.py#L639-L662
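Usage sketch for the DockerImage.has_pkgs_signed_with record above. The backend import, image reference and GPG key ID are illustrative assumptions; the exception is caught generically because the concrete ConuException import path is not shown in this record.

from conu import DockerBackend  # assumed top-level export

ALLOWED_KEYS = ["50cb390b3c3359c4"]  # illustrative short key ID

with DockerBackend() as backend:
    image = backend.ImageClass("registry.fedoraproject.org/fedora", tag="29")  # illustrative image
    try:
        image.has_pkgs_signed_with(ALLOWED_KEYS)
        print("every installed RPM is signed with an allowed key")
    except Exception as exc:  # conu raises ConuException / a signature exception here
        print("signature check failed:", exc)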
10,271
user-cont/conu
conu/backend/docker/image.py
DockerImage.build
def build(cls, path, tag=None, dockerfile=None): """ Build the image from the provided dockerfile in path :param path : str, path to the directory containing the Dockerfile :param tag: str, A tag to add to the final image :param dockerfile: str, path within the build context to the Dockerfile :return: instance of DockerImage """ if not path: raise ConuException('Please specify path to the directory containing the Dockerfile') client = get_client() response = [line for line in client.build(path, rm=True, tag=tag, dockerfile=dockerfile, quiet=True)] if not response: raise ConuException('Failed to get ID of image') # The expected output is just one line with image ID if len(response) > 1: raise ConuException('Build failed: ' + str(response)) # get ID from output # b'{"stream":"sha256:39c7bac4e2da37983203df4fcf612a02de9e6f6456a7f3434d1fccbc9ad639a5\\n"}\r\n' response_utf = response[0].decode('utf-8') if response_utf[:11] != '{"stream":"' or response_utf[-6:] != '\\n"}\r\n': raise ConuException('Failed to parse ID from ' + response_utf) image_id = response_utf[11:-6] return cls(None, identifier=image_id)
python
def build(cls, path, tag=None, dockerfile=None): """ Build the image from the provided dockerfile in path :param path : str, path to the directory containing the Dockerfile :param tag: str, A tag to add to the final image :param dockerfile: str, path within the build context to the Dockerfile :return: instance of DockerImage """ if not path: raise ConuException('Please specify path to the directory containing the Dockerfile') client = get_client() response = [line for line in client.build(path, rm=True, tag=tag, dockerfile=dockerfile, quiet=True)] if not response: raise ConuException('Failed to get ID of image') # The expected output is just one line with image ID if len(response) > 1: raise ConuException('Build failed: ' + str(response)) # get ID from output # b'{"stream":"sha256:39c7bac4e2da37983203df4fcf612a02de9e6f6456a7f3434d1fccbc9ad639a5\\n"}\r\n' response_utf = response[0].decode('utf-8') if response_utf[:11] != '{"stream":"' or response_utf[-6:] != '\\n"}\r\n': raise ConuException('Failed to parse ID from ' + response_utf) image_id = response_utf[11:-6] return cls(None, identifier=image_id)
[ "def", "build", "(", "cls", ",", "path", ",", "tag", "=", "None", ",", "dockerfile", "=", "None", ")", ":", "if", "not", "path", ":", "raise", "ConuException", "(", "'Please specify path to the directory containing the Dockerfile'", ")", "client", "=", "get_client", "(", ")", "response", "=", "[", "line", "for", "line", "in", "client", ".", "build", "(", "path", ",", "rm", "=", "True", ",", "tag", "=", "tag", ",", "dockerfile", "=", "dockerfile", ",", "quiet", "=", "True", ")", "]", "if", "not", "response", ":", "raise", "ConuException", "(", "'Failed to get ID of image'", ")", "# The expected output is just one line with image ID", "if", "len", "(", "response", ")", ">", "1", ":", "raise", "ConuException", "(", "'Build failed: '", "+", "str", "(", "response", ")", ")", "# get ID from output", "# b'{\"stream\":\"sha256:39c7bac4e2da37983203df4fcf612a02de9e6f6456a7f3434d1fccbc9ad639a5\\\\n\"}\\r\\n'", "response_utf", "=", "response", "[", "0", "]", ".", "decode", "(", "'utf-8'", ")", "if", "response_utf", "[", ":", "11", "]", "!=", "'{\"stream\":\"'", "or", "response_utf", "[", "-", "6", ":", "]", "!=", "'\\\\n\"}\\r\\n'", ":", "raise", "ConuException", "(", "'Failed to parse ID from '", "+", "response_utf", ")", "image_id", "=", "response_utf", "[", "11", ":", "-", "6", "]", "return", "cls", "(", "None", ",", "identifier", "=", "image_id", ")" ]
Build the image from the provided dockerfile in path :param path : str, path to the directory containing the Dockerfile :param tag: str, A tag to add to the final image :param dockerfile: str, path within the build context to the Dockerfile :return: instance of DockerImage
[ "Build", "the", "image", "from", "the", "provided", "dockerfile", "in", "path" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/docker/image.py#L681-L711
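Usage sketch for the DockerImage.build record above. The import path follows the record's own module path; the build context directory and tag are illustrative, and the directory is expected to contain a Dockerfile.

from conu.backend.docker.image import DockerImage

# build() is a classmethod: it builds ./app quietly and returns a DockerImage for the new ID
image = DockerImage.build("./app", tag="conu-example:latest")  # illustrative path and tag
print(image.inspect()["Id"])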
10,272
user-cont/conu
conu/backend/docker/image.py
DockerImage.layers
def layers(self, rev=True): """ Get list of DockerImage for every layer in image :param rev: get layers rev :return: list of DockerImages """ image_layers = [ DockerImage(None, identifier=x, pull_policy=DockerImagePullPolicy.NEVER) for x in self.get_layer_ids() ] if not rev: image_layers.reverse() return image_layers
python
def layers(self, rev=True): """ Get list of DockerImage for every layer in image :param rev: get layers rev :return: list of DockerImages """ image_layers = [ DockerImage(None, identifier=x, pull_policy=DockerImagePullPolicy.NEVER) for x in self.get_layer_ids() ] if not rev: image_layers.reverse() return image_layers
[ "def", "layers", "(", "self", ",", "rev", "=", "True", ")", ":", "image_layers", "=", "[", "DockerImage", "(", "None", ",", "identifier", "=", "x", ",", "pull_policy", "=", "DockerImagePullPolicy", ".", "NEVER", ")", "for", "x", "in", "self", ".", "get_layer_ids", "(", ")", "]", "if", "not", "rev", ":", "image_layers", ".", "reverse", "(", ")", "return", "image_layers" ]
Get list of DockerImage for every layer in image :param rev: get layers rev :return: list of DockerImages
[ "Get", "list", "of", "DockerImage", "for", "every", "layer", "in", "image" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/docker/image.py#L725-L738
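Usage sketch for the DockerImage.layers record above. The constructor call is an assumption based on how DockerImage is instantiated elsewhere in these records; the image name is illustrative.

from conu.backend.docker.image import DockerImage

image = DockerImage("busybox", tag="latest")  # assumed repository/tag constructor; illustrative image
layer_images = image.layers()                 # one DockerImage object per layer ID
print("layers:", len(layer_images))
for layer in layer_images:
    print(layer)                              # repr of the per-layer DockerImage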
10,273
user-cont/conu
conu/backend/docker/image.py
S2IDockerImage.extend
def extend(self, source, new_image_name, s2i_args=None): """ extend this s2i-enabled image using provided source, raises ConuException if `s2i build` fails :param source: str, source used to extend the image, can be path or url :param new_image_name: str, name of the new, extended image :param s2i_args: list of str, additional options and arguments provided to `s2i build` :return: S2Image instance """ s2i_args = s2i_args or [] c = self._s2i_command(["build"] + s2i_args + [source, self.get_full_name()]) if new_image_name: c.append(new_image_name) try: run_cmd(c) except subprocess.CalledProcessError as ex: raise ConuException("s2i build failed: %s" % ex) return S2IDockerImage(new_image_name)
python
def extend(self, source, new_image_name, s2i_args=None): """ extend this s2i-enabled image using provided source, raises ConuException if `s2i build` fails :param source: str, source used to extend the image, can be path or url :param new_image_name: str, name of the new, extended image :param s2i_args: list of str, additional options and arguments provided to `s2i build` :return: S2Image instance """ s2i_args = s2i_args or [] c = self._s2i_command(["build"] + s2i_args + [source, self.get_full_name()]) if new_image_name: c.append(new_image_name) try: run_cmd(c) except subprocess.CalledProcessError as ex: raise ConuException("s2i build failed: %s" % ex) return S2IDockerImage(new_image_name)
[ "def", "extend", "(", "self", ",", "source", ",", "new_image_name", ",", "s2i_args", "=", "None", ")", ":", "s2i_args", "=", "s2i_args", "or", "[", "]", "c", "=", "self", ".", "_s2i_command", "(", "[", "\"build\"", "]", "+", "s2i_args", "+", "[", "source", ",", "self", ".", "get_full_name", "(", ")", "]", ")", "if", "new_image_name", ":", "c", ".", "append", "(", "new_image_name", ")", "try", ":", "run_cmd", "(", "c", ")", "except", "subprocess", ".", "CalledProcessError", "as", "ex", ":", "raise", "ConuException", "(", "\"s2i build failed: %s\"", "%", "ex", ")", "return", "S2IDockerImage", "(", "new_image_name", ")" ]
extend this s2i-enabled image using provided source, raises ConuException if `s2i build` fails :param source: str, source used to extend the image, can be path or url :param new_image_name: str, name of the new, extended image :param s2i_args: list of str, additional options and arguments provided to `s2i build` :return: S2Image instance
[ "extend", "this", "s2i", "-", "enabled", "image", "using", "provided", "source", "raises", "ConuException", "if", "s2i", "build", "fails" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/docker/image.py#L775-L793
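Usage sketch for the S2IDockerImage.extend record above. The builder image, source repository URL and new image name are illustrative; a working s2i binary is required, and extra s2i options are passed through s2i_args.

from conu.backend.docker.image import S2IDockerImage

builder = S2IDockerImage("centos/python-3-centos7")  # illustrative s2i builder image
app_image = builder.extend(
    "https://github.com/sclorg/django-ex",           # illustrative source repository
    "django-example-app",                            # name of the resulting image
    s2i_args=["--loglevel=2"],
)
print(app_image.get_full_name())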
10,274
user-cont/conu
conu/backend/docker/image.py
S2IDockerImage.usage
def usage(self): """ Provide output of `s2i usage` :return: str """ c = self._s2i_command(["usage", self.get_full_name()]) with open(os.devnull, "w") as fd: process = subprocess.Popen(c, stdout=fd, stderr=subprocess.PIPE) _, output = process.communicate() retcode = process.poll() if retcode: raise ConuException("`s2i usage` failed: %s" % output) return output.decode("utf-8").strip()
python
def usage(self): """ Provide output of `s2i usage` :return: str """ c = self._s2i_command(["usage", self.get_full_name()]) with open(os.devnull, "w") as fd: process = subprocess.Popen(c, stdout=fd, stderr=subprocess.PIPE) _, output = process.communicate() retcode = process.poll() if retcode: raise ConuException("`s2i usage` failed: %s" % output) return output.decode("utf-8").strip()
[ "def", "usage", "(", "self", ")", ":", "c", "=", "self", ".", "_s2i_command", "(", "[", "\"usage\"", ",", "self", ".", "get_full_name", "(", ")", "]", ")", "with", "open", "(", "os", ".", "devnull", ",", "\"w\"", ")", "as", "fd", ":", "process", "=", "subprocess", ".", "Popen", "(", "c", ",", "stdout", "=", "fd", ",", "stderr", "=", "subprocess", ".", "PIPE", ")", "_", ",", "output", "=", "process", ".", "communicate", "(", ")", "retcode", "=", "process", ".", "poll", "(", ")", "if", "retcode", ":", "raise", "ConuException", "(", "\"`s2i usage` failed: %s\"", "%", "output", ")", "return", "output", ".", "decode", "(", "\"utf-8\"", ")", ".", "strip", "(", ")" ]
Provide output of `s2i usage` :return: str
[ "Provide", "output", "of", "s2i", "usage" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/docker/image.py#L795-L808
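Usage sketch for the S2IDockerImage.usage record above; the builder image name is illustrative and the s2i binary must be installed.

from conu.backend.docker.image import S2IDockerImage

builder = S2IDockerImage("centos/python-3-centos7")  # illustrative s2i builder image
print(builder.usage())                               # captured stderr of `s2i usage <image>`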
10,275
user-cont/conu
conu/backend/origin/backend.py
OpenshiftBackend.http_request
def http_request(self, path="/", method="GET", host=None, port=None, json=False, data=None): """ perform a HTTP request :param path: str, path within the request, e.g. "/api/version" :param method: str, HTTP method :param host: str, if None, set to 127.0.0.1 :param port: str or int, if None, set to 8080 :param json: bool, should we expect json? :param data: data to send (can be dict, list, str) :return: dict """ host = host or '127.0.0.1' port = port or 8080 url = get_url(host=host, port=port, path=path) return self.http_session.request(method, url, json=json, data=data)
python
def http_request(self, path="/", method="GET", host=None, port=None, json=False, data=None): """ perform a HTTP request :param path: str, path within the request, e.g. "/api/version" :param method: str, HTTP method :param host: str, if None, set to 127.0.0.1 :param port: str or int, if None, set to 8080 :param json: bool, should we expect json? :param data: data to send (can be dict, list, str) :return: dict """ host = host or '127.0.0.1' port = port or 8080 url = get_url(host=host, port=port, path=path) return self.http_session.request(method, url, json=json, data=data)
[ "def", "http_request", "(", "self", ",", "path", "=", "\"/\"", ",", "method", "=", "\"GET\"", ",", "host", "=", "None", ",", "port", "=", "None", ",", "json", "=", "False", ",", "data", "=", "None", ")", ":", "host", "=", "host", "or", "'127.0.0.1'", "port", "=", "port", "or", "8080", "url", "=", "get_url", "(", "host", "=", "host", ",", "port", "=", "port", ",", "path", "=", "path", ")", "return", "self", ".", "http_session", ".", "request", "(", "method", ",", "url", ",", "json", "=", "json", ",", "data", "=", "data", ")" ]
perform a HTTP request :param path: str, path within the request, e.g. "/api/version" :param method: str, HTTP method :param host: str, if None, set to 127.0.0.1 :param port: str or int, if None, set to 8080 :param json: bool, should we expect json? :param data: data to send (can be dict, list, str) :return: dict
[ "perform", "a", "HTTP", "request" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/origin/backend.py#L62-L79
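Sketch for the OpenshiftBackend.http_request record above. Constructing the backend without arguments and the host/port/path values are assumptions; a reachable OpenShift cluster is required. Note that the method hands back the underlying requests response object, which is what gets inspected here.

from conu.backend.origin.backend import OpenshiftBackend

backend = OpenshiftBackend()  # assumed zero-argument construction
response = backend.http_request(path="/healthz", host="127.0.0.1", port=8443)  # illustrative endpoint
print(response.status_code)
print(response.text)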
10,276
user-cont/conu
conu/backend/nspawn/image.py
NspawnImage.system_requirements
def system_requirements(): """ Check if all necessary packages are installed on system :return: None or raise exception if some tooling is missing """ command_exists("systemd-nspawn", ["systemd-nspawn", "--version"], "Command systemd-nspawn does not seems to be present on your system" "Do you have system with systemd") command_exists( "machinectl", ["machinectl", "--no-pager", "--help"], "Command machinectl does not seems to be present on your system" "Do you have system with systemd") if "Enforcing" in run_cmd(["getenforce"], return_output=True, ignore_status=True): logger.error("Please disable selinux (setenforce 0), selinux blocks some nspawn operations" "This may lead to strange behaviour")
python
def system_requirements(): """ Check if all necessary packages are installed on system :return: None or raise exception if some tooling is missing """ command_exists("systemd-nspawn", ["systemd-nspawn", "--version"], "Command systemd-nspawn does not seems to be present on your system" "Do you have system with systemd") command_exists( "machinectl", ["machinectl", "--no-pager", "--help"], "Command machinectl does not seems to be present on your system" "Do you have system with systemd") if "Enforcing" in run_cmd(["getenforce"], return_output=True, ignore_status=True): logger.error("Please disable selinux (setenforce 0), selinux blocks some nspawn operations" "This may lead to strange behaviour")
[ "def", "system_requirements", "(", ")", ":", "command_exists", "(", "\"systemd-nspawn\"", ",", "[", "\"systemd-nspawn\"", ",", "\"--version\"", "]", ",", "\"Command systemd-nspawn does not seems to be present on your system\"", "\"Do you have system with systemd\"", ")", "command_exists", "(", "\"machinectl\"", ",", "[", "\"machinectl\"", ",", "\"--no-pager\"", ",", "\"--help\"", "]", ",", "\"Command machinectl does not seems to be present on your system\"", "\"Do you have system with systemd\"", ")", "if", "\"Enforcing\"", "in", "run_cmd", "(", "[", "\"getenforce\"", "]", ",", "return_output", "=", "True", ",", "ignore_status", "=", "True", ")", ":", "logger", ".", "error", "(", "\"Please disable selinux (setenforce 0), selinux blocks some nspawn operations\"", "\"This may lead to strange behaviour\"", ")" ]
Check if all necessary packages are installed on system :return: None or raise exception if some tooling is missing
[ "Check", "if", "all", "necessary", "packages", "are", "installed", "on", "system" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/nspawn/image.py#L156-L173
10,277
user-cont/conu
conu/backend/nspawn/image.py
NspawnImage._generate_id
def _generate_id(self): """ create new unique identifier """ name = self.name.replace(self.special_separator, "-").replace(".", "-") loc = "\/" if self.location: loc = self.location _id = "{PREFIX}{SEP}{NAME}{HASH}{SEP}".format( PREFIX=constants.CONU_ARTIFACT_TAG, NAME=name, HASH=hashlib.sha512(loc).hexdigest()[: 10], SEP=self.special_separator ) return _id
python
def _generate_id(self): """ create new unique identifier """ name = self.name.replace(self.special_separator, "-").replace(".", "-") loc = "\/" if self.location: loc = self.location _id = "{PREFIX}{SEP}{NAME}{HASH}{SEP}".format( PREFIX=constants.CONU_ARTIFACT_TAG, NAME=name, HASH=hashlib.sha512(loc).hexdigest()[: 10], SEP=self.special_separator ) return _id
[ "def", "_generate_id", "(", "self", ")", ":", "name", "=", "self", ".", "name", ".", "replace", "(", "self", ".", "special_separator", ",", "\"-\"", ")", ".", "replace", "(", "\".\"", ",", "\"-\"", ")", "loc", "=", "\"\\/\"", "if", "self", ".", "location", ":", "loc", "=", "self", ".", "location", "_id", "=", "\"{PREFIX}{SEP}{NAME}{HASH}{SEP}\"", ".", "format", "(", "PREFIX", "=", "constants", ".", "CONU_ARTIFACT_TAG", ",", "NAME", "=", "name", ",", "HASH", "=", "hashlib", ".", "sha512", "(", "loc", ")", ".", "hexdigest", "(", ")", "[", ":", "10", "]", ",", "SEP", "=", "self", ".", "special_separator", ")", "return", "_id" ]
create new unique identifier
[ "create", "new", "unique", "identifier" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/nspawn/image.py#L209-L221
10,278
user-cont/conu
conu/backend/nspawn/image.py
NspawnImage.pull
def pull(self): """ Pull this image from URL. :return: None """ if not os.path.exists(CONU_IMAGES_STORE): os.makedirs(CONU_IMAGES_STORE) logger.debug( "Try to pull: {} -> {}".format(self.location, self.local_location)) if not self._is_local(): compressed_location = self.local_location + ".xz" run_cmd(["curl", "-f", "-L", "-o", compressed_location, self.location]) run_cmd(["xz", "-d", compressed_location]) else: if self.location.endswith("xz"): compressed_location = self.local_location + ".xz" run_cmd(["cp", self.location, compressed_location]) run_cmd(["xz", "-d", compressed_location]) else: run_cmd(["cp", self.location, self.local_location])
python
def pull(self): """ Pull this image from URL. :return: None """ if not os.path.exists(CONU_IMAGES_STORE): os.makedirs(CONU_IMAGES_STORE) logger.debug( "Try to pull: {} -> {}".format(self.location, self.local_location)) if not self._is_local(): compressed_location = self.local_location + ".xz" run_cmd(["curl", "-f", "-L", "-o", compressed_location, self.location]) run_cmd(["xz", "-d", compressed_location]) else: if self.location.endswith("xz"): compressed_location = self.local_location + ".xz" run_cmd(["cp", self.location, compressed_location]) run_cmd(["xz", "-d", compressed_location]) else: run_cmd(["cp", self.location, self.local_location])
[ "def", "pull", "(", "self", ")", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "CONU_IMAGES_STORE", ")", ":", "os", ".", "makedirs", "(", "CONU_IMAGES_STORE", ")", "logger", ".", "debug", "(", "\"Try to pull: {} -> {}\"", ".", "format", "(", "self", ".", "location", ",", "self", ".", "local_location", ")", ")", "if", "not", "self", ".", "_is_local", "(", ")", ":", "compressed_location", "=", "self", ".", "local_location", "+", "\".xz\"", "run_cmd", "(", "[", "\"curl\"", ",", "\"-f\"", ",", "\"-L\"", ",", "\"-o\"", ",", "compressed_location", ",", "self", ".", "location", "]", ")", "run_cmd", "(", "[", "\"xz\"", ",", "\"-d\"", ",", "compressed_location", "]", ")", "else", ":", "if", "self", ".", "location", ".", "endswith", "(", "\"xz\"", ")", ":", "compressed_location", "=", "self", ".", "local_location", "+", "\".xz\"", "run_cmd", "(", "[", "\"cp\"", ",", "self", ".", "location", ",", "compressed_location", "]", ")", "run_cmd", "(", "[", "\"xz\"", ",", "\"-d\"", ",", "compressed_location", "]", ")", "else", ":", "run_cmd", "(", "[", "\"cp\"", ",", "self", ".", "location", ",", "self", ".", "local_location", "]", ")" ]
Pull this image from URL. :return: None
[ "Pull", "this", "image", "from", "URL", "." ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/nspawn/image.py#L232-L253
10,279
user-cont/conu
conu/backend/nspawn/image.py
NspawnImage.run_via_binary
def run_via_binary(self, command=None, foreground=False, volumes=None, additional_opts=None, default_options=None, name=None, *args, **kwargs): """ Create new instance NspawnContianer in case of not running at foreground, in case foreground run, return process object :param command: list - command to run :param foreground: bool - run process at foreground :param volumes: list - put additional bind mounts :param additional_opts: list of more boot options for systemd-nspawn command :param default_options: default boot option (-b) :param name: str - name of running instance :param args: pass thru params to subprocess.Popen :param kwargs: pass thru params to subprocess.Popen :return: process or NspawnContianer instance """ command = deepcopy(command) or [] volumes = deepcopy(volumes) or [] additional_opts = deepcopy(additional_opts) or [] internalkw = deepcopy(kwargs) or {} inernalargs = deepcopy(args) or [] if default_options is None: default_options = ["-b"] # TODO: reconsile parameters (changed from API definition) logger.info("run container via binary in background") machine_name = constants.CONU_ARTIFACT_TAG if name: machine_name += name else: machine_name += random_str() if not foreground: # WARN: avoid to run boot without stderr and stdout to terminal, it breaks terminal, # it systemd-nspawn does some magic with console # TODO: is able to avoid this behaviour in better way? internalkw["stdout"] = subprocess.PIPE internalkw["stderr"] = subprocess.PIPE additional_opts += default_options if volumes: additional_opts += self.get_volume_options(volumes=volumes) logger.debug("starting NSPAWN") systemd_command = [ "systemd-nspawn", "--machine", machine_name, "-i", self.local_location] + additional_opts + command logger.debug("Start command: %s" % " ".join(systemd_command)) callback_method = (subprocess.Popen, systemd_command, inernalargs, internalkw) self.container_process = NspawnContainer.internal_run_container( name=machine_name, callback_method=callback_method, foreground=foreground ) if foreground: return self.container_process else: return NspawnContainer(self, None, name=machine_name, start_process=self.container_process, start_action=callback_method)
python
def run_via_binary(self, command=None, foreground=False, volumes=None, additional_opts=None, default_options=None, name=None, *args, **kwargs): """ Create new instance NspawnContianer in case of not running at foreground, in case foreground run, return process object :param command: list - command to run :param foreground: bool - run process at foreground :param volumes: list - put additional bind mounts :param additional_opts: list of more boot options for systemd-nspawn command :param default_options: default boot option (-b) :param name: str - name of running instance :param args: pass thru params to subprocess.Popen :param kwargs: pass thru params to subprocess.Popen :return: process or NspawnContianer instance """ command = deepcopy(command) or [] volumes = deepcopy(volumes) or [] additional_opts = deepcopy(additional_opts) or [] internalkw = deepcopy(kwargs) or {} inernalargs = deepcopy(args) or [] if default_options is None: default_options = ["-b"] # TODO: reconsile parameters (changed from API definition) logger.info("run container via binary in background") machine_name = constants.CONU_ARTIFACT_TAG if name: machine_name += name else: machine_name += random_str() if not foreground: # WARN: avoid to run boot without stderr and stdout to terminal, it breaks terminal, # it systemd-nspawn does some magic with console # TODO: is able to avoid this behaviour in better way? internalkw["stdout"] = subprocess.PIPE internalkw["stderr"] = subprocess.PIPE additional_opts += default_options if volumes: additional_opts += self.get_volume_options(volumes=volumes) logger.debug("starting NSPAWN") systemd_command = [ "systemd-nspawn", "--machine", machine_name, "-i", self.local_location] + additional_opts + command logger.debug("Start command: %s" % " ".join(systemd_command)) callback_method = (subprocess.Popen, systemd_command, inernalargs, internalkw) self.container_process = NspawnContainer.internal_run_container( name=machine_name, callback_method=callback_method, foreground=foreground ) if foreground: return self.container_process else: return NspawnContainer(self, None, name=machine_name, start_process=self.container_process, start_action=callback_method)
[ "def", "run_via_binary", "(", "self", ",", "command", "=", "None", ",", "foreground", "=", "False", ",", "volumes", "=", "None", ",", "additional_opts", "=", "None", ",", "default_options", "=", "None", ",", "name", "=", "None", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "command", "=", "deepcopy", "(", "command", ")", "or", "[", "]", "volumes", "=", "deepcopy", "(", "volumes", ")", "or", "[", "]", "additional_opts", "=", "deepcopy", "(", "additional_opts", ")", "or", "[", "]", "internalkw", "=", "deepcopy", "(", "kwargs", ")", "or", "{", "}", "inernalargs", "=", "deepcopy", "(", "args", ")", "or", "[", "]", "if", "default_options", "is", "None", ":", "default_options", "=", "[", "\"-b\"", "]", "# TODO: reconsile parameters (changed from API definition)", "logger", ".", "info", "(", "\"run container via binary in background\"", ")", "machine_name", "=", "constants", ".", "CONU_ARTIFACT_TAG", "if", "name", ":", "machine_name", "+=", "name", "else", ":", "machine_name", "+=", "random_str", "(", ")", "if", "not", "foreground", ":", "# WARN: avoid to run boot without stderr and stdout to terminal, it breaks terminal,", "# it systemd-nspawn does some magic with console", "# TODO: is able to avoid this behaviour in better way?", "internalkw", "[", "\"stdout\"", "]", "=", "subprocess", ".", "PIPE", "internalkw", "[", "\"stderr\"", "]", "=", "subprocess", ".", "PIPE", "additional_opts", "+=", "default_options", "if", "volumes", ":", "additional_opts", "+=", "self", ".", "get_volume_options", "(", "volumes", "=", "volumes", ")", "logger", ".", "debug", "(", "\"starting NSPAWN\"", ")", "systemd_command", "=", "[", "\"systemd-nspawn\"", ",", "\"--machine\"", ",", "machine_name", ",", "\"-i\"", ",", "self", ".", "local_location", "]", "+", "additional_opts", "+", "command", "logger", ".", "debug", "(", "\"Start command: %s\"", "%", "\" \"", ".", "join", "(", "systemd_command", ")", ")", "callback_method", "=", "(", "subprocess", ".", "Popen", ",", "systemd_command", ",", "inernalargs", ",", "internalkw", ")", "self", ".", "container_process", "=", "NspawnContainer", ".", "internal_run_container", "(", "name", "=", "machine_name", ",", "callback_method", "=", "callback_method", ",", "foreground", "=", "foreground", ")", "if", "foreground", ":", "return", "self", ".", "container_process", "else", ":", "return", "NspawnContainer", "(", "self", ",", "None", ",", "name", "=", "machine_name", ",", "start_process", "=", "self", ".", "container_process", ",", "start_action", "=", "callback_method", ")" ]
Create new instance NspawnContianer in case of not running at foreground, in case foreground run, return process object :param command: list - command to run :param foreground: bool - run process at foreground :param volumes: list - put additional bind mounts :param additional_opts: list of more boot options for systemd-nspawn command :param default_options: default boot option (-b) :param name: str - name of running instance :param args: pass thru params to subprocess.Popen :param kwargs: pass thru params to subprocess.Popen :return: process or NspawnContianer instance
[ "Create", "new", "instance", "NspawnContianer", "in", "case", "of", "not", "running", "at", "foreground", "in", "case", "foreground", "run", "return", "process", "object" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/nspawn/image.py#L345-L403
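Usage sketch for the NspawnImage.run_via_binary record above. It assumes the backend can be created without arguments, that at least one image already exists in the local store, and that systemd-nspawn and machinectl are available; cleanup reuses the backend helper documented further below.

from conu.backend.nspawn.backend import NspawnBackend

backend = NspawnBackend()                # assumed zero-argument construction
images = backend.list_images()           # NspawnImage objects from the local image store
if images:
    container = images[0].run_via_binary(name="demo")  # boots the image (-b) in the background
    print("machine name:", container.name)
    backend.cleanup_containers()         # terminates conu-created machines via machinectl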
10,280
user-cont/conu
conu/utils/rpms.py
process_rpm_ql_line
def process_rpm_ql_line(line_str, allowed_keys): """ Checks single line of rpm-ql for correct keys :param line_str: line to process :param allowed_keys: list of allowed keys :return: bool """ try: name, key_str = line_str.split(' ', 1) except ValueError: logger.error("Failed to split line '{0}".format(repr(line_str))) return False if name in no_key_pkgs: return True if key_str == NONE_KEY: logger.error("Unsigned package {0}".format(name)) return False key_match = re.match(KEY, key_str) if not key_match: logger.error('Could not process line "{0}"'.format(line_str)) return False used_key = key_match.group(1) if used_key in allowed_keys: return True logger.error("Wrong key for '{0}' ({1})".format(name, used_key)) return False
python
def process_rpm_ql_line(line_str, allowed_keys): """ Checks single line of rpm-ql for correct keys :param line_str: line to process :param allowed_keys: list of allowed keys :return: bool """ try: name, key_str = line_str.split(' ', 1) except ValueError: logger.error("Failed to split line '{0}".format(repr(line_str))) return False if name in no_key_pkgs: return True if key_str == NONE_KEY: logger.error("Unsigned package {0}".format(name)) return False key_match = re.match(KEY, key_str) if not key_match: logger.error('Could not process line "{0}"'.format(line_str)) return False used_key = key_match.group(1) if used_key in allowed_keys: return True logger.error("Wrong key for '{0}' ({1})".format(name, used_key)) return False
[ "def", "process_rpm_ql_line", "(", "line_str", ",", "allowed_keys", ")", ":", "try", ":", "name", ",", "key_str", "=", "line_str", ".", "split", "(", "' '", ",", "1", ")", "except", "ValueError", ":", "logger", ".", "error", "(", "\"Failed to split line '{0}\"", ".", "format", "(", "repr", "(", "line_str", ")", ")", ")", "return", "False", "if", "name", "in", "no_key_pkgs", ":", "return", "True", "if", "key_str", "==", "NONE_KEY", ":", "logger", ".", "error", "(", "\"Unsigned package {0}\"", ".", "format", "(", "name", ")", ")", "return", "False", "key_match", "=", "re", ".", "match", "(", "KEY", ",", "key_str", ")", "if", "not", "key_match", ":", "logger", ".", "error", "(", "'Could not process line \"{0}\"'", ".", "format", "(", "line_str", ")", ")", "return", "False", "used_key", "=", "key_match", ".", "group", "(", "1", ")", "if", "used_key", "in", "allowed_keys", ":", "return", "True", "logger", ".", "error", "(", "\"Wrong key for '{0}' ({1})\"", ".", "format", "(", "name", ",", "used_key", ")", ")", "return", "False" ]
Checks single line of rpm-ql for correct keys :param line_str: line to process :param allowed_keys: list of allowed keys :return: bool
[ "Checks", "single", "line", "of", "rpm", "-", "ql", "for", "correct", "keys" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/utils/rpms.py#L29-L55
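Illustration of the single-line format process_rpm_ql_line consumes (one '%{name} %{SIGPGP:pgpsig}' line from rpm -qa). The signature string and key ID are invented, so the function may log an error and return False for them rather than True.

from conu.utils.rpms import process_rpm_ql_line

line = "bash RSA/SHA256, Thu 14 Feb 2019 12:00:00 UTC, Key ID 50cb390b3c3359c4"  # invented example line
ok = process_rpm_ql_line(line, allowed_keys=["50cb390b3c3359c4"])
print("line accepted:", ok)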
10,281
user-cont/conu
conu/utils/rpms.py
check_signatures
def check_signatures(pkg_list, allowed_keys): """ Go through list of packages with signatures and check if all are properly signed :param pkg_list: list of packages in format '%{name} %{SIGPGP:pgpsig}' :param allowed_keys: list of allowed keys :return: bool """ all_passed = True for line_str in pkg_list: all_passed &= process_rpm_ql_line(line_str.strip(), allowed_keys) if not all_passed: raise PackageSignatureException( 'Error while checking rpm signatures, see logs for more info')
python
def check_signatures(pkg_list, allowed_keys): """ Go through list of packages with signatures and check if all are properly signed :param pkg_list: list of packages in format '%{name} %{SIGPGP:pgpsig}' :param allowed_keys: list of allowed keys :return: bool """ all_passed = True for line_str in pkg_list: all_passed &= process_rpm_ql_line(line_str.strip(), allowed_keys) if not all_passed: raise PackageSignatureException( 'Error while checking rpm signatures, see logs for more info')
[ "def", "check_signatures", "(", "pkg_list", ",", "allowed_keys", ")", ":", "all_passed", "=", "True", "for", "line_str", "in", "pkg_list", ":", "all_passed", "&=", "process_rpm_ql_line", "(", "line_str", ".", "strip", "(", ")", ",", "allowed_keys", ")", "if", "not", "all_passed", ":", "raise", "PackageSignatureException", "(", "'Error while checking rpm signatures, see logs for more info'", ")" ]
Go through list of packages with signatures and check if all are properly signed :param pkg_list: list of packages in format '%{name} %{SIGPGP:pgpsig}' :param allowed_keys: list of allowed keys :return: bool
[ "Go", "through", "list", "of", "packages", "with", "signatures", "and", "check", "if", "all", "are", "properly", "signed" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/utils/rpms.py#L58-L72
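Sketch for the check_signatures record above, feeding it a short invented package list; the exception is caught generically because the PackageSignatureException import path is not shown in this record.

from conu.utils.rpms import check_signatures

pkg_lines = [
    "bash RSA/SHA256, Thu 14 Feb 2019 12:00:00 UTC, Key ID 50cb390b3c3359c4",  # invented signed entry
    "gpg-pubkey (none)",                                                       # invented unsigned entry
]
try:
    check_signatures(pkg_lines, allowed_keys=["50cb390b3c3359c4"])
    print("all packages correctly signed")
except Exception as exc:  # concretely PackageSignatureException in conu
    print("signature check failed:", exc)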
10,282
user-cont/conu
conu/backend/docker/container.py
DockerContainer.get_ports
def get_ports(self): """ get ports specified in container metadata :return: list of str """ ports = [] container_ports = self.inspect(refresh=True)["NetworkSettings"]["Ports"] if not container_ports: return ports for p in container_ports: # TODO: gracefullness, error handling ports.append(p.split("/")[0]) return ports
python
def get_ports(self): """ get ports specified in container metadata :return: list of str """ ports = [] container_ports = self.inspect(refresh=True)["NetworkSettings"]["Ports"] if not container_ports: return ports for p in container_ports: # TODO: gracefullness, error handling ports.append(p.split("/")[0]) return ports
[ "def", "get_ports", "(", "self", ")", ":", "ports", "=", "[", "]", "container_ports", "=", "self", ".", "inspect", "(", "refresh", "=", "True", ")", "[", "\"NetworkSettings\"", "]", "[", "\"Ports\"", "]", "if", "not", "container_ports", ":", "return", "ports", "for", "p", "in", "container_ports", ":", "# TODO: gracefullness, error handling", "ports", ".", "append", "(", "p", ".", "split", "(", "\"/\"", ")", "[", "0", "]", ")", "return", "ports" ]
get ports specified in container metadata :return: list of str
[ "get", "ports", "specified", "in", "container", "metadata" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/docker/container.py#L350-L363
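Usage sketch for the DockerContainer.get_ports record above. The backend import, image name and the argument-less run call are assumptions; stop/delete mirror the cleanup pattern used in the has_pkgs_signed_with record.

from conu import DockerBackend  # assumed top-level export

with DockerBackend() as backend:
    image = backend.ImageClass("nginx")  # illustrative image that exposes a port
    container = image.run_via_binary()   # assumed to work with default arguments
    try:
        print(container.get_ports())     # e.g. ['80'] once the exposed ports show up in metadata
    finally:
        container.stop()
        container.delete()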
10,283
user-cont/conu
conu/apidefs/backend.py
Backend._clean_tmp_dirs
def _clean_tmp_dirs(self): """ Remove temporary dir associated with this backend instance. :return: None """ def onerror(fnc, path, excinfo): # we might not have rights to do this, the files could be owned by root self.logger.info("we were not able to remove temporary file %s: %s", path, excinfo[1]) shutil.rmtree(self.tmpdir, onerror=onerror) self.tmpdir = None global _backend_tmpdir _backend_tmpdir = None
python
def _clean_tmp_dirs(self): """ Remove temporary dir associated with this backend instance. :return: None """ def onerror(fnc, path, excinfo): # we might not have rights to do this, the files could be owned by root self.logger.info("we were not able to remove temporary file %s: %s", path, excinfo[1]) shutil.rmtree(self.tmpdir, onerror=onerror) self.tmpdir = None global _backend_tmpdir _backend_tmpdir = None
[ "def", "_clean_tmp_dirs", "(", "self", ")", ":", "def", "onerror", "(", "fnc", ",", "path", ",", "excinfo", ")", ":", "# we might not have rights to do this, the files could be owned by root", "self", ".", "logger", ".", "info", "(", "\"we were not able to remove temporary file %s: %s\"", ",", "path", ",", "excinfo", "[", "1", "]", ")", "shutil", ".", "rmtree", "(", "self", ".", "tmpdir", ",", "onerror", "=", "onerror", ")", "self", ".", "tmpdir", "=", "None", "global", "_backend_tmpdir", "_backend_tmpdir", "=", "None" ]
Remove temporary dir associated with this backend instance. :return: None
[ "Remove", "temporary", "dir", "associated", "with", "this", "backend", "instance", "." ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/apidefs/backend.py#L147-L161
10,284
user-cont/conu
conu/apidefs/backend.py
Backend._clean
def _clean(self): """ Method for cleaning according to object cleanup policy value :return: None """ if CleanupPolicy.EVERYTHING in self.cleanup: self.cleanup_containers() self.cleanup_volumes() self.cleanup_images() self._clean_tmp_dirs() else: if CleanupPolicy.CONTAINERS in self.cleanup: self.cleanup_containers() if CleanupPolicy.VOLUMES in self.cleanup: self.cleanup_volumes() if CleanupPolicy.IMAGES in self.cleanup: self.cleanup_images() if CleanupPolicy.TMP_DIRS in self.cleanup: self._clean_tmp_dirs()
python
def _clean(self): """ Method for cleaning according to object cleanup policy value :return: None """ if CleanupPolicy.EVERYTHING in self.cleanup: self.cleanup_containers() self.cleanup_volumes() self.cleanup_images() self._clean_tmp_dirs() else: if CleanupPolicy.CONTAINERS in self.cleanup: self.cleanup_containers() if CleanupPolicy.VOLUMES in self.cleanup: self.cleanup_volumes() if CleanupPolicy.IMAGES in self.cleanup: self.cleanup_images() if CleanupPolicy.TMP_DIRS in self.cleanup: self._clean_tmp_dirs()
[ "def", "_clean", "(", "self", ")", ":", "if", "CleanupPolicy", ".", "EVERYTHING", "in", "self", ".", "cleanup", ":", "self", ".", "cleanup_containers", "(", ")", "self", ".", "cleanup_volumes", "(", ")", "self", ".", "cleanup_images", "(", ")", "self", ".", "_clean_tmp_dirs", "(", ")", "else", ":", "if", "CleanupPolicy", ".", "CONTAINERS", "in", "self", ".", "cleanup", ":", "self", ".", "cleanup_containers", "(", ")", "if", "CleanupPolicy", ".", "VOLUMES", "in", "self", ".", "cleanup", ":", "self", ".", "cleanup_volumes", "(", ")", "if", "CleanupPolicy", ".", "IMAGES", "in", "self", ".", "cleanup", ":", "self", ".", "cleanup_images", "(", ")", "if", "CleanupPolicy", ".", "TMP_DIRS", "in", "self", ".", "cleanup", ":", "self", ".", "_clean_tmp_dirs", "(", ")" ]
Method for cleaning according to object cleanup policy value :return: None
[ "Method", "for", "cleaning", "according", "to", "object", "cleanup", "policy", "value" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/apidefs/backend.py#L187-L206
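Sketch of how the cleanup policy consulted by Backend._clean is usually chosen. It assumes the backend constructor accepts a cleanup list that ends up in self.cleanup and that CleanupPolicy is importable from the module this record belongs to; both are assumptions, not confirmed by the record itself.

from conu import DockerBackend                  # assumed top-level export
from conu.apidefs.backend import CleanupPolicy  # assumed to live next to this record's Backend class

# Only containers and temporary directories get removed when the backend is torn down.
with DockerBackend(cleanup=[CleanupPolicy.CONTAINERS, CleanupPolicy.TMP_DIRS]) as backend:
    image = backend.ImageClass("busybox")       # illustrative image
    image.run_via_binary(command=["true"])
# leaving the with-block is assumed to invoke _clean(), which honours the policy list above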
10,285
user-cont/conu
conu/backend/nspawn/backend.py
NspawnBackend.list_containers
def list_containers(self): """ list all available nspawn containers :return: collection of instances of :class:`conu.backend.nspawn.container.NspawnContainer` """ data = run_cmd(["machinectl", "list", "--no-legend", "--no-pager"], return_output=True) output = [] reg = re.compile(r"\s+") for line in data.split("\n"): stripped = line.strip() if stripped: parts = reg.split(stripped) name = parts[0] output.append(self.ContainerClass(None, None, name=name)) return output
python
def list_containers(self): """ list all available nspawn containers :return: collection of instances of :class:`conu.backend.nspawn.container.NspawnContainer` """ data = run_cmd(["machinectl", "list", "--no-legend", "--no-pager"], return_output=True) output = [] reg = re.compile(r"\s+") for line in data.split("\n"): stripped = line.strip() if stripped: parts = reg.split(stripped) name = parts[0] output.append(self.ContainerClass(None, None, name=name)) return output
[ "def", "list_containers", "(", "self", ")", ":", "data", "=", "run_cmd", "(", "[", "\"machinectl\"", ",", "\"list\"", ",", "\"--no-legend\"", ",", "\"--no-pager\"", "]", ",", "return_output", "=", "True", ")", "output", "=", "[", "]", "reg", "=", "re", ".", "compile", "(", "r\"\\s+\"", ")", "for", "line", "in", "data", ".", "split", "(", "\"\\n\"", ")", ":", "stripped", "=", "line", ".", "strip", "(", ")", "if", "stripped", ":", "parts", "=", "reg", ".", "split", "(", "stripped", ")", "name", "=", "parts", "[", "0", "]", "output", ".", "append", "(", "self", ".", "ContainerClass", "(", "None", ",", "None", ",", "name", "=", "name", ")", ")", "return", "output" ]
list all available nspawn containers :return: collection of instances of :class:`conu.backend.nspawn.container.NspawnContainer`
[ "list", "all", "available", "nspawn", "containers" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/nspawn/backend.py#L44-L60
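Usage sketch for the NspawnBackend.list_containers record above; zero-argument construction of the backend is an assumption, and machinectl must be present on the host.

from conu.backend.nspawn.backend import NspawnBackend

backend = NspawnBackend()  # assumed zero-argument construction
for container in backend.list_containers():  # parsed from `machinectl list --no-legend --no-pager`
    print(container.name)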
10,286
user-cont/conu
conu/backend/nspawn/backend.py
NspawnBackend.list_images
def list_images(self): """ list all available nspawn images :return: collection of instances of :class:`conu.backend.nspawn.image.NspawnImage` """ # Fedora-Cloud-Base-27-1.6.x86_64 raw no 601.7M Sun 2017-11-05 08:30:10 CET \ # Sun 2017-11-05 08:30:10 CET data = os.listdir(CONU_IMAGES_STORE) output = [] for name in data: output.append(self.ImageClass(name, pull_policy=ImagePullPolicy.NEVER)) return output
python
def list_images(self): """ list all available nspawn images :return: collection of instances of :class:`conu.backend.nspawn.image.NspawnImage` """ # Fedora-Cloud-Base-27-1.6.x86_64 raw no 601.7M Sun 2017-11-05 08:30:10 CET \ # Sun 2017-11-05 08:30:10 CET data = os.listdir(CONU_IMAGES_STORE) output = [] for name in data: output.append(self.ImageClass(name, pull_policy=ImagePullPolicy.NEVER)) return output
[ "def", "list_images", "(", "self", ")", ":", "# Fedora-Cloud-Base-27-1.6.x86_64 raw no 601.7M Sun 2017-11-05 08:30:10 CET \\", "# Sun 2017-11-05 08:30:10 CET", "data", "=", "os", ".", "listdir", "(", "CONU_IMAGES_STORE", ")", "output", "=", "[", "]", "for", "name", "in", "data", ":", "output", ".", "append", "(", "self", ".", "ImageClass", "(", "name", ",", "pull_policy", "=", "ImagePullPolicy", ".", "NEVER", ")", ")", "return", "output" ]
list all available nspawn images :return: collection of instances of :class:`conu.backend.nspawn.image.NspawnImage`
[ "list", "all", "available", "nspawn", "images" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/nspawn/backend.py#L62-L74
10,287
user-cont/conu
conu/backend/nspawn/backend.py
NspawnBackend.cleanup_containers
def cleanup_containers(self): """ stop all container created by conu :return: None """ for cont in self.list_containers(): if CONU_ARTIFACT_TAG in cont.name: try: logger.debug("removing container %s created by conu", cont) # TODO: move this functionality to container.delete run_cmd(["machinectl", "terminate", cont.name]) except subprocess.CalledProcessError as e: logger.error("unable to remove container %s: %r", cont, e)
python
def cleanup_containers(self): """ stop all container created by conu :return: None """ for cont in self.list_containers(): if CONU_ARTIFACT_TAG in cont.name: try: logger.debug("removing container %s created by conu", cont) # TODO: move this functionality to container.delete run_cmd(["machinectl", "terminate", cont.name]) except subprocess.CalledProcessError as e: logger.error("unable to remove container %s: %r", cont, e)
[ "def", "cleanup_containers", "(", "self", ")", ":", "for", "cont", "in", "self", ".", "list_containers", "(", ")", ":", "if", "CONU_ARTIFACT_TAG", "in", "cont", ".", "name", ":", "try", ":", "logger", ".", "debug", "(", "\"removing container %s created by conu\"", ",", "cont", ")", "# TODO: move this functionality to container.delete", "run_cmd", "(", "[", "\"machinectl\"", ",", "\"terminate\"", ",", "cont", ".", "name", "]", ")", "except", "subprocess", ".", "CalledProcessError", "as", "e", ":", "logger", ".", "error", "(", "\"unable to remove container %s: %r\"", ",", "cont", ",", "e", ")" ]
stop all container created by conu :return: None
[ "stop", "all", "container", "created", "by", "conu" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/nspawn/backend.py#L76-L89
10,288
user-cont/conu
conu/utils/__init__.py
check_port
def check_port(port, host, timeout=10): """ connect to port on host and return True on success :param port: int, port to check :param host: string, host address :param timeout: int, number of seconds spent trying :return: bool """ logger.info("trying to open connection to %s:%s", host, port) sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) try: sock.settimeout(timeout) result = sock.connect_ex((host, port)) logger.info("was connection successful? errno: %s", result) if result == 0: logger.debug('port is opened: %s:%s' % (host, port)) return True else: logger.debug('port is closed: %s:%s' % (host, port)) return False finally: sock.close()
python
def check_port(port, host, timeout=10): """ connect to port on host and return True on success :param port: int, port to check :param host: string, host address :param timeout: int, number of seconds spent trying :return: bool """ logger.info("trying to open connection to %s:%s", host, port) sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) try: sock.settimeout(timeout) result = sock.connect_ex((host, port)) logger.info("was connection successful? errno: %s", result) if result == 0: logger.debug('port is opened: %s:%s' % (host, port)) return True else: logger.debug('port is closed: %s:%s' % (host, port)) return False finally: sock.close()
[ "def", "check_port", "(", "port", ",", "host", ",", "timeout", "=", "10", ")", ":", "logger", ".", "info", "(", "\"trying to open connection to %s:%s\"", ",", "host", ",", "port", ")", "sock", "=", "socket", ".", "socket", "(", "socket", ".", "AF_INET", ",", "socket", ".", "SOCK_STREAM", ")", "try", ":", "sock", ".", "settimeout", "(", "timeout", ")", "result", "=", "sock", ".", "connect_ex", "(", "(", "host", ",", "port", ")", ")", "logger", ".", "info", "(", "\"was connection successful? errno: %s\"", ",", "result", ")", "if", "result", "==", "0", ":", "logger", ".", "debug", "(", "'port is opened: %s:%s'", "%", "(", "host", ",", "port", ")", ")", "return", "True", "else", ":", "logger", ".", "debug", "(", "'port is closed: %s:%s'", "%", "(", "host", ",", "port", ")", ")", "return", "False", "finally", ":", "sock", ".", "close", "(", ")" ]
connect to port on host and return True on success :param port: int, port to check :param host: string, host address :param timeout: int, number of seconds spent trying :return: bool
[ "connect", "to", "port", "on", "host", "and", "return", "True", "on", "success" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/utils/__init__.py#L54-L76
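Usage sketch for the check_port record above; it only relies on the signature documented in the record, with an illustrative host and port.

from conu.utils import check_port

if check_port(443, "example.org", timeout=5):  # illustrative host/port
    print("TCP connection succeeded")
else:
    print("port closed or unreachable")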
10,289
user-cont/conu
conu/utils/__init__.py
get_selinux_status
def get_selinux_status(): """ get SELinux status of host :return: string, one of Enforced, Permissive, Disabled """ getenforce_command_exists() # alternatively, we could read directly from /sys/fs/selinux/{enforce,status}, but status is # empty (why?) and enforce doesn't tell whether SELinux is disabled or not o = run_cmd(["getenforce"], return_output=True).strip() # libselinux-utils logger.debug("SELinux is %r", o) return o
python
def get_selinux_status(): """ get SELinux status of host :return: string, one of Enforced, Permissive, Disabled """ getenforce_command_exists() # alternatively, we could read directly from /sys/fs/selinux/{enforce,status}, but status is # empty (why?) and enforce doesn't tell whether SELinux is disabled or not o = run_cmd(["getenforce"], return_output=True).strip() # libselinux-utils logger.debug("SELinux is %r", o) return o
[ "def", "get_selinux_status", "(", ")", ":", "getenforce_command_exists", "(", ")", "# alternatively, we could read directly from /sys/fs/selinux/{enforce,status}, but status is", "# empty (why?) and enforce doesn't tell whether SELinux is disabled or not", "o", "=", "run_cmd", "(", "[", "\"getenforce\"", "]", ",", "return_output", "=", "True", ")", ".", "strip", "(", ")", "# libselinux-utils", "logger", ".", "debug", "(", "\"SELinux is %r\"", ",", "o", ")", "return", "o" ]
get SELinux status of host :return: string, one of Enforced, Permissive, Disabled
[ "get", "SELinux", "status", "of", "host" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/utils/__init__.py#L79-L90
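Usage sketch for the get_selinux_status record above; it requires the getenforce binary (libselinux-utils) on the host.

from conu.utils import get_selinux_status

print("SELinux status:", get_selinux_status())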
10,290
user-cont/conu
conu/utils/__init__.py
random_str
def random_str(size=10): """ create random string of selected size :param size: int, length of the string :return: the string """ return ''.join(random.choice(string.ascii_lowercase) for _ in range(size))
python
def random_str(size=10): """ create random string of selected size :param size: int, length of the string :return: the string """ return ''.join(random.choice(string.ascii_lowercase) for _ in range(size))
[ "def", "random_str", "(", "size", "=", "10", ")", ":", "return", "''", ".", "join", "(", "random", ".", "choice", "(", "string", ".", "ascii_lowercase", ")", "for", "_", "in", "range", "(", "size", ")", ")" ]
create random string of selected size :param size: int, length of the string :return: the string
[ "create", "random", "string", "of", "selected", "size" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/utils/__init__.py#L102-L109
10,291
user-cont/conu
conu/utils/__init__.py
run_cmd
def run_cmd(cmd, return_output=False, ignore_status=False, log_output=True, **kwargs): """ run provided command on host system using the same user as you invoked this code, raises subprocess.CalledProcessError if it fails :param cmd: list of str :param return_output: bool, return output of the command :param ignore_status: bool, do not fail in case nonzero return code :param log_output: bool, if True, log output to debug log :param kwargs: pass keyword arguments to subprocess.check_* functions; for more info, please check `help(subprocess.Popen)` :return: None or str """ logger.debug('command: "%s"' % ' '.join(cmd)) process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True, **kwargs) output = process.communicate()[0] if log_output: logger.debug(output) if process.returncode > 0: if ignore_status: if return_output: return output else: return process.returncode else: raise subprocess.CalledProcessError(cmd=cmd, returncode=process.returncode) if return_output: return output
python
def run_cmd(cmd, return_output=False, ignore_status=False, log_output=True, **kwargs): """ run provided command on host system using the same user as you invoked this code, raises subprocess.CalledProcessError if it fails :param cmd: list of str :param return_output: bool, return output of the command :param ignore_status: bool, do not fail in case nonzero return code :param log_output: bool, if True, log output to debug log :param kwargs: pass keyword arguments to subprocess.check_* functions; for more info, please check `help(subprocess.Popen)` :return: None or str """ logger.debug('command: "%s"' % ' '.join(cmd)) process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True, **kwargs) output = process.communicate()[0] if log_output: logger.debug(output) if process.returncode > 0: if ignore_status: if return_output: return output else: return process.returncode else: raise subprocess.CalledProcessError(cmd=cmd, returncode=process.returncode) if return_output: return output
[ "def", "run_cmd", "(", "cmd", ",", "return_output", "=", "False", ",", "ignore_status", "=", "False", ",", "log_output", "=", "True", ",", "*", "*", "kwargs", ")", ":", "logger", ".", "debug", "(", "'command: \"%s\"'", "%", "' '", ".", "join", "(", "cmd", ")", ")", "process", "=", "subprocess", ".", "Popen", "(", "cmd", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "STDOUT", ",", "universal_newlines", "=", "True", ",", "*", "*", "kwargs", ")", "output", "=", "process", ".", "communicate", "(", ")", "[", "0", "]", "if", "log_output", ":", "logger", ".", "debug", "(", "output", ")", "if", "process", ".", "returncode", ">", "0", ":", "if", "ignore_status", ":", "if", "return_output", ":", "return", "output", "else", ":", "return", "process", ".", "returncode", "else", ":", "raise", "subprocess", ".", "CalledProcessError", "(", "cmd", "=", "cmd", ",", "returncode", "=", "process", ".", "returncode", ")", "if", "return_output", ":", "return", "output" ]
run provided command on host system using the same user as you invoked this code, raises subprocess.CalledProcessError if it fails :param cmd: list of str :param return_output: bool, return output of the command :param ignore_status: bool, do not fail in case nonzero return code :param log_output: bool, if True, log output to debug log :param kwargs: pass keyword arguments to subprocess.check_* functions; for more info, please check `help(subprocess.Popen)` :return: None or str
[ "run", "provided", "command", "on", "host", "system", "using", "the", "same", "user", "as", "you", "invoked", "this", "code", "raises", "subprocess", ".", "CalledProcessError", "if", "it", "fails" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/utils/__init__.py#L112-L141
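Usage sketch for the run_cmd record above, showing the three behaviours documented in the record: returning output, tolerating a non-zero exit, and raising CalledProcessError by default.

from subprocess import CalledProcessError

from conu.utils import run_cmd

print(run_cmd(["uname", "-r"], return_output=True).strip())  # stdout and stderr merged into one string

rc = run_cmd(["false"], ignore_status=True)                   # non-zero exit returns the return code
print("exit status:", rc)

try:
    run_cmd(["false"])                                        # without ignore_status this raises
except CalledProcessError as exc:
    print("command failed with return code", exc.returncode)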
10,292
user-cont/conu
conu/utils/__init__.py
command_exists
def command_exists(command, noop_invocation, exc_msg): """ Verify that the provided command exists. Raise CommandDoesNotExistException in case of an error or if the command does not exist. :param command: str, command to check (python 3 only) :param noop_invocation: list of str, command to check (python 2 only) :param exc_msg: str, message of exception when command does not exist :return: bool, True if everything's all right (otherwise exception is thrown) """ try: found = bool(shutil.which(command)) # py3 only except AttributeError: # py2 branch try: p = subprocess.Popen(noop_invocation, stdout=subprocess.PIPE, stderr=subprocess.PIPE) except OSError: found = False else: stdout, stderr = p.communicate() found = p.returncode == 0 if not found: logger.error("`%s` exited with a non-zero return code (%s)", noop_invocation, p.returncode) logger.error("command stdout = %s", stdout) logger.error("command stderr = %s", stderr) if not found: raise CommandDoesNotExistException(exc_msg) return True
python
def command_exists(command, noop_invocation, exc_msg): """ Verify that the provided command exists. Raise CommandDoesNotExistException in case of an error or if the command does not exist. :param command: str, command to check (python 3 only) :param noop_invocation: list of str, command to check (python 2 only) :param exc_msg: str, message of exception when command does not exist :return: bool, True if everything's all right (otherwise exception is thrown) """ try: found = bool(shutil.which(command)) # py3 only except AttributeError: # py2 branch try: p = subprocess.Popen(noop_invocation, stdout=subprocess.PIPE, stderr=subprocess.PIPE) except OSError: found = False else: stdout, stderr = p.communicate() found = p.returncode == 0 if not found: logger.error("`%s` exited with a non-zero return code (%s)", noop_invocation, p.returncode) logger.error("command stdout = %s", stdout) logger.error("command stderr = %s", stderr) if not found: raise CommandDoesNotExistException(exc_msg) return True
[ "def", "command_exists", "(", "command", ",", "noop_invocation", ",", "exc_msg", ")", ":", "try", ":", "found", "=", "bool", "(", "shutil", ".", "which", "(", "command", ")", ")", "# py3 only", "except", "AttributeError", ":", "# py2 branch", "try", ":", "p", "=", "subprocess", ".", "Popen", "(", "noop_invocation", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "PIPE", ")", "except", "OSError", ":", "found", "=", "False", "else", ":", "stdout", ",", "stderr", "=", "p", ".", "communicate", "(", ")", "found", "=", "p", ".", "returncode", "==", "0", "if", "not", "found", ":", "logger", ".", "error", "(", "\"`%s` exited with a non-zero return code (%s)\"", ",", "noop_invocation", ",", "p", ".", "returncode", ")", "logger", ".", "error", "(", "\"command stdout = %s\"", ",", "stdout", ")", "logger", ".", "error", "(", "\"command stderr = %s\"", ",", "stderr", ")", "if", "not", "found", ":", "raise", "CommandDoesNotExistException", "(", "exc_msg", ")", "return", "True" ]
Verify that the provided command exists. Raise CommandDoesNotExistException in case of an error or if the command does not exist. :param command: str, command to check (python 3 only) :param noop_invocation: list of str, command to check (python 2 only) :param exc_msg: str, message of exception when command does not exist :return: bool, True if everything's all right (otherwise exception is thrown)
[ "Verify", "that", "the", "provided", "command", "exists", ".", "Raise", "CommandDoesNotExistException", "in", "case", "of", "an", "error", "or", "if", "the", "command", "does", "not", "exist", "." ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/utils/__init__.py#L173-L200
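A usage sketch for command_exists; the import path and the exception's home module are assumptions (the record only shows the function body):

from conu.utils import command_exists                      # import path assumed
from conu.exceptions import CommandDoesNotExistException   # exception location assumed

try:
    # python 3 checks `command` with shutil.which(); python 2 actually runs noop_invocation
    command_exists(
        command="podman",
        noop_invocation=["podman", "--version"],
        exc_msg="podman does not seem to be installed, please install it first",
    )
    print("podman is available")
except CommandDoesNotExistException as ex:
    print(ex)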
10,293
user-cont/conu
conu/utils/__init__.py
check_docker_command_works
def check_docker_command_works(): """ Verify that dockerd and docker binary works fine. This is performed by calling `docker version`, which also checks server API version. :return: bool, True if all is good, otherwise ConuException or CommandDoesNotExistException is thrown """ try: out = subprocess.check_output(["docker", "version"], stderr=subprocess.STDOUT, universal_newlines=True) except OSError: logger.info("docker binary is not available") raise CommandDoesNotExistException( "docker command doesn't seem to be available on your system. " "Please install and configure docker." ) except subprocess.CalledProcessError as ex: logger.error("exception: %s", ex) logger.error("rc: %s, output: %r", ex.returncode, ex.output) raise ConuException( "`docker version` call failed, it seems that your docker daemon is misconfigured or " "this user can't communicate with dockerd." ) else: logger.info("docker environment info: %r", out) return True
python
def check_docker_command_works(): """ Verify that dockerd and docker binary works fine. This is performed by calling `docker version`, which also checks server API version. :return: bool, True if all is good, otherwise ConuException or CommandDoesNotExistException is thrown """ try: out = subprocess.check_output(["docker", "version"], stderr=subprocess.STDOUT, universal_newlines=True) except OSError: logger.info("docker binary is not available") raise CommandDoesNotExistException( "docker command doesn't seem to be available on your system. " "Please install and configure docker." ) except subprocess.CalledProcessError as ex: logger.error("exception: %s", ex) logger.error("rc: %s, output: %r", ex.returncode, ex.output) raise ConuException( "`docker version` call failed, it seems that your docker daemon is misconfigured or " "this user can't communicate with dockerd." ) else: logger.info("docker environment info: %r", out) return True
[ "def", "check_docker_command_works", "(", ")", ":", "try", ":", "out", "=", "subprocess", ".", "check_output", "(", "[", "\"docker\"", ",", "\"version\"", "]", ",", "stderr", "=", "subprocess", ".", "STDOUT", ",", "universal_newlines", "=", "True", ")", "except", "OSError", ":", "logger", ".", "info", "(", "\"docker binary is not available\"", ")", "raise", "CommandDoesNotExistException", "(", "\"docker command doesn't seem to be available on your system. \"", "\"Please install and configure docker.\"", ")", "except", "subprocess", ".", "CalledProcessError", "as", "ex", ":", "logger", ".", "error", "(", "\"exception: %s\"", ",", "ex", ")", "logger", ".", "error", "(", "\"rc: %s, output: %r\"", ",", "ex", ".", "returncode", ",", "ex", ".", "output", ")", "raise", "ConuException", "(", "\"`docker version` call failed, it seems that your docker daemon is misconfigured or \"", "\"this user can't communicate with dockerd.\"", ")", "else", ":", "logger", ".", "info", "(", "\"docker environment info: %r\"", ",", "out", ")", "return", "True" ]
Verify that dockerd and docker binary works fine. This is performed by calling `docker version`, which also checks server API version. :return: bool, True if all is good, otherwise ConuException or CommandDoesNotExistException is thrown
[ "Verify", "that", "dockerd", "and", "docker", "binary", "works", "fine", ".", "This", "is", "performed", "by", "calling", "docker", "version", "which", "also", "checks", "server", "API", "version", "." ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/utils/__init__.py#L250-L277
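A pre-flight check sketch using the function above; the exception import locations are assumptions:

from conu.utils import check_docker_command_works                         # import path assumed
from conu.exceptions import ConuException, CommandDoesNotExistException   # locations assumed

try:
    check_docker_command_works()
except CommandDoesNotExistException:
    print("the docker client binary is not installed")
except ConuException:
    print("docker is installed but the daemon is unreachable or misconfigured")
else:
    print("docker is ready to use")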
10,294
user-cont/conu
conu/utils/__init__.py
export_docker_container_to_directory
def export_docker_container_to_directory(client, container, path): """ take selected docker container, create an archive out of it and unpack it to a selected location :param client: instance of docker.APIClient :param container: instance of DockerContainer :param path: str, path to a directory, doesn't need to exist :return: None """ # we don't do this because of a bug in docker: # https://bugzilla.redhat.com/show_bug.cgi?id=1570828 # stream, _ = client.get_archive(container.get_id(), "/") check_docker_command_works() export_p = subprocess.Popen( ["docker", "export", container.get_id()], stderr=subprocess.PIPE, stdout=subprocess.PIPE ) try: os.mkdir(path, 0o0700) except OSError as ex: if ex.errno == errno.EEXIST: logger.debug("mount point %s exists already", path) else: logger.error("mount point %s can't be created: %s", path, ex) raise logger.debug("about to untar the image") # we can't use tarfile because of --no-same-owner: files in containers are owned # by root and tarfile is trying to `chown 0 file` when running as an unpriv user p = subprocess.Popen( ["tar", "--no-same-owner", "-C", path, "-x"], stdin=subprocess.PIPE, stderr=subprocess.PIPE, ) while True: data = export_p.stdout.read(1048576) if not data: break p.stdin.write(data) p.stdin.close() p.wait() export_p.wait() if export_p.returncode: logger.error(export_p.stderr.read()) raise ConuException("Failed to get rootfs of %s from docker." % container) if p.returncode: logger.error(p.stderr.read()) raise ConuException("Failed to unpack the archive.") logger.debug("image is unpacked")
python
def export_docker_container_to_directory(client, container, path): """ take selected docker container, create an archive out of it and unpack it to a selected location :param client: instance of docker.APIClient :param container: instance of DockerContainer :param path: str, path to a directory, doesn't need to exist :return: None """ # we don't do this because of a bug in docker: # https://bugzilla.redhat.com/show_bug.cgi?id=1570828 # stream, _ = client.get_archive(container.get_id(), "/") check_docker_command_works() export_p = subprocess.Popen( ["docker", "export", container.get_id()], stderr=subprocess.PIPE, stdout=subprocess.PIPE ) try: os.mkdir(path, 0o0700) except OSError as ex: if ex.errno == errno.EEXIST: logger.debug("mount point %s exists already", path) else: logger.error("mount point %s can't be created: %s", path, ex) raise logger.debug("about to untar the image") # we can't use tarfile because of --no-same-owner: files in containers are owned # by root and tarfile is trying to `chown 0 file` when running as an unpriv user p = subprocess.Popen( ["tar", "--no-same-owner", "-C", path, "-x"], stdin=subprocess.PIPE, stderr=subprocess.PIPE, ) while True: data = export_p.stdout.read(1048576) if not data: break p.stdin.write(data) p.stdin.close() p.wait() export_p.wait() if export_p.returncode: logger.error(export_p.stderr.read()) raise ConuException("Failed to get rootfs of %s from docker." % container) if p.returncode: logger.error(p.stderr.read()) raise ConuException("Failed to unpack the archive.") logger.debug("image is unpacked")
[ "def", "export_docker_container_to_directory", "(", "client", ",", "container", ",", "path", ")", ":", "# we don't do this because of a bug in docker:", "# https://bugzilla.redhat.com/show_bug.cgi?id=1570828", "# stream, _ = client.get_archive(container.get_id(), \"/\")", "check_docker_command_works", "(", ")", "export_p", "=", "subprocess", ".", "Popen", "(", "[", "\"docker\"", ",", "\"export\"", ",", "container", ".", "get_id", "(", ")", "]", ",", "stderr", "=", "subprocess", ".", "PIPE", ",", "stdout", "=", "subprocess", ".", "PIPE", ")", "try", ":", "os", ".", "mkdir", "(", "path", ",", "0o0700", ")", "except", "OSError", "as", "ex", ":", "if", "ex", ".", "errno", "==", "errno", ".", "EEXIST", ":", "logger", ".", "debug", "(", "\"mount point %s exists already\"", ",", "path", ")", "else", ":", "logger", ".", "error", "(", "\"mount point %s can't be created: %s\"", ",", "path", ",", "ex", ")", "raise", "logger", ".", "debug", "(", "\"about to untar the image\"", ")", "# we can't use tarfile because of --no-same-owner: files in containers are owned", "# by root and tarfile is trying to `chown 0 file` when running as an unpriv user", "p", "=", "subprocess", ".", "Popen", "(", "[", "\"tar\"", ",", "\"--no-same-owner\"", ",", "\"-C\"", ",", "path", ",", "\"-x\"", "]", ",", "stdin", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "PIPE", ",", ")", "while", "True", ":", "data", "=", "export_p", ".", "stdout", ".", "read", "(", "1048576", ")", "if", "not", "data", ":", "break", "p", ".", "stdin", ".", "write", "(", "data", ")", "p", ".", "stdin", ".", "close", "(", ")", "p", ".", "wait", "(", ")", "export_p", ".", "wait", "(", ")", "if", "export_p", ".", "returncode", ":", "logger", ".", "error", "(", "export_p", ".", "stderr", ".", "read", "(", ")", ")", "raise", "ConuException", "(", "\"Failed to get rootfs of %s from docker.\"", "%", "container", ")", "if", "p", ".", "returncode", ":", "logger", ".", "error", "(", "p", ".", "stderr", ".", "read", "(", ")", ")", "raise", "ConuException", "(", "\"Failed to unpack the archive.\"", ")", "logger", ".", "debug", "(", "\"image is unpacked\"", ")" ]
take selected docker container, create an archive out of it and unpack it to a selected location :param client: instance of docker.APIClient :param container: instance of DockerContainer :param path: str, path to a directory, doesn't need to exist :return: None
[ "take", "selected", "docker", "container", "create", "an", "archive", "out", "of", "it", "and", "unpack", "it", "to", "a", "selected", "location" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/utils/__init__.py#L335-L386
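An end-to-end sketch for the exporter above. Only export_docker_container_to_directory(client, container, path) comes from the record; the DockerBackend/image calls reflect conu's documented high-level API and should be treated as assumptions:

import docker

from conu import DockerBackend                                # high-level API, assumed
from conu.utils import export_docker_container_to_directory   # import path assumed

with DockerBackend() as backend:
    image = backend.ImageClass("registry.fedoraproject.org/fedora", tag="31")
    container = image.run_via_binary()
    try:
        # the helper shells out to `docker export` and untars the stream into the target directory
        export_docker_container_to_directory(
            docker.APIClient(), container, "/tmp/exported-rootfs"
        )
    finally:
        container.delete(force=True)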
10,295
user-cont/conu
conu/backend/podman/backend.py
PodmanBackend.get_version
def get_version(self): """ return 3-tuple of version info or None :return: (str, str, str) """ raw_version = run_cmd(["podman", "version"], return_output=True) regex = re.compile(r"Version:\s*(\d+)\.(\d+)\.(\d+)") match = regex.findall(raw_version) try: return match[0] except IndexError: logger.error("unable to parse version from `podman version`") return
python
def get_version(self): """ return 3-tuple of version info or None :return: (str, str, str) """ raw_version = run_cmd(["podman", "version"], return_output=True) regex = re.compile(r"Version:\s*(\d+)\.(\d+)\.(\d+)") match = regex.findall(raw_version) try: return match[0] except IndexError: logger.error("unable to parse version from `podman version`") return
[ "def", "get_version", "(", "self", ")", ":", "raw_version", "=", "run_cmd", "(", "[", "\"podman\"", ",", "\"version\"", "]", ",", "return_output", "=", "True", ")", "regex", "=", "re", ".", "compile", "(", "r\"Version:\\s*(\\d+)\\.(\\d+)\\.(\\d+)\"", ")", "match", "=", "regex", ".", "findall", "(", "raw_version", ")", "try", ":", "return", "match", "[", "0", "]", "except", "IndexError", ":", "logger", ".", "error", "(", "\"unable to parse version from `podman version`\"", ")", "return" ]
return 3-tuple of version info or None :return: (str, str, str)
[ "return", "3", "-", "tuple", "of", "version", "info", "or", "None" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/podman/backend.py#L92-L105
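A short sketch for get_version; constructing PodmanBackend with no arguments is an assumption:

from conu.backend.podman.backend import PodmanBackend  # module path taken from this record

backend = PodmanBackend()
version = backend.get_version()
if version:
    major, minor, patch = version  # tuple of strings, e.g. ('1', '6', '2')
    print("podman %s.%s.%s detected" % (major, minor, patch))
else:
    print("could not parse `podman version` output")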
10,296
user-cont/conu
conu/backend/podman/backend.py
PodmanBackend.list_containers
def list_containers(self): """ List all available podman containers. :return: collection of instances of :class:`conu.PodmanContainer` """ containers = [] for container in self._list_podman_containers(): identifier = container["ID"] name = container["Names"] image_name = container["Image"] try: image_name, image_tag = parse_reference(image_name) except (IndexError, TypeError): image_name, image_tag = None, None image = PodmanImage(image_name, tag=image_tag, identifier=None) container = PodmanContainer(image, identifier, name=name) containers.append(container) return containers
python
def list_containers(self): """ List all available podman containers. :return: collection of instances of :class:`conu.PodmanContainer` """ containers = [] for container in self._list_podman_containers(): identifier = container["ID"] name = container["Names"] image_name = container["Image"] try: image_name, image_tag = parse_reference(image_name) except (IndexError, TypeError): image_name, image_tag = None, None image = PodmanImage(image_name, tag=image_tag, identifier=None) container = PodmanContainer(image, identifier, name=name) containers.append(container) return containers
[ "def", "list_containers", "(", "self", ")", ":", "containers", "=", "[", "]", "for", "container", "in", "self", ".", "_list_podman_containers", "(", ")", ":", "identifier", "=", "container", "[", "\"ID\"", "]", "name", "=", "container", "[", "\"Names\"", "]", "image_name", "=", "container", "[", "\"Image\"", "]", "try", ":", "image_name", ",", "image_tag", "=", "parse_reference", "(", "image_name", ")", "except", "(", "IndexError", ",", "TypeError", ")", ":", "image_name", ",", "image_tag", "=", "None", ",", "None", "image", "=", "PodmanImage", "(", "image_name", ",", "tag", "=", "image_tag", ",", "identifier", "=", "None", ")", "container", "=", "PodmanContainer", "(", "image", ",", "identifier", ",", "name", "=", "name", ")", "containers", ".", "append", "(", "container", ")", "return", "containers" ]
List all available podman containers. :return: collection of instances of :class:`conu.PodmanContainer`
[ "List", "all", "available", "podman", "containers", "." ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/podman/backend.py#L116-L137
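A listing sketch for list_containers; the name attribute on the returned PodmanContainer objects mirrors the constructor argument shown above and is otherwise an assumption about conu's container API:

from conu.backend.podman.backend import PodmanBackend

backend = PodmanBackend()
for container in backend.list_containers():
    # each entry wraps one row of podman's container listing
    print(container.name)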
10,297
user-cont/conu
conu/backend/podman/backend.py
PodmanBackend.list_images
def list_images(self): """ List all available podman images. :return: collection of instances of :class:`conu.PodmanImage` """ images = [] for image in self._list_all_podman_images(): try: i_name, tag = parse_reference(image["names"][0]) except (IndexError, TypeError): i_name, tag = None, None d_im = PodmanImage(i_name, tag=tag, identifier=image["id"], pull_policy=PodmanImagePullPolicy.NEVER) images.append(d_im) return images
python
def list_images(self): """ List all available podman images. :return: collection of instances of :class:`conu.PodmanImage` """ images = [] for image in self._list_all_podman_images(): try: i_name, tag = parse_reference(image["names"][0]) except (IndexError, TypeError): i_name, tag = None, None d_im = PodmanImage(i_name, tag=tag, identifier=image["id"], pull_policy=PodmanImagePullPolicy.NEVER) images.append(d_im) return images
[ "def", "list_images", "(", "self", ")", ":", "images", "=", "[", "]", "for", "image", "in", "self", ".", "_list_all_podman_images", "(", ")", ":", "try", ":", "i_name", ",", "tag", "=", "parse_reference", "(", "image", "[", "\"names\"", "]", "[", "0", "]", ")", "except", "(", "IndexError", ",", "TypeError", ")", ":", "i_name", ",", "tag", "=", "None", ",", "None", "d_im", "=", "PodmanImage", "(", "i_name", ",", "tag", "=", "tag", ",", "identifier", "=", "image", "[", "\"id\"", "]", ",", "pull_policy", "=", "PodmanImagePullPolicy", ".", "NEVER", ")", "images", ".", "append", "(", "d_im", ")", "return", "images" ]
List all available podman images. :return: collection of instances of :class:`conu.PodmanImage`
[ "List", "all", "available", "podman", "images", "." ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/podman/backend.py#L139-L155
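A companion sketch for list_images; accessing name and tag on the returned PodmanImage objects mirrors the constructor arguments above and is otherwise an assumption:

from conu.backend.podman.backend import PodmanBackend

backend = PodmanBackend()
for image in backend.list_images():
    # pull_policy=NEVER in the constructor above means nothing gets pulled here
    print(image.name, image.tag)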
10,298
user-cont/conu
conu/backend/docker/utils.py
inspect_to_metadata
def inspect_to_metadata(metadata_object, inspect_data): """ process data from `docker inspect` and update provided metadata object :param metadata_object: instance of Metadata :param inspect_data: dict, metadata from `docker inspect` or `dockert_client.images()` :return: instance of Metadata """ identifier = graceful_get(inspect_data, 'Id') if identifier: if ":" in identifier: # format of image name from docker inspect: # sha256:8f0e66c924c0c169352de487a3c2463d82da24e9442fc097dddaa5f800df7129 metadata_object.identifier = identifier.split(':')[1] else: # container metadata_object.identifier = identifier # format of Environment Variables from docker inspect: # ['DISTTAG=f26container', 'FGC=f26'] raw_env_vars = graceful_get(inspect_data, "Config", "Env") or [] if raw_env_vars: metadata_object.env_variables = {} for env_variable in raw_env_vars: splits = env_variable.split("=", 1) name = splits[0] value = splits[1] if len(splits) > 1 else None if value is not None: metadata_object.env_variables.update({name: value}) raw_exposed_ports = graceful_get(inspect_data, "Config", "ExposedPorts") if raw_exposed_ports: metadata_object.exposed_ports = list(raw_exposed_ports.keys()) # specific to images raw_repo_tags = graceful_get(inspect_data, 'RepoTags') if raw_repo_tags: metadata_object.name = raw_repo_tags[0] metadata_object.labels = graceful_get(inspect_data, 'Config', 'Labels') metadata_object.command = graceful_get(inspect_data, 'Config', 'Cmd') metadata_object.creation_timestamp = inspect_data.get('Created', None) # specific to images metadata_object.image_names = inspect_data.get('RepoTags', None) # specific to images digests = inspect_data.get("RepoDigests", None) if digests: metadata_object.repo_digests = digests metadata_object.digest = digests[0] return metadata_object
python
def inspect_to_metadata(metadata_object, inspect_data): """ process data from `docker inspect` and update provided metadata object :param metadata_object: instance of Metadata :param inspect_data: dict, metadata from `docker inspect` or `dockert_client.images()` :return: instance of Metadata """ identifier = graceful_get(inspect_data, 'Id') if identifier: if ":" in identifier: # format of image name from docker inspect: # sha256:8f0e66c924c0c169352de487a3c2463d82da24e9442fc097dddaa5f800df7129 metadata_object.identifier = identifier.split(':')[1] else: # container metadata_object.identifier = identifier # format of Environment Variables from docker inspect: # ['DISTTAG=f26container', 'FGC=f26'] raw_env_vars = graceful_get(inspect_data, "Config", "Env") or [] if raw_env_vars: metadata_object.env_variables = {} for env_variable in raw_env_vars: splits = env_variable.split("=", 1) name = splits[0] value = splits[1] if len(splits) > 1 else None if value is not None: metadata_object.env_variables.update({name: value}) raw_exposed_ports = graceful_get(inspect_data, "Config", "ExposedPorts") if raw_exposed_ports: metadata_object.exposed_ports = list(raw_exposed_ports.keys()) # specific to images raw_repo_tags = graceful_get(inspect_data, 'RepoTags') if raw_repo_tags: metadata_object.name = raw_repo_tags[0] metadata_object.labels = graceful_get(inspect_data, 'Config', 'Labels') metadata_object.command = graceful_get(inspect_data, 'Config', 'Cmd') metadata_object.creation_timestamp = inspect_data.get('Created', None) # specific to images metadata_object.image_names = inspect_data.get('RepoTags', None) # specific to images digests = inspect_data.get("RepoDigests", None) if digests: metadata_object.repo_digests = digests metadata_object.digest = digests[0] return metadata_object
[ "def", "inspect_to_metadata", "(", "metadata_object", ",", "inspect_data", ")", ":", "identifier", "=", "graceful_get", "(", "inspect_data", ",", "'Id'", ")", "if", "identifier", ":", "if", "\":\"", "in", "identifier", ":", "# format of image name from docker inspect:", "# sha256:8f0e66c924c0c169352de487a3c2463d82da24e9442fc097dddaa5f800df7129", "metadata_object", ".", "identifier", "=", "identifier", ".", "split", "(", "':'", ")", "[", "1", "]", "else", ":", "# container", "metadata_object", ".", "identifier", "=", "identifier", "# format of Environment Variables from docker inspect:", "# ['DISTTAG=f26container', 'FGC=f26']", "raw_env_vars", "=", "graceful_get", "(", "inspect_data", ",", "\"Config\"", ",", "\"Env\"", ")", "or", "[", "]", "if", "raw_env_vars", ":", "metadata_object", ".", "env_variables", "=", "{", "}", "for", "env_variable", "in", "raw_env_vars", ":", "splits", "=", "env_variable", ".", "split", "(", "\"=\"", ",", "1", ")", "name", "=", "splits", "[", "0", "]", "value", "=", "splits", "[", "1", "]", "if", "len", "(", "splits", ")", ">", "1", "else", "None", "if", "value", "is", "not", "None", ":", "metadata_object", ".", "env_variables", ".", "update", "(", "{", "name", ":", "value", "}", ")", "raw_exposed_ports", "=", "graceful_get", "(", "inspect_data", ",", "\"Config\"", ",", "\"ExposedPorts\"", ")", "if", "raw_exposed_ports", ":", "metadata_object", ".", "exposed_ports", "=", "list", "(", "raw_exposed_ports", ".", "keys", "(", ")", ")", "# specific to images", "raw_repo_tags", "=", "graceful_get", "(", "inspect_data", ",", "'RepoTags'", ")", "if", "raw_repo_tags", ":", "metadata_object", ".", "name", "=", "raw_repo_tags", "[", "0", "]", "metadata_object", ".", "labels", "=", "graceful_get", "(", "inspect_data", ",", "'Config'", ",", "'Labels'", ")", "metadata_object", ".", "command", "=", "graceful_get", "(", "inspect_data", ",", "'Config'", ",", "'Cmd'", ")", "metadata_object", ".", "creation_timestamp", "=", "inspect_data", ".", "get", "(", "'Created'", ",", "None", ")", "# specific to images", "metadata_object", ".", "image_names", "=", "inspect_data", ".", "get", "(", "'RepoTags'", ",", "None", ")", "# specific to images", "digests", "=", "inspect_data", ".", "get", "(", "\"RepoDigests\"", ",", "None", ")", "if", "digests", ":", "metadata_object", ".", "repo_digests", "=", "digests", "metadata_object", ".", "digest", "=", "digests", "[", "0", "]", "return", "metadata_object" ]
process data from `docker inspect` and update provided metadata object :param metadata_object: instance of Metadata :param inspect_data: dict, metadata from `docker inspect` or `docker_client.images()` :return: instance of Metadata
[ "process", "data", "from", "docker", "inspect", "and", "update", "provided", "metadata", "object" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/docker/utils.py#L13-L62
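A sketch showing how the helper above can populate a fresh metadata object from raw docker inspect data; the Metadata class location (and its no-argument constructor) is an assumption, while the helper's own module path comes from this record:

import docker

from conu.apidefs.metadata import Metadata                  # class location assumed
from conu.backend.docker.utils import inspect_to_metadata   # path taken from this record

client = docker.APIClient()
inspect_data = client.inspect_image("fedora:31")  # plain dict as returned by the docker API
metadata = inspect_to_metadata(Metadata(), inspect_data)
print(metadata.name, metadata.labels, metadata.exposed_ports)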
10,299
user-cont/conu
conu/backend/docker/utils.py
inspect_to_container_metadata
def inspect_to_container_metadata(c_metadata_object, inspect_data, image_instance): """ process data from `docker container inspect` and update provided container metadata object :param c_metadata_object: instance of ContainerMetadata :param inspect_data: dict, metadata from `docker inspect` or `dockert_client.images()` :param image_instance: instance of DockerImage :return: instance of ContainerMetadata """ inspect_to_metadata(c_metadata_object, inspect_data) status = ContainerStatus.get_from_docker( graceful_get(inspect_data, "State", "Status"), graceful_get(inspect_data, "State", "ExitCode"), ) image_id = graceful_get(inspect_data, "Image") if image_id: if ":" in image_id: # format of image name from docker inspect: # sha256:8f0e66c924c0c169352de487a3c2463d82da24e9442fc097dddaa5f800df7129 image_instance.identifier = image_id.split(':')[1] else: # container image_instance.identifier = image_id # format of Port mappings from docker inspect: # {'12345/tcp': [ # {'HostIp': '0.0.0.0', 'HostPort': '123'}, # {'HostIp': '0.0.0.0', 'HostPort': '1234'}]} port_mappings = dict() raw_port_mappings = graceful_get(inspect_data, 'HostConfig', 'PortBindings') or {} for key, value in raw_port_mappings.items(): for item in value: logger.debug("parsing ports: key = %s, item = %s", key, item) li = port_mappings.get(key, []) raw_host_port = item['HostPort'] if raw_host_port == "": int_port = None else: try: int_port = int(raw_host_port) except ValueError as ex: logger.error("could not parse port: %s", ex) continue li.append(int_port) port_mappings.update({key: li}) c_metadata_object.status = status c_metadata_object.port_mappings = port_mappings c_metadata_object.hostname = graceful_get(inspect_data, 'Config', 'Hostname') raw_networks = graceful_get(inspect_data, "NetworkSettings", "Networks").values() if raw_networks: c_metadata_object.ipv4_addresses = [ graceful_get(x, "IPAddress") for x in raw_networks if graceful_get(x, "IPAddress")] c_metadata_object.ipv6_addresses = [ graceful_get(x, "GlobalIPv6Address") for x in raw_networks if graceful_get(x, "GlobalIPv6Address")] c_metadata_object.image = image_instance name = graceful_get(inspect_data, "Name") if name: name = name[1:] if name.startswith("/") else name # remove / at the beginning c_metadata_object.name = name return c_metadata_object
python
def inspect_to_container_metadata(c_metadata_object, inspect_data, image_instance): """ process data from `docker container inspect` and update provided container metadata object :param c_metadata_object: instance of ContainerMetadata :param inspect_data: dict, metadata from `docker inspect` or `dockert_client.images()` :param image_instance: instance of DockerImage :return: instance of ContainerMetadata """ inspect_to_metadata(c_metadata_object, inspect_data) status = ContainerStatus.get_from_docker( graceful_get(inspect_data, "State", "Status"), graceful_get(inspect_data, "State", "ExitCode"), ) image_id = graceful_get(inspect_data, "Image") if image_id: if ":" in image_id: # format of image name from docker inspect: # sha256:8f0e66c924c0c169352de487a3c2463d82da24e9442fc097dddaa5f800df7129 image_instance.identifier = image_id.split(':')[1] else: # container image_instance.identifier = image_id # format of Port mappings from docker inspect: # {'12345/tcp': [ # {'HostIp': '0.0.0.0', 'HostPort': '123'}, # {'HostIp': '0.0.0.0', 'HostPort': '1234'}]} port_mappings = dict() raw_port_mappings = graceful_get(inspect_data, 'HostConfig', 'PortBindings') or {} for key, value in raw_port_mappings.items(): for item in value: logger.debug("parsing ports: key = %s, item = %s", key, item) li = port_mappings.get(key, []) raw_host_port = item['HostPort'] if raw_host_port == "": int_port = None else: try: int_port = int(raw_host_port) except ValueError as ex: logger.error("could not parse port: %s", ex) continue li.append(int_port) port_mappings.update({key: li}) c_metadata_object.status = status c_metadata_object.port_mappings = port_mappings c_metadata_object.hostname = graceful_get(inspect_data, 'Config', 'Hostname') raw_networks = graceful_get(inspect_data, "NetworkSettings", "Networks").values() if raw_networks: c_metadata_object.ipv4_addresses = [ graceful_get(x, "IPAddress") for x in raw_networks if graceful_get(x, "IPAddress")] c_metadata_object.ipv6_addresses = [ graceful_get(x, "GlobalIPv6Address") for x in raw_networks if graceful_get(x, "GlobalIPv6Address")] c_metadata_object.image = image_instance name = graceful_get(inspect_data, "Name") if name: name = name[1:] if name.startswith("/") else name # remove / at the beginning c_metadata_object.name = name return c_metadata_object
[ "def", "inspect_to_container_metadata", "(", "c_metadata_object", ",", "inspect_data", ",", "image_instance", ")", ":", "inspect_to_metadata", "(", "c_metadata_object", ",", "inspect_data", ")", "status", "=", "ContainerStatus", ".", "get_from_docker", "(", "graceful_get", "(", "inspect_data", ",", "\"State\"", ",", "\"Status\"", ")", ",", "graceful_get", "(", "inspect_data", ",", "\"State\"", ",", "\"ExitCode\"", ")", ",", ")", "image_id", "=", "graceful_get", "(", "inspect_data", ",", "\"Image\"", ")", "if", "image_id", ":", "if", "\":\"", "in", "image_id", ":", "# format of image name from docker inspect:", "# sha256:8f0e66c924c0c169352de487a3c2463d82da24e9442fc097dddaa5f800df7129", "image_instance", ".", "identifier", "=", "image_id", ".", "split", "(", "':'", ")", "[", "1", "]", "else", ":", "# container", "image_instance", ".", "identifier", "=", "image_id", "# format of Port mappings from docker inspect:", "# {'12345/tcp': [", "# {'HostIp': '0.0.0.0', 'HostPort': '123'},", "# {'HostIp': '0.0.0.0', 'HostPort': '1234'}]}", "port_mappings", "=", "dict", "(", ")", "raw_port_mappings", "=", "graceful_get", "(", "inspect_data", ",", "'HostConfig'", ",", "'PortBindings'", ")", "or", "{", "}", "for", "key", ",", "value", "in", "raw_port_mappings", ".", "items", "(", ")", ":", "for", "item", "in", "value", ":", "logger", ".", "debug", "(", "\"parsing ports: key = %s, item = %s\"", ",", "key", ",", "item", ")", "li", "=", "port_mappings", ".", "get", "(", "key", ",", "[", "]", ")", "raw_host_port", "=", "item", "[", "'HostPort'", "]", "if", "raw_host_port", "==", "\"\"", ":", "int_port", "=", "None", "else", ":", "try", ":", "int_port", "=", "int", "(", "raw_host_port", ")", "except", "ValueError", "as", "ex", ":", "logger", ".", "error", "(", "\"could not parse port: %s\"", ",", "ex", ")", "continue", "li", ".", "append", "(", "int_port", ")", "port_mappings", ".", "update", "(", "{", "key", ":", "li", "}", ")", "c_metadata_object", ".", "status", "=", "status", "c_metadata_object", ".", "port_mappings", "=", "port_mappings", "c_metadata_object", ".", "hostname", "=", "graceful_get", "(", "inspect_data", ",", "'Config'", ",", "'Hostname'", ")", "raw_networks", "=", "graceful_get", "(", "inspect_data", ",", "\"NetworkSettings\"", ",", "\"Networks\"", ")", ".", "values", "(", ")", "if", "raw_networks", ":", "c_metadata_object", ".", "ipv4_addresses", "=", "[", "graceful_get", "(", "x", ",", "\"IPAddress\"", ")", "for", "x", "in", "raw_networks", "if", "graceful_get", "(", "x", ",", "\"IPAddress\"", ")", "]", "c_metadata_object", ".", "ipv6_addresses", "=", "[", "graceful_get", "(", "x", ",", "\"GlobalIPv6Address\"", ")", "for", "x", "in", "raw_networks", "if", "graceful_get", "(", "x", ",", "\"GlobalIPv6Address\"", ")", "]", "c_metadata_object", ".", "image", "=", "image_instance", "name", "=", "graceful_get", "(", "inspect_data", ",", "\"Name\"", ")", "if", "name", ":", "name", "=", "name", "[", "1", ":", "]", "if", "name", ".", "startswith", "(", "\"/\"", ")", "else", "name", "# remove / at the beginning", "c_metadata_object", ".", "name", "=", "name", "return", "c_metadata_object" ]
process data from `docker container inspect` and update provided container metadata object :param c_metadata_object: instance of ContainerMetadata :param inspect_data: dict, metadata from `docker inspect` or `docker_client.images()` :param image_instance: instance of DockerImage :return: instance of ContainerMetadata
[ "process", "data", "from", "docker", "container", "inspect", "and", "update", "provided", "container", "metadata", "object" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/docker/utils.py#L65-L132
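Finally, a sketch for the container variant; the ContainerMetadata and DockerImage locations and constructors are assumptions, and only the helper's signature comes from the record above:

import docker

from conu.apidefs.metadata import ContainerMetadata       # class location assumed
from conu.backend.docker.image import DockerImage         # class location assumed
from conu.backend.docker.utils import inspect_to_container_metadata

client = docker.APIClient()
inspect_data = client.inspect_container("my-running-container")  # hypothetical container name
image = DockerImage("fedora", tag="31")
c_metadata = inspect_to_container_metadata(ContainerMetadata(), inspect_data, image)
print(c_metadata.status, c_metadata.port_mappings, c_metadata.ipv4_addresses)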