id int64 0 458k | file_name stringlengths 4 119 | file_path stringlengths 14 227 | content stringlengths 24 9.96M | size int64 24 9.96M | language stringclasses 1 value | extension stringclasses 14 values | total_lines int64 1 219k | avg_line_length float64 2.52 4.63M | max_line_length int64 5 9.91M | alphanum_fraction float64 0 1 | repo_name stringlengths 7 101 | repo_stars int64 100 139k | repo_forks int64 0 26.4k | repo_open_issues int64 0 2.27k | repo_license stringclasses 12 values | repo_extraction_date stringclasses 433 values |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
22,300 | robots_txt.py | wummel_linkchecker/linkcheck/cache/robots_txt.py | # -*- coding: iso-8859-1 -*-
# Copyright (C) 2006-2014 Bastian Kleineidam
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Cache robots.txt contents.
"""
from .. import robotparser2
from ..containers import LFUCache
from ..decorators import synchronized
from ..lock import get_lock
# lock objects
# cache_lock guards RobotsTxt.cache and the hit/miss counters.
cache_lock = get_lock("robots.txt_cache_lock")
# robot_lock guards the per-URL lock registry (see RobotsTxt.get_lock).
robot_lock = get_lock("robots.txt_robot_lock")
class RobotsTxt (object):
    """
    Thread-safe cache of downloaded robots.txt files.
    format: {cache key (string) -> robots.txt content (RobotFileParser)}
    """

    def __init__ (self, useragent):
        """Initialize per-URL robots.txt cache.

        @param useragent: user agent string matched against robots.txt rules
        """
        # mapping {URL -> parsed robots.txt}
        self.cache = LFUCache(size=100)
        # cache statistics
        self.hits = self.misses = 0
        # mapping {robots.txt URL -> lock}, serializing downloads per URL
        self.roboturl_locks = {}
        self.useragent = useragent

    def allows_url (self, url_data):
        """Ask robots.txt allowance."""
        roboturl = url_data.get_robots_txt_url()
        # only one thread may download/query a given robots.txt at a time
        with self.get_lock(roboturl):
            return self._allows_url(url_data, roboturl)

    def _allows_url (self, url_data, roboturl):
        """Ask robots.txt allowance. Assumes only single thread per robots.txt
        URL calls this function."""
        with cache_lock:
            if roboturl in self.cache:
                self.hits += 1
                rp = self.cache[roboturl]
                return rp.can_fetch(self.useragent, url_data.url)
            self.misses += 1
        kwargs = dict(auth=url_data.auth, session=url_data.session)
        # BUG FIX: the proxy type attribute is spelled "proxytype" (as in the
        # access below); the original guard tested hasattr(..., "proxy_type"),
        # which never matched the attribute actually read, so proxies were
        # silently ignored when fetching robots.txt.
        if hasattr(url_data, "proxy") and hasattr(url_data, "proxytype"):
            kwargs["proxies"] = {url_data.proxytype: url_data.proxy}
        rp = robotparser2.RobotFileParser(**kwargs)
        rp.set_url(roboturl)
        # downloads the robots.txt file (outside of cache_lock)
        rp.read()
        with cache_lock:
            self.cache[roboturl] = rp
        self.add_sitemap_urls(rp, url_data, roboturl)
        return rp.can_fetch(self.useragent, url_data.url)

    def add_sitemap_urls(self, rp, url_data, roboturl):
        """Add sitemap URLs found in robots.txt to the check queue."""
        if not rp.sitemap_urls or not url_data.allows_simple_recursion():
            return
        for sitemap_url, line in rp.sitemap_urls:
            url_data.add_url(sitemap_url, line=line)

    @synchronized(robot_lock)
    def get_lock(self, roboturl):
        """Return lock for robots.txt url, creating it on first use."""
        return self.roboturl_locks.setdefault(roboturl, get_lock(roboturl))
| 3,086 | Python | .py | 72 | 36.569444 | 78 | 0.670549 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,301 | gxml.py | wummel_linkchecker/linkcheck/logger/gxml.py | # -*- coding: iso-8859-1 -*-
# Copyright (C) 2000-2014 Bastian Kleineidam
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
A GraphXML logger.
"""
from .xmllog import _XMLLogger
from .graph import _GraphLogger
class GraphXMLLogger (_XMLLogger, _GraphLogger):
    """XML output mirroring the GML structure. Easy to parse with any XML
    tool."""

    LoggerName = 'gxml'

    LoggerArgs = {
        "filename": "linkchecker-out.gxml",
    }

    def __init__ (self, **kwargs):
        """Initialize graph node list and internal id counter."""
        args = self.get_args(kwargs)
        super(GraphXMLLogger, self).__init__(**args)
        self.nodes = {}
        self.nodeid = 0

    def start_output (self):
        """Write start of checking info as xml comment."""
        super(GraphXMLLogger, self).start_output()
        self.xml_start_output()
        self.xml_starttag(u'GraphXML')
        self.xml_starttag(u'graph', attrs={u"isDirected": u"true"})
        self.flush()

    def log_url (self, url_data):
        """Write one graph node for the checked URL. Edges are collected
        via get_node() and written in end_output()."""
        node = self.get_node(url_data)
        if node:
            self.xml_starttag(u'node', attrs={u"name": u"%d" % node["id"]})
            self.xml_tag(u"label", node["label"])
            if self.has_part("realurl"):
                self.xml_tag(u"url", node["url"])
            self.xml_starttag(u"data")
            if node["dltime"] >= 0 and self.has_part("dltime"):
                self.xml_tag(u"dltime", u"%f" % node["dltime"])
            if node["size"] >= 0 and self.has_part("dlsize"):
                self.xml_tag(u"size", u"%d" % node["size"])
            if node["checktime"] and self.has_part("checktime"):
                self.xml_tag(u"checktime", u"%f" % node["checktime"])
            if self.has_part("extern"):
                self.xml_tag(u"extern", u"%d" % node["extern"])
            self.xml_endtag(u"data")
            self.xml_endtag(u"node")

    def write_edge (self, node):
        """Write one edge."""
        attrs = {
            u"source": u"%d" % self.nodes[node["parent_url"]]["id"],
            u"target": u"%d" % node["id"],
        }
        self.xml_starttag(u"edge", attrs=attrs)
        self.xml_tag(u"label", node["label"])
        self.xml_starttag(u"data")
        if self.has_part("result"):
            self.xml_tag(u"valid", u"%d" % node["valid"])
        self.xml_endtag(u"data")
        self.xml_endtag(u"edge")

    def end_output (self, **kwargs):
        """Finish graph output, and print end of checking info as xml
        comment."""
        # BUG FIX: nodes were collected and write_edge() defined, but this
        # override of end_output() never invoked the base class'
        # write_edges(), so no edges appeared in the output. Write them
        # before closing the graph element, mirroring _GraphLogger.end_output.
        self.write_edges()
        self.xml_endtag(u"graph")
        self.xml_endtag(u"GraphXML")
        self.xml_end_output()
        self.close_fileoutput()
| 3,362 | Python | .py | 80 | 34.45 | 75 | 0.608191 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,302 | sitemapxml.py | wummel_linkchecker/linkcheck/logger/sitemapxml.py | # -*- coding: iso-8859-1 -*-
# Copyright (C) 2012-2014 Bastian Kleineidam
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
A sitemap XML logger.
"""
from . import xmllog
from .. import log, LOG_CHECK
# Valid values for the sitemap <changefreq> element.
ChangeFreqs = (
    'always',
    'hourly',
    'daily',
    'weekly',
    'monthly',
    'yearly',
    'never',
)

# Only URLs with these schemes may appear in a sitemap.
HTTP_SCHEMES = (u'http:', u'https:')

# Only pages with these content types are written to the sitemap.
HTML_TYPES = ('text/html', "application/xhtml+xml")


class SitemapXmlLogger (xmllog._XMLLogger):
    """Sitemap XML output according to http://www.sitemaps.org/protocol.html
    """

    LoggerName = 'sitemap'

    LoggerArgs = {
        "filename": "linkchecker-out.sitemap.xml",
        "encoding": "utf-8",
    }

    def __init__ (self, **kwargs):
        """Initialize sitemap logger state from the given configuration
        arguments (optional 'frequency' and 'priority' keys)."""
        args = self.get_args(kwargs)
        super(SitemapXmlLogger, self).__init__(**args)
        # All URLs must have the given prefix, which is determined
        # by the first logged URL.
        self.prefix = None
        # If first URL does not have a valid HTTP scheme, disable this
        # logger
        self.disabled = False
        if 'frequency' in args:
            if args['frequency'] not in ChangeFreqs:
                raise ValueError("Invalid change frequency %r" % args['frequency'])
            self.frequency = args['frequency']
        else:
            self.frequency = 'daily'
        # optional fixed priority that overrides the per-URL defaults below
        self.priority = None
        if 'priority' in args:
            self.priority = float(args['priority'])

    def start_output (self):
        """Write the XML preamble and the opening <urlset> tag."""
        super(SitemapXmlLogger, self).start_output()
        self.xml_start_output()
        attrs = {u"xmlns": u"http://www.sitemaps.org/schemas/sitemap/0.9"}
        self.xml_starttag(u'urlset', attrs)
        self.flush()

    def log_filter_url(self, url_data, do_print):
        """Update accounting data and determine if URL should be included
        in the sitemap.
        """
        self.stats.log_url(url_data, do_print)
        if self.disabled:
            return
        # initialize prefix and priority
        if self.prefix is None:
            if not url_data.url.startswith(HTTP_SCHEMES):
                log.warn(LOG_CHECK, "Sitemap URL %r does not start with http: or https:.", url_data.url)
                self.disabled = True
                return
            self.prefix = url_data.url
            # first URL (ie. the homepage) gets priority 1.0 per default
            priority = 1.0
        elif url_data.url == self.prefix:
            # the homepage was already logged as the first URL
            return
        else:
            # all other pages get priority 0.5 per default
            priority = 0.5
        if self.priority is not None:
            priority = self.priority
        # ignore the do_print flag and determine ourselves if we filter the url
        if (url_data.valid
            and url_data.url.startswith(HTTP_SCHEMES)
            and url_data.url.startswith(self.prefix)
            and url_data.content_type in HTML_TYPES):
            self.log_url(url_data, priority=priority)

    def log_url (self, url_data, priority=None):
        """Log URL data in sitemap format."""
        self.xml_starttag(u'url')
        self.xml_tag(u'loc', url_data.url)
        if url_data.modified:
            # sep="T" joins date and time for the lastmod value
            self.xml_tag(u'lastmod', self.format_modified(url_data.modified, sep="T"))
        self.xml_tag(u'changefreq', self.frequency)
        self.xml_tag(u'priority', "%.2f" % priority)
        self.xml_endtag(u'url')
        self.flush()

    def end_output (self, **kwargs):
        """Write XML end tag."""
        self.xml_endtag(u"urlset")
        self.xml_end_output()
        self.close_fileoutput()
| 4,328 | Python | .py | 110 | 31.809091 | 104 | 0.629278 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,303 | gml.py | wummel_linkchecker/linkcheck/logger/gml.py | # -*- coding: iso-8859-1 -*-
# Copyright (C) 2000-2014 Bastian Kleineidam
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
A gml logger.
"""
from .graph import _GraphLogger
class GMLLogger (_GraphLogger):
    """GML means Graph Modeling Language. Use a GML tool to see
    the sitemap graph."""

    LoggerName = 'gml'

    LoggerArgs = {
        "filename": "linkchecker-out.gml",
    }

    def start_output (self):
        """Write start of checking info as gml comment."""
        super(GMLLogger, self).start_output()
        if self.has_part("intro"):
            self.write_intro()
            self.writeln()
        self.writeln(u"graph [")
        self.writeln(u"  directed 1")
        self.flush()

    def comment (self, s, **args):
        """Write GML comment."""
        self.writeln(s=u'comment "%s"' % s, **args)

    def log_url (self, url_data):
        """Write one node; does nothing if the URL was already logged
        (get_node() returns None then)."""
        node = self.get_node(url_data)
        if node:
            self.writeln(u"  node [")
            self.writeln(u"    id %d" % node["id"])
            self.writeln(u'    label "%s"' % node["label"])
            if self.has_part("realurl"):
                self.writeln(u'    url "%s"' % node["url"])
            if node["dltime"] >= 0 and self.has_part("dltime"):
                self.writeln(u"    dltime %d" % node["dltime"])
            if node["size"] >= 0 and self.has_part("dlsize"):
                self.writeln(u"    size %d" % node["size"])
            if node["checktime"] and self.has_part("checktime"):
                self.writeln(u"    checktime %d" % node["checktime"])
            if self.has_part("extern"):
                self.writeln(u"    extern %d" % node["extern"])
            self.writeln(u"  ]")

    def write_edge (self, node):
        """Write one edge from the node's parent to the node."""
        self.writeln(u"  edge [")
        self.writeln(u'    label "%s"' % node["edge"])
        self.writeln(u"    source %d" % self.nodes[node["parent_url"]]["id"])
        self.writeln(u"    target %d" % node["id"])
        if self.has_part("result"):
            self.writeln(u"    valid %d" % node["valid"])
        self.writeln(u"  ]")

    def end_graph (self):
        """Write end of graph marker."""
        self.writeln(u"]")
| 2,895 | Python | .py | 69 | 34.565217 | 77 | 0.589279 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,304 | sql.py | wummel_linkchecker/linkcheck/logger/sql.py | # -*- coding: iso-8859-1 -*-
# Copyright (C) 2000-2014 Bastian Kleineidam
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
A SQL logger.
"""
import os
from . import _Logger
from .. import url as urlutil
def sqlify (s):
    """
    Render a value as an SQL string literal.

    Empty or None values map to the SQL NULL keyword. Otherwise single
    quotes are doubled and platform line separators are replaced by a
    literal backslash-n sequence before wrapping in quotes.
    """
    if not s:
        return "NULL"
    escaped = s.replace("'", "''")
    escaped = escaped.replace(os.linesep, r"\n")
    return "'%s'" % escaped
def intify (s):
    """
    Coerce a truth value to 0/1.

    @param s: an object (usually a string)
    @type s: object
    @return: 1 if object truth value is True, else 0
    @rtype: number
    """
    return 1 if s else 0
class SQLLogger (_Logger):
    """
    SQL output, should work with any SQL database (not tested).
    """

    LoggerName = 'sql'

    LoggerArgs = {
        "filename": "linkchecker-out.sql",
        'separator': ';',
        'dbname': 'linksdb',
    }

    def __init__ (self, **kwargs):
        """Initialize database access data."""
        args = self.get_args(kwargs)
        super(SQLLogger, self).__init__(**args)
        self.init_fileoutput(args)
        # table name used in the generated INSERT statements
        self.dbname = args['dbname']
        # statement terminator appended to every INSERT (';' by default)
        self.separator = args['separator']

    def comment (self, s, **args):
        """
        Write SQL comment.
        """
        self.write(u"-- ")
        self.writeln(s=s, **args)

    def start_output (self):
        """
        Write start of checking info as sql comment.
        """
        super(SQLLogger, self).start_output()
        if self.has_part("intro"):
            self.write_intro()
            self.writeln()
        self.flush()

    def log_url (self, url_data):
        """
        Store url check info into the database.

        String values are escaped with sqlify() (empty -> NULL), truth
        values coerced with intify().
        """
        self.writeln(u"insert into %(table)s(urlname,"
            "parentname,baseref,valid,result,warning,info,url,line,col,"
            "name,checktime,dltime,size,cached,level,modified) values ("
            "%(base_url)s,"
            "%(url_parent)s,"
            "%(base_ref)s,"
            "%(valid)d,"
            "%(result)s,"
            "%(warning)s,"
            "%(info)s,"
            "%(url)s,"
            "%(line)d,"
            "%(column)d,"
            "%(name)s,"
            "%(checktime)d,"
            "%(dltime)d,"
            "%(size)d,"
            "%(cached)d,"
            "%(level)d,"
            "%(modified)s"
            ")%(separator)s" %
            {'table': self.dbname,
             'base_url': sqlify(url_data.base_url),
             'url_parent': sqlify((url_data.parent_url)),
             'base_ref': sqlify((url_data.base_ref)),
             'valid': intify(url_data.valid),
             'result': sqlify(url_data.result),
             # warnings are (tag, message) pairs; only messages are stored
             'warning': sqlify(os.linesep.join(x[1] for x in url_data.warnings)),
             'info': sqlify(os.linesep.join(url_data.info)),
             'url': sqlify(urlutil.url_quote(url_data.url)),
             'line': url_data.line,
             'column': url_data.column,
             'name': sqlify(url_data.name),
             'checktime': url_data.checktime,
             'dltime': url_data.dltime,
             'size': url_data.size,
             # historical column; always written as 0 here
             'cached': 0,
             'separator': self.separator,
             "level": url_data.level,
             "modified": sqlify(self.format_modified(url_data.modified)),
            })
        self.flush()

    def end_output (self, **kwargs):
        """
        Write end of checking info as sql comment.
        """
        if self.has_part("outro"):
            self.write_outro()
        self.close_fileoutput()
| 4,274 | Python | .py | 125 | 25.28 | 83 | 0.547412 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,305 | graph.py | wummel_linkchecker/linkcheck/logger/graph.py | # -*- coding: iso-8859-1 -*-
# Copyright (C) 2000-2014 Bastian Kleineidam
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Base class for graph loggers.
"""
from . import _Logger
from ..decorators import notimplemented
import re
class _GraphLogger (_Logger):
    """Provide base method to get node data."""

    def __init__ (self, **kwargs):
        """Initialize graph node list and internal id counter."""
        args = self.get_args(kwargs)
        super(_GraphLogger, self).__init__(**args)
        self.init_fileoutput(args)
        # mapping {URL -> node info dict}; consulted when writing edges
        self.nodes = {}
        # next unique node id handed out by get_node()
        self.nodeid = 0

    def log_filter_url(self, url_data, do_print):
        """Update accounting data and log all valid URLs regardless the
        do_print flag.
        """
        self.stats.log_url(url_data, do_print)
        # ignore the do_print flag and determine ourselves if we filter the url
        if url_data.valid:
            self.log_url(url_data)

    def get_node (self, url_data):
        """Return new node data or None if node already exists."""
        if not url_data.url:
            return None
        elif url_data.url in self.nodes:
            # each URL becomes at most one node
            return None
        node = {
            "url": url_data.url,
            "parent_url": url_data.parent_url,
            "id": self.nodeid,
            # labels are sanitized with quote() for graph output formats
            "label": quote(url_data.title if url_data.title else url_data.name),
            "extern": 1 if url_data.extern else 0,
            "checktime": url_data.checktime,
            "size": url_data.size,
            "dltime": url_data.dltime,
            "edge": quote(url_data.name),
            "valid": 1 if url_data.valid else 0,
        }
        self.nodes[node["url"]] = node
        self.nodeid += 1
        return node

    def write_edges (self):
        """
        Write all edges we can find in the graph in a brute-force manner.
        Only edges whose parent URL is itself a known node are written.
        """
        for node in self.nodes.values():
            if node["parent_url"] in self.nodes:
                self.write_edge(node)
        self.flush()

    @notimplemented
    def write_edge (self, node):
        """Write edge data for one node and its parent."""
        pass

    @notimplemented
    def end_graph (self):
        """Write end-of-graph marker."""
        pass

    def end_output (self, **kwargs):
        """Write edges and end of checking info as gml comment."""
        self.write_edges()
        self.end_graph()
        if self.has_part("outro"):
            self.write_outro()
        self.close_fileoutput()
# Character runs NOT in this whitelist are replaced by a single space.
_disallowed = re.compile(r"[^a-zA-Z0-9 '#(){}\-\[\]\.,;:\!\?]+")

def quote (s):
    """Replace each run of disallowed characters in node or edge labels
    by one space, then strip whitespace from both ends of the label."""
    sanitized = _disallowed.sub(" ", s)
    return sanitized.strip()
| 3,382 | Python | .py | 88 | 31.386364 | 80 | 0.624924 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,306 | dot.py | wummel_linkchecker/linkcheck/logger/dot.py | # -*- coding: iso-8859-1 -*-
# Copyright (C) 2005-2014 Bastian Kleineidam
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
A DOT graph format logger. The specification has been taken from
http://www.graphviz.org/doc/info/lang.html
"""
from .graph import _GraphLogger
class DOTLogger (_GraphLogger):
    """
    Generates .dot sitemap graphs. Use graphviz to see the sitemap graph.
    """

    LoggerName = "dot"

    LoggerArgs = {
        "filename": "linkchecker-out.dot",
        "encoding": "ascii",
    }

    def start_output (self):
        """Write start of checking info as DOT comment."""
        super(DOTLogger, self).start_output()
        if self.has_part("intro"):
            self.write_intro()
            self.writeln()
        self.writeln(u"digraph G {")
        self.writeln(u"  graph [")
        self.writeln(u"    charset=\"%s\"," % self.get_charset_encoding())
        self.writeln(u"  ];")
        self.flush()

    def comment (self, s, **args):
        """Write DOT comment."""
        self.write(u"// ")
        self.writeln(s=s, **args)

    def log_url (self, url_data):
        """Write one node; skipped when the URL was already logged
        (get_node() returns None then)."""
        node = self.get_node(url_data)
        if node is not None:
            self.writeln(u'  "%s" [' % dotquote(node["label"]))
            if self.has_part("realurl"):
                self.writeln(u'    href="%s",' % dotquote(node["url"]))
            if node["dltime"] >= 0 and self.has_part("dltime"):
                self.writeln(u"    dltime=%d," % node["dltime"])
            if node["size"] >= 0 and self.has_part("dlsize"):
                self.writeln(u"    size=%d," % node["size"])
            if node["checktime"] and self.has_part("checktime"):
                self.writeln(u"    checktime=%d," % node["checktime"])
            if self.has_part("extern"):
                self.writeln(u"    extern=%d," % node["extern"])
            self.writeln(u"  ];")

    def write_edge (self, node):
        """Write edge from parent to node."""
        source = dotquote(self.nodes[node["parent_url"]]["label"])
        target = dotquote(node["label"])
        self.writeln(u'  "%s" -> "%s" [' % (source, target))
        self.writeln(u'    label="%s",' % dotquote(node["edge"]))
        if self.has_part("result"):
            self.writeln(u"    valid=%d," % node["valid"])
        self.writeln(u"  ];")

    def end_graph (self):
        """Write end of graph marker."""
        self.writeln(u"}")
def dotquote (s):
    """Escape embedded double quotes so the value can be used inside a
    double-quoted DOT string."""
    return '\\"'.join(s.split('"'))
| 3,210 | Python | .py | 76 | 35.144737 | 74 | 0.599424 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,307 | text.py | wummel_linkchecker/linkcheck/logger/text.py | # -*- coding: iso-8859-1 -*-
# Copyright (C) 2000-2014 Bastian Kleineidam
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
The default text logger.
"""
import time
from . import _Logger
from .. import ansicolor, strformat, configuration, i18n
class TextLogger (_Logger):
    """
    A text logger, colorizing the output if possible.

    Informal text output format spec:
    Output consists of a set of URL logs separated by one or more
    blank lines.
    A URL log consists of two or more lines. Each line consists of
    keyword and data, separated by whitespace.
    Unknown keywords will be ignored.
    """

    LoggerName = 'text'

    LoggerArgs = {
        "filename": "linkchecker-out.txt",
        'colorparent': "default",
        'colorurl': "default",
        'colorname': "default",
        'colorreal': "cyan",
        'colorbase': "purple",
        'colorvalid': "bold;green",
        'colorinvalid': "bold;red",
        'colorinfo': "default",
        'colorwarning': "bold;yellow",
        'colordltime': "default",
        'colordlsize': "default",
        'colorreset': "default",
    }

    def __init__ (self, **kwargs):
        """Initialize error counter and optional file output."""
        args = self.get_args(kwargs)
        super(TextLogger, self).__init__(**args)
        self.output_encoding = args.get("encoding", i18n.default_encoding)
        self.init_fileoutput(args)
        # per-field color settings, falling back to 'default'
        self.colorparent = args.get('colorparent', 'default')
        self.colorurl = args.get('colorurl', 'default')
        self.colorname = args.get('colorname', 'default')
        self.colorreal = args.get('colorreal', 'default')
        self.colorbase = args.get('colorbase', 'default')
        self.colorvalid = args.get('colorvalid', 'default')
        self.colorinvalid = args.get('colorinvalid', 'default')
        self.colorinfo = args.get('colorinfo', 'default')
        self.colorwarning = args.get('colorwarning', 'default')
        self.colordltime = args.get('colordltime', 'default')
        self.colordlsize = args.get('colordlsize', 'default')
        self.colorreset = args.get('colorreset', 'default')

    def init_fileoutput (self, args):
        """Colorize file output if possible."""
        super(TextLogger, self).init_fileoutput(args)
        if self.fd is not None:
            self.fd = ansicolor.Colorizer(self.fd)

    def start_fileoutput (self):
        """Needed to make file descriptor color aware."""
        # only wrap the descriptor if it was created by this call
        init_color = self.fd is None
        super(TextLogger, self).start_fileoutput()
        if init_color:
            self.fd = ansicolor.Colorizer(self.fd)

    def start_output (self):
        """Write generic start checking info."""
        super(TextLogger, self).start_output()
        if self.has_part('intro'):
            self.write_intro()
            self.flush()

    def write_intro (self):
        """Log introduction text."""
        self.writeln(configuration.AppInfo)
        self.writeln(configuration.Freeware)
        self.writeln(_("Get the newest version at %(url)s") %
                     {'url': configuration.Url})
        self.writeln(_("Write comments and bugs to %(url)s") %
                     {'url': configuration.SupportUrl})
        self.writeln(_("Support this project at %(url)s") %
                     {'url': configuration.DonateUrl})
        self.check_date()
        self.writeln()
        self.writeln(_("Start checking at %s") %
                     strformat.strtime(self.starttime))

    def log_url (self, url_data):
        """Write url checking info, one labeled line per configured part."""
        self.writeln()
        if self.has_part('url'):
            self.write_url(url_data)
        if url_data.name and self.has_part('name'):
            self.write_name(url_data)
        if url_data.parent_url and self.has_part('parenturl'):
            self.write_parent(url_data)
        if url_data.base_ref and self.has_part('base'):
            self.write_base(url_data)
        if url_data.url and self.has_part('realurl'):
            self.write_real(url_data)
        if url_data.checktime and self.has_part('checktime'):
            self.write_checktime(url_data)
        if url_data.dltime >= 0 and self.has_part('dltime'):
            self.write_dltime(url_data)
        if url_data.size >= 0 and self.has_part('dlsize'):
            self.write_size(url_data)
        if url_data.info and self.has_part('info'):
            self.write_info(url_data)
        if url_data.modified and self.has_part('modified'):
            self.write_modified(url_data)
        if url_data.warnings and self.has_part('warning'):
            self.write_warning(url_data)
        if self.has_part('result'):
            self.write_result(url_data)
        self.flush()

    def write_id (self):
        """Write unique ID of url_data."""
        self.writeln()
        self.write(self.part('id') + self.spaces('id'))
        self.writeln(u"%d" % self.stats.number, color=self.colorinfo)

    def write_url (self, url_data):
        """Write url_data.base_url."""
        self.write(self.part('url') + self.spaces('url'))
        txt = strformat.strline(url_data.base_url)
        self.writeln(txt, color=self.colorurl)

    def write_name (self, url_data):
        """Write url_data.name."""
        self.write(self.part("name") + self.spaces("name"))
        self.writeln(strformat.strline(url_data.name), color=self.colorname)

    def write_parent (self, url_data):
        """Write url_data.parent_url plus line/column/page position info."""
        self.write(self.part('parenturl') + self.spaces("parenturl"))
        txt = url_data.parent_url
        if url_data.line > 0:
            txt += _(", line %d") % url_data.line
        if url_data.column > 0:
            txt += _(", col %d") % url_data.column
        if url_data.page > 0:
            txt += _(", page %d") % url_data.page
        self.writeln(txt, color=self.colorparent)

    def write_base (self, url_data):
        """Write url_data.base_ref."""
        self.write(self.part("base") + self.spaces("base"))
        self.writeln(url_data.base_ref, color=self.colorbase)

    def write_real (self, url_data):
        """Write url_data.url."""
        self.write(self.part("realurl") + self.spaces("realurl"))
        self.writeln(unicode(url_data.url), color=self.colorreal)

    def write_dltime (self, url_data):
        """Write url_data.dltime."""
        self.write(self.part("dltime") + self.spaces("dltime"))
        self.writeln(_("%.3f seconds") % url_data.dltime,
                     color=self.colordltime)

    def write_size (self, url_data):
        """Write url_data.size."""
        self.write(self.part("dlsize") + self.spaces("dlsize"))
        self.writeln(strformat.strsize(url_data.size),
                     color=self.colordlsize)

    def write_checktime (self, url_data):
        """Write url_data.checktime (reuses the download-time color)."""
        self.write(self.part("checktime") + self.spaces("checktime"))
        self.writeln(_("%.3f seconds") % url_data.checktime,
                     color=self.colordltime)

    def write_info (self, url_data):
        """Write url_data.info, wrapped at 65 characters."""
        self.write(self.part("info") + self.spaces("info"))
        self.writeln(self.wrap(url_data.info, 65), color=self.colorinfo)

    def write_modified(self, url_data):
        """Write url_data.modified."""
        self.write(self.part("modified") + self.spaces("modified"))
        self.writeln(self.format_modified(url_data.modified))

    def write_warning (self, url_data):
        """Write url_data.warning."""
        self.write(self.part("warning") + self.spaces("warning"))
        # warnings are (tag, message) pairs
        warning_msgs = [u"[%s] %s" % x for x in url_data.warnings]
        self.writeln(self.wrap(warning_msgs, 65), color=self.colorwarning)

    def write_result (self, url_data):
        """Write url_data.result as Valid/Error plus optional detail."""
        self.write(self.part("result") + self.spaces("result"))
        if url_data.valid:
            color = self.colorvalid
            self.write(_("Valid"), color=color)
        else:
            color = self.colorinvalid
            self.write(_("Error"), color=color)
        if url_data.result:
            self.write(u": " + url_data.result, color=color)
        self.writeln()

    def write_outro (self, interrupt=False):
        """Write end of checking message with link/warning/error totals."""
        self.writeln()
        if interrupt:
            self.writeln(_("The check has been interrupted; results are not complete."))
        self.write(_("That's it.") + " ")
        self.write(_n("%d link", "%d links",
                      self.stats.number) % self.stats.number)
        self.write(u" ")
        if self.stats.num_urls is not None:
            self.write(_n("in %d URL", "in %d URLs",
                          self.stats.num_urls) % self.stats.num_urls)
        self.write(u" checked. ")
        warning_text = _n("%d warning found", "%d warnings found",
                          self.stats.warnings_printed) % self.stats.warnings_printed
        if self.stats.warnings_printed:
            warning_color = self.colorwarning
        else:
            warning_color = self.colorinfo
        self.write(warning_text, color=warning_color)
        if self.stats.warnings != self.stats.warnings_printed:
            self.write(_(" (%d ignored or duplicates not printed)") %
                       (self.stats.warnings - self.stats.warnings_printed))
        self.write(u". ")
        error_text = _n("%d error found", "%d errors found",
                        self.stats.errors_printed) % self.stats.errors_printed
        if self.stats.errors_printed:
            error_color = self.colorinvalid
        else:
            error_color = self.colorvalid
        self.write(error_text, color=error_color)
        if self.stats.errors != self.stats.errors_printed:
            self.write(_(" (%d duplicates not printed)") %
                       (self.stats.errors - self.stats.errors_printed))
        self.writeln(u".")
        num = self.stats.internal_errors
        if num:
            self.writeln(_n("There was %(num)d internal error.",
                            "There were %(num)d internal errors.", num) % {"num": num})
        self.stoptime = time.time()
        duration = self.stoptime - self.starttime
        self.writeln(_("Stopped checking at %(time)s (%(duration)s)") %
                     {"time": strformat.strtime(self.stoptime),
                      "duration": strformat.strduration_long(duration)})

    def write_stats (self):
        """Write check statistic info."""
        self.writeln()
        self.writeln(_("Statistics:"))
        if self.stats.downloaded_bytes is not None:
            self.writeln(_("Downloaded: %s.") % strformat.strsize(self.stats.downloaded_bytes))
        if self.stats.number > 0:
            self.writeln(_(
                "Content types: %(image)d image, %(text)d text, %(video)d video, "
                "%(audio)d audio, %(application)d application, %(mail)d mail"
                " and %(other)d other.") % self.stats.link_types)
            self.writeln(_("URL lengths: min=%(min)d, max=%(max)d, avg=%(avg)d.") %
                         dict(min=self.stats.min_url_length,
                              max=self.stats.max_url_length,
                              avg=self.stats.avg_url_length))
        else:
            self.writeln(_("No statistics available since no URLs were checked."))

    def end_output (self, **kwargs):
        """Write end of output info, and flush all output buffers."""
        self.stats.downloaded_bytes = kwargs.get("downloaded_bytes")
        self.stats.num_urls = kwargs.get("num_urls")
        if self.has_part('stats'):
            self.write_stats()
        if self.has_part('outro'):
            self.write_outro(interrupt=kwargs.get("interrupt"))
        self.close_fileoutput()
| 12,360 | Python | .py | 268 | 36.761194 | 95 | 0.602984 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,308 | __init__.py | wummel_linkchecker/linkcheck/logger/__init__.py | # -*- coding: iso-8859-1 -*-
# Copyright (C) 2000-2014 Bastian Kleineidam
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Output logging support for different formats.
"""
import sys
import os
import datetime
import time
import codecs
import abc
from .. import log, LOG_CHECK, strformat, dummy, configuration, i18n
# Dummy translation marker: it lets gettext extraction see the field
# labels below while deferring the actual translation to _Logger.part().
_ = lambda x: x
# Mapping of log part names to their (translatable) display labels.
Fields = dict(
    realurl=_("Real URL"),
    cachekey=_("Cache key"),
    result=_("Result"),
    base=_("Base"),
    name=_("Name"),
    parenturl=_("Parent URL"),
    extern=_("Extern"),
    info=_("Info"),
    warning=_("Warning"),
    dltime=_("D/L time"),
    dlsize=_("Size"),
    checktime=_("Check time"),
    url=_("URL"),
    level=_("Level"),
    modified=_("Modified"),
)
# remove the marker again; at runtime _ resolves to the gettext builtin
del _
# Counter template for logged content types; "other" collects everything
# that is not one of the explicit major types (see LogStatistics.log_url).
ContentTypes = dict(
    image=0,
    text=0,
    video=0,
    audio=0,
    application=0,
    mail=0,
    other=0,
)
class LogStatistics (object):
    """Collect statistics about logged URLs:
    - error, warning and valid link counts
    - content type distribution (image, video, audio, text, ...)
    - URL length figures (minimum, maximum, running average)
    """
    def __init__ (self):
        """Start with fresh (zeroed) statistics."""
        self.reset()
    def reset (self):
        """Zero all counters and length figures."""
        # total number of logged URLs
        self.number = 0
        # total number of URL errors
        self.errors = 0
        # number of URL errors that were actually printed
        self.errors_printed = 0
        # total number of URL warnings
        self.warnings = 0
        # number of URL warnings that were actually printed
        self.warnings_printed = 0
        # number of internal program errors
        self.internal_errors = 0
        # per content-type counters
        self.link_types = ContentTypes.copy()
        # URL length statistics
        self.max_url_length = 0
        self.min_url_length = 0
        self.avg_url_length = 0.0
        self.avg_number = 0
        # overall downloaded bytes (set by the logger at end of a run)
        self.downloaded_bytes = None
    def log_url (self, url_data, do_print):
        """Update all counters with one checked URL."""
        self.number += 1
        if not url_data.valid:
            self.errors += 1
            if do_print:
                self.errors_printed += 1
        num_warnings = len(url_data.warnings)
        self.warnings += num_warnings
        if do_print:
            self.warnings_printed += num_warnings
        # classify by major MIME type, falling back to mail/other
        ctype = url_data.content_type
        if ctype:
            key = ctype.split('/', 1)[0].lower()
        elif url_data.url.startswith(u"mailto:"):
            key = "mail"
        else:
            key = "other"
        if key not in self.link_types:
            key = "other"
        self.link_types[key] += 1
        if url_data.url:
            length = len(url_data.url)
            if length > self.max_url_length:
                self.max_url_length = length
            if self.min_url_length == 0 or length < self.min_url_length:
                self.min_url_length = length
            # empty URLs are excluded from the average, so count separately
            self.avg_number += 1
            # incremental (running) mean update
            self.avg_url_length += (length - self.avg_url_length) / self.avg_number
    def log_internal_error (self):
        """Count one more internal error."""
        self.internal_errors += 1
class _Logger (object):
    """
    Base class for logging of checked urls. It defines the public API
    (see below) and offers basic functionality for all loggers.

    Each logger offers the following functions:
    * start_output()
      Initialize and start log output. Most loggers print a comment
      with copyright information.
    * end_output(**kwargs)
      Finish log output, possibly flushing buffers. Most loggers also
      print some statistics.
      Custom keyword arguments can be given for different loggers.
    * log_filter_url(url_data, do_print)
      Log a checked URL. The url_data object is a transport form of
      the UrlData class. The do_print flag indicates if this URL
      should be logged or just used to update internal statistics.

    Each subclassed logger must implement the following functions:
    * start_output()
      Also call the base class implementation of this.
    * end_output(**kwargs)
      See above.
    * log_url(url_data)
      Log a checked URL. Called by log_filter_url if do_print is True.
    """
    # Python 2 style abstract base class declaration
    __metaclass__ = abc.ABCMeta
    # A lowercase name for this logger, usable for option values
    LoggerName = None
    # Default log configuration
    LoggerArgs = {}
    def __init__ (self, **args):
        """
        Initialize a logger, looking for part restrictions in kwargs.
        """
        if 'parts' in args and "all" not in args['parts']:
            # only log given parts
            self.logparts = args['parts']
        else:
            # log all parts
            self.logparts = None
        # number of spaces before log parts for alignment
        self.logspaces = {}
        # maximum indent of spaces for alignment
        self.max_indent = 0
        # log statistics
        self.stats = LogStatistics()
        # encoding of output
        encoding = args.get("encoding", i18n.default_encoding)
        try:
            # normalize to the canonical codec name
            encoding = codecs.lookup(encoding).name
        except LookupError:
            # unknown encoding name: fall back to the i18n default
            encoding = i18n.default_encoding
        self.output_encoding = encoding
        # how to handle codec errors
        self.codec_errors = "replace"
        # Flag to see if logger is active. Can be deactivated on errors.
        self.is_active = True
    def get_args(self, kwargs):
        """Construct log configuration from default and user args."""
        args = dict(self.LoggerArgs)
        args.update(kwargs)
        return args
    def get_charset_encoding (self):
        """Translate the output encoding to a charset encoding name."""
        # "utf-8-sig" is a Python codec name; the declared charset is utf-8
        if self.output_encoding == "utf-8-sig":
            return "utf-8"
        return self.output_encoding
    def encode (self, s):
        """Encode unicode string with the configured output encoding."""
        assert isinstance(s, unicode)
        return s.encode(self.output_encoding, self.codec_errors)
    def init_fileoutput (self, args):
        """
        Initialize self.fd file descriptor from args. For file output
        (used when the fileoutput arg is given), the self.fd
        initialization is deferred until the first self.write() call.
        This avoids creation of an empty file when no output is written.
        """
        self.filename = None
        self.close_fd = False
        self.fd = None
        if args.get('fileoutput'):
            self.filename = os.path.expanduser(args['filename'])
        elif 'fd' in args:
            # caller supplied an already-open file object
            self.fd = args['fd']
        else:
            self.fd = self.create_fd()
    def start_fileoutput (self):
        """Start output to configured file."""
        path = os.path.dirname(self.filename)
        try:
            if path and not os.path.isdir(path):
                os.makedirs(path)
            self.fd = self.create_fd()
            self.close_fd = True
        except IOError:
            msg = sys.exc_info()[1]
            log.warn(LOG_CHECK,
                "Could not open file %r for writing: %s\n"
                "Disabling log output of %s", self.filename, msg, self)
            # swallow all further output and mark the logger inactive
            self.fd = dummy.Dummy()
            self.is_active = False
        # reset in both paths so write() does not try to open again
        self.filename = None
    def create_fd (self):
        """Create open file descriptor."""
        if self.filename is None:
            # no file configured: write (encoded) to standard output
            return i18n.get_encoded_writer(encoding=self.output_encoding,
                                           errors=self.codec_errors)
        return codecs.open(self.filename, "wb", self.output_encoding,
                           self.codec_errors)
    def close_fileoutput (self):
        """
        Flush and close the file output denoted by self.fd.
        """
        if self.fd is not None:
            try:
                self.flush()
            except IOError:
                # ignore flush errors
                pass
            if self.close_fd:
                try:
                    self.fd.close()
                except IOError:
                    # ignore close errors
                    pass
            self.fd = None
    def check_date (self):
        """
        Check for special dates.
        """
        # easter egg: congratulate LinkChecker on its birthday (Jan 7)
        now = datetime.date.today()
        if now.day == 7 and now.month == 1:
            msg = _("Happy birthday for LinkChecker, I'm %d years old today!")
            self.comment(msg % (now.year - 2000))
    def comment (self, s, **args):
        """
        Write a comment and a newline. This method just prints
        the given string.
        """
        self.writeln(s=s, **args)
    def wrap (self, lines, width):
        """
        Return wrapped version of given lines.
        """
        sep = os.linesep+os.linesep
        text = sep.join(lines)
        # indent continuation lines to align with the log part values
        kwargs = dict(subsequent_indent=" "*self.max_indent,
                      initial_indent=" "*self.max_indent,
                      break_long_words=False,
                      break_on_hyphens=False)
        return strformat.wrap(text, width, **kwargs).lstrip()
    def write (self, s, **args):
        """Write string to the output descriptor, opening the configured
        file lazily on first use. On I/O errors the logger deactivates
        itself and redirects further output to a dummy object.
        NOTE(review): the old docstring claimed control characters are
        stripped here, but no stripping happens in this implementation.
        """
        if self.filename is not None:
            # deferred file creation, see init_fileoutput()
            self.start_fileoutput()
        if self.fd is None:
            # Happens when aborting threads times out
            log.warn(LOG_CHECK, "writing to unitialized or closed file")
        else:
            try:
                self.fd.write(s, **args)
            except IOError:
                msg = sys.exc_info()[1]
                log.warn(LOG_CHECK,
                    "Could not write to output file: %s\n"
                    "Disabling log output of %s", msg, self)
                self.close_fileoutput()
                self.fd = dummy.Dummy()
                self.is_active = False
    def writeln (self, s=u"", **args):
        """
        Write string to output descriptor plus a newline.
        """
        self.write(u"%s%s" % (s, unicode(os.linesep)), **args)
    def has_part (self, name):
        """
        See if given part name will be logged.
        """
        if self.logparts is None:
            # log all parts
            return True
        return name in self.logparts
    def part (self, name):
        """
        Return translated part name.
        """
        # _ is the gettext builtin installed by the i18n setup;
        # the module-level dummy _ was deleted after building Fields
        return _(Fields.get(name, u""))
    def spaces (self, name):
        """
        Return indent of spaces for given part name.
        """
        return self.logspaces[name]
    def start_output (self):
        """
        Start log output.
        """
        # map with spaces between part name and value
        if self.logparts is None:
            parts = Fields.keys()
        else:
            parts = self.logparts
        values = (self.part(x) for x in parts)
        # maximum indent for localized log part names
        self.max_indent = max(len(x) for x in values)+1
        for key in parts:
            numspaces = (self.max_indent - len(self.part(key)))
            self.logspaces[key] = u" " * numspaces
        self.stats.reset()
        self.starttime = time.time()
    def log_filter_url (self, url_data, do_print):
        """
        Log a new url with this logger if do_print is True. Else
        only update accounting data.
        """
        self.stats.log_url(url_data, do_print)
        if do_print:
            self.log_url(url_data)
    def write_intro (self):
        """Write intro comments."""
        self.comment(_("created by %(app)s at %(time)s") %
                    {"app": configuration.AppName,
                     "time": strformat.strtime(self.starttime)})
        self.comment(_("Get the newest version at %(url)s") %
                     {'url': configuration.Url})
        self.comment(_("Write comments and bugs to %(url)s") %
                     {'url': configuration.SupportUrl})
        self.comment(_("Support this project at %(url)s") %
                     {'url': configuration.DonateUrl})
        self.check_date()
    def write_outro (self):
        """Write outro comments."""
        self.stoptime = time.time()
        duration = self.stoptime - self.starttime
        self.comment(_("Stopped checking at %(time)s (%(duration)s)") %
                     {"time": strformat.strtime(self.stoptime),
                      "duration": strformat.strduration_long(duration)})
    @abc.abstractmethod
    def log_url (self, url_data):
        """
        Log a new url with this logger.
        """
        pass
    @abc.abstractmethod
    def end_output (self, **kwargs):
        """
        End of output, used for cleanup (eg output buffer flushing).
        """
        pass
    def __str__ (self):
        """
        Return class name.
        """
        return self.__class__.__name__
    def __repr__ (self):
        """
        Return class name.
        """
        return repr(self.__class__.__name__)
    def flush (self):
        """
        If the logger has internal buffers, flush them.
        Ignore flush I/O errors since we are not responsible for proper
        flushing of log output streams.
        """
        if hasattr(self, "fd"):
            try:
                self.fd.flush()
            except (IOError, AttributeError):
                pass
    def log_internal_error (self):
        """Indicate that an internal error occurred in the program."""
        log.warn(LOG_CHECK, "internal error occurred")
        self.stats.log_internal_error()
    def format_modified(self, modified, sep=" "):
        """Format modification date in UTC if it's not None.
        @param modified: modification date in UTC
        @ptype modified: datetime or None
        @return: formatted date or empty string
        @rtype: unicode
        """
        if modified is not None:
            return modified.strftime("%Y-%m-%d{0}%H:%M:%S.%fZ".format(sep))
        return u""
def _get_loggers():
    """Collect all Logger plugin classes found in the logger package."""
    # imported lazily to avoid a circular import at module load time
    from .. import loader
    return list(
        loader.get_plugins(loader.get_package_modules('logger'), [_Logger]))
# All available logger classes found in this package.
LoggerClasses = _get_loggers()
# Their lowercase names, usable as output type option values.
LoggerNames = [x.LoggerName for x in LoggerClasses]
# Comma-separated, quoted list of names for help and error messages.
LoggerKeys = ", ".join(repr(x) for x in LoggerNames)
| 14,996 | Python | .py | 413 | 27.246973 | 78 | 0.581109 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,309 | html.py | wummel_linkchecker/linkcheck/logger/html.py | # -*- coding: iso-8859-1 -*-
# Copyright (C) 2000-2014 Bastian Kleineidam
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
A HTML logger.
"""
import time
import cgi
import os
from . import _Logger
from .. import strformat, configuration
# URL template for the W3C HTML validator; ss=1 enables show source
validate_html = "http://validator.w3.org/check?ss=1&uri=%(uri)s"
# URL template for the W3C CSS validator; options are the default
validate_css = "http://jigsaw.w3.org/css-validator/validator?" \
               "uri=%(uri)s&warning=1&profile=css2&usermedium=all"
HTML_HEADER = """<!DOCTYPE HTML>
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=%(encoding)s"/>
<title>%(title)s</title>
<style type="text/css">
<!--
h2 { font-family: Verdana,sans-serif; font-size: 22pt; font-weight: bold; }
body { font-family: Arial,sans-serif; font-size: 11pt; background-color: %(body)s; }
td { font-family: Arial,sans-serif; font-size: 11pt; }
code { font-family: Courier; }
a:link {color: %(link)s;}
a:visited {color: %(vlink)s;}
a:active {color: %(alink)s;}
a:hover { color: #34a4ef; }
table { border-collapse:collapse; }
table, th, td { border: 1px solid black; padding: 2px; }
td.url { background-color: %(url)s }
td.valid { background-color: %(valid)s }
td.error { background-color: %(error)s }
td.warning { background-color: %(warning)s }
-->
</style>
</head>
<body>
"""
class HtmlLogger (_Logger):
    """Logger with HTML output.

    Each checked URL is rendered as one table; page and cell colors come
    from the logger arguments. User-supplied values are escaped with
    cgi.escape(); values placed inside href attributes are additionally
    escaped with quote=True so double quotes cannot break out of the
    attribute (this was missing before and allowed malformed/injected
    markup for URLs containing quotes).
    """
    LoggerName = 'html'
    LoggerArgs = {
        "filename": "linkchecker-out.html",
        'colorbackground': '#fff7e5',
        'colorurl': '#dcd5cf',
        'colorborder': '#000000',
        'colorlink': '#191c83',
        'colorwarning': '#e0954e',
        'colorerror': '#db4930',
        'colorok': '#3ba557',
    }
    def __init__ (self, **kwargs):
        """Initialize default HTML color values."""
        args = self.get_args(kwargs)
        super(HtmlLogger, self).__init__(**args)
        self.init_fileoutput(args)
        self.colorbackground = args['colorbackground']
        self.colorurl = args['colorurl']
        self.colorborder = args['colorborder']
        self.colorlink = args['colorlink']
        self.colorwarning = args['colorwarning']
        self.colorerror = args['colorerror']
        self.colorok = args['colorok']
    def part (self, name):
        """Return non-space-breakable part name."""
        # replace spaces with literal non-breaking space characters
        return super(HtmlLogger, self).part(name).replace(" ", "\xa0")
    def comment (self, s, **args):
        """Write HTML comment."""
        self.write(u"<!-- ")
        self.write(s, **args)
        self.write(u" -->")
    def start_output (self):
        """Write page header, generator comment and intro."""
        super(HtmlLogger, self).start_output()
        header = {
            "encoding": self.get_charset_encoding(),
            "title": configuration.App,
            "body": self.colorbackground,
            "link": self.colorlink,
            "vlink": self.colorlink,
            "alink": self.colorlink,
            "url": self.colorurl,
            "error": self.colorerror,
            "valid": self.colorok,
            "warning": self.colorwarning,
        }
        self.write(HTML_HEADER % header)
        self.comment("Generated by %s" % configuration.App)
        if self.has_part('intro'):
            self.write(u"<h2>"+configuration.App+
                       "</h2><br/><blockquote>"+
                       configuration.Freeware+"<br/><br/>"+
                       (_("Start checking at %s") %
                        strformat.strtime(self.starttime))+
                       os.linesep+"<br/>")
            self.check_date()
        self.flush()
    def log_url (self, url_data):
        """Write url checking info as HTML."""
        self.write_table_start()
        if self.has_part("url"):
            self.write_url(url_data)
        if url_data.name and self.has_part("name"):
            self.write_name(url_data)
        if url_data.parent_url and self.has_part("parenturl"):
            self.write_parent(url_data)
        if url_data.base_ref and self.has_part("base"):
            self.write_base(url_data)
        if url_data.url and self.has_part("realurl"):
            self.write_real(url_data)
        if url_data.dltime >= 0 and self.has_part("dltime"):
            self.write_dltime(url_data)
        if url_data.size >= 0 and self.has_part("dlsize"):
            self.write_size(url_data)
        if url_data.checktime and self.has_part("checktime"):
            self.write_checktime(url_data)
        if url_data.info and self.has_part("info"):
            self.write_info(url_data)
        if url_data.modified and self.has_part("modified"):
            self.write_modified(url_data)
        if url_data.warnings and self.has_part("warning"):
            self.write_warning(url_data)
        if self.has_part("result"):
            self.write_result(url_data)
        self.write_table_end()
        self.flush()
    def write_table_start (self):
        """Start html table."""
        self.writeln(u'<br/><br/><table>')
    def write_table_end (self):
        """End html table."""
        self.write(u'</table><br/>')
    def write_id (self):
        """Write ID for current URL.
        NOTE(review): "id" is not in Fields, so part("id") yields an
        empty label; this method appears unused by log_url.
        """
        self.writeln(u"<tr>")
        self.writeln(u'<td>%s</td>' % self.part("id"))
        self.write(u"<td>%d</td></tr>" % self.stats.number)
    def write_url (self, url_data):
        """Write url_data.base_url."""
        self.writeln(u"<tr>")
        self.writeln(u'<td class="url">%s</td>' % self.part("url"))
        self.write(u'<td class="url">')
        self.write(u"`%s'" % cgi.escape(url_data.base_url))
        self.writeln(u"</td></tr>")
    def write_name (self, url_data):
        """Write url_data.name."""
        args = (self.part("name"), cgi.escape(url_data.name))
        self.writeln(u"<tr><td>%s</td><td>`%s'</td></tr>" % args)
    def write_parent (self, url_data):
        """Write url_data.parent_url with line/column/page info and,
        on errors, links to the W3C HTML and CSS validators."""
        # BUGFIX: quote the URL inside the href attribute;
        # cgi.escape(..., True) also escapes double quotes
        self.write(u"<tr><td>"+self.part("parenturl")+
                   u'</td><td><a target="top" href="'+
                   cgi.escape(url_data.parent_url, True)+u'">'+
                   cgi.escape(url_data.parent_url)+u"</a>")
        if url_data.line > 0:
            self.write(_(", line %d") % url_data.line)
        if url_data.column > 0:
            self.write(_(", col %d") % url_data.column)
        if url_data.page > 0:
            self.write(_(", page %d") % url_data.page)
        if not url_data.valid:
            # on errors show HTML and CSS validation for parent url
            vhtml = validate_html % {'uri': url_data.parent_url}
            vcss = validate_css % {'uri': url_data.parent_url}
            self.writeln()
            # BUGFIX: attribute-escape the validator URLs as well
            self.writeln(u'(<a href="'+cgi.escape(vhtml, True)+u'">HTML</a>)')
            self.write(u'(<a href="'+cgi.escape(vcss, True)+u'">CSS</a>)')
        self.writeln(u"</td></tr>")
    def write_base (self, url_data):
        """Write url_data.base_ref."""
        self.writeln(u"<tr><td>"+self.part("base")+u"</td><td>"+
                     cgi.escape(url_data.base_ref)+u"</td></tr>")
    def write_real (self, url_data):
        """Write url_data.url."""
        # BUGFIX: quote the URL inside the href attribute
        self.writeln("<tr><td>"+self.part("realurl")+u"</td><td>"+
                     u'<a target="top" href="'+cgi.escape(url_data.url, True)+
                     u'">'+cgi.escape(url_data.url)+u"</a></td></tr>")
    def write_dltime (self, url_data):
        """Write url_data.dltime."""
        self.writeln(u"<tr><td>"+self.part("dltime")+u"</td><td>"+
                     (_("%.3f seconds") % url_data.dltime)+
                     u"</td></tr>")
    def write_size (self, url_data):
        """Write url_data.size."""
        self.writeln(u"<tr><td>"+self.part("dlsize")+u"</td><td>"+
                     strformat.strsize(url_data.size)+
                     u"</td></tr>")
    def write_checktime (self, url_data):
        """Write url_data.checktime."""
        self.writeln(u"<tr><td>"+self.part("checktime")+u"</td><td>"+
                     (_("%.3f seconds") % url_data.checktime)+u"</td></tr>")
    def write_info (self, url_data):
        """Write url_data.info."""
        sep = u"<br/>"+os.linesep
        text = sep.join(cgi.escape(x) for x in url_data.info)
        self.writeln(u'<tr><td valign="top">' + self.part("info")+
                     u"</td><td>"+text+u"</td></tr>")
    def write_modified(self, url_data):
        """Write url_data.modified."""
        text = cgi.escape(self.format_modified(url_data.modified))
        self.writeln(u'<tr><td valign="top">' + self.part("modified") +
                     u"</td><td>"+text+u"</td></tr>")
    def write_warning (self, url_data):
        """Write url_data.warnings."""
        sep = u"<br/>"+os.linesep
        # warnings are (tag, message) pairs; only the message is shown
        text = sep.join(cgi.escape(x[1]) for x in url_data.warnings)
        self.writeln(u'<tr><td class="warning" '+
                     u'valign="top">' + self.part("warning") +
                     u'</td><td class="warning">' + text + u"</td></tr>")
    def write_result (self, url_data):
        """Write url_data.result."""
        if url_data.valid:
            self.write(u'<tr><td class="valid">')
            self.write(self.part("result"))
            self.write(u'</td><td class="valid">')
            self.write(cgi.escape(_("Valid")))
        else:
            self.write(u'<tr><td class="error">')
            self.write(self.part("result"))
            self.write(u'</td><td class="error">')
            self.write(cgi.escape(_("Error")))
        if url_data.result:
            self.write(u": "+cgi.escape(url_data.result))
        self.writeln(u"</td></tr>")
    def write_stats (self):
        """Write check statistic infos."""
        self.writeln(u'<br/><i>%s</i><br/>' % _("Statistics"))
        if self.stats.number > 0:
            self.writeln(_(
              "Content types: %(image)d image, %(text)d text, %(video)d video, "
              "%(audio)d audio, %(application)d application, %(mail)d mail"
              " and %(other)d other.") % self.stats.link_types)
            self.writeln(u"<br/>")
            self.writeln(_("URL lengths: min=%(min)d, max=%(max)d, avg=%(avg)d.") %
                         dict(min=self.stats.min_url_length,
                         max=self.stats.max_url_length,
                         avg=self.stats.avg_url_length))
        else:
            self.writeln(_("No statistics available since no URLs were checked."))
        self.writeln(u"<br/>")
    def write_outro (self):
        """Write end of check message."""
        self.writeln(u"<br/>")
        self.write(_("That's it.")+" ")
        if self.stats.number >= 0:
            self.write(_n("%d link checked.", "%d links checked.",
                          self.stats.number) % self.stats.number)
            self.write(u" ")
        self.write(_n("%d warning found", "%d warnings found",
             self.stats.warnings_printed) % self.stats.warnings_printed)
        if self.stats.warnings != self.stats.warnings_printed:
            self.write(_(" (%d ignored or duplicates not printed)") %
                (self.stats.warnings - self.stats.warnings_printed))
        self.write(u". ")
        self.write(_n("%d error found", "%d errors found",
             self.stats.errors_printed) % self.stats.errors_printed)
        if self.stats.errors != self.stats.errors_printed:
            self.write(_(" (%d duplicates not printed)") %
                (self.stats.errors - self.stats.errors_printed))
        self.writeln(u".")
        self.writeln(u"<br/>")
        num = self.stats.internal_errors
        if num:
            self.write(_n("There was %(num)d internal error.",
                "There were %(num)d internal errors.", num) % {"num": num})
            self.writeln(u"<br/>")
        self.stoptime = time.time()
        duration = self.stoptime - self.starttime
        self.writeln(_("Stopped checking at %(time)s (%(duration)s)") %
             {"time": strformat.strtime(self.stoptime),
              "duration": strformat.strduration_long(duration)})
        self.writeln(u'</blockquote><br/><hr><small>'+
                     configuration.HtmlAppInfo+u"<br/>")
        self.writeln(_("Get the newest version at %s") %
           (u'<a href="'+configuration.Url+u'" target="_top">'+
            configuration.Url+u"</a>.<br/>"))
        self.writeln(_("Write comments and bugs to %s") %
           (u'<a href="'+configuration.SupportUrl+u'">'+
            configuration.SupportUrl+u"</a>.<br/>"))
        self.writeln(_("Support this project at %s") %
           (u'<a href="'+configuration.DonateUrl+u'">'+
            configuration.DonateUrl+u"</a>."))
        self.writeln(u"</small></body></html>")
    def end_output (self, **kwargs):
        """Write end of checking info as HTML."""
        if self.has_part("stats"):
            self.write_stats()
        if self.has_part("outro"):
            self.write_outro()
        self.close_fileoutput()
| 13,550 | Python | .py | 307 | 34.710098 | 85 | 0.559903 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,310 | blacklist.py | wummel_linkchecker/linkcheck/logger/blacklist.py | # -*- coding: iso-8859-1 -*-
# Copyright (C) 2000-2014 Bastian Kleineidam
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
A blacklist logger.
"""
import os
import codecs
from . import _Logger
class BlacklistLogger (_Logger):
    """
    Updates a blacklist of wrong links. If a link on the blacklist
    is working (again), it is removed from the list. So after n days
    we have only links on the list which failed for n days.

    Blacklist file format: one line per entry, "<count> <key>", where
    key is the repr() of a (parent url, cache url) pair.
    """
    LoggerName = "blacklist"
    LoggerArgs = {
        "filename": "~/.linkchecker/blacklist",
    }
    def __init__ (self, **kwargs):
        """Initialize with old blacklist data (if found, else not)."""
        args = self.get_args(kwargs)
        super(BlacklistLogger, self).__init__(**args)
        self.init_fileoutput(args)
        # mapping {key -> number of runs the URL has been failing}
        self.blacklist = {}
        if self.filename is not None and os.path.exists(self.filename):
            self.read_blacklist()
    def comment (self, s, **args):
        """
        Write nothing; the blacklist file carries no comments.
        """
        pass
    def log_url (self, url_data):
        """
        Put invalid url in blacklist, delete valid url from blacklist.
        """
        key = (url_data.parent_url, url_data.cache_url)
        key = repr(key)
        if key in self.blacklist:
            if url_data.valid:
                del self.blacklist[key]
            else:
                self.blacklist[key] += 1
        else:
            if not url_data.valid:
                self.blacklist[key] = 1
    def end_output (self, **kwargs):
        """
        Write blacklist file.
        """
        self.write_blacklist()
    def read_blacklist (self):
        """
        Read a previously stored blacklist from file fd.
        """
        with codecs.open(self.filename, 'r', self.output_encoding,
                         self.codec_errors) as fd:
            for line in fd:
                line = line.rstrip()
                if line.startswith('#') or not line:
                    continue
                value, key = line.split(None, 1)
                self.blacklist[key] = int(value)
    def write_blacklist (self):
        """
        Write the blacklist, restoring the previous umask afterwards.
        """
        # make the written file private to the user
        oldmask = os.umask(0o077)
        try:
            for key, value in self.blacklist.items():
                # BUGFIX: write the key as-is. It is already a repr()
                # string (see log_url); applying repr() again, as the
                # old code did, quoted it a second time so that entries
                # read back by read_blacklist never matched fresh keys.
                self.write(u"%d %s%s" % (value, key, os.linesep))
            self.close_fileoutput()
        finally:
            # restore umask even if writing fails
            os.umask(oldmask)
| 3,049 | Python | .py | 86 | 27.848837 | 73 | 0.60874 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,311 | customxml.py | wummel_linkchecker/linkcheck/logger/customxml.py | # -*- coding: iso-8859-1 -*-
# Copyright (C) 2000-2014 Bastian Kleineidam
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
An XML logger.
"""
from . import xmllog
from .. import strformat
class CustomXMLLogger (xmllog._XMLLogger):
    """
    XML custom output for easy post-processing.
    The document root is <linkchecker>; each checked URL becomes one
    <urldata> element whose children mirror the configured log parts.
    """
    # lowercase name usable as output type option value
    LoggerName = "xml"
    # Default log configuration
    LoggerArgs = {
        "filename": "linkchecker-out.xml",
    }
    def start_output (self):
        """
        Write XML declaration, intro comment and the root start tag.
        """
        super(CustomXMLLogger, self).start_output()
        self.xml_start_output()
        attrs = {"created": strformat.strtime(self.starttime)}
        self.xml_starttag(u'linkchecker', attrs)
        self.flush()
    def log_url (self, url_data):
        """
        Log URL data in custom XML format.
        """
        self.xml_starttag(u'urldata')
        if self.has_part('url'):
            self.xml_tag(u"url", unicode(url_data.base_url))
        if url_data.name and self.has_part('name'):
            self.xml_tag(u"name", unicode(url_data.name))
        if url_data.parent_url and self.has_part('parenturl'):
            attrs = {
                u'line': u"%d" % url_data.line,
                u'column': u"%d" % url_data.column,
            }
            self.xml_tag(u"parent", unicode(url_data.parent_url),
                         attrs=attrs)
        if url_data.base_ref and self.has_part('base'):
            self.xml_tag(u"baseref", unicode(url_data.base_ref))
        if self.has_part("realurl"):
            self.xml_tag(u"realurl", unicode(url_data.url))
        if self.has_part("extern"):
            # booleans are serialized as "0"/"1"
            self.xml_tag(u"extern", u"%d" % (1 if url_data.extern else 0))
        if url_data.dltime >= 0 and self.has_part("dltime"):
            self.xml_tag(u"dltime", u"%f" % url_data.dltime)
        if url_data.size >= 0 and self.has_part("dlsize"):
            self.xml_tag(u"dlsize", u"%d" % url_data.size)
        if url_data.checktime and self.has_part("checktime"):
            self.xml_tag(u"checktime", u"%f" % url_data.checktime)
        if self.has_part("level"):
            self.xml_tag(u"level", u"%d" % url_data.level)
        if url_data.info and self.has_part('info'):
            self.xml_starttag(u"infos")
            for info in url_data.info:
                self.xml_tag(u"info", info)
            self.xml_endtag(u"infos")
        if url_data.modified and self.has_part('modified'):
            self.xml_tag(u"modified", self.format_modified(url_data.modified))
        if url_data.warnings and self.has_part('warning'):
            self.xml_starttag(u"warnings")
            # warnings are (tag, message) pairs; the tag becomes an attribute
            for tag, data in url_data.warnings:
                attrs = {}
                if tag:
                    attrs["tag"] = tag
                self.xml_tag(u"warning", data, attrs)
            self.xml_endtag(u"warnings")
        if self.has_part("result"):
            attrs = {}
            if url_data.result:
                attrs["result"] = url_data.result
            self.xml_tag(u"valid", u"%d" % (1 if url_data.valid else 0), attrs)
        self.xml_endtag(u'urldata')
        self.flush()
    def end_output (self, **kwargs):
        """
        Write XML end tag.
        """
        self.xml_endtag(u"linkchecker")
        self.xml_end_output()
        self.close_fileoutput()
| 3,970 | Python | .py | 97 | 32.257732 | 79 | 0.596223 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,312 | xmllog.py | wummel_linkchecker/linkcheck/logger/xmllog.py | # -*- coding: iso-8859-1 -*-
# Copyright (C) 2000-2014 Bastian Kleineidam
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Base class for XML loggers.
"""
import xml.sax.saxutils
from . import _Logger
# Characters that must be replaced inside a double-quoted XML attribute
# value, mapped to their entity references.
# NOTE(review): only the double-quote entry is safe to pass as extra
# entities to xml.sax.saxutils.escape(); the &, < and > entries duplicate
# escape()'s built-in replacements and cause double-escaping when applied
# on top of them (e.g. "&" -> "&amp;amp;").
xmlattr_entities = {
    "&": "&amp;",
    "<": "&lt;",
    ">": "&gt;",
    "\"": "&quot;",
}
def xmlquote (s):
    """Return s with the XML special characters &, < and > replaced
    by their entity references, for use as character data."""
    return xml.sax.saxutils.escape(s)
def xmlquoteattr (s):
    """
    Quote XML attribute, ready for inclusion with double quotes.

    BUGFIX: only the double quote is passed as an extra entity.
    saxutils.escape() already replaces &, < and > itself; passing them
    again (as the old xmlattr_entities mapping did) re-escaped the
    output of the first pass, e.g. "&" became "&amp;amp;".
    """
    return xml.sax.saxutils.escape(s, {"\"": "&quot;"})
class _XMLLogger (_Logger):
    """Base class for XML output; easy to parse with any XML tool.

    Subclasses drive the output through xml_starttag(), xml_endtag()
    and xml_tag(), which maintain the current indentation level.
    """
    def __init__ (self, **kwargs):
        """Initialize file output, the indent string and the nesting
        level. (The old docstring mentioning a graph node list was a
        copy-paste error from another logger.)"""
        args = self.get_args(kwargs)
        super(_XMLLogger, self).__init__(**args)
        self.init_fileoutput(args)
        # string written once per nesting level in front of each tag
        self.indent = u"  "
        self.level = 0
    def comment (self, s, **args):
        """
        Write XML comment.
        """
        self.write(u"<!-- ")
        self.write(s, **args)
        self.writeln(u" -->")
    def xml_start_output (self):
        """
        Write the XML declaration and the intro comments.
        """
        self.writeln(u'<?xml version="1.0" encoding="%s"?>' %
                     xmlquoteattr(self.get_charset_encoding()))
        if self.has_part("intro"):
            self.write_intro()
            self.writeln()
    def xml_end_output (self):
        """
        Write end of checking info as xml comment.
        """
        if self.has_part("outro"):
            self.write_outro()
    def xml_starttag (self, name, attrs=None):
        """
        Write an XML start tag and increase the nesting level.
        """
        self.write(self.indent*self.level)
        self.write(u"<%s" % xmlquote(name))
        if attrs:
            # BUGFIX (latent): use distinct loop variable names; the
            # old code shadowed the "name" parameter here, unlike
            # xml_tag() below which already avoided that.
            for aname, avalue in attrs.items():
                args = (xmlquote(aname), xmlquoteattr(avalue))
                self.write(u' %s="%s"' % args)
        self.writeln(u">")
        self.level += 1
    def xml_endtag (self, name):
        """
        Write an XML end tag and decrease the nesting level.
        """
        self.level -= 1
        assert self.level >= 0
        self.write(self.indent*self.level)
        self.writeln(u"</%s>" % xmlquote(name))
    def xml_tag (self, name, content, attrs=None):
        """
        Write a complete XML element with text content.
        """
        self.write(self.indent*self.level)
        self.write(u"<%s" % xmlquote(name))
        if attrs:
            for aname, avalue in attrs.items():
                args = (xmlquote(aname), xmlquoteattr(avalue))
                self.write(u' %s="%s"' % args)
        self.writeln(u">%s</%s>" % (xmlquote(content), xmlquote(name)))
| 3,361 | Python | .py | 99 | 27.121212 | 73 | 0.593346 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,313 | csvlog.py | wummel_linkchecker/linkcheck/logger/csvlog.py | # -*- coding: iso-8859-1 -*-
# Copyright (C) 2000-2014 Bastian Kleineidam
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
A CSV logger.
"""
import csv
import os
from cStringIO import StringIO
from . import _Logger
from .. import strformat
# Names of the parts that can appear as columns in the CSV output.
# These must match the part names tested with has_part() in
# CSVLogger.log_url(), which checks "dlsize" (not "size") for the
# download size column; otherwise the header row and the data rows
# would disagree.
Columns = (
    u"urlname", u"parentname", u"baseref", u"result", u"warningstring",
    u"infostring", u"valid", u"url", u"line", u"column", u"name",
    u"dltime", u"dlsize", u"checktime", u"cached", u"level", u"modified",
)
class CSVLogger (_Logger):
    """
    CSV output, consisting of one line per entry. Entries are
    separated by a separator (a semicolon per default).
    """
    # logger name as used in configuration and on the command line
    LoggerName = "csv"
    # default arguments; can be overridden per configuration file
    LoggerArgs = {
        "filename": "linkchecker-out.csv",
        'separator': ';',
        "quotechar": '"',
        "dialect": "excel",
    }
    def __init__ (self, **kwargs):
        """Store default separator and (os dependent) line terminator."""
        args = self.get_args(kwargs)
        super(CSVLogger, self).__init__(**args)
        self.init_fileoutput(args)
        self.separator = args['separator']
        self.quotechar = args['quotechar']
        self.dialect = args['dialect']
        self.linesep = os.linesep
    def comment (self, s, **args):
        """Write CSV comment."""
        self.writeln(s=u"# %s" % s, **args)
    def start_output (self):
        """Write checking start info as csv comment and the header row."""
        super(CSVLogger, self).start_output()
        row = []
        if self.has_part("intro"):
            self.write_intro()
            self.flush()
        else:
            # write empty string to initialize file output
            self.write(u"")
        # The csv module emits encoded byte strings; write them into an
        # in-memory queue first so writerow() can decode them back to
        # unicode before passing them to the target stream.
        self.queue = StringIO()
        self.writer = csv.writer(self.queue, dialect=self.dialect,
            delimiter=self.separator, lineterminator=self.linesep,
            quotechar=self.quotechar)
        # header row with the names of all configured columns
        for s in Columns:
            if self.has_part(s):
                row.append(s)
        if row:
            self.writerow(row)
    def log_url (self, url_data):
        """Write csv formatted url check info.
        One column is appended per configured part, in the same order
        as the header row written by start_output()."""
        row = []
        if self.has_part("urlname"):
            row.append(url_data.base_url)
        if self.has_part("parentname"):
            row.append(url_data.parent_url)
        if self.has_part("baseref"):
            row.append(url_data.base_ref)
        if self.has_part("result"):
            row.append(url_data.result)
        if self.has_part("warningstring"):
            # multiple warnings are joined into one cell
            row.append(self.linesep.join(x[1] for x in url_data.warnings))
        if self.has_part("infostring"):
            row.append(self.linesep.join(url_data.info))
        if self.has_part("valid"):
            row.append(url_data.valid)
        if self.has_part("url"):
            row.append(url_data.url)
        if self.has_part("line"):
            row.append(url_data.line)
        if self.has_part("column"):
            row.append(url_data.column)
        if self.has_part("name"):
            row.append(url_data.name)
        if self.has_part("dltime"):
            row.append(url_data.dltime)
        if self.has_part("dlsize"):
            row.append(url_data.size)
        if self.has_part("checktime"):
            row.append(url_data.checktime)
        if self.has_part("cached"):
            # caching was removed; kept as constant 0 for compatibility
            row.append(0)
        if self.has_part("level"):
            row.append(url_data.level)
        if self.has_part("modified"):
            row.append(self.format_modified(url_data.modified))
        self.writerow(map(strformat.unicode_safe, row))
        self.flush()
    def writerow (self, row):
        """Write one row in CSV format."""
        self.writer.writerow([s.encode("utf-8", self.codec_errors) for s in row])
        # Fetch UTF-8 output from the queue ...
        data = self.queue.getvalue()
        data = data.decode("utf-8")
        # ... and write to the target stream
        self.write(data)
        # empty queue
        self.queue.truncate(0)
    def end_output (self, **kwargs):
        """Write end of checking info as csv comment."""
        if self.has_part("outro"):
            self.write_outro()
        self.close_fileoutput()
| 4,790 | Python | .py | 126 | 30.214286 | 81 | 0.609284 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,314 | none.py | wummel_linkchecker/linkcheck/logger/none.py | # -*- coding: iso-8859-1 -*-
# Copyright (C) 2000-2014 Bastian Kleineidam
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
A dummy logger.
"""
from . import _Logger
class NoneLogger (_Logger):
    """
    Null logger that produces no output at all.
    """
    LoggerName = 'none'
    def comment (self, s, **args):
        """
        Discard the comment.
        """
    def start_output (self):
        """
        No start-of-output action.
        """
    def log_url (self, url_data):
        """
        Discard the URL check result.
        """
    def end_output (self, **kwargs):
        """
        No end-of-output action.
        """
| 1,281 | Python | .py | 43 | 25.27907 | 73 | 0.659626 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,315 | httpheaderinfo.py | wummel_linkchecker/linkcheck/plugins/httpheaderinfo.py | # -*- coding: iso-8859-1 -*-
# Copyright (C) 2014 Bastian Kleineidam
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Add HTTP server name information
"""
from . import _ConnectionPlugin
class HttpHeaderInfo(_ConnectionPlugin):
    """Add HTTP header info for each URL"""

    def __init__(self, config):
        """Initialize configuration."""
        super(HttpHeaderInfo, self).__init__(config)
        # header-name prefixes to report; lowercased by read_config()
        self.prefixes = tuple(config["prefixes"])

    def applies_to(self, url_data):
        """Check for HTTP and prefix config."""
        return self.prefixes and url_data.is_http()

    def check(self, url_data):
        """Add an info line listing all response headers whose name
        matches one of the configured prefixes."""
        headers = []
        for name, value in url_data.headers.items():
            # HTTP header names are case insensitive and servers send them
            # in mixed case (e.g. "Server"); the configured prefixes are
            # lowercase, so compare against the lowercased name.
            if name.lower().startswith(self.prefixes):
                headers.append(name)
        if headers:
            items = [u"%s=%s" % (name.capitalize(), url_data.headers[name]) for name in headers]
            info = u"HTTP headers %s" % u", ".join(items)
            url_data.add_info(info)

    @classmethod
    def read_config(cls, configparser):
        """Read configuration file options.
        @return: dict with a "prefixes" key mapping to a (possibly empty)
          list of lowercased header-name prefixes
        """
        config = dict()
        section = cls.__name__
        option = "prefixes"
        if configparser.has_option(section, option):
            value = configparser.get(section, option)
            # normalize to lowercase for case-insensitive matching in check()
            names = [x.strip().lower() for x in value.split(",")]
        else:
            names = []
        config[option] = names
        return config
| 2,150 | Python | .py | 52 | 35.211538 | 96 | 0.659493 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,316 | parseword.py | wummel_linkchecker/linkcheck/plugins/parseword.py | # -*- coding: iso-8859-1 -*-
# Copyright (C) 2010-2014 Bastian Kleineidam
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Parse hyperlinks in Word files.
"""
from . import _ParserPlugin
try:
import win32com
import pythoncom
has_win32com = True
Error = pythoncom.com_error
except ImportError:
has_win32com = False
Error = Exception
from .. import fileutil, log, LOG_PLUGIN
# module-level flag: True once the win32com.client gencache has been set up
_initialized = False
def init_win32com ():
    """Initialize the win32com.client cache."""
    global _initialized
    if _initialized:
        return
    import win32com.client
    if win32com.client.gencache.is_readonly:
        #allow gencache to create the cached wrapper objects
        win32com.client.gencache.is_readonly = False
        # under py2exe the call in gencache to __init__() does not happen
        # so we use Rebuild() to force the creation of the gen_py folder
        # Note that the python...\win32com.client.gen_py dir must not exist
        # to allow creation of the cache in %temp% for py2exe.
        # This is ensured by excluding win32com.gen_py in setup.py
        win32com.client.gencache.Rebuild()
    _initialized = True
def has_word ():
    """Determine if Word is available on the current system."""
    if not has_win32com:
        return False
    try:
        import _winreg as winreg
    except ImportError:
        import winreg
    try:
        # presence of the Word.Application COM class key means Word is installed
        key = winreg.OpenKey(winreg.HKEY_CLASSES_ROOT, "Word.Application")
        winreg.CloseKey(key)
        return True
    except (EnvironmentError, ImportError):
        pass
    return False
def constants (name):
    """Helper to return constants. Avoids importing win32com.client in
    other modules."""
    # NOTE(review): assumes init_win32com() has run so that the constants
    # wrappers are populated -- confirm callers respect that ordering
    return getattr(win32com.client.constants, name)
def get_word_app ():
    """Return open Word.Application handle, or None if Word is not available
    on this system."""
    if not has_word():
        return None
    # Since this function is called from different threads, initialize
    # the COM layer.
    pythoncom.CoInitialize()
    import win32com.client
    app = win32com.client.gencache.EnsureDispatch("Word.Application")
    # run Word invisibly in the background
    app.Visible = False
    return app
def close_word_app (app):
    """Close Word application object."""
    app.Quit()
def open_wordfile (app, filename):
    """Open given Word file with application object."""
    return app.Documents.Open(filename, ReadOnly=True,
        AddToRecentFiles=False, Visible=False, NoEncodingDialog=True)
def close_wordfile (doc):
    """Close word file."""
    doc.Close()
class WordParser(_ParserPlugin):
    """Word parsing plugin using the MS Word COM automation interface."""

    def __init__(self, config):
        """Initialize the COM cache and warn if MS Word is missing."""
        init_win32com()
        if not has_word():
            log.warn(LOG_PLUGIN, "Microsoft Word not found for WordParser plugin")
        super(WordParser, self).__init__(config)

    def applies_to(self, url_data, pagetype=None):
        """Check for Word pagetype."""
        return has_word() and pagetype == 'word'

    def check(self, url_data):
        """Parse Word data: write the content to a temporary file, open
        it with Word and add every hyperlink found in the document."""
        content = url_data.get_content()
        filename = get_temp_filename(content)
        # open word file and parse hyperlinks
        try:
            app = get_word_app()
            try:
                doc = open_wordfile(app, filename)
                if doc is None:
                    raise Error("could not open word file %r" % filename)
                try:
                    for link in doc.Hyperlinks:
                        # get_line_number(doc, wrange) needs the document
                        # object to move the selection; the previous call
                        # omitted it and raised TypeError per hyperlink.
                        line = get_line_number(doc, link.Range)
                        name = link.TextToDisplay
                        url_data.add_url(link.Address, name=name, line=line)
                finally:
                    close_wordfile(doc)
            finally:
                close_word_app(app)
        except Error as msg:
            log.warn(LOG_PLUGIN, "Error parsing word file: %s", msg)
def get_line_number(doc, wrange):
    """Get line number for given range object.

    Selects the range and repeatedly moves the selection one line up,
    counting steps until the selection no longer moves (document start).

    @param doc: opened Word document object
    @param wrange: range (e.g. of a hyperlink) whose line is wanted
    @return: 1-based line number
    """
    lineno = 1
    wrange.Select()
    wdFirstCharacterLineNumber = constants("wdFirstCharacterLineNumber")
    wdGoToLine = constants("wdGoToLine")
    wdGoToPrevious = constants("wdGoToPrevious")
    while True:
        curline = doc.Selection.Information(wdFirstCharacterLineNumber)
        doc.Selection.GoTo(wdGoToLine, wdGoToPrevious, Count=1, Name="")
        lineno += 1
        prevline = doc.Selection.Information(wdFirstCharacterLineNumber)
        # GoTo did not move the selection: we reached the top of the document
        if prevline == curline:
            break
    return lineno
def get_temp_filename (content):
    """Dump content into a fresh temporary .doc file.

    @param content: byte string to store
    @return: path of the written temporary file
    """
    handle, path = fileutil.get_temp_file(mode='wb', suffix='.doc', prefix='lc_')
    try:
        handle.write(content)
    finally:
        handle.close()
    return path
| 5,461 | Python | .py | 145 | 30.910345 | 82 | 0.66421 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,317 | viruscheck.py | wummel_linkchecker/linkcheck/plugins/viruscheck.py | # -*- coding: iso-8859-1 -*-
# Copyright (C) 2000-2014 Bastian Kleineidam
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Check page content for virus infection with clamav.
"""
import os
import socket
from . import _ContentPlugin
from .. import log, LOG_PLUGIN
from ..socketutil import create_socket
class VirusCheck(_ContentPlugin):
    """Checks the page content for virus infections with clamav.
    A local clamav daemon must be installed."""
    def __init__(self, config):
        """Initialize clamav configuration."""
        super(VirusCheck, self).__init__(config)
        # XXX read config
        self.clamav_conf = get_clamav_conf(canonical_clamav_conf())
        if not self.clamav_conf:
            log.warn(LOG_PLUGIN, "clamav daemon not found for VirusCheck plugin")
    def applies_to(self, url_data):
        """Check for clamav and extern."""
        return self.clamav_conf and not url_data.extern[0]
    def check(self, url_data):
        """Scan the URL content for viruses with the clamd daemon and
        add a warning per infection or scan error."""
        data = url_data.get_content()
        infected, errors = scan(data, self.clamav_conf)
        if infected or errors:
            for msg in infected:
                url_data.add_warning(u"Virus scan infection: %s" % msg)
            for msg in errors:
                url_data.add_warning(u"Virus scan error: %s" % msg)
        else:
            url_data.add_info("No viruses in data found.")
    @classmethod
    def read_config(cls, configparser):
        """Read configuration file options.
        @return: dict with a "clamavconf" key (config file path or None)
        """
        config = dict()
        section = cls.__name__
        option = "clamavconf"
        if configparser.has_option(section, option):
            value = configparser.get(section, option)
        else:
            value = None
        config[option] = value
        return config
class ClamavError (Exception):
    """Raised on clamav errors."""
    pass
class ClamdScanner (object):
    """Virus scanner using a clamd daemon process."""
    def __init__ (self, clamav_conf):
        """Initialize clamd daemon process sockets.
        @param clamav_conf: ClamavConfig instance with connection info
        """
        # infection and error messages collected from the daemon
        self.infected = []
        self.errors = []
        # control connection to the daemon
        self.sock, self.host = clamav_conf.new_connection()
        self.sock_rcvbuf = \
            self.sock.getsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF)
        # second socket used to stream the data to be scanned
        self.wsock = self.new_scansock()
    def new_scansock (self):
        """Return a connected socket for sending scan data to it."""
        port = None
        try:
            # request a stream scan; the daemon answers with "PORT <n>"
            self.sock.sendall("STREAM")
            port = None
            for dummy in range(60):
                data = self.sock.recv(self.sock_rcvbuf)
                i = data.find("PORT")
                if i != -1:
                    # skip "PORT " (5 chars) and parse the port number
                    port = int(data[i+5:])
                    break
        except socket.error:
            self.sock.close()
            raise
        if port is None:
            raise ClamavError(_("clamd is not ready for stream scanning"))
        sockinfo = get_sockinfo(self.host, port=port)
        wsock = create_socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            wsock.connect(sockinfo[0][4])
        except socket.error:
            wsock.close()
            raise
        return wsock
    def scan (self, data):
        """Scan given data for viruses."""
        self.wsock.sendall(data)
    def close (self):
        """Get results and close clamd daemon sockets."""
        # closing the stream socket tells clamd the data is complete
        self.wsock.close()
        data = self.sock.recv(self.sock_rcvbuf)
        while data:
            # the daemon reports one line per result
            if "FOUND\n" in data:
                self.infected.append(data)
            if "ERROR\n" in data:
                self.errors.append(data)
            data = self.sock.recv(self.sock_rcvbuf)
        self.sock.close()
def canonical_clamav_conf ():
    """Return the default clamd configuration file path for the
    current platform."""
    platform_defaults = {
        'posix': "/etc/clamav/clamd.conf",
        'nt': r"c:\clamav-devel\etc\clamd.conf",
    }
    return platform_defaults.get(os.name, "clamd.conf")
def get_clamav_conf(filename):
    """Parse the clamav configuration file if it exists.

    @return: ClamavConfig instance, or None (with a warning logged)
      when the file is missing
    """
    if not os.path.isfile(filename):
        log.warn(LOG_PLUGIN, "No ClamAV config file found at %r.", filename)
        return None
    return ClamavConfig(filename)
def get_sockinfo (host, port=None):
    """Return socket.getaddrinfo for given host and port, restricted
    to IPv4 stream sockets."""
    return socket.getaddrinfo(host, port, socket.AF_INET, socket.SOCK_STREAM)
class ClamavConfig (dict):
    """Clamav configuration wrapper, with clamd connection method."""
    def __init__ (self, filename):
        """Parse clamav configuration file.
        @raise ClamavError: on invalid option combinations
        """
        super(ClamavConfig, self).__init__()
        self.parseconf(filename)
        if self.get('ScannerDaemonOutputFormat'):
            raise ClamavError(_("ScannerDaemonOutputFormat must be disabled"))
        if self.get('TCPSocket') and self.get('LocalSocket'):
            raise ClamavError(_("only one of TCPSocket and LocalSocket must be enabled"))
    def parseconf (self, filename):
        """Parse clamav configuration from given file.
        Options without a value are stored as True, others as the
        rest of the line."""
        with open(filename) as fd:
            # yet another config format, sigh
            for line in fd:
                line = line.strip()
                if not line or line.startswith("#"):
                    # ignore empty lines and comments
                    continue
                split = line.split(None, 1)
                if len(split) == 1:
                    self[split[0]] = True
                else:
                    self[split[0]] = split[1]
    def new_connection (self):
        """Connect to clamd for stream scanning.
        @return: tuple (connected socket, host)
        @raise ClamavError: if neither socket option is configured
        """
        if self.get('LocalSocket'):
            host = 'localhost'
            sock = self.create_local_socket()
        elif self.get('TCPSocket'):
            host = self.get('TCPAddr', 'localhost')
            sock = self.create_tcp_socket(host)
        else:
            raise ClamavError(_("one of TCPSocket or LocalSocket must be enabled"))
        return sock, host
    def create_local_socket (self):
        """Create local socket, connect to it and return socket object."""
        sock = create_socket(socket.AF_UNIX, socket.SOCK_STREAM)
        addr = self['LocalSocket']
        try:
            sock.connect(addr)
        except socket.error:
            # close the socket on failure to avoid a descriptor leak
            sock.close()
            raise
        return sock
    def create_tcp_socket (self, host):
        """Create tcp socket, connect to it and return socket object."""
        port = int(self['TCPSocket'])
        sockinfo = get_sockinfo(host, port=port)
        sock = create_socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            sock.connect(sockinfo[0][4])
        except socket.error:
            # close the socket on failure to avoid a descriptor leak
            sock.close()
            raise
        return sock
def scan (data, clamconf):
    """Scan data for viruses.
    @param data: byte string to scan
    @param clamconf: ClamavConfig instance
    @return (infection msgs, errors)
    @rtype ([], [])
    """
    try:
        scanner = ClamdScanner(clamconf)
    except socket.error:
        # treat an unreachable daemon as a scan error, not a crash
        errmsg = _("Could not connect to ClamAV daemon.")
        return ([], [errmsg])
    try:
        scanner.scan(data)
    finally:
        # close() also collects the scan results from the daemon
        scanner.close()
    return scanner.infected, scanner.errors
| 7,889 | Python | .py | 203 | 30.261084 | 89 | 0.611938 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,318 | parsepdf.py | wummel_linkchecker/linkcheck/plugins/parsepdf.py | # -*- coding: iso-8859-1 -*-
# Copyright (C) 2014 Bastian Kleineidam
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Parse links in PDF files with pdfminer.
"""
from cStringIO import StringIO
from . import _ParserPlugin
try:
from pdfminer.pdfparser import PDFParser
from pdfminer.pdfdocument import PDFDocument
from pdfminer.pdftypes import PDFStream, PDFObjRef
from pdfminer.pdfpage import PDFPage
from pdfminer.psparser import PSException
except ImportError:
has_pdflib = False
else:
has_pdflib = True
from .. import log, LOG_PLUGIN, strformat
def search_url(obj, url_data, pageno, seen_objs):
    """Recurse through a PDF object, searching for URLs.

    @param obj: pdfminer PDF object (ref, dict, list or stream)
    @param url_data: url data object to add found URLs to
    @param pageno: 1-based page number for reporting
    @param seen_objs: set of already visited object ids
    """
    if isinstance(obj, PDFObjRef):
        if obj.objid in seen_objs:
            # prevent recursive loops
            return
        seen_objs.add(obj.objid)
        obj = obj.resolve()
    if isinstance(obj, dict):
        for key, value in obj.items():
            if key == 'URI' and isinstance(value, basestring):
                # URIs should be 7bit ASCII encoded, but be safe and encode
                # to unicode
                # XXX this does not use an optional specified base URL
                url = strformat.unicode_safe(value)
                url_data.add_url(url, page=pageno)
            else:
                search_url(value, url_data, pageno, seen_objs)
    elif isinstance(obj, list):
        for elem in obj:
            search_url(elem, url_data, pageno, seen_objs)
    elif isinstance(obj, PDFStream):
        # streams carry their metadata in the attrs dictionary
        search_url(obj.attrs, url_data, pageno, seen_objs)
class PdfParser(_ParserPlugin):
    """PDF parsing plugin."""
    def __init__(self, config):
        """Check for pdfminer."""
        if not has_pdflib:
            log.warn(LOG_PLUGIN, "pdfminer not found for PdfParser plugin")
        super(PdfParser, self).__init__(config)
    def applies_to(self, url_data, pagetype=None):
        """Check for PDF pagetype."""
        return has_pdflib and pagetype == 'pdf'
    def check(self, url_data):
        """Parse PDF data and add all URIs found in page contents and
        annotations."""
        # XXX user authentication from url_data
        password = ''
        data = url_data.get_content()
        # PDFParser needs a seekable file object
        fp = StringIO(data)
        try:
            parser = PDFParser(fp)
            doc = PDFDocument(parser, password=password)
            for (pageno, page) in enumerate(PDFPage.create_pages(doc), start=1):
                if "Contents" in page.attrs:
                    search_url(page.attrs["Contents"], url_data, pageno, set())
                if "Annots" in page.attrs:
                    # link annotations are where clickable URLs live
                    search_url(page.attrs["Annots"], url_data, pageno, set())
        except PSException as msg:
            if not msg.args:
                # at least show the class name
                msg = repr(msg)
            log.warn(LOG_PLUGIN, "Error parsing PDF file: %s", msg)
| 3,550 | Python | .py | 85 | 34.223529 | 80 | 0.64825 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,319 | markdowncheck.py | wummel_linkchecker/linkcheck/plugins/markdowncheck.py | # -*- coding: utf-8 -*-
#
# Copyright © 2014 Vadym Khokhlov
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Parse links in Markdown files.
Supported links are:
<http://autolink.com>
[name](http://link.com "Optional title")
[id]: http://link.com "Optional title"
"""
# Some ideas and code were borrowed from https://pypi.python.org/pypi/markdown2 project
import re
from . import _ContentPlugin
from .. import log, LOG_PLUGIN
class MarkdownCheck(_ContentPlugin):
    """Markdown parsing plugin."""
    # configuration option name for the filename pattern
    _filename_re_key = "filename_re"
    # default pattern matching common Markdown file extensions
    _default_filename_re = re.compile(r'.*\.(markdown|md(own)?|mkdn?)$')
    # autolinks <http://...> and reference-style link definitions
    _link_res = [re.compile(r'<((https?|ftp):[^\'">\s]+)>', re.I),
                 re.compile(r"""
                    \[.+\]: # id
                    [ \t]*\n? # maybe *one* newline
                    [ \t]*
                    <?(.+?)>? # url = \1
                    [ \t]*
                    (?:
                        \n? # maybe one newline
                        [ \t]*
                        (?<=\s) # lookbehind for whitespace
                        ['"(]
                        [^\n]* # title
                        ['")]
                        [ \t]*
                    )? # title is optional
                    (?:\n+|\Z)
                    """, re.X | re.M | re.U)]
    _whitespace = re.compile(r'\s*')
    _strip_anglebrackets = re.compile(r'<(.*)>.*')
    # tail of an inline link: optional quoted title followed by ')'
    _inline_link_title = re.compile(r'''
            (                   # \1
                [ \t]+
                (['"])          # quote char
                (.*?)
            )?                  # title is optional
            \)$
        ''', re.X | re.S)
    def __init__(self, config):
        # fall back to the default filename pattern if none is configured
        # or if the configured pattern does not compile
        super(MarkdownCheck, self).__init__(config)
        self.filename_re = self._default_filename_re
        pattern = config.get(self._filename_re_key)
        if pattern:
            try:
                self.filename_re = re.compile(pattern)
            except re.error as msg:
                log.warn(LOG_PLUGIN, "Invalid regex pattern %r: %s" % (pattern, msg))
    @classmethod
    def read_config(cls, configparser):
        """Read configuration file options."""
        config = dict()
        config[cls._filename_re_key] = configparser.get(cls.__name__, cls._filename_re_key) \
            if configparser.has_option(cls.__name__, cls._filename_re_key) else None
        return config
    def applies_to(self, url_data, pagetype=None):
        """Check for Markdown file."""
        return self.filename_re.search(url_data.base_url) is not None
    def check(self, url_data):
        """Extracts urls from the file."""
        content = url_data.get_content()
        self._check_by_re(url_data, content)
        self._check_inline_links(url_data, content)
    def _save_url(self, url_data, content, url_text, url_pos):
        """Saves url. Converts url to 1-line text and url position as offset from the file beginning to (line, column).
        :param url_data: object for url storing
        :param content: file content
        :param url_text: url text
        :param url_pos: url position from the beginning
        """
        line = content.count('\n', 0, url_pos) + 1
        column = url_pos - content.rfind('\n', 0, url_pos)
        # Python 2 str.translate(None, chars) deletes newlines and spaces
        # so a wrapped url becomes one line
        url_data.add_url(url_text.translate(None, '\n '), line=line, column=column)
    def _check_by_re(self, url_data, content):
        """ Finds urls by re.
        :param url_data: object for url storing
        :param content: file content
        """
        for link_re in self._link_res:
            for u in link_re.finditer(content):
                self._save_url(url_data, content, u.group(1), u.start(1))
    def _find_balanced(self, text, start, open_c, close_c):
        """Returns the index where the open_c and close_c characters balance
        out - the same number of open_c and close_c are encountered - or the
        end of string if it's reached before the balance point is found.
        """
        i = start
        l = len(text)
        count = 1
        while count > 0 and i < l:
            if text[i] == open_c:
                count += 1
            elif text[i] == close_c:
                count -= 1
            i += 1
        return i
    def _extract_url_and_title(self, text, start):
        """Extracts the url from the tail of a link.
        Returns (url, end index) or (None, None) when no valid tail
        is found."""
        # text[start] equals the opening parenthesis
        idx = self._whitespace.match(text, start + 1).end()
        if idx == len(text):
            return None, None
        end_idx = idx
        has_anglebrackets = text[idx] == "<"
        if has_anglebrackets:
            end_idx = self._find_balanced(text, end_idx+1, "<", ">")
        end_idx = self._find_balanced(text, end_idx, "(", ")")
        match = self._inline_link_title.search(text, idx, end_idx)
        if not match:
            return None, None
        url = text[idx:match.start()]
        if has_anglebrackets:
            url = self._strip_anglebrackets.sub(r'\1', url)
        return url, end_idx
    def _check_inline_links(self, url_data, content):
        """Checks inline links.
        :param url_data: url_data object
        :param content: content for processing
        """
        # upper bound for the length of a link text, to avoid scanning
        # the whole file for a missing closing bracket
        MAX_LINK_TEXT_SENTINEL = 3000
        curr_pos = 0
        content_length = len(content)
        while True: # Handle the next link.
            # The next '[' is the start of:
            # - an inline anchor:   [text](url "title")
            # - an inline img:      ![text](url "title")
            # - not markup:         [...anything else...
            try:
                start_idx = content.index('[', curr_pos)
            except ValueError:
                break
            # Find the matching closing ']'.
            bracket_depth = 0
            for p in range(start_idx+1, min(start_idx+MAX_LINK_TEXT_SENTINEL, content_length)):
                if content[p] == ']':
                    bracket_depth -= 1
                    if bracket_depth < 0:
                        break
                elif content[p] == '[':
                    bracket_depth += 1
            else:
                # Closing bracket not found within sentinel length. This isn't markup.
                curr_pos = start_idx + 1
                continue
            # Now determine what this is by the remainder.
            p += 1
            if p >= content_length:
                return
            if content[p] == '(':
                url, url_end_idx = self._extract_url_and_title(content, p)
                if url is not None:
                    self._save_url(url_data, content, url, p)
                    start_idx = url_end_idx
            # Otherwise, it isn't markup.
            curr_pos = start_idx + 1
| 7,428 | Python | .py | 176 | 31.590909 | 119 | 0.535225 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,320 | anchorcheck.py | wummel_linkchecker/linkcheck/plugins/anchorcheck.py | # -*- coding: iso-8859-1 -*-
# Copyright (C) 2000-2014 Bastian Kleineidam
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Check HTML anchors
"""
from . import _ContentPlugin
from .. import log, LOG_PLUGIN, url as urlutil
from ..htmlutil import linkparse
from ..parser import find_links
class AnchorCheck(_ContentPlugin):
    """Checks validity of HTML anchors."""
    def applies_to(self, url_data):
        """Check for HTML anchor existence."""
        return url_data.is_html() and url_data.anchor
    def check(self, url_data):
        """Check content for invalid anchors."""
        log.debug(LOG_PLUGIN, "checking content for invalid anchors")
        # list of parsed anchors
        self.anchors = []
        # collect all anchor definitions from the document content
        find_links(url_data, self.add_anchor, linkparse.AnchorTags)
        self.check_anchor(url_data)
    def add_anchor (self, url, line, column, name, base):
        """Add anchor URL."""
        self.anchors.append((url, line, column, name, base))
    def check_anchor(self, url_data):
        """If URL is valid, parseable and has an anchor, check it.
        A warning is added to the URL if the anchor is not found.
        """
        log.debug(LOG_PLUGIN, "checking anchor %r in %s", url_data.anchor, self.anchors)
        # url_data.anchor is URL-encoded, so quote the parsed anchors the
        # same way before comparing
        enc = lambda anchor: urlutil.url_quote_part(anchor, encoding=url_data.encoding)
        if any(x for x in self.anchors if enc(x[0]) == url_data.anchor):
            return
        # anchor not found: list the available anchors in the warning
        if self.anchors:
            anchornames = sorted(set(u"`%s'" % x[0] for x in self.anchors))
            anchors = u", ".join(anchornames)
        else:
            anchors = u"-"
        args = {"name": url_data.anchor, "anchors": anchors}
        msg = u"%s %s" % (_("Anchor `%(name)s' not found.") % args,
            _("Available anchors: %(anchors)s.") % args)
        url_data.add_warning(msg)
| 2,516 | Python | .py | 55 | 39.872727 | 88 | 0.665988 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,321 | locationinfo.py | wummel_linkchecker/linkcheck/plugins/locationinfo.py | # -*- coding: iso-8859-1 -*-
# Copyright (C) 2000-2014 Bastian Kleineidam
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Store and retrieve country names for IPs.
"""
from . import _ConnectionPlugin
import os
import sys
import socket
from ..lock import get_lock
from ..decorators import synchronized
from ..strformat import unicode_safe
from .. import log, LOG_PLUGIN
class LocationInfo(_ConnectionPlugin):
    """Adds the country and if possible city name of the URL host as info.
    Needs GeoIP or pygeoip and a local country or city lookup DB installed."""

    def __init__(self, config):
        """Warn once when no geoip backend could be loaded."""
        if not geoip:
            log.warn(LOG_PLUGIN, "GeoIP or pygeoip not found for LocationInfo plugin.")
        super(LocationInfo, self).__init__(config)

    def applies_to(self, url_data):
        """Only handle valid URLs with a host, and only when geoip exists."""
        return url_data.valid and url_data.host and geoip

    def check(self, url_data):
        """Look up the host location and attach it as URL info."""
        where = get_location(url_data.host)
        if not where:
            return
        url_data.add_info(_("URL is located in %(location)s.") %
                          {"location": _(where)})
# It is unknown if the geoip library is already thread-safe, so
# no risks should be taken here by using a lock.
_lock = get_lock("geoip")
def get_geoip_dat ():
    """Find a GeoIP database, preferring city over country lookup."""
    if os.name == 'nt':
        search_dirs = (sys.exec_prefix, r"c:\geoip")
    else:
        search_dirs = ("/usr/local/share/GeoIP", "/usr/share/GeoIP")
    # scan directories in order; within each, prefer the city database
    for directory in search_dirs:
        for basename in ("GeoIPCity.dat", "GeoIP.dat"):
            candidate = os.path.join(directory, basename)
            if os.path.isfile(candidate):
                return candidate
    # implicit None when no database file exists
# try importing both the C-library GeoIP and the pure-python pygeoip
geoip_dat = get_geoip_dat()
# geoip is the opened lookup object, or None when no DB or module is found
geoip = None
if geoip_dat:
    try:
        # prefer the C-library binding
        import GeoIP
        geoip = GeoIP.open(geoip_dat, GeoIP.GEOIP_STANDARD)
        geoip_error = GeoIP.error
    except ImportError:
        try:
            # fall back to the pure-python implementation
            import pygeoip
            geoip = pygeoip.GeoIP(geoip_dat)
            geoip_error = pygeoip.GeoIPError
        except ImportError:
            # neither module available: geoip stays None, so
            # get_location() returns early and never calls the lambdas
            pass
    if geoip_dat.endswith('GeoIPCity.dat'):
        # city DB: record_by_name() returns a dict with city and country keys
        get_geoip_record = lambda host: geoip.record_by_name(host)
    else:
        # country-only DB: wrap the result in a dict with the same key
        # that the city record uses, so get_location() can treat both alike
        get_geoip_record = lambda host: {'country_name': geoip.country_name_by_name(host)}
@synchronized(_lock)
def get_location (host):
    """Get translated country and optional city name for given host.

    @return: u"city, country", just the country, or an empty unicode
        string when the record has neither; None when no geoip backend
        is available or the lookup failed
    """
    if geoip is None:
        # no geoip available
        return None
    try:
        record = get_geoip_record(host)
    except (geoip_error, socket.error):
        log.debug(LOG_PLUGIN, "Geoip error for %r", host, exception=True)
        # ignore lookup errors
        return None
    value = u""
    if record and record.get("city"):
        value += unicode_safe(record["city"])
    if record and record.get("country_name"):
        if value:
            # separate city and country
            value += u", "
        value += unicode_safe(record["country_name"])
    return value
| 3,883 | Python | .py | 100 | 33.25 | 90 | 0.673999 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,322 | __init__.py | wummel_linkchecker/linkcheck/plugins/__init__.py | # -*- coding: iso-8859-1 -*-
# Copyright (C) 2014 Bastian Kleineidam
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Module for plugin management.
"""
from .. import loader, log, LOG_PLUGIN
from ..decorators import notimplemented
class _PluginBase(object):
    """Basic plugin class featuring plugin identification and
    helper functions.

    Subclasses must override applies_to() and check(); read_config()
    may be overridden to parse plugin-specific configuration options.
    """
    def __init__(self, config):
        """Add plugin-specific configuration."""
        pass
    @notimplemented
    def applies_to(self, url_data, **kwargs):
        """See if url_data should be handled by this plugin."""
        pass
    @notimplemented
    def check(self, url_data):
        """Common check method run for all plugins."""
        pass
    @classmethod
    def read_config(cls, configparser):
        """Read configuration file options."""
        pass
class _ConnectionPlugin(_PluginBase):
    """Marker base class for plugins run after connection checks."""
    pass
class _ContentPlugin(_PluginBase):
    """Marker base class for plugins run on valid URLs with content."""
    pass
class _ParserPlugin(_PluginBase):
    """Marker base class for plugins run to parse contents of valid URLs."""
    pass
def get_plugin_modules(folders, package='plugins',
                       parentpackage='linkcheck.dummy'):
    """Yield plugin modules, first from the given folders and then
    from the built-in plugin package."""
    for directory in folders:
        for mod in loader.get_folder_modules(directory, parentpackage):
            yield mod
    for mod in loader.get_package_modules(package):
        yield mod
def get_plugin_classes(modules):
    """Get plugin classes for given modules.

    Only subclasses of the three plugin base classes are returned.
    """
    classes = (_ConnectionPlugin, _ContentPlugin, _ParserPlugin)
    return loader.get_plugins(modules, classes)
class PluginManager(object):
    """Manage all connection and content plugins."""

    def __init__(self, config):
        """Instantiate all plugins that are enabled in the configuration."""
        self.connection_plugins = []
        self.content_plugins = []
        self.parser_plugins = []
        modules = get_plugin_modules(config["pluginfolders"])
        self.load_modules(modules, config)

    def load_modules(self, modules, config):
        """Load plugin modules."""
        enabled = config["enabledplugins"]
        for pluginclass in get_plugin_classes(modules):
            name = pluginclass.__name__
            if name not in enabled:
                continue
            # sort the plugin into the list matching its base class
            if issubclass(pluginclass, _ConnectionPlugin):
                log.debug(LOG_PLUGIN, "Enable connection plugin %s", name)
                self.connection_plugins.append(pluginclass(config[name]))
            elif issubclass(pluginclass, _ContentPlugin):
                log.debug(LOG_PLUGIN, "Enable content plugin %s", name)
                self.content_plugins.append(pluginclass(config[name]))
            elif issubclass(pluginclass, _ParserPlugin):
                log.debug(LOG_PLUGIN, "Enable parser plugin %s", name)
                self.parser_plugins.append(pluginclass(config[name]))
            else:
                raise ValueError("Invalid plugin class %s" % pluginclass)

    def run_connection_plugins(self, url_data):
        """Run all connection plugins."""
        run_plugins(self.connection_plugins, url_data)

    def run_content_plugins(self, url_data):
        """Run all content plugins."""
        run_plugins(self.content_plugins, url_data)

    def run_parser_plugins(self, url_data, pagetype):
        """Run parser plugins for given pagetype."""
        run_plugins(self.parser_plugins, url_data,
                    stop_after_match=True, pagetype=pagetype)
def run_plugins(plugins, url_data, stop_after_match=False, **kwargs):
    """Run the check(url_data) method of given plugins."""
    for checker in plugins:
        log.debug(LOG_PLUGIN, "Run plugin %s", checker.__class__.__name__)
        if not checker.applies_to(url_data, **kwargs):
            continue
        checker.check(url_data)
        # with stop_after_match, only the first applicable plugin runs
        if stop_after_match:
            break
| 4,552 | Python | .py | 103 | 36.786408 | 92 | 0.665687 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,323 | sslcertcheck.py | wummel_linkchecker/linkcheck/plugins/sslcertcheck.py | # -*- coding: iso-8859-1 -*-
# Copyright (C) 2000-2014 Bastian Kleineidam
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Handle https links.
"""
import time
import threading
from . import _ConnectionPlugin
from .. import strformat, LinkCheckerError
from ..decorators import synchronized
_lock = threading.Lock()
# configuration option names
sslcertwarndays = "sslcertwarndays"
class SslCertificateCheck(_ConnectionPlugin):
    """Check SSL certificate expiration date. Only internal https: links
    will be checked. A domain will only be checked once to avoid duplicate
    warnings.
    The expiration warning time can be configured with the sslcertwarndays
    option."""

    def __init__(self, config):
        """Initialize the certificate expiration warning threshold."""
        # fix: the docstring previously said "Initialize clamav configuration",
        # a copy-paste leftover from another plugin.
        super(SslCertificateCheck, self).__init__(config)
        # number of seconds before expiry at which a warning is issued
        self.warn_ssl_cert_secs_valid = config[sslcertwarndays] * strformat.SECONDS_PER_DAY
        # do not check hosts multiple times
        self.checked_hosts = set()

    def applies_to(self, url_data):
        """Check validity, scheme, extern and url_connection."""
        return url_data.valid and url_data.scheme == 'https' and \
           not url_data.extern[0] and url_data.url_connection is not None

    @synchronized(_lock)
    def check(self, url_data):
        """Run all SSL certificate checks that have not yet been done.
        OpenSSL already checked the SSL notBefore and notAfter dates.
        """
        host = url_data.urlparts[1]
        if host in self.checked_hosts:
            # each host is only checked once
            return
        self.checked_hosts.add(host)
        cert = url_data.ssl_cert
        config = url_data.aggregate.config
        if cert and 'notAfter' in cert:
            self.check_ssl_valid_date(url_data, cert)
        elif config['sslverify']:
            # verification is on, but the peer gave no expiry information
            msg = _('certificate did not include "notAfter" information')
            url_data.add_warning(msg)
        else:
            msg = _('SSL verification is disabled; enable the sslverify option')
            url_data.add_warning(msg)

    def check_ssl_valid_date(self, url_data, cert):
        """Check if the certificate is still valid, or if configured check
        if it's at least a number of days valid.
        """
        import ssl
        try:
            notAfter = ssl.cert_time_to_seconds(cert['notAfter'])
        except ValueError:
            # fix: message previously misspelled "certficate"; the caught
            # exception object was bound to a name that was never used
            msg = _('Invalid SSL certificate "notAfter" value %r') % cert['notAfter']
            url_data.add_warning(msg)
            return
        curTime = time.time()
        # Calculate seconds until certificate expires. Can be negative if
        # the certificate is already expired.
        secondsValid = notAfter - curTime
        args = dict(expire=cert['notAfter'])
        if secondsValid < 0:
            # fix: message previously misspelled "certficate"
            msg = _('SSL certificate is expired on %(expire)s.')
            url_data.add_warning(msg % args)
        else:
            args['valid'] = strformat.strduration_long(secondsValid)
            if secondsValid < self.warn_ssl_cert_secs_valid:
                msg = _('SSL certificate expires on %(expire)s and is only %(valid)s valid.')
                url_data.add_warning(msg % args)
            else:
                msg = _('SSL certificate expires on %(expire)s and is %(valid)s valid.')
                url_data.add_info(msg % args)

    @classmethod
    def read_config(cls, configparser):
        """Read configuration file options.

        Raises LinkCheckerError for a non-positive sslcertwarndays value;
        defaults to 30 days when the option is absent.
        """
        config = dict()
        section = cls.__name__
        option = sslcertwarndays
        if configparser.has_option(section, option):
            num = configparser.getint(section, option)
            if num > 0:
                config[option] = num
            else:
                msg = _("invalid value for %s: %d must not be less than %d") % (option, num, 0)
                raise LinkCheckerError(msg)
        else:
            # set the default
            config[option] = 30
        return config
| 4,577 | Python | .py | 106 | 35.367925 | 95 | 0.648891 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,324 | syntaxchecks.py | wummel_linkchecker/linkcheck/plugins/syntaxchecks.py | # -*- coding: iso-8859-1 -*-
# Copyright (C) 2000-2014 Bastian Kleineidam
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import threading
import time
import requests
from xml.dom.minidom import parseString
from . import _ContentPlugin
from .. import log, LOG_PLUGIN
from ..decorators import synchronized
_w3_time_lock = threading.Lock()
class W3Timer(object):
    """Ensure W3C apis are not hammered."""
    # minimum spacing between W3C API calls, in seconds
    SleepSeconds = 2
    def __init__(self):
        """Remember last API call."""
        self.last_w3_call = 0
    @synchronized(_w3_time_lock)
    def check_w3_time (self):
        """Pause when the last W3C API call was less than SleepSeconds ago.

        Note: sleeps the full SleepSeconds interval rather than only the
        remaining time; last_w3_call is then reset to the current time.
        """
        if time.time() - self.last_w3_call < W3Timer.SleepSeconds:
            time.sleep(W3Timer.SleepSeconds)
        self.last_w3_call = time.time()
class HtmlSyntaxCheck(_ContentPlugin):
    """Check the syntax of HTML pages with the online W3C HTML validator.
    See http://validator.w3.org/docs/api.html.
    """
    def __init__(self, config):
        """Initialize plugin."""
        super(HtmlSyntaxCheck, self).__init__(config)
        # rate limiter for calls to the W3C API
        self.timer = W3Timer()
    def applies_to(self, url_data):
        """Check for HTML and extern."""
        return url_data.is_html() and not url_data.extern[0]
    def check(self, url_data):
        """Check HTML syntax of given URL."""
        self.timer.check_w3_time()
        session = url_data.session
        try:
            # ask the validator for a SOAP 1.2 response about this URI
            body = {'uri': url_data.url, 'output': 'soap12'}
            response = session.post('http://validator.w3.org/check', data=body)
            response.raise_for_status()
            # header is set by the validator service; anything other than
            # 'Valid' (including a missing header) is treated as invalid
            if response.headers.get('x-w3c-validator-status', 'Invalid') == 'Valid':
                url_data.add_info(u"W3C Validator: %s" % _("valid HTML syntax"))
                return
            check_w3_errors(url_data, response.text, "W3C HTML")
        except requests.exceptions.RequestException:
            pass # ignore service failures
        except Exception as msg:
            log.warn(LOG_PLUGIN, _("HTML syntax check plugin error: %(msg)s ") % {"msg": msg})
class CssSyntaxCheck(_ContentPlugin):
    """Check the syntax of CSS stylesheets with the online W3C CSS validator.
    See http://jigsaw.w3.org/css-validator/manual.html#expert.
    """
    def __init__(self, config):
        """Initialize plugin."""
        super(CssSyntaxCheck, self).__init__(config)
        # rate limiter for calls to the W3C API
        self.timer = W3Timer()
    def applies_to(self, url_data):
        """Check for CSS and extern."""
        return url_data.is_css() and not url_data.extern[0]
    def check(self, url_data):
        """Check CSS syntax of given URL."""
        self.timer.check_w3_time()
        session = url_data.session
        try:
            url = 'http://jigsaw.w3.org/css-validator/validator'
            params = {
                'uri': url_data.url,
                'warning': '2',
                'output': 'soap12',
            }
            response = session.get(url, params=params)
            response.raise_for_status()
            if response.headers.get('X-W3C-Validator-Status', 'Invalid') == 'Valid':
                url_data.add_info(u"W3C Validator: %s" % _("valid CSS syntax"))
                return
            # fix: errors were previously labeled "W3C HTML" although this
            # plugin validates CSS; warnings now carry the correct w3type
            check_w3_errors(url_data, response.text, "W3C CSS")
        except requests.exceptions.RequestException:
            pass # ignore service failures
        except Exception as msg:
            log.warn(LOG_PLUGIN, _("CSS syntax check plugin error: %(msg)s ") % {"msg": msg})
def check_w3_errors (url_data, xml, w3type):
    """Add warnings for W3C HTML or CSS errors in xml format.
    w3type is either "W3C HTML" or "W3C CSS"."""
    dom = parseString(xml)
    # each m:error element of the SOAP 1.2 response becomes one warning
    for error in dom.getElementsByTagName('m:error'):
        warnmsg = _("%(w3type)s validation error at line %(line)s col %(column)s: %(msg)s")
        attrs = {
            "w3type": w3type,
            "line": getXmlText(error, "m:line"),
            "column": getXmlText(error, "m:col"),
            "msg": getXmlText(error, "m:message"),
        }
        url_data.add_warning(warnmsg % attrs)
def getXmlText (parent, tag):
    """Return concatenated text-node content of the first child element
    of parent matching the given tag."""
    elem = parent.getElementsByTagName(tag)[0]
    # join only the direct text-node children of the element
    return ''.join(node.data for node in elem.childNodes
                   if node.nodeType == node.TEXT_NODE)
| 5,093 | Python | .py | 118 | 35.686441 | 94 | 0.633098 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,325 | regexcheck.py | wummel_linkchecker/linkcheck/plugins/regexcheck.py | # -*- coding: iso-8859-1 -*-
# Copyright (C) 2000-2014 Bastian Kleineidam
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Check page content with regular expression.
"""
import re
from . import _ContentPlugin
from .. import log, LOG_PLUGIN
class RegexCheck(_ContentPlugin):
    """Define a regular expression which prints a warning if it matches
    any content of the checked link. This applies only to valid pages,
    so we can get their content.
    Use this to check for pages that contain some form of error
    message, for example 'This page has moved' or 'Oracle
    Application error'.
    Note that multiple values can be combined in the regular expression,
    for example "(This page has moved|Oracle Application error)"."""

    def __init__(self, config):
        """Set warning regex from config."""
        super(RegexCheck, self).__init__(config)
        self.warningregex = None
        pattern = config["warningregex"]
        if pattern:
            try:
                self.warningregex = re.compile(pattern)
            except re.error as msg:
                # keep warningregex as None so this plugin is a no-op
                log.warn(LOG_PLUGIN, "Invalid regex pattern %r: %s" % (pattern, msg))

    def applies_to(self, url_data):
        """Check for warningregex, extern flag and parseability."""
        # fix: return a bool instead of leaking the compiled pattern object
        return bool(self.warningregex) and not url_data.extern[0] and url_data.is_parseable()

    def check(self, url_data):
        """Warn about the first match of the configured regex in the
        page content."""
        log.debug(LOG_PLUGIN, "checking content for warning regex")
        content = url_data.get_content()
        match = self.warningregex.search(content)
        if match:
            # fix: report a 1-based line number; counting newlines before
            # the match start yields a 0-based index
            line = content.count('\n', 0, match.start()) + 1
            msg = _("Found %(match)r at line %(line)d in link contents.")
            url_data.add_warning(msg % {"match": match.group(), "line": line})

    @classmethod
    def read_config(cls, configparser):
        """Read configuration file options."""
        config = dict()
        section = cls.__name__
        option = "warningregex"
        if configparser.has_option(section, option):
            value = configparser.get(section, option)
        else:
            value = None
        config[option] = value
        return config
| 3,009 | Python | .py | 68 | 37.838235 | 87 | 0.673713 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,326 | opera.py | wummel_linkchecker/linkcheck/bookmarks/opera.py | # -*- coding: iso-8859-1 -*-
# Copyright (C) 2011-2014 Bastian Kleineidam
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import os
# Windows filename encoding
nt_filename_encoding="mbcs"
# List of possible Opera bookmark files.
OperaBookmarkFiles = (
"bookmarks.adr", # for Opera >= 10.0
"opera6.adr",
)
def get_profile_dir ():
    """Return path where all profiles of current user are stored.

    NOTE(review): uses the Python 2 ``unicode`` builtin; on platforms
    where os.name is neither 'nt' nor 'posix', ``dirpath`` is unbound
    and a NameError is raised (find_bookmark_file catches it).
    """
    if os.name == 'nt':
        basedir = unicode(os.environ["APPDATA"], nt_filename_encoding)
        dirpath = os.path.join(basedir, u"Opera", u"Opera")
    elif os.name == 'posix':
        basedir = unicode(os.environ["HOME"])
        dirpath = os.path.join(basedir, u".opera")
    return dirpath
def find_bookmark_file ():
    """Return the bookmark file of the Opera profile.
    Returns absolute filename if found, or empty string if no bookmark file
    could be found.
    """
    try:
        profiledir = get_profile_dir()
        if os.path.isdir(profiledir):
            # check the candidate filenames in order of preference
            for candidate in OperaBookmarkFiles:
                path = os.path.join(profiledir, candidate)
                if os.path.isfile(path):
                    return path
    except Exception:
        # any lookup problem means "not found"
        pass
    return u""
def parse_bookmark_data (data):
    """Return iterator for bookmarks of the form (url, name, line number).
    Bookmarks are not sorted.
    """
    # the NAME= line precedes its URL= line in .adr files
    name = None
    for lineno, raw in enumerate(data.splitlines(), 1):
        entry = raw.strip()
        if entry.startswith("NAME="):
            name = entry[5:]
        elif entry.startswith("URL="):
            target = entry[4:]
            if target and name is not None:
                yield (target, name, lineno)
        else:
            # any other line ends the current NAME= context
            name = None
| 2,364 | Python | .py | 65 | 30.692308 | 75 | 0.660847 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,327 | firefox.py | wummel_linkchecker/linkcheck/bookmarks/firefox.py | # -*- coding: iso-8859-1 -*-
# Copyright (C) 2010-2014 Bastian Kleineidam
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""Parser for FireFox bookmark file."""
import os
import glob
import re
try:
import sqlite3
has_sqlite = True
except ImportError:
has_sqlite = False
# Regex matching a Firefox places database path (case-insensitive).
# fix: the inline (?i) flag must appear at the start of the pattern —
# placing it mid-pattern is an error on Python >= 3.11 — and the dot is
# escaped so only a literal '.' matches.
extension = re.compile(r'(?i)/places\.sqlite$')
# Windows filename encoding
nt_filename_encoding="mbcs"
def get_profile_dir ():
    """Return path where all profiles of current user are stored.

    NOTE(review): uses the Python 2 ``unicode`` builtin; on platforms
    where os.name is neither 'nt' nor 'posix', ``dirpath`` is unbound
    and a NameError is raised (find_bookmark_file catches it).
    """
    if os.name == 'nt':
        basedir = unicode(os.environ["APPDATA"], nt_filename_encoding)
        dirpath = os.path.join(basedir, u"Mozilla", u"Firefox", u"Profiles")
    elif os.name == 'posix':
        basedir = unicode(os.environ["HOME"])
        dirpath = os.path.join(basedir, u".mozilla", u"firefox")
    return dirpath
def find_bookmark_file (profile="*.default"):
    """Return the first found places.sqlite file of the profile directories
    ending with '.default' (or another given profile name).
    Returns absolute filename if found, or empty string if no bookmark file
    could be found.
    """
    try:
        pattern = u"%s/%s" % (get_profile_dir(), profile)
        for profiledir in glob.glob(pattern):
            if not os.path.isdir(profiledir):
                continue
            dbfile = os.path.join(profiledir, "places.sqlite")
            if os.path.isfile(dbfile):
                return dbfile
    except Exception:
        # any lookup problem means "not found"
        pass
    return u""
def parse_bookmark_file (filename):
    """Return iterator for bookmarks of the form (url, name).
    Bookmarks are not sorted.
    Returns None if sqlite3 module is not installed.
    """
    if not has_sqlite:
        # bare return inside a generator: callers get an empty iterator
        # (the docstring's "Returns None" is not literally accurate)
        return
    conn = sqlite3.connect(filename, timeout=0.5)
    try:
        c = conn.cursor()
        try:
            # join places and bookmarks, skipping hidden entries and
            # internal 'place:' query URLs
            sql = """SELECT mp.url, mb.title
            FROM moz_places mp, moz_bookmarks mb
            WHERE mp.hidden=0 AND mp.url NOT LIKE 'place:%' AND
            mp.id=mb.fk"""
            c.execute(sql)
            for url, name in c:
                if not name:
                    # fall back to the URL when the bookmark has no title
                    name = url
                yield (url, name)
        finally:
            c.close()
    finally:
        conn.close()
| 2,810 | Python | .py | 76 | 30.855263 | 76 | 0.656147 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,328 | safari.py | wummel_linkchecker/linkcheck/bookmarks/safari.py | # -*- coding: iso-8859-1 -*-
# Copyright (C) 2011-2014 Bastian Kleineidam
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import os
import sys
import plistlib
try:
import biplist
has_biplist = True
except ImportError:
has_biplist = False
def get_profile_dir ():
    """Return path where Safari data of current user is stored.

    NOTE(review): uses the Python 2 ``unicode`` builtin.
    """
    basedir = unicode(os.environ["HOME"])
    return os.path.join(basedir, u"Library", u"Safari")
def find_bookmark_file ():
    """Return the bookmark file of the Default profile.
    Returns absolute filename if found, or empty string if no bookmark file
    could be found.
    """
    # Safari only exists on OS X
    if sys.platform != 'darwin':
        return u""
    try:
        profiledir = get_profile_dir()
        if os.path.isdir(profiledir):
            plist = os.path.join(profiledir, u"Bookmarks.plist")
            if os.path.isfile(plist):
                return plist
    except Exception:
        # any lookup problem means "not found"
        pass
    return u""
def parse_bookmark_file (filename):
    """Return iterator for bookmarks of the form (url, name).
    Bookmarks are not sorted.

    The plist file is read eagerly; iteration happens lazily.
    """
    return parse_plist(get_plist_data_from_file(filename))
def parse_bookmark_data (data):
    """Return iterator for bookmarks of the form (url, name).
    Bookmarks are not sorted.

    ``data`` is raw plist content as a string; presumably the same
    format as a Bookmarks.plist file — verify against callers.
    """
    return parse_plist(get_plist_data_from_string(data))
def get_plist_data_from_file (filename):
    """Parse plist data for a file. Tries biplist, falling back to
    plistlib.

    Returns an empty dict when plistlib cannot parse the file.
    """
    if has_biplist:
        return biplist.readPlist(filename)
    # fall back to normal plistlist
    try:
        # NOTE(review): plistlib.readPlist was removed in Python 3.9;
        # on modern Pythons the AttributeError is swallowed below
        return plistlib.readPlist(filename)
    except Exception:
        # not parseable (eg. not well-formed, or binary)
        return {}
def get_plist_data_from_string (data):
    """Parse plist data for a string. Tries biplist, falling back to
    plistlib.

    Returns an empty dict when plistlib cannot parse the data.
    """
    if has_biplist:
        return biplist.readPlistFromString(data)
    # fall back to normal plistlist
    try:
        # NOTE(review): plistlib.readPlistFromString was removed in
        # Python 3.9; the AttributeError is swallowed below
        return plistlib.readPlistFromString(data)
    except Exception:
        # not parseable (eg. not well-formed, or binary)
        return {}
# some key strings
KEY_URLSTRING = 'URLString'
KEY_URIDICTIONARY = 'URIDictionary'
KEY_CHILDREN = 'Children'
KEY_WEBBOOKMARKTYPE = 'WebBookmarkType'

def parse_plist(entry):
    """Recursively yield (url, title) pairs from a plist dictionary entry.
    Leaf entries without a title fall back to the URL itself."""
    if is_leaf(entry):
        url = entry[KEY_URLSTRING]
        yield (url, entry[KEY_URIDICTIONARY].get('title', url))
    elif has_children(entry):
        for child in entry[KEY_CHILDREN]:
            for pair in parse_plist(child):
                yield pair

def is_leaf (entry):
    """Return true if plist entry is an URL entry."""
    return entry.get(KEY_WEBBOOKMARKTYPE) == 'WebBookmarkTypeLeaf'

def has_children (entry):
    """Return true if plist entry has children."""
    return entry.get(KEY_WEBBOOKMARKTYPE) == 'WebBookmarkTypeList'
| 3,547 | Python | .py | 97 | 31.649485 | 75 | 0.696793 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,329 | __init__.py | wummel_linkchecker/linkcheck/bookmarks/__init__.py | # -*- coding: iso-8859-1 -*-
# Copyright (C) 2011-2014 Bastian Kleineidam
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
| 780 | Python | .py | 16 | 47.75 | 73 | 0.774869 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,330 | chrome.py | wummel_linkchecker/linkcheck/bookmarks/chrome.py | # -*- coding: iso-8859-1 -*-
# Copyright (C) 2011-2014 Bastian Kleineidam
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import os
import sys
# Windows filename encoding
nt_filename_encoding="mbcs"
def get_profile_dir ():
    """Return path where all profiles of current user are stored.

    NOTE(review): uses the Python 2 ``unicode`` builtin; on platforms
    where os.name is neither 'nt' nor 'posix', ``dirpath`` is unbound
    and a NameError is raised (find_bookmark_file catches it).
    """
    if os.name == 'nt':
        if "LOCALAPPDATA" in os.environ:
            basedir = unicode(os.environ["LOCALAPPDATA"], nt_filename_encoding)
        else:
            # read local appdata directory from registry
            from ..winutil import get_shell_folder
            try:
                basedir = get_shell_folder("Local AppData")
            except EnvironmentError:
                # registry lookup failed; fall back to the pre-Vista default
                basedir = os.path.join(os.environ["USERPROFILE"], "Local Settings", "Application Data")
        dirpath = os.path.join(basedir, u"Google", u"Chrome", u"User Data")
    elif os.name == 'posix':
        basedir = unicode(os.environ["HOME"])
        if sys.platform == 'darwin':
            dirpath = os.path.join(basedir, u"Library", u"Application Support")
        else:
            dirpath = os.path.join(basedir, u".config")
        dirpath = os.path.join(dirpath, u"Google", u"Chrome")
    return dirpath
def find_bookmark_file (profile="Default"):
    """Return the bookmark file of the Default profile.
    Returns absolute filename if found, or empty string if no bookmark file
    could be found.
    """
    try:
        profiledir = os.path.join(get_profile_dir(), profile)
        if os.path.isdir(profiledir):
            bookmarks = os.path.join(profiledir, "Bookmarks")
            if os.path.isfile(bookmarks):
                return bookmarks
    except Exception:
        # any lookup problem means "not found"
        pass
    return u""
from .chromium import parse_bookmark_data, parse_bookmark_file
| 2,404 | Python | .py | 56 | 36.785714 | 103 | 0.67906 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,331 | chromium.py | wummel_linkchecker/linkcheck/bookmarks/chromium.py | # -*- coding: iso-8859-1 -*-
# Copyright (C) 2011-2014 Bastian Kleineidam
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import os
import sys
import json
# Windows filename encoding
nt_filename_encoding="mbcs"
def get_profile_dir ():
    """Return path where all profiles of current user are stored.

    NOTE(review): uses the Python 2 ``unicode`` builtin; on platforms
    where os.name is neither 'nt' nor 'posix', ``dirpath`` is unbound
    and a NameError is raised (find_bookmark_file catches it).
    """
    if os.name == 'nt':
        if "LOCALAPPDATA" in os.environ:
            basedir = unicode(os.environ["LOCALAPPDATA"], nt_filename_encoding)
        else:
            # read local appdata directory from registry
            from ..winutil import get_shell_folder
            try:
                basedir = get_shell_folder("Local AppData")
            except EnvironmentError:
                # registry lookup failed; fall back to the pre-Vista default
                basedir = os.path.join(os.environ["USERPROFILE"], "Local Settings", "Application Data")
        dirpath = os.path.join(basedir, u"Chromium", u"User Data")
    elif os.name == 'posix':
        basedir = unicode(os.environ["HOME"])
        if sys.platform == 'darwin':
            dirpath = os.path.join(basedir, u"Library", u"Application Support")
        else:
            dirpath = os.path.join(basedir, u".config")
        dirpath = os.path.join(dirpath, u"chromium")
    return dirpath
def find_bookmark_file (profile="Default"):
    """Locate the "Bookmarks" file of the given Chromium profile.

    Return the absolute filename, or an empty unicode string when the
    profile directory or bookmark file does not exist or any error
    occurs while looking it up.
    """
    try:
        profile_dir = os.path.join(get_profile_dir(), profile)
        if not os.path.isdir(profile_dir):
            return u""
        bookmark_file = os.path.join(profile_dir, "Bookmarks")
        if not os.path.isfile(bookmark_file):
            return u""
        return bookmark_file
    except Exception:
        # Best effort: treat any failure as "no bookmark file found".
        return u""
def parse_bookmark_data (data):
    """Parse a JSON string of Chromium bookmarks.

    Return an iterator of (url, name) pairs. Bookmarks are not sorted.
    """
    tree = json.loads(data)
    for entry in parse_bookmark_json(tree):
        yield entry
def parse_bookmark_file (file):
    """Parse an open file object containing Chromium bookmark JSON.

    Return an iterator of (url, name) pairs. Bookmarks are not sorted.
    """
    tree = json.load(file)
    for entry in parse_bookmark_json(tree):
        yield entry
def parse_bookmark_json (data):
    """Walk every bookmark root of a parsed Chromium JSON document and
    yield its (url, name) pairs."""
    roots = data["roots"]
    for root in roots.values():
        for entry in parse_bookmark_node(root):
            yield entry
def parse_bookmark_node (node):
    """Recursively yield (url, name) pairs from one bookmark JSON node.

    A node of type "url" is a single bookmark; a node of type "folder"
    holds child nodes. Nodes of any other type are ignored.
    """
    node_type = node["type"]
    if node_type == "url":
        yield node["url"], node["name"]
    elif node_type == "folder":
        for child in node["children"]:
            # Depth-first recursion into subfolders.
            for entry in parse_bookmark_node(child):
                yield entry
| 3,342 | Python | .py | 83 | 34.024096 | 103 | 0.665228 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,332 | filter_xml_output.py | wummel_linkchecker/doc/examples/filter_xml_output.py | #!/usr/bin/python
# -*- coding: iso-8859-1 -*-
# Copyright (C) 2011 Bastian Kleineidam
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Example to filter XML output.
Call with XML output filename as first argument.
Prints filtered result on standard output.
"""
import sys
from xml.etree.ElementTree import parse
def main (args):
    """Read the XML report named by args[0], drop filtered entries and
    write the result to standard output as UTF-8."""
    report_path = args[0]
    with open(report_path) as fd:
        tree = parse(fd)
    filter_tree(tree)
    tree.write(sys.stdout, encoding='utf-8')
def filter_tree(tree):
    """Remove every <urldata> entry that failed with a 401 status.

    An entry is dropped when its <valid> child has text '0' and a
    'result' attribute starting with '401'.
    """
    root = tree.getroot()
    doomed = []
    for urldata in tree.findall('urldata'):
        valid = urldata.find('valid')
        if valid is None:
            continue
        if valid.text == '0' and valid.attrib.get('result', '').startswith('401'):
            doomed.append(urldata)
    # Remove after the scan so iteration is not disturbed.
    for urldata in doomed:
        root.remove(urldata)
if __name__ == '__main__':
main(sys.argv[1:])
| 1,584 | Python | .py | 43 | 33.302326 | 73 | 0.704235 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,333 | __hooks__.py | wummel_linkchecker/doc/web/hooks/__hooks__.py | # -*- coding: iso-8859-1 -*-
# Hook routines for the wok static site generator.
# Note that mediacompress is a local module.
import os
def compress_javascript(config, output_path):
    """Minify JavaScript files below output_path.

    Does nothing when the optional mediacompress module is missing;
    already-minified *.min.js files are excluded.
    """
    try:
        from mediacompress import compress_js_files
    except ImportError:
        return
    compress_js_files(output_path, excludes=("*.min.js",))
def compress_css(config, output_path):
    """Minify CSS files below output_path.

    Does nothing when the optional mediacompress module is missing.
    """
    try:
        from mediacompress import compress_css_files
    except ImportError:
        return
    compress_css_files(output_path)
def chmod(config):
    """Set correct file permissions on the generated site.

    Walks everything below config["output_dir"]: directories become
    rwxr-xr-x (0o755), regular files rw-r--r-- (0o644).

    Fix: the old-style octal literals 0755/0644 are a SyntaxError on
    Python 3; the 0o prefix works on Python 2.6+ and 3 alike.
    """
    output_dir = config["output_dir"]
    for dirpath, dirnames, filenames in os.walk(output_dir):
        for dirname in dirnames:
            os.chmod(os.path.join(dirpath, dirname), 0o755)
        for filename in filenames:
            os.chmod(os.path.join(dirpath, filename), 0o644)
hooks = {
'site.output.post': [compress_javascript, compress_css],
'site.done': [chmod],
}
| 1,047 | Python | .py | 32 | 27.09375 | 62 | 0.66435 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,334 | viewprof.py | wummel_linkchecker/scripts/viewprof.py | #!/usr/bin/env python
"""
View yappi profiling data.
Usage: $0 <filename>
"""
import sys
import yappi
def main(args):
    """Load the yappi profile dump named by args[0] and print all
    function statistics."""
    dump_file = args[0]
    stats = yappi.YFuncStats()
    stats.add(dump_file)
    stats.print_all()
if __name__ == '__main__':
main(sys.argv[1:])
| 272 | Python | .py | 14 | 16.714286 | 30 | 0.656126 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,335 | debugparse.py | wummel_linkchecker/scripts/debugparse.py | #!/usr/bin/env python
# -*- coding: iso-8859-1 -*-
# Copyright (C) 2011-2014 Bastian Kleineidam
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Parse HTML given as file parameter or piped to stdin.
"""
import sys
import os
sys.path.append(os.getcwd())
import linkcheck.HtmlParser.htmlsax
import linkcheck.HtmlParser.htmllib
def main (text):
    """Feed HTML text through the SAX parser, printing each parse event
    via the HtmlPrinter handler."""
    handler = linkcheck.HtmlParser.htmllib.HtmlPrinter()
    parser = linkcheck.HtmlParser.htmlsax.parser()
    parser.handler = handler
    # Uncomment to debug the lexer:
    #parser.debug(1)
    parser.feed(text)
    parser.flush()
if __name__ == '__main__':
if len(sys.argv) <= 1:
text = sys.stdin.read()
else:
filename = sys.argv[1]
with open(filename) as fp:
text = fp.read()
main(text)
| 1,429 | Python | .py | 41 | 31.926829 | 73 | 0.72852 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,336 | removeafter.py | wummel_linkchecker/scripts/removeafter.py | #!/usr/bin/env python
# Copyright (C) 2012-2014 Bastian Kleineidam
"""Remove all lines after a given marker line.
"""
from __future__ import print_function
import fileinput
import sys
def main(args):
"""Remove lines after marker."""
filename = args[0]
marker = args[1]
for line in fileinput.input(filename, inplace=1):
print(line.rstrip())
if line.startswith(marker):
break
if __name__ == '__main__':
main(sys.argv[1:])
| 470 | Python | .py | 17 | 23.705882 | 53 | 0.660754 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,337 | analyze_memdump.py | wummel_linkchecker/scripts/analyze_memdump.py | #!/usr/bin/env python
# -*- coding: iso-8859-1 -*-
# Copyright (C) 2012-2014 Bastian Kleineidam
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Analyze a memory dump by the meliae module.
"""
import sys
import os
import codecs
import cgi
from linkcheck import strformat
def main (filename):
    """Summarize a meliae dump file and write per-type HTML files into
    a directory named after the dump (created next to it if missing)."""
    om = print_memorydump(filename)
    dirname, basename = os.path.split(filename)
    stem = os.path.splitext(basename)[0]
    target_dir = os.path.join(dirname, stem)
    if not os.path.isdir(target_dir):
        os.mkdir(target_dir)
    write_htmlfiles(om, target_dir)
def print_memorydump(filename):
    """Load a meliae memory dump, print its summary and return the
    loaded object manager.

    Fix: ``print om.summarize()`` is a Python-2-only print statement;
    the parenthesized call form behaves identically on Python 2 and 3.
    """
    from meliae import loader
    om = loader.load(filename, collapse=True)
    om.remove_expensive_references()
    print(om.summarize())
    return om
def write_htmlfiles(om, basedir):
    """Write one HTML table file per object type into basedir."""
    om.compute_parents()
    files_by_type = {}
    for obj in om.objs.itervalues():
        out = get_file(obj.type_str, files_by_type, basedir)
        write_html_obj(out, obj, om.objs)
    close_files(files_by_type)
def get_file(type_str, open_files, basedir):
    """Return the output file for type_str, opening it and writing the
    HTML header on first use. Opened files are cached in open_files."""
    if type_str in open_files:
        return open_files[type_str]
    encoding = 'utf-8'
    path = os.path.join(basedir, type_str + ".html")
    fd = codecs.open(path, 'w', encoding)
    open_files[type_str] = fd
    write_html_header(fd, type_str, encoding)
    return fd
def close_files(open_files):
    """Write the HTML footer to every cached output file and close it."""
    for out in open_files.values():
        write_html_footer(out)
        out.close()
HtmlHeader = u"""
<!doctype html>
<head>
<meta charset="%s">
</head>
<body>
"""
def write_html_header(fp, type_str, encoding):
    """Write document preamble, heading and table header for one type."""
    fp.write(HtmlHeader % encoding)
    heading = u"<h1>Type %s</h1>\n" % type_str
    fp.write(heading)
    fp.write(u"<table><tr><th>Address</th><th>Name</th><th>Size</th>"
             u"<th>Parents</th><th>References</th></tr>\n")
def get_children(obj, objs):
    """Build HTML snippets for all child addresses of obj.

    Children of the same type link within the current page; other types
    link to their own type page. Addresses missing from objs appear as
    plain numbers.
    """
    entries = []
    for address in obj.children:
        if address not in objs:
            entries.append(u"%d" % address)
            continue
        child = objs[address]
        anchor = u"#%d" % address
        if child.type_str != obj.type_str:
            anchor = child.type_str + u".html" + anchor
        entries.append(u'<a href="%s">%d</a>' % (anchor, address))
    return entries
def get_parents(obj, objs):
    """Build HTML snippets for all parent addresses of obj.

    Same linking rules as get_children(): same-type parents link within
    the page, other types link to their type page, unknown addresses
    appear as plain numbers.
    """
    entries = []
    for address in obj.parents:
        if address not in objs:
            entries.append(u"%d" % address)
            continue
        parent = objs[address]
        anchor = u"#%d" % address
        if parent.type_str != obj.type_str:
            anchor = parent.type_str + u".html" + anchor
        entries.append(u'<a href="%s">%d</a>' % (anchor, address))
    return entries
def write_html_obj(fp, obj, objs):
    """Write one table row for obj: address, value, size, parents and
    children -- matching the five columns declared by write_html_header
    (Address/Name/Size/Parents/References).

    Fix: 'parents' was computed but missing from the row template, so
    every row had four cells under a five-column header and the Parents
    column silently showed the references instead.
    """
    if obj.value is None:
        value = u"None"
    else:
        # Escape the value so it cannot break out of the table markup.
        value = cgi.escape(str(obj.value))
    attrs = dict(
        address=obj.address,
        size=strformat.strsize(obj.size),
        children=u",".join(get_children(obj, objs)),
        parents=u",".join(get_parents(obj, objs)),
        value=value,
    )
    fp.write(u"<tr><td>%(address)d</td><td>%(value)s</td><td>%(size)s</td><td>%(parents)s</td><td>%(children)s</td></tr>\n" % attrs)
def write_html_footer(fp):
    """Close the table and the HTML document opened by the header."""
    footer = u"</table></body></html>"
    fp.write(footer)
if __name__ == '__main__':
filename = sys.argv[1]
main(filename)
| 3,961 | Python | .py | 114 | 29.394737 | 112 | 0.640417 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,338 | update_iana_uri_schemes.py | wummel_linkchecker/scripts/update_iana_uri_schemes.py | import sys
import re
import csv
import requests
iana_uri_schemes = "https://www.iana.org/assignments/uri-schemes/uri-schemes.xhtml"
# CSV format: URI Scheme,Template,Description,Reference
csv_iana_uri_schemes_permanent = 'https://www.iana.org/assignments/uri-schemes/uri-schemes-1.csv'
csv_iana_uri_schemes_provisional = 'https://www.iana.org/assignments/uri-schemes/uri-schemes-2.csv'
csv_iana_uri_schemes_historical = 'https://www.iana.org/assignments/uri-schemes/uri-schemes-3.csv'
iana_uri_schemes_permanent = {}
iana_uri_schemes_provisional = {}
iana_uri_schemes_historical = {}
iana_uri_schemes_other = {
"clsid": "Microsoft specific",
"find" : "Mozilla specific",
"isbn" : "ISBN (int. book numbers)",
"javascript": "JavaScript",
}
filter_uri_schemes_permanent = (
"file",
"ftp",
"http",
"https",
"mailto",
"news",
"nntp",
)
template = '''
# from %(uri)s
ignored_schemes_permanent = r"""
%(permanent)s
"""
ignored_schemes_provisional = r"""
%(provisional)s
"""
ignored_schemes_historical = r"""
%(historical)s
"""
ignored_schemes_other = r"""
%(other)s
"""
ignored_schemes = "^(%%s%%s%%s%%s)$" %% (
ignored_schemes_permanent,
ignored_schemes_provisional,
ignored_schemes_historical,
ignored_schemes_other,
)
ignored_schemes_re = re.compile(ignored_schemes, re.VERBOSE)
is_unknown_scheme = ignored_schemes_re.match
'''
def main(args):
    """Fetch the three IANA URI scheme registries, merge in the
    hand-maintained extras and print a generated Python module snippet
    with the ignore patterns. Returns 0 on success.

    Fix: ``print res`` is a Python-2-only statement; the parenthesized
    call form behaves identically on Python 2 and 3.
    """
    parse_csv_file(csv_iana_uri_schemes_permanent, iana_uri_schemes_permanent)
    parse_csv_file(csv_iana_uri_schemes_provisional, iana_uri_schemes_provisional)
    parse_csv_file(csv_iana_uri_schemes_historical, iana_uri_schemes_historical)
    # Sanity check: the hand-maintained extra schemes must not clash
    # with any officially registered scheme.
    for scheme in iana_uri_schemes_other:
        if (scheme in iana_uri_schemes_permanent or
            scheme in iana_uri_schemes_provisional or
            scheme in iana_uri_schemes_historical):
            raise ValueError(scheme)
    # Remove the schemes listed in filter_uri_schemes_permanent from the
    # ignore set.
    for scheme in filter_uri_schemes_permanent:
        if scheme in iana_uri_schemes_permanent:
            del iana_uri_schemes_permanent[scheme]
    args = dict(
        uri = iana_uri_schemes,
        permanent = get_regex(iana_uri_schemes_permanent),
        provisional = get_regex(iana_uri_schemes_provisional),
        historical = get_regex(iana_uri_schemes_historical),
        other = get_regex(iana_uri_schemes_other),
    )
    res = template % args
    print(res)
    return 0
def get_regex(schemes):
    """Format a {scheme: description} mapping as verbose-regex
    alternative lines sorted by scheme, e.g. '|mailto     # mail'."""
    lines = []
    for scheme, description in sorted(schemes.items()):
        lines.append("|%s # %s" % (re.escape(scheme).ljust(10), description))
    return "\n".join(lines)
def parse_csv_file(url, res):
    """Download a CSV registry from url and fill res with
    {scheme: description}, skipping the header row."""
    response = requests.get(url, stream=True)
    reader = csv.reader(response.iter_lines())
    header_skipped = False
    for row in reader:
        if not header_skipped:
            header_skipped = True
            continue
        scheme, template, description, reference = row
        res[scheme] = description
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
| 3,073 | Python | .py | 90 | 29.488889 | 99 | 0.678463 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,339 | lc_cgi.html.de | wummel_linkchecker/cgi-bin/lconline/lc_cgi.html.de | <!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
"http://www.w3.org/TR/html4/loose.dtd">
<html><head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<title>LinkChecker Online</title>
<link rel="stylesheet" href="lc.css" type="text/css" />
<script type="text/javascript" src="check.js"></script>
<script type="text/javascript">
<!--
function gettext (s) {
if (s=="Empty URL was given.") {
return "Leere URL angegeben.";
}
if (s=="Invalid URL was given.") {
return "Ungültige URL angegeben.";
}
return s;
}
// -->
</script>
</head>
<body text="#192c83" bgcolor="#fff7e5" link="#191c83" vlink="#191c83"
alink="#191c83">
<center><h2>LinkChecker Online</h2>
(läuft mit Öl vom <a href="http://wummel.github.io/linkchecker/"
target="_top">LinkChecker</a>)
</center>
<blockquote>
<form method="POST" action="/lconlinewsgi/lc.wsgi" target="links"
onsubmit="return(isValid(document.checklinkForm))"
name="checklinkForm">
<table border="0" cellpadding="2" cellspacing="0" summary="">
<tr>
<td>Url:</td>
<td colspan="4"><input size="70" name="url" value="http://">
<input type="submit" value="Go!">
</td>
</tr>
<tr>
<td rowspan="3" valign="top">Optionen:</td>
<td>Rekursionstiefe:</td>
<td><select name="level">
<option value="0">0</option>
<option value="1" selected>1</option>
<option value="2">2</option>
<option value="3">3</option>
<option value="-1">unendlich</option>
</select>
</td>
<td>Prüfe Anker in HTML:</td>
<td><input type="checkbox" name="anchors" checked></td>
</tr>
<tr>
<td>Nur Warnungen und Fehler ausgeben:</td>
<td><input type="checkbox" name="errors"></td>
<td>Ausgabe:</td>
<td><select name="language">
<option value="de" selected>Deutsch</option>
<option value="C">Englisch</option>
</select>
</td>
</tr>
</table>
</form>
</blockquote>
</body></html>
| 1,865 | Python | .cgi | 66 | 26.242424 | 69 | 0.677455 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,340 | lc_cgi.html.en | wummel_linkchecker/cgi-bin/lconline/lc_cgi.html.en | <!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
"http://www.w3.org/TR/html4/loose.dtd">
<html><head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<title>LinkChecker Online</title>
<link rel="stylesheet" href="lc.css" type="text/css" />
<script type="text/javascript" src="check.js"></script>
<script type="text/javascript">
<!--
function gettext (s) {
return s;
}
// -->
</script>
</head>
<body text="#192c83" bgcolor="#fff7e5" link="#191c83" vlink="#191c83"
alink="#191c83">
<center><h2>LinkChecker Online</h2>
(powered by <a href="http://wummel.github.io/linkchecker/"
target="_top">LinkChecker</a>)
</center>
<blockquote>
<form method="POST" action="/lconlinewsgi/lc.wsgi" target="links"
onsubmit="return(isValid(document.checklinkForm))"
name="checklinkForm">
<table border="0" cellpadding="2" cellspacing="0" summary="">
<tr>
<td>Url:</td>
<td colspan="4"><input size="70" name="url" value="http://">
<input type="submit" value="Go!">
</td>
</tr>
<tr>
<td rowspan="3" valign="top">Options:</td>
<td>Recursion Level:</td>
<td><select name="level">
<option value="0">0</option>
<option value="1" selected>1</option>
<option value="2">2</option>
<option value="3">3</option>
<option value="-1">infinite</option>
</select>
</td>
<td>Check anchors in HTML:</td>
<td><input type="checkbox" name="anchors" checked></td>
</tr>
<tr>
<td>Log only warnings and errors:</td>
<td><input type="checkbox" name="errors"></td>
<td>Output language:</td>
<td><select name="language">
<option value="C" selected>English</option>
<option value="de">German</option>
</select>
</td>
</tr>
</table>
</form>
</blockquote>
</body></html>
| 1,685 | Python | .cgi | 60 | 26.583333 | 69 | 0.689464 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,341 | test_cgi.py | wummel_linkchecker/tests/test_cgi.py | # -*- coding: iso-8859-1 -*-
# Copyright (C) 2004-2012 Bastian Kleineidam
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Test cgi form routines.
"""
import unittest
import wsgiref
import urllib
from StringIO import StringIO
from wsgiref.util import setup_testing_defaults
from linkcheck.lc_cgi import checkform, checklink, LCFormError, application
from linkcheck.strformat import limit
class TestWsgi (unittest.TestCase):
    """Test the WSGI form validation (checkform), the check runner
    (checklink) and the WSGI entry point (application)."""
    def test_form_valid_url (self):
        # Check url validity: a well-formed URL must pass without error.
        env = dict()
        form = dict(url="http://www.example.com/", level="1")
        checkform(form, env)
    def test_form_empty_url (self):
        # Check with empty url: must be rejected.
        env = dict()
        form = dict(url="", level="0")
        self.assertRaises(LCFormError, checkform, form, env)
    def test_form_default_url (self):
        # Check with default url: the form's initial "http://" value is
        # treated the same as an empty URL.
        env = dict()
        form = dict(url="http://", level="0")
        self.assertRaises(LCFormError, checkform, form, env)
    def test_form_invalid_url (self):
        # Check url (in)validity: embedded whitespace makes it unsafe.
        env = dict()
        form = dict(url="http://www.foo bar/", level="0")
        self.assertRaises(LCFormError, checkform, form, env)
    def test_checklink (self):
        # NOTE(review): checklink is a generator function, so this call
        # only creates the generator and never runs the check -- consider
        # iterating the result to actually exercise it.
        form = dict(url="http://www.example.com/", level="0")
        checklink(form)
    def test_application (self):
        # Drive the WSGI application with an urlencoded POST body and
        # collect its streamed response.
        form = dict(url="http://www.example.com/", level="0")
        formdata = urllib.urlencode(form)
        environ = {'wsgi.input': StringIO(formdata)}
        setup_testing_defaults(environ)
        test_response = ""
        test_headers = [None]
        test_status = [None]
        def start_response(status, headers):
            # Record status and headers handed over by the application.
            test_status[0] = status
            test_headers[0] = headers
        for str_data in application(environ, start_response):
            # WSGI requires byte strings in the response iterable.
            if not isinstance(str_data, str):
                err = "answer is not a byte string: %r" % limit(str_data, 30)
                self.assertTrue(False, err)
            test_response += str_data
        self.assertEqual(test_status[0], '200 OK')
        self.assertTrue("Generated by LinkChecker" in test_response)
| 2,844 | Python | .cgi | 69 | 35.188406 | 77 | 0.66763 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,342 | lc_cgi.py | wummel_linkchecker/linkcheck/lc_cgi.py | # -*- coding: iso-8859-1 -*-
# Copyright (C) 2000-2014 Bastian Kleineidam
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Functions used by the WSGI script.
"""
import cgi
import os
import threading
import locale
import re
import time
try:
import urlparse
except ImportError:
# Python 3
from urllib import parse as urlparse
from . import configuration, strformat, checker, director, get_link_pat, \
init_i18n, url as urlutil
from .decorators import synchronized
# 5 minutes timeout for requests
MAX_REQUEST_SECONDS = 300
# character set encoding for HTML output
HTML_ENCODING = 'utf-8'
def application(environ, start_response):
    """WSGI interface: start an URL check.

    Reads the POSTed form from the request body, sends the response
    headers and yields the HTML output of the check incrementally.
    """
    # the environment variable CONTENT_LENGTH may be empty or missing
    try:
        request_body_size = int(environ.get('CONTENT_LENGTH', 0))
    except ValueError:
        request_body_size = 0
    # When the method is POST the query string will be sent
    # in the HTTP request body which is passed by the WSGI server
    # in the file like wsgi.input environment variable.
    if request_body_size > 0:
        request_body = environ['wsgi.input'].read(request_body_size)
    else:
        # No usable length given: read until EOF.
        request_body = environ['wsgi.input'].read()
    form = cgi.parse_qs(request_body)
    status = '200 OK'
    start_response(status, get_response_headers())
    # Stream checker output chunks as they become available.
    for output in checklink(form=form, env=environ):
        yield output
_supported_langs = ('de', 'C')
# map language -> locale name
lang_locale = {
'de': 'de_DE',
'C': 'C',
'en': 'en_EN',
}
_is_level = re.compile(r'^(0|1|2|3|-1)$').match
class LCFormError(Exception):
    """Raised when the submitted CGI form data is invalid."""
def get_response_headers():
    """Get list of response headers in key-value form.

    Fix: the header name was written as "Pragma:" with a stray colon,
    which a WSGI server would serialize as "Pragma:: no-cache".
    """
    return [("Content-type", "text/html"),
            ("Cache-Control", "no-cache"),
            ("Pragma", "no-cache")
            ]
def formvalue (form, key):
    """Get value with given key from WSGI form.

    Query-string parsing yields lists of values; return the first one.
    Returns None when the key is missing.
    """
    value = form.get(key)
    if isinstance(value, list):
        return value[0]
    return value
_lock = threading.Lock()
class ThreadsafeIO (object):
    """Thread-safe unicode I/O class.

    Writers append unicode chunks via write(); a reader drains them
    with get_data(). All buffer access is serialized through the
    module-level _lock.
    """
    def __init__(self):
        """Initialize an empty, open buffer."""
        self.buf = []          # pending unicode chunks
        self.closed = False    # True after close(); writes then fail
    @synchronized(_lock)
    def write (self, data):
        """Write given unicode data to buffer.

        Raises IOError when the object is already closed; empty data
        is silently ignored.
        """
        assert isinstance(data, unicode)
        if self.closed:
            raise IOError("Write on closed I/O object")
        if data:
            self.buf.append(data)
    @synchronized(_lock)
    def get_data (self):
        """Return all buffered unicode data and reset the buffer."""
        data = u"".join(self.buf)
        self.buf = []
        return data
    @synchronized(_lock)
    def close (self):
        """Reset buffer and close this I/O object."""
        self.buf = []
        self.closed = True
def encode(s):
    """Encode s with the HTML output charset; characters that cannot
    be represented are silently dropped."""
    return s.encode(HTML_ENCODING, 'ignore')
def checklink (form=None, env=os.environ):
    """Validates the CGI form and checks the given links.

    Generator: yields encoded HTML output chunks. On a form error a
    single formatted error page is yielded instead.
    """
    if form is None:
        form = {}
    try:
        checkform(form, env)
    except LCFormError as errmsg:
        log(env, errmsg)
        yield encode(format_error(errmsg))
        return
    out = ThreadsafeIO()
    config = get_configuration(form, out)
    url = strformat.stripurl(formvalue(form, "url"))
    aggregate = director.get_aggregate(config)
    # Seed the queue with the single submitted URL; recursion depth is
    # applied by get_configuration() from the form.
    url_data = checker.get_url_from(url, 0, aggregate, extern=(0, 0))
    aggregate.urlqueue.put(url_data)
    for html_str in start_check(aggregate, out):
        yield encode(html_str)
    out.close()
def start_check (aggregate, out):
    """Start checking in background and write encoded output to out.

    Generator: yields buffered HTML data roughly every two seconds
    until the check finishes, or aborts it once MAX_REQUEST_SECONDS
    is exceeded.
    """
    # check in background
    t = threading.Thread(target=director.check_urls, args=(aggregate,))
    t.start()
    # time to wait for new data
    sleep_seconds = 2
    # current running time
    run_seconds = 0
    while not aggregate.is_finished():
        yield out.get_data()
        time.sleep(sleep_seconds)
        run_seconds += sleep_seconds
        if run_seconds > MAX_REQUEST_SECONDS:
            # Request took too long: abort the check to free resources.
            director.abort(aggregate)
            break
    # Flush whatever output accumulated after the last poll.
    yield out.get_data()
def get_configuration(form, out):
    """Initialize a CGI configuration.

    Builds a Configuration that writes HTML output to out, applies the
    form's recursion level and options, and restricts checking to safe
    URLs only.
    """
    config = configuration.Configuration()
    config["recursionlevel"] = int(formvalue(form, "level"))
    config["logger"] = config.logger_new('html', fd=out, encoding=HTML_ENCODING)
    config["threads"] = 2
    if "anchors" in form:
        config["enabledplugins"].append("AnchorCheck")
    if "errors" not in form:
        # Without the errors-only option, log valid URLs as well.
        config["verbose"] = True
    # avoid checking of local files or other nasty stuff
    pat = "!^%s$" % urlutil.safe_url_pattern
    config["externlinks"].append(get_link_pat(pat, strict=True))
    config.sanitize()
    return config
def get_host_name (form):
    """Return the network location (host) part of the form's URL."""
    url = formvalue(form, "url")
    return urlparse.urlparse(url)[1]
def checkform (form, env):
    """Check form data. throw exception on error
    Be sure to NOT print out any user-given data as HTML code, so use
    only plain strings as exception text."""
    # check lang support
    if "language" in form:
        lang = formvalue(form, 'language')
        if lang in _supported_langs:
            localestr = lang_locale[lang]
            try:
                # XXX this is not thread-safe, so think of something else
                locale.setlocale(locale.LC_ALL, localestr)
                init_i18n()
            except locale.Error as errmsg:
                # A failed locale switch is logged but not fatal.
                log(env, "could not set locale %r: %s" % (localestr, errmsg))
        else:
            raise LCFormError(_("unsupported language %r") % lang)
    # check url syntax
    if "url" in form:
        url = formvalue(form, "url")
        if not url or url == "http://":
            # "http://" is the form's untouched default value.
            raise LCFormError(_("empty url was given"))
        if not urlutil.is_safe_url(url):
            raise LCFormError(_("disallowed url %r was given") % url)
    else:
        raise LCFormError(_("no url was given"))
    # check recursion level
    if "level" in form:
        level = formvalue(form, "level")
        if not _is_level(level):
            raise LCFormError(_("invalid recursion level %r") % level)
    # check options
    for option in ("anchors", "errors", "intern"):
        if option in form:
            value = formvalue(form, option)
            if value != "on":
                raise LCFormError(_("invalid %s option %r") % (option, value))
def log (env, msg):
    """Write msg plus a newline to the WSGI error stream."""
    env['wsgi.errors'].write(msg + "\n")
def dump (env, form):
    """Log every environment variable and every form value (debug aid)."""
    for key, value in env.items():
        log(env, key + "=" + value)
    for key in form:
        log(env, str(formvalue(form, key)))
def format_error (why):
    """Format standard error page.

    The message is HTML-escaped before being inserted into the page.
    @param why: error message
    @ptype why: unicode
    @return: HTML page content
    @rtype: unicode
    """
    return _("""<!DOCTYPE HTML>
<html><head>
<meta http-equiv="Content-Type" content="text/html; charset=ISO-8859-1">
<title>LinkChecker Online Error</title></head>
<body text=#192c83 bgcolor=#fff7e5 link=#191c83 vlink=#191c83 alink=#191c83>
<blockquote>
<b>Error: %s</b><br/>
The LinkChecker Online script has encountered an error. Please ensure
that your provided URL link begins with <code>http://</code> and
contains only these characters: <code>A-Za-z0-9./_~-</code><br/><br/>
Errors are logged.
</blockquote>
</body>
</html>""") % cgi.escape(why)
| 8,260 | Python | .cgi | 230 | 30.321739 | 80 | 0.652946 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,343 | .pydevproject | wummel_linkchecker/.pydevproject | <?xml version="1.0" encoding="UTF-8" standalone="no"?>
<?eclipse-pydev version="1.0"?>
<pydev_project>
<pydev_property name="org.python.pydev.PYTHON_PROJECT_INTERPRETER">Default</pydev_property>
<pydev_property name="org.python.pydev.PYTHON_PROJECT_VERSION">python 2.7</pydev_property>
<pydev_pathproperty name="org.python.pydev.PROJECT_SOURCE_PATH">
<path>/linkchecker-git</path>
</pydev_pathproperty>
</pydev_project>
| 421 | Python | .pyde | 9 | 45.666667 | 91 | 0.776156 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,344 | rdataclass.py | wummel_linkchecker/third_party/dnspython/dns/rdataclass.py | # Copyright (C) 2001-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""DNS Rdata Classes.
@var _by_text: The rdata class textual name to value mapping
@type _by_text: dict
@var _by_value: The rdata class value to textual name mapping
@type _by_value: dict
@var _metaclasses: If an rdataclass is a metaclass, there will be a mapping
whose key is the rdatatype value and whose value is True in this dictionary.
@type _metaclasses: dict"""
import re
import dns.exception
RESERVED0 = 0
IN = 1
CH = 3
HS = 4
NONE = 254
ANY = 255
_by_text = {
'RESERVED0' : RESERVED0,
'IN' : IN,
'CH' : CH,
'HS' : HS,
'NONE' : NONE,
'ANY' : ANY
}
# We construct the inverse mapping programmatically to ensure that we
# cannot make any mistakes (e.g. omissions, cut-and-paste errors) that
# would cause the mapping not to be true inverse.
_by_value = dict([(y, x) for x, y in _by_text.iteritems()])
# Now that we've built the inverse map, we can add class aliases to
# the _by_text mapping.
_by_text.update({
'INTERNET' : IN,
'CHAOS' : CH,
'HESIOD' : HS
})
_metaclasses = {
NONE : True,
ANY : True
}
_unknown_class_pattern = re.compile('CLASS([0-9]+)$', re.I);
class UnknownRdataclass(dns.exception.DNSException):
    """Raised when a rdata class name or value cannot be resolved."""
def from_text(text):
    """Convert text into a DNS rdata class value.

    Known mnemonics (e.g. 'IN', 'CH') are looked up case-insensitively;
    the generic 'CLASSnnn' form is parsed numerically.
    @param text: the text
    @type text: string
    @rtype: int
    @raises dns.rdataclass.UnknownRdataclass: the class is unknown
    @raises ValueError: the rdata class value is not >= 0 and <= 65535
    """
    value = _by_text.get(text.upper())
    if value is None:
        match = _unknown_class_pattern.match(text)
        # Fix: compare with 'is None' instead of '== None' (PEP 8).
        if match is None:
            raise UnknownRdataclass
        value = int(match.group(1))
        if value < 0 or value > 65535:
            raise ValueError("class must be between >= 0 and <= 65535")
    return value
def to_text(value):
    """Convert a DNS rdata class to text.

    Known values map to their mnemonic; others render as 'CLASSnnn'.
    @param value: the rdata class value
    @type value: int
    @rtype: string
    @raises ValueError: the rdata class value is not >= 0 and <= 65535
    """
    if not 0 <= value <= 65535:
        raise ValueError("class must be between >= 0 and <= 65535")
    text = _by_value.get(value)
    if text is None:
        text = 'CLASS' + repr(value)
    return text
def is_metaclass(rdclass):
    """True if the class is a metaclass (NONE or ANY).

    Simplified the redundant ``if x in d: return True / return False``
    pair into a direct membership test.
    @param rdclass: the rdata class
    @type rdclass: int
    @rtype: bool"""
    return rdclass in _metaclasses
| 3,300 | Python | .tac | 95 | 30.926316 | 76 | 0.694288 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,345 | lc.wsgi | wummel_linkchecker/cgi-bin/lc.wsgi | #!/usr/bin/python
# -*- coding: iso-8859-1 -*-
# Copyright (C) 2012 Bastian Kleineidam
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
from linkcheck.lc_cgi import application
| 835 | Python | .wsgi | 18 | 45.333333 | 73 | 0.778186 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,346 | setup.py | ParallelSSH_parallel-ssh/setup.py | # Copyright (C) 2014-2022 Panos Kittenis.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, version 2.1.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
from setuptools import setup, find_packages
import versioneer
cmdclass = versioneer.get_cmdclass()
setup(name='parallel-ssh',
version=versioneer.get_version(),
cmdclass=cmdclass,
description='Asynchronous parallel SSH library',
long_description=open('README.rst').read(),
author='Panos Kittenis',
author_email='zuboci@yandex.com',
url="https://github.com/ParallelSSH/parallel-ssh",
license='LGPLv2.1',
packages=find_packages(
'.', exclude=('embedded_server', 'embedded_server.*',
'tests', 'tests.*',
'*.tests', '*.tests.*')
),
install_requires=[
'gevent', 'ssh2-python', 'ssh-python'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: GNU Lesser General Public License v2 (LGPLv2)',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10',
'Topic :: System :: Networking',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
'Operating System :: POSIX :: Linux',
'Operating System :: POSIX :: BSD',
'Operating System :: Microsoft :: Windows',
'Operating System :: MacOS :: MacOS X',
],
)
| 2,427 | Python | .py | 54 | 37.703704 | 85 | 0.643159 | ParallelSSH/parallel-ssh | 1,198 | 148 | 36 | LGPL-2.1 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,347 | versioneer.py | ParallelSSH_parallel-ssh/versioneer.py |
# Version: 0.18-1
"""The Versioneer - like a rocketeer, but for versions.
The Versioneer
==============
* like a rocketeer, but for versions!
* https://github.com/warner/python-versioneer
* Brian Warner
* License: Public Domain
* Compatible With: python2.6, 2.7, 3.2, 3.3, 3.4, 3.5, 3.6, and pypy
* [![Latest Version]
(https://pypip.in/version/versioneer/badge.svg?style=flat)
](https://pypi.python.org/pypi/versioneer/)
* [![Build Status]
(https://travis-ci.org/warner/python-versioneer.png?branch=master)
](https://travis-ci.org/warner/python-versioneer)
This is a tool for managing a recorded version number in distutils-based
python projects. The goal is to remove the tedious and error-prone "update
the embedded version string" step from your release process. Making a new
release should be as easy as recording a new tag in your version-control
system, and maybe making new tarballs.
## Quick Install
* `pip install versioneer` to somewhere to your $PATH
* add a `[versioneer]` section to your setup.cfg (see below)
* run `versioneer install` in your source tree, commit the results
## Version Identifiers
Source trees come from a variety of places:
* a version-control system checkout (mostly used by developers)
* a nightly tarball, produced by build automation
* a snapshot tarball, produced by a web-based VCS browser, like github's
"tarball from tag" feature
* a release tarball, produced by "setup.py sdist", distributed through PyPI
Within each source tree, the version identifier (either a string or a number,
this tool is format-agnostic) can come from a variety of places:
* ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows
about recent "tags" and an absolute revision-id
* the name of the directory into which the tarball was unpacked
* an expanded VCS keyword ($Id$, etc)
* a `_version.py` created by some earlier build step
For released software, the version identifier is closely related to a VCS
tag. Some projects use tag names that include more than just the version
string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool
needs to strip the tag prefix to extract the version identifier. For
unreleased software (between tags), the version identifier should provide
enough information to help developers recreate the same tree, while also
giving them an idea of roughly how old the tree is (after version 1.2, before
version 1.3). Many VCS systems can report a description that captures this,
for example `git describe --tags --dirty --always` reports things like
"0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the
0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has
uncommitted changes.
The version identifier is used for multiple purposes:
* to allow the module to self-identify its version: `myproject.__version__`
* to choose a name and prefix for a 'setup.py sdist' tarball
## Theory of Operation
Versioneer works by adding a special `_version.py` file into your source
tree, where your `__init__.py` can import it. This `_version.py` knows how to
dynamically ask the VCS tool for version information at import time.
`_version.py` also contains `$Revision$` markers, and the installation
process marks `_version.py` to have this marker rewritten with a tag name
during the `git archive` command. As a result, generated tarballs will
contain enough information to get the proper version.
To allow `setup.py` to compute a version too, a `versioneer.py` is added to
the top level of your source tree, next to `setup.py` and the `setup.cfg`
that configures it. This overrides several distutils/setuptools commands to
compute the version when invoked, and changes `setup.py build` and `setup.py
sdist` to replace `_version.py` with a small static file that contains just
the generated version data.
## Installation
See [INSTALL.md](./INSTALL.md) for detailed installation instructions.
## Version-String Flavors
Code which uses Versioneer can learn about its version string at runtime by
importing `_version` from your main `__init__.py` file and running the
`get_versions()` function. From the "outside" (e.g. in `setup.py`), you can
import the top-level `versioneer.py` and run `get_versions()`.
Both functions return a dictionary with different flavors of version
information:
* `['version']`: A condensed version string, rendered using the selected
style. This is the most commonly used value for the project's version
string. The default "pep440" style yields strings like `0.11`,
`0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section
below for alternative styles.
* `['full-revisionid']`: detailed revision identifier. For Git, this is the
full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac".
* `['date']`: Date and time of the latest `HEAD` commit. For Git, it is the
commit date in ISO 8601 format. This will be None if the date is not
available.
* `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that
this is only accurate if run in a VCS checkout, otherwise it is likely to
be False or None
* `['error']`: if the version string could not be computed, this will be set
to a string describing the problem, otherwise it will be None. It may be
useful to throw an exception in setup.py if this is set, to avoid e.g.
creating tarballs with a version string of "unknown".
Some variants are more useful than others. Including `full-revisionid` in a
bug report should allow developers to reconstruct the exact code being tested
(or indicate the presence of local changes that should be shared with the
developers). `version` is suitable for display in an "about" box or a CLI
`--version` output: it can be easily compared against release notes and lists
of bugs fixed in various releases.
The installer adds the following text to your `__init__.py` to place a basic
version in `YOURPROJECT.__version__`:
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
## Styles
The setup.cfg `style=` configuration controls how the VCS information is
rendered into a version string.
The default style, "pep440", produces a PEP440-compliant string, equal to the
un-prefixed tag name for actual releases, and containing an additional "local
version" section with more detail for in-between builds. For Git, this is
TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags
--dirty --always`. For example "0.11+2.g1076c97.dirty" indicates that the
tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and
that this commit is two revisions ("+2") beyond the "0.11" tag. For released
software (exactly equal to a known tag), the identifier will only contain the
stripped tag, e.g. "0.11".
Other styles are available. See [details.md](details.md) in the Versioneer
source tree for descriptions.
## Debugging
Versioneer tries to avoid fatal errors: if something goes wrong, it will tend
to return a version of "0+unknown". To investigate the problem, run `setup.py
version`, which will run the version-lookup code in a verbose mode, and will
display the full contents of `get_versions()` (including the `error` string,
which may help identify what went wrong).
## Known Limitations
Some situations are known to cause problems for Versioneer. This details the
most significant ones. More can be found on Github
[issues page](https://github.com/warner/python-versioneer/issues).
### Subprojects
Versioneer has limited support for source trees in which `setup.py` is not in
the root directory (e.g. `setup.py` and `.git/` are *not* siblings). The are
two common reasons why `setup.py` might not be in the root:
* Source trees which contain multiple subprojects, such as
[Buildbot](https://github.com/buildbot/buildbot), which contains both
"master" and "slave" subprojects, each with their own `setup.py`,
`setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI
distributions (and upload multiple independently-installable tarballs).
* Source trees whose main purpose is to contain a C library, but which also
provide bindings to Python (and perhaps other langauges) in subdirectories.
Versioneer will look for `.git` in parent directories, and most operations
should get the right version string. However `pip` and `setuptools` have bugs
and implementation details which frequently cause `pip install .` from a
subproject directory to fail to find a correct version string (so it usually
defaults to `0+unknown`).
`pip install --editable .` should work correctly. `setup.py install` might
work too.
Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in
some later version.
[Bug #38](https://github.com/warner/python-versioneer/issues/38) is tracking
this issue. The discussion in
[PR #61](https://github.com/warner/python-versioneer/pull/61) describes the
issue from the Versioneer side in more detail.
[pip PR#3176](https://github.com/pypa/pip/pull/3176) and
[pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve
pip to let Versioneer work correctly.
Versioneer-0.16 and earlier only looked for a `.git` directory next to the
`setup.cfg`, so subprojects were completely unsupported with those releases.
### Editable installs with setuptools <= 18.5
`setup.py develop` and `pip install --editable .` allow you to install a
project into a virtualenv once, then continue editing the source code (and
test) without re-installing after every change.
"Entry-point scripts" (`setup(entry_points={"console_scripts": ..})`) are a
convenient way to specify executable scripts that should be installed along
with the python package.
These both work as expected when using modern setuptools. When using
setuptools-18.5 or earlier, however, certain operations will cause
`pkg_resources.DistributionNotFound` errors when running the entrypoint
script, which must be resolved by re-installing the package. This happens
when the install happens with one version, then the egg_info data is
regenerated while a different version is checked out. Many setup.py commands
cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into
a different virtualenv), so this can be surprising.
[Bug #83](https://github.com/warner/python-versioneer/issues/83) describes
this one, but upgrading to a newer version of setuptools should probably
resolve it.
### Unicode version strings
While Versioneer works (and is continually tested) with both Python 2 and
Python 3, it is not entirely consistent with bytes-vs-unicode distinctions.
Newer releases probably generate unicode version strings on py2. It's not
clear that this is wrong, but it may be surprising for applications when then
write these strings to a network connection or include them in bytes-oriented
APIs like cryptographic checksums.
[Bug #71](https://github.com/warner/python-versioneer/issues/71) investigates
this question.
## Updating Versioneer
To upgrade your project to a new release of Versioneer, do the following:
* install the new Versioneer (`pip install -U versioneer` or equivalent)
* edit `setup.cfg`, if necessary, to include any new configuration settings
indicated by the release notes. See [UPGRADING](./UPGRADING.md) for details.
* re-run `versioneer install` in your source tree, to replace
`SRC/_version.py`
* commit any changed files
## Future Directions
This tool is designed to make it easily extended to other version-control
systems: all VCS-specific components are in separate directories like
src/git/ . The top-level `versioneer.py` script is assembled from these
components by running make-versioneer.py . In the future, make-versioneer.py
will take a VCS name as an argument, and will construct a version of
`versioneer.py` that is specific to the given VCS. It might also take the
configuration arguments that are currently provided manually during
installation by editing setup.py . Alternatively, it might go the other
direction and include code from all supported VCS systems, reducing the
number of intermediate scripts.
## License
To make Versioneer easier to embed, all its code is dedicated to the public
domain. The `_version.py` that it creates is also in the public domain.
Specifically, both are released under the Creative Commons "Public Domain
Dedication" license (CC0-1.0), as described in
https://creativecommons.org/publicdomain/zero/1.0/ .
"""
from __future__ import print_function
try:
import configparser
except ImportError:
import ConfigParser as configparser
import errno
import json
import os
import re
import subprocess
import sys
class VersioneerConfig:
"""Container for Versioneer configuration parameters."""
def get_root():
"""Get the project root directory.
We require that all commands are run from the project root, i.e. the
directory that contains setup.py, setup.cfg, and versioneer.py .
"""
root = os.path.realpath(os.path.abspath(os.getcwd()))
setup_py = os.path.join(root, "setup.py")
versioneer_py = os.path.join(root, "versioneer.py")
if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):
# allow 'python path/to/setup.py COMMAND'
root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0])))
setup_py = os.path.join(root, "setup.py")
versioneer_py = os.path.join(root, "versioneer.py")
if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):
err = ("Versioneer was unable to run the project root directory. "
"Versioneer requires setup.py to be executed from "
"its immediate directory (like 'python setup.py COMMAND'), "
"or in a way that lets it use sys.argv[0] to find the root "
"(like 'python path/to/setup.py COMMAND').")
raise VersioneerBadRootError(err)
try:
# Certain runtime workflows (setup.py install/develop in a setuptools
# tree) execute all dependencies in a single python process, so
# "versioneer" may be imported multiple times, and python's shared
# module-import table will cache the first one. So we can't use
# os.path.dirname(__file__), as that will find whichever
# versioneer.py was first imported, even in later projects.
me = os.path.realpath(os.path.abspath(__file__))
me_dir = os.path.normcase(os.path.splitext(me)[0])
vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0])
if me_dir != vsr_dir:
print("Warning: build in %s is using versioneer.py from %s"
% (os.path.dirname(me), versioneer_py))
except NameError:
pass
return root
def get_config_from_root(root):
"""Read the project setup.cfg file to determine Versioneer config."""
# This might raise EnvironmentError (if setup.cfg is missing), or
# configparser.NoSectionError (if it lacks a [versioneer] section), or
# configparser.NoOptionError (if it lacks "VCS="). See the docstring at
# the top of versioneer.py for instructions on writing your setup.cfg .
setup_cfg = os.path.join(root, "setup.cfg")
parser = configparser.SafeConfigParser()
with open(setup_cfg, "r") as f:
parser.readfp(f)
VCS = parser.get("versioneer", "VCS") # mandatory
def get(parser, name):
if parser.has_option("versioneer", name):
return parser.get("versioneer", name)
return None
cfg = VersioneerConfig()
cfg.VCS = VCS
cfg.style = get(parser, "style") or ""
cfg.versionfile_source = get(parser, "versionfile_source")
cfg.versionfile_build = get(parser, "versionfile_build")
cfg.tag_prefix = get(parser, "tag_prefix")
if cfg.tag_prefix in ("''", '""'):
cfg.tag_prefix = ""
cfg.parentdir_prefix = get(parser, "parentdir_prefix")
cfg.verbose = get(parser, "verbose")
return cfg
class NotThisMethod(Exception):
"""Exception raised if a method is not valid for the current scenario."""
# these dictionaries contain VCS-specific tools
LONG_VERSION_PY = {}
HANDLERS = {}
def register_vcs_handler(vcs, method): # decorator
"""Decorator to mark a method as the handler for a particular VCS."""
def decorate(f):
"""Store f in HANDLERS[vcs][method]."""
if vcs not in HANDLERS:
HANDLERS[vcs] = {}
HANDLERS[vcs][method] = f
return f
return decorate
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
env=None):
"""Call the given command(s)."""
assert isinstance(commands, list)
p = None
for c in commands:
try:
dispcmd = str([c] + args)
# remember shell=False, so use git.cmd on windows, not just git
p = subprocess.Popen([c] + args, cwd=cwd, env=env,
stdout=subprocess.PIPE,
stderr=(subprocess.PIPE if hide_stderr
else None))
break
except EnvironmentError:
e = sys.exc_info()[1]
if e.errno == errno.ENOENT:
continue
if verbose:
print("unable to run %s" % dispcmd)
print(e)
return None, None
else:
if verbose:
print("unable to find command, tried %s" % (commands,))
return None, None
stdout = p.communicate()[0].strip()
if sys.version_info[0] >= 3:
stdout = stdout.decode()
if p.returncode != 0:
if verbose:
print("unable to run %s (error)" % dispcmd)
print("stdout was %s" % stdout)
return None, p.returncode
return stdout, p.returncode
LONG_VERSION_PY['git'] = '''
# This file helps to compute a version number in source trees obtained from
# git-archive tarball (such as those provided by githubs download-from-tag
# feature). Distribution tarballs (built by setup.py sdist) and build
# directories (produced by setup.py build) will contain a much shorter file
# that just contains the computed version number.
# This file is released into the public domain. Generated by
# versioneer-0.18 (https://github.com/warner/python-versioneer)
"""Git implementation of _version.py."""
import errno
import os
import re
import subprocess
import sys
def get_keywords():
"""Get the keywords needed to look up the version information."""
# these strings will be replaced by git during git-archive.
# setup.py/versioneer.py will grep for the variable names, so they must
# each be defined on a line of their own. _version.py will just call
# get_keywords().
git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s"
git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s"
git_date = "%(DOLLAR)sFormat:%%ci%(DOLLAR)s"
keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
return keywords
class VersioneerConfig:
"""Container for Versioneer configuration parameters."""
def get_config():
"""Create, populate and return the VersioneerConfig() object."""
# these strings are filled in when 'setup.py versioneer' creates
# _version.py
cfg = VersioneerConfig()
cfg.VCS = "git"
cfg.style = "%(STYLE)s"
cfg.tag_prefix = "%(TAG_PREFIX)s"
cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s"
cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s"
cfg.verbose = False
return cfg
class NotThisMethod(Exception):
"""Exception raised if a method is not valid for the current scenario."""
LONG_VERSION_PY = {}
HANDLERS = {}
def register_vcs_handler(vcs, method): # decorator
"""Decorator to mark a method as the handler for a particular VCS."""
def decorate(f):
"""Store f in HANDLERS[vcs][method]."""
if vcs not in HANDLERS:
HANDLERS[vcs] = {}
HANDLERS[vcs][method] = f
return f
return decorate
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
env=None):
"""Call the given command(s)."""
assert isinstance(commands, list)
p = None
for c in commands:
try:
dispcmd = str([c] + args)
# remember shell=False, so use git.cmd on windows, not just git
p = subprocess.Popen([c] + args, cwd=cwd, env=env,
stdout=subprocess.PIPE,
stderr=(subprocess.PIPE if hide_stderr
else None))
break
except EnvironmentError:
e = sys.exc_info()[1]
if e.errno == errno.ENOENT:
continue
if verbose:
print("unable to run %%s" %% dispcmd)
print(e)
return None, None
else:
if verbose:
print("unable to find command, tried %%s" %% (commands,))
return None, None
stdout = p.communicate()[0].strip()
if sys.version_info[0] >= 3:
stdout = stdout.decode()
if p.returncode != 0:
if verbose:
print("unable to run %%s (error)" %% dispcmd)
print("stdout was %%s" %% stdout)
return None, p.returncode
return stdout, p.returncode
def versions_from_parentdir(parentdir_prefix, root, verbose):
"""Try to determine the version from the parent directory name.
Source tarballs conventionally unpack into a directory that includes both
the project name and a version string. We will also support searching up
two directory levels for an appropriately named parent directory
"""
rootdirs = []
for i in range(3):
dirname = os.path.basename(root)
if dirname.startswith(parentdir_prefix):
return {"version": dirname[len(parentdir_prefix):],
"full-revisionid": None,
"dirty": False, "error": None, "date": None}
else:
rootdirs.append(root)
root = os.path.dirname(root) # up a level
if verbose:
print("Tried directories %%s but none started with prefix %%s" %%
(str(rootdirs), parentdir_prefix))
raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
"""Extract version information from the given file."""
# the code embedded in _version.py can just fetch the value of these
# keywords. When used from setup.py, we don't want to import _version.py,
# so we do it with a regexp instead. This function is not used from
# _version.py.
keywords = {}
try:
f = open(versionfile_abs, "r")
for line in f.readlines():
if line.strip().startswith("git_refnames ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["refnames"] = mo.group(1)
if line.strip().startswith("git_full ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["full"] = mo.group(1)
if line.strip().startswith("git_date ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["date"] = mo.group(1)
f.close()
except EnvironmentError:
pass
return keywords
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
"""Get version information from git keywords."""
if not keywords:
raise NotThisMethod("no keywords at all, weird")
date = keywords.get("date")
if date is not None:
# git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant
# datestamp. However we prefer "%%ci" (which expands to an "ISO-8601
# -like" string, which we must then edit to make compliant), because
# it's been around since git-1.5.3, and it's too difficult to
# discover which version we're using, or to work around using an
# older one.
date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
refnames = keywords["refnames"].strip()
if refnames.startswith("$Format"):
if verbose:
print("keywords are unexpanded, not using")
raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
refs = set([r.strip() for r in refnames.strip("()").split(",")])
# starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
# just "foo-1.0". If we see a "tag: " prefix, prefer those.
TAG = "tag: "
tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
if not tags:
# Either we're using git < 1.8.3, or there really are no tags. We use
# a heuristic: assume all version tags have a digit. The old git %%d
# expansion behaves like git log --decorate=short and strips out the
# refs/heads/ and refs/tags/ prefixes that would let us distinguish
# between branches and tags. By ignoring refnames without digits, we
# filter out many common branch names like "release" and
# "stabilization", as well as "HEAD" and "master".
tags = set([r for r in refs if re.search(r'\d', r)])
if verbose:
print("discarding '%%s', no digits" %% ",".join(refs - tags))
if verbose:
print("likely tags: %%s" %% ",".join(sorted(tags)))
for ref in sorted(tags):
# sorting will prefer e.g. "2.0" over "2.0rc1"
if ref.startswith(tag_prefix):
r = ref[len(tag_prefix):]
if verbose:
print("picking %%s" %% r)
return {"version": r,
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": None,
"date": date}
# no suitable tags, so version is "0+unknown", but full hex is still there
if verbose:
print("no suitable tags, using unknown + full revision id")
return {"version": "0+unknown",
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": "no suitable tags", "date": None}
@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
"""Get version from 'git describe' in the root of the source tree.
This only gets called if the git-archive 'subst' keywords were *not*
expanded, and _version.py hasn't already been rewritten with a short
version string, meaning we're inside a checked out source tree.
"""
GITS = ["git"]
if sys.platform == "win32":
GITS = ["git.cmd", "git.exe"]
out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
hide_stderr=True)
if rc != 0:
if verbose:
print("Directory %%s not under git control" %% root)
raise NotThisMethod("'git rev-parse --git-dir' returned error")
# if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
# if there isn't one, this yields HEX[-dirty] (no NUM)
describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
"--always", "--long",
"--match", "%%s*" %% tag_prefix],
cwd=root)
# --long was added in git-1.5.5
if describe_out is None:
raise NotThisMethod("'git describe' failed")
describe_out = describe_out.strip()
full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
if full_out is None:
raise NotThisMethod("'git rev-parse' failed")
full_out = full_out.strip()
pieces = {}
pieces["long"] = full_out
pieces["short"] = full_out[:7] # maybe improved later
pieces["error"] = None
# parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
# TAG might have hyphens.
git_describe = describe_out
# look for -dirty suffix
dirty = git_describe.endswith("-dirty")
pieces["dirty"] = dirty
if dirty:
git_describe = git_describe[:git_describe.rindex("-dirty")]
# now we have TAG-NUM-gHEX or HEX
if "-" in git_describe:
# TAG-NUM-gHEX
mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
if not mo:
# unparseable. Maybe git-describe is misbehaving?
pieces["error"] = ("unable to parse git-describe output: '%%s'"
%% describe_out)
return pieces
# tag
full_tag = mo.group(1)
if not full_tag.startswith(tag_prefix):
if verbose:
fmt = "tag '%%s' doesn't start with prefix '%%s'"
print(fmt %% (full_tag, tag_prefix))
pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'"
%% (full_tag, tag_prefix))
return pieces
pieces["closest-tag"] = full_tag[len(tag_prefix):]
# distance: number of commits since tag
pieces["distance"] = int(mo.group(2))
# commit: short hex revision ID
pieces["short"] = mo.group(3)
else:
# HEX: no tags
pieces["closest-tag"] = None
count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
cwd=root)
pieces["distance"] = int(count_out) # total number of commits
# commit date: see ISO-8601 comment in git_versions_from_keywords()
date = run_command(GITS, ["show", "-s", "--format=%%ci", "HEAD"],
cwd=root)[0].strip()
pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
return pieces
def plus_or_dot(pieces):
"""Return a + if we don't already have one, else return a ."""
if "+" in pieces.get("closest-tag", ""):
return "."
return "+"
def render_pep440(pieces):
"""Build up version string, with post-release "local version identifier".
Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
Exceptions:
1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += plus_or_dot(pieces)
rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
else:
# exception #1
rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"],
pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
return rendered
def render_pep440_pre(pieces):
"""TAG[.post.devDISTANCE] -- No -dirty.
Exceptions:
1: no tags. 0.post.devDISTANCE
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += ".post.dev%%d" %% pieces["distance"]
else:
# exception #1
rendered = "0.post.dev%%d" %% pieces["distance"]
return rendered
def render_pep440_post(pieces):
"""TAG[.postDISTANCE[.dev0]+gHEX] .
The ".dev0" means dirty. Note that .dev0 sorts backwards
(a dirty tree will appear "older" than the corresponding clean one),
but you shouldn't be releasing software with -dirty anyways.
Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%%d" %% pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += plus_or_dot(pieces)
rendered += "g%%s" %% pieces["short"]
else:
# exception #1
rendered = "0.post%%d" %% pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += "+g%%s" %% pieces["short"]
return rendered
def render_pep440_old(pieces):
"""TAG[.postDISTANCE[.dev0]] .
The ".dev0" means dirty.
Eexceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%%d" %% pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
else:
# exception #1
rendered = "0.post%%d" %% pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
return rendered
def render_git_describe(pieces):
"""TAG[-DISTANCE-gHEX][-dirty].
Like 'git describe --tags --dirty --always'.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render_git_describe_long(pieces):
"""TAG-DISTANCE-gHEX[-dirty].
Like 'git describe --tags --dirty --always -long'.
The distance/hash is unconditional.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render(pieces, style):
"""Render the given version pieces into the requested style."""
if pieces["error"]:
return {"version": "unknown",
"full-revisionid": pieces.get("long"),
"dirty": None,
"error": pieces["error"],
"date": None}
if not style or style == "default":
style = "pep440" # the default
if style == "pep440":
rendered = render_pep440(pieces)
elif style == "pep440-pre":
rendered = render_pep440_pre(pieces)
elif style == "pep440-post":
rendered = render_pep440_post(pieces)
elif style == "pep440-old":
rendered = render_pep440_old(pieces)
elif style == "git-describe":
rendered = render_git_describe(pieces)
elif style == "git-describe-long":
rendered = render_git_describe_long(pieces)
else:
raise ValueError("unknown style '%%s'" %% style)
return {"version": rendered, "full-revisionid": pieces["long"],
"dirty": pieces["dirty"], "error": None,
"date": pieces.get("date")}
def get_versions():
"""Get version information or return default if unable to do so."""
# I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
# __file__, we can work backwards from there to the root. Some
# py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
# case we can only use expanded keywords.
cfg = get_config()
verbose = cfg.verbose
try:
return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
verbose)
except NotThisMethod:
pass
try:
root = os.path.realpath(__file__)
# versionfile_source is the relative path from the top of the source
# tree (where the .git directory might live) to this file. Invert
# this to find the root from __file__.
for i in cfg.versionfile_source.split('/'):
root = os.path.dirname(root)
except NameError:
return {"version": "0+unknown", "full-revisionid": None,
"dirty": None,
"error": "unable to find root of source tree",
"date": None}
try:
pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
return render(pieces, cfg.style)
except NotThisMethod:
pass
try:
if cfg.parentdir_prefix:
return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
except NotThisMethod:
pass
return {"version": "0+unknown", "full-revisionid": None,
"dirty": None,
"error": "unable to compute version", "date": None}
'''
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
    """Extract version information from the given file.

    Scans ``versionfile_abs`` for ``git_refnames = "..."``, ``git_full = "..."``
    and ``git_date = "..."`` assignments and returns a dict with keys
    "refnames", "full" and "date" (each present only when found).  Read
    errors produce a (possibly empty) partial dict rather than raising.
    """
    # the code embedded in _version.py can just fetch the value of these
    # keywords. When used from setup.py, we don't want to import _version.py,
    # so we do it with a regexp instead. This function is not used from
    # _version.py.
    keywords = {}
    try:
        # Context manager guarantees the handle is closed even when a read
        # fails part-way; the previous open()/close() pair leaked it on error.
        with open(versionfile_abs, "r") as f:
            for line in f:
                if line.strip().startswith("git_refnames ="):
                    mo = re.search(r'=\s*"(.*)"', line)
                    if mo:
                        keywords["refnames"] = mo.group(1)
                if line.strip().startswith("git_full ="):
                    mo = re.search(r'=\s*"(.*)"', line)
                    if mo:
                        keywords["full"] = mo.group(1)
                if line.strip().startswith("git_date ="):
                    mo = re.search(r'=\s*"(.*)"', line)
                    if mo:
                        keywords["date"] = mo.group(1)
    except EnvironmentError:
        pass
    return keywords
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
    """Get version information from git keywords.

    ``keywords`` is the dict produced by git_get_keywords() (or by expanded
    git-archive $Format$ substitutions); ``tag_prefix`` is the prefix that
    version tags carry (e.g. "v"); ``verbose`` enables progress printing.
    Returns a version dict, or raises NotThisMethod when keywords are
    missing or unexpanded.
    """
    if not keywords:
        raise NotThisMethod("no keywords at all, weird")
    date = keywords.get("date")
    if date is not None:
        # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
        # datestamp. However we prefer "%ci" (which expands to an "ISO-8601
        # -like" string, which we must then edit to make compliant), because
        # it's been around since git-1.5.3, and it's too difficult to
        # discover which version we're using, or to work around using an
        # older one.
        date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
    refnames = keywords["refnames"].strip()
    if refnames.startswith("$Format"):
        # The $Format$ marker was never substituted: not a git-archive export.
        if verbose:
            print("keywords are unexpanded, not using")
        raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
    refs = set([r.strip() for r in refnames.strip("()").split(",")])
    # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
    # just "foo-1.0". If we see a "tag: " prefix, prefer those.
    TAG = "tag: "
    tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
    if not tags:
        # Either we're using git < 1.8.3, or there really are no tags. We use
        # a heuristic: assume all version tags have a digit. The old git %d
        # expansion behaves like git log --decorate=short and strips out the
        # refs/heads/ and refs/tags/ prefixes that would let us distinguish
        # between branches and tags. By ignoring refnames without digits, we
        # filter out many common branch names like "release" and
        # "stabilization", as well as "HEAD" and "master".
        tags = set([r for r in refs if re.search(r'\d', r)])
        if verbose:
            print("discarding '%s', no digits" % ",".join(refs - tags))
    if verbose:
        print("likely tags: %s" % ",".join(sorted(tags)))
    for ref in sorted(tags):
        # sorting will prefer e.g. "2.0" over "2.0rc1"
        if ref.startswith(tag_prefix):
            r = ref[len(tag_prefix):]
            if verbose:
                print("picking %s" % r)
            return {"version": r,
                    "full-revisionid": keywords["full"].strip(),
                    "dirty": False, "error": None,
                    "date": date}
    # no suitable tags, so version is "0+unknown", but full hex is still there
    if verbose:
        print("no suitable tags, using unknown + full revision id")
    return {"version": "0+unknown",
            "full-revisionid": keywords["full"].strip(),
            "dirty": False, "error": "no suitable tags", "date": None}
@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
    """Get version from 'git describe' in the root of the source tree.

    This only gets called if the git-archive 'subst' keywords were *not*
    expanded, and _version.py hasn't already been rewritten with a short
    version string, meaning we're inside a checked out source tree.

    ``run_command`` is the subprocess helper and is overridable (defaults
    to the module-level run_command).  Returns a "pieces" dict; raises
    NotThisMethod when ``root`` is not a git work tree.
    """
    GITS = ["git"]
    if sys.platform == "win32":
        GITS = ["git.cmd", "git.exe"]
    # Bail out early when 'root' is not under git control at all.
    out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
                          hide_stderr=True)
    if rc != 0:
        if verbose:
            print("Directory %s not under git control" % root)
        raise NotThisMethod("'git rev-parse --git-dir' returned error")
    # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
    # if there isn't one, this yields HEX[-dirty] (no NUM)
    describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
                                          "--always", "--long",
                                          "--match", "%s*" % tag_prefix],
                                   cwd=root)
    # --long was added in git-1.5.5
    if describe_out is None:
        raise NotThisMethod("'git describe' failed")
    describe_out = describe_out.strip()
    full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
    if full_out is None:
        raise NotThisMethod("'git rev-parse' failed")
    full_out = full_out.strip()
    pieces = {}
    pieces["long"] = full_out
    pieces["short"] = full_out[:7]  # maybe improved later
    pieces["error"] = None
    # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
    # TAG might have hyphens.
    git_describe = describe_out
    # look for -dirty suffix
    dirty = git_describe.endswith("-dirty")
    pieces["dirty"] = dirty
    if dirty:
        git_describe = git_describe[:git_describe.rindex("-dirty")]
    # now we have TAG-NUM-gHEX or HEX
    if "-" in git_describe:
        # TAG-NUM-gHEX
        mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
        if not mo:
            # unparseable. Maybe git-describe is misbehaving?
            pieces["error"] = ("unable to parse git-describe output: '%s'"
                               % describe_out)
            return pieces
        # tag
        full_tag = mo.group(1)
        if not full_tag.startswith(tag_prefix):
            if verbose:
                fmt = "tag '%s' doesn't start with prefix '%s'"
                print(fmt % (full_tag, tag_prefix))
            pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
                               % (full_tag, tag_prefix))
            return pieces
        pieces["closest-tag"] = full_tag[len(tag_prefix):]
        # distance: number of commits since tag
        pieces["distance"] = int(mo.group(2))
        # commit: short hex revision ID
        pieces["short"] = mo.group(3)
    else:
        # HEX: no tags
        pieces["closest-tag"] = None
        count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
                                    cwd=root)
        pieces["distance"] = int(count_out)  # total number of commits
    # commit date: see ISO-8601 comment in git_versions_from_keywords()
    date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"],
                       cwd=root)[0].strip()
    pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
    return pieces
def do_vcs_install(manifest_in, versionfile_source, ipy):
    """Git-specific installation logic for Versioneer.

    For Git, this means creating/changing .gitattributes to mark _version.py
    for export-subst keyword substitution.

    :param manifest_in: path of MANIFEST.in, staged with 'git add'.
    :param versionfile_source: relative path of the generated _version.py.
    :param ipy: path of the package __init__.py, or a falsy value to skip it.
    """
    GITS = ["git"]
    if sys.platform == "win32":
        GITS = ["git.cmd", "git.exe"]
    files = [manifest_in, versionfile_source]
    if ipy:
        files.append(ipy)
    try:
        me = __file__
        if me.endswith(".pyc") or me.endswith(".pyo"):
            # Point at the .py source, not the compiled cache file.
            me = os.path.splitext(me)[0] + ".py"
        versioneer_file = os.path.relpath(me)
    except NameError:
        # Frozen/embedded interpreters may not define __file__.
        versioneer_file = "versioneer.py"
    files.append(versioneer_file)
    present = False
    try:
        # Context manager closes the handle even if parsing raises; the
        # previous open()/close() pair leaked the handle on error.
        with open(".gitattributes", "r") as f:
            for line in f:
                if line.strip().startswith(versionfile_source):
                    if "export-subst" in line.strip().split()[1:]:
                        present = True
    except EnvironmentError:
        pass
    if not present:
        with open(".gitattributes", "a+") as f:
            f.write("%s export-subst\n" % versionfile_source)
        files.append(".gitattributes")
    run_command(GITS, ["add", "--"] + files)
def versions_from_parentdir(parentdir_prefix, root, verbose):
    """Try to determine the version from the parent directory name.

    Source tarballs conventionally unpack into a directory that includes both
    the project name and a version string. We will also support searching up
    two directory levels for an appropriately named parent directory
    """
    tried = []
    current = root
    for _ in range(3):
        basename = os.path.basename(current)
        if basename.startswith(parentdir_prefix):
            # Directory name carries the version; strip the prefix and return.
            return {"version": basename[len(parentdir_prefix):],
                    "full-revisionid": None,
                    "dirty": False, "error": None, "date": None}
        tried.append(current)
        current = os.path.dirname(current)  # up a level
    if verbose:
        print("Tried directories %s but none started with prefix %s" %
              (str(tried), parentdir_prefix))
    raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
SHORT_VERSION_PY = """
# This file was generated by 'versioneer.py' (0.18) from
# revision-control system data, or from the parent directory name of an
# unpacked source archive. Distribution tarballs contain a pre-generated copy
# of this file.
import json
version_json = '''
%s
''' # END VERSION_JSON
def get_versions():
return json.loads(version_json)
"""
def versions_from_file(filename):
    """Try to determine the version from _version.py if present.

    Returns the parsed version dict; raises NotThisMethod when the file is
    unreadable or holds no version_json block.
    """
    try:
        with open(filename) as fh:
            text = fh.read()
    except EnvironmentError:
        raise NotThisMethod("unable to read _version.py")
    # Accept both Unix and Windows line endings around the JSON payload.
    match = re.search(r"version_json = '''\n(.*)''' # END VERSION_JSON",
                      text, re.M | re.S)
    if match is None:
        match = re.search(r"version_json = '''\r\n(.*)''' # END VERSION_JSON",
                          text, re.M | re.S)
    if match is None:
        raise NotThisMethod("no version_json in _version.py")
    return json.loads(match.group(1))
def write_to_version_file(filename, versions):
    """Write the given version number to the given _version.py file."""
    # Remove the old file first, then rewrite it from the template.
    os.unlink(filename)
    payload = json.dumps(versions, sort_keys=True, indent=1,
                         separators=(",", ": "))
    with open(filename, "w") as fh:
        fh.write(SHORT_VERSION_PY % payload)
    print("set %s to '%s'" % (filename, versions["version"]))
def plus_or_dot(pieces):
    """Return a + if we don't already have one, else return a .

    Chooses the separator before a local-version segment: "." when the
    closest tag already contains a "+" (PEP 440 allows only one), else "+".
    Robust against "closest-tag" being absent *or* explicitly None (the
    original `pieces.get("closest-tag", "")` raised TypeError on None).
    """
    if "+" in (pieces.get("closest-tag") or ""):
        return "."
    return "+"
def render_pep440(pieces):
    """Build up version string, with post-release "local version identifier".

    Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
    get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty

    Exceptions:
    1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
    """
    tag = pieces["closest-tag"]
    if not tag:
        # exception #1: repository has never been tagged
        rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"])
        return rendered + ".dirty" if pieces["dirty"] else rendered
    rendered = tag
    if pieces["distance"] or pieces["dirty"]:
        rendered += plus_or_dot(pieces)
        rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
        if pieces["dirty"]:
            rendered += ".dirty"
    return rendered
def render_pep440_pre(pieces):
    """TAG[.post.devDISTANCE] -- No -dirty.

    Exceptions:
    1: no tags. 0.post.devDISTANCE
    """
    tag = pieces["closest-tag"]
    if not tag:
        # exception #1: never tagged
        return "0.post.dev%d" % pieces["distance"]
    if pieces["distance"]:
        return "%s.post.dev%d" % (tag, pieces["distance"])
    return tag
def render_pep440_post(pieces):
    """TAG[.postDISTANCE[.dev0]+gHEX] .

    The ".dev0" means dirty. Note that .dev0 sorts backwards
    (a dirty tree will appear "older" than the corresponding clean one),
    but you shouldn't be releasing software with -dirty anyways.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    tag = pieces["closest-tag"]
    dirty = pieces["dirty"]
    if not tag:
        # exception #1: never tagged, always append the short hash
        rendered = "0.post%d" % pieces["distance"]
        if dirty:
            rendered += ".dev0"
        return rendered + "+g%s" % pieces["short"]
    rendered = tag
    if pieces["distance"] or dirty:
        rendered += ".post%d" % pieces["distance"]
        if dirty:
            rendered += ".dev0"
        rendered += plus_or_dot(pieces) + "g%s" % pieces["short"]
    return rendered
def render_pep440_old(pieces):
    """TAG[.postDISTANCE[.dev0]] .

    The ".dev0" means dirty.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    tag = pieces["closest-tag"]
    if tag:
        rendered = tag
        if pieces["distance"] or pieces["dirty"]:
            rendered = "%s.post%d" % (tag, pieces["distance"])
            if pieces["dirty"]:
                rendered += ".dev0"
        return rendered
    # exception #1: never tagged
    rendered = "0.post%d" % pieces["distance"]
    if pieces["dirty"]:
        rendered += ".dev0"
    return rendered
def render_git_describe(pieces):
    """TAG[-DISTANCE-gHEX][-dirty].

    Like 'git describe --tags --dirty --always'.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    tag = pieces["closest-tag"]
    if tag:
        rendered = tag
        if pieces["distance"]:
            rendered = "%s-%d-g%s" % (tag, pieces["distance"], pieces["short"])
    else:
        # exception #1: untagged, bare short hash
        rendered = pieces["short"]
    return rendered + "-dirty" if pieces["dirty"] else rendered
def render_git_describe_long(pieces):
    """TAG-DISTANCE-gHEX[-dirty].

    Like 'git describe --tags --dirty --always -long'.
    The distance/hash is unconditional.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    tag = pieces["closest-tag"]
    if tag:
        rendered = "%s-%d-g%s" % (tag, pieces["distance"], pieces["short"])
    else:
        # exception #1: untagged, bare short hash
        rendered = pieces["short"]
    if pieces["dirty"]:
        rendered += "-dirty"
    return rendered
def render(pieces, style):
    """Render the given version pieces into the requested style."""
    if pieces["error"]:
        # Rendering is impossible; surface the error instead of a version.
        return {"version": "unknown",
                "full-revisionid": pieces.get("long"),
                "dirty": None,
                "error": pieces["error"],
                "date": None}
    if not style or style == "default":
        style = "pep440"  # the default
    # Dispatch table replaces the original if/elif chain.
    renderers = {
        "pep440": render_pep440,
        "pep440-pre": render_pep440_pre,
        "pep440-post": render_pep440_post,
        "pep440-old": render_pep440_old,
        "git-describe": render_git_describe,
        "git-describe-long": render_git_describe_long,
    }
    renderer = renderers.get(style)
    if renderer is None:
        raise ValueError("unknown style '%s'" % style)
    rendered = renderer(pieces)
    return {"version": rendered, "full-revisionid": pieces["long"],
            "dirty": pieces["dirty"], "error": None,
            "date": pieces.get("date")}
class VersioneerBadRootError(Exception):
    """The project root directory is unknown or missing key files.

    NOTE(review): the raise sites are outside this chunk; presumably raised
    during project-root discovery (get_root) -- confirm against full file.
    """
def get_versions(verbose=False):
    """Get the project version from whatever source is available.

    Tries, in order: expanded VCS keywords, the generated _version.py file,
    the VCS itself (e.g. 'git describe'), and finally the parent directory
    name.  Returns a dict with keys 'version', 'full-revisionid', 'dirty',
    'error' and 'date'; on total failure 'version' is "0+unknown" and
    'error' explains why.
    """
    if "versioneer" in sys.modules:
        # see the discussion in cmdclass.py:get_cmdclass()
        del sys.modules["versioneer"]
    root = get_root()
    cfg = get_config_from_root(root)
    assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg"
    handlers = HANDLERS.get(cfg.VCS)
    assert handlers, "unrecognized VCS '%s'" % cfg.VCS
    verbose = verbose or cfg.verbose
    assert cfg.versionfile_source is not None, \
        "please set versioneer.versionfile_source"
    assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix"
    versionfile_abs = os.path.join(root, cfg.versionfile_source)
    # extract version from first of: _version.py, VCS command (e.g. 'git
    # describe'), parentdir. This is meant to work for developers using a
    # source checkout, for users of a tarball created by 'setup.py sdist',
    # and for users of a tarball/zipball created by 'git archive' or github's
    # download-from-tag feature or the equivalent in other VCSes.
    get_keywords_f = handlers.get("get_keywords")
    from_keywords_f = handlers.get("keywords")
    if get_keywords_f and from_keywords_f:
        try:
            keywords = get_keywords_f(versionfile_abs)
            ver = from_keywords_f(keywords, cfg.tag_prefix, verbose)
            if verbose:
                print("got version from expanded keyword %s" % ver)
            return ver
        except NotThisMethod:
            pass
    try:
        ver = versions_from_file(versionfile_abs)
        if verbose:
            print("got version from file %s %s" % (versionfile_abs, ver))
        return ver
    except NotThisMethod:
        pass
    from_vcs_f = handlers.get("pieces_from_vcs")
    if from_vcs_f:
        try:
            pieces = from_vcs_f(cfg.tag_prefix, root, verbose)
            ver = render(pieces, cfg.style)
            if verbose:
                print("got version from VCS %s" % ver)
            return ver
        except NotThisMethod:
            pass
    try:
        if cfg.parentdir_prefix:
            ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
            if verbose:
                print("got version from parentdir %s" % ver)
            return ver
    except NotThisMethod:
        pass
    if verbose:
        print("unable to compute version")
    # Every strategy failed; return an explicit error dict rather than raise.
    return {"version": "0+unknown", "full-revisionid": None,
            "dirty": None, "error": "unable to compute version",
            "date": None}
def get_version():
    """Get the short version string for this project."""
    versions = get_versions()
    return versions["version"]
def get_cmdclass():
    """Get the custom setuptools/distutils subclasses used by Versioneer.

    Returns a dict mapping command names ("version", "build_py", "sdist",
    plus conditionally "build_ext"/"build_exe"/"py2exe") to command classes,
    suitable for passing to setup(cmdclass=...).
    """
    if "versioneer" in sys.modules:
        del sys.modules["versioneer"]
        # this fixes the "python setup.py develop" case (also 'install' and
        # 'easy_install .'), in which subdependencies of the main project are
        # built (using setup.py bdist_egg) in the same python process. Assume
        # a main project A and a dependency B, which use different versions
        # of Versioneer. A's setup.py imports A's Versioneer, leaving it in
        # sys.modules by the time B's setup.py is executed, causing B to run
        # with the wrong versioneer. Setuptools wraps the sub-dep builds in a
        # sandbox that restores sys.modules to it's pre-build state, so the
        # parent is protected against the child's "import versioneer". By
        # removing ourselves from sys.modules here, before the child build
        # happens, we protect the child from the parent's versioneer too.
        # Also see https://github.com/warner/python-versioneer/issues/52
    cmds = {}
    # we add "version" to both distutils and setuptools
    from distutils.core import Command
    class cmd_version(Command):
        # 'setup.py version' prints the computed version info and exits.
        description = "report generated version string"
        user_options = []
        boolean_options = []
        def initialize_options(self):
            pass
        def finalize_options(self):
            pass
        def run(self):
            vers = get_versions(verbose=True)
            print("Version: %s" % vers["version"])
            print(" full-revisionid: %s" % vers.get("full-revisionid"))
            print(" dirty: %s" % vers.get("dirty"))
            print(" date: %s" % vers.get("date"))
            if vers["error"]:
                print(" error: %s" % vers["error"])
    cmds["version"] = cmd_version
    # we override "build_py" in both distutils and setuptools
    #
    # most invocation pathways end up running build_py:
    # distutils/build -> build_py
    # distutils/install -> distutils/build ->..
    # setuptools/bdist_wheel -> distutils/install ->..
    # setuptools/bdist_egg -> distutils/install_lib -> build_py
    # setuptools/install -> bdist_egg ->..
    # setuptools/develop -> ?
    # pip install:
    # copies source tree to a tempdir before running egg_info/etc
    # if .git isn't copied too, 'git describe' will fail
    # then does setup.py bdist_wheel, or sometimes setup.py install
    # setup.py egg_info -> ?
    # we override different "build_py" commands for both environments
    if "setuptools" in sys.modules:
        from setuptools.command.build_py import build_py as _build_py
    else:
        from distutils.command.build_py import build_py as _build_py
    class cmd_build_py(_build_py):
        def run(self):
            root = get_root()
            cfg = get_config_from_root(root)
            versions = get_versions()
            _build_py.run(self)
            # now locate _version.py in the new build/ directory and replace
            # it with an updated value
            if cfg.versionfile_build:
                target_versionfile = os.path.join(self.build_lib,
                                                  cfg.versionfile_build)
                print("UPDATING %s" % target_versionfile)
                write_to_version_file(target_versionfile, versions)
    cmds["build_py"] = cmd_build_py
    if "setuptools" in sys.modules:
        from setuptools.command.build_ext import build_ext as _build_ext
    else:
        from distutils.command.build_ext import build_ext as _build_ext
    class cmd_build_ext(_build_ext):
        def run(self):
            root = get_root()
            cfg = get_config_from_root(root)
            versions = get_versions()
            _build_ext.run(self)
            if self.inplace:
                # build_ext --inplace will only build modules in
                # build/lib<..> dir with no _version.py to write to.
                # As in place builds will already have a _version.py
                # in the module dir, we do not need to write one.
                return
            # now locate _version.py in the new build/ directory and replace
            # it with an updated value
            target_versionfile = os.path.join(self.build_lib,
                                              cfg.versionfile_source)
            print("UPDATING %s" % target_versionfile)
            write_to_version_file(target_versionfile, versions)
    cmds["build_ext"] = cmd_build_ext
    if "cx_Freeze" in sys.modules:  # cx_freeze enabled?
        from cx_Freeze.dist import build_exe as _build_exe
        # nczeczulin reports that py2exe won't like the pep440-style string
        # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g.
        # setup(console=[{
        # "version": versioneer.get_version().split("+", 1)[0], # FILEVERSION
        # "product_version": versioneer.get_version(),
        # ...
        class cmd_build_exe(_build_exe):
            def run(self):
                root = get_root()
                cfg = get_config_from_root(root)
                versions = get_versions()
                # Temporarily freeze the version into the source tree, build,
                # then restore the full template afterwards.
                target_versionfile = cfg.versionfile_source
                print("UPDATING %s" % target_versionfile)
                write_to_version_file(target_versionfile, versions)
                _build_exe.run(self)
                os.unlink(target_versionfile)
                with open(cfg.versionfile_source, "w") as f:
                    LONG = LONG_VERSION_PY[cfg.VCS]
                    f.write(LONG %
                            {"DOLLAR": "$",
                             "STYLE": cfg.style,
                             "TAG_PREFIX": cfg.tag_prefix,
                             "PARENTDIR_PREFIX": cfg.parentdir_prefix,
                             "VERSIONFILE_SOURCE": cfg.versionfile_source,
                             })
        cmds["build_exe"] = cmd_build_exe
        del cmds["build_py"]
    if 'py2exe' in sys.modules:  # py2exe enabled?
        try:
            from py2exe.distutils_buildexe import py2exe as _py2exe  # py3
        except ImportError:
            from py2exe.build_exe import py2exe as _py2exe  # py2
        class cmd_py2exe(_py2exe):
            def run(self):
                root = get_root()
                cfg = get_config_from_root(root)
                versions = get_versions()
                # Same freeze/build/restore dance as cmd_build_exe above.
                target_versionfile = cfg.versionfile_source
                print("UPDATING %s" % target_versionfile)
                write_to_version_file(target_versionfile, versions)
                _py2exe.run(self)
                os.unlink(target_versionfile)
                with open(cfg.versionfile_source, "w") as f:
                    LONG = LONG_VERSION_PY[cfg.VCS]
                    f.write(LONG %
                            {"DOLLAR": "$",
                             "STYLE": cfg.style,
                             "TAG_PREFIX": cfg.tag_prefix,
                             "PARENTDIR_PREFIX": cfg.parentdir_prefix,
                             "VERSIONFILE_SOURCE": cfg.versionfile_source,
                             })
        cmds["py2exe"] = cmd_py2exe
    # we override different "sdist" commands for both environments
    if "setuptools" in sys.modules:
        from setuptools.command.sdist import sdist as _sdist
    else:
        from distutils.command.sdist import sdist as _sdist
    class cmd_sdist(_sdist):
        def run(self):
            versions = get_versions()
            self._versioneer_generated_versions = versions
            # unless we update this, the command will keep using the old
            # version
            self.distribution.metadata.version = versions["version"]
            return _sdist.run(self)
        def make_release_tree(self, base_dir, files):
            root = get_root()
            cfg = get_config_from_root(root)
            _sdist.make_release_tree(self, base_dir, files)
            # now locate _version.py in the new base_dir directory
            # (remembering that it may be a hardlink) and replace it with an
            # updated value
            target_versionfile = os.path.join(base_dir, cfg.versionfile_source)
            print("UPDATING %s" % target_versionfile)
            write_to_version_file(target_versionfile,
                                  self._versioneer_generated_versions)
    cmds["sdist"] = cmd_sdist
    return cmds
# Error text printed by do_setup() when setup.cfg lacks the required
# [versioneer] section/options.
CONFIG_ERROR = """
setup.cfg is missing the necessary Versioneer configuration. You need
a section like:
[versioneer]
VCS = git
style = pep440
versionfile_source = src/myproject/_version.py
versionfile_build = myproject/_version.py
tag_prefix =
parentdir_prefix = myproject-
You will also need to edit your setup.py to use the results:
import versioneer
setup(version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(), ...)
Please read the docstring in ./versioneer.py for configuration instructions,
edit setup.cfg, and re-run the installer or 'python versioneer.py setup'.
"""
# Commented-out sample [versioneer] section that do_setup() appends to
# setup.cfg when no configuration is found.
SAMPLE_CONFIG = """
# See the docstring in versioneer.py for instructions. Note that you must
# re-run 'versioneer.py setup' after changing this section, and commit the
# resulting files.
[versioneer]
#VCS = git
#style = pep440
#versionfile_source =
#versionfile_build =
#tag_prefix =
#parentdir_prefix =
"""
# Snippet appended to the package __init__.py so that
# `package.__version__` reflects the computed version at import time.
INIT_PY_SNIPPET = """
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
"""
def do_setup():
    """Main VCS-independent setup function for installing Versioneer.

    Writes the generated _version.py, wires up __init__.py and MANIFEST.in,
    and applies VCS-specific changes.  Returns 0 on success, 1 when the
    [versioneer] configuration is missing from setup.cfg.
    """
    root = get_root()
    try:
        cfg = get_config_from_root(root)
    except (EnvironmentError, configparser.NoSectionError,
            configparser.NoOptionError) as e:
        if isinstance(e, (EnvironmentError, configparser.NoSectionError)):
            # No config at all: seed setup.cfg with a commented-out sample.
            print("Adding sample versioneer config to setup.cfg",
                  file=sys.stderr)
            with open(os.path.join(root, "setup.cfg"), "a") as f:
                f.write(SAMPLE_CONFIG)
        print(CONFIG_ERROR, file=sys.stderr)
        return 1
    print(" creating %s" % cfg.versionfile_source)
    with open(cfg.versionfile_source, "w") as f:
        LONG = LONG_VERSION_PY[cfg.VCS]
        f.write(LONG % {"DOLLAR": "$",
                        "STYLE": cfg.style,
                        "TAG_PREFIX": cfg.tag_prefix,
                        "PARENTDIR_PREFIX": cfg.parentdir_prefix,
                        "VERSIONFILE_SOURCE": cfg.versionfile_source,
                        })
    ipy = os.path.join(os.path.dirname(cfg.versionfile_source),
                       "__init__.py")
    if os.path.exists(ipy):
        try:
            with open(ipy, "r") as f:
                old = f.read()
        except EnvironmentError:
            old = ""
        if INIT_PY_SNIPPET not in old:
            print(" appending to %s" % ipy)
            with open(ipy, "a") as f:
                f.write(INIT_PY_SNIPPET)
        else:
            print(" %s unmodified" % ipy)
    else:
        print(" %s doesn't exist, ok" % ipy)
        ipy = None
    # Make sure both the top-level "versioneer.py" and versionfile_source
    # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so
    # they'll be copied into source distributions. Pip won't be able to
    # install the package without this.
    manifest_in = os.path.join(root, "MANIFEST.in")
    simple_includes = set()
    try:
        with open(manifest_in, "r") as f:
            for line in f:
                if line.startswith("include "):
                    for include in line.split()[1:]:
                        simple_includes.add(include)
    except EnvironmentError:
        pass
    # That doesn't cover everything MANIFEST.in can do
    # (http://docs.python.org/2/distutils/sourcedist.html#commands), so
    # it might give some false negatives. Appending redundant 'include'
    # lines is safe, though.
    if "versioneer.py" not in simple_includes:
        print(" appending 'versioneer.py' to MANIFEST.in")
        with open(manifest_in, "a") as f:
            f.write("include versioneer.py\n")
    else:
        print(" 'versioneer.py' already in MANIFEST.in")
    if cfg.versionfile_source not in simple_includes:
        print(" appending versionfile_source ('%s') to MANIFEST.in" %
              cfg.versionfile_source)
        with open(manifest_in, "a") as f:
            f.write("include %s\n" % cfg.versionfile_source)
    else:
        print(" versionfile_source already in MANIFEST.in")
    # Make VCS-specific changes. For git, this means creating/changing
    # .gitattributes to mark _version.py for export-subst keyword
    # substitution.
    do_vcs_install(manifest_in, cfg.versionfile_source, ipy)
    return 0
def scan_setup_py():
    """Validate the contents of setup.py against Versioneer's expectations.

    Scans ./setup.py line by line and returns the number of problems found
    (0 when everything looks right), printing advice for each problem.
    """
    found = set()
    setters = False
    errors = 0
    with open("setup.py", "r") as setup_file:
        for line in setup_file:
            if "import versioneer" in line:
                found.add("import")
            if "versioneer.get_cmdclass()" in line:
                found.add("cmdclass")
            if "versioneer.get_version()" in line:
                found.add("get_version")
            if "versioneer.VCS" in line or "versioneer.versionfile_source" in line:
                # Old-style in-setup.py configuration, now lives in setup.cfg.
                setters = True
    if len(found) != 3:
        print("")
        print("Your setup.py appears to be missing some important items")
        print("(but I might be wrong). Please make sure it has something")
        print("roughly like the following:")
        print("")
        print(" import versioneer")
        print(" setup( version=versioneer.get_version(),")
        print(" cmdclass=versioneer.get_cmdclass(), ...)")
        print("")
        errors += 1
    if setters:
        print("You should remove lines like 'versioneer.VCS = ' and")
        print("'versioneer.versionfile_source = ' . This configuration")
        print("now lives in setup.cfg, and should be removed from setup.py")
        print("")
        errors += 1
    return errors
# Command-line entry point: 'python versioneer.py setup' installs/updates
# the versioneer scaffolding, then sanity-checks setup.py; exits non-zero
# when either step reports problems.
# NOTE(review): sys.argv[1] raises IndexError when run with no arguments.
if __name__ == "__main__":
    cmd = sys.argv[1]
    if cmd == "setup":
        errors = do_setup()
        errors += scan_setup_py()
        if errors:
            sys.exit(1)
| 69,737 | Python | .py | 1,552 | 36.888531 | 79 | 0.62943 | ParallelSSH/parallel-ssh | 1,198 | 148 | 36 | LGPL-2.1 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,348 | quickstart.py | ParallelSSH_parallel-ssh/examples/quickstart.py | from pssh.clients import ParallelSSHClient, SSHClient
# Quickstart: run 'uname' on every host in parallel and report per-host
# output and exit codes.
hosts = ['localhost']
cmd = 'uname'
client = ParallelSSHClient(hosts)
output = client.run_command(cmd)
for host_out in output:
    # Iterate each host's stdout line by line, then show its exit status.
    for line in host_out.stdout:
        print(line)
    print("Host %s: exit code %s" % (host_out.host, host_out.exit_code))
| 307 | Python | .py | 9 | 31.333333 | 68 | 0.734694 | ParallelSSH/parallel-ssh | 1,198 | 148 | 36 | LGPL-2.1 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,349 | parallel_commands.py | ParallelSSH_parallel-ssh/examples/parallel_commands.py | from pssh.clients import ParallelSSHClient
import datetime
# One output batch per issued command.
output = []
host = 'localhost'
hosts = [host, host]
client = ParallelSSHClient(hosts)
# Run 10 five second sleeps
cmds = ['sleep 5; uname' for _ in range(10)]
start = datetime.datetime.now()
for cmd in cmds:
    # stop_on_errors=False defers failures; return_list=True returns the
    # batch as a list of host outputs.
    output.append(client.run_command(cmd, stop_on_errors=False, return_list=True))
end = datetime.datetime.now()
print("Started %s 'sleep 5' commands on %s host(s) in %s" % (
    len(cmds), len(hosts), end-start,))
start = datetime.datetime.now()
for _output in output:
    # join() waits for this batch to finish before reading its output.
    client.join(_output)
    for host_out in _output:
        for line in host_out.stdout:
            print(line)
        for line in host_out.stderr:
            print(line)
        print(f"Exit code: {host_out.exit_code}")
end = datetime.datetime.now()
print("All commands finished in %s" % (end-start,))
| 847 | Python | .py | 25 | 30.24 | 82 | 0.687805 | ParallelSSH/parallel-ssh | 1,198 | 148 | 36 | LGPL-2.1 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,350 | sftp_copy_file.py | ParallelSSH_parallel-ssh/examples/sftp_copy_file.py | import os
from gevent import joinall
from datetime import datetime
from pssh.clients import ParallelSSHClient
# Generate a local ~200MB test file, SFTP-copy it to localhost, report the
# transfer rate, then clean up both copies.
with open('file_copy', 'wb') as fh:
    # 200MB
    for _ in range(20055120):
        fh.write(b'asdfartkj\n')
fileinfo = os.stat('file_copy')
mb_size = fileinfo.st_size / (1024000.0)
client = ParallelSSHClient(['127.0.0.1'], timeout=1, num_retries=1)
print(f"Starting copy of {mb_size}MB file")
now = datetime.now()
# copy_file returns greenlets; joinall blocks until the transfer completes.
cmd = client.copy_file('file_copy', '/tmp/file_copy')
joinall(cmd, raise_error=True)
taken = datetime.now() - now
rate = mb_size / taken.total_seconds()
print("File size %sMB transfered in %s, transfer rate %s MB/s" % (mb_size, taken, rate))
# Remove both the generated source file and the copied destination file.
os.unlink('file_copy')
os.unlink('/tmp/file_copy')
| 724 | Python | .py | 20 | 34.2 | 88 | 0.718571 | ParallelSSH/parallel-ssh | 1,198 | 148 | 36 | LGPL-2.1 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,351 | single_client.py | ParallelSSH_parallel-ssh/examples/single_client.py | from pssh.clients import SSHClient
from datetime import datetime
# Example: run several commands sequentially with the single-host client.
# Total wall time is the sum of the individual command durations.
host = 'localhost'
cmds = ['echo first command',
        'echo second command',
        'sleep 1; echo third command took one second',
        ]
client = SSHClient(host)
start = datetime.now()
for cmd in cmds:
    out = client.run_command(cmd)
    # Iterating stdout blocks until the command has produced all output.
    for line in out.stdout:
        print(line)
end = datetime.now()
print("Took %s seconds" % (end - start).total_seconds())
| 438 | Python | .py | 15 | 25.333333 | 56 | 0.685714 | ParallelSSH/parallel-ssh | 1,198 | 148 | 36 | LGPL-2.1 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,352 | test_utils.py | ParallelSSH_parallel-ssh/tests/test_utils.py | # This file is part of parallel-ssh.
#
# Copyright (C) 2014-2022 Panos Kittenis and contributors.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, version 2.1.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import unittest
from logging import NullHandler, DEBUG
from pssh import utils
class ParallelSSHUtilsTest(unittest.TestCase):
    """Tests for the logger-enabling helpers in ``pssh.utils``."""

    @staticmethod
    def _null_handlers(log):
        # Handlers that are NullHandler instances.
        return [h for h in log.handlers if isinstance(h, NullHandler)]

    @staticmethod
    def _real_handlers(log):
        # Handlers that are anything other than NullHandler.
        return [h for h in log.handlers if not isinstance(h, NullHandler)]

    def test_enabling_host_logger(self):
        """enable_host_logger attaches exactly one real handler even when
        called repeatedly."""
        self.assertEqual(len(self._null_handlers(utils.host_logger)), 1)
        utils.enable_host_logger()
        # And again to test only one non-null handler is attached
        utils.enable_host_logger()
        self.assertEqual(len(self._real_handlers(utils.host_logger)), 1)
        utils.host_logger.handlers = [NullHandler()]

    def test_enabling_pssh_logger(self):
        """enable_logger attaches exactly one real handler; the debug
        helper also sets the logger level to DEBUG."""
        self.assertEqual(len(self._null_handlers(utils.logger)), 1)
        utils.enable_logger(utils.logger)
        utils.enable_logger(utils.logger)
        self.assertEqual(len(self._real_handlers(utils.logger)), 1)
        utils.enable_debug_logger()
        self.assertEqual(utils.logger.level, DEBUG)
        utils.logger.handlers = [NullHandler()]
| 1,920 | Python | .py | 39 | 42.25641 | 80 | 0.695096 | ParallelSSH/parallel-ssh | 1,198 | 148 | 36 | LGPL-2.1 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,353 | test_exceptions.py | ParallelSSH_parallel-ssh/tests/test_exceptions.py | # This file is part of parallel-ssh.
#
# Copyright (C) 2014-2022 Panos Kittenis and contributors.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, version 2.1.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import unittest
from logging import NullHandler
from pssh.exceptions import AuthenticationError, AuthenticationException, UnknownHostError, \
UnknownHostException, ConnectionError, ConnectionErrorException, SSHError, SSHException, \
HostArgumentError, HostArgumentException
class ParallelSSHUtilsTest(unittest.TestCase):
    """Check that the renamed ``*Error`` exception classes and their
    legacy ``*Exception`` aliases are interchangeable both ways."""

    # (name raised in test_exceptions, name it must be caught as).
    _ALIAS_PAIRS = (
        (AuthenticationError, AuthenticationException),
        (UnknownHostException, UnknownHostError),
        (ConnectionErrorException, ConnectionError),
        (SSHException, SSHError),
        (HostArgumentException, HostArgumentError),
    )

    def test_exceptions(self):
        """Raising one name is caught by its counterpart."""
        for raised, caught in self._ALIAS_PAIRS:
            try:
                raise raised
            except caught:
                pass

    def test_errors(self):
        """Raising the counterpart is caught by the original name."""
        for caught, raised in self._ALIAS_PAIRS:
            try:
                raise raised
            except caught:
                pass
| 2,177 | Python | .py | 64 | 26.28125 | 94 | 0.685415 | ParallelSSH/parallel-ssh | 1,198 | 148 | 36 | LGPL-2.1 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,354 | test_output.py | ParallelSSH_parallel-ssh/tests/test_output.py | # This file is part of parallel-ssh.
#
# Copyright (C) 2014-2022 Panos Kittenis and contributors.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, version 2.1.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
"""Unittests for :mod:`pssh.output.HostOutput` class"""
import unittest
from pssh.output import HostOutput, BufferData, HostOutputBuffers
class TestHostOutput(unittest.TestCase):
    """Unit tests for ``pssh.output.HostOutput`` behaviour."""

    def setUp(self):
        # HostOutput with no host/channel/client but with empty buffers.
        buffers = HostOutputBuffers(
            BufferData(None, None), BufferData(None, None))
        self.output = HostOutput(None, None, None, None, buffers=buffers)

    def test_print(self):
        """String representation is non-empty even for an empty output."""
        rendered = str(self.output)
        self.assertTrue(rendered)

    def test_bad_exit_status(self):
        """No client means no retrievable exit code."""
        self.assertIsNone(self.output.exit_code)

    def test_excepting_client_exit_code(self):
        """Exceptions from the client's exit status lookup yield None."""
        class _ChannelError(Exception):
            pass

        class _BrokenClient(object):
            def get_exit_status(self, channel):
                raise _ChannelError

        host_out = HostOutput('host', None, None, client=_BrokenClient())
        self.assertIsNone(host_out.exit_code)

    def test_none_output_client(self):
        """With client=None, exit code and streams are all None."""
        host_out = HostOutput('host', None, None, client=None)
        self.assertEqual(host_out.exit_code, None)
        self.assertIsNone(host_out.stdout)
        self.assertIsNone(host_out.stderr)
22,355 | test_host_config.py | ParallelSSH_parallel-ssh/tests/test_host_config.py | # This file is part of parallel-ssh.
#
# Copyright (C) 2014-2022 Panos Kittenis and contributors.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, version 2.1.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import unittest
from pssh.config import HostConfig
class TestHostConfig(unittest.TestCase):
    """Tests for per-host configuration via ``pssh.config.HostConfig``."""

    def test_host_config_entries(self):
        """Every accepted keyword argument is stored as an attribute."""
        expected = {
            'user': 'user',
            'port': 22,
            'password': 'password',
            'alias': 'alias',
            'private_key': 'private key',
            'allow_agent': False,
            'num_retries': 1,
            'retry_delay': 1,
            'timeout': 1,
            'identity_auth': False,
            'proxy_host': 'proxy_host',
            'keepalive_seconds': 1,
            'ipv6_only': True,
            'cert_file': 'file',
            'forward_ssh_agent': False,
            'gssapi_auth': True,
            'gssapi_server_identity': 'some_id',
            'gssapi_client_identity': 'some_id',
            'gssapi_delegate_credentials': True,
        }
        # auth_thread_pool is accepted but intentionally not asserted on.
        cfg = HostConfig(auth_thread_pool=True, **expected)
        for attr, value in expected.items():
            self.assertEqual(getattr(cfg, attr), value)

    def test_host_config_bad_entries(self):
        """Wrongly-typed keyword arguments raise ValueError."""
        bad_values = [
            ('user', 22),
            ('password', 22),
            ('port', '22'),
            ('alias', 2),
            ('private_key', 1),
            ('allow_agent', 1),
            ('num_retries', ''),
            ('retry_delay', ''),
            ('timeout', ''),
            ('identity_auth', ''),
            ('proxy_host', 1),
            ('proxy_port', ''),
            ('proxy_user', 1),
            ('proxy_password', 1),
            ('proxy_pkey', 1),
            ('keepalive_seconds', ''),
            ('ipv6_only', ''),
            ('keepalive_seconds', ''),
            ('cert_file', 1),
            ('forward_ssh_agent', ''),
            ('gssapi_auth', ''),
            ('gssapi_server_identity', 1),
            ('gssapi_client_identity', 1),
            ('gssapi_delegate_credentials', ''),
        ]
        for attr, bad in bad_values:
            self.assertRaises(ValueError, HostConfig, **{attr: bad})
| 4,958 | Python | .py | 98 | 42.673469 | 90 | 0.703337 | ParallelSSH/parallel-ssh | 1,198 | 148 | 36 | LGPL-2.1 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,356 | test_reader.py | ParallelSSH_parallel-ssh/tests/test_reader.py | # This file is part of parallel-ssh.
#
# Copyright (C) 2014-2022 Panos Kittenis and contributors.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, version 2.1.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import unittest
from random import random, randint, randrange
from string import ascii_letters
from gevent.queue import Queue
from gevent import spawn, sleep
from pssh.clients.reader import ConcurrentRWBuffer
class TestReaderBuffer(unittest.TestCase):
    """Tests for ``ConcurrentRWBuffer`` - a buffer supporting one
    concurrent reader and writer via independent positions."""
    def setUp(self):
        # Fresh buffer and payload for every test.
        self.buffer = ConcurrentRWBuffer()
        self.data = b'test'
    def test_write(self):
        """Data written once is read back unchanged."""
        self.buffer.write(self.data)
        data = self.buffer.read()
        self.assertEqual(data, self.data)
    def test_multi_write_read(self):
        """read() returns only data written since the previous read."""
        written_data = self.data
        self.buffer.write(self.data)
        more_data = b"more data"
        written_data += more_data
        self.buffer.write(more_data)
        # First read drains everything written so far.
        data = self.buffer.read()
        self.assertEqual(data, written_data)
        new_write_data = b"yet more data"
        self.buffer.write(new_write_data)
        data = self.buffer.read()
        self.assertEqual(data, new_write_data)
        new_write_data = b"even more data"
        self.buffer.write(new_write_data)
        data = self.buffer.read()
        self.assertEqual(data, new_write_data)
    def test_concurrent_rw(self):
        """A greenlet writing every .2s and a reader stay consistent."""
        written_data = Queue()
        def _writer(_buffer):
            # Endlessly write 8-byte random strings, mirroring each write
            # into the queue so the test can compute the expected total.
            while True:
                data = b"".join([ascii_letters[m].encode() for m in [randrange(0, 8) for _ in range(8)]])
                _buffer.write(data)
                written_data.put(data)
                sleep(0.2)
        writer = spawn(_writer, self.buffer)
        writer.start()
        sleep(0.5)
        # Whatever has been read must equal everything queued so far.
        data = self.buffer.read()
        _data = b""
        while written_data.qsize() !=0 :
            _data += written_data.get()
        self.assertEqual(data, _data)
        sleep(0.5)
        data = self.buffer.read()
        _data = b""
        while written_data.qsize() !=0 :
            _data += written_data.get()
        self.assertEqual(data, _data)
        writer.kill()
        writer.get()
    def test_non_cur_write(self):
        """A write after seeking the underlying buffer still appends."""
        data = b"asdf"
        self.buffer.write(data)
        # Move the raw buffer cursor away; write() must restore position.
        self.buffer._buffer.seek(0)
        self.buffer.write(data)
        self.assertEqual(self.buffer.read(), data + data)
| 2,916 | Python | .py | 76 | 31.342105 | 105 | 0.651237 | ParallelSSH/parallel-ssh | 1,198 | 148 | 36 | LGPL-2.1 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,357 | openssh.py | ParallelSSH_parallel-ssh/tests/embedded_server/openssh.py | # This file is part of parallel-ssh.
#
# Copyright (C) 2014-2022 Panos Kittenis and contributors.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, version 2.1.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import logging
import os
import random
import string
from getpass import getuser
from subprocess import Popen, TimeoutExpired
from gevent import Timeout
from jinja2 import Template
logger = logging.getLogger('pssh.test.openssh_server')
logger.setLevel(logging.DEBUG)
# Absolute paths to the test key material and sshd config templates,
# all resolved relative to this file's directory.
DIR_NAME = os.path.dirname(__file__)
PDIR_NAME = os.path.dirname(DIR_NAME)
PPDIR_NAME = os.path.dirname(PDIR_NAME)
SERVER_KEY = os.path.abspath(os.path.sep.join([DIR_NAME, 'rsa.key']))
CA_HOST_KEY = os.path.abspath(os.path.sep.join([DIR_NAME, 'ca_host_key']))
SSHD_CONFIG_TMPL = os.path.abspath(os.path.sep.join(
    [DIR_NAME, 'sshd_config.tmpl']))
SSHD_CONFIG = os.path.abspath(os.path.sep.join([DIR_NAME, 'sshd_config']))
PRINCIPALS_TMPL = os.path.abspath(os.path.sep.join([DIR_NAME, 'principals.tmpl']))
PRINCIPALS = os.path.abspath(os.path.sep.join([DIR_NAME, 'principals']))
class OpenSSHServerError(Exception):
    """Raised when the embedded OpenSSH test server cannot start."""
class OpenSSHServer(object):
    """Manage a real ``sshd`` subprocess for integration tests.

    Renders a per-instance sshd config from a Jinja2 template (tagged
    with a random suffix so several servers can coexist), starts the
    daemon in the foreground and cleans up on stop/deletion.
    """
    def __init__(self, listen_ip='127.0.0.1', port=2222):
        self.listen_ip = listen_ip
        self.port = port
        self.server_proc = None
        # Random suffix makes the rendered config file name unique.
        self.random_server = ''.join(random.choice(string.ascii_lowercase + string.digits)
                                     for _ in range(8))
        self.sshd_config = SSHD_CONFIG + '_%s' % self.random_server
        self._fix_masks()
        self.make_config()
    def _fix_masks(self):
        """Tighten key file permissions (sshd refuses world-readable keys)
        and ensure parent directories are traversable."""
        _mask = 0o600
        dir_mask = 0o755
        for _file in [SERVER_KEY, CA_HOST_KEY]:
            os.chmod(_file, _mask)
        for _dir in [DIR_NAME, PDIR_NAME, PPDIR_NAME]:
            os.chmod(_dir, dir_mask)
    def make_config(self):
        """Render the sshd config and principals files from templates."""
        user = getuser()
        with open(SSHD_CONFIG_TMPL) as fh:
            tmpl = fh.read()
        template = Template(tmpl)
        with open(self.sshd_config, 'w') as fh:
            fh.write(template.render(parent_dir=os.path.abspath(DIR_NAME),
                                     listen_ip=self.listen_ip,
                                     random_server=self.random_server,
                                     ))
            fh.write(os.linesep)
        with open(PRINCIPALS_TMPL) as fh:
            _princ_tmpl = fh.read()
        princ_tmpl = Template(_princ_tmpl)
        with open(PRINCIPALS, 'w') as fh:
            fh.write(princ_tmpl.render(user=user))
            fh.write(os.linesep)
    def start_server(self):
        """Start sshd in the foreground and wait up to 5s for it to come up.

        :raises OpenSSHServerError: if sshd exits within the startup window.
        """
        cmd = ['/usr/sbin/sshd', '-D', '-p', str(self.port),
               '-h', SERVER_KEY, '-f', self.sshd_config]
        logger.debug("Starting server with %s" % (" ".join(cmd),))
        self.server_proc = Popen(cmd)
        try:
            # Poll in .1s slices: if wait() ever returns, sshd has died;
            # if it keeps timing out for 5s, the daemon is up and running.
            with Timeout(seconds=5, exception=TimeoutError):
                while True:
                    try:
                        self.server_proc.wait(.1)
                    except TimeoutExpired:
                        break
        except TimeoutError:
            if self.server_proc.stdout is not None:
                logger.error(self.server_proc.stdout.read())
            if self.server_proc.stderr is not None:
                logger.error(self.server_proc.stderr.read())
            raise OpenSSHServerError("Server could not start")
    def stop(self):
        """Terminate sshd if still running and remove the rendered config.

        Best-effort: OS errors during teardown are deliberately ignored.
        """
        if self.server_proc is not None and self.server_proc.returncode is None:
            try:
                self.server_proc.terminate()
                self.server_proc.wait()
            except OSError:
                pass
        try:
            os.unlink(self.sshd_config)
        except OSError:
            pass
    def __del__(self):
        self.stop()
| 4,312 | Python | .py | 103 | 33.07767 | 90 | 0.620887 | ParallelSSH/parallel-ssh | 1,198 | 148 | 36 | LGPL-2.1 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,358 | test_parallel_client.py | ParallelSSH_parallel-ssh/tests/ssh/test_parallel_client.py | # This file is part of parallel-ssh.
#
# Copyright (C) 2014-2022 Panos Kittenis and contributors.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, version 2.1.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import os
import unittest
from datetime import datetime
from sys import version_info
from unittest.mock import patch, MagicMock
from gevent import joinall, spawn, socket, sleep
from pssh import logger as pssh_logger
from pssh.clients.ssh.parallel import ParallelSSHClient
from pssh.exceptions import AuthenticationException, ConnectionErrorException, Timeout, PKeyFileError
from pssh.output import HostOutput
from .base_ssh_case import PKEY_FILENAME, PUB_FILE, USER_CERT_PRIV_KEY, \
USER_CERT_FILE, CA_USER_KEY, USER, sign_cert
from ..embedded_server.openssh import OpenSSHServer
class LibSSHParallelTest(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        """Start one embedded OpenSSH server and a shared parallel client
        used by most tests in this class."""
        # sshd/libssh reject keys with loose permissions.
        # NOTE(review): int('0600') is decimal 600, not an octal mask;
        # harmless here since the Python 2 branch is unreachable on py3.
        _mask = int('0600') if version_info <= (2,) else 0o600
        for _file in [PKEY_FILENAME, USER_CERT_PRIV_KEY, CA_USER_KEY]:
            os.chmod(_file, _mask)
        sign_cert()
        cls.host = '127.0.0.1'
        cls.port = 2422
        cls.server = OpenSSHServer(listen_ip=cls.host, port=cls.port)
        cls.server.start_server()
        cls.cmd = 'echo me'
        cls.resp = u'me'
        cls.user_key = PKEY_FILENAME
        cls.user_pub_key = PUB_FILE
        cls.cert_pkey = USER_CERT_PRIV_KEY
        cls.cert_file = USER_CERT_FILE
        cls.user = USER
        # Single client for all tests ensures that the client does not do
        # anything that causes server to disconnect the session and
        # affect all subsequent uses of the same session.
        cls.client = ParallelSSHClient([cls.host],
                                       pkey=PKEY_FILENAME,
                                       port=cls.port,
                                       num_retries=1,
                                       retry_delay=.1,
                                       )
    @classmethod
    def tearDownClass(cls):
        """Dispose of the shared client and stop the embedded server."""
        del cls.client
        cls.server.stop()
        del cls.server
def setUp(self):
self.long_cmd = lambda lines: 'for (( i=0; i<%s; i+=1 )) do echo $i; sleep .1; done' % (lines,)
def make_random_port(self):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind(('127.0.0.1', 0))
listen_port = sock.getsockname()[1]
sock.close()
return listen_port
    def test_timeout_on_open_session(self):
        """Opening a session slower than the client timeout raises Timeout."""
        timeout = .1
        client = ParallelSSHClient([self.host], port=self.port,
                                   pkey=self.user_key,
                                   timeout=timeout,
                                   num_retries=1)
        def _session(_=None):
            # Stall longer than the .1s client timeout to force the error.
            sleep(.2)
        joinall(client.connect_auth())
        sleep(.01)
        # Replace the per-host client's open_session with the slow stub.
        client._host_clients[(0, self.host)].open_session = _session
        self.assertRaises(Timeout, client.run_command, self.cmd)
    def test_pkey_from_memory(self):
        """Authentication works with private key material given as bytes."""
        with open(self.user_key, 'rb') as fh:
            key = fh.read()
        client = ParallelSSHClient([self.host], pkey=key, port=self.port, num_retries=1)
        # raise_error surfaces any authentication failure immediately.
        joinall(client.connect_auth(), raise_error=True)
    def test_join_timeout(self):
        """join() raises Timeout while the command runs, succeeds after."""
        client = ParallelSSHClient([self.host], port=self.port,
                                   pkey=self.user_key)
        output = client.run_command('echo me; sleep .5')
        # Command takes ~.5s, so a .1s join must time out with the
        # channel still open and not at EOF.
        self.assertRaises(Timeout, client.join, output, timeout=.1)
        self.assertFalse(output[0].client.finished(output[0].channel))
        self.assertFalse(output[0].channel.is_eof())
        client.join(output, timeout=1)
        self.assertTrue(output[0].channel.is_eof())
        self.assertTrue(client.finished(output))
def test_client_join_stdout(self):
output = self.client.run_command(self.cmd)
expected_exit_code = 0
expected_stdout = [self.resp]
expected_stderr = []
stdout = list(output[0].stdout)
stderr = list(output[0].stderr)
self.assertEqual(expected_stdout, stdout,
msg="Got unexpected stdout - %s, expected %s" %
(stdout, expected_stdout,))
self.assertEqual(expected_stderr, stderr,
msg="Got unexpected stderr - %s, expected %s" %
(stderr, expected_stderr,))
self.client.join(output)
exit_code = output[0].exit_code
self.assertEqual(expected_exit_code, exit_code,
msg="Got unexpected exit code - %s, expected %s" %
(exit_code, expected_exit_code,))
output = self.client.run_command(";".join([self.cmd, 'exit 1']))
self.client.join(output)
exit_code = output[0].exit_code
self.assertEqual(exit_code, 1)
self.assertTrue(len(output), len(self.client.cmds))
_output = self.client.get_last_output()
self.assertEqual(len(_output), len(output))
for i, host in enumerate(self.client.hosts):
self.assertEqual(_output[i].host, host)
    def test_get_last_output(self):
        """get_last_output is None before any run and afterwards returns
        one HostOutput per host with the last run's results."""
        host = '127.0.0.9'
        # Second server on a separate loopback address for a 2-host run.
        server = OpenSSHServer(listen_ip=host, port=self.port)
        server.start_server()
        try:
            hosts = [self.host, host]
            client = ParallelSSHClient(hosts, port=self.port, pkey=self.user_key)
            self.assertTrue(client.cmds is None)
            self.assertTrue(client.get_last_output() is None)
            client.run_command(self.cmd)
            self.assertTrue(client.cmds is not None)
            self.assertEqual(len(client.cmds), len(hosts))
            expected_stdout = [self.resp]
            expected_stderr = []
            output = client.get_last_output()
            self.assertIsInstance(output, list)
            self.assertEqual(len(output), len(hosts))
            self.assertIsInstance(output[0], HostOutput)
            client.join(output)
            # Output order must match the host list order.
            for i, host in enumerate(hosts):
                self.assertEqual(output[i].host, host)
                exit_code = output[i].exit_code
                _stdout = list(output[i].stdout)
                _stderr = list(output[i].stderr)
                self.assertEqual(exit_code, 0)
                self.assertListEqual(expected_stdout, _stdout)
                self.assertListEqual(expected_stderr, _stderr)
        finally:
            server.stop()
    def test_pssh_client_no_stdout_non_zero_exit_code_immediate_exit(self):
        """Exit code is available after join() without reading stdout."""
        output = self.client.run_command('exit 1')
        expected_exit_code = 1
        self.client.join(output)
        exit_code = output[0].exit_code
        self.assertEqual(expected_exit_code, exit_code,
                         msg="Got unexpected exit code - %s, expected %s" %
                         (exit_code,
                          expected_exit_code,))
    def test_pssh_client_run_command_get_output(self):
        """stdout, stderr and exit code are correct after draining output.

        Reading stdout/stderr to completion is expected to make the exit
        status available without an explicit join.
        """
        output = self.client.run_command(self.cmd)
        expected_exit_code = 0
        expected_stdout = [self.resp]
        expected_stderr = []
        stdout = list(output[0].stdout)
        stderr = list(output[0].stderr)
        exit_code = output[0].exit_code
        self.assertEqual(expected_exit_code, exit_code,
                         msg="Got unexpected exit code - %s, expected %s" %
                         (exit_code,
                          expected_exit_code,))
        self.assertEqual(expected_stdout, stdout,
                         msg="Got unexpected stdout - %s, expected %s" %
                         (stdout,
                          expected_stdout,))
        self.assertEqual(expected_stderr, stderr,
                         msg="Got unexpected stderr - %s, expected %s" %
                         (stderr,
                          expected_stderr,))
    def test_pssh_client_run_long_command(self):
        """All lines of a slow multi-line command are captured."""
        expected_lines = 5
        output = self.client.run_command(self.long_cmd(expected_lines))
        self.assertEqual(len(output), len(self.client.hosts))
        stdout = list(output[0].stdout)
        self.client.join(output)
        self.assertTrue(len(stdout) == expected_lines,
                        msg="Expected %s lines of response, got %s" % (
                            expected_lines, len(stdout)))
    def test_pssh_client_auth_failure(self):
        """A non-existent username raises AuthenticationException."""
        client = ParallelSSHClient([self.host], port=self.port,
                                   user='FAKE USER',
                                   pkey=self.user_key,
                                   num_retries=1)
        self.assertRaises(
            AuthenticationException, client.run_command, self.cmd)
    def test_pssh_client_hosts_list_part_failure(self):
        """Test getting output for remainder of host list in the case where one
        host in the host list has a failure"""
        # Second host is an unroutable address that must fail to connect.
        hosts = [self.host, '127.1.1.100']
        client = ParallelSSHClient(hosts,
                                   port=self.port,
                                   pkey=self.user_key,
                                   num_retries=1)
        # stop_on_errors=False keeps the failure in output[1].exception
        # rather than raising during run_command.
        output = client.run_command(self.cmd, stop_on_errors=False)
        self.assertFalse(client.finished(output))
        client.join(output, consume_output=True)
        self.assertTrue(client.finished(output))
        self.assertEqual(output[0].host, hosts[0],
                         msg="Successful host does not exist in output - output is %s" % (output,))
        self.assertEqual(output[1].host, hosts[1],
                         msg="Failed host does not exist in output - output is %s" % (output,))
        self.assertIsNotNone(output[1].exception,
                             msg="Failed host %s has no exception in output - %s" % (hosts[1], output,))
        self.assertTrue(output[1].exception is not None)
        self.assertEqual(output[1].host, hosts[1])
        # Exception args carry (…, host, port) as their last two entries.
        self.assertEqual(output[1].exception.args[-2], hosts[1])
        try:
            raise output[1].exception
        except ConnectionErrorException:
            pass
        else:
            raise Exception("Expected ConnectionError, got %s instead" % (output[1].exception,))
    def test_pssh_client_timeout(self):
        """A tiny client timeout stores a Timeout exception in the output."""
        # 10 microsecond timeout - guaranteed to trigger on any command.
        client_timeout = 0.00001
        client = ParallelSSHClient([self.host], port=self.port,
                                   pkey=self.user_key,
                                   timeout=client_timeout,
                                   num_retries=1)
        now = datetime.now()
        output = client.run_command('sleep 1', stop_on_errors=False)
        dt = datetime.now() - now
        pssh_logger.debug("Run command took %s", dt)
        self.assertIsInstance(output[0].exception,
                              Timeout)
    def test_connection_timeout(self):
        """An unresolvable/unreachable host yields ConnectionErrorException."""
        client_timeout = .01
        host = 'fakehost.com'
        client = ParallelSSHClient([host], port=self.port,
                                   pkey=self.user_key,
                                   timeout=client_timeout,
                                   num_retries=1,
                                   retry_delay=.1)
        output = client.run_command('sleep 1', stop_on_errors=False)
        self.assertIsInstance(output[0].exception, ConnectionErrorException)
def test_zero_timeout(self):
host = '127.0.0.2'
server = OpenSSHServer(listen_ip=host, port=self.port)
server.start_server()
client = ParallelSSHClient([self.host, host],
port=self.port,
pkey=self.user_key,
timeout=0)
cmd = spawn(client.run_command, 'sleep 1', stop_on_errors=False)
output = cmd.get(timeout=3)
self.assertTrue(output[0].exception is None)
    def test_pssh_client_long_running_command_exit_codes(self):
        """Exit code is None until join() completes, then 0."""
        expected_lines = 2
        output = self.client.run_command(self.long_cmd(expected_lines))
        self.assertEqual(len(output), len(self.client.hosts))
        self.assertIsNone(output[0].exit_code)
        self.assertFalse(self.client.finished(output))
        # consume_output drains the streams so join can reach EOF.
        self.client.join(output, consume_output=True)
        self.assertTrue(self.client.finished(output))
        self.assertEqual(output[0].exit_code, 0)
    def test_pssh_client_long_running_command_exit_codes_no_stdout(self):
        """join() without consuming output still yields exit code, and
        stdout remains fully readable afterwards."""
        expected_lines = 2
        output = self.client.run_command(self.long_cmd(expected_lines))
        self.assertEqual(len(output), len(self.client.hosts))
        self.assertIsNone(output[0].exit_code)
        self.assertFalse(self.client.finished(output))
        self.client.join(output)
        self.assertTrue(self.client.finished(output))
        self.assertEqual(output[0].exit_code, 0)
        stdout = list(output[0].stdout)
        self.assertEqual(expected_lines, len(stdout))
    def test_connection_error_exception(self):
        """Test that we get connection error exception in output with correct arguments"""
        # Make port with no server listening on it on separate ip
        host = '127.0.0.3'
        port = self.make_random_port()
        hosts = [host]
        client = ParallelSSHClient(hosts, port=port,
                                   pkey=self.user_key,
                                   num_retries=1)
        output = client.run_command(self.cmd, stop_on_errors=False)
        client.join(output)
        self.assertIsInstance(output[0].exception, ConnectionErrorException)
        self.assertEqual(output[0].host, host)
        try:
            raise output[0].exception
        except ConnectionErrorException as ex:
            # Exception args end with (…, host, port) of the failed target.
            self.assertEqual(ex.args[-2], host)
            self.assertEqual(ex.args[-1], port)
        else:
            raise Exception("Expected ConnectionErrorException")
    def test_bad_pkey_path(self):
        """A non-existent private key path raises PKeyFileError at init."""
        self.assertRaises(PKeyFileError, ParallelSSHClient, [self.host], port=self.port,
                          pkey='A REALLY FAKE KEY',
                          num_retries=1)
    def test_multiple_single_quotes_in_cmd(self):
        """Test that we can run a command with multiple single quotes"""
        output = self.client.run_command("echo 'me' 'and me'")
        stdout = list(output[0].stdout)
        expected = 'me and me'
        self.assertTrue(len(stdout)==1,
                        msg="Got incorrect number of lines in output - %s" % (stdout,))
        self.assertEqual(output[0].exit_code, 0)
        self.assertEqual(expected, stdout[0],
                         msg="Got unexpected output. Expected %s, got %s" % (
                             expected, stdout[0],))
    def test_backtics_in_cmd(self):
        """Test running command with backtics in it"""
        # Backticks must be passed through to the remote shell unescaped.
        output = self.client.run_command("out=`ls` && echo $out")
        self.client.join(output)
        self.assertEqual(output[0].exit_code, 0)
    def test_multiple_shell_commands(self):
        """Test running multiple shell commands in one go"""
        output = self.client.run_command("echo me; echo and; echo me")
        stdout = list(output[0].stdout)
        expected = ["me", "and", "me"]
        self.assertEqual(output[0].exit_code, 0)
        self.assertEqual(expected, stdout,
                         msg="Got unexpected output. Expected %s, got %s" % (
                             expected, stdout,))
    def test_escaped_quotes(self):
        """Test escaped quotes in shell variable are handled correctly"""
        output = self.client.run_command('t="--flags=\\"this\\""; echo $t')
        stdout = list(output[0].stdout)
        expected = ['--flags="this"']
        self.assertEqual(output[0].exit_code, 0)
        self.assertEqual(expected, stdout,
                         msg="Got unexpected output. Expected %s, got %s" % (
                             expected, stdout,))
    def test_read_timeout(self):
        """Reading stdout raises Timeout before output is ready; all
        output is still readable after join()."""
        client = ParallelSSHClient([self.host], port=self.port,
                                   pkey=self.user_key)
        # Output only appears after .3s but read_timeout is .2s.
        output = client.run_command('sleep .3; echo me; echo me; echo me', read_timeout=.2)
        for host_out in output:
            self.assertRaises(Timeout, list, host_out.stdout)
        self.assertFalse(output[0].channel.is_eof())
        client.join(output)
        for host_out in output:
            stdout = list(host_out.stdout)
            self.assertEqual(len(stdout), 3)
        self.assertTrue(output[0].channel.is_eof())
    def test_timeout_file_read(self):
        """A never-ending ``tail -f`` times out on both read and join;
        closing the channel lets the final join complete."""
        dir_name = os.path.dirname(__file__)
        _file = os.sep.join((dir_name, 'file_to_read'))
        contents = [b'a line\n' for _ in range(50)]
        with open(_file, 'wb') as fh:
            fh.writelines(contents)
        try:
            # tail -f never exits, so every read/join must time out.
            output = self.client.run_command(
                'tail -f %s' % (_file,), use_pty=True, read_timeout=.1)
            self.assertRaises(Timeout, self.client.join, output, timeout=.1)
            for host_out in output:
                try:
                    for line in host_out.stdout:
                        pass
                except Timeout:
                    pass
                else:
                    raise Exception("Timeout should have been raised")
            self.assertRaises(Timeout, self.client.join, output, timeout=.1)
            # Force-close the channel to terminate tail and unblock join.
            channel = output[0].channel
            output[0].client.close_channel(channel)
            self.client.join(output)
        finally:
            os.unlink(_file)
    def test_file_read_no_timeout(self):
        """A generous read timeout lets a 1000-line cat complete fully."""
        dir_name = os.path.dirname(__file__)
        _file = os.sep.join((dir_name, 'file_to_read'))
        contents = [b'a line\n' for _ in range(1000)]
        with open(_file, 'wb') as fh:
            fh.writelines(contents)
        try:
            output = self.client.run_command('cat %s' % (_file,), read_timeout=10)
            _out = list(output[0].stdout)
        finally:
            os.unlink(_file)
        # stdout lines are decoded and stripped of the trailing newline.
        _contents = [c.decode('utf-8').strip() for c in contents]
        self.assertEqual(len(contents), len(_out))
        self.assertListEqual(_contents, _out)
    def test_gssapi_auth(self):
        """GSSAPI auth options are accepted but fail authentication
        against the test server (which has no GSSAPI setup)."""
        _server_id = 'server_id'
        _client_id = 'client_id'
        client = ParallelSSHClient(
            [self.host], port=self.port, num_retries=1,
            pkey=None,
            gssapi_server_identity=_server_id,
            gssapi_client_identity=_client_id,
            gssapi_delegate_credentials=True,
            identity_auth=False)
        self.assertRaises(AuthenticationException, client.run_command, self.cmd)
        client = ParallelSSHClient(
            [self.host], port=self.port, num_retries=1,
            pkey=None,
            gssapi_auth=True,
            identity_auth=False)
        self.assertRaises(AuthenticationException, client.run_command, self.cmd)
    def test_long_running_cmd_join_timeout(self):
        """join() with a timeout shorter than the command must raise Timeout."""
        output = self.client.run_command('sleep 1')
        self.assertRaises(Timeout, self.client.join, output, timeout=0.2)
    def test_default_finished(self):
        """A client with no commands started reports finished() as True."""
        client = ParallelSSHClient([self.host], port=self.port,
                                   pkey=self.user_key)
        self.assertTrue(client.finished())
    def test_agent_auth(self):
        """Agent/identity auth without a usable key must raise
        AuthenticationException."""
        client = ParallelSSHClient(
            [self.host], port=self.port,
            num_retries=1,
            pkey=None, allow_agent=True,
            identity_auth=True)
        self.assertRaises(AuthenticationException, client.run_command, self.cmd)
    def test_multiple_join_timeout(self):
        """Repeated join() calls with timeouts succeed for fast commands and
        raise Timeout for slower ones, leaving channels unfinished."""
        client = ParallelSSHClient([self.host], port=self.port,
                                   pkey=self.user_key)
        for _ in range(5):
            output = client.run_command(self.cmd)
            client.join(output, timeout=1, consume_output=True)
            for host_out in output:
                self.assertTrue(host_out.client.finished(host_out.channel))
        # Command outlives the join timeout - Timeout raised, not finished.
        output = client.run_command('sleep .2')
        self.assertRaises(Timeout, client.join, output, timeout=.1, consume_output=True)
        for host_out in output:
            self.assertFalse(host_out.client.finished(host_out.channel))
    def test_cert_auth(self):
        """Certificate authentication via pkey + cert_file must succeed and
        run commands normally."""
        client = ParallelSSHClient([self.host], port=self.port,
                                   pkey=self.cert_pkey,
                                   cert_file=self.cert_file)
        output = client.run_command(self.cmd)
        client.join(output)
        resp = list(output[0].stdout)
        self.assertListEqual(resp, [self.resp])
def test_read_multi_same_hosts(self):
hosts = [self.host, self.host]
outputs = [
self.client.run_command(self.cmd),
self.client.run_command(self.cmd),
]
for output in outputs:
for host_out in output:
stdout = list(host_out.stdout)
self.assertListEqual(stdout, [self.resp])
    def test_join_bad_host_out(self):
        """join()/_join() must tolerate empty or None HostOutput entries
        instead of raising."""
        out = HostOutput(None, None, None, None)
        self.assertIsNone(self.client._join(out))
        self.assertIsNone(self.client._join(None))
        self.assertIsNone(self.client.join([None]))
    @patch('pssh.clients.base.single.socket')
    def test_ipv6(self, gsocket):
        """Address resolution to an IPv6 family is exercised via a mocked
        socket module; the per-host output carries the resulting exception."""
        hosts = ['::1']
        client = ParallelSSHClient(hosts, port=self.port, pkey=self.user_key, num_retries=1)
        addr_info = ('::1', self.port, 0, 0)
        gsocket.IPPROTO_TCP = socket.IPPROTO_TCP
        gsocket.socket = MagicMock()
        _sock = MagicMock()
        gsocket.socket.return_value = _sock
        sock_con = MagicMock()
        _sock.connect = sock_con
        getaddrinfo = MagicMock()
        gsocket.getaddrinfo = getaddrinfo
        # Resolve to a single AF_INET6 entry.
        getaddrinfo.return_value = [(
            socket.AF_INET6, socket.SocketKind.SOCK_STREAM, socket.IPPROTO_TCP, '', addr_info)]
        output = client.run_command(self.cmd, stop_on_errors=False)
        for host_out in output:
            self.assertEqual(hosts[0], host_out.host)
            # Mocked socket cannot complete the handshake - exception expected.
            self.assertIsInstance(host_out.exception, TypeError)
# def test_multiple_run_command_timeout(self):
# client = ParallelSSHClient([self.host], port=self.port,
# pkey=self.user_key)
# for _ in range(5):
# output = client.run_command('pwd', return_list=True, timeout=1)
# for host_out in output:
# stdout = list(host_out.stdout)
# self.assertTrue(len(stdout) > 0)
# self.assertTrue(host_out.client.finished(host_out.channel))
# output = client.run_command('sleep 2; echo me', return_list=True, timeout=1)
# for host_out in output:
# self.assertRaises(Timeout, list, host_out.stdout)
# client.join(output)
# for host_out in output:
# stdout = list(host_out.stdout)
# self.assertEqual(stdout, ['me'])
# def test_client_scope(self):
# def scope_killer():
# for _ in range(5):
# client = ParallelSSHClient([self.host], port=self.port,
# pkey=self.user_key, num_retries=1,
# timeout=1)
# output = client.run_command(self.cmd)
# client.join(output)
# scope_killer()
| 23,869 | Python | .py | 497 | 36.092555 | 104 | 0.584516 | ParallelSSH/parallel-ssh | 1,198 | 148 | 36 | LGPL-2.1 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,359 | test_single_client.py | ParallelSSH_parallel-ssh/tests/ssh/test_single_client.py | # This file is part of parallel-ssh.
#
# Copyright (C) 2014-2022 Panos Kittenis and contributors.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, version 2.1.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import logging
from datetime import datetime
from gevent import sleep, Timeout as GTimeout, spawn
from ssh.exceptions import AuthenticationDenied
from pssh.clients.ssh.single import SSHClient, logger as ssh_logger
from pssh.exceptions import AuthenticationException, ConnectionErrorException, \
SessionError, Timeout, \
AuthenticationError
from .base_ssh_case import SSHTestCase
# Enable verbose client logging for all tests in this module.
ssh_logger.setLevel(logging.DEBUG)
logging.basicConfig()
class SSHClientTest(SSHTestCase):
    """Unit tests for the libssh-based single-host ``SSHClient`` against the
    embedded OpenSSH server set up by :class:`SSHTestCase`."""
    def test_context_manager(self):
        """Client is usable as a context manager."""
        with SSHClient(self.host, port=self.port,
                       pkey=self.user_key,
                       num_retries=1) as client:
            self.assertIsInstance(client, SSHClient)
    def test_pkey_from_memory(self):
        """Private key may be given as in-memory bytes rather than a path."""
        with open(self.user_key, 'rb') as fh:
            key_data = fh.read()
        SSHClient(self.host, port=self.port,
                  pkey=key_data, num_retries=1, timeout=1)
    def test_execute(self):
        """Basic command execution returns expected stdout and exit code 0."""
        host_out = self.client.run_command(self.cmd)
        output = list(host_out.stdout)
        stderr = list(host_out.stderr)
        expected = [self.resp]
        self.assertEqual(expected, output)
        exit_code = host_out.channel.get_exit_status()
        self.assertEqual(exit_code, 0)
    def test_finished(self):
        """finished()/wait_finished() semantics around a running channel."""
        self.assertFalse(self.client.finished(None))
        host_out = self.client.run_command(self.cmd)
        channel = host_out.channel
        self.assertFalse(self.client.finished(channel))
        # wait_finished takes a HostOutput, not a raw channel.
        self.assertRaises(ValueError, self.client.wait_finished, host_out.channel)
        self.client.wait_finished(host_out)
        stdout = list(host_out.stdout)
        self.assertTrue(self.client.finished(channel))
        self.assertListEqual(stdout, [self.resp])
        self.assertRaises(ValueError, self.client.wait_finished, None)
        host_out.channel = None
        self.assertIsNone(self.client.wait_finished(host_out))
    def test_finished_error(self):
        """None arguments are rejected or ignored gracefully."""
        self.assertRaises(ValueError, self.client.wait_finished, None)
        self.assertIsNone(self.client.finished(None))
    def test_stderr(self):
        """Output written to stderr appears on the stderr stream only."""
        host_out = self.client.run_command('echo "me" >&2')
        self.client.wait_finished(host_out)
        output = list(host_out.stdout)
        stderr = list(host_out.stderr)
        expected = ['me']
        self.assertListEqual(expected, stderr)
        self.assertEqual(len(output), 0)
    def test_identity_auth(self):
        """Default identity-file authentication succeeds when the user key is
        in the client's IDENTITIES."""
        class _SSHClient(SSHClient):
            IDENTITIES = (self.user_key,)
        client = SSHClient(self.host, port=self.port,
                           pkey=self.user_key,
                           num_retries=1,
                           timeout=1,
                           allow_agent=False)
        client.disconnect()
        client.pkey = None
        del client.session
        del client.sock
        client._connect(self.host, self.port)
        client._init_session()
        client.IDENTITIES = (self.user_key,)
        # Default identities auth only should succeed
        client._identity_auth()
        client.disconnect()
        del client.session
        del client.sock
        client._connect(self.host, self.port)
        client._init_session()
        # Auth should succeed
        self.assertIsNone(client.auth())
        # Standard init with custom identities
        client = _SSHClient(self.host, port=self.port,
                            num_retries=1,
                            allow_agent=False)
        self.assertIsInstance(client, SSHClient)
    def test_long_running_cmd(self):
        """Non-zero exit status of a longer command is reported after wait."""
        host_out = self.client.run_command('sleep .2; exit 2')
        self.assertRaises(ValueError, self.client.wait_finished, host_out.channel)
        self.client.wait_finished(host_out)
        exit_code = host_out.exit_code
        self.assertEqual(exit_code, 2)
    def test_wait_finished_timeout(self):
        """wait_finished honours its timeout and raises close to it."""
        host_out = self.client.run_command('sleep .2')
        timeout = .1
        self.assertFalse(self.client.finished(host_out.channel))
        start = datetime.now()
        self.assertRaises(Timeout, self.client.wait_finished, host_out, timeout=timeout)
        dt = datetime.now() - start
        # Timeout should fire close to the requested value.
        self.assertTrue(timeout*1.1 > dt.total_seconds() > timeout)
        self.client.wait_finished(host_out)
        self.assertTrue(self.client.finished(host_out.channel))
    def test_client_disconnect_on_del(self):
        """Garbage collecting the client closes its socket."""
        client = SSHClient(self.host, port=self.port,
                           pkey=self.user_key,
                           num_retries=1)
        client_sock = client.sock
        del client
        self.assertTrue(client_sock.closed)
    def test_client_bad_sock(self):
        """poll() on a client with no socket returns None, not an error."""
        client = SSHClient(self.host, port=self.port,
                           pkey=self.user_key,
                           num_retries=1)
        client.disconnect()
        client.sock = None
        self.assertIsNone(client.poll())
    def test_multiple_clients_exec_terminates_channels(self):
        # See #200 - Multiple clients should not interfere with
        # each other. session.disconnect can leave state in library
        # and break subsequent sessions even on different socket and
        # session
        def scope_killer():
            for _ in range(5):
                client = SSHClient(self.host, port=self.port,
                                   pkey=self.user_key,
                                   num_retries=1,
                                   allow_agent=False)
                host_out = client.run_command(self.cmd)
                output = list(host_out.stdout)
                self.assertListEqual(output, [self.resp])
        scope_killer()
    def test_interactive_shell(self):
        """Commands run in an interactive shell accumulate combined output."""
        with self.client.open_shell() as shell:
            shell.run(self.cmd)
            shell.run(self.cmd)
        stdout = list(shell.output.stdout)
        self.assertListEqual(stdout, [self.resp, self.resp])
        self.assertEqual(shell.output.exit_code, 0)
    def test_interactive_shell_exit_code(self):
        """Shell exit code reflects the last command run in it."""
        with self.client.open_shell() as shell:
            shell.run(self.cmd)
            shell.run('sleep .1')
            shell.run(self.cmd)
            shell.run('exit 1')
        stdout = list(shell.output.stdout)
        self.assertListEqual(stdout, [self.resp, self.resp])
        self.assertEqual(shell.output.exit_code, 1)
    def test_identity_auth_failure(self):
        """No key, no agent - constructing the client fails to authenticate."""
        self.assertRaises(AuthenticationException,
                          SSHClient, self.host, port=self.port, num_retries=1,
                          allow_agent=False)
    def test_password_auth_failure(self):
        """Bad password raises AuthenticationException wrapping the libssh
        AuthenticationDenied error."""
        try:
            client = SSHClient(self.host, port=self.port, num_retries=1,
                               allow_agent=False,
                               identity_auth=False,
                               password='blah blah blah',
                               )
        except AuthenticationException as ex:
            self.assertIsInstance(ex.args[3], AuthenticationDenied)
        else:
            raise AssertionError
    def test_retry_failure(self):
        """Connecting to a closed port exhausts retries with connection error."""
        self.assertRaises(ConnectionErrorException,
                          SSHClient, self.host, port=12345,
                          timeout=1,
                          retry_delay=.1,
                          num_retries=2, _auth_thread_pool=False)
    def test_auth_retry_failure(self):
        """Repeated bad-password attempts end in AuthenticationException."""
        self.assertRaises(AuthenticationException,
                          SSHClient, self.host, port=self.port,
                          user=self.user,
                          password='fake',
                          timeout=1,
                          retry_delay=.1,
                          num_retries=2,
                          allow_agent=False,
                          identity_auth=False,
                          )
    def test_connection_timeout(self):
        """Connection to an unreachable host errors within the timeout."""
        cmd = spawn(SSHClient, 'fakehost.com', port=12345,
                    retry_delay=.1,
                    num_retries=2, timeout=.2, _auth_thread_pool=False)
        # Should fail within greenlet timeout, otherwise greenlet will
        # raise timeout which will fail the test
        self.assertRaises(ConnectionErrorException, cmd.get, timeout=2)
    def test_open_session_timeout(self):
        """Session open taking longer than the client timeout raises."""
        client = SSHClient(self.host, port=self.port,
                           pkey=self.user_key,
                           num_retries=2,
                           timeout=.1)
        def _session(timeout=None):
            sleep(.2)
        client.open_session = _session
        self.assertRaises(GTimeout, client.run_command, self.cmd)
    def test_client_read_timeout(self):
        """Per-command read timeout raises Timeout on stdout iteration."""
        client = SSHClient(self.host, port=self.port,
                           pkey=self.user_key,
                           num_retries=1)
        host_out = client.run_command('sleep 2; echo me', timeout=0.2)
        self.assertRaises(Timeout, list, host_out.stdout)
    def test_open_session_exc(self):
        """Errors during session open are wrapped in SessionError."""
        class Error(Exception):
            pass
        def _session():
            raise Error
        client = SSHClient(self.host, port=self.port,
                           pkey=self.user_key,
                           num_retries=1)
        client._open_session = _session
        self.assertRaises(SessionError, client.open_session)
    def test_session_connect_exc(self):
        """Errors from the underlying session connect propagate from
        _init_session after retries."""
        class Error(Exception):
            pass
        def _con():
            raise Error
        client = SSHClient(self.host, port=self.port,
                           pkey=self.user_key,
                           num_retries=2,
                           retry_delay=.2,
                           )
        client._session_connect = _con
        self.assertRaises(Error, client._init_session)
    def test_invalid_mkdir(self):
        """Creating a local dir at a non-writable path raises OSError."""
        self.assertRaises(OSError, self.client._make_local_dir, '/my_new_dir')
    def test_no_auth(self):
        """With every auth mechanism disabled, AuthenticationError raised."""
        self.assertRaises(
            AuthenticationError,
            SSHClient,
            self.host,
            port=self.port,
            num_retries=1,
            allow_agent=False,
            identity_auth=False,
        )
    def test_agent_auth_failure(self):
        """Agent auth failures - unknown errors and AuthenticationDenied -
        both surface as AuthenticationError from auth()."""
        class UnknownError(Exception):
            pass
        def _agent_auth_unk():
            raise UnknownError
        def _agent_auth_agent_err():
            raise AuthenticationDenied
        client = SSHClient(self.host, port=self.port,
                           pkey=self.user_key,
                           num_retries=1,
                           allow_agent=True,
                           identity_auth=False)
        client.session.disconnect()
        client.pkey = None
        client._connect(self.host, self.port)
        client._agent_auth = _agent_auth_unk
        self.assertRaises(AuthenticationError, client.auth)
        client._agent_auth = _agent_auth_agent_err
        self.assertRaises(AuthenticationError, client.auth)
    def test_agent_auth_fake_success(self):
        """If agent auth returns without error, auth() succeeds."""
        def _agent_auth():
            return
        client = SSHClient(self.host, port=self.port,
                           pkey=self.user_key,
                           num_retries=1,
                           allow_agent=True,
                           identity_auth=False)
        client.session.disconnect()
        client.pkey = None
        client._connect(self.host, self.port)
        client._agent_auth = _agent_auth
        self.assertIsNone(client.auth())
    def test_disconnect_exc(self):
        """Errors raised during disconnect must not propagate to the caller."""
        class DiscError(Exception):
            pass
        def _disc():
            raise DiscError
        client = SSHClient(self.host, port=self.port,
                           pkey=self.user_key,
                           num_retries=1,
                           retry_delay=.1,
                           )
        client._disconnect_eagain = _disc
        client._connect_init_session_retry(0)
        client.disconnect()
| 12,677 | Python | .py | 293 | 30.856655 | 88 | 0.58693 | ParallelSSH/parallel-ssh | 1,198 | 148 | 36 | LGPL-2.1 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,360 | base_ssh_case.py | ParallelSSH_parallel-ssh/tests/ssh/base_ssh_case.py | # This file is part of parallel-ssh.
#
# Copyright (C) 2014-2022 Panos Kittenis and contributors.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, version 2.1.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import logging
import os
import subprocess
import unittest
from getpass import getuser
from sys import version_info
from pssh.clients.ssh.single import SSHClient
from ..embedded_server.openssh import OpenSSHServer
def setup_root_logger():
    """Attach a DEBUG-level stream handler to the root logger.

    Uses a timestamped format including level and logger name so all
    test output is traceable.
    """
    root = logging.getLogger()
    root.setLevel(logging.DEBUG)
    stream_handler = logging.StreamHandler()
    stream_handler.setFormatter(logging.Formatter(
        '%(asctime)s - %(levelname)s - %(name)s - %(message)s'))
    root.addHandler(stream_handler)
# Configure logging once at import time.
setup_root_logger()
# Client private/public key paths used by the tests.
PKEY_FILENAME = os.path.sep.join([os.path.dirname(__file__), '..', 'client_pkey'])
PUB_FILE = "%s.pub" % (PKEY_FILENAME,)
# Certificate-auth key material: private key, its public key, and the
# CA-signed certificate produced by sign_cert().
USER_CERT_PRIV_KEY = os.path.sep.join([os.path.dirname(__file__), '..', 'unit_test_cert_key'])
USER_CERT_PUB_KEY = "%s.pub" % (USER_CERT_PRIV_KEY,)
USER_CERT_FILE = "%s-cert.pub" % (USER_CERT_PRIV_KEY,)
# CA key used to sign the user certificate.
CA_USER_KEY = os.path.sep.join([os.path.dirname(__file__), '..', 'embedded_server', 'ca_user_key'])
USER = getuser()
def sign_cert():
    """Sign the test user's certificate public key with the CA user key.

    Shells out to ``ssh-keygen -s`` so the embedded server accepts
    certificate authentication for the current user.
    """
    subprocess.check_call([
        'ssh-keygen', '-s', CA_USER_KEY, '-n', USER, '-I', 'tests', USER_CERT_PUB_KEY,
    ])
class SSHTestCase(unittest.TestCase):
    """Base test case starting one embedded OpenSSH server and a connected
    libssh ``SSHClient`` shared by all tests in a subclass."""
    @classmethod
    def setUpClass(cls):
        # Key files must be 0600 or sshd/ssh will refuse to use them.
        _mask = int('0600') if version_info <= (2,) else 0o600
        for _file in [PKEY_FILENAME, USER_CERT_PRIV_KEY, CA_USER_KEY]:
            os.chmod(_file, _mask)
        sign_cert()
        cls.host = '127.0.0.1'
        cls.port = 2322
        cls.server = OpenSSHServer(listen_ip=cls.host, port=cls.port)
        cls.server.start_server()
        cls.cmd = 'echo me'
        cls.resp = u'me'
        cls.user_key = PKEY_FILENAME
        cls.user_pub_key = PUB_FILE
        cls.cert_pkey = USER_CERT_PRIV_KEY
        cls.cert_file = USER_CERT_FILE
        cls.user = USER
        # Shared client for all tests in the subclass.
        cls.client = SSHClient(cls.host, port=cls.port,
                               pkey=PKEY_FILENAME,
                               num_retries=1,
                               identity_auth=False,
                               retry_delay=.1,
                               )
    @classmethod
    def tearDownClass(cls):
        # Drop the client first so its socket closes before server shutdown.
        del cls.client
        cls.server.stop()
        del cls.server
| 2,949 | Python | .py | 73 | 34.123288 | 99 | 0.652341 | ParallelSSH/parallel-ssh | 1,198 | 148 | 36 | LGPL-2.1 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,361 | base_ssh2_case.py | ParallelSSH_parallel-ssh/tests/native/base_ssh2_case.py | # This file is part of parallel-ssh.
#
# Copyright (C) 2014-2022 Panos Kittenis and contributors.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, version 2.1.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import logging
import os
import unittest
from getpass import getuser
from sys import version_info
from pssh.clients.native import SSHClient
from ..embedded_server.openssh import OpenSSHServer
def setup_root_logger():
    """Enable DEBUG output for the 'pssh.clients' logger with a stream handler."""
    _logger = logging.getLogger('pssh.clients')
    _logger.setLevel(logging.DEBUG)
    _handler = logging.StreamHandler()
    _handler.setFormatter(logging.Formatter(
        '%(asctime)s - %(levelname)s - %(name)s - %(message)s'))
    _logger.addHandler(_handler)
# Configure logging once at import time.
setup_root_logger()
# Client private/public key paths used by the tests.
PKEY_FILENAME = os.path.sep.join([os.path.dirname(__file__), '..', 'client_pkey'])
PUB_FILE = "%s.pub" % (PKEY_FILENAME,)
class SSH2TestCase(unittest.TestCase):
    """Base test case starting one embedded OpenSSH server and a connected
    ssh2-python (native) ``SSHClient`` shared by subclass tests."""
    client = None
    server = None
    @classmethod
    def setUpClass(cls):
        # Key file must be 0600 or ssh will refuse to use it.
        _mask = int('0600') if version_info <= (2,) else 0o600
        os.chmod(PKEY_FILENAME, _mask)
        cls.server = OpenSSHServer()
        cls.server.start_server()
        cls.host = '127.0.0.1'
        cls.port = 2222
        cls.cmd = 'echo me'
        cls.resp = u'me'
        cls.user_key = PKEY_FILENAME
        cls.user_pub_key = PUB_FILE
        cls.user = getuser()
        # Shared client for all tests in the subclass.
        cls.client = SSHClient(cls.host, port=cls.port,
                               pkey=PKEY_FILENAME,
                               num_retries=1,
                               identity_auth=False,
                               retry_delay=.1,
                               )
    @classmethod
    def tearDownClass(cls):
        # Drop the client first so its socket closes before server shutdown.
        del cls.client
        cls.server.stop()
        del cls.server
| 2,306 | Python | .py | 60 | 31.85 | 89 | 0.663384 | ParallelSSH/parallel-ssh | 1,198 | 148 | 36 | LGPL-2.1 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,362 | test_parallel_client.py | ParallelSSH_parallel-ssh/tests/native/test_parallel_client.py | # -*- coding: utf-8 -*-
# This file is part of parallel-ssh.
#
# Copyright (C) 2014-2022 Panos Kittenis and contributors.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, version 2.1.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
"""Unittests for :mod:`pssh.ParallelSSHClient` class"""
import unittest
from getpass import getuser
import os
import shutil
import string
import random
from hashlib import sha256
from datetime import datetime
from unittest.mock import patch, MagicMock
from gevent import joinall, spawn, socket, sleep, Timeout as GTimeout
from pssh.config import HostConfig
from pssh.clients.native import ParallelSSHClient
from pssh.exceptions import UnknownHostException, \
AuthenticationException, ConnectionErrorException, \
HostArgumentException, SFTPError, SFTPIOError, Timeout, SCPError, \
PKeyFileError, ShellError, HostArgumentError, NoIPv6AddressFoundError, \
AuthenticationError
from pssh.output import HostOutput
from .base_ssh2_case import PKEY_FILENAME, PUB_FILE
from ..embedded_server.openssh import OpenSSHServer
class ParallelSSHClientTest(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        """Start one embedded OpenSSH server and a shared parallel client."""
        # Key file must be 0600 or ssh will refuse to use it.
        _mask = 0o600
        os.chmod(PKEY_FILENAME, _mask)
        cls.host = '127.0.0.1'
        cls.port = 2223
        cls.server = OpenSSHServer(listen_ip=cls.host, port=cls.port)
        cls.server.start_server()
        cls.cmd = 'echo me'
        cls.resp = u'me'
        cls.user_key = PKEY_FILENAME
        cls.user_pub_key = PUB_FILE
        cls.user = getuser()
        # Single client for all tests ensures that the client does not do
        # anything that causes server to disconnect the session and
        # affect all subsequent uses of the same session.
        cls.client = ParallelSSHClient([cls.host],
                                       pkey=PKEY_FILENAME,
                                       port=cls.port,
                                       num_retries=1)
    @classmethod
    def tearDownClass(cls):
        """Drop the shared client, then stop the embedded server."""
        del cls.client
        cls.server.stop()
        del cls.server
def setUp(self):
self.long_cmd = lambda lines: 'for (( i=0; i<%s; i+=1 )) do echo $i; sleep .1; done' % (lines,)
def make_random_port(self, host=None):
host = '127.0.0.1' if host is None else host
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind((host, 0))
listen_port = sock.getsockname()[1]
sock.close()
return listen_port
    def test_connect_auth(self):
        """connect_auth() greenlets complete without error for valid keys."""
        client = ParallelSSHClient([self.host], pkey=self.user_key, port=self.port, num_retries=1)
        joinall(client.connect_auth(), raise_error=True)
    def test_pkey_from_memory(self):
        """Private key may be supplied as in-memory bytes."""
        with open(self.user_key, 'rb') as fh:
            key = fh.read()
        client = ParallelSSHClient([self.host], pkey=key, port=self.port, num_retries=1)
        joinall(client.connect_auth(), raise_error=True)
    def test_client_shells(self):
        """Interactive shells accept single commands, lists and multi-line
        scripts; output accumulates and exit code is the last command's."""
        shells = self.client.open_shell()
        self.client.run_shell_commands(shells, self.cmd)
        self.client.run_shell_commands(shells, [self.cmd, self.cmd])
        self.client.run_shell_commands(
            shells, """
            %s
            exit 1
            """ % (self.cmd,))
        self.client.join_shells(shells)
        # Shells are closed after join - further commands must fail.
        self.assertRaises(ShellError, self.client.run_shell_commands, shells, self.cmd)
        for shell in shells:
            stdout = list(shell.stdout)
            self.assertListEqual(stdout, [self.resp, self.resp, self.resp, self.resp])
            expected_exit_code = 1
            self.assertEqual(shell.exit_code, expected_exit_code)
            self.assertListEqual(list(shell.stderr), [])
            self.assertTrue(shell.stdin is not None)
    def test_client_shells_read_timeout(self):
        """Shell stdout iteration times out on slow output; exit code is
        only available after join_shells."""
        shells = self.client.open_shell(read_timeout=.1)
        self.client.run_shell_commands(shells, self.cmd)
        self.client.run_shell_commands(shells, [self.cmd, 'sleep .5', 'exit 1'])
        stdout = []
        for shell in shells:
            try:
                for line in shell.output.stdout:
                    stdout.append(line)
            except Timeout:
                pass
        self.assertListEqual(stdout, [self.resp, self.resp])
        # Not joined yet - no exit code.
        self.assertEqual(shell.output.exit_code, None)
        expected_exit_code = 1
        self.client.join_shells(shells)
        self.assertEqual(shell.output.exit_code, expected_exit_code)
    def test_client_shells_timeout(self):
        """Timeout while establishing the underlying client propagates from
        open_shell."""
        client = ParallelSSHClient([self.host], pkey=self.user_key, port=self.port,
                                   timeout=0.01, num_retries=1)
        client._get_ssh_client = MagicMock()
        client._get_ssh_client.side_effect = Timeout
        self.assertRaises(Timeout, client.open_shell)
    def test_client_shells_join_timeout(self):
        """join_shells raises Timeout while the shell command still runs and
        succeeds once given enough time."""
        shells = self.client.open_shell()
        cmd = """
        echo me
        sleep .3
        echo me
        """
        self.client.run_shell_commands(shells, cmd)
        self.assertRaises(Timeout, self.client.join_shells, shells, timeout=.1)
        # Second join with short timeout must also raise.
        try:
            self.client.join_shells(shells, timeout=.1)
        except Timeout:
            pass
        else:
            raise AssertionError
        self.client.join_shells(shells, timeout=1)
        stdout = list(shells[0].stdout)
        self.assertListEqual(stdout, [self.resp, self.resp])
    def test_client_join_consume_output(self):
        """join(consume_output=True) drains stdout/stderr, leaving empty
        streams but a valid exit code."""
        output = self.client.run_command(self.cmd)
        expected_exit_code = 0
        self.client.join(output, consume_output=True)
        exit_code = output[0].exit_code
        stdout = list(output[0].stdout)
        stderr = list(output[0].stderr)
        self.assertEqual(len(stdout), 0)
        self.assertEqual(len(stderr), 0)
        self.assertEqual(expected_exit_code, exit_code)
        # Same for output written to stderr.
        output = self.client.run_command('echo "me" >&2', use_pty=False)
        self.client.join(output, consume_output=True)
        exit_code = output[0].exit_code
        stdout = list(output[0].stdout)
        stderr = list(output[0].stderr)
        self.assertTrue(len(stdout) == 0)
        self.assertTrue(len(stderr) == 0)
        self.assertEqual(expected_exit_code, exit_code)
        # None host outputs are tolerated.
        self.assertIsNone(self.client._join(None))
        self.assertIsNone(self.client.join([None]))
    def test_client_join_stdout(self):
        """join() without consuming leaves stdout/stderr readable; exit codes
        and the client's internal command greenlets stay consistent."""
        output = self.client.run_command(self.cmd)
        expected_exit_code = 0
        expected_stdout = [self.resp]
        expected_stderr = []
        self.client.join(output)
        stdout = list(output[0].stdout)
        stderr = list(output[0].stderr)
        exit_code = output[0].exit_code
        self.assertEqual(expected_exit_code, exit_code,
                         msg="Got unexpected exit code - %s, expected %s" %
                         (exit_code,
                          expected_exit_code,))
        self.assertEqual(expected_stdout, stdout,
                         msg="Got unexpected stdout - %s, expected %s" %
                         (stdout,
                          expected_stdout,))
        self.assertEqual(expected_stderr, stderr,
                         msg="Got unexpected stderr - %s, expected %s" %
                         (stderr,
                          expected_stderr,))
        # Non-zero exit of a compound command.
        output = self.client.run_command(";".join([self.cmd, 'exit 1']))
        self.client.join(output, consume_output=True)
        exit_code = output[0].exit_code
        self.assertTrue(exit_code == 1)
        self.assertTrue(len(output), len(self.client.cmds))
        _output = [cmd.get() for cmd in self.client.cmds]
        self.assertTrue(len(_output) == len(output))
    def test_pssh_client_no_stdout_non_zero_exit_code_immediate_exit(self):
        """Exit code of a command with no output is available after join."""
        output = self.client.run_command('exit 1')
        expected_exit_code = 1
        self.client.join(output)
        exit_code = output[0].exit_code
        self.assertEqual(expected_exit_code, exit_code,
                         msg="Got unexpected exit code - %s, expected %s" %
                         (exit_code,
                          expected_exit_code,))
    def test_pssh_client_no_stdout_non_zero_exit_code_immediate_exit_no_join(self):
        """Exhausting stdout without join() also makes exit code available."""
        output = self.client.run_command('exit 1')
        expected_exit_code = 1
        for host_out in output:
            for _ in host_out.stdout:
                pass
        exit_code = output[0].exit_code
        self.assertEqual(expected_exit_code, exit_code)
    def test_pssh_client_run_command_get_output(self):
        """Reading output directly (no join) yields expected stdout, stderr
        and exit code."""
        output = self.client.run_command(self.cmd)
        expected_exit_code = 0
        expected_stdout = [self.resp]
        expected_stderr = []
        stdout = list(output[0].stdout)
        stderr = list(output[0].stderr)
        exit_code = output[0].exit_code
        self.assertEqual(expected_exit_code, exit_code,
                         msg="Got unexpected exit code - %s, expected %s" %
                         (exit_code,
                          expected_exit_code,))
        self.assertEqual(expected_stdout, stdout,
                         msg="Got unexpected stdout - %s, expected %s" %
                         (stdout,
                          expected_stdout,))
        self.assertEqual(expected_stderr, stderr,
                         msg="Got unexpected stderr - %s, expected %s" %
                         (stderr,
                          expected_stderr,))
    def test_pssh_client_run_long_command(self):
        """A multi-line slow command produces exactly the expected number of
        output lines."""
        expected_lines = 5
        output = self.client.run_command(self.long_cmd(expected_lines))
        self.client.join(output)
        stdout = list(output[0].stdout)
        self.assertTrue(len(stdout) == expected_lines,
                        msg="Expected %s lines of response, got %s" % (
                            expected_lines, len(stdout)))
    def test_pssh_client_auth_failure(self):
        """Valid key with a non-existent user must fail authentication."""
        client = ParallelSSHClient([self.host], port=self.port,
                                   user='FAKE USER',
                                   pkey=self.user_key,
                                   timeout=1,
                                   retry_delay=.1,
                                   )
        self.assertRaises(
            AuthenticationException, client.run_command, self.cmd)
    def test_pssh_client_hosts_list_part_failure(self):
        """Test getting output for remainder of host list in the case where one
        host in the host list has a failure"""
        hosts = [self.host, '127.1.1.100']
        client = ParallelSSHClient(hosts,
                                   port=self.port,
                                   pkey=self.user_key,
                                   num_retries=1)
        output = client.run_command(self.cmd, stop_on_errors=False)
        self.assertFalse(client.finished(output))
        client.join(output, consume_output=True)
        self.assertTrue(client.finished(output))
        # One entry per host, failed host carries the connection exception.
        self.assertEqual(len(hosts), len(output))
        self.assertIsNotNone(output[1].exception)
        self.assertEqual(output[1].host, hosts[1])
        self.assertIsInstance(output[1].exception, ConnectionErrorException)
    def test_pssh_client_timeout(self):
        """A tiny client timeout produces a Timeout exception per host."""
        # 1ms timeout
        client_timeout = 0.001
        client = ParallelSSHClient([self.host], port=self.port,
                                   pkey=self.user_key,
                                   timeout=client_timeout,
                                   num_retries=1)
        output = client.run_command('sleep 1', stop_on_errors=False)
        self.assertIsInstance(output[0].exception,
                              Timeout)
    def test_timeout_on_open_session(self):
        """Session open exceeding the client timeout raises Timeout from
        run_command."""
        timeout = .1
        client = ParallelSSHClient([self.host], port=self.port,
                                   pkey=self.user_key,
                                   timeout=timeout,
                                   num_retries=1)
        def _session(_=None):
            sleep(.2)
        joinall(client.connect_auth())
        sleep(.01)
        # Replace the single host client's open_session with a slow stub.
        client._host_clients[(0, self.host)].open_session = _session
        self.assertRaises(Timeout, client.run_command, self.cmd)
    def test_connection_timeout(self):
        """Unresolvable host with short timeout yields connection error in
        per-host output."""
        client_timeout = .01
        host = 'fakehost.com'
        client = ParallelSSHClient([host], port=self.port,
                                   pkey=self.user_key,
                                   timeout=client_timeout,
                                   num_retries=1)
        output = client.run_command('sleep 1', stop_on_errors=False)
        self.assertIsInstance(output[0].exception, ConnectionErrorException)
def test_zero_timeout(self):
host = '127.0.0.2'
server = OpenSSHServer(listen_ip=host, port=self.port)
server.start_server()
client = ParallelSSHClient([self.host, host],
port=self.port,
pkey=self.user_key,
timeout=0)
cmd = spawn(client.run_command, 'sleep .1', stop_on_errors=False)
output = cmd.get(timeout=.3)
self.assertTrue(output[0].exception is None)
    def test_pssh_client_long_running_command_exit_codes(self):
        """Exit code is None until join; consume_output leaves empty stdout."""
        expected_lines = 2
        output = self.client.run_command(self.long_cmd(expected_lines))
        self.assertIsNone(output[0].exit_code)
        self.assertFalse(self.client.finished(output))
        self.client.join(output, consume_output=True)
        self.assertTrue(self.client.finished(output))
        self.assertEqual(output[0].exit_code, 0)
        stdout = list(output[0].stdout)
        self.assertEqual(len(stdout), 0)
    def test_pssh_client_long_running_command_exit_codes_no_stdout(self):
        """join() without consuming keeps stdout readable afterwards."""
        expected_lines = 2
        output = self.client.run_command(self.long_cmd(expected_lines))
        self.assertEqual(len(output), len(self.client.hosts))
        self.assertIsNone(output[0].exit_code)
        self.assertFalse(self.client.finished(output))
        self.client.join(output)
        self.assertTrue(self.client.finished(output))
        self.assertEqual(output[0].exit_code, 0)
        stdout = list(output[0].stdout)
        self.assertEqual(expected_lines, len(stdout))
    def test_pssh_client_retries(self):
        """Test connection error retries"""
        # listen_port = self.make_random_port()
        expected_num_tries = 2
        # Invalid in-memory key data forces authentication failure on
        # every attempt.
        client = ParallelSSHClient([self.host], port=self.port,
                                   pkey=b"fake",
                                   num_retries=expected_num_tries,
                                   retry_delay=.1,
                                   )
        self.assertRaises(AuthenticationError, client.run_command, 'blah')
        try:
            client.run_command('blah')
        except AuthenticationError as ex:
            # Last two exception args are (max_tries, num_tries).
            max_tries = ex.args[-2:][0]
            num_tries = ex.args[-1:][0]
            self.assertEqual(expected_num_tries, max_tries)
            self.assertEqual(expected_num_tries, num_tries)
        else:
            raise Exception('No AuthenticationError')
    def test_sftp_exceptions(self):
        """SFTP copy against a dead port raises ConnectionErrorException with
        host/port in the exception args; no files are created."""
        # Port with no server listening on it on separate ip
        port = self.make_random_port(host=self.host)
        client = ParallelSSHClient([self.host], port=port, num_retries=1)
        _local = "fake_local"
        _remote = "fake_remote"
        cmds = client.copy_file(_local, _remote)
        client.pool.join()
        for cmd in cmds:
            try:
                cmd.get()
            except Exception as ex:
                self.assertEqual(ex.args[2], self.host)
                self.assertEqual(ex.args[3], port)
                self.assertIsInstance(ex, ConnectionErrorException)
            else:
                raise Exception("Expected ConnectionErrorException, got none")
        self.assertFalse(os.path.isfile(_local))
        self.assertFalse(os.path.isfile(_remote))
def test_pssh_copy_file(self):
"""Test parallel copy file"""
client = ParallelSSHClient([self.host], port=self.port,
pkey=self.user_key)
test_file_data = 'test'
local_filename = 'test_file'
remote_test_dir, remote_filepath = 'remote_test_dir', 'test_file_copy'
remote_filename = os.path.sep.join([remote_test_dir, remote_filepath])
remote_file_abspath = os.path.expanduser('~/' + remote_filename)
remote_test_dir_abspath = os.path.expanduser('~/' + remote_test_dir)
test_file = open(local_filename, 'w')
test_file.writelines([test_file_data + os.linesep])
test_file.close()
cmds = client.copy_file(local_filename, remote_filename)
cmds[0].get()
try:
self.assertTrue(os.path.isdir(remote_test_dir_abspath))
self.assertTrue(os.path.isfile(remote_file_abspath))
remote_file_data = open(remote_file_abspath, 'r').readlines()
self.assertEqual(remote_file_data[0].strip(), test_file_data)
except Exception:
raise
finally:
try:
os.unlink(remote_file_abspath)
except OSError:
pass
try:
shutil.rmtree(remote_test_dir_abspath)
except Exception:
pass
# No directory
remote_file_abspath = os.path.expanduser('~/' + remote_filepath)
cmds = client.copy_file(local_filename, remote_filepath)
try:
joinall(cmds, raise_error=True)
except Exception:
raise
finally:
for filepath in [local_filename, remote_file_abspath]:
os.unlink(filepath)
try:
os.unlink(remote_file_abspath)
except OSError:
pass
    def test_pssh_copy_file_per_host_args(self):
        """Test parallel copy file with per-host arguments.

        Starts two extra servers on distinct loopback IPs; each host gets
        its own local/remote filename via %(local_file)s / %(remote_file)s
        template substitution from copy_args. Also checks that fewer
        copy_args entries than hosts raises HostArgumentException.
        """
        host2, host3 = '127.0.0.6', '127.0.0.7'
        server2 = OpenSSHServer(host2, port=self.port)
        server3 = OpenSSHServer(host3, port=self.port)
        servers = [server2, server3]
        for server in servers:
            server.start_server()
        hosts = [self.host, host2, host3]
        local_file_prefix = 'test_file_'
        remote_file_prefix = 'test_remote_'
        # One substitution dict per host, 1-indexed filenames.
        copy_args = [dict(zip(('local_file', 'remote_file',),
                              (local_file_prefix + str(i + 1),
                               remote_file_prefix + str(i + 1),)
                              ))
                     for i, _ in enumerate(hosts)]
        test_file_data = 'test'
        for i, _ in enumerate(hosts):
            test_file = open(local_file_prefix + str(i + 1), 'w')
            test_file.writelines([test_file_data + os.linesep])
            test_file.close()
        client = ParallelSSHClient(hosts, port=self.port, pkey=self.user_key,
                                   num_retries=2)
        greenlets = client.copy_file('%(local_file)s', '%(remote_file)s',
                                     copy_args=copy_args)
        joinall(greenlets)
        # copy_args shorter than host list is an error.
        self.assertRaises(HostArgumentException, client.copy_file,
                          '%(local_file)s', '%(remote_file)s',
                          copy_args=[copy_args[0]])
        try:
            for i, _ in enumerate(hosts):
                remote_file_abspath = os.path.expanduser(
                    '~/' + remote_file_prefix + str(i + 1))
                self.assertTrue(os.path.isfile(remote_file_abspath))
                remote_file_data = open(
                    remote_file_abspath, 'r').readlines()
                self.assertEqual(
                    remote_file_data[0].strip(), test_file_data)
        except Exception:
            raise
        finally:
            # Remove both sides of every copied file.
            for i, _ in enumerate(hosts):
                remote_file_abspath = os.path.expanduser(
                    '~/' + remote_file_prefix + str(i + 1))
                local_file_path = local_file_prefix + str(i + 1)
                os.unlink(remote_file_abspath)
                os.unlink(local_file_path)
    def test_pssh_client_directory_abs_path(self):
        """Recursive copy_file of a nested local directory tree to an
        absolute remote path should recreate every sub-directory and file.
        """
        client = ParallelSSHClient([self.host], port=self.port,
                                   pkey=self.user_key)
        test_file_data = 'test'
        local_test_path = 'directory_test'
        remote_test_path = 'directory_test_copied'
        dir_name = os.path.dirname(__file__)
        remote_test_path_abs = os.sep.join((dir_name, remote_test_path))
        # Clean slate - remove leftovers from previous runs.
        for path in [local_test_path, remote_test_path_abs]:
            try:
                shutil.rmtree(path)
            except OSError:
                pass
        os.mkdir(local_test_path)
        remote_file_paths = []
        # Build ten files three directory levels deep.
        for i in range(0, 10):
            local_file_path_dir = os.path.join(
                local_test_path, 'sub_dir1', 'sub_dir2', 'dir_foo' + str(i))
            os.makedirs(local_file_path_dir)
            local_file_path = os.path.join(local_file_path_dir, 'foo' + str(i))
            remote_file_path = os.path.join(
                remote_test_path, 'sub_dir1', 'sub_dir2', 'dir_foo' + str(i), 'foo' + str(i))
            remote_file_paths.append(
                os.sep.join((os.path.dirname(__file__), remote_file_path)))
            test_file = open(local_file_path, 'w')
            test_file.write(test_file_data)
            test_file.close()
        cmds = client.copy_file(local_test_path, remote_test_path_abs, recurse=True)
        try:
            joinall(cmds, raise_error=True)
            # Every expected remote file must exist after the copy.
            for path in remote_file_paths:
                self.assertTrue(os.path.isfile(path))
        finally:
            for _path in (local_test_path, remote_test_path_abs):
                try:
                    shutil.rmtree(_path)
                except Exception:
                    pass
    def test_pssh_client_copy_file_failure(self):
        """Test failure scenarios of file copy.

        Makes the remote destination directory non-writable (mode 0o111)
        so recursive copy raises SFTPError, then checks directory-tree
        creation failure also raises SFTPError. Restores permissions
        before cleanup.
        """
        local_test_path = 'directory_test'
        remote_test_path = 'directory_test_copied'
        dir_name = os.path.dirname(__file__)
        remote_test_path_abs = os.sep.join((dir_name, remote_test_path))
        # Reset permissions on any leftovers so rmtree can succeed.
        for path in [local_test_path, remote_test_path_abs]:
            mask = 0o700
            if os.path.isdir(path):
                os.chmod(path, mask)
            for root, dirs, files in os.walk(path):
                os.chmod(root, mask)
                for _path in files + dirs:
                    os.chmod(os.path.join(root, _path), mask)
            try:
                shutil.rmtree(path)
            except OSError:
                pass
        os.mkdir(local_test_path)
        os.mkdir(remote_test_path_abs)
        local_file_path = os.path.join(local_test_path, 'test_file')
        remote_test_path_abs = os.sep.join((dir_name, remote_test_path))
        test_file = open(local_file_path, 'w')
        test_file.write('testing\n')
        test_file.close()
        # Permission errors on writing into dir - execute-only, no write.
        mask = 0o111
        os.chmod(remote_test_path_abs, mask)
        cmds = self.client.copy_file(local_test_path, remote_test_path_abs, recurse=True)
        try:
            joinall(cmds, raise_error=True)
            raise Exception("Expected SFTPError exception")
        except SFTPError:
            pass
        self.assertFalse(os.path.isfile(remote_test_path_abs))
        # Create directory tree failure test
        local_file_path = os.path.join(local_test_path, 'test_file')
        remote_test_path_abs = os.sep.join((dir_name, remote_test_path))
        cmds = self.client.copy_file(local_file_path, remote_test_path_abs, recurse=True)
        try:
            joinall(cmds, raise_error=True)
            raise Exception("Expected SFTPError exception on creating remote "
                            "directory")
        except SFTPError:
            pass
        self.assertFalse(os.path.isfile(remote_test_path_abs))
        # Restore write permission so cleanup can remove the directory.
        mask = 0o600
        os.chmod(remote_test_path_abs, mask)
        for path in [local_test_path, remote_test_path_abs]:
            try:
                shutil.rmtree(path)
            except Exception:
                pass
def test_pssh_copy_remote_file_failure(self):
cmds = self.client.copy_remote_file(
'fakey fakey fake fake', 'equally fake')
try:
cmds[0].get()
except Exception as ex:
self.assertEqual(ex.args[2], self.host)
self.assertIsInstance(ex, SFTPIOError)
else:
raise Exception("Expected SFTPIOError, got none")
    def test_pssh_copy_remote_file(self):
        """Test parallel copy file to local host.

        Builds a nested remote directory tree, then copies it locally
        three ways: non-recursive (must raise ValueError), recursive via
        absolute and relative remote paths (default '_' host suffix on
        the local directory), and recursive with a custom '.' suffix
        separator.
        """
        test_file_data = 'test'
        dir_name = os.path.dirname(__file__)
        local_test_path = os.sep.join((dir_name, 'directory_test_local_remote_copied'))
        remote_test_path = 'directory_test_remote_copy'
        remote_test_path_abs = os.sep.join((dir_name, remote_test_path))
        # Remote path relative to the user's home directory.
        remote_test_path_rel = os.sep.join(
            (dir_name.replace(os.path.expanduser('~') + os.sep, ''),
             remote_test_path))
        # Local destination dirs get a per-host suffix appended.
        local_copied_dir = '_'.join([local_test_path, self.host])
        new_local_copied_dir = '.'.join([local_test_path, self.host])
        # Remove leftovers from previous runs, file or directory.
        for path in [local_test_path, remote_test_path_abs, local_copied_dir,
                     new_local_copied_dir]:
            try:
                shutil.rmtree(path)
            except OSError:
                try:
                    os.unlink(path)
                except Exception:
                    pass
                pass
        os.mkdir(remote_test_path_abs)
        local_file_paths = []
        # Ten files, two directory levels deep on the remote side.
        for i in range(0, 10):
            remote_file_path_dir = os.path.join(
                remote_test_path_abs, 'sub_dir', 'dir_foo' + str(i))
            os.makedirs(remote_file_path_dir)
            remote_file_path = os.path.join(remote_file_path_dir, 'foo' + str(i))
            local_file_path = os.path.join(
                local_copied_dir, 'sub_dir', 'dir_foo' + str(i), 'foo' + str(i))
            local_file_paths.append(local_file_path)
            test_file = open(remote_file_path, 'w')
            test_file.write(test_file_data)
            test_file.close()
        # Copying a directory without recurse=True is an error.
        cmds = self.client.copy_remote_file(remote_test_path_abs, local_test_path)
        self.assertRaises(ValueError, joinall, cmds, raise_error=True)
        cmds = self.client.copy_remote_file(remote_test_path_abs, local_test_path,
                                            recurse=True)
        joinall(cmds, raise_error=True)
        try:
            self.assertTrue(os.path.isdir(local_copied_dir))
            for path in local_file_paths:
                self.assertTrue(os.path.isfile(path))
        except Exception:
            try:
                shutil.rmtree(remote_test_path_abs)
            except Exception:
                pass
            raise
        finally:
            try:
                shutil.rmtree(local_copied_dir)
            except Exception:
                pass
        # Relative path
        cmds = self.client.copy_remote_file(remote_test_path_rel, local_test_path,
                                            recurse=True)
        joinall(cmds, raise_error=True)
        try:
            self.assertTrue(os.path.isdir(local_copied_dir))
            for path in local_file_paths:
                self.assertTrue(os.path.isfile(path))
        finally:
            try:
                shutil.rmtree(local_copied_dir)
            except Exception:
                pass
        # Different suffix
        cmds = self.client.copy_remote_file(remote_test_path_abs, local_test_path,
                                            suffix_separator='.', recurse=True)
        joinall(cmds, raise_error=True)
        new_local_copied_dir = '.'.join([local_test_path, self.host])
        try:
            for path in local_file_paths:
                path = path.replace(local_copied_dir, new_local_copied_dir)
                self.assertTrue(os.path.isfile(path))
        finally:
            for _path in (new_local_copied_dir, remote_test_path_abs):
                try:
                    shutil.rmtree(_path)
                except Exception:
                    pass
    def test_pssh_copy_remote_file_per_host_args(self):
        """Test parallel remote copy file with per-host arguments.

        Mirror of test_pssh_copy_file_per_host_args in the remote-to-local
        direction: per-host filenames come from %(remote_file)s /
        %(local_file)s substitution out of copy_args; a short copy_args
        list raises HostArgumentException.
        """
        host2, host3 = '127.0.0.10', '127.0.0.11'
        server2 = OpenSSHServer(host2, port=self.port)
        server3 = OpenSSHServer(host3, port=self.port)
        servers = [server2, server3]
        for server in servers:
            server.start_server()
        hosts = [self.host, host2, host3]
        remote_file_prefix = 'test_file_'
        local_file_prefix = 'test_local_'
        # One substitution dict per host, 1-indexed filenames.
        copy_args = [dict(zip(('remote_file', 'local_file',),
                              (remote_file_prefix + str(i + 1),
                               local_file_prefix + str(i + 1),)
                              ))
                     for i, _ in enumerate(hosts)]
        test_file_data = 'test'
        for i, _ in enumerate(hosts):
            remote_file_abspath = os.path.expanduser(
                '~/' + remote_file_prefix + str(i + 1))
            test_file = open(remote_file_abspath, 'w')
            test_file.writelines([test_file_data + os.linesep])
            test_file.close()
        client = ParallelSSHClient(hosts, port=self.port, pkey=self.user_key,
                                   num_retries=2)
        greenlets = client.copy_remote_file('%(remote_file)s', '%(local_file)s',
                                            copy_args=copy_args)
        joinall(greenlets)
        # copy_args shorter than host list is an error.
        self.assertRaises(HostArgumentException, client.copy_remote_file,
                          '%(remote_file)s', '%(local_file)s',
                          copy_args=[copy_args[0]])
        try:
            for i, _ in enumerate(hosts):
                local_file_path = local_file_prefix + str(i + 1)
                self.assertTrue(os.path.isfile(local_file_path))
                local_file_data = open(local_file_path, 'r').readlines()
                self.assertEqual(local_file_data[0].strip(), test_file_data)
        except Exception:
            raise
        finally:
            # Remove both sides of every copied file; ignore missing ones.
            for i, _ in enumerate(hosts):
                remote_file_abspath = os.path.expanduser(
                    '~/' + remote_file_prefix + str(i + 1))
                local_file_path = local_file_prefix + str(i + 1)
                try:
                    os.unlink(remote_file_abspath)
                    os.unlink(local_file_path)
                except OSError:
                    pass
def test_pssh_pool_size(self):
"""Test setting pool size to non default values"""
hosts = ['host-%01d' % d for d in range(5)]
pool_size = 2
client = ParallelSSHClient(hosts, pool_size=pool_size)
expected, actual = pool_size, client.pool.size
self.assertEqual(expected, actual,
msg="Expected pool size to be %s, got %s" % (
expected, actual,))
hosts = ['host-%01d' % d for d in range(15)]
pool_size = 5
client = ParallelSSHClient(hosts, pool_size=pool_size)
expected, actual = client.pool_size, client.pool.size
self.assertEqual(expected, actual,
msg="Expected pool size to be %s, got %s" % (
expected, actual,))
hosts = ['host-%01d' % d for d in range(15)]
pool_size = len(hosts)+5
client = ParallelSSHClient(hosts, pool_size=pool_size)
expected, actual = pool_size, client.pool.size
self.assertEqual(expected, actual,
msg="Expected pool size to be %s, got %s" % (
expected, actual,))
    def test_pssh_hosts_more_than_pool_size(self):
        """Test we can successfully run on more hosts than our pool size and
        get logs for all hosts"""
        # Make a second server on the same port as the first one
        host2 = '127.0.0.2'
        server2 = OpenSSHServer(listen_ip=host2, port=self.port)
        server2.start_server()
        hosts = [self.host, host2]
        # pool_size=1 forces hosts to be processed sequentially.
        client = ParallelSSHClient(hosts,
                                   port=self.port,
                                   pkey=self.user_key,
                                   pool_size=1,
                                   )
        output = client.run_command(self.cmd)
        stdout = [list(host_out.stdout) for host_out in output]
        expected_stdout = [[self.resp] for _ in hosts]
        self.assertEqual(len(hosts), len(output),
                         msg="Did not get output from all hosts. Got output for "
                         "%s/%s hosts" % (len(output), len(hosts),))
        self.assertEqual(expected_stdout, stdout,
                         msg="Did not get expected output from all hosts. "
                         "Got %s - expected %s" % (stdout, expected_stdout,))
        del client
        server2.stop()
    def test_pssh_hosts_iterator_hosts_modification(self):
        """Test using iterator as host list and modifying host list in place"""
        host2, host3 = '127.0.0.2', '127.0.0.3'
        server2 = OpenSSHServer(listen_ip=host2, port=self.port)
        server3 = OpenSSHServer(listen_ip=host3, port=self.port)
        for _server in (server2, server3):
            _server.start_server()
        hosts = [self.host, '127.0.0.2']
        # Hosts supplied as an iterator, not a list.
        client = ParallelSSHClient(iter(hosts),
                                   port=self.port,
                                   pkey=self.user_key,
                                   pool_size=1,
                                   )
        output = client.run_command(self.cmd)
        stdout = [list(host_out.stdout) for host_out in output]
        expected_stdout = [[self.resp], [self.resp]]
        self.assertListEqual(stdout, expected_stdout)
        self.assertEqual(len(hosts), len(output),
                         msg="Did not get output from all hosts. Got output for "
                         "%s/%s hosts" % (len(output), len(hosts),))
        # Run again without re-assigning host list, should run the same
        output = client.run_command(self.cmd)
        self.assertEqual(len(output), len(hosts))
        # Re-assigning host list with new hosts should also work
        hosts = ['127.0.0.2', '127.0.0.3']
        client.hosts = iter(hosts)
        output = client.run_command(self.cmd)
        self.assertEqual(len(hosts), len(output),
                         msg="Did not get output from all hosts. Got output for "
                         "%s/%s hosts" % (len(output), len(hosts),))
        self.assertEqual(output[1].host, hosts[1],
                         msg="Did not get output for new host %s" % (hosts[1],))
        server2.stop()
        server3.stop()
def test_bash_variable_substitution(self):
"""Test bash variables work correctly"""
command = """for i in 1 2 3; do echo $i; done"""
host_output = self.client.run_command(command)[0]
output = list(host_output.stdout)
expected = ['1', '2', '3']
self.assertListEqual(output, expected)
def test_identical_host_output(self):
"""Test that we get output when running with duplicated hosts"""
# Make port with no server listening on it just for testing output
port = self.make_random_port()
hosts = [self.host, self.host, self.host]
client = ParallelSSHClient(hosts, port=port,
pkey=self.user_key,
num_retries=1)
output = client.run_command(self.cmd, stop_on_errors=False)
client.join(output)
self.assertEqual(len(hosts), len(output))
    def test_identical_hosts_in_host_list(self):
        """Test that we can handle identical hosts in host list"""
        host2 = '127.0.0.2'
        hosts = [self.host, host2, self.host, self.host]
        _server2 = OpenSSHServer(listen_ip=host2, port=self.port)
        _server2.start_server()
        client = ParallelSSHClient(hosts, port=self.port,
                                   pkey=self.user_key,
                                   num_retries=1)
        output = client.run_command(self.cmd, stop_on_errors=False)
        client.join(output)
        self.assertEqual(len(hosts), len(output),
                         msg="Host list contains %s identical hosts, only got output for %s" % (
                             len(hosts), len(output)))
        # Duplicated hosts are keyed by (position, host) so each gets its
        # own single client.
        for host_i, host in enumerate(hosts):
            single_client = client._host_clients[(host_i, host)]
            self.assertEqual(single_client.host, host)
        expected_stdout = [self.resp]
        for host_out in output:
            _host_stdout = list(host_out.stdout)
            self.assertListEqual(_host_stdout, expected_stdout)
def test_connection_error(self):
"""Test that we get connection error exception in output with correct arguments"""
# Make port with no server listening on it on separate ip
host = '127.0.0.3'
port = self.make_random_port(host=host)
hosts = [host]
client = ParallelSSHClient(hosts, port=port,
pkey=self.user_key,
num_retries=1)
output = client.run_command(self.cmd, stop_on_errors=False)
client.join(output)
self.assertIsNotNone(output[0].exception)
for host_output in output:
exit_code = host_output.exit_code
self.assertEqual(exit_code, None)
self.assertIsInstance(output[0].exception, ConnectionError)
def test_bad_pkey_path(self):
self.assertRaises(PKeyFileError, ParallelSSHClient, [self.host], port=self.port,
pkey='A REALLY FAKE KEY',
num_retries=1)
def test_multiple_single_quotes_in_cmd(self):
"""Test that we can run a command with multiple single quotes"""
output = self.client.run_command("echo 'me' 'and me'")
stdout = list(output[0].stdout)
expected = 'me and me'
self.assertTrue(len(stdout) == 1,
msg="Got incorrect number of lines in output - %s" % (stdout,))
self.assertEqual(output[0].exit_code, 0)
self.assertEqual(expected, stdout[0],
msg="Got unexpected output. Expected %s, got %s" % (
expected, stdout[0],))
def test_backtics_in_cmd(self):
"""Test running command with backtics in it"""
output = self.client.run_command("out=`ls` && echo $out")
self.client.join(output, consume_output=True)
self.assertEqual(output[0].exit_code, 0)
def test_multiple_shell_commands(self):
"""Test running multiple shell commands in one go"""
output = self.client.run_command("echo me; echo and; echo me")
stdout = list(output[0].stdout)
expected = ["me", "and", "me"]
self.assertEqual(output[0].exit_code, 0)
self.assertEqual(expected, stdout,
msg="Got unexpected output. Expected %s, got %s" % (
expected, stdout,))
def test_escaped_quotes(self):
"""Test escaped quotes in shell variable are handled correctly"""
output = self.client.run_command('t="--flags=\\"this\\""; echo $t')
stdout = list(output[0].stdout)
expected = ['--flags="this"']
self.assertEqual(output[0].exit_code, 0)
self.assertEqual(expected, stdout,
msg="Got unexpected output. Expected %s, got %s" % (
expected, stdout,))
    def test_host_config(self):
        """Test per-host configuration functionality of ParallelSSHClient.

        Each host gets its own port, user, password, private key and alias
        via a HostConfig entry; the second host is given a bogus key so
        its run fails with PKeyFileError while the others succeed.
        """
        hosts = [('127.0.0.%01d' % n, self.make_random_port())
                 for n in range(1, 3)]
        host_config = [HostConfig() for _ in hosts]
        servers = []
        password = 'overriden_pass'
        fake_key = 'FAKE KEY'
        aliases = [f"alias for host {host_i}" for host_i, _ in enumerate(hosts)]
        for host_i, (host, port) in enumerate(hosts):
            server = OpenSSHServer(listen_ip=host, port=port)
            server.start_server()
            host_config[host_i].port = port
            host_config[host_i].user = self.user
            host_config[host_i].password = password
            host_config[host_i].private_key = self.user_key
            host_config[host_i].alias = aliases[host_i]
            servers.append(server)
        # Second host deliberately gets an invalid key path.
        host_config[1].private_key = fake_key
        client = ParallelSSHClient([h for h, _ in hosts],
                                   host_config=host_config,
                                   num_retries=1)
        output = client.run_command(self.cmd, stop_on_errors=False)
        client.join(output)
        self.assertEqual(len(hosts), len(output))
        # The bad-key host must carry PKeyFileError and no exit code.
        try:
            raise output[1].exception
        except PKeyFileError:
            self.assertEqual(output[1].host, hosts[1][0])
        self.assertTrue(output[1].exit_code is None,
                        msg="Execution failed on host %s" % (hosts[1][0],))
        # Per-host overrides must be visible on the single host client;
        # pkey is stored as the key file's bytes.
        self.assertEqual(client._host_clients[0, hosts[0][0]].user, self.user)
        self.assertEqual(client._host_clients[0, hosts[0][0]].password, password)
        self.assertEqual(client._host_clients[0, hosts[0][0]].pkey, open(os.path.abspath(self.user_key), 'rb').read())
        self.assertEqual(set(aliases), set([client.alias for client in output]))
        for server in servers:
            server.stop()
def test_host_config_bad_entries(self):
hosts = ['localhost', 'localhost']
host_config = [HostConfig()]
self.assertRaises(ValueError, ParallelSSHClient, hosts, host_config=host_config)
self.assertRaises(ValueError, ParallelSSHClient, iter(hosts), host_config=host_config)
def test_pssh_client_override_allow_agent_authentication(self):
"""Test running command with allow_agent set to False"""
client = ParallelSSHClient([self.host],
port=self.port,
allow_agent=False,
pkey=self.user_key)
output = client.run_command(self.cmd)
expected_exit_code = 0
expected_stdout = [self.resp]
expected_stderr = []
stdout = list(output[0].stdout)
stderr = list(output[0].stderr)
exit_code = output[0].exit_code
self.assertEqual(expected_exit_code, exit_code,
msg="Got unexpected exit code - %s, expected %s" %
(exit_code,
expected_exit_code,))
self.assertEqual(expected_stdout, stdout,
msg="Got unexpected stdout - %s, expected %s" %
(stdout,
expected_stdout,))
self.assertEqual(expected_stderr, stderr,
msg="Got unexpected stderr - %s, expected %s" %
(stderr,
expected_stderr,))
    def test_per_host_tuple_args(self):
        """Per-host positional arguments: one %-substitution per host from
        a tuple of values, then tuples of values for multi-%s commands.
        Short host_args raises HostArgumentException; wrong arity raises
        TypeError.
        """
        host2, host3 = '127.0.0.2', '127.0.0.3'
        server2 = OpenSSHServer(host2, port=self.port)
        server3 = OpenSSHServer(host3, port=self.port)
        servers = [server2, server3]
        for server in servers:
            server.start_server()
        hosts = [self.host, host2, host3]
        host_args = ('arg1', 'arg2', 'arg3')
        cmd = 'echo %s'
        client = ParallelSSHClient(hosts, port=self.port,
                                   pkey=self.user_key,
                                   num_retries=2,
                                   retry_delay=.2,
                                   )
        output = client.run_command(cmd, host_args=host_args)
        client.join()
        # Each host echoes exactly its own argument.
        for i, host in enumerate(hosts):
            expected = [host_args[i]]
            stdout = list(output[i].stdout)
            self.assertEqual(expected, stdout)
            self.assertEqual(output[i].exit_code, 0)
        # Tuple-per-host for commands with multiple %s placeholders.
        host_args = (('arg1', 'arg2'), ('arg3', 'arg4'), ('arg5', 'arg6'),)
        cmd = 'echo %s %s'
        output = client.run_command(cmd, host_args=host_args)
        client.join()
        for i, host in enumerate(hosts):
            expected = ["%s %s" % host_args[i]]
            stdout = list(output[i].stdout)
            self.assertEqual(expected, stdout)
            self.assertEqual(output[i].exit_code, 0)
        self.assertRaises(HostArgumentException, client.run_command,
                          cmd, host_args=[host_args[0]])
        # Invalid number of args
        host_args = (('arg1', ),)
        self.assertRaises(
            TypeError, client.run_command, cmd, host_args=host_args)
        for server in servers:
            server.stop()
    def test_per_host_dict_args(self):
        """Per-host dict arguments with %(name)s substitution, for both
        list and generator host lists; short host_args raises
        HostArgumentException.
        """
        host2, host3 = '127.0.0.2', '127.0.0.3'
        server2 = OpenSSHServer(host2, port=self.port)
        server3 = OpenSSHServer(host3, port=self.port)
        servers = [server2, server3]
        for server in servers:
            server.start_server()
        hosts = [self.host, host2, host3]
        hosts_gen = (h for h in hosts)
        # One substitution dict per host.
        host_args = [dict(zip(('host_arg1', 'host_arg2',),
                              ('arg1-%s' % (i,), 'arg2-%s' % (i,),)))
                     for i, _ in enumerate(hosts)]
        cmd = 'echo %(host_arg1)s %(host_arg2)s'
        client = ParallelSSHClient(hosts, port=self.port,
                                   pkey=self.user_key,
                                   num_retries=1)
        output = client.run_command(cmd, host_args=host_args)
        for i, host in enumerate(hosts):
            expected = ["%(host_arg1)s %(host_arg2)s" % host_args[i]]
            stdout = list(output[i].stdout)
            self.assertEqual(expected, stdout)
            self.assertEqual(output[i].exit_code, 0)
        self.assertRaises(HostArgumentException, client.run_command,
                          cmd, host_args=[host_args[0]])
        # Host list generator should work also
        client.hosts = hosts_gen
        output = client.run_command(cmd, host_args=host_args)
        for i, host in enumerate(hosts):
            expected = ["%(host_arg1)s %(host_arg2)s" % host_args[i]]
            stdout = list(output[i].stdout)
            self.assertEqual(expected, stdout)
            self.assertEqual(output[i].exit_code, 0)
        # Fresh generator needed - the previous one is exhausted.
        client.hosts = (h for h in hosts)
        self.assertRaises(HostArgumentException, client.run_command,
                          cmd, host_args=[host_args[0]])
def test_per_host_dict_args_invalid(self):
cmd = 'echo %(host_arg1)s %(host_arg2)s'
# Invalid number of host args
host_args = [{'host_arg1': 'arg1'}]
self.assertRaises(
KeyError, self.client.run_command, cmd, host_args=host_args)
def test_run_command_encoding(self):
"""Test that unicode command works"""
exp = b"\xbc"
_cmd = b"echo " + exp
cmd = _cmd.decode('latin-1')
expected = [exp.decode('latin-1')]
output = self.client.run_command(cmd, encoding='latin-1')
stdout = list(output[0].stdout)
self.assertEqual(expected, stdout)
# With join
output = self.client.run_command(cmd, encoding='latin-1')
self.client.join(output)
stdout = list(output[0].stdout)
self.assertEqual(expected, stdout)
def test_shell_encoding(self):
exp = b"\xbc"
_cmd = b"echo " + exp
cmd = _cmd.decode('latin-1')
expected = [exp.decode('latin-1')]
shells = self.client.open_shell(encoding='latin-1')
self.client.run_shell_commands(shells, cmd)
self.client.join_shells(shells)
stdout = list(shells[0].stdout)
self.assertEqual(expected, stdout)
    def test_pty(self):
        """Without a PTY stderr output stays on stderr; with use_pty=True
        stdout and stderr are combined into stdout.
        """
        cmd = "echo 'asdf' >&2"
        expected_stderr = ['asdf']
        output = self.client.run_command(cmd)
        self.client.join(output)
        stdout = list(output[0].stdout)
        stderr = list(output[0].stderr)
        exit_code = output[0].exit_code
        self.assertEqual([], stdout)
        self.assertEqual(expected_stderr, stderr)
        self.assertTrue(exit_code == 0)
        output = self.client.run_command(cmd, use_pty=True)
        stdout = list(output[0].stdout)
        stderr = list(output[0].stderr)
        exit_code = output[0].exit_code
        expected_stdout = []
        # With a PTY, stdout and stderr are combined into stdout
        self.assertEqual(expected_stderr, stdout)
        self.assertEqual([], stderr)
        self.assertTrue(exit_code == 0)
def test_output_attributes(self):
output = self.client.run_command(self.cmd)
self.client.join(output)
self.assertTrue(hasattr(output[0], 'host'))
self.assertTrue(hasattr(output[0], 'channel'))
self.assertTrue(hasattr(output[0], 'stdout'))
self.assertTrue(hasattr(output[0], 'stderr'))
self.assertTrue(hasattr(output[0], 'stdin'))
self.assertTrue(hasattr(output[0], 'exception'))
self.assertTrue(hasattr(output[0], 'exit_code'))
def test_run_command_user_sudo(self):
user = 'fakey_fake_user'
output = self.client.run_command(self.cmd, user=user)
self.client.join(output)
stderr = list(output[0].stderr)
self.assertTrue(len(stderr) > 0)
self.assertEqual(output[0].exit_code, 1)
def test_run_command_shell(self):
output = self.client.run_command(self.cmd, shell="bash -c")
self.client.join(output)
stdout = list(output[0].stdout)
self.assertEqual(stdout, [self.resp])
def test_run_command_shell_sudo(self):
output = self.client.run_command(self.cmd,
shell="bash -c",
sudo=True)
self.assertEqual(len(output), len(self.client.hosts))
self.assertTrue(output[0].channel is not None)
def test_run_command_sudo(self):
output = self.client.run_command(self.cmd, sudo=True)
self.assertEqual(len(output), len(self.client.hosts))
self.assertTrue(output[0].channel is not None)
def test_conn_failure(self):
"""Test connection error failure case - ConnectionErrorException"""
client = ParallelSSHClient(['127.0.0.100'], port=self.port,
num_retries=0)
self.assertRaises(ConnectionErrorException,
client.run_command, self.cmd)
def test_retries(self):
client = ParallelSSHClient(['127.0.0.100'], port=self.port,
num_retries=2, retry_delay=.1)
self.assertRaises(ConnectionErrorException, client.run_command, self.cmd)
host = ''.join([random.choice(string.ascii_letters) for n in range(8)])
client.hosts = [host]
self.assertRaises(UnknownHostException, client.run_command, self.cmd)
    def test_setting_hosts(self):
        """Re-assigning client.hosts must reuse single-host clients for
        unchanged hosts, key clients by (position, host), drop clients
        when the host set changes, and reject None / non-list values.
        """
        host2 = '127.0.0.3'
        server2 = OpenSSHServer(host2, port=self.port)
        server2.start_server()
        client = ParallelSSHClient(
            [self.host], port=self.port,
            num_retries=1, retry_delay=1,
            pkey=self.user_key,
        )
        joinall(client.connect_auth())
        _client = list(client._host_clients.values())[0]
        client.hosts = [self.host]
        joinall(client.connect_auth())
        try:
            self.assertEqual(len(client._host_clients), 1)
            # Same host list - the underlying client object is reused.
            _client_after = list(client._host_clients.values())[0]
            self.assertEqual(id(_client), id(_client_after))
            # New host list invalidates existing clients until next auth.
            client.hosts = ['127.0.0.2', self.host, self.host]
            self.assertEqual(len(client._host_clients), 0)
            joinall(client.connect_auth())
            self.assertEqual(len(client._host_clients), 2)
            client.hosts = ['127.0.0.2', self.host, self.host]
            self.assertListEqual([(1, self.host), (2, self.host)],
                                 sorted(list(client._host_clients.keys())))
            self.assertEqual(len(client._host_clients), 2)
            hosts = [self.host, self.host, host2]
            client.hosts = hosts
            joinall(client.connect_auth())
            self.assertListEqual([(0, self.host), (1, self.host), (2, host2)],
                                 sorted(list(client._host_clients.keys())))
            self.assertEqual(len(client._host_clients), 3)
            # Same hosts, different order - positions are re-keyed.
            hosts = [host2, self.host, self.host]
            client.hosts = hosts
            joinall(client.connect_auth())
            self.assertListEqual([(0, host2), (1, self.host), (2, self.host)],
                                 sorted(list(client._host_clients.keys())))
            self.assertEqual(len(client._host_clients), 3)
            client.hosts = [self.host]
            self.assertEqual(len(client._host_clients), 0)
            joinall(client.connect_auth())
            self.assertEqual(len(client._host_clients), 1)
            client.hosts = [self.host, host2]
            joinall(client.connect_auth())
            self.assertListEqual([(0, self.host), (1, host2)],
                                 sorted(list(client._host_clients.keys())))
            self.assertEqual(len(client._host_clients), 2)
            # None is rejected with ValueError.
            try:
                client.hosts = None
            except ValueError:
                pass
            else:
                raise AssertionError
            # A plain string is rejected with TypeError.
            try:
                client.hosts = ''
            except TypeError:
                pass
            else:
                raise AssertionError
        finally:
            server2.stop()
def test_unknown_host_failure(self):
"""Test connection error failure case - ConnectionErrorException"""
host = ''.join([random.choice(string.ascii_letters) for n in range(8)])
client = ParallelSSHClient([host], port=self.port,
num_retries=1)
self.assertRaises(UnknownHostException, client.run_command, self.cmd)
def test_invalid_host_out(self):
output = {'blah': None}
self.assertRaises(ValueError, self.client.join, output)
    def test_join_timeout(self):
        """join() with too short a timeout raises Timeout whose args carry
        finished and unfinished output lists; a longer join then completes
        and output remains readable.
        """
        client = ParallelSSHClient([self.host], port=self.port,
                                   pkey=self.user_key)
        output = client.run_command('echo me; sleep .5')
        try:
            client.join(output, timeout=.1)
        except Timeout as ex:
            # args[2] is finished output, args[3] unfinished output.
            self.assertEqual(len(ex.args), 4)
            self.assertTrue(isinstance(ex.args[2], list))
            self.assertTrue(isinstance(ex.args[3], list))
        else:
            raise Exception("Expected timeout")
        self.assertFalse(output[0].channel.eof())
        client.join(output, timeout=2, consume_output=False)
        self.assertTrue(output[0].channel.eof())
        self.assertTrue(client.finished(output))
        stdout = list(output[0].stdout)
        self.assertListEqual(stdout, [self.resp])
    def test_join_timeout_subset_read(self):
        """When only some hosts finish before the join timeout, the
        Timeout exception splits output into finished (args[2]) and
        unfinished (args[3]) lists, each independently readable/joinable.
        """
        hosts = [self.host, self.host]
        cmd = 'sleep %(i)s; echo %(i)s'
        # First host finishes within .2s, second does not.
        host_args = [{'i': '0.1'},
                     {'i': '0.25'},
                     ]
        client = ParallelSSHClient(hosts, port=self.port,
                                   pkey=self.user_key)
        output = client.run_command(cmd, host_args=host_args)
        try:
            client.join(output, timeout=.2)
        except Timeout as ex:
            finished_output = ex.args[2]
            unfinished_output = ex.args[3]
        else:
            raise Exception("Expected timeout")
        self.assertEqual(len(finished_output), 1)
        self.assertEqual(len(unfinished_output), 1)
        finished_stdout = list(finished_output[0].stdout)
        self.assertEqual(finished_stdout, ['0.1'])
        # Should not timeout
        client.join(unfinished_output, timeout=2)
        rest_stdout = list(unfinished_output[0].stdout)
        self.assertEqual(rest_stdout, ['0.25'])
def test_join_timeout_set_no_timeout(self):
client = ParallelSSHClient([self.host], port=self.port,
pkey=self.user_key)
output = client.run_command('sleep .1')
client.join(output, timeout=.5)
self.assertTrue(client.finished(output))
    def test_read_timeout(self):
        """Reading stdout with read_timeout shorter than command runtime
        raises Timeout; after join(), the full output is available.
        """
        client = ParallelSSHClient([self.host], port=self.port,
                                   pkey=self.user_key)
        output = client.run_command('sleep .3; echo me; echo me; echo me', read_timeout=.2)
        for host_out in output:
            self.assertRaises(Timeout, list, host_out.stdout)
        self.assertFalse(client.finished(output))
        client.join(output)
        for host_out in output:
            stdout = list(host_out.stdout)
            self.assertEqual(len(stdout), 3)
        self.assertTrue(client.finished(output))
def test_finished_no_run_command(self):
client = ParallelSSHClient([self.host], port=self.port,
pkey=self.user_key, num_retries=1)
client.join()
self.assertTrue(client.finished())
    def test_partial_read_timeout_close_cmd(self):
        """Partial reads of an endless command with read_timeout, then
        closing the channel, must leave remaining buffered output readable
        without further timeouts.
        """
        client = ParallelSSHClient([self.host], port=self.port,
                                   pkey=self.user_key, num_retries=1)
        self.assertTrue(client.finished())
        # Endless output generator; PTY so closing the channel kills it.
        output = client.run_command('while true; do echo a line; sleep .01; done',
                                    use_pty=True, read_timeout=.2)
        stdout = []
        try:
            with GTimeout(seconds=.3):
                for line in output[0].stdout:
                    stdout.append(line)
        except Timeout:
            pass
        self.assertTrue(len(stdout) > 0)
        # Allow some more output to be generated
        sleep(.1)
        output[0].client.close_channel(output[0].channel)
        client.join(output)
        # Should not timeout
        with GTimeout(seconds=.5):
            stdout = list(output[0].stdout)
        self.assertTrue(len(stdout) > 0)
    def test_partial_read_timeout_join_no_output(self):
        """Joins and partial reads on an endless command: join times out,
        get_last_output remains readable in slices via external GTimeout
        or per-output read_timeout, and closing the channel finishes the
        run with output intact.
        """
        client = ParallelSSHClient([self.host], port=self.port,
                                   pkey=self.user_key, num_retries=1)
        self.assertTrue(client.finished())
        # Endless output generator; output object not kept from run_command.
        client.run_command('while true; do echo a line; sleep .01; done')
        try:
            with GTimeout(seconds=.1):
                client.join()
        except GTimeout:
            pass
        else:
            raise Exception("Should have timed out")
        output = client.get_last_output()
        stdout = []
        # Partial read bounded by an external gevent timeout.
        try:
            with GTimeout(seconds=.1):
                for line in output[0].stdout:
                    stdout.append(line)
        except GTimeout:
            pass
        else:
            raise Exception("Should have timed out")
        self.assertTrue(len(stdout) > 0)
        self.assertRaises(Timeout, client.join, timeout=.1)
        stdout = []
        try:
            with GTimeout(seconds=.2):
                for line in output[0].stdout:
                    stdout.append(line)
        except GTimeout:
            pass
        else:
            raise Exception("Should have timed out")
        self.assertTrue(len(stdout) > 0)
        # Setting timeout - per-output read_timeout raises Timeout itself.
        output[0].read_timeout = .2
        stdout = []
        try:
            for line in output[0].stdout:
                stdout.append(line)
        except Timeout:
            pass
        else:
            raise Exception("Should have timed out")
        self.assertTrue(len(stdout) > 0)
        # Allow some more output to be generated
        sleep(.1)
        output[0].client.close_channel(output[0].channel)
        client.join()
        self.assertTrue(client.finished())
        stdout = list(output[0].stdout)
        self.assertTrue(len(stdout) > 0)
    def test_timeout_file_read(self):
        """tail -f on a file never finishes, so both join() and stdout
        iteration must raise Timeout with read_timeout set.
        """
        dir_name = os.path.dirname(__file__)
        _file = os.sep.join((dir_name, 'file_to_read'))
        contents = [b'a line\n' for _ in range(50)]
        with open(_file, 'wb') as fh:
            fh.writelines(contents)
        try:
            # tail -f blocks forever; PTY so the process dies with the channel.
            output = self.client.run_command('tail -f %s' % (_file,),
                                             use_pty=True,
                                             read_timeout=.1)
            self.assertRaises(Timeout, self.client.join, output, timeout=.1)
            for host_out in output:
                try:
                    for line in host_out.stdout:
                        pass
                except Timeout:
                    pass
                else:
                    raise Exception("Timeout should have been raised")
            self.assertRaises(Timeout, self.client.join, output, timeout=.1)
        finally:
            os.unlink(_file)
def test_file_read_no_timeout(self):
dir_name = os.path.dirname(__file__)
_file = os.sep.join((dir_name, 'file_to_read'))
contents = [b'a line\n' for _ in range(1000)]
with open(_file, 'wb') as fh:
fh.writelines(contents)
try:
output = self.client.run_command('cat %s' % (_file,), read_timeout=10)
_out = list(output[0].stdout)
finally:
os.unlink(_file)
_contents = [c.decode('utf-8').strip() for c in contents]
self.assertEqual(len(contents), len(_out))
self.assertListEqual(_contents, _out)
    def test_scp_send_dir(self):
        """
        Attempting to copy into a non-existent remote directory via scp_send()
        without recurse=True should raise an SCPError.
        """
        test_file_data = 'test'
        local_filename = 'test_file'
        remote_test_dir, remote_filepath = 'remote_test_dir', 'test_file_copy'
        with open(local_filename, 'w') as file_h:
            file_h.writelines([test_file_data + os.linesep])
        remote_filename = os.path.sep.join([remote_test_dir, remote_filepath])
        # NOTE(review): remote_file_abspath is computed but unused here -
        # presumably kept in parallel with test_scp_send_dir_recurse.
        remote_file_abspath = os.path.expanduser('~/' + remote_filename)
        remote_test_dir_abspath = os.path.expanduser('~/' + remote_test_dir)
        try:
            cmds = self.client.scp_send(local_filename, remote_filename)
            joinall(cmds, raise_error=True)
        except Exception as ex:
            # Missing remote dir with recurse=False -> SCPError expected.
            self.assertIsInstance(ex, SCPError)
        finally:
            # Best-effort cleanup; "remote" paths are local since tests run
            # against localhost.
            try:
                os.unlink(local_filename)
            except OSError:
                pass
            try:
                shutil.rmtree(remote_test_dir_abspath)
            except OSError:
                pass
    def test_scp_send_dir_recurse(self):
        """scp_send with recurse=True creates missing remote directories and
        copies the file contents intact."""
        test_file_data = 'test'
        local_filename = 'test_file'
        remote_test_dir, remote_filepath = 'remote_test_dir', 'test_file_copy'
        with open(local_filename, 'w') as file_h:
            file_h.writelines([test_file_data + os.linesep])
        remote_filename = os.path.sep.join([remote_test_dir, remote_filepath])
        remote_file_abspath = os.path.expanduser('~/' + remote_filename)
        remote_test_dir_abspath = os.path.expanduser('~/' + remote_test_dir)
        try:
            cmds = self.client.scp_send(local_filename, remote_filename, recurse=True)
            joinall(cmds, raise_error=True)
            self.assertTrue(os.path.isdir(remote_test_dir_abspath))
            self.assertTrue(os.path.isfile(remote_file_abspath))
            # Give the copy a moment to settle before reading contents back.
            sleep(.1)
            remote_file_data = open(remote_file_abspath, 'r').read()
            self.assertEqual(remote_file_data.strip(), test_file_data)
        except Exception:
            raise
        finally:
            # Best-effort cleanup of local and "remote" (localhost) artifacts.
            try:
                os.unlink(local_filename)
            except OSError:
                pass
            try:
                shutil.rmtree(remote_test_dir_abspath)
            except OSError:
                pass
    def _scp_larger_files(self, hosts):
        """Send a ~1MB random file to each host under a per-host remote name
        via copy_args, then verify each remote copy's SHA-256 against the
        source digest. Cleans up local and remote files afterwards."""
        client = ParallelSSHClient(
            hosts, port=self.port, pkey=self.user_key, num_retries=1, timeout=30,
            pool_size=len(hosts),
        )
        local_filename = 'test_file'
        remote_filepath = 'file_copy'
        copy_args = [{
            'local_file': local_filename,
            'remote_file': 'host_%s_%s' % (n, remote_filepath)}
            for n in range(len(hosts))]
        remote_file_names = [arg['remote_file'] for arg in copy_args]
        sha = sha256()
        # 1000 x 1KB of random data, hashed as it is written.
        with open(local_filename, 'wb') as file_h:
            for _ in range(1000):
                data = os.urandom(1024)
                file_h.write(data)
                sha.update(data)
        source_file_sha = sha.hexdigest()
        # %(..)s templates are filled per-host from copy_args.
        cmds = client.scp_send('%(local_file)s', '%(remote_file)s', copy_args=copy_args)
        try:
            joinall(cmds, raise_error=True)
        except Exception:
            raise
        else:
            # Drop the client before verifying so its connections are released.
            del client
            for remote_file_name in remote_file_names:
                sha = sha256()
                remote_file_abspath = os.path.expanduser('~/' + remote_file_name)
                self.assertTrue(os.path.isfile(remote_file_abspath))
                with open(remote_file_abspath, 'rb') as remote_fh:
                    data = remote_fh.read(10240)
                    while data:
                        sha.update(data)
                        data = remote_fh.read(10240)
                    sha.update(data)
                remote_file_sha = sha.hexdigest()
                self.assertEqual(source_file_sha, remote_file_sha)
        finally:
            # Best-effort cleanup of source and copied files.
            try:
                os.unlink(local_filename)
                for remote_file_name in remote_file_names:
                    remote_file_abspath = os.path.expanduser('~/' + remote_file_name)
                    os.unlink(remote_file_abspath)
            except OSError:
                pass
    def test_scp_send_larger_files(self):
        """Stress scp_send over 20 iterations against two extra local servers
        on loopback aliases 127.0.0.11 / 127.0.0.12.

        NOTE(review): the servers started here are never stopped in this
        method - presumably cleaned up by test teardown or process exit;
        confirm against the test base class.
        """
        hosts = ['127.0.0.1%s' % (i,) for i in range(1, 3)]
        servers = [OpenSSHServer(host, port=self.port) for host in hosts]
        for server in servers:
            server.start_server()
        for _ in range(20):
            self._scp_larger_files(hosts)
def test_scp_bad_copy_args(self):
client = ParallelSSHClient([self.host, self.host])
copy_args = [{'local_file': 'fake_file', 'remote_file': 'fake_remote_file'}]
self.assertRaises(HostArgumentException,
client.scp_send, '%(local_file)s', '%(remote_file)s',
copy_args=copy_args)
self.assertRaises(HostArgumentError,
client.scp_recv, '%(local_file)s', '%(remote_file)s',
copy_args=copy_args)
self.assertFalse(os.path.isfile('fake_file'))
self.assertFalse(os.path.isfile('fake_remote_file'))
def test_scp_send_exc(self):
client = ParallelSSHClient([self.host], pkey=self.user_key, num_retries=1)
def _scp_send(*args):
raise Exception
def _client_send(*args):
return client._handle_greenlet_exc(_scp_send, 'fake')
client._scp_send = _client_send
cmds = client.scp_send('local_file', 'remote_file')
self.assertRaises(Exception, joinall, cmds, raise_error=True)
def test_scp_recv_exc(self):
client = ParallelSSHClient([self.host], pkey=self.user_key, num_retries=1)
def _scp_recv(*args):
raise Exception
def _client_recv(*args):
return client._handle_greenlet_exc(_scp_recv, 'fake')
client._scp_recv = _client_recv
cmds = client.scp_recv('remote_file', 'local_file')
self.assertRaises(Exception, joinall, cmds, raise_error=True)
def test_scp_recv_failure(self):
cmds = self.client.scp_recv(
'fakey fakey fake fake', 'equally fake')
try:
joinall(cmds, raise_error=True)
except Exception as ex:
self.assertEqual(ex.args[2], self.host)
self.assertIsInstance(ex, SCPError)
else:
raise Exception("Expected SCPError, got none")
    def test_scp_recv(self):
        """Recursive scp_recv of a directory tree, with both absolute and
        relative remote paths; a directory without recurse=True must fail."""
        test_file_data = 'test'
        dir_name = os.path.dirname(__file__)
        local_test_path = os.sep.join((dir_name, 'directory_test_local_remote_copied'))
        remote_test_path = 'directory_test_remote_copy'
        remote_test_path_abs = os.sep.join((dir_name, remote_test_path))
        # Remote path relative to the user's home directory.
        remote_test_path_rel = os.sep.join(
            (dir_name.replace(os.path.expanduser('~') + os.sep, ''),
             remote_test_path))
        local_copied_dir = '_'.join([local_test_path, self.host])
        new_local_copied_dir = '.'.join([local_test_path, self.host])
        # Clean up leftovers from previous runs - dir or plain file.
        for path in [local_test_path, remote_test_path_abs, local_copied_dir,
                     new_local_copied_dir]:
            try:
                shutil.rmtree(path)
            except OSError:
                try:
                    os.unlink(path)
                except Exception:
                    pass
                pass
        os.mkdir(remote_test_path_abs)
        local_file_paths = []
        # Build a 10-entry nested tree under the "remote" directory.
        for i in range(0, 10):
            remote_file_path_dir = os.path.join(
                remote_test_path_abs, 'sub_dir', 'dir_foo' + str(i))
            os.makedirs(remote_file_path_dir)
            remote_file_path = os.path.join(remote_file_path_dir, 'foo' + str(i))
            local_file_path = os.path.join(
                local_copied_dir, 'sub_dir', 'dir_foo' + str(i), 'foo' + str(i))
            local_file_paths.append(local_file_path)
            test_file = open(remote_file_path, 'w')
            test_file.write(test_file_data)
            test_file.close()
        # Directory source without recurse=True must raise SCPError.
        cmds = self.client.scp_recv(remote_test_path_abs, local_test_path)
        self.assertRaises(SCPError, joinall, cmds, raise_error=True)
        cmds = self.client.scp_recv(remote_test_path_abs, local_test_path,
                                    recurse=True)
        try:
            joinall(cmds, raise_error=True)
            self.assertTrue(os.path.isdir(local_copied_dir))
            for path in local_file_paths:
                self.assertTrue(os.path.isfile(path))
        except Exception:
            try:
                shutil.rmtree(remote_test_path_abs)
            except Exception:
                pass
            raise
        finally:
            try:
                shutil.rmtree(local_copied_dir)
            except Exception:
                pass
        # Relative path
        cmds = self.client.scp_recv(remote_test_path_rel, local_test_path,
                                    recurse=True)
        try:
            joinall(cmds, raise_error=True)
            self.assertTrue(os.path.isdir(local_copied_dir))
            for path in local_file_paths:
                self.assertTrue(os.path.isfile(path))
        finally:
            for _path in (remote_test_path_abs, local_copied_dir):
                try:
                    shutil.rmtree(_path)
                except Exception:
                    pass
    def test_scp_recv_larger_files(self):
        """Receive a ~10MB random file from each of two extra local servers
        and verify the SHA-256 of each local copy against the source."""
        hosts = ['127.0.0.1%s' % (i,) for i in range(1, 3)]
        servers = [OpenSSHServer(host, port=self.port) for host in hosts]
        for server in servers:
            server.start_server()
        client = ParallelSSHClient(
            hosts, port=self.port, pkey=self.user_key, num_retries=1, timeout=1,
            pool_size=len(hosts),
        )
        dir_name = os.path.dirname(__file__)
        remote_filename = 'test_file'
        remote_filepath = os.path.join(dir_name, remote_filename)
        local_filename = 'file_copy'
        # Per-host local destinations under the home directory.
        copy_args = [{
            'remote_file': remote_filepath,
            'local_file': os.path.expanduser("~/" + 'host_%s_%s' % (n, local_filename))}
            for n in range(len(hosts))
        ]
        local_file_names = [
            arg['local_file'] for arg in copy_args]
        sha = sha256()
        # 10000 x 1KB of random data, hashed as written.
        with open(remote_filepath, 'wb') as file_h:
            for _ in range(10000):
                data = os.urandom(1024)
                file_h.write(data)
                sha.update(data)
            file_h.flush()
        source_file_sha = sha.hexdigest()
        sha = sha256()
        cmds = client.scp_recv('%(remote_file)s', '%(local_file)s', copy_args=copy_args)
        try:
            joinall(cmds, raise_error=True)
        except Exception:
            raise
        else:
            # Release client connections before verifying copies.
            del client
            for _local_file_name in local_file_names:
                self.assertTrue(os.path.isfile(_local_file_name))
                with open(_local_file_name, 'rb') as fh:
                    data = fh.read(10240)
                    while data:
                        sha.update(data)
                        data = fh.read(10240)
                local_file_sha = sha.hexdigest()
                # Fresh digest for the next file's checksum.
                sha = sha256()
                self.assertEqual(source_file_sha, local_file_sha)
        finally:
            try:
                os.unlink(remote_filepath)
                for _local_file_name in local_file_names:
                    os.unlink(_local_file_name)
            except OSError:
                pass
def test_bad_hosts_value(self):
self.assertRaises(TypeError, ParallelSSHClient, 'a host')
self.assertRaises(TypeError, ParallelSSHClient, b'a host')
def test_disable_agent_forward(self):
client = ParallelSSHClient(
[self.host], port=self.port, pkey=self.user_key,
forward_ssh_agent=False,
num_retries=1)
output = client.run_command(self.cmd)
client.join(output)
self.assertFalse(output[0].client.forward_ssh_agent)
def test_keepalive_off(self):
client = ParallelSSHClient(
[self.host], port=self.port, pkey=self.user_key,
keepalive_seconds=0,
num_retries=1)
output = client.run_command(self.cmd)
client.join(output)
self.assertFalse(output[0].client.keepalive_seconds)
    def test_return_list_last_output_multi_host(self):
        """get_last_output returns a list of HostOutput, one per host in
        host order, after run_command with multiple hosts."""
        host2, host3 = '127.0.0.2', '127.0.0.3'
        server2 = OpenSSHServer(host2, port=self.port)
        server3 = OpenSSHServer(host3, port=self.port)
        servers = [server2, server3]
        for server in servers:
            server.start_server()
        try:
            hosts = [self.host, host2, host3]
            client = ParallelSSHClient(hosts, port=self.port,
                                       pkey=self.user_key,
                                       num_retries=1)
            # Before any command is run there are no cmds and no last output.
            self.assertTrue(client.cmds is None)
            self.assertTrue(client.get_last_output() is None)
            client.run_command(self.cmd)
            expected_stdout = [self.resp]
            expected_stderr = []
            output = client.get_last_output()
            self.assertIsInstance(output, list)
            self.assertEqual(len(output), len(hosts))
            self.assertIsInstance(output[0], HostOutput)
            client.join(output)
            for i, host in enumerate(hosts):
                # Output order matches host order.
                self.assertEqual(output[i].host, host)
                exit_code = output[i].exit_code
                _stdout = list(output[i].stdout)
                _stderr = list(output[i].stderr)
                self.assertEqual(exit_code, 0)
                self.assertListEqual(expected_stdout, _stdout)
                self.assertListEqual(expected_stderr, _stderr)
        finally:
            for server in servers:
                server.stop()
def test_client_disconnect(self):
client = ParallelSSHClient([self.host],
port=self.port,
pkey=self.user_key,
num_retries=1)
client.run_command(self.cmd)
client.join(consume_output=True)
single_client = list(client._host_clients.values())[0]
del client
self.assertEqual(single_client.session, None)
def test_client_disconnect_error(self):
def disc():
raise Exception
client = ParallelSSHClient([self.host], port=self.port,
pkey=self.user_key, num_retries=1)
output = client.run_command(self.cmd)
client.join(output)
client._host_clients[(0, self.host)].disconnect = disc
del client
    def test_multiple_join_timeout(self):
        """Repeated joins with ample timeout succeed; a short timeout on a
        longer-running command raises Timeout with the command unfinished."""
        client = ParallelSSHClient([self.host], port=self.port,
                                   pkey=self.user_key)
        for _ in range(5):
            output = client.run_command(self.cmd)
            client.join(output, timeout=1, consume_output=True)
            for host_out in output:
                self.assertTrue(host_out.client.finished(host_out.channel))
        # .2s command with .1s join timeout must time out, command still running.
        output = client.run_command('sleep .2')
        self.assertRaises(Timeout, client.join, output, timeout=.1, consume_output=True)
        for host_out in output:
            self.assertFalse(host_out.client.finished(host_out.channel))
    def test_multiple_run_command_timeout(self):
        """read_timeout applies per run_command; a timed-out read can be
        resumed to completion after join."""
        client = ParallelSSHClient([self.host], port=self.port,
                                   pkey=self.user_key)
        for _ in range(5):
            output = client.run_command('pwd', read_timeout=1)
            for host_out in output:
                stdout = list(host_out.stdout)
                self.assertTrue(len(stdout) > 0)
                self.assertTrue(host_out.client.finished(host_out.channel))
        # Output arrives after .25s but reads time out at .1s.
        output = client.run_command('sleep .25; echo me', read_timeout=.1)
        for host_out in output:
            self.assertRaises(Timeout, list, host_out.stdout)
        client.join(output)
        # After join the buffered output is readable in full.
        for host_out in output:
            stdout = list(host_out.stdout)
            self.assertEqual(stdout, ['me'])
def read_stream_dt(self, host_out, stream, read_timeout):
now = datetime.now()
timed_out = False
try:
for line in stream:
pass
except Timeout:
timed_out = True
finally:
dt = datetime.now() - now
return dt, timed_out
def test_read_timeout_mixed_output(self):
cmd = 'sleep .1; echo start >&2; for i in 1 4 4; do sleep .$i; echo $i; done'
read_timeout = .3
output = self.client.run_command(
cmd, read_timeout=read_timeout, stop_on_errors=False)
for host_out in output:
while not host_out.client.finished(host_out.channel):
dt, timed_out = self.read_stream_dt(host_out, host_out.stdout, read_timeout)
dt_seconds = dt.total_seconds()
# Timeout within timeout value + 3%
self.assertTrue(
not timed_out or (read_timeout <= dt_seconds <= read_timeout*1.03),
msg="Read for stdout timed out at %s seconds for %s second timeout" % (
dt_seconds, read_timeout))
dt, timed_out = self.read_stream_dt(host_out, host_out.stderr, read_timeout)
dt_seconds = dt.total_seconds()
self.assertTrue(
not timed_out or (read_timeout <= dt_seconds <= read_timeout*1.03),
msg="Read for stdout timed out at %s seconds for %s second timeout" % (
dt_seconds, read_timeout))
def test_read_stdout_no_timeout(self):
cmd = 'sleep .1; echo me; sleep .1; echo me'
read_timeout = 1
output = self.client.run_command(
cmd, read_timeout=read_timeout, stop_on_errors=False)
for host_out in output:
dt, timed_out = self.read_stream_dt(host_out, host_out.stdout, read_timeout)
self.assertFalse(timed_out)
self.assertTrue(dt.total_seconds() < read_timeout)
# Command finished, shouldn't time out
dt, timed_out = self.read_stream_dt(host_out, host_out.stderr, read_timeout)
self.assertFalse(timed_out)
def test_read_timeout_no_timeouts(self):
cmd = 'echo me; echo me_stderr >&2'
read_timeout = 1
# No timeouts
output = self.client.run_command(
cmd, read_timeout=read_timeout, stop_on_errors=False)
for host_out in output:
dt, timed_out = self.read_stream_dt(host_out, host_out.stdout, read_timeout)
self.assertTrue(dt.total_seconds() < read_timeout)
self.assertFalse(timed_out)
dt, timed_out = self.read_stream_dt(host_out, host_out.stderr, read_timeout)
self.assertFalse(timed_out)
self.assertTrue(dt.total_seconds() < read_timeout)
def test_read_stdout_timeout_stderr_no_timeout(self):
"""No timeouts for stderr only"""
cmd = 'sleep .1; echo me >&2; sleep .1; echo me >&2; sleep .1'
read_timeout = .25
output = self.client.run_command(
cmd, read_timeout=read_timeout, stop_on_errors=False)
for host_out in output:
dt, timed_out = self.read_stream_dt(host_out, host_out.stdout, read_timeout)
self.assertTrue(timed_out)
self.assertTrue(read_timeout <= dt.total_seconds() <= read_timeout*1.03)
dt, timed_out = self.read_stream_dt(host_out, host_out.stderr, read_timeout)
self.assertFalse(timed_out)
self.assertTrue(dt.total_seconds() < read_timeout)
def test_read_multi_same_hosts(self):
hosts = [self.host, self.host]
outputs = [
self.client.run_command(self.cmd),
self.client.run_command(self.cmd),
]
for output in outputs:
for host_out in output:
stdout = list(host_out.stdout)
self.assertListEqual(stdout, [self.resp])
    @patch('pssh.clients.base.single.socket')
    def test_ipv6(self, gsocket):
        """IPv6 resolution path with the socket module mocked out.

        getaddrinfo is mocked to return a single IPv6 candidate; connect
        succeeds against a MagicMock socket, so the failure surfaces later
        per-host - observed here as a TypeError exception on the output
        (presumably from the ssh layer receiving a mock socket).
        """
        hosts = ['::1']
        client = ParallelSSHClient(hosts, port=self.port, pkey=self.user_key, num_retries=1)
        addr_info = ('::1', self.port, 0, 0)
        gsocket.IPPROTO_TCP = socket.IPPROTO_TCP
        gsocket.socket = MagicMock()
        _sock = MagicMock()
        gsocket.socket.return_value = _sock
        sock_con = MagicMock()
        _sock.connect = sock_con
        getaddrinfo = MagicMock()
        gsocket.getaddrinfo = getaddrinfo
        getaddrinfo.return_value = [(
            socket.AF_INET6, socket.SocketKind.SOCK_STREAM, socket.IPPROTO_TCP, '', addr_info)]
        output = client.run_command(self.cmd, stop_on_errors=False)
        for host_out in output:
            self.assertEqual(hosts[0], host_out.host)
            self.assertIsInstance(host_out.exception, TypeError)
def test_no_ipv6(self):
client = ParallelSSHClient([self.host], port=self.port, pkey=self.user_key, num_retries=1, ipv6_only=True)
output = client.run_command(self.cmd, stop_on_errors=False)
for host_out in output:
self.assertEqual(self.host, host_out.host)
self.assertIsInstance(host_out.exception, NoIPv6AddressFoundError)
# TODO:
# * password auth
| 83,911 | Python | .py | 1,811 | 33.558807 | 118 | 0.566458 | ParallelSSH/parallel-ssh | 1,198 | 148 | 36 | LGPL-2.1 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,363 | test_single_client.py | ParallelSSH_parallel-ssh/tests/native/test_single_client.py | # This file is part of parallel-ssh.
#
# Copyright (C) 2014-2022 Panos Kittenis and contributors.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, version 2.1.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import os
import shutil
import subprocess
import tempfile
from datetime import datetime
from hashlib import sha256
from tempfile import NamedTemporaryFile
from unittest.mock import MagicMock, call, patch
import pytest
from gevent import sleep, spawn, Timeout as GTimeout, socket
from pytest import raises
from ssh2.exceptions import (SocketDisconnectError, BannerRecvError, SocketRecvError,
AgentConnectionError, AgentListIdentitiesError,
AgentAuthenticationError, AgentGetIdentityError, SFTPProtocolError,
AuthenticationError as SSH2AuthenticationError,
)
from ssh2.session import Session
from pssh.clients.native import SSHClient
from pssh.exceptions import (AuthenticationException, ConnectionErrorException,
SessionError, SFTPIOError, SFTPError, SCPError, PKeyFileError, Timeout,
AuthenticationError, NoIPv6AddressFoundError, ConnectionError
)
from .base_ssh2_case import SSH2TestCase
class SSH2ClientTest(SSH2TestCase):
def test_context_manager(self):
with SSHClient(self.host, port=self.port,
pkey=self.user_key,
num_retries=1) as client:
self.assertIsInstance(client, SSHClient)
def test_sftp_fail(self):
sftp = self.client._make_sftp()
self.assertRaises(SFTPIOError, self.client._mkdir, sftp, '/blah')
self.assertRaises(SFTPError, self.client.sftp_put, sftp, 'a file', '/blah')
    def test_sftp_exc(self):
        """SFTPProtocolError raised by the low-level put/get handlers is
        wrapped as SFTPIOError by copy_file/copy_remote_file."""
        def _sftp_exc(local_file, remote_file):
            raise SFTPProtocolError
        client = SSHClient(self.host, port=self.port,
                           pkey=self.user_key,
                           num_retries=1)
        client._sftp_put = _sftp_exc
        local_file = 'local_file'
        try:
            with open(local_file, 'wb') as fh:
                fh.write(b'asdf')
                fh.flush()
            self.assertRaises(SFTPIOError, client.copy_file, local_file, 'remote_file')
        finally:
            try:
                os.unlink(local_file)
            except Exception:
                pass
        client._sftp_get = _sftp_exc
        remote_file = os.path.expanduser('~/remote_file')
        try:
            with open(remote_file, 'wb') as fh:
                fh.write(b'asdf')
                fh.flush()
            self.assertRaises(SFTPIOError, client.copy_remote_file, remote_file, 'local_file')
        finally:
            try:
                os.unlink(remote_file)
            except Exception:
                pass
        # A non-existent remote file also surfaces as SFTPIOError.
        self.assertRaises(
            SFTPIOError, client.copy_remote_file, 'fake_remote_file_not_exists', 'local')
def test_conn_refused(self):
with pytest.raises(ConnectionRefusedError):
SSHClient('127.0.0.99', port=self.port, num_retries=1, timeout=1)
    @patch('pssh.clients.base.single.socket')
    def test_ipv6(self, gsocket):
        """IPv6 connect path with the socket module mocked.

        Verifies getaddrinfo is queried with host/port and that connect is
        attempted exactly once with the single IPv6 candidate returned.
        """
        # As of Oct 2021, CircleCI does not support IPv6 in its containers.
        # Rather than having to create and manage our own docker containers just for testing, we patch gevent.socket
        # and test it unit test style.
        # Not ideal, but managing our own containers for one test is worse.
        host = '::1'
        addr_info = ('::1', self.port, 0, 0)
        gsocket.IPPROTO_TCP = socket.IPPROTO_TCP
        gsocket.socket = MagicMock()
        _sock = MagicMock()
        gsocket.socket.return_value = _sock
        sock_con = MagicMock()
        # Mocked connect refuses, so client construction fails with
        # ConnectionError after exhausting retries.
        sock_con.side_effect = ConnectionRefusedError
        _sock.connect = sock_con
        getaddrinfo = MagicMock()
        gsocket.getaddrinfo = getaddrinfo
        getaddrinfo.return_value = [(
            socket.AF_INET6, socket.SocketKind.SOCK_STREAM, socket.IPPROTO_TCP, '', addr_info)]
        with raises(ConnectionError):
            client = SSHClient(host, port=self.port, pkey=self.user_key,
                               num_retries=1)
        getaddrinfo.assert_called_once_with(host, self.port, proto=socket.IPPROTO_TCP)
        sock_con.assert_called_once_with(addr_info)
    @patch('pssh.clients.base.single.socket')
    def test_multiple_available_addr(self, gsocket):
        """All addresses returned by getaddrinfo are tried before the client
        gives up with ConnectionError."""
        host = '127.0.0.1'
        addr_info = (host, self.port)
        gsocket.IPPROTO_TCP = socket.IPPROTO_TCP
        gsocket.socket = MagicMock()
        _sock = MagicMock()
        gsocket.socket.return_value = _sock
        sock_con = MagicMock()
        # Every connect attempt is refused.
        sock_con.side_effect = ConnectionRefusedError
        _sock.connect = sock_con
        getaddrinfo = MagicMock()
        gsocket.getaddrinfo = getaddrinfo
        # Two resolved candidates for the same host.
        getaddrinfo.return_value = [
            (socket.AF_INET, socket.SocketKind.SOCK_STREAM, socket.IPPROTO_TCP, '', addr_info),
            (socket.AF_INET, socket.SocketKind.SOCK_STREAM, socket.IPPROTO_TCP, '', addr_info),
        ]
        with raises(ConnectionError):
            client = SSHClient(host, port=self.port, pkey=self.user_key,
                               num_retries=1)
        getaddrinfo.assert_called_with(host, self.port, proto=socket.IPPROTO_TCP)
        # One connect attempt per resolved address.
        assert sock_con.call_count == len(getaddrinfo.return_value)
def test_no_ipv6(self):
try:
SSHClient(self.host,
port=self.port, pkey=self.user_key,
num_retries=1, ipv6_only=True)
except NoIPv6AddressFoundError as ex:
self.assertEqual(len(ex.args), 3)
self.assertIsInstance(ex.args[2], list)
self.assertTrue(len(ex.args[2]) > 0)
_host, _port = ex.args[2][0]
self.assertEqual(_host, self.host)
self.assertEqual(_port, self.port)
else:
raise AssertionError
def test_scp_fail(self):
self.assertRaises(SCPError, self.client.scp_recv, 'fakey', 'fake')
try:
os.mkdir('adir')
except OSError:
pass
try:
self.assertRaises(ValueError, self.client.scp_send, 'adir', 'fake')
finally:
os.rmdir('adir')
def test_pkey_from_memory(self):
with open(self.user_key, 'rb') as fh:
key_data = fh.read()
SSHClient(self.host, port=self.port,
pkey=key_data, num_retries=1, timeout=1)
def test_execute(self):
host_out = self.client.run_command(self.cmd)
output = list(host_out.stdout)
stderr = list(host_out.stderr)
expected = [self.resp]
exit_code = host_out.channel.get_exit_status()
self.assertEqual(host_out.exit_code, 0)
self.assertEqual(expected, output)
def test_alias(self):
client = SSHClient(self.host, port=self.port,
pkey=self.user_key, num_retries=1,
alias='test')
host_out = client.run_command(self.cmd)
self.assertEqual(host_out.alias, 'test')
    def test_open_session_timeout(self):
        """run_command raises gevent Timeout when open_session takes longer
        than the client's configured timeout."""
        client = SSHClient(self.host, port=self.port,
                           pkey=self.user_key,
                           num_retries=1,
                           retry_delay=.1,
                           timeout=.1)
        def _session(timeout=None):
            # Sleep past the .1s client timeout.
            sleep(.2)
        client.open_session = _session
        self.assertRaises(GTimeout, client.run_command, self.cmd)
def test_open_session_exc(self):
class Error(Exception):
pass
def _session():
raise Error
client = SSHClient(self.host, port=self.port,
pkey=self.user_key,
num_retries=1)
client._open_session = _session
self.assertRaises(SessionError, client.open_session)
def test_finished_error(self):
self.assertRaises(ValueError, self.client.wait_finished, None)
self.assertIsNone(self.client.finished(None))
def test_stderr(self):
host_out = self.client.run_command('echo "me" >&2')
self.client.wait_finished(host_out)
output = list(host_out.stdout)
stderr = list(host_out.stderr)
expected = ['me']
self.assertListEqual(expected, stderr)
self.assertTrue(len(output) == 0)
def test_stdin(self):
host_out = self.client.run_command('read line; echo $line')
host_out.stdin.write('a line\n')
host_out.stdin.flush()
self.client.wait_finished(host_out)
stdout = list(host_out.stdout)
self.assertListEqual(stdout, ['a line'])
def test_long_running_cmd(self):
host_out = self.client.run_command('sleep .2; exit 2')
self.assertRaises(ValueError, self.client.wait_finished, host_out.channel)
self.client.wait_finished(host_out)
exit_code = host_out.exit_code
self.assertEqual(exit_code, 2)
    def test_manual_auth(self):
        """Manually drive reconnect and re-auth on an existing client.

        After a disconnect, _connect/_init_session re-establish the session
        with the stored pkey. With pkey cleared and a fresh handshake,
        auth() must raise AuthenticationException.
        """
        client = SSHClient(self.host, port=self.port,
                           pkey=self.user_key,
                           num_retries=1,
                           allow_agent=False)
        client.session.disconnect()
        del client.session
        del client.sock
        client._connect(self.host, self.port)
        client._init_session()
        # Identity auth
        client.pkey = None
        client.session.disconnect()
        del client.session
        del client.sock
        client._connect(self.host, self.port)
        # Handshake manually without authenticating.
        client.session = Session()
        client.session.handshake(client.sock)
        self.assertRaises(AuthenticationException, client.auth)
    def test_identity_auth(self):
        """Default identity-file auth succeeds when the user key is listed in
        IDENTITIES, both set on an instance and via a subclass."""
        class _SSHClient(SSHClient):
            # Class body closes over the outer test method's self.user_key.
            IDENTITIES = (self.user_key,)
        client = SSHClient(self.host, port=self.port,
                           pkey=self.user_key,
                           num_retries=1,
                           allow_agent=False)
        client.disconnect()
        client.pkey = None
        del client.session
        del client.sock
        client._connect(self.host, self.port)
        client._init_session()
        client.IDENTITIES = (self.user_key,)
        # Default identities auth only should succeed
        client._identity_auth()
        client.disconnect()
        client._connect(self.host, self.port)
        client._init_session()
        # Auth should succeed
        self.assertIsNone(client.auth())
        # Standard init with custom identities
        client = _SSHClient(self.host, port=self.port,
                            num_retries=1,
                            allow_agent=False)
        self.assertIsInstance(client, SSHClient)
def test_no_auth(self):
self.assertRaises(
AuthenticationError,
SSHClient,
self.host,
port=self.port,
num_retries=1,
allow_agent=False,
identity_auth=False,
)
    def test_agent_auth_failure(self):
        """Both unknown and agent-specific errors from _agent_auth surface as
        AuthenticationError when no other auth method is available."""
        class UnknownError(Exception):
            pass
        def _agent_auth_unk():
            raise UnknownError
        def _agent_auth_agent_err():
            raise AgentConnectionError
        client = SSHClient(self.host, port=self.port,
                           pkey=self.user_key,
                           num_retries=1,
                           allow_agent=True,
                           identity_auth=False)
        client.session.disconnect()
        # Clear pkey so only agent auth remains.
        client.pkey = None
        client._connect(self.host, self.port)
        client._agent_auth = _agent_auth_unk
        self.assertRaises(AuthenticationError, client.auth)
        client._agent_auth = _agent_auth_agent_err
        self.assertRaises(AuthenticationError, client.auth)
    def test_agent_auth_fake_success(self):
        """auth() succeeds when _agent_auth returns without raising, even
        with no pkey set."""
        def _agent_auth():
            return
        client = SSHClient(self.host, port=self.port,
                           pkey=self.user_key,
                           num_retries=1,
                           allow_agent=True,
                           identity_auth=False)
        client.session.disconnect()
        client.pkey = None
        client._connect(self.host, self.port)
        client._agent_auth = _agent_auth
        self.assertIsNone(client.auth())
def test_agent_fwd(self):
client = SSHClient(self.host, port=self.port,
pkey=self.user_key,
num_retries=1,
allow_agent=True,
forward_ssh_agent=True)
out = client.run_command(self.cmd)
client.wait_finished(out)
def test_failed_auth(self):
self.assertRaises(PKeyFileError, SSHClient, self.host, port=self.port,
pkey='client_pkey',
num_retries=1)
self.assertRaises(PKeyFileError, SSHClient, self.host, port=self.port,
pkey='~/fake_key',
num_retries=1)
def test_handshake_fail(self):
client = SSHClient(self.host, port=self.port,
pkey=self.user_key,
num_retries=1)
client.session.disconnect()
self.assertRaises((SocketDisconnectError, BannerRecvError, SocketRecvError), client._init_session)
def test_stdout_parsing(self):
dir_list = os.listdir(os.path.expanduser('~'))
host_out = self.client.run_command('ls -la')
output = list(host_out.stdout)
# Output of `ls` will have 'total', '.', and '..' in addition to dir
# listing
self.assertEqual(len(dir_list), len(output) - 3)
def test_file_output_parsing(self):
lines = int(subprocess.check_output(
['wc', '-l', 'README.rst']).split()[0])
dir_name = os.path.dirname(__file__)
_file = os.sep.join((dir_name, '..', '..', 'README.rst'))
cmd = 'cat %s' % _file
host_out = self.client.run_command(cmd)
output = list(host_out.stdout)
self.assertEqual(lines, len(output))
def test_identity_auth_failure(self):
self.assertRaises(AuthenticationException,
SSHClient, self.host, port=self.port, num_retries=1,
allow_agent=False)
def test_password_auth_failure(self):
try:
client = SSHClient(self.host, port=self.port, num_retries=1,
allow_agent=False,
identity_auth=False,
password='blah blah blah',
)
except AuthenticationException as ex:
self.assertIsInstance(ex.args[3], SSH2AuthenticationError)
else:
raise AssertionError
def test_retry_failure(self):
self.assertRaises(ConnectionError,
SSHClient, self.host, port=12345,
num_retries=2, _auth_thread_pool=False,
retry_delay=.1,
)
def test_auth_retry_failure(self):
self.assertRaises(AuthenticationException,
SSHClient, self.host, port=self.port,
user=self.user,
password='fake',
num_retries=3,
retry_delay=.1,
allow_agent=False,
identity_auth=False,
)
def test_connection_timeout(self):
cmd = spawn(SSHClient, 'fakehost.com', port=12345,
num_retries=1, timeout=.1, _auth_thread_pool=False)
# Should fail within greenlet timeout, otherwise greenlet will
# raise timeout which will fail the test
self.assertRaises(ConnectionErrorException, cmd.get, timeout=1)
def test_client_read_timeout(self):
client = SSHClient(self.host, port=self.port,
pkey=self.user_key,
num_retries=1)
host_out = client.run_command('sleep 2; echo me', timeout=0.2)
self.assertRaises(Timeout, list, host_out.stdout)
    def test_multiple_clients_exec_terminates_channels(self):
        """Sequential clients must not break each other's sessions.

        See #200 - Multiple clients should not interfere with
        each other. session.disconnect can leave state in libssh2
        and break subsequent sessions even on different socket and
        session.
        """
        def scope_killer():
            # Create, use and disconnect several clients in one scope.
            for _ in range(5):
                client = SSHClient(self.host, port=self.port,
                                   pkey=self.user_key,
                                   num_retries=1,
                                   allow_agent=False)
                host_out = client.run_command(self.cmd)
                output = list(host_out.stdout)
                self.assertListEqual(output, [self.resp])
                client.disconnect()
        scope_killer()
    def test_agent_auth_exceptions(self):
        """Test SSH agent authentication failure with custom client that
        does not do auth at class init.
        """
        class _SSHClient(SSHClient):
            def __init__(self, host, port, num_retries):
                self.keepalive_seconds = None
                # NOTE: super(SSHClient, self) deliberately skips
                # SSHClient.__init__ and calls the base class directly;
                # the num_retries parameter is ignored (hardcoded 2).
                super(SSHClient, self).__init__(
                    host, port=port, num_retries=2,
                    allow_agent=True)
                self.IDENTITIES = set()
            def _init_session(self):
                # Handshake only - no authentication performed.
                self.session = Session()
                if self.timeout:
                    self.session.set_timeout(self.timeout * 1000)
                self.session.handshake(self.sock)
            def _auth_retry(self):
                pass
        client = _SSHClient(self.host, port=self.port,
                            num_retries=1)
        # Direct agent auth fails with one of the agent error types.
        self.assertRaises((AgentConnectionError, AgentListIdentitiesError, \
                           AgentAuthenticationError, AgentGetIdentityError),
                          client.session.agent_auth, client.user)
        self.assertRaises(AuthenticationException,
                          client.auth)
    def test_finished(self):
        """finished/wait_finished semantics across a HostOutput lifecycle."""
        self.assertFalse(self.client.finished(None))
        host_out = self.client.run_command('echo me')
        channel = host_out.channel
        self.assertFalse(self.client.finished(channel))
        # wait_finished takes a HostOutput, not a raw channel.
        self.assertRaises(ValueError, self.client.wait_finished, host_out.channel)
        self.client.wait_finished(host_out)
        stdout = list(host_out.stdout)
        self.assertTrue(self.client.finished(channel))
        self.assertListEqual(stdout, [self.resp])
        self.assertRaises(ValueError, self.client.wait_finished, None)
        # HostOutput with channel cleared is a no-op for wait_finished.
        host_out.channel = None
        self.assertIsNone(self.client.wait_finished(host_out))
    def test_wait_finished_timeout(self):
        """wait_finished honours its timeout to within ~10% overhead."""
        host_out = self.client.run_command('sleep .2')
        timeout = .1
        self.assertFalse(self.client.finished(host_out.channel))
        start = datetime.now()
        self.assertRaises(Timeout, self.client.wait_finished, host_out, timeout=timeout)
        dt = datetime.now() - start
        # Timed out no earlier than timeout and within 10% above it.
        self.assertTrue(timeout*1.1 > dt.total_seconds() > timeout)
        self.client.wait_finished(host_out)
        self.assertTrue(self.client.finished(host_out.channel))
    def test_scp_abspath_recursion(self):
        """Recursive scp_recv with absolute paths: empty dirs are created,
        files are copied, and a missing source dir raises SCPError."""
        cur_dir = os.path.dirname(__file__)
        dir_name_to_copy = 'a_dir'
        files = ['file1', 'file2']
        dir_paths = [cur_dir, dir_name_to_copy]
        to_copy_dir_path = os.path.abspath(os.path.sep.join(dir_paths))
        # Dir to copy to
        copy_to_path = '/tmp/copied_dir'
        try:
            shutil.rmtree(copy_to_path)
        except Exception:
            pass
        try:
            try:
                os.makedirs(to_copy_dir_path)
            except OSError:
                pass
            # Copy for empty remote dir should create local dir
            self.client.scp_recv(to_copy_dir_path, copy_to_path, recurse=True)
            self.assertTrue(os.path.isdir(copy_to_path))
            for _file in files:
                _filepath = os.path.sep.join([to_copy_dir_path, _file])
                with open(_filepath, 'w') as fh:
                    fh.writelines(['asdf'])
            self.client.scp_recv(to_copy_dir_path, copy_to_path, recurse=True)
            for _file in files:
                local_file_path = os.path.sep.join([copy_to_path, _file])
                self.assertTrue(os.path.isfile(local_file_path))
            # Source removed - recursive copy must now fail.
            shutil.rmtree(to_copy_dir_path)
            self.assertRaises(
                SCPError, self.client.scp_recv, to_copy_dir_path, copy_to_path, recurse=True)
        finally:
            for _path in (copy_to_path, to_copy_dir_path):
                try:
                    shutil.rmtree(_path)
                except Exception:
                    pass
    def test_copy_file_abspath_recurse(self):
        """copy_file of a directory requires recurse=True; with it, the
        destination dir and its files are created, without it ValueError."""
        cur_dir = os.path.dirname(__file__)
        dir_name_to_copy = 'a_dir'
        files = ['file1', 'file2']
        dir_paths = [cur_dir, dir_name_to_copy]
        to_copy_dir_path = os.path.abspath(os.path.sep.join(dir_paths))
        # Trailing double separator exercises path normalisation.
        copy_to_path = '/tmp/dest_path//'
        for _path in (copy_to_path, to_copy_dir_path):
            try:
                shutil.rmtree(_path)
            except Exception:
                pass
        try:
            try:
                os.makedirs(to_copy_dir_path)
            except OSError:
                pass
            # Directory source without recurse is an error.
            self.assertRaises(
                ValueError,
                self.client.copy_file, to_copy_dir_path, copy_to_path, recurse=False)
            self.assertFalse(os.path.isdir(copy_to_path))
            self.client.copy_file(to_copy_dir_path, copy_to_path, recurse=True)
            self.assertTrue(os.path.isdir(copy_to_path))
            for _file in files:
                _filepath = os.path.sep.join([to_copy_dir_path, _file])
                with open(_filepath, 'w') as fh:
                    fh.writelines(['asdf'])
            self.client.copy_file(to_copy_dir_path, copy_to_path, recurse=True)
            # Absolute target must not create anything under $HOME.
            self.assertFalse(os.path.exists(os.path.expanduser('~/tmp')))
            for _file in files:
                local_file_path = os.path.sep.join([copy_to_path, _file])
                self.assertTrue(os.path.isfile(local_file_path))
        finally:
            for _path in (copy_to_path, to_copy_dir_path):
                try:
                    shutil.rmtree(_path)
                except Exception:
                    pass
    def test_copy_file_remote_dir_relpath(self):
        """copy_file with a relative remote path containing repeated
        separators must create intermediate dirs under $HOME."""
        cur_dir = os.path.dirname(__file__)
        dir_base_dir = 'a_dir'
        # Doubled/tripled separators exercise remote path normalisation.
        dir_name_to_copy = '//'.join([dir_base_dir, 'dir1', 'dir2'])
        file_to_copy = 'file_to_copy'
        dir_path = [cur_dir, file_to_copy]
        copy_from_file_path = os.path.abspath(os.path.sep.join(dir_path))
        copy_to_file_path = '///'.join([dir_name_to_copy, file_to_copy])
        copy_to_abs_path = os.path.abspath(os.path.expanduser('~/' + copy_to_file_path))
        copy_to_abs_dir = os.path.abspath(os.path.expanduser('~/' + dir_base_dir))
        try:
            os.unlink(copy_from_file_path)
        except Exception:
            pass
        try:
            shutil.rmtree(copy_to_abs_dir, ignore_errors=True)
        except Exception:
            pass
        try:
            with open(copy_from_file_path, 'w') as fh:
                fh.writelines(['asdf'])
            self.client.copy_file(copy_from_file_path, copy_to_file_path)
            self.assertTrue(os.path.isfile(copy_to_abs_path))
        finally:
            try:
                os.unlink(copy_from_file_path)
            except Exception:
                pass
            try:
                shutil.rmtree(copy_to_abs_dir, ignore_errors=True)
            except Exception:
                pass
def test_copy_file_with_newlines(self):
with NamedTemporaryFile('wb') as temp_file:
# 2MB
for _ in range(200512):
temp_file.write(b'asdfartkj\n')
temp_file.flush()
now = datetime.now()
try:
self.client.copy_file(os.path.abspath(temp_file.name), 'write_file')
took = datetime.now() - now
assert took.total_seconds() < 1
finally:
try:
os.unlink(os.path.expanduser('~/write_file'))
except OSError:
pass
def test_sftp_mkdir_abspath(self):
remote_dir = '/tmp/dir_to_create/dir1/dir2/dir3'
_sftp = self.client._make_sftp()
try:
self.client.mkdir(_sftp, remote_dir)
self.assertTrue(os.path.isdir(remote_dir))
self.assertFalse(os.path.exists(os.path.expanduser('~/tmp')))
finally:
for _dir in (remote_dir, os.path.expanduser('~/tmp')):
try:
shutil.rmtree(_dir)
except Exception:
pass
def test_sftp_mkdir_rel_path(self):
remote_dir = 'dir_to_create/dir1/dir2/dir3'
try:
shutil.rmtree(os.path.expanduser('~/' + remote_dir))
except Exception:
pass
_sftp = self.client._make_sftp()
try:
self.client.mkdir(_sftp, remote_dir)
self.assertTrue(os.path.exists(os.path.expanduser('~/' + remote_dir)))
finally:
for _dir in (remote_dir, os.path.expanduser('~/tmp')):
try:
shutil.rmtree(_dir)
except Exception:
pass
    def test_scp_recv_large_file(self):
        """scp_recv of a ~10MB random file must produce a byte-identical
        local copy, verified by SHA-256."""
        cur_dir = os.path.dirname(__file__)
        file_name = 'file1'
        file_copy_to = 'file_copied'
        file_path_from = os.path.sep.join([cur_dir, file_name])
        file_copy_to_dirpath = os.path.expanduser('~/') + file_copy_to
        for _path in (file_path_from, file_copy_to_dirpath):
            try:
                os.unlink(_path)
            except OSError:
                pass
        sha = sha256()
        try:
            # Write 10000 x 1KB random chunks, hashing as we go.
            with open(file_path_from, 'wb') as fh:
                for _ in range(10000):
                    data = os.urandom(1024)
                    fh.write(data)
                    sha.update(data)
            source_file_sha = sha.hexdigest()
            self.client.scp_recv(file_path_from, file_copy_to_dirpath)
            self.assertTrue(os.path.isfile(file_copy_to_dirpath))
            sha = sha256()
            with open(file_copy_to_dirpath, 'rb') as fh:
                for block in fh:
                    sha.update(block)
            written_file_hash = sha.hexdigest()
            self.assertEqual(source_file_sha, written_file_hash)
        finally:
            for _path in (file_path_from, file_copy_to_dirpath):
                try:
                    os.unlink(_path)
                except Exception:
                    pass
    def test_scp_send_write_exc(self):
        """scp_send must raise SCPError when writing to the channel fails."""
        class WriteError(Exception):
            pass
        def write_exc(func, data):
            raise WriteError
        cur_dir = os.path.dirname(__file__)
        file_name = 'file1'
        file_copy_to = 'file_copied'
        file_path_from = os.path.sep.join([cur_dir, file_name])
        file_copy_to_dirpath = os.path.expanduser('~/') + file_copy_to
        client = SSHClient(self.host, port=self.port,
                           pkey=self.user_key,
                           num_retries=1)
        for _path in (file_path_from, file_copy_to_dirpath):
            try:
                os.unlink(_path)
            except OSError:
                pass
        try:
            with open(file_path_from, 'wb') as fh:
                fh.write(b"adsfasldkfjabafj")
            # Force all channel writes to fail.
            client.eagain_write = write_exc
            self.assertRaises(SCPError, client.scp_send, file_path_from, file_copy_to_dirpath)
            # File created on SCP channel open
            self.assertTrue(os.path.isfile(file_copy_to_dirpath))
        finally:
            for _path in (file_path_from, file_copy_to_dirpath):
                try:
                    os.unlink(_path)
                except Exception:
                    pass
def test_scp_send_large_file(self):
cur_dir = os.path.dirname(__file__)
file_name = 'file1'
file_copy_to = 'file_copied'
file_path_from = os.path.sep.join([cur_dir, file_name])
file_copy_to_dirpath = os.path.expanduser('~/') + file_copy_to
for _path in (file_path_from, file_copy_to_dirpath):
try:
os.unlink(_path)
except OSError:
pass
sha = sha256()
try:
with open(file_path_from, 'wb') as fh:
for _ in range(10000):
data = os.urandom(1024)
fh.write(data)
sha.update(data)
source_file_sha = sha.hexdigest()
self.client.scp_send(file_path_from, file_copy_to_dirpath)
self.assertTrue(os.path.isfile(file_copy_to_dirpath))
sha = sha256()
with open(file_copy_to_dirpath, 'rb') as fh:
for block in fh:
sha.update(block)
written_file_hash = sha.hexdigest()
self.assertEqual(source_file_sha, written_file_hash)
finally:
for _path in (file_path_from, file_copy_to_dirpath):
try:
os.unlink(_path)
except Exception:
pass
    def test_scp_send_err(self):
        """scp_send raises SCPError on an unreadable local file; once
        readable, the copy succeeds with identical size and hash."""
        cur_dir = os.path.dirname(__file__)
        file_name = 'file1'
        file_copy_to = 'file_copied'
        file_path_from = os.path.sep.join([cur_dir, file_name])
        file_copy_to_dirpath = os.path.expanduser('~/') + file_copy_to
        for _path in (file_path_from, file_copy_to_dirpath):
            try:
                os.unlink(_path)
            except OSError:
                pass
        try:
            with open(file_path_from, 'wb') as fh:
                fh.write(b"adsfasldkfjabafj")
            # Permission denied reading local file
            os.chmod(file_path_from, 0o100)
            self.assertRaises(
                SCPError,
                self.client.scp_send, file_path_from, file_copy_to_dirpath)
            # Restore read permission; send should now succeed.
            os.chmod(file_path_from, 0o500)
            self.client.scp_send(file_path_from, file_copy_to_dirpath)
            self.assertTrue(os.path.isfile(file_copy_to_dirpath))
            # OS file flush race condition
            sleep(.1)
            read_file_size = os.stat(file_path_from).st_size
            written_file_size = os.stat(file_copy_to_dirpath).st_size
            self.assertEqual(read_file_size, written_file_size)
            sha = sha256()
            with open(file_path_from, 'rb') as fh:
                for block in fh:
                    sha.update(block)
            read_file_hash = sha.hexdigest()
            sha = sha256()
            with open(file_copy_to_dirpath, 'rb') as fh:
                for block in fh:
                    sha.update(block)
            written_file_hash = sha.hexdigest()
            self.assertEqual(read_file_hash, written_file_hash)
        finally:
            for _path in (file_path_from, file_copy_to_dirpath):
                try:
                    os.unlink(_path)
                except Exception:
                    pass
    def test_scp_send_dir_target(self):
        """scp_send into a directory target: file lands in the dir, a
        directory source requires recurse=True, and relative dir targets
        resolve against $HOME."""
        cur_dir = os.path.dirname(__file__)
        file_name = 'file1'
        file_path_from = os.path.sep.join([cur_dir, file_name])
        file_copy_to_dirpath = os.path.expanduser('~/')
        file_copy_to_abs = file_copy_to_dirpath + file_name
        dir_copy_from = os.path.sep.join([cur_dir, 'copy_from'])
        dir_copy_file_from = os.path.sep.join([dir_copy_from, file_name])
        os.makedirs(dir_copy_from)
        dir_copy_to = tempfile.mkdtemp()
        # Should be created by client
        shutil.rmtree(dir_copy_to)
        for _path in (file_path_from, file_copy_to_abs):
            try:
                os.unlink(_path)
            except OSError:
                pass
        try:
            with open(file_path_from, 'wb') as fh, \
                 open(dir_copy_file_from, 'wb') as fh2:
                fh.write(b"adsfasldkfjabafj")
                fh2.write(b"adsfasldkfjabafj")
            self.client.scp_send(file_path_from, file_copy_to_dirpath)
            self.assertTrue(os.path.isfile(file_copy_to_abs))
            # Directory source without recurse is a ValueError.
            self.assertRaises(ValueError, self.client.scp_send, dir_copy_from, dir_copy_to)
            self.assertFalse(os.path.isdir(dir_copy_to))
            self.client.scp_send(dir_copy_from, dir_copy_to, recurse=True)
            self.assertTrue(os.path.isdir(dir_copy_to))
            self.assertTrue(os.path.isfile(os.path.sep.join([dir_copy_to, file_name])))
        finally:
            try:
                for _path in (file_path_from, file_copy_to_abs):
                    os.unlink(_path)
            except OSError:
                pass
            try:
                shutil.rmtree(dir_copy_from)
            except Exception:
                pass
        # Relative path
        file_copy_to_dirpath = './'
        for _path in (file_path_from, file_copy_to_abs):
            try:
                os.unlink(_path)
            except OSError:
                pass
        try:
            with open(file_path_from, 'wb') as fh:
                fh.write(b"adsfasldkfjabafj")
            self.client.scp_send(file_path_from, file_copy_to_dirpath)
            self.assertTrue(os.path.isfile(file_copy_to_abs))
        finally:
            for _path in (file_path_from, file_copy_to_abs):
                try:
                    os.unlink(_path)
                except OSError:
                    pass
    def test_sftp_openfh_exc(self):
        """copy_remote_file must raise SFTPError when the remote file
        cannot be opened for reading, and must not create a local copy."""
        cur_dir = os.path.dirname(__file__)
        file_name = 'file1'
        file_path_from = os.path.sep.join([cur_dir, file_name])
        file_copy_to_dirpath = os.path.expanduser('~/')
        file_copy_to_abs = file_copy_to_dirpath + file_name
        for _path in (file_path_from, file_copy_to_abs):
            try:
                os.unlink(_path)
            except OSError:
                pass
        try:
            with open(file_path_from, 'wb') as fh:
                fh.write(b"adsfasldkfjabafj")
            # Write-only permissions - remote open for read should fail.
            os.chmod(file_path_from, 0o200)
            self.assertRaises(
                SFTPError, self.client.copy_remote_file, file_path_from, file_copy_to_dirpath)
            self.assertFalse(os.path.isfile(file_copy_to_abs))
        finally:
            for _path in (file_path_from, file_copy_to_abs):
                try:
                    os.unlink(_path)
                except OSError:
                    pass
    def test_scp_dir_target(self):
        """SCP with a directory target copies the file into that dir, for
        both absolute ($HOME) and relative ('./') targets."""
        cur_dir = os.path.dirname(__file__)
        file_name = 'file1'
        file_path_from = os.path.sep.join([cur_dir, file_name])
        file_copy_to_dirpath = os.path.expanduser('~/')
        file_copy_to_abs = file_copy_to_dirpath + file_name
        for _path in (file_path_from, file_copy_to_abs):
            try:
                os.unlink(_path)
            except OSError:
                pass
        try:
            with open(file_path_from, 'wb') as fh:
                fh.write(b"adsfasldkfjabafj")
            self.client.scp_recv(file_path_from, file_copy_to_dirpath)
            self.assertTrue(os.path.isfile(file_copy_to_abs))
        finally:
            for _path in (file_path_from, file_copy_to_abs):
                try:
                    os.unlink(_path)
                except OSError:
                    pass
        # Relative path
        file_copy_to_dirpath = './'
        for _path in (file_path_from, file_copy_to_abs):
            try:
                os.unlink(_path)
            except OSError:
                pass
        try:
            with open(file_path_from, 'wb') as fh:
                fh.write(b"adsfasldkfjabafj")
            self.client.scp_send(file_path_from, file_copy_to_dirpath)
            self.assertTrue(os.path.isfile(file_copy_to_abs))
        finally:
            for _path in (file_path_from, file_copy_to_abs):
                try:
                    os.unlink(_path)
                except OSError:
                    pass
    def test_scp_recv_dir_target_recurse_err(self):
        """Recursive scp_recv failure modes: unwritable local parent dir
        raises PermissionError; unreadable remote dir raises SCPError.
        Neither case must leave partial copies behind."""
        copy_from_dir = os.path.sep.join([os.path.dirname(__file__), 'copy_from_dir'])
        try:
            os.makedirs(copy_from_dir)
        except OSError:
            pass
        file_names = ['file1', 'file2']
        file_copy_to_parent_dir = os.path.expanduser('~/copy_parent_dir')
        file_copy_to_dirpath = os.path.sep.join([file_copy_to_parent_dir, 'copy_to_dir'])
        _files = [os.path.sep.join([copy_from_dir, file_name])
                  for file_name in file_names]
        copied_files = [os.path.sep.join([file_copy_to_dirpath, file_name])
                        for file_name in file_names]
        # Reset permissions from any previous failed run before cleanup.
        try:
            os.chmod(file_copy_to_parent_dir, 0o711)
        except OSError:
            pass
        try:
            shutil.rmtree(file_copy_to_parent_dir)
        except OSError:
            pass
        os.chmod(copy_from_dir, 0o711)
        for _path in _files:
            try:
                os.unlink(_path)
            except OSError:
                pass
        for _path in _files:
            with open(_path, 'wb') as fh:
                fh.write(b"adsfasldkfjabafj")
        # Permission denied for creating directories under parent dir
        os.mkdir(file_copy_to_parent_dir, mode=0o500)
        try:
            self.assertRaises(
                PermissionError,
                self.client.scp_recv, copy_from_dir, file_copy_to_dirpath, recurse=True)
            self.assertFalse(os.path.isdir(file_copy_to_dirpath))
            for _path in copied_files:
                self.assertFalse(os.path.isfile(_path))
            os.chmod(file_copy_to_parent_dir, 0o700)
            # Permission denied reading remote dir
            os.chmod(copy_from_dir, 0o000)
            self.assertRaises(
                SCPError,
                self.client.scp_recv, copy_from_dir, file_copy_to_dirpath, recurse=True)
            self.assertFalse(os.path.isdir(file_copy_to_dirpath))
            for _path in copied_files:
                self.assertFalse(os.path.isfile(_path))
        finally:
            # Restore permissions so cleanup can remove the trees.
            for _path in [file_copy_to_parent_dir, copy_from_dir]:
                os.chmod(_path, 0o711)
            try:
                shutil.rmtree(copy_from_dir)
            except OSError:
                pass
            try:
                shutil.rmtree(file_copy_to_parent_dir)
            except OSError:
                pass
def test_interactive_shell(self):
with self.client.open_shell() as shell:
shell.run(self.cmd)
shell.run(self.cmd)
stdout = list(shell.stdout)
self.assertListEqual(stdout, [self.resp, self.resp])
self.assertEqual(shell.exit_code, 0)
shell._chan = None
self.assertIsNone(shell.close())
def test_interactive_shell_exit_code(self):
with self.client.open_shell() as shell:
shell.run(self.cmd)
shell.run('sleep .1')
shell.run(self.cmd)
shell.run('exit 1')
stdout = list(shell.stdout)
self.assertListEqual(stdout, [self.resp, self.resp])
self.assertEqual(shell.exit_code, 1)
def test_sftp_init_exc(self):
def _make_sftp():
raise Exception
client = SSHClient(self.host, port=self.port,
pkey=self.user_key,
num_retries=1)
client._make_sftp_eagain = _make_sftp
self.assertRaises(SFTPError, client._make_sftp)
def test_disconnect_exc(self):
class DiscError(Exception):
pass
def _disc():
raise DiscError
client = SSHClient(self.host, port=self.port,
pkey=self.user_key,
retry_delay=.1,
num_retries=1,
timeout=1,
)
client._disconnect_eagain = _disc
client._connect_init_session_retry(1)
client.disconnect()
def test_copy_remote_dir_encoding(self):
client = SSHClient(self.host, port=self.port,
pkey=self.user_key,
num_retries=1)
remote_file_mock = MagicMock()
suffix = b"\xbc"
encoding = 'latin-1'
encoded_fn = suffix.decode(encoding)
file_list = [suffix + b"1", suffix + b"2"]
client.copy_remote_file = remote_file_mock
local_dir = (b"l_dir" + suffix).decode(encoding)
remote_dir = (b"r_dir" + suffix).decode(encoding)
client._copy_remote_dir(
file_list, local_dir, remote_dir, None, encoding=encoding)
call_args = [call(local_dir + "/" + file_list[0].decode(encoding),
remote_dir + "/" + file_list[0].decode(encoding),
recurse=True, sftp=None, encoding=encoding),
call(local_dir + "/" + file_list[1].decode(encoding),
remote_dir + "/" + file_list[1].decode(encoding),
recurse=True, sftp=None, encoding=encoding)
]
self.assertListEqual(remote_file_mock.call_args_list, call_args)
def test_many_short_lived_commands(self):
for _ in range(20):
timeout = 2
start = datetime.now()
client = SSHClient(self.host, port=self.port,
pkey=self.user_key,
num_retries=1,
allow_agent=False,
timeout=timeout)
host_out = client.run_command(self.cmd)
_ = list(host_out.stdout)
end = datetime.now() - start
duration = end.total_seconds()
self.assertTrue(duration < timeout * 0.9, msg=f"Duration of instant cmd is {duration}")
# TODO
# * read output callback
| 42,968 | Python | .py | 999 | 30.054054 | 116 | 0.555341 | ParallelSSH/parallel-ssh | 1,198 | 148 | 36 | LGPL-2.1 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,364 | test_tunnel.py | ParallelSSH_parallel-ssh/tests/native/test_tunnel.py | # This file is part of parallel-ssh.
#
# Copyright (C) 2014-2022 Panos Kittenis and contributors.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, version 2.1.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import gc
import os
import time
import unittest
from datetime import datetime
from getpass import getuser
from sys import version_info
from gevent import sleep, spawn, Timeout as GTimeout
from ssh2.exceptions import SocketSendError, SocketRecvError
from pssh.clients.native import SSHClient, ParallelSSHClient
from pssh.clients.native.tunnel import LocalForwarder, TunnelServer, FORWARDER
from pssh.config import HostConfig
from pssh.exceptions import ProxyError
from .base_ssh2_case import PKEY_FILENAME, PUB_FILE
from ..embedded_server.openssh import OpenSSHServer
class TunnelTest(unittest.TestCase):
    """Tests for proxy/tunnel functionality of the native clients.

    A shared OpenSSH proxy server is started once for the class; individual
    tests start additional target servers on separate loopback addresses.
    """
    # Class-level OpenSSHServer acting as the proxy host for all tests.
    server = None
    @classmethod
    def setUpClass(cls):
        # NOTE(review): int('0600') is decimal 600, not octal - the py2
        # branch looks wrong, but it is dead code on Python 3.
        _mask = int('0600') if version_info <= (2,) else 0o600
        os.chmod(PKEY_FILENAME, _mask)
        cls.port = 2225
        cls.cmd = 'echo me'
        cls.resp = u'me'
        cls.user_key = PKEY_FILENAME
        cls.user_pub_key = PUB_FILE
        cls.user = getuser()
        cls.proxy_host = '127.0.0.9'
        cls.proxy_port = cls.port + 1
        cls.server = OpenSSHServer(listen_ip=cls.proxy_host, port=cls.proxy_port)
        cls.server.start_server()
    @classmethod
    def tearDownClass(cls):
        cls.server.stop()
    def test_forwarder(self):
        """Enqueueing a client on a started LocalForwarder creates a
        tunnel server for it."""
        forwarder = LocalForwarder()
        forwarder.daemon = True
        forwarder.start()
        forwarder.started.wait()
        client = SSHClient(
            self.proxy_host, port=self.proxy_port, pkey=self.user_key)
        forwarder.enqueue(client, self.proxy_host, self.port)
        # Wait for the listen port to come back before checking servers.
        forwarder.out_q.get()
        self.assertTrue(len(forwarder._servers) > 0)
        forwarder.shutdown()
    def test_tunnel_server(self):
        """Commands must run against the target host through the proxy."""
        remote_host = '127.0.0.8'
        remote_server = OpenSSHServer(listen_ip=remote_host, port=self.port)
        remote_server.start_server()
        try:
            client = SSHClient(
                remote_host, port=self.port, pkey=self.user_key,
                num_retries=1,
                proxy_host=self.proxy_host,
                proxy_pkey=self.user_key,
                proxy_port=self.proxy_port,
            )
            output = client.run_command(self.cmd)
            _stdout = list(output.stdout)
            self.assertListEqual(_stdout, [self.resp])
            # Client host/port must reflect the target, not the proxy.
            self.assertEqual(remote_host, client.host)
            self.assertEqual(self.port, client.port)
        finally:
            remote_server.stop()
    def test_proxy_pkey_bytes_data(self):
        """Private keys may be given as in-memory bytes for both target
        and proxy authentication."""
        remote_host = '127.0.0.8'
        remote_server = OpenSSHServer(listen_ip=remote_host, port=self.port)
        remote_server.start_server()
        with open(self.user_key, 'rb') as fh:
            pkey_data = fh.read()
        try:
            client = ParallelSSHClient(
                [remote_host], port=self.port, pkey=pkey_data,
                num_retries=1,
                proxy_host=self.proxy_host,
                proxy_pkey=pkey_data,
                proxy_port=self.proxy_port,
            )
            output = client.run_command(self.cmd)
            _stdout = list(output[0].stdout)
            self.assertListEqual(_stdout, [self.resp])
            self.assertEqual(remote_host, output[0].host)
            self.assertEqual(self.port, client.port)
        finally:
            remote_server.stop()
    # The purpose of this test is to exercise
    # https://github.com/ParallelSSH/parallel-ssh/issues/304
    def test_tunnel_server_reconn(self):
        """Repeated connect/disconnect cycles through the proxy must not
        leak or crash the shared forwarder (regression for #304)."""
        remote_host = '127.0.0.8'
        remote_server = OpenSSHServer(listen_ip=remote_host, port=self.port)
        remote_server.start_server()
        reconn_n = 20  # Number of reconnect attempts
        reconn_delay = .1  # Number of seconds to delay between reconnects
        try:
            for _ in range(reconn_n):
                client = SSHClient(
                    remote_host, port=self.port, pkey=self.user_key,
                    num_retries=1,
                    proxy_host=self.proxy_host,
                    proxy_pkey=self.user_key,
                    proxy_port=self.proxy_port,
                )
                output = client.run_command(self.cmd)
                client.wait_finished(output)
                self.assertEqual(remote_host, client.host)
                self.assertEqual(self.port, client.port)
                client.disconnect()
                # Force cleanup of dead tunnel servers between iterations.
                FORWARDER._cleanup_servers()
                time.sleep(reconn_delay)
                gc.collect()
        finally:
            remote_server.stop()
    def test_tunnel_server_same_port(self):
        """Tunnelling works when target and proxy use the same port number."""
        remote_host = '127.0.0.7'
        remote_server = OpenSSHServer(listen_ip=remote_host, port=self.proxy_port)
        remote_server.start_server()
        try:
            client = SSHClient(
                remote_host, port=self.proxy_port, pkey=self.user_key,
                num_retries=1,
                retry_delay=.1,
                proxy_host=self.proxy_host,
            )
            output = client.run_command(self.cmd)
            _stdout = list(output.stdout)
            self.assertListEqual(_stdout, [self.resp])
            self.assertEqual(remote_host, client.host)
            self.assertEqual(self.proxy_port, client.port)
        finally:
            remote_server.stop()
    def test_tunnel_parallel_client(self):
        """Parallel client through a proxy returns correct output for all
        hosts; also compares run time of 5 vs 10 host runs."""
        hosts = ['127.0.0.1%s' % (d,) for d in range(5)]
        servers = [OpenSSHServer(listen_ip=_host, port=self.port) for _host in hosts]
        for server in servers:
            server.start_server()
        hosts_5 = [hosts[0], hosts[1], hosts[2], hosts[3], hosts[4]]
        try:
            client = ParallelSSHClient(hosts_5, port=self.port, pkey=self.user_key,
                                       proxy_host=self.proxy_host,
                                       proxy_pkey=self.user_key,
                                       proxy_port=self.proxy_port,
                                       num_retries=1,
                                       )
            start = datetime.now()
            output = client.run_command(self.cmd)
            end = datetime.now()
            dt_5 = end - start
            client = ParallelSSHClient(hosts, port=self.port, pkey=self.user_key,
                                       proxy_host=self.proxy_host,
                                       proxy_pkey=self.user_key,
                                       proxy_port=self.proxy_port,
                                       num_retries=1,
                                       )
            start = datetime.now()
            output = client.run_command(self.cmd)
            end = datetime.now()
            dt_10 = end - start
            # Timing ratio computed but assertion disabled - too flaky.
            dt = dt_10.total_seconds() / dt_5.total_seconds()
            # self.assertTrue(dt < 2)
            client.join(output)
            self.assertEqual(len(hosts), len(output))
            for i, host_out in enumerate(output):
                _stdout = list(host_out.stdout)
                self.assertListEqual(_stdout, [self.resp])
                self.assertEqual(hosts[i], host_out.host)
        finally:
            for server in servers:
                server.stop()
    def test_tunnel_parallel_client_part_failure(self):
        """With only one of three target servers up, the other hosts get
        exceptions while the live host still returns output."""
        hosts = ['127.0.0.11', '127.0.0.12', '127.0.0.13']
        servers = [OpenSSHServer(listen_ip=_host, port=self.port) for _host in hosts]
        servers[0].start_server()
        try:
            client = ParallelSSHClient(hosts, port=self.port, pkey=self.user_key,
                                       proxy_host=self.proxy_host,
                                       proxy_pkey=self.user_key,
                                       proxy_port=self.proxy_port,
                                       num_retries=1,
                                       retry_delay=.1,
                                       )
            output = client.run_command(self.cmd, stop_on_errors=False)
            client.join(output)
            self.assertEqual(len(hosts), len(output))
            self.assertTrue(output[1].exception is not None)
            self.assertTrue(output[2].exception is not None)
            self.assertListEqual(list(output[0].stdout), [self.resp])
        finally:
            for server in servers:
                server.stop()
    def test_tunnel_parallel_client_running_fail(self):
        """Servers stopped mid-session cause per-host exceptions on the
        next run while remaining hosts keep working."""
        hosts = ['127.0.0.11', '127.0.0.12', '127.0.0.13']
        servers = [OpenSSHServer(listen_ip=_host, port=self.port) for _host in hosts]
        for server in servers:
            server.start_server()
        try:
            client = ParallelSSHClient(hosts, port=self.port, pkey=self.user_key,
                                       proxy_host=self.proxy_host,
                                       proxy_pkey=self.user_key,
                                       proxy_port=self.proxy_port,
                                       num_retries=1,
                                       retry_delay=.1,
                                       )
            output = client.run_command(self.cmd)
            client.join(output)
            # Kill two servers and drop their cached connections.
            for server in (servers[1], servers[2]):
                server.stop()
                server.server_proc.communicate()
            client._host_clients[(1, hosts[1])].disconnect()
            client._host_clients[(2, hosts[2])].disconnect()
            output = client.run_command(self.cmd, stop_on_errors=False)
            client.join(output)
            self.assertEqual(len(hosts), len(output))
            self.assertTrue(output[1].exception is not None)
            self.assertTrue(output[2].exception is not None)
            self.assertListEqual(list(output[0].stdout), [self.resp])
        finally:
            for server in servers:
                server.stop()
    def test_tunnel_host_config(self):
        """Per-host proxy settings via HostConfig: a bad proxy yields
        ProxyError for that host only."""
        hosts = ['127.0.0.11', '127.0.0.12']
        servers = [OpenSSHServer(listen_ip=_host, port=self.port) for _host in hosts]
        for server in servers:
            server.start_server()
        host_config = [
            HostConfig(proxy_host=self.proxy_host,
                       proxy_port=self.proxy_port,
                       proxy_pkey=self.user_key),
            HostConfig(proxy_host='127.0.0.155',
                       proxy_port=123),
        ]
        client = ParallelSSHClient(hosts, port=self.port, pkey=self.user_key,
                                   host_config=host_config, num_retries=1)
        output = client.run_command(self.cmd, stop_on_errors=False)
        client.join(output)
        self.assertIsInstance(output[1].exception, ProxyError)
        self.assertTrue(output[0].exception is None)
        stdout = list(output[0].stdout)
        self.assertListEqual(stdout, [self.resp])
    def test_proxy_error(self):
        """An unreachable proxy surfaces as ProxyError on the host output."""
        client = ParallelSSHClient([self.proxy_host], port=self.port, pkey=self.user_key,
                                   proxy_host='127.0.0.155',
                                   proxy_port=123,
                                   num_retries=1)
        output = client.run_command(self.cmd, stop_on_errors=False)
        client.join(output)
        self.assertIsInstance(output[0].exception, ProxyError)
    def test_proxy_bad_target(self):
        """A reachable proxy with an unreachable target raises on connect."""
        self.assertRaises(
            SocketRecvError, SSHClient,
            '127.0.0.155', port=self.proxy_port, pkey=self.user_key,
            proxy_host=self.proxy_host, proxy_port=self.proxy_port,
            num_retries=1,
        )
    def test_forwarder_exit(self):
        """Dead tunnel servers are cleaned up; errors in server start do
        not kill the forwarder thread."""
        def _start_server():
            raise Exception
        forwarder = LocalForwarder()
        forwarder.daemon = True
        forwarder.start()
        forwarder.started.wait()
        client = SSHClient(
            self.proxy_host, port=self.proxy_port, pkey=self.user_key)
        forwarder.enqueue(client, self.proxy_host, self.port)
        forwarder.out_q.get()
        self.assertTrue(len(forwarder._servers) > 0)
        # Kill the underlying connection, then cleanup should remove it.
        client.sock.close()
        client.disconnect()
        forwarder._cleanup_servers()
        self.assertEqual(len(forwarder._servers), 0)
        forwarder._start_server = _start_server
        forwarder.enqueue(client, self.proxy_host, self.port)
        sleep(.1)
    def test_socket_channel_error(self):
        """Exercise TunnelServer forward loops with stub sockets/channels
        raising errors, returning data, or signalling EOF."""
        class SocketError(Exception):
            pass
        class ChannelFailure(object):
            # Channel stub whose reads/writes always fail.
            def read(self):
                raise SocketRecvError
            def write(self, data):
                raise SocketSendError
            def eof(self):
                return False
            def close(self):
                return
        class Channel(object):
            # Working channel stub with controllable EOF flag.
            def __init__(self):
                self._eof = False
            def read(self):
                return 5, b"asdfa"
            def write(self, data):
                return 0, len(data)
            def eof(self):
                return self._eof
            def close(self):
                return
        class Socket(object):
            # Working socket stub.
            def recv(self, num):
                return b"asdfaf"
            def close(self):
                return
        class SocketFailure(object):
            # Socket stub whose send/recv always fail.
            def sendall(self, data):
                raise SocketError
            def recv(self, num):
                raise SocketError
            def close(self):
                return
        class SocketEmpty(object):
            # Socket stub at EOF - recv returns empty bytes.
            def recv(self, num):
                return b""
            def close(self):
                return
        client = SSHClient(
            self.proxy_host, port=self.proxy_port, pkey=self.user_key)
        server = TunnelServer(client, self.proxy_host, self.port)
        let = spawn(server._read_forward_sock, SocketEmpty(), Channel())
        let.start()
        sleep(.01)
        self.assertRaises(SocketSendError, server._read_forward_sock, Socket(), ChannelFailure())
        self.assertRaises(SocketError, server._read_forward_sock, SocketFailure(), Channel())
        self.assertRaises(SocketError, server._read_channel, SocketFailure(), Channel())
        self.assertRaises(SocketRecvError, server._read_channel, Socket(), ChannelFailure())
        channel = Channel()
        _socket = Socket()
        source_let = spawn(server._read_forward_sock, _socket, channel)
        dest_let = spawn(server._read_channel, _socket, channel)
        # EOF on channel should let both greenlets finish cleanly.
        channel._eof = True
        self.assertIsNone(server._wait_send_receive_lets(source_let, dest_let, channel))
        let.kill()
    def test_server_start(self):
        """_get_server_listen_port must report the listen port once the
        server flags itself as started."""
        _port = 1234
        class Server(object):
            # Minimal stand-in for a tunnel server with a listen port.
            def __init__(self):
                self.started = False
                self.listen_port = _port
        server = Server()
        forwarder = LocalForwarder()
        let = spawn(forwarder._get_server_listen_port, None, server)
        let.start()
        sleep(.01)
        server.started = True
        sleep(.01)
        with GTimeout(seconds=1):
            port = forwarder.out_q.get()
        self.assertEqual(port, _port)
| 15,650 | Python | .py | 361 | 30.562327 | 97 | 0.565309 | ParallelSSH/parallel-ssh | 1,198 | 148 | 36 | LGPL-2.1 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,365 | conf.py | ParallelSSH_parallel-ssh/doc/conf.py | # -*- coding: utf-8 -*-
#
# Parallel-SSH documentation build configuration file, created by
# sphinx-quickstart on Mon Mar 10 17:08:38 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import pssh
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
needs_sphinx = '1.3'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.autosummary',
'sphinx.ext.coverage',
# 'sphinx.ext.viewcode',
]
intersphinx_mapping = {'ssh2-python': ('https://ssh2-python.readthedocs.io/en/latest/', None),
'ssh-python': ('https://ssh-python.readthedocs.io/en/latest/', None),
'gevent': ('https://www.gevent.org', None),
'python': ('https://docs.python.org/2', None),
}
# Autodoc settings
autodoc_default_flags = ['members']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Parallel-SSH'
copyright = u'2014-2020, P Kittenis'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
# NOTE(review): the 'pssh' package must be importable at docs build time -
# both 'version' and 'release' below are read from pssh.__version__.
version = pssh.__version__
# The full version, including alpha/beta/rc tags.
release = pssh.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
# NOTE(review): requires the 'sphinx_rtd_theme' package to be installed at
# build time.
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = ['static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
html_show_sourcelink = False
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'Parallel-SSHdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
  ('index', 'Parallel-SSH.tex', u'Parallel-SSH Documentation',
   u'P Kittenis', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'parallel-ssh', u'Parallel-SSH Documentation',
     [u'P Kittenis'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
# TODO(review): 'One line description of project.' below is the
# sphinx-quickstart placeholder - fill in a real description.
texinfo_documents = [
  ('index', 'Parallel-SSH', u'Parallel-SSH Documentation',
   u'P Kittenis', 'Parallel-SSH', 'One line description of project.',
   'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# -- Options for Epub output ----------------------------------------------
# Bibliographic Dublin Core info.
epub_title = u'Parallel-SSH'
epub_author = u'P Kittenis'
epub_publisher = u'P Kittenis'
epub_copyright = u'2017, P Kittenis'
# The basename for the epub file. It defaults to the project name.
#epub_basename = u'Parallel-SSH'
# The HTML theme for the epub output. Since the default themes are not optimized
# for small screen space, using the same theme for HTML and epub output is
# usually not wise. This defaults to 'epub', a theme designed to save visual
# space.
#epub_theme = 'epub'
# The language of the text. It defaults to the language option
# or en if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
#epub_cover = ()
# A sequence of (type, uri, title) tuples for the guide element of content.opf.
#epub_guide = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files that should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3
# Allow duplicate toc entries.
#epub_tocdup = True
# Choose between 'default' and 'includehidden'.
#epub_tocscope = 'default'
# Fix unsupported image types using the PIL.
#epub_fix_images = False
# Scale large images.
#epub_max_image_width = 0
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#epub_show_urls = 'inline'
# If false, no index is generated.
#epub_use_index = True
# Include both the class docstring and the __init__ docstring in autoclass
# generated documentation.
autoclass_content = "both"
| 10,862 | Python | .py | 254 | 40.874016 | 94 | 0.723564 | ParallelSSH/parallel-ssh | 1,198 | 148 | 36 | LGPL-2.1 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,366 | config.py | ParallelSSH_parallel-ssh/pssh/config.py | # This file is part of parallel-ssh.
#
# Copyright (C) 2014-2022 Panos Kittenis and contributors.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, version 2.1.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
"""Host specific configuration."""
class HostConfig(object):
    """Host configuration for ParallelSSHClient.

    Used to hold individual configuration for each host in ParallelSSHClient
    host list.

    All attributes are optional; ``None`` means "use the client default".
    Type validation is performed on construction and raises ``ValueError``
    on the first invalid attribute.
    """
    __slots__ = ('user', 'port', 'password', 'private_key', 'allow_agent',
                 'alias', 'num_retries', 'retry_delay', 'timeout', 'identity_auth',
                 'proxy_host', 'proxy_port', 'proxy_user', 'proxy_password', 'proxy_pkey',
                 'keepalive_seconds', 'ipv6_only', 'cert_file', 'auth_thread_pool', 'gssapi_auth',
                 'gssapi_server_identity', 'gssapi_client_identity', 'gssapi_delegate_credentials',
                 'forward_ssh_agent',
                 )

    def __init__(self, user=None, port=None, password=None, private_key=None,
                 allow_agent=None, alias=None, num_retries=None, retry_delay=None, timeout=None,
                 identity_auth=None,
                 proxy_host=None, proxy_port=None, proxy_user=None, proxy_password=None,
                 proxy_pkey=None,
                 keepalive_seconds=None,
                 ipv6_only=None,
                 cert_file=None,
                 auth_thread_pool=True,
                 gssapi_auth=False,
                 gssapi_server_identity=None,
                 gssapi_client_identity=None,
                 gssapi_delegate_credentials=False,
                 forward_ssh_agent=False,
                 ):
        """
        :param user: Username to login as.
        :type user: str
        :param port: Port number.
        :type port: int
        :param password: Password to login with.
        :type password: str
        :param private_key: Private key file to use for authentication.
        :type private_key: str
        :param allow_agent: Enable/disable SSH agent authentication.
        :type allow_agent: bool
        :param alias: Use an alias for this host.
        :type alias: str
        :param num_retries: Number of retry attempts before giving up on connection
            and SSH operations.
        :type num_retries: int
        :param retry_delay: Delay in seconds between retry attempts.
        :type retry_delay: int or float
        :param timeout: Timeout value for connection and SSH sessions in seconds.
        :type timeout: int or float
        :param identity_auth: Enable/disable identity file authentication under user's
            home directory (~/.ssh).
        :type identity_auth: bool
        :param proxy_host: Proxy SSH host to use for connecting to target SSH host.
            client -> proxy_host -> SSH host
        :type proxy_host: str
        :param proxy_port: Port for proxy host.
        :type proxy_port: int
        :param proxy_user: Username for proxy host.
        :type proxy_user: str
        :param proxy_password: Password for proxy host.
        :type proxy_password: str
        :param proxy_pkey: Private key for proxy host.
        :type proxy_pkey: str
        :param keepalive_seconds: Seconds between keepalive packets being sent.
            0 to disable.
        :type keepalive_seconds: int
        :param ipv6_only: Use IPv6 addresses only.
        :type ipv6_only: bool
        :param cert_file: Certificate file for authentication (pssh.clients.ssh only)
        :type cert_file: str
        :param auth_thread_pool: Enable/Disable use of thread pool for authentication.
        :type auth_thread_pool: bool
        :param forward_ssh_agent: Currently unused.
        :type forward_ssh_agent: bool
        :param gssapi_auth: Enable/disable GSSAPI authentication. (pssh.clients.ssh only)
        :type gssapi_auth: bool
        :param gssapi_server_identity: Set GSSAPI server identity. (pssh.clients.ssh only)
        :type gssapi_server_identity: str
        :param gssapi_client_identity: Set GSSAPI client identity. (pssh.clients.ssh only)
        :type gssapi_client_identity: str
        :param gssapi_delegate_credentials: Enable/disable server credentials
            delegation. (pssh.clients.ssh only)
        :type gssapi_delegate_credentials: bool

        :raises ValueError: On any attribute of an unexpected type.
        """
        self.user = user
        self.port = port
        self.password = password
        self.private_key = private_key
        self.allow_agent = allow_agent
        self.alias = alias
        self.num_retries = num_retries
        self.timeout = timeout
        self.retry_delay = retry_delay
        self.identity_auth = identity_auth
        self.proxy_host = proxy_host
        self.proxy_port = proxy_port
        self.proxy_user = proxy_user
        self.proxy_password = proxy_password
        self.proxy_pkey = proxy_pkey
        self.keepalive_seconds = keepalive_seconds
        self.ipv6_only = ipv6_only
        self.cert_file = cert_file
        self.auth_thread_pool = auth_thread_pool
        self.forward_ssh_agent = forward_ssh_agent
        self.gssapi_auth = gssapi_auth
        self.gssapi_server_identity = gssapi_server_identity
        self.gssapi_client_identity = gssapi_client_identity
        self.gssapi_delegate_credentials = gssapi_delegate_credentials
        self._sanity_checks()

    def _sanity_checks(self):
        """Validate attribute types, raising ``ValueError`` on first mismatch.

        Previously several checks raised ``ValueError("msg %s", value)`` -
        passing the value as a second exception argument instead of
        interpolating it - which produced unformatted messages. All messages
        are now %-formatted. Check order is preserved so the first failing
        attribute reported is unchanged.
        """
        # (value, allowed type(s), error message template)
        checks = (
            (self.user, str, "Username %s is not a string"),
            (self.port, int, "Port %s is not an integer"),
            (self.password, str, "Password %s is not a string"),
            (self.alias, str, "Alias %s is not a string"),
            (self.private_key, (str, bytes), "Private key %s is not a string or bytes"),
            (self.allow_agent, bool, "Allow agent %s is not a boolean"),
            (self.num_retries, int, "Num retries %s is not an integer"),
            # timeout accepts int or float - message fixed accordingly
            (self.timeout, (int, float), "Timeout %s is not a number"),
            (self.retry_delay, (int, float), "Retry delay %s is not a number"),
            (self.identity_auth, bool, "Identity auth %s is not a boolean"),
            (self.proxy_host, str, "Proxy host %s is not a string"),
            (self.proxy_port, int, "Proxy port %s is not an integer"),
            (self.proxy_user, str, "Proxy user %s is not a string"),
            (self.proxy_password, str, "Proxy password %s is not a string"),
            (self.proxy_pkey, (str, bytes), "Proxy pkey %s is not a string or bytes"),
            (self.keepalive_seconds, int, "Keepalive seconds %s is not an integer"),
            (self.ipv6_only, bool, "IPv6 only %s is not a boolean value"),
            (self.cert_file, (str, bytes), "Cert file %s is not a string or bytes"),
            (self.forward_ssh_agent, bool, "Forward SSH agent %s is not a bool"),
            (self.gssapi_auth, bool, "GSSAPI auth %s is not a bool"),
            (self.gssapi_server_identity, str, "GSSAPI server identity %s is not a string"),
            (self.gssapi_client_identity, str, "GSSAPI client identity %s is not a string"),
            (self.gssapi_delegate_credentials, bool, "GSSAPI delegate credentials %s is not a bool"),
        )
        for value, types, msg in checks:
            # None always means "not set" and is never an error
            if value is not None and not isinstance(value, types):
                raise ValueError(msg % (value,))
| 10,050 | Python | .py | 176 | 47.1875 | 115 | 0.655924 | ParallelSSH/parallel-ssh | 1,198 | 148 | 36 | LGPL-2.1 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,367 | _version.py | ParallelSSH_parallel-ssh/pssh/_version.py |
# This file helps to compute a version number in source trees obtained from
# git-archive tarball (such as those provided by githubs download-from-tag
# feature). Distribution tarballs (built by setup.py sdist) and build
# directories (produced by setup.py build) will contain a much shorter file
# that just contains the computed version number.
# This file is released into the public domain. Generated by
# versioneer-0.18 (https://github.com/warner/python-versioneer)
"""Git implementation of _version.py."""
import errno
import os
import re
import subprocess
import sys
def get_keywords():
    """Get the keywords needed to look up the version information."""
    # These string values are substituted by git during 'git archive'.
    # setup.py/versioneer.py greps for the variable names, so each one must
    # stay defined on a line of its own; _version.py simply calls
    # get_keywords().
    git_refnames = "$Format:%d$"
    git_full = "$Format:%H$"
    git_date = "$Format:%ci$"
    return {"refnames": git_refnames, "full": git_full, "date": git_date}
class VersioneerConfig:
    """Container for Versioneer configuration parameters."""


def get_config():
    """Create, populate and return the VersioneerConfig() object."""
    # The values below are baked in by 'setup.py versioneer' when it
    # generates _version.py.
    cfg = VersioneerConfig()
    settings = (
        ("VCS", "git"),
        ("style", "pep440"),
        ("tag_prefix", ""),
        ("parentdir_prefix", "None"),
        ("versionfile_source", "pssh/_version.py"),
        ("verbose", False),
    )
    for name, value in settings:
        setattr(cfg, name, value)
    return cfg
class NotThisMethod(Exception):
    """Exception raised if a method is not valid for the current scenario.

    Raised internally by the version-discovery helpers to signal that the
    current strategy failed and the caller (see get_versions()) should fall
    through to the next one.
    """
LONG_VERSION_PY = {}
HANDLERS = {}


def register_vcs_handler(vcs, method):  # decorator
    """Decorator to mark a method as the handler for a particular VCS.

    Registered functions are stored in the module-level HANDLERS mapping
    as HANDLERS[vcs][method]; the decorated function is returned unchanged.
    """
    def decorate(f):
        """Store f in HANDLERS[vcs][method]."""
        HANDLERS.setdefault(vcs, {})[method] = f
        return f
    return decorate
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
                env=None):
    """Call the given command(s).

    Tries each executable name in ``commands`` in order until one can be
    spawned, then returns its stdout and return code.

    :param commands: Candidate executable names to try in order (e.g.
        ``["git"]``, or ``["git.cmd", "git.exe"]`` on Windows).
    :param args: Argument list appended to the chosen command.
    :param cwd: Working directory for the child process, or ``None``.
    :param verbose: Print diagnostics on failure.
    :param hide_stderr: Capture (and discard) stderr instead of inheriting it.
    :param env: Environment mapping for the child, or ``None`` to inherit.
    :returns: Tuple of ``(stdout, returncode)``. ``(None, None)`` if no
        candidate could be spawned; ``(None, returncode)`` on non-zero exit.
    """
    assert isinstance(commands, list)
    p = None
    for c in commands:
        try:
            dispcmd = str([c] + args)
            # remember shell=False, so use git.cmd on windows, not just git
            p = subprocess.Popen([c] + args, cwd=cwd, env=env,
                                 stdout=subprocess.PIPE,
                                 stderr=(subprocess.PIPE if hide_stderr
                                         else None))
            break
        except EnvironmentError:
            e = sys.exc_info()[1]
            if e.errno == errno.ENOENT:
                # executable not found - try the next candidate name
                continue
            if verbose:
                print("unable to run %s" % dispcmd)
                print(e)
            return None, None
    else:
        # for/else: loop finished without 'break' - no candidate spawned
        if verbose:
            print("unable to find command, tried %s" % (commands,))
        return None, None
    stdout = p.communicate()[0].strip()
    if sys.version_info[0] >= 3:
        # bytes on py3 - decode to str for callers
        stdout = stdout.decode()
    if p.returncode != 0:
        if verbose:
            print("unable to run %s (error)" % dispcmd)
            print("stdout was %s" % stdout)
        return None, p.returncode
    return stdout, p.returncode
def versions_from_parentdir(parentdir_prefix, root, verbose):
    """Try to determine the version from the parent directory name.

    Source tarballs conventionally unpack into a directory that includes both
    the project name and a version string. We will also support searching up
    two directory levels for an appropriately named parent directory.

    :raises NotThisMethod: If no directory within two levels above ``root``
        starts with ``parentdir_prefix``.
    """
    tried = []
    # check root itself, then up to two parent directories
    for _ in range(3):
        dirname = os.path.basename(root)
        if dirname.startswith(parentdir_prefix):
            return {"version": dirname[len(parentdir_prefix):],
                    "full-revisionid": None,
                    "dirty": False, "error": None, "date": None}
        tried.append(root)
        root = os.path.dirname(root)  # up a level
    if verbose:
        print("Tried directories %s but none started with prefix %s" %
              (str(tried), parentdir_prefix))
    raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
    """Extract version information from the given file.

    Scans ``versionfile_abs`` for the git_refnames/git_full/git_date
    assignments and returns whichever values were found. Used from setup.py,
    which deliberately avoids importing _version.py, hence the regexp scan.
    """
    # variable name in the scanned file -> key in the returned dict
    wanted = (("git_refnames", "refnames"),
              ("git_full", "full"),
              ("git_date", "date"))
    keywords = {}
    try:
        with open(versionfile_abs, "r") as fobj:
            for line in fobj:
                stripped = line.strip()
                for var_name, key in wanted:
                    if stripped.startswith(var_name + " ="):
                        mo = re.search(r'=\s*"(.*)"', line)
                        if mo:
                            keywords[key] = mo.group(1)
    except EnvironmentError:
        # missing/unreadable file -> empty dict; caller decides what to do
        pass
    return keywords
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
    """Get version information from git keywords.

    ``keywords`` holds the git-archive substitutions ("refnames", "full",
    "date"). Picks the first sorted tag matching ``tag_prefix`` and returns
    a version dict.

    :raises NotThisMethod: If keywords are missing or unexpanded (i.e. this
        is not a git-archive tarball).
    """
    if not keywords:
        raise NotThisMethod("no keywords at all, weird")
    date = keywords.get("date")
    if date is not None:
        # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
        # datestamp. However we prefer "%ci" (which expands to an "ISO-8601
        # -like" string, which we must then edit to make compliant), because
        # it's been around since git-1.5.3, and it's too difficult to
        # discover which version we're using, or to work around using an
        # older one.
        date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
    refnames = keywords["refnames"].strip()
    if refnames.startswith("$Format"):
        # the substitution marker survived -> not an archive tarball
        if verbose:
            print("keywords are unexpanded, not using")
        raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
    refs = set([r.strip() for r in refnames.strip("()").split(",")])
    # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
    # just "foo-1.0". If we see a "tag: " prefix, prefer those.
    TAG = "tag: "
    tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
    if not tags:
        # Either we're using git < 1.8.3, or there really are no tags. We use
        # a heuristic: assume all version tags have a digit. The old git %d
        # expansion behaves like git log --decorate=short and strips out the
        # refs/heads/ and refs/tags/ prefixes that would let us distinguish
        # between branches and tags. By ignoring refnames without digits, we
        # filter out many common branch names like "release" and
        # "stabilization", as well as "HEAD" and "master".
        tags = set([r for r in refs if re.search(r'\d', r)])
        if verbose:
            print("discarding '%s', no digits" % ",".join(refs - tags))
    if verbose:
        print("likely tags: %s" % ",".join(sorted(tags)))
    for ref in sorted(tags):
        # sorting will prefer e.g. "2.0" over "2.0rc1"
        if ref.startswith(tag_prefix):
            r = ref[len(tag_prefix):]
            if verbose:
                print("picking %s" % r)
            return {"version": r,
                    "full-revisionid": keywords["full"].strip(),
                    "dirty": False, "error": None,
                    "date": date}
    # no suitable tags, so version is "0+unknown", but full hex is still there
    if verbose:
        print("no suitable tags, using unknown + full revision id")
    return {"version": "0+unknown",
            "full-revisionid": keywords["full"].strip(),
            "dirty": False, "error": "no suitable tags", "date": None}
@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
    """Get version from 'git describe' in the root of the source tree.

    This only gets called if the git-archive 'subst' keywords were *not*
    expanded, and _version.py hasn't already been rewritten with a short
    version string, meaning we're inside a checked out source tree.

    :returns: Dict of version "pieces": "long"/"short" revision ids,
        "closest-tag", "distance", "dirty", "error" and "date".
    :raises NotThisMethod: If ``root`` is not under git control or git
        commands fail.
    """
    GITS = ["git"]
    if sys.platform == "win32":
        GITS = ["git.cmd", "git.exe"]
    out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
                          hide_stderr=True)
    if rc != 0:
        if verbose:
            print("Directory %s not under git control" % root)
        raise NotThisMethod("'git rev-parse --git-dir' returned error")
    # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
    # if there isn't one, this yields HEX[-dirty] (no NUM)
    describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
                                          "--always", "--long",
                                          "--match", "%s*" % tag_prefix],
                                   cwd=root)
    # --long was added in git-1.5.5
    if describe_out is None:
        raise NotThisMethod("'git describe' failed")
    describe_out = describe_out.strip()
    full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
    if full_out is None:
        raise NotThisMethod("'git rev-parse' failed")
    full_out = full_out.strip()
    pieces = {}
    pieces["long"] = full_out
    pieces["short"] = full_out[:7] # maybe improved later
    pieces["error"] = None
    # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
    # TAG might have hyphens.
    git_describe = describe_out
    # look for -dirty suffix
    dirty = git_describe.endswith("-dirty")
    pieces["dirty"] = dirty
    if dirty:
        git_describe = git_describe[:git_describe.rindex("-dirty")]
    # now we have TAG-NUM-gHEX or HEX
    if "-" in git_describe:
        # TAG-NUM-gHEX
        mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
        if not mo:
            # unparseable. Maybe git-describe is misbehaving?
            pieces["error"] = ("unable to parse git-describe output: '%s'"
                               % describe_out)
            return pieces
        # tag
        full_tag = mo.group(1)
        if not full_tag.startswith(tag_prefix):
            if verbose:
                fmt = "tag '%s' doesn't start with prefix '%s'"
                print(fmt % (full_tag, tag_prefix))
            pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
                               % (full_tag, tag_prefix))
            return pieces
        pieces["closest-tag"] = full_tag[len(tag_prefix):]
        # distance: number of commits since tag
        pieces["distance"] = int(mo.group(2))
        # commit: short hex revision ID
        pieces["short"] = mo.group(3)
    else:
        # HEX: no tags
        pieces["closest-tag"] = None
        count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
                                    cwd=root)
        pieces["distance"] = int(count_out) # total number of commits
    # commit date: see ISO-8601 comment in git_versions_from_keywords()
    date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"],
                       cwd=root)[0].strip()
    pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
    return pieces
def plus_or_dot(pieces):
    """Return a + if we don't already have one, else return a ."""
    return "." if "+" in pieces.get("closest-tag", "") else "+"
def render_pep440(pieces):
    """Build up version string, with post-release "local version identifier".

    Our goal: TAG[+DISTANCE.gHEX[.dirty]]. A tagged, clean build renders as
    just TAG; a tagged build that was then dirtied renders as TAG+0.gHEX.dirty.

    Exceptions:
    1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
    """
    if pieces["closest-tag"]:
        parts = [pieces["closest-tag"]]
        if pieces["distance"] or pieces["dirty"]:
            parts.append(plus_or_dot(pieces))
            parts.append("%d.g%s" % (pieces["distance"], pieces["short"]))
            if pieces["dirty"]:
                parts.append(".dirty")
        return "".join(parts)
    # exception #1: no tag at all
    parts = ["0+untagged.%d.g%s" % (pieces["distance"], pieces["short"])]
    if pieces["dirty"]:
        parts.append(".dirty")
    return "".join(parts)
def render_pep440_pre(pieces):
    """TAG[.post.devDISTANCE] -- No -dirty.

    Exceptions:
    1: no tags. 0.post.devDISTANCE
    """
    tag = pieces["closest-tag"]
    if not tag:
        # exception #1: no tag at all
        return "0.post.dev%d" % pieces["distance"]
    if pieces["distance"]:
        return tag + ".post.dev%d" % pieces["distance"]
    return tag
def render_pep440_post(pieces):
    """TAG[.postDISTANCE[.dev0]+gHEX] .

    The ".dev0" means dirty. Note that .dev0 sorts backwards
    (a dirty tree will appear "older" than the corresponding clean one),
    but you shouldn't be releasing software with -dirty anyways.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    tag = pieces["closest-tag"]
    if not tag:
        # exception #1: no tag at all - hex is always appended here
        rendered = "0.post%d" % pieces["distance"]
        if pieces["dirty"]:
            rendered += ".dev0"
        return rendered + "+g%s" % pieces["short"]
    rendered = tag
    if pieces["distance"] or pieces["dirty"]:
        rendered += ".post%d" % pieces["distance"]
        if pieces["dirty"]:
            rendered += ".dev0"
        rendered += plus_or_dot(pieces) + "g%s" % pieces["short"]
    return rendered
def render_pep440_old(pieces):
    """TAG[.postDISTANCE[.dev0]] .

    The ".dev0" means dirty.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    tag = pieces["closest-tag"]
    # tagged, clean, zero-distance builds render as the bare tag
    if tag and not (pieces["distance"] or pieces["dirty"]):
        return tag
    # exception #1 uses "0" in place of a tag
    rendered = (tag if tag else "0") + ".post%d" % pieces["distance"]
    if pieces["dirty"]:
        rendered += ".dev0"
    return rendered
def render_git_describe(pieces):
    """TAG[-DISTANCE-gHEX][-dirty].

    Like 'git describe --tags --dirty --always'.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    tag = pieces["closest-tag"]
    if tag:
        rendered = tag
        if pieces["distance"]:
            rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
    else:
        # exception #1: bare short hex, no 'g' prefix
        rendered = pieces["short"]
    if pieces["dirty"]:
        rendered += "-dirty"
    return rendered
def render_git_describe_long(pieces):
    """TAG-DISTANCE-gHEX[-dirty].

    Like 'git describe --tags --dirty --always -long'.
    The distance/hash is unconditional.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    tag = pieces["closest-tag"]
    if tag:
        rendered = "%s-%d-g%s" % (tag, pieces["distance"], pieces["short"])
    else:
        # exception #1: bare short hex, no 'g' prefix
        rendered = pieces["short"]
    if pieces["dirty"]:
        rendered += "-dirty"
    return rendered
def render(pieces, style):
    """Render the given version pieces into the requested style.

    :param pieces: Version pieces dict as produced by the VCS handlers.
    :param style: Style name; empty or "default" means "pep440".
    :raises ValueError: On an unrecognised style name.
    """
    if pieces["error"]:
        return {"version": "unknown",
                "full-revisionid": pieces.get("long"),
                "dirty": None,
                "error": pieces["error"],
                "date": None}

    if not style or style == "default":
        style = "pep440"  # the default

    renderers = {
        "pep440": render_pep440,
        "pep440-pre": render_pep440_pre,
        "pep440-post": render_pep440_post,
        "pep440-old": render_pep440_old,
        "git-describe": render_git_describe,
        "git-describe-long": render_git_describe_long,
    }
    renderer = renderers.get(style)
    if renderer is None:
        raise ValueError("unknown style '%s'" % style)
    rendered = renderer(pieces)

    return {"version": rendered, "full-revisionid": pieces["long"],
            "dirty": pieces["dirty"], "error": None,
            "date": pieces.get("date")}
def get_versions():
    """Get version information or return default if unable to do so.

    Strategy order: (1) expanded git-archive keywords, (2) 'git describe'
    on a checked-out source tree, (3) parent directory name. Falls back to
    a "0+unknown" dict with an error message when all strategies fail.
    """
    # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
    # __file__, we can work backwards from there to the root. Some
    # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
    # case we can only use expanded keywords.
    cfg = get_config()
    verbose = cfg.verbose
    try:
        return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
                                          verbose)
    except NotThisMethod:
        pass
    try:
        root = os.path.realpath(__file__)
        # versionfile_source is the relative path from the top of the source
        # tree (where the .git directory might live) to this file. Invert
        # this to find the root from __file__.
        for i in cfg.versionfile_source.split('/'):
            root = os.path.dirname(root)
    except NameError:
        # no __file__ (frozen interpreter) - keywords were our only option
        return {"version": "0+unknown", "full-revisionid": None,
                "dirty": None,
                "error": "unable to find root of source tree",
                "date": None}
    try:
        pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
        return render(pieces, cfg.style)
    except NotThisMethod:
        pass
    try:
        if cfg.parentdir_prefix:
            return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
    except NotThisMethod:
        pass
    return {"version": "0+unknown", "full-revisionid": None,
            "dirty": None,
            "error": "unable to compute version", "date": None}
| 18,445 | Python | .py | 439 | 33.298405 | 79 | 0.591799 | ParallelSSH/parallel-ssh | 1,198 | 148 | 36 | LGPL-2.1 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,368 | constants.py | ParallelSSH_parallel-ssh/pssh/constants.py | # This file is part of parallel-ssh.
#
# Copyright (C) 2014-2022 Panos Kittenis and contributors.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, version 2.1.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
"""Constants definitions for pssh package"""
# Number of times connection/authentication attempts are retried before an
# SSH client gives up.
DEFAULT_RETRIES = 3
"""Default number of retry attempts for SSH client initialisation - authentication, establishing
connections et al."""
# Seconds to wait between successive retry attempts.
RETRY_DELAY = 5
"""Default delay in seconds between retry attempts for SSH client initialisation."""
| 1,056 | Python | .py | 22 | 46.727273 | 96 | 0.787938 | ParallelSSH/parallel-ssh | 1,198 | 148 | 36 | LGPL-2.1 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,369 | output.py | ParallelSSH_parallel-ssh/pssh/output.py | # This file is part of parallel-ssh.
#
# Copyright (C) 2014-2022 Panos Kittenis and contributors.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, version 2.1.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
"""Output module of ParallelSSH"""
from os import linesep
from . import logger
class HostOutputBuffers(object):
    """Pairs the stdout and stderr buffer data of a single host."""

    __slots__ = ('stdout', 'stderr')

    def __init__(self, stdout, stderr):
        """
        :param stdout: Buffer data for standard output.
        :type stdout: :py:class:`BufferData`
        :param stderr: Buffer data for standard error.
        :type stderr: :py:class:`BufferData`
        """
        self.stderr = stderr
        self.stdout = stdout
class BufferData(object):
    """Associates a reader greenlet with the buffer it writes into."""

    __slots__ = ('reader', 'rw_buffer')

    def __init__(self, reader, rw_buffer):
        """
        :param reader: Greenlet reading data from channel and writing to rw_buffer.
        :type reader: :py:class:`gevent.Greenlet`
        :param rw_buffer: Read/write buffer.
        :type rw_buffer: :py:class:`pssh.clients.reader.ConcurrentRWBuffer`
        """
        self.rw_buffer = rw_buffer
        self.reader = reader
class HostOutput(object):
    """Per-host output of a remote command run.

    Output streams, exit code and any exception are accessed via this
    object's attributes and properties. ``stdout``/``stderr``/``exit_code``
    all return ``None`` when no client is attached.
    """

    __slots__ = ('host', 'channel', 'stdin',
                 'client', 'alias', 'exception',
                 'encoding', 'read_timeout', 'buffers',
                 )

    def __init__(self, host, channel, stdin,
                 client, alias=None, exception=None, encoding='utf-8', read_timeout=None,
                 buffers=None):
        """
        :param host: Host name output is for.
        :type host: str
        :param channel: SSH channel used for command execution.
        :type channel: :py:class:`socket.socket` compatible object
        :param stdin: Standard input buffer.
        :type stdin: :py:func:`file`-like object
        :param client: `SSHClient` output is coming from.
        :type client: :py:class:`pssh.clients.base.single.BaseSSHClient` or `None`.
        :param alias: Host alias.
        :type alias: str
        :param exception: Exception from host if any.
        :type exception: :py:class:`Exception` or ``None``
        :param encoding: Encoding used when decoding output.
        :type encoding: str
        :param read_timeout: Timeout in seconds for reading from buffers.
        :type read_timeout: float
        :param buffers: Host buffer data.
        :type buffers: :py:class:`HostOutputBuffers`
        """
        self.host = host
        self.alias = alias
        self.channel = channel
        self.stdin = stdin
        self.client = client
        self.exception = exception
        self.encoding = encoding
        self.read_timeout = read_timeout
        self.buffers = buffers

    @property
    def stdout(self):
        """Stdout data as produced by the client's ``read_output_buffer``,
        or ``None`` when no client is attached."""
        if not self.client:
            return None
        raw_stdout = self.client.read_output(
            self.buffers.stdout.rw_buffer, timeout=self.read_timeout)
        return self.client.read_output_buffer(
            raw_stdout, encoding=self.encoding)

    @property
    def stderr(self):
        """Stderr data as produced by the client's ``read_output_buffer``,
        or ``None`` when no client is attached."""
        if not self.client:
            return None
        raw_stderr = self.client.read_stderr(
            self.buffers.stderr.rw_buffer, timeout=self.read_timeout)
        return self.client.read_output_buffer(
            raw_stderr, encoding=self.encoding,
            prefix='\t[err]')

    @property
    def exit_code(self):
        """Exit status of the remote command, or ``None`` if unavailable."""
        if not self.client:
            return None
        try:
            return self.client.get_exit_status(self.channel)
        except Exception as ex:
            # Best-effort - log and fall through to returning None.
            logger.error("Error getting exit status - %s", ex)

    def __repr__(self):
        fields = (
            ('host', self.host),
            ('alias', self.alias),
            ('exit_code', self.exit_code),
            ('channel', self.channel),
            ('exception', self.exception),
            ('encoding', self.encoding),
            ('read_timeout', self.read_timeout),
        )
        return linesep.join(
            "\t%s=%s" % (name, value) for name, value in fields)

    def __str__(self):
        return repr(self)
| 4,665 | Python | .py | 116 | 31.905172 | 97 | 0.625386 | ParallelSSH/parallel-ssh | 1,198 | 148 | 36 | LGPL-2.1 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,370 | utils.py | ParallelSSH_parallel-ssh/pssh/utils.py | # This file is part of parallel-ssh.
#
# Copyright (C) 2014-2022 Panos Kittenis and contributors.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, version 2.1.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
"""Module containing static utility functions for parallel-ssh."""
import logging
# Module-level loggers: ``host_logger`` carries remote host output lines,
# ``logger`` is the package-wide library logger.
host_logger = logging.getLogger('pssh.host_logger')
logger = logging.getLogger('pssh')
def enable_logger(_logger, level=logging.INFO):
    """Enable logging to stdout for the given logger.

    Sets *_logger* to *level* and attaches a
    :py:class:`logging.StreamHandler` with the package's standard format.
    Does nothing further - beyond a warning via the package logger - if a
    stream handler is already attached, to avoid duplicated output lines.

    :param _logger: Logger to enable output for.
    :type _logger: :py:class:`logging.Logger`
    :param level: Log level to set. Defaults to ``logging.INFO``.
    :type level: int
    """
    _logger.setLevel(level)
    if any(isinstance(h, logging.StreamHandler) for h in _logger.handlers):
        logger.warning("Logger already has a StreamHandler attached")
        return
    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter(
        '%(asctime)s %(levelname)-8s %(name)-15s %(message)s'))
    _logger.addHandler(handler)
def enable_host_logger():
    """Turn on the host logger, printing stdout from remote commands as
    it becomes available.
    """
    return enable_logger(host_logger)
def enable_debug_logger():
    """Switch the library logger to DEBUG level output on stdout."""
    return enable_logger(logger, logging.DEBUG)
| 1,802 | Python | .py | 40 | 41.425 | 94 | 0.746005 | ParallelSSH/parallel-ssh | 1,198 | 148 | 36 | LGPL-2.1 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,371 | __init__.py | ParallelSSH_parallel-ssh/pssh/__init__.py | # This file is part of parallel-ssh.
#
# Copyright (C) 2014-2022 Panos Kittenis and contributors.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, version 2.1.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
"""Asynchronous parallel SSH client library.
Run SSH commands over many - hundreds/hundreds of thousands - number of servers
asynchronously and with minimal system load on the client host.
New users should start with `pssh.clients.ParallelSSHClient.run_command` and
`pssh.clients.SSHClient.run_command`
See also `pssh.clients.ParallelSSHClient` and pssh.clients.SSHClient`
for class documentation.
"""
from logging import getLogger, NullHandler
from ._version import get_versions
# Package version as computed by versioneer; the helper is deleted so it is
# not exposed as part of the public API.
__version__ = get_versions()['version']
del get_versions
# Attach null handlers so the library emits no log output unless the
# application configures logging itself (see ``pssh.utils``).
host_logger = getLogger('pssh.host_logger')
logger = getLogger('pssh')
host_logger.addHandler(NullHandler())
logger.addHandler(NullHandler())
| 1,451 | Python | .py | 32 | 44.125 | 80 | 0.798867 | ParallelSSH/parallel-ssh | 1,198 | 148 | 36 | LGPL-2.1 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,372 | exceptions.py | ParallelSSH_parallel-ssh/pssh/exceptions.py | # This file is part of parallel-ssh.
#
# Copyright (C) 2014-2022 Panos Kittenis and contributors.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, version 2.1.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
"""Exceptions raised by parallel-ssh classes."""
class NoIPv6AddressFoundError(Exception):
    """Raised when an IPV6 only address was requested but none are
    available for a host.
    This exception is raised instead of UnknownHostError
    in the case where only IPV4 addresses are available via DNS for a host,
    or an IPV4 address was provided but IPV6 only was requested.
    """
class UnknownHostError(Exception):
    """Raised when a host is unknown (dns failure)"""
    pass
# Backwards compatible alias - earlier releases exported the *Exception name.
UnknownHostException = UnknownHostError
# Deliberately re-binds the ConnectionError builtin so it is importable from
# this module; kept for backwards compatibility with older client code.
ConnectionError = ConnectionError
ConnectionErrorException = ConnectionError
class AuthenticationError(Exception):
    """Raised on authentication error (user/password/ssh key error)"""
    pass
# Backwards compatible alias.
AuthenticationException = AuthenticationError
class SSHError(Exception):
    """Raised on error authenticating with SSH server"""
    pass
# Backwards compatible alias.
SSHException = SSHError
class HostArgumentError(Exception):
    """Raised on errors with per-host arguments to parallel functions"""
    pass
# Backwards compatible alias.
HostArgumentException = HostArgumentError
class SessionError(Exception):
    """Raised on errors establishing SSH session"""
    pass
class SFTPError(Exception):
    """Raised on SFTP errors"""
    pass
class SFTPIOError(SFTPError):
    """Raised on SFTP IO errors"""
    pass
class ProxyError(Exception):
    """Raised on proxy errors"""
class Timeout(Exception):
    """Raised on timeout requested and reached"""
class SCPError(Exception):
    """Raised on errors copying file via SCP"""
class PKeyFileError(Exception):
    """Raised on errors finding private key file"""
class ShellError(Exception):
    """Raised on errors running command on interactive shell"""
class HostConfigError(Exception):
    """Raised on invalid host configuration"""
22,373 | __init__.py | ParallelSSH_parallel-ssh/pssh/clients/__init__.py | # This file is part of parallel-ssh.
#
# Copyright (C) 2014-2022 Panos Kittenis and contributors.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, version 2.1.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
# flake8: noqa: F401
from .native.parallel import ParallelSSHClient
from .native.single import SSHClient
| 871 | Python | .py | 19 | 44.789474 | 80 | 0.792009 | ParallelSSH/parallel-ssh | 1,198 | 148 | 36 | LGPL-2.1 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,374 | reader.py | ParallelSSH_parallel-ssh/pssh/clients/reader.py | # This file is part of parallel-ssh.
#
# Copyright (C) 2014-2022 Panos Kittenis and contributors.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, version 2.1.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
from io import BytesIO
from gevent.event import Event
from gevent.lock import RLock
class _Eof(Event):
    """Event flagging end of stream which, when set, also wakes any reader
    blocked waiting on the shared unread-data event."""

    def __init__(self, unread_data):
        self._unread_data = unread_data
        super(_Eof, self).__init__()

    def set(self):
        # Release readers waiting for data before flagging EOF itself.
        self._unread_data.set()
        super(_Eof, self).set()
class ConcurrentRWBuffer(object):
    """Concurrent reader/writer of bytes for use from multiple greenlets.
    Supports both concurrent reading and writing.
    Iterate on buffer object to read data, yielding event loop if no data exists
    until self.eof has been set.
    Writers should call ``ConcurrentRWBuffer.eof.set()`` when finished writing data via ``write``.
    Readers can use ``read()`` to get any available data or ``None``.
    """
    # _read_pos/_write_pos track independent cursors into the single shared
    # BytesIO; _unread_data signals blocked readers that unread bytes exist.
    __slots__ = ('_buffer', '_read_pos', '_write_pos', 'eof', '_lock', '_unread_data')
    def __init__(self):
        self._buffer = BytesIO()
        self._read_pos = 0
        self._write_pos = 0
        self._lock = RLock()
        self._unread_data = Event()
        # _Eof also wakes readers blocked on _unread_data when EOF is set.
        self.eof = _Eof(self._unread_data)
    def write(self, data):
        """Write data to buffer.
        :param data: Data to write
        :type data: bytes
        """
        with self._lock:
            # Restore the write cursor - a concurrent read may have moved
            # the shared BytesIO position.
            if not self._buffer.tell() == self._write_pos:
                self._buffer.seek(self._write_pos)
            self._write_pos += self._buffer.write(data)
            # Wake any reader waiting for data.
            if not self._unread_data.is_set() and self._read_pos < self._write_pos:
                self._unread_data.set()
    def read(self):
        """Read available data, or return None.
        :rtype: bytes
        """
        with self._lock:
            # Nothing written yet, or everything written already read -
            # clear the event so iterating readers block until more data.
            if self._write_pos == 0 or self._read_pos == self._write_pos:
                self._unread_data.clear()
                return
            elif not self._buffer.tell() == self._read_pos:
                # Restore the read cursor before reading to end of buffer.
                self._buffer.seek(self._read_pos)
            data = self._buffer.read()
            self._read_pos += len(data)
            return data
    def __iter__(self):
        # Yield chunks until EOF has been flagged and all data consumed;
        # block on _unread_data when no data is currently available.
        while not self.eof.is_set() or self._read_pos != self._write_pos:
            data = self.read()
            if data:
                yield data
            else:
                self._unread_data.wait()
| 2,981 | Python | .py | 73 | 33.356164 | 98 | 0.626773 | ParallelSSH/parallel-ssh | 1,198 | 148 | 36 | LGPL-2.1 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,375 | common.py | ParallelSSH_parallel-ssh/pssh/clients/common.py | # This file is part of parallel-ssh.
#
# Copyright (C) 2014-2022 Panos Kittenis and contributors.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, version 2.1.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import os
from ..exceptions import PKeyFileError
def _validate_pkey_path(pkey):
if pkey is None:
return
pkey = os.path.normpath(os.path.expanduser(pkey))
if not os.path.exists(pkey):
msg = "File %s does not exist. " \
"Please use either absolute or relative to user directory " \
"paths like '~/.ssh/my_key' for pkey parameter"
ex = PKeyFileError(msg, pkey)
raise ex
return pkey
def _validate_pkey(pkey):
if pkey is None:
return
if isinstance(pkey, str):
return _validate_pkey_path(pkey)
return pkey
| 1,375 | Python | .py | 35 | 35.142857 | 80 | 0.72039 | ParallelSSH/parallel-ssh | 1,198 | 148 | 36 | LGPL-2.1 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,376 | parallel.py | ParallelSSH_parallel-ssh/pssh/clients/ssh/parallel.py | # This file is part of parallel-ssh.
#
# Copyright (C) 2014-2022 Panos Kittenis and contributors.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, version 2.1.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import logging
from .single import SSHClient
from ..common import _validate_pkey_path, _validate_pkey
from ..base.parallel import BaseParallelSSHClient
from ...constants import DEFAULT_RETRIES, RETRY_DELAY
# Module level logger for the ssh-python based parallel client.
logger = logging.getLogger(__name__)
class ParallelSSHClient(BaseParallelSSHClient):
    """Parallel client backed by the ssh-python (libssh) single host client."""

    def __init__(self, hosts, user=None, password=None, port=22, pkey=None,
                 cert_file=None,
                 num_retries=DEFAULT_RETRIES, timeout=None, pool_size=100,
                 allow_agent=True, host_config=None, retry_delay=RETRY_DELAY,
                 forward_ssh_agent=False,
                 gssapi_auth=False,
                 gssapi_server_identity=None,
                 gssapi_client_identity=None,
                 gssapi_delegate_credentials=False,
                 identity_auth=True,
                 ipv6_only=False,
                 ):
        """
        :param hosts: Hosts to connect to.
        :type hosts: list(str)
        :param user: (Optional) User to login as. Defaults to logged in user.
        :type user: str
        :param password: (Optional) Password to use for login. Defaults to
          no password.
        :type password: str
        :param port: (Optional) Port number to use for SSH connection.
          Defaults to 22.
        :type port: int
        :param pkey: Private key file path to use. Path must be either
          absolute path or relative to user home directory like ``~/<path>``.
          Bytes type input is used as private key data for authentication.
        :type pkey: str or bytes
        :param cert_file: Public key signed certificate file to use for
          authentication. The corresponding private key must also be provided
          via ``pkey`` parameter, eg ``pkey='id_rsa',
          cert_file='id_rsa-cert.pub'``. Path must be absolute or relative to
          user home directory.
        :type cert_file: str
        :param num_retries: (Optional) Number of connection and authentication
          attempts before the client gives up. Defaults to 3.
        :type num_retries: int
        :param timeout: (Optional) Individual SSH client timeout setting in
          seconds passed on to each SSH client spawned by
          `ParallelSSHClient`. Controls socket operation timeouts *on a per
          session basis*. Defaults to OS default - usually 60 seconds.
          Parallel functions like `run_command` and `join` have a separate,
          cumulative timeout setting not affected by `self.timeout`.
        :type timeout: int or float
        :param pool_size: (Optional) Greenlet pool size controlling how many
          hosts to execute tasks on in parallel. Defaults to 100.
        :type pool_size: int
        :param allow_agent: (Optional) set to False to disable connecting to
          the system's SSH agent. Currently unused - always off.
        :type allow_agent: bool
        :param host_config: (Optional) Per-host configuration for cases where
          not all hosts use the same configuration.
        :type host_config: list(:py:class:`pssh.config.HostConfig`)
        :param retry_delay: Number of seconds to wait between retries.
          Defaults to :py:class:`pssh.constants.RETRY_DELAY`.
        :type retry_delay: int or float
        :param forward_ssh_agent: (Optional) Turn on SSH agent forwarding -
          equivalent to `ssh -A`. Defaults to False. Currently unused meaning
          always off.
        :type forward_ssh_agent: bool
        :param gssapi_auth: Enable GSSAPI authentication.
        :type gssapi_auth: bool
        :param gssapi_server_identity: Set GSSAPI server identity.
        :type gssapi_server_identity: str
        :param gssapi_client_identity: Set GSSAPI client identity.
        :type gssapi_client_identity: str
        :param gssapi_delegate_credentials: Enable/disable server credentials
          delegation.
        :type gssapi_delegate_credentials: bool
        :param identity_auth: (Optional) set to False to disable attempting to
          authenticate with default identity files from
          `pssh.clients.base_ssh_client.BaseSSHClient.IDENTITIES`.
        :type identity_auth: bool
        :param ipv6_only: Choose IPv6 addresses only if multiple are available
          for the host or raise NoIPv6AddressFoundError otherwise. Note this
          will disable connecting to an IPv4 address if an IP address is
          provided instead.
        :type ipv6_only: bool
        :raises: :py:class:`pssh.exceptions.PKeyFileError` on errors finding
          provided private key.
        """
        super(ParallelSSHClient, self).__init__(
            hosts, user=user, password=password, port=port, pkey=pkey,
            allow_agent=allow_agent, num_retries=num_retries,
            timeout=timeout, pool_size=pool_size,
            host_config=host_config, retry_delay=retry_delay,
            identity_auth=identity_auth,
            ipv6_only=ipv6_only,
        )
        # Re-validate pkey/cert paths here - certificate authentication is
        # specific to this client implementation.
        self.pkey = _validate_pkey(pkey)
        self.cert_file = _validate_pkey_path(cert_file)
        self.forward_ssh_agent = forward_ssh_agent
        self.gssapi_auth = gssapi_auth
        self.gssapi_server_identity = gssapi_server_identity
        self.gssapi_client_identity = gssapi_client_identity
        self.gssapi_delegate_credentials = gssapi_delegate_credentials

    def run_command(self, command, sudo=False, user=None, stop_on_errors=True,
                    use_pty=False, host_args=None, shell=None,
                    encoding='utf-8', read_timeout=None,
                    ):
        """Run command on all hosts in parallel, honoring self.pool_size,
        and return output.

        Blocks until the command has been received by all established remote
        channels, then returns immediately. Connection and authentication
        exceptions are raised here and need catching *unless* called with
        ``stop_on_errors=False``, in which case they are added to individual
        host output instead.

        :param command: Command to run.
        :type command: str
        :param sudo: (Optional) Run with sudo. Defaults to False.
        :type sudo: bool
        :param user: (Optional) User to run command as. Requires sudo access
          for that user from the logged in user account.
        :type user: str
        :param stop_on_errors: (Optional) Raise exception on errors running
          command. Defaults to True. With stop_on_errors set to False,
          exceptions are instead added to output of `run_command`.
        :type stop_on_errors: bool
        :param use_pty: (Optional) Enable/Disable use of pseudo terminal
          emulation. Defaults to ``False``.
        :type use_pty: bool
        :param host_args: (Optional) Format command string with per-host
          arguments in ``host_args``. ``host_args`` length must equal length
          of host list - :py:class:`pssh.exceptions.HostArgumentError` is
          raised otherwise.
        :type host_args: tuple or list
        :param shell: (Optional) Override shell to use to run command with,
          eg ``shell='bash -c'`` or ``shell='zsh -c'``. Defaults to login
          user's defined shell.
        :type shell: str
        :param encoding: Encoding to use for command string and output. Must
          be valid `Python codec <https://docs.python.org/library/codecs.html>`_.
        :type encoding: str
        :param read_timeout: (Optional) Timeout in seconds for reading from
          stdout or stderr. Defaults to `self.timeout`. Reading from
          stdout/stderr will raise :py:class:`pssh.exceptions.Timeout` after
          ``timeout`` number seconds if remote output is not ready.
        :type read_timeout: float
        :rtype: list(:py:class:`pssh.output.HostOutput`)
        :raises: :py:class:`pssh.exceptions.AuthenticationError` on
          authentication error.
        :raises: :py:class:`pssh.exceptions.UnknownHostError` on DNS
          resolution error.
        :raises: :py:class:`pssh.exceptions.ConnectionError` on error
          connecting.
        :raises: :py:class:`pssh.exceptions.HostArgumentError` on number of
          host arguments not equal to number of hosts.
        :raises: :py:class:`TypeError` on not enough host arguments for cmd
          string format.
        :raises: :py:class:`KeyError` on no host argument key in arguments
          dict for cmd string format.
        :raises: :py:class:`pssh.exceptions.ProxyError` on errors connecting
          to proxy if a proxy host has been set.
        :raises: :py:class:`pssh.exceptions.Timeout` on timeout starting
          command.
        :raises: Exceptions from :py:mod:`ssh.exceptions` for all other
          specific errors.
        """
        return super(ParallelSSHClient, self).run_command(
            command, stop_on_errors=stop_on_errors, host_args=host_args,
            user=user, shell=shell, sudo=sudo,
            encoding=encoding, use_pty=use_pty,
            read_timeout=read_timeout,
        )

    def _make_ssh_client(self, host, cfg, _pkey_data):
        """Build the single host ``SSHClient`` for *host*, preferring
        per-host ``cfg`` values over client-wide defaults."""
        return SSHClient(
            host,
            user=cfg.user or self.user,
            password=cfg.password or self.password,
            port=cfg.port or self.port,
            pkey=_pkey_data,
            num_retries=cfg.num_retries or self.num_retries,
            alias=cfg.alias,
            timeout=cfg.timeout or self.timeout,
            allow_agent=cfg.allow_agent or self.allow_agent,
            retry_delay=cfg.retry_delay or self.retry_delay,
            _auth_thread_pool=cfg.auth_thread_pool or self._auth_thread_pool,
            identity_auth=cfg.identity_auth or self.identity_auth,
            ipv6_only=cfg.ipv6_only or self.ipv6_only,
            gssapi_auth=self.gssapi_auth,
            gssapi_server_identity=self.gssapi_server_identity,
            gssapi_client_identity=self.gssapi_client_identity,
            gssapi_delegate_credentials=self.gssapi_delegate_credentials,
            cert_file=cfg.cert_file,
        )
| 11,397 | Python | .py | 214 | 43.523364 | 113 | 0.668428 | ParallelSSH/parallel-ssh | 1,198 | 148 | 36 | LGPL-2.1 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,377 | __init__.py | ParallelSSH_parallel-ssh/pssh/clients/ssh/__init__.py | # This file is part of parallel-ssh.
#
# Copyright (C) 2014-2022 Panos Kittenis and contributors.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, version 2.1.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
# flake8: noqa: F401
from .parallel import ParallelSSHClient
from .single import SSHClient
| 857 | Python | .py | 19 | 44.052632 | 80 | 0.79092 | ParallelSSH/parallel-ssh | 1,198 | 148 | 36 | LGPL-2.1 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,378 | single.py | ParallelSSH_parallel-ssh/pssh/clients/ssh/single.py | # This file is part of parallel-ssh.
#
# Copyright (C) 2014-2022 Panos Kittenis and contributors.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, version 2.1.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import logging
from gevent import sleep, spawn, Timeout as GTimeout, joinall
from ssh import options
from ssh.error_codes import SSH_AGAIN
from ssh.exceptions import EOF
from ssh.key import import_privkey_file, import_cert_file, copy_cert_to_privkey, \
import_privkey_base64
from ssh.session import Session, SSH_READ_PENDING, SSH_WRITE_PENDING
from ..base.single import BaseSSHClient
from ..common import _validate_pkey_path
from ...constants import DEFAULT_RETRIES, RETRY_DELAY
from ...exceptions import SessionError, Timeout
from ...output import HostOutput
# Module level logger for the ssh-python based single host client.
logger = logging.getLogger(__name__)
class SSHClient(BaseSSHClient):
"""ssh-python based non-blocking client."""
def __init__(self, host,
user=None, password=None, port=None,
pkey=None, alias=None,
cert_file=None,
num_retries=DEFAULT_RETRIES,
retry_delay=RETRY_DELAY,
allow_agent=True, timeout=None,
identity_auth=True,
gssapi_auth=False,
gssapi_server_identity=None,
gssapi_client_identity=None,
gssapi_delegate_credentials=False,
ipv6_only=False,
_auth_thread_pool=True):
""":param host: Host name or IP to connect to.
:type host: str
:param user: User to connect as. Defaults to logged in user.
:type user: str
:param password: Password to use for password authentication.
:type password: str
:param port: SSH port to connect to. Defaults to SSH default (22)
:type port: int
:param alias: Use an alias for this host.
:type alias: str
:param pkey: Private key file path to use for authentication. Path must
be either absolute path or relative to user home directory
like ``~/<path>``.
Bytes type input is used as private key data for authentication.
:type pkey: str or bytes
:param cert_file: Public key signed certificate file to use for
authentication. The corresponding private key must also be provided
via ``pkey`` parameter.
For example ``pkey='id_rsa',cert_file='id_rsa-cert.pub'`` for RSA
signed certificate.
Path must be absolute or relative to user home directory.
:type cert_file: str
:param num_retries: (Optional) Number of connection and authentication
attempts before the client gives up. Defaults to 3.
:type num_retries: int
:param retry_delay: Number of seconds to wait between retries. Defaults
to :py:class:`pssh.constants.RETRY_DELAY`
:type retry_delay: int or float
:param timeout: (Optional) If provided, all commands will timeout after
<timeout> number of seconds.
:type timeout: int or float
:param allow_agent: (Optional) set to False to disable connecting to
the system's SSH agent. Currently unused.
:type allow_agent: bool
:param identity_auth: (Optional) set to False to disable attempting to
authenticate with default identity files from
`pssh.clients.base_ssh_client.BaseSSHClient.IDENTITIES`
:type identity_auth: bool
:param gssapi_server_identity: Enable GSS-API authentication.
Uses GSS-MIC key exchange. Enabled if either gssapi_server_identity or
gssapi_client_identity are provided.
:type gssapi_auth: bool
:type gssapi_server_identity: str
:param gssapi_server_identity: Set GSSAPI server identity.
:type gssapi_server_identity: str
:param gssapi_client_identity: Set GSSAPI client identity.
:type gssapi_client_identity: str
:param gssapi_delegate_credentials: Enable/disable server credentials
delegation.
:type gssapi_delegate_credentials: bool
:param ipv6_only: Choose IPv6 addresses only if multiple are available
for the host or raise NoIPv6AddressFoundError otherwise. Note this will
disable connecting to an IPv4 address if an IP address is provided instead.
:type ipv6_only: bool
:raises: :py:class:`pssh.exceptions.PKeyFileError` on errors finding
provided private key.
"""
self.cert_file = _validate_pkey_path(cert_file)
self.gssapi_auth = gssapi_auth
self.gssapi_server_identity = gssapi_server_identity
self.gssapi_client_identity = gssapi_client_identity
self.gssapi_delegate_credentials = gssapi_delegate_credentials
super(SSHClient, self).__init__(
host, user=user, password=password, port=port, pkey=pkey, alias=alias,
num_retries=num_retries, retry_delay=retry_delay,
allow_agent=allow_agent,
_auth_thread_pool=_auth_thread_pool,
timeout=timeout,
identity_auth=identity_auth,
ipv6_only=ipv6_only,
)
def disconnect(self):
"""Close socket if needed."""
if self.sock is not None and not self.sock.closed:
self.sock.close()
    def _agent_auth(self):
        """Authenticate the session as self.user via the system SSH agent."""
        self.session.userauth_agent(self.user)
    def _keepalive(self):
        """No-op - keepalive is not implemented for this client."""
        pass
def _init_session(self, retries=1):
logger.debug("Starting new session for %s@%s:%s",
self.user, self.host, self.port)
self.session = Session()
self.session.options_set(options.USER, self.user)
self.session.options_set(options.HOST, self.host)
self.session.options_set_port(self.port)
if self.gssapi_server_identity:
self.session.options_set(
options.GSSAPI_SERVER_IDENTITY, self.gssapi_server_identity)
if self.gssapi_client_identity:
self.session.options_set(
options.GSSAPI_CLIENT_IDENTITY, self.gssapi_client_identity)
if self.gssapi_client_identity or self.gssapi_server_identity:
self.session.options_set_gssapi_delegate_credentials(
self.gssapi_delegate_credentials)
logger.debug("Session started, connecting with existing socket")
try:
self.session.set_socket(self.sock)
self._session_connect()
except Exception as ex:
if retries < self.num_retries:
return self._connect_init_session_retry(retries=retries+1)
msg = "Error connecting to host %s:%s - %s"
logger.error(msg, self.host, self.port, ex)
raise ex
    def _session_connect(self):
        """Perform the libssh session connect handshake."""
        self.session.connect()
def auth(self):
if self.gssapi_auth or (self.gssapi_server_identity or self.gssapi_client_identity):
try:
return self.session.userauth_gssapi()
except Exception as ex:
logger.error(
"GSSAPI authentication with server id %s and client id %s failed - %s",
self.gssapi_server_identity, self.gssapi_client_identity, ex)
return super(SSHClient, self).auth()
    def _password_auth(self):
        # Plain password authentication.
        self.session.userauth_password(self.user, self.password)
    def _pkey_file_auth(self, pkey_file, password=None):
        """Authenticate with a private key read from file.

        :param pkey_file: Path to private key file.
        :param password: Optional passphrase for the key; empty string is
            used when not provided.
        """
        pkey = import_privkey_file(pkey_file, passphrase=password if password is not None else '')
        return self._pkey_obj_auth(pkey)
    def _pkey_obj_auth(self, pkey):
        """Authenticate with an already imported private key object,
        attaching a certificate first when one is configured."""
        if self.cert_file is not None:
            logger.debug("Certificate file set - trying certificate authentication")
            self._import_cert_file(pkey)
        self.session.userauth_publickey(pkey)
    def _pkey_from_memory(self, pkey_data):
        """Authenticate with private key material held in memory (bytes)."""
        _pkey = import_privkey_base64(
            pkey_data,
            passphrase=self.password if self.password is not None else b'')
        return self._pkey_obj_auth(_pkey)
    def _import_cert_file(self, pkey):
        """Import ``self.cert_file`` and copy its certificate onto *pkey*."""
        cert_key = import_cert_file(self.cert_file)
        self.session.userauth_try_publickey(cert_key)
        copy_cert_to_privkey(cert_key, pkey)
        # NOTE(review): logs self.pkey rather than the pkey argument; these
        # may differ when the key came from a file - confirm intent.
        logger.debug("Imported certificate file %s for pkey %s", self.cert_file, self.pkey)
def _shell(self, channel):
return self._eagain(channel.request_shell)
def _open_session(self):
channel = self.session.channel_new()
channel.set_blocking(0)
self._eagain(channel.open_session)
return channel
    def open_session(self):
        """Open new channel from session.

        :raises: :py:class:`pssh.exceptions.SessionError` on any error
            opening the channel.
        """
        logger.debug("Opening new channel on %s", self.host)
        try:
            channel = self._open_session()
        except Exception as ex:
            # Wrap all channel-open failures so callers catch one type.
            raise SessionError(ex)
        return channel
    def _make_output_readers(self, channel, stdout_buffer, stderr_buffer):
        # Spawn greenlets copying remote stdout/stderr into their buffers
        # concurrently with command execution.
        _stdout_reader = spawn(
            self._read_output_to_buffer, channel, stdout_buffer)
        _stderr_reader = spawn(
            self._read_output_to_buffer, channel, stderr_buffer, is_stderr=True)
        return _stdout_reader, _stderr_reader
    def execute(self, cmd, use_pty=False, channel=None):
        """Execute command on remote host.

        :param cmd: The command string to execute.
        :type cmd: str
        :param use_pty: Whether or not to request a PTY on the channel executing
          command.
        :type use_pty: bool
        :param channel: Channel to use. New channel is created if not provided.
        :type channel: :py:class:`ssh.channel.Channel`

        :returns: The channel the command is executing on.
        """
        channel = self.open_session() if not channel else channel
        if use_pty:
            self._eagain(channel.request_pty, timeout=self.timeout)
        logger.debug("Executing command '%s'", cmd)
        self._eagain(channel.request_exec, cmd)
        return channel
    def _read_output_to_buffer(self, channel, _buffer, is_stderr=False):
        """Read remote output from *channel* into *_buffer* until EOF.

        Runs in its own greenlet; always marks the buffer's EOF event so
        consumers waiting on output are released, even on error.
        """
        try:
            while True:
                # Wait co-operatively for socket readiness.
                self.poll()
                try:
                    size, data = channel.read_nonblocking(is_stderr=is_stderr)
                except EOF:
                    return
                if size > 0:
                    _buffer.write(data)
                # Yield to other greenlets between reads.
                sleep()
        finally:
            _buffer.eof.set()
    def wait_finished(self, host_output, timeout=None):
        """Wait for EOF from channel and close channel.

        Used to wait for remote command completion and be able to gather
        exit code.

        :param host_output: Host output of command to wait for.
        :type host_output: :py:class:`pssh.output.HostOutput`
        :param timeout: Timeout value in seconds - defaults to no timeout.
        :type timeout: float

        :raises: :py:class:`pssh.exceptions.Timeout` after <timeout> seconds if
            timeout set.
        :raises: :py:class:`ValueError` if host_output is not a HostOutput
            object.
        """
        if not isinstance(host_output, HostOutput):
            raise ValueError("%s is not a HostOutput object" % (host_output,))
        channel = host_output.channel
        if channel is None:
            return
        logger.debug("Sending EOF on channel %s", channel)
        # NOTE: self.timeout applies to sending EOF; the timeout argument
        # only bounds waiting on the output reader greenlets below.
        self._eagain(channel.send_eof, timeout=self.timeout)
        logger.debug("Waiting for readers, timeout %s", timeout)
        with GTimeout(seconds=timeout, exception=Timeout):
            joinall((host_output.buffers.stdout.reader, host_output.buffers.stderr.reader))
        logger.debug("Readers finished, closing channel")
        self.close_channel(channel)
    def finished(self, channel):
        """Checks if remote command has finished - has server sent client
        EOF.

        :rtype: bool
        """
        # NOTE(review): returns None (not False) when channel is None -
        # falsy either way, though documented rtype is bool.
        if channel is None:
            return
        return channel.is_eof()
def get_exit_status(self, channel):
"""Get exit status code for channel or ``None`` if not ready.
:param channel: The channel to get status from.
:type channel: :py:mod:`ssh.channel.Channel`
:rtype: int or ``None``
"""
if not channel.is_eof():
return
return channel.get_exit_status()
    def close_channel(self, channel):
        """Close channel.

        :param channel: The channel to close.
        :type channel: :py:class:`ssh.channel.Channel`
        """
        logger.debug("Closing channel")
        self._eagain(channel.close)
    def poll(self, timeout=None):
        """ssh-python based co-operative gevent poll on session socket.

        :param timeout: Deprecated and unused - to be removed.
        """
        # Block the current greenlet until the session's poll flags report
        # pending read/write readiness.
        self._poll_errcodes(
            self.session.get_poll_flags,
            SSH_READ_PENDING,
            SSH_WRITE_PENDING,
        )
    def _eagain(self, func, *args, **kwargs):
        """Run function given and handle EAGAIN for an ssh-python session"""
        return self._eagain_errcode(func, SSH_AGAIN, *args, **kwargs)
    def _eagain_write(self, write_func, data):
        # Write wrapper handling partial writes and EAGAIN.
        return self._eagain_write_errcode(write_func, data, SSH_AGAIN)
| 13,617 | Python | .py | 292 | 37.068493 | 98 | 0.645717 | ParallelSSH/parallel-ssh | 1,198 | 148 | 36 | LGPL-2.1 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,379 | parallel.py | ParallelSSH_parallel-ssh/pssh/clients/base/parallel.py | # This file is part of parallel-ssh.
#
# Copyright (C) 2014-2022 Panos Kittenis and contributors.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, version 2.1.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
"""Abstract parallel SSH client package"""
import logging
import gevent.pool
from gevent import joinall, spawn, Timeout as GTimeout
from gevent.hub import Hub
from ..common import _validate_pkey_path, _validate_pkey
from ...config import HostConfig
from ...constants import DEFAULT_RETRIES, RETRY_DELAY
from ...exceptions import HostArgumentError, Timeout, ShellError, HostConfigError
from ...output import HostOutput
Hub.NOT_ERROR = (Exception,)
logger = logging.getLogger(__name__)
class BaseParallelSSHClient(object):
"""Parallel client base class."""
    def __init__(self, hosts, user=None, password=None, port=None, pkey=None,
                 allow_agent=True,
                 num_retries=DEFAULT_RETRIES,
                 timeout=120, pool_size=100,
                 host_config=None, retry_delay=RETRY_DELAY,
                 identity_auth=True,
                 ipv6_only=False,
                 proxy_host=None,
                 proxy_port=None,
                 proxy_user=None,
                 proxy_password=None,
                 proxy_pkey=None,
                 keepalive_seconds=None,
                 cert_file=None,
                 gssapi_auth=False,
                 gssapi_server_identity=None,
                 gssapi_client_identity=None,
                 gssapi_delegate_credentials=False,
                 forward_ssh_agent=False,
                 _auth_thread_pool=True,
                 ):
        """Store client-wide settings used as defaults for per-host clients.

        No connections are made here - clients are created lazily per host
        on first use. Validates ``hosts`` and, if given, ``host_config``.
        """
        self.allow_agent = allow_agent
        self.pool_size = pool_size
        # Greenlet pool bounding per-host concurrency.
        self.pool = gevent.pool.Pool(size=self.pool_size)
        self._hosts = self._validate_hosts(hosts)
        self.user = user
        self.password = password
        self.port = port
        self.pkey = _validate_pkey(pkey)
        self.__pkey_data = self._load_pkey_data(pkey) if pkey is not None else None
        self.num_retries = num_retries
        self.timeout = timeout
        # Cache of single host clients keyed on (host index, host).
        self._host_clients = {}
        self.host_config = host_config
        self.retry_delay = retry_delay
        # Greenlets of last run_command call, if any.
        self.cmds = None
        self.identity_auth = identity_auth
        self.ipv6_only = ipv6_only
        self.proxy_host = proxy_host
        self.proxy_port = proxy_port
        self.proxy_user = proxy_user
        self.proxy_password = proxy_password
        self.proxy_pkey = proxy_pkey
        self.keepalive_seconds = keepalive_seconds
        self.cert_file = cert_file
        self.forward_ssh_agent = forward_ssh_agent
        self.gssapi_auth = gssapi_auth
        self.gssapi_server_identity = gssapi_server_identity
        self.gssapi_client_identity = gssapi_client_identity
        self.gssapi_delegate_credentials = gssapi_delegate_credentials
        self._auth_thread_pool = _auth_thread_pool
        self._check_host_config()
def _validate_hosts(self, _hosts):
if _hosts is None:
raise ValueError
elif isinstance(_hosts, str) or isinstance(_hosts, bytes):
raise TypeError(
"Hosts must be list or other iterable, not string. "
"For example: ['localhost'] not 'localhost'.")
elif hasattr(_hosts, '__next__') or hasattr(_hosts, 'next'):
_hosts = list(_hosts)
return _hosts
    @property
    def hosts(self):
        """Hosts list as currently configured."""
        return self._hosts
    @hosts.setter
    def hosts(self, _hosts):
        """Replace hosts list, dropping cached clients of removed hosts."""
        _hosts = self._validate_hosts(_hosts)
        # Compare (index, host) pairs - a host changing position is also
        # treated as removed so its cached client is discarded.
        cur_vals = set(enumerate(self._hosts))
        new_vals = set(enumerate(_hosts))
        to_remove = cur_vals.difference(new_vals)
        for i, host in to_remove:
            self._host_clients.pop((i, host), None)
        self._hosts = _hosts
    def __del__(self):
        # Best effort cleanup on garbage collection.
        self.disconnect()
    def disconnect(self):
        """Disconnect all clients."""
        # Guard against partially initialised objects - __del__ may run
        # before __init__ has set _host_clients.
        if not hasattr(self, '_host_clients'):
            return
        for s_client in self._host_clients.values():
            try:
                s_client.disconnect()
            except Exception as ex:
                # Best effort - failures on disconnect are only logged.
                logger.debug("Client disconnect failed with %s", ex)
                pass
    def _check_host_config(self):
        """Sanity check per-host configuration, if any.

        :raises HostConfigError: When host_config is not a list.
        :raises ValueError: When number of host config entries does not
            match number of hosts.
        """
        if self.host_config is None:
            return
        if not isinstance(self.host_config, list):
            raise HostConfigError("Host configuration of type %s is invalid - valid types are List[HostConfig]",
                                  type(self.host_config))
        host_len = len(self.hosts)
        if host_len != len(self.host_config):
            raise ValueError(
                "Host config entries must match number of hosts if provided. "
                "Got %s host config entries from %s hosts" % (
                    len(self.host_config), host_len))
    def _open_shell(self, host_i, host,
                    encoding='utf-8', read_timeout=None):
        # Open a shell on a single host, creating its client if needed.
        try:
            _client = self._get_ssh_client(host_i, host)
            shell = _client.open_shell(
                encoding=encoding, read_timeout=read_timeout)
            return shell
        except (GTimeout, Exception) as ex:
            # GTimeout is listed explicitly as gevent's Timeout does not
            # derive from Exception.
            logger.error("Failed to run on host %s - %s", host, ex)
            raise ex
    def open_shell(self, encoding='utf-8', read_timeout=None):
        """Open interactive shells on all hosts.

        :param encoding: Encoding to use for command string and shell output.
        :type encoding: str
        :param read_timeout: Seconds before reading from output times out.
        :type read_timeout: float

        :returns: Opened shells for each of self.hosts, in order.
        :rtype: list(:py:class:`pssh.clients.native.base.single.InteractiveShell`)
        """
        cmds = [self.pool.spawn(
            self._open_shell, host_i, host, encoding=encoding, read_timeout=read_timeout)
            for host_i, host in enumerate(self.hosts)
        ]
        finished = joinall(cmds, raise_error=True)
        return [cmd.get() for cmd in finished]
    def run_shell_commands(self, shells, commands):
        """Run command(s) on shells.

        :param shells: Shells to run on.
        :type shells: list(:py:class:`pssh.clients.base.single.InteractiveShell`)
        :param commands: Commands to run.
        :type commands: list or str

        :raises: :py:class:`pssh.exceptions.ShellError` on errors starting
            the commands.
        """
        if not isinstance(commands, list):
            commands = [commands]
        # Each command is run on every shell.
        cmds = [self.pool.spawn(shell.run, cmd)
                for shell in shells
                for cmd in commands]
        try:
            finished = joinall(cmds, raise_error=True, timeout=self.timeout)
        except Exception as ex:
            raise ShellError(ex)
        return finished
    def join_shells(self, shells, timeout=None):
        """Wait for running commands to complete and close shells.

        :param shells: Shells to join on.
        :type shells: list(:py:class:`pssh.clients.base.single.InteractiveShell`)
        :param timeout: Seconds before waiting for shell commands to finish times out.
            Defaults to self.timeout if not provided.
        :type timeout: float

        :raises: :py:class:`pssh.exceptions.Timeout` on timeout requested and
            reached with commands still running.
        """
        _timeout = self.timeout if timeout is None else timeout
        cmds = [self.pool.spawn(shell.close) for shell in shells]
        finished = joinall(cmds, timeout=_timeout)
        if _timeout is None:
            # Without a timeout joinall waits indefinitely - nothing left
            # unfinished to report.
            return
        finished_shells = [g.get() for g in finished]
        unfinished_shells = list(set(shells).difference(set(finished_shells)))
        if len(unfinished_shells) > 0:
            raise Timeout(
                "Timeout of %s sec(s) reached with commands still running",
                timeout, finished_shells, unfinished_shells,
            )
    def run_command(self, command, user=None, stop_on_errors=True,
                    host_args=None, use_pty=False, shell=None,
                    encoding='utf-8',
                    *args, **kwargs):
        """Run command on all hosts in parallel.

        :param command: Command string to run. When ``host_args`` is given,
            the string is %-formatted with the per-host argument.
        :type command: str
        :param user: (Optional) User to run command as.
        :param stop_on_errors: Raise per-host exceptions instead of
            capturing them on the returned host output objects.
        :type stop_on_errors: bool
        :param host_args: (Optional) Per-host format arguments; must cover
            all hosts.
        :param use_pty: Whether to request a PTY for the command's channel.
        :type use_pty: bool
        :param shell: (Optional) Shell to run command under.
        :param encoding: Encoding for command string and output.
        :type encoding: str

        :raises HostArgumentError: When ``host_args`` has fewer entries than
            there are hosts.
        :rtype: list(:py:class:`pssh.output.HostOutput`)
        """
        if host_args:
            try:
                cmds = [self.pool.spawn(
                    self._run_command, host_i, host,
                    command % host_args[host_i],
                    user=user, encoding=encoding,
                    use_pty=use_pty, shell=shell,
                    *args, **kwargs)
                    for host_i, host in enumerate(self.hosts)]
            except IndexError:
                raise HostArgumentError(
                    "Number of host arguments provided does not match "
                    "number of hosts ")
        else:
            cmds = [self.pool.spawn(
                self._run_command, host_i, host, command,
                user=user, encoding=encoding, use_pty=use_pty, shell=shell,
                *args, **kwargs)
                for host_i, host in enumerate(self.hosts)]
        # Keep greenlets for later join/get_last_output calls.
        self.cmds = cmds
        joinall(cmds, timeout=self.timeout)
        return self._get_output_from_cmds(cmds, raise_error=stop_on_errors)
    def _get_output_from_cmds(self, cmds, raise_error=False):
        # Resolve command greenlets to host outputs concurrently,
        # preserving host order.
        _cmds = [spawn(self._get_output_from_greenlet, cmd_i, cmd, raise_error=raise_error)
                 for cmd_i, cmd in enumerate(cmds)]
        finished = joinall(_cmds, raise_error=True)
        return [f.get() for f in finished]
    def _get_output_from_greenlet(self, cmd_i, cmd, raise_error=False):
        # Resolve one command greenlet; exceptions become HostOutput with
        # .exception set unless raise_error is requested.
        host = self.hosts[cmd_i]
        alias = self._get_host_config(cmd_i).alias
        try:
            host_out = cmd.get()
            return host_out
        except (GTimeout, Exception) as ex:
            if isinstance(ex, GTimeout):
                # Normalise gevent timeout to the pssh Timeout exception.
                ex = Timeout()
            if raise_error:
                raise ex
            return HostOutput(host, None, None, None, exception=ex, alias=alias)
    def get_last_output(self, cmds=None):
        """Get output for last commands executed by ``run_command``.

        :param cmds: Commands to get output for. Defaults to ``client.cmds``
        :type cmds: list(:py:class:`gevent.Greenlet`)

        :rtype: list(:py:class:`pssh.output.HostOutput`), or ``None`` when
            no commands have been run.
        """
        cmds = self.cmds if cmds is None else cmds
        if cmds is None:
            return
        return self._get_output_from_cmds(
            cmds, raise_error=False)
    def _get_host_config(self, host_i):
        """Return the effective configuration for host at index *host_i*.

        Builds a HostConfig from client-wide settings when no per-host
        configuration was given; otherwise returns the per-host entry.
        """
        if self.host_config is None:
            config = HostConfig(
                user=self.user, port=self.port, password=self.password, private_key=self.pkey,
                allow_agent=self.allow_agent, num_retries=self.num_retries, retry_delay=self.retry_delay,
                timeout=self.timeout, identity_auth=self.identity_auth, proxy_host=self.proxy_host,
                proxy_port=self.proxy_port, proxy_user=self.proxy_user, proxy_password=self.proxy_password,
                proxy_pkey=self.proxy_pkey,
                keepalive_seconds=self.keepalive_seconds,
                ipv6_only=self.ipv6_only,
                cert_file=self.cert_file,
                forward_ssh_agent=self.forward_ssh_agent,
                gssapi_auth=self.gssapi_auth,
                gssapi_server_identity=self.gssapi_server_identity,
                gssapi_client_identity=self.gssapi_client_identity,
                gssapi_delegate_credentials=self.gssapi_delegate_credentials,
            )
            return config
        config = self.host_config[host_i]
        return config
    def _run_command(self, host_i, host, command, sudo=False, user=None,
                     shell=None, use_pty=False,
                     encoding='utf-8', read_timeout=None):
        """Make SSHClient if needed, run command on host"""
        try:
            _client = self._get_ssh_client(host_i, host)
            host_out = _client.run_command(
                command, sudo=sudo, user=user, shell=shell,
                use_pty=use_pty, encoding=encoding, read_timeout=read_timeout)
            return host_out
        except (GTimeout, Exception) as ex:
            # Log with host context before re-raising to the pool.
            logger.error("Failed to run on host %s - %s", host, ex)
            raise ex
    def connect_auth(self):
        """Connect to and authenticate with all hosts in parallel.

        This function can be used to perform connection and authentication outside of
        command functions like ``run_command`` or ``copy_file`` so the two operations,
        login and running a remote command, can be separated.

        It is not required to be called prior to any other functions.
        Connections and authentication is performed in parallel by this and all other
        functions.

        :returns: list of greenlets to ``joinall`` with.
        :rtype: list(:py:mod:`gevent.greenlet.Greenlet`)
        """
        cmds = [self.pool.spawn(self._get_ssh_client, i, host) for i, host in enumerate(self.hosts)]
        return cmds
def _consume_output(self, stdout, stderr):
for _ in stdout:
pass
for _ in stderr:
pass
    def join(self, output=None, consume_output=False, timeout=None):
        """Wait until all remote commands in output have finished.

        Does *not* block other commands from running in parallel.

        :param output: Output of commands to join on
        :type output: `HostOutput` objects
        :param consume_output: Whether or not join should consume output
          buffers. Output buffers will be empty after ``join`` if set
          to ``True``. Must be set to ``True`` to allow host logger to log
          output on call to ``join`` when host logger has been enabled.
        :type consume_output: bool
        :param timeout: Timeout in seconds if **all** remote commands are not
          yet finished.
          This function's timeout is for all commands in total and will therefor
          be affected by pool size and total number of concurrent commands in
          self.pool.
          Since self.timeout is passed onto each individual SSH session it is
          **not** used for any parallel functions like `run_command` or `join`.
        :type timeout: float

        :raises: :py:class:`pssh.exceptions.Timeout` on timeout requested and
          reached with commands still running.
        :raises: :py:class:`ValueError` on unexpected output object type.

        :rtype: ``None``"""
        if output is None:
            output = self.get_last_output()
            if output is None:
                logger.info("No last output to join on - run_command has never been run.")
                return
        elif not isinstance(output, list):
            raise ValueError("Unexpected output object type")
        cmds = [self.pool.spawn(self._join, host_out, timeout=timeout,
                                consume_output=consume_output)
                for host_i, host_out in enumerate(output)]
        # Errors raised by self._join should be propagated.
        finished_cmds = joinall(cmds, raise_error=True, timeout=timeout)
        if timeout is None:
            # joinall without a timeout waits for everything - nothing left
            # unfinished to report.
            return
        unfinished_cmds = set.difference(set(cmds), set(finished_cmds))
        if unfinished_cmds:
            finished_output = self.get_last_output(cmds=finished_cmds)
            unfinished_output = list(set.difference(set(output), set(finished_output)))
            raise Timeout(
                "Timeout of %s sec(s) reached with commands still running",
                timeout, finished_output, unfinished_output,
            )
    def _join(self, host_out, consume_output=False, timeout=None):
        # Wait for a single host's command to finish. Returns the host
        # output, or None when there is nothing to join on.
        if host_out is None:
            return
        client = host_out.client
        if client is None:
            return
        client.wait_finished(host_out, timeout=timeout)
        if consume_output:
            self._consume_output(host_out.stdout, host_out.stderr)
        return host_out
def finished(self, output=None):
"""Check if commands have finished without blocking.
:param output: (Optional) Output to check if finished. Defaults to
:py:func:`get_last_output <pssh.clients.base.parallel..ParallelSSHClient.get_last_output>`
:type output: list(:py:mod:`HostOutput <pssh.output.HostOutput>`)
:rtype: bool
"""
if output is None:
output = self.get_last_output()
if output is None:
return True
for host_out in output:
chan = host_out.channel
if host_out.client and not host_out.client.finished(chan):
return False
return True
def copy_file(self, local_file, remote_file, recurse=False, copy_args=None):
"""Copy local file to remote file in parallel
This function returns a list of greenlets which can be
`join`-ed on to wait for completion.
:py:func:`gevent.joinall` function may be used to join on all greenlets
and will also raise exceptions from them if called with
``raise_error=True`` - default is `False`.
Alternatively call `.get` on each greenlet to raise any exceptions from
it.
Exceptions listed here are raised when
either ``gevent.joinall(<greenlets>, raise_error=True)`` is called
or ``.get`` is called on each greenlet, not this function itself.
:param local_file: Local filepath to copy to remote host
:type local_file: str
:param remote_file: Remote filepath on remote host to copy file to
:type remote_file: str
:param recurse: Whether or not to descend into directories recursively.
:type recurse: bool
:param copy_args: (Optional) format local_file and remote_file strings
with per-host arguments in ``copy_args``. ``copy_args`` length must
equal length of host list -
:py:class:`pssh.exceptions.HostArgumentError` is raised otherwise
:type copy_args: tuple or list
:rtype: List(:py:class:`gevent.Greenlet`) of greenlets for remote copy
commands
:raises: :py:class:`ValueError` when a directory is supplied to
local_file and recurse is not set
:raises: :py:class:`pssh.exceptions.HostArgumentError` on number of
per-host copy arguments not equal to number of hosts
:raises: :py:class:`IOError` on I/O errors writing files
:raises: :py:class:`OSError` on OS errors like permission denied
.. note ::
Remote directories in `remote_file` that do not exist will be
created as long as permissions allow.
"""
if copy_args:
try:
return [self.pool.spawn(self._copy_file, host_i, host,
local_file % copy_args[host_i],
remote_file % copy_args[host_i],
{'recurse': recurse})
for host_i, host in enumerate(self.hosts)]
except IndexError:
raise HostArgumentError(
"Number of per-host copy arguments provided does not match "
"number of hosts")
else:
return [self.pool.spawn(self._copy_file, host_i, host, local_file,
remote_file, {'recurse': recurse})
for host_i, host in enumerate(self.hosts)]
    def _copy_file(self, host_i, host, local_file, remote_file, recurse=False):
        """Make sftp client, copy file"""
        # recurse is expected to be a bool flag - forwarded to the single
        # host client's copy_file.
        client = self._get_ssh_client(host_i, host)
        return client.copy_file(
            local_file, remote_file, recurse=recurse)
    def copy_remote_file(self, remote_file, local_file, recurse=False,
                         suffix_separator='_', copy_args=None, **kwargs):
        """Copy remote file(s) in parallel as
        <local_file><suffix_separator><host>

        With a ``local_file`` value of ``myfile`` and default separator ``_``
        the resulting filename will be ``myfile_myhost`` for the file from host
        ``myhost``.

        This function, like :py:func:`ParallelSSHClient.copy_file`, returns a
        list of greenlets which can be `join`-ed on to wait for completion.

        :py:func:`gevent.joinall` function may be used to join on all greenlets
        and will also raise exceptions if called with ``raise_error=True`` -
        default is `False`.

        Alternatively call `.get` on each greenlet to raise any exceptions from
        it.

        Exceptions listed here are raised when
        either ``gevent.joinall(<greenlets>, raise_error=True)`` is called
        or ``.get`` is called on each greenlet, not this function itself.

        :param remote_file: remote filepath to copy to local host
        :type remote_file: str
        :param local_file: local filepath on local host to copy file to
        :type local_file: str
        :param recurse: whether or not to recurse
        :type recurse: bool
        :param suffix_separator: (Optional) Separator string between
          filename and host, defaults to ``_``. For example, for a
          ``local_file`` value of ``myfile`` and default separator the
          resulting filename will be ``myfile_myhost`` for the file from
          host ``myhost``. ``suffix_separator`` has no meaning if
          ``copy_args`` is provided
        :type suffix_separator: str
        :param copy_args: (Optional) Format remote_file and local_file strings
          with per-host arguments in ``copy_args``. ``copy_args`` length must
          equal length of host list -
          :py:class:`pssh.exceptions.HostArgumentError` is raised otherwise
        :type copy_args: tuple or list

        :rtype: list(:py:class:`gevent.Greenlet`) of greenlets for remote copy
          commands
        :raises: :py:class:`ValueError` when a directory is supplied to
          local_file and recurse is not set
        :raises: :py:class:`pssh.exceptions.HostArgumentError` on number of
          per-host copy arguments not equal to number of hosts
        :raises: :py:class:`IOError` on I/O errors writing files
        :raises: :py:class:`OSError` on OS errors like permission denied

        .. note ::
          Local directories in ``local_file`` that do not exist will be
          created as long as permissions allow.

        .. note ::
          File names will be de-duplicated by appending the hostname to the
          filepath separated by ``suffix_separator``.
        """
        if copy_args:
            # With per-host copy args, filenames are formatted per host and
            # no host suffix is appended.
            try:
                return [self.pool.spawn(
                    self._copy_remote_file, host_i, host,
                    remote_file % copy_args[host_i],
                    local_file % copy_args[host_i], recurse=recurse, **kwargs)
                    for host_i, host in enumerate(self.hosts)]
            except IndexError:
                raise HostArgumentError(
                    "Number of per-host copy arguments provided does not match "
                    "number of hosts")
        else:
            return [self.pool.spawn(
                self._copy_remote_file, host_i, host, remote_file,
                suffix_separator.join([local_file, host]), recurse, **kwargs)
                for host_i, host in enumerate(self.hosts)]
    def _copy_remote_file(self, host_i, host, remote_file, local_file, recurse,
                          **kwargs):
        """Make sftp client, copy file to local"""
        client = self._get_ssh_client(host_i, host)
        return client.copy_remote_file(
            remote_file, local_file, recurse=recurse, **kwargs)
    def _get_ssh_client(self, host_i, host):
        """Return client for (host_i, host), creating and caching it on
        first use."""
        logger.debug("Make client request for host %s, (host_i, host) in clients: %s",
                     host, (host_i, host) in self._host_clients)
        _client = self._host_clients.get((host_i, host))
        if _client is not None:
            return _client
        cfg = self._get_host_config(host_i)
        # Per-host private key overrides the client-wide key.
        _pkey = self.pkey if cfg.private_key is None else cfg.private_key
        _pkey_data = self._load_pkey_data(_pkey)
        _client = self._make_ssh_client(host, cfg, _pkey_data)
        self._host_clients[(host_i, host)] = _client
        return _client
    def _load_pkey_data(self, _pkey):
        # Non-string values pass through as already-loaded key material.
        if not isinstance(_pkey, str):
            return _pkey
        _pkey = _validate_pkey_path(_pkey)
        with open(_pkey, 'rb') as fh:
            _pkey_data = fh.read()
        return _pkey_data
    def _make_ssh_client(self, host, cfg, _pkey_data):
        # Implemented by protocol specific subclasses.
        raise NotImplementedError
| 25,010 | Python | .py | 514 | 37.338521 | 112 | 0.609498 | ParallelSSH/parallel-ssh | 1,198 | 148 | 36 | LGPL-2.1 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,380 | single.py | ParallelSSH_parallel-ssh/pssh/clients/base/single.py | # This file is part of parallel-ssh.
#
# Copyright (C) 2014-2022 Panos Kittenis and contributors.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, version 2.1.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import logging
import os
from getpass import getuser
from socket import gaierror as sock_gaierror, error as sock_error
from gevent import sleep, socket, Timeout as GTimeout
from gevent.hub import Hub
from gevent.select import poll, POLLIN, POLLOUT
from ssh2.exceptions import AgentConnectionError, AgentListIdentitiesError, \
AgentAuthenticationError, AgentGetIdentityError
from ssh2.utils import find_eol
from ..common import _validate_pkey
from ..reader import ConcurrentRWBuffer
from ...constants import DEFAULT_RETRIES, RETRY_DELAY
from ...exceptions import UnknownHostError, AuthenticationError, \
ConnectionError, Timeout, NoIPv6AddressFoundError
from ...output import HostOutput, HostOutputBuffers, BufferData
Hub.NOT_ERROR = (Exception,)
host_logger = logging.getLogger('pssh.host_logger')
logger = logging.getLogger(__name__)
class Stdin(object):
    """Write-only stdin stream bound to a channel.

    All writes go through the owning client's EAGAIN-handling wrapper.
    Provides ``write`` and ``flush`` only.
    """
    __slots__ = ('_channel', '_client')

    def __init__(self, channel, client):
        """
        :param channel: The channel the stdin stream is from.
        :type channel: IO object
        :param client: The SSH client the channel is from.
        :type client: ``BaseSSHClient``
        """
        self._channel = channel
        self._client = client

    def write(self, data):
        """Write *data* to stdin, retrying on EAGAIN.

        :param data: Data to write.
        :type data: str
        """
        channel, client = self._channel, self._client
        return client._eagain(channel.write, data)

    def flush(self):
        """Flush any data pending on stdin."""
        channel, client = self._channel, self._client
        return client._eagain(channel.flush)
class InteractiveShell(object):
    """
    Run commands on an interactive shell.

    Use as context manager to wait for commands to finish on exit.
    Read from .stdout and stderr once context manager has exited.

    ``InteractiveShell.output`` is a :py:class:`pssh.output.HostOutput` object.
    """
    __slots__ = ('_chan', '_client', 'output', '_encoding')
    _EOL = b'\n'

    def __init__(self, channel, client, encoding='utf-8', read_timeout=None):
        """
        :param channel: The channel to open shell on.
        :type channel: ``ssh2.channel.Channel`` or similar.
        :param client: The SSHClient that opened the channel.
        :type client: :py:class:`BaseSSHClient`
        :param encoding: Encoding to use for command string when calling ``run`` and shell output.
        :type encoding: str
        """
        self._chan = channel
        self._client = client
        self._encoding = encoding
        # Request the shell before wiring up output buffers.
        client._shell(channel)
        self.output = client._make_host_output(
            channel, encoding=encoding, read_timeout=read_timeout)

    @property
    def stdout(self):
        """Shortcut for ``self.output.stdout``."""
        return self.output.stdout

    @property
    def stderr(self):
        """Shortcut for ``self.output.stderr``."""
        return self.output.stderr

    @property
    def stdin(self):
        """Shortcut for ``self.output.stdin``."""
        return self.output.stdin

    @property
    def exit_code(self):
        """Shortcut for ``self.output.exit_code``."""
        return self.output.exit_code

    def __enter__(self):
        return self

    def __exit__(self, *args):
        self.close()

    def close(self):
        """Wait for shell to finish executing and close channel."""
        chan = self._chan
        if chan is None:
            return
        self._client._eagain(chan.send_eof)
        self._client.wait_finished(self.output)
        return self

    def run(self, cmd):
        """Run command on interactive shell.

        Note that ``\\n`` is appended to every command string.

        :param cmd: The command string to run.
        :type cmd: str
        """
        payload = cmd.encode(self._encoding) + self._EOL
        self._client._eagain_write(self._chan.write, payload)
class BaseSSHClient(object):
    """Base class for single host SSH clients - shared connection, retry
    and authentication logic for protocol specific implementations."""
    # Default private key locations tried during identity file auth.
    IDENTITIES = (
        os.path.expanduser('~/.ssh/id_rsa'),
        os.path.expanduser('~/.ssh/id_dsa'),
        os.path.expanduser('~/.ssh/identity'),
        os.path.expanduser('~/.ssh/id_ecdsa'),
        os.path.expanduser('~/.ssh/id_ed25519'),
    )
    def __init__(self, host,
                 user=None, password=None, port=None,
                 pkey=None, alias=None,
                 num_retries=DEFAULT_RETRIES,
                 retry_delay=RETRY_DELAY,
                 allow_agent=True, timeout=None,
                 proxy_host=None,
                 proxy_port=None,
                 _auth_thread_pool=True,
                 identity_auth=True,
                 ipv6_only=False,
                 ):
        """Store connection settings and connect.

        Connection, session initialisation and authentication all happen
        here via ``self._init()``.
        """
        self._auth_thread_pool = _auth_thread_pool
        self.host = host
        self.alias = alias
        self.user = user if user else getuser()
        self.password = password
        self.port = port if port else 22
        self.num_retries = num_retries
        self.sock = None
        self.timeout = timeout if timeout else None
        self.retry_delay = retry_delay
        self.allow_agent = allow_agent
        self.session = None
        # Connect via proxy when configured, to the target host otherwise.
        self._host = proxy_host if proxy_host else host
        self._port = proxy_port if proxy_port else self.port
        self.pkey = _validate_pkey(pkey)
        self.identity_auth = identity_auth
        self._keepalive_greenlet = None
        self.ipv6_only = ipv6_only
        self._init()
    def _pkey_from_memory(self, pkey_data):
        # Authenticate with in-memory key material - subclass specific.
        raise NotImplementedError
    def _init(self):
        # Connect socket, start session, authenticate, then switch the
        # session to non-blocking mode for co-operative I/O.
        self._connect(self._host, self._port)
        self._init_session()
        self._auth_retry()
        self._keepalive()
        logger.debug("Authentication completed successfully - "
                     "setting session to non-blocking mode")
        self.session.set_blocking(0)
    def _auth_retry(self, retries=1):
        # Retry authentication up to num_retries times with retry_delay
        # between attempts before raising AuthenticationError.
        try:
            self.auth()
        except Exception as ex:
            if retries < self.num_retries:
                sleep(self.retry_delay)
                return self._auth_retry(retries=retries+1)
            msg = "Authentication error while connecting to %s:%s - %s - retries %s/%s"
            raise AuthenticationError(msg, self.host, self.port, ex, retries, self.num_retries)
    def disconnect(self):
        # Close session and socket - subclass specific.
        raise NotImplementedError
    def __del__(self):
        # Best effort cleanup on garbage collection - never raise here.
        try:
            self.disconnect()
        except Exception:
            pass
    def __enter__(self):
        return self
    def __exit__(self, *args):
        self.disconnect()
    def open_shell(self, encoding='utf-8', read_timeout=None):
        """Open interactive shell on new channel.

        Can be used as context manager - ``with open_shell() as shell``.

        :param encoding: Encoding to use for command string and shell output.
        :type encoding: str
        :param read_timeout: Timeout in seconds for reading from output.
        :type read_timeout: float
        """
        chan = self.open_session()
        shell = InteractiveShell(chan, self, encoding=encoding, read_timeout=read_timeout)
        return shell
    def _shell(self, channel):
        # Request shell on channel - subclass specific.
        raise NotImplementedError
    def _disconnect_eagain(self):
        # Disconnect session, handling EAGAIN co-operatively.
        self._eagain(self.session.disconnect)
    def _connect_init_session_retry(self, retries):
        # Tear down any existing session and socket, then retry the full
        # connect + session init sequence.
        try:
            self._disconnect_eagain()
        except Exception:
            pass
        self.session = None
        if not self.sock.closed:
            try:
                self.sock.close()
            except Exception:
                pass
        sleep(self.retry_delay)
        self._connect(self._host, self._port, retries=retries)
        return self._init_session(retries=retries)
    def _get_addr_info(self, host, port):
        """Resolve host/port to TCP address info entries, optionally IPv6
        only.

        :raises NoIPv6AddressFoundError: When ``ipv6_only`` is set and no
            IPv6 addresses resolve for host.
        """
        addr_info = socket.getaddrinfo(host, port, proto=socket.IPPROTO_TCP)
        if self.ipv6_only:
            # addr[0] is the address family of the resolved entry.
            filtered = [addr for addr in addr_info if addr[0] is socket.AF_INET6]
            if not filtered:
                raise NoIPv6AddressFoundError(
                    "Requested IPv6 only and no IPv6 addresses found for host %s from "
                    "address list %s", host, [addr for _, _, _, _, addr in addr_info])
            addr_info = filtered
        return addr_info
    def _connect(self, host, port, retries=1):
        """Resolve host and connect a TCP socket, trying each resolved
        address in turn until one accepts the connection.
        :param host: Host to connect to.
        :param port: Port number.
        :param retries: Current retry attempt number.
        :raises: :py:class:`pssh.exceptions.UnknownHostError` on DNS
          resolution failure once retries are exhausted.
        :raises: :py:class:`ConnectionRefusedError` when all resolved
          addresses refused the connection.
        """
        try:
            addr_info = self._get_addr_info(host, port)
        except sock_gaierror as ex:
            logger.error("Could not resolve host '%s' - retry %s/%s",
                         host, retries, self.num_retries)
            if retries < self.num_retries:
                sleep(self.retry_delay)
                return self._connect(host, port, retries=retries+1)
            unknown_ex = UnknownHostError("Unknown host %s - %s - retry %s/%s",
                                          host, str(ex.args[1]), retries,
                                          self.num_retries)
            raise unknown_ex from ex
        for i, (family, _type, proto, _, sock_addr) in enumerate(addr_info):
            try:
                return self._connect_socket(family, _type, sock_addr, host, port, retries)
            except ConnectionRefusedError as ex:
                # Try next resolved address - only re-raise when this was the
                # last address available.
                if i+1 == len(addr_info):
                    logger.error("No available addresses from %s", [addr[4] for addr in addr_info])
                    ex.args += (host, port)
                    raise
                continue
def _connect_socket(self, family, _type, sock_addr, host, port, retries):
self.sock = socket.socket(family, _type)
if self.timeout:
self.sock.settimeout(self.timeout)
logger.debug("Connecting to %s:%s", host, port)
try:
self.sock.connect(sock_addr)
except ConnectionRefusedError:
raise
except sock_error as ex:
logger.error("Error connecting to host '%s:%s' - retry %s/%s",
host, port, retries, self.num_retries)
while retries < self.num_retries:
sleep(self.retry_delay)
return self._connect(host, port, retries=retries+1)
error_type = ex.args[1] if len(ex.args) > 1 else ex.args[0]
ex = ConnectionError(
"Error connecting to host '%s:%s' - %s - retry %s/%s",
host, port, str(error_type), retries,
self.num_retries,)
raise ex
def _identity_auth(self):
for identity_file in self.IDENTITIES:
if not os.path.isfile(identity_file):
continue
logger.debug(
"Trying to authenticate with identity file %s",
identity_file)
try:
self._pkey_file_auth(identity_file, password=self.password)
except Exception as ex:
logger.debug(
"Authentication with identity file %s failed with %s, "
"continuing with other identities",
identity_file, ex)
continue
else:
logger.info("Authentication succeeded with identity file %s",
identity_file)
return
raise AuthenticationError("No authentication methods succeeded")
    def _init_session(self, retries=1):
        """Initialise SSH session over connected socket. Implemented by
        subclasses."""
        raise NotImplementedError
    def _keepalive(self):
        """Keepalive handling for the session. Implemented by subclasses."""
        raise NotImplementedError
    def auth(self):
        """Authenticate the session.
        Methods are tried in order: provided private key (``self.pkey``),
        SSH agent (when ``allow_agent``), default identity files (when
        ``identity_auth``), then password.
        :raises: :py:class:`pssh.exceptions.AuthenticationError` when no
          authentication method succeeds.
        """
        if self.pkey is not None:
            logger.debug(
                "Proceeding with private key authentication")
            return self._pkey_auth(self.pkey)
        if self.allow_agent:
            try:
                self._agent_auth()
            except (AgentAuthenticationError, AgentConnectionError, AgentGetIdentityError,
                    AgentListIdentitiesError) as ex:
                # Expected agent failures - fall through to other methods.
                logger.debug("Agent auth failed with %s "
                             "continuing with other authentication methods", repr(ex))
            except Exception as ex:
                # Unexpected agent errors are logged but do not abort auth.
                logger.error("Agent auth failed with - %s", repr(ex))
            else:
                logger.debug("Authentication with SSH Agent succeeded")
                return
        if self.identity_auth:
            try:
                return self._identity_auth()
            except AuthenticationError:
                # Only re-raise when there is no password to fall back on.
                if self.password is None:
                    raise
        if self.password is None:
            msg = "No remaining authentication methods"
            logger.error(msg)
            raise AuthenticationError(msg)
        logger.debug("Private key auth failed, trying password")
        self._password_auth()
    def _agent_auth(self):
        """Authenticate via SSH agent. Implemented by subclasses."""
        raise NotImplementedError
    def _password_auth(self):
        """Authenticate with password. Implemented by subclasses."""
        raise NotImplementedError
def _pkey_auth(self, pkey):
_pkey = pkey
if isinstance(pkey, str):
logger.debug("Private key is provided as str, loading from private key file path")
with open(pkey, 'rb') as fh:
_pkey = fh.read()
elif isinstance(pkey, bytes):
logger.debug("Private key is provided in bytes, using as private key data")
return self._pkey_from_memory(_pkey)
    def _pkey_file_auth(self, pkey_file, password=None):
        """Authenticate with private key file. Implemented by subclasses."""
        raise NotImplementedError
    def _open_session(self):
        """Open new channel from session. Implemented by subclasses."""
        raise NotImplementedError
    def open_session(self):
        """Open new session channel. Implemented by subclasses."""
        raise NotImplementedError
    def _make_host_output(self, channel, encoding, read_timeout):
        """Build a HostOutput for the given channel with stdout and stderr
        buffer readers already started.
        :rtype: :py:class:`pssh.output.HostOutput`
        """
        _stdout_buffer = ConcurrentRWBuffer()
        _stderr_buffer = ConcurrentRWBuffer()
        # Readers drain channel output into buffers concurrently.
        _stdout_reader, _stderr_reader = self._make_output_readers(
            channel, _stdout_buffer, _stderr_buffer)
        _stdout_reader.start()
        _stderr_reader.start()
        _buffers = HostOutputBuffers(
            stdout=BufferData(rw_buffer=_stdout_buffer, reader=_stdout_reader),
            stderr=BufferData(rw_buffer=_stderr_buffer, reader=_stderr_reader))
        host_out = HostOutput(
            host=self.host, alias=self.alias, channel=channel, stdin=Stdin(channel, self),
            client=self, encoding=encoding, read_timeout=read_timeout,
            buffers=_buffers,
        )
        return host_out
    def _make_output_readers(self, channel, stdout_buffer, stderr_buffer):
        """Create stdout and stderr readers for channel. Implemented by
        subclasses."""
        raise NotImplementedError
    def execute(self, cmd, use_pty=False, channel=None):
        """Execute command on channel. Implemented by subclasses."""
        raise NotImplementedError
    def read_stderr(self, stderr_buffer, timeout=None):
        """Read standard error buffer.
        Returns a generator of line by line output.
        :param stderr_buffer: Buffer to read from.
        :type stderr_buffer: :py:class:`pssh.clients.reader.ConcurrentRWBuffer`
        :param timeout: Timeout in seconds - defaults to no timeout.
        :type timeout: int or float
        :rtype: generator
        :raises: :py:class:`pssh.exceptions.Timeout` when timeout is set and
          remote output is not ready within it.
        """
        logger.debug("Reading from stderr buffer, timeout=%s", timeout)
        return self._read_output_buffer(stderr_buffer, timeout=timeout)
    def read_output(self, stdout_buffer, timeout=None):
        """Read standard output buffer.
        Returns a generator of line by line output.
        :param stdout_buffer: Buffer to read from.
        :type stdout_buffer: :py:class:`pssh.clients.reader.ConcurrentRWBuffer`
        :param timeout: Timeout in seconds - defaults to no timeout.
        :type timeout: int or float
        :rtype: generator
        :raises: :py:class:`pssh.exceptions.Timeout` when timeout is set and
          remote output is not ready within it.
        """
        logger.debug("Reading from stdout buffer, timeout=%s", timeout)
        return self._read_output_buffer(stdout_buffer, timeout=timeout)
    def _read_output_buffer(self, _buffer, timeout=None):
        """Yield complete lines from buffer, joining partial lines across
        chunk boundaries.
        :param _buffer: Buffer to iterate data chunks from.
        :param timeout: Optional timeout in seconds for the whole read.
        :rtype: generator of bytes lines, without line endings
        :raises: :py:class:`pssh.exceptions.Timeout` when timeout given and
          exceeded.
        """
        timer = GTimeout(seconds=timeout, exception=Timeout)
        remainder = b""
        remainder_len = 0
        timer.start()
        try:
            for data in _buffer:
                pos = 0
                size = len(data)
                while pos < size:
                    # NOTE(review): find_eol presumably returns the offset of
                    # the next line ending relative to pos (-1 when none) and
                    # the length of the line ending sequence - confirm
                    # against find_eol's definition.
                    linesep, new_line_pos = find_eol(data, pos)
                    if linesep == -1:
                        # No line ending in rest of chunk - carry over as
                        # partial line for the next chunk.
                        remainder += data[pos:]
                        remainder_len = len(remainder)
                        break
                    end_of_line = pos+linesep
                    if remainder_len > 0:
                        # Prepend carried-over partial line.
                        line = remainder + data[pos:end_of_line]
                        remainder = b""
                        remainder_len = 0
                    else:
                        line = data[pos:end_of_line]
                    yield line
                    pos += linesep + new_line_pos
            if remainder_len > 0:
                # Finished reading without finding ending linesep
                yield remainder
        finally:
            timer.close()
    def _read_output_to_buffer(self, read_func, _buffer):
        """Read all output from read_func into buffer. Implemented by
        subclasses."""
        raise NotImplementedError
    def wait_finished(self, host_output, timeout=None):
        """Wait for command on host output's channel to finish. Implemented
        by subclasses."""
        raise NotImplementedError
    def close_channel(self, channel):
        """Close given channel. Implemented by subclasses."""
        raise NotImplementedError
    def get_exit_status(self, channel):
        """Get exit status of command on channel. Implemented by
        subclasses."""
        raise NotImplementedError
def read_output_buffer(self, output_buffer, prefix=None,
callback=None,
callback_args=None,
encoding='utf-8'):
"""Read from output buffers and log to ``host_logger``.
:param output_buffer: Iterator containing buffer.
:type output_buffer: iterator
:param prefix: String to prefix log output to ``host_logger`` with.
:type prefix: str
:param callback: Function to call back once buffer is depleted.
:type callback: function
:param callback_args: Arguments for call back function.
:type callback_args: tuple
:param encoding: Encoding for output.
:type encoding: str
"""
prefix = '' if prefix is None else prefix
for line in output_buffer:
output = line.decode(encoding)
host_logger.info("[%s]%s\t%s", self.host, prefix, output)
yield output
if callback:
callback(*callback_args)
def run_command(self, command, sudo=False, user=None,
use_pty=False, shell=None,
encoding='utf-8', timeout=None, read_timeout=None):
"""Run remote command.
:param command: Command to run.
:type command: str
:param sudo: Run command via sudo as super-user.
:type sudo: bool
:param user: Run command as user via sudo
:type user: str
:param use_pty: Whether or not to obtain a PTY on the channel.
:type use_pty: bool
:param shell: (Optional) Override shell to use to run command with.
Defaults to login user's defined shell. Use the shell's command
syntax, eg `shell='bash -c'` or `shell='zsh -c'`.
:type shell: str
:param encoding: Encoding to use for output. Must be valid
`Python codec <https://docs.python.org/library/codecs.html>`_
:type encoding: str
:param read_timeout: (Optional) Timeout in seconds for reading output.
:type read_timeout: float
:param timeout: Deprecated - use read_timeout.
:rtype: :py:class:`pssh.output.HostOutput`
"""
# Fast path for no command substitution needed
if not sudo and not user and not shell:
_command = command
else:
_command = ''
if sudo and not user:
_command = 'sudo -S '
elif user:
_command = 'sudo -u %s -S ' % (user,)
_shell = shell if shell else '$SHELL -c'
_command += "%s '%s'" % (_shell, command,)
_command = _command.encode(encoding)
with GTimeout(seconds=self.timeout):
channel = self.execute(_command, use_pty=use_pty)
_timeout = read_timeout if read_timeout else timeout
host_out = self._make_host_output(channel, encoding, _timeout)
return host_out
    def _eagain_write_errcode(self, write_func, data, eagain):
        """Write all of data via write_func, polling the socket and yielding
        to the event loop whenever the given EAGAIN error code is returned.
        :param write_func: Callable returning (return code, bytes written).
        :param data: Bytes-like data to write fully.
        :param eagain: Library error code signalling a would-block write.
        """
        data_len = len(data)
        total_written = 0
        while total_written < data_len:
            rc, bytes_written = write_func(data[total_written:])
            total_written += bytes_written
            if rc == eagain:
                # Would block - wait for socket readiness, then yield to the
                # event loop before retrying.
                self.poll()
                sleep()
    def _eagain_errcode(self, func, eagain, *args, **kwargs):
        """Run func, retrying while it returns the given EAGAIN error code,
        within an optional timeout.
        :param timeout: Optional keyword argument - timeout in seconds,
          defaulting to ``self.timeout``.
        :raises: :py:class:`pssh.exceptions.Timeout` when timeout exceeded.
        """
        timeout = kwargs.pop('timeout', self.timeout)
        with GTimeout(seconds=timeout, exception=Timeout):
            ret = func(*args, **kwargs)
            while ret == eagain:
                # Would block - wait for socket readiness and yield to the
                # event loop before retrying.
                self.poll()
                ret = func(*args, **kwargs)
                sleep()
            return ret
    def _eagain_write(self, write_func, data):
        """Write data handling EAGAIN. Implemented by subclasses."""
        raise NotImplementedError
    def _eagain(self, func, *args, **kwargs):
        """Run function handling EAGAIN. Implemented by subclasses."""
        raise NotImplementedError
    def _make_sftp(self):
        """Make SFTP client from session. Implemented by subclasses."""
        raise NotImplementedError
    def _mkdir(self, sftp, directory):
        """Make single directory via SFTP. Implemented by subclasses."""
        raise NotImplementedError
    def copy_file(self, local_file, remote_file, recurse=False,
                  sftp=None):
        """Copy local file to remote host via SFTP. Implemented by
        subclasses."""
        raise NotImplementedError
    def _sftp_put(self, remote_fh, local_file):
        """Write local file data to remote file handle. Implemented by
        subclasses."""
        raise NotImplementedError
    def sftp_put(self, sftp, local_file, remote_file):
        """Copy local file to remote file via SFTP. Implemented by
        subclasses."""
        raise NotImplementedError
    def mkdir(self, sftp, directory):
        """Make directory path via SFTP. Implemented by subclasses."""
        raise NotImplementedError
def _copy_dir(self, local_dir, remote_dir, sftp):
"""Call copy_file on every file in the specified directory, copying
them to the specified remote directory."""
self.mkdir(sftp, remote_dir)
file_list = os.listdir(local_dir)
for file_name in file_list:
local_path = os.path.join(local_dir, file_name)
remote_path = '/'.join([remote_dir, file_name])
self.copy_file(local_path, remote_path, recurse=True,
sftp=sftp)
    def copy_remote_file(self, remote_file, local_file, recurse=False,
                         sftp=None, encoding='utf-8'):
        """Copy remote file to local host via SFTP. Implemented by
        subclasses."""
        raise NotImplementedError
    def scp_recv(self, remote_file, local_file, recurse=False, sftp=None,
                 encoding='utf-8'):
        """Copy remote file to local host via SCP. Implemented by
        subclasses."""
        raise NotImplementedError
    def _scp_recv(self, remote_file, local_file):
        """SCP receive a single file. Implemented by subclasses."""
        raise NotImplementedError
def _scp_send_dir(self, local_dir, remote_dir, sftp):
file_list = os.listdir(local_dir)
for file_name in file_list:
local_path = os.path.join(local_dir, file_name)
remote_path = '/'.join([remote_dir, file_name])
self.scp_send(local_path, remote_path, recurse=True,
sftp=sftp)
    def _scp_recv_dir(self, file_list, remote_dir, local_dir, sftp,
                      encoding='utf-8'):
        """SCP receive each entry in file_list from remote_dir into
        local_dir, recursing into sub-directories and skipping the '.' and
        '..' entries.
        :param file_list: Directory entry names as bytes, decoded with
          ``encoding``.
        """
        for file_name in file_list:
            file_name = file_name.decode(encoding)
            if file_name in ('.', '..'):
                continue
            remote_path = os.path.join(remote_dir, file_name)
            local_path = os.path.join(local_dir, file_name)
            logger.debug("Attempting recursive copy from %s:%s to %s",
                         self.host, remote_path, local_path)
            self.scp_recv(remote_path, local_path, sftp=sftp,
                          recurse=True)
    def scp_send(self, local_file, remote_file, recurse=False, sftp=None):
        """Copy local file to remote host via SCP. Implemented by
        subclasses."""
        raise NotImplementedError
    def _scp_send(self, local_file, remote_file):
        """SCP send a single file. Implemented by subclasses."""
        raise NotImplementedError
def _sftp_readdir(self, dir_h):
for size, buf, attrs in dir_h.readdir():
for line in buf.splitlines():
yield line
    def _sftp_openfh(self, open_func, remote_file, *args):
        """Open SFTP file handle via open_func. Implemented by
        subclasses."""
        raise NotImplementedError
    def _sftp_get(self, remote_fh, local_file):
        """Read remote file handle data into local file. Implemented by
        subclasses."""
        raise NotImplementedError
    def sftp_get(self, sftp, remote_file, local_file):
        """Copy remote file to local file via SFTP. Implemented by
        subclasses."""
        raise NotImplementedError
    def _copy_remote_dir(self, file_list, remote_dir, local_dir, sftp,
                         encoding='utf-8'):
        """Copy each entry in file_list from remote_dir into local_dir via
        SFTP, recursing into sub-directories and skipping the '.' and '..'
        entries.
        :param file_list: Directory entry names as bytes, decoded with
          ``encoding``.
        """
        for file_name in file_list:
            file_name = file_name.decode(encoding)
            if file_name in ('.', '..'):
                continue
            remote_path = os.path.join(remote_dir, file_name)
            local_path = os.path.join(local_dir, file_name)
            self.copy_remote_file(remote_path, local_path, sftp=sftp,
                                  recurse=True, encoding=encoding)
def _make_local_dir(self, dirpath):
if os.path.exists(dirpath):
return
try:
os.makedirs(dirpath)
except OSError:
logger.error("Unable to create local directory structure for "
"directory %s", dirpath)
raise
def _remote_paths_split(self, file_path):
_sep = file_path.rfind('/')
if _sep > 0:
return file_path[:_sep]
    def poll(self):
        """Poll session for pending directions. Implemented by
        subclasses."""
        raise NotImplementedError
    def _poll_socket(self, events):
        """Wait briefly for the requested poll events on the session
        socket."""
        if self.sock is None:
            return
        poller = poll()
        poller.register(self.sock, eventmask=events)
        # NOTE(review): stdlib poll timeout is in milliseconds - assuming the
        # same semantics for this event loop's poll; confirm.
        poller.poll(timeout=1)
def _poll_errcodes(self, directions_func, inbound, outbound):
directions = directions_func()
if directions == 0:
return
events = 0
if directions & inbound:
events = POLLIN
if directions & outbound:
events |= POLLOUT
self._poll_socket(events)
| 26,065 | Python | .py | 611 | 31.9509 | 99 | 0.596442 | ParallelSSH/parallel-ssh | 1,198 | 148 | 36 | LGPL-2.1 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,381 | parallel.py | ParallelSSH_parallel-ssh/pssh/clients/native/parallel.py | # This file is part of parallel-ssh.
#
# Copyright (C) 2014-2022 Panos Kittenis and contributors.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, version 2.1.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import logging
from .single import SSHClient
from ..base.parallel import BaseParallelSSHClient
from ..common import _validate_pkey
from ...constants import DEFAULT_RETRIES, RETRY_DELAY
from ...exceptions import HostArgumentError
logger = logging.getLogger(__name__)
class ParallelSSHClient(BaseParallelSSHClient):
"""ssh2-python based parallel client."""
    def __init__(self, hosts, user=None, password=None, port=22, pkey=None,
                 num_retries=DEFAULT_RETRIES, timeout=None, pool_size=100,
                 allow_agent=True, host_config=None, retry_delay=RETRY_DELAY,
                 proxy_host=None, proxy_port=None,
                 proxy_user=None, proxy_password=None, proxy_pkey=None,
                 forward_ssh_agent=False,
                 keepalive_seconds=60, identity_auth=True,
                 ipv6_only=False,
                 ):
        """
        :param hosts: Hosts to connect to
        :type hosts: list(str)
        :param user: (Optional) User to login as. Defaults to logged in user
        :type user: str
        :param password: (Optional) Password to use for login. Defaults to
          no password
        :type password: str
        :param port: (Optional) Port number to use for SSH connection. Defaults
          to 22.
        :type port: int
        :param pkey: Private key file path or private key data to use.
          Paths must be str type and either absolute
          path or relative to user home directory like ``~/<path>``.
          Bytes type input is used as private key data for authentication.
        :type pkey: str or bytes
        :param num_retries: (Optional) Number of connection and authentication
          attempts before the client gives up. Defaults to 3.
        :type num_retries: int
        :param retry_delay: Number of seconds to wait between retries. Defaults
          to :py:class:`pssh.constants.RETRY_DELAY`
        :type retry_delay: int or float
        :param timeout: (Optional) Global timeout setting in seconds for all remote
          operations including all SSH client operations DNS, opening connections,
          reading output from remote servers, et al.
          For concurrent functions this is a cumulative timeout
          setting for all concurrent operations. These functions, eg ``run_command`` and
          ``join``, also allow timeout to be set just for those functions if not
          set globally via this option.
          For per host operations like
          ``list(host_out.stdout)`` for reading output it is applied per host if set.
          Host output read timeout can also be set separately via
          ``run_command(<..>, read_timeout=<seconds>)``
          Defaults to OS default - usually 60 seconds.
        :type timeout: int or float
        :param pool_size: (Optional) Greenlet pool size. Controls
          concurrency, on how many hosts to execute tasks in parallel.
          Defaults to 100. Overhead in event
          loop will determine how high this can be set to, see scaling guide
          lines in project's readme.
        :type pool_size: int
        :param host_config: (Optional) Per-host configuration for cases where
          not all hosts use the same configuration.
        :type host_config: list(:py:class:`pssh.config.HostConfig`)
        :param allow_agent: (Optional) set to False to disable connecting to
          the system's SSH agent.
        :type allow_agent: bool
        :param identity_auth: (Optional) set to False to disable attempting to
          authenticate with default identity files from
          `pssh.clients.base_ssh_client.BaseSSHClient.IDENTITIES`
        :type identity_auth: bool
        :param proxy_host: (Optional) SSH host to tunnel connection through
          so that SSH clients connect to host via client -> proxy_host -> host
        :type proxy_host: str
        :param proxy_port: (Optional) SSH port to use to login to proxy host if
          set. Defaults to 22.
        :type proxy_port: int
        :param proxy_user: (Optional) User to login to ``proxy_host`` as.
          Defaults to logged in user.
        :type proxy_user: str
        :param proxy_password: (Optional) Password to login to ``proxy_host``
          with. Defaults to no password.
        :type proxy_password: str
        :param proxy_pkey: (Optional) Private key file to be used for
          authentication with ``proxy_host``. Defaults to available keys from
          SSHAgent and user's SSH identities.
          Bytes type input is used as private key data for authentication.
        :type proxy_pkey: str or bytes
        :param forward_ssh_agent: (Optional) Turn on SSH agent forwarding.
          Currently unused.
        :type forward_ssh_agent: bool
        :param keepalive_seconds: (Optional) Keepalive interval in seconds,
          passed on to each per-host SSH client. Defaults to 60.
        :type keepalive_seconds: int
        :param ipv6_only: Choose IPv6 addresses only if multiple are available
          for the host(s) or raise NoIPv6AddressFoundError otherwise. Note this will
          disable connecting to an IPv4 address if an IP address is provided instead.
        :type ipv6_only: bool
        :raises: :py:class:`pssh.exceptions.PKeyFileError` on errors finding
          provided private key.
        """
        BaseParallelSSHClient.__init__(
            self, hosts, user=user, password=password, port=port, pkey=pkey,
            allow_agent=allow_agent, num_retries=num_retries,
            timeout=timeout, pool_size=pool_size,
            host_config=host_config, retry_delay=retry_delay,
            identity_auth=identity_auth,
            ipv6_only=ipv6_only,
        )
        self.proxy_host = proxy_host
        self.proxy_port = proxy_port
        # Proxy key is validated/normalised same as the main pkey.
        self.proxy_pkey = _validate_pkey(proxy_pkey)
        self.proxy_user = proxy_user
        self.proxy_password = proxy_password
        self.forward_ssh_agent = forward_ssh_agent
        self.keepalive_seconds = keepalive_seconds
    def run_command(self, command, sudo=False, user=None, stop_on_errors=True,
                    use_pty=False, host_args=None, shell=None,
                    encoding='utf-8', read_timeout=None,
                    ):
        """Run command on all hosts in parallel, honoring self.pool_size,
        and return output.
        This function will block until all commands have been received
        by remote servers and then return immediately.
        More explicitly, function will return after connection and
        authentication establishment in the case of on new connections and
        after execute
        commands have been accepted by successfully established SSH channels.
        Any connection and/or authentication exceptions will be raised here
        and need catching *unless* ``run_command`` is called with
        ``stop_on_errors=False`` in which case exceptions are added to
        individual host output instead.
        :param command: Command to run
        :type command: str
        :param sudo: (Optional) Run with sudo. Defaults to False
        :type sudo: bool
        :param user: (Optional) User to run command as. Requires sudo access
          for that user from the logged in user account.
        :type user: str
        :param stop_on_errors: (Optional) Raise exception on errors running
          command. Defaults to True. With stop_on_errors set to False,
          exceptions are instead added to output of `run_command`. See example
          usage below.
        :type stop_on_errors: bool
        :param shell: (Optional) Override shell to use to run command with.
          Defaults to login user's defined shell. Use the shell's command
          syntax, eg `shell='bash -c'` or `shell='zsh -c'`.
        :type shell: str
        :param use_pty: (Optional) Enable/Disable use of pseudo terminal
          emulation. Defaults to ``False``
        :type use_pty: bool
        :param host_args: (Optional) Format command string with per-host
          arguments in ``host_args``. ``host_args`` length must equal length of
          host list - :py:class:`pssh.exceptions.HostArgumentError` is
          raised otherwise
        :type host_args: tuple or list
        :param encoding: Encoding to use for command string and output. Must be valid
          `Python codec <https://docs.python.org/library/codecs.html>`_
        :type encoding: str
        :param read_timeout: (Optional) Timeout in seconds for reading from stdout
          or stderr. Reading from stdout/stderr will
          raise :py:class:`pssh.exceptions.Timeout`
          after ``timeout`` seconds when set if remote output is not ready.
        :type read_timeout: float
        :rtype: list(:py:class:`pssh.output.HostOutput`)
        :raises: :py:class:`pssh.exceptions.AuthenticationError` on
          authentication error
        :raises: :py:class:`pssh.exceptions.UnknownHostError` on DNS
          resolution error
        :raises: :py:class:`pssh.exceptions.ConnectionError` on error
          connecting
        :raises: :py:class:`pssh.exceptions.HostArgumentError` on number of
          host arguments not equal to number of hosts
        :raises: :py:class:`TypeError` on not enough host arguments for cmd
          string format
        :raises: :py:class:`KeyError` on no host argument key in arguments
          dict for cmd string format
        :raises: :py:class:`pssh.exceptions.ProxyError` on errors connecting
          to proxy if a proxy host has been set.
        :raises: :py:class:`pssh.exceptions.Timeout` on timeout starting command.
        :raises: Exceptions from :py:mod:`ssh2.exceptions` for all other
          specific errors such as
          :py:class:`ssh2.exceptions.SocketDisconnectError` et al.
        """
        # Delegate to the generic base class implementation.
        return BaseParallelSSHClient.run_command(
            self, command, stop_on_errors=stop_on_errors, host_args=host_args,
            user=user, shell=shell, sudo=sudo,
            encoding=encoding, use_pty=use_pty,
            read_timeout=read_timeout,
        )
    def _make_ssh_client(self, host, cfg, _pkey_data):
        """Create a single-host SSH client, merging per-host configuration
        with client-wide defaults.
        NOTE(review): per-host values are merged with ``or``, so falsy
        per-host overrides (eg ``allow_agent=False``, ``port=0``) fall back
        to the client-wide setting - confirm this is intended.
        :rtype: :py:class:`pssh.clients.native.single.SSHClient`
        """
        _client = SSHClient(
            host, user=cfg.user or self.user, password=cfg.password or self.password, port=cfg.port or self.port,
            pkey=_pkey_data, num_retries=cfg.num_retries or self.num_retries,
            alias=cfg.alias,
            timeout=cfg.timeout or self.timeout,
            allow_agent=cfg.allow_agent or self.allow_agent, retry_delay=cfg.retry_delay or self.retry_delay,
            proxy_host=cfg.proxy_host or self.proxy_host,
            proxy_port=cfg.proxy_port or self.proxy_port,
            proxy_user=cfg.proxy_user or self.proxy_user,
            proxy_password=cfg.proxy_password or self.proxy_password,
            proxy_pkey=cfg.proxy_pkey or self.proxy_pkey,
            _auth_thread_pool=cfg.auth_thread_pool or self._auth_thread_pool,
            forward_ssh_agent=cfg.forward_ssh_agent or self.forward_ssh_agent,
            keepalive_seconds=cfg.keepalive_seconds or self.keepalive_seconds,
            identity_auth=cfg.identity_auth or self.identity_auth,
            ipv6_only=cfg.ipv6_only or self.ipv6_only,
        )
        return _client
    def copy_file(self, local_file, remote_file, recurse=False, copy_args=None):
        """Copy local file to remote file in parallel via SFTP.
        This function returns a list of greenlets which can be
        `join`-ed on to wait for completion.
        :py:func:`gevent.joinall` function may be used to join on all greenlets
        and will also raise exceptions from them if called with
        ``raise_error=True`` - default is `False`.
        Alternatively call `.get()` on each greenlet to raise any exceptions
        from it.
        Exceptions listed here are raised when
        either ``gevent.joinall(<greenlets>, raise_error=True)``
        or ``.get()`` on each greenlet are called, not this function itself.
        :param local_file: Local filepath to copy to remote host
        :type local_file: str
        :param remote_file: Remote filepath on remote host to copy file to
        :type remote_file: str
        :param recurse: Whether or not to descend into directories recursively.
        :type recurse: bool
        :param copy_args: (Optional) format local_file and remote_file strings
          with per-host arguments in ``copy_args``. ``copy_args`` length must
          equal length of host list -
          :py:class:`pssh.exceptions.HostArgumentError` is raised otherwise
        :type copy_args: tuple or list
        :rtype: list(:py:class:`gevent.Greenlet`) of greenlets for remote copy
          commands
        :raises: :py:class:`ValueError` when a directory is supplied to
          local_file and recurse is not set
        :raises: :py:class:`pssh.exceptions.HostArgumentError` on number of
          per-host copy arguments not equal to number of hosts
        :raises: :py:class:`pssh.exceptions.SFTPError` on SFTP initialisation
          errors
        :raises: :py:class:`pssh.exceptions.SFTPIOError` on I/O errors writing
          via SFTP
        :raises: :py:class:`OSError` on local OS errors like permission denied
        .. note ::
          Remote directories in ``remote_file`` that do not exist will be
          created as long as permissions allow.
        """
        # Delegate to the generic base class implementation.
        return BaseParallelSSHClient.copy_file(
            self, local_file, remote_file, recurse=recurse, copy_args=copy_args)
    def copy_remote_file(self, remote_file, local_file, recurse=False,
                         suffix_separator='_', copy_args=None,
                         encoding='utf-8'):
        """Copy remote file(s) in parallel via SFTP as
        <local_file><suffix_separator><host>
        With a ``local_file`` value of ``myfile`` and default separator ``_``
        the resulting filename will be ``myfile_myhost`` for the file from host
        ``myhost``.
        This function, like :py:func:`ParallelSSHClient.copy_file`, returns a
        list of greenlets which can be `join`-ed on to wait for completion.
        :py:func:`gevent.joinall` function may be used to join on all greenlets
        and will also raise exceptions if called with ``raise_error=True`` -
        default is `False`.
        Alternatively call `.get` on each greenlet to raise any exceptions from
        it.
        Exceptions listed here are raised when
        either ``gevent.joinall(<greenlets>, raise_error=True)`` is called
        or ``.get`` is called on each greenlet, not this function itself.
        :param remote_file: remote filepath to copy to local host
        :type remote_file: str
        :param local_file: local filepath on local host to copy file to
        :type local_file: str
        :param recurse: whether or not to recurse
        :type recurse: bool
        :param suffix_separator: (Optional) Separator string between
          filename and host, defaults to ``_``. For example, for a
          ``local_file`` value of ``myfile`` and default separator the
          resulting filename will be ``myfile_myhost`` for the file from
          host ``myhost``. ``suffix_separator`` has no meaning if
          ``copy_args`` is provided
        :type suffix_separator: str
        :param copy_args: (Optional) format remote_file and local_file strings
          with per-host arguments in ``copy_args``. ``copy_args`` length must
          equal length of host list -
          :py:class:`pssh.exceptions.HostArgumentError` is raised otherwise
        :type copy_args: tuple or list
        :param encoding: Encoding to use for file paths.
        :type encoding: str
        :rtype: list(:py:class:`gevent.Greenlet`) of greenlets for remote copy
          commands
        :raises: :py:class:`ValueError` when a directory is supplied to
          local_file and recurse is not set
        :raises: :py:class:`pssh.exceptions.HostArgumentError` on number of
          per-host copy arguments not equal to number of hosts
        :raises: :py:class:`pssh.exceptions.SFTPError` on SFTP initialisation
          errors
        :raises: :py:class:`pssh.exceptions.SFTPIOError` on I/O errors reading
          from SFTP
        :raises: :py:class:`OSError` on local OS errors like permission denied
        .. note ::
          Local directories in `local_file` that do not exist will be
          created as long as permissions allow.
        .. note ::
          File names will be de-duplicated by appending the hostname to the
          filepath separated by ``suffix_separator``.
        """
        # Delegate to the generic base class implementation.
        return BaseParallelSSHClient.copy_remote_file(
            self, remote_file, local_file, recurse=recurse,
            suffix_separator=suffix_separator, copy_args=copy_args,
            encoding=encoding)
def _scp_send(self, host_i, host, local_file, remote_file, recurse=False):
_client = self._get_ssh_client(host_i, host)
return _client.scp_send(local_file, remote_file, recurse=recurse)
def _scp_recv(self, host_i, host, remote_file, local_file, recurse=False):
_client = self._get_ssh_client(host_i, host)
return _client.scp_recv(remote_file, local_file, recurse=recurse)
    def scp_send(self, local_file, remote_file, recurse=False, copy_args=None):
        """Copy local file to remote file in parallel via SCP.
        This function returns a list of greenlets which can be
        `join`-ed on to wait for completion.
        :py:func:`gevent.joinall` function may be used to join on all greenlets
        and will also raise exceptions from them if called with
        ``raise_error=True`` - default is `False`.
        Alternatively call `.get()` on each greenlet to raise any exceptions
        from it.
        .. note::
          Creating remote directories when either ``remote_file`` contains
          directory paths or ``recurse`` is enabled requires SFTP support on
          the server as libssh2 SCP implementation lacks directory creation
          support.
        :param local_file: Local filepath to copy to remote host
        :type local_file: str
        :param remote_file: Remote filepath on remote host to copy file to
        :type remote_file: str
        :param copy_args: (Optional) format local_file and remote_file strings
          with per-host arguments in ``copy_args``. ``copy_args`` length must
          equal length of host list -
          :py:class:`pssh.exceptions.HostArgumentError` is raised otherwise
        :type copy_args: tuple or list
        :param recurse: Whether or not to descend into directories recursively.
        :type recurse: bool
        :rtype: list(:py:class:`gevent.Greenlet`) of greenlets for remote copy
          commands.
        :raises: :py:class:`pssh.exceptions.SCPError` on errors copying file.
        :raises: :py:class:`OSError` on local OS errors like permission denied.
        """
        # With no copy_args given, build identity per-host arguments so the
        # %-template substitution below is a no-op.
        copy_args = [{'local_file': local_file,
                      'remote_file': remote_file}
                     for _ in self.hosts] \
            if copy_args is None else copy_args
        # Paths become %-templates substituted with each host's copy_args
        # entry, allowing per-host path customisation.
        local_file = "%(local_file)s"
        remote_file = "%(remote_file)s"
        try:
            return [self.pool.spawn(self._scp_send, host_i, host,
                                    local_file % copy_args[host_i],
                                    remote_file % copy_args[host_i],
                                    recurse=recurse)
                    for host_i, host in enumerate(self.hosts)]
        except IndexError:
            # Fewer copy_args entries than hosts.
            raise HostArgumentError(
                "Number of per-host copy arguments provided does not match "
                "number of hosts")
def scp_recv(self, remote_file, local_file, recurse=False, copy_args=None,
suffix_separator='_'):
"""Copy remote file(s) in parallel via SCP as
<local_file><suffix_separator><host> or as per ``copy_args`` argument.
With a ``local_file`` value of ``myfile`` and default separator ``_``
the resulting filename will be ``myfile_myhost`` for the file from host
``myhost``.
De-duplication behaviour is configurable by providing ``copy_args``
argument, see below.
This function, like :py:func:`ParallelSSHClient.scp_send`, returns a
list of greenlets which can be `join`-ed on to wait for completion.
:py:func:`gevent.joinall` function may be used to join on all greenlets
and will also raise exceptions if called with ``raise_error=True`` -
default is `False`.
Alternatively call `.get` on each greenlet to raise any exceptions from
it.
Exceptions listed here are raised when
either ``gevent.joinall(<greenlets>, raise_error=True)`` is called
or ``.get`` is called on each greenlet, not this function itself.
:param remote_file: remote filepath to copy to local host
:type remote_file: str
:param local_file: local filepath on local host to copy file to
:type local_file: str
:param recurse: whether or not to recurse
:type recurse: bool
:param suffix_separator: (Optional) Separator string between
filename and host, defaults to ``_``. For example, for a
``local_file`` value of ``myfile`` and default separator the
resulting filename will be ``myfile_myhost`` for the file from
host ``myhost``. ``suffix_separator`` has no meaning if
``copy_args`` is provided
:type suffix_separator: str
:param copy_args: (Optional) format remote_file and local_file strings
with per-host arguments in ``copy_args``. ``copy_args`` length *must*
equal length of host list -
:py:class:`pssh.exceptions.HostArgumentError` is raised otherwise
:type copy_args: tuple or list
:rtype: list(:py:class:`gevent.Greenlet`) of greenlets for remote copy
commands.
:raises: :py:class:`ValueError` when a directory is supplied to
local_file and recurse is not set.
:raises: :py:class:`pssh.exceptions.HostArgumentError` on number of
per-host copy arguments not equal to number of hosts.
:raises: :py:class:`pssh.exceptions.SCPError` on errors copying file.
:raises: :py:class:`OSError` on local OS errors like permission denied.
.. note ::
Local directories in ``local_file`` that do not exist will be
created as long as permissions allow.
.. note ::
File names will be de-duplicated by appending the hostname to the
filepath separated by ``suffix_separator`` or as per ``copy_args``
argument if provided.
"""
copy_args = [{'local_file': suffix_separator.join([local_file, host]),
'remote_file': remote_file}
for i, host in enumerate(self.hosts)] \
if copy_args is None else copy_args
local_file = "%(local_file)s"
remote_file = "%(remote_file)s"
try:
return [self.pool.spawn(
self._scp_recv, host_i, host,
remote_file % copy_args[host_i],
local_file % copy_args[host_i], recurse=recurse)
for host_i, host in enumerate(self.hosts)]
except IndexError:
raise HostArgumentError(
"Number of per-host copy arguments provided does not match "
"number of hosts")
| 24,119 | Python | .py | 441 | 44.421769 | 113 | 0.6522 | ParallelSSH/parallel-ssh | 1,198 | 148 | 36 | LGPL-2.1 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,382 | __init__.py | ParallelSSH_parallel-ssh/pssh/clients/native/__init__.py | # This file is part of parallel-ssh.
#
# Copyright (C) 2014-2022 Panos Kittenis and contributors.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, version 2.1.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
# flake8: noqa: F401
from .parallel import ParallelSSHClient
from .single import SSHClient, logger
| 865 | Python | .py | 19 | 44.473684 | 80 | 0.790533 | ParallelSSH/parallel-ssh | 1,198 | 148 | 36 | LGPL-2.1 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,383 | single.py | ParallelSSH_parallel-ssh/pssh/clients/native/single.py | # This file is part of parallel-ssh.
#
# Copyright (C) 2014-2022 Panos Kittenis and contributors.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, version 2.1.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import logging
import os
from collections import deque
from gevent import sleep, spawn, get_hub
from gevent.lock import RLock
from ssh2.error_codes import LIBSSH2_ERROR_EAGAIN
from ssh2.exceptions import SFTPHandleError, SFTPProtocolError, \
Timeout as SSH2Timeout
from ssh2.session import Session, LIBSSH2_SESSION_BLOCK_INBOUND, LIBSSH2_SESSION_BLOCK_OUTBOUND
from ssh2.sftp import LIBSSH2_FXF_READ, LIBSSH2_FXF_CREAT, LIBSSH2_FXF_WRITE, \
LIBSSH2_FXF_TRUNC, LIBSSH2_SFTP_S_IRUSR, LIBSSH2_SFTP_S_IRGRP, \
LIBSSH2_SFTP_S_IWUSR, LIBSSH2_SFTP_S_IXUSR, LIBSSH2_SFTP_S_IROTH, \
LIBSSH2_SFTP_S_IXGRP, LIBSSH2_SFTP_S_IXOTH
from .tunnel import FORWARDER
from ..base.single import BaseSSHClient
from ...constants import DEFAULT_RETRIES, RETRY_DELAY
from ...exceptions import SessionError, SFTPError, \
SFTPIOError, Timeout, SCPError, ProxyError
from ...output import HostOutput
logger = logging.getLogger(__name__)
THREAD_POOL = get_hub().threadpool
class SSHClient(BaseSSHClient):
    """ssh2-python (libssh2) based non-blocking SSH client."""

    # 2MB buffer used for SFTP/SCP reads and writes.
    _BUF_SIZE = 2048 * 1024

    def __init__(self, host,
                 user=None, password=None, port=None,
                 pkey=None, alias=None,
                 num_retries=DEFAULT_RETRIES,
                 retry_delay=RETRY_DELAY,
                 allow_agent=True, timeout=None,
                 forward_ssh_agent=False,
                 proxy_host=None,
                 proxy_port=None,
                 proxy_pkey=None,
                 proxy_user=None,
                 proxy_password=None,
                 _auth_thread_pool=True, keepalive_seconds=60,
                 identity_auth=True,
                 ipv6_only=False,
                 ):
        """
        :param host: Host name or IP to connect to.
        :type host: str
        :param user: User to connect as. Defaults to logged in user.
        :type user: str
        :param password: Password to use for password authentication.
        :type password: str
        :param alias: Use an alias for this host.
        :type alias: str
        :param port: SSH port to connect to. Defaults to SSH default (22)
        :type port: int
        :param pkey: Private key file path to use for authentication. Path must
          be either absolute path or relative to user home directory
          like ``~/<path>``.
          Bytes type input is used as private key data for authentication.
        :type pkey: str or bytes
        :param num_retries: (Optional) Number of connection and authentication
          attempts before the client gives up. Defaults to 3.
        :type num_retries: int
        :param retry_delay: Number of seconds to wait between retries. Defaults
          to :py:class:`pssh.constants.RETRY_DELAY`
        :type retry_delay: int or float
        :param timeout: SSH session timeout setting in seconds. This controls
          timeout setting of authenticated SSH sessions.
        :type timeout: int or float
        :param allow_agent: (Optional) set to False to disable connecting to
          the system's SSH agent
        :type allow_agent: bool
        :param identity_auth: (Optional) set to False to disable attempting to
          authenticate with default identity files from
          `pssh.clients.base.single.BaseSSHClient.IDENTITIES`
        :type identity_auth: bool
        :param forward_ssh_agent: Unused - agent forwarding not implemented.
        :type forward_ssh_agent: bool
        :param proxy_host: Connect to target host via given proxy host.
        :type proxy_host: str
        :param proxy_port: Port to use for proxy connection. Defaults to
          self.port
        :type proxy_port: int
        :param keepalive_seconds: Interval of keep alive messages being sent to
          server. Set to ``0`` or ``False`` to disable.
        :type keepalive_seconds: int
        :param ipv6_only: Choose IPv6 addresses only if multiple are available
          for the host or raise NoIPv6AddressFoundError otherwise. Note this
          will disable connecting to an IPv4 address if an IP address is
          provided instead.
        :type ipv6_only: bool

        :raises: :py:class:`pssh.exceptions.PKeyFileError` on errors finding
          provided private key.
        """
        self.forward_ssh_agent = forward_ssh_agent
        self._forward_requested = False
        self.keepalive_seconds = keepalive_seconds
        self._keepalive_greenlet = None
        self._proxy_client = None
        self.alias = alias
        self.host = host
        self.port = port if port is not None else 22
        if proxy_host is not None:
            # Proxy settings fall back to target host settings when not given.
            _port = port if proxy_port is None else proxy_port
            _pkey = pkey if proxy_pkey is None else proxy_pkey
            _user = user if proxy_user is None else proxy_user
            _password = password if proxy_password is None else proxy_password
            # Returns local port the forwarding server listens on for this
            # target - connection below then goes via the local forwarder.
            proxy_port = self._connect_proxy(
                proxy_host, _port, _pkey, user=_user, password=_password,
                num_retries=num_retries, retry_delay=retry_delay,
                allow_agent=allow_agent,
                timeout=timeout,
                keepalive_seconds=keepalive_seconds,
                identity_auth=identity_auth,
            )
            # Actual connection target becomes the local forwarding server.
            proxy_host = '127.0.0.1'
        # Separate locks for channel stdout/stderr reads - see
        # _read_output_to_buffer and close_channel.
        self._chan_stdout_lock = RLock()
        self._chan_stderr_lock = RLock()
        super(SSHClient, self).__init__(
            host, user=user, password=password, alias=alias, port=port,
            pkey=pkey,
            num_retries=num_retries, retry_delay=retry_delay,
            allow_agent=allow_agent, _auth_thread_pool=_auth_thread_pool,
            timeout=timeout,
            proxy_host=proxy_host, proxy_port=proxy_port,
            identity_auth=identity_auth,
            ipv6_only=ipv6_only,
        )
    def _shell(self, channel):
        """Request an interactive shell on ``channel``, retrying on EAGAIN."""
        return self._eagain(channel.shell)
    def _connect_proxy(self, proxy_host, proxy_port, proxy_pkey,
                       user=None, password=None, alias=None,
                       num_retries=DEFAULT_RETRIES,
                       retry_delay=RETRY_DELAY,
                       allow_agent=True, timeout=None,
                       forward_ssh_agent=False,
                       keepalive_seconds=60,
                       identity_auth=True):
        """Connect to proxy host and enqueue a local forwarding server for
        the target ``self.host:self.port`` via the shared ``FORWARDER`` thread.

        :returns: Local port the forwarding server is listening on - the
          actual connection target for this client.
        :raises: :py:class:`pssh.exceptions.ProxyError` on proxy connection or
          authentication failure.
        """
        assert isinstance(self.port, int)
        try:
            # Proxy client uses direct (non thread pool) authentication.
            self._proxy_client = SSHClient(
                proxy_host, port=proxy_port, pkey=proxy_pkey, alias=alias,
                num_retries=num_retries, user=user, password=password,
                retry_delay=retry_delay, allow_agent=allow_agent,
                timeout=timeout, forward_ssh_agent=forward_ssh_agent,
                identity_auth=identity_auth,
                keepalive_seconds=keepalive_seconds,
                _auth_thread_pool=False)
        except Exception as ex:
            msg = "Proxy authentication failed. " \
                  "Exception from tunnel client: %s"
            logger.error(msg, ex)
            raise ProxyError(msg, ex)
        # Start the forwarder thread on first use only.
        if not FORWARDER.started.is_set():
            FORWARDER.start()
            FORWARDER.started.wait()
        FORWARDER.enqueue(self._proxy_client, self.host, self.port)
        # Blocks until the forwarding server reports its listen port.
        proxy_local_port = FORWARDER.out_q.get()
        return proxy_local_port
    def disconnect(self):
        """Attempt to disconnect session.

        Any errors on calling disconnect are suppressed by this function.
        """
        self._keepalive_greenlet = None
        if self.session is not None:
            try:
                self._disconnect_eagain()
            except Exception:
                # Best-effort disconnect - errors deliberately ignored.
                pass
            self.session = None
        if isinstance(self._proxy_client, SSHClient):
            # Don't disconnect proxy client here - let the TunnelServer do it
            # at the time that _wait_send_receive_lets ends. The
            # cleanup_server call here triggers the TunnelServer to stop.
            FORWARDER.cleanup_server(self._proxy_client)
            # I wanted to clean up all the sockets here to avoid a
            # ResourceWarning from unittest, but unfortunately closing this
            # socket here causes a segfault, not sure why yet.
            # self.sock.close()
        else:
            self.sock.close()
            self.sock = None
    def spawn_send_keepalive(self):
        """Spawns a new greenlet that sends keep alive messages every
        self.keepalive_seconds"""
        return spawn(self._send_keepalive)
    def _send_keepalive(self):
        """Send keepalives forever, sleeping for the interval the session
        reports until the next one is due."""
        while True:
            # keepalive_send returns seconds until next keepalive is due.
            sleep(self._eagain(self.session.keepalive_send))
    def configure_keepalive(self):
        """Configures keepalive on the server for `self.keepalive_seconds`."""
        # First argument False - do not request server replies to keepalives.
        self.session.keepalive_config(False, self.keepalive_seconds)
    def _init_session(self, retries=1):
        """Create libssh2 session and perform handshake on the connected
        socket, retrying up to ``self.num_retries`` times on failure.

        :raises: :py:class:`pssh.exceptions.Timeout` when the handshake times
          out; re-raises other handshake errors once retries are exhausted.
        """
        self.session = Session()
        if self.timeout:
            # libssh2 timeout is in ms
            self.session.set_timeout(self.timeout * 1000)
        try:
            if self._auth_thread_pool:
                # Handshake in native thread pool to avoid blocking the hub.
                THREAD_POOL.apply(self.session.handshake, (self.sock,))
            else:
                self.session.handshake(self.sock)
        except Exception as ex:
            if retries < self.num_retries:
                sleep(self.retry_delay)
                return self._connect_init_session_retry(retries=retries+1)
            msg = "Error connecting to host %s:%s - %s"
            logger.error(msg, self.host, self.port, ex)
            if not self.sock.closed:
                self.sock.close()
            if isinstance(ex, SSH2Timeout):
                raise Timeout(msg, self.host, self.port, ex)
            raise
    def _keepalive(self):
        """Configure and start keepalive sending if enabled."""
        if self.keepalive_seconds:
            self.configure_keepalive()
            self._keepalive_greenlet = self.spawn_send_keepalive()
    def _agent_auth(self):
        """Authenticate via the system SSH agent."""
        self.session.agent_auth(self.user)
    def _pkey_file_auth(self, pkey_file, password=None):
        """Authenticate with private key file, optional passphrase."""
        self.session.userauth_publickey_fromfile(
            self.user,
            pkey_file,
            # libssh2 expects bytes passphrase, empty when none given.
            passphrase=password if password is not None else b'')
    def _pkey_from_memory(self, pkey_data):
        """Authenticate with in-memory private key data, using
        ``self.password`` as passphrase when set."""
        self.session.userauth_publickey_frommemory(
            self.user,
            pkey_data,
            passphrase=self.password if self.password is not None else b'',
        )
    def _password_auth(self):
        """Authenticate with plain password."""
        self.session.userauth_password(self.user, self.password)
    def _open_session(self):
        """Open a new session channel, retrying on EAGAIN."""
        chan = self._eagain(self.session.open_session)
        return chan
    def open_session(self):
        """Open new channel from session.

        :rtype: :py:class:`ssh2.channel.Channel`
        :raises: :py:class:`pssh.exceptions.SessionError` on channel open
          failure.
        """
        try:
            chan = self._open_session()
        except Exception as ex:
            raise SessionError(ex)
        # NOTE: agent forwarding intentionally disabled - see
        # forward_ssh_agent docstring (not implemented).
        # if self.forward_ssh_agent and not self._forward_requested:
        #     self._eagain(chan.request_auth_agent)
        #     self._forward_requested = True
        return chan
def _make_output_readers(self, channel, stdout_buffer, stderr_buffer):
_stdout_reader = spawn(
self._read_output_to_buffer, channel.read, stdout_buffer)
_stderr_reader = spawn(
self._read_output_to_buffer, channel.read_stderr, stderr_buffer)
return _stdout_reader, _stderr_reader
    def execute(self, cmd, use_pty=False, channel=None):
        """Execute command on remote server.

        :param cmd: Command to execute.
        :type cmd: str
        :param use_pty: Whether or not to obtain a PTY on the channel.
        :type use_pty: bool
        :param channel: Use provided channel for execute rather than creating
          a new one.
        :type channel: :py:class:`ssh2.channel.Channel`
        :rtype: :py:class:`ssh2.channel.Channel`
        """
        channel = self.open_session() if channel is None else channel
        if use_pty:
            self._eagain(channel.pty)
        logger.debug("Executing command '%s'", cmd)
        self._eagain(channel.execute, cmd)
        return channel
    def _read_output_to_buffer(self, read_func, _buffer, is_stderr=False):
        """Read all output from ``read_func`` into ``_buffer`` until remote
        EOF, setting ``_buffer.eof`` when done.

        Reads hold the per-stream channel lock; EAGAIN triggers a
        co-operative poll and retry.
        """
        _lock = self._chan_stderr_lock if is_stderr else self._chan_stdout_lock
        try:
            while True:
                with _lock:
                    size, data = read_func()
                if size == LIBSSH2_ERROR_EAGAIN:
                    # No data ready - yield until socket is readable.
                    self.poll()
                    continue
                if size <= 0:
                    # EOF or error - stop reading.
                    break
                _buffer.write(data)
                # Yield to other greenlets between reads.
                sleep()
        finally:
            _buffer.eof.set()
    def wait_finished(self, host_output, timeout=None):
        """Wait for EOF from channel and close channel.

        Used to wait for remote command completion and be able to gather
        exit code.

        :param host_output: Host output of command to wait for.
        :type host_output: :py:class:`pssh.output.HostOutput`
        :param timeout: Timeout value in seconds - defaults to no timeout.
        :type timeout: float

        :raises: :py:class:`ValueError` if ``host_output`` is not a
          HostOutput object.
        :raises: :py:class:`pssh.exceptions.Timeout` after <timeout> seconds
          if timeout given.
        """
        if not isinstance(host_output, HostOutput):
            raise ValueError("%s is not a HostOutput object" % (host_output,))
        channel = host_output.channel
        if channel is None:
            # Nothing to wait on.
            return
        self._eagain(channel.wait_eof, timeout=timeout)
        # Close channel to indicate no more commands will be sent over it
        self.close_channel(channel)
    def close_channel(self, channel):
        """Close channel, retrying on EAGAIN.

        Holds both stdout and stderr locks so readers cannot use the channel
        while it is being closed.
        """
        with self._chan_stdout_lock, self._chan_stderr_lock:
            logger.debug("Closing channel")
            self._eagain(channel.close)
    def _eagain(self, func, *args, **kwargs):
        """Run ``func``, polling and retrying while it returns libssh2
        EAGAIN."""
        return self._eagain_errcode(func, LIBSSH2_ERROR_EAGAIN, *args, **kwargs)
    def _make_sftp_eagain(self):
        """Initialise SFTP subsystem on session, retrying on EAGAIN."""
        return self._eagain(self.session.sftp_init)
    def _make_sftp(self):
        """Make new SFTP client from session.

        :raises: :py:class:`pssh.exceptions.SFTPError` on SFTP initialisation
          errors.
        """
        try:
            sftp = self._make_sftp_eagain()
        except Exception as ex:
            raise SFTPError(ex)
        return sftp
def _mkdir(self, sftp, directory):
"""Make directory via SFTP channel.
:param sftp: SFTP client object
:type sftp: :py:class:`ssh2.sftp.SFTP`
:param directory: Remote directory to create
:type directory: str
:raises: :py:class:`pssh.exceptions.SFTPIOError` on SFTP IO errors
"""
mode = LIBSSH2_SFTP_S_IRUSR | \
LIBSSH2_SFTP_S_IWUSR | \
LIBSSH2_SFTP_S_IXUSR | \
LIBSSH2_SFTP_S_IRGRP | \
LIBSSH2_SFTP_S_IROTH | \
LIBSSH2_SFTP_S_IXGRP | \
LIBSSH2_SFTP_S_IXOTH
try:
self._eagain(sftp.mkdir, directory, mode)
except SFTPProtocolError as error:
msg = "Error occured creating directory %s on host %s - %s"
logger.error(msg, directory, self.host, error)
raise SFTPIOError(msg, directory, self.host, error)
logger.debug("Created remote directory %s", directory)
    def copy_file(self, local_file, remote_file, recurse=False, sftp=None):
        """Copy local file to host via SFTP.

        :param local_file: Local filepath to copy to remote host
        :type local_file: str
        :param remote_file: Remote filepath on remote host to copy file to
        :type remote_file: str
        :param recurse: Whether or not to descend into directories
          recursively.
        :type recurse: bool
        :param sftp: SFTP channel to use instead of creating a new one.
        :type sftp: :py:class:`ssh2.sftp.SFTP`

        :raises: :py:class:`ValueError` when a directory is supplied to
          ``local_file`` and ``recurse`` is not set
        :raises: :py:class:`pssh.exceptions.SFTPError` on SFTP initialisation
          errors
        :raises: :py:class:`pssh.exceptions.SFTPIOError` on I/O errors writing
          via SFTP
        :raises: :py:class:`IOError` on local file IO errors
        :raises: :py:class:`OSError` on local OS errors like permission denied
        """
        sftp = self._make_sftp() if sftp is None else sftp
        if os.path.isdir(local_file) and recurse:
            return self._copy_dir(local_file, remote_file, sftp)
        elif os.path.isdir(local_file) and not recurse:
            raise ValueError("Recurse must be true if local_file is a "
                             "directory.")
        # Create intermediate remote directories when remote_file contains
        # a path that does not yet exist.
        destination = self._remote_paths_split(remote_file)
        if destination is not None:
            try:
                self._eagain(sftp.stat, destination)
            except (SFTPHandleError, SFTPProtocolError):
                self.mkdir(sftp, destination)
        self.sftp_put(sftp, local_file, remote_file)
        logger.info("Copied local file %s to remote destination %s:%s",
                    local_file, self.host, remote_file)
    def _sftp_put(self, remote_fh, local_file):
        """Stream ``local_file`` into the open remote SFTP handle in
        ``_BUF_SIZE`` chunks."""
        with open(local_file, 'rb', self._BUF_SIZE) as local_fh:
            data = local_fh.read(self._BUF_SIZE)
            while data:
                self.eagain_write(remote_fh.write, data)
                data = local_fh.read(self._BUF_SIZE)
    def sftp_put(self, sftp, local_file, remote_file):
        """Perform an SFTP put - copy local file path to remote via SFTP.

        :param sftp: SFTP client object.
        :type sftp: :py:class:`ssh2.sftp.SFTP`
        :param local_file: Local filepath to copy to remote host.
        :type local_file: str
        :param remote_file: Remote filepath on remote host to copy file to.
        :type remote_file: str

        :raises: :py:class:`pssh.exceptions.SFTPIOError` on I/O errors writing
          via SFTP.
        """
        # rw for user, r for group and other - 0644 equivalent.
        mode = LIBSSH2_SFTP_S_IRUSR | \
            LIBSSH2_SFTP_S_IWUSR | \
            LIBSSH2_SFTP_S_IRGRP | \
            LIBSSH2_SFTP_S_IROTH
        # Create file if missing, open for write, truncate existing content.
        f_flags = LIBSSH2_FXF_CREAT | LIBSSH2_FXF_WRITE | LIBSSH2_FXF_TRUNC
        with self._sftp_openfh(
                sftp.open, remote_file, f_flags, mode) as remote_fh:
            try:
                self._sftp_put(remote_fh, local_file)
            except SFTPProtocolError as ex:
                msg = "Error writing to remote file %s - %s"
                logger.error(msg, remote_file, ex)
                raise SFTPIOError(msg, remote_file, ex)
    def mkdir(self, sftp, directory):
        """Make directory via SFTP channel.

        Parent paths in the directory are created if they do not exist.

        :param sftp: SFTP client object
        :type sftp: :py:class:`ssh2.sftp.SFTP`
        :param directory: Remote directory to create
        :type directory: str

        Catches and logs at error level remote IOErrors on creating directory.
        """
        # Queue of path components to create, empty components skipped.
        _paths_to_create = deque()
        for d in directory.split('/'):
            if not d:
                continue
            _paths_to_create.append(d)
        # Absolute paths build from root, relative from current directory.
        cwd = '' if directory.startswith('/') else '.'
        while _paths_to_create:
            cur_dir = _paths_to_create.popleft()
            cwd = '/'.join([cwd, cur_dir])
            try:
                self._eagain(sftp.stat, cwd)
            except (SFTPHandleError, SFTPProtocolError) as ex:
                # Path component does not exist - create it.
                logger.debug("Stat for %s failed with %s", cwd, ex)
                self._mkdir(sftp, cwd)
    def copy_remote_file(self, remote_file, local_file, recurse=False,
                         sftp=None, encoding='utf-8'):
        """Copy remote file to local host via SFTP.

        :param remote_file: Remote filepath to copy from
        :type remote_file: str
        :param local_file: Local filepath where file(s) will be copied to
        :type local_file: str
        :param recurse: Whether or not to recursively copy directories
        :type recurse: bool
        :param encoding: Encoding to use for file paths.
        :type encoding: str
        :param sftp: SFTP channel to use instead of creating a new one.
        :type sftp: :py:class:`ssh2.sftp.SFTP`

        :raises: :py:class:`ValueError` when a directory is supplied to
          ``local_file`` and ``recurse`` is not set
        :raises: :py:class:`pssh.exceptions.SFTPError` on SFTP initialisation
          errors
        :raises: :py:class:`pssh.exceptions.SFTPIOError` on I/O errors reading
          from SFTP
        :raises: :py:class:`IOError` on local file IO errors
        :raises: :py:class:`OSError` on local OS errors like permission denied
        """
        sftp = self._make_sftp() if sftp is None else sftp
        try:
            self._eagain(sftp.stat, remote_file)
        except (SFTPHandleError, SFTPProtocolError):
            msg = "Remote file or directory %s on host %s does not exist"
            logger.error(msg, remote_file, self.host)
            raise SFTPIOError(msg, remote_file, self.host)
        # opendir succeeding means remote_file is a directory.
        try:
            dir_h = self._sftp_openfh(sftp.opendir, remote_file)
        except SFTPError:
            pass
        else:
            if not recurse:
                raise ValueError("Recurse must be true if remote_file is a "
                                 "directory.")
            file_list = self._sftp_readdir(dir_h)
            return self._copy_remote_dir(file_list, remote_file,
                                         local_file, sftp,
                                         encoding=encoding)
        # Ensure local destination directory exists before download.
        destination = os.path.join(os.path.sep, os.path.sep.join(
            [_dir for _dir in local_file.split('/')
             if _dir][:-1]))
        self._make_local_dir(destination)
        self.sftp_get(sftp, remote_file, local_file)
        logger.info("Copied local file %s from remote destination %s:%s",
                    local_file, self.host, remote_file)
    def _scp_recv_recursive(self, remote_file, local_file, sftp,
                            encoding='utf-8'):
        """Recursively SCP ``remote_file`` to ``local_file``, using SFTP for
        remote directory listings only.

        :raises: :py:class:`pssh.exceptions.SCPError` when ``remote_file``
          does not exist on the remote host.
        """
        try:
            self._eagain(sftp.stat, remote_file)
        except (SFTPHandleError, SFTPProtocolError):
            msg = "Remote file or directory %s does not exist"
            logger.error(msg, remote_file)
            raise SCPError(msg, remote_file)
        try:
            dir_h = self._sftp_openfh(sftp.opendir, remote_file)
        except SFTPError:
            # remote_file is not a dir, scp file
            return self.scp_recv(remote_file, local_file, encoding=encoding)
        # Local directory may already exist - that is fine.
        try:
            os.makedirs(local_file)
        except OSError:
            pass
        file_list = self._sftp_readdir(dir_h)
        return self._scp_recv_dir(file_list, remote_file,
                                  local_file, sftp,
                                  encoding=encoding)
    def scp_recv(self, remote_file, local_file, recurse=False, sftp=None,
                 encoding='utf-8'):
        """Copy remote file to local host via SCP.

        Note - Remote directory listings are gathered via SFTP when
        ``recurse`` is enabled - SCP lacks directory list support.
        Enabling recursion therefore involves creating an extra SFTP channel
        and requires SFTP support on the server.

        :param remote_file: Remote filepath to copy from
        :type remote_file: str
        :param local_file: Local filepath where file(s) will be copied to
        :type local_file: str
        :param recurse: Whether or not to recursively copy directories
        :type recurse: bool
        :param sftp: The SFTP channel to use instead of creating a new one.
          Only used when ``recurse`` is ``True``.
        :type sftp: :py:class:`ssh2.sftp.SFTP`
        :param encoding: Encoding to use for file paths when recursion is
          enabled.
        :type encoding: str

        :raises: :py:class:`pssh.exceptions.SCPError` on errors copying file.
        :raises: :py:class:`IOError` on local file IO errors.
        :raises: :py:class:`OSError` on local OS errors like permission denied.
        """
        if recurse:
            sftp = self._make_sftp() if sftp is None else sftp
            return self._scp_recv_recursive(remote_file, local_file, sftp,
                                            encoding=encoding)
        elif local_file.endswith('/'):
            # local_file is a directory path - keep remote file name.
            remote_filename = remote_file.rsplit('/')[-1]
            local_file += remote_filename
        # Ensure local destination directory exists before download.
        destination = os.path.join(os.path.sep, os.path.sep.join(
            [_dir for _dir in local_file.split('/')
             if _dir][:-1]))
        self._make_local_dir(destination)
        self._scp_recv(remote_file, local_file)
        logger.info("SCP local file %s from remote destination %s:%s",
                    local_file, self.host, remote_file)
    def _scp_recv(self, remote_file, local_file):
        """Receive a single remote file over an SCP channel into
        ``local_file``.

        :raises: :py:class:`pssh.exceptions.SCPError` on errors opening the
          remote file for reading.
        """
        try:
            (file_chan, fileinfo) = self._eagain(
                self.session.scp_recv2, remote_file)
        except Exception as ex:
            msg = "Error copying file %s from host %s - %s"
            logger.error(msg, remote_file, self.host, ex)
            raise SCPError(msg, remote_file, self.host, ex)
        local_fh = open(local_file, 'wb')
        try:
            total = 0
            # Read until the advertised remote file size has been received.
            # NOTE(review): negative read return values other than EAGAIN
            # would be added to total here - presumably cannot occur via
            # channel.read; confirm against ssh2-python error behaviour.
            while total < fileinfo.st_size:
                size, data = file_chan.read(size=fileinfo.st_size - total)
                if size == LIBSSH2_ERROR_EAGAIN:
                    self.poll()
                    continue
                total += size
                local_fh.write(data)
        finally:
            local_fh.flush()
            local_fh.close()
            file_chan.close()
    def scp_send(self, local_file, remote_file, recurse=False, sftp=None):
        """Copy local file to host via SCP.

        Note - Directories are created via SFTP when ``recurse`` is enabled -
        SCP lacks directory create support. Enabling recursion therefore
        involves creating an extra SFTP channel and requires SFTP support on
        the server.

        :param local_file: Local filepath to copy to remote host
        :type local_file: str
        :param remote_file: Remote filepath on remote host to copy file to
        :type remote_file: str
        :param sftp: The SFTP channel to use instead of creating a new one.
          Only used when ``recurse`` is ``True``.
        :type sftp: :py:class:`ssh2.sftp.SFTP`
        :param recurse: Whether or not to descend into directories
          recursively.
        :type recurse: bool

        :raises: :py:class:`ValueError` when a directory is supplied to
          ``local_file`` and ``recurse`` is not set
        :raises: :py:class:`pssh.exceptions.SFTPError` on SFTP initialisation
          errors
        :raises: :py:class:`pssh.exceptions.SFTPIOError` on I/O errors writing
          via SFTP
        :raises: :py:class:`IOError` on local file IO errors
        :raises: :py:class:`OSError` on local OS errors like permission denied
        """
        if os.path.isdir(local_file) and recurse:
            sftp = self._make_sftp() if sftp is None else sftp
            return self._scp_send_dir(local_file, remote_file, sftp)
        elif os.path.isdir(local_file) and not recurse:
            raise ValueError("Recurse must be True if local_file is a "
                             "directory.")
        if recurse:
            # Create intermediate remote directories via SFTP if needed.
            destination = self._remote_paths_split(remote_file)
            if destination is not None:
                sftp = self._make_sftp() if sftp is None else sftp
                try:
                    self._eagain(sftp.stat, destination)
                except (SFTPHandleError, SFTPProtocolError):
                    self.mkdir(sftp, destination)
        elif remote_file.endswith('/'):
            # remote_file is a directory path - keep local file name.
            local_filename = local_file.rsplit('/')[-1]
            remote_file += local_filename
        self._scp_send(local_file, remote_file)
        logger.info("SCP local file %s to remote destination %s:%s",
                    local_file, self.host, remote_file)
def _scp_send(self, local_file, remote_file):
fileinfo = os.stat(local_file)
try:
chan = self._eagain(
self.session.scp_send64,
remote_file, fileinfo.st_mode & 0o777, fileinfo.st_size,
fileinfo.st_mtime, fileinfo.st_atime)
except Exception as ex:
msg = "Error opening remote file %s for writing on host %s - %s"
logger.error(msg, remote_file, self.host, ex)
raise SCPError(msg, remote_file, self.host, ex)
try:
with open(local_file, 'rb', 2097152) as local_fh:
data = local_fh.read(self._BUF_SIZE)
while data:
self.eagain_write(chan.write, data)
data = local_fh.read(self._BUF_SIZE)
except Exception as ex:
msg = "Error writing to remote file %s on host %s - %s"
logger.error(msg, remote_file, self.host, ex)
raise SCPError(msg, remote_file, self.host, ex)
finally:
self._eagain(chan.flush)
self._eagain(chan.send_eof)
self._eagain(chan.wait_eof)
self._eagain(chan.wait_closed)
    def _sftp_openfh(self, open_func, remote_file, *args):
        """Open remote file or directory handle via given SFTP open function,
        retrying on EAGAIN.

        :raises: :py:class:`pssh.exceptions.SFTPError` on open errors.
        """
        try:
            fh = self._eagain(open_func, remote_file, *args)
        except Exception as ex:
            raise SFTPError(ex)
        return fh
    def _sftp_get(self, remote_fh, local_file):
        """Stream the open remote SFTP handle into ``local_file``, polling on
        EAGAIN."""
        with open(local_file, 'wb') as local_fh:
            for size, data in remote_fh:
                if size == LIBSSH2_ERROR_EAGAIN:
                    self.poll()
                    continue
                local_fh.write(data)
    def sftp_get(self, sftp, remote_file, local_file):
        """Perform an SFTP get - copy remote file path to local file via SFTP.

        :param sftp: SFTP client object.
        :type sftp: :py:class:`ssh2.sftp.SFTP`
        :param remote_file: Remote filepath to copy from.
        :type remote_file: str
        :param local_file: Local filepath to copy to.
        :type local_file: str

        :raises: :py:class:`pssh.exceptions.SFTPIOError` on I/O errors reading
          via SFTP.
        """
        with self._sftp_openfh(
                sftp.open, remote_file,
                LIBSSH2_FXF_READ, LIBSSH2_SFTP_S_IRUSR) as remote_fh:
            try:
                self._sftp_get(remote_fh, local_file)
            except SFTPProtocolError as ex:
                msg = "Error reading from remote file %s - %s"
                logger.error(msg, remote_file, ex)
                raise SFTPIOError(msg, remote_file, ex)
def get_exit_status(self, channel):
"""Get exit status code for channel or ``None`` if not ready.
:param channel: The channel to get status from.
:type channel: :py:mod:`ssh2.channel.Channel`
:rtype: int or ``None``
"""
if not channel.eof():
return
return channel.get_exit_status()
def finished(self, channel):
"""Checks if remote command has finished - has server sent client
EOF.
:rtype: bool
"""
if channel is None:
return
return channel.eof()
    def poll(self, timeout=None):
        """Perform co-operative gevent poll on ssh2 session socket.

        Blocks current greenlet only if socket has pending read or write
        operations in the appropriate direction.

        :param timeout: Deprecated and unused - to be removed.
        """
        self._poll_errcodes(
            self.session.block_directions,
            LIBSSH2_SESSION_BLOCK_INBOUND,
            LIBSSH2_SESSION_BLOCK_OUTBOUND,
        )
    def _eagain_write(self, write_func, data):
        """Write data with given write_func for an ssh2-python session while
        handling EAGAIN and resuming writes from last written byte on each
        call to write_func.
        """
        return self._eagain_write_errcode(write_func, data, LIBSSH2_ERROR_EAGAIN)
    def eagain_write(self, write_func, data):
        """Public wrapper for :py:func:`_eagain_write` - write all of ``data``
        with ``write_func``, handling EAGAIN."""
        return self._eagain_write(write_func, data)
| 32,117 | Python | .py | 692 | 35.33237 | 98 | 0.607228 | ParallelSSH/parallel-ssh | 1,198 | 148 | 36 | LGPL-2.1 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,384 | tunnel.py | ParallelSSH_parallel-ssh/pssh/clients/native/tunnel.py | # This file is part of parallel-ssh.
#
# Copyright (C) 2014-2022 Panos Kittenis and contributors.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, version 2.1.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import logging
from threading import Thread, Event
from queue import Queue
from gevent import spawn, joinall, get_hub, sleep
from gevent.server import StreamServer
from ssh2.error_codes import LIBSSH2_ERROR_EAGAIN
from ...constants import DEFAULT_RETRIES
logger = logging.getLogger(__name__)
class LocalForwarder(Thread):
    """Thread managing a group of local port forwarding servers."""

    def __init__(self):
        """Thread runner for a group of local port forwarding proxies.

        Starts servers in their own gevent hub via thread run target.

        Use ``enqueue`` to create new servers
        and get port to connect to via ``out_q`` once a target has been put
        into the input queue.

        ``SSHClient`` is the client for the SSH host that will be proxying.
        """
        Thread.__init__(self)
        # Single-slot queues - one (client, host, port) request is serviced
        # at a time, with the resulting listen port returned via out_q.
        self.in_q = Queue(1)
        self.out_q = Queue(1)
        # Maps proxying SSHClient -> its TunnelServer.
        self._servers = {}
        self._hub = None
        self.started = Event()
        self._cleanup_let = None

    def _start_server(self):
        """Consume one request from in_q and start a tunnel server for it."""
        client, host, port = self.in_q.get()
        server = TunnelServer(client, host, port)
        server.start()
        self._get_server_listen_port(client, server)

    def _get_server_listen_port(self, client, server):
        """Wait for server startup, register it and report its listen port."""
        while not server.started:
            sleep(0.01)
        self._servers[client] = server
        local_port = server.listen_port
        self.out_q.put(local_port)

    def enqueue(self, client, host, port):
        """Add target host:port to tunnel via client to queue.

        :param client: The client to connect via.
        :type client: :py:mod:`pssh.clients.native.single.SSHClient`
        :param host: Target host to open connection to.
        :type host: str
        :param port: Target port to connect on.
        :type port: int
        """
        self.in_q.put((client, host, port))

    def shutdown(self):
        """Stop all tunnel servers."""
        for client, server in self._servers.items():
            server.stop()

    def _cleanup_servers_let(self):
        """Greenlet periodically removing servers for closed clients."""
        while True:
            self._cleanup_servers()
            sleep(60)

    def _cleanup_servers(self):
        """Stop and drop servers whose client socket has gone away."""
        # Iterate a copy of keys - cleanup_server mutates self._servers.
        for client in list(self._servers.keys()):
            if client.sock is None or client.sock.closed:
                self.cleanup_server(client)

    def run(self):
        """Thread runner ensures a non main hub has been created for all
        subsequent greenlets and waits for (client, host, port) tuples to be
        put into self.in_q.

        A server is created once something is in the queue and the port to
        connect to is put into self.out_q.
        """
        self._hub = get_hub()
        # Servers must run in this thread's own hub, never the main one.
        assert self._hub.main_hub is False
        self.started.set()
        self._cleanup_let = spawn(self._cleanup_servers_let)
        logger.debug("Hub in server runner is main hub: %s",
                     self._hub.main_hub)
        try:
            while True:
                if self.in_q.empty():
                    sleep(.01)
                    continue
                self._start_server()
        except Exception:
            logger.exception("Tunnel thread caught exception and will exit:")
            self.shutdown()

    def cleanup_server(self, client):
        """The purpose of this function is for a proxied client to notify the
        LocalForwarder that it is shutting down and its corresponding server
        can also be shut down."""
        server = self._servers[client]
        server.stop()
        del self._servers[client]
class TunnelServer(StreamServer):
    """Local port forwarding server for tunneling connections from remote SSH server.

    Accepts connections on an available bind_address port once started and tunnels data
    to/from remote SSH host for each connection.
    """

    def __init__(self, client, host, port, bind_address='127.0.0.1',
                 num_retries=DEFAULT_RETRIES):
        """
        :param client: SSH client whose session is used for the channels.
        :param host: Target host the remote SSH server forwards to.
        :param port: Target port on ``host``.
        :param bind_address: Local address to listen on.
        :param num_retries: Attempts when opening a channel fails.
        """
        # Port 0 - let the OS pick a free listen port; see listen_port.
        StreamServer.__init__(self, (bind_address, 0), self._read_rw)
        self.client = client
        self.host = host
        self.port = port
        self.session = client.session
        self._client = client
        self._retries = num_retries
        self.bind_address = bind_address
        self.exception = None
        # Pump greenlets of the current connection; set in _read_rw.
        # Initialised here so they always exist as attributes.
        self._source_let = None
        self._dest_let = None

    @property
    def listen_port(self):
        """Port the server is listening on, or ``None`` before start."""
        return self.socket.getsockname()[1] if self.socket is not None else None

    def _read_rw(self, socket, address):
        """Connection handler - open an SSH channel to the target and pump
        data between it and the accepted local socket until either side
        closes. Channel open failures are recorded on ``self.exception``."""
        local_addr, local_port = address
        logger.debug("Client connected, forwarding %s:%s on"
                     " remote host to %s:%s",
                     self.host, self.port, local_addr, local_port)
        try:
            channel = self._open_channel_retries(
                self.host, self.port, local_port)
        except Exception as ex:
            logger.error("Could not establish channel to %s:%s: %s",
                         self.host, self.port, ex)
            self.exception = ex
            return
        source = spawn(self._read_forward_sock, socket, channel)
        dest = spawn(self._read_channel, socket, channel)
        logger.debug("Waiting for read/write greenlets")
        self._source_let = source
        self._dest_let = dest
        self._wait_send_receive_lets(source, dest, channel)

    def _wait_send_receive_lets(self, source, dest, channel):
        # Wait for both pump greenlets; always close the channel, then
        # disconnect the proxying client - in that order.
        try:
            joinall((source, dest), raise_error=True)
        finally:
            # Forward socket does not need to be closed here; StreamServer does it in do_close
            logger.debug("Closing channel")
            self._client.close_channel(channel)
            # Disconnect client here to make sure it happens AFTER close_channel
            self._client.disconnect()

    def _read_forward_sock(self, forward_sock, channel):
        """Pump data from the local forward socket into the SSH channel."""
        while True:
            if channel is None or channel.eof():
                logger.debug("Channel closed, tunnel forward socket reader exiting")
                return
            try:
                data = forward_sock.recv(1024)
            except Exception as ex:
                logger.error("Forward socket read error: %s", ex)
                raise
            data_len = len(data)
            if data_len == 0:
                # Nothing available - yield to the hub before retrying
                sleep(.01)
                continue
            try:
                self._client.eagain_write(channel.write, data)
            except Exception as ex:
                logger.error("Error writing data to channel - %s", ex)
                raise
            logger.debug("Wrote all data to channel")

    def _read_channel(self, forward_sock, channel):
        """Pump data from the SSH channel back out the local forward socket."""
        while True:
            if channel is None or channel.eof():
                logger.debug("Channel closed, tunnel reader exiting")
                return
            try:
                size, data = channel.read()
            except Exception as ex:
                logger.error("Error reading from channel - %s", ex)
                raise
            if size == LIBSSH2_ERROR_EAGAIN:
                # Non-blocking read would block - poll and retry
                self._client.poll()
                continue
            elif size == 0:
                sleep(.01)
                continue
            try:
                forward_sock.sendall(data)
            except Exception as ex:
                logger.error(
                    "Error sending data to forward socket - %s", ex)
                raise
            logger.debug("Wrote %s data to forward socket", len(data))

    def _open_channel(self, fw_host, fw_port, local_port):
        """Open a direct-tcpip channel, polling while the non-blocking
        session reports EAGAIN."""
        channel = self.session.direct_tcpip_ex(
            fw_host, fw_port, self.bind_address,
            local_port)
        while channel == LIBSSH2_ERROR_EAGAIN:
            self._client.poll()
            channel = self.session.direct_tcpip_ex(
                fw_host, fw_port, self.bind_address,
                local_port)
        return channel

    def _open_channel_retries(self, fw_host, fw_port, local_port,
                              wait_time=0.1):
        """Open a channel, retrying up to ``self._retries`` times with
        exponential back-off (``wait_time`` multiplied by 5 each attempt).

        Always makes at least one attempt: the previous implementation
        raised ``UnboundLocalError`` instead of a meaningful error when
        ``num_retries`` was zero or negative, as its loop body never ran
        and ``channel`` was returned unbound.

        :raises: Whatever :py:meth:`_open_channel` raised on the final
          failed attempt.
        """
        num_tries = 0
        while True:
            try:
                return self._open_channel(fw_host, fw_port, local_port)
            except Exception:
                num_tries += 1
                logger.error("Error opening channel to %s:%s, retries %s/%s",
                             fw_host, fw_port, num_tries, self._retries)
                if num_tries >= self._retries:
                    raise
                sleep(wait_time)
                wait_time *= 5
# Module-level singleton forwarder thread shared by all proxying clients.
# Daemonised so a still-running forwarder never blocks interpreter exit.
# NOTE(review): the thread is created but not started here - presumably
# callers invoke FORWARDER.start() on first use; confirm against callers.
FORWARDER = LocalForwarder()
FORWARDER.daemon = True
| 9,311 | Python | .py | 218 | 32.243119 | 100 | 0.599205 | ParallelSSH/parallel-ssh | 1,198 | 148 | 36 | LGPL-2.1 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,385 | manage.py | wger-project_wger/manage.py | #!/usr/bin/env python3
# Standard Library
import sys
# Django
from django.core.management import execute_from_command_line
# wger
from wger.tasks import (
get_path,
setup_django_environment,
)
if __name__ == '__main__':
# If user passed the settings flag ignore the default wger settings
if not any('--settings' in s for s in sys.argv):
setup_django_environment(get_path('settings.py'))
# Alternative to above
# os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
execute_from_command_line(sys.argv)
| 551 | Python | .py | 17 | 28.882353 | 71 | 0.721063 | wger-project/wger | 3,065 | 570 | 221 | AGPL-3.0 | 9/5/2024, 5:13:18 PM (Europe/Amsterdam) |
22,386 | settings.py | wger-project_wger/extras/docker/production/settings.py | #!/usr/bin/env python
# Third Party
import environ
# wger
from wger.settings_global import *
env = environ.Env(
# set casting, default value
DJANGO_DEBUG=(bool, False)
)
# Use 'DEBUG = True' to get more details for server errors
DEBUG = env("DJANGO_DEBUG")
if os.environ.get('DJANGO_ADMINS'):
ADMINS = [env.tuple('DJANGO_ADMINS'), ]
MANAGERS = ADMINS
if os.environ.get("DJANGO_DB_ENGINE"):
DATABASES = {
'default': {
'ENGINE': env.str("DJANGO_DB_ENGINE"),
'NAME': env.str("DJANGO_DB_DATABASE"),
'USER': env.str("DJANGO_DB_USER"),
'PASSWORD': env.str("DJANGO_DB_PASSWORD"),
'HOST': env.str("DJANGO_DB_HOST"),
'PORT': env.int("DJANGO_DB_PORT"),
}
}
else:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': env.str('DJANGO_DB_DATABASE', '/home/wger/db/database.sqlite'),
}
}
# Timezone for this installation. Consult settings_global.py for more information
TIME_ZONE = env.str("TIME_ZONE", 'Europe/Berlin')
# Make this unique, and don't share it with anybody.
SECRET_KEY = env.str("SECRET_KEY", 'wger-docker-supersecret-key-1234567890!@#$%^&*(-_)')
# Your reCaptcha keys
RECAPTCHA_PUBLIC_KEY = env.str('RECAPTCHA_PUBLIC_KEY', '')
RECAPTCHA_PRIVATE_KEY = env.str('RECAPTCHA_PRIVATE_KEY', '')
# The site's URL (e.g. http://www.my-local-gym.com or http://localhost:8000)
# This is needed for uploaded files and images (exercise images, etc.) to be
# properly served.
SITE_URL = env.str('SITE_URL', 'http://localhost:8000')
# Path to uploaded files
# Absolute filesystem path to the directory that will hold user-uploaded files.
MEDIA_ROOT = env.str("DJANGO_MEDIA_ROOT", '/home/wger/media')
STATIC_ROOT = env.str("DJANGO_STATIC_ROOT", '/home/wger/static')
# If you change these, adjust nginx alias definitions as well
MEDIA_URL = env.str('MEDIA_URL', '/media/')
STATIC_URL = env.str('STATIC_URL', '/static/')
LOGIN_REDIRECT_URL = env.str('LOGIN_REDIRECT_URL', '/')
# Allow all hosts to access the application. Change if used in production.
ALLOWED_HOSTS = ['*', ]
SESSION_ENGINE = "django.contrib.sessions.backends.cache"
# Configure a real backend in production
if DEBUG:
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
if env.bool("ENABLE_EMAIL", False):
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = env.str("EMAIL_HOST")
EMAIL_PORT = env.int("EMAIL_PORT")
EMAIL_HOST_USER = env.str("EMAIL_HOST_USER")
EMAIL_HOST_PASSWORD = env.str("EMAIL_HOST_PASSWORD")
EMAIL_USE_TLS = env.bool("EMAIL_USE_TLS", True)
EMAIL_USE_SSL = env.bool("EMAIL_USE_SSL", False)
EMAIL_TIMEOUT = 60
# Sender address used for sent emails
DEFAULT_FROM_EMAIL = env.str("FROM_EMAIL", "wger Workout Manager <wger@example.com>")
WGER_SETTINGS['EMAIL_FROM'] = DEFAULT_FROM_EMAIL
SERVER_EMAIL = DEFAULT_FROM_EMAIL
EMAIL_FROM_ADDRESS = DEFAULT_FROM_EMAIL
# Management
WGER_SETTINGS["ALLOW_GUEST_USERS"] = env.bool("ALLOW_GUEST_USERS", True)
WGER_SETTINGS["ALLOW_REGISTRATION"] = env.bool("ALLOW_REGISTRATION", True)
WGER_SETTINGS["ALLOW_UPLOAD_VIDEOS"] = env.bool("ALLOW_UPLOAD_VIDEOS", True)
WGER_SETTINGS["DOWNLOAD_INGREDIENTS_FROM"] = env.str("DOWNLOAD_INGREDIENTS_FROM", "WGER")
WGER_SETTINGS["EXERCISE_CACHE_TTL"] = env.int("EXERCISE_CACHE_TTL", 3600)
WGER_SETTINGS["MIN_ACCOUNT_AGE_TO_TRUST"] = env.int("MIN_ACCOUNT_AGE_TO_TRUST", 21) # in days
WGER_SETTINGS["SYNC_EXERCISES_CELERY"] = env.bool("SYNC_EXERCISES_CELERY", False)
WGER_SETTINGS["SYNC_EXERCISE_IMAGES_CELERY"] = env.bool("SYNC_EXERCISE_IMAGES_CELERY", False)
WGER_SETTINGS["SYNC_EXERCISE_VIDEOS_CELERY"] = env.bool("SYNC_EXERCISE_VIDEOS_CELERY", False)
WGER_SETTINGS["SYNC_INGREDIENTS_CELERY"] = env.bool("SYNC_INGREDIENTS_CELERY", False)
WGER_SETTINGS["SYNC_OFF_DAILY_DELTA_CELERY"] = env.bool("SYNC_OFF_DAILY_DELTA_CELERY", False)
WGER_SETTINGS["USE_RECAPTCHA"] = env.bool("USE_RECAPTCHA", False)
WGER_SETTINGS["USE_CELERY"] = env.bool("USE_CELERY", False)
# Cache
# Optional external cache backend (typically redis); only configured when
# DJANGO_CACHE_BACKEND is set, otherwise settings_global defaults apply.
if os.environ.get("DJANGO_CACHE_BACKEND"):
    CACHES = {
        'default': {
            'BACKEND': env.str("DJANGO_CACHE_BACKEND"),
            'LOCATION': env.str("DJANGO_CACHE_LOCATION"),
            'TIMEOUT': env.int("DJANGO_CACHE_TIMEOUT"),
            'OPTIONS': {
                'CLIENT_CLASS': env.str("DJANGO_CACHE_CLIENT_CLASS")
            }
        }
    }
    if os.environ.get('DJANGO_CACHE_CLIENT_PASSWORD'):
        CACHES['default']['OPTIONS']['PASSWORD'] = env.str('DJANGO_CACHE_CLIENT_PASSWORD')
    # Optional TLS settings for the cache client's connection pool; only the
    # variables actually present in the environment are forwarded.
    CONNECTION_POOL_KWARGS = dict()
    if "DJANGO_CACHE_CLIENT_SSL_KEYFILE" in os.environ:
        CONNECTION_POOL_KWARGS['ssl_keyfile'] = env.str("DJANGO_CACHE_CLIENT_SSL_KEYFILE")
    if "DJANGO_CACHE_CLIENT_SSL_CERTFILE" in os.environ:
        CONNECTION_POOL_KWARGS['ssl_certfile'] = env.str("DJANGO_CACHE_CLIENT_SSL_CERTFILE")
    if "DJANGO_CACHE_CLIENT_SSL_CERT_REQS" in os.environ:
        CONNECTION_POOL_KWARGS['ssl_cert_reqs'] = env.str("DJANGO_CACHE_CLIENT_SSL_CERT_REQS")
    if "DJANGO_CACHE_CLIENT_SSL_CHECK_HOSTNAME" in os.environ:
        CONNECTION_POOL_KWARGS['ssl_check_hostname'] = env.bool(
            "DJANGO_CACHE_CLIENT_SSL_CHECK_HOSTNAME")
    if CONNECTION_POOL_KWARGS:
        CACHES["default"]["OPTIONS"]["CONNECTION_POOL_KWARGS"] = CONNECTION_POOL_KWARGS
# Folder for compressed CSS and JS files
COMPRESS_ROOT = STATIC_ROOT
# The site's domain as used by the email verification workflow
EMAIL_PAGE_DOMAIN = SITE_URL
#
# Django Axes
#
AXES_ENABLED = env.bool('AXES_ENABLED', True)
AXES_LOCKOUT_PARAMETERS = env.list('AXES_LOCKOUT_PARAMETERS', default=['ip_address'])
AXES_FAILURE_LIMIT = env.int('AXES_FAILURE_LIMIT', 10)
AXES_COOLOFF_TIME = timedelta(minutes=env.float('AXES_COOLOFF_TIME', 30))
AXES_HANDLER = env.str('AXES_HANDLER', 'axes.handlers.cache.AxesCacheHandler')
AXES_IPWARE_PROXY_COUNT = env.int('AXES_IPWARE_PROXY_COUNT', 0)
AXES_IPWARE_META_PRECEDENCE_ORDER = env.list('AXES_IPWARE_META_PRECEDENCE_ORDER',
                                             default=['REMOTE_ADDR'])
#
# Django Rest Framework SimpleJWT
#
SIMPLE_JWT['ACCESS_TOKEN_LIFETIME'] = timedelta(minutes=env.int("ACCESS_TOKEN_LIFETIME", 15))
SIMPLE_JWT['REFRESH_TOKEN_LIFETIME'] = timedelta(hours=env.int("REFRESH_TOKEN_LIFETIME", 24))
SIMPLE_JWT['SIGNING_KEY'] = env.str("SIGNING_KEY", SECRET_KEY)
#
# https://docs.djangoproject.com/en/4.1/ref/csrf/
#
CSRF_TRUSTED_ORIGINS = env.list(
    "CSRF_TRUSTED_ORIGINS",
    default=['http://127.0.0.1', 'http://localhost', 'https://localhost'],
)
# Behind a TLS-terminating reverse proxy, trust the forwarded-proto header
# so Django knows the original request was https.
if env.bool('X_FORWARDED_PROTO_HEADER_SET', False):
    SECURE_PROXY_SSL_HEADER = (
        env.str('SECURE_PROXY_SSL_HEADER', 'HTTP_X_FORWARDED_PROTO'),
        'https'
    )
REST_FRAMEWORK['NUM_PROXIES'] = env.int('NUMBER_OF_PROXIES', 1)
#
# Celery message queue configuration
#
CELERY_BROKER_URL = env.str("CELERY_BROKER", "redis://cache:6379/2")
CELERY_RESULT_BACKEND = env.str("CELERY_BACKEND", "redis://cache:6379/2")
#
# Prometheus metrics
#
EXPOSE_PROMETHEUS_METRICS = env.bool('EXPOSE_PROMETHEUS_METRICS', False)
PROMETHEUS_URL_PATH = env.str('PROMETHEUS_URL_PATH', 'super-secret-path')
| 7,213 | Python | .py | 155 | 42.341935 | 94 | 0.697751 | wger-project/wger | 3,065 | 570 | 221 | AGPL-3.0 | 9/5/2024, 5:13:18 PM (Europe/Amsterdam) |
22,387 | filter-fixtures.py | wger-project_wger/extras/scripts/filter-fixtures.py | # -*- coding: utf-8 -*-
# This file is part of wger Workout Manager.
#
# wger Workout Manager is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# wger Workout Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
"""
Simple script that filters the output of django's dumpdata command into more
manageable chunks.
After dumping the database (or parts of it), just copy the file and filter it:
python ./manage.py dumpdata --indent 4 --natural-foreign > extras/scripts/data.json
cd extras/scripts
python3 filter-fixtures.py
mv exercises.json ../../wger/exercises/fixtures/
...
rm *.json
"""
import json
# This is a full dump of the DB (produced with `manage.py dumpdata`, see the
# module docstring), loaded once at import time and shared by filter_dump().
# Using a context manager guarantees the file handle is closed even if the
# JSON parse raises, unlike the previous open()/close() pair.
with open('data.json') as fixture:
    data = json.load(fixture)
def filter_dump(model_list, filename):
    """Write the entries of ``data`` whose model is in ``model_list`` to ``filename``.

    Entries carrying a 'status' field are kept only when the status is '2'
    (accepted); entries without a 'status' field are always kept. When no
    entry matches any model at all, no file is written.
    """
    matching = []
    for entry in data:
        if entry['model'] in model_list:
            matching.append(entry)
    if not matching:
        return
    # Drop submissions that were not accepted; missing 'status' means keep.
    accepted = [entry for entry in matching if entry['fields'].get('status', '2') == '2']
    with open(filename, 'w') as outfile:
        json.dump(accepted, outfile, indent=4)
#
# Ingredients
#
filter_dump(('nutrition.ingredient',), 'ingredients.json')
filter_dump(('nutrition.weightunit',), 'weight_units.json')
filter_dump(('nutrition.ingredientweightunit',), 'ingredient_units.json')
filter_dump(('nutrition.logitem',), 'nutrition_diary.json')
#
# Exercises
#
filter_dump(('exercises.muscle',), 'muscles.json')
filter_dump(('exercises.exercisecategory',), 'categories.json')
filter_dump(('exercises.exerciseimage',), 'exercise-images.json')
filter_dump(
    (
        'exercises.exercisebase',
        'exercises.variation',
    ),
    'exercise-base-data.json',
)
filter_dump(
    ('exercises.exercise', 'exercises.exercisecomment', 'exercises.alias'), 'translations.json'
)
# Fixed: the tuple previously listed 'exercises.equipment' twice; a single
# entry is equivalent since the tuple is only used for membership tests.
filter_dump(('exercises.equipment',), 'equipment.json')
#
# Gym
#
filter_dump(('gym.gym',), 'gyms.json')
filter_dump(('gym.gymconfig',), 'gym_config.json')
filter_dump(('gym.gymadminconfig',), 'gym_adminconfig.json')
filter_dump(('gym.gymuserconfig',), 'gym_userconfig.json')
filter_dump(('gym.adminusernote',), 'gym_admin_user_notes.json')
filter_dump(('gym.userdocument',), 'gym_user_documents.json')
filter_dump(('gym.contract',), 'gym_contracts.json')
#
# Core
#
# NOTE(review): if the dump contains any 'core.gym' entries, this call
# overwrites the gyms.json written from 'gym.gym' above - confirm which
# model is the intended source of that fixture.
filter_dump(('core.gym',), 'gyms.json')
filter_dump(('core.language',), 'languages.json')
filter_dump(('core.license',), 'licenses.json')
filter_dump(('core.repetitionunit',), 'repetition_units.json')
#
# Configurations
#
# NOTE(review): likewise overwrites gym_config.json from the Gym section
# when 'config.gymconfig' entries exist - confirm intended.
filter_dump(('config.gymconfig',), 'gym_config.json')
#
# Other
#
filter_dump(('auth.group',), 'groups.json')
filter_dump(('auth.user',), 'users.json')
| 3,274 | Python | .py | 97 | 30.958763 | 100 | 0.710718 | wger-project/wger | 3,065 | 570 | 221 | AGPL-3.0 | 9/5/2024, 5:13:18 PM (Europe/Amsterdam) |
22,388 | .python-lint | wger-project_wger/.github/linters/.python-lint | [MASTER]
# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loading into the active Python interpreter and may
# run arbitrary code.
extension-pkg-whitelist=
# Specify a score threshold to be exceeded before program exits with error.
fail-under=10
# Add files or directories to the blacklist. They should be base names, not
# paths.
ignore=tests,migrations
# Add files or directories matching the regex patterns to the blacklist. The
# regex matches against base names, not paths.
ignore-patterns=
# Python code to execute, usually for sys.path manipulation such as
# pygtk.require().
#init-hook=
# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the
# number of processors available to use.
jobs=0
# Control the amount of potential inferred values when inferring a single
# object. This can help the performance when dealing with large functions or
# complex, nested conditions.
limit-inference-results=100
# List of plugins (as comma separated values of python module names) to load,
# usually to register additional checkers.
load-plugins=
# Pickle collected data for later comparisons.
persistent=yes
# When enabled, pylint would attempt to guess common misconfiguration and emit
# user-friendly hints instead of false-positive error messages.
suggestion-mode=yes
# Allow loading of arbitrary C extensions. Extensions are imported into the
# active Python interpreter and may run arbitrary code.
unsafe-load-any-extension=no
[MESSAGES CONTROL]
# Only show warnings with the listed confidence levels. Leave empty to show
# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED.
confidence=
# Disable the message, report, category or checker with the given id(s). You
# can either give multiple identifiers separated by comma (,) or put this
# option multiple times (only on the command line, not in the configuration
# file where it should appear only once). You can also use "--disable=all" to
# disable everything first and then re-enable specific checks. For example, if
# you want to run only the similarities checker, you can use "--disable=all
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use "--disable=all --enable=classes
# --disable=W".
disable=import-error,
print-statement,
parameter-unpacking,
unpacking-in-except,
old-raise-syntax,
backtick,
long-suffix,
old-ne-operator,
old-octal-literal,
import-star-module-level,
non-ascii-bytes-literal,
raw-checker-failed,
bad-inline-option,
locally-disabled,
file-ignored,
suppressed-message,
useless-suppression,
deprecated-pragma,
use-symbolic-message-instead,
duplicate-code,
apply-builtin,
basestring-builtin,
buffer-builtin,
cmp-builtin,
coerce-builtin,
execfile-builtin,
file-builtin,
long-builtin,
raw_input-builtin,
reduce-builtin,
standarderror-builtin,
unicode-builtin,
xrange-builtin,
coerce-method,
delslice-method,
getslice-method,
setslice-method,
no-absolute-import,
old-division,
dict-iter-method,
dict-view-method,
next-method-called,
metaclass-assignment,
indexing-exception,
raising-string,
reload-builtin,
oct-method,
hex-method,
nonzero-method,
cmp-method,
input-builtin,
round-builtin,
intern-builtin,
unichr-builtin,
map-builtin-not-iterating,
zip-builtin-not-iterating,
range-builtin-not-iterating,
filter-builtin-not-iterating,
using-cmp-argument,
eq-without-hash,
div-method,
idiv-method,
rdiv-method,
exception-message-attribute,
invalid-str-codec,
sys-max-int,
bad-python3-import,
deprecated-string-function,
deprecated-str-translate-call,
deprecated-itertools-function,
deprecated-types-field,
next-method-defined,
dict-items-not-iterating,
dict-keys-not-iterating,
dict-values-not-iterating,
deprecated-operator-function,
deprecated-urllib-function,
xreadlines-attribute,
deprecated-sys-function,
exception-escape,
comprehension-escape
# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifiers separated by comma (,) or put this option
# multiple times (only on the command line, not in the configuration file where
# it should appear only once). See also the "--disable" option for examples.
enable=c-extension-no-member
[REPORTS]
# Python expression which should return a score less than or equal to 10. You
# have access to the variables 'error', 'warning', 'refactor', and 'convention'
# which contain the number of messages in each category, as well as 'statement'
# which is the total number of statements analyzed. This score is used by the
# global evaluation report (RP0004).
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
# Template used to display messages. This is a python new-style format string
# used to format the message information. See doc for all details.
#msg-template=
# Set the output format. Available formats are text, parseable, colorized, json
# and msvs (visual studio). You can also give a reporter class, e.g.
# mypackage.mymodule.MyReporterClass.
output-format=text
# Tells whether to display a full report or only the messages.
reports=no
# Activate the evaluation score.
score=yes
[REFACTORING]
# Maximum number of nested blocks for function / method body
max-nested-blocks=5
# Complete name of functions that never returns. When checking for
# inconsistent-return-statements, if a never returning function is called then
# it will be considered as an explicit return statement and no message will be
# printed.
never-returning-functions=sys.exit
[LOGGING]
# The type of string formatting that logging methods do. `old` means using %
# formatting, `new` is for `{}` formatting.
logging-format-style=old
# Logging modules to check that the string format arguments are in logging
# function parameter format.
logging-modules=logging
[STRING]
# This flag controls whether inconsistent-quotes generates a warning when the
# character used as a quote delimiter is used inconsistently within a module.
check-quote-consistency=no
# This flag controls whether the implicit-str-concat should generate a warning
# on implicit string concatenation in sequences defined over several lines.
check-str-concat-over-line-jumps=no
[TYPECHECK]
# List of decorators that produce context managers, such as
# contextlib.contextmanager. Add to this list to register other decorators that
# produce valid context managers.
contextmanager-decorators=contextlib.contextmanager
# List of members which are set dynamically and missed by pylint inference
# system, and so shouldn't trigger E1101 when accessed. Python regular
# expressions are accepted.
generated-members=
# Tells whether missing members accessed in mixin class should be ignored. A
# mixin class is detected if its name ends with "mixin" (case insensitive).
ignore-mixin-members=yes
# Tells whether to warn about missing members when the owner of the attribute
# is inferred to be None.
ignore-none=yes
# This flag controls whether pylint should warn about no-member and similar
# checks whenever an opaque object is returned when inferring. The inference
# can return multiple potential results while evaluating a Python object, but
# some branches might not be evaluated, which results in partial inference. In
# that case, it might be useful to still emit no-member and other checks for
# the rest of the inferred objects.
ignore-on-opaque-inference=yes
# List of class names for which member attributes should not be checked (useful
# for classes with dynamically set attributes). This supports the use of
# qualified names.
ignored-classes=optparse.Values,thread._local,_thread._local
# List of module names for which member attributes should not be checked
# (useful for modules/projects where namespaces are manipulated during runtime
# and thus existing member attributes cannot be deduced by static analysis). It
# supports qualified module names, as well as Unix pattern matching.
ignored-modules=
# Show a hint with possible names when a member name was not found. The aspect
# of finding the hint is based on edit distance.
missing-member-hint=yes
# The minimum edit distance a name should have in order to be considered a
# similar match for a missing member name.
missing-member-hint-distance=1
# The total number of similar names that should be taken in consideration when
# showing a hint for a missing member.
missing-member-max-choices=1
# List of decorators that change the signature of a decorated function.
signature-mutators=
[SIMILARITIES]
# Ignore comments when computing similarities.
ignore-comments=yes
# Ignore docstrings when computing similarities.
ignore-docstrings=yes
# Ignore imports when computing similarities.
ignore-imports=no
# Minimum lines number of a similarity.
min-similarity-lines=4
[BASIC]
# Naming style matching correct argument names.
argument-naming-style=snake_case
# Regular expression matching correct argument names. Overrides argument-
# naming-style.
#argument-rgx=
# Naming style matching correct attribute names.
attr-naming-style=snake_case
# Regular expression matching correct attribute names. Overrides attr-naming-
# style.
#attr-rgx=
# Bad variable names which should always be refused, separated by a comma.
bad-names=foo,
bar,
baz,
toto,
tutu,
tata
# Bad variable names regexes, separated by a comma. If names match any regex,
# they will always be refused
bad-names-rgxs=
# Naming style matching correct class attribute names.
class-attribute-naming-style=any
# Regular expression matching correct class attribute names. Overrides class-
# attribute-naming-style.
#class-attribute-rgx=
# Naming style matching correct class names.
class-naming-style=PascalCase
# Regular expression matching correct class names. Overrides class-naming-
# style.
#class-rgx=
# Naming style matching correct constant names.
const-naming-style=UPPER_CASE
# Regular expression matching correct constant names. Overrides const-naming-
# style.
#const-rgx=
# Minimum line length for functions/classes that require docstrings, shorter
# ones are exempt.
docstring-min-length=-1
# Naming style matching correct function names.
function-naming-style=snake_case
# Regular expression matching correct function names. Overrides function-
# naming-style.
#function-rgx=
# Good variable names which should always be accepted, separated by a comma.
good-names=i,
j,
k,
ex,
Run,
_
# Good variable names regexes, separated by a comma. If names match any regex,
# they will always be accepted
good-names-rgxs=
# Include a hint for the correct naming format with invalid-name.
include-naming-hint=no
# Naming style matching correct inline iteration names.
inlinevar-naming-style=any
# Regular expression matching correct inline iteration names. Overrides
# inlinevar-naming-style.
#inlinevar-rgx=
# Naming style matching correct method names.
method-naming-style=snake_case
# Regular expression matching correct method names. Overrides method-naming-
# style.
#method-rgx=
# Naming style matching correct module names.
module-naming-style=snake_case
# Regular expression matching correct module names. Overrides module-naming-
# style.
#module-rgx=
# Colon-delimited sets of names that determine each other's naming style when
# the name regexes allow several styles.
name-group=
# Regular expression which should only match function or class names that do
# not require a docstring.
no-docstring-rgx=^_
# List of decorators that produce properties, such as abc.abstractproperty. Add
# to this list to register other decorators that produce valid properties.
# These decorators are taken in consideration only for invalid-name.
property-classes=abc.abstractproperty
# Naming style matching correct variable names.
variable-naming-style=snake_case
# Regular expression matching correct variable names. Overrides variable-
# naming-style.
#variable-rgx=
[SPELLING]
# Limits count of emitted suggestions for spelling mistakes.
max-spelling-suggestions=4
# Spelling dictionary name. Available dictionaries: none. To make it work,
# install the python-enchant package.
spelling-dict=
# List of comma separated words that should not be checked.
spelling-ignore-words=
# A path to a file that contains the private dictionary; one word per line.
spelling-private-dict-file=
# Tells whether to store unknown words to the private dictionary (see the
# --spelling-private-dict-file option) instead of raising a message.
spelling-store-unknown-words=no
[MISCELLANEOUS]
# List of note tags to take in consideration, separated by a comma.
notes=FIXME,
XXX,
TODO
# Regular expression of note tags to take in consideration.
#notes-rgx=
[FORMAT]
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
expected-line-ending-format=
# Regexp for a line that is allowed to be longer than the limit.
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
# Number of spaces of indent required inside a hanging or continued line.
indent-after-paren=4
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
# tab).
indent-string=' '
# Maximum number of characters on a single line.
max-line-length=100
# Maximum number of lines in a module.
max-module-lines=1000
# List of optional constructs for which whitespace checking is disabled. `dict-
# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}.
# `trailing-comma` allows a space between comma and closing bracket: (a, ).
# `empty-line` allows space-only lines.
no-space-check=trailing-comma,
dict-separator
# Allow the body of a class to be on the same line as the declaration if body
# contains single statement.
single-line-class-stmt=no
# Allow the body of an if to be on the same line as the test if there is no
# else.
single-line-if-stmt=no
[VARIABLES]
# List of additional names supposed to be defined in builtins. Remember that
# you should avoid defining new builtins when possible.
additional-builtins=
# Tells whether unused global variables should be treated as a violation.
allow-global-unused-variables=yes
# List of strings which can identify a callback function by name. A callback
# name must start or end with one of those strings.
callbacks=cb_,
_cb
# A regular expression matching the name of dummy variables (i.e. expected to
# not be used).
dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
# Argument names that match this expression will be ignored. Default to name
# with leading underscore.
ignored-argument-names=_.*|^ignored_|^unused_
# Tells whether we should check for unused import in __init__ files.
init-import=no
# List of qualified module names which can have objects that can redefine
# builtins.
redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io
[DESIGN]
# Maximum number of arguments for function / method.
max-args=5
# Maximum number of attributes for a class (see R0902).
max-attributes=7
# Maximum number of boolean expressions in an if statement (see R0916).
max-bool-expr=5
# Maximum number of branches for function / method body.
max-branches=12
# Maximum number of locals for function / method body.
max-locals=15
# Maximum number of parents for a class (see R0901).
max-parents=7
# Maximum number of public methods for a class (see R0904).
max-public-methods=20
# Maximum number of return / yield for function / method body.
max-returns=6
# Maximum number of statements in function / method body.
max-statements=50
# Minimum number of public methods for a class (see R0903).
min-public-methods=2
[IMPORTS]
# List of modules that can be imported at any level, not just the top level
# one.
allow-any-import-level=
# Allow wildcard imports from modules that define __all__.
allow-wildcard-with-all=no
# Analyse import fallback blocks. This can be used to support both Python 2 and
# 3 compatible code, which means that the block might have code that exists
# only in one or another interpreter, leading to false positives when analysed.
analyse-fallback-blocks=no
# Deprecated modules which should not be used, separated by a comma.
deprecated-modules=optparse,tkinter.tix
# Create a graph of external dependencies in the given file (report RP0402 must
# not be disabled).
ext-import-graph=
# Create a graph of every (i.e. internal and external) dependencies in the
# given file (report RP0402 must not be disabled).
import-graph=
# Create a graph of internal dependencies in the given file (report RP0402 must
# not be disabled).
int-import-graph=
# Force import order to recognize a module as part of the standard
# compatibility libraries.
known-standard-library=
# Force import order to recognize a module as part of a third party library.
known-third-party=enchant
# Couples of modules and preferred modules, separated by a comma.
preferred-modules=
[CLASSES]
# List of method names used to declare (i.e. assign) instance attributes.
defining-attr-methods=__init__,
__new__,
setUp,
__post_init__
# List of member names, which should be excluded from the protected access
# warning.
exclude-protected=_asdict,
_fields,
_replace,
_source,
_make
# List of valid names for the first argument in a class method.
valid-classmethod-first-arg=cls
# List of valid names for the first argument in a metaclass class method.
valid-metaclass-classmethod-first-arg=cls
[EXCEPTIONS]
# Exceptions that will emit a warning when being caught. Defaults to
# "BaseException, Exception".
overgeneral-exceptions=BaseException,
Exception
| 18,464 | Python | .py | 448 | 37.776786 | 89 | 0.765154 | wger-project/wger | 3,065 | 570 | 221 | AGPL-3.0 | 9/5/2024, 5:13:18 PM (Europe/Amsterdam) |
22,389 | settings_global.py | wger-project_wger/wger/settings_global.py | # -*- coding: utf-8 -*-
# This file is part of wger Workout Manager.
#
# wger Workout Manager is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# wger Workout Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# Standard Library
import os
import re
import sys
from datetime import timedelta
# wger
from wger import get_version
from wger.utils.constants import DOWNLOAD_INGREDIENT_WGER
"""
This file contains the global settings that don't usually need to be changed.
For a full list of options, visit:
https://docs.djangoproject.com/en/dev/ref/settings/
"""
# Both filesystem anchors resolve to the directory containing this module
# (the wger package directory).
BASE_DIR = os.path.abspath(os.path.dirname(__file__))
SITE_ROOT = os.path.realpath(os.path.dirname(__file__))
#
# Application definition
#
SITE_ID = 1
ROOT_URLCONF = 'wger.urls'
WSGI_APPLICATION = 'wger.wsgi.application'
INSTALLED_APPS = [
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.messages',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.staticfiles',
    'storages',
    # Uncomment the next line to enable the admin:
    # 'django.contrib.admin',
    # Apps from wger proper
    'wger.config',
    'wger.core',
    'wger.mailer',
    'wger.exercises',
    'wger.gym',
    'wger.manager',
    'wger.nutrition',
    'wger.software',
    'wger.utils',
    'wger.weight',
    'wger.gallery',
    'wger.measurements',
    # reCaptcha support, see https://github.com/praekelt/django-recaptcha
    'django_recaptcha',
    # The sitemaps app
    'django.contrib.sitemaps',
    # thumbnails
    'easy_thumbnails',
    # CSS/JS compressor
    'compressor',
    # Form renderer helper
    'crispy_forms',
    'crispy_bootstrap5',
    # REST-API
    'rest_framework',
    'rest_framework.authtoken',
    'django_filters',
    'rest_framework_simplejwt',
    'drf_spectacular',
    'drf_spectacular_sidecar',
    # Breadcrumbs
    'django_bootstrap_breadcrumbs',
    # CORS
    'corsheaders',
    # Django Axes
    'axes',
    # History keeping
    'simple_history',
    # Django email verification
    'django_email_verification',
    # Activity stream
    'actstream',
    # Fontawesome
    'fontawesomefree',
    # Prometheus
    'django_prometheus',
]
MIDDLEWARE = [
# Prometheus
'django_prometheus.middleware.PrometheusBeforeMiddleware',
'corsheaders.middleware.CorsMiddleware',
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
# Django Admin
'django.contrib.auth.middleware.AuthenticationMiddleware',
# Javascript Header. Sends helper headers for AJAX
'wger.utils.middleware.JavascriptAJAXRedirectionMiddleware',
# Custom authentication middleware. Creates users on-the-fly for certain paths
'wger.utils.middleware.WgerAuthenticationMiddleware',
# Send an appropriate Header so search engines don't index pages
'wger.utils.middleware.RobotsExclusionMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.locale.LocaleMiddleware',
# History keeping
'simple_history.middleware.HistoryRequestMiddleware',
# Prometheus
'django_prometheus.middleware.PrometheusAfterMiddleware',
# Django Axes
'axes.middleware.AxesMiddleware', # should be the last one in the list
]
AUTHENTICATION_BACKENDS = (
'axes.backends.AxesStandaloneBackend', # should be the first one in the list
'django.contrib.auth.backends.ModelBackend',
'wger.utils.helpers.EmailAuthBackend',
)
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'OPTIONS': {
'context_processors': [
'wger.utils.context_processor.processor',
# Django
'django.contrib.auth.context_processors.auth',
'django.template.context_processors.debug',
'django.template.context_processors.i18n',
'django.template.context_processors.media',
'django.template.context_processors.static',
'django.template.context_processors.tz',
'django.contrib.messages.context_processors.messages',
# Breadcrumbs
'django.template.context_processors.request'
],
'loaders': [
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
],
'debug': False
},
},
]
# Store the user messages in the session
MESSAGE_STORAGE = 'django.contrib.messages.storage.session.SessionStorage'
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# Django compressor
'compressor.finders.CompressorFinder',
)
#
# Email
#
EMAIL_SUBJECT_PREFIX = '[wger] '
# EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
#
# Login
#
LOGIN_URL = '/user/login'
LOGIN_REDIRECT_URL = '/'
#
# Internationalization
#
USE_TZ = True
USE_I18N = True
USE_L10N = True
USE_THOUSAND_SEPARATOR = True
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'UTC'
# Available languages. Needs to be kept in sync with sufficiently
# translated languages: https://hosted.weblate.org/projects/wger/web/
#
# Translated languages for which a country specific locale exists in django
# upstream need to be added here as well (plus their country flag)
# https://github.com/django/django/blob/main/django/conf/global_settings.py
AVAILABLE_LANGUAGES = (
('bg', 'Bulgarian'),
('ca', 'Catalan'),
('cs', 'Czech'),
('de', 'German'),
('el', 'Greek'),
('en', 'English'),
('en-au', 'Australian English'),
('en-gb', 'British English'),
('es', 'Spanish'),
('es-ar', 'Argentinian Spanish'),
('es-co', 'Colombian Spanish'),
('es-mx', 'Mexican Spanish'),
('es-ni', 'Nicaraguan Spanish'),
('es-ve', 'Venezuelan Spanish'),
('fr', 'French'),
('hr', 'Croatian'),
('it', 'Italian'),
('nl', 'Dutch'),
('nb', 'Norwegian'),
('pl', 'Polish'),
('pt', 'Portuguese'),
('pt-br', 'Brazilian Portuguese'),
('ru', 'Russian'),
('sv', 'Swedish'),
('tr', 'Turkish'),
('uk', 'Ukrainian'),
('zh-hans', 'Chinese simplified'),
('zh-hant', 'Traditional Chinese'),
)
# Default language code for this installation.
LANGUAGE_CODE = 'en'
# All translation files are in one place
LOCALE_PATHS = (os.path.join(SITE_ROOT, 'locale'),)
# Primary keys are AutoFields
DEFAULT_AUTO_FIELD = 'django.db.models.AutoField'
#
# Logging
# See http://docs.python.org/library/logging.config.html
#
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'simple': {
'format': '%(levelname)s %(asctime)s %(module)s %(message)s'
},
},
'handlers': {
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'simple'
},
},
'loggers': {
'wger': {
'handlers': ['console'],
'level': 'DEBUG',
},
'': {
'handlers': ['console'],
'level': 'INFO',
'propagate': False,
}
}
}
#
# ReCaptcha
#
RECAPTCHA_USE_SSL = True
#
# Cache
#
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'wger-cache',
'TIMEOUT': 30 * 24 * 60 * 60, # Cache for a month
}
}
#
# Django Axes
#
AXES_ENABLED = True
AXES_FAILURE_LIMIT = 10
AXES_COOLOFF_TIME = timedelta(minutes=30)
AXES_LOCKOUT_TEMPLATE = None
AXES_RESET_ON_SUCCESS = False
AXES_RESET_COOL_OFF_ON_FAILURE_DURING_LOCKOUT = True
# If you want to set up redis, set AXES_HANDLER = 'axes.handlers.cache.AxesCacheHandler'
AXES_HANDLER = 'axes.handlers.database.AxesDatabaseHandler'
# If your redis or MemcachedCache has a different name other than 'default'
# (e.g. when you have multiple caches defined in CACHES), change the following value to that name
AXES_CACHE = 'default'
#
# Django Crispy Templates
#
CRISPY_ALLOWED_TEMPLATE_PACKS = "bootstrap5"
CRISPY_TEMPLATE_PACK = 'bootstrap5'
#
# Easy thumbnails
#
THUMBNAIL_ALIASES = {
'': {
'micro': {
'size': (30, 30)
},
'micro_cropped': {
'size': (30, 30),
'crop': 'smart'
},
'thumbnail': {
'size': (80, 80)
},
'thumbnail_cropped': {
'size': (80, 80),
'crop': 'smart'
},
'small': {
'size': (200, 200)
},
'small_cropped': {
'size': (200, 200),
'crop': 'smart'
},
'medium': {
'size': (400, 400)
},
'medium_cropped': {
'size': (400, 400),
'crop': 'smart'
},
'large': {
'size': (800, 800),
'quality': 90
},
'large_cropped': {
'size': (800, 800),
'crop': 'smart',
'quality': 90
},
},
}
STATIC_ROOT = ''
USE_S3 = os.getenv('USE_S3') == 'TRUE'
if USE_S3:
# aws settings
AWS_ACCESS_KEY_ID = os.getenv('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.getenv('AWS_SECRET_ACCESS_KEY')
AWS_STORAGE_BUCKET_NAME = os.getenv('AWS_STORAGE_BUCKET_NAME')
AWS_DEFAULT_ACL = 'public-read'
AWS_S3_CUSTOM_DOMAIN = os.getenv('WGER_CDN_DOMAIN')
AWS_S3_OBJECT_PARAMETERS = {'CacheControl': 'max-age=31557600'}
# s3 static settings
AWS_LOCATION = 'static'
STATIC_URL = 'https://%s/%s/' % (AWS_S3_CUSTOM_DOMAIN, AWS_LOCATION)
STATICFILES_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage'
COMPRESS_URL = STATIC_URL
COMPRESS_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage'
COMPRESS_OFFLINE = True
COMPRESS_OFFLINE_CONTEXT = [
{
'request': {
'user_agent': {
'is_mobile': True
}
},
'STATIC_URL': STATIC_URL
}, {
'request': {
'user_agent': {
'is_mobile': False
}
},
'STATIC_URL': STATIC_URL
}
]
else:
STATIC_URL = '/static/'
#
# Django compressor
#
# The default is not DEBUG, override if needed
# COMPRESS_ENABLED = True
COMPRESS_CSS_FILTERS = (
'compressor.filters.css_default.CssAbsoluteFilter',
'compressor.filters.cssmin.rCSSMinFilter',
)
COMPRESS_JS_FILTERS = [
'compressor.filters.jsmin.JSMinFilter',
'compressor.filters.template.TemplateFilter',
]
COMPRESS_ROOT = STATIC_ROOT
#
# Django Rest Framework
#
# yapf: disable
REST_FRAMEWORK = {
'DEFAULT_PERMISSION_CLASSES': ('wger.utils.permissions.WgerPermission',),
'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.LimitOffsetPagination',
'PAGE_SIZE': 20,
'PAGINATE_BY_PARAM': 'limit', # Allow client to override, using `?limit=xxx`.
'TEST_REQUEST_DEFAULT_FORMAT': 'json',
'DEFAULT_AUTHENTICATION_CLASSES': (
'rest_framework.authentication.SessionAuthentication',
'rest_framework.authentication.TokenAuthentication',
'rest_framework_simplejwt.authentication.JWTAuthentication',
),
'DEFAULT_FILTER_BACKENDS': (
'django_filters.rest_framework.DjangoFilterBackend',
'rest_framework.filters.OrderingFilter',
),
'DEFAULT_THROTTLE_CLASSES': ['rest_framework.throttling.ScopedRateThrottle'],
'DEFAULT_THROTTLE_RATES': {
'login': '10/min'
},
'DEFAULT_SCHEMA_CLASS': 'drf_spectacular.openapi.AutoSchema',
}
# yapf: enable
# Api docs
# yapf: disable
SPECTACULAR_SETTINGS = {
'TITLE': 'wger',
'SERVERS': [
{'url': '/', 'description': 'This server'},
{'url': 'https://wger.de', 'description': 'The "official" upstream wger instance'},
],
'DESCRIPTION': 'Self hosted FLOSS workout and fitness tracker',
'VERSION': get_version(),
'SERVE_INCLUDE_SCHEMA': True,
'SCHEMA_PATH_PREFIX': '/api/v[0-9]',
'SWAGGER_UI_DIST': 'SIDECAR',
'SWAGGER_UI_FAVICON_HREF': 'SIDECAR',
'REDOC_DIST': 'SIDECAR',
'COMPONENT_SPLIT_REQUEST': True
}
# yapf: enable
#
# Django Rest Framework SimpleJWT
#
SIMPLE_JWT = {
'ACCESS_TOKEN_LIFETIME': timedelta(minutes=5),
'REFRESH_TOKEN_LIFETIME': timedelta(days=1),
'ROTATE_REFRESH_TOKENS': False,
'BLACKLIST_AFTER_ROTATION': False,
'UPDATE_LAST_LOGIN': False,
}
#
# CORS headers: allow all hosts to access the API
#
CORS_ORIGIN_ALLOW_ALL = True
CORS_URLS_REGEX = r'^/api/.*$'
#
# Ignore these URLs if they cause 404
#
IGNORABLE_404_URLS = (re.compile(r'^/favicon\.ico$'),)
#
# Password rules
#
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
USER_AGENTS_CACHE = 'default'
#
# Application specific configuration options
#
# Consult docs/settings.rst for more information
#
WGER_SETTINGS = {
'ALLOW_GUEST_USERS': True,
'ALLOW_REGISTRATION': True,
'ALLOW_UPLOAD_VIDEOS': False,
'DOWNLOAD_INGREDIENTS_FROM': DOWNLOAD_INGREDIENT_WGER,
'EMAIL_FROM': 'wger Workout Manager <wger@example.com>',
'EXERCISE_CACHE_TTL': 3600,
'INGREDIENT_CACHE_TTL': 604800, # one week
'MIN_ACCOUNT_AGE_TO_TRUST': 21,
'SYNC_EXERCISES_CELERY': False,
'SYNC_EXERCISE_IMAGES_CELERY': False,
'SYNC_EXERCISE_VIDEOS_CELERY': False,
'SYNC_INGREDIENTS_CELERY': False,
'SYNC_OFF_DAILY_DELTA_CELERY': False,
'TWITTER': False,
'MASTODON': 'https://fosstodon.org/@wger',
'USE_CELERY': False,
'USE_RECAPTCHA': False,
'WGER_INSTANCE': 'https://wger.de',
}
#
# Prometheus metrics
#
EXPOSE_PROMETHEUS_METRICS = False
PROMETHEUS_URL_PATH = 'super-secret-path'
#
# Django email verification
#
def email_verified_callback(user):
    """Mark the given user's profile as having a verified email address.

    Passed to django_email_verification via EMAIL_MAIL_CALLBACK and invoked
    once the user clicks the confirmation link.
    """
    profile = user.userprofile
    profile.email_verified = True
    profile.save()
EMAIL_MAIL_CALLBACK = email_verified_callback
EMAIL_FROM_ADDRESS = WGER_SETTINGS['EMAIL_FROM']
EMAIL_MAIL_SUBJECT = 'Confirm your email'
EMAIL_MAIL_HTML = 'email_verification/email_body_html.tpl'
EMAIL_MAIL_PLAIN = 'email_verification/email_body_txt.tpl'
EMAIL_MAIL_TOKEN_LIFE = 60 * 60
EMAIL_MAIL_PAGE_TEMPLATE = 'email_verification/confirm_template.html'
EMAIL_PAGE_DOMAIN = 'http://localhost:8000/'
#
# Django-activity stream
#
ACTSTREAM_SETTINGS = {
'USE_JSONFIELD': True,
}
# Whether the application is being run regularly or during tests
TESTING = len(sys.argv) > 1 and sys.argv[1] == 'test'
| 15,763 | Python | .py | 514 | 25.669261 | 97 | 0.660076 | wger-project/wger | 3,065 | 570 | 221 | AGPL-3.0 | 9/5/2024, 5:13:18 PM (Europe/Amsterdam) |
22,390 | __main__.py | wger-project_wger/wger/__main__.py | # -*- coding: utf-8 -*-
# This file is part of wger Workout Manager.
#
# wger Workout Manager is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# wger Workout Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# Standard Library
import os
import sys
# Third Party
from invoke import run
"""
This simple wrapper script is used as a console entry point in the packaged
version of the application. It simply redirects all arguments to the invoke
command, which does all the work.
"""
invoke_cmd = 'invoke '
def main():
    """Entry point: forward all CLI arguments to the invoke command."""
    # Invoke discovers its tasks file relative to the working directory,
    # so switch to the package directory before delegating.
    os.chdir(os.path.dirname(os.path.abspath(__file__)))

    args = sys.argv[1:]
    if args:
        run(invoke_cmd + ' '.join(args), pty=True)
    else:
        # No arguments given: show the available tasks instead
        run(invoke_cmd + '--list')
if __name__ == '__main__':
main()
| 1,287 | Python | .py | 35 | 34.314286 | 78 | 0.738114 | wger-project/wger | 3,065 | 570 | 221 | AGPL-3.0 | 9/5/2024, 5:13:18 PM (Europe/Amsterdam) |
22,391 | urls.py | wger-project_wger/wger/urls.py | # -*- coding: utf-8 -*-
# This file is part of wger Workout Manager.
#
# wger Workout Manager is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# wger Workout Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Workout Manager. If not, see <http://www.gnu.org/licenses/>.
# Django
from django.conf import settings
from django.conf.urls import include
from django.conf.urls.i18n import i18n_patterns
from django.conf.urls.static import static
from django.contrib.sitemaps.views import (
index,
sitemap,
)
from django.urls import path
# Third Party
from django_email_verification import urls as email_urls
from drf_spectacular.views import (
SpectacularAPIView,
SpectacularRedocView,
SpectacularSwaggerView,
)
from rest_framework import routers
from rest_framework_simplejwt.views import (
TokenObtainPairView,
TokenRefreshView,
TokenVerifyView,
)
# wger
from wger.core.api import views as core_api_views
from wger.exercises.api import views as exercises_api_views
from wger.exercises.sitemap import ExercisesSitemap
from wger.gallery.api import views as gallery_api_views
from wger.manager.api import views as manager_api_views
from wger.measurements.api import views as measurements_api_views
from wger.nutrition.api import views as nutrition_api_views
from wger.nutrition.sitemap import NutritionSitemap
from wger.utils.generic_views import TextTemplateView
from wger.weight.api import views as weight_api_views
#
# REST API
#
router = routers.DefaultRouter()
#
# Application
#
# Manager app
router.register(r'day', manager_api_views.DayViewSet, basename='day')
router.register(r'set', manager_api_views.SetViewSet, basename='Set')
router.register(r'setting', manager_api_views.SettingViewSet, basename='Setting')
router.register(r'workout', manager_api_views.WorkoutViewSet, basename='workout')
router.register(r'templates', manager_api_views.UserWorkoutTemplateViewSet, basename='templates')
router.register(
r'public-templates',
manager_api_views.PublicWorkoutTemplateViewSet,
basename='public-templates',
)
router.register(
r'workoutsession', manager_api_views.WorkoutSessionViewSet, basename='workoutsession'
)
router.register(r'workoutlog', manager_api_views.WorkoutLogViewSet, basename='workoutlog')
router.register(r'schedulestep', manager_api_views.ScheduleStepViewSet, basename='schedulestep')
router.register(r'schedule', manager_api_views.ScheduleViewSet, basename='schedule')
# Core app
router.register(r'daysofweek', core_api_views.DaysOfWeekViewSet, basename='daysofweek')
router.register(r'language', core_api_views.LanguageViewSet, basename='language')
router.register(r'license', core_api_views.LicenseViewSet, basename='license')
router.register(r'userprofile', core_api_views.UserProfileViewSet, basename='userprofile')
router.register(
r'setting-repetitionunit',
core_api_views.RepetitionUnitViewSet,
basename='setting-repetition-unit',
)
router.register(
r'setting-weightunit', core_api_views.RoutineWeightUnitViewSet, basename='setting-weight-unit'
)
# Exercises app
router.register(
r'exerciseinfo',
exercises_api_views.ExerciseInfoViewset,
basename='exerciseinfo',
)
router.register(
r'exercisebaseinfo',
exercises_api_views.ExerciseBaseInfoViewset,
basename='exercisebaseinfo',
)
router.register(
r'exercise',
exercises_api_views.ExerciseViewSet,
basename='exercise',
)
router.register(
r'exercise-translation',
exercises_api_views.ExerciseTranslationViewSet,
basename='exercise-translation',
)
router.register(
r'exercise-base',
exercises_api_views.ExerciseBaseViewSet,
basename='exercise-base',
)
router.register(
r'equipment',
exercises_api_views.EquipmentViewSet,
basename='equipment',
)
router.register(
r'deletion-log',
exercises_api_views.DeletionLogViewSet,
basename='deletion-log',
)
router.register(
r'exercisecategory',
exercises_api_views.ExerciseCategoryViewSet,
basename='exercisecategory',
)
router.register(
r'video',
exercises_api_views.ExerciseVideoViewSet,
basename='video',
)
router.register(
r'exerciseimage',
exercises_api_views.ExerciseImageViewSet,
basename='exerciseimage',
)
router.register(
r'exercisecomment',
exercises_api_views.ExerciseCommentViewSet,
basename='exercisecomment',
)
router.register(
r'exercisealias',
exercises_api_views.ExerciseAliasViewSet,
basename='exercisealias',
)
router.register(
r'muscle',
exercises_api_views.MuscleViewSet,
basename='muscle',
)
router.register(
r'variation',
exercises_api_views.ExerciseVariationViewSet,
basename='variation',
)
# Nutrition app
router.register(r'ingredient', nutrition_api_views.IngredientViewSet, basename='api-ingredient')
router.register(
r'ingredientinfo', nutrition_api_views.IngredientInfoViewSet, basename='api-ingredientinfo'
)
router.register(r'weightunit', nutrition_api_views.WeightUnitViewSet, basename='weightunit')
router.register(
r'ingredientweightunit',
nutrition_api_views.IngredientWeightUnitViewSet,
basename='ingredientweightunit',
)
router.register(
r'nutritionplan', nutrition_api_views.NutritionPlanViewSet, basename='nutritionplan'
)
router.register(
r'nutritionplaninfo', nutrition_api_views.NutritionPlanInfoViewSet, basename='nutritionplaninfo'
)
router.register(r'nutritiondiary', nutrition_api_views.LogItemViewSet, basename='nutritiondiary')
router.register(r'meal', nutrition_api_views.MealViewSet, basename='meal')
router.register(r'mealitem', nutrition_api_views.MealItemViewSet, basename='mealitem')
router.register(r'ingredient-image', nutrition_api_views.ImageViewSet, basename='ingredientimage')
# Weight app
router.register(r'weightentry', weight_api_views.WeightEntryViewSet, basename='weightentry')
# Gallery app
router.register(r'gallery', gallery_api_views.GalleryImageViewSet, basename='gallery')
# Measurements app
router.register(
r'measurement',
measurements_api_views.MeasurementViewSet,
basename='measurement',
)
router.register(
r'measurement-category',
measurements_api_views.CategoryViewSet,
basename='measurement-category',
)
#
# Sitemaps
#
sitemaps = {
'exercises': ExercisesSitemap,
} # 'nutrition': NutritionSitemap}
#
# The actual URLs
#
urlpatterns = i18n_patterns(
path('', include(('wger.core.urls', 'core'), namespace='core')),
path('routine/', include(('wger.manager.urls', 'manager'), namespace='manager')),
path('exercise/', include(('wger.exercises.urls', 'exercise'), namespace='exercise')),
path('weight/', include(('wger.weight.urls', 'weight'), namespace='weight')),
path('nutrition/', include(('wger.nutrition.urls', 'nutrition'), namespace='nutrition')),
path('software/', include(('wger.software.urls', 'software'), namespace='software')),
path('config/', include(('wger.config.urls', 'config'), namespace='config')),
path('gym/', include(('wger.gym.urls', 'gym'), namespace='gym')),
path('gallery/', include(('wger.gallery.urls', 'gallery'), namespace='gallery')),
path(
'measurement/',
include(('wger.measurements.urls', 'measurements'), namespace='measurements'),
),
path('email/', include(('wger.mailer.urls', 'email'), namespace='email')),
path('sitemap.xml', index, {'sitemaps': sitemaps}, name='sitemap'),
path(
'sitemap-<section>.xml',
sitemap,
{'sitemaps': sitemaps},
name='django.contrib.sitemaps.views.sitemap',
),
)
#
# URLs without language prefix
#
urlpatterns += [
path('robots.txt', TextTemplateView.as_view(template_name='robots.txt'), name='robots'),
# API
path('api/v2/exercise/search/', exercises_api_views.search, name='exercise-search'),
path('api/v2/ingredient/search/', nutrition_api_views.search, name='ingredient-search'),
path('api/v2/', include(router.urls)),
# The api user login
path(
'api/v2/login/', core_api_views.UserAPILoginView.as_view({'post': 'post'}), name='api_user'
),
path(
'api/v2/register/',
core_api_views.UserAPIRegistrationViewSet.as_view({'post': 'post'}),
name='api_register',
),
path('api/v2/token', TokenObtainPairView.as_view(), name='token_obtain_pair'),
path('api/v2/token/refresh', TokenRefreshView.as_view(), name='token_refresh'),
path('api/v2/token/verify', TokenVerifyView.as_view(), name='token_verify'),
# Others
path(
'api/v2/version/',
core_api_views.ApplicationVersionView.as_view({'get': 'get'}),
name='app_version',
),
path(
'api/v2/check-permission/',
core_api_views.PermissionView.as_view({'get': 'get'}),
name='permission',
),
path(
'api/v2/min-app-version/',
core_api_views.RequiredApplicationVersionView.as_view({'get': 'get'}),
name='min_app_version',
),
# Api documentation
path(
'api/v2/schema',
SpectacularAPIView.as_view(),
name='schema',
),
path(
'api/v2/schema/ui',
SpectacularSwaggerView.as_view(url_name='schema'),
name='api-swagger-ui',
),
path(
'api/v2/schema/redoc',
SpectacularRedocView.as_view(url_name='schema'),
name='api-redoc',
),
path('email/', include(email_urls)),
]
#
# URL for user uploaded files, served like this during development only
#
if settings.DEBUG:
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
# urlpatterns.append(path("__debug__/", include("debug_toolbar.urls")))
if settings.EXPOSE_PROMETHEUS_METRICS:
urlpatterns += [
path(f'prometheus/{settings.PROMETHEUS_URL_PATH}/', include('django_prometheus.urls'))
]
| 10,227 | Python | .py | 290 | 31.772414 | 100 | 0.743194 | wger-project/wger | 3,065 | 570 | 221 | AGPL-3.0 | 9/5/2024, 5:13:18 PM (Europe/Amsterdam) |
22,392 | tasks.py | wger-project_wger/wger/tasks.py | # -*- coding: utf-8 -*-
# This file is part of wger Workout Manager.
#
# wger Workout Manager is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# wger Workout Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# Standard Library
import logging
import os
import pathlib
import sys
import tempfile
# Django
import django
from django.core.management import (
call_command,
execute_from_command_line,
)
from django.utils.crypto import get_random_string
# Third Party
import requests
from invoke import task
from tqdm import tqdm
logger = logging.getLogger(__name__)
FIXTURE_URL = 'https://github.com/wger-project/data/raw/master/fixtures/'
@task(
    help={
        'address': 'Address to bind to. Default: localhost',
        'port': 'Port to use. Default: 8000',
        'settings-path': 'Path to settings file (absolute path). Leave empty for default',
        'extra-args': 'Additional arguments to pass to the builtin server. Pass as string: '
        '"--arg1 --arg2=value". Default: none',
    }
)
def start(context, address='localhost', port=8000, settings_path=None, extra_args=''):
    """
    Start the application using django's built in webserver
    """
    # Locate the settings file and configure the django environment
    setup_django_environment(settings_path)

    # Build the argv list for django's management command machinery; the
    # first (program name) entry is unused and left empty.
    command = ['', 'runserver', '--noreload']
    if extra_args:
        command.extend(extra_args.split(' '))
    command.append(f'{address}:{port}')
    execute_from_command_line(command)
@task(
    help={
        'settings-path': 'Path to settings file (absolute path). Leave empty for default',
        'database-path': 'Path to sqlite database (absolute path). Leave empty for default',
    }
)
def bootstrap(context, settings_path=None, database_path=None, process_static=True):
    """
    Performs all steps necessary to bootstrap the application

    Runs, in order: settings file creation (if missing), django setup,
    database creation with fixtures and an admin user (if the database does
    not exist yet) and, unless process_static is False, the yarn build of
    the static assets.
    """
    # Create settings if necessary
    if settings_path is None:
        settings_path = get_path('settings.py')
    if not os.path.exists(settings_path):
        create_settings(context, settings_path=settings_path, database_path=database_path)

    # Find the path to the settings and setup the django environment
    setup_django_environment(settings_path)

    # Create Database if necessary
    if not database_exists():
        print('*** Database does not exist, creating one now')
        migrate_db(context, settings_path=settings_path)
        load_fixtures(context, settings_path=settings_path)
        create_or_reset_admin(context, settings_path=settings_path)

    # Download JS and CSS libraries
    if process_static:
        context.run('yarn install')
        context.run('yarn build:css:sass')
@task(
    help={
        'settings-path': 'Path to settings file (absolute path). Leave empty for default',
        'database-path': 'Path to sqlite database (absolute path). Leave empty for default',
        'database-type': 'Database type to use. Supported: sqlite3, postgresql. Default: sqlite3',
        'key-length': 'Length of the generated secret key. Default: 50',
    }
)
def create_settings(
    context, settings_path=None, database_path=None, database_type='sqlite3', key_length=50
):
    """
    Creates a local settings file

    :param settings_path: where settings.py is written (default: wger source folder)
    :param database_path: sqlite database location, used for database_type 'sqlite3'
    :param database_type: 'sqlite3' or 'postgresql'
    :param key_length: length of the generated SECRET_KEY
    :raises ValueError: if database_type is not one of the supported engines
    """
    if settings_path is None:
        settings_path = get_path('settings.py')
    settings_module = os.path.dirname(settings_path)
    print(f'*** Creating settings file at {settings_module}')

    if database_path is None:
        database_path = get_path('database.sqlite').as_posix()
    dbpath_value = database_path

    media_folder_path = get_path('media').as_posix()

    # Use localhost with default django port if no URL given
    url = 'http://localhost:8000'

    # Fill in the config file template
    settings_template = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'settings.tpl')
    with open(settings_template, 'r') as settings_file:
        settings_content = settings_file.read()

    if database_type == 'postgresql':
        dbengine = 'postgresql'
        dbname = 'wger'
        dbuser = 'wger'
        dbpassword = 'wger'
        dbhost = 'localhost'
        dbport = 5432
    elif database_type == 'sqlite3':
        dbengine = 'sqlite3'
        dbname = dbpath_value
        dbuser = ''
        dbpassword = ''
        dbhost = ''
        dbport = ''
    else:
        # Previously an unsupported type fell through and caused a confusing
        # NameError at format time below; fail early with a clear message.
        raise ValueError(f'Unsupported database type: {database_type!r}')

    # Create a random SECRET_KEY to put it in the settings.
    # from django.core.management.commands.startproject
    secret_key = get_random_string(key_length, 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)')

    settings_content = settings_content.format(
        dbname=dbname,
        dbpath=dbpath_value,
        dbengine=dbengine,
        dbuser=dbuser,
        dbpassword=dbpassword,
        dbhost=dbhost,
        dbport=dbport,
        default_key=secret_key,
        siteurl=url,
        media_folder_path=media_folder_path,
    )

    # exist_ok avoids the race between the existence check and the creation
    os.makedirs(settings_module, exist_ok=True)
    os.makedirs(os.path.dirname(database_path), exist_ok=True)

    with open(settings_path, 'w') as settings_file:
        settings_file.write(settings_content)
@task(help={'settings-path': 'Path to settings file (absolute path). Leave empty for default'})
def create_or_reset_admin(context, settings_path=None):
    """
    Creates an admin user or resets the password for an existing one

    Loading the users fixture either creates the 'admin' account or resets
    its password back to the default.
    """
    # Locate the settings file and configure the django environment
    setup_django_environment(settings_path)

    # can't be imported in global scope as it already requires
    # the settings module during import
    # Django
    from django.contrib.auth.models import User

    try:
        User.objects.get(username='admin')
    except User.DoesNotExist:
        print('*** Created default admin user')
    else:
        print("*** Password for user admin was reset to 'adminadmin'")

    fixture_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'core', 'fixtures/')
    call_command('loaddata', fixture_dir + 'users.json')
@task(help={'settings-path': 'Path to settings file (absolute path). Leave empty for default'})
def migrate_db(context, settings_path=None):
    """Apply every outstanding database migration."""
    # Configure django first so the management command can reach the database
    setup_django_environment(settings_path)
    call_command('migrate')
@task(help={'settings-path': 'Path to settings file (absolute path). Leave empty for default'})
def load_fixtures(context, settings_path=None):
    """
    Loads all fixtures

    The fixtures are applied in dependency order (languages and users before
    the exercise data that references them).
    """
    # Locate the settings file and configure the django environment
    setup_django_environment(settings_path)

    fixtures = (
        # Gym
        'gym.json',
        # Core
        'languages.json',
        'groups.json',
        'users.json',
        'licenses.json',
        'days_of_week.json',
        'setting_repetition_units.json',
        'setting_weight_units.json',
        # Config
        'gym_config.json',
        # Exercises
        'equipment.json',
        'muscles.json',
        'categories.json',
        'exercise-base-data.json',
        'translations.json',
        # Gym
        'gym.json',
        'gym-config.json',
        'gym-adminconfig.json',
    )
    for fixture in fixtures:
        call_command('loaddata', fixture)
@task(help={'settings-path': 'Path to settings file (absolute path). Leave empty for default'})
def load_online_fixtures(context, settings_path=None):
    """
    Downloads fixtures from server and installs them (at the moment only ingredients)

    :raises requests.HTTPError: if a fixture download fails
    """
    # Find the path to the settings and set up the django environment
    setup_django_environment(settings_path)

    for name in ('ingredients', 'weight_units', 'ingredient_units'):
        url = f'{FIXTURE_URL}{name}.json.zip'
        print(f'Downloading fixture data from {url}...')
        response = requests.get(url, stream=True)
        # Fail early instead of writing an HTML error page into the fixture file
        response.raise_for_status()

        # The server may omit Content-Length; fall back to 0 so the progress
        # bar (and the size print) still work instead of raising KeyError
        total_size = int(response.headers.get('content-length', 0))
        print(f'-> fixture size: {total_size / (1024 * 1024):.3} MB')

        # Save to temporary file and load the data. delete=False because the
        # file must be reopenable by name for loaddata (Windows restriction).
        with tempfile.NamedTemporaryFile(delete=False, suffix='.json.zip') as f:
            print(f'-> saving to temp file {f.name}')
            with tqdm(total=total_size, unit='B', unit_scale=True, desc='Downloading') as pbar:
                for data in response.iter_content(chunk_size=1024):
                    f.write(data)
                    pbar.update(len(data))

        try:
            print('Loading downloaded data, this may take a while...')
            call_command('loaddata', f.name, '--verbosity=3')
        finally:
            # Always remove the temp file, even if loaddata raised
            print('-> removing temp file')
            print('')
            os.unlink(f.name)
@task
def config_location(context):
    """
    Returns the default location for the settings file and the data folder
    """
    # Label / file pairs printed one per line below the heading
    locations = (
        ('settings', 'settings.py'),
        ('media folder', 'media'),
        ('database path', 'database.sqlite'),
    )
    print('Default locations:')
    for label, filename in locations:
        print(f'* {label}: {get_path(filename)}')
#
#
# Helper functions
#
# Note: these functions were originally in wger/utils/main.py but were moved
# here because of different import problems (the pip-installed
# package has a different sys path than the local one)
#
def get_path(file='settings.py') -> pathlib.Path:
    """
    Return the absolute path of *file* relative to the wger source folder.

    The first ``.parent`` strips this module's file name (e.g. turning
    some-checkout/wger/tasks.py into some-checkout/wger); the second one
    climbs to the source folder itself.
    """
    base_dir = pathlib.Path(__file__).parent.parent
    return (base_dir / file).resolve()
def setup_django_environment(settings_path):
    """
    Set up the django environment.

    :param settings_path: absolute path to the settings file, or None to
        use the default location next to the wger source folder.

    Exits the process with status 1 when the settings file name contains
    extra dots, since the resulting module name would not be importable.
    """
    # Use default settings if the user didn't specify something else
    if settings_path is None:
        settings_path = get_path('settings.py').as_posix()
        print(f'*** No settings given, using {settings_path}')

    # Find out file path and file name of settings and setup django
    settings_file = os.path.basename(settings_path)
    # Strip only the final extension so that extra dots remain detectable.
    # The original ''.join(settings_file.split('.')[:-1]) removed *every*
    # dot, which made the guard below unreachable dead code.
    settings_module_name = os.path.splitext(settings_file)[0]
    if '.' in settings_module_name:
        print("'.' is not an allowed character in the settings-file")
        sys.exit(1)
    settings_module_dir = os.path.dirname(settings_path)
    sys.path.append(settings_module_dir)
    os.environ[django.conf.ENVIRONMENT_VARIABLE] = settings_module_name
    django.setup()
def database_exists():
    """Detect if the database exists and is reachable.

    Returns True when a trivial query succeeds, False when the database
    has not been created yet.  Exits the process when the settings module
    itself is broken, since nothing sensible can be done in that case.
    """
    # can't be imported in global scope as they already require
    # the settings module during import
    # Django
    from django.contrib.auth.models import User
    from django.core.exceptions import ImproperlyConfigured
    from django.db import DatabaseError

    try:
        # TODO: Use another model, the User could be deactivated
        User.objects.count()
    except DatabaseError:
        return False
    except ImproperlyConfigured:
        print('Your settings file seems broken')
        # Exit non-zero so calling scripts see the failure
        # (the original exited with 0, signalling success on a fatal error)
        sys.exit(1)
    else:
        return True
| 11,922 | Python | .py | 290 | 35.455172 | 100 | 0.684484 | wger-project/wger | 3,065 | 570 | 221 | AGPL-3.0 | 9/5/2024, 5:13:18 PM (Europe/Amsterdam) |
22,393 | __init__.py | wger-project_wger/wger/__init__.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
:copyright: 2011, 2012 by OpenSlides team, see AUTHORS.
:license: GNU GPL, see LICENSE for more details.
"""
# Local
from .celery_configuration import app
# Minimum client app version able to talk to this server, same 5-tuple format
MIN_APP_VERSION = (1, 7, 4, 'final', 1)
# Server version as (major, minor, micro, stage, stage-number); consumed by get_version()
VERSION = (2, 3, 0, 'alpha', 2)
# True for a released build; when False get_version() appends '.dev0'
RELEASE = True
def get_version(version=None, release=None):
    """Derives a PEP386-compliant version number from VERSION.

    Falls back to the module-level VERSION / RELEASE when the arguments
    are None.  The main part always has three components (X.Y.Z) so the
    flutter-side version parser never sees a short form; pre-releases get
    an 'aN' / 'bN' / 'rcN' suffix and non-release builds append '.dev0'.
    """
    version = VERSION if version is None else version
    release = RELEASE if release is None else release

    assert len(version) == 5
    assert version[3] in ('alpha', 'beta', 'rc', 'final')

    # Always emit all three numeric components (see docstring)
    main = '.'.join(str(component) for component in version[:3])

    stage = version[3]
    if stage == 'final':
        suffix = ''
    else:
        suffix = {'alpha': 'a', 'beta': 'b', 'rc': 'rc'}[stage] + str(version[4])
    if not release:
        suffix += '.dev0'
    return main + suffix
| 1,206 | Python | .py | 35 | 29.828571 | 80 | 0.619828 | wger-project/wger | 3,065 | 570 | 221 | AGPL-3.0 | 9/5/2024, 5:13:18 PM (Europe/Amsterdam) |
22,394 | wsgi.py | wger-project_wger/wger/wsgi.py | """
WSGI config for workout_manager project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
# Standard Library
import os
# Django
from django.core.wsgi import get_wsgi_application
# Fall back to the top-level 'settings' module unless the environment
# already names one (setdefault never overrides an existing value)
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'settings')
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
| 1,167 | Python | .py | 24 | 47.25 | 79 | 0.820106 | wger-project/wger | 3,065 | 570 | 221 | AGPL-3.0 | 9/5/2024, 5:13:18 PM (Europe/Amsterdam) |
22,395 | celery_configuration.py | wger-project_wger/wger/celery_configuration.py | # wger Workout Manager is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# wger Workout Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
See https://docs.celeryq.dev/en/stable/django/first-steps-with-django.html
"""
# Standard Library
import os
# Third Party
from celery import Celery
# Fall back to the top-level 'settings' module unless already configured
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'settings')
app = Celery('wger')
# Read the config from the Django settings; the CELERY namespace means every
# celery-related settings key must carry the 'CELERY_' prefix
app.config_from_object('django.conf:settings', namespace='CELERY')
# discover and load tasks.py from all registered Django apps
app.autodiscover_tasks()
| 1,182 | Python | .py | 26 | 44.230769 | 79 | 0.782609 | wger-project/wger | 3,065 | 570 | 221 | AGPL-3.0 | 9/5/2024, 5:13:18 PM (Europe/Amsterdam) |
22,396 | urls.py | wger-project_wger/wger/gallery/urls.py | # -*- coding: utf-8 -*-
# This file is part of wger Workout Manager.
#
# wger Workout Manager is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# wger Workout Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# Django
from django.conf.urls import include
from django.urls import path
# wger
from wger.gallery.views import images
# 'sub patterns' for gallery images (overview, add, edit, delete)
patterns_images = [
    path(
        'overview',
        images.overview,
        name='overview',
    ),
    path(
        '<int:pk>/edit',
        images.ImageUpdateView.as_view(),
        name='edit',
    ),
    path(
        'add',
        images.ImageAddView.as_view(),
        name='add',
    ),
    path(
        '<int:pk>/delete',
        images.ImageDeleteView.as_view(),
        name='delete',
    ),
]

#
# All patterns for this app
#
urlpatterns = [
    path('', include((patterns_images, 'images'), namespace='images')),
]
| 1,351 | Python | .py | 48 | 24.270833 | 78 | 0.686199 | wger-project/wger | 3,065 | 570 | 221 | AGPL-3.0 | 9/5/2024, 5:13:18 PM (Europe/Amsterdam) |
22,397 | apps.py | wger-project_wger/wger/gallery/apps.py | from django.apps import AppConfig
class GalleryConfig(AppConfig):
    """Django app configuration for the wger image gallery app."""
    name = 'wger.gallery'
    verbose_name = 'Gallery'
| 123 | Python | .py | 4 | 27.25 | 33 | 0.760684 | wger-project/wger | 3,065 | 570 | 221 | AGPL-3.0 | 9/5/2024, 5:13:18 PM (Europe/Amsterdam) |
22,398 | __init__.py | wger-project_wger/wger/gallery/__init__.py | # -*- coding: utf-8 -*-
# This file is part of wger Workout Manager.
#
# wger Workout Manager is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# wger Workout Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Workout Manager. If not, see <http://www.gnu.org/licenses/>.
# wger
from wger import get_version
# PEP386-compliant version string derived from the wger package version
VERSION = get_version()
# NOTE(review): default_app_config is deprecated since Django 3.2 — confirm
# whether it can be dropped for the Django versions this project supports
default_app_config = 'wger.gallery.apps.GalleryConfig'
| 857 | Python | .py | 19 | 43.894737 | 78 | 0.776978 | wger-project/wger | 3,065 | 570 | 221 | AGPL-3.0 | 9/5/2024, 5:13:18 PM (Europe/Amsterdam) |
22,399 | forms.py | wger-project_wger/wger/gallery/forms.py | # Django
from django.forms import (
DateField,
ModelForm,
widgets,
)
# wger
from wger.gallery.models import Image
from wger.utils.constants import DATE_FORMATS
from wger.utils.widgets import Html5DateInput
class ImageForm(ModelForm):
    """Model form for adding and editing gallery images."""

    # Accept the project's supported date formats and render an HTML5 date input
    date = DateField(input_formats=DATE_FORMATS, widget=Html5DateInput())

    class Meta:
        model = Image
        exclude = []
        widgets = {
            # The owning user is filled in by the view, not chosen in the form
            'user': widgets.HiddenInput(),
        }
| 456 | Python | .py | 18 | 20.555556 | 73 | 0.693548 | wger-project/wger | 3,065 | 570 | 221 | AGPL-3.0 | 9/5/2024, 5:13:18 PM (Europe/Amsterdam) |