signature: string, lengths 8-3.44k
body: string, lengths 0-1.41M
docstring: string, lengths 1-122k
id: string, lengths 5-17
def _r(self, value):
return round(value, self.precision)<EOL>
Round a value according to the defined precision
f10740:c0:m9
def load_module(name, filename):
if sys.version_info < (<NUM_LIT:3>, <NUM_LIT:5>):<EOL><INDENT>import imp<EOL>import warnings<EOL>with warnings.catch_warnings(): <EOL><INDENT>warnings.simplefilter("<STR_LIT:ignore>", RuntimeWarning)<EOL>return imp.load_source(name, filename)<EOL><DEDENT><DEDENT>else:<EOL><INDENT>from importlib.machinery import SourceFileLoader<EOL>loader = SourceFileLoader(name, filename)<EOL>return loader.load_module()<EOL><DEDENT>
Load a module under the given name from its filename
f10741:m0
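Since imp is deprecated, an equivalent sketch of the same file-loading idea on Python 3.5+ uses importlib.util (the function and file names below are hypothetical, not from the record above):

    import importlib.util

    def load_module_from_file(name, filename):
        # Build a module spec from the file path, then execute the module.
        spec = importlib.util.spec_from_file_location(name, filename)
        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)
        return module

    # Usage (hypothetical file): bench = load_module_from_file('bench_example', 'bench_example.py')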
def start(self):
pass<EOL>
Hook called once on run start
f10743:c0:m2
def before_class(self, bench):
pass<EOL>
Hook called once before each benchmark class
f10743:c0:m3
def after_class(self, bench):
pass<EOL>
Hook called once after each benchmark class
f10743:c0:m4
def before_method(self, bench, method):
pass<EOL>
Hook called once before each benchmark method
f10743:c0:m5
def after_method(self, bench, method):
pass<EOL>
Hook called once after each benchmark method
f10743:c0:m6
def progress(self, bench, method, times):
pass<EOL>
Hook called after each benchmark method call
f10743:c0:m7
def end(self):
pass<EOL>
Hook called once on run end
f10743:c0:m8
def summary(self):
out = {}<EOL>for bench in self.runner.runned:<EOL><INDENT>key = self.key(bench)<EOL>runs = {}<EOL>for method, results in bench.results.items():<EOL><INDENT>mean = results.total / bench.times<EOL>name = bench.label_for(method)<EOL>runs[method] = {<EOL>'<STR_LIT:name>': name,<EOL>'<STR_LIT>': results.total,<EOL>'<STR_LIT>': mean<EOL>}<EOL><DEDENT>out[key] = {<EOL>'<STR_LIT:name>': bench.label,<EOL>'<STR_LIT>': bench.times,<EOL>'<STR_LIT>': runs<EOL>}<EOL><DEDENT>return out<EOL>
Compute the execution summary
f10743:c0:m9
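The string keys in the body above are masked, so purely as an illustration, the kind of nested dict this summary produces might look like the sketch below (all key names are assumptions):

    def summarize(benchmarks):
        # benchmarks: list of dicts with a label, a run count and per-method totals
        out = {}
        for bench in benchmarks:
            runs = {}
            for method, total in bench['results'].items():
                runs[method] = {'total': total, 'mean': total / bench['times']}
            out[bench['label']] = {'times': bench['times'], 'runs': runs}
        return out

    print(summarize([{'label': 'Fib', 'times': 5, 'results': {'bench_fib': 0.25}}]))
    # {'Fib': {'times': 5, 'runs': {'bench_fib': {'total': 0.25, 'mean': 0.05}}}}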
def key(self, bench):
return '<STR_LIT>'.format(bench=bench)<EOL>
Generate a report key from a benchmark instance
f10743:c0:m10
def __init__(self, filename, **kwargs):
self.filename = filename<EOL>super(FileReporter, self).__init__(**kwargs)<EOL>
:param filename: the output file name :type filename: string
f10743:c1:m0
def end(self):
dirname = os.path.dirname(self.filename)<EOL>if dirname and not os.path.exists(dirname):<EOL><INDENT>os.makedirs(dirname)<EOL><DEDENT>with open(self.filename, '<STR_LIT:w>') as out:<EOL><INDENT>self.out = out<EOL>self.output(out)<EOL>self.out = None<EOL><DEDENT>
Dump the report into the output file. If the file's directory does not exist, it will be created. The open file is then passed as a parameter to :meth:`~minibench.report.FileReporter.output`.
f10743:c1:m1
def output(self, out):
raise NotImplementedError('<STR_LIT>')<EOL>
Serialize the report into the open file. Child classes should implement this method. :param out: an open file object to serialize into. :type out: file
f10743:c1:m2
def line(self, text='<STR_LIT>'):
self.out.write(text)<EOL>self.out.write('<STR_LIT:\n>')<EOL>
A simple helper to write a line terminated with `\n`
f10743:c1:m3
def with_sizes(self, *headers):
if len(headers) != <NUM_LIT:5>:<EOL><INDENT>raise ValueError('<STR_LIT>')<EOL><DEDENT>summary = self.summary()<EOL>for row in summary.values():<EOL><INDENT>sizes = [len(header) for header in headers]<EOL>sizes[<NUM_LIT:0>] = max(sizes[<NUM_LIT:0>], len(row['<STR_LIT:name>']))<EOL>max_length = max(len(r['<STR_LIT:name>']) for r in row['<STR_LIT>'].values())<EOL>sizes[<NUM_LIT:1>] = max(sizes[<NUM_LIT:1>], max_length)<EOL>sizes[<NUM_LIT:2>] = max(sizes[<NUM_LIT:2>], len(str(row['<STR_LIT>'])))<EOL>for idx, field in [(<NUM_LIT:3>, '<STR_LIT>'), (<NUM_LIT:4>, '<STR_LIT>')]:<EOL><INDENT>float_len = lambda r: len(self.float(r[field]))<EOL>max_length = max(float_len(r) for r in row['<STR_LIT>'].values())<EOL>sizes[idx] = max(sizes[idx], max_length)<EOL><DEDENT>row['<STR_LIT>'] = sizes<EOL><DEDENT>return summary<EOL>
Compute the report summary and add the computed column sizes
f10743:c4:m0
def humanize(text):
words = []<EOL>for part in text.split('<STR_LIT:_>'):<EOL><INDENT>for word in RE_CAMEL.findall(part) or [part]:<EOL><INDENT>words.append(word.lower())<EOL><DEDENT><DEDENT>words[<NUM_LIT:0>] = words[<NUM_LIT:0>].title()<EOL>return '<STR_LIT:U+0020>'.join(words)<EOL>
Transform code conventions to human readable strings
f10744:m0
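A self-contained sketch of the same humanize idea; the RE_CAMEL constant is not shown in this record, so the regex below is an assumption:

    import re

    RE_CAMEL = re.compile(r'[A-Z]?[a-z0-9]+|[A-Z]+(?![a-z])')  # assumed camel-case splitter

    def humanize(text):
        words = []
        for part in text.split('_'):
            for word in RE_CAMEL.findall(part) or [part]:
                words.append(word.lower())
        words[0] = words[0].title()
        return ' '.join(words)

    print(humanize('BenchmarkFibonacci'))   # -> 'Benchmark fibonacci'
    print(humanize('bench_fib_recursive'))  # -> 'Bench fib recursive'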
def __init__(self, *filenames, **kwargs):
self.benchmarks = []<EOL>self.runned = []<EOL>self.reporters = []<EOL>self.debug = kwargs.get('<STR_LIT>', False)<EOL>for filename in filenames:<EOL><INDENT>module = self.load_module(filename)<EOL>benchmarks = self.load_from_module(module)<EOL>self.benchmarks.extend(benchmarks)<EOL><DEDENT>for reporter in kwargs.get('<STR_LIT>', []):<EOL><INDENT>if inspect.isclass(reporter) and issubclass(reporter, BaseReporter):<EOL><INDENT>reporter = reporter()<EOL><DEDENT>if isinstance(reporter, BaseReporter):<EOL><INDENT>reporter.init(self)<EOL>self.reporters.append(reporter)<EOL><DEDENT>else:<EOL><INDENT>log.warning('<STR_LIT>', reporter)<EOL><DEDENT><DEDENT>
:param filenames: the benchmark file names :type filenames: string :param reporters: the reporter classes or instances to run :type reporters: list :param debug: Run in debug mode if ``True`` :type debug: bool
f10745:c0:m0
def run(self, **kwargs):
self.report_start()<EOL>for bench in self.benchmarks:<EOL><INDENT>bench = bench(before=self.report_before_method,<EOL>after=self.report_after_method,<EOL>after_each=self.report_progress,<EOL>debug=self.debug,<EOL>**kwargs)<EOL>self.report_before_class(bench)<EOL>bench.run()<EOL>self.report_after_class(bench)<EOL>self.runned.append(bench)<EOL><DEDENT>self.report_end()<EOL>
Run all benchmarks. Extra kwargs are passed to the benchmark constructors.
f10745:c0:m1
def load_module(self, filename):
if not isinstance(filename, string_types):<EOL><INDENT>return filename<EOL><DEDENT>basename = os.path.splitext(os.path.basename(filename))[<NUM_LIT:0>]<EOL>basename = basename.replace('<STR_LIT>', '<STR_LIT>')<EOL>modulename = '<STR_LIT>'.format(basename)<EOL>return load_module(modulename, filename)<EOL>
Load a benchmark module from file
f10745:c0:m2
def load_from_module(self, module):
benchmarks = []<EOL>for name in dir(module):<EOL><INDENT>obj = getattr(module, name)<EOL>if (inspect.isclass(obj) and issubclass(obj, Benchmark)<EOL>and obj != Benchmark):<EOL><INDENT>benchmarks.append(obj)<EOL><DEDENT><DEDENT>return benchmarks<EOL>
Load all benchmarks from a given module
f10745:c0:m3
@property<EOL><INDENT>def label(self):<DEDENT>
if self.__doc__ and self.__doc__.strip():<EOL><INDENT>return self.__doc__.strip().splitlines()[<NUM_LIT:0>]<EOL><DEDENT>return humanize(self.__class__.__name__)<EOL>
A human readable label
f10746:c1:m1
def label_for(self, name):
method = getattr(self, name)<EOL>if method.__doc__ and method.__doc__.strip():<EOL><INDENT>return method.__doc__.strip().splitlines()[<NUM_LIT:0>]<EOL><DEDENT>return humanize(name.replace(self._prefix, '<STR_LIT>'))<EOL>
Get a human readable label for a method given its name
f10746:c1:m2
def before_class(self):
pass<EOL>
Hook called before each class
f10746:c1:m4
def before(self):
pass<EOL>
Hook called once before each method
f10746:c1:m5
def before_each(self):
pass<EOL>
Hook called before each method call
f10746:c1:m6
def after_each(self):
pass<EOL>
Hook called after each method call
f10746:c1:m7
def after(self):
pass<EOL>
Hook called once after each method
f10746:c1:m8
def after_class(self):
pass<EOL>
Hook called after each class
f10746:c1:m9
def run(self):
tests = self._collect()<EOL>if not tests:<EOL><INDENT>return<EOL><DEDENT>self.times<EOL>self.before_class()<EOL>for test in tests:<EOL><INDENT>func = getattr(self, test)<EOL>results = self.results[test] = Result()<EOL>self._before(self, test)<EOL>self.before()<EOL>for i in range(self.times):<EOL><INDENT>self._before_each(self, test, i)<EOL>result = self._run_one(func)<EOL>results.total += result.duration<EOL>if result.success:<EOL><INDENT>results.has_success = True<EOL><DEDENT>else:<EOL><INDENT>results.has_errors = True<EOL><DEDENT>self._after_each(self, test, i)<EOL>if self.debug and not result.success:<EOL><INDENT>results.error = result.result<EOL>break<EOL><DEDENT><DEDENT>self.after()<EOL>self._after(self, test)<EOL><DEDENT>self.after_class()<EOL>
Collect all tests to run and run them. Each method will be run :attr:`Benchmark.times` times.
f10746:c1:m12
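A self-contained sketch of the hook ordering this run loop implies (a toy class for illustration, not the real minibench Benchmark):

    import time

    class TinyBench:
        times = 3

        def before_class(self): print('before_class')
        def before(self): print('  before')
        def before_each(self): pass
        def after_each(self): pass
        def after(self): print('  after')
        def after_class(self): print('after_class')

        def bench_sleep(self):
            time.sleep(0.001)

        def run(self):
            # Collect bench_* methods, then run each one `times` times with the hooks around it.
            tests = [n for n in dir(self) if n.startswith('bench_')]
            self.before_class()
            for name in tests:
                func = getattr(self, name)
                self.before()
                total = 0.0
                for _ in range(self.times):
                    self.before_each()
                    start = time.perf_counter()
                    func()
                    total += time.perf_counter() - start
                    self.after_each()
                self.after()
                print('  %s: total=%.4fs mean=%.4fs' % (name, total, total / self.times))
            self.after_class()

    TinyBench().run()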
def _sanitize_numbers(uncleaned_numbers):
cleaned_numbers = []<EOL>for x in uncleaned_numbers:<EOL><INDENT>try:<EOL><INDENT>cleaned_numbers.append(int(x))<EOL><DEDENT>except ValueError:<EOL><INDENT>cleaned_numbers.append(x)<EOL><DEDENT><DEDENT>return cleaned_numbers<EOL>
Convert strings to integers if possible
f10748:m1
def _handle_negatives(numbers):
min_number = min(filter(lambda x : type(x)==int,numbers))<EOL>if min_number < <NUM_LIT:0>:<EOL><INDENT>return [x+abs(min_number) if type(x)==int else x for x in numbers]<EOL><DEDENT>else:<EOL><INDENT>return numbers<EOL><DEDENT>
Add the absolute value of the minimum negative number to all numbers in the list so that every element becomes >= 0
f10748:m2
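The same shift in isolation (non-integers pass through untouched):

    def handle_negatives(numbers):
        ints = [x for x in numbers if isinstance(x, int)]
        minimum = min(ints)
        if minimum < 0:
            return [x + abs(minimum) if isinstance(x, int) else x for x in numbers]
        return numbers

    print(handle_negatives([-2, 0, 3, 'n/a']))  # -> [0, 2, 5, 'n/a']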
def _draw_tickgram(numbers):
max_number = max(filter(lambda x : type(x)==int,numbers))<EOL>if max_number == <NUM_LIT:0> :<EOL><INDENT>return upticks[<NUM_LIT:0>]*len(numbers)<EOL><DEDENT>else:<EOL><INDENT>normalized_numbers = [ float(x)/max_number if type(x)==int else x for x in numbers ]<EOL>upticks_indexes = [ int(math.ceil(x*len(upticks))) if type(x)==float else x for x in normalized_numbers ]<EOL>return '<STR_LIT>'.join([ '<STR_LIT:U+0020>' if type(x)==str else upticks[x-<NUM_LIT:1>] if x != <NUM_LIT:0> else upticks[<NUM_LIT:0>] for x in upticks_indexes ])<EOL><DEDENT>
Takes a list of integers and generates the equivalent list of ticks corresponding to each number
f10748:m3
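A sketch of the tick mapping with concrete characters; the upticks constant and join string are masked above, so the sparkline glyphs here are an assumption:

    import math

    upticks = '▁▂▃▄▅▆▇█'  # assumed glyphs; the original constant is masked above

    def draw_tickgram(numbers):
        max_number = max(x for x in numbers if isinstance(x, int))
        if max_number == 0:
            return upticks[0] * len(numbers)
        normalized = [float(x) / max_number if isinstance(x, int) else x for x in numbers]
        indexes = [int(math.ceil(x * len(upticks))) if isinstance(x, float) else x for x in normalized]
        return ''.join(' ' if isinstance(x, str) else (upticks[x - 1] if x != 0 else upticks[0])
                       for x in indexes)

    print(draw_tickgram([1, 2, 4, 8]))      # -> '▁▂▄█'
    print(draw_tickgram([0, 4, 'n/a', 8]))  # -> '▁▄ █'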
def _write_with_fallback(s, write, fileobj):
try:<EOL><INDENT>write(s)<EOL>return write<EOL><DEDENT>except UnicodeEncodeError:<EOL><INDENT>pass<EOL><DEDENT>enc = locale.getpreferredencoding()<EOL>try:<EOL><INDENT>Writer = codecs.getwriter(enc)<EOL><DEDENT>except LookupError:<EOL><INDENT>Writer = codecs.getwriter(_DEFAULT_ENCODING)<EOL><DEDENT>f = Writer(fileobj)<EOL>write = f.write<EOL>try:<EOL><INDENT>write(s)<EOL>return write<EOL><DEDENT>except UnicodeEncodeError:<EOL><INDENT>Writer = codecs.getwriter('<STR_LIT>')<EOL>f = Writer(fileobj)<EOL>write = f.write<EOL><DEDENT>write(s)<EOL>return write<EOL>
Write the supplied string with the given write function like ``write(s)``, but use a writer for the locale's preferred encoding in case of a UnicodeEncodeError. Failing that, attempt to write with 'utf-8' or 'latin-1'.
f10756:m0
def isatty(file):
if (multiprocessing.current_process().name != '<STR_LIT>' or<EOL>threading.current_thread().getName() != '<STR_LIT>'):<EOL><INDENT>return False<EOL><DEDENT>if (OutStream is not None and<EOL>isinstance(file, OutStream) and file.name == '<STR_LIT>'):<EOL><INDENT>return True<EOL><DEDENT>elif hasattr(file, '<STR_LIT>'):<EOL><INDENT>return file.isatty()<EOL><DEDENT>return False<EOL>
Returns `True` if `file` is a tty. Most built-in Python file-like objects have an `isatty` member, but some user-defined types may not, so this assumes those are not ttys.
f10756:m1
def color_text(text, color):
color_mapping = {<EOL>'<STR_LIT>': '<STR_LIT>',<EOL>'<STR_LIT>': '<STR_LIT>',<EOL>'<STR_LIT>': '<STR_LIT>',<EOL>'<STR_LIT>': '<STR_LIT>',<EOL>'<STR_LIT>': '<STR_LIT>',<EOL>'<STR_LIT>': '<STR_LIT>',<EOL>'<STR_LIT>': '<STR_LIT>',<EOL>'<STR_LIT>': '<STR_LIT>',<EOL>'<STR_LIT:default>': '<STR_LIT>',<EOL>'<STR_LIT>': '<STR_LIT>',<EOL>'<STR_LIT>': '<STR_LIT>',<EOL>'<STR_LIT>': '<STR_LIT>',<EOL>'<STR_LIT>': '<STR_LIT>',<EOL>'<STR_LIT>': '<STR_LIT>',<EOL>'<STR_LIT>': '<STR_LIT>',<EOL>'<STR_LIT>': '<STR_LIT>',<EOL>'<STR_LIT>': '<STR_LIT>'}<EOL>if sys.platform == '<STR_LIT:win32>' and OutStream is None:<EOL><INDENT>return text<EOL><DEDENT>color_code = color_mapping.get(color, '<STR_LIT>')<EOL>return '<STR_LIT>'.format(color_code, text)<EOL>
Returns a string wrapped in ANSI color codes for coloring the text in a terminal:: colored_text = color_text('Here is a message', 'blue') This won't actually affect the text until it is printed to the terminal. Parameters ---------- text : str The string to return, bounded by the color codes. color : str An ANSI terminal color name. Must be one of: black, red, green, brown, blue, magenta, cyan, lightgrey, default, darkgrey, lightred, lightgreen, yellow, lightblue, lightmagenta, lightcyan, white, or '' (the empty string).
f10756:m2
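The masked mapping values are standard ANSI SGR codes; a minimal sketch of the wrapping (only a few colors shown, and the exact codes used by the original are an assumption):

    color_mapping = {'red': '0;31', 'green': '0;32', 'blue': '0;34', 'default': '0;39'}

    def color_text(text, color):
        code = color_mapping.get(color, '0;39')
        return '\033[{0}m{1}\033[0m'.format(code, text)

    print(color_text('Here is a message', 'blue'))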
def _decode_preferred_encoding(s):
enc = locale.getpreferredencoding()<EOL>try:<EOL><INDENT>try:<EOL><INDENT>return s.decode(enc)<EOL><DEDENT>except LookupError:<EOL><INDENT>enc = _DEFAULT_ENCODING<EOL><DEDENT>return s.decode(enc)<EOL><DEDENT>except UnicodeDecodeError:<EOL><INDENT>return s.decode('<STR_LIT>')<EOL><DEDENT>
Decode the supplied byte string using the preferred encoding for the locale (`locale.getpreferredencoding`) or, if the default encoding is invalid, fall back first on utf-8, then on latin-1 if the message cannot be decoded with utf-8.
f10756:m3
def color_print(*args, **kwargs):
file = kwargs.get('<STR_LIT:file>', stdio.stdout)<EOL>end = kwargs.get('<STR_LIT:end>', '<STR_LIT:\n>')<EOL>write = file.write<EOL>if isatty(file):<EOL><INDENT>for i in range(<NUM_LIT:0>, len(args), <NUM_LIT:2>):<EOL><INDENT>msg = args[i]<EOL>if i + <NUM_LIT:1> == len(args):<EOL><INDENT>color = '<STR_LIT>'<EOL><DEDENT>else:<EOL><INDENT>color = args[i + <NUM_LIT:1>]<EOL><DEDENT>if color:<EOL><INDENT>msg = color_text(msg, color)<EOL><DEDENT>if not IS_PY3 and isinstance(msg, bytes):<EOL><INDENT>msg = _decode_preferred_encoding(msg)<EOL><DEDENT>write = _write_with_fallback(msg, write, file)<EOL><DEDENT>write(end)<EOL><DEDENT>else:<EOL><INDENT>for i in range(<NUM_LIT:0>, len(args), <NUM_LIT:2>):<EOL><INDENT>msg = args[i]<EOL>if not IS_PY3 and isinstance(msg, bytes):<EOL><INDENT>msg = _decode_preferred_encoding(msg)<EOL><DEDENT>write(msg)<EOL><DEDENT>write(end)<EOL><DEDENT>
Prints colors and styles to the terminal using ANSI escape sequences. :: color_print('This is the color ', 'default', 'GREEN', 'green') Parameters ---------- positional args : strings The positional arguments come in pairs (*msg*, *color*), where *msg* is the string to display and *color* is the color to display it in. *color* is an ANSI terminal color name. Must be one of: black, red, green, brown, blue, magenta, cyan, lightgrey, default, darkgrey, lightred, lightgreen, yellow, lightblue, lightmagenta, lightcyan, white, or '' (the empty string). file : writeable file-like object, optional Where to write to. Defaults to `sys.stdout`. If file is not a tty (as determined by calling its `isatty` member, if one exists), no coloring will be included. end : str, optional The ending of the message. Defaults to ``\\n``. The end will be printed after resetting any color or font state.
f10756:m4
def _indent(text, level=<NUM_LIT:1>):
prefix = '<STR_LIT:U+0020>' * (<NUM_LIT:4> * level)<EOL>def prefixed_lines():<EOL><INDENT>for line in text.splitlines(True):<EOL><INDENT>yield (prefix + line if line.strip() else line)<EOL><DEDENT><DEDENT>return '<STR_LIT>'.join(prefixed_lines())<EOL>
Does a proper indenting for Sphinx rst
f10757:m0
def _format_templates(name, command, templates):
yield '<STR_LIT>'.format(name)<EOL>yield _indent('<STR_LIT>')<EOL>yield _indent('<STR_LIT>')<EOL>yield '<STR_LIT>'<EOL>yield _indent('<STR_LIT>')<EOL>yield _indent('<STR_LIT>')<EOL>yield _indent('<STR_LIT>')<EOL>for key, var in templates.items():<EOL><INDENT>kwargs = command.lookup_keys(key)<EOL>yield _indent('<STR_LIT>'.format(key))<EOL>yield _indent('<STR_LIT>'.format(var))<EOL>yield _indent('<STR_LIT>'.format('<STR_LIT:U+002CU+0020>'.join(kwargs)))<EOL><DEDENT>yield '<STR_LIT>'<EOL>
Creates a list-table directive for a set of defined environment variables Parameters: name (str): The name of the config section command (object): The sdss_access path instance templates (dict): A dictionary of the path templates Yields: A string rst-formatted list-table directive
f10757:m1
def _load_module(self, module_path):
<EOL>module_path = str(module_path)<EOL>try:<EOL><INDENT>module_name, attr_name = module_path.split('<STR_LIT::>', <NUM_LIT:1>)<EOL><DEDENT>except ValueError: <EOL><INDENT>raise self.error('<STR_LIT>'.format(module_path))<EOL><DEDENT>try:<EOL><INDENT>mod = __import__(module_name, globals(), locals(), [attr_name])<EOL><DEDENT>except (Exception, SystemExit) as exc: <EOL><INDENT>err_msg = '<STR_LIT>'.format(attr_name, module_name)<EOL>if isinstance(exc, SystemExit):<EOL><INDENT>err_msg += '<STR_LIT>'<EOL><DEDENT>else:<EOL><INDENT>err_msg += '<STR_LIT>'.format(traceback.format_exc())<EOL><DEDENT>raise self.error(err_msg)<EOL><DEDENT>if not hasattr(mod, attr_name):<EOL><INDENT>raise self.error('<STR_LIT>'.format(module_name, attr_name))<EOL><DEDENT>return getattr(mod, attr_name)<EOL>
Load the module.
f10757:c0:m0
def _generate_nodes(self, name, command, templates=None):
<EOL>source_name = name<EOL>section = nodes.section(<EOL>'<STR_LIT>',<EOL>nodes.title(text=name),<EOL>ids=[nodes.make_id(name)],<EOL>names=[nodes.fully_normalize_name(name)])<EOL>result = statemachine.ViewList()<EOL>if templates:<EOL><INDENT>lines = _format_templates(name, command, command.templates)<EOL><DEDENT>for line in lines:<EOL><INDENT>result.append(line, source_name)<EOL><DEDENT>self.state.nested_parse(result, <NUM_LIT:0>, section)<EOL>return [section]<EOL>
Generate the relevant Sphinx nodes. Generates a section for the Tree datamodel. Formats a tree section as a list-table directive. Parameters: name (str): The name of the config to be documented, e.g. 'sdsswork' command (object): The loaded module templates (bool): If True, generate a section for the path templates Returns: A section docutil node
f10757:c0:m1
def print_exception_formatted(type, value, tb):
tbtext = '<STR_LIT>'.join(traceback.format_exception(type, value, tb))<EOL>lexer = get_lexer_by_name('<STR_LIT>', stripall=True)<EOL>formatter = TerminalFormatter()<EOL>sys.stderr.write(highlight(tbtext, lexer, formatter))<EOL>
A custom hook for printing tracebacks with colours.
f10759:m1
def colored_formatter(record):
colours = {'<STR_LIT:info>': ('<STR_LIT>', '<STR_LIT>'),<EOL>'<STR_LIT>': ('<STR_LIT>', '<STR_LIT>'),<EOL>'<STR_LIT>': ('<STR_LIT>', '<STR_LIT>'),<EOL>'<STR_LIT>': ('<STR_LIT>', '<STR_LIT>'),<EOL>'<STR_LIT:error>': ('<STR_LIT>', '<STR_LIT>')}<EOL>levelname = record.levelname.lower()<EOL>if levelname == '<STR_LIT:error>':<EOL><INDENT>return<EOL><DEDENT>if levelname.lower() in colours:<EOL><INDENT>levelname_color = colours[levelname][<NUM_LIT:0>]<EOL>header = color_text('<STR_LIT>'.format(levelname.upper()), levelname_color)<EOL><DEDENT>message = '<STR_LIT>'.format(record.msg)<EOL>warning_category = re.match(r'<STR_LIT>', message)<EOL>if warning_category is not None:<EOL><INDENT>warning_category_colour = color_text(warning_category.groups()[<NUM_LIT:0>], '<STR_LIT>')<EOL>message = message.replace(warning_category.groups()[<NUM_LIT:0>], warning_category_colour)<EOL><DEDENT>sub_level = re.match(r'<STR_LIT>', message)<EOL>if sub_level is not None:<EOL><INDENT>sub_level_name = color_text(sub_level.groups()[<NUM_LIT:0>], '<STR_LIT>')<EOL>message = '<STR_LIT>'.format(sub_level_name, '<STR_LIT>'.join(sub_level.groups()[<NUM_LIT:1>:]))<EOL><DEDENT>sys.__stdout__.write('<STR_LIT>'.format(header, message))<EOL>sys.__stdout__.flush()<EOL>return<EOL>
Prints log messages with colours.
f10759:m2
def _catch_exceptions(self, exctype, value, tb):
<EOL>self.error('<STR_LIT>', exc_info=(exctype, value, tb))<EOL>print_exception_formatted(exctype, value, tb)<EOL>
Catches all exceptions and logs them.
f10759:c2:m2
def _set_defaults(self, log_level=logging.INFO, redirect_stdout=False):
<EOL>for handler in self.handlers[:]:<EOL><INDENT>self.removeHandler(handler)<EOL><DEDENT>self.setLevel(logging.DEBUG)<EOL>self.fh = None<EOL>self.sh = logging.StreamHandler()<EOL>self.sh.emit = colored_formatter<EOL>self.addHandler(self.sh)<EOL>self.sh.setLevel(log_level)<EOL>if redirect_stdout:<EOL><INDENT>sys.stdout = LoggerStdout(self._print)<EOL><DEDENT>sys.excepthook = self._catch_exceptions<EOL>
Reset logger to its initial state.
f10759:c2:m3
def start_file_logger(self, name, log_file_level=logging.DEBUG, log_file_path='<STR_LIT>'):
log_file_path = os.path.expanduser(log_file_path) / '<STR_LIT>'.format(name)<EOL>logdir = log_file_path.parent<EOL>try:<EOL><INDENT>logdir.mkdir(parents=True, exist_ok=True)<EOL>if log_file_path.exists():<EOL><INDENT>strtime = datetime.datetime.utcnow().strftime('<STR_LIT>')<EOL>shutil.move(log_file_path, log_file_path + '<STR_LIT:.>' + strtime)<EOL><DEDENT>self.fh = TimedRotatingFileHandler(str(log_file_path), when='<STR_LIT>', utc=True)<EOL>self.fh.suffix = '<STR_LIT>'<EOL><DEDENT>except (IOError, OSError) as ee:<EOL><INDENT>warnings.warn('<STR_LIT>'<EOL>'<STR_LIT>'.format(log_file_path, ee), RuntimeWarning)<EOL><DEDENT>else:<EOL><INDENT>self.fh.setFormatter(fmt)<EOL>self.addHandler(self.fh)<EOL>self.fh.setLevel(log_file_level)<EOL><DEDENT>self.log_filename = log_file_path<EOL>
Start file logging.
f10759:c2:m4
def remote(self, remote_base=None, username=None, password=None):
if remote_base is not None:<EOL><INDENT>self.remote_base = remote_base<EOL><DEDENT>self._remote = True<EOL>self.set_auth(username=username, password=password)<EOL>if self.auth.ready():<EOL><INDENT>passman = HTTPPasswordMgrWithDefaultRealm()<EOL>passman.add_password(None, self.remote_base, self.auth.username, self.auth.password)<EOL>authhandler = HTTPBasicAuthHandler(passman)<EOL>opener = build_opener(authhandler)<EOL>install_opener(opener)<EOL><DEDENT>
Configures remote access Parameters ---------- remote_base : str base URL path for remote repository username : str user name for remote repository password : str password for the remote repository
f10763:c0:m2
def local(self):
self._remote = False<EOL>
Configures back to local access
f10763:c0:m3
def get(self, filetype, **kwargs):
path = self.full(filetype, **kwargs)<EOL>if path:<EOL><INDENT>if self._remote:<EOL><INDENT>self.download_url_to_path(self.url(filetype, **kwargs), path)<EOL><DEDENT><DEDENT>else:<EOL><INDENT>print("<STR_LIT>" % filetype)<EOL><DEDENT>
Returns file name, downloading if remote access configured. Parameters ---------- filetype : str type of file keyword arguments : keywords to fully specify path Notes ----- Path templates are defined in $DIMAGE_DIR/data/dimage_paths.ini
f10763:c0:m4
def download_url_to_path(self, url, path, force=False):
path_exists = isfile(path)<EOL>if not path_exists or force:<EOL><INDENT>dir = dirname(path)<EOL>if not exists(dir):<EOL><INDENT>if self.verbose:<EOL><INDENT>print("<STR_LIT>" % dir)<EOL><DEDENT>makedirs(dir)<EOL><DEDENT>try:<EOL><INDENT>u = urlopen(url)<EOL><DEDENT>except HTTPError as e:<EOL><INDENT>u = None<EOL>print("<STR_LIT>" % e.code)<EOL><DEDENT>if u:<EOL><INDENT>with open(path, '<STR_LIT:wb>') as file:<EOL><INDENT>meta = u.info()<EOL>meta_func = meta.getheaders if hasattr(meta, '<STR_LIT>') else meta.get_all<EOL>meta_length = meta_func("<STR_LIT>")<EOL>file_size = None<EOL>if meta_length:<EOL><INDENT>file_size = int(meta_length[<NUM_LIT:0>])<EOL><DEDENT>if self.verbose:<EOL><INDENT>print("<STR_LIT>".format(url, file_size))<EOL><DEDENT>file_size_dl = <NUM_LIT:0><EOL>block_sz = <NUM_LIT><EOL>while True:<EOL><INDENT>buffer = u.read(block_sz)<EOL>if not buffer:<EOL><INDENT>break<EOL><DEDENT>file_size_dl += len(buffer)<EOL>file.write(buffer)<EOL><DEDENT><DEDENT>if self.verbose:<EOL><INDENT>if path_exists:<EOL><INDENT>print("<STR_LIT>" % path)<EOL><DEDENT>else:<EOL><INDENT>print("<STR_LIT>" % path)<EOL><DEDENT><DEDENT><DEDENT><DEDENT>elif self.verbose:<EOL><INDENT>print("<STR_LIT>" % path)<EOL><DEDENT>
Download a file from url via http, and put it at path Parameters ---------- url : str URL of file to download path : str local path to put file in
f10763:c0:m5
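A stripped-down sketch of the chunked download loop shown above, using urllib.request; the block size is an assumption (the original value is masked), and the URL/path in the usage note are placeholders:

    import os
    from urllib.request import urlopen

    def download(url, path, block_sz=16384):
        # Create the destination directory, then stream the response in fixed-size chunks.
        os.makedirs(os.path.dirname(path) or '.', exist_ok=True)
        with urlopen(url) as u, open(path, 'wb') as out:
            while True:
                buffer = u.read(block_sz)
                if not buffer:
                    break
                out.write(buffer)

    # Usage (hypothetical URL): download('https://example.org/file.fits', 'data/file.fits')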
def remote(self, username=None, password=None, inquire=None):
self.set_netloc(sdss=True) <EOL>self.set_auth(username=username, password=password, inquire=inquire)<EOL>self.set_netloc(dtn=not self.public)<EOL>self.set_remote_base(scheme="<STR_LIT>")<EOL>
Configures remote access
f10765:c0:m2
def reset(self):
<EOL>if self.stream:<EOL><INDENT>self.stream.reset()<EOL><DEDENT>if self.initial_stream:<EOL><INDENT>self.initial_stream.reset()<EOL><DEDENT>
Reset all streams
f10765:c0:m4
def add(self, filetype, **kwargs):
location = self.location(filetype, **kwargs)<EOL>source = self.url(filetype, sasdir='<STR_LIT>' if not self.public else '<STR_LIT>', **kwargs)<EOL>if '<STR_LIT>' not in kwargs:<EOL><INDENT>destination = self.full(filetype, **kwargs)<EOL><DEDENT>else:<EOL><INDENT>destination = kwargs.get('<STR_LIT>')<EOL><DEDENT>if location and source and destination:<EOL><INDENT>self.initial_stream.append_task(location=location, source=source, destination=destination)<EOL><DEDENT>else:<EOL><INDENT>print("<STR_LIT>" % filetype)<EOL><DEDENT>
Adds a filepath into the list of tasks to download
f10765:c0:m5
def set_stream(self):
if not self.auth:<EOL><INDENT>raise AccessError("<STR_LIT>")<EOL><DEDENT>elif not self.initial_stream.task:<EOL><INDENT>raise AccessError("<STR_LIT>")<EOL><DEDENT>else:<EOL><INDENT>self.stream = self.get_stream()<EOL>self.stream.source = join(self.remote_base, '<STR_LIT>') if self.remote_base and not self.public else join(self.remote_base, self.release) if self.release else self.remote_base<EOL>self.stream.destination = join(self.base_dir, self.release) if self.public and self.release else self.base_dir<EOL>self.stream.cli.env = {'<STR_LIT>': self.auth.password} if self.auth.ready() else None<EOL>if self.stream.source and self.stream.destination:<EOL><INDENT>for task in self.initial_stream.task:<EOL><INDENT>self.set_stream_task(task)<EOL><DEDENT><DEDENT>ntask = len(self.stream.task)<EOL>if self.stream.stream_count > ntask:<EOL><INDENT>if self.verbose:<EOL><INDENT>print("<STR_LIT>" % (self.stream.stream_count, ntask))<EOL><DEDENT>self.stream.stream_count = ntask<EOL>self.stream.streamlet = self.stream.streamlet[:ntask]<EOL><DEDENT><DEDENT>
Sets the download streams
f10765:c0:m6
def commit(self, offset=None, limit=None, dryrun=False):
self.stream.command = "<STR_LIT>"<EOL>self.stream.append_tasks_to_streamlets(offset=offset, limit=limit)<EOL>self.stream.commit_streamlets()<EOL>self.stream.run_streamlets()<EOL>self.stream.reset_streamlet()<EOL>
Start the rsync download
f10765:c0:m15
def replant_tree(self):
tree.replant_tree(self.release)<EOL>
Replants the tree based on the release
f10768:c0:m1
def _input_templates(self):
foo = self._config.read([self._pathfile])<EOL>if len(foo) == <NUM_LIT:1>:<EOL><INDENT>for k, v in self._config.items('<STR_LIT>'):<EOL><INDENT>self.templates[k] = v<EOL><DEDENT><DEDENT>else:<EOL><INDENT>raise ValueError("<STR_LIT>".format(self._pathfile))<EOL><DEDENT>return<EOL>
Read the path template file.
f10768:c0:m2
def lookup_keys(self, name):
assert name, '<STR_LIT>'<EOL>assert name in self.templates.keys(), '<STR_LIT>'.format(name)<EOL>keys = list(set(re.findall(r'<STR_LIT>', self.templates[name])))<EOL>skeys = self._check_special_kwargs(name)<EOL>keys.extend(skeys)<EOL>keys = list(set(keys))<EOL>keys = [k.split('<STR_LIT::>')[<NUM_LIT:0>] for k in keys]<EOL>return keys<EOL>
Lookup the keyword arguments needed for a given path name Parameters: name (str): The name of the path Returns: A list of keywords needed for filepath generation
f10768:c0:m3
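A quick sketch of pulling keyword names out of a str.format-style template; the template below is a made-up example, not one of the real sdss_access paths:

    import string

    template = '{plate:0>6}/spec-{plate}-{mjd}-{fiberid:0>4}.fits'
    keys = sorted({field for _, field, _, _ in string.Formatter().parse(template) if field})
    print(keys)  # -> ['fiberid', 'mjd', 'plate']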
def _check_special_kwargs(self, name):
keys = []<EOL>functions = re.findall(r"<STR_LIT>", self.templates[name])<EOL>if not functions:<EOL><INDENT>return keys<EOL><DEDENT>for function in functions:<EOL><INDENT>method = getattr(self, function[<NUM_LIT:1>:])<EOL>source = self._find_source(method)<EOL>fkeys = re.findall(r'<STR_LIT>', source)<EOL>if fkeys:<EOL><INDENT>fkeys = [ast.literal_eval(k) for k in fkeys]<EOL>keys.extend(fkeys)<EOL><DEDENT><DEDENT>return keys<EOL>
check special functions for kwargs Checks the content of the special functions (%methodname) for any keyword arguments referenced within Parameters: name (str): A path key name Returns: A list of keyword arguments found in any special functions
f10768:c0:m4
@staticmethod<EOL><INDENT>def _find_source(method):<DEDENT>
<EOL>source = inspect.findsource(method)<EOL>is_method = inspect.ismethod(method)<EOL>source_str = '<STR_LIT:\n>'.join(source[<NUM_LIT:0>])<EOL>if is_method:<EOL><INDENT>pattern = r'<STR_LIT>'.format(method.__name__)<EOL><DEDENT>start = re.search(pattern, source_str)<EOL>if start:<EOL><INDENT>startpos = start.start()<EOL>endpos = source_str.find('<STR_LIT>', startpos + <NUM_LIT:1>)<EOL>code = source_str[startpos:endpos]<EOL><DEDENT>else:<EOL><INDENT>code = None<EOL><DEDENT>return code<EOL>
Find the source code of a given method. Finds and extracts the source code of a given method in a module. Uses inspect.findsource to get all source code and performs some selection magic to identify the method's source code. Doing it this way because inspect.getsource returns the wrong method. Parameters: method (obj): A method object Returns: A string containing the source code of a given method Example: >>> from sdss_access.path import Path >>> path = Path() >>> path._find_source(path.full)
f10768:c0:m5
def lookup_names(self):
return self.templates.keys()<EOL>
Lookup what path names are available Returns a list of the available path names in sdss_access. Use with lookup_keys to find the required keyword arguments for a given path name. Returns: A list of the available path names.
f10768:c0:m6
def extract(self, name, example):
<EOL>if not pathlib:<EOL><INDENT>return None<EOL><DEDENT>if isinstance(example, pathlib.Path):<EOL><INDENT>example = str(example)<EOL><DEDENT>assert isinstance(example, six.string_types), '<STR_LIT>'<EOL>assert name in self.lookup_names(), '<STR_LIT>'.format(name)<EOL>template = self.templates[name]<EOL>template = os.path.expandvars(template)<EOL>if re.match('<STR_LIT>', template):<EOL><INDENT>template = re.sub('<STR_LIT>', os.environ['<STR_LIT>'], template)<EOL><DEDENT>elif re.search('<STR_LIT>', template):<EOL><INDENT>template = re.sub('<STR_LIT>', '<STR_LIT>', template)<EOL><DEDENT>elif re.search('<STR_LIT>', template):<EOL><INDENT>template = re.sub('<STR_LIT>', '<STR_LIT>', template)<EOL><DEDENT>if re.search('<STR_LIT>', template):<EOL><INDENT>template = re.sub('<STR_LIT>', '<STR_LIT>', template)<EOL><DEDENT>haskwargs = re.search('<STR_LIT>', template)<EOL>if not haskwargs:<EOL><INDENT>return None<EOL><DEDENT>subtemp = template.replace('<STR_LIT:$>', '<STR_LIT>').replace('<STR_LIT:.>', '<STR_LIT>')<EOL>research = re.sub('<STR_LIT>', '<STR_LIT>', subtemp)<EOL>pmatch = re.search(research, template)<EOL>tmatch = re.search(research, example)<EOL>path_dict = {}<EOL>if tmatch:<EOL><INDENT>values = tmatch.groups(<NUM_LIT:0>)<EOL>keys = pmatch.groups(<NUM_LIT:0>)<EOL>assert len(keys) == len(values), '<STR_LIT>'<EOL>parts = zip(keys, values)<EOL>for part in parts:<EOL><INDENT>value = part[<NUM_LIT:1>]<EOL>if re.findall('<STR_LIT>', part[<NUM_LIT:0>]):<EOL><INDENT>keys = re.findall('<STR_LIT>', part[<NUM_LIT:0>])<EOL>keys = [k.split('<STR_LIT::>')[<NUM_LIT:0>] for k in keys]<EOL>if len(keys) > <NUM_LIT:1>:<EOL><INDENT>if keys[<NUM_LIT:0>] == '<STR_LIT>':<EOL><INDENT>drval = re.match('<STR_LIT>', value).group(<NUM_LIT:0>)<EOL>otherval = value.split(drval)[-<NUM_LIT:1>]<EOL>pdict = {keys[<NUM_LIT:0>]: drval, keys[<NUM_LIT:1>]: otherval}<EOL><DEDENT>elif keys[<NUM_LIT:0>] in ['<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>']:<EOL><INDENT>pdict = {keys[<NUM_LIT:0>]: value[<NUM_LIT:0>], keys[<NUM_LIT:1>]: value[<NUM_LIT:1>:]}<EOL><DEDENT>else:<EOL><INDENT>raise ValueError('<STR_LIT>')<EOL><DEDENT>path_dict.update(pdict)<EOL><DEDENT>else:<EOL><INDENT>path_dict[keys[<NUM_LIT:0>]] = value<EOL><DEDENT><DEDENT><DEDENT><DEDENT>return path_dict<EOL>
Extract keywords from an example path
f10768:c0:m7
def dir(self, filetype, **kwargs):
full = kwargs.get('<STR_LIT>', None)<EOL>if not full:<EOL><INDENT>full = self.full(filetype, **kwargs)<EOL><DEDENT>return os.path.dirname(full)<EOL>
Return the directory containing a file of a given type. Parameters ---------- filetype : str File type parameter. Returns ------- dir : str Directory containing the file.
f10768:c0:m8
def name(self, filetype, **kwargs):
full = kwargs.get('<STR_LIT>', None)<EOL>if not full:<EOL><INDENT>full = self.full(filetype, **kwargs)<EOL><DEDENT>return os.path.basename(full)<EOL>
Return the name of a file of a given type. Parameters ---------- filetype : str File type parameter. Returns ------- name : str Name of a file with no directory information.
f10768:c0:m9
def exists(self, filetype, remote=None, **kwargs):
full = kwargs.get('<STR_LIT>', None)<EOL>if not full:<EOL><INDENT>full = self.full(filetype, **kwargs)<EOL><DEDENT>if remote:<EOL><INDENT>url = self.url('<STR_LIT>', full=full)<EOL>try:<EOL><INDENT>resp = requests.head(url)<EOL><DEDENT>except Exception as e:<EOL><INDENT>raise AccessError('<STR_LIT>'.format(url, e))<EOL><DEDENT>else:<EOL><INDENT>return resp.ok<EOL><DEDENT><DEDENT>else:<EOL><INDENT>return os.path.isfile(full)<EOL><DEDENT>
Checks if the given type of file exists locally or remotely Parameters ---------- filetype : str File type parameter. remote : bool If True, checks for remote existence of the file Returns ------- exists : bool Boolean indicating if the file exists.
f10768:c0:m10
def expand(self, filetype, **kwargs):
full = kwargs.get('<STR_LIT>', None)<EOL>if not full:<EOL><INDENT>full = self.full(filetype, **kwargs)<EOL><DEDENT>files = glob(full)<EOL>as_url = kwargs.get('<STR_LIT>', None)<EOL>newfiles = [self.url('<STR_LIT>', full=full) for full in files] if as_url else files<EOL>refine = kwargs.get('<STR_LIT>', None)<EOL>if refine:<EOL><INDENT>newfiles = self.refine(newfiles, refine, **kwargs)<EOL><DEDENT>return newfiles<EOL>
Expand a wildcard path locally Parameters ---------- filetype : str File type parameter. as_url: bool Boolean to return SAS urls refine: str Regular expression string to filter the list of files by before random selection Returns ------- expand : list List of expanded full paths of the given type.
f10768:c0:m11
def any(self, filetype, **kwargs):
expanded_files = self.expand(filetype, **kwargs)<EOL>return any(expanded_files)<EOL>
Checks if the local directory contains any files of the given type Parameters ---------- filetype : str File type parameter. Returns ------- any : bool Boolean indicating if any files exist in the expanded path on disk.
f10768:c0:m12
def one(self, filetype, **kwargs):
expanded_files = self.expand(filetype, **kwargs)<EOL>isany = self.any(filetype, **kwargs)<EOL>return choice(expanded_files) if isany else None<EOL>
Returns one random file of the given type Parameters ---------- filetype : str File type parameter. as_url: bool Boolean to return SAS urls refine: str Regular expression string to filter the list of files by before random selection Returns ------- one : str Random file selected from the expanded list of full paths on disk.
f10768:c0:m13
def random(self, filetype, **kwargs):
expanded_files = self.expand(filetype, **kwargs)<EOL>isany = self.any(filetype, **kwargs)<EOL>if isany:<EOL><INDENT>num = kwargs.get('<STR_LIT>', <NUM_LIT:1>)<EOL>assert num <= len(expanded_files), '<STR_LIT>'<EOL>return sample(expanded_files, num)<EOL><DEDENT>else:<EOL><INDENT>return None<EOL><DEDENT>
Returns a random selection of files of the given type Parameters ---------- filetype : str File type parameter. num : int The number of files to return as_url: bool Boolean to return SAS urls refine: str Regular expression string to filter the list of files by before random selection Returns ------- random : list Random files selected from the expanded list of full paths on disk.
f10768:c0:m14
def refine(self, filelist, regex, filterdir='<STR_LIT>', **kwargs):
assert filelist, '<STR_LIT>'<EOL>assert regex, '<STR_LIT>'<EOL>r = re.compile(regex)<EOL>assert filterdir in ['<STR_LIT>', '<STR_LIT>'], '<STR_LIT>'<EOL>if filterdir == '<STR_LIT>':<EOL><INDENT>subset = list(filter(lambda i: r.search(i), filelist))<EOL><DEDENT>elif filterdir == '<STR_LIT>':<EOL><INDENT>subset = list(filter(lambda i: not r.search(i), filelist))<EOL><DEDENT>return subset<EOL>
Returns a list of files filtered by a regular expression Parameters ---------- filelist : list A list of files to filter on. regex : str The regular expression string to filter your list filterdir: {'in', 'out'} Indicates whether the filter is inclusive or exclusive; 'out' removes the items satisfying the regular expression, 'in' keeps the items satisfying the regular expression Returns ------- refine : list A file list refined by an input regular expression.
f10768:c0:m15
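The same in/out filtering idea with a concrete regex (the file names below are hypothetical):

    import re

    def refine(filelist, regex, filterdir='in'):
        r = re.compile(regex)
        if filterdir == 'in':
            return [f for f in filelist if r.search(f)]
        return [f for f in filelist if not r.search(f)]

    files = ['spec-8485-57424-0001.fits', 'spec-8485-57424-0002.fits.gz']
    print(refine(files, r'\.fits$'))       # keeps only the plain .fits file
    print(refine(files, r'\.gz$', 'out'))  # drops the gzipped file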
def full(self, filetype, **kwargs):
<EOL>if '<STR_LIT>' in kwargs:<EOL><INDENT>return kwargs.get('<STR_LIT>')<EOL><DEDENT>assert filetype in self.templates, ('<STR_LIT>'<EOL>'<STR_LIT>'<EOL>'<STR_LIT>'.format(filetype))<EOL>template = self.templates[filetype]<EOL>if template:<EOL><INDENT>keys = self.lookup_keys(filetype)<EOL>keys = [k.split('<STR_LIT::>')[<NUM_LIT:0>] for k in keys]<EOL>missing_keys = set(keys) - set(kwargs.keys())<EOL>if missing_keys:<EOL><INDENT>raise KeyError('<STR_LIT>'.format(list(missing_keys)))<EOL><DEDENT>else:<EOL><INDENT>template = template.format(**kwargs)<EOL><DEDENT><DEDENT>if template:<EOL><INDENT>template = os.path.expandvars(template)<EOL>template = self._call_special_functions(filetype, template, **kwargs)<EOL><DEDENT>return template<EOL>
Return the full name of a given type of file. Parameters ---------- filetype : str File type parameter. Returns ------- full : str The full path to the file.
f10768:c0:m16
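A sketch of the fill-and-expand step: check for missing keywords, format the template, then expand environment variables. The template, kwargs and environment value below are hypothetical:

    import os

    template = '$BOSS_SPECTRO_REDUX/{run2d}/{plate}/spec-{plate}-{mjd}.fits'
    kwargs = {'run2d': 'v5_10_0', 'plate': 8485, 'mjd': 57424}

    missing = {'run2d', 'plate', 'mjd'} - set(kwargs)
    if missing:
        raise KeyError('missing keywords: {0}'.format(sorted(missing)))

    os.environ.setdefault('BOSS_SPECTRO_REDUX', '/tmp/redux')  # placeholder value
    print(os.path.expandvars(template.format(**kwargs)))
    # e.g. /tmp/redux/v5_10_0/8485/spec-8485-57424.fits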
def _call_special_functions(self, filetype, template, **kwargs):
<EOL>functions = re.findall(r"<STR_LIT>", template)<EOL>if not functions:<EOL><INDENT>return template<EOL><DEDENT>for function in functions:<EOL><INDENT>try:<EOL><INDENT>method = getattr(self, function[<NUM_LIT:1>:])<EOL><DEDENT>except AttributeError:<EOL><INDENT>return None<EOL><DEDENT>else:<EOL><INDENT>value = method(filetype, **kwargs)<EOL>template = re.sub(function, value, template)<EOL><DEDENT><DEDENT>return template<EOL>
Call the special functions found in a template path Calls special functions indicated by %methodname found in the sdss_paths.ini template file, and replaces the %location in the path with the returned content. Parameters: filetype (str): template name of file template (str): the template path kwargs (dict): Any kwargs needed to pass into the methods Returns: The expanded template path
f10768:c0:m17
def location(self, filetype, base_dir=None, **kwargs):
full = kwargs.get('<STR_LIT>', None)<EOL>if not full:<EOL><INDENT>full = self.full(filetype, **kwargs)<EOL><DEDENT>self.set_base_dir(base_dir=base_dir)<EOL>location = full[len(self.base_dir):] if full and full.startswith(self.base_dir) else None<EOL>if location and '<STR_LIT>' in location:<EOL><INDENT>location = location.replace('<STR_LIT>', '<STR_LIT:/>')<EOL><DEDENT>return location<EOL>
Return the location of the relative sas path of a given type of file. Parameters ---------- filetype : str File type parameter. Returns ------- full : str The relative sas path to the file.
f10768:c0:m22
def url(self, filetype, base_dir=None, sasdir='<STR_LIT>', **kwargs):
location = self.location(filetype, **kwargs)<EOL>return join(self.remote_base, sasdir, location) if self.remote_base and location else None<EOL>
Return the url of a given type of file. Parameters ---------- filetype : str File type parameter. Returns ------- full : str The sas url to the file.
f10768:c0:m23
def plateid6(self, filetype, **kwargs):
plateid = int(kwargs['<STR_LIT>'])<EOL>if plateid < <NUM_LIT>:<EOL><INDENT>return "<STR_LIT>".format(plateid)<EOL><DEDENT>else:<EOL><INDENT>return "<STR_LIT>".format(plateid)<EOL><DEDENT>
Print plate ID, accounting for 5-6 digit plate IDs. Parameters ---------- filetype : str File type parameter. plateid : int or str Plate ID number. Will be converted to int internally. Returns ------- plateid6 : str Plate ID formatted to a string of 6 characters.
f10768:c1:m1
def platedir(self, filetype, **kwargs):
plateid = int(kwargs['<STR_LIT>'])<EOL>plateid100 = plateid // <NUM_LIT:100><EOL>subdir = "<STR_LIT>".format(plateid100) + "<STR_LIT>"<EOL>return os.path.join(subdir, "<STR_LIT>".format(plateid))<EOL>
Returns plate subdirectory in :envvar:`PLATELIST_DIR` of the form: ``NNNNXX/NNNNNN``. Parameters ---------- filetype : str File type parameter. plateid : int or str Plate ID number. Will be converted to int internally. Returns ------- platedir : str Plate directory in the format ``NNNNXX/NNNNNN``.
f10768:c1:m2
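Worked examples of the plateid6/platedir formatting implied by the two docstrings above; the exact format strings and the 5/6-digit threshold are masked in the bodies, so these are assumptions:

    import os

    def plateid6(plateid):
        # Assumed: pad 4-5 digit plate IDs to 6 characters, leave 6-digit IDs alone.
        plateid = int(plateid)
        return '{:0>6d}'.format(plateid) if plateid < 10000 else '{:d}'.format(plateid)

    def platedir(plateid):
        # Assumed layout from the docstring: NNNNXX/NNNNNN.
        plateid = int(plateid)
        subdir = '{:0>4d}'.format(plateid // 100) + 'XX'
        return os.path.join(subdir, '{:0>6d}'.format(plateid))

    print(plateid6(8485))  # -> '008485'
    print(platedir(8485))  # -> '0084XX/008485'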
def spectrodir(self, filetype, **kwargs):
if str(kwargs['<STR_LIT>']) in ('<STR_LIT>', '<STR_LIT>', '<STR_LIT>'):<EOL><INDENT>return os.environ['<STR_LIT>']<EOL><DEDENT>else:<EOL><INDENT>return os.environ['<STR_LIT>']<EOL><DEDENT>
Returns :envvar:`SPECTRO_REDUX` or :envvar:`BOSS_SPECTRO_REDUX` depending on the value of `run2d`. Parameters ---------- filetype : str File type parameter. run2d : int or str 2D Reduction ID. Returns ------- spectrodir : str Value of the appropriate environment variable.
f10768:c1:m3
def definitiondir(self, filetype, **kwargs):
designid = int(kwargs['<STR_LIT>'])<EOL>designid100 = designid // <NUM_LIT:100><EOL>subdir = "<STR_LIT>".format(designid100) + "<STR_LIT>"<EOL>return subdir<EOL>
Returns definition subdirectory in :envvar:`PLATELIST_DIR` of the form: ``NNNNXX``. Parameters ---------- filetype : str File type parameter. designid : int or str Design ID number. Will be converted to int internally. Returns ------- definitiondir : str Definition directory in the format ``NNNNXX``.
f10768:c1:m4
@task<EOL>def clean_docs(ctx):
print('<STR_LIT>')<EOL>ctx.run("<STR_LIT>")<EOL>
Cleans up the Sphinx docs
f10770:m0
@task(clean_docs)<EOL>def build_docs(ctx):
print('<STR_LIT>')<EOL>os.chdir('<STR_LIT>')<EOL>ctx.run("<STR_LIT>")<EOL>
Builds the Sphinx docs
f10770:m1
@task(build_docs)<EOL>def show_docs(ctx):
print('<STR_LIT>')<EOL>os.chdir('<STR_LIT>')<EOL>ctx.run('<STR_LIT>')<EOL>
Shows the Sphinx docs
f10770:m2
@task<EOL>def clean(ctx):
print('<STR_LIT>')<EOL>ctx.run("<STR_LIT>")<EOL>ctx.run("<STR_LIT>")<EOL>ctx.run("<STR_LIT>")<EOL>
Cleans up the crap before a Pip build
f10770:m3
@task(clean)<EOL>def deploy(ctx):
print('<STR_LIT>')<EOL>ctx.run("<STR_LIT>")<EOL>ctx.run("<STR_LIT>")<EOL>
Deploy the project to pypi
f10770:m4
def get_requirements(opts):
if opts.dev:<EOL><INDENT>name = '<STR_LIT>'<EOL><DEDENT>elif opts.doc:<EOL><INDENT>name = '<STR_LIT>'<EOL><DEDENT>else:<EOL><INDENT>name = '<STR_LIT>'<EOL><DEDENT>requirements_file = os.path.join(os.path.dirname(__file__), name)<EOL>install_requires = [line.strip().replace('<STR_LIT>', '<STR_LIT>') for line in open(requirements_file)<EOL>if not line.strip().startswith('<STR_LIT:#>') and line.strip() != '<STR_LIT>']<EOL>return install_requires<EOL>
Get the proper requirements file based on the optional argument
f10771:m1
def remove_args(parser):
arguments = []<EOL>for action in list(parser._get_optional_actions()):<EOL><INDENT>if '<STR_LIT>' not in action.option_strings:<EOL><INDENT>arguments += action.option_strings<EOL><DEDENT><DEDENT>for arg in arguments:<EOL><INDENT>if arg in sys.argv:<EOL><INDENT>sys.argv.remove(arg)<EOL><DEDENT><DEDENT>
Remove custom arguments from the parser
f10771:m2
def add_checkpoint(html_note, counter):
if html_note.text:<EOL><INDENT>html_note.text = (html_note.text + CHECKPOINT_PREFIX +<EOL>str(counter) + CHECKPOINT_SUFFIX)<EOL><DEDENT>else:<EOL><INDENT>html_note.text = (CHECKPOINT_PREFIX + str(counter) +<EOL>CHECKPOINT_SUFFIX)<EOL><DEDENT>counter += <NUM_LIT:1><EOL>for child in html_note.iterchildren():<EOL><INDENT>counter = add_checkpoint(child, counter)<EOL><DEDENT>if html_note.tail:<EOL><INDENT>html_note.tail = (html_note.tail + CHECKPOINT_PREFIX +<EOL>str(counter) + CHECKPOINT_SUFFIX)<EOL><DEDENT>else:<EOL><INDENT>html_note.tail = (CHECKPOINT_PREFIX + str(counter) +<EOL>CHECKPOINT_SUFFIX)<EOL><DEDENT>counter += <NUM_LIT:1><EOL>return counter<EOL>
Recursively adds checkpoints to html tree.
f10784:m0
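A sketch of the recursive checkpointing on a small tree (assumes lxml is installed; the CHECKPOINT_PREFIX/SUFFIX strings are masked above, so the markers here are assumptions):

    from lxml import html

    CHECKPOINT_PREFIX, CHECKPOINT_SUFFIX = '#!%!', '!%!#'

    def add_checkpoint(node, counter):
        # Mark the element's text, recurse into children, then mark its tail.
        node.text = (node.text or '') + CHECKPOINT_PREFIX + str(counter) + CHECKPOINT_SUFFIX
        counter += 1
        for child in node.iterchildren():
            counter = add_checkpoint(child, counter)
        node.tail = (node.tail or '') + CHECKPOINT_PREFIX + str(counter) + CHECKPOINT_SUFFIX
        counter += 1
        return counter

    tree = html.fromstring('<div>hi<span>there</span> again</div>')
    add_checkpoint(tree, 0)
    print(html.tostring(tree, encoding='unicode'))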
def delete_quotation_tags(html_note, counter, quotation_checkpoints):
tag_in_quotation = True<EOL>if quotation_checkpoints[counter]:<EOL><INDENT>html_note.text = '<STR_LIT>'<EOL><DEDENT>else:<EOL><INDENT>tag_in_quotation = False<EOL><DEDENT>counter += <NUM_LIT:1><EOL>quotation_children = [] <EOL>for child in html_note.iterchildren():<EOL><INDENT>counter, child_tag_in_quotation = delete_quotation_tags(<EOL>child, counter,<EOL>quotation_checkpoints<EOL>)<EOL>if child_tag_in_quotation:<EOL><INDENT>quotation_children.append(child)<EOL><DEDENT><DEDENT>if quotation_checkpoints[counter]:<EOL><INDENT>html_note.tail = '<STR_LIT>'<EOL><DEDENT>else:<EOL><INDENT>tag_in_quotation = False<EOL><DEDENT>counter += <NUM_LIT:1><EOL>if tag_in_quotation:<EOL><INDENT>return counter, tag_in_quotation<EOL><DEDENT>else:<EOL><INDENT>for child in quotation_children:<EOL><INDENT>html_note.remove(child)<EOL><DEDENT>return counter, tag_in_quotation<EOL><DEDENT>
Deletes tags with quotation checkpoints from html tree.
f10784:m1
def cut_gmail_quote(html_message):
gmail_quote = cssselect('<STR_LIT>', html_message)<EOL>if gmail_quote and (gmail_quote[<NUM_LIT:0>].text is None or not RE_FWD.match(gmail_quote[<NUM_LIT:0>].text)):<EOL><INDENT>gmail_quote[<NUM_LIT:0>].getparent().remove(gmail_quote[<NUM_LIT:0>])<EOL>return True<EOL><DEDENT>
Cuts the outermost block element with class gmail_quote.
f10784:m2
def cut_microsoft_quote(html_message):
<EOL>ns = {"<STR_LIT>": "<STR_LIT>"}<EOL>splitter = html_message.xpath(<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>, namespaces=ns<EOL>)<EOL>if splitter:<EOL><INDENT>splitter = splitter[<NUM_LIT:0>]<EOL>if splitter == splitter.getparent().getchildren()[<NUM_LIT:0>]:<EOL><INDENT>splitter = splitter.getparent()<EOL><DEDENT><DEDENT>else:<EOL><INDENT>splitter = html_message.xpath(<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>)<EOL>if len(splitter):<EOL><INDENT>splitter = splitter[<NUM_LIT:0>]<EOL>splitter = splitter.getparent().getparent()<EOL>splitter = splitter.getparent().getparent()<EOL><DEDENT><DEDENT>if len(splitter):<EOL><INDENT>parent = splitter.getparent()<EOL>after_splitter = splitter.getnext()<EOL>while after_splitter is not None:<EOL><INDENT>parent.remove(after_splitter)<EOL>after_splitter = splitter.getnext()<EOL><DEDENT>parent.remove(splitter)<EOL>return True<EOL><DEDENT>return False<EOL>
Cuts splitter block and all following blocks.
f10784:m3
def cut_blockquote(html_message):
quote = html_message.xpath(<EOL>'<STR_LIT>'<EOL>'<STR_LIT>'<EOL>'<STR_LIT>')<EOL>if quote:<EOL><INDENT>quote = quote[<NUM_LIT:0>]<EOL>quote.getparent().remove(quote)<EOL>return True<EOL><DEDENT>
Cuts the last non-nested blockquote with wrapping elements.
f10784:m5
def cut_from_block(html_message):
<EOL>block = html_message.xpath(<EOL>("<STR_LIT>"<EOL>"<STR_LIT>"))<EOL>if block:<EOL><INDENT>block = block[-<NUM_LIT:1>]<EOL>parent_div = None<EOL>while block.getparent() is not None:<EOL><INDENT>if block.tag == '<STR_LIT>':<EOL><INDENT>parent_div = block<EOL>break<EOL><DEDENT>block = block.getparent()<EOL><DEDENT>if parent_div is not None:<EOL><INDENT>maybe_body = parent_div.getparent()<EOL>parent_div_is_all_content = (<EOL>maybe_body is not None and maybe_body.tag == '<STR_LIT:body>' and<EOL>len(maybe_body.getchildren()) == <NUM_LIT:1>)<EOL>if not parent_div_is_all_content:<EOL><INDENT>parent = block.getparent()<EOL>next_sibling = block.getnext()<EOL>while next_sibling is not None:<EOL><INDENT>parent.remove(block)<EOL>block = next_sibling<EOL>next_sibling = block.getnext()<EOL><DEDENT>if block is not None:<EOL><INDENT>parent.remove(block)<EOL><DEDENT>return True<EOL><DEDENT><DEDENT>else:<EOL><INDENT>return False<EOL><DEDENT><DEDENT>block = html_message.xpath(<EOL>("<STR_LIT>"<EOL>"<STR_LIT>"))<EOL>if block:<EOL><INDENT>block = block[<NUM_LIT:0>]<EOL>if RE_FWD.match(block.getparent().text or '<STR_LIT>'):<EOL><INDENT>return False<EOL><DEDENT>while(block.getnext() is not None):<EOL><INDENT>block.getparent().remove(block.getnext())<EOL><DEDENT>block.getparent().remove(block)<EOL>return True<EOL><DEDENT>
Cuts div tag which wraps block starting with "From:".
f10784:m6
def remove_initial_spaces_and_mark_message_lines(lines):
i = <NUM_LIT:0><EOL>while i < len(lines):<EOL><INDENT>lines[i] = lines[i].lstrip('<STR_LIT:U+0020>')<EOL>i += <NUM_LIT:1><EOL><DEDENT>return mark_message_lines(lines)<EOL>
Removes the initial spaces in each line before marking message lines. This ensures headers can be identified if they are indented with spaces.
f10787:m1
def mark_message_lines(lines):
markers = ['<STR_LIT:e>' for _ in lines]<EOL>i = <NUM_LIT:0><EOL>while i < len(lines):<EOL><INDENT>if not lines[i].strip():<EOL><INDENT>markers[i] = '<STR_LIT:e>' <EOL><DEDENT>elif QUOT_PATTERN.match(lines[i]):<EOL><INDENT>markers[i] = '<STR_LIT:m>' <EOL><DEDENT>elif RE_FWD.match(lines[i]):<EOL><INDENT>markers[i] = '<STR_LIT:f>' <EOL><DEDENT>else:<EOL><INDENT>splitter = is_splitter('<STR_LIT:\n>'.join(lines[i:i + SPLITTER_MAX_LINES]))<EOL>if splitter:<EOL><INDENT>splitter_lines = splitter.group().splitlines()<EOL>for j in range(len(splitter_lines)):<EOL><INDENT>markers[i + j] = '<STR_LIT:s>'<EOL><DEDENT>i += len(splitter_lines) - <NUM_LIT:1><EOL><DEDENT>else:<EOL><INDENT>markers[i] = '<STR_LIT:t>'<EOL><DEDENT><DEDENT>i += <NUM_LIT:1><EOL><DEDENT>return '<STR_LIT>'.join(markers)<EOL>
Mark message lines with markers to distinguish quotation lines. Markers: * e - empty line * m - line that starts with quotation marker '>' * s - splitter line * t - presumably lines from the last message in the conversation >>> mark_message_lines(['answer', 'From: foo@bar.com', '', '> question']) 'tsem'
f10787:m2
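A tiny sketch of the marking idea with simplified stand-ins for the real patterns (QUOT_PATTERN, RE_FWD and the multi-line splitter detection are more involved than this):

    import re

    QUOT = re.compile(r'\s*>')
    SPLITTER = re.compile(r'On .* wrote:|From:')

    def mark_lines(lines):
        markers = []
        for line in lines:
            if not line.strip():
                markers.append('e')       # empty line
            elif QUOT.match(line):
                markers.append('m')       # quotation marker line
            elif SPLITTER.match(line):
                markers.append('s')       # splitter line
            else:
                markers.append('t')       # presumably the latest message
        return ''.join(markers)

    print(mark_lines(['answer', 'From: foo@bar.com', '', '> question']))  # -> 'tsem'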
def process_marked_lines(lines, markers, return_flags=[False, -<NUM_LIT:1>, -<NUM_LIT:1>]):
markers = '<STR_LIT>'.join(markers)<EOL>if '<STR_LIT:s>' not in markers and not re.search('<STR_LIT>', markers):<EOL><INDENT>markers = markers.replace('<STR_LIT:m>', '<STR_LIT:t>')<EOL><DEDENT>if re.match('<STR_LIT>', markers):<EOL><INDENT>return_flags[:] = [False, -<NUM_LIT:1>, -<NUM_LIT:1>]<EOL>return lines<EOL><DEDENT>for inline_reply in re.finditer('<STR_LIT>', markers):<EOL><INDENT>links = (<EOL>RE_PARENTHESIS_LINK.search(lines[inline_reply.start() - <NUM_LIT:1>]) or<EOL>RE_PARENTHESIS_LINK.match(lines[inline_reply.start()].strip()))<EOL>if not links:<EOL><INDENT>return_flags[:] = [False, -<NUM_LIT:1>, -<NUM_LIT:1>]<EOL>return lines<EOL><DEDENT><DEDENT>quotation = re.search('<STR_LIT>', markers)<EOL>if quotation:<EOL><INDENT>return_flags[:] = [True, quotation.start(), len(lines)]<EOL>return lines[:quotation.start()]<EOL><DEDENT>quotation = (RE_QUOTATION.search(markers) or<EOL>RE_EMPTY_QUOTATION.search(markers))<EOL>if quotation:<EOL><INDENT>return_flags[:] = True, quotation.start(<NUM_LIT:1>), quotation.end(<NUM_LIT:1>)<EOL>return lines[:quotation.start(<NUM_LIT:1>)] + lines[quotation.end(<NUM_LIT:1>):]<EOL><DEDENT>return_flags[:] = [False, -<NUM_LIT:1>, -<NUM_LIT:1>]<EOL>return lines<EOL>
Run regexes against the message's marked lines to strip quotations. Return only the last message lines. >>> process_marked_lines(['Hello', 'From: foo@bar.com', '', '> Hi'], 'tsem') ['Hello'] Also returns return_flags. return_flags = [were_lines_deleted, first_deleted_line, last_deleted_line]
f10787:m3
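A sketch of the stripping step that follows the marking: search the marker string for a trailing splitter-plus-quotation block and drop those lines (the real RE_QUOTATION / RE_EMPTY_QUOTATION patterns are more involved than this stand-in):

    import re

    def strip_quotation(lines, markers):
        # Drop the trailing block of splitter/quotation/empty markers, if any.
        m = re.search(r'(se*)+(m|e)*$', markers)
        return lines[:m.start()] if m else lines

    lines = ['Hello', 'From: foo@bar.com', '', '> Hi']
    print(strip_quotation(lines, 'tsem'))  # -> ['Hello']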
def preprocess(msg_body, delimiter, content_type='<STR_LIT>'):
msg_body = _replace_link_brackets(msg_body)<EOL>msg_body = _wrap_splitter_with_newline(msg_body, delimiter, content_type)<EOL>return msg_body<EOL>
Prepares msg_body for being stripped. Replaces link brackets so that they can't be mistaken for a quotation marker. Splits a line in two if the splitter pattern is preceded by some text on the same line (done only for the 'On <date> <person> wrote:' pattern). Converts msg_body into unicode.
f10787:m4
def _replace_link_brackets(msg_body):
if isinstance(msg_body, bytes):<EOL><INDENT>msg_body = msg_body.decode('<STR_LIT:utf8>')<EOL><DEDENT>def link_wrapper(link):<EOL><INDENT>newline_index = msg_body[:link.start()].rfind("<STR_LIT:\n>")<EOL>if msg_body[newline_index + <NUM_LIT:1>] == "<STR_LIT:>>":<EOL><INDENT>return link.group()<EOL><DEDENT>else:<EOL><INDENT>return "<STR_LIT>" % link.group(<NUM_LIT:1>)<EOL><DEDENT><DEDENT>msg_body = re.sub(RE_LINK, link_wrapper, msg_body)<EOL>return msg_body<EOL>
Normalize links, i.e. replace the '<', '>' wrapping the link with some symbols so that the '>' closing the link can't be mistakenly taken for a quotation marker. Converts msg_body into unicode.
f10787:m5
def _wrap_splitter_with_newline(msg_body, delimiter, content_type='<STR_LIT>'):
def splitter_wrapper(splitter):<EOL><INDENT>"""<STR_LIT>"""<EOL>if splitter.start() and msg_body[splitter.start() - <NUM_LIT:1>] != '<STR_LIT:\n>':<EOL><INDENT>return '<STR_LIT>' % (delimiter, splitter.group())<EOL><DEDENT>else:<EOL><INDENT>return splitter.group()<EOL><DEDENT><DEDENT>if content_type == '<STR_LIT>':<EOL><INDENT>msg_body = re.sub(RE_ON_DATE_SMB_WROTE, splitter_wrapper, msg_body)<EOL><DEDENT>return msg_body<EOL>
Splits a line in two if the splitter pattern is preceded by some text on the same line (done only for the 'On <date> <person> wrote:' pattern).
f10787:m6
def postprocess(msg_body):
return re.sub(RE_NORMALIZED_LINK, r'<STR_LIT>', msg_body).strip()<EOL>
Make up for changes done at preprocessing message. Replace link brackets back to '<' and '>'.
f10787:m7
def extract_from_plain(msg_body):
stripped_text = msg_body<EOL>delimiter = get_delimiter(msg_body)<EOL>msg_body = preprocess(msg_body, delimiter)<EOL>lines = msg_body.splitlines()[:MAX_LINES_COUNT]<EOL>markers = mark_message_lines(lines)<EOL>lines = process_marked_lines(lines, markers)<EOL>msg_body = delimiter.join(lines)<EOL>msg_body = postprocess(msg_body)<EOL>return msg_body<EOL>
Extracts the non-quoted message from the provided plain text.
f10787:m8