| signature | body | docstring | id |
|---|---|---|---|
def between(self, time_):
|
hour = int(time_[<NUM_LIT:0>:<NUM_LIT:2>])<EOL>minute = int(time_[<NUM_LIT:3>:<NUM_LIT:5>])<EOL>return not (<EOL>hour < self.h1 or hour > self.h2 or<EOL>(hour == self.h1 and minute < self.m1) or<EOL>(hour == self.h2 and minute > self.m2)<EOL>)<EOL>
|
Check whether the HH:MM parameter falls within the time range.
|
f9085:c0:m1
|
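The body above is literal-anonymized (`<STR_LIT>`/`<NUM_LIT>` tokens). As a readability aid, here is a minimal runnable sketch of what `between` plausibly expands to; the slice indices match the visible `<NUM_LIT:...>` tokens, while the `TimeRange` class name and constructor are assumptions:

```python
class TimeRange:
    """Inclusive HH:MM-HH:MM range, e.g. TimeRange(8, 30, 17, 0)."""

    def __init__(self, h1, m1, h2, m2):
        self.h1, self.m1, self.h2, self.m2 = h1, m1, h2, m2

    def between(self, time_):
        # time_ is an 'HH:MM' string: chars 0-2 are the hour, 3-5 the minutes.
        hour = int(time_[0:2])
        minute = int(time_[3:5])
        return not (
            hour < self.h1 or hour > self.h2 or
            (hour == self.h1 and minute < self.m1) or
            (hour == self.h2 and minute > self.m2)
        )

assert TimeRange(8, 30, 17, 0).between('09:15')
assert not TimeRange(8, 30, 17, 0).between('08:29')
```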
def has_host_match(log_data, hosts):
|
hostname = getattr(log_data, '<STR_LIT:host>', None)<EOL>if hostname and hostname not in host_cache:<EOL><INDENT>for host_pattern in hosts:<EOL><INDENT>if host_pattern.search(hostname) is not None:<EOL><INDENT>host_cache.add(hostname)<EOL>return True<EOL><DEDENT><DEDENT>else:<EOL><INDENT>return False<EOL><DEDENT><DEDENT>return True<EOL>
|
Match the data with a list of hostname patterns. If the log line data
doesn't include host information, the line is considered matched.
|
f9088:m4
|
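A de-anonymized sketch of the row above, assuming `host_cache` is a module-level set (consistent with the visible `host_cache.add(...)` call) and that `hosts` holds compiled regex patterns; the for/else of the original is rewritten as a post-loop return:

```python
import re
from types import SimpleNamespace

host_cache = set()  # hostnames that already matched once

def has_host_match(log_data, hosts):
    """Match log_data.host against compiled hostname patterns, caching hits."""
    hostname = getattr(log_data, 'host', None)
    if hostname and hostname not in host_cache:
        for host_pattern in hosts:
            if host_pattern.search(hostname) is not None:
                host_cache.add(hostname)
                return True
        return False
    # No host field at all, or a hostname we already accepted: matched.
    return True

hosts = [re.compile(r'^mx\d+$')]
assert has_host_match(SimpleNamespace(host='mx1'), hosts)
assert has_host_match(SimpleNamespace(host='mx1'), hosts)   # served from cache
assert not has_host_match(SimpleNamespace(host='web1'), hosts)
```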
def create_matcher(dispatcher, parsers, apptags, matcher='<STR_LIT>', hosts=tuple(), time_range=None,<EOL>time_period=(None, None), patterns=tuple(), invert=False, count=False,<EOL>files_with_match=None, max_count=<NUM_LIT:0>, only_matching=False, quiet=False,<EOL>thread=False, name_cache=None):
|
parsers = CycleParsers(parsers)<EOL>max_matches = <NUM_LIT:1> if quiet else max_count<EOL>use_app_rules = matcher != '<STR_LIT>'<EOL>select_unparsed = matcher == '<STR_LIT>'<EOL>register_log_lines = not (quiet or count or files_with_match is not None)<EOL>start_dt, end_dt = get_mktime_period(time_period)<EOL>pattern_search = create_search_function(invert, only_matching)<EOL>dispatch_selected = dispatcher.dispatch_selected<EOL>dispatch_context = dispatcher.dispatch_context<EOL>display_progress_bar = sys.stdout.isatty() and all(c.name != '<STR_LIT>' for c in dispatcher.channels)<EOL>def process_logfile(source, apps, encoding='<STR_LIT:utf-8>'):<EOL><INDENT>log_parser = next(parsers)<EOL>first_event = None<EOL>last_event = None<EOL>app_thread = None<EOL>selected_data = None<EOL>line_counter = <NUM_LIT:0><EOL>unknown_counter = <NUM_LIT:0><EOL>selected_counter = <NUM_LIT:0><EOL>extra_tags = Counter()<EOL>dispatcher.reset()<EOL>read_size = <NUM_LIT:0><EOL>progress_bar = None<EOL>with open_resource(source) as logfile:<EOL><INDENT>logfile_name = logfile.name<EOL>fstat = os.fstat(logfile.fileno())<EOL>file_mtime = datetime.datetime.fromtimestamp(fstat.st_mtime)<EOL>file_year = file_mtime.year<EOL>file_month = file_mtime.month<EOL>prev_year = file_year - <NUM_LIT:1><EOL>if display_progress_bar:<EOL><INDENT>read_size = <NUM_LIT:0><EOL>progress_bar = ProgressBar(sys.stdout, fstat.st_size, logfile_name)<EOL><DEDENT>for line in logfile:<EOL><INDENT>line = line.decode(encoding)<EOL>line_counter += <NUM_LIT:1><EOL>if line[-<NUM_LIT:1>] != '<STR_LIT:\n>':<EOL><INDENT>line += '<STR_LIT:\n>'<EOL><DEDENT>if display_progress_bar:<EOL><INDENT>read_size += len(line)<EOL>if not line_counter % <NUM_LIT:100>:<EOL><INDENT>progress_bar.redraw(read_size)<EOL><DEDENT><DEDENT>log_match = log_parser.match(line)<EOL>if log_match is None:<EOL><INDENT>next_parser, log_match = parsers.detect(line)<EOL>if log_match is not None:<EOL><INDENT>log_parser = next_parser<EOL><DEDENT>elif line_counter == <NUM_LIT:1>:<EOL><INDENT>logger.warning("<STR_LIT>".format(logfile_name))<EOL>break<EOL><DEDENT>else:<EOL><INDENT>unknown_counter += <NUM_LIT:1><EOL>continue<EOL><DEDENT><DEDENT>log_data = log_parser.get_data(log_match)<EOL>if getattr(log_data, '<STR_LIT>', None) is not None:<EOL><INDENT>if selected_data is not None:<EOL><INDENT>repeat = int(log_data.repeat)<EOL>if not thread:<EOL><INDENT>selected_counter += repeat<EOL><DEDENT>if use_app_rules:<EOL><INDENT>app = log_parser.app or get_app(selected_data, apps, apptags, extra_tags)<EOL>app.increase_last(repeat)<EOL>app.matches += <NUM_LIT:1><EOL>dispatch_context(<EOL>key=(app, app_thread),<EOL>filename=logfile_name,<EOL>line_number=line_counter,<EOL>rawlog=line<EOL>)<EOL><DEDENT>selected_data = None<EOL><DEDENT>continue<EOL><DEDENT>selected_data = None<EOL>year = getattr(<EOL>log_data, '<STR_LIT>',<EOL>prev_year if MONTHMAP[log_data.month] != '<STR_LIT>' and file_month == <NUM_LIT:1> else file_year<EOL>)<EOL>event_dt = get_mktime(<EOL>year=year,<EOL>month=log_data.month,<EOL>day=log_data.day,<EOL>ltime=log_data.ltime<EOL>)<EOL>if event_dt is None or event_dt < start_dt:<EOL><INDENT>continue<EOL><DEDENT>elif event_dt > end_dt:<EOL><INDENT>if fstat.st_mtime < event_dt:<EOL><INDENT>logger.error("<STR_LIT>", logfile_name, line_counter)<EOL><DEDENT>logger.warning("<STR_LIT>", line_counter, logfile_name)<EOL>break<EOL><DEDENT>elif time_range is not None and not time_range.between(log_data.ltime):<EOL><INDENT>continue<EOL><DEDENT>elif hosts and not has_host_match(log_data, hosts):<EOL><INDENT>continue<EOL><DEDENT>pattern_matched, match, rawlog = pattern_search(line, patterns)<EOL>if not pattern_matched and not thread:<EOL><INDENT>dispatch_context(filename=logfile_name, line_number=line_counter, rawlog=rawlog)<EOL>continue<EOL><DEDENT>app = log_parser.app or get_app(log_data, apps, apptags, extra_tags)<EOL>if app is None:<EOL><INDENT>continue<EOL><DEDENT>elif use_app_rules:<EOL><INDENT>app_matched, has_full_match, app_thread, output_data = app.match_rules(log_data)<EOL>if not pattern_matched and app_matched and app_thread is None:<EOL><INDENT>continue<EOL><DEDENT>if output_data:<EOL><INDENT>rawlog = name_cache.match_to_string(log_match, log_parser.parser.groupindex, output_data)<EOL><DEDENT>if app_matched:<EOL><INDENT>app.matches += <NUM_LIT:1><EOL>if not has_full_match or select_unparsed:<EOL><INDENT>dispatch_context(<EOL>key=(app, app_thread),<EOL>filename=logfile_name,<EOL>line_number=line_counter,<EOL>rawlog=rawlog<EOL>)<EOL>continue<EOL><DEDENT><DEDENT>else:<EOL><INDENT>app.unparsed += <NUM_LIT:1><EOL>if not select_unparsed:<EOL><INDENT>dispatch_context(<EOL>key=(app, app_thread),<EOL>filename=logfile_name,<EOL>line_number=line_counter,<EOL>rawlog=rawlog<EOL>)<EOL>continue<EOL><DEDENT><DEDENT><DEDENT>selected_data = log_data<EOL>if first_event is None:<EOL><INDENT>first_event = event_dt<EOL>last_event = event_dt<EOL><DEDENT>else:<EOL><INDENT>if first_event > event_dt:<EOL><INDENT>first_event = event_dt<EOL><DEDENT>if last_event < event_dt:<EOL><INDENT>last_event = event_dt<EOL><DEDENT><DEDENT>if pattern_matched:<EOL><INDENT>if max_matches and selected_counter >= max_matches:<EOL><INDENT>break<EOL><DEDENT>selected_counter += <NUM_LIT:1><EOL>if files_with_match:<EOL><INDENT>break<EOL><DEDENT>if register_log_lines:<EOL><INDENT>dispatch_selected(<EOL>key=(app, app_thread),<EOL>filename=logfile_name,<EOL>line_number=line_counter,<EOL>log_data=log_data,<EOL>rawlog=rawlog,<EOL>match=match<EOL>)<EOL><DEDENT><DEDENT>elif register_log_lines and not only_matching:<EOL><INDENT>dispatch_context(<EOL>key=(app, app_thread),<EOL>filename=logfile_name,<EOL>line_number=line_counter,<EOL>rawlog=rawlog<EOL>)<EOL><DEDENT><DEDENT>if display_progress_bar:<EOL><INDENT>progress_bar.redraw(fstat.st_size)<EOL><DEDENT><DEDENT>try:<EOL><INDENT>for key in list(dispatcher.keys()):<EOL><INDENT>dispatcher.flush(key)<EOL><DEDENT><DEDENT>except (NameError, AttributeError):<EOL><INDENT>pass<EOL><DEDENT>if files_with_match and selected_counter or files_with_match is False and not selected_counter:<EOL><INDENT>dispatch_selected(filename=logfile.name)<EOL><DEDENT>elif count:<EOL><INDENT>dispatch_selected(filename=logfile.name, counter=selected_counter)<EOL><DEDENT>return MatcherResult(<EOL>lines=line_counter,<EOL>matches=selected_counter,<EOL>unknown=unknown_counter,<EOL>extra_tags=extra_tags,<EOL>first_event=first_event,<EOL>last_event=last_event<EOL>)<EOL><DEDENT>return process_logfile<EOL>
|
Create a matcher engine.
:return: A matcher function.
|
f9088:m5
|
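The body above is a factory: it binds the dispatcher, the parsers and all matching options in a closure and returns a per-file `process_logfile` function that yields a `MatcherResult`. A deliberately reduced sketch of that shape (pattern counting only; parsing, dispatching and time filtering omitted):

```python
import collections
import re

MatcherResult = collections.namedtuple(
    'MatcherResult', 'lines matches unknown extra_tags first_event last_event')

def create_matcher(patterns, max_count=0, quiet=False):
    """Toy factory: bind the options once, return a per-file closure."""
    max_matches = 1 if quiet else max_count

    def process_logfile(source, encoding='utf-8'):
        lines = matches = 0
        with open(source, 'rb') as logfile:
            for raw in logfile:
                line = raw.decode(encoding)
                lines += 1
                if any(p.search(line) for p in patterns):
                    matches += 1
                    if max_matches and matches >= max_matches:
                        break
        return MatcherResult(lines, matches, 0, {}, None, None)

    return process_logfile

# process_logfile = create_matcher([re.compile('error')], max_count=10)
# result = process_logfile('/var/log/syslog')
```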
def __init__(self, name, pattern, args, filter_keys=None):
|
try:<EOL><INDENT>if not pattern:<EOL><INDENT>raise LogRaptorConfigError('<STR_LIT>' % name)<EOL><DEDENT>self.regexp = re.compile(pattern)<EOL><DEDENT>except RegexpCompileError:<EOL><INDENT>raise LogRaptorConfigError("<STR_LIT>" % name)<EOL><DEDENT>self.name = name<EOL>self.args = args<EOL>self.filter_keys = filter_keys or []<EOL>self.full_match = filter_keys is not None<EOL>self.used_by_report = False<EOL>self.results = dict()<EOL>key_gids = ['<STR_LIT:host>']<EOL>for gid in self.regexp.groupindex:<EOL><INDENT>if gid != '<STR_LIT:host>':<EOL><INDENT>key_gids.append(gid)<EOL><DEDENT><DEDENT>self.key_gids = tuple(key_gids)<EOL>if not self.key_gids:<EOL><INDENT>raise LogRaptorConfigError("<STR_LIT>")<EOL><DEDENT>self._last_idx = None<EOL>
|
Initialize AppRule.
:param name: the configuration option name
:param pattern: the option value that represents the search pattern
:param filter_keys: the filtering keys dictionary if the rule is a filter
|
f9089:c0:m0
|
def add_result(self, values):
|
idx = [values['<STR_LIT:host>']]<EOL>for gid in self.key_gids[<NUM_LIT:1>:]:<EOL><INDENT>idx.append(values[gid])<EOL><DEDENT>idx = tuple(idx)<EOL>try:<EOL><INDENT>self.results[idx] += <NUM_LIT:1><EOL><DEDENT>except KeyError:<EOL><INDENT>self.results[idx] = <NUM_LIT:1><EOL><DEDENT>self._last_idx = idx<EOL>
|
Add a tuple or increment the value of an existing one
in the rule results dictionary.
|
f9089:c0:m2
|
def increase_last(self, k):
|
idx = self._last_idx<EOL>if idx is not None:<EOL><INDENT>self.results[idx] += k<EOL><DEDENT>
|
Increase the last result by k.
|
f9089:c0:m3
|
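The two rows above maintain `AppRule.results`, a dict that maps key tuples (ordered by `key_gids`, with 'host' first) to event counters, plus a `_last_idx` so a later "message repeated N times" line can bump the last counter. A self-contained sketch with assumed gid names:

```python
class RuleResults:
    """Minimal stand-in for AppRule's result bookkeeping."""

    def __init__(self, key_gids):
        self.key_gids = key_gids      # e.g. ('host', 'user'), 'host' always first
        self.results = {}
        self._last_idx = None

    def add_result(self, values):
        # Build the key tuple in key_gids order and bump its counter.
        idx = tuple(values[gid] for gid in self.key_gids)
        self.results[idx] = self.results.get(idx, 0) + 1
        self._last_idx = idx

    def increase_last(self, k):
        # Used for "message repeated N times" lines.
        if self._last_idx is not None:
            self.results[self._last_idx] += k

r = RuleResults(('host', 'user'))
r.add_result({'host': 'mx1', 'user': 'alice'})
r.increase_last(3)
assert r.results[('mx1', 'alice')] == 4
```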
def total_events(self, cond, valfld=None):
|
results = self.results<EOL>if cond == "<STR_LIT:*>" and valfld is None:<EOL><INDENT>return sum(results.values())<EOL><DEDENT>val = self.key_gids.index(valfld) if valfld is not None else None<EOL>if cond == "<STR_LIT:*>":<EOL><INDENT>tot = <NUM_LIT:0><EOL>for key in results:<EOL><INDENT>tot += results[key] * int(key[val])<EOL><DEDENT>return tot<EOL><DEDENT>match = re.search("<STR_LIT>", cond)<EOL>condpos = self.key_gids.index(match.group(<NUM_LIT:1>))<EOL>invert = (match.group(<NUM_LIT:2>) == '<STR_LIT>')<EOL>recond = re.compile(match.group(<NUM_LIT:3>))<EOL>tot = <NUM_LIT:0><EOL>for key in results:<EOL><INDENT>match = recond.search(key[condpos])<EOL>if (not invert and match is not None) or (invert and match is None):<EOL><INDENT>if valfld is None:<EOL><INDENT>tot += results[key]<EOL><DEDENT>else:<EOL><INDENT>tot += results[key] * int(key[val])<EOL><DEDENT><DEDENT><DEDENT>return tot<EOL>
|
Return the total number of events in the rule's result set. A condition
can be provided to select the events to count. If a value field (valfld)
is passed, the function computes the sum taking the product of each value
with the corresponding event counter.
|
f9089:c0:m4
|
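For the `*` condition `total_events` reduces to a plain sum of the counters, and with a value field it weights each counter by the integer found in the named key column. A tiny illustration with assumed key_gids `('host', 'size')`:

```python
results = {
    ('mx1', '100'): 2,   # 2 events, 'size' column is '100'
    ('mx2', '250'): 1,
}
key_gids = ('host', 'size')

# cond == '*' and no value field: plain sum of the event counters.
assert sum(results.values()) == 3

# cond == '*' with valfld='size': sum of counter * int(size).
val = key_gids.index('size')
assert sum(n * int(key[val]) for key, n in results.items()) == 450
```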
def top_events(self, num, valfld, usemax, gid):
|
def classify():<EOL><INDENT>if value is None:<EOL><INDENT>return<EOL><DEDENT>for j in range(num):<EOL><INDENT>if top[j] is None:<EOL><INDENT>top[j] = [tot, [value]]<EOL>break<EOL><DEDENT>elif tot == top[j][<NUM_LIT:0>]:<EOL><INDENT>top[j][<NUM_LIT:1>].append(value)<EOL>break<EOL><DEDENT>elif tot > top[j][<NUM_LIT:0>]:<EOL><INDENT>top.insert(j, [tot, [value]])<EOL>break<EOL><DEDENT><DEDENT><DEDENT>if not self.results:<EOL><INDENT>return []<EOL><DEDENT>results = self.results<EOL>top = [None] * num<EOL>pos = self.key_gids.index(gid)<EOL>if valfld is not None:<EOL><INDENT>val = self.key_gids.index(valfld)<EOL>if usemax:<EOL><INDENT>i = <NUM_LIT:0> <EOL>for key in sorted(results.keys(), key=lambda x: (int(x[val]), x[pos]),<EOL>reverse=True)[:num]:<EOL><INDENT>top[i] = [int(key[val]), [key[pos]]]<EOL>i += <NUM_LIT:1><EOL><DEDENT>return [res for res in top if res is not None]<EOL><DEDENT><DEDENT>value = None<EOL>tot = <NUM_LIT:0><EOL>for key in sorted(results.keys(), key=lambda x: (x[pos])):<EOL><INDENT>if value is None or value != key[pos]:<EOL><INDENT>classify()<EOL>value = key[pos]<EOL>tot = results[key] if valfld is None else results[key] * int(key[val])<EOL>continue<EOL><DEDENT>tot += results[key] if valfld is None else results[key] * int(key[val])<EOL><DEDENT>else:<EOL><INDENT>classify()<EOL><DEDENT>del top[num:]<EOL>return [res for res in top if res is not None]<EOL>
|
Return a list with the top NUM events. Each list element contains
a value, indicating the number of events, and a list of matching
gid values (usernames, email addresses, clients).
Instead of calculating the top sum of occurrences, a value field can
be provided to compute the max of a numeric value field or the sum
of the products of the value field with the event counters.
|
f9089:c0:m5
|
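A simplified sketch of the top-N computation above, assuming key tuples and a gid column as in the previous rows. Unlike the original, ties are not merged into a single `[total, [values...]]` entry here:

```python
def top_events(results, key_gids, num, gid, valfld=None):
    """Simplified: group counters by the gid column, return the top `num`."""
    pos = key_gids.index(gid)
    totals = {}
    for key, counter in results.items():
        weight = counter if valfld is None else counter * int(key[key_gids.index(valfld)])
        totals[key[pos]] = totals.get(key[pos], 0) + weight
    ranked = sorted(totals.items(), key=lambda kv: kv[1], reverse=True)[:num]
    # Same shape as the original: [total, [values...]]
    return [[tot, [value]] for value, tot in ranked]

res = top_events({('mx1', 'a'): 5, ('mx1', 'b'): 2, ('mx2', 'a'): 1},
                 ('host', 'user'), num=2, gid='user')
assert res == [[6, ['a']], [2, ['b']]]
```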
def list_events(self, cond, cols, fields):
|
def insert_row():<EOL><INDENT>"""<STR_LIT>"""<EOL>row = list(row_template)<EOL>j = <NUM_LIT:0><EOL>for n in range(cols):<EOL><INDENT>if row[n] is None:<EOL><INDENT>if j == keylen:<EOL><INDENT>row[n] = tabvalues<EOL><DEDENT>else:<EOL><INDENT>row[n] = tabkey[j]<EOL><DEDENT>j += <NUM_LIT:1><EOL><DEDENT><DEDENT>reslist.append(row)<EOL><DEDENT>if not self.results:<EOL><INDENT>return []<EOL><DEDENT>results = self.results<EOL>pos = [self.key_gids.index(gid) for gid in fields if gid[<NUM_LIT:0>] != '<STR_LIT:">']<EOL>has_cond = cond != "<STR_LIT:*>"<EOL>if has_cond:<EOL><INDENT>match = re.search("<STR_LIT>", cond)<EOL>condpos = self.key_gids.index(match.group(<NUM_LIT:1>))<EOL>invert = (match.group(<NUM_LIT:2>) == '<STR_LIT>')<EOL>recond = re.compile(match.group(<NUM_LIT:3>))<EOL><DEDENT>else:<EOL><INDENT>recond = condpos = None<EOL><DEDENT>row_template = []<EOL>for i in range(cols):<EOL><INDENT>if fields[i][<NUM_LIT:0>] == '<STR_LIT:">':<EOL><INDENT>row_template.append(fields[i].strip('<STR_LIT:">'))<EOL><DEDENT>else:<EOL><INDENT>row_template.append(None)<EOL><DEDENT><DEDENT>keylen = len(pos) - (len(fields) - cols) - <NUM_LIT:1><EOL>tabvalues = dict()<EOL>tabkey = None<EOL>reslist = []<EOL>for key in sorted(results, key=lambda x: x[pos[<NUM_LIT:0>]]):<EOL><INDENT>if has_cond:<EOL><INDENT>try:<EOL><INDENT>match = recond.search(key[condpos])<EOL><DEDENT>except TypeError:<EOL><INDENT>continue<EOL><DEDENT>if ((match is None) and not invert) or ((match is not None) and invert):<EOL><INDENT>continue<EOL><DEDENT><DEDENT>new_tabkey = [key[pos[i]] for i in range(keylen)]<EOL>if tabkey is None:<EOL><INDENT>tabkey = new_tabkey<EOL><DEDENT>elif tabkey != new_tabkey:<EOL><INDENT>insert_row()<EOL>tabvalues = dict()<EOL>tabkey = [key[pos[i]] for i in range(keylen)]<EOL><DEDENT>value = tuple([key[k] for k in pos[keylen:]])<EOL>if value in tabvalues:<EOL><INDENT>tabvalues[value] += results[key]<EOL><DEDENT>else:<EOL><INDENT>tabvalues[value] = results[key]<EOL><DEDENT><DEDENT>if tabvalues:<EOL><INDENT>insert_row()<EOL><DEDENT>return reslist<EOL>
|
Return the list of events, with a specific order and filtered by a condition.
An element of the list is a tuple with three components. The first is the main
attribute (first field). The second is the second field/label, usually a string
that identifies the service. The third is a dictionary with key-tuples composed
of all the other fields, and values indicating the number of associated events.
|
f9089:c0:m6
|
def __init__(self, name, cfgfile, args, logdir, fields, name_cache=None, report=None):
|
logger.debug('<STR_LIT>', name)<EOL>self.name = name <EOL>self.cfgfile = cfgfile <EOL>self.args = args<EOL>self.logdir = logdir<EOL>self.fields = fields<EOL>self.name_cache = name_cache<EOL>self._report = report<EOL>self._thread = args.thread<EOL>self.matches = <NUM_LIT:0> <EOL>self.unparsed = <NUM_LIT:0> <EOL>self._last_rule = None <EOL>self._last_idx = None <EOL>self.config = AppConfig(cfgfiles=cfgfile, appname=name, logdir=logdir)<EOL>self.description = self.config.get('<STR_LIT>', '<STR_LIT:description>')<EOL>self.tags = list(set(re.split('<STR_LIT>', self.config.get('<STR_LIT>', '<STR_LIT>'))))<EOL>self._files = list(set(re.split('<STR_LIT>', self.config.get('<STR_LIT>', '<STR_LIT>'))))<EOL>self.enabled = self.config.getboolean('<STR_LIT>', '<STR_LIT>')<EOL>self.priority = self.config.getint('<STR_LIT>', '<STR_LIT>')<EOL>self.files = field_multisub(self._files, '<STR_LIT:host>', args.hosts or ['<STR_LIT:*>'])<EOL>logger.debug('<STR_LIT>', name, self.tags)<EOL>logger.debug('<STR_LIT>', name, self.files)<EOL>logger.debug('<STR_LIT>', name, self.enabled, self.priority)<EOL>self.rules = self.parse_rules()<EOL>if self._report:<EOL><INDENT>subreports = [sr.name for sr in self._report.subreports]<EOL>self.report_data = [e for e in self.get_report_data() if e.subreport in subreports]<EOL><DEDENT>self.has_filters = any([rule.filter_keys for rule in self.rules])<EOL>if self.has_filters:<EOL><INDENT>self.rules = sorted(self.rules, key=lambda x: x.filter_keys)<EOL>logger.debug('<STR_LIT>', name, len(self.filters))<EOL>logger.debug('<STR_LIT>', name, len(self.rules) - len(self.filters))<EOL><DEDENT>else:<EOL><INDENT>for rule in self.rules:<EOL><INDENT>rule.full_match = True<EOL><DEDENT><DEDENT>logger.info('<STR_LIT>', name, len(self.rules))<EOL>
|
:param name: application name
:param cfgfile: application config file
:param args: cli arguments
:param logdir: Log directory
:param fields: Configured fields
:param name_cache: Optional name cache (--ip-lookup/--uid-lookup/--anonymize options)
:param report: Optional report (--report option)
|
f9089:c1:m0
|
def parse_rules(self):
|
<EOL>try:<EOL><INDENT>rule_options = self.config.items('<STR_LIT>')<EOL><DEDENT>except configparser.NoSectionError:<EOL><INDENT>raise LogRaptorConfigError("<STR_LIT>" % self.name)<EOL><DEDENT>rules = []<EOL>for option, value in rule_options:<EOL><INDENT>pattern = value.replace('<STR_LIT:\n>', '<STR_LIT>') <EOL>if not self.args.filters:<EOL><INDENT>pattern = string.Template(pattern).safe_substitute(self.fields)<EOL>rules.append(AppRule(option, pattern, self.args))<EOL>continue<EOL><DEDENT>for filter_group in self.args.filters:<EOL><INDENT>_pattern, filter_keys = exact_sub(pattern, filter_group)<EOL>_pattern = string.Template(_pattern).safe_substitute(self.fields)<EOL>if len(filter_keys) >= len(filter_group):<EOL><INDENT>rules.append(AppRule(option, _pattern, self.args, filter_keys))<EOL><DEDENT>elif self._thread:<EOL><INDENT>rules.append(AppRule(option, _pattern, self.args))<EOL><DEDENT><DEDENT><DEDENT>return rules<EOL>
|
Add a set of rules to the app, dividing them between filter rules and other rules.
|
f9089:c1:m4
|
def increase_last(self, k):
|
rule = self._last_rule<EOL>if rule is not None:<EOL><INDENT>rule.increase_last(k)<EOL><DEDENT>
|
Increase the counter of the last matched rule by k.
|
f9089:c1:m5
|
def match_rules(self, log_data):
|
for rule in self.rules:<EOL><INDENT>match = rule.regexp.search(log_data.message)<EOL>if match is not None:<EOL><INDENT>gids = rule.regexp.groupindex<EOL>self._last_rule = rule<EOL>if self.name_cache is not None:<EOL><INDENT>values = self.name_cache.match_to_dict(match, rule.key_gids)<EOL>values['<STR_LIT:host>'] = self.name_cache.map_value(log_data.host, '<STR_LIT:host>')<EOL>output_data = {<EOL>'<STR_LIT:host>': values['<STR_LIT:host>'],<EOL>'<STR_LIT:message>': self.name_cache.match_to_string(match, gids, values),<EOL>}<EOL><DEDENT>else:<EOL><INDENT>values = {'<STR_LIT:host>': log_data.host}<EOL>for gid in gids:<EOL><INDENT>values[gid] = match.group(gid)<EOL><DEDENT>output_data = None<EOL><DEDENT>if self._thread and '<STR_LIT>' in rule.regexp.groupindex:<EOL><INDENT>thread = match.group('<STR_LIT>')<EOL>if rule.filter_keys is not None and any([values[key] is None for key in rule.filter_keys]):<EOL><INDENT>return False, None, None, None<EOL><DEDENT>if self._report:<EOL><INDENT>rule.add_result(values)<EOL><DEDENT>return True, rule.full_match, thread, output_data<EOL><DEDENT>else:<EOL><INDENT>if rule.filter_keys is not None and any([values[key] is None for key in rule.filter_keys]):<EOL><INDENT>return False, None, None, None<EOL><DEDENT>elif self._report or (rule.filter_keys is not None or not self.has_filters):<EOL><INDENT>rule.add_result(values)<EOL><DEDENT>return True, rule.full_match, None, output_data<EOL><DEDENT><DEDENT><DEDENT>self._last_rule = None<EOL>return False, None, None, None<EOL>
|
Process a log line data message with the app's pattern rules.
Return a tuple with this data:
Element #0 (app_matched): True if a rule matches, False otherwise;
Element #1 (has_full_match): True if a rule matches and is a filter or the
app has no filters; False if a rule matches but is not a filter;
None otherwise;
Element #2 (app_thread): the thread value if a rule matches and it has a
"thread" group, None otherwise;
Element #3 (output_data): a mapping dictionary if a rule matches and a map
of the output is requested (--anonymize/--ip/--uid options).
|
f9089:c1:m6
|
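The reduced shape of the rule-matching loop above: the first rule whose regex hits the message wins, and its named groups (plus the host) become the `values` dict. The patterns and field names below are invented for illustration:

```python
import re
from collections import namedtuple

LogData = namedtuple('LogData', 'host message')

rules = [re.compile(r'login failed for user (?P<user>\w+)'),
         re.compile(r'session opened for (?P<user>\w+)')]

def match_rules(log_data):
    """Reduced shape of AppRule matching: first rule that hits wins."""
    for rule in rules:
        match = rule.search(log_data.message)
        if match is not None:
            values = {'host': log_data.host}
            for gid in rule.groupindex:
                values[gid] = match.group(gid)
            return True, values
    return False, None

matched, values = match_rules(LogData('mx1', 'login failed for user bob'))
assert matched and values == {'host': 'mx1', 'user': 'bob'}
```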
def send_report(self, report_parts):
|
logger.info('<STR_LIT>')<EOL>report_parts = sorted(<EOL>filter(lambda x: x.fmt in self.formats, report_parts),<EOL>key=lambda x: self.formats.index(x.fmt)<EOL>)<EOL>fmtname = '<STR_LIT>' if len(report_parts) > <NUM_LIT:1> else '<STR_LIT>'<EOL>root_part = MIMEMultipart('<STR_LIT>')<EOL>root_part.preamble = '<STR_LIT>'<EOL>logger.debug('<STR_LIT>')<EOL>for i, text_part in enumerate(report_parts):<EOL><INDENT>attachment_name = fmtname.format(socket.gethostname(), i, text_part.ext)<EOL>attach_part = MIMEText(text_part.text, text_part.ext, '<STR_LIT:utf-8>')<EOL>attach_part.add_header('<STR_LIT>', '<STR_LIT>', filename=attachment_name)<EOL>root_part.attach(attach_part)<EOL><DEDENT>if self.rawlogs:<EOL><INDENT>out = BytesIO()<EOL>do_chunked_gzip(self.rawfh, out, filename=u'<STR_LIT>')<EOL>out.seek(<NUM_LIT:0>, os.SEEK_END)<EOL>size = out.tell()<EOL>if size > self.rawlogs_limit:<EOL><INDENT>logger.warning('<STR_LIT>', size, self.rawlogs_limit)<EOL>logger.warning('<STR_LIT>')<EOL><DEDENT>else:<EOL><INDENT>logger.debug('<STR_LIT>')<EOL>attach_part = MIMEBase('<STR_LIT>', '<STR_LIT>')<EOL>attach_part.set_payload(out.getvalue())<EOL>from email.encoders import encode_base64<EOL>logger.debug('<STR_LIT>')<EOL>encode_base64(attach_part)<EOL>attach_part.add_header('<STR_LIT>', '<STR_LIT>', filename='<STR_LIT>')<EOL>root_part.attach(attach_part)<EOL><DEDENT><DEDENT>if self.gpg_encrypt:<EOL><INDENT>import gpgme<EOL>try:<EOL><INDENT>if self.gpg_keyringdir and os.path.exists(self.gpg_keyringdir):<EOL><INDENT>logger.debug('<STR_LIT>', self.gpg_keyringdir)<EOL>os.environ['<STR_LIT>'] = self.gpg_keyringdir<EOL><DEDENT>cleartext = BytesIO(root_part.as_string().encode())<EOL>ciphertext = BytesIO()<EOL>ctx = gpgme.Context()<EOL>ctx.armor = True<EOL>if self.gpg_recipients:<EOL><INDENT>recipients = [ctx.get_key(recipient) for recipient in self.gpg_recipients]<EOL><DEDENT>else:<EOL><INDENT>recipients = []<EOL>for key in ctx.keylist():<EOL><INDENT>for subkey in key.subkeys:<EOL><INDENT>if subkey.can_encrypt:<EOL><INDENT>logger.debug('<STR_LIT>', subkey.keyid)<EOL>recipients.append(key)<EOL>break<EOL><DEDENT><DEDENT><DEDENT><DEDENT>signers = [ctx.get_key(signer) for signer in self.gpg_signers]<EOL>if signers:<EOL><INDENT>logger.info('<STR_LIT>')<EOL>ctx.signers = signers<EOL>ctx.encrypt_sign(recipients, gpgme.ENCRYPT_ALWAYS_TRUST, cleartext, ciphertext)<EOL><DEDENT>else:<EOL><INDENT>logger.info('<STR_LIT>')<EOL>ctx.encrypt(recipients, gpgme.ENCRYPT_ALWAYS_TRUST, cleartext, ciphertext)<EOL><DEDENT>logger.debug('<STR_LIT>')<EOL>gpg_envelope_part = MIMEMultipart('<STR_LIT>')<EOL>gpg_envelope_part.set_param('<STR_LIT>', '<STR_LIT>', header='<STR_LIT:Content-Type>')<EOL>gpg_envelope_part.preamble = '<STR_LIT>'<EOL>gpg_mime_version_part = MIMEBase('<STR_LIT>', '<STR_LIT>')<EOL>gpg_mime_version_part.add_header('<STR_LIT>', '<STR_LIT>')<EOL>gpg_mime_version_part.set_payload('<STR_LIT>')<EOL>gpg_payload_part = MIMEBase('<STR_LIT>', '<STR_LIT>', name='<STR_LIT>')<EOL>gpg_payload_part.add_header('<STR_LIT>', '<STR_LIT>')<EOL>gpg_payload_part.add_header('<STR_LIT>', '<STR_LIT>', filename='<STR_LIT>')<EOL>gpg_payload_part.set_payload(ciphertext.getvalue())<EOL>gpg_envelope_part.attach(gpg_mime_version_part)<EOL>gpg_envelope_part.attach(gpg_payload_part)<EOL>root_part = gpg_envelope_part<EOL><DEDENT>except ImportError:<EOL><INDENT>logger.error('<STR_LIT>')<EOL>logger.error('<STR_LIT>')<EOL>logger.error('<STR_LIT>')<EOL>return<EOL><DEDENT><DEDENT>root_part['<STR_LIT>'] = formatdate()<EOL>root_part['<STR_LIT>'] = self.email_address<EOL>root_part['<STR_LIT>'] = '<STR_LIT:U+002CU+0020>'.join(self.mailto)<EOL>root_part['<STR_LIT>'] = '<STR_LIT>'.format(<EOL>socket.gethostname(), time.strftime('<STR_LIT>', time.localtime())<EOL>)<EOL>root_part['<STR_LIT>'] = make_msgid()<EOL>root_part['<STR_LIT>'] = u'<STR_LIT>'.format(package_name, __version__)<EOL>mail_message(self.smtp_server, root_part.as_string(), self.email_address, self.mailto)<EOL>print('<STR_LIT>'.format('<STR_LIT:U+002C>'.join(self.mailto)))<EOL>
|
Publish the report by sending it via e-mail.
|
f9091:c4:m6
|
def prune_old(self):
|
path = self.pubdir<EOL>dirmask = self.dirmask<EOL>expire = self.expire<EOL>expire_limit = int(time.time()) - (<NUM_LIT> * expire)<EOL>logger.info('<STR_LIT>', expire)<EOL>if not os.path.isdir(path):<EOL><INDENT>logger.warning('<STR_LIT>', path)<EOL>return<EOL><DEDENT>for entry in os.listdir(path):<EOL><INDENT>logger.debug('<STR_LIT>', entry)<EOL>if os.path.isdir(os.path.join(path, entry)):<EOL><INDENT>try: <EOL><INDENT>stamp = time.mktime(time.strptime(entry, dirmask))<EOL><DEDENT>except ValueError as e:<EOL><INDENT>logger.info('<STR_LIT>', entry, dirmask, e)<EOL>logger.info('<STR_LIT>', entry)<EOL>continue<EOL><DEDENT>if stamp < expire_limit:<EOL><INDENT>shutil.rmtree(os.path.join(path, entry))<EOL>logger.info('<STR_LIT>', entry)<EOL><DEDENT>else:<EOL><INDENT>logger.info('<STR_LIT>', entry)<EOL><DEDENT><DEDENT>else:<EOL><INDENT>logger.info('<STR_LIT>', entry)<EOL><DEDENT><DEDENT>logger.info('<STR_LIT>')<EOL>
|
Removes the directories that are older than a certain date.
|
f9091:c5:m2
|
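A compact sketch of the pruning logic above, assuming the anonymized seconds-per-day factor is 86400 and a hypothetical `%Y-%m-%d` directory mask:

```python
import os
import shutil
import time

def prune_old(path, dirmask='%Y-%m-%d', expire_days=7):
    """Remove subdirectories whose name, parsed with dirmask, is too old."""
    expire_limit = time.time() - 86400 * expire_days
    for entry in os.listdir(path):
        full = os.path.join(path, entry)
        if not os.path.isdir(full):
            continue
        try:
            stamp = time.mktime(time.strptime(entry, dirmask))
        except ValueError:
            continue  # name doesn't match the mask: skip it
        if stamp < expire_limit:
            shutil.rmtree(full)
```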
def send_report(self, report_parts):
|
logger.info('<STR_LIT>')<EOL>report_parts = sorted(<EOL>filter(lambda x: x.fmt in self.formats, report_parts),<EOL>key=lambda x: self.formats.index(x.fmt)<EOL>)<EOL>workdir = os.path.join(self.pubdir, self.dirname)<EOL>if not os.path.isdir(workdir):<EOL><INDENT>try: <EOL><INDENT>os.makedirs(workdir)<EOL><DEDENT>except OSError as e:<EOL><INDENT>logger.error('<STR_LIT>'.format(workdir, e))<EOL>return<EOL><DEDENT><DEDENT>fmtname = '<STR_LIT>' if len(report_parts) > <NUM_LIT:1> else '<STR_LIT>'<EOL>for i, text_part in enumerate(filter(lambda x: x.fmt in self.formats, report_parts)):<EOL><INDENT>filename = fmtname.format(self.filename, i, socket.gethostname(), text_part.ext)<EOL>repfile = os.path.join(workdir, filename)<EOL>logger.info('<STR_LIT>', i, repfile)<EOL>fh = open(repfile, '<STR_LIT:w>')<EOL>fh.write(text_part.text)<EOL>fh.close()<EOL>print('<STR_LIT>' % repfile)<EOL><DEDENT>if self.notify:<EOL><INDENT>logger.info('<STR_LIT>')<EOL>email_address = self.config.get('<STR_LIT>', '<STR_LIT>')<EOL>smtp_server = self.config.get('<STR_LIT>', '<STR_LIT>')<EOL>publoc = os.path.join(self.pubroot, self.dirname)<EOL>eml = MIMEText('<STR_LIT>'.format(publoc))<EOL>eml['<STR_LIT>'] = '<STR_LIT>'.format(<EOL>socket.gethostname(), time.strftime('<STR_LIT>', time.localtime())<EOL>)<EOL>eml['<STR_LIT>'] = formatdate()<EOL>eml['<STR_LIT>'] = email_address<EOL>eml['<STR_LIT>'] = '<STR_LIT:U+002CU+0020>'.join(self.notify)<EOL>eml['<STR_LIT>'] = u'<STR_LIT>'.format(package_name, __version__)<EOL>mail_message(smtp_server, eml.as_string(), email_address, self.notify)<EOL>print('<STR_LIT>'.format('<STR_LIT:U+002C>'.join(self.notify)))<EOL><DEDENT>if self.rawlogs:<EOL><INDENT>logfilename = '<STR_LIT>'.format(self.filename)<EOL>logfile = os.path.join(workdir, '<STR_LIT>'.format(logfilename))<EOL>logger.info('<STR_LIT>', logfilename)<EOL>outfh = open(logfile, '<STR_LIT>')<EOL>do_chunked_gzip(self.rawfh, outfh, logfilename)<EOL>outfh.close()<EOL>print('<STR_LIT>'.format(logfile))<EOL><DEDENT>self.prune_old()<EOL>
|
Publish the report parts to local files. Each report part is a text
with a title and a specific extension. For html and plain text the
report part is unique; for csv the stats and unparsed strings are also
sent as plain text, while the report items are csv texts.
|
f9091:c5:m7
|
def __eq__(self, repitem):
|
if self.function != '<STR_LIT>' or repitem.function != '<STR_LIT>':<EOL><INDENT>return False<EOL><DEDENT>if self.title != repitem.title:<EOL><INDENT>return False<EOL><DEDENT>head1 = re.split('<STR_LIT>', self.headers)<EOL>head2 = re.split('<STR_LIT>', repitem.headers)<EOL>if len(head1) != len(head2):<EOL><INDENT>return False<EOL><DEDENT>for k in range(len(head1)):<EOL><INDENT>if head1[k].strip() != head2[k].strip():<EOL><INDENT>return False<EOL><DEDENT><DEDENT>return True<EOL>
|
Compare two 'table' report items. A True result means the report
items' results are mergeable.
|
f9092:c0:m7
|
def make_text(self, width):
|
def mformat(reslist):<EOL><INDENT>_text = "<STR_LIT>"<EOL>_buffer = reslist[<NUM_LIT:0>]<EOL>for j in range(<NUM_LIT:1>, len(reslist)):<EOL><INDENT>if (_buffer == "<STR_LIT>") or (len(_buffer) + len(reslist[j])) <= (width - len(filling)):<EOL><INDENT>if reslist[j][<NUM_LIT:0>] == '<STR_LIT:[>' and reslist[j][-<NUM_LIT:1>] == '<STR_LIT:]>':<EOL><INDENT>_buffer = '<STR_LIT>'.format(_buffer, reslist[j])<EOL><DEDENT>else:<EOL><INDENT>_buffer = '<STR_LIT>'.format(_buffer, reslist[j])<EOL><DEDENT><DEDENT>else:<EOL><INDENT>_text = '<STR_LIT>'.format(_text, _buffer, filling)<EOL>_buffer = reslist[j]<EOL><DEDENT><DEDENT>_text = '<STR_LIT>'.format(_text, _buffer)<EOL>return _text<EOL><DEDENT>text = '<STR_LIT>'.format(self.title.strip())<EOL>if self.function == '<STR_LIT>':<EOL><INDENT>width1 = max(len(res[<NUM_LIT:0>]) for res in self.results if res is not None)<EOL>for res in self.results:<EOL><INDENT>padding = '<STR_LIT:U+0020>' * (width1 - len(res[<NUM_LIT:0>]) + <NUM_LIT:1>)<EOL>text = '<STR_LIT>'.format(text, res[<NUM_LIT:0>], padding, res[<NUM_LIT:1>])<EOL><DEDENT><DEDENT>elif self.function == '<STR_LIT>':<EOL><INDENT>if self.results[<NUM_LIT:0>] is not None:<EOL><INDENT>width1 = max(len(res[<NUM_LIT:0>]) for res in self.results if res is not None)<EOL>width2 = min([width-width1-<NUM_LIT:4>,<EOL>max(len('<STR_LIT:U+002CU+0020>'.join(res[<NUM_LIT:1>])) for res in self.results if res is not None)])<EOL>text = '<STR_LIT>'.format(text, '<STR_LIT:U+0020>' * width1, self.headers.strip('<STR_LIT:">'))<EOL>text = '<STR_LIT>'.format(text, '<STR_LIT:->' * width1, '<STR_LIT:->' * width2)<EOL>for res in self.results:<EOL><INDENT>if res is not None:<EOL><INDENT>padding = '<STR_LIT:U+0020>' * (width1 - len(res[<NUM_LIT:0>]) + <NUM_LIT:1>)<EOL>filling = '<STR_LIT>'.format('<STR_LIT:U+0020>' * (width1 + <NUM_LIT:1>))<EOL>lastcol = mformat(res[<NUM_LIT:1>])<EOL>text = '<STR_LIT>'.format(text, res[<NUM_LIT:0>], padding, lastcol)<EOL><DEDENT><DEDENT><DEDENT>else:<EOL><INDENT>text = '<STR_LIT>'.format(text, '<STR_LIT:None>')<EOL><DEDENT><DEDENT>elif self.function == '<STR_LIT>':<EOL><INDENT>headers = re.split('<STR_LIT>', self.headers)<EOL>colwidth = []<EOL>for i in range(len(headers)-<NUM_LIT:1>):<EOL><INDENT>colwidth.append(max([len(headers[i]), max(len(res[i]) for res in self.results)]))<EOL><DEDENT>for i in range(len(headers)-<NUM_LIT:1>):<EOL><INDENT>text = '<STR_LIT>'.format(text, headers[i].strip('<STR_LIT:">'), '<STR_LIT:U+0020>' * (colwidth[i]-len(headers[i])+<NUM_LIT:2>))<EOL><DEDENT>text = '<STR_LIT>'.format(text, headers[-<NUM_LIT:1>].strip('<STR_LIT:">'))<EOL>text = '<STR_LIT>'.format(text, '<STR_LIT:->' * (width-<NUM_LIT:1>))<EOL>filling = "<STR_LIT>"<EOL>for i in range(len(headers)-<NUM_LIT:1>):<EOL><INDENT>filling = '<STR_LIT>'.format(filling, '<STR_LIT:U+0020>' * colwidth[i])<EOL><DEDENT>for res in sorted(self.results, key=lambda x: x[<NUM_LIT:0>]):<EOL><INDENT>for i in range(len(headers)-<NUM_LIT:1>):<EOL><INDENT>text = '<STR_LIT>'.format(text, res[i], '<STR_LIT:U+0020>' * (colwidth[i]-len(res[i])))<EOL><DEDENT>lastcol = get_fmt_results(res[-<NUM_LIT:1>], limit=<NUM_LIT:5>)<EOL>text = '<STR_LIT>'.format(text, mformat(lastcol))<EOL><DEDENT><DEDENT>self.text = text<EOL>
|
Make the text representation of a report data element.
|
f9092:c0:m8
|
def make_html(self):
|
html = None<EOL>if self.function == '<STR_LIT>':<EOL><INDENT>html = u'<STR_LIT>''<STR_LIT>''<STR_LIT>'.format(htmlsafe(self.title.strip()), self.color)<EOL>for res in self.results:<EOL><INDENT>html = u'<STR_LIT>''<STR_LIT>'.format(html, res[<NUM_LIT:0>], res[<NUM_LIT:1>])<EOL><DEDENT><DEDENT>elif self.function == '<STR_LIT>':<EOL><INDENT>html = u'<STR_LIT>''<STR_LIT>''<STR_LIT>'.format(htmlsafe(self.title.strip()), self.color)<EOL>if self.results[<NUM_LIT:0>] is not None:<EOL><INDENT>for res in self.results:<EOL><INDENT>if res is not None:<EOL><INDENT>html = u'<STR_LIT>''<STR_LIT>'.format(html, res[<NUM_LIT:0>], '<STR_LIT:U+002CU+0020>'.join(res[<NUM_LIT:1>]))<EOL><DEDENT><DEDENT><DEDENT>else:<EOL><INDENT>html = u'<STR_LIT>'.format(html, "<STR_LIT:None>")<EOL><DEDENT><DEDENT>elif self.function == '<STR_LIT>':<EOL><INDENT>html = u'<STR_LIT>''<STR_LIT>''<STR_LIT>'.format(htmlsafe(self.title.strip()), self.color)<EOL>headers = re.split('<STR_LIT>', self.headers)<EOL>for i in range(len(headers)):<EOL><INDENT>html = '<STR_LIT>''<STR_LIT>'.format(html, headers[i].strip('<STR_LIT:">'))<EOL><DEDENT>html = u'<STR_LIT>'.format(html)<EOL>oddflag = False<EOL>lastval = "<STR_LIT>" <EOL>for res in sorted(self.results, key=lambda x: x[<NUM_LIT:0>]):<EOL><INDENT>if lastval != res[<NUM_LIT:0>]:<EOL><INDENT>oddflag = not oddflag<EOL>if oddflag:<EOL><INDENT>html = u'<STR_LIT>'.format(html)<EOL><DEDENT>else:<EOL><INDENT>html = u'<STR_LIT>'.format(html)<EOL><DEDENT>html = u'<STR_LIT>'.format(html, res[<NUM_LIT:0>])<EOL><DEDENT>else:<EOL><INDENT>if oddflag:<EOL><INDENT>html = u'<STR_LIT>'.format(html)<EOL><DEDENT>else:<EOL><INDENT>html = u'<STR_LIT>'.format(html)<EOL><DEDENT>html = u'<STR_LIT>'.format(html)<EOL><DEDENT>lastval = res[<NUM_LIT:0>]<EOL>for i in range(<NUM_LIT:1>, len(headers)-<NUM_LIT:1>):<EOL><INDENT>html = u'<STR_LIT>'.format(html, res[i])<EOL><DEDENT>lastcol = get_fmt_results(res[-<NUM_LIT:1>], limit=<NUM_LIT:10>, fmt=u'<STR_LIT>')<EOL>if lastcol[-<NUM_LIT:1>].find(u"<STR_LIT>") > -<NUM_LIT:1>:<EOL><INDENT>html = u'<STR_LIT>'.format(html, <NUM_LIT:100>-<NUM_LIT:15>*(len(headers)-<NUM_LIT:1>),<EOL>u'<STR_LIT:U+002CU+0020>'.join(lastcol[:-<NUM_LIT:1>]), lastcol[-<NUM_LIT:1>])<EOL><DEDENT>else:<EOL><INDENT>html = u'<STR_LIT>'.format(html, <NUM_LIT:100>-<NUM_LIT:15>*(len(headers)-<NUM_LIT:1>), u'<STR_LIT:U+002CU+0020>'.join(lastcol))<EOL><DEDENT><DEDENT><DEDENT>self.html = u'<STR_LIT>'.format(html)<EOL>
|
Make the text representation of a report element as html.
|
f9092:c0:m9
|
def make_csv(self):
|
import csv<EOL>try:<EOL><INDENT>from StringIO import StringIO <EOL><DEDENT>except ImportError:<EOL><INDENT>from io import StringIO<EOL><DEDENT>out = StringIO()<EOL>writer = csv.writer(out, delimiter='<STR_LIT:|>', lineterminator='<STR_LIT:\n>', quoting=csv.QUOTE_MINIMAL)<EOL>if self.function == '<STR_LIT>':<EOL><INDENT>writer.writerows(self.results) <EOL><DEDENT>elif self.function == '<STR_LIT>':<EOL><INDENT>rows = [['<STR_LIT>', self.headers.strip('<STR_LIT:">')]]<EOL>if self.results[<NUM_LIT:0>] is not None:<EOL><INDENT>for res in self.results:<EOL><INDENT>if res is not None:<EOL><INDENT>rows.append(tuple([res[<NUM_LIT:0>], '<STR_LIT:U+002C>'.join(res[<NUM_LIT:1>])]))<EOL><DEDENT><DEDENT>writer.writerows(rows)<EOL><DEDENT><DEDENT>elif self.function == '<STR_LIT>':<EOL><INDENT>rows = [[header.strip('<STR_LIT:">') for header in re.split('<STR_LIT>', self.headers)]]<EOL>for res in sorted(self.results, key=lambda x: x[<NUM_LIT:0>]):<EOL><INDENT>row = list(res[:-<NUM_LIT:1>])<EOL>lastcol = get_fmt_results(res[-<NUM_LIT:1>], limit=<NUM_LIT:10>)<EOL>if lastcol[-<NUM_LIT:1>][<NUM_LIT:0>] == '<STR_LIT:[>' and lastcol[-<NUM_LIT:1>][-<NUM_LIT:1>] == '<STR_LIT:]>':<EOL><INDENT>row.append(u'<STR_LIT>'.format(u'<STR_LIT:U+002CU+0020>'.join(lastcol[:-<NUM_LIT:1>]), lastcol[-<NUM_LIT:1>]))<EOL><DEDENT>else:<EOL><INDENT>row.append(u'<STR_LIT:U+002CU+0020>'.join(lastcol))<EOL><DEDENT>rows.append(row)<EOL><DEDENT>writer.writerows(rows)<EOL><DEDENT>self.csv = out.getvalue()<EOL>
|
Get the text representation of a report element as csv.
|
f9092:c0:m10
|
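The csv branch above writes rows with a `|` delimiter and a `\n` line terminator (both visible in the anonymized literals). Minimal demonstration:

```python
import csv
from io import StringIO

out = StringIO()
writer = csv.writer(out, delimiter='|', lineterminator='\n',
                    quoting=csv.QUOTE_MINIMAL)
writer.writerows([['host', 'events'], ['mx1', 12], ['mx2', 3]])
print(out.getvalue())
# host|events
# mx1|12
# mx2|3
```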
def make(self, apps):
|
for (appname, app) in sorted(apps.items(), key=lambda x: (x[<NUM_LIT:1>].priority, x[<NUM_LIT:0>])):<EOL><INDENT>logger.info('<STR_LIT>', appname)<EOL>for report_data in app.report_data:<EOL><INDENT>if report_data.subreport != self.name:<EOL><INDENT>continue<EOL><DEDENT>if report_data.function == '<STR_LIT>':<EOL><INDENT>for opt in report_data:<EOL><INDENT>match = report_data.parse_report_data(opt)<EOL>cond = match.group('<STR_LIT>')<EOL>valfld = match.group('<STR_LIT>')<EOL>unit = match.group('<STR_LIT>')<EOL>itemtitle = match.group('<STR_LIT>').strip('<STR_LIT:">')<EOL>total = report_data.rules[opt].total_events(cond, valfld)<EOL>if total == <NUM_LIT:0>:<EOL><INDENT>continue<EOL><DEDENT>if unit is not None:<EOL><INDENT>total, unit = get_value_unit(total, unit, '<STR_LIT:T>')<EOL>total = '<STR_LIT>'.format(total, unit)<EOL><DEDENT>else:<EOL><INDENT>total = str(total)<EOL><DEDENT>report_data.results.append(tuple([total, itemtitle]))<EOL><DEDENT><DEDENT>elif report_data.function == '<STR_LIT>':<EOL><INDENT>k = int(report_data.topnum)<EOL>for opt in report_data:<EOL><INDENT>match = report_data.parse_report_data(opt)<EOL>valfld = match.group('<STR_LIT>')<EOL>field = match.group('<STR_LIT>')<EOL>usemax = match.group('<STR_LIT>') is None<EOL>toplist = report_data.rules[opt].top_events(k, valfld, usemax, field)<EOL>report_data.results.extend(toplist)<EOL><DEDENT><DEDENT>elif report_data.function == '<STR_LIT>':<EOL><INDENT>cols = len(re.split('<STR_LIT>', report_data.headers))<EOL>for opt in report_data:<EOL><INDENT>match = report_data.parse_report_data(opt)<EOL>cond = match.group('<STR_LIT>')<EOL>fields = re.split('<STR_LIT>', match.group('<STR_LIT>'))<EOL>tablelist = report_data.rules[opt].list_events(cond, cols, fields)<EOL>report_data.results.extend(tablelist)<EOL><DEDENT><DEDENT>if report_data.results:<EOL><INDENT>self.report_data.append(report_data)<EOL><DEDENT><DEDENT><DEDENT>for report_data in self.report_data:<EOL><INDENT>if report_data.function == '<STR_LIT>':<EOL><INDENT>report_data.results = sorted(report_data.results, key=lambda x: x[<NUM_LIT:0>], reverse=True)<EOL>unit = None<EOL>for opt in report_data:<EOL><INDENT>match = report_data.parse_report_data(opt)<EOL>unit = match.group('<STR_LIT>')<EOL>if unit is not None:<EOL><INDENT>break<EOL><DEDENT><DEDENT>for res in report_data.results:<EOL><INDENT>if unit is not None:<EOL><INDENT>v, u = get_value_unit(res[<NUM_LIT:0>], unit, '<STR_LIT:T>')<EOL>res[<NUM_LIT:0>] = '<STR_LIT>'.format(v, u)<EOL><DEDENT>else:<EOL><INDENT>res[<NUM_LIT:0>] = str(res[<NUM_LIT:0>])<EOL><DEDENT><DEDENT><DEDENT><DEDENT>
|
Make subreport items from results.
|
f9092:c1:m3
|
def make_format(self, fmt, width):
|
if not self.report_data:<EOL><INDENT>return<EOL><DEDENT>for data_item in self.report_data:<EOL><INDENT>if data_item.results:<EOL><INDENT>if fmt is None or fmt == '<STR_LIT:text>':<EOL><INDENT>data_item.make_text(width)<EOL><DEDENT>elif fmt == '<STR_LIT:html>':<EOL><INDENT>data_item.make_html()<EOL><DEDENT>elif fmt == '<STR_LIT>':<EOL><INDENT>data_item.make_csv()<EOL><DEDENT><DEDENT><DEDENT>
|
Make subreport text in a specified format
|
f9092:c1:m4
|
def compact_tables(self):
|
items_to_del = set()<EOL>for i in range(len(self.report_data)):<EOL><INDENT>if i in items_to_del:<EOL><INDENT>continue<EOL><DEDENT>if self.report_data[i].function[<NUM_LIT:0>:<NUM_LIT:5>] == '<STR_LIT>':<EOL><INDENT>for j in range(i+<NUM_LIT:1>, len(self.report_data)):<EOL><INDENT>if self.report_data[j].function[<NUM_LIT:0>:<NUM_LIT:5>] == '<STR_LIT>':<EOL><INDENT>if self.report_data[i] == self.report_data[j]:<EOL><INDENT>logger.debug('<STR_LIT>'<EOL>.format(self.report_data[i].title)) <EOL>items_to_del.add(j)<EOL>self.report_data[i].results.extend(self.report_data[j].results)<EOL><DEDENT><DEDENT><DEDENT><DEDENT><DEDENT>if items_to_del:<EOL><INDENT>for i in reversed(sorted(items_to_del, key=lambda x: x)):<EOL><INDENT>self.report_data.pop(i)<EOL><DEDENT><DEDENT>
|
Compact report items of type "table" with the same result type. Report items of type
"table" in the same subreport are merged into one. The data is ordered by the first column.
|
f9092:c1:m5
|
def make(self, apps):
|
for subreport in self.subreports:<EOL><INDENT>logger.debug('<STR_LIT>'.format(subreport.name))<EOL>subreport.make(apps)<EOL><DEDENT>for subreport in self.subreports:<EOL><INDENT>subreport.compact_tables()<EOL><DEDENT>
|
Create the report from application results
|
f9092:c2:m1
|
def get_report_parts(self, apps, formats):
|
for fmt in formats:<EOL><INDENT>width = <NUM_LIT:100> if fmt is not None else tui.get_terminal_size()[<NUM_LIT:0>]<EOL>for sr in self.subreports:<EOL><INDENT>sr.make_format(fmt, width)<EOL><DEDENT><DEDENT>logger.debug('<STR_LIT>')<EOL>value_mapping = {<EOL>'<STR_LIT:title>': self.title,<EOL>'<STR_LIT>': '<STR_LIT:U+002CU+0020>'.join([repr(pattern) for pattern in self.args.patterns]) or None,<EOL>'<STR_LIT>': '<STR_LIT:U+002CU+0020>'.join(self.args.pattern_files) or None,<EOL>'<STR_LIT>': '<STR_LIT:U+002CU+0020>'.join(self.args.hosts) or None,<EOL>'<STR_LIT>': u'<STR_LIT:U+002CU+0020>'.join([<EOL>u'<STR_LIT>' % (app.name, app.matches) for app in apps.values() if app.matches > <NUM_LIT:0><EOL>]),<EOL>'<STR_LIT:version>': __version__<EOL>}<EOL>filters = []<EOL>for flt in self.args.filters:<EOL><INDENT>filters.append('<STR_LIT>'.join(['<STR_LIT>' % (k, v.pattern) for k, v in flt.items()]))<EOL><DEDENT>if filters:<EOL><INDENT>value_mapping['<STR_LIT>'] = '<STR_LIT>'.join(['<STR_LIT>' % item for item in filters])<EOL><DEDENT>else:<EOL><INDENT>value_mapping['<STR_LIT>'] = filters[<NUM_LIT:0>] if filters else None<EOL><DEDENT>value_mapping.update(self.stats)<EOL>report = []<EOL>for fmt in formats:<EOL><INDENT>if fmt == '<STR_LIT:text>':<EOL><INDENT>logger.info('<STR_LIT>')<EOL>report.append(self.make_text_page(value_mapping))<EOL><DEDENT>elif fmt == '<STR_LIT:html>':<EOL><INDENT>logger.info('<STR_LIT>')<EOL>report.append(self.make_html_page(value_mapping))<EOL><DEDENT>elif fmt == '<STR_LIT>':<EOL><INDENT>logger.info('<STR_LIT>')<EOL>report.extend(self.make_csv_tables())<EOL><DEDENT><DEDENT>return report<EOL>
|
Make report item texts in a specified format.
|
f9092:c2:m3
|
def is_empty(self):
|
return not any(self.subreports)<EOL>
|
A report is empty when it has no subreports or when all of its subreports are empty.
|
f9092:c2:m4
|
def set_stats(self, run_stats):
|
self.stats = run_stats.copy()<EOL>self.stats['<STR_LIT>'] = '<STR_LIT:U+002CU+0020>'.join(self.stats['<STR_LIT>'])<EOL>self.stats['<STR_LIT>'] = len(run_stats['<STR_LIT>'])<EOL>self.stats['<STR_LIT>'] = '<STR_LIT:U+002CU+0020>'.join(self.stats['<STR_LIT>'])<EOL>
|
Set run statistics for the report.
|
f9092:c2:m5
|
def make_html_page(self, valumap):
|
logger.info('<STR_LIT>', self.html_template)<EOL>fh = open(self.html_template)<EOL>template = fh.read()<EOL>fh.close()<EOL>parts = []<EOL>for sr in self.subreports:<EOL><INDENT>report_data = [item.html for item in sr.report_data if item.html]<EOL>if report_data:<EOL><INDENT>parts.append('<STR_LIT>'.format(sr.title, sr.reptext))<EOL>parts.extend(report_data)<EOL>parts.append('<STR_LIT>')<EOL><DEDENT><DEDENT>valumap['<STR_LIT>'] = '<STR_LIT:\n>'.join(parts) <EOL>html_page = Template(template).safe_substitute(valumap)<EOL>return TextPart(fmt='<STR_LIT:html>', text=html_page, ext='<STR_LIT:html>')<EOL>
|
Builds the report as an html page, using the template page from file.
|
f9092:c2:m6
|
def make_text_page(self, valumap):
|
logger.info('<STR_LIT>', self.text_template)<EOL>fh = open(self.text_template)<EOL>template = fh.read()<EOL>fh.close()<EOL>parts = []<EOL>for sr in self.subreports:<EOL><INDENT>report_data = [item.text for item in sr.report_data if item.text]<EOL>if report_data:<EOL><INDENT>parts.append('<STR_LIT>'.format(sr.title, '<STR_LIT:*>' * (len(sr.title)+<NUM_LIT:12>)))<EOL>parts.extend(report_data)<EOL><DEDENT><DEDENT>valumap['<STR_LIT>'] = '<STR_LIT:\n>'.join(parts) <EOL>text_page = Template(template).safe_substitute(valumap)<EOL>return TextPart(fmt='<STR_LIT:text>', text=text_page, ext='<STR_LIT>')<EOL>
|
Builds the report as a text page, using the template page from file.
|
f9092:c2:m7
|
def make_csv_tables(self):
|
logger.info('<STR_LIT>')<EOL>report_parts = []<EOL>for sr in self.subreports:<EOL><INDENT>for data_item in sr.report_data:<EOL><INDENT>report_parts.append(TextPart(fmt='<STR_LIT>', text=data_item.csv, ext='<STR_LIT>'))<EOL><DEDENT><DEDENT>return report_parts<EOL>
|
Builds the report as a list of csv tables with titles.
|
f9092:c2:m8
|
def _read_apps(self):
|
apps = {}<EOL>for cfgfile in glob.iglob(os.path.join(self.confdir, '<STR_LIT>')):<EOL><INDENT>name = os.path.basename(cfgfile)[<NUM_LIT:0>:-<NUM_LIT:5>]<EOL>try:<EOL><INDENT>app = AppLogParser(name, cfgfile, self.args, self.logdir,<EOL>self.fields, self.name_cache, self.report)<EOL><DEDENT>except (LogRaptorOptionError, LogRaptorConfigError, LogFormatError) as err:<EOL><INDENT>logger.error('<STR_LIT>', name, err)<EOL><DEDENT>else:<EOL><INDENT>apps[name] = app<EOL><DEDENT><DEDENT>if not apps:<EOL><INDENT>raise LogRaptorConfigError('<STR_LIT>' % self.confdir)<EOL><DEDENT>return apps<EOL>
|
Read the configuration of applications, returning a dictionary.
:return: A dictionary with application names as keys and configuration
objects as values.
|
f9093:c0:m1
|
@property<EOL><INDENT>def filters(self):<DEDENT>
|
return self.args.filters<EOL>
|
Log processor filters.
|
f9093:c0:m2
|
def set_logger(self):
|
<EOL>effective_level = max(logging.DEBUG, logging.CRITICAL - self.args.loglevel * <NUM_LIT:10>)<EOL>logger.setLevel(effective_level)<EOL>if not logger.handlers:<EOL><INDENT>if sys.stdout.isatty():<EOL><INDENT>handler = logging.StreamHandler()<EOL><DEDENT>else:<EOL><INDENT>try:<EOL><INDENT>handler = logging.FileHandler(self.config.get('<STR_LIT>', '<STR_LIT>'))<EOL><DEDENT>except (IOError, OSError, TypeError, AttributeError):<EOL><INDENT>handler = logging.StreamHandler()<EOL><DEDENT><DEDENT>logger.addHandler(handler)<EOL><DEDENT>for handler in logger.handlers:<EOL><INDENT>if effective_level <= logging.DEBUG:<EOL><INDENT>formatter = logging.Formatter("<STR_LIT>")<EOL><DEDENT>else:<EOL><INDENT>formatter = logging.Formatter("<STR_LIT>")<EOL><DEDENT>handler.setLevel(effective_level)<EOL>handler.setFormatter(formatter)<EOL><DEDENT>
|
Set up the lograptor logger with a handler and a formatter. The logging
level is defined by a [0..4] range, where a higher value means more
verbose logging. The loglevel value is mapped to the corresponding
logging module value:
LOG_CRIT=0 (syslog.h value is 2) ==> logging.CRITICAL
LOG_ERR=1 (syslog.h value is 3) ==> logging.ERROR
LOG_WARNING=2 (syslog.h value is 4) ==> logging.WARNING
LOG_INFO=3 (syslog.h value is 6) ==> logging.INFO
LOG_DEBUG=4 (syslog.h value is 7) ==> logging.DEBUG
If stdout is a tty the log is sent to stderr, otherwise it is sent
to the configured logfile.
|
f9093:c0:m9
|
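The level computation is visible in the body above (`logging.CRITICAL - loglevel * 10`, floored at `logging.DEBUG`), and it yields exactly the 0..4 mapping listed in the docstring:

```python
import logging

def effective_level(loglevel):
    """Map the 0..4 CLI verbosity to a logging level (0=CRITICAL .. 4=DEBUG)."""
    return max(logging.DEBUG, logging.CRITICAL - loglevel * 10)

assert effective_level(0) == logging.CRITICAL
assert effective_level(2) == logging.WARNING
assert effective_level(4) == logging.DEBUG
```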
@protected_property<EOL><INDENT>def patterns(self):<DEDENT>
|
<EOL>if not self.args.patterns and not self.args.pattern_files:<EOL><INDENT>try:<EOL><INDENT>self.args.patterns.append(self.args.files.pop(<NUM_LIT:0>))<EOL><DEDENT>except IndexError:<EOL><INDENT>raise LogRaptorArgumentError('<STR_LIT>', '<STR_LIT>')<EOL><DEDENT><DEDENT>patterns = set()<EOL>if self.args.pattern_files:<EOL><INDENT>patterns.update([p.rstrip('<STR_LIT:\n>') for p in fileinput.input(self.args.pattern_files)])<EOL><DEDENT>patterns.update(self.args.patterns)<EOL>logger.debug("<STR_LIT>", patterns)<EOL>if '<STR_LIT>' in patterns:<EOL><INDENT>logger.info("<STR_LIT>")<EOL>return tuple()<EOL><DEDENT>try:<EOL><INDENT>flags = re.IGNORECASE if self.args.case else <NUM_LIT:0> | re.UNICODE<EOL>return tuple([<EOL>re.compile(r'<STR_LIT>' % pat if self.args.word else '<STR_LIT>' % pat, flags=flags)<EOL>for pat in patterns if pat<EOL>])<EOL><DEDENT>except re.error as err:<EOL><INDENT>raise LogRaptorArgumentError('<STR_LIT>' % err)<EOL><DEDENT>
|
A tuple with re.RegexObject objects created from regex pattern arguments.
|
f9093:c0:m11
|
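A sketch of the pattern compilation, with the conditional flags written out with explicit parentheses (in the original the un-parenthesized `A if c else 0 | re.UNICODE` drops `re.UNICODE` from the if-branch, which is harmless on Python 3 where it is the default for str patterns). The `\b...\b` word template is an assumption for the anonymized format string:

```python
import re

def compile_patterns(patterns, ignorecase=False, word=False):
    """Compile search patterns as the property above does, with the
    flag combination made explicit."""
    flags = (re.IGNORECASE if ignorecase else 0) | re.UNICODE
    return tuple(
        re.compile(r'\b%s\b' % pat if word else pat, flags=flags)
        for pat in patterns if pat
    )

pats = compile_patterns(['error', 'fail.*'], ignorecase=True, word=True)
assert pats[0].search('Kernel ERROR detected')
```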
@protected_property<EOL><INDENT>def files(self):<DEDENT>
|
<EOL>if not self.args.files and self.recursive:<EOL><INDENT>return ['<STR_LIT:.>']<EOL><DEDENT>else:<EOL><INDENT>return self.args.files<EOL><DEDENT>
|
A list of input sources. Each item can be a file path, a glob path or a URL.
|
f9093:c0:m12
|
@protected_property<EOL><INDENT>def matcher(self):<DEDENT>
|
if self.args.matcher is None:<EOL><INDENT>return '<STR_LIT>'<EOL><DEDENT>elif self.args.matcher.startswith('<STR_LIT:->'):<EOL><INDENT>matcher = self.args.matcher.strip('<STR_LIT:->').replace('<STR_LIT:->', '<STR_LIT:_>')<EOL><DEDENT>else:<EOL><INDENT>matcher = self.args.matcher<EOL><DEDENT>if matcher not in ['<STR_LIT>', '<STR_LIT>', '<STR_LIT>']:<EOL><INDENT>raise LogRaptorArgumentError('<STR_LIT>', '<STR_LIT>' % matcher)<EOL><DEDENT>return matcher<EOL>
|
Matcher engine: ruled, unruled, unparsed.
|
f9093:c0:m14
|
@property<EOL><INDENT>def time_range(self):<DEDENT>
|
return self.args.time_range<EOL>
|
Selected time range for log matching. If `None` then match always (equivalent to 0:00-23:59).
|
f9093:c0:m16
|
@protected_property<EOL><INDENT>def time_period(self):<DEDENT>
|
if self.args.time_period is None:<EOL><INDENT>if self.args.files or is_pipe(STDIN_FILENO) or is_redirected(STDIN_FILENO):<EOL><INDENT>time_period = (None, None)<EOL><DEDENT>else:<EOL><INDENT>diff = <NUM_LIT> <EOL>time_period = get_datetime_interval(int(time.time()), diff, <NUM_LIT>)<EOL><DEDENT><DEDENT>else:<EOL><INDENT>time_period = self.args.time_period<EOL><DEDENT>logger.debug('<STR_LIT>', time_period)<EOL>return time_period<EOL>
|
Time period that is determined from the arguments --date and --last. It's a 2-tuple with
(<start_datetime>, <end_datetime>) items. An item is `None` if there isn't a limit.
|
f9093:c0:m17
|
@protected_property<EOL><INDENT>def apps(self):<DEDENT>
|
logger.debug("<STR_LIT>")<EOL>enabled = None<EOL>apps = self.args.apps or self._config_apps.keys()<EOL>unknown = set(apps) - set(self._config_apps.keys())<EOL>if unknown:<EOL><INDENT>raise LogRaptorArgumentError("<STR_LIT>", "<STR_LIT>" % list(unknown))<EOL><DEDENT>if apps or enabled is None:<EOL><INDENT>return {k: v for k, v in self._config_apps.items() if k in apps}<EOL><DEDENT>else:<EOL><INDENT>return {k: v for k, v in self._config_apps.items() if k in apps and v.enabled == enabled}<EOL><DEDENT>
|
Dictionary with loaded applications.
|
f9093:c0:m21
|
@protected_property<EOL><INDENT>def apptags(self):<DEDENT>
|
logger.debug("<STR_LIT>")<EOL>apps = self._apps.keys()<EOL>unknown = set(apps)<EOL>unknown.difference_update(self._config_apps.keys())<EOL>if unknown:<EOL><INDENT>raise ValueError("<STR_LIT>" % list(unknown))<EOL><DEDENT>apps = [v for v in self._config_apps.values() if v.name in apps]<EOL>tagmap = {}<EOL>for app in sorted(apps, key=lambda x: (x.priority, x.name)):<EOL><INDENT>for tag in app.tags:<EOL><INDENT>if not tag:<EOL><INDENT>raise LogRaptorConfigError('<STR_LIT>' % app.name)<EOL><DEDENT>try:<EOL><INDENT>tagmap[tag].append(app)<EOL><DEDENT>except KeyError:<EOL><INDENT>tagmap[tag] = [app]<EOL><DEDENT><DEDENT><DEDENT>return tagmap<EOL>
|
Map from log application tags to the matching applications.
|
f9093:c0:m22
|
@protected_property<EOL><INDENT>def channels(self):<DEDENT>
|
try:<EOL><INDENT>return self._channels<EOL><DEDENT>except AttributeError:<EOL><INDENT>logger.debug("<STR_LIT>")<EOL><DEDENT>channels = self.args.channels<EOL>config_channels = [sec.rpartition('<STR_LIT:_>')[<NUM_LIT:0>] for sec in self.config.sections(suffix='<STR_LIT>')]<EOL>unknown = set(channels) - set(config_channels)<EOL>if unknown:<EOL><INDENT>raise ValueError("<STR_LIT>" % list(unknown))<EOL><DEDENT>output_channels = []<EOL>for channel in set(channels):<EOL><INDENT>channel_type = self.config.get('<STR_LIT>' % channel, '<STR_LIT:type>')<EOL>if channel_type == '<STR_LIT>':<EOL><INDENT>output_channels.append(TermChannel(channel, self.args, self.config))<EOL><DEDENT>elif channel_type == '<STR_LIT:file>':<EOL><INDENT>output_channels.append(FileChannel(channel, self.args, self.config))<EOL><DEDENT>elif channel_type == '<STR_LIT>':<EOL><INDENT>output_channels.append(MailChannel(channel, self.args, self.config))<EOL><DEDENT>else:<EOL><INDENT>raise LogRaptorConfigError('<STR_LIT>' % channel_type)<EOL><DEDENT><DEDENT>return output_channels<EOL>
|
Output channels
|
f9093:c0:m24
|
def __call__(self, dispatcher=None, parsers=None):
|
if dispatcher is None:<EOL><INDENT>dispatcher = self.create_dispatcher()<EOL><DEDENT>matcher_engine = self.create_matcher(dispatcher, parsers=parsers)<EOL>dispatcher.open()<EOL>display_progress_bar = sys.stdout.isatty() and all(c.name != '<STR_LIT>' for c in dispatcher.channels)<EOL>logger.info("<STR_LIT>")<EOL>files = []<EOL>lines = matches = unknown = <NUM_LIT:0><EOL>extra_tags = Counter()<EOL>first_event = last_event = None<EOL>if self.args.report:<EOL><INDENT>self.report.cleanup()<EOL><DEDENT>for (source, apps) in self._logmap:<EOL><INDENT>if apps is not None:<EOL><INDENT>logger.info('<STR_LIT>', source, apps)<EOL><DEDENT>else:<EOL><INDENT>if self.args.files:<EOL><INDENT>logger.error("<STR_LIT>", source)<EOL><DEDENT>continue<EOL><DEDENT>try:<EOL><INDENT>for encoding in self._encodings:<EOL><INDENT>try:<EOL><INDENT>result = matcher_engine(source, apps, encoding)<EOL><DEDENT>except UnicodeDecodeError:<EOL><INDENT>if display_progress_bar:<EOL><INDENT>print()<EOL><DEDENT>logger.error("<STR_LIT>", encoding)<EOL>continue<EOL><DEDENT>break<EOL><DEDENT>else:<EOL><INDENT>logger.error("<STR_LIT>" % source)<EOL>continue<EOL><DEDENT>files.append(str(source))<EOL>lines += result.lines<EOL>matches += result.matches<EOL>unknown += result.unknown<EOL>if result.extra_tags:<EOL><INDENT>extra_tags.update(result.extra_tags)<EOL><DEDENT>if result.first_event is not None:<EOL><INDENT>if first_event is None or first_event > result.first_event:<EOL><INDENT>first_event = result.first_event<EOL><DEDENT><DEDENT>if result.last_event is not None:<EOL><INDENT>if last_event is None or last_event < result.last_event:<EOL><INDENT>last_event = result.last_event<EOL><DEDENT><DEDENT><DEDENT>except IOError as msg:<EOL><INDENT>if self.args.loglevel:<EOL><INDENT>logger.error(msg)<EOL><DEDENT><DEDENT><DEDENT>if not files and self._time_period[<NUM_LIT:0>] is not None:<EOL><INDENT>raise FileMissingError("<STR_LIT>".format([<EOL>datetime.datetime.strftime(e, '<STR_LIT>') for e in self._time_period<EOL>]))<EOL><DEDENT>elif not lines:<EOL><INDENT>return False<EOL><DEDENT>try:<EOL><INDENT>first_event = datetime.datetime.fromtimestamp(first_event)<EOL>last_event = datetime.datetime.fromtimestamp(last_event)<EOL><DEDENT>except (TypeError, UnboundLocalError):<EOL><INDENT>first_event = last_event = None<EOL><DEDENT>run_stats = {<EOL>'<STR_LIT>': files,<EOL>'<STR_LIT>': first_event,<EOL>'<STR_LIT>': last_event,<EOL>'<STR_LIT>': matches,<EOL>'<STR_LIT>': lines,<EOL>'<STR_LIT>': unknown,<EOL>'<STR_LIT>': extra_tags,<EOL>}<EOL>if sys.stdout.isatty():<EOL><INDENT>sys.stdout.write('<STR_LIT:\n>')<EOL><DEDENT>if unknown > <NUM_LIT:0>:<EOL><INDENT>logger.error('<STR_LIT>'.format(unknown))<EOL><DEDENT>if extra_tags:<EOL><INDENT>num_lines = sum(extra_tags.values())<EOL>logger.warning(u'<STR_LIT>'.format(num_lines))<EOL>logger.warning(u'<STR_LIT>'.format(dict(extra_tags)))<EOL>if sys.stdout.isatty():<EOL><INDENT>sys.stdout.write('<STR_LIT:\n>')<EOL><DEDENT><DEDENT>if matches > <NUM_LIT:0> and self.report:<EOL><INDENT>self.report.set_stats(run_stats)<EOL>self._report.make(self._apps)<EOL>formats = list(set([fmt for channel in self._channels for fmt in channel.formats]))<EOL>report_parts = self._report.get_report_parts(self._apps, formats)<EOL>dispatcher.send_report(report_parts)<EOL><DEDENT>elif self.args.loglevel and not self.args.quiet:<EOL><INDENT>dispatcher.send_message(self.get_run_summary(run_stats))<EOL><DEDENT>dispatcher.close()<EOL>logger.info("<STR_LIT>" % len(files))<EOL>return matches > <NUM_LIT:0><EOL>
|
Log processing main routine. Iterates over the log files, calling
the internal processing routine for each file.
|
f9093:c0:m26
|
def create_dispatcher(self):
|
before_context = max(self.args.before_context, self.args.context)<EOL>after_context = max(self.args.after_context, self.args.context)<EOL>if self.args.files_with_match is not None or self.args.count or self.args.only_matching or self.args.quiet:<EOL><INDENT>return UnbufferedDispatcher(self._channels)<EOL><DEDENT>elif before_context == <NUM_LIT:0> and after_context == <NUM_LIT:0>:<EOL><INDENT>return UnbufferedDispatcher(self._channels)<EOL><DEDENT>elif self.args.thread:<EOL><INDENT>return ThreadedDispatcher(self._channels, before_context, after_context)<EOL><DEDENT>else:<EOL><INDENT>return LineBufferDispatcher(self._channels, before_context, after_context)<EOL><DEDENT>
|
Return a dispatcher for configured channels.
|
f9093:c0:m27
|
def get_config(self):
|
<EOL>channels = [sect.rsplit('<STR_LIT:_>')[<NUM_LIT:0>] for sect in self.config.sections(suffix='<STR_LIT>')]<EOL>channels.sort()<EOL>disabled_apps = [app for app in self._config_apps.keys() if app not in self._apps]<EOL>return u'<STR_LIT>'.join([<EOL>u"<STR_LIT>" % __package__,<EOL>u"<STR_LIT>" % self.config.cfgfile,<EOL>u"<STR_LIT>" % self.confdir,<EOL>u"<STR_LIT>" % '<STR_LIT:U+002CU+0020>'.join(self._config_apps.keys()),<EOL>u"<STR_LIT>" % '<STR_LIT:U+002CU+0020>'.join(disabled_apps) if disabled_apps else '<STR_LIT>',<EOL>u"<STR_LIT>" % '<STR_LIT:U+002CU+0020>'.join(self.config.options('<STR_LIT>')),<EOL>u"<STR_LIT>" % '<STR_LIT:U+002CU+0020>'.join(channels) if channels else u'<STR_LIT>',<EOL>u"<STR_LIT>" % '<STR_LIT:U+002CU+0020>'.join(<EOL>[section[:-<NUM_LIT:7>] for section in self.config.sections(suffix='<STR_LIT>')]<EOL>),<EOL>'<STR_LIT>'<EOL>])<EOL>
|
Return a formatted text with main configuration parameters.
|
f9093:c0:m29
|
def get_run_summary(self, run_stats):
|
run_stats = run_stats.copy()<EOL>run_stats['<STR_LIT>'] = len(run_stats['<STR_LIT>'])<EOL>summary = [<EOL>u'<STR_LIT>' % __package__,<EOL>u'<STR_LIT>',<EOL>u'<STR_LIT>',<EOL>u'<STR_LIT>',<EOL>]<EOL>if any([app.matches or app.unparsed for app in self.apps.values()]):<EOL><INDENT>if self.matcher == '<STR_LIT>':<EOL><INDENT>summary.append("<STR_LIT>")<EOL>for app in filter(lambda x: x.matches, self.apps.values()):<EOL><INDENT>summary.append(u'<STR_LIT>' % (app.name, app.matches))<EOL><DEDENT><DEDENT>else:<EOL><INDENT>summary.append("<STR_LIT>")<EOL>for app in filter(lambda x: x.matches or x.unparsed, self.apps.values()):<EOL><INDENT>summary.append(u'<STR_LIT>' % (app.name, app.matches, app.unparsed))<EOL><DEDENT><DEDENT><DEDENT>summary.append('<STR_LIT:\n>')<EOL>return '<STR_LIT:\n>'.join(summary) % run_stats<EOL>
|
Produce a text summary from run statistics.
:param run_stats: A dictionary containing run stats
:return: Formatted multiline string
|
f9093:c0:m30
|
def do_chunked_gzip(infh, outfh, filename):
|
import gzip<EOL>gzfh = gzip.GzipFile('<STR_LIT>', mode='<STR_LIT:wb>', fileobj=outfh)<EOL>if infh.closed:<EOL><INDENT>infh = open(infh.name, '<STR_LIT:r>')<EOL><DEDENT>else:<EOL><INDENT>infh.seek(<NUM_LIT:0>)<EOL><DEDENT>readsize = <NUM_LIT:0><EOL>sys.stdout.write('<STR_LIT>'.format(filename))<EOL>if os.stat(infh.name).st_size:<EOL><INDENT>infh.seek(<NUM_LIT:0>)<EOL>progressbar = ProgressBar(sys.stdout, os.stat(infh.name).st_size, "<STR_LIT>")<EOL>while True:<EOL><INDENT>chunk = infh.read(GZIP_CHUNK_SIZE)<EOL>if not chunk:<EOL><INDENT>break<EOL><DEDENT>if sys.version_info[<NUM_LIT:0>] >= <NUM_LIT:3>:<EOL><INDENT>gzfh.write(bytes(chunk, "<STR_LIT:utf-8>"))<EOL><DEDENT>else:<EOL><INDENT>gzfh.write(chunk)<EOL><DEDENT>readsize += len(chunk)<EOL>progressbar.redraw(readsize)<EOL><DEDENT><DEDENT>gzfh.close()<EOL>
|
A memory-friendly way of compressing the data.
|
f9094:m0
|
def mail_message(smtp_server, message, from_address, rcpt_addresses):
|
if smtp_server[<NUM_LIT:0>] == '<STR_LIT:/>':<EOL><INDENT>p = os.popen(smtp_server, '<STR_LIT:w>')<EOL>p.write(message)<EOL>p.close()<EOL><DEDENT>else:<EOL><INDENT>import smtplib<EOL>server = smtplib.SMTP(smtp_server)<EOL>server.sendmail(from_address, rcpt_addresses, message)<EOL>server.quit()<EOL><DEDENT>
|
Send mail using SMTP.
|
f9094:m1
|
def get_value_unit(value, unit, prefix):
|
prefixes = ('<STR_LIT>', '<STR_LIT>', '<STR_LIT:M>', '<STR_LIT>', '<STR_LIT:T>')<EOL>if len(unit):<EOL><INDENT>if unit[:<NUM_LIT:1>] in prefixes:<EOL><INDENT>valprefix = unit[<NUM_LIT:0>] <EOL>unit = unit[<NUM_LIT:1>:]<EOL><DEDENT>else:<EOL><INDENT>valprefix = '<STR_LIT>'<EOL><DEDENT><DEDENT>else:<EOL><INDENT>valprefix = '<STR_LIT>'<EOL><DEDENT>while valprefix != prefix:<EOL><INDENT>uidx = prefixes.index(valprefix)<EOL>if uidx > prefixes.index(prefix):<EOL><INDENT>value *= <NUM_LIT><EOL>valprefix = prefixes[uidx-<NUM_LIT:1>]<EOL><DEDENT>else:<EOL><INDENT>if value < <NUM_LIT>:<EOL><INDENT>return value, '<STR_LIT>'.format(valprefix, unit)<EOL><DEDENT>value = int(round(value/<NUM_LIT>))<EOL>valprefix = prefixes[uidx+<NUM_LIT:1>]<EOL><DEDENT><DEDENT>return value, '<STR_LIT>'.format(valprefix, unit)<EOL>
|
Return a human-readable value with unit specification. Tries to
transform the unit prefix to the one passed as a parameter. When
transforming to a higher prefix, applies nearest-integer rounding.
|
f9094:m2
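A usage sketch for get_value_unit, assuming the masked prefix tuple is ('', 'K', 'M', 'G', 'T') and the masked scaling literal is 1024 (both are hypothetical readings of the anonymized constants):

>>> get_value_unit(2048, 'KiB', 'M')    # scale up: 2048 KiB -> 2 MiB, nearest-integer round
(2, 'MiB')
>>> get_value_unit(3, 'MiB', 'K')       # scale down: 3 MiB -> 3072 KiB
(3072, 'KiB')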
|
def htmlsafe(unsafe):
|
unsafe = unsafe.replace('<STR_LIT:&>', '<STR_LIT>')<EOL>unsafe = unsafe.replace('<STR_LIT:<>', '<STR_LIT>')<EOL>unsafe = unsafe.replace('<STR_LIT:>>', '<STR_LIT>')<EOL>return unsafe<EOL>
|
Escapes all x(ht)ml control characters.
|
f9094:m3
|
def get_fmt_results(results, limit=<NUM_LIT:5>, sep='<STR_LIT>', fmt=None):
|
result_list = []<EOL>for key in sorted(results, key=lambda x: results[x], reverse=True):<EOL><INDENT>if len(result_list) >= limit and results[key] <= <NUM_LIT:1>:<EOL><INDENT>break<EOL><DEDENT>if fmt is not None:<EOL><INDENT>fmtkey = []<EOL>for i in range(len(key)):<EOL><INDENT>if i % <NUM_LIT:2> == <NUM_LIT:1>:<EOL><INDENT>fmtkey.append(fmt.format(key[i]))<EOL><DEDENT>else:<EOL><INDENT>fmtkey.append(key[i])<EOL><DEDENT><DEDENT>result_list.append(u'<STR_LIT>'.format(sep.join(fmtkey), results[key]))<EOL><DEDENT>else:<EOL><INDENT>result_list.append(u'<STR_LIT>'.format(sep.join(key), results[key]))<EOL><DEDENT><DEDENT>else:<EOL><INDENT>return result_list<EOL><DEDENT>if fmt is not None:<EOL><INDENT>result_list.append(fmt.format(u'<STR_LIT>' % (len(results) - len(result_list))))<EOL><DEDENT>else:<EOL><INDENT>result_list.append(u'<STR_LIT>' % (len(results) - len(result_list)))<EOL><DEDENT>return result_list<EOL>
|
Return a list of formatted string representations of a result dictionary.
The elements of the key are joined by a separator string. The result count
is appended after the key between parentheses. A format transformation is
applied to odd elements of the key if a fmt parameter is passed.
|
f9094:m4
|
def safe_expand(template, mapping):
|
for _ in range(len(mapping) + <NUM_LIT:1>):<EOL><INDENT>_template = template<EOL>template = string.Template(template).safe_substitute(mapping)<EOL>if template == _template:<EOL><INDENT>return template<EOL><DEDENT><DEDENT>else:<EOL><INDENT>raise ValueError("<STR_LIT>")<EOL><DEDENT>
|
Safe string template expansion. Raises an error if the provided substitution mapping has circularities.
|
f9094:m7
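A doctest-style sketch of safe_expand: nested placeholders are expanded iteratively until the template stabilizes, and a circular mapping is detected after len(mapping) + 1 passes (the original error message is masked, so only the exception type is shown here):

>>> safe_expand('$greeting', {'greeting': 'hello $name', 'name': 'world'})
'hello world'
>>> safe_expand('$a', {'a': '$b', 'b': '$a'})    # circular mapping -> raises ValueError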
|
def protected_property(func):
|
if func.__name__.startswith('<STR_LIT:_>'):<EOL><INDENT>raise ValueError("<STR_LIT>" % func)<EOL><DEDENT>@property<EOL>@wraps(func)<EOL>def proxy_wrapper(self):<EOL><INDENT>try:<EOL><INDENT>return getattr(self, '<STR_LIT>' % func.__name__)<EOL><DEDENT>except AttributeError:<EOL><INDENT>pass<EOL><DEDENT>return func(self)<EOL><DEDENT>return proxy_wrapper<EOL>
|
Class method decorator that creates a property that returns the protected attribute
or the value returned by the wrapped method, if the protected attribute is not defined.
|
f9094:m11
|
def open_resource(source):
|
try:<EOL><INDENT>return open(source, mode='<STR_LIT:rb>')<EOL><DEDENT>except (IOError, OSError) as err:<EOL><INDENT>try:<EOL><INDENT>resource = urlopen(source)<EOL><DEDENT>except ValueError:<EOL><INDENT>pass<EOL><DEDENT>else:<EOL><INDENT>resource.name = resource.url<EOL>if hasattr(resource, '<STR_LIT>'):<EOL><INDENT>return resource<EOL><DEDENT>else:<EOL><INDENT>return closing(resource)<EOL><DEDENT><DEDENT>raise err<EOL><DEDENT>except TypeError:<EOL><INDENT>if hasattr(source, '<STR_LIT>') and hasattr(source, '<STR_LIT>'):<EOL><INDENT>return source <EOL><DEDENT>raise<EOL><DEDENT>
|
Opens a resource in binary reading mode. Wraps the resource with a
context manager when it doesn't have one.
:param source: a filepath or a URL.
|
f9094:m14
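A minimal usage sketch for open_resource; both branches return something usable in a with statement, whether the source is a local path or a URL (the paths below are illustrative):

# hypothetical sources
with open_resource('/var/log/syslog') as fp:              # plain file, opened in 'rb' mode
    first_line = next(fp)

with open_resource('http://example.com/app.log') as fp:   # URL, wrapped with closing() if needed
    data = fp.read()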
|
def get_terminal_size():
|
import platform<EOL>current_os = platform.system()<EOL>tuple_xy = None<EOL>if current_os == '<STR_LIT>':<EOL><INDENT>tuple_xy = get_windows_terminal_size()<EOL>if tuple_xy is None:<EOL><INDENT>tuple_xy = get_unix_tput_terminal_size() <EOL><DEDENT><DEDENT>elif current_os == '<STR_LIT>' or current_os == '<STR_LIT>' or current_os.startswith('<STR_LIT>'):<EOL><INDENT>tuple_xy = get_unix_ioctl_terminal_size()<EOL><DEDENT>if tuple_xy is None:<EOL><INDENT>tuple_xy = (<NUM_LIT>, <NUM_LIT>) <EOL><DEDENT>return tuple_xy<EOL>
|
Get the terminal size in width and height. Works on Linux, Mac OS X, Windows, Cygwin (Windows).
:return: Returns a 2-tuple with width and height.
|
f9095:m0
|
def get_windows_terminal_size():
|
from ctypes import windll, create_string_buffer<EOL>handle = windll.kernel32.GetStdHandle(-<NUM_LIT:12>)<EOL>try:<EOL><INDENT>csbi = create_string_buffer(<NUM_LIT>)<EOL>res = windll.kernel32.GetConsoleScreenBufferInfo(handle, csbi)<EOL><DEDENT>except (IOError, OSError):<EOL><INDENT>return None<EOL><DEDENT>if res:<EOL><INDENT>import struct<EOL>(bufx, bufy, curx, cury, wattr,<EOL>left, top, right, bottom, maxx, maxy) = struct.unpack("<STR_LIT>", csbi.raw)<EOL>sizex = right - left + <NUM_LIT:1><EOL>sizey = bottom - top + <NUM_LIT:1><EOL>return sizex, sizey<EOL><DEDENT>else:<EOL><INDENT>return None<EOL><DEDENT>
|
Get the terminal size of a Windows OS terminal.
|
f9095:m1
|
def get_unix_tput_terminal_size():
|
import subprocess<EOL>try:<EOL><INDENT>proc = subprocess.Popen(["<STR_LIT>", "<STR_LIT>"], stdin=subprocess.PIPE, stdout=subprocess.PIPE)<EOL>output = proc.communicate(input=None)<EOL>cols = int(output[<NUM_LIT:0>])<EOL>proc = subprocess.Popen(["<STR_LIT>", "<STR_LIT>"], stdin=subprocess.PIPE, stdout=subprocess.PIPE)<EOL>output = proc.communicate(input=None)<EOL>rows = int(output[<NUM_LIT:0>])<EOL>return cols, rows<EOL><DEDENT>except (IOError, OSError):<EOL><INDENT>return None<EOL><DEDENT>
|
Get the terminal size of a UNIX terminal using the tput UNIX command.
Ref: http://stackoverflow.com/questions/263890/how-do-i-find-the-width-height-of-a-terminal-window
|
f9095:m2
|
def get_unix_ioctl_terminal_size():
|
def ioctl_gwinsz(fd):<EOL><INDENT>try:<EOL><INDENT>import fcntl<EOL>import termios<EOL>import struct<EOL>return struct.unpack('<STR_LIT>', fcntl.ioctl(fd, termios.TIOCGWINSZ, '<STR_LIT>'))<EOL><DEDENT>except (IOError, OSError):<EOL><INDENT>return None<EOL><DEDENT><DEDENT>cr = ioctl_gwinsz(<NUM_LIT:0>) or ioctl_gwinsz(<NUM_LIT:1>) or ioctl_gwinsz(<NUM_LIT:2>)<EOL>if not cr:<EOL><INDENT>try:<EOL><INDENT>f = open(os.ctermid())<EOL>cr = ioctl_gwinsz(f.fileno())<EOL>f.close()<EOL><DEDENT>except (IOError, OSError):<EOL><INDENT>pass<EOL><DEDENT><DEDENT>if not cr:<EOL><INDENT>try:<EOL><INDENT>cr = (os.environ['<STR_LIT>'], os.environ['<STR_LIT>'])<EOL><DEDENT>except KeyError:<EOL><INDENT>return None<EOL><DEDENT><DEDENT>return int(cr[<NUM_LIT:1>]), int(cr[<NUM_LIT:0>])<EOL>
|
Get the terminal size of a UNIX terminal using the TIOCGWINSZ ioctl system call.
|
f9095:m3
|
def create_argument_parser():
|
parser = argparse.ArgumentParser(prog='<STR_LIT>', description=__description__, add_help=False)<EOL>parser.usage = """<STR_LIT>"""<EOL>group = parser.add_argument_group("<STR_LIT>")<EOL>group.add_argument(<EOL>"<STR_LIT>", dest="<STR_LIT>", action='<STR_LIT>', default=None, metavar="<STR_LIT>",<EOL>help="<STR_LIT>"<EOL>)<EOL>group.add_argument(<EOL>"<STR_LIT>", dest="<STR_LIT>", default=<NUM_LIT:2>, type=int, metavar="<STR_LIT>", choices=range(<NUM_LIT:5>),<EOL>help="<STR_LIT>"<EOL>"<STR_LIT>"<EOL>)<EOL>group.add_argument('<STR_LIT>', '<STR_LIT>', action='<STR_LIT:version>', version=__version__)<EOL>group.add_argument('<STR_LIT>', action='<STR_LIT>', help="<STR_LIT>")<EOL>group = parser.add_argument_group("<STR_LIT>")<EOL>group.add_argument(<EOL>"<STR_LIT>", "<STR_LIT>", metavar='<STR_LIT>', type=comma_separated_string,<EOL>default=[], help="<STR_LIT>"<EOL>)<EOL>group.add_argument(<EOL>"<STR_LIT>", metavar="<STR_LIT>", type=comma_separated_string,<EOL>default=[], help="<STR_LIT>"<EOL>)<EOL>group.add_argument(<EOL>"<STR_LIT>", "<STR_LIT>", metavar="<STR_LIT>",<EOL>action="<STR_LIT>", dest="<STR_LIT>", type=filter_spec, default=[],<EOL>help="<STR_LIT>"<EOL>)<EOL>group.add_argument(<EOL>"<STR_LIT>", metavar="<STR_LIT>", type=TimeRange, action="<STR_LIT:store>", dest="<STR_LIT>",<EOL>help="<STR_LIT>"<EOL>)<EOL>group.add_argument(<EOL>"<STR_LIT>", metavar="<STR_LIT>", action="<STR_LIT:store>", dest="<STR_LIT>",<EOL>type=date_interval_spec, help="<STR_LIT>"<EOL>)<EOL>group.add_argument(<EOL>"<STR_LIT>", action="<STR_LIT:store>", dest="<STR_LIT>", type=last_period_spec,<EOL>metavar="<STR_LIT>",<EOL>help="<STR_LIT>"<EOL>)<EOL>group = parser.add_argument_group("<STR_LIT>")<EOL>group.add_argument(<EOL>"<STR_LIT>", "<STR_LIT>", dest='<STR_LIT>', action=StoreOptionAction,<EOL>help="<STR_LIT>"<EOL>)<EOL>group.add_argument(<EOL>"<STR_LIT>", "<STR_LIT>", dest='<STR_LIT>', action=StoreOptionAction,<EOL>help="<STR_LIT>"<EOL>)<EOL>group.add_argument(<EOL>"<STR_LIT>", "<STR_LIT>", dest='<STR_LIT>', action=StoreOptionAction,<EOL>help="<STR_LIT>"<EOL>)<EOL>group = parser.add_argument_group("<STR_LIT>")<EOL>group.add_argument(<EOL>"<STR_LIT>", "<STR_LIT>", metavar="<STR_LIT>", dest="<STR_LIT>", default=[],<EOL>action="<STR_LIT>", help="<STR_LIT>"<EOL>)<EOL>group.add_argument(<EOL>"<STR_LIT>", "<STR_LIT>", metavar="<STR_LIT>", dest="<STR_LIT>", default=[],<EOL>action="<STR_LIT>", help="<STR_LIT>"<EOL>)<EOL>group.add_argument(<EOL>"<STR_LIT>", "<STR_LIT>", action="<STR_LIT:store_true>", dest="<STR_LIT>", default=False,<EOL>help="<STR_LIT>"<EOL>)<EOL>group.add_argument(<EOL>"<STR_LIT>", "<STR_LIT>", action="<STR_LIT:store_true>", dest="<STR_LIT>", default=False,<EOL>help="<STR_LIT>"<EOL>)<EOL>group.add_argument(<EOL>"<STR_LIT>", "<STR_LIT>", action="<STR_LIT:store_true>", dest="<STR_LIT>", default=False,<EOL>help="<STR_LIT>"<EOL>)<EOL>group = parser.add_argument_group("<STR_LIT>")<EOL>group.add_argument(<EOL>"<STR_LIT>", default=['<STR_LIT>'], metavar='<STR_LIT>', dest='<STR_LIT>',<EOL>type=comma_separated_string, help="<STR_LIT>"<EOL>)<EOL>group.add_argument(<EOL>"<STR_LIT:-c>", "<STR_LIT>", action="<STR_LIT:store_true>", default=False,<EOL>help="<STR_LIT>"<EOL>)<EOL>group.add_argument(<EOL>"<STR_LIT>", default='<STR_LIT>', nargs='<STR_LIT:?>', choices=['<STR_LIT>', '<STR_LIT>', '<STR_LIT>'],<EOL>help="<STR_LIT>"<EOL>)<EOL>group.add_argument(<EOL>"<STR_LIT>", "<STR_LIT>", action="<STR_LIT>", dest="<STR_LIT>", default=None,<EOL>help="<STR_LIT>"<EOL>)<EOL>group.add_argument(<EOL>"<STR_LIT>", "<STR_LIT>", 
action="<STR_LIT:store_true>", dest="<STR_LIT>",<EOL>help="<STR_LIT>"<EOL>)<EOL>group.add_argument(<EOL>"<STR_LIT>", "<STR_LIT>", metavar='<STR_LIT>', action="<STR_LIT:store>", type=positive_integer, default=<NUM_LIT:0>,<EOL>help="<STR_LIT>"<EOL>)<EOL>group.add_argument(<EOL>"<STR_LIT>", "<STR_LIT>", action="<STR_LIT:store_true>", default=False,<EOL>help="<STR_LIT>"<EOL>)<EOL>group.add_argument(<EOL>"<STR_LIT>", "<STR_LIT>", action="<STR_LIT:store_true>", default=False, help="<STR_LIT>"<EOL>)<EOL>group.add_argument(<EOL>"<STR_LIT>", "<STR_LIT>", action="<STR_LIT>", const=<NUM_LIT:0>, dest='<STR_LIT>',<EOL>help="<STR_LIT>"<EOL>)<EOL>group = parser.add_argument_group("<STR_LIT>")<EOL>group.add_argument(<EOL>"<STR_LIT>", metavar='<STR_LIT>', nargs='<STR_LIT:?>', default=False,<EOL>help="<STR_LIT>"<EOL>)<EOL>group.add_argument(<EOL>"<STR_LIT>", action="<STR_LIT:store_true>", default=False,<EOL>help="<STR_LIT>"<EOL>)<EOL>group.add_argument(<EOL>"<STR_LIT>", action="<STR_LIT:store_true>", default=False,<EOL>help="<STR_LIT>"<EOL>)<EOL>group.add_argument(<EOL>"<STR_LIT>", action="<STR_LIT:store_true>", default=False,<EOL>help="<STR_LIT>"<EOL>)<EOL>group = parser.add_argument_group("<STR_LIT>")<EOL>group.add_argument(<EOL>"<STR_LIT>", "<STR_LIT>", action="<STR_LIT:store_true>", default=False,<EOL>help="<STR_LIT>"<EOL>)<EOL>group.add_argument(<EOL>"<STR_LIT>", "<STR_LIT>", action="<STR_LIT:store_true>", dest="<STR_LIT>", default=None,<EOL>help="<STR_LIT>"<EOL>)<EOL>group.add_argument(<EOL>"<STR_LIT>", "<STR_LIT>", action="<STR_LIT>", dest="<STR_LIT>", default=None,<EOL>help="<STR_LIT>"<EOL>)<EOL>group = parser.add_argument_group("<STR_LIT>")<EOL>group.add_argument(<EOL>"<STR_LIT>", "<STR_LIT>", action="<STR_LIT:store_true>", default=False,<EOL>help="<STR_LIT>"<EOL>)<EOL>group.add_argument(<EOL>"<STR_LIT>", "<STR_LIT>", metavar='<STR_LIT>', type=positive_integer, default=<NUM_LIT:0>,<EOL>help="<STR_LIT>"<EOL>)<EOL>group.add_argument(<EOL>"<STR_LIT>", "<STR_LIT>", metavar='<STR_LIT>', type=positive_integer, default=<NUM_LIT:0>,<EOL>help="<STR_LIT>"<EOL>)<EOL>group.add_argument(<EOL>"<STR_LIT>", "<STR_LIT>", metavar='<STR_LIT>', type=positive_integer, default=<NUM_LIT:0>,<EOL>help="<STR_LIT>"<EOL>)<EOL>group.add_argument(<EOL>"<STR_LIT>", metavar='<STR_LIT>', default='<STR_LIT>',<EOL>help="<STR_LIT>"<EOL>)<EOL>group.add_argument(<EOL>"<STR_LIT>", dest="<STR_LIT>", action="<STR_LIT>", const='<STR_LIT>',<EOL>help="<STR_LIT>"<EOL>)<EOL>group = parser.add_argument_group("<STR_LIT>")<EOL>group.add_argument(<EOL>"<STR_LIT>", "<STR_LIT>", action="<STR_LIT:store_true>", default=False,<EOL>help="<STR_LIT>"<EOL>)<EOL>group.add_argument(<EOL>"<STR_LIT>", "<STR_LIT>", action="<STR_LIT:store_true>", default=False,<EOL>help="<STR_LIT>"<EOL>)<EOL>group.add_argument(<EOL>"<STR_LIT>", metavar='<STR_LIT>', default=[], action="<STR_LIT>",<EOL>help="<STR_LIT>"<EOL>)<EOL>group.add_argument(<EOL>"<STR_LIT>", metavar='<STR_LIT>', default=[], action="<STR_LIT>",<EOL>help="<STR_LIT>"<EOL>)<EOL>group.add_argument(<EOL>"<STR_LIT>", metavar='<STR_LIT>', default=[], action="<STR_LIT>",<EOL>help="<STR_LIT>"<EOL>)<EOL>group.add_argument(<EOL>"<STR_LIT>", metavar='<STR_LIT>', default=[], action="<STR_LIT>",<EOL>help="<STR_LIT>"<EOL>)<EOL>parser.add_argument(<EOL>'<STR_LIT>', metavar='<STR_LIT>', nargs='<STR_LIT:*>',<EOL>help='<STR_LIT>'<EOL>'<STR_LIT>'<EOL>'<STR_LIT>'<EOL>)<EOL>return parser<EOL>
|
Command line options and arguments parsing. This function returns
a list of options and the list of arguments (pattern, filenames).
|
f9096:m5
|
def has_void_args(argv):
|
n_args = len(argv)<EOL>return n_args == <NUM_LIT:1> or n_args == <NUM_LIT:2> and argv[<NUM_LIT:1>].startswith('<STR_LIT>') or n_args == <NUM_LIT:3> and argv[<NUM_LIT:1>] == '<STR_LIT>'<EOL>
|
Check if the command line has no arguments or only the --conf optional argument.
|
f9096:m6
|
def lograptor(files, patterns=None, matcher='<STR_LIT>', cfgfiles=None, apps=None, hosts=None,<EOL>filters=None, time_period=None, time_range=None, case=False, invert=False,<EOL>word=False, files_with_match=None, count=False, quiet=False, max_count=<NUM_LIT:0>,<EOL>only_matching=False, line_number=False, with_filename=None,<EOL>ip_lookup=False, uid_lookup=False, anonymize=False, thread=False,<EOL>before_context=<NUM_LIT:0>, after_context=<NUM_LIT:0>, context=<NUM_LIT:0>):
|
cli_parser = create_argument_parser()<EOL>args = cli_parser.parse_args()<EOL>args.files = files<EOL>args.matcher = matcher<EOL>args.cfgfiles = cfgfiles<EOL>args.time_period = time_period<EOL>args.time_range = time_range<EOL>args.case = case<EOL>args.invert = invert<EOL>args.word = word<EOL>args.files_with_match = files_with_match<EOL>args.count = count<EOL>args.quiet = quiet<EOL>args.max_count = max_count<EOL>args.only_matching = only_matching<EOL>args.line_number = line_number<EOL>args.with_filename = with_filename<EOL>args.anonymize = anonymize<EOL>args.ip_lookup = ip_lookup<EOL>args.uid_lookup = uid_lookup<EOL>args.thread = thread<EOL>args.context = context<EOL>args.after_context = after_context<EOL>args.before_context = before_context<EOL>args.patterns = ['<STR_LIT>'] if patterns is None else patterns<EOL>if apps is not None:<EOL><INDENT>args.apps = apps<EOL><DEDENT>if hosts is not None:<EOL><INDENT>args.hosts = hosts<EOL><DEDENT>if filters is not None:<EOL><INDENT>args.filters = filters<EOL><DEDENT>_lograptor = LogRaptor(args)<EOL>return _lograptor()<EOL>
|
Run lograptor with arguments. Experimental feature for using the log
processor from generic Python scripts. This part is still under development, do not use.
:param files: Input files. Each argument can be a file path or a glob pathname.
:param patterns: Regex patterns, select the log line if at least one pattern matches.
:param matcher: Matcher engine, can be 'ruled' (default), 'unruled' or 'unparsed'.
:param cfgfiles: use a specific configuration file.
:param apps: process the log lines related to a list of applications.
:param hosts: process the log lines related to a list of hosts.
:param filters: process the log lines that match all the conditions for rule's field values.
:param time_range: process the log lines related to a time range.
:param time_period: restrict the search scope to a date or a date interval.
:param case: ignore case distinctions, defaults to `False`.
:param invert: invert the sense of patterns regexp matching.
:param word: force PATTERN to match only whole words.
:param files_with_match: get only names of FILEs containing matches, default is `False`.
:param count: get only a count of matching lines per FILE.
:param quiet: suppress all normal output.
:param max_count: stop after NUM matches.
:param only_matching: get only the part of a line matching PATTERN.
:param line_number: get line number with output lines.
:param with_filename: get or suppress the file name for each match.
:param ip_lookup: translate IP addresses to DNS names.
:param uid_lookup: translate numeric UIDs to usernames.
:param anonymize: anonymize defined rule's fields value.
:param thread: get the lines of logs related to each log line selected.
:param before_context: get NUM lines of leading context for each log line selected.
:param after_context: get NUM lines of trailing context for each log line selected.
:param context: get NUM lines of output context for each log line selected.
:return: `True` if at least one log line matched, `False` otherwise.
|
f9096:m7
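A hedged call sketch for the lograptor() entry point described above; the function is marked as experimental, the file paths and pattern are illustrative, and the import path is an assumption:

from lograptor import lograptor          # hypothetical import path

# count the lines matching 'error' in the syslog files
matched = lograptor(
    files=['/var/log/syslog*'],
    patterns=[r'error'],
    matcher='ruled',
    count=True,
)
print('found matches' if matched else 'no matches')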
|
def _make_spec_file(self):
|
spec_file = setuptools.command.bdist_rpm.bdist_rpm._make_spec_file(self)<EOL>spec_file.append('<STR_LIT>')<EOL>spec_file.append('<STR_LIT>')<EOL>spec_file.append('<STR_LIT>')<EOL>return spec_file<EOL>
|
Customize the spec file by inserting a %config section.
|
f9104:c1:m0
|
def fake_create_redis_pool(fake_pool):
|
async def create_redis_pool(*args, **kwargs):<EOL><INDENT>return fake_pool<EOL><DEDENT>return create_redis_pool<EOL>
|
The original Redis pool has a magic method __await__ to create an exclusive
connection. CoroutineMock sees this method, thinks that the Redis pool
instance is awaitable, and tries to await it.
To avoid this behavior we use this constructor with Mock.side_effect
instead of Mock.return_value.
|
f9108:m0
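A test-setup sketch showing why side_effect is used: the factory returned by fake_create_redis_pool is installed as a coroutine-function side effect, so awaiting the mock returns the fake pool instead of trying to await the pool itself. CoroutineMock is assumed to come from the asynctest package:

from asynctest import CoroutineMock       # assumed test dependency

fake_pool = object()                      # stand-in for a fake Redis pool
mock = CoroutineMock(side_effect=fake_create_redis_pool(fake_pool))
# later, inside a coroutine:
#     pool = await mock('redis://localhost')   # returns fake_pool directly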
|
@lock_timeout.validator<EOL><INDENT>def _validate_lock_timeout(self, attribute, value):<DEDENT>
|
if value <= <NUM_LIT:0>:<EOL><INDENT>raise ValueError("<STR_LIT>")<EOL><DEDENT>
|
Validate that lock_timeout is greater than 0
|
f9109:c0:m1
|
@drift.validator<EOL><INDENT>def _validate_drift(self, attribute, value):<DEDENT>
|
if value <= <NUM_LIT:0>:<EOL><INDENT>raise ValueError("<STR_LIT>")<EOL><DEDENT>
|
Validate that drift is greater than 0
|
f9109:c0:m2
|
@retry_count.validator<EOL><INDENT>def _validate_retry_count(self, attribute, value):<DEDENT>
|
if value < <NUM_LIT:1>:<EOL><INDENT>raise ValueError("<STR_LIT>")<EOL><DEDENT>
|
Validate that retry_count is greater than or equal to 1
|
f9109:c0:m3
|
@retry_delay_min.validator<EOL><INDENT>@retry_delay_max.validator<EOL>def _validate_retry_delay(self, attribute, value):<DEDENT>
|
if value <= <NUM_LIT:0>:<EOL><INDENT>raise ValueError("<STR_LIT>")<EOL><DEDENT>
|
Validate that retry_delay_min and retry_delay_max are greater than 0
|
f9109:c0:m4
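The four validators above use the attrs field-validator decorator style. A standalone sketch of the same pattern, assuming the attrs library (class name, field name, and error message are illustrative):

import attr

@attr.s
class Settings:
    lock_timeout = attr.ib(default=10.0)

    @lock_timeout.validator
    def _check_lock_timeout(self, attribute, value):
        # reject non-positive timeouts, mirroring the masked error message
        if value <= 0:
            raise ValueError('lock_timeout must be greater than 0')

Settings(lock_timeout=5.0)    # ok
# Settings(lock_timeout=0)    # would raise ValueError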
|
async def lock(self, resource):
|
lock_identifier = str(uuid.uuid4())<EOL>error = RuntimeError('<STR_LIT>')<EOL>try:<EOL><INDENT>for n in range(self.retry_count):<EOL><INDENT>self.log.debug('<STR_LIT>',<EOL>resource, n + <NUM_LIT:1>, self.retry_count)<EOL>if n != <NUM_LIT:0>:<EOL><INDENT>delay = random.uniform(self.retry_delay_min,<EOL>self.retry_delay_max)<EOL>await asyncio.sleep(delay)<EOL><DEDENT>try:<EOL><INDENT>elapsed_time = await self.redis.set_lock(resource, lock_identifier)<EOL><DEDENT>except LockError as exc:<EOL><INDENT>error = exc<EOL>continue<EOL><DEDENT>if self.lock_timeout - elapsed_time - self.drift <= <NUM_LIT:0>:<EOL><INDENT>error = LockError('<STR_LIT>')<EOL>self.log.debug('<STR_LIT>',<EOL>resource)<EOL>continue<EOL><DEDENT>error = None<EOL>break<EOL><DEDENT>else:<EOL><INDENT>raise error<EOL><DEDENT><DEDENT>except Exception as exc:<EOL><INDENT>async def cleanup():<EOL><INDENT>self.log.debug('<STR_LIT>', resource)<EOL>with contextlib.suppress(LockError):<EOL><INDENT>await self.redis.unset_lock(resource, lock_identifier)<EOL><DEDENT><DEDENT>asyncio.ensure_future(cleanup())<EOL>raise<EOL><DEDENT>return Lock(self, resource, lock_identifier, valid=True)<EOL>
|
Tries to acquire the lock.
If the lock is correctly acquired, the valid property of
the returned lock is True.
In case of fault the LockError exception will be raised
:param resource: The string identifier of the resource to lock
:return: :class:`aioredlock.Lock`
:raises: LockError in case of fault
|
f9109:c0:m6
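A usage sketch of the locking API described above, assuming the manager class is aioredlock.Aioredlock and that it accepts a list of Redis addresses (the connection details are illustrative):

from aioredlock import Aioredlock, LockError   # assumed public names

async def worker():
    lock_manager = Aioredlock([{'host': 'localhost', 'port': 6379}])
    try:
        lock = await lock_manager.lock('my-resource')   # lock.valid is True on success
        await lock_manager.extend(lock)                 # reset lifetime to lock_timeout
        await lock_manager.unlock(lock)                 # lock.valid becomes False
    except LockError:
        pass                                            # quorum not reached
    finally:
        await lock_manager.destroy()                    # clear redis connections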
|
async def extend(self, lock):
|
self.log.debug('<STR_LIT>', lock.resource)<EOL>if not lock.valid:<EOL><INDENT>raise RuntimeError('<STR_LIT>')<EOL><DEDENT>await self.redis.set_lock(lock.resource, lock.id)<EOL>
|
Tries to reset the lock's lifetime to lock_timeout
In case of fault the LockError exception will be raised
:param lock: :class:`aioredlock.Lock`
:raises: RuntimeError if lock is not valid
:raises: LockError in case of fault
|
f9109:c0:m7
|
async def unlock(self, lock):
|
self.log.debug('<STR_LIT>', lock.resource)<EOL>await self.redis.unset_lock(lock.resource, lock.id)<EOL>lock.valid = False<EOL>
|
Release the lock and set its validity to False if the
lock is successfully released.
In case of fault the LockError exception will be raised
:param lock: :class:`aioredlock.Lock`
:raises: LockError in case of fault
|
f9109:c0:m8
|
async def is_locked(self, resource_or_lock):
|
if isinstance(resource_or_lock, Lock):<EOL><INDENT>resource = resource_or_lock.resource<EOL><DEDENT>elif isinstance(resource_or_lock, str):<EOL><INDENT>resource = resource_or_lock<EOL><DEDENT>else:<EOL><INDENT>raise TypeError(<EOL>'<STR_LIT>'<EOL>'<STR_LIT>', type(resource_or_lock)<EOL>)<EOL><DEDENT>return await self.redis.is_locked(resource)<EOL>
|
Checks if the resource or the lock is locked by any redlock instance.
:param resource_or_lock: resource name or aioredlock.Lock instance
:returns: True if locked else False
|
f9109:c0:m9
|
async def destroy(self):
|
self.log.debug('<STR_LIT>', repr(self))<EOL>await self.redis.clear_connections()<EOL>
|
Clear all the redis connections
|
f9109:c0:m10
|
def __init__(self, connection):
|
self.connection = connection<EOL>self._pool = None<EOL>self._lock = asyncio.Lock()<EOL>self.set_lock_script = re.sub(r'<STR_LIT>', '<STR_LIT>', self.SET_LOCK_SCRIPT, flags=re.M).strip()<EOL>self.unset_lock_script = re.sub(r'<STR_LIT>', '<STR_LIT>', self.UNSET_LOCK_SCRIPT, flags=re.M).strip()<EOL>
|
Redis instance constructor.
The constructor takes a single argument, a redis host address.
The address can be one of the following:
* a dict - {'host': 'localhost', 'port': 6379,
'db': 0, 'password': 'pass'}
all keys except host and port will be passed as kwargs to
the aioredis.create_redis_pool();
* a Redis URI - "redis://host:6379/0?encoding=utf-8";
* a (host, port) tuple - ('localhost', 6379);
* a unix domain socket path string - "/path/to/redis.sock";
* or a redis connection pool.
:param connection: redis host address (dict, tuple or str)
|
f9113:c0:m0
|
@staticmethod<EOL><INDENT>async def _create_redis_pool(*args, **kwargs):<DEDENT>
|
if StrictVersion(aioredis.__version__) >= StrictVersion('<STR_LIT>'): <EOL><INDENT>return await aioredis.create_redis_pool(*args, **kwargs)<EOL><DEDENT>else: <EOL><INDENT>return await aioredis.create_pool(*args, **kwargs)<EOL><DEDENT>
|
Adapter to support both aioredis-0.3.0 and aioredis-1.0.0
For aioredis-1.0.0 and later calls:
aioredis.create_redis_pool(*args, **kwargs)
For aioredis-0.3.0 calls:
aioredis.create_pool(*args, **kwargs)
|
f9113:c0:m3
|
async def connect(self):
|
if isinstance(self.connection, dict):<EOL><INDENT>kwargs = self.connection.copy()<EOL>address = (<EOL>kwargs.pop('<STR_LIT:host>', '<STR_LIT:localhost>'),<EOL>kwargs.pop('<STR_LIT:port>', <NUM_LIT>)<EOL>)<EOL>redis_kwargs = kwargs<EOL><DEDENT>elif isinstance(self.connection, aioredis.Redis):<EOL><INDENT>self._pool = self.connection<EOL><DEDENT>else:<EOL><INDENT>address = self.connection<EOL>redis_kwargs = {}<EOL><DEDENT>if self._pool is None:<EOL><INDENT>async with self._lock:<EOL><INDENT>if self._pool is None:<EOL><INDENT>self.log.debug('<STR_LIT>', repr(self))<EOL>self._pool = await self._create_redis_pool(<EOL>address, **redis_kwargs,<EOL>minsize=<NUM_LIT:1>, maxsize=<NUM_LIT:100>)<EOL><DEDENT><DEDENT><DEDENT>return await self._pool<EOL>
|
Get a connection for this instance
|
f9113:c0:m4
|
async def close(self):
|
if self._pool is not None and not isinstance(self.connection, aioredis.Redis):<EOL><INDENT>self._pool.close()<EOL>await self._pool.wait_closed()<EOL><DEDENT>self._pool = None<EOL>
|
Closes connection and resets pool
|
f9113:c0:m5
|
async def set_lock(self, resource, lock_identifier, lock_timeout):
|
lock_timeout_ms = int(lock_timeout * <NUM_LIT:1000>)<EOL>try:<EOL><INDENT>with await self.connect() as redis:<EOL><INDENT>await redis.eval(<EOL>self.set_lock_script,<EOL>keys=[resource],<EOL>args=[lock_identifier, lock_timeout_ms]<EOL>)<EOL><DEDENT><DEDENT>except aioredis.errors.ReplyError as exc: <EOL><INDENT>self.log.debug('<STR_LIT>',<EOL>resource, repr(self))<EOL>raise LockError('<STR_LIT>') from exc<EOL><DEDENT>except (aioredis.errors.RedisError, OSError) as exc:<EOL><INDENT>self.log.error('<STR_LIT>',<EOL>resource, repr(self), repr(exc))<EOL>raise LockError('<STR_LIT>') from exc<EOL><DEDENT>except asyncio.CancelledError:<EOL><INDENT>self.log.debug('<STR_LIT>',<EOL>resource, repr(self))<EOL>raise<EOL><DEDENT>except Exception as exc:<EOL><INDENT>self.log.exception('<STR_LIT>',<EOL>resource, repr(self))<EOL>raise<EOL><DEDENT>else:<EOL><INDENT>self.log.debug('<STR_LIT>', resource, repr(self))<EOL><DEDENT>
|
Lock this instance and set lock expiration time to lock_timeout
:param resource: redis key to set
:param lock_identifier: unique id of the lock
:param lock_timeout: timeout for lock in seconds
:raises: LockError if lock is not acquired
|
f9113:c0:m6
|
async def unset_lock(self, resource, lock_identifier):
|
try:<EOL><INDENT>with await self.connect() as redis:<EOL><INDENT>await redis.eval(<EOL>self.unset_lock_script,<EOL>keys=[resource],<EOL>args=[lock_identifier]<EOL>)<EOL><DEDENT><DEDENT>except aioredis.errors.ReplyError as exc: <EOL><INDENT>self.log.debug('<STR_LIT>',<EOL>resource, repr(self))<EOL>raise LockError('<STR_LIT>') from exc<EOL><DEDENT>except (aioredis.errors.RedisError, OSError) as exc:<EOL><INDENT>self.log.error('<STR_LIT>',<EOL>resource, repr(self), repr(exc))<EOL>raise LockError('<STR_LIT>') from exc<EOL><DEDENT>except asyncio.CancelledError:<EOL><INDENT>self.log.debug('<STR_LIT>',<EOL>resource, repr(self))<EOL>raise<EOL><DEDENT>except Exception as exc:<EOL><INDENT>self.log.exception('<STR_LIT>',<EOL>resource, repr(self))<EOL>raise<EOL><DEDENT>else:<EOL><INDENT>self.log.debug('<STR_LIT>', resource, repr(self))<EOL><DEDENT>
|
Unlock this instance
:param resource: redis key to set
:param lock_identifier: unique id of the lock
:raises: LockError if the lock resource was acquired with a different lock_identifier
|
f9113:c0:m7
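The UNSET_LOCK_SCRIPT literal is masked in this row. The except branch for aioredis.errors.ReplyError suggests the script signals a mismatched identifier through a Lua error; the classic Redlock compare-and-delete script has that shape. A generic sketch, not necessarily the original script:

UNSET_LOCK_SCRIPT = """
if redis.call('get', KEYS[1]) == ARGV[1] then
    -- only the owner of the lock may delete it
    return redis.call('del', KEYS[1])
else
    return redis.error_reply('lock is held by another identifier')
end
"""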
|
async def is_locked(self, resource):
|
with await self.connect() as redis:<EOL><INDENT>lock_identifier = await redis.get(resource)<EOL><DEDENT>if lock_identifier:<EOL><INDENT>return True<EOL><DEDENT>else:<EOL><INDENT>return False<EOL><DEDENT>
|
Checks if the resource is locked by any redlock instance.
:param resource: The resource string name to check
:returns: True if locked else False
|
f9113:c0:m8
|
async def set_lock(self, resource, lock_identifier):
|
start_time = time.time()<EOL>lock_timeout = self.lock_timeout<EOL>successes = await asyncio.gather(*[<EOL>i.set_lock(resource, lock_identifier, lock_timeout) for<EOL>i in self.instances<EOL>], return_exceptions=True)<EOL>successful_sets = sum(s is None for s in successes)<EOL>elapsed_time = time.time() - start_time<EOL>locked = True if successful_sets >= int(len(self.instances) / <NUM_LIT:2>) + <NUM_LIT:1> else False<EOL>self.log.debug('<STR_LIT>',<EOL>resource, successful_sets, len(self.instances), elapsed_time)<EOL>if not locked:<EOL><INDENT>raise LockError('<STR_LIT>' % resource)<EOL><DEDENT>return elapsed_time<EOL>
|
Tries to set the lock on all the redis instances
:param resource: The resource string name to lock
:param lock_identifier: The id of the lock. A unique string
:return float: The elapsed time it took to lock the instances,
in seconds
:raises: LockError if the lock has not been set on at least (N/2 + 1)
instances
|
f9113:c1:m2
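Both the set and unset paths decide success with the same majority rule, int(N / 2) + 1 acknowledgements out of N instances. A minimal sketch of that arithmetic:

def quorum(n_instances):
    # smallest strict majority: 1 -> 1, 3 -> 2, 5 -> 3
    return int(n_instances / 2) + 1

assert [quorum(n) for n in (1, 2, 3, 4, 5)] == [1, 2, 2, 3, 3]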
|
async def unset_lock(self, resource, lock_identifier):
|
start_time = time.time()<EOL>successes = await asyncio.gather(*[<EOL>i.unset_lock(resource, lock_identifier) for<EOL>i in self.instances<EOL>], return_exceptions=True)<EOL>successful_removes = sum(s is None for s in successes)<EOL>elapsed_time = time.time() - start_time<EOL>unlocked = True if successful_removes >= int(len(self.instances) / <NUM_LIT:2>) + <NUM_LIT:1> else False<EOL>self.log.debug('<STR_LIT>',<EOL>resource, successful_removes, len(self.instances), elapsed_time)<EOL>if not unlocked:<EOL><INDENT>raise LockError('<STR_LIT>')<EOL><DEDENT>return elapsed_time<EOL>
|
Tries to unset the lock on all the redis instances
:param resource: The resource string name to lock
:param lock_identifier: The id of the lock. A unique string
:return float: The elapsed time it took to unlock the instances, in seconds
:raises: LockError if the lock identifier did not match in more than
(N/2 - 1) instances
|
f9113:c1:m3
|
async def is_locked(self, resource):
|
successes = await asyncio.gather(*[<EOL>i.is_locked(resource) for<EOL>i in self.instances<EOL>], return_exceptions=True)<EOL>successful_sets = sum(s is True for s in successes)<EOL>locked = True if successful_sets >= int(len(self.instances) / <NUM_LIT:2>) + <NUM_LIT:1> else False<EOL>return locked<EOL>
|
Checks if the resource is locked by any redlock instance.
:param resource: The resource string name to lock
:returns: True if locked else False
|
f9113:c1:m4
|
def _setup_branchy_tags(self):
|
<EOL>with open('<STR_LIT>', '<STR_LIT:a>') as f:<EOL><INDENT>f.write('<STR_LIT>')<EOL><DEDENT>self.mgr._invoke('<STR_LIT>', '<STR_LIT>', '<STR_LIT>')<EOL>self.mgr._invoke('<STR_LIT>', '<STR_LIT:1.0>')<EOL>self.mgr._invoke('<STR_LIT>', '<STR_LIT:1>')<EOL>with open('<STR_LIT>', '<STR_LIT:a>') as f:<EOL><INDENT>f.write('<STR_LIT>')<EOL><DEDENT>self.mgr._invoke('<STR_LIT>', '<STR_LIT>', '<STR_LIT>')<EOL>self.mgr._invoke('<STR_LIT>', '<STR_LIT>')<EOL>
|
Create two heads, one which has a 1.0 tag and a different one which
has a 1.1 tag.
|
f9117:c1:m8
|
def file_finder(dirname="<STR_LIT:.>"):
|
import distutils.log<EOL>dirname = dirname or '<STR_LIT:.>'<EOL>try:<EOL><INDENT>valid_mgrs = managers.RepoManager.get_valid_managers(dirname)<EOL>valid_mgrs = managers.RepoManager.existing_only(valid_mgrs)<EOL>for mgr in valid_mgrs:<EOL><INDENT>try:<EOL><INDENT>return mgr.find_all_files()<EOL><DEDENT>except Exception:<EOL><INDENT>e = sys.exc_info()[<NUM_LIT:1>]<EOL>distutils.log.warn(<EOL>"<STR_LIT>",<EOL>mgr, e)<EOL><DEDENT><DEDENT><DEDENT>except Exception:<EOL><INDENT>e = sys.exc_info()[<NUM_LIT:1>]<EOL>distutils.log.warn(<EOL>"<STR_LIT>"<EOL>"<STR_LIT>", e)<EOL><DEDENT>return []<EOL>
|
Find the files in ``dirname`` under Mercurial version control
according to the setuptools spec (see
http://peak.telecommunity.com/DevCenter/setuptools#adding-support-for-other-revision-control-systems
).
|
f9123:m0
|
def patch_egg_info(force_hg_version=False):
|
from setuptools.command.egg_info import egg_info<EOL>from pkg_resources import safe_version<EOL>import functools<EOL>orig_ver = egg_info.tagged_version<EOL>@functools.wraps(orig_ver)<EOL>def tagged_version(self):<EOL><INDENT>vcs_param = (<EOL>getattr(self.distribution, '<STR_LIT>', False)<EOL>or getattr(self.distribution, '<STR_LIT>', False)<EOL>)<EOL>using_hg_version = force_hg_version or vcs_param<EOL>if force_hg_version:<EOL><INDENT>egg_info.tagged_version = orig_ver<EOL><DEDENT>if using_hg_version:<EOL><INDENT>result = safe_version(self.distribution.get_version())<EOL><DEDENT>else:<EOL><INDENT>result = orig_ver(self)<EOL><DEDENT>self.tag_build = result<EOL>return result<EOL><DEDENT>egg_info.tagged_version = tagged_version<EOL>
|
A hack to replace egg_info.tagged_version with a wrapped version
that will use the mercurial version if indicated.
`force_hg_version` is used for hgtools itself.
|
f9123:m1
|
def version_calc(dist, attr, value):
|
expected_attrs = '<STR_LIT>', '<STR_LIT>'<EOL>if not value or attr not in expected_attrs:<EOL><INDENT>return<EOL><DEDENT>options = value if isinstance(value, dict) else {}<EOL>dist.metadata.version = calculate_version(options)<EOL>patch_egg_info()<EOL>
|
Handler for parameter to setup(use_vcs_version=value)
attr should be 'use_vcs_version' (also allows use_hg_version for
compatibility).
bool(value) should be true to invoke this plugin.
value may optionally be a dict and supply options to the plugin.
|
f9123:m4
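A setup.py sketch wiring in this handler through the hgtools entry point; use_vcs_version may be True or a dict of plugin options, as the docstring above states (the project name is illustrative):

from setuptools import setup

setup(
    name='myproject',
    use_vcs_version=True,          # or a dict of options for the plugin
    setup_requires=['hgtools'],
)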
|
def find(pred, items):
|
for i, item in enumerate(items):<EOL><INDENT>if pred(item):<EOL><INDENT>return i<EOL><DEDENT><DEDENT>
|
Find the index of the first element in items for which pred returns
True
>>> find(lambda x: x > 3, range(100))
4
>>> find(lambda x: x < -3, range(100)) is None
True
|
f9124:m0
|
def rfind(pred, items):
|
return -find(pred, reversed(items)) - <NUM_LIT:1><EOL>
|
Find the index of the last element in items for which pred returns
True. Returns a negative number useful for indexing from the end
of a list or tuple.
>>> rfind(lambda x: x > 3, [5,4,3,2,1])
-4
|
f9124:m1
|
def reset_less_significant(self, significant_version):
|
def nonzero(x):<EOL><INDENT>return x != <NUM_LIT:0><EOL><DEDENT>version_len = <NUM_LIT:3> <EOL>significant_pos = rfind(nonzero, significant_version.version)<EOL>significant_pos = version_len + significant_pos + <NUM_LIT:1><EOL>self.version = (<EOL>self.version[:significant_pos]<EOL>+ (<NUM_LIT:0>,) * (version_len - significant_pos))<EOL>
|
Reset to zero all version info less significant than the
indicated version.
>>> ver = SummableVersion('3.1.2')
>>> ver.reset_less_significant(SummableVersion('0.1'))
>>> str(ver)
'3.1'
|
f9124:c0:m1
|
def as_number(self):
|
def combine(subver, ver):<EOL><INDENT>return subver / <NUM_LIT:10> + ver<EOL><DEDENT>return reduce(combine, reversed(self.version))<EOL>
|
>>> round(SummableVersion('1.9.3').as_number(), 12)
1.93
|
f9124:c0:m2
|
def get_strict_versions(self):
|
return self.__versions_from_tags(<EOL>tag.tag for tag in self.get_repo_tags()<EOL>)<EOL>
|
Return all version tags that can be represented by a
StrictVersion.
|
f9124:c1:m2
|
def get_tagged_version(self):
|
tags = list(self.get_tags())<EOL>if '<STR_LIT>' in tags and not self.is_modified():<EOL><INDENT>tags = self.get_parent_tags('<STR_LIT>')<EOL><DEDENT>versions = self.__versions_from_tags(tags)<EOL>return self.__best_version(versions)<EOL>
|
Get the version of the local working set as a StrictVersion or
None if no viable tag exists. If the local working set is itself
the tagged commit and the tip and there are no local
modifications, use the tag on the parent changeset.
|
f9124:c1:m3
|
def get_current_version(self, increment=None):
|
ver = (<EOL>self.get_tagged_version()<EOL>or str(self.get_next_version(increment)) + '<STR_LIT>'<EOL>)<EOL>return str(ver)<EOL>
|
Return as a string the version of the current state of the
repository -- a tagged version, if present, or the next version
based on prior tagged releases.
|
f9124:c1:m5
|
def get_next_version(self, increment=None):
|
increment = increment or self.increment<EOL>return self.infer_next_version(self.get_latest_version(), increment)<EOL>
|
Return the next version based on prior tagged releases.
|
f9124:c1:m6
|
@staticmethod<EOL><INDENT>def infer_next_version(last_version, increment):<DEDENT>
|
if last_version is None:<EOL><INDENT>return increment<EOL><DEDENT>last_version = SummableVersion(str(last_version))<EOL>if last_version.prerelease:<EOL><INDENT>last_version.prerelease = None<EOL>return str(last_version)<EOL><DEDENT>increment = SummableVersion(increment)<EOL>sum = last_version + increment<EOL>sum.reset_less_significant(increment)<EOL>return sum<EOL>
|
Given a simple application version (as a StrictVersion),
and an increment (1.0, 0.1, or 0.0.1), guess the next version.
Set up a shorthand for examples
>>> def VM_infer(*params):
... return str(VersionManagement.infer_next_version(*params))
>>> VM_infer('3.2', '0.0.1')
'3.2.1'
>>> VM_infer(StrictVersion('3.2'), '0.0.1')
'3.2.1'
>>> VM_infer('3.2.3', '0.1')
'3.3'
>>> VM_infer('3.1.2', '1.0')
'4.0'
Subversions never increment parent versions
>>> VM_infer('3.0.9', '0.0.1')
'3.0.10'
If it's a prerelease version, just remove the prerelease.
>>> VM_infer('3.1a1', '0.0.1')
'3.1'
If there is no last version, use the increment itself
>>> VM_infer(None, '0.1')
'0.1'
|
f9124:c1:m7
|
def _invoke(self, *params):
|
cmd = [self.exe, '<STR_LIT>', self.location] + list(params)<EOL>with reentry.in_process_context(cmd) as result:<EOL><INDENT>sys.modules['<STR_LIT>'].run()<EOL><DEDENT>stdout = result.stdio.stdout.getvalue()<EOL>stderr = result.stdio.stderr.getvalue()<EOL>if not result.returncode == <NUM_LIT:0>:<EOL><INDENT>raise RuntimeError(stderr.strip() or stdout.strip())<EOL><DEDENT>return stdout.decode('<STR_LIT:utf-8>')<EOL>
|
Run the self.exe command in-process with the supplied params.
|
f9125:c0:m0
|
def _invoke(self, *params):
|
cmd = [self.exe] + list(params)<EOL>proc = subprocess.Popen(<EOL>cmd, stdout=subprocess.PIPE,<EOL>stderr=subprocess.PIPE, cwd=self.location, env=self.env)<EOL>stdout, stderr = proc.communicate()<EOL>if not proc.returncode == <NUM_LIT:0>:<EOL><INDENT>raise RuntimeError(stderr.strip() or stdout.strip())<EOL><DEDENT>return stdout.decode('<STR_LIT:utf-8>')<EOL>
|
Invoke self.exe as a subprocess
|
f9127:c0:m0
|
@property<EOL><INDENT>def env(self):<DEDENT>
|
env = os.environ.copy()<EOL>env.pop('<STR_LIT>', None)<EOL>return env<EOL>
|
Return an environment safe for calling an `hg` subprocess.
Removes MACOSX_DEPLOYMENT_TARGET from the env, as if there's a
mismatch between the local Python environment and the environment
in which `hg` is installed, it will cause an exception. See
https://bitbucket.org/jaraco/hgtools/issue/7 for details.
|
f9127:c1:m0
|
def is_valid(self):
|
return True<EOL>
|
Return True if this is a valid manager for this location.
|
f9128:c0:m1
|
@classmethod<EOL><INDENT>def get_valid_managers(cls, location):<DEDENT>
|
def by_priority_attr(c):<EOL><INDENT>return getattr(c, '<STR_LIT>', <NUM_LIT:0>)<EOL><DEDENT>classes = sorted(<EOL>iter_subclasses(cls), key=by_priority_attr,<EOL>reverse=True)<EOL>all_managers = (c(location) for c in classes)<EOL>return (mgr for mgr in all_managers if mgr.is_valid())<EOL>
|
Get the valid RepoManagers for this location.
|
f9128:c0:m3
|
@staticmethod<EOL><INDENT>def existing_only(managers):<DEDENT>
|
return (mgr for mgr in managers if mgr.find_root())<EOL>
|
Return only those managers that refer to an existing repo
|
f9128:c0:m5
|