| signature | body | docstring | id |
|---|---|---|---|
def collapse_indents(indentation):
|
change_in_level = ind_change(indentation)<EOL>if change_in_level == <NUM_LIT:0>:<EOL><INDENT>indents = "<STR_LIT>"<EOL><DEDENT>elif change_in_level < <NUM_LIT:0>:<EOL><INDENT>indents = closeindent * (-change_in_level)<EOL><DEDENT>else:<EOL><INDENT>indents = openindent * change_in_level<EOL><DEDENT>return indentation.replace(openindent, "<STR_LIT>").replace(closeindent, "<STR_LIT>") + indents<EOL>
|
Removes all openindent-closeindent pairs.
|
f11256:m34
|
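For intuition, here is a minimal plain-Python sketch of the same idea, using made-up single-character sentinels in place of Coconut's real openindent/closeindent markers: matched pairs cancel, and only the net indentation change is re-appended.

```python
# Minimal sketch using made-up single-character sentinels in place of
# Coconut's real openindent/closeindent markers.
OPEN, CLOSE = "\u2402", "\u2403"

def ind_change(s):
    # net indentation change: +1 per open marker, -1 per close marker
    return s.count(OPEN) - s.count(CLOSE)

def collapse_indents(indentation):
    change = ind_change(indentation)
    if change == 0:
        indents = ""
    elif change < 0:
        indents = CLOSE * (-change)
    else:
        indents = OPEN * change
    # strip every marker, then re-append only the net change
    return indentation.replace(OPEN, "").replace(CLOSE, "") + indents

assert collapse_indents(OPEN + "x" + CLOSE) == "x"                # pair cancels
assert collapse_indents(OPEN + OPEN + "x" + CLOSE) == "x" + OPEN  # net +1 kept
```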
def transform(grammar, text):
|
results = []<EOL>intervals = []<EOL>for result, start, stop in all_matches(grammar, text):<EOL><INDENT>if result is not ignore_transform:<EOL><INDENT>internal_assert(isinstance(result, str), "<STR_LIT>", result)<EOL>if start == <NUM_LIT:0> and stop == len(text):<EOL><INDENT>return result<EOL><DEDENT>results.append(result)<EOL>intervals.append((start, stop))<EOL><DEDENT><DEDENT>if not results:<EOL><INDENT>return None<EOL><DEDENT>split_indices = [<NUM_LIT:0>]<EOL>split_indices.extend(start for start, _ in intervals)<EOL>split_indices.extend(stop for _, stop in intervals)<EOL>split_indices.sort()<EOL>split_indices.append(None)<EOL>out = []<EOL>for i in range(len(split_indices) - <NUM_LIT:1>):<EOL><INDENT>if i % <NUM_LIT:2> == <NUM_LIT:0>:<EOL><INDENT>start, stop = split_indices[i], split_indices[i + <NUM_LIT:1>]<EOL>out.append(text[start:stop])<EOL><DEDENT>else:<EOL><INDENT>out.append(results[i // <NUM_LIT:2>])<EOL><DEDENT><DEDENT>if i // <NUM_LIT:2> < len(results) - <NUM_LIT:1>:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", results[i // <NUM_LIT:2> + <NUM_LIT:1>:])<EOL><DEDENT>if stop is not None:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>")<EOL><DEDENT>return "<STR_LIT>".join(out)<EOL>
|
Transform text by replacing matches to grammar.
|
f11256:m35
|
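A simplified stand-in for the interval logic above, assuming the matches are already collected as non-overlapping (result, start, stop) triples in order: even slots of the split are untouched text, odd slots are the replacement results.

```python
# Simplified stand-in for the interval logic, assuming matches are already
# collected as non-overlapping (result, start, stop) triples in order.
def transform(text, replacements):
    if not replacements:
        return None
    results = [r for r, _, _ in replacements]
    split_indices = [0]
    split_indices.extend(start for _, start, _ in replacements)
    split_indices.extend(stop for _, _, stop in replacements)
    split_indices.sort()
    split_indices.append(None)  # None slices to the end of the text
    out = []
    for i in range(len(split_indices) - 1):
        if i % 2 == 0:
            # even slots: untouched text between (or around) the matches
            out.append(text[split_indices[i]:split_indices[i + 1]])
        else:
            # odd slots: the replacement results, in order
            out.append(results[i // 2])
    return "".join(out)

assert transform("abcdef", [("X", 1, 3)]) == "aXdef"
assert transform("abcdef", [("X", 0, 1), ("Y", 3, 4)]) == "XbcYef"
```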
def disable_inside(item, *elems, **kwargs):
|
_invert = kwargs.get("<STR_LIT>", False)<EOL>internal_assert(set(kwargs.keys()) <= set(("<STR_LIT>",)), "<STR_LIT>")<EOL>level = [<NUM_LIT:0>] <EOL>@contextmanager<EOL>def manage_item(self, instring, loc):<EOL><INDENT>level[<NUM_LIT:0>] += <NUM_LIT:1><EOL>try:<EOL><INDENT>yield<EOL><DEDENT>finally:<EOL><INDENT>level[<NUM_LIT:0>] -= <NUM_LIT:1><EOL><DEDENT><DEDENT>yield Wrap(item, manage_item)<EOL>@contextmanager<EOL>def manage_elem(self, instring, loc):<EOL><INDENT>if level[<NUM_LIT:0>] == <NUM_LIT:0> if not _invert else level[<NUM_LIT:0>] > <NUM_LIT:0>:<EOL><INDENT>yield<EOL><DEDENT>else:<EOL><INDENT>raise ParseException(instring, loc, self.errmsg, self)<EOL><DEDENT><DEDENT>for elem in elems:<EOL><INDENT>yield Wrap(elem, manage_elem)<EOL><DEDENT>
|
Prevent elems from matching inside of item.
Returns (item with elem disabled, *new versions of elems).
|
f11256:m36
|
def disable_outside(item, *elems):
|
for wrapped in disable_inside(item, *elems, **{"<STR_LIT>": True}):<EOL><INDENT>yield wrapped<EOL><DEDENT>
|
Prevent elems from matching outside of item.
Returns (item with elem disabled, *new versions of elems).
|
f11256:m37
|
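The level-counter trick behind both functions can be sketched on its own: a shared one-element list lets the closures mutate the nesting depth, and the invert flag turns "only allowed outside item" into "only allowed inside item", which is how disable_outside is expressed in terms of disable_inside. The names below are illustrative, not Coconut's.

```python
from contextlib import contextmanager

def make_guards(_invert=False):
    level = [0]  # one-element list so the closures can mutate shared state

    @contextmanager
    def inside_item():
        level[0] += 1
        try:
            yield
        finally:
            level[0] -= 1

    def elem_allowed():
        # allowed outside item normally; inside item when inverted
        return level[0] > 0 if _invert else level[0] == 0

    return inside_item, elem_allowed

inside_item, elem_allowed = make_guards()
assert elem_allowed()            # outside: allowed
with inside_item():
    assert not elem_allowed()    # inside: disabled
assert elem_allowed()            # back outside: allowed again
```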
def __new__(cls, action, original, loc, tokens, greedy=False, ignore_no_tokens=False, ignore_one_token=False):
|
if ignore_no_tokens and len(tokens) == <NUM_LIT:0>:<EOL><INDENT>return []<EOL><DEDENT>elif ignore_one_token and len(tokens) == <NUM_LIT:1>:<EOL><INDENT>return tokens[<NUM_LIT:0>] <EOL><DEDENT>else:<EOL><INDENT>self = super(ComputationNode, cls).__new__(cls)<EOL>self.action, self.loc, self.tokens = action, loc, tokens<EOL>try:<EOL><INDENT>self.index_of_original = self.list_of_originals.index(original)<EOL><DEDENT>except ValueError:<EOL><INDENT>self.index_of_original = len(self.list_of_originals)<EOL>self.list_of_originals.append(original)<EOL><DEDENT>if DEVELOP:<EOL><INDENT>self.been_called = False<EOL><DEDENT>if greedy:<EOL><INDENT>return self.evaluate()<EOL><DEDENT>else:<EOL><INDENT>return self<EOL><DEDENT><DEDENT>
|
Create a ComputationNode to return from a parse action.
If greedy, then never defer the action until later.
If ignore_no_tokens, then don't call the action if there are no tokens.
If ignore_one_token, then don't call the action if there is only one token.
|
f11256:c0:m0
|
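A stripped-down sketch of the deferral pattern, using a hypothetical DeferredNode class: the action and tokens are stored now and run later via evaluate(), unless greedy mode evaluates immediately inside __new__.

```python
# Hypothetical DeferredNode illustrating the deferral pattern: store
# (action, tokens) now, run them later, unless greedy evaluates right away.
class DeferredNode:
    def __new__(cls, action, tokens, greedy=False):
        self = super().__new__(cls)
        self.action, self.tokens = action, tokens
        # greedy mode never defers: evaluate and return the result directly
        return self.evaluate() if greedy else self

    def evaluate(self):
        # the real computation graph evaluates child nodes first
        return self.action(self.tokens)

node = DeferredNode(lambda toks: "+".join(toks), ["a", "b"])
assert node.evaluate() == "a+b"
assert DeferredNode(lambda toks: len(toks), ["a"], greedy=True) == 1
```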
@property<EOL><INDENT>def original(self):<DEDENT>
|
return self.list_of_originals[self.index_of_original]<EOL>
|
Get the original from the originals memo.
|
f11256:c0:m1
|
@property<EOL><INDENT>def name(self):<DEDENT>
|
name = getattr(self.action, "<STR_LIT>", None)<EOL>return name if name is not None else ascii(self.action)<EOL>
|
Get the name of the action.
|
f11256:c0:m2
|
def evaluate(self):
|
if DEVELOP:<EOL><INDENT>internal_assert(not self.been_called, "<STR_LIT>" + self.name + "<STR_LIT>", self.tokens)<EOL>self.been_called = True<EOL><DEDENT>evaluated_toks = evaluate_tokens(self.tokens)<EOL>if logger.tracing: <EOL><INDENT>logger.log_trace(self.name, self.original, self.loc, evaluated_toks, self.tokens)<EOL><DEDENT>try:<EOL><INDENT>return _trim_arity(self.action)(<EOL>self.original,<EOL>self.loc,<EOL>evaluated_toks,<EOL>)<EOL><DEDENT>except CoconutException:<EOL><INDENT>raise<EOL><DEDENT>except (Exception, AssertionError):<EOL><INDENT>traceback.print_exc()<EOL>raise CoconutInternalException("<STR_LIT>" + self.name + "<STR_LIT>", evaluated_toks)<EOL><DEDENT>
|
Get the result of evaluating the computation graph at this node.
|
f11256:c0:m3
|
def __repr__(self):
|
inner_repr = "<STR_LIT:\n>".join("<STR_LIT:\t>" + line for line in repr(self.tokens).splitlines())<EOL>return self.name + "<STR_LIT>" + inner_repr + "<STR_LIT>"<EOL>
|
Get a representation of the entire computation graph below this node.
|
f11256:c0:m4
|
def _combine(self, original, loc, tokens):
|
combined_tokens = super(CombineNode, self).postParse(original, loc, tokens)<EOL>internal_assert(len(combined_tokens) == <NUM_LIT:1>, "<STR_LIT>", combined_tokens)<EOL>return combined_tokens[<NUM_LIT:0>]<EOL>
|
Implement the parse action for Combine.
|
f11256:c1:m0
|
def postParse(self, original, loc, tokens):
|
return ComputationNode(self._combine, original, loc, tokens, ignore_no_tokens=True, ignore_one_token=True)<EOL>
|
Create a ComputationNode for Combine.
|
f11256:c1:m1
|
def parseImpl(self, instring, loc, *args, **kwargs):
|
with self.wrapper(self, instring, loc):<EOL><INDENT>return super(Wrap, self).parseImpl(instring, loc, *args, **kwargs)<EOL><DEDENT>
|
Wrapper around ParseElementEnhance.parseImpl.
|
f11256:c2:m1
|
def split_function_call(tokens, loc):
|
pos_args = []<EOL>star_args = []<EOL>kwd_args = []<EOL>dubstar_args = []<EOL>for arg in tokens:<EOL><INDENT>argstr = "<STR_LIT>".join(arg)<EOL>if len(arg) == <NUM_LIT:1>:<EOL><INDENT>if star_args or kwd_args or dubstar_args:<EOL><INDENT>raise CoconutDeferredSyntaxError("<STR_LIT>", loc)<EOL><DEDENT>pos_args.append(argstr)<EOL><DEDENT>elif len(arg) == <NUM_LIT:2>:<EOL><INDENT>if arg[<NUM_LIT:0>] == "<STR_LIT:*>":<EOL><INDENT>if kwd_args or dubstar_args:<EOL><INDENT>raise CoconutDeferredSyntaxError("<STR_LIT>", loc)<EOL><DEDENT>star_args.append(argstr)<EOL><DEDENT>elif arg[<NUM_LIT:0>] == "<STR_LIT>":<EOL><INDENT>dubstar_args.append(argstr)<EOL><DEDENT>else:<EOL><INDENT>kwd_args.append(argstr)<EOL><DEDENT><DEDENT>else:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", arg)<EOL><DEDENT><DEDENT>return pos_args, star_args, kwd_args, dubstar_args<EOL>
|
Split into positional arguments and keyword arguments.
|
f11257:m0
|
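The classification logic reads well as plain Python; this sketch assumes each token is a short list such as ["1"], ["*", "xs"], ["**", "d"], or ["k", "=", "2"], and enforces the same ordering rules (no positional argument after a star or keyword argument).

```python
# Plain-Python sketch of the classification; each token is assumed to be a
# short list such as ["1"], ["*", "xs"], ["**", "d"], or ["k", "=", "2"].
def split_call(args):
    pos_args, star_args, kwd_args, dubstar_args = [], [], [], []
    for arg in args:
        argstr = "".join(arg)
        if len(arg) == 1:
            if star_args or kwd_args or dubstar_args:
                raise SyntaxError("positional arg after star/keyword arg")
            pos_args.append(argstr)
        elif arg[0] == "*":
            if kwd_args or dubstar_args:
                raise SyntaxError("*args after keyword arg")
            star_args.append(argstr)
        elif arg[0] == "**":
            dubstar_args.append(argstr)
        else:
            kwd_args.append(argstr)
    return pos_args, star_args, kwd_args, dubstar_args

assert split_call([["1"], ["*", "xs"], ["k", "=", "2"], ["**", "d"]]) == (
    ["1"], ["*xs"], ["k=2"], ["**d"]
)
```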
def attrgetter_atom_split(tokens):
|
if len(tokens) == <NUM_LIT:1>: <EOL><INDENT>return tokens[<NUM_LIT:0>], None<EOL><DEDENT>elif len(tokens) >= <NUM_LIT:2> and tokens[<NUM_LIT:1>] == "<STR_LIT:(>": <EOL><INDENT>if len(tokens) == <NUM_LIT:2>: <EOL><INDENT>return tokens[<NUM_LIT:0>], "<STR_LIT>"<EOL><DEDENT>elif len(tokens) == <NUM_LIT:3>: <EOL><INDENT>return tokens[<NUM_LIT:0>], tokens[<NUM_LIT:2>]<EOL><DEDENT>else:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", tokens)<EOL><DEDENT><DEDENT>else:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", tokens)<EOL><DEDENT>
|
Split attrgetter_atom_tokens into (attr_or_method_name, method_args_or_none_if_attr).
|
f11257:m1
|
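The token shapes can be inferred from the branches above: a lone name means plain attribute access, while a "(" in second position marks a method call whose argument string, if any, is the third token. A sketch under that assumption:

```python
# Assumed token shapes, inferred from the branches above.
def attrgetter_split(tokens):
    if len(tokens) == 1:
        return tokens[0], None           # .attr
    if len(tokens) >= 2 and tokens[1] == "(":
        if len(tokens) == 2:
            return tokens[0], ""         # .method() with no arguments
        if len(tokens) == 3:
            return tokens[0], tokens[2]  # .method(args)
    raise ValueError("invalid attrgetter tokens: %r" % (tokens,))

assert attrgetter_split(["attr"]) == ("attr", None)
assert attrgetter_split(["method", "("]) == ("method", "")
assert attrgetter_split(["method", "(", "x, y"]) == ("method", "x, y")
```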
def pipe_item_split(tokens, loc):
|
<EOL>if isinstance(tokens, list) or "<STR_LIT>" in tokens:<EOL><INDENT>internal_assert(len(tokens) == <NUM_LIT:1>, "<STR_LIT>", tokens)<EOL>return "<STR_LIT>", (tokens[<NUM_LIT:0>],)<EOL><DEDENT>elif "<STR_LIT>" in tokens:<EOL><INDENT>func, args = tokens<EOL>pos_args, star_args, kwd_args, dubstar_args = split_function_call(args, loc)<EOL>return "<STR_LIT>", (func, join_args(pos_args, star_args), join_args(kwd_args, dubstar_args))<EOL><DEDENT>elif "<STR_LIT>" in tokens:<EOL><INDENT>name, args = attrgetter_atom_split(tokens)<EOL>return "<STR_LIT>", (name, args)<EOL><DEDENT>elif "<STR_LIT>" in tokens:<EOL><INDENT>op, args = tokens<EOL>return "<STR_LIT>", (op, args)<EOL><DEDENT>else:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", tokens)<EOL><DEDENT>
|
Process a pipe item, which could be a partial, an attribute access, a method call, or an expression.
Return (type, split) where split is
- (expr,) for expression,
- (func, pos_args, kwd_args) for partial,
- (name, args) for attr/method, and
- (op, args) for itemgetter.
|
f11257:m2
|
def infix_error(tokens):
|
raise CoconutInternalException("<STR_LIT>", tokens)<EOL>
|
Raise inner infix error.
|
f11257:m3
|
def get_infix_items(tokens, callback=infix_error):
|
internal_assert(len(tokens) >= <NUM_LIT:3>, "<STR_LIT>", tokens)<EOL>(arg1, func, arg2), tokens = tokens[:<NUM_LIT:3>], tokens[<NUM_LIT:3>:]<EOL>args = list(arg1) + list(arg2)<EOL>while tokens:<EOL><INDENT>args = [callback([args, func, []])]<EOL>(func, newarg), tokens = tokens[:<NUM_LIT:2>], tokens[<NUM_LIT:2>:]<EOL>args += list(newarg)<EOL><DEDENT>return func, args<EOL>
|
Perform infix token processing.
Takes a callback (one that takes infix tokens and returns a string) to handle inner infix calls.
|
f11257:m4
|
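A self-contained sketch of the left-to-right folding, with simple string building standing in for the callback machinery: a `f` b `g` c nests as g(f(a, b), c).

```python
# Sketch of the left-to-right infix folding, with string building standing
# in for the callback machinery.
def infix_to_call(tokens):
    (arg1, func, arg2), rest = tokens[:3], tokens[3:]
    args = [arg1, arg2]
    while rest:
        args = ["%s(%s)" % (func, ", ".join(args))]  # fold what we have so far
        (func, newarg), rest = rest[:2], rest[2:]
        args.append(newarg)
    return "%s(%s)" % (func, ", ".join(args))

assert infix_to_call(["a", "f", "b"]) == "f(a, b)"
assert infix_to_call(["a", "f", "b", "g", "c"]) == "g(f(a, b), c)"
```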
def comp_pipe_info(op):
|
if op == "<STR_LIT>":<EOL><INDENT>return "<STR_LIT>", False<EOL><DEDENT>elif op == "<STR_LIT>":<EOL><INDENT>return "<STR_LIT>", False<EOL><DEDENT>elif op == "<STR_LIT>":<EOL><INDENT>return "<STR_LIT>", True<EOL><DEDENT>elif op == "<STR_LIT>":<EOL><INDENT>return "<STR_LIT>", True<EOL><DEDENT>else:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", op)<EOL><DEDENT>
|
Returns (direction, star) where direction is 'forwards' or 'backwards'.
|
f11257:m5
|
def add_paren_handle(tokens):
|
internal_assert(len(tokens) == <NUM_LIT:1>, "<STR_LIT>", tokens)<EOL>return "<STR_LIT:(>" + tokens[<NUM_LIT:0>] + "<STR_LIT:)>"<EOL>
|
Add parentheses.
|
f11257:m6
|
def function_call_handle(loc, tokens):
|
return "<STR_LIT:(>" + join_args(*split_function_call(tokens, loc)) + "<STR_LIT:)>"<EOL>
|
Enforce properly ordered function parameters.
|
f11257:m7
|
def item_handle(loc, tokens):
|
out = tokens.pop(<NUM_LIT:0>)<EOL>for i, trailer in enumerate(tokens):<EOL><INDENT>if isinstance(trailer, str):<EOL><INDENT>out += trailer<EOL><DEDENT>elif len(trailer) == <NUM_LIT:1>:<EOL><INDENT>if trailer[<NUM_LIT:0>] == "<STR_LIT>":<EOL><INDENT>out = "<STR_LIT>" + out + "<STR_LIT:)>"<EOL><DEDENT>elif trailer[<NUM_LIT:0>] == "<STR_LIT:$>":<EOL><INDENT>out = "<STR_LIT>" + out + "<STR_LIT:)>"<EOL><DEDENT>elif trailer[<NUM_LIT:0>] == "<STR_LIT>":<EOL><INDENT>out = "<STR_LIT>" + out + "<STR_LIT:)>"<EOL><DEDENT>elif trailer[<NUM_LIT:0>] == "<STR_LIT:.>":<EOL><INDENT>out = "<STR_LIT>" + out + "<STR_LIT:)>"<EOL><DEDENT>elif trailer[<NUM_LIT:0>] == "<STR_LIT>":<EOL><INDENT>out = "<STR_LIT>" + out + "<STR_LIT:]>"<EOL><DEDENT>elif trailer[<NUM_LIT:0>] == "<STR_LIT>":<EOL><INDENT>out = "<STR_LIT>" + out + "<STR_LIT:]>"<EOL><DEDENT>elif trailer[<NUM_LIT:0>] == "<STR_LIT>":<EOL><INDENT>out = "<STR_LIT>" + out + "<STR_LIT:]>"<EOL><DEDENT>elif trailer[<NUM_LIT:0>] == "<STR_LIT:?>":<EOL><INDENT>rest_of_trailers = tokens[i + <NUM_LIT:1>:]<EOL>if len(rest_of_trailers) == <NUM_LIT:0>:<EOL><INDENT>raise CoconutDeferredSyntaxError("<STR_LIT>", loc)<EOL><DEDENT>not_none_tokens = ["<STR_LIT:x>"]<EOL>not_none_tokens.extend(rest_of_trailers)<EOL>return "<STR_LIT>" + item_handle(loc, not_none_tokens) + "<STR_LIT>" + out + "<STR_LIT:)>"<EOL><DEDENT>else:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", trailer[<NUM_LIT:0>])<EOL><DEDENT><DEDENT>elif len(trailer) == <NUM_LIT:2>:<EOL><INDENT>if trailer[<NUM_LIT:0>] == "<STR_LIT>":<EOL><INDENT>out = "<STR_LIT>" + out + "<STR_LIT:U+002CU+0020>" + trailer[<NUM_LIT:1>] + "<STR_LIT:)>"<EOL><DEDENT>elif trailer[<NUM_LIT:0>] == "<STR_LIT>":<EOL><INDENT>args = trailer[<NUM_LIT:1>][<NUM_LIT:1>:-<NUM_LIT:1>]<EOL>if not args:<EOL><INDENT>raise CoconutDeferredSyntaxError("<STR_LIT>", loc)<EOL><DEDENT>out = "<STR_LIT>" + out + "<STR_LIT:U+002CU+0020>" + args + "<STR_LIT:)>"<EOL><DEDENT>elif trailer[<NUM_LIT:0>] == "<STR_LIT>":<EOL><INDENT>out = "<STR_LIT>" + out + "<STR_LIT:U+002CU+0020>" + trailer[<NUM_LIT:1>] + "<STR_LIT:)>"<EOL><DEDENT>elif trailer[<NUM_LIT:0>] == "<STR_LIT>":<EOL><INDENT>pos_args, star_args, kwd_args, dubstar_args = split_function_call(trailer[<NUM_LIT:1>], loc)<EOL>extra_args_str = join_args(star_args, kwd_args, dubstar_args)<EOL>argdict_pairs = []<EOL>has_question_mark = False<EOL>for i, arg in enumerate(pos_args):<EOL><INDENT>if arg == "<STR_LIT:?>":<EOL><INDENT>has_question_mark = True<EOL><DEDENT>else:<EOL><INDENT>argdict_pairs.append(str(i) + "<STR_LIT>" + arg)<EOL><DEDENT><DEDENT>if not has_question_mark:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", trailer[<NUM_LIT:1>])<EOL><DEDENT>elif argdict_pairs or extra_args_str:<EOL><INDENT>out = (<EOL>"<STR_LIT>"<EOL>+ out<EOL>+ "<STR_LIT>" + "<STR_LIT:U+002CU+0020>".join(argdict_pairs) + "<STR_LIT:}>"<EOL>+ "<STR_LIT:U+002CU+0020>" + str(len(pos_args))<EOL>+ ("<STR_LIT:U+002CU+0020>" if extra_args_str else "<STR_LIT>") + extra_args_str<EOL>+ "<STR_LIT:)>"<EOL>)<EOL><DEDENT>else:<EOL><INDENT>raise CoconutDeferredSyntaxError("<STR_LIT>", loc)<EOL><DEDENT><DEDENT>else:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", trailer[<NUM_LIT:0>])<EOL><DEDENT><DEDENT>else:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", trailer)<EOL><DEDENT><DEDENT>return out<EOL>
|
Process trailers.
|
f11257:m8
|
def pipe_handle(loc, tokens, **kwargs):
|
internal_assert(set(kwargs) <= set(("<STR_LIT>",)), "<STR_LIT>", kwargs)<EOL>top = kwargs.get("<STR_LIT>", True)<EOL>if len(tokens) == <NUM_LIT:1>:<EOL><INDENT>item = tokens.pop()<EOL>if not top: <EOL><INDENT>return item<EOL><DEDENT>name, split_item = pipe_item_split(item, loc)<EOL>if name == "<STR_LIT>":<EOL><INDENT>internal_assert(len(split_item) == <NUM_LIT:1>)<EOL>return split_item[<NUM_LIT:0>]<EOL><DEDENT>elif name == "<STR_LIT>":<EOL><INDENT>internal_assert(len(split_item) == <NUM_LIT:3>)<EOL>return "<STR_LIT>" + join_args(split_item) + "<STR_LIT:)>"<EOL><DEDENT>elif name == "<STR_LIT>":<EOL><INDENT>return attrgetter_atom_handle(loc, item)<EOL><DEDENT>elif name == "<STR_LIT>":<EOL><INDENT>return itemgetter_handle(item)<EOL><DEDENT>else:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", split_item)<EOL><DEDENT><DEDENT>else:<EOL><INDENT>item, op = tokens.pop(), tokens.pop()<EOL>if op == "<STR_LIT>" or op == "<STR_LIT>":<EOL><INDENT>name, split_item = pipe_item_split(item, loc)<EOL>star = "<STR_LIT:*>" if op == "<STR_LIT>" else "<STR_LIT>"<EOL>if name == "<STR_LIT>":<EOL><INDENT>internal_assert(len(split_item) == <NUM_LIT:1>)<EOL>return "<STR_LIT:(>" + split_item[<NUM_LIT:0>] + "<STR_LIT>" + star + pipe_handle(loc, tokens) + "<STR_LIT:)>"<EOL><DEDENT>elif name == "<STR_LIT>":<EOL><INDENT>internal_assert(len(split_item) == <NUM_LIT:3>)<EOL>return split_item[<NUM_LIT:0>] + "<STR_LIT:(>" + join_args((split_item[<NUM_LIT:1>], star + pipe_handle(loc, tokens), split_item[<NUM_LIT:2>])) + "<STR_LIT:)>"<EOL><DEDENT>elif name == "<STR_LIT>":<EOL><INDENT>internal_assert(len(split_item) == <NUM_LIT:2>)<EOL>if star:<EOL><INDENT>raise CoconutDeferredSyntaxError("<STR_LIT>", loc)<EOL><DEDENT>return "<STR_LIT:(>" + pipe_handle(loc, tokens) + "<STR_LIT>" + split_item[<NUM_LIT:0>] + ("<STR_LIT:(>" + split_item[<NUM_LIT:1>] + "<STR_LIT:)>" if split_item[<NUM_LIT:1>] is not None else "<STR_LIT>")<EOL><DEDENT>elif name == "<STR_LIT>":<EOL><INDENT>internal_assert(len(split_item) == <NUM_LIT:2>)<EOL>if star:<EOL><INDENT>raise CoconutDeferredSyntaxError("<STR_LIT>", loc)<EOL><DEDENT>op, args = split_item<EOL>if op == "<STR_LIT:[>":<EOL><INDENT>return "<STR_LIT:(>" + pipe_handle(loc, tokens) + "<STR_LIT>" + args + "<STR_LIT:]>"<EOL><DEDENT>elif op == "<STR_LIT>":<EOL><INDENT>return "<STR_LIT>" + pipe_handle(loc, tokens) + "<STR_LIT:U+002CU+0020>" + args + "<STR_LIT:)>"<EOL><DEDENT>else:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", op)<EOL><DEDENT><DEDENT>else:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", split_item)<EOL><DEDENT><DEDENT>elif op == "<STR_LIT>" or op == "<STR_LIT>":<EOL><INDENT>star = "<STR_LIT:*>" if op == "<STR_LIT>" else "<STR_LIT>"<EOL>inner_item = pipe_handle(loc, tokens, top=False)<EOL>if isinstance(inner_item, str):<EOL><INDENT>inner_item = [inner_item] <EOL><DEDENT>return pipe_handle(loc, [item, "<STR_LIT:|>" + star + "<STR_LIT:>>", inner_item])<EOL><DEDENT>else:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", op)<EOL><DEDENT><DEDENT>
|
Process pipe calls.
|
f11257:m9
|
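The recursion bottoms out at a single item and otherwise pops from the right; a toy version using literal "|>" and "|*>" operator strings (assumed here purely for illustration) shows the shape.

```python
# Toy version of the right-to-left pipe recursion; the "|>" and "|*>"
# operator strings are assumed here purely for illustration.
def pipe_to_call(tokens):
    if len(tokens) == 1:
        return tokens[0]             # base case: just the leading expression
    item, op = tokens.pop(), tokens.pop()
    if op == "|>":
        return "%s(%s)" % (item, pipe_to_call(tokens))
    if op == "|*>":
        return "%s(*%s)" % (item, pipe_to_call(tokens))
    raise ValueError("unknown pipe op: %r" % op)

assert pipe_to_call(["x", "|>", "f", "|>", "g"]) == "g(f(x))"
assert pipe_to_call(["args", "|*>", "f"]) == "f(*args)"
```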
def comp_pipe_handle(loc, tokens):
|
internal_assert(len(tokens) >= <NUM_LIT:3> and len(tokens) % <NUM_LIT:2> == <NUM_LIT:1>, "<STR_LIT>", tokens)<EOL>funcs = [tokens[<NUM_LIT:0>]]<EOL>stars = []<EOL>direction = None<EOL>for i in range(<NUM_LIT:1>, len(tokens), <NUM_LIT:2>):<EOL><INDENT>op, fn = tokens[i], tokens[i + <NUM_LIT:1>]<EOL>new_direction, star = comp_pipe_info(op)<EOL>if direction is None:<EOL><INDENT>direction = new_direction<EOL><DEDENT>elif new_direction != direction:<EOL><INDENT>raise CoconutDeferredSyntaxError("<STR_LIT>", loc)<EOL><DEDENT>funcs.append(fn)<EOL>stars.append(star)<EOL><DEDENT>if direction == "<STR_LIT>":<EOL><INDENT>funcs.reverse()<EOL>stars.reverse()<EOL><DEDENT>func = funcs.pop(<NUM_LIT:0>)<EOL>funcstars = zip(funcs, stars)<EOL>return "<STR_LIT>" + func + "<STR_LIT:U+002CU+0020>" + "<STR_LIT:U+002CU+0020>".join(<EOL>"<STR_LIT>" % (f, star) for f, star in funcstars<EOL>) + "<STR_LIT:)>"<EOL>
|
Process pipe function composition.
|
f11257:m10
|
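What a forward composition pipe ultimately behaves like can be shown with a hypothetical _compose helper built on functools.reduce; the real generated code calls a Coconut runtime function instead.

```python
from functools import reduce

# Hypothetical _compose helper showing what a forward composition pipe
# behaves like; the generated code calls a Coconut runtime function instead.
def _compose(*funcs):
    return lambda *args, **kwargs: reduce(
        lambda acc, fn: fn(acc), funcs[1:], funcs[0](*args, **kwargs)
    )

add1_then_double = _compose(lambda x: x + 1, lambda x: x * 2)
assert add1_then_double(3) == 8  # (3 + 1) * 2
```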
def none_coalesce_handle(tokens):
|
if len(tokens) == <NUM_LIT:1>:<EOL><INDENT>return tokens[<NUM_LIT:0>]<EOL><DEDENT>elif tokens[<NUM_LIT:0>].isalnum():<EOL><INDENT>return "<STR_LIT>".format(<EOL>a=tokens[<NUM_LIT:0>],<EOL>b=none_coalesce_handle(tokens[<NUM_LIT:1>:]),<EOL>)<EOL><DEDENT>else:<EOL><INDENT>return "<STR_LIT>".format(<EOL>x=none_coalesce_var,<EOL>a=tokens[<NUM_LIT:0>],<EOL>b=none_coalesce_handle(tokens[<NUM_LIT:1>:]),<EOL>)<EOL><DEDENT>
|
Process the None-coalescing operator.
|
f11257:m11
|
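The two branches above distinguish plain names, which are safe to repeat, from arbitrary expressions, which are bound to a temporary inside a lambda so they evaluate only once. A sketch with an assumed temporary name _x:

```python
# Sketch of the two branches: a plain name is repeated verbatim, while an
# arbitrary expression is bound to a temporary (the assumed name _x) inside
# a lambda so it is evaluated only once.
def none_coalesce(a, b):
    if a.isalnum():
        return "{a} if {a} is not None else {b}".format(a=a, b=b)
    return "(lambda _x: _x if _x is not None else {b})({a})".format(a=a, b=b)

assert none_coalesce("x", "0") == "x if x is not None else 0"
assert none_coalesce("f()", "0") == "(lambda _x: _x if _x is not None else 0)(f())"
```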
def attrgetter_atom_handle(loc, tokens):
|
name, args = attrgetter_atom_split(tokens)<EOL>if args is None:<EOL><INDENT>return '<STR_LIT>' + name + '<STR_LIT>'<EOL><DEDENT>elif "<STR_LIT:.>" in name:<EOL><INDENT>raise CoconutDeferredSyntaxError("<STR_LIT>", loc)<EOL><DEDENT>elif args == "<STR_LIT>":<EOL><INDENT>return '<STR_LIT>' + tokens[<NUM_LIT:0>] + '<STR_LIT>'<EOL><DEDENT>else:<EOL><INDENT>return '<STR_LIT>' + tokens[<NUM_LIT:0>] + '<STR_LIT>' + tokens[<NUM_LIT:2>] + "<STR_LIT:)>"<EOL><DEDENT>
|
Process attrgetter literals.
|
f11257:m12
|
def lazy_list_handle(tokens):
|
if len(tokens) == <NUM_LIT:0>:<EOL><INDENT>return "<STR_LIT>"<EOL><DEDENT>else:<EOL><INDENT>return (<EOL>"<STR_LIT>" % (func_var, func_var)<EOL>+ "<STR_LIT>" + "<STR_LIT>".join(tokens) + ("<STR_LIT:U+002C>" if len(tokens) == <NUM_LIT:1> else "<STR_LIT>") + "<STR_LIT>"<EOL>)<EOL><DEDENT>
|
Process lazy lists.
|
f11257:m13
|
def chain_handle(tokens):
|
if len(tokens) == <NUM_LIT:1>:<EOL><INDENT>return tokens[<NUM_LIT:0>]<EOL><DEDENT>else:<EOL><INDENT>return "<STR_LIT>" + lazy_list_handle(tokens) + "<STR_LIT:)>"<EOL><DEDENT>
|
Process chain calls.
|
f11257:m14
|
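The chaining itself is standard itertools behavior; the real handler additionally wraps its operands lazily via lazy_list_handle first.

```python
import itertools

# Standard-library behavior the chain operator reduces to; the real handler
# first wraps its operands lazily via lazy_list_handle.
chained = itertools.chain(range(2), "ab")
assert list(chained) == [0, 1, "a", "b"]
```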
def infix_handle(tokens):
|
func, args = get_infix_items(tokens, callback=infix_handle)<EOL>return "<STR_LIT:(>" + func + "<STR_LIT>" + "<STR_LIT:U+002CU+0020>".join(args) + "<STR_LIT:)>"<EOL>
|
Process infix calls.
|
f11257:m15
|
def op_funcdef_handle(tokens):
|
func, base_args = get_infix_items(tokens)<EOL>args = []<EOL>for arg in base_args[:-<NUM_LIT:1>]:<EOL><INDENT>rstrip_arg = arg.rstrip()<EOL>if not rstrip_arg.endswith(unwrapper):<EOL><INDENT>if not rstrip_arg.endswith("<STR_LIT:U+002C>"):<EOL><INDENT>arg += "<STR_LIT:U+002CU+0020>"<EOL><DEDENT>elif arg.endswith("<STR_LIT:U+002C>"):<EOL><INDENT>arg += "<STR_LIT:U+0020>"<EOL><DEDENT><DEDENT>args.append(arg)<EOL><DEDENT>last_arg = base_args[-<NUM_LIT:1>]<EOL>if last_arg.rstrip().endswith("<STR_LIT:U+002C>"):<EOL><INDENT>last_arg = last_arg.rsplit("<STR_LIT:U+002C>")[<NUM_LIT:0>]<EOL><DEDENT>args.append(last_arg)<EOL>return func + "<STR_LIT:(>" + "<STR_LIT>".join(args) + "<STR_LIT:)>"<EOL>
|
Process infix defs.
|
f11257:m16
|
def lambdef_handle(tokens):
|
if len(tokens) == <NUM_LIT:0>:<EOL><INDENT>return "<STR_LIT>"<EOL><DEDENT>elif len(tokens) == <NUM_LIT:1>:<EOL><INDENT>return "<STR_LIT>" + tokens[<NUM_LIT:0>] + "<STR_LIT::>"<EOL><DEDENT>else:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", tokens)<EOL><DEDENT>
|
Process lambda calls.
|
f11257:m17
|
def typedef_callable_handle(tokens):
|
if len(tokens) == <NUM_LIT:1>:<EOL><INDENT>return '<STR_LIT>' + tokens[<NUM_LIT:0>] + '<STR_LIT:]>'<EOL><DEDENT>elif len(tokens) == <NUM_LIT:2>:<EOL><INDENT>return '<STR_LIT>' + tokens[<NUM_LIT:0>] + '<STR_LIT>' + tokens[<NUM_LIT:1>] + '<STR_LIT:]>'<EOL><DEDENT>else:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", tokens)<EOL><DEDENT>
|
Process -> to Callable inside type annotations.
|
f11257:m18
|
def make_suite_handle(tokens):
|
internal_assert(len(tokens) == <NUM_LIT:1>, "<STR_LIT>", tokens)<EOL>return "<STR_LIT:\n>" + openindent + tokens[<NUM_LIT:0>] + closeindent<EOL>
|
Make simple statements into suites.
|
f11257:m19
|
def implicit_return_handle(tokens):
|
internal_assert(len(tokens) == <NUM_LIT:1>, "<STR_LIT>", tokens)<EOL>return "<STR_LIT>" + tokens[<NUM_LIT:0>]<EOL>
|
Add an implicit return.
|
f11257:m20
|
def math_funcdef_handle(tokens):
|
internal_assert(len(tokens) == <NUM_LIT:2>, "<STR_LIT>", tokens)<EOL>return tokens[<NUM_LIT:0>] + ("<STR_LIT>" if tokens[<NUM_LIT:1>].startswith("<STR_LIT:\n>") else "<STR_LIT:U+0020>") + tokens[<NUM_LIT:1>]<EOL>
|
Process assignment function definition.
|
f11257:m21
|
def decorator_handle(tokens):
|
defs = []<EOL>decorates = []<EOL>for i, tok in enumerate(tokens):<EOL><INDENT>if "<STR_LIT>" in tok and len(tok) == <NUM_LIT:1>:<EOL><INDENT>decorates.append("<STR_LIT:@>" + tok[<NUM_LIT:0>])<EOL><DEDENT>elif "<STR_LIT:test>" in tok and len(tok) == <NUM_LIT:1>:<EOL><INDENT>varname = decorator_var + "<STR_LIT:_>" + str(i)<EOL>defs.append(varname + "<STR_LIT>" + tok[<NUM_LIT:0>])<EOL>decorates.append("<STR_LIT:@>" + varname)<EOL><DEDENT>else:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", tok)<EOL><DEDENT><DEDENT>return "<STR_LIT:\n>".join(defs + decorates) + "<STR_LIT:\n>"<EOL>
|
Process decorators.
|
f11257:m22
|
def match_handle(loc, tokens):
|
if len(tokens) == <NUM_LIT:4>:<EOL><INDENT>matches, match_type, item, stmts = tokens<EOL>cond = None<EOL><DEDENT>elif len(tokens) == <NUM_LIT:5>:<EOL><INDENT>matches, match_type, item, cond, stmts = tokens<EOL><DEDENT>else:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", tokens)<EOL><DEDENT>if match_type == "<STR_LIT>":<EOL><INDENT>invert = False<EOL><DEDENT>elif match_type == "<STR_LIT>":<EOL><INDENT>invert = True<EOL><DEDENT>else:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", match_type)<EOL><DEDENT>matching = Matcher(loc, match_check_var)<EOL>matching.match(matches, match_to_var)<EOL>if cond:<EOL><INDENT>matching.add_guard(cond)<EOL><DEDENT>return (<EOL>match_to_var + "<STR_LIT>" + item + "<STR_LIT:\n>"<EOL>+ matching.build(stmts, invert=invert)<EOL>)<EOL>
|
Process match blocks.
|
f11257:m23
|
def except_handle(tokens):
|
if len(tokens) == <NUM_LIT:1>:<EOL><INDENT>errs, asname = tokens[<NUM_LIT:0>], None<EOL><DEDENT>elif len(tokens) == <NUM_LIT:2>:<EOL><INDENT>errs, asname = tokens<EOL><DEDENT>else:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", tokens)<EOL><DEDENT>out = "<STR_LIT>"<EOL>if "<STR_LIT:list>" in tokens:<EOL><INDENT>out += "<STR_LIT:(>" + errs + "<STR_LIT:)>"<EOL><DEDENT>else:<EOL><INDENT>out += errs<EOL><DEDENT>if asname is not None:<EOL><INDENT>out += "<STR_LIT>" + asname<EOL><DEDENT>return out<EOL>
|
Process except statements.
|
f11257:m24
|
def subscriptgroup_handle(tokens):
|
internal_assert(<NUM_LIT:0> < len(tokens) <= <NUM_LIT:3>, "<STR_LIT>", tokens)<EOL>args = []<EOL>for arg in tokens:<EOL><INDENT>if not arg:<EOL><INDENT>arg = "<STR_LIT:None>"<EOL><DEDENT>args.append(arg)<EOL><DEDENT>if len(args) == <NUM_LIT:1>:<EOL><INDENT>return args[<NUM_LIT:0>]<EOL><DEDENT>else:<EOL><INDENT>return "<STR_LIT>" + "<STR_LIT:U+002CU+0020>".join(args) + "<STR_LIT:)>"<EOL><DEDENT>
|
Process subscriptgroups.
|
f11257:m25
|
def itemgetter_handle(tokens):
|
internal_assert(len(tokens) == <NUM_LIT:2>, "<STR_LIT>", tokens)<EOL>op, args = tokens<EOL>if op == "<STR_LIT:[>":<EOL><INDENT>return "<STR_LIT>" + args + "<STR_LIT:)>"<EOL><DEDENT>elif op == "<STR_LIT>":<EOL><INDENT>return "<STR_LIT>" + args + "<STR_LIT:)>"<EOL><DEDENT>else:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", op)<EOL><DEDENT>
|
Process implicit itemgetter partials.
|
f11257:m26
|
def class_suite_handle(tokens):
|
internal_assert(len(tokens) == <NUM_LIT:1>, "<STR_LIT>", tokens)<EOL>return "<STR_LIT>" + tokens[<NUM_LIT:0>]<EOL>
|
Process implicit pass in class suite.
|
f11257:m27
|
def namelist_handle(tokens):
|
if len(tokens) == <NUM_LIT:1>:<EOL><INDENT>return tokens[<NUM_LIT:0>]<EOL><DEDENT>elif len(tokens) == <NUM_LIT:2>:<EOL><INDENT>return tokens[<NUM_LIT:0>] + "<STR_LIT:\n>" + tokens[<NUM_LIT:0>] + "<STR_LIT>" + tokens[<NUM_LIT:1>]<EOL><DEDENT>else:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", tokens)<EOL><DEDENT>
|
Process inline nonlocal and global statements.
|
f11257:m28
|
def compose_item_handle(tokens):
|
if len(tokens) < <NUM_LIT:1>:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", tokens)<EOL><DEDENT>elif len(tokens) == <NUM_LIT:1>:<EOL><INDENT>return tokens[<NUM_LIT:0>]<EOL><DEDENT>else:<EOL><INDENT>return "<STR_LIT>" + "<STR_LIT:U+002CU+0020>".join(reversed(tokens)) + "<STR_LIT:)>"<EOL><DEDENT>
|
Process function composition.
|
f11257:m29
|
def tco_return_handle(tokens):
|
internal_assert(len(tokens) == <NUM_LIT:2>, "<STR_LIT>", tokens)<EOL>if tokens[<NUM_LIT:1>].startswith("<STR_LIT>"):<EOL><INDENT>return "<STR_LIT>" + tokens[<NUM_LIT:0>] + "<STR_LIT:)>" + tokens[<NUM_LIT:1>][<NUM_LIT:2>:] <EOL><DEDENT>else:<EOL><INDENT>return "<STR_LIT>" + tokens[<NUM_LIT:0>] + "<STR_LIT:U+002CU+0020>" + tokens[<NUM_LIT:1>][<NUM_LIT:1>:]<EOL><DEDENT>
|
Process tail-call-optimizable return statements.
|
f11257:m30
|
def split_func_name_args_params_handle(tokens):
|
internal_assert(len(tokens) == <NUM_LIT:2>, "<STR_LIT>", tokens)<EOL>func_name = tokens[<NUM_LIT:0>]<EOL>func_args = []<EOL>func_params = []<EOL>for arg in tokens[<NUM_LIT:1>]:<EOL><INDENT>if len(arg) > <NUM_LIT:1> and arg[<NUM_LIT:0>] in ("<STR_LIT:*>", "<STR_LIT>"):<EOL><INDENT>func_args.append(arg[<NUM_LIT:1>])<EOL><DEDENT>elif arg[<NUM_LIT:0>] != "<STR_LIT:*>":<EOL><INDENT>func_args.append(arg[<NUM_LIT:0>])<EOL><DEDENT>func_params.append("<STR_LIT>".join(arg))<EOL><DEDENT>return [<EOL>func_name,<EOL>"<STR_LIT:U+002CU+0020>".join(func_args),<EOL>"<STR_LIT:(>" + "<STR_LIT:U+002CU+0020>".join(func_params) + "<STR_LIT:)>",<EOL>]<EOL>
|
Process splitting a function into name, params, and args.
|
f11257:m31
|
def join_match_funcdef(tokens):
|
if len(tokens) == <NUM_LIT:2>:<EOL><INDENT>(func, insert_after_docstring), body = tokens<EOL>docstring = None<EOL><DEDENT>elif len(tokens) == <NUM_LIT:3>:<EOL><INDENT>(func, insert_after_docstring), docstring, body = tokens<EOL><DEDENT>else:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", tokens)<EOL><DEDENT>insert_after_docstring, dedent = split_trailing_indent(insert_after_docstring)<EOL>indent, body = split_leading_indent(body)<EOL>indentation = collapse_indents(dedent + indent)<EOL>return (<EOL>func<EOL>+ (docstring if docstring is not None else "<STR_LIT>")<EOL>+ insert_after_docstring<EOL>+ indentation<EOL>+ body<EOL>)<EOL>
|
Join the pieces of a pattern-matching function together.
|
f11257:m32
|
def where_stmt_handle(tokens):
|
internal_assert(len(tokens) == <NUM_LIT:2>, "<STR_LIT>", tokens)<EOL>base_stmt, assignment_stmts = tokens<EOL>stmts = list(assignment_stmts) + [base_stmt]<EOL>return "<STR_LIT:\n>".join(stmts) + "<STR_LIT:\n>"<EOL>
|
Process a where statement.
|
f11257:m33
|
def set_grammar_names():
|
for varname, val in vars(Grammar).items():<EOL><INDENT>if isinstance(val, ParserElement):<EOL><INDENT>setattr(Grammar, varname, val.setName(varname))<EOL><DEDENT><DEDENT>
|
Set names of grammar elements to their variable names.
|
f11257:m34
|
def gethash(compiled):
|
lines = compiled.splitlines()<EOL>if len(lines) < <NUM_LIT:3> or not lines[<NUM_LIT:2>].startswith(hash_prefix):<EOL><INDENT>return None<EOL><DEDENT>else:<EOL><INDENT>return lines[<NUM_LIT:2>][len(hash_prefix):]<EOL><DEDENT>
|
Retrieve a hash from a header.
|
f11258:m0
|
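A runnable sketch, assuming a hash_prefix of the form shown below (the real prefix may differ): the hash is expected on the third line of a compiled header.

```python
# Runnable sketch; this hash_prefix is an assumption (the real one may differ).
hash_prefix = "# __coconut_hash__ = "

def gethash(compiled):
    lines = compiled.splitlines()
    # the hash is expected on the third line of the compiled header
    if len(lines) < 3 or not lines[2].startswith(hash_prefix):
        return None
    return lines[2][len(hash_prefix):]

header = "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n# __coconut_hash__ = abc123\n"
assert gethash(header) == "abc123"
assert gethash("too\nshort") is None
```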
def minify(compiled):
|
compiled = compiled.strip()<EOL>if compiled:<EOL><INDENT>out = []<EOL>for line in compiled.splitlines():<EOL><INDENT>line = line.split("<STR_LIT:#>", <NUM_LIT:1>)[<NUM_LIT:0>].rstrip()<EOL>if line:<EOL><INDENT>ind = <NUM_LIT:0><EOL>while line.startswith("<STR_LIT:U+0020>"):<EOL><INDENT>line = line[<NUM_LIT:1>:]<EOL>ind += <NUM_LIT:1><EOL><DEDENT>internal_assert(ind % tabideal == <NUM_LIT:0>, "<STR_LIT>", line)<EOL>out.append("<STR_LIT:U+0020>" * (ind // tabideal) + line)<EOL><DEDENT><DEDENT>compiled = "<STR_LIT:\n>".join(out) + "<STR_LIT:\n>"<EOL><DEDENT>return compiled<EOL>
|
Perform basic minifications.
Fails on non-tabideal indentation or on string literals containing a #.
|
f11258:m1
|
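Equivalent plain-Python behavior, assuming tabideal = 4: comments and blank lines are dropped, and each four-space indent level compresses to a single space.

```python
tabideal = 4  # assumed spaces per indent level in the uncompressed source

def minify(compiled):
    out = []
    for line in compiled.strip().splitlines():
        line = line.split("#", 1)[0].rstrip()  # drop comments, trailing space
        if line:
            stripped = line.lstrip(" ")
            ind = len(line) - len(stripped)
            assert ind % tabideal == 0, line   # fails on odd indentation
            out.append(" " * (ind // tabideal) + stripped)
    return "\n".join(out) + "\n" if out else ""

assert minify("def f():\n    return 1  # comment\n") == "def f():\n return 1\n"
```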
def get_template(template):
|
with open(os.path.join(template_dir, template) + template_ext, "<STR_LIT:r>") as template_file:<EOL><INDENT>return template_file.read()<EOL><DEDENT>
|
Read the given template file.
|
f11258:m2
|
def one_num_ver(target):
|
return target[:<NUM_LIT:1>]<EOL>
|
Return the first number of the target version, if it has one.
|
f11258:m3
|
def section(name):
|
line = "<STR_LIT>" + name + "<STR_LIT>"<EOL>return line + "<STR_LIT:->" * (justify_len - len(line)) + "<STR_LIT>"<EOL>
|
Generate a section break.
|
f11258:m4
|
def process_header_args(which, target, use_hash, no_tco, strict):
|
target_startswith = one_num_ver(target)<EOL>target_info = get_target_info(target)<EOL>try_backport_lru_cache = r'''<STR_LIT>'''try:<EOL>import trollius as asyncio<EOL>except ImportError:<EOL>class you_need_to_install_trollius: pass<EOL>asyncio = you_need_to_install_trollius()<EOL>format_dict = dict(<EOL>comment=comment(),<EOL>empty_dict="<STR_LIT:{}>",<EOL>target_startswith=target_startswith,<EOL>default_encoding=default_encoding,<EOL>hash_line=hash_prefix + use_hash + "<STR_LIT:\n>" if use_hash is not None else "<STR_LIT>",<EOL>typing_line="<STR_LIT>" if which == "<STR_LIT>" else "<STR_LIT>",<EOL>VERSION_STR=VERSION_STR,<EOL>module_docstring='<STR_LIT>' if which == "<STR_LIT>" else "<STR_LIT>",<EOL>object="<STR_LIT>" if target_startswith != "<STR_LIT:3>" else "<STR_LIT>",<EOL>import_asyncio=_indent(<EOL>"<STR_LIT>" if not target or target_info >= (<NUM_LIT:3>, <NUM_LIT:5>)<EOL>else "<STR_LIT>" if target_info >= (<NUM_LIT:3>, <NUM_LIT:4>)<EOL>else r'''<STR_LIT>'''if _coconut_sys.version_info < (<NUM_LIT:3>,):<EOL>import cPickle as pickle<EOL>else:<EOL>import pickle'''<STR_LIT>'''if _coconut_sys.version_info >= (<NUM_LIT:2>, <NUM_LIT:7>):<EOL>OrderedDict = collections.OrderedDict<EOL>else:<EOL>OrderedDict = dict'''<STR_LIT>'''if _coconut_sys.version_info < (<NUM_LIT:3>, <NUM_LIT:3>):<EOL>abc = collections<EOL>else:<EOL>import collections.abc as abc'''<STR_LIT>'''if _coconut_sys.version_info < (<NUM_LIT:3>, <NUM_LIT:2>):<EOL>+ _indent(try_backport_lru_cache)<EOL>if not target<EOL>else try_backport_lru_cache if target_startswith == "<STR_LIT:2>"<EOL>else "<STR_LIT>"<EOL>),<EOL>comma_bytearray="<STR_LIT>" if target_startswith != "<STR_LIT:3>" else "<STR_LIT>",<EOL>static_repr="<STR_LIT>" if target_startswith != "<STR_LIT:3>" else "<STR_LIT>",<EOL>with_ThreadPoolExecutor=(<EOL>r'''<STR_LIT>''' if target_info < (<NUM_LIT:3>, <NUM_LIT:5>)<EOL>else '''<STR_LIT>'''<EOL>),<EOL>tco_decorator="<STR_LIT>" + "<STR_LIT:U+0020>" * <NUM_LIT:8> if not no_tco else "<STR_LIT>",<EOL>tail_call_func_args_kwargs="<STR_LIT>" if no_tco else "<STR_LIT>",<EOL>comma_tco="<STR_LIT>" if not no_tco else "<STR_LIT>",<EOL>def_coconut_NamedTuple=(<EOL>r'''<STR_LIT>'''<EOL>if target_info < (<NUM_LIT:3>, <NUM_LIT:6>)<EOL>else "<STR_LIT>"<EOL>),<EOL>def_prepattern=(<EOL>r'''<STR_LIT>'''def datamaker(data_type):<EOL>"""<STR_LIT>"""<EOL>return _coconut.functools.partial(makedata, data_type)<EOL>if not strict else "<STR_LIT>"<EOL>),<EOL>__coconut__=(<EOL>'<STR_LIT>' if target_startswith == "<STR_LIT:3>"<EOL>else '<STR_LIT>' if target_startswith == "<STR_LIT:2>"<EOL>else '<STR_LIT>'<EOL>),<EOL>)<EOL>format_dict["<STR_LIT>"] = "<STR_LIT>".format(**format_dict)<EOL>format_dict["<STR_LIT>"] = "<STR_LIT>" if no_tco else
|
Create the dictionary passed to str.format in the header, along with target_startswith and target_info.
|
f11258:m5
|
def getheader(which, target="<STR_LIT>", use_hash=None, no_tco=False, strict=False):
|
internal_assert(which in allowed_headers, "<STR_LIT>", which)<EOL>if which == "<STR_LIT:none>":<EOL><INDENT>return "<STR_LIT>"<EOL><DEDENT>format_dict, target_startswith, target_info = process_header_args(which, target, use_hash, no_tco, strict)<EOL>if which == "<STR_LIT>" or which == "<STR_LIT>":<EOL><INDENT>header = '''<STR_LIT>'''.format(**format_dict)<EOL><DEDENT>elif use_hash is not None:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", which)<EOL><DEDENT>else:<EOL><INDENT>header = "<STR_LIT>"<EOL><DEDENT>if which == "<STR_LIT>":<EOL><INDENT>return header<EOL><DEDENT>header += section("<STR_LIT>")<EOL>if target_startswith != "<STR_LIT:3>":<EOL><INDENT>header += "<STR_LIT>"<EOL><DEDENT>elif target_info >= (<NUM_LIT:3>, <NUM_LIT:5>):<EOL><INDENT>header += "<STR_LIT>"<EOL><DEDENT>if which == "<STR_LIT>":<EOL><INDENT>return header + '''<STR_LIT>'''import sys as _coconut_sys<EOL>
|
Generate the specified header.
|
f11258:m6
|
def __getattr__(self, attr):
|
return "<STR_LIT>"<EOL>
|
Return an empty string for all comment attributes.
|
f11258:c0:m0
|
def get_match_names(match):
|
names = []<EOL>if "<STR_LIT>" in match:<EOL><INDENT>(match,) = match<EOL>names += get_match_names(match)<EOL><DEDENT>elif "<STR_LIT>" in match:<EOL><INDENT>(setvar,) = match<EOL>if setvar != wildcard:<EOL><INDENT>names.append(setvar)<EOL><DEDENT><DEDENT>elif "<STR_LIT>" in match:<EOL><INDENT>match, trailers = match[<NUM_LIT:0>], match[<NUM_LIT:1>:]<EOL>for i in range(<NUM_LIT:0>, len(trailers), <NUM_LIT:2>):<EOL><INDENT>op, arg = trailers[i], trailers[i + <NUM_LIT:1>]<EOL>if op == "<STR_LIT>":<EOL><INDENT>names.append(arg)<EOL><DEDENT><DEDENT>names += get_match_names(match)<EOL><DEDENT>return names<EOL>
|
Gets keyword names for the given match.
|
f11260:m0
|
def __init__(self, loc, check_var, checkdefs=None, names=None, var_index=<NUM_LIT:0>):
|
self.loc = loc<EOL>self.check_var = check_var<EOL>self.position = <NUM_LIT:0><EOL>self.checkdefs = []<EOL>if checkdefs is None:<EOL><INDENT>self.increment()<EOL><DEDENT>else:<EOL><INDENT>for checks, defs in checkdefs:<EOL><INDENT>self.checkdefs.append((checks[:], defs[:]))<EOL><DEDENT>self.set_position(-<NUM_LIT:1>)<EOL><DEDENT>self.names = names if names is not None else {}<EOL>self.var_index = var_index<EOL>self.others = []<EOL>self.guards = []<EOL>self.use_sentinel = False<EOL>
|
Creates the matcher.
|
f11260:c0:m0
|
def duplicate(self):
|
other = Matcher(self.loc, self.check_var, self.checkdefs, self.names, self.var_index)<EOL>other.insert_check(<NUM_LIT:0>, "<STR_LIT>" + self.check_var)<EOL>self.others.append(other)<EOL>return other<EOL>
|
Duplicates the matcher to others.
|
f11260:c0:m1
|
def add_guard(self, cond):
|
self.guards.append(cond)<EOL>
|
Adds cond as a guard.
|
f11260:c0:m2
|
def get_checks(self, position=None):
|
if position is None:<EOL><INDENT>position = self.position<EOL><DEDENT>return self.checkdefs[position][<NUM_LIT:0>]<EOL>
|
Gets the checks at the position.
|
f11260:c0:m3
|
def set_checks(self, checks, position=None):
|
if position is None:<EOL><INDENT>position = self.position<EOL><DEDENT>self.checkdefs[position][<NUM_LIT:0>] = checks<EOL>
|
Sets the checks at the position.
|
f11260:c0:m4
|
def get_defs(self, position=None):
|
if position is None:<EOL><INDENT>position = self.position<EOL><DEDENT>return self.checkdefs[position][<NUM_LIT:1>]<EOL>
|
Gets the defs at the position.
|
f11260:c0:m5
|
def set_defs(self, defs, position=None):
|
if position is None:<EOL><INDENT>position = self.position<EOL><DEDENT>self.checkdefs[position][<NUM_LIT:1>] = defs<EOL>
|
Sets the defs at the position.
|
f11260:c0:m6
|
def add_check(self, check_item):
|
self.checks.append(check_item)<EOL>for other in self.others:<EOL><INDENT>other.add_check(check_item)<EOL><DEDENT>
|
Adds a check universally.
|
f11260:c0:m7
|
def add_def(self, def_item):
|
self.defs.append(def_item)<EOL>for other in self.others:<EOL><INDENT>other.add_def(def_item)<EOL><DEDENT>
|
Adds a def universally.
|
f11260:c0:m8
|
def insert_check(self, index, check_item):
|
self.checks.insert(index, check_item)<EOL>for other in self.others:<EOL><INDENT>other.insert_check(index, check_item)<EOL><DEDENT>
|
Inserts a check universally.
|
f11260:c0:m9
|
def insert_def(self, index, def_item):
|
self.defs.insert(index, def_item)<EOL>for other in self.others:<EOL><INDENT>other.insert_def(index, def_item)<EOL><DEDENT>
|
Inserts a def universally.
|
f11260:c0:m10
|
def set_position(self, position):
|
if position < <NUM_LIT:0>:<EOL><INDENT>position += len(self.checkdefs)<EOL><DEDENT>while position >= len(self.checkdefs):<EOL><INDENT>self.checkdefs.append(([], []))<EOL><DEDENT>self.position = position<EOL>
|
Sets the if-statement position.
|
f11260:c0:m11
|
def increment(self, by=<NUM_LIT:1>):
|
self.set_position(self.position + by)<EOL>
|
Advances the if-statement position.
|
f11260:c0:m12
|
def decrement(self, by=<NUM_LIT:1>):
|
self.set_position(self.position - by)<EOL>
|
Decrements the if-statement position.
|
f11260:c0:m13
|
@contextmanager<EOL><INDENT>def down_a_level(self, by=<NUM_LIT:1>):<DEDENT>
|
self.increment(by)<EOL>try:<EOL><INDENT>yield<EOL><DEDENT>finally:<EOL><INDENT>self.decrement(by)<EOL><DEDENT>
|
Increment then decrement.
|
f11260:c0:m14
|
@contextmanager<EOL><INDENT>def only_self(self):<DEDENT>
|
others, self.others = self.others, []<EOL>try:<EOL><INDENT>yield<EOL><DEDENT>finally:<EOL><INDENT>self.others = others + self.others<EOL><DEDENT>
|
Only match in self, not in others.
|
f11260:c0:m15
|
def get_temp_var(self):
|
tempvar = match_temp_var + "<STR_LIT:_>" + str(self.var_index)<EOL>self.var_index += <NUM_LIT:1><EOL>return tempvar<EOL>
|
Gets the next match_temp_var.
|
f11260:c0:m16
|
def match_all_in(self, matches, item):
|
for i, match in enumerate(matches):<EOL><INDENT>self.match(match, item + "<STR_LIT:[>" + str(i) + "<STR_LIT:]>")<EOL><DEDENT>
|
Matches all matches to elements of item.
|
f11260:c0:m17
|
def check_len_in(self, min_len, max_len, item):
|
if max_len is None:<EOL><INDENT>if min_len:<EOL><INDENT>self.add_check("<STR_LIT>" + item + "<STR_LIT>" + str(min_len))<EOL><DEDENT><DEDENT>elif min_len == max_len:<EOL><INDENT>self.add_check("<STR_LIT>" + item + "<STR_LIT>" + str(min_len))<EOL><DEDENT>elif not min_len:<EOL><INDENT>self.add_check("<STR_LIT>" + item + "<STR_LIT>" + str(max_len))<EOL><DEDENT>else:<EOL><INDENT>self.add_check(str(min_len) + "<STR_LIT>" + item + "<STR_LIT>" + str(max_len))<EOL><DEDENT>
|
Checks that the length of item is in range(min_len, max_len+1).
|
f11260:c0:m18
|
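The emitted check strings can be previewed with a small sketch; only the tightest comparison needed for (min_len, max_len) is generated, and the _coconut.len spelling is assumed from the style of the other generated checks.

```python
# Sketch of the length-check generation; the "_coconut.len" spelling is an
# assumption based on the style of the other generated checks.
def len_check(min_len, max_len, item):
    if max_len is None:
        # unbounded above: only a lower bound is needed, if any
        return "_coconut.len(%s) >= %s" % (item, min_len) if min_len else None
    if min_len == max_len:
        return "_coconut.len(%s) == %s" % (item, min_len)
    if not min_len:
        return "_coconut.len(%s) <= %s" % (item, max_len)
    return "%s <= _coconut.len(%s) <= %s" % (min_len, item, max_len)

assert len_check(2, 2, "xs") == "_coconut.len(xs) == 2"
assert len_check(1, None, "xs") == "_coconut.len(xs) >= 1"
assert len_check(0, 3, "xs") == "_coconut.len(xs) <= 3"
```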
def match_function(self, args, kwargs, match_args=(), star_arg=None, kwd_args=(), dubstar_arg=None):
|
self.match_in_args_kwargs(match_args, args, kwargs, allow_star_args=star_arg is not None)<EOL>if star_arg is not None:<EOL><INDENT>self.match(star_arg, args + "<STR_LIT:[>" + str(len(match_args)) + "<STR_LIT>")<EOL><DEDENT>self.match_in_kwargs(kwd_args, kwargs)<EOL>with self.down_a_level():<EOL><INDENT>if dubstar_arg is None:<EOL><INDENT>self.add_check("<STR_LIT>" + kwargs)<EOL><DEDENT>else:<EOL><INDENT>self.match(dubstar_arg, kwargs)<EOL><DEDENT><DEDENT>
|
Matches a pattern-matching function.
|
f11260:c0:m19
|
def match_in_args_kwargs(self, match_args, args, kwargs, allow_star_args=False):
|
req_len = <NUM_LIT:0><EOL>arg_checks = {}<EOL>to_match = [] <EOL>for i, arg in enumerate(match_args):<EOL><INDENT>if isinstance(arg, tuple):<EOL><INDENT>(match, default) = arg<EOL><DEDENT>else:<EOL><INDENT>match, default = arg, None<EOL><DEDENT>names = get_match_names(match)<EOL>if default is None:<EOL><INDENT>if not names:<EOL><INDENT>req_len = i + <NUM_LIT:1><EOL>to_match.append((False, match, args + "<STR_LIT:[>" + str(i) + "<STR_LIT:]>"))<EOL><DEDENT>else:<EOL><INDENT>arg_checks[i] = (<EOL>"<STR_LIT>".join('<STR_LIT:">' + name + '<STR_LIT>' + kwargs for name in names),<EOL>"<STR_LIT>" + args + "<STR_LIT>" + str(i) + "<STR_LIT:U+002CU+0020>"<EOL>+ "<STR_LIT:U+002CU+0020>".join('<STR_LIT:">' + name + '<STR_LIT>' + kwargs for name in names)<EOL>+ "<STR_LIT>",<EOL>)<EOL>tempvar = self.get_temp_var()<EOL>self.add_def(<EOL>tempvar + "<STR_LIT>"<EOL>+ args + "<STR_LIT:[>" + str(i) + "<STR_LIT>" + args + "<STR_LIT>" + str(i) + "<STR_LIT>"<EOL>+ "<STR_LIT>".join(kwargs + '<STR_LIT>' + name + '<STR_LIT>' + name + '<STR_LIT>' + kwargs + "<STR_LIT>"<EOL>for name in names[:-<NUM_LIT:1>])<EOL>+ kwargs + '<STR_LIT>' + names[-<NUM_LIT:1>] + '<STR_LIT>',<EOL>)<EOL>to_match.append((True, match, tempvar))<EOL><DEDENT><DEDENT>else:<EOL><INDENT>if not names:<EOL><INDENT>tempvar = self.get_temp_var()<EOL>self.add_def(tempvar + "<STR_LIT>" + args + "<STR_LIT:[>" + str(i) + "<STR_LIT>" + args + "<STR_LIT>" + str(i) + "<STR_LIT>" + default)<EOL>to_match.append((True, match, tempvar))<EOL><DEDENT>else:<EOL><INDENT>arg_checks[i] = (<EOL>None,<EOL>"<STR_LIT>" + args + "<STR_LIT>" + str(i) + "<STR_LIT:U+002CU+0020>"<EOL>+ "<STR_LIT:U+002CU+0020>".join('<STR_LIT:">' + name + '<STR_LIT>' + kwargs for name in names)<EOL>+ "<STR_LIT>",<EOL>)<EOL>tempvar = self.get_temp_var()<EOL>self.add_def(<EOL>tempvar + "<STR_LIT>"<EOL>+ args + "<STR_LIT:[>" + str(i) + "<STR_LIT>" + args + "<STR_LIT>" + str(i) + "<STR_LIT>"<EOL>+ "<STR_LIT>".join(<EOL>kwargs + '<STR_LIT>' + name + '<STR_LIT>' + name + '<STR_LIT>' + kwargs + "<STR_LIT>"<EOL>for name in names<EOL>)<EOL>+ default,<EOL>)<EOL>to_match.append((True, match, tempvar))<EOL><DEDENT><DEDENT><DEDENT>max_len = None if allow_star_args else len(match_args)<EOL>self.check_len_in(req_len, max_len, args)<EOL>for i in sorted(arg_checks):<EOL><INDENT>lt_check, ge_check = arg_checks[i]<EOL>if i < req_len:<EOL><INDENT>if lt_check is not None:<EOL><INDENT>self.add_check(lt_check)<EOL><DEDENT><DEDENT>else:<EOL><INDENT>if ge_check is not None:<EOL><INDENT>self.add_check(ge_check)<EOL><DEDENT><DEDENT><DEDENT>for move_down, match, against in to_match:<EOL><INDENT>if move_down:<EOL><INDENT>with self.down_a_level():<EOL><INDENT>self.match(match, against)<EOL><DEDENT><DEDENT>else:<EOL><INDENT>self.match(match, against)<EOL><DEDENT><DEDENT>
|
Matches against args or kwargs.
|
f11260:c0:m20
|
def match_in_kwargs(self, match_args, kwargs):
|
for match, default in match_args:<EOL><INDENT>names = get_match_names(match)<EOL>if names:<EOL><INDENT>tempvar = self.get_temp_var()<EOL>self.add_def(<EOL>tempvar + "<STR_LIT>"<EOL>+ "<STR_LIT>".join(<EOL>kwargs + '<STR_LIT>' + name + '<STR_LIT>' + name + '<STR_LIT>' + kwargs + "<STR_LIT>"<EOL>for name in names<EOL>)<EOL>+ default,<EOL>)<EOL>with self.down_a_level():<EOL><INDENT>self.match(match, tempvar)<EOL><DEDENT><DEDENT>else:<EOL><INDENT>raise CoconutDeferredSyntaxError("<STR_LIT>", self.loc)<EOL><DEDENT><DEDENT>
|
Matches against kwargs.
|
f11260:c0:m21
|
def match_dict(self, tokens, item):
|
if len(tokens) == <NUM_LIT:1>:<EOL><INDENT>matches, rest = tokens[<NUM_LIT:0>], None<EOL><DEDENT>else:<EOL><INDENT>matches, rest = tokens<EOL><DEDENT>self.add_check("<STR_LIT>" + item + "<STR_LIT>")<EOL>if rest is None:<EOL><INDENT>self.add_check("<STR_LIT>" + item + "<STR_LIT>" + str(len(matches)))<EOL><DEDENT>if matches:<EOL><INDENT>self.use_sentinel = True<EOL><DEDENT>for k, v in matches:<EOL><INDENT>key_var = self.get_temp_var()<EOL>self.add_def(key_var + "<STR_LIT>" + item + "<STR_LIT>" + k + "<STR_LIT:U+002CU+0020>" + sentinel_var + "<STR_LIT:)>")<EOL>with self.down_a_level():<EOL><INDENT>self.add_check(key_var + "<STR_LIT>" + sentinel_var)<EOL>self.match(v, key_var)<EOL><DEDENT><DEDENT>if rest is not None and rest != wildcard:<EOL><INDENT>match_keys = [k for k, v in matches]<EOL>with self.down_a_level():<EOL><INDENT>self.add_def(<EOL>rest + "<STR_LIT>"<EOL>+ item + "<STR_LIT>"<EOL>+ "<STR_LIT:U+002CU+0020>".join(match_keys) + ("<STR_LIT:U+002C>" if len(match_keys) == <NUM_LIT:1> else "<STR_LIT>")<EOL>+ "<STR_LIT>",<EOL>)<EOL><DEDENT><DEDENT>
|
Matches a dictionary.
|
f11260:c0:m22
|
def assign_to_series(self, name, series_type, item):
|
if series_type == "<STR_LIT:(>":<EOL><INDENT>self.add_def(name + "<STR_LIT>" + item + "<STR_LIT:)>")<EOL><DEDENT>elif series_type == "<STR_LIT:[>":<EOL><INDENT>self.add_def(name + "<STR_LIT>" + item + "<STR_LIT:)>")<EOL><DEDENT>else:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", series_type)<EOL><DEDENT>
|
Assign name to item converted to the given series_type.
|
f11260:c0:m23
|
def match_sequence(self, tokens, item):
|
tail = None<EOL>if len(tokens) == <NUM_LIT:2>:<EOL><INDENT>series_type, matches = tokens<EOL><DEDENT>else:<EOL><INDENT>series_type, matches, tail = tokens<EOL><DEDENT>self.add_check("<STR_LIT>" + item + "<STR_LIT>")<EOL>if tail is None:<EOL><INDENT>self.add_check("<STR_LIT>" + item + "<STR_LIT>" + str(len(matches)))<EOL><DEDENT>else:<EOL><INDENT>self.add_check("<STR_LIT>" + item + "<STR_LIT>" + str(len(matches)))<EOL>if tail != wildcard:<EOL><INDENT>if len(matches) > <NUM_LIT:0>:<EOL><INDENT>splice = "<STR_LIT:[>" + str(len(matches)) + "<STR_LIT>"<EOL><DEDENT>else:<EOL><INDENT>splice = "<STR_LIT>"<EOL><DEDENT>self.assign_to_series(tail, series_type, item + splice)<EOL><DEDENT><DEDENT>self.match_all_in(matches, item)<EOL>
|
Matches a sequence.
|
f11260:c0:m24
|
def match_iterator(self, tokens, item):
|
tail = None<EOL>if len(tokens) == <NUM_LIT:2>:<EOL><INDENT>_, matches = tokens<EOL><DEDENT>else:<EOL><INDENT>_, matches, tail = tokens<EOL><DEDENT>self.add_check("<STR_LIT>" + item + "<STR_LIT>")<EOL>if tail is None:<EOL><INDENT>itervar = self.get_temp_var()<EOL>self.add_def(itervar + "<STR_LIT>" + item + "<STR_LIT:)>")<EOL><DEDENT>elif matches:<EOL><INDENT>itervar = self.get_temp_var()<EOL>if tail == wildcard:<EOL><INDENT>tail = item<EOL><DEDENT>else:<EOL><INDENT>self.add_def(tail + "<STR_LIT>" + item + "<STR_LIT:)>")<EOL><DEDENT>self.add_def(itervar + "<STR_LIT>" + tail + "<STR_LIT>" + str(len(matches)) + "<STR_LIT>")<EOL><DEDENT>else:<EOL><INDENT>itervar = None<EOL>if tail != wildcard:<EOL><INDENT>self.add_def(tail + "<STR_LIT>" + item)<EOL><DEDENT><DEDENT>if itervar is not None:<EOL><INDENT>with self.down_a_level():<EOL><INDENT>self.add_check("<STR_LIT>" + itervar + "<STR_LIT>" + str(len(matches)))<EOL>self.match_all_in(matches, itervar)<EOL><DEDENT><DEDENT>
|
Matches a lazy list or a chain.
|
f11260:c0:m25
|
def match_star(self, tokens, item):
|
head_matches, last_matches = None, None<EOL>if len(tokens) == <NUM_LIT:1>:<EOL><INDENT>middle = tokens[<NUM_LIT:0>]<EOL><DEDENT>elif len(tokens) == <NUM_LIT:2>:<EOL><INDENT>if isinstance(tokens[<NUM_LIT:0>], str):<EOL><INDENT>middle, last_matches = tokens<EOL><DEDENT>else:<EOL><INDENT>head_matches, middle = tokens<EOL><DEDENT><DEDENT>else:<EOL><INDENT>head_matches, middle, last_matches = tokens<EOL><DEDENT>self.add_check("<STR_LIT>" + item + "<STR_LIT>")<EOL>if head_matches is None and last_matches is None:<EOL><INDENT>if middle != wildcard:<EOL><INDENT>self.add_def(middle + "<STR_LIT>" + item + "<STR_LIT:)>")<EOL><DEDENT><DEDENT>else:<EOL><INDENT>itervar = self.get_temp_var()<EOL>self.add_def(itervar + "<STR_LIT>" + item + "<STR_LIT:)>")<EOL>with self.down_a_level():<EOL><INDENT>req_length = (len(head_matches) if head_matches is not None else <NUM_LIT:0>) + (len(last_matches) if last_matches is not None else <NUM_LIT:0>)<EOL>self.add_check("<STR_LIT>" + itervar + "<STR_LIT>" + str(req_length))<EOL>if middle != wildcard:<EOL><INDENT>head_splice = str(len(head_matches)) if head_matches is not None else "<STR_LIT>"<EOL>last_splice = "<STR_LIT:->" + str(len(last_matches)) if last_matches is not None else "<STR_LIT>"<EOL>self.add_def(middle + "<STR_LIT>" + itervar + "<STR_LIT:[>" + head_splice + "<STR_LIT::>" + last_splice + "<STR_LIT:]>")<EOL><DEDENT>if head_matches is not None:<EOL><INDENT>self.match_all_in(head_matches, itervar)<EOL><DEDENT>if last_matches is not None:<EOL><INDENT>for x in range(<NUM_LIT:1>, len(last_matches) + <NUM_LIT:1>):<EOL><INDENT>self.match(last_matches[-x], itervar + "<STR_LIT>" + str(x) + "<STR_LIT:]>")<EOL><DEDENT><DEDENT><DEDENT><DEDENT>
|
Matches starred assignment.
|
f11260:c0:m26
|
def match_rsequence(self, tokens, item):
|
front, series_type, matches = tokens<EOL>self.add_check("<STR_LIT>" + item + "<STR_LIT>")<EOL>self.add_check("<STR_LIT>" + item + "<STR_LIT>" + str(len(matches)))<EOL>if front != wildcard:<EOL><INDENT>if len(matches):<EOL><INDENT>splice = "<STR_LIT>" + str(-len(matches)) + "<STR_LIT:]>"<EOL><DEDENT>else:<EOL><INDENT>splice = "<STR_LIT>"<EOL><DEDENT>self.assign_to_series(front, series_type, item + splice)<EOL><DEDENT>for i, match in enumerate(matches):<EOL><INDENT>self.match(match, item + "<STR_LIT:[>" + str(i - len(matches)) + "<STR_LIT:]>")<EOL><DEDENT>
|
Matches a reverse sequence.
|
f11260:c0:m27
|
def match_msequence(self, tokens, item):
|
series_type, head_matches, middle, _, last_matches = tokens<EOL>self.add_check("<STR_LIT>" + item + "<STR_LIT>")<EOL>self.add_check("<STR_LIT>" + item + "<STR_LIT>" + str(len(head_matches) + len(last_matches)))<EOL>if middle != wildcard:<EOL><INDENT>if len(head_matches) and len(last_matches):<EOL><INDENT>splice = "<STR_LIT:[>" + str(len(head_matches)) + "<STR_LIT::>" + str(-len(last_matches)) + "<STR_LIT:]>"<EOL><DEDENT>elif len(head_matches):<EOL><INDENT>splice = "<STR_LIT:[>" + str(len(head_matches)) + "<STR_LIT>"<EOL><DEDENT>elif len(last_matches):<EOL><INDENT>splice = "<STR_LIT>" + str(-len(last_matches)) + "<STR_LIT:]>"<EOL><DEDENT>else:<EOL><INDENT>splice = "<STR_LIT>"<EOL><DEDENT>self.assign_to_series(middle, series_type, item + splice)<EOL><DEDENT>self.match_all_in(head_matches, item)<EOL>for i, match in enumerate(last_matches):<EOL><INDENT>self.match(match, item + "<STR_LIT:[>" + str(i - len(last_matches)) + "<STR_LIT:]>")<EOL><DEDENT>
|
Matches a middle sequence.
|
f11260:c0:m28
|
def match_string(self, tokens, item):
|
prefix, name = tokens<EOL>return self.match_mstring((prefix, name, None), item, use_bytes=prefix.startswith("<STR_LIT:b>"))<EOL>
|
Match prefix string.
|
f11260:c0:m29
|
def match_rstring(self, tokens, item):
|
name, suffix = tokens<EOL>return self.match_mstring((None, name, suffix), item, use_bytes=suffix.startswith("<STR_LIT:b>"))<EOL>
|
Match suffix string.
|
f11260:c0:m30
|
def match_mstring(self, tokens, item, use_bytes=None):
|
prefix, name, suffix = tokens<EOL>if use_bytes is None:<EOL><INDENT>if prefix.startswith("<STR_LIT:b>") or suffix.startswith("<STR_LIT:b>"):<EOL><INDENT>if prefix.startswith("<STR_LIT:b>") and suffix.startswith("<STR_LIT:b>"):<EOL><INDENT>use_bytes = True<EOL><DEDENT>else:<EOL><INDENT>raise CoconutDeferredSyntaxError("<STR_LIT>", self.loc)<EOL><DEDENT><DEDENT><DEDENT>if use_bytes:<EOL><INDENT>self.add_check("<STR_LIT>" + item + "<STR_LIT>")<EOL><DEDENT>else:<EOL><INDENT>self.add_check("<STR_LIT>" + item + "<STR_LIT>")<EOL><DEDENT>if prefix is not None:<EOL><INDENT>self.add_check(item + "<STR_LIT>" + prefix + "<STR_LIT:)>")<EOL><DEDENT>if suffix is not None:<EOL><INDENT>self.add_check(item + "<STR_LIT>" + suffix + "<STR_LIT:)>")<EOL><DEDENT>if name != wildcard:<EOL><INDENT>self.add_def(<EOL>name + "<STR_LIT>" + item + "<STR_LIT:[>" +<EOL>("<STR_LIT>" if prefix is None else "<STR_LIT>" + prefix + "<STR_LIT:)>") + "<STR_LIT::>"<EOL>+ ("<STR_LIT>" if suffix is None else "<STR_LIT>" + suffix + "<STR_LIT:)>") + "<STR_LIT:]>",<EOL>)<EOL><DEDENT>
|
Match prefix and suffix string.
|
f11260:c0:m31
|
def match_const(self, tokens, item):
|
match, = tokens<EOL>if match in const_vars:<EOL><INDENT>self.add_check(item + "<STR_LIT>" + match)<EOL><DEDENT>else:<EOL><INDENT>self.add_check(item + "<STR_LIT>" + match)<EOL><DEDENT>
|
Matches a constant.
|
f11260:c0:m32
|
def match_var(self, tokens, item):
|
setvar, = tokens<EOL>if setvar != wildcard:<EOL><INDENT>if setvar in self.names:<EOL><INDENT>self.add_check(self.names[setvar] + "<STR_LIT>" + item)<EOL><DEDENT>else:<EOL><INDENT>self.add_def(setvar + "<STR_LIT>" + item)<EOL>self.names[setvar] = item<EOL><DEDENT><DEDENT>
|
Matches a variable.
|
f11260:c0:m33
|
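The binding-versus-equality behavior is the key point here: the first occurrence of a name defines it, while a repeated occurrence compiles to an equality check against the first binding. A minimal sketch with module-level state:

```python
# Minimal sketch of match_var semantics: first occurrence binds the name,
# any repeated occurrence becomes an equality check instead.
names = {}
checks, defs = [], []

def match_var(setvar, item, wildcard="_"):
    if setvar == wildcard:
        return                                        # wildcard binds nothing
    if setvar in names:
        checks.append("%s == %s" % (names[setvar], item))
    else:
        defs.append("%s = %s" % (setvar, item))
        names[setvar] = item

match_var("x", "arg[0]")
match_var("x", "arg[1]")   # same name again: equality check, not rebinding
assert defs == ["x = arg[0]"]
assert checks == ["arg[0] == arg[1]"]
```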
def match_set(self, tokens, item):
|
match, = tokens<EOL>self.add_check("<STR_LIT>" + item + "<STR_LIT>")<EOL>self.add_check("<STR_LIT>" + item + "<STR_LIT>" + str(len(match)))<EOL>for const in match:<EOL><INDENT>self.add_check(const + "<STR_LIT>" + item)<EOL><DEDENT>
|
Matches a set.
|
f11260:c0:m34
|
def match_data(self, tokens, item):
|
if len(tokens) == <NUM_LIT:2>:<EOL><INDENT>data_type, matches = tokens<EOL>star_match = None<EOL><DEDENT>elif len(tokens) == <NUM_LIT:3>:<EOL><INDENT>data_type, matches, star_match = tokens<EOL><DEDENT>else:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", tokens)<EOL><DEDENT>self.add_check("<STR_LIT>" + item + "<STR_LIT:U+002CU+0020>" + data_type + "<STR_LIT:)>")<EOL>if star_match is None:<EOL><INDENT>self.add_check("<STR_LIT>" + item + "<STR_LIT>" + str(len(matches)))<EOL><DEDENT>elif len(matches):<EOL><INDENT>self.add_check("<STR_LIT>" + item + "<STR_LIT>" + str(len(matches)))<EOL><DEDENT>self.match_all_in(matches, item)<EOL>if star_match is not None:<EOL><INDENT>self.match(star_match, item + "<STR_LIT:[>" + str(len(matches)) + "<STR_LIT>")<EOL><DEDENT>
|
Matches a data type.
|
f11260:c0:m35
|
def match_paren(self, tokens, item):
|
match, = tokens<EOL>return self.match(match, item)<EOL>
|
Matches a paren.
|
f11260:c0:m36
|
def match_trailer(self, tokens, item):
|
internal_assert(len(tokens) > <NUM_LIT:1> and len(tokens) % <NUM_LIT:2> == <NUM_LIT:1>, "<STR_LIT>", tokens)<EOL>match, trailers = tokens[<NUM_LIT:0>], tokens[<NUM_LIT:1>:]<EOL>for i in range(<NUM_LIT:0>, len(trailers), <NUM_LIT:2>):<EOL><INDENT>op, arg = trailers[i], trailers[i + <NUM_LIT:1>]<EOL>if op == "<STR_LIT>":<EOL><INDENT>self.add_check("<STR_LIT>" + item + "<STR_LIT:U+002CU+0020>" + arg + "<STR_LIT:)>")<EOL><DEDENT>elif op == "<STR_LIT>":<EOL><INDENT>if arg in self.names:<EOL><INDENT>self.add_check(self.names[arg] + "<STR_LIT>" + item)<EOL><DEDENT>elif arg != wildcard:<EOL><INDENT>self.add_def(arg + "<STR_LIT>" + item)<EOL>self.names[arg] = item<EOL><DEDENT><DEDENT>else:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", op)<EOL><DEDENT><DEDENT>self.match(match, item)<EOL>
|
Matches typedefs and as patterns.
|
f11260:c0:m37
|
def match_and(self, tokens, item):
|
for match in tokens:<EOL><INDENT>self.match(match, item)<EOL><DEDENT>
|
Matches and.
|
f11260:c0:m38
|
def match_or(self, tokens, item):
|
for x in range(<NUM_LIT:1>, len(tokens)):<EOL><INDENT>self.duplicate().match(tokens[x], item)<EOL><DEDENT>with self.only_self():<EOL><INDENT>self.match(tokens[<NUM_LIT:0>], item)<EOL><DEDENT>
|
Matches or.
|
f11260:c0:m39
|
def match(self, tokens, item):
|
for flag, get_handler in self.matchers.items():<EOL><INDENT>if flag in tokens:<EOL><INDENT>return get_handler(self)(tokens, item)<EOL><DEDENT><DEDENT>raise CoconutInternalException("<STR_LIT>", tokens)<EOL>
|
Performs pattern-matching processing.
|
f11260:c0:m40
|
def out(self):
|
out = "<STR_LIT>"<EOL>if self.use_sentinel:<EOL><INDENT>out += sentinel_var + "<STR_LIT>"<EOL><DEDENT>closes = <NUM_LIT:0><EOL>for checks, defs in self.checkdefs:<EOL><INDENT>if checks:<EOL><INDENT>out += "<STR_LIT>" + paren_join(checks, "<STR_LIT>") + "<STR_LIT>" + openindent<EOL>closes += <NUM_LIT:1><EOL><DEDENT>if defs:<EOL><INDENT>out += "<STR_LIT:\n>".join(defs) + "<STR_LIT:\n>"<EOL><DEDENT><DEDENT>return out + (<EOL>self.check_var + "<STR_LIT>"<EOL>+ closeindent * closes<EOL>+ "<STR_LIT>".join(other.out() for other in self.others)<EOL>+ (<EOL>"<STR_LIT>" + self.check_var + "<STR_LIT>"<EOL>+ paren_join(self.guards, "<STR_LIT>")<EOL>+ "<STR_LIT>" + openindent<EOL>+ self.check_var + "<STR_LIT>" + closeindent<EOL>if self.guards else "<STR_LIT>"<EOL>)<EOL>)<EOL>
|
Return pattern-matching code.
|
f11260:c0:m41
|
def build(self, stmts=None, set_check_var=True, invert=False):
|
out = "<STR_LIT>"<EOL>if set_check_var:<EOL><INDENT>out += self.check_var + "<STR_LIT>"<EOL><DEDENT>out += self.out()<EOL>if stmts is not None:<EOL><INDENT>out += "<STR_LIT>" + ("<STR_LIT>" if invert else "<STR_LIT>") + self.check_var + "<STR_LIT::>" + "<STR_LIT:\n>" + openindent + "<STR_LIT>".join(stmts) + closeindent<EOL><DEDENT>return out<EOL>
|
Construct code for performing the match then executing stmts.
|
f11260:c0:m42
|
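A hand-written illustration of the code shape the matcher ultimately produces for a length-2 sequence pattern; the _coconut_match_* names are assumed stand-ins for the real generated variables, and plain isinstance/len substitute for the _coconut runtime lookups.

```python
import collections.abc as abc

# Hand-written illustration of build()'s output shape for matching a
# length-2 sequence into x and y.
item = (1, 2)
_coconut_match_check = False          # set_check_var=True emits this reset
_coconut_match_to = item
if isinstance(_coconut_match_to, abc.Sequence) and len(_coconut_match_to) == 2:
    x = _coconut_match_to[0]          # defs run inside the checks' if-block
    y = _coconut_match_to[1]
    _coconut_match_check = True       # out() flips the check var at the end
if _coconut_match_check:              # build() guards stmts on the check var
    assert (x, y) == (1, 2)
```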
def add_coconut_to_path():
|
try:<EOL><INDENT>import coconut <EOL><DEDENT>except ImportError:<EOL><INDENT>sys.path.insert(<NUM_LIT:0>, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))<EOL><DEDENT>
|
Adds coconut to sys.path if it isn't there already.
|
f11262:m0
|