| signature (string, 8–3.44k chars) | body (string, 0–1.41M chars) | docstring (string, 1–122k chars) | id (string, 5–17 chars) |
|---|---|---|---|
def passthrough_proc(self, inputstring, **kwargs):
|
out = []<EOL>found = None <EOL>hold = None <EOL>count = None <EOL>multiline = None <EOL>skips = self.copy_skips()<EOL>for i, c in enumerate(append_it(inputstring, "<STR_LIT:\n>")):<EOL><INDENT>if hold is not None:<EOL><INDENT>count += paren_change(c, opens="<STR_LIT:(>", closes="<STR_LIT:)>")<EOL>if count >= <NUM_LIT:0> and c == hold:<EOL><INDENT>out.append(self.wrap_passthrough(found, multiline))<EOL>found = None<EOL>hold = None<EOL>count = None<EOL>multiline = None<EOL><DEDENT>else:<EOL><INDENT>if c == "<STR_LIT:\n>":<EOL><INDENT>skips = addskip(skips, self.adjust(lineno(i, inputstring)))<EOL><DEDENT>found += c<EOL><DEDENT><DEDENT>elif found:<EOL><INDENT>if c == "<STR_LIT:\\>":<EOL><INDENT>found = "<STR_LIT>"<EOL>hold = "<STR_LIT:\n>"<EOL>count = <NUM_LIT:0><EOL>multiline = False<EOL><DEDENT>elif c == "<STR_LIT:(>":<EOL><INDENT>found = "<STR_LIT>"<EOL>hold = "<STR_LIT:)>"<EOL>count = -<NUM_LIT:1><EOL>multiline = True<EOL><DEDENT>else:<EOL><INDENT>out.append("<STR_LIT:\\>" + c)<EOL>found = None<EOL><DEDENT><DEDENT>elif c == "<STR_LIT:\\>":<EOL><INDENT>found = True<EOL><DEDENT>else:<EOL><INDENT>out.append(c)<EOL><DEDENT><DEDENT>if hold is not None or found is not None:<EOL><INDENT>raise self.make_err(CoconutSyntaxError, "<STR_LIT>", inputstring, i)<EOL><DEDENT>self.set_skips(skips)<EOL>return "<STR_LIT>".join(out)<EOL>
|
Process Python passthroughs.
|
f11255:c0:m30
|
def leading_whitespace(self, inputstring):
|
count = <NUM_LIT:0><EOL>for i, c in enumerate(inputstring):<EOL><INDENT>if c == "<STR_LIT:U+0020>":<EOL><INDENT>count += <NUM_LIT:1><EOL><DEDENT>elif c == "<STR_LIT:\t>":<EOL><INDENT>count += tabworth - (i % tabworth)<EOL><DEDENT>else:<EOL><INDENT>break<EOL><DEDENT>if self.indchar is None:<EOL><INDENT>self.indchar = c<EOL><DEDENT>elif c != self.indchar:<EOL><INDENT>self.strict_err_or_warn("<STR_LIT>", inputstring, i)<EOL><DEDENT><DEDENT>return count<EOL>
|
Count leading whitespace.
|
f11255:c0:m31
|
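A note on the tab handling in `leading_whitespace` above: a tab advances the count to the next tab stop (`tabworth - (i % tabworth)`) rather than adding a fixed width. A minimal runnable sketch of that logic, assuming a tab-stop width of 8 (the actual `tabworth` constant is masked in the body):

```python
TABWORTH = 8  # assumed tab-stop width; the real constant is masked above

def leading_whitespace_width(line):
    """Count leading whitespace, advancing each tab to the next tab stop."""
    count = 0
    for i, c in enumerate(line):
        if c == " ":
            count += 1
        elif c == "\t":
            count += TABWORTH - (i % TABWORTH)  # distance to the next tab stop
        else:
            break
    return count

assert leading_whitespace_width("    x") == 4
assert leading_whitespace_width("\tx") == 8
assert leading_whitespace_width("  \tx") == 8  # tab jumps from column 2 to 8
```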
def ind_proc(self, inputstring, **kwargs):
|
lines = inputstring.splitlines()<EOL>new = [] <EOL>opens = [] <EOL>current = None <EOL>levels = [] <EOL>skips = self.copy_skips()<EOL>for ln in range(<NUM_LIT:1>, len(lines) + <NUM_LIT:1>): <EOL><INDENT>line = lines[ln - <NUM_LIT:1>] <EOL>line_rstrip = line.rstrip()<EOL>if line != line_rstrip:<EOL><INDENT>if self.strict:<EOL><INDENT>raise self.make_err(CoconutStyleError, "<STR_LIT>", line, len(line), self.adjust(ln))<EOL><DEDENT>line = line_rstrip<EOL><DEDENT>last = rem_comment(new[-<NUM_LIT:1>]) if new else None<EOL>if not line or line.lstrip().startswith("<STR_LIT:#>"): <EOL><INDENT>if opens: <EOL><INDENT>skips = addskip(skips, self.adjust(ln))<EOL><DEDENT>else:<EOL><INDENT>new.append(line)<EOL><DEDENT><DEDENT>elif last is not None and last.endswith("<STR_LIT:\\>"): <EOL><INDENT>if self.strict:<EOL><INDENT>raise self.make_err(CoconutStyleError, "<STR_LIT>", last, len(last), self.adjust(ln - <NUM_LIT:1>))<EOL><DEDENT>skips = addskip(skips, self.adjust(ln))<EOL>new[-<NUM_LIT:1>] = last[:-<NUM_LIT:1>] + "<STR_LIT:U+0020>" + line<EOL><DEDENT>elif opens: <EOL><INDENT>skips = addskip(skips, self.adjust(ln))<EOL>new[-<NUM_LIT:1>] = last + "<STR_LIT:U+0020>" + line<EOL><DEDENT>else:<EOL><INDENT>check = self.leading_whitespace(line)<EOL>if current is None:<EOL><INDENT>if check:<EOL><INDENT>raise self.make_err(CoconutSyntaxError, "<STR_LIT>", line, <NUM_LIT:0>, self.adjust(ln))<EOL><DEDENT>else:<EOL><INDENT>current = <NUM_LIT:0><EOL><DEDENT><DEDENT>elif check > current:<EOL><INDENT>levels.append(current)<EOL>current = check<EOL>line = openindent + line<EOL><DEDENT>elif check in levels:<EOL><INDENT>point = levels.index(check) + <NUM_LIT:1><EOL>line = closeindent * (len(levels[point:]) + <NUM_LIT:1>) + line<EOL>levels = levels[:point]<EOL>current = levels.pop()<EOL><DEDENT>elif current != check:<EOL><INDENT>raise self.make_err(CoconutSyntaxError, "<STR_LIT>", line, <NUM_LIT:0>, self.adjust(ln))<EOL><DEDENT>new.append(line)<EOL><DEDENT>count = paren_change(line) <EOL>if count > len(opens):<EOL><INDENT>raise self.make_err(CoconutSyntaxError, "<STR_LIT>", new[-<NUM_LIT:1>], <NUM_LIT:0>, self.adjust(len(new)))<EOL><DEDENT>elif count > <NUM_LIT:0>: <EOL><INDENT>for _ in range(count):<EOL><INDENT>opens.pop()<EOL><DEDENT><DEDENT>elif count < <NUM_LIT:0>: <EOL><INDENT>opens += [(new[-<NUM_LIT:1>], self.adjust(len(new)))] * (-count)<EOL><DEDENT><DEDENT>self.set_skips(skips)<EOL>if new:<EOL><INDENT>last = rem_comment(new[-<NUM_LIT:1>])<EOL>if last.endswith("<STR_LIT:\\>"):<EOL><INDENT>raise self.make_err(CoconutSyntaxError, "<STR_LIT>", new[-<NUM_LIT:1>], len(new[-<NUM_LIT:1>]), self.adjust(len(new)))<EOL><DEDENT>if opens:<EOL><INDENT>line, adj_ln = opens[<NUM_LIT:0>]<EOL>raise self.make_err(CoconutSyntaxError, "<STR_LIT>", line, <NUM_LIT:0>, adj_ln)<EOL><DEDENT><DEDENT>new.append(closeindent * len(levels))<EOL>return "<STR_LIT:\n>".join(new)<EOL>
|
Process indentation.
|
f11255:c0:m32
|
def stmt_lambda_proc(self, inputstring, **kwargs):
|
regexes = []<EOL>for i in range(len(self.stmt_lambdas)):<EOL><INDENT>name = self.stmt_lambda_name(i)<EOL>regex = compile_regex(r"<STR_LIT>" % (name,))<EOL>regexes.append(regex)<EOL><DEDENT>out = []<EOL>for line in inputstring.splitlines():<EOL><INDENT>for i, regex in enumerate(regexes):<EOL><INDENT>if regex.search(line):<EOL><INDENT>indent, line = split_leading_indent(line)<EOL>out.append(indent + self.stmt_lambdas[i])<EOL><DEDENT><DEDENT>out.append(line)<EOL><DEDENT>return "<STR_LIT:\n>".join(out)<EOL>
|
Add statement lambda definitions.
|
f11255:c0:m33
|
@property<EOL><INDENT>def tabideal(self):<DEDENT>
|
return <NUM_LIT:1> if self.minify else tabideal<EOL>
|
Local tabideal.
|
f11255:c0:m34
|
def reind_proc(self, inputstring, **kwargs):
|
out = []<EOL>level = <NUM_LIT:0><EOL>for line in inputstring.splitlines():<EOL><INDENT>line, comment = split_comment(line.strip())<EOL>indent, line = split_leading_indent(line)<EOL>level += ind_change(indent)<EOL>if line:<EOL><INDENT>line = "<STR_LIT:U+0020>" * self.tabideal * level + line<EOL><DEDENT>line, indent = split_trailing_indent(line)<EOL>level += ind_change(indent)<EOL>line = (line + comment).rstrip()<EOL>out.append(line)<EOL><DEDENT>if level != <NUM_LIT:0>:<EOL><INDENT>complain(CoconutInternalException("<STR_LIT>", level))<EOL><DEDENT>return "<STR_LIT:\n>".join(out)<EOL>
|
Add back indentation.
|
f11255:c0:m35
|
def ln_comment(self, ln):
|
if self.keep_lines:<EOL><INDENT>if not <NUM_LIT:1> <= ln <= len(self.original_lines) + <NUM_LIT:1>:<EOL><INDENT>raise CoconutInternalException(<EOL>"<STR_LIT>", ln,<EOL>"<STR_LIT>" + str(len(self.original_lines) + <NUM_LIT:1>) + "<STR_LIT:]>",<EOL>)<EOL><DEDENT>elif ln == len(self.original_lines) + <NUM_LIT:1>: <EOL><INDENT>lni = -<NUM_LIT:1><EOL><DEDENT>else:<EOL><INDENT>lni = ln - <NUM_LIT:1><EOL><DEDENT><DEDENT>if self.line_numbers and self.keep_lines:<EOL><INDENT>if self.minify:<EOL><INDENT>comment = str(ln) + "<STR_LIT:U+0020>" + self.original_lines[lni]<EOL><DEDENT>else:<EOL><INDENT>comment = "<STR_LIT>" + str(ln) + "<STR_LIT>" + self.original_lines[lni]<EOL><DEDENT><DEDENT>elif self.keep_lines:<EOL><INDENT>if self.minify:<EOL><INDENT>comment = self.original_lines[lni]<EOL><DEDENT>else:<EOL><INDENT>comment = "<STR_LIT:U+0020>" + self.original_lines[lni]<EOL><DEDENT><DEDENT>elif self.line_numbers:<EOL><INDENT>if self.minify:<EOL><INDENT>comment = str(ln)<EOL><DEDENT>else:<EOL><INDENT>comment = "<STR_LIT>" + str(ln)<EOL><DEDENT><DEDENT>else:<EOL><INDENT>return "<STR_LIT>"<EOL><DEDENT>return self.wrap_comment(comment, reformat=False)<EOL>
|
Get an end-of-line comment. CoconutInternalExceptions raised here should always be caught and complained about.
|
f11255:c0:m36
|
def endline_repl(self, inputstring, reformatting=False, **kwargs):
|
out = []<EOL>ln = <NUM_LIT:1> <EOL>for line in inputstring.splitlines():<EOL><INDENT>add_one_to_ln = False<EOL>try:<EOL><INDENT>if line.endswith(lnwrapper):<EOL><INDENT>line, index = line[:-<NUM_LIT:1>].rsplit("<STR_LIT:#>", <NUM_LIT:1>)<EOL>new_ln = self.get_ref("<STR_LIT>", index)<EOL>if new_ln < ln:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", (ln, new_ln))<EOL><DEDENT>ln = new_ln<EOL>line = line.rstrip()<EOL>add_one_to_ln = True<EOL><DEDENT>if not reformatting or add_one_to_ln: <EOL><INDENT>line += self.comments.get(ln, "<STR_LIT>")<EOL><DEDENT>if not reformatting and line.rstrip() and not line.lstrip().startswith("<STR_LIT:#>"):<EOL><INDENT>line += self.ln_comment(ln)<EOL><DEDENT><DEDENT>except CoconutInternalException as err:<EOL><INDENT>complain(err)<EOL><DEDENT>out.append(line)<EOL>if add_one_to_ln:<EOL><INDENT>ln += <NUM_LIT:1><EOL><DEDENT><DEDENT>return "<STR_LIT:\n>".join(out)<EOL>
|
Add end of line comments.
|
f11255:c0:m37
|
def passthrough_repl(self, inputstring, **kwargs):
|
out = []<EOL>index = None<EOL>for c in append_it(inputstring, None):<EOL><INDENT>try:<EOL><INDENT>if index is not None:<EOL><INDENT>if c is not None and c in nums:<EOL><INDENT>index += c<EOL><DEDENT>elif c == unwrapper and index:<EOL><INDENT>ref = self.get_ref("<STR_LIT>", index)<EOL>out.append(ref)<EOL>index = None<EOL><DEDENT>elif c != "<STR_LIT:\\>" or index:<EOL><INDENT>out.append("<STR_LIT:\\>" + index)<EOL>if c is not None:<EOL><INDENT>out.append(c)<EOL><DEDENT>index = None<EOL><DEDENT><DEDENT>elif c is not None:<EOL><INDENT>if c == "<STR_LIT:\\>":<EOL><INDENT>index = "<STR_LIT>"<EOL><DEDENT>else:<EOL><INDENT>out.append(c)<EOL><DEDENT><DEDENT><DEDENT>except CoconutInternalException as err:<EOL><INDENT>complain(err)<EOL>if index is not None:<EOL><INDENT>out.append(index)<EOL>index = None<EOL><DEDENT>out.append(c)<EOL><DEDENT><DEDENT>return "<STR_LIT>".join(out)<EOL>
|
Add back passthroughs.
|
f11255:c0:m38
|
def str_repl(self, inputstring, **kwargs):
|
out = []<EOL>comment = None<EOL>string = None<EOL>for i, c in enumerate(append_it(inputstring, None)):<EOL><INDENT>try:<EOL><INDENT>if comment is not None:<EOL><INDENT>if c is not None and c in nums:<EOL><INDENT>comment += c<EOL><DEDENT>elif c == unwrapper and comment:<EOL><INDENT>ref = self.get_ref("<STR_LIT>", comment)<EOL>if out and not out[-<NUM_LIT:1>].endswith("<STR_LIT:\n>"):<EOL><INDENT>out[-<NUM_LIT:1>] = out[-<NUM_LIT:1>].rstrip("<STR_LIT:U+0020>")<EOL>if not self.minify:<EOL><INDENT>out[-<NUM_LIT:1>] += "<STR_LIT:U+0020>" <EOL><DEDENT><DEDENT>out.append("<STR_LIT:#>" + ref)<EOL>comment = None<EOL><DEDENT>else:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", getline(i, inputstring))<EOL><DEDENT><DEDENT>elif string is not None:<EOL><INDENT>if c is not None and c in nums:<EOL><INDENT>string += c<EOL><DEDENT>elif c == unwrapper and string:<EOL><INDENT>text, strchar = self.get_ref("<STR_LIT:str>", string)<EOL>out.append(strchar + text + strchar)<EOL>string = None<EOL><DEDENT>else:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", getline(i, inputstring))<EOL><DEDENT><DEDENT>elif c is not None:<EOL><INDENT>if c == "<STR_LIT:#>":<EOL><INDENT>comment = "<STR_LIT>"<EOL><DEDENT>elif c == strwrapper:<EOL><INDENT>string = "<STR_LIT>"<EOL><DEDENT>else:<EOL><INDENT>out.append(c)<EOL><DEDENT><DEDENT><DEDENT>except CoconutInternalException as err:<EOL><INDENT>complain(err)<EOL>if comment is not None:<EOL><INDENT>out.append(comment)<EOL>comment = None<EOL><DEDENT>if string is not None:<EOL><INDENT>out.append(string)<EOL>string = None<EOL><DEDENT>out.append(c)<EOL><DEDENT><DEDENT>return "<STR_LIT>".join(out)<EOL>
|
Add back strings.
|
f11255:c0:m39
|
def repl_proc(self, inputstring, log=True, **kwargs):
|
return self.apply_procs(self.replprocs, kwargs, inputstring, log=log)<EOL>
|
Process using replprocs.
|
f11255:c0:m40
|
def header_proc(self, inputstring, header="<STR_LIT:file>", initial="<STR_LIT>", use_hash=None, **kwargs):
|
pre_header = self.getheader(initial, use_hash=use_hash, polish=False)<EOL>main_header = self.getheader(header, polish=False)<EOL>if self.minify:<EOL><INDENT>main_header = minify(main_header)<EOL><DEDENT>return pre_header + self.docstring + main_header + inputstring<EOL>
|
Add the header.
|
f11255:c0:m41
|
def polish(self, inputstring, final_endline=True, **kwargs):
|
return inputstring.rstrip() + ("<STR_LIT:\n>" if final_endline else "<STR_LIT>")<EOL>
|
Apply final polishing touches.
|
f11255:c0:m42
|
def set_docstring(self, loc, tokens):
|
internal_assert(len(tokens) == <NUM_LIT:2>, "<STR_LIT>", tokens)<EOL>self.docstring = self.reformat(tokens[<NUM_LIT:0>]) + "<STR_LIT>"<EOL>return tokens[<NUM_LIT:1>]<EOL>
|
Set the docstring.
|
f11255:c0:m43
|
def yield_from_handle(self, tokens):
|
internal_assert(len(tokens) == <NUM_LIT:1>, "<STR_LIT>", tokens)<EOL>if self.target_info < (<NUM_LIT:3>, <NUM_LIT:3>):<EOL><INDENT>return (<EOL>yield_from_var + "<STR_LIT>" + tokens[<NUM_LIT:0>]<EOL>+ "<STR_LIT>" + yield_item_var + "<STR_LIT>" + yield_from_var + "<STR_LIT>"<EOL>+ openindent + "<STR_LIT>" + yield_item_var + "<STR_LIT:\n>" + closeindent<EOL>)<EOL><DEDENT>else:<EOL><INDENT>return "<STR_LIT>" + tokens[<NUM_LIT:0>]<EOL><DEDENT>
|
Process Python 3.3 yield from.
|
f11255:c0:m44
|
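For targets below Python 3.3, `yield_from_handle` expands `yield from` into an explicit loop over a temporary variable (the generated variable names are masked above). A hedged sketch of the two output shapes, with illustrative names:

```python
def gen_modern(xs):
    # Shape of the output on Python >= 3.3 targets:
    yield from xs

def gen_universal(xs):
    # Rough shape of the expansion for older targets; `_it` and `_item`
    # stand in for the masked compiler-generated variable names.
    _it = xs
    for _item in _it:
        yield _item

assert list(gen_modern(range(3))) == list(gen_universal(range(3))) == [0, 1, 2]
```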
def endline_handle(self, original, loc, tokens):
|
internal_assert(len(tokens) == <NUM_LIT:1>, "<STR_LIT>", tokens)<EOL>lines = tokens[<NUM_LIT:0>].splitlines(True)<EOL>if self.minify:<EOL><INDENT>lines = lines[<NUM_LIT:0>]<EOL><DEDENT>out = []<EOL>ln = lineno(loc, original)<EOL>for endline in lines:<EOL><INDENT>out.append(self.wrap_line_number(self.adjust(ln)) + endline)<EOL>ln += <NUM_LIT:1><EOL><DEDENT>return "<STR_LIT>".join(out)<EOL>
|
Add line number information to end of line.
|
f11255:c0:m45
|
def comment_handle(self, original, loc, tokens):
|
internal_assert(len(tokens) == <NUM_LIT:1>, "<STR_LIT>", tokens)<EOL>ln = self.adjust(lineno(loc, original))<EOL>internal_assert(lambda: ln not in self.comments, "<STR_LIT>", ln)<EOL>self.comments[ln] = tokens[<NUM_LIT:0>]<EOL>return "<STR_LIT>"<EOL>
|
Store comment in comments.
|
f11255:c0:m46
|
def augassign_handle(self, tokens):
|
internal_assert(len(tokens) == <NUM_LIT:3>, "<STR_LIT>", tokens)<EOL>name, op, item = tokens<EOL>out = "<STR_LIT>"<EOL>if op == "<STR_LIT>":<EOL><INDENT>out += name + "<STR_LIT>" + item + "<STR_LIT>" + name + "<STR_LIT:)>"<EOL><DEDENT>elif op == "<STR_LIT>":<EOL><INDENT>out += name + "<STR_LIT>" + item + "<STR_LIT>" + name + "<STR_LIT:)>"<EOL><DEDENT>elif op == "<STR_LIT>":<EOL><INDENT>out += name + "<STR_LIT>" + name + "<STR_LIT>" + item + "<STR_LIT>"<EOL><DEDENT>elif op == "<STR_LIT>":<EOL><INDENT>out += name + "<STR_LIT>" + name + "<STR_LIT>" + item + "<STR_LIT>"<EOL><DEDENT>elif op == "<STR_LIT>" or op == "<STR_LIT>":<EOL><INDENT>out += name + "<STR_LIT>" + item + "<STR_LIT>" + name + "<STR_LIT:)>"<EOL><DEDENT>elif op == "<STR_LIT>":<EOL><INDENT>out += name + "<STR_LIT>" + name + "<STR_LIT>" + item + "<STR_LIT>"<EOL><DEDENT>elif op == "<STR_LIT>":<EOL><INDENT>out += name + "<STR_LIT>" + item + "<STR_LIT>" + name + "<STR_LIT:)>"<EOL><DEDENT>elif op == "<STR_LIT>":<EOL><INDENT>out += name + "<STR_LIT>" + name + "<STR_LIT>" + item + "<STR_LIT>"<EOL><DEDENT>elif op == "<STR_LIT>":<EOL><INDENT>out += name + "<STR_LIT>" + item + "<STR_LIT>" + name + "<STR_LIT>" + name<EOL><DEDENT>elif op == "<STR_LIT>":<EOL><INDENT>ichain_var = lazy_chain_var + "<STR_LIT:_>" + str(self.ichain_count)<EOL>self.ichain_count += <NUM_LIT:1><EOL>out += (<EOL>ichain_var + "<STR_LIT>" + name + "<STR_LIT:\n>"<EOL>+ name + "<STR_LIT>" + lazy_list_handle([ichain_var, "<STR_LIT:(>" + item + "<STR_LIT:)>"]) + "<STR_LIT:)>"<EOL>)<EOL><DEDENT>else:<EOL><INDENT>out += name + "<STR_LIT:U+0020>" + op + "<STR_LIT:U+0020>" + item<EOL><DEDENT>return out<EOL>
|
Process augmented assignments.
|
f11255:c0:m47
|
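Each branch of `augassign_handle` desugars one of Coconut's augmented-assignment operators into plain Python; the operator strings are masked, but the first branch visibly recombines `name` and `item` as `name = (item)(name)`, matching Coconut's documented pipe semantics. A hedged sketch of two representative rewrites (the operator spellings below are assumptions based on Coconut's documented `|>` and `<|` operators, not recovered from the masked literals):

```python
def desugar_augassign(name, op, item):
    """Illustrative desugaring of a few augmented assignments.

    The real handler supports many more operators; the pipe cases
    below are assumptions, not taken from the masked literals.
    """
    if op == "|>=":    # x |>= f  ==>  x = (f)(x)
        return name + " = (" + item + ")(" + name + ")"
    elif op == "<|=":  # f <|= x  ==>  f = (f)(x)
        return name + " = (" + name + ")(" + item + ")"
    else:              # ordinary operators pass through unchanged
        return name + " " + op + " " + item

print(desugar_augassign("x", "|>=", "f"))     # x = (f)(x)
print(desugar_augassign("total", "+=", "1"))  # total += 1
```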
def classlist_handle(self, original, loc, tokens):
|
if len(tokens) == <NUM_LIT:0>:<EOL><INDENT>if self.target.startswith("<STR_LIT:3>"):<EOL><INDENT>return "<STR_LIT>"<EOL><DEDENT>else:<EOL><INDENT>return "<STR_LIT>"<EOL><DEDENT><DEDENT>elif len(tokens) == <NUM_LIT:1> and len(tokens[<NUM_LIT:0>]) == <NUM_LIT:1>:<EOL><INDENT>if "<STR_LIT>" in tokens[<NUM_LIT:0>]:<EOL><INDENT>if self.strict and tokens[<NUM_LIT:0>][<NUM_LIT:0>] == "<STR_LIT>":<EOL><INDENT>raise self.make_err(CoconutStyleError, "<STR_LIT>", original, loc)<EOL><DEDENT>return tokens[<NUM_LIT:0>][<NUM_LIT:0>]<EOL><DEDENT>elif "<STR_LIT:args>" in tokens[<NUM_LIT:0>]:<EOL><INDENT>if self.target.startswith("<STR_LIT:3>"):<EOL><INDENT>return tokens[<NUM_LIT:0>][<NUM_LIT:0>]<EOL><DEDENT>else:<EOL><INDENT>raise self.make_err(CoconutTargetError, "<STR_LIT>", original, loc, target="<STR_LIT:3>")<EOL><DEDENT><DEDENT>else:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", tokens[<NUM_LIT:0>])<EOL><DEDENT><DEDENT>else:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", tokens)<EOL><DEDENT>
|
Process class inheritance lists.
|
f11255:c0:m48
|
def data_handle(self, loc, tokens):
|
if len(tokens) == <NUM_LIT:3>:<EOL><INDENT>name, original_args, stmts = tokens<EOL>inherit = None<EOL><DEDENT>elif len(tokens) == <NUM_LIT:4>:<EOL><INDENT>name, original_args, inherit, stmts = tokens<EOL><DEDENT>else:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", tokens)<EOL><DEDENT>all_args = [] <EOL>base_args = [] <EOL>req_args = <NUM_LIT:0> <EOL>starred_arg = None <EOL>saw_defaults = False <EOL>types = {} <EOL>for i, arg in enumerate(original_args):<EOL><INDENT>star, default, typedef = False, None, None<EOL>if "<STR_LIT:name>" in arg:<EOL><INDENT>internal_assert(len(arg) == <NUM_LIT:1>)<EOL>argname = arg[<NUM_LIT:0>]<EOL><DEDENT>elif "<STR_LIT:default>" in arg:<EOL><INDENT>internal_assert(len(arg) == <NUM_LIT:2>)<EOL>argname, default = arg<EOL><DEDENT>elif "<STR_LIT>" in arg:<EOL><INDENT>internal_assert(len(arg) == <NUM_LIT:1>)<EOL>star, argname = True, arg[<NUM_LIT:0>]<EOL><DEDENT>elif "<STR_LIT:type>" in arg:<EOL><INDENT>internal_assert(len(arg) == <NUM_LIT:2>)<EOL>argname, typedef = arg<EOL><DEDENT>elif "<STR_LIT>" in arg:<EOL><INDENT>internal_assert(len(arg) == <NUM_LIT:3>)<EOL>argname, typedef, default = arg<EOL><DEDENT>else:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", arg)<EOL><DEDENT>if argname.startswith("<STR_LIT:_>"):<EOL><INDENT>raise CoconutDeferredSyntaxError("<STR_LIT>", loc)<EOL><DEDENT>if star:<EOL><INDENT>if i != len(original_args) - <NUM_LIT:1>:<EOL><INDENT>raise CoconutDeferredSyntaxError("<STR_LIT>", loc)<EOL><DEDENT>starred_arg = argname<EOL><DEDENT>else:<EOL><INDENT>if default:<EOL><INDENT>saw_defaults = True<EOL><DEDENT>elif saw_defaults:<EOL><INDENT>raise CoconutDeferredSyntaxError("<STR_LIT>", loc)<EOL><DEDENT>else:<EOL><INDENT>req_args += <NUM_LIT:1><EOL><DEDENT>base_args.append(argname)<EOL><DEDENT>if typedef:<EOL><INDENT>internal_assert(not star, "<STR_LIT>", typedef)<EOL>types[i] = typedef<EOL><DEDENT>arg_str = ("<STR_LIT:*>" if star else "<STR_LIT>") + argname + ("<STR_LIT:=>" + default if default else "<STR_LIT>")<EOL>all_args.append(arg_str)<EOL><DEDENT>attr_str = "<STR_LIT:U+0020>".join(base_args)<EOL>extra_stmts = '''<STR_LIT>'''def __new__(_cls, {all_args}):<EOL><INDENT>{oind}return _coconut.tuple.__new__(_cls, {base_args_tuple} + {starred_arg})<EOL>
|
Process data blocks.
|
f11255:c0:m49
|
def import_handle(self, original, loc, tokens):
|
if len(tokens) == <NUM_LIT:1>:<EOL><INDENT>imp_from, imports = None, tokens[<NUM_LIT:0>]<EOL><DEDENT>elif len(tokens) == <NUM_LIT:2>:<EOL><INDENT>imp_from, imports = tokens<EOL>if imp_from == "<STR_LIT>":<EOL><INDENT>self.strict_err_or_warn("<STR_LIT>", original, loc)<EOL>return "<STR_LIT>"<EOL><DEDENT><DEDENT>else:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", tokens)<EOL><DEDENT>if self.strict:<EOL><INDENT>self.unused_imports.update(imported_names(imports))<EOL><DEDENT>return universal_import(imports, imp_from=imp_from, target=self.target)<EOL>
|
Universalize imports.
|
f11255:c0:m50
|
def complex_raise_stmt_handle(self, tokens):
|
internal_assert(len(tokens) == <NUM_LIT:2>, "<STR_LIT>", tokens)<EOL>if self.target.startswith("<STR_LIT:3>"):<EOL><INDENT>return "<STR_LIT>" + tokens[<NUM_LIT:0>] + "<STR_LIT>" + tokens[<NUM_LIT:1>]<EOL><DEDENT>else:<EOL><INDENT>return (<EOL>raise_from_var + "<STR_LIT>" + tokens[<NUM_LIT:0>] + "<STR_LIT:\n>"<EOL>+ raise_from_var + "<STR_LIT>" + tokens[<NUM_LIT:1>] + "<STR_LIT:\n>"<EOL>+ "<STR_LIT>" + raise_from_var<EOL>)<EOL><DEDENT>
|
Process Python 3 raise from statement.
|
f11255:c0:m51
|
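On Python 3 targets `complex_raise_stmt_handle` can emit `raise ... from ...` directly; otherwise it stashes the exception in a temporary, attaches the cause, and raises that (the temporary's name is masked above). A hedged sketch of the universal shape:

```python
def raise_from_universal(exc, cause):
    # Rough shape of the emulation: stash, attach the cause, raise.
    # `_raise_from` stands in for the masked compiler-generated name.
    _raise_from = exc
    _raise_from.__cause__ = cause
    raise _raise_from

try:
    raise_from_universal(ValueError("bad value"), KeyError("root cause"))
except ValueError as err:
    assert isinstance(err.__cause__, KeyError)
```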
def dict_comp_handle(self, loc, tokens):
|
internal_assert(len(tokens) == <NUM_LIT:3>, "<STR_LIT>", tokens)<EOL>if self.target.startswith("<STR_LIT:3>"):<EOL><INDENT>key, val, comp = tokens<EOL>return "<STR_LIT:{>" + key + "<STR_LIT>" + val + "<STR_LIT:U+0020>" + comp + "<STR_LIT:}>"<EOL><DEDENT>else:<EOL><INDENT>key, val, comp = tokens<EOL>return "<STR_LIT>" + key + "<STR_LIT>" + val + "<STR_LIT>" + comp + "<STR_LIT:)>"<EOL><DEDENT>
|
Process Python 2.7 dictionary comprehension.
|
f11255:c0:m52
|
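Dict comprehensions only exist from Python 2.7/3.0 on, so for older targets `dict_comp_handle` rebuilds the comprehension as a generator of key/value pairs fed to `dict`. The exact parenthesization is masked above; a sketch of the two shapes, checked for equivalence:

```python
key, val, comp = "k", "v * 2", "for k, v in items.items()"

py3_form = "{" + key + ": " + val + " " + comp + "}"
# -> {k: v * 2 for k, v in items.items()}

legacy_form = "dict(((" + key + "), (" + val + ")) " + comp + ")"
# -> dict(((k), (v * 2)) for k, v in items.items())

items = {"a": 1, "b": 2}
assert eval(py3_form) == eval(legacy_form) == {"a": 2, "b": 4}
```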
def pattern_error(self, original, loc, value_var, check_var):
|
base_line = clean(self.reformat(getline(loc, original)))<EOL>line_wrap = self.wrap_str_of(base_line)<EOL>repr_wrap = self.wrap_str_of(ascii(base_line))<EOL>return (<EOL>"<STR_LIT>" + check_var + "<STR_LIT>" + openindent<EOL>+ match_err_var + '<STR_LIT>'<EOL>+ repr_wrap + '<STR_LIT>' + value_var + "<STR_LIT>"<EOL>+ match_err_var + "<STR_LIT>" + line_wrap + "<STR_LIT:\n>"<EOL>+ match_err_var + "<STR_LIT>" + value_var<EOL>+ "<STR_LIT>" + match_err_var + "<STR_LIT:\n>" + closeindent<EOL>)<EOL>
|
Construct a pattern-matching error message.
|
f11255:c0:m53
|
def destructuring_stmt_handle(self, original, loc, tokens):
|
internal_assert(len(tokens) == <NUM_LIT:2>, "<STR_LIT>", tokens)<EOL>matches, item = tokens<EOL>out = match_handle(loc, [matches, "<STR_LIT>", item, None])<EOL>out += self.pattern_error(original, loc, match_to_var, match_check_var)<EOL>return out<EOL>
|
Process match assign blocks.
|
f11255:c0:m54
|
def name_match_funcdef_handle(self, original, loc, tokens):
|
if len(tokens) == <NUM_LIT:2>:<EOL><INDENT>func, matches = tokens<EOL>cond = None<EOL><DEDENT>elif len(tokens) == <NUM_LIT:3>:<EOL><INDENT>func, matches, cond = tokens<EOL><DEDENT>else:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", tokens)<EOL><DEDENT>matcher = Matcher(loc, match_check_var)<EOL>req_args, def_args, star_arg, kwd_args, dubstar_arg = split_args_list(matches, loc)<EOL>matcher.match_function(match_to_args_var, match_to_kwargs_var, req_args + def_args, star_arg, kwd_args, dubstar_arg)<EOL>if cond is not None:<EOL><INDENT>matcher.add_guard(cond)<EOL><DEDENT>before_docstring = (<EOL>"<STR_LIT>" + func<EOL>+ "<STR_LIT>" + match_to_args_var + "<STR_LIT>" + match_to_kwargs_var + "<STR_LIT>"<EOL>+ openindent<EOL>)<EOL>after_docstring = (<EOL>match_check_var + "<STR_LIT>"<EOL>+ matcher.out()<EOL>+ self.pattern_error(original, loc, match_to_args_var, match_check_var) + closeindent<EOL>)<EOL>return before_docstring, after_docstring<EOL>
|
Process match defs. Result must be passed to insert_docstring_handle.
|
f11255:c0:m55
|
def op_match_funcdef_handle(self, original, loc, tokens):
|
if len(tokens) == <NUM_LIT:3>:<EOL><INDENT>func, args = get_infix_items(tokens)<EOL>cond = None<EOL><DEDENT>elif len(tokens) == <NUM_LIT:4>:<EOL><INDENT>func, args = get_infix_items(tokens[:-<NUM_LIT:1>])<EOL>cond = tokens[-<NUM_LIT:1>]<EOL><DEDENT>else:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", tokens)<EOL><DEDENT>name_tokens = [func, args]<EOL>if cond is not None:<EOL><INDENT>name_tokens.append(cond)<EOL><DEDENT>return self.name_match_funcdef_handle(original, loc, name_tokens)<EOL>
|
Process infix match defs. Result must be passed to insert_docstring_handle.
|
f11255:c0:m56
|
def set_literal_handle(self, tokens):
|
internal_assert(len(tokens) == <NUM_LIT:1> and len(tokens[<NUM_LIT:0>]) == <NUM_LIT:1>, "<STR_LIT>", tokens)<EOL>if self.target_info < (<NUM_LIT:2>, <NUM_LIT:7>):<EOL><INDENT>return "<STR_LIT>" + set_to_tuple(tokens[<NUM_LIT:0>]) + "<STR_LIT:)>"<EOL><DEDENT>else:<EOL><INDENT>return "<STR_LIT:{>" + tokens[<NUM_LIT:0>][<NUM_LIT:0>] + "<STR_LIT:}>"<EOL><DEDENT>
|
Convert set literals to the right form for the target Python version.
|
f11255:c0:m57
|
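Set literals likewise require Python 2.7+, so on older targets `set_literal_handle` wraps the items in a tuple passed to `set` (via the `set_to_tuple` helper). A sketch of the two shapes:

```python
contents = "1, 2, 3"

modern = "{" + contents + "}"       # {1, 2, 3} on 2.7+ targets
legacy = "set((" + contents + "))"  # set((1, 2, 3)) on older targets

assert eval(modern) == eval(legacy) == {1, 2, 3}
```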
def set_letter_literal_handle(self, tokens):
|
if len(tokens) == <NUM_LIT:1>:<EOL><INDENT>set_type = tokens[<NUM_LIT:0>]<EOL>if set_type == "<STR_LIT:s>":<EOL><INDENT>return "<STR_LIT>"<EOL><DEDENT>elif set_type == "<STR_LIT:f>":<EOL><INDENT>return "<STR_LIT>"<EOL><DEDENT>else:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", set_type)<EOL><DEDENT><DEDENT>elif len(tokens) == <NUM_LIT:2>:<EOL><INDENT>set_type, set_items = tokens<EOL>internal_assert(len(set_items) == <NUM_LIT:1>, "<STR_LIT>", tokens[<NUM_LIT:0>])<EOL>if set_type == "<STR_LIT:s>":<EOL><INDENT>return self.set_literal_handle([set_items])<EOL><DEDENT>elif set_type == "<STR_LIT:f>":<EOL><INDENT>return "<STR_LIT>" + set_to_tuple(set_items) + "<STR_LIT:)>"<EOL><DEDENT>else:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", set_type)<EOL><DEDENT><DEDENT>else:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", tokens)<EOL><DEDENT>
|
Process set literals.
|
f11255:c0:m58
|
def exec_stmt_handle(self, tokens):
|
internal_assert(<NUM_LIT:1> <= len(tokens) <= <NUM_LIT:3>, "<STR_LIT>", tokens)<EOL>if self.target.startswith("<STR_LIT:2>"):<EOL><INDENT>out = "<STR_LIT>" + tokens[<NUM_LIT:0>]<EOL>if len(tokens) > <NUM_LIT:1>:<EOL><INDENT>out += "<STR_LIT>" + "<STR_LIT:U+002CU+0020>".join(tokens[<NUM_LIT:1>:])<EOL><DEDENT>return out<EOL><DEDENT>else:<EOL><INDENT>return "<STR_LIT>" + "<STR_LIT:U+002CU+0020>".join(tokens) + "<STR_LIT:)>"<EOL><DEDENT>
|
Process Python-3-style exec statements.
|
f11255:c0:m59
|
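`exec` is a statement on Python 2 (with `in` introducing the globals/locals) and a function on Python 3, which is why `exec_stmt_handle` branches on the target. A sketch of the two output shapes (string building only, since the statement form is a syntax error on Python 3):

```python
code, scope = "'x = 1'", "{}"

py3_form = "exec(" + code + ", " + scope + ")"  # exec('x = 1', {})
py2_form = "exec " + code + " in " + scope      # exec 'x = 1' in {}

print(py3_form)
print(py2_form)
```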
def stmt_lambda_name(self, index=None):
|
if index is None:<EOL><INDENT>index = len(self.stmt_lambdas)<EOL><DEDENT>return stmt_lambda_var + "<STR_LIT:_>" + str(index)<EOL>
|
Return the next (or specified) statement lambda name.
|
f11255:c0:m60
|
def stmt_lambdef_handle(self, original, loc, tokens):
|
if len(tokens) == <NUM_LIT:2>:<EOL><INDENT>params, stmts = tokens<EOL><DEDENT>elif len(tokens) == <NUM_LIT:3>:<EOL><INDENT>params, stmts, last = tokens<EOL>if "<STR_LIT>" in tokens:<EOL><INDENT>stmts = stmts.asList() + ["<STR_LIT>" + last]<EOL><DEDENT>else:<EOL><INDENT>stmts = stmts.asList() + [last]<EOL><DEDENT><DEDENT>else:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", tokens)<EOL><DEDENT>name = self.stmt_lambda_name()<EOL>body = openindent + self.stmt_lambda_proc("<STR_LIT:\n>".join(stmts)) + closeindent<EOL>if isinstance(params, str):<EOL><INDENT>self.stmt_lambdas.append(<EOL>"<STR_LIT>" + name + params + "<STR_LIT>" + body,<EOL>)<EOL><DEDENT>else:<EOL><INDENT>params.insert(<NUM_LIT:0>, name) <EOL>self.stmt_lambdas.append(<EOL>"<STR_LIT>".join(self.name_match_funcdef_handle(original, loc, params))<EOL>+ body,<EOL>)<EOL><DEDENT>return name<EOL>
|
Process multi-line lambdef statements.
|
f11255:c0:m61
|
@contextmanager<EOL><INDENT>def complain_on_err(self):<DEDENT>
|
try:<EOL><INDENT>yield<EOL><DEDENT>except ParseBaseException as err:<EOL><INDENT>complain(self.make_parse_err(err, reformat=False, include_ln=False))<EOL><DEDENT>except CoconutException as err:<EOL><INDENT>complain(err)<EOL><DEDENT>
|
Complain about any parsing-related errors raised inside.
|
f11255:c0:m62
|
def split_docstring(self, block):
|
try:<EOL><INDENT>first_line, rest_of_lines = block.split("<STR_LIT:\n>", <NUM_LIT:1>)<EOL><DEDENT>except ValueError:<EOL><INDENT>pass<EOL><DEDENT>else:<EOL><INDENT>raw_first_line = split_leading_trailing_indent(rem_comment(first_line))[<NUM_LIT:1>]<EOL>if match_in(self.just_a_string, raw_first_line):<EOL><INDENT>return first_line, rest_of_lines<EOL><DEDENT><DEDENT>return None, block<EOL>
|
Split a code block into a docstring and a body.
|
f11255:c0:m63
|
def tre_return(self, func_name, func_args, func_store, use_mock=True):
|
def tre_return_handle(loc, tokens):<EOL><INDENT>internal_assert(len(tokens) == <NUM_LIT:1>, "<STR_LIT>", tokens)<EOL>args = tokens[<NUM_LIT:0>][<NUM_LIT:1>:-<NUM_LIT:1>] <EOL>if match_in(self.stores_scope, args):<EOL><INDENT>return ignore_transform <EOL><DEDENT>if self.no_tco:<EOL><INDENT>tco_recurse = "<STR_LIT>" + func_name + "<STR_LIT:(>" + args + "<STR_LIT:)>"<EOL><DEDENT>else:<EOL><INDENT>tco_recurse = "<STR_LIT>" + func_name + ("<STR_LIT:U+002CU+0020>" + args if args else "<STR_LIT>") + "<STR_LIT:)>"<EOL><DEDENT>if not func_args or func_args == args:<EOL><INDENT>tre_recurse = "<STR_LIT>"<EOL><DEDENT>elif use_mock:<EOL><INDENT>tre_recurse = func_args + "<STR_LIT>" + tre_mock_var + "<STR_LIT:(>" + args + "<STR_LIT:)>" + "<STR_LIT>"<EOL><DEDENT>else:<EOL><INDENT>tre_recurse = func_args + "<STR_LIT>" + args + "<STR_LIT>"<EOL><DEDENT>return (<EOL>"<STR_LIT>" + openindent<EOL>+ tre_check_var + "<STR_LIT>" + func_name + "<STR_LIT>" + func_store + "<STR_LIT:\n>" + closeindent<EOL>+ "<STR_LIT>" + openindent<EOL>+ tre_check_var + "<STR_LIT>" + closeindent<EOL>+ "<STR_LIT>" + tre_check_var + "<STR_LIT>" + openindent<EOL>+ tre_recurse + "<STR_LIT:\n>" + closeindent<EOL>+ "<STR_LIT>" + openindent<EOL>+ tco_recurse + "<STR_LIT:\n>" + closeindent<EOL>)<EOL><DEDENT>return attach(<EOL>self.start_marker + (keyword("<STR_LIT>") + keyword(func_name)).suppress() + self.parens + self.end_marker,<EOL>tre_return_handle,<EOL>)<EOL>
|
Generate a grammar element that matches a string consisting solely of a TRE return statement.
|
f11255:c0:m64
|
def transform_returns(self, raw_lines, tre_return_grammar=None, use_mock=None, is_async=False):
|
lines = [] <EOL>tco = False <EOL>tre = False <EOL>level = <NUM_LIT:0> <EOL>disabled_until_level = None <EOL>attempt_tre = tre_return_grammar is not None <EOL>attempt_tco = not is_async and not self.no_tco <EOL>if is_async:<EOL><INDENT>internal_assert(not attempt_tre and not attempt_tco, "<STR_LIT>")<EOL><DEDENT>for line in raw_lines:<EOL><INDENT>indent, body, dedent = split_leading_trailing_indent(line)<EOL>base, comment = split_comment(body)<EOL>level += ind_change(indent)<EOL>if disabled_until_level is not None:<EOL><INDENT>if level <= disabled_until_level:<EOL><INDENT>disabled_until_level = None<EOL><DEDENT><DEDENT>if disabled_until_level is None:<EOL><INDENT>if not is_async and self.yield_regex.search(body):<EOL><INDENT>lines = raw_lines <EOL>break<EOL><DEDENT>elif self.def_regex.match(body):<EOL><INDENT>disabled_until_level = level<EOL><DEDENT>elif not is_async and self.tre_disable_regex.match(body):<EOL><INDENT>disabled_until_level = level<EOL><DEDENT>else:<EOL><INDENT>if is_async:<EOL><INDENT>if self.return_regex.match(base):<EOL><INDENT>to_return = base[len("<STR_LIT>"):].strip()<EOL>if to_return: <EOL><INDENT>line = indent + "<STR_LIT>" + to_return + "<STR_LIT:)>" + comment + dedent<EOL><DEDENT><DEDENT><DEDENT>tre_base = None<EOL>if attempt_tre:<EOL><INDENT>with self.complain_on_err():<EOL><INDENT>tre_base = transform(tre_return_grammar, base)<EOL><DEDENT>if tre_base is not None:<EOL><INDENT>line = indent + tre_base + comment + dedent<EOL>tre = True<EOL>tco = not self.no_tco<EOL><DEDENT><DEDENT>if attempt_tco and tre_base is None: <EOL><INDENT>tco_base = None<EOL>with self.complain_on_err():<EOL><INDENT>tco_base = transform(self.tco_return, base)<EOL><DEDENT>if tco_base is not None:<EOL><INDENT>line = indent + tco_base + comment + dedent<EOL>tco = True<EOL><DEDENT><DEDENT><DEDENT><DEDENT>level += ind_change(dedent)<EOL>lines.append(line)<EOL><DEDENT>func_code = "<STR_LIT>".join(lines)<EOL>if is_async:<EOL><INDENT>return func_code<EOL><DEDENT>else:<EOL><INDENT>return func_code, tco, tre<EOL><DEDENT>
|
Apply TCO, TRE, or async universalization to the given function.
|
f11255:c0:m65
|
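Conceptually, TRE rewrites a self-recursive tail call into argument rebinding plus `continue` inside a `while True:` loop wrapped around the function body (the generated code also guards on the function not having been rebound, which is what the `func_store` comparison in `tre_return` is for), while TCO wraps remaining tail calls in a trampoline. A hand-written before/after sketch of the TRE half, not actual compiler output:

```python
# Before: straightforward recursion (can exhaust the stack).
def countdown_rec(n):
    if n <= 0:
        return "done"
    return countdown_rec(n - 1)

# After TRE, conceptually: the tail call becomes argument
# rebinding plus `continue` inside a `while True:` loop.
def countdown_tre(n):
    while True:
        if n <= 0:
            return "done"
        n = n - 1
        continue

assert countdown_tre(10**6) == "done"  # no RecursionError
```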
def decoratable_funcdef_stmt_handle(self, original, loc, tokens, is_async=False):
|
if len(tokens) == <NUM_LIT:1>:<EOL><INDENT>decorators, funcdef = "<STR_LIT>", tokens[<NUM_LIT:0>]<EOL><DEDENT>elif len(tokens) == <NUM_LIT:2>:<EOL><INDENT>decorators, funcdef = tokens<EOL><DEDENT>else:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", tokens)<EOL><DEDENT>raw_lines = funcdef.splitlines(True)<EOL>def_stmt = raw_lines.pop(<NUM_LIT:0>)<EOL>func_name, func_args, func_params = None, None, None<EOL>with self.complain_on_err():<EOL><INDENT>func_name, func_args, func_params = parse(self.split_func_name_args_params, def_stmt)<EOL><DEDENT>undotted_name = None <EOL>if func_name is not None:<EOL><INDENT>if "<STR_LIT:.>" in func_name:<EOL><INDENT>undotted_name = func_name.rsplit("<STR_LIT:.>", <NUM_LIT:1>)[-<NUM_LIT:1>]<EOL>def_stmt = def_stmt.replace(func_name, undotted_name)<EOL><DEDENT><DEDENT>if is_async:<EOL><INDENT>if not self.target:<EOL><INDENT>raise self.make_err(<EOL>CoconutTargetError,<EOL>"<STR_LIT>",<EOL>original, loc,<EOL>target="<STR_LIT>",<EOL>)<EOL><DEDENT>elif self.target_info >= (<NUM_LIT:3>, <NUM_LIT:5>):<EOL><INDENT>def_stmt = "<STR_LIT>" + def_stmt<EOL><DEDENT>else:<EOL><INDENT>decorators += "<STR_LIT>"<EOL><DEDENT>if self.target_info < (<NUM_LIT:3>, <NUM_LIT:3>):<EOL><INDENT>func_code = self.transform_returns(raw_lines, is_async=True)<EOL><DEDENT>else:<EOL><INDENT>func_code = "<STR_LIT>".join(raw_lines)<EOL><DEDENT><DEDENT>else:<EOL><INDENT>attempt_tre = func_name is not None and not decorators<EOL>if attempt_tre:<EOL><INDENT>use_mock = func_args and func_args != func_params[<NUM_LIT:1>:-<NUM_LIT:1>]<EOL>func_store = tre_store_var + "<STR_LIT:_>" + str(self.tre_store_count)<EOL>self.tre_store_count += <NUM_LIT:1><EOL>tre_return_grammar = self.tre_return(func_name, func_args, func_store, use_mock)<EOL><DEDENT>else:<EOL><INDENT>use_mock = func_store = tre_return_grammar = None<EOL><DEDENT>func_code, tco, tre = self.transform_returns(<EOL>raw_lines,<EOL>tre_return_grammar,<EOL>use_mock,<EOL>)<EOL>if tre:<EOL><INDENT>comment, rest = split_leading_comment(func_code)<EOL>indent, base, dedent = split_leading_trailing_indent(rest, <NUM_LIT:1>)<EOL>base, base_dedent = split_trailing_indent(base)<EOL>docstring, base = self.split_docstring(base)<EOL>func_code = (<EOL>comment + indent<EOL>+ (docstring + "<STR_LIT:\n>" if docstring is not None else "<STR_LIT>")<EOL>+ (<EOL>"<STR_LIT>" + tre_mock_var + func_params + "<STR_LIT>" + func_args + "<STR_LIT:\n>"<EOL>if use_mock else "<STR_LIT>"<EOL>) + "<STR_LIT>"<EOL>+ openindent + base + base_dedent<EOL>+ ("<STR_LIT:\n>" if "<STR_LIT:\n>" not in base_dedent else "<STR_LIT>") + "<STR_LIT>"<EOL>+ ("<STR_LIT:\n>" if "<STR_LIT:\n>" not in dedent else "<STR_LIT>") + closeindent + dedent<EOL>+ func_store + "<STR_LIT>" + (func_name if undotted_name is None else undotted_name) + "<STR_LIT:\n>"<EOL>)<EOL><DEDENT>if tco:<EOL><INDENT>decorators += "<STR_LIT>" <EOL><DEDENT><DEDENT>out = decorators + def_stmt + func_code<EOL>if undotted_name is not None:<EOL><INDENT>out += func_name + "<STR_LIT>" + undotted_name + "<STR_LIT:\n>"<EOL><DEDENT>return out<EOL>
|
Determine whether TCO or TRE can be applied and, if so, apply it;
also handle dotted function names and universalize async functions.
|
f11255:c0:m66
|
def await_item_handle(self, original, loc, tokens):
|
internal_assert(len(tokens) == <NUM_LIT:1>, "<STR_LIT>", tokens)<EOL>if not self.target:<EOL><INDENT>self.make_err(<EOL>CoconutTargetError,<EOL>"<STR_LIT>",<EOL>original, loc,<EOL>target="<STR_LIT>",<EOL>)<EOL><DEDENT>elif self.target_info >= (<NUM_LIT:3>, <NUM_LIT:5>):<EOL><INDENT>return "<STR_LIT>" + tokens[<NUM_LIT:0>]<EOL><DEDENT>elif self.target_info >= (<NUM_LIT:3>, <NUM_LIT:3>):<EOL><INDENT>return "<STR_LIT>" + tokens[<NUM_LIT:0>] + "<STR_LIT:)>"<EOL><DEDENT>else:<EOL><INDENT>return "<STR_LIT>" + tokens[<NUM_LIT:0>] + "<STR_LIT>"<EOL><DEDENT>
|
Check for Python 3.5 await expression.
|
f11255:c0:m67
|
def unsafe_typedef_handle(self, tokens):
|
return self.typedef_handle(tokens.asList() + ["<STR_LIT:U+002C>"])<EOL>
|
Process type annotations without a comma after them.
|
f11255:c0:m68
|
def wrap_typedef(self, typedef):
|
return self.wrap_str_of(self.reformat(typedef))<EOL>
|
Wrap a type definition in a string to defer it.
|
f11255:c0:m69
|
def typedef_handle(self, tokens):
|
if len(tokens) == <NUM_LIT:1>: <EOL><INDENT>if self.target.startswith("<STR_LIT:3>"):<EOL><INDENT>return "<STR_LIT>" + self.wrap_typedef(tokens[<NUM_LIT:0>]) + "<STR_LIT::>"<EOL><DEDENT>else:<EOL><INDENT>return "<STR_LIT>" + self.wrap_comment("<STR_LIT>" + tokens[<NUM_LIT:0>])<EOL><DEDENT><DEDENT>else: <EOL><INDENT>if len(tokens) == <NUM_LIT:3>:<EOL><INDENT>varname, typedef, comma = tokens<EOL>default = "<STR_LIT>"<EOL><DEDENT>elif len(tokens) == <NUM_LIT:4>:<EOL><INDENT>varname, typedef, default, comma = tokens<EOL><DEDENT>else:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", tokens)<EOL><DEDENT>if self.target.startswith("<STR_LIT:3>"):<EOL><INDENT>return varname + "<STR_LIT>" + self.wrap_typedef(typedef) + default + comma<EOL><DEDENT>else:<EOL><INDENT>return varname + default + comma + self.wrap_passthrough(self.wrap_comment("<STR_LIT>" + typedef) + "<STR_LIT:\n>" + "<STR_LIT:U+0020>" * self.tabideal)<EOL><DEDENT><DEDENT>
|
Process Python 3 type annotations.
|
f11255:c0:m70
|
def typed_assign_stmt_handle(self, tokens):
|
if len(tokens) == <NUM_LIT:2>:<EOL><INDENT>if self.target_info >= (<NUM_LIT:3>, <NUM_LIT:6>):<EOL><INDENT>return tokens[<NUM_LIT:0>] + "<STR_LIT>" + self.wrap_typedef(tokens[<NUM_LIT:1>])<EOL><DEDENT>else:<EOL><INDENT>return tokens[<NUM_LIT:0>] + "<STR_LIT>" + self.wrap_comment("<STR_LIT>" + tokens[<NUM_LIT:1>])<EOL><DEDENT><DEDENT>elif len(tokens) == <NUM_LIT:3>:<EOL><INDENT>if self.target_info >= (<NUM_LIT:3>, <NUM_LIT:6>):<EOL><INDENT>return tokens[<NUM_LIT:0>] + "<STR_LIT>" + self.wrap_typedef(tokens[<NUM_LIT:1>]) + "<STR_LIT>" + tokens[<NUM_LIT:2>]<EOL><DEDENT>else:<EOL><INDENT>return tokens[<NUM_LIT:0>] + "<STR_LIT>" + tokens[<NUM_LIT:2>] + self.wrap_comment("<STR_LIT>" + tokens[<NUM_LIT:1>])<EOL><DEDENT><DEDENT>else:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", tokens)<EOL><DEDENT>
|
Process Python 3.6 variable type annotations.
|
f11255:c0:m71
|
def with_stmt_handle(self, tokens):
|
internal_assert(len(tokens) == <NUM_LIT:2>, "<STR_LIT>", tokens)<EOL>withs, body = tokens<EOL>if len(withs) == <NUM_LIT:1> or self.target_info >= (<NUM_LIT:2>, <NUM_LIT:7>):<EOL><INDENT>return "<STR_LIT>" + "<STR_LIT:U+002CU+0020>".join(withs) + body<EOL><DEDENT>else:<EOL><INDENT>return (<EOL>"<STR_LIT>".join("<STR_LIT>" + expr + "<STR_LIT>" + openindent for expr in withs[:-<NUM_LIT:1>])<EOL>+ "<STR_LIT>" + withs[-<NUM_LIT:1>] + body<EOL>+ closeindent * (len(withs) - <NUM_LIT:1>)<EOL>)<EOL><DEDENT>
|
Process with statements.
|
f11255:c0:m72
|
def case_stmt_handle(self, loc, tokens):
|
if len(tokens) == <NUM_LIT:2>:<EOL><INDENT>item, cases = tokens<EOL>default = None<EOL><DEDENT>elif len(tokens) == <NUM_LIT:3>:<EOL><INDENT>item, cases, default = tokens<EOL><DEDENT>else:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", tokens)<EOL><DEDENT>check_var = case_check_var + "<STR_LIT:_>" + str(self.case_check_count)<EOL>self.case_check_count += <NUM_LIT:1><EOL>out = (<EOL>match_to_var + "<STR_LIT>" + item + "<STR_LIT:\n>"<EOL>+ match_case_tokens(loc, cases[<NUM_LIT:0>], check_var, True)<EOL>)<EOL>for case in cases[<NUM_LIT:1>:]:<EOL><INDENT>out += (<EOL>"<STR_LIT>" + check_var + "<STR_LIT>" + openindent<EOL>+ match_case_tokens(loc, case, check_var, False) + closeindent<EOL>)<EOL><DEDENT>if default is not None:<EOL><INDENT>out += "<STR_LIT>" + check_var + default<EOL><DEDENT>return out<EOL>
|
Process case blocks.
|
f11255:c0:m74
|
def check_strict(self, name, original, loc, tokens):
|
internal_assert(len(tokens) == <NUM_LIT:1>, "<STR_LIT>" + name + "<STR_LIT>", tokens)<EOL>if self.strict:<EOL><INDENT>raise self.make_err(CoconutStyleError, "<STR_LIT>" + name, original, loc)<EOL><DEDENT>else:<EOL><INDENT>return tokens[<NUM_LIT:0>]<EOL><DEDENT>
|
Check that syntax meets --strict requirements.
|
f11255:c0:m75
|
def lambdef_check(self, original, loc, tokens):
|
return self.check_strict("<STR_LIT>", original, loc, tokens)<EOL>
|
Check for Python-style lambdas.
|
f11255:c0:m76
|
def endline_semicolon_check(self, original, loc, tokens):
|
return self.check_strict("<STR_LIT>", original, loc, tokens)<EOL>
|
Check for semicolons at the end of lines.
|
f11255:c0:m77
|
def u_string_check(self, original, loc, tokens):
|
return self.check_strict("<STR_LIT>", original, loc, tokens)<EOL>
|
Check for Python2-style unicode strings.
|
f11255:c0:m78
|
def check_py(self, version, name, original, loc, tokens):
|
internal_assert(len(tokens) == <NUM_LIT:1>, "<STR_LIT>" + name + "<STR_LIT>", tokens)<EOL>if self.target_info < get_target_info(version):<EOL><INDENT>raise self.make_err(CoconutTargetError, "<STR_LIT>" + "<STR_LIT:.>".join(version) + "<STR_LIT:U+0020>" + name, original, loc, target=version)<EOL><DEDENT>else:<EOL><INDENT>return tokens[<NUM_LIT:0>]<EOL><DEDENT>
|
Check for Python-version-specific syntax.
|
f11255:c0:m79
|
def name_check(self, original, loc, tokens):
|
internal_assert(len(tokens) == <NUM_LIT:1>, "<STR_LIT>", tokens)<EOL>if self.strict:<EOL><INDENT>self.unused_imports.discard(tokens[<NUM_LIT:0>])<EOL><DEDENT>if tokens[<NUM_LIT:0>] == "<STR_LIT>":<EOL><INDENT>return self.check_py("<STR_LIT:3>", "<STR_LIT>", original, loc, tokens)<EOL><DEDENT>elif tokens[<NUM_LIT:0>].startswith(reserved_prefix):<EOL><INDENT>raise self.make_err(CoconutSyntaxError, "<STR_LIT>" + reserved_prefix, original, loc)<EOL><DEDENT>else:<EOL><INDENT>return tokens[<NUM_LIT:0>]<EOL><DEDENT>
|
Check the given base name.
|
f11255:c0:m80
|
def nonlocal_check(self, original, loc, tokens):
|
return self.check_py("<STR_LIT:3>", "<STR_LIT>", original, loc, tokens)<EOL>
|
Check for Python 3 nonlocal statement.
|
f11255:c0:m81
|
def star_assign_item_check(self, original, loc, tokens):
|
return self.check_py("<STR_LIT:3>", "<STR_LIT>", original, loc, tokens)<EOL>
|
Check for Python 3 starred assignment.
|
f11255:c0:m82
|
def star_expr_check(self, original, loc, tokens):
|
return self.check_py("<STR_LIT>", "<STR_LIT>", original, loc, tokens)<EOL>
|
Check for Python 3.5 star unpacking.
|
f11255:c0:m83
|
def star_sep_check(self, original, loc, tokens):
|
return self.check_py("<STR_LIT:3>", "<STR_LIT>", original, loc, tokens)<EOL>
|
Check for Python 3 keyword-only arguments.
|
f11255:c0:m84
|
def matrix_at_check(self, original, loc, tokens):
|
return self.check_py("<STR_LIT>", "<STR_LIT>", original, loc, tokens)<EOL>
|
Check for Python 3.5 matrix multiplication.
|
f11255:c0:m85
|
def async_stmt_check(self, original, loc, tokens):
|
return self.check_py("<STR_LIT>", "<STR_LIT>", original, loc, tokens)<EOL>
|
Check for Python 3.5 async for/with.
|
f11255:c0:m86
|
def async_comp_check(self, original, loc, tokens):
|
return self.check_py("<STR_LIT>", "<STR_LIT>", original, loc, tokens)<EOL>
|
Check for Python 3.6 async comprehension.
|
f11255:c0:m87
|
def f_string_check(self, original, loc, tokens):
|
return self.check_py("<STR_LIT>", "<STR_LIT>", original, loc, tokens)<EOL>
|
Handle Python 3.6 format strings.
|
f11255:c0:m88
|
def parse_single(self, inputstring):
|
return self.parse(inputstring, self.single_parser, {}, {"<STR_LIT>": "<STR_LIT:none>", "<STR_LIT>": "<STR_LIT:none>"})<EOL>
|
Parse single-line code.
|
f11255:c0:m89
|
def parse_file(self, inputstring, addhash=True):
|
if addhash:<EOL><INDENT>use_hash = self.genhash(False, inputstring)<EOL><DEDENT>else:<EOL><INDENT>use_hash = None<EOL><DEDENT>return self.parse(inputstring, self.file_parser, {"<STR_LIT>": True}, {"<STR_LIT>": "<STR_LIT:file>", "<STR_LIT>": use_hash})<EOL>
|
Parse file code.
|
f11255:c0:m90
|
def parse_exec(self, inputstring):
|
return self.parse(inputstring, self.file_parser, {}, {"<STR_LIT>": "<STR_LIT:file>", "<STR_LIT>": "<STR_LIT:none>"})<EOL>
|
Parse exec code.
|
f11255:c0:m91
|
def parse_package(self, inputstring, addhash=True):
|
if addhash:<EOL><INDENT>use_hash = self.genhash(True, inputstring)<EOL><DEDENT>else:<EOL><INDENT>use_hash = None<EOL><DEDENT>return self.parse(inputstring, self.file_parser, {"<STR_LIT>": True}, {"<STR_LIT>": "<STR_LIT>", "<STR_LIT>": use_hash})<EOL>
|
Parse package code.
|
f11255:c0:m92
|
def parse_block(self, inputstring):
|
return self.parse(inputstring, self.file_parser, {}, {"<STR_LIT>": "<STR_LIT:none>", "<STR_LIT>": "<STR_LIT:none>"})<EOL>
|
Parse block code.
|
f11255:c0:m93
|
def parse_sys(self, inputstring):
|
return self.parse(inputstring, self.file_parser, {}, {"<STR_LIT>": "<STR_LIT>", "<STR_LIT>": "<STR_LIT:none>"})<EOL>
|
Parse module code.
|
f11255:c0:m94
|
def parse_eval(self, inputstring):
|
return self.parse(inputstring, self.eval_parser, {"<STR_LIT>": True}, {"<STR_LIT>": "<STR_LIT:none>", "<STR_LIT>": "<STR_LIT:none>"})<EOL>
|
Parse eval code.
|
f11255:c0:m95
|
def parse_debug(self, inputstring):
|
return self.parse(inputstring, self.file_parser, {"<STR_LIT>": True}, {"<STR_LIT>": "<STR_LIT:none>", "<STR_LIT>": "<STR_LIT:none>", "<STR_LIT>": False})<EOL>
|
Parse debug code.
|
f11255:c0:m96
|
def warm_up(self):
|
result = self.parse_debug("<STR_LIT>")<EOL>internal_assert(result == "<STR_LIT>", "<STR_LIT>", result)<EOL>
|
Warm up the compiler by running something through it.
|
f11255:c0:m97
|
def evaluate_tokens(tokens):
|
if isinstance(tokens, str):<EOL><INDENT>return tokens<EOL><DEDENT>elif isinstance(tokens, ParseResults):<EOL><INDENT>toklist, name, asList, modal = tokens.__getnewargs__()<EOL>new_toklist = [evaluate_tokens(toks) for toks in toklist]<EOL>new_tokens = ParseResults(new_toklist, name, asList, modal)<EOL>new_tokdict = {}<EOL>for name, occurrences in tokens._ParseResults__tokdict.items():<EOL><INDENT>new_occurences = []<EOL>for value, position in occurrences:<EOL><INDENT>if isinstance(value, ParseResults) and value._ParseResults__toklist == toklist:<EOL><INDENT>new_value = new_tokens<EOL><DEDENT>else:<EOL><INDENT>try:<EOL><INDENT>new_value = new_toklist[toklist.index(value)]<EOL><DEDENT>except ValueError:<EOL><INDENT>complain(lambda: CoconutInternalException("<STR_LIT>".format(<EOL>value,<EOL>toklist,<EOL>)))<EOL>new_value = evaluate_tokens(value)<EOL><DEDENT><DEDENT>new_occurences.append(_ParseResultsWithOffset(new_value, position))<EOL><DEDENT>new_tokdict[name] = occurrences<EOL><DEDENT>new_tokens._ParseResults__accumNames.update(tokens._ParseResults__accumNames)<EOL>new_tokens._ParseResults__tokdict.update(new_tokdict)<EOL>return new_tokens<EOL><DEDENT>elif isinstance(tokens, ComputationNode):<EOL><INDENT>return tokens.evaluate()<EOL><DEDENT>elif isinstance(tokens, (list, tuple)):<EOL><INDENT>return [evaluate_tokens(inner_toks) for inner_toks in tokens]<EOL><DEDENT>else:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", tokens)<EOL><DEDENT>
|
Evaluate the given tokens in the computation graph.
|
f11256:m0
|
def add_action(item, action):
|
return item.copy().addParseAction(action)<EOL>
|
Add the parse action to a copy of the given item.
|
f11256:m1
|
def attach(item, action, greedy=False, ignore_no_tokens=None, ignore_one_token=None):
|
if use_computation_graph:<EOL><INDENT>if ignore_no_tokens is None:<EOL><INDENT>ignore_no_tokens = getattr(action, "<STR_LIT>", False)<EOL><DEDENT>if ignore_one_token is None:<EOL><INDENT>ignore_one_token = getattr(action, "<STR_LIT>", False)<EOL><DEDENT>kwargs = {}<EOL>if greedy:<EOL><INDENT>kwargs["<STR_LIT>"] = greedy<EOL><DEDENT>if ignore_no_tokens:<EOL><INDENT>kwargs["<STR_LIT>"] = ignore_no_tokens<EOL><DEDENT>if ignore_one_token:<EOL><INDENT>kwargs["<STR_LIT>"] = ignore_one_token<EOL><DEDENT>action = partial(ComputationNode, action, **kwargs)<EOL><DEDENT>return add_action(item, action)<EOL>
|
Set the parse action for the given item to create a node in the computation graph.
|
f11256:m2
|
def final(item):
|
if use_computation_graph:<EOL><INDENT>item = add_action(item, evaluate_tokens)<EOL><DEDENT>return item<EOL>
|
Collapse the computation graph upon parsing the given item.
|
f11256:m3
|
def unpack(tokens):
|
logger.log_tag("<STR_LIT>", tokens)<EOL>if use_computation_graph:<EOL><INDENT>tokens = evaluate_tokens(tokens)<EOL><DEDENT>if isinstance(tokens, ParseResults) and len(tokens) == <NUM_LIT:1>:<EOL><INDENT>tokens = tokens[<NUM_LIT:0>]<EOL><DEDENT>return tokens<EOL>
|
Evaluate and unpack the given computation graph.
|
f11256:m4
|
def parse(grammar, text):
|
return unpack(grammar.parseWithTabs().parseString(text))<EOL>
|
Parse text using grammar.
|
f11256:m5
|
def all_matches(grammar, text):
|
for tokens, start, stop in grammar.parseWithTabs().scanString(text):<EOL><INDENT>yield unpack(tokens), start, stop<EOL><DEDENT>
|
Find all matches for grammar in text.
|
f11256:m6
|
def match_in(grammar, text):
|
for result in grammar.parseWithTabs().scanString(text):<EOL><INDENT>return True<EOL><DEDENT>return False<EOL>
|
Determine if there is a match for grammar in text.
|
f11256:m7
|
def append_it(iterator, last_val):
|
for x in iterator:<EOL><INDENT>yield x<EOL><DEDENT>yield last_val<EOL>
|
Iterate through iterator, then yield last_val.
|
f11256:m8
|
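`append_it` is how the character scanners above (`passthrough_proc`, `passthrough_repl`, `str_repl`) get a sentinel final element, such as a trailing newline or `None`. A quick usage check:

```python
def append_it(iterator, last_val):
    for x in iterator:
        yield x
    yield last_val

assert list(append_it(iter("ab"), None)) == ["a", "b", None]
```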
def get_vers_for_target(target):
|
target_info = get_target_info(target)<EOL>if not target_info:<EOL><INDENT>return py2_vers + py3_vers<EOL><DEDENT>elif len(target_info) == <NUM_LIT:1>:<EOL><INDENT>if target_info == (<NUM_LIT:2>,):<EOL><INDENT>return py2_vers<EOL><DEDENT>elif target_info == (<NUM_LIT:3>,):<EOL><INDENT>return py3_vers<EOL><DEDENT>else:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", target_info)<EOL><DEDENT><DEDENT>elif target_info == (<NUM_LIT:3>, <NUM_LIT:3>):<EOL><INDENT>return [(<NUM_LIT:3>, <NUM_LIT:3>), (<NUM_LIT:3>, <NUM_LIT:4>)]<EOL><DEDENT>else:<EOL><INDENT>return [target_info[:<NUM_LIT:2>]]<EOL><DEDENT>
|
Get a list of the versions supported by the given target.
|
f11256:m9
|
def get_target_info_len2(target, mode="<STR_LIT>"):
|
supported_vers = get_vers_for_target(target)<EOL>if mode == "<STR_LIT>":<EOL><INDENT>return supported_vers[<NUM_LIT:0>]<EOL><DEDENT>elif mode == "<STR_LIT>":<EOL><INDENT>return supported_vers[-<NUM_LIT:1>]<EOL><DEDENT>elif mode == "<STR_LIT>":<EOL><INDENT>if sys.version_info[:<NUM_LIT:2>] in supported_vers:<EOL><INDENT>return sys.version_info[:<NUM_LIT:2>]<EOL><DEDENT>else:<EOL><INDENT>return supported_vers[-<NUM_LIT:1>]<EOL><DEDENT><DEDENT>else:<EOL><INDENT>raise CoconutInternalException("<STR_LIT>", mode)<EOL><DEDENT>
|
Convert target into a length-2 Python version tuple.
Modes:
- "lowest" (default): the lowest version supported by the target.
- "highest": the highest version supported by the target.
- "nearest": the current version if the target supports it, otherwise the highest.
|
f11256:m10
|
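A runnable sketch of the mode resolution described in the docstring, with the mode strings ("lowest", "highest", "nearest") taken from the docstring since the literals in the body are masked:

```python
import sys

def pick_version(supported_vers, mode="lowest"):
    # `supported_vers` plays the role of get_vers_for_target's result.
    if mode == "lowest":
        return supported_vers[0]
    elif mode == "highest":
        return supported_vers[-1]
    elif mode == "nearest":
        if sys.version_info[:2] in supported_vers:
            return sys.version_info[:2]
        return supported_vers[-1]  # fall back to the highest
    raise ValueError("unknown mode: " + mode)

vers = [(3, 3), (3, 4)]
assert pick_version(vers) == (3, 3)
assert pick_version(vers, "highest") == (3, 4)
```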
def join_args(*arglists):
|
return "<STR_LIT:U+002CU+0020>".join(arg for args in arglists for arg in args if arg)<EOL>
|
Join split argument tokens.
|
f11256:m11
|
def paren_join(items, sep):
|
return items[<NUM_LIT:0>] if len(items) == <NUM_LIT:1> else "<STR_LIT:(>" + ("<STR_LIT>" + sep + "<STR_LIT>").join(items) + "<STR_LIT:)>"<EOL>
|
Join items by sep with parens around individual items but not the whole.
|
f11256:m12
|
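A hedged reconstruction of `paren_join` matching its docstring, assuming the masked literals are `") "` and `" ("`:

```python
def paren_join(items, sep):
    # Parens around each item, none around the whole.
    return items[0] if len(items) == 1 else "(" + (") " + sep + " (").join(items) + ")"

assert paren_join(["a"], "and") == "a"
assert paren_join(["a", "b", "c"], "and") == "(a) and (b) and (c)"
```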
def longest(*args):
|
internal_assert(len(args) >= <NUM_LIT:2>, "<STR_LIT>")<EOL>matcher = args[<NUM_LIT:0>] + skip_whitespace<EOL>for elem in args[<NUM_LIT:1>:]:<EOL><INDENT>matcher ^= elem + skip_whitespace<EOL><DEDENT>return matcher<EOL>
|
Match the longest of the given grammar elements.
|
f11256:m13
|
def addskip(skips, skip):
|
if skip < <NUM_LIT:1>:<EOL><INDENT>complain(CoconutInternalException("<STR_LIT>" + str(skip)))<EOL><DEDENT>else:<EOL><INDENT>skips.append(skip)<EOL><DEDENT>return skips<EOL>
|
Add a line skip to the skips.
|
f11256:m14
|
def paren_change(inputstring, opens=opens, closes=closes):
|
count = <NUM_LIT:0><EOL>for c in inputstring:<EOL><INDENT>if c in opens: <EOL><INDENT>count -= <NUM_LIT:1><EOL><DEDENT>elif c in closes: <EOL><INDENT>count += <NUM_LIT:1><EOL><DEDENT><DEDENT>return count<EOL>
|
Determine the parenthetical change of level (num closes - num opens).
|
f11256:m16
|
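Note the sign convention in `paren_change`: opens count down, closes count up, so the result is negative while brackets remain open. A self-contained sketch, with the bracket sets assumed to be the usual three pairs (the real `opens`/`closes` defaults are masked):

```python
OPENS, CLOSES = "([{", ")]}"  # assumed bracket sets

def paren_change(s, opens=OPENS, closes=CLOSES):
    count = 0
    for c in s:
        if c in opens:
            count -= 1  # an unmatched open lowers the level
        elif c in closes:
            count += 1
    return count

assert paren_change("f(x, [y") == -2  # two brackets left open
assert paren_change("f(x)") == 0      # balanced
```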
def ind_change(inputstring):
|
return inputstring.count(openindent) - inputstring.count(closeindent)<EOL>
|
Determine the change in indentation level (num opens - num closes).
|
f11256:m17
|
def compile_regex(regex):
|
return re.compile(regex, re.U)<EOL>
|
Compile the given regex with unicode support.
|
f11256:m18
|
def keyword(name):
|
return Regex(name + r"<STR_LIT>", re.U)<EOL>
|
Construct a grammar which matches name as a Python keyword.
|
f11256:m19
|
def fixto(item, output):
|
return add_action(item, replaceWith(output))<EOL>
|
Force an item to result in a specific output.
|
f11256:m20
|
def addspace(item):
|
return attach(item, "<STR_LIT:U+0020>".join, ignore_no_tokens=True, ignore_one_token=True)<EOL>
|
Condense and add space to the tokenized output.
|
f11256:m21
|
def condense(item):
|
return attach(item, "<STR_LIT>".join, ignore_no_tokens=True, ignore_one_token=True)<EOL>
|
Condense the tokenized output.
|
f11256:m22
|
def maybeparens(lparen, item, rparen):
|
return item | lparen.suppress() + item + rparen.suppress()<EOL>
|
Wrap an item in optional parentheses, only applying them if necessary.
|
f11256:m23
|
def tokenlist(item, sep, suppress=True):
|
if suppress:<EOL><INDENT>sep = sep.suppress()<EOL><DEDENT>return item + ZeroOrMore(sep + item) + Optional(sep)<EOL>
|
Create a list of tokens matching the item.
|
f11256:m24
|
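A runnable version of `tokenlist` with pyparsing, showing the `item (sep item)* sep?` shape: a separated list that tolerates a trailing separator and drops the separators from the output (requires pyparsing installed):

```python
from pyparsing import Literal, Optional, Word, ZeroOrMore, alphas

def tokenlist(item, sep, suppress=True):
    # item (sep item)* sep? -- trailing separator allowed.
    if suppress:
        sep = sep.suppress()
    return item + ZeroOrMore(sep + item) + Optional(sep)

names = tokenlist(Word(alphas), Literal(","))
assert list(names.parseString("a, b, c,")) == ["a", "b", "c"]
```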
def itemlist(item, sep, suppress_trailing=True):
|
return condense(item + ZeroOrMore(addspace(sep + item)) + Optional(sep.suppress() if suppress_trailing else sep))<EOL>
|
Create a list of items separated by sep.
|
f11256:m25
|
def exprlist(expr, op):
|
return addspace(expr + ZeroOrMore(op + expr))<EOL>
|
Create a list of exprs separated by ops.
|
f11256:m26
|
def rem_comment(line):
|
return line.split("<STR_LIT:#>", <NUM_LIT:1>)[<NUM_LIT:0>].rstrip()<EOL>
|
Remove a comment from a line.
|
f11256:m27
|
def should_indent(code):
|
last = rem_comment(code.splitlines()[-<NUM_LIT:1>])<EOL>return last.endswith("<STR_LIT::>") or last.endswith("<STR_LIT:\\>") or paren_change(last) < <NUM_LIT:0><EOL>
|
Determine whether the next line should be indented.
|
f11256:m28
|
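The masked literals in `should_indent` are recoverable from context: a line calls for indentation if, after stripping its comment, it ends a block header (`:`), continues onto the next line (`\`), or leaves brackets open. A self-contained reconstruction under those assumptions:

```python
def rem_comment(line):
    return line.split("#", 1)[0].rstrip()

def paren_change(s, opens="([{", closes=")]}"):
    return sum((c in closes) - (c in opens) for c in s)

def should_indent(code):
    last = rem_comment(code.splitlines()[-1])
    return last.endswith(":") or last.endswith("\\") or paren_change(last) < 0

assert should_indent("def f():")
assert should_indent("x = (1 +")
assert not should_indent("x = 1")
```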
def split_comment(line):
|
base = rem_comment(line)<EOL>return base, line[len(base):]<EOL>
|
Split line into base and comment.
|
f11256:m29
|
def split_leading_comment(inputstring):
|
if inputstring.startswith("<STR_LIT:#>"):<EOL><INDENT>comment, rest = inputstring.split("<STR_LIT:\n>", <NUM_LIT:1>)<EOL>return comment + "<STR_LIT:\n>", rest<EOL><DEDENT>else:<EOL><INDENT>return "<STR_LIT>", inputstring<EOL><DEDENT>
|
Split into leading comment and rest.
|
f11256:m30
|
def split_leading_indent(line, max_indents=None):
|
indent = "<STR_LIT>"<EOL>while (<EOL>(max_indents is None or max_indents > <NUM_LIT:0>)<EOL>and line.startswith((openindent, closeindent))<EOL>) or line.lstrip() != line:<EOL><INDENT>if max_indents is not None and line.startswith((openindent, closeindent)):<EOL><INDENT>max_indents -= <NUM_LIT:1><EOL><DEDENT>indent += line[<NUM_LIT:0>]<EOL>line = line[<NUM_LIT:1>:]<EOL><DEDENT>return indent, line<EOL>
|
Split line into leading indent and main.
|
f11256:m31
|
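The indent splitters operate on sentinel characters (`openindent`/`closeindent`) that the compiler inserts to mark block structure; the actual sentinel values are masked above, so the ones below are stand-ins. A runnable sketch of `split_leading_indent`:

```python
OPENINDENT, CLOSEINDENT = "\u204b", "\xb6"  # assumed stand-in sentinels

def split_leading_indent(line, max_indents=None):
    indent = ""
    while (
        (max_indents is None or max_indents > 0)
        and line.startswith((OPENINDENT, CLOSEINDENT))
    ) or line.lstrip() != line:
        if max_indents is not None and line.startswith((OPENINDENT, CLOSEINDENT)):
            max_indents -= 1
        indent += line[0]  # move one indent char or space across
        line = line[1:]
    return indent, line

ind, main = split_leading_indent(OPENINDENT + "  x = 1")
assert ind == OPENINDENT + "  " and main == "x = 1"
```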
def split_trailing_indent(line, max_indents=None):
|
indent = "<STR_LIT>"<EOL>while (<EOL>(max_indents is None or max_indents > <NUM_LIT:0>)<EOL>and line.endswith((openindent, closeindent))<EOL>) or line.rstrip() != line:<EOL><INDENT>if max_indents is not None and (line.endswith(openindent) or line.endswith(closeindent)):<EOL><INDENT>max_indents -= <NUM_LIT:1><EOL><DEDENT>indent = line[-<NUM_LIT:1>] + indent<EOL>line = line[:-<NUM_LIT:1>]<EOL><DEDENT>return line, indent<EOL>
|
Split line into main and trailing indent.
|
f11256:m32
|
def split_leading_trailing_indent(line, max_indents=None):
|
leading_indent, line = split_leading_indent(line, max_indents)<EOL>line, trailing_indent = split_trailing_indent(line, max_indents)<EOL>return leading_indent, line, trailing_indent<EOL>
|
Split leading and trailing indent.
|
f11256:m33
|