idx
int64 0
63k
| question
stringlengths 61
4.03k
| target
stringlengths 6
1.23k
|
|---|---|---|
7,800
|
def repl_proc ( self , inputstring , log = True , ** kwargs ) : return self . apply_procs ( self . replprocs , kwargs , inputstring , log = log )
|
Process using replprocs .
|
7,801
|
def header_proc ( self , inputstring , header = "file" , initial = "initial" , use_hash = None , ** kwargs ) : pre_header = self . getheader ( initial , use_hash = use_hash , polish = False ) main_header = self . getheader ( header , polish = False ) if self . minify : main_header = minify ( main_header ) return pre_header + self . docstring + main_header + inputstring
|
Add the header .
|
7,802
|
def set_docstring ( self , loc , tokens ) : internal_assert ( len ( tokens ) == 2 , "invalid docstring tokens" , tokens ) self . docstring = self . reformat ( tokens [ 0 ] ) + "\n\n" return tokens [ 1 ]
|
Set the docstring .
|
7,803
|
def yield_from_handle ( self , tokens ) : internal_assert ( len ( tokens ) == 1 , "invalid yield from tokens" , tokens ) if self . target_info < ( 3 , 3 ) : return ( yield_from_var + " = " + tokens [ 0 ] + "\nfor " + yield_item_var + " in " + yield_from_var + ":\n" + openindent + "yield " + yield_item_var + "\n" + closeindent ) else : return "yield from " + tokens [ 0 ]
|
Process Python 3 . 3 yield from .
|
7,804
|
def endline_handle ( self , original , loc , tokens ) : internal_assert ( len ( tokens ) == 1 , "invalid endline tokens" , tokens ) lines = tokens [ 0 ] . splitlines ( True ) if self . minify : lines = lines [ 0 ] out = [ ] ln = lineno ( loc , original ) for endline in lines : out . append ( self . wrap_line_number ( self . adjust ( ln ) ) + endline ) ln += 1 return "" . join ( out )
|
Add line number information to end of line .
|
7,805
|
def comment_handle ( self , original , loc , tokens ) : internal_assert ( len ( tokens ) == 1 , "invalid comment tokens" , tokens ) ln = self . adjust ( lineno ( loc , original ) ) internal_assert ( lambda : ln not in self . comments , "multiple comments on line" , ln ) self . comments [ ln ] = tokens [ 0 ] return ""
|
Store comment in comments .
|
7,806
|
def augassign_handle ( self , tokens ) : internal_assert ( len ( tokens ) == 3 , "invalid assignment tokens" , tokens ) name , op , item = tokens out = "" if op == "|>=" : out += name + " = (" + item + ")(" + name + ")" elif op == "|*>=" : out += name + " = (" + item + ")(*" + name + ")" elif op == "<|=" : out += name + " = " + name + "((" + item + "))" elif op == "<*|=" : out += name + " = " + name + "(*(" + item + "))" elif op == "..=" or op == "<..=" : out += name + " = _coconut_forward_compose((" + item + "), " + name + ")" elif op == "..>=" : out += name + " = _coconut_forward_compose(" + name + ", (" + item + "))" elif op == "<*..=" : out += name + " = _coconut_forward_star_compose((" + item + "), " + name + ")" elif op == "..*>=" : out += name + " = _coconut_forward_star_compose(" + name + ", (" + item + "))" elif op == "??=" : out += name + " = " + item + " if " + name + " is None else " + name elif op == "::=" : ichain_var = lazy_chain_var + "_" + str ( self . ichain_count ) self . ichain_count += 1 out += ( ichain_var + " = " + name + "\n" + name + " = _coconut.itertools.chain.from_iterable(" + lazy_list_handle ( [ ichain_var , "(" + item + ")" ] ) + ")" ) else : out += name + " " + op + " " + item return out
|
Process assignments .
|
7,807
|
def classlist_handle ( self , original , loc , tokens ) : if len ( tokens ) == 0 : if self . target . startswith ( "3" ) : return "" else : return "(_coconut.object)" elif len ( tokens ) == 1 and len ( tokens [ 0 ] ) == 1 : if "tests" in tokens [ 0 ] : if self . strict and tokens [ 0 ] [ 0 ] == "(object)" : raise self . make_err ( CoconutStyleError , "unnecessary inheriting from object (Coconut does this automatically)" , original , loc ) return tokens [ 0 ] [ 0 ] elif "args" in tokens [ 0 ] : if self . target . startswith ( "3" ) : return tokens [ 0 ] [ 0 ] else : raise self . make_err ( CoconutTargetError , "found Python 3 keyword class definition" , original , loc , target = "3" ) else : raise CoconutInternalException ( "invalid inner classlist token" , tokens [ 0 ] ) else : raise CoconutInternalException ( "invalid classlist tokens" , tokens )
|
Process class inheritance lists .
|
7,808
|
def import_handle ( self , original , loc , tokens ) : if len ( tokens ) == 1 : imp_from , imports = None , tokens [ 0 ] elif len ( tokens ) == 2 : imp_from , imports = tokens if imp_from == "__future__" : self . strict_err_or_warn ( "unnecessary from __future__ import (Coconut does these automatically)" , original , loc ) return "" else : raise CoconutInternalException ( "invalid import tokens" , tokens ) if self . strict : self . unused_imports . update ( imported_names ( imports ) ) return universal_import ( imports , imp_from = imp_from , target = self . target )
|
Universalizes imports .
|
7,809
|
def complex_raise_stmt_handle ( self , tokens ) : internal_assert ( len ( tokens ) == 2 , "invalid raise from tokens" , tokens ) if self . target . startswith ( "3" ) : return "raise " + tokens [ 0 ] + " from " + tokens [ 1 ] else : return ( raise_from_var + " = " + tokens [ 0 ] + "\n" + raise_from_var + ".__cause__ = " + tokens [ 1 ] + "\n" + "raise " + raise_from_var )
|
Process Python 3 raise from statement .
|
7,810
|
def dict_comp_handle ( self , loc , tokens ) : internal_assert ( len ( tokens ) == 3 , "invalid dictionary comprehension tokens" , tokens ) if self . target . startswith ( "3" ) : key , val , comp = tokens return "{" + key + ": " + val + " " + comp + "}" else : key , val , comp = tokens return "dict(((" + key + "), (" + val + ")) " + comp + ")"
|
Process Python 2 . 7 dictionary comprehension .
|
7,811
|
def pattern_error ( self , original , loc , value_var , check_var ) : base_line = clean ( self . reformat ( getline ( loc , original ) ) ) line_wrap = self . wrap_str_of ( base_line ) repr_wrap = self . wrap_str_of ( ascii ( base_line ) ) return ( "if not " + check_var + ":\n" + openindent + match_err_var + ' = _coconut_MatchError("pattern-matching failed for " ' + repr_wrap + ' " in " + _coconut.repr(_coconut.repr(' + value_var + ")))\n" + match_err_var + ".pattern = " + line_wrap + "\n" + match_err_var + ".value = " + value_var + "\nraise " + match_err_var + "\n" + closeindent )
|
Construct a pattern - matching error message .
|
7,812
|
def destructuring_stmt_handle ( self , original , loc , tokens ) : internal_assert ( len ( tokens ) == 2 , "invalid destructuring assignment tokens" , tokens ) matches , item = tokens out = match_handle ( loc , [ matches , "in" , item , None ] ) out += self . pattern_error ( original , loc , match_to_var , match_check_var ) return out
|
Process match assign blocks .
|
7,813
|
def name_match_funcdef_handle ( self , original , loc , tokens ) : if len ( tokens ) == 2 : func , matches = tokens cond = None elif len ( tokens ) == 3 : func , matches , cond = tokens else : raise CoconutInternalException ( "invalid match function definition tokens" , tokens ) matcher = Matcher ( loc , match_check_var ) req_args , def_args , star_arg , kwd_args , dubstar_arg = split_args_list ( matches , loc ) matcher . match_function ( match_to_args_var , match_to_kwargs_var , req_args + def_args , star_arg , kwd_args , dubstar_arg ) if cond is not None : matcher . add_guard ( cond ) before_docstring = ( "def " + func + "(*" + match_to_args_var + ", **" + match_to_kwargs_var + "):\n" + openindent ) after_docstring = ( match_check_var + " = False\n" + matcher . out ( ) + self . pattern_error ( original , loc , match_to_args_var , match_check_var ) + closeindent ) return before_docstring , after_docstring
|
Process match defs . Result must be passed to insert_docstring_handle .
|
7,814
|
def op_match_funcdef_handle ( self , original , loc , tokens ) : if len ( tokens ) == 3 : func , args = get_infix_items ( tokens ) cond = None elif len ( tokens ) == 4 : func , args = get_infix_items ( tokens [ : - 1 ] ) cond = tokens [ - 1 ] else : raise CoconutInternalException ( "invalid infix match function definition tokens" , tokens ) name_tokens = [ func , args ] if cond is not None : name_tokens . append ( cond ) return self . name_match_funcdef_handle ( original , loc , name_tokens )
|
Process infix match defs . Result must be passed to insert_docstring_handle .
|
7,815
|
def set_literal_handle ( self , tokens ) : internal_assert ( len ( tokens ) == 1 and len ( tokens [ 0 ] ) == 1 , "invalid set literal tokens" , tokens ) if self . target_info < ( 2 , 7 ) : return "_coconut.set(" + set_to_tuple ( tokens [ 0 ] ) + ")" else : return "{" + tokens [ 0 ] [ 0 ] + "}"
|
Converts set literals to the right form for the target Python .
|
7,816
|
def set_letter_literal_handle ( self , tokens ) : if len ( tokens ) == 1 : set_type = tokens [ 0 ] if set_type == "s" : return "_coconut.set()" elif set_type == "f" : return "_coconut.frozenset()" else : raise CoconutInternalException ( "invalid set type" , set_type ) elif len ( tokens ) == 2 : set_type , set_items = tokens internal_assert ( len ( set_items ) == 1 , "invalid set literal item" , tokens [ 0 ] ) if set_type == "s" : return self . set_literal_handle ( [ set_items ] ) elif set_type == "f" : return "_coconut.frozenset(" + set_to_tuple ( set_items ) + ")" else : raise CoconutInternalException ( "invalid set type" , set_type ) else : raise CoconutInternalException ( "invalid set literal tokens" , tokens )
|
Process set literals .
|
7,817
|
def exec_stmt_handle ( self , tokens ) : internal_assert ( 1 <= len ( tokens ) <= 3 , "invalid exec statement tokens" , tokens ) if self . target . startswith ( "2" ) : out = "exec " + tokens [ 0 ] if len ( tokens ) > 1 : out += " in " + ", " . join ( tokens [ 1 : ] ) return out else : return "exec(" + ", " . join ( tokens ) + ")"
|
Process Python - 3 - style exec statements .
|
7,818
|
def stmt_lambdef_handle ( self , original , loc , tokens ) : if len ( tokens ) == 2 : params , stmts = tokens elif len ( tokens ) == 3 : params , stmts , last = tokens if "tests" in tokens : stmts = stmts . asList ( ) + [ "return " + last ] else : stmts = stmts . asList ( ) + [ last ] else : raise CoconutInternalException ( "invalid statement lambda tokens" , tokens ) name = self . stmt_lambda_name ( ) body = openindent + self . stmt_lambda_proc ( "\n" . join ( stmts ) ) + closeindent if isinstance ( params , str ) : self . stmt_lambdas . append ( "def " + name + params + ":\n" + body , ) else : params . insert ( 0 , name ) self . stmt_lambdas . append ( "" . join ( self . name_match_funcdef_handle ( original , loc , params ) ) + body , ) return name
|
Process multi - line lambdef statements .
|
7,819
|
def complain_on_err ( self ) : try : yield except ParseBaseException as err : complain ( self . make_parse_err ( err , reformat = False , include_ln = False ) ) except CoconutException as err : complain ( err )
|
Complain about any parsing - related errors raised inside .
|
7,820
|
def split_docstring ( self , block ) : try : first_line , rest_of_lines = block . split ( "\n" , 1 ) except ValueError : pass else : raw_first_line = split_leading_trailing_indent ( rem_comment ( first_line ) ) [ 1 ] if match_in ( self . just_a_string , raw_first_line ) : return first_line , rest_of_lines return None , block
|
Split a code block into a docstring and a body .
|
7,821
|
def tre_return ( self , func_name , func_args , func_store , use_mock = True ) : def tre_return_handle ( loc , tokens ) : internal_assert ( len ( tokens ) == 1 , "invalid tail recursion elimination tokens" , tokens ) args = tokens [ 0 ] [ 1 : - 1 ] if match_in ( self . stores_scope , args ) : return ignore_transform if self . no_tco : tco_recurse = "return " + func_name + "(" + args + ")" else : tco_recurse = "return _coconut_tail_call(" + func_name + ( ", " + args if args else "" ) + ")" if not func_args or func_args == args : tre_recurse = "continue" elif use_mock : tre_recurse = func_args + " = " + tre_mock_var + "(" + args + ")" + "\ncontinue" else : tre_recurse = func_args + " = " + args + "\ncontinue" return ( "try:\n" + openindent + tre_check_var + " = " + func_name + " is " + func_store + "\n" + closeindent + "except _coconut.NameError:\n" + openindent + tre_check_var + " = False\n" + closeindent + "if " + tre_check_var + ":\n" + openindent + tre_recurse + "\n" + closeindent + "else:\n" + openindent + tco_recurse + "\n" + closeindent ) return attach ( self . start_marker + ( keyword ( "return" ) + keyword ( func_name ) ) . suppress ( ) + self . parens + self . end_marker , tre_return_handle , )
|
Generate grammar element that matches a string which is just a TRE return statement .
|
7,822
|
def transform_returns ( self , raw_lines , tre_return_grammar = None , use_mock = None , is_async = False ) : lines = [ ] tco = False tre = False level = 0 disabled_until_level = None attempt_tre = tre_return_grammar is not None attempt_tco = not is_async and not self . no_tco if is_async : internal_assert ( not attempt_tre and not attempt_tco , "cannot tail call optimize async functions" ) for line in raw_lines : indent , body , dedent = split_leading_trailing_indent ( line ) base , comment = split_comment ( body ) level += ind_change ( indent ) if disabled_until_level is not None : if level <= disabled_until_level : disabled_until_level = None if disabled_until_level is None : if not is_async and self . yield_regex . search ( body ) : lines = raw_lines break elif self . def_regex . match ( body ) : disabled_until_level = level elif not is_async and self . tre_disable_regex . match ( body ) : disabled_until_level = level else : if is_async : if self . return_regex . match ( base ) : to_return = base [ len ( "return" ) : ] . strip ( ) if to_return : line = indent + "raise _coconut.asyncio.Return(" + to_return + ")" + comment + dedent tre_base = None if attempt_tre : with self . complain_on_err ( ) : tre_base = transform ( tre_return_grammar , base ) if tre_base is not None : line = indent + tre_base + comment + dedent tre = True tco = not self . no_tco if attempt_tco and tre_base is None : tco_base = None with self . complain_on_err ( ) : tco_base = transform ( self . tco_return , base ) if tco_base is not None : line = indent + tco_base + comment + dedent tco = True level += ind_change ( dedent ) lines . append ( line ) func_code = "" . join ( lines ) if is_async : return func_code else : return func_code , tco , tre
|
Apply TCO TRE or async universalization to the given function .
|
7,823
|
def decoratable_funcdef_stmt_handle ( self , original , loc , tokens , is_async = False ) : if len ( tokens ) == 1 : decorators , funcdef = "" , tokens [ 0 ] elif len ( tokens ) == 2 : decorators , funcdef = tokens else : raise CoconutInternalException ( "invalid function definition tokens" , tokens ) raw_lines = funcdef . splitlines ( True ) def_stmt = raw_lines . pop ( 0 ) func_name , func_args , func_params = None , None , None with self . complain_on_err ( ) : func_name , func_args , func_params = parse ( self . split_func_name_args_params , def_stmt ) undotted_name = None if func_name is not None : if "." in func_name : undotted_name = func_name . rsplit ( "." , 1 ) [ - 1 ] def_stmt = def_stmt . replace ( func_name , undotted_name ) if is_async : if not self . target : raise self . make_err ( CoconutTargetError , "async function definition requires a specific target" , original , loc , target = "sys" , ) elif self . target_info >= ( 3 , 5 ) : def_stmt = "async " + def_stmt else : decorators += "@_coconut.asyncio.coroutine\n" if self . target_info < ( 3 , 3 ) : func_code = self . transform_returns ( raw_lines , is_async = True ) else : func_code = "" . join ( raw_lines ) else : attempt_tre = func_name is not None and not decorators if attempt_tre : use_mock = func_args and func_args != func_params [ 1 : - 1 ] func_store = tre_store_var + "_" + str ( self . tre_store_count ) self . tre_store_count += 1 tre_return_grammar = self . tre_return ( func_name , func_args , func_store , use_mock ) else : use_mock = func_store = tre_return_grammar = None func_code , tco , tre = self . transform_returns ( raw_lines , tre_return_grammar , use_mock , ) if tre : comment , rest = split_leading_comment ( func_code ) indent , base , dedent = split_leading_trailing_indent ( rest , 1 ) base , base_dedent = split_trailing_indent ( base ) docstring , base = self . 
split_docstring ( base ) func_code = ( comment + indent + ( docstring + "\n" if docstring is not None else "" ) + ( "def " + tre_mock_var + func_params + ": return " + func_args + "\n" if use_mock else "" ) + "while True:\n" + openindent + base + base_dedent + ( "\n" if "\n" not in base_dedent else "" ) + "return None" + ( "\n" if "\n" not in dedent else "" ) + closeindent + dedent + func_store + " = " + ( func_name if undotted_name is None else undotted_name ) + "\n" ) if tco : decorators += "@_coconut_tco\n" out = decorators + def_stmt + func_code if undotted_name is not None : out += func_name + " = " + undotted_name + "\n" return out
|
Determines if TCO or TRE can be done and if so does it handles dotted function names and universalizes async functions .
|
7,824
|
def await_item_handle ( self , original , loc , tokens ) : internal_assert ( len ( tokens ) == 1 , "invalid await statement tokens" , tokens ) if not self . target : self . make_err ( CoconutTargetError , "await requires a specific target" , original , loc , target = "sys" , ) elif self . target_info >= ( 3 , 5 ) : return "await " + tokens [ 0 ] elif self . target_info >= ( 3 , 3 ) : return "(yield from " + tokens [ 0 ] + ")" else : return "(yield _coconut.asyncio.From(" + tokens [ 0 ] + "))"
|
Check for Python 3 . 5 await expression .
|
7,825
|
def typedef_handle ( self , tokens ) : if len ( tokens ) == 1 : if self . target . startswith ( "3" ) : return " -> " + self . wrap_typedef ( tokens [ 0 ] ) + ":" else : return ":\n" + self . wrap_comment ( " type: (...) -> " + tokens [ 0 ] ) else : if len ( tokens ) == 3 : varname , typedef , comma = tokens default = "" elif len ( tokens ) == 4 : varname , typedef , default , comma = tokens else : raise CoconutInternalException ( "invalid type annotation tokens" , tokens ) if self . target . startswith ( "3" ) : return varname + ": " + self . wrap_typedef ( typedef ) + default + comma else : return varname + default + comma + self . wrap_passthrough ( self . wrap_comment ( " type: " + typedef ) + "\n" + " " * self . tabideal )
|
Process Python 3 type annotations .
|
7,826
|
def typed_assign_stmt_handle ( self , tokens ) : if len ( tokens ) == 2 : if self . target_info >= ( 3 , 6 ) : return tokens [ 0 ] + ": " + self . wrap_typedef ( tokens [ 1 ] ) else : return tokens [ 0 ] + " = None" + self . wrap_comment ( " type: " + tokens [ 1 ] ) elif len ( tokens ) == 3 : if self . target_info >= ( 3 , 6 ) : return tokens [ 0 ] + ": " + self . wrap_typedef ( tokens [ 1 ] ) + " = " + tokens [ 2 ] else : return tokens [ 0 ] + " = " + tokens [ 2 ] + self . wrap_comment ( " type: " + tokens [ 1 ] ) else : raise CoconutInternalException ( "invalid variable type annotation tokens" , tokens )
|
Process Python 3 . 6 variable type annotations .
|
7,827
|
def with_stmt_handle ( self , tokens ) : internal_assert ( len ( tokens ) == 2 , "invalid with statement tokens" , tokens ) withs , body = tokens if len ( withs ) == 1 or self . target_info >= ( 2 , 7 ) : return "with " + ", " . join ( withs ) + body else : return ( "" . join ( "with " + expr + ":\n" + openindent for expr in withs [ : - 1 ] ) + "with " + withs [ - 1 ] + body + closeindent * ( len ( withs ) - 1 ) )
|
Process with statements .
|
7,828
|
def case_stmt_handle ( self , loc , tokens ) : if len ( tokens ) == 2 : item , cases = tokens default = None elif len ( tokens ) == 3 : item , cases , default = tokens else : raise CoconutInternalException ( "invalid case tokens" , tokens ) check_var = case_check_var + "_" + str ( self . case_check_count ) self . case_check_count += 1 out = ( match_to_var + " = " + item + "\n" + match_case_tokens ( loc , cases [ 0 ] , check_var , True ) ) for case in cases [ 1 : ] : out += ( "if not " + check_var + ":\n" + openindent + match_case_tokens ( loc , case , check_var , False ) + closeindent ) if default is not None : out += "if not " + check_var + default return out
|
Process case blocks .
|
7,829
|
def lambdef_check ( self , original , loc , tokens ) : return self . check_strict ( "Python-style lambda" , original , loc , tokens )
|
Check for Python - style lambdas .
|
7,830
|
def endline_semicolon_check ( self , original , loc , tokens ) : return self . check_strict ( "semicolon at end of line" , original , loc , tokens )
|
Check for semicolons at the end of lines .
|
7,831
|
def u_string_check ( self , original , loc , tokens ) : return self . check_strict ( "Python-2-style unicode string" , original , loc , tokens )
|
Check for Python2 - style unicode strings .
|
7,832
|
def check_py ( self , version , name , original , loc , tokens ) : internal_assert ( len ( tokens ) == 1 , "invalid " + name + " tokens" , tokens ) if self . target_info < get_target_info ( version ) : raise self . make_err ( CoconutTargetError , "found Python " + "." . join ( version ) + " " + name , original , loc , target = version ) else : return tokens [ 0 ]
|
Check for Python - version - specific syntax .
|
7,833
|
def name_check ( self , original , loc , tokens ) : internal_assert ( len ( tokens ) == 1 , "invalid name tokens" , tokens ) if self . strict : self . unused_imports . discard ( tokens [ 0 ] ) if tokens [ 0 ] == "exec" : return self . check_py ( "3" , "exec function" , original , loc , tokens ) elif tokens [ 0 ] . startswith ( reserved_prefix ) : raise self . make_err ( CoconutSyntaxError , "variable names cannot start with reserved prefix " + reserved_prefix , original , loc ) else : return tokens [ 0 ]
|
Check the given base name .
|
7,834
|
def nonlocal_check ( self , original , loc , tokens ) : return self . check_py ( "3" , "nonlocal statement" , original , loc , tokens )
|
Check for Python 3 nonlocal statement .
|
7,835
|
def star_assign_item_check ( self , original , loc , tokens ) : return self . check_py ( "3" , "starred assignment (add 'match' to front to produce universal code)" , original , loc , tokens )
|
Check for Python 3 starred assignment .
|
7,836
|
def star_expr_check ( self , original , loc , tokens ) : return self . check_py ( "35" , "star unpacking (add 'match' to front to produce universal code)" , original , loc , tokens )
|
Check for Python 3 . 5 star unpacking .
|
7,837
|
def star_sep_check ( self , original , loc , tokens ) : return self . check_py ( "3" , "keyword-only argument separator (add 'match' to front to produce universal code)" , original , loc , tokens )
|
Check for Python 3 keyword - only arguments .
|
7,838
|
def matrix_at_check ( self , original , loc , tokens ) : return self . check_py ( "35" , "matrix multiplication" , original , loc , tokens )
|
Check for Python 3 . 5 matrix multiplication .
|
7,839
|
def async_comp_check ( self , original , loc , tokens ) : return self . check_py ( "36" , "async comprehension" , original , loc , tokens )
|
Check for Python 3 . 6 async comprehension .
|
7,840
|
def f_string_check ( self , original , loc , tokens ) : return self . check_py ( "36" , "format string" , original , loc , tokens )
|
Handle Python 3 . 6 format strings .
|
7,841
|
def parse_file ( self , inputstring , addhash = True ) : if addhash : use_hash = self . genhash ( False , inputstring ) else : use_hash = None return self . parse ( inputstring , self . file_parser , { "nl_at_eof_check" : True } , { "header" : "file" , "use_hash" : use_hash } )
|
Parse file code .
|
7,842
|
def parse_package ( self , inputstring , addhash = True ) : if addhash : use_hash = self . genhash ( True , inputstring ) else : use_hash = None return self . parse ( inputstring , self . file_parser , { "nl_at_eof_check" : True } , { "header" : "package" , "use_hash" : use_hash } )
|
Parse package code .
|
7,843
|
def parse_debug ( self , inputstring ) : return self . parse ( inputstring , self . file_parser , { "strip" : True } , { "header" : "none" , "initial" : "none" , "final_endline" : False } )
|
Parse debug code .
|
7,844
|
def get_match_names ( match ) : names = [ ] if "paren" in match : ( match , ) = match names += get_match_names ( match ) elif "var" in match : ( setvar , ) = match if setvar != wildcard : names . append ( setvar ) elif "trailer" in match : match , trailers = match [ 0 ] , match [ 1 : ] for i in range ( 0 , len ( trailers ) , 2 ) : op , arg = trailers [ i ] , trailers [ i + 1 ] if op == "as" : names . append ( arg ) names += get_match_names ( match ) return names
|
Gets keyword names for the given match .
|
7,845
|
def duplicate ( self ) : other = Matcher ( self . loc , self . check_var , self . checkdefs , self . names , self . var_index ) other . insert_check ( 0 , "not " + self . check_var ) self . others . append ( other ) return other
|
Duplicates the matcher to others .
|
7,846
|
def get_checks ( self , position = None ) : if position is None : position = self . position return self . checkdefs [ position ] [ 0 ]
|
Gets the checks at the position .
|
7,847
|
def set_checks ( self , checks , position = None ) : if position is None : position = self . position self . checkdefs [ position ] [ 0 ] = checks
|
Sets the checks at the position .
|
7,848
|
def get_defs ( self , position = None ) : if position is None : position = self . position return self . checkdefs [ position ] [ 1 ]
|
Gets the defs at the position .
|
7,849
|
def set_defs ( self , defs , position = None ) : if position is None : position = self . position self . checkdefs [ position ] [ 1 ] = defs
|
Sets the defs at the position .
|
7,850
|
def add_check ( self , check_item ) : self . checks . append ( check_item ) for other in self . others : other . add_check ( check_item )
|
Adds a check universally .
|
7,851
|
def add_def ( self , def_item ) : self . defs . append ( def_item ) for other in self . others : other . add_def ( def_item )
|
Adds a def universally .
|
7,852
|
def insert_check ( self , index , check_item ) : self . checks . insert ( index , check_item ) for other in self . others : other . insert_check ( index , check_item )
|
Inserts a check universally .
|
7,853
|
def insert_def ( self , index , def_item ) : self . defs . insert ( index , def_item ) for other in self . others : other . insert_def ( index , def_item )
|
Inserts a def universally .
|
7,854
|
def set_position ( self , position ) : if position < 0 : position += len ( self . checkdefs ) while position >= len ( self . checkdefs ) : self . checkdefs . append ( ( [ ] , [ ] ) ) self . position = position
|
Sets the if - statement position .
|
7,855
|
def only_self ( self ) : others , self . others = self . others , [ ] try : yield finally : self . others = others + self . others
|
Only match in self not others .
|
7,856
|
def get_temp_var ( self ) : tempvar = match_temp_var + "_" + str ( self . var_index ) self . var_index += 1 return tempvar
|
Gets the next match_temp_var .
|
7,857
|
def match_all_in ( self , matches , item ) : for i , match in enumerate ( matches ) : self . match ( match , item + "[" + str ( i ) + "]" )
|
Matches all matches to elements of item .
|
7,858
|
def match_function ( self , args , kwargs , match_args = ( ) , star_arg = None , kwd_args = ( ) , dubstar_arg = None ) : self . match_in_args_kwargs ( match_args , args , kwargs , allow_star_args = star_arg is not None ) if star_arg is not None : self . match ( star_arg , args + "[" + str ( len ( match_args ) ) + ":]" ) self . match_in_kwargs ( kwd_args , kwargs ) with self . down_a_level ( ) : if dubstar_arg is None : self . add_check ( "not " + kwargs ) else : self . match ( dubstar_arg , kwargs )
|
Matches a pattern - matching function .
|
7,859
|
def match_in_kwargs ( self , match_args , kwargs ) : for match , default in match_args : names = get_match_names ( match ) if names : tempvar = self . get_temp_var ( ) self . add_def ( tempvar + " = " + "" . join ( kwargs + '.pop("' + name + '") if "' + name + '" in ' + kwargs + " else " for name in names ) + default , ) with self . down_a_level ( ) : self . match ( match , tempvar ) else : raise CoconutDeferredSyntaxError ( "keyword-only pattern-matching function arguments must have names" , self . loc )
|
Matches against kwargs .
|
7,860
|
def match_dict ( self , tokens , item ) : if len ( tokens ) == 1 : matches , rest = tokens [ 0 ] , None else : matches , rest = tokens self . add_check ( "_coconut.isinstance(" + item + ", _coconut.abc.Mapping)" ) if rest is None : self . add_check ( "_coconut.len(" + item + ") == " + str ( len ( matches ) ) ) if matches : self . use_sentinel = True for k , v in matches : key_var = self . get_temp_var ( ) self . add_def ( key_var + " = " + item + ".get(" + k + ", " + sentinel_var + ")" ) with self . down_a_level ( ) : self . add_check ( key_var + " is not " + sentinel_var ) self . match ( v , key_var ) if rest is not None and rest != wildcard : match_keys = [ k for k , v in matches ] with self . down_a_level ( ) : self . add_def ( rest + " = dict((k, v) for k, v in " + item + ".items() if k not in set((" + ", " . join ( match_keys ) + ( "," if len ( match_keys ) == 1 else "" ) + ")))" , )
|
Matches a dictionary .
|
7,861
|
def assign_to_series ( self , name , series_type , item ) : if series_type == "(" : self . add_def ( name + " = _coconut.tuple(" + item + ")" ) elif series_type == "[" : self . add_def ( name + " = _coconut.list(" + item + ")" ) else : raise CoconutInternalException ( "invalid series match type" , series_type )
|
Assign name to item converted to the given series_type .
|
7,862
|
def match_sequence ( self , tokens , item ) : tail = None if len ( tokens ) == 2 : series_type , matches = tokens else : series_type , matches , tail = tokens self . add_check ( "_coconut.isinstance(" + item + ", _coconut.abc.Sequence)" ) if tail is None : self . add_check ( "_coconut.len(" + item + ") == " + str ( len ( matches ) ) ) else : self . add_check ( "_coconut.len(" + item + ") >= " + str ( len ( matches ) ) ) if tail != wildcard : if len ( matches ) > 0 : splice = "[" + str ( len ( matches ) ) + ":]" else : splice = "" self . assign_to_series ( tail , series_type , item + splice ) self . match_all_in ( matches , item )
|
Matches a sequence .
|
7,863
|
def match_iterator ( self , tokens , item ) : tail = None if len ( tokens ) == 2 : _ , matches = tokens else : _ , matches , tail = tokens self . add_check ( "_coconut.isinstance(" + item + ", _coconut.abc.Iterable)" ) if tail is None : itervar = self . get_temp_var ( ) self . add_def ( itervar + " = _coconut.tuple(" + item + ")" ) elif matches : itervar = self . get_temp_var ( ) if tail == wildcard : tail = item else : self . add_def ( tail + " = _coconut.iter(" + item + ")" ) self . add_def ( itervar + " = _coconut.tuple(_coconut_igetitem(" + tail + ", _coconut.slice(None, " + str ( len ( matches ) ) + ")))" ) else : itervar = None if tail != wildcard : self . add_def ( tail + " = " + item ) if itervar is not None : with self . down_a_level ( ) : self . add_check ( "_coconut.len(" + itervar + ") == " + str ( len ( matches ) ) ) self . match_all_in ( matches , itervar )
|
Matches a lazy list or a chain .
|
7,864
|
def match_star(self, tokens, item):
    """Matches starred assignment (head patterns, a starred middle, last patterns).

    tokens is one of: (middle,), (middle, last_matches), (head_matches, middle),
    or (head_matches, middle, last_matches) — distinguished by length and by
    whether tokens[0] is a string (the middle name) or a list of patterns.
    """
    head_matches, last_matches = None, None
    if len(tokens) == 1:
        middle = tokens[0]
    elif len(tokens) == 2:
        if isinstance(tokens[0], str):
            middle, last_matches = tokens
        else:
            head_matches, middle = tokens
    else:
        head_matches, middle, last_matches = tokens
    self.add_check("_coconut.isinstance(" + item + ", _coconut.abc.Iterable)")
    if head_matches is None and last_matches is None:
        # only a starred name: bind the whole iterable as a list
        if middle != wildcard:
            self.add_def(middle + " = _coconut.list(" + item + ")")
    else:
        # materialize once so the iterable can be sliced from both ends
        itervar = self.get_temp_var()
        self.add_def(itervar + " = _coconut.list(" + item + ")")
        with self.down_a_level():
            req_length = (len(head_matches) if head_matches is not None else 0) + (len(last_matches) if last_matches is not None else 0)
            self.add_check("_coconut.len(" + itervar + ") >= " + str(req_length))
            if middle != wildcard:
                # middle gets whatever lies between the head and last patterns
                head_splice = str(len(head_matches)) if head_matches is not None else ""
                last_splice = "-" + str(len(last_matches)) if last_matches is not None else ""
                self.add_def(middle + " = " + itervar + "[" + head_splice + ":" + last_splice + "]")
            if head_matches is not None:
                self.match_all_in(head_matches, itervar)
            if last_matches is not None:
                # match trailing patterns by negative index, from the end inward
                for x in range(1, len(last_matches) + 1):
                    self.match(last_matches[-x], itervar + "[-" + str(x) + "]")
|
Matches starred assignment .
|
7,865
|
def match_rsequence(self, tokens, item):
    """Matches a reverse sequence: a front variable followed by trailing patterns."""
    front, series_type, matches = tokens
    self.add_check("_coconut.isinstance(" + item + ", _coconut.abc.Sequence)")
    self.add_check("_coconut.len(" + item + ") >= " + str(len(matches)))
    if front != wildcard:
        # bind everything before the trailing patterns to front
        if len(matches):
            splice = "[:" + str(-len(matches)) + "]"
        else:
            splice = ""
        self.assign_to_series(front, series_type, item + splice)
    # trailing patterns are matched by negative index from the end
    for i, match in enumerate(matches):
        self.match(match, item + "[" + str(i - len(matches)) + "]")
|
Matches a reverse sequence .
|
7,866
|
def match_msequence(self, tokens, item):
    """Matches a middle sequence: head patterns, a middle variable, last patterns."""
    series_type, head_matches, middle, _, last_matches = tokens
    self.add_check("_coconut.isinstance(" + item + ", _coconut.abc.Sequence)")
    self.add_check("_coconut.len(" + item + ") >= " + str(len(head_matches) + len(last_matches)))
    if middle != wildcard:
        # slice out whatever lies between the head and last patterns
        if len(head_matches) and len(last_matches):
            splice = "[" + str(len(head_matches)) + ":" + str(-len(last_matches)) + "]"
        elif len(head_matches):
            splice = "[" + str(len(head_matches)) + ":]"
        elif len(last_matches):
            splice = "[:" + str(-len(last_matches)) + "]"
        else:
            splice = ""
        self.assign_to_series(middle, series_type, item + splice)
    self.match_all_in(head_matches, item)
    # last patterns are matched by negative index from the end
    for i, match in enumerate(last_matches):
        self.match(match, item + "[" + str(i - len(last_matches)) + "]")
|
Matches a middle sequence .
|
7,867
|
def match_string(self, tokens, item):
    """Match a prefix-string pattern (prefix + name) by delegating to match_mstring."""
    prefix, name = tokens
    is_bytes = prefix.startswith("b")
    return self.match_mstring((prefix, name, None), item, use_bytes=is_bytes)
|
Match prefix string .
|
7,868
|
def match_rstring(self, tokens, item):
    """Match a suffix-string pattern (name + suffix) by delegating to match_mstring."""
    name, suffix = tokens
    is_bytes = suffix.startswith("b")
    return self.match_mstring((None, name, suffix), item, use_bytes=is_bytes)
|
Match suffix string .
|
7,869
|
def match_mstring(self, tokens, item, use_bytes=None):
    """Match prefix and/or suffix string patterns, binding the middle to name.

    tokens is (prefix, name, suffix) where prefix/suffix may be None.
    use_bytes selects bytes vs str matching; when None it is derived from
    the b-prefixes of the literals (mixing str and bytes is an error).
    """
    prefix, name, suffix = tokens
    if use_bytes is None:
        if prefix.startswith("b") or suffix.startswith("b"):
            if prefix.startswith("b") and suffix.startswith("b"):
                use_bytes = True
            else:
                # one side bytes, the other str: reject the pattern
                raise CoconutDeferredSyntaxError("string literals and byte literals cannot be added in patterns", self.loc)
    if use_bytes:
        self.add_check("_coconut.isinstance(" + item + ", _coconut.bytes)")
    else:
        self.add_check("_coconut.isinstance(" + item + ", _coconut.str)")
    if prefix is not None:
        self.add_check(item + ".startswith(" + prefix + ")")
    if suffix is not None:
        self.add_check(item + ".endswith(" + suffix + ")")
    if name != wildcard:
        # bind the part between the matched prefix and suffix
        self.add_def(
            name + " = " + item + "["
            + ("" if prefix is None else "_coconut.len(" + prefix + ")") + ":"
            + ("" if suffix is None else "-_coconut.len(" + suffix + ")") + "]",
        )
|
Match prefix and suffix string .
|
7,870
|
def match_const(self, tokens, item):
    """Match a constant, comparing by identity for the singleton constants."""
    match, = tokens
    comparison = " is " if match in const_vars else " == "
    self.add_check(item + comparison + match)
|
Matches a constant .
|
7,871
|
def match_var(self, tokens, item):
    """Bind item to a pattern variable, or check equality if the name repeats."""
    setvar, = tokens
    if setvar == wildcard:
        # wildcard binds nothing and checks nothing
        return
    if setvar in self.names:
        # repeated name: both occurrences must be equal
        self.add_check(self.names[setvar] + " == " + item)
    else:
        self.add_def(setvar + " = " + item)
        self.names[setvar] = item
|
Matches a variable .
|
7,872
|
def match_set(self, tokens, item):
    """Match a set pattern: correct size and containing every listed constant."""
    match, = tokens
    required = [
        "_coconut.isinstance(" + item + ", _coconut.abc.Set)",
        "_coconut.len(" + item + ") == " + str(len(match)),
    ]
    required.extend(const + " in " + item for const in match)
    for check in required:
        self.add_check(check)
|
Matches a set .
|
7,873
|
def match_data(self, tokens, item):
    """Match a data-type pattern, optionally with a trailing star pattern."""
    star = None
    if len(tokens) == 3:
        data_cls, args, star = tokens
    elif len(tokens) == 2:
        data_cls, args = tokens
    else:
        raise CoconutInternalException("invalid data match tokens", tokens)
    self.add_check("_coconut.isinstance(" + item + ", " + data_cls + ")")
    if star is None:
        # no star pattern: arity must match exactly
        self.add_check("_coconut.len(" + item + ") == " + str(len(args)))
    elif len(args):
        # star pattern: positional patterns only set a minimum arity
        self.add_check("_coconut.len(" + item + ") >= " + str(len(args)))
    self.match_all_in(args, item)
    if star is not None:
        self.match(star, item + "[" + str(len(args)) + ":]")
|
Matches a data type .
|
7,874
|
def match_paren(self, tokens, item):
    """Unwrap a parenthesized pattern and match its contents."""
    inner, = tokens
    return self.match(inner, item)
|
Matches a paren .
|
7,875
|
def match_trailer(self, tokens, item):
    """Matches typedefs (`is`) and as-patterns (`as`) attached to a base pattern.

    tokens is [match, op1, arg1, op2, arg2, ...] — an odd-length list of a
    base pattern followed by (operator, argument) pairs.
    """
    internal_assert(len(tokens) > 1 and len(tokens) % 2 == 1, "invalid trailer match tokens", tokens)
    match, trailers = tokens[0], tokens[1:]
    for i in range(0, len(trailers), 2):
        op, arg = trailers[i], trailers[i + 1]
        if op == "is":
            # typedef: require an isinstance check
            self.add_check("_coconut.isinstance(" + item + ", " + arg + ")")
        elif op == "as":
            # as-pattern: bind the whole item, or check equality on repeats
            if arg in self.names:
                self.add_check(self.names[arg] + " == " + item)
            elif arg != wildcard:
                self.add_def(arg + " = " + item)
                self.names[arg] = item
        else:
            raise CoconutInternalException("invalid trailer match operation", op)
    self.match(match, item)
|
Matches typedefs and as patterns .
|
7,876
|
def match_and(self, tokens, item):
    """Require every sub-pattern of an `and` pattern to match item."""
    for sub_pattern in tokens:
        self.match(sub_pattern, item)
|
Matches and .
|
7,877
|
def match_or(self, tokens, item):
    """Matches an `or` pattern by forking the matcher for each alternative."""
    # every alternative after the first gets its own duplicated matcher
    for x in range(1, len(tokens)):
        self.duplicate().match(tokens[x], item)
    # the first alternative is matched on this matcher alone
    with self.only_self():
        self.match(tokens[0], item)
|
Matches or .
|
7,878
|
def match(self, tokens, item):
    """Dispatch tokens to the first registered matcher whose flag is present."""
    for flag, get_handler in self.matchers.items():
        if flag not in tokens:
            continue
        handler = get_handler(self)
        return handler(tokens, item)
    raise CoconutInternalException("invalid pattern-matching tokens", tokens)
|
Performs pattern - matching processing .
|
7,879
|
def out(self):
    """Return the generated pattern-matching code for this matcher.

    Emits each level's checks as nested `if`s (tracked via `closes` so the
    indents can be closed), sets the check variable on success, then appends
    the code of any forked `or`-alternative matchers and the guard check.
    """
    out = ""
    if self.use_sentinel:
        out += sentinel_var + " = _coconut.object()\n"
    closes = 0
    for checks, defs in self.checkdefs:
        if checks:
            # each non-empty check level opens a nested if
            out += "if " + paren_join(checks, "and") + ":\n" + openindent
            closes += 1
        if defs:
            out += "\n".join(defs) + "\n"
    return out + (
        self.check_var + " = True\n"
        + closeindent * closes
        + "".join(other.out() for other in self.others)
        + (
            # guards can still veto a structurally successful match
            "if " + self.check_var + " and not (" + paren_join(self.guards, "and") + "):\n"
            + openindent + self.check_var + " = False\n" + closeindent
            if self.guards else ""
        )
    )
|
Return pattern - matching code .
|
7,880
|
def build(self, stmts=None, set_check_var=True, invert=False):
    """Construct code that performs the match, then runs stmts if it succeeded.

    set_check_var prepends a reset of the check variable; invert runs stmts
    on failure instead of success.
    """
    pieces = []
    if set_check_var:
        pieces.append(self.check_var + " = False\n")
    pieces.append(self.out())
    if stmts is not None:
        condition = ("not " if invert else "") + self.check_var
        pieces.append("if " + condition + ":" + "\n" + openindent + "".join(stmts) + closeindent)
    return "".join(pieces)
|
Construct code for performing the match then executing stmts .
|
7,881
|
def _indent ( code , by = 1 ) : return "" . join ( ( " " * by if line else "" ) + line for line in code . splitlines ( True ) )
|
Indents every nonempty line of the given code .
|
7,882
|
def clean(inputline, strip=True, rem_indents=True, encoding_errors="replace"):
    """Clean and strip a line for display.

    Removes internal indent markers, optionally strips whitespace, and
    round-trips through stdout's encoding so unencodable characters are
    handled per encoding_errors instead of crashing on print.
    """
    stdout_encoding = get_encoding(sys.stdout)
    inputline = str(inputline)
    if rem_indents:
        # drop the compiler's invisible indent/dedent marker characters
        inputline = inputline.replace(openindent, "").replace(closeindent, "")
    if strip:
        inputline = inputline.strip()
    return inputline.encode(stdout_encoding, encoding_errors).decode(stdout_encoding)
|
Clean and strip a line .
|
7,883
|
def displayable(inputstr, strip=True):
    """Make a string displayable with minimal information loss.

    Indent markers are kept and unencodable characters are backslash-escaped
    rather than replaced.
    """
    text = str(inputstr)
    return clean(text, strip, rem_indents=False, encoding_errors="backslashreplace")
|
Make a string displayable with minimal loss of information .
|
7,884
|
def internal_assert(condition, message=None, item=None, extra=None):
    """Raise CoconutInternalException if condition is falsy.

    If condition is callable it is only evaluated when DEVELOP is set;
    outside develop mode an unevaluated callable is truthy, so expensive
    checks are effectively skipped in release builds.
    """
    if DEVELOP and callable(condition):
        condition = condition()
    if not condition:
        if message is None:
            message = "assertion failed"
        if item is None:
            item = condition
        raise CoconutInternalException(message, item, extra)
|
Raise InternalException if condition is False . If condition is a function execute it on DEVELOP only .
|
7,885
|
def message(self, message, item, extra):
    """Build the exception message, appending the item's repr and any extra info."""
    parts = [message]
    if item is not None:
        parts.append(": " + ascii(item))
    if extra is not None:
        parts.append(" (" + str(extra) + ")")
    return "".join(parts)
|
Uses arguments to create the message .
|
7,886
|
def message(self, message, source, point, ln):
    """Create a SyntaxError-like message with source context and a caret.

    point is an index into source; it is rebased onto the stripped display
    line so the ^ marker lines up after leading whitespace is removed.
    """
    if message is None:
        message = "parsing failed"
    if ln is not None:
        message += " (line " + str(ln) + ")"
    if source:
        if point is None:
            message += "\n" + " " * taberrfmt + clean(source)
        else:
            # show only the offending line, left-stripped for display
            part = clean(source.splitlines()[lineno(point, source) - 1], False).lstrip()
            # rebase point from the full source onto the stripped line
            point -= len(source) - len(part)
            part = part.rstrip()
            message += "\n" + " " * taberrfmt + part
            if point > 0:
                # clamp the caret inside the displayed line
                if point >= len(part):
                    point = len(part) - 1
                message += "\n" + " " * (taberrfmt + point) + "^"
    return message
|
Creates a SyntaxError - like message .
|
7,887
|
def syntax_err ( self ) : args = self . args [ : 2 ] + ( None , None ) + self . args [ 4 : ] err = SyntaxError ( self . message ( * args ) ) err . offset = args [ 2 ] err . lineno = args [ 3 ] return err
|
Creates a SyntaxError .
|
7,888
|
def message(self, message, item, extra):
    """Create the internal-exception message, with a report request appended."""
    base = super(CoconutInternalException, self).message(message, item, extra)
    return base + " " + report_this_text
|
Creates the Coconut internal exception message .
|
7,889
|
def memoized_parse_block(code):
    """Memoized version of parse_block.

    Caches both successful parses and the exceptions they raised, so a
    failing input fails fast on repeat without re-parsing.
    """
    success, result = parse_block_memo.get(code, (None, None))
    if success is None:
        # cache miss: parse and record either the result or the error
        try:
            parsed = COMPILER.parse_block(code)
        except Exception as err:
            success, result = False, err
        else:
            success, result = True, parsed
        parse_block_memo[code] = (success, result)
    if success:
        return result
    else:
        # re-raise the cached parse failure
        raise result
|
Memoized version of parse_block .
|
7,890
|
def patched_nested_parse(self, *args, **kwargs):
    """Call the stored nested_parse with match_titles forced on."""
    patched_kwargs = dict(kwargs, match_titles=True)
    return self.stored_nested_parse(*args, **patched_kwargs)
|
Sets match_titles then calls stored_nested_parse .
|
7,891
|
def auto_code_block(self, *args, **kwargs):
    """Modified auto_code_block that temporarily patches nested_parse.

    Swaps in patched_nested_parse for the duration of the call and always
    restores the original afterwards, even on error.
    """
    self.stored_nested_parse = self.state_machine.state.nested_parse
    self.state_machine.state.nested_parse = self.patched_nested_parse
    try:
        return super(PatchedAutoStructify, self).auto_code_block(*args, **kwargs)
    finally:
        # restore the original nested_parse no matter what happened
        self.state_machine.state.nested_parse = self.stored_nested_parse
|
Modified auto_code_block that patches nested_parse .
|
7,892
|
def setup ( self , * args , ** kwargs ) : if self . comp is None : self . comp = Compiler ( * args , ** kwargs ) else : self . comp . setup ( * args , ** kwargs )
|
Set parameters for the compiler .
|
7,893
|
def exit_on_error(self):
    """Exit the process if a nonzero exit code has been registered.

    Shows the accumulated error message once, kills any worker processes
    if jobs are in use, then calls sys.exit with the recorded code.
    """
    if self.exit_code:
        if self.errmsg is not None:
            logger.show("Exiting due to " + self.errmsg + ".")
            # clear the message so it is only reported once
            self.errmsg = None
        if self.using_jobs:
            kill_children()
        sys.exit(self.exit_code)
|
Exit if exit_code is abnormal .
|
7,894
|
def register_error(self, code=1, errmsg=None):
    """Record an error message (deduplicated) and raise the pending exit code.

    The exit code only ever increases; pass code=None to record a message
    without touching the exit code.
    """
    if errmsg is not None:
        existing = self.errmsg
        if existing is None:
            self.errmsg = errmsg
        elif errmsg not in existing:
            self.errmsg = existing + ", " + errmsg
    if code is not None and code > self.exit_code:
        self.exit_code = code
|
Update the exit code .
|
7,895
|
def handling_exceptions(self):
    """Context manager body performing proper top-level exception handling.

    SystemExit is turned into a registered exit code; CoconutExceptions are
    displayed; anything else (except KeyboardInterrupt) gets a traceback
    plus a bug-report request. All errors register the exception class name.
    """
    try:
        if self.using_jobs:
            # also translate broken-process-pool errors when using workers
            with handling_broken_process_pool():
                yield
        else:
            yield
    except SystemExit as err:
        self.register_error(err.code)
    except BaseException as err:
        if isinstance(err, CoconutException):
            logger.display_exc()
        elif not isinstance(err, KeyboardInterrupt):
            traceback.print_exc()
            printerr(report_this_text)
        self.register_error(errmsg=err.__class__.__name__)
|
Perform proper exception handling .
|
7,896
|
def compile_path(self, path, write=True, package=None, *args, **kwargs):
    """Compile the file or directory at path and return the destination paths.

    package defaults to False for a single file and True for a directory.
    """
    path = fixpath(path)
    if not isinstance(write, bool):
        write = fixpath(write)
    if os.path.isfile(path):
        resolved_package = False if package is None else package
        destpath = self.compile_file(path, write, resolved_package, *args, **kwargs)
        return [] if destpath is None else [destpath]
    if os.path.isdir(path):
        resolved_package = True if package is None else package
        return self.compile_folder(path, write, resolved_package, *args, **kwargs)
    raise CoconutException("could not find source path", path)
|
Compile a path and return the paths to the compiled files.
|
7,897
|
def compile_folder(self, directory, write=True, package=True, *args, **kwargs):
    """Compile a directory tree and return paths to the compiled files.

    write may be True/False or a destination directory; hidden directories
    (other than special ones) are pruned from the walk.
    """
    if not isinstance(write, bool) and os.path.isfile(write):
        raise CoconutException("destination path cannot point to a file when compiling a directory")
    filepaths = []
    for dirpath, dirnames, filenames in os.walk(directory):
        if isinstance(write, bool):
            writedir = write
        else:
            # mirror the source layout under the destination directory
            writedir = os.path.join(write, os.path.relpath(dirpath, directory))
        for filename in filenames:
            if os.path.splitext(filename)[1] in code_exts:
                # per-file error handling so one bad file doesn't stop the walk
                with self.handling_exceptions():
                    destpath = self.compile_file(os.path.join(dirpath, filename), writedir, package, *args, **kwargs)
                    if destpath is not None:
                        filepaths.append(destpath)
        for name in dirnames[:]:
            if not is_special_dir(name) and name.startswith("."):
                if logger.verbose:
                    logger.show_tabulated("Skipped directory", name, "(explicitly pass as source to override).")
                # in-place removal prunes the directory from os.walk
                dirnames.remove(name)
    return filepaths
|
Compile a directory and return the paths to the compiled files.
|
7,898
|
def compile_file(self, filepath, write=True, package=False, *args, **kwargs):
    """Compile a single file and return the compiled file's path (or None).

    write may be False (no output), True (write next to the source), a file
    path (it has an extension), or a directory.
    """
    set_ext = False
    if write is False:
        destpath = None
    elif write is True:
        destpath = filepath
        set_ext = True
    elif os.path.splitext(write)[1]:
        # write has an extension, so it names the destination file directly
        destpath = write
    else:
        # write is a directory: keep the source file's base name
        destpath = os.path.join(write, os.path.basename(filepath))
        set_ext = True
    if set_ext:
        # strip the source extension, then keep any inner extension
        # (e.g. foo.coco -> foo + comp_ext, foo.py.coco -> foo.py)
        base, ext = os.path.splitext(os.path.splitext(destpath)[0])
        if not ext:
            ext = comp_ext
        destpath = fixpath(base + ext)
    if filepath == destpath:
        raise CoconutException("cannot compile " + showpath(filepath) + " to itself", extra="incorrect file extension")
    self.compile(filepath, destpath, package, *args, **kwargs)
    return destpath
|
Compile a file and return the compiled file's path.
|
7,899
|
def compile(self, codepath, destpath=None, package=False, run=False, force=False, show_unchanged=True):
    """Compile a source Coconut file to a destination Python file.

    Skips recompilation when the destination already contains a matching
    hash (unless force). Actual compilation is submitted as a job; the
    callback writes/shows/runs the compiled output when the job finishes.
    """
    with openfile(codepath, "r") as opened:
        code = readfile(opened)
    if destpath is not None:
        # ensure the destination directory (and package files) exist
        destdir = os.path.dirname(destpath)
        if not os.path.exists(destdir):
            os.makedirs(destdir)
        if package is True:
            self.create_package(destdir)
    foundhash = None if force else self.has_hash_of(destpath, code, package)
    if foundhash:
        # up-to-date: optionally report, show the existing output, and run it
        if show_unchanged:
            logger.show_tabulated("Left unchanged", showpath(destpath), "(pass --force to override).")
        if self.show:
            print(foundhash)
        if run:
            self.execute_file(destpath)
    else:
        logger.show_tabulated("Compiling", showpath(codepath), "...")
        if package is True:
            compile_method = "parse_package"
        elif package is False:
            compile_method = "parse_file"
        else:
            raise CoconutInternalException("invalid value for package", package)

        def callback(compiled):
            # runs when the (possibly parallel) compile job completes
            if destpath is None:
                logger.show_tabulated("Compiled", showpath(codepath), "without writing to file.")
            else:
                with openfile(destpath, "w") as opened:
                    writefile(opened, compiled)
                logger.show_tabulated("Compiled to", showpath(destpath), ".")
            if self.show:
                print(compiled)
            if run:
                if destpath is None:
                    self.execute(compiled, path=codepath, allow_show=False)
                else:
                    self.execute_file(destpath)

        self.submit_comp_job(codepath, callback, compile_method, code)
|
Compile a source Coconut file to a destination Python file .
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.