idx
int64 0
63k
| question
stringlengths 61
4.03k
| target
stringlengths 6
1.23k
|
|---|---|---|
7,900
|
def running_jobs(self, exit_on_error=True):
    """Initialize multiprocessing.

    Context manager: while active, self.executor is a ProcessPoolExecutor
    when jobs are in use, and is reset to None on exit.
    """
    with self.handling_exceptions():
        if self.using_jobs:
            # imported locally so multiprocessing support stays optional
            from concurrent.futures import ProcessPoolExecutor
            try:
                with ProcessPoolExecutor(self.jobs) as self.executor:
                    yield
            finally:
                # never leave a dead executor behind
                self.executor = None
        else:
            yield
    if exit_on_error:
        self.exit_on_error()
|
Initialize multiprocessing .
|
7,901
|
def create_package(self, dirpath):
    """Set up a package directory by writing its __coconut__.py header file."""
    dirpath = fixpath(dirpath)
    filepath = os.path.join(dirpath, "__coconut__.py")
    with openfile(filepath, "w") as opened:
        writefile(opened, self.comp.getheader("__coconut__"))
|
Set up a package directory .
|
7,902
|
def has_hash_of(self, destpath, code, package):
    """Determine if a file has the hash of the code.

    Returns the previously compiled text if destpath exists and its stored
    hash matches the hash of (package, code), otherwise None.
    """
    if destpath is not None and os.path.isfile(destpath):
        with openfile(destpath, "r") as opened:
            compiled = readfile(opened)
        hashash = gethash(compiled)
        if hashash is not None and hashash == self.comp.genhash(package, code):
            return compiled
    return None
|
Determine if a file has the hash of the code .
|
7,903
|
def start_running(self):
    """Start running the Runner."""
    self.comp.warm_up()
    self.check_runner()
    self.running = True
|
Start running the Runner .
|
7,904
|
def start_prompt(self):
    """Start the interpreter read-compile-execute loop."""
    logger.show("Coconut Interpreter:")
    logger.show("(type 'exit()' or press Ctrl-D to end)")
    self.start_running()
    while self.running:
        try:
            code = self.get_input()
            if code:
                compiled = self.handle_input(code)
                if compiled:
                    # use_eval=None means try eval first, then fall back to exec
                    self.execute(compiled, use_eval=None)
        except KeyboardInterrupt:
            # Ctrl-C aborts the current input but keeps the prompt alive
            printerr("\nKeyboardInterrupt")
|
Start the interpreter .
|
7,905
|
def handle_input(self, code):
    """Compile Coconut interpreter input.

    Returns the compiled code, or None if input was aborted or failed to parse.
    """
    if not self.prompt.multiline:
        if not should_indent(code):
            # a single non-indenting line may already be complete
            try:
                return self.comp.parse_block(code)
            except CoconutException:
                pass
        # keep reading continuation lines until a blank line or EOF
        while True:
            line = self.get_input(more=True)
            if line is None:
                return None
            elif line:
                code += "\n" + line
            else:
                break
    try:
        return self.comp.parse_block(code)
    except CoconutException:
        logger.display_exc()
    return None
|
Compile Coconut interpreter input .
|
7,906
|
def execute(self, compiled=None, path=None, use_eval=False, allow_show=True):
    """Execute compiled code, then run MyPy over what was executed."""
    self.check_runner()
    if compiled is not None:
        if allow_show and self.show:
            print(compiled)
        if path is not None:
            # exec can't handle coding declarations in file-sourced code
            compiled = rem_encoding(compiled)
        self.runner.run(compiled, use_eval=use_eval, path=path, all_errors_exit=(path is not None))
        self.run_mypy(code=self.runner.was_run_code())
|
Execute compiled code .
|
7,907
|
def check_runner(self):
    """Make sure there is a runner, creating one if necessary."""
    # ensure the current directory is importable from executed code
    if os.getcwd() not in sys.path:
        sys.path.append(os.getcwd())
    if self.runner is None:
        self.runner = Runner(self.comp, exit=self.exit_runner, store=self.mypy)
|
Make sure there is a runner .
|
7,908
|
def set_mypy_args(self, mypy_args=None):
    """Set MyPy arguments (None disables MyPy)."""
    if mypy_args is None:
        self.mypy_args = None
    else:
        self.mypy_errs = []
        self.mypy_args = list(mypy_args)
        # default --python-version to the nearest supported target version
        if not any(arg.startswith("--python-version") for arg in mypy_args):
            self.mypy_args += [
                "--python-version",
                ".".join(str(v) for v in get_target_info_len2(self.comp.target, mode="nearest")),
            ]
        if logger.verbose:
            for arg in verbose_mypy_args:
                if arg not in self.mypy_args:
                    self.mypy_args.append(arg)
        logger.log("MyPy args:", self.mypy_args)
|
Set MyPy arguments .
|
7,909
|
def run_mypy(self, paths=(), code=None):
    """Run MyPy with arguments over the given paths and/or code string."""
    if self.mypy:
        set_mypy_path(stub_dir)
        from coconut.command.mypy import mypy_run
        args = list(paths) + self.mypy_args
        if code is not None:
            args += ["-c", code]
        for line, is_err in mypy_run(args):
            # for code input, suppress errors already shown previously
            if code is None or line not in self.mypy_errs:
                printerr(line)
                if line not in self.mypy_errs:
                    self.mypy_errs.append(line)
            self.register_error(errmsg="MyPy error")
|
Run MyPy with arguments .
|
7,910
|
def start_jupyter(self, args):
    """Start Jupyter with the Coconut kernel, installing the kernel if needed."""
    install_func = partial(run_cmd, show_output=logger.verbose)
    # fall back to ipython if the jupyter command is unavailable
    try:
        install_func(["jupyter", "--version"])
    except CalledProcessError:
        jupyter = "ipython"
    else:
        jupyter = "jupyter"
    # install kernels if requested (no args) or if any kernel is missing
    do_install = not args
    if not do_install:
        kernel_list = run_cmd([jupyter, "kernelspec", "list"], show_output=False, raise_errs=False)
        do_install = any(ker not in kernel_list for ker in icoconut_kernel_names)
    if do_install:
        success = True
        for icoconut_kernel_dir in icoconut_kernel_dirs:
            install_args = [jupyter, "kernelspec", "install", icoconut_kernel_dir, "--replace"]
            try:
                install_func(install_args)
            except CalledProcessError:
                # retry as a per-user install before giving up
                user_install_args = install_args + ["--user"]
                try:
                    install_func(user_install_args)
                except CalledProcessError:
                    logger.warn("kernel install failed on command'", " ".join(install_args))
                    self.register_error(errmsg="Jupyter error")
                    success = False
        if success:
            logger.show_sig("Successfully installed Coconut Jupyter kernel.")
    if args:
        if args[0] == "console":
            # pick the kernel matching the Python major version if available
            ver = "2" if PY2 else "3"
            try:
                install_func(["python" + ver, "-m", "coconut.main", "--version"])
            except CalledProcessError:
                kernel_name = "coconut"
            else:
                kernel_name = "coconut" + ver
            run_args = [jupyter, "console", "--kernel", kernel_name] + args[1:]
        else:
            run_args = [jupyter] + args
        self.register_error(run_cmd(run_args, raise_errs=False), errmsg="Jupyter error")
|
Start Jupyter with the Coconut kernel .
|
7,911
|
def watch(self, source, write=True, package=None, run=False, force=False):
    """Watch a source and recompile it on change until KeyboardInterrupt."""
    from coconut.command.watch import Observer, RecompilationWatcher

    source = fixpath(source)

    logger.show()
    logger.show_tabulated("Watching", showpath(source), "(press Ctrl-C to end)...")

    def recompile(path):
        # recompile a single changed file, mirroring its relative location
        path = fixpath(path)
        if os.path.isfile(path) and os.path.splitext(path)[1] in code_exts:
            with self.handling_exceptions():
                if write is True or write is None:
                    writedir = write
                else:
                    # preserve the file's position relative to source in the output dir
                    dirpath = os.path.dirname(path)
                    writedir = os.path.join(write, os.path.relpath(dirpath, source))
                filepaths = self.compile_path(path, writedir, package, run, force, show_unchanged=False)
                self.run_mypy(filepaths)

    watcher = RecompilationWatcher(recompile)
    observer = Observer()
    observer.schedule(watcher, source, recursive=True)

    with self.running_jobs():
        observer.start()
        try:
            while True:
                time.sleep(watch_interval)
                watcher.keep_watching()
        except KeyboardInterrupt:
            logger.show_sig("Got KeyboardInterrupt; stopping watcher.")
        finally:
            observer.stop()
            observer.join()
|
Watch a source and recompile it on change.
|
7,912
|
def load_ipython_extension(ipython):
    """Loads Coconut as an IPython extension, registering the %coconut magic."""
    # push the Coconut built-ins into the IPython namespace
    from coconut import __coconut__
    newvars = {}
    for var, val in vars(__coconut__).items():
        if not var.startswith("__"):
            newvars[var] = val
    ipython.push(newvars)

    from coconut.convenience import cmd, parse, CoconutException
    from coconut.terminal import logger

    def magic(line, cell=None):
        """Line magic compiles the line; cell magic treats the line as CLI args."""
        try:
            if cell is None:
                code = line
            else:
                # first line is passed as command-line arguments, rest is code
                line = line.strip()
                if line:
                    cmd(line, interact=False)
                code = cell
            compiled = parse(code)
        except CoconutException:
            logger.display_exc()
        else:
            ipython.run_cell(compiled, shell_futures=False)
    ipython.register_magic_function(magic, "line_cell", "coconut")
|
Loads Coconut as an IPython extension .
|
7,913
|
def evaluate_tokens(tokens):
    """Evaluate the given tokens in the computation graph.

    Strings are returned as-is; ParseResults are rebuilt with every contained
    value evaluated; ComputationNodes are evaluated; lists/tuples are mapped.
    """
    if isinstance(tokens, str):
        return tokens
    elif isinstance(tokens, ParseResults):
        # reconstruct the ParseResults with all contained tokens evaluated
        toklist, name, asList, modal = tokens.__getnewargs__()
        new_toklist = [evaluate_tokens(toks) for toks in toklist]
        new_tokens = ParseResults(new_toklist, name, asList, modal)

        # rebuild the name -> occurrences mapping so it points at the
        # evaluated values rather than the unevaluated originals
        new_tokdict = {}
        for name, occurrences in tokens._ParseResults__tokdict.items():
            new_occurences = []
            for value, position in occurrences:
                if isinstance(value, ParseResults) and value._ParseResults__toklist == toklist:
                    # self-reference: point at the new results object
                    new_value = new_tokens
                else:
                    try:
                        new_value = new_toklist[toklist.index(value)]
                    except ValueError:
                        complain(
                            lambda: CoconutInternalException(
                                "inefficient reevaluation of tokens: {} not in {}".format(
                                    value,
                                    toklist,
                                ),
                            ),
                        )
                        new_value = evaluate_tokens(value)
                new_occurences.append(_ParseResultsWithOffset(new_value, position))
            # BUG FIX: previously stored `occurrences` here, which silently
            # discarded the evaluated values built above
            new_tokdict[name] = new_occurences
        new_tokens._ParseResults__accumNames.update(tokens._ParseResults__accumNames)
        new_tokens._ParseResults__tokdict.update(new_tokdict)
        return new_tokens
    elif isinstance(tokens, ComputationNode):
        return tokens.evaluate()
    elif isinstance(tokens, (list, tuple)):
        return [evaluate_tokens(inner_toks) for inner_toks in tokens]
    else:
        raise CoconutInternalException("invalid computation graph tokens", tokens)
|
Evaluate the given tokens in the computation graph .
|
7,914
|
def attach(item, action, greedy=False, ignore_no_tokens=None, ignore_one_token=None):
    """Set the parse action for the given item to create a node in the computation graph."""
    if use_computation_graph:
        # None means defer to attributes set on the action itself
        if ignore_no_tokens is None:
            ignore_no_tokens = getattr(action, "ignore_no_tokens", False)
        if ignore_one_token is None:
            ignore_one_token = getattr(action, "ignore_one_token", False)
        # only pass along non-default flags
        kwargs = {}
        if greedy:
            kwargs["greedy"] = greedy
        if ignore_no_tokens:
            kwargs["ignore_no_tokens"] = ignore_no_tokens
        if ignore_one_token:
            kwargs["ignore_one_token"] = ignore_one_token
        action = partial(ComputationNode, action, **kwargs)
    return add_action(item, action)
|
Set the parse action for the given item to create a node in the computation graph .
|
7,915
|
def unpack(tokens):
    """Evaluate and unpack the given computation graph."""
    logger.log_tag("unpack", tokens)
    if use_computation_graph:
        tokens = evaluate_tokens(tokens)
    # collapse a single-item ParseResults down to its sole element
    if isinstance(tokens, ParseResults) and len(tokens) == 1:
        tokens = tokens[0]
    return tokens
|
Evaluate and unpack the given computation graph .
|
7,916
|
def all_matches(grammar, text):
    """Find all matches for grammar in text, yielding (unpacked tokens, start, stop)."""
    for tokens, start, stop in grammar.parseWithTabs().scanString(text):
        yield unpack(tokens), start, stop
|
Find all matches for grammar in text .
|
7,917
|
def match_in(grammar, text):
    """Determine if there is a match for grammar in text."""
    # only need to know whether at least one match exists, so stop after one
    scanner = grammar.parseWithTabs().scanString(text)
    return next(scanner, None) is not None
|
Determine if there is a match for grammar in text .
|
7,918
|
def get_vers_for_target(target):
    """Gets a list of the Python version tuples supported by the given target."""
    target_info = get_target_info(target)
    if not target_info:
        # a universal target supports everything
        return py2_vers + py3_vers
    elif len(target_info) == 1:
        if target_info == (2,):
            return py2_vers
        elif target_info == (3,):
            return py3_vers
        else:
            raise CoconutInternalException("invalid target info", target_info)
    elif target_info == (3, 3):
        # 3.3 and 3.4 are treated as equivalent targets
        return [(3, 3), (3, 4)]
    else:
        return [target_info[:2]]
|
Gets a list of the versions supported by the given target .
|
7,919
|
def get_target_info_len2 ( target , mode = "lowest" ) : supported_vers = get_vers_for_target ( target ) if mode == "lowest" : return supported_vers [ 0 ] elif mode == "highest" : return supported_vers [ - 1 ] elif mode == "nearest" : if sys . version_info [ : 2 ] in supported_vers : return sys . version_info [ : 2 ] else : return supported_vers [ - 1 ] else : raise CoconutInternalException ( "unknown get_target_info_len2 mode" , mode )
|
Converts target into a length 2 Python version tuple .
|
7,920
|
def longest(*args):
    """Match the longest of the given grammar elements."""
    internal_assert(len(args) >= 2, "longest expects at least two args")
    matcher = args[0] + skip_whitespace
    for elem in args[1:]:
        # ^ is the longest-match alternation combinator
        matcher ^= elem + skip_whitespace
    return matcher
|
Match the longest of the given grammar elements .
|
7,921
|
def addskip(skips, skip):
    """Add a line skip to the skips, returning the (mutated) skips list."""
    if skip < 1:
        # line numbers start at 1, so anything lower is an internal error
        complain(CoconutInternalException("invalid skip of line " + str(skip)))
    else:
        skips.append(skip)
    return skips
|
Add a line skip to the skips .
|
7,922
|
def count_end(teststr, testchar):
    """Count instances of testchar at the end of teststr."""
    total = 0
    # walk backwards from the end until a non-matching character
    for ch in reversed(teststr):
        if ch != testchar:
            break
        total += 1
    return total
|
Count instances of testchar at end of teststr .
|
7,923
|
def maybeparens(lparen, item, rparen):
    """Wrap an item in optional parentheses, only applying them if necessary."""
    # | binds looser than +, so this is: item, or (lparen item rparen)
    return item | lparen.suppress() + item + rparen.suppress()
|
Wrap an item in optional parentheses only applying them if necessary .
|
7,924
|
def tokenlist(item, sep, suppress=True):
    """Create a grammar matching a sep-separated list of item with an optional trailing sep."""
    if suppress:
        sep = sep.suppress()
    return item + ZeroOrMore(sep + item) + Optional(sep)
|
Create a list of tokens matching the item .
|
7,925
|
def itemlist(item, sep, suppress_trailing=True):
    """Create a grammar matching a list of items separated by seps."""
    return condense(item + ZeroOrMore(addspace(sep + item)) + Optional(sep.suppress() if suppress_trailing else sep))
|
Create a list of items separated by seps.
|
7,926
|
def should_indent(code):
    """Determines whether the next line should be indented.

    True when the last (comment-stripped) line ends in a colon or a
    line continuation, or leaves parentheses open.
    """
    last = rem_comment(code.splitlines()[-1])
    return last.endswith(":") or last.endswith("\\") or paren_change(last) < 0
|
Determines whether the next line should be indented .
|
7,927
|
def split_leading_comment(inputstring):
    """Split into leading comment and rest.

    Returns (comment including its trailing newline, rest); the comment
    is empty when the input does not start with one.
    """
    if not inputstring.startswith("#"):
        return "", inputstring
    # assumes a newline terminates the comment - TODO confirm callers guarantee one
    comment, rest = inputstring.split("\n", 1)
    return comment + "\n", rest
|
Split into leading comment and rest .
|
7,928
|
def split_leading_trailing_indent(line, max_indents=None):
    """Split leading and trailing indent, returning (leading, line, trailing)."""
    leading_indent, line = split_leading_indent(line, max_indents)
    line, trailing_indent = split_trailing_indent(line, max_indents)
    return leading_indent, line, trailing_indent
|
Split leading and trailing indent .
|
7,929
|
def collapse_indents(indentation):
    """Removes all openindent-closeindent pairs, keeping only the net indent change."""
    change_in_level = ind_change(indentation)
    if change_in_level == 0:
        indents = ""
    elif change_in_level < 0:
        indents = closeindent * (-change_in_level)
    else:
        indents = openindent * change_in_level
    # strip all indent markers, then append the net change
    return indentation.replace(openindent, "").replace(closeindent, "") + indents
|
Removes all openindent - closeindent pairs .
|
7,930
|
def transform(grammar, text):
    """Transform text by replacing matches to grammar.

    Returns the transformed text, or None if there were no matches.
    """
    results = []
    intervals = []
    for result, start, stop in all_matches(grammar, text):
        if result is not ignore_transform:
            internal_assert(isinstance(result, str), "got non-string transform result", result)
            # a match covering all of text short-circuits everything else
            if start == 0 and stop == len(text):
                return result
            results.append(result)
            intervals.append((start, stop))

    if not results:
        return None
    split_indices = [0]
    split_indices.extend(start for start, _ in intervals)
    split_indices.extend(stop for _, stop in intervals)
    split_indices.sort()
    split_indices.append(None)

    out = []
    for i in range(len(split_indices) - 1):
        if i % 2 == 0:
            # even chunks are untouched text between matches
            start, stop = split_indices[i], split_indices[i + 1]
            out.append(text[start:stop])
        else:
            # odd chunks are replaced by the corresponding transform result
            out.append(results[i // 2])
    # NOTE: these checks deliberately read i and stop as left by the loop
    if i // 2 < len(results) - 1:
        raise CoconutInternalException("unused transform results", results[i // 2 + 1:])
    if stop is not None:
        raise CoconutInternalException("failed to properly split text to be transformed")
    return "".join(out)
|
Transform text by replacing matches to grammar .
|
7,931
|
def disable_inside(item, *elems, **kwargs):
    """Prevent elems from matching inside of item.

    Generator: first yields the wrapped item, then a wrapped version of
    each elem. With _invert=True, elems only match inside of item instead.
    """
    _invert = kwargs.get("_invert", False)
    internal_assert(set(kwargs.keys()) <= set(("_invert",)), "excess keyword arguments passed to disable_inside")

    # nesting depth inside item; a one-element list so the closures can mutate it
    level = [0]

    @contextmanager
    def manage_item(self, instring, loc):
        level[0] += 1
        try:
            yield
        finally:
            level[0] -= 1

    yield Wrap(item, manage_item)

    @contextmanager
    def manage_elem(self, instring, loc):
        # allow the elem only at the permitted depth, else fail the parse
        if level[0] == 0 if not _invert else level[0] > 0:
            yield
        else:
            raise ParseException(instring, loc, self.errmsg, self)

    for elem in elems:
        yield Wrap(elem, manage_elem)
|
Prevent elems from matching inside of item .
|
7,932
|
def name(self):
    """Get the name of the action."""
    action_name = getattr(self.action, "__name__", None)
    if action_name is None:
        # e.g. partial objects have no __name__; show a printable repr instead
        return ascii(self.action)
    return action_name
|
Get the name of the action .
|
7,933
|
def evaluate(self):
    """Get the result of evaluating the computation graph at this node."""
    if DEVELOP:
        # each node should only ever be evaluated once
        internal_assert(not self.been_called, "inefficient reevaluation of action " + self.name + " with tokens", self.tokens)
        self.been_called = True
    evaluated_toks = evaluate_tokens(self.tokens)
    if logger.tracing:
        logger.log_trace(self.name, self.original, self.loc, evaluated_toks, self.tokens)
    try:
        return _trim_arity(self.action)(
            self.original,
            self.loc,
            evaluated_toks,
        )
    except CoconutException:
        raise
    except (Exception, AssertionError):
        # wrap unexpected errors so the failing action and tokens are visible
        traceback.print_exc()
        raise CoconutInternalException("error computing action " + self.name + " of evaluated tokens", evaluated_toks)
|
Get the result of evaluating the computation graph at this node .
|
7,934
|
def _combine(self, original, loc, tokens):
    """Implement the parse action for Combine, returning the single combined token."""
    combined_tokens = super(CombineNode, self).postParse(original, loc, tokens)
    internal_assert(len(combined_tokens) == 1, "Combine produced multiple tokens", combined_tokens)
    return combined_tokens[0]
|
Implement the parse action for Combine .
|
7,935
|
def postParse(self, original, loc, tokens):
    """Create a ComputationNode for Combine, deferring _combine until evaluation."""
    return ComputationNode(self._combine, original, loc, tokens, ignore_no_tokens=True, ignore_one_token=True)
|
Create a ComputationNode for Combine .
|
7,936
|
def parseImpl(self, instring, loc, *args, **kwargs):
    """Wrapper around ParseElementEnhance.parseImpl that runs inside self.wrapper."""
    with self.wrapper(self, instring, loc):
        return super(Wrap, self).parseImpl(instring, loc, *args, **kwargs)
|
Wrapper around ParseElementEnhance . parseImpl .
|
7,937
|
def gethash(compiled):
    """Retrieve a hash from a header, or None if no hash line is present."""
    lines = compiled.splitlines()
    # the hash lives on the third line of the header, after hash_prefix
    if len(lines) < 3 or not lines[2].startswith(hash_prefix):
        return None
    else:
        return lines[2][len(hash_prefix):]
|
Retrieve a hash from a header .
|
7,938
|
def minify(compiled):
    """Perform basic minifications.

    Strips comments and blank lines and compresses tabideal-space indents
    to single spaces. NOTE(review): the comment strip is naive - a "#"
    inside a string literal would be misinterpreted; confirm inputs.
    """
    compiled = compiled.strip()
    if compiled:
        out = []
        for line in compiled.splitlines():
            line = line.split("#", 1)[0].rstrip()
            if line:
                ind = 0
                while line.startswith(" "):
                    line = line[1:]
                    ind += 1
                internal_assert(ind % tabideal == 0, "invalid indentation in", line)
                out.append(" " * (ind // tabideal) + line)
        compiled = "\n".join(out) + "\n"
    return compiled
|
Perform basic minifications .
|
7,939
|
def get_template(template):
    """Read the given template file from the template directory."""
    with open(os.path.join(template_dir, template) + template_ext, "r") as template_file:
        return template_file.read()
|
Read the given template file .
|
7,940
|
def fixpath(path):
    """Uniformly format a path (expand ~, resolve symlinks, normalize)."""
    expanded = os.path.expanduser(path)
    resolved = os.path.realpath(expanded)
    return os.path.normpath(resolved)
|
Uniformly format a path .
|
7,941
|
def ver_str_to_tuple(ver_str):
    """Convert a version string into a version tuple.

    Numeric components become ints; anything else stays a string.
    """
    parts = []
    for piece in ver_str.split("."):
        try:
            parts.append(int(piece))
        except ValueError:
            # non-numeric component (e.g. "0b1") is kept verbatim
            parts.append(piece)
    return tuple(parts)
|
Convert a version string into a version tuple .
|
7,942
|
def mypy_run(args):
    """Runs mypy with given arguments, yielding (line, is_err) pairs."""
    logger.log_cmd(["mypy"] + args)
    try:
        stdout, stderr, exit_code = run(args)
    except BaseException:
        # mypy failing should never take down the caller
        traceback.print_exc()
    else:
        for line in stdout.splitlines():
            yield line, False
        for line in stderr.splitlines():
            yield line, True
|
Runs mypy with given arguments and shows the result .
|
7,943
|
def add_coconut_to_path():
    """Adds coconut to sys.path if it isn't there already."""
    try:
        import coconut
    except ImportError:
        # add the repository root (two levels up from this file) to sys.path
        sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
Adds coconut to sys.path if it isn't there already.
|
7,944
|
def writefile(openedfile, newcontents):
    """Set the contents of a file."""
    # rewind to the start and discard the old contents before writing
    openedfile.seek(0)
    openedfile.truncate()
    openedfile.write(newcontents)
|
Set the contents of a file .
|
7,945
|
def showpath(path):
    """Format a path for displaying (absolute when verbose, else short relative)."""
    if logger.verbose:
        return os.path.abspath(path)
    else:
        path = os.path.relpath(path)
        # drop a leading "./" for brevity
        if path.startswith(os.curdir + os.sep):
            path = path[len(os.curdir + os.sep):]
        return path
|
Format a path for displaying .
|
7,946
|
def rem_encoding(code):
    """Remove encoding declarations from compiled code so it can be passed to exec."""
    lines = code.splitlines()
    # PEP 263: a coding declaration may only appear on the first two lines
    kept = [
        line for line in lines[:2]
        if not (line.lstrip().startswith("#") and "coding" in line)
    ]
    kept.extend(lines[2:])
    return "\n".join(kept)
|
Remove encoding declarations from compiled code so it can be passed to exec .
|
7,947
|
def exec_func(code, glob_vars, loc_vars=None):
    """Wrapper around exec that only passes locals when they are given."""
    if loc_vars is None:
        # omitting locals makes globals double as the local namespace
        exec(code, glob_vars)
        return
    exec(code, glob_vars, loc_vars)
|
Wrapper around exec .
|
7,948
|
def interpret(code, in_vars):
    """Try to evaluate the given code, otherwise execute it."""
    try:
        result = eval(code, in_vars)
    except SyntaxError:
        # not an expression; fall through to exec below
        pass
    else:
        if result is not None:
            print(ascii(result))
        return  # don't also exec code that was successfully eval'd
    exec_func(code, in_vars)
|
Try to evaluate the given code otherwise execute it .
|
7,949
|
def kill_children():
    """Terminate all child processes."""
    try:
        import psutil
    except ImportError:
        logger.warn(
            "missing psutil; --jobs may not properly terminate",
            extra="run 'pip install coconut[jobs]' to fix",
        )
    else:
        master = psutil.Process()
        children = master.children(recursive=True)
        # keep re-listing since terminating children can spawn/reveal more
        while children:
            for child in children:
                try:
                    child.terminate()
                except psutil.NoSuchProcess:
                    pass  # process is already dead, so do nothing
            children = master.children(recursive=True)
|
Terminate all child processes .
|
7,950
|
def splitname(path):
    """Split a path into a directory, name, and extensions.

    Returns (dirpath, name, exts) where exts is everything after the first
    extension separator ("" if the filename has no extension - previously
    an extensionless filename raised ValueError).
    """
    dirpath, filename = os.path.split(path)
    # partition never raises, unlike split with fixed unpacking
    name, _, exts = filename.partition(os.extsep)
    return dirpath, name, exts
|
Split a path into a directory name and extensions .
|
7,951
|
def run_file(path):
    """Run a module from a path and return its variables."""
    if PY26:
        # runpy.run_path doesn't exist on Python 2.6; emulate it with imp
        dirpath, name, _ = splitname(path)
        found = imp.find_module(name, [dirpath])
        module = imp.load_module("__main__", *found)
        return vars(module)
    else:
        return runpy.run_path(path, run_name="__main__")
|
Run a module from a path and return its variables .
|
7,952
|
def call_output(cmd, stdin=None, encoding_errors="replace", **kwargs):
    """Run command and read output, returning (stdout chunks, stderr chunks, returncode)."""
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs)
    stdout, stderr, retcode = [], [], None
    while retcode is None:
        if stdin is not None:
            logger.log_prefix("<0 ", stdin.rstrip())
        raw_out, raw_err = p.communicate(stdin)
        # stdin must only be sent on the first communicate call
        stdin = None

        out = raw_out.decode(get_encoding(sys.stdout), encoding_errors) if raw_out else ""
        if out:
            logger.log_prefix("1> ", out.rstrip())
        stdout.append(out)

        err = raw_err.decode(get_encoding(sys.stderr), encoding_errors) if raw_err else ""
        if err:
            logger.log_prefix("2> ", err.rstrip())
        stderr.append(err)

        retcode = p.poll()
    return stdout, stderr, retcode
|
Run command and read output .
|
7,953
|
def run_cmd(cmd, show_output=True, raise_errs=True, **kwargs):
    """Run a console command.

    When show_output=True, prints output and returns the exit code;
    otherwise returns the command's combined output.
    """
    internal_assert(cmd and isinstance(cmd, list), "console commands must be passed as non-empty lists")
    try:
        from shutil import which
    except ImportError:
        pass  # Python 2 has no shutil.which; rely on the shell's lookup
    else:
        cmd[0] = which(cmd[0]) or cmd[0]
    logger.log_cmd(cmd)
    try:
        if show_output and raise_errs:
            return subprocess.check_call(cmd, **kwargs)
        elif show_output:
            return subprocess.call(cmd, **kwargs)
        else:
            stdout, stderr, retcode = call_output(cmd, **kwargs)
            output = "".join(stdout + stderr)
            if retcode and raise_errs:
                raise subprocess.CalledProcessError(retcode, cmd, output=output)
            return output
    except OSError:
        logger.log_exc()
        if raise_errs:
            raise subprocess.CalledProcessError(oserror_retcode, cmd)
        elif show_output:
            return oserror_retcode
        else:
            # everything already logged, so just return nothing
            return ""
|
Run a console command .
|
7,954
|
def set_mypy_path(mypy_path):
    """Prepend mypy_path to the MYPYPATH environment variable if not already there."""
    original = os.environ.get(mypy_path_env_var)
    if original is None:
        new_mypy_path = mypy_path
    elif not original.startswith(mypy_path):
        new_mypy_path = mypy_path + os.pathsep + original
    else:
        # already at the front; nothing to do
        new_mypy_path = None
    if new_mypy_path is not None:
        logger.log(mypy_path_env_var + ":", new_mypy_path)
        os.environ[mypy_path_env_var] = new_mypy_path
|
Prepend to MYPYPATH .
|
7,955
|
def stdin_readable():
    """Determine whether stdin has any data to read."""
    if not WINDOWS:
        # select on stdin with zero timeout works on POSIX only
        try:
            return bool(select([sys.stdin], [], [], 0)[0])
        except Exception:
            logger.log_exc()
    # fallback: a non-tty stdin is assumed to be piped data
    try:
        return not sys.stdin.isatty()
    except Exception:
        logger.log_exc()
    return False
|
Determine whether stdin has any data to read .
|
7,956
|
def set_recursion_limit(limit):
    """Set the Python recursion limit, rejecting values that are too low."""
    if limit < minimum_recursion_limit:
        raise CoconutException("--recursion-limit must be at least " + str(minimum_recursion_limit))
    sys.setrecursionlimit(limit)
|
Set the Python recursion limit .
|
7,957
|
def canparse(argparser, args):
    """Determines if argparser can parse args.

    Temporarily swaps argparser.error (which normally exits) for one that
    raises ValueError, restoring it afterwards.
    """
    old_error_method = argparser.error
    argparser.error = _raise_ValueError
    try:
        argparser.parse_args(args)
    except ValueError:
        return False
    else:
        return True
    finally:
        argparser.error = old_error_method
|
Determines if argparser can parse args .
|
7,958
|
def set_style(self, style):
    """Set pygments syntax highlighting style.

    "none" disables highlighting; "list" prints all styles and exits.
    """
    if style == "none":
        self.style = None
    elif prompt_toolkit is None:
        raise CoconutException("syntax highlighting is not supported on this Python version")
    elif style == "list":
        print("Coconut Styles: none, " + ", ".join(pygments.styles.get_all_styles()))
        sys.exit(0)
    elif style in pygments.styles.get_all_styles():
        self.style = style
    else:
        raise CoconutException("unrecognized pygments style", style, extra="use '--style list' to show all valid styles")
|
Set pygments syntax highlighting style .
|
7,959
|
def set_history_file(self, path):
    """Set path to history file; an empty path uses in-memory history only."""
    if path:
        self.history = prompt_toolkit.history.FileHistory(fixpath(path))
    else:
        self.history = prompt_toolkit.history.InMemoryHistory()
|
Set the path to the history file; an empty path produces no file (in-memory history).
|
7,960
|
def prompt(self, msg):
    """Get input using prompt_toolkit."""
    try:
        # prompt_toolkit v2 API
        prompt = prompt_toolkit.PromptSession(history=self.history).prompt
    except AttributeError:
        # fall back to the prompt_toolkit v1 API
        prompt = partial(prompt_toolkit.prompt, history=self.history)
    return prompt(
        msg,
        multiline=self.multiline,
        vi_mode=self.vi_mode,
        wrap_lines=self.wrap_lines,
        enable_history_search=self.history_search,
        lexer=PygmentsLexer(CoconutLexer),
        style=style_from_pygments_cls(
            pygments.styles.get_style_by_name(self.style),
        ),
    )
|
Get input using prompt_toolkit .
|
7,961
|
def build_vars(path=None):
    """Build initial vars for code execution."""
    init_vars = {
        "__name__": "__main__",
        "__package__": None,
        "reload": reload,
    }
    if path is not None:
        init_vars["__file__"] = fixpath(path)
    # pre-define all reserved vars as None
    for var in reserved_vars:
        init_vars[var] = None
    return init_vars
|
Build initial vars .
|
7,962
|
def fix_pickle(self):
    """Fix pickling of Coconut header objects.

    Replaces stored copies of Coconut built-ins with the canonical objects
    from the __coconut__ module so they pickle correctly.
    """
    from coconut import __coconut__
    for var in self.vars:
        if not var.startswith("__") and var in dir(__coconut__):
            self.vars[var] = getattr(__coconut__, var)
|
Fix pickling of Coconut header objects .
|
7,963
|
def handling_errors(self, all_errors_exit=False):
    """Handle execution errors (context manager)."""
    try:
        yield
    except SystemExit as err:
        self.exit(err.code)
    except BaseException:
        etype, value, tb = sys.exc_info()
        # skip the traceback frames added by this machinery itself
        for _ in range(num_added_tb_layers):
            if tb is None:
                break
            tb = tb.tb_next
        traceback.print_exception(etype, value, tb)
        if all_errors_exit:
            self.exit(1)
|
Handle execution errors .
|
7,964
|
def run(self, code, use_eval=None, path=None, all_errors_exit=False, store=True):
    """Execute Python code.

    use_eval: None tries eval then exec, True forces eval, False forces exec.
    """
    if use_eval is None:
        run_func = interpret
    elif use_eval is True:
        run_func = eval
    else:
        run_func = exec_func
    with self.handling_errors(all_errors_exit):
        if path is None:
            result = run_func(code, self.vars)
        else:
            use_vars = self.build_vars(path)
            try:
                result = run_func(code, use_vars)
            finally:
                # merge anything the code defined back into our namespace
                self.vars.update(use_vars)
        if store:
            self.store(code)
        return result
|
Execute Python code .
|
7,965
|
def run_file(self, path, all_errors_exit=True):
    """Execute a Python file, importing its variables into our namespace."""
    path = fixpath(path)
    with self.handling_errors(all_errors_exit):
        module_vars = run_file(path)
        self.vars.update(module_vars)
        # record an equivalent import so stored code reflects what happened
        self.store("from " + splitname(path)[1] + " import *")
|
Execute a Python file .
|
7,966
|
def was_run_code(self, get_all=True):
    """Get the code that was run.

    With get_all, all stored code is first collapsed into one entry,
    which is then returned; otherwise just the most recent entry.
    """
    if self.stored is None:
        return ""
    if get_all:
        # collapse everything run so far into a single cached chunk
        self.stored = ["\n".join(self.stored)]
    return self.stored[-1]
|
Get all the code that was run .
|
7,967
|
def get_reqs ( which = "main" ) : reqs = [ ] for req in all_reqs [ which ] : req_str = req + ">=" + ver_tuple_to_str ( min_versions [ req ] ) if req in version_strictly : req_str += ",<" + ver_tuple_to_str ( min_versions [ req ] [ : - 1 ] ) + "." + str ( min_versions [ req ] [ - 1 ] + 1 ) reqs . append ( req_str ) return reqs
|
Gets requirements from all_reqs with versions .
|
7,968
|
def uniqueify_all(init_reqs, *other_reqs):
    """Find the union of all the given requirements."""
    # set.update accepts any number of iterables at once
    union = set(init_reqs)
    union.update(*other_reqs)
    return list(union)
|
Find the union of all the given requirements .
|
7,969
|
def all_versions(req):
    """Get all versions of req from PyPI (performs a network request)."""
    import requests
    url = "https://pypi.python.org/pypi/" + req + "/json"
    return tuple(requests.get(url).json()["releases"].keys())
|
Get all versions of req from PyPI .
|
7,970
|
def newer(new_ver, old_ver, strict=False):
    """Determines if the first version tuple is newer than the second.

    Returns False for equal versions (or equal up to a trailing zero);
    when all compared parts are equal, returns not strict.
    """
    if old_ver == new_ver or old_ver + (0,) == new_ver:
        return False
    for n, o in zip(new_ver, old_ver):
        if not isinstance(n, int):
            # compare as strings when the new part isn't an int (e.g. "0b1")
            o = str(o)
        if o < n:
            return True
        elif o > n:
            return False
    return not strict
|
Determines if the first version tuple is newer than the second: True if newer, False if older or equal, and `not strict` if the difference is only after the compared version parts.
|
7,971
|
def print_new_versions(strict=False):
    """Prints new requirement versions available on PyPI."""
    new_updates = []
    same_updates = []
    for req in everything_in(all_reqs):
        new_versions = []
        same_versions = []
        for ver_str in all_versions(req):
            if newer(ver_str_to_tuple(ver_str), min_versions[req], strict=True):
                new_versions.append(ver_str)
            elif not strict and newer(ver_str_to_tuple(ver_str), min_versions[req]):
                # same version up to trailing parts; shown parenthesized
                same_versions.append(ver_str)
        update_str = req + ": " + ver_tuple_to_str(min_versions[req]) + " -> " + ", ".join(
            new_versions + ["(" + v + ")" for v in same_versions],
        )
        if new_versions:
            new_updates.append(update_str)
        elif same_versions:
            same_updates.append(update_str)
    print("\n".join(new_updates + same_updates))
|
Prints new requirement versions .
|
7,972
|
def format_error(err_type, err_value, err_trace=None):
    """Properly formats the specified error.

    With a traceback, returns the fully formatted exception; without one,
    returns a compact "ErrName: message" line.
    """
    if err_trace is not None:
        return "".join(traceback.format_exception(err_type, err_value, err_trace)).strip()
    err_text = "".join(traceback.format_exception_only(err_type, err_value)).strip()
    # partition leaves an empty message when there is no ": " separator
    err_name, _, err_msg = err_text.partition(": ")
    err_name = err_name.split(".")[-1]
    return err_name + ": " + err_msg
|
Properly formats the specified error .
|
7,973
|
def complain(error):
    """Raises in develop; warns in release.

    error may be an exception or a zero-arg callable that builds one
    (so building the message can be skipped in release).
    """
    if callable(error):
        if DEVELOP:
            raise error()
        # NOTE(review): a callable error is silently dropped in release -
        # appears intentional to avoid constructing the message; confirm
    elif DEVELOP:
        raise error
    else:
        logger.warn_err(error)
|
Raises in develop ; warns in release .
|
7,974
|
def copy_from(self, other):
    """Copy other's logger state onto self."""
    for attr in ("verbose", "quiet", "path", "name", "tracing"):
        setattr(self, attr, getattr(other, attr))
|
Copy other onto self .
|
7,975
|
def display ( self , messages , sig = "" , debug = False ) : full_message = "" . join ( sig + line for line in " " . join ( str ( msg ) for msg in messages ) . splitlines ( True ) ) if not full_message : full_message = sig . rstrip ( ) if debug : printerr ( full_message ) else : print ( full_message )
|
Prints an iterator of messages .
|
7,976
|
def get_error(self):
    """Properly formats the current error, or returns None if there is none."""
    exc_info = sys.exc_info()
    if exc_info[0] is None:
        return None
    else:
        err_type, err_value, err_trace = exc_info[0], exc_info[1], None
        # NOTE(review): sys.exc_info() is always a 3-tuple, so the
        # len(exc_info) > 2 check is always true; verbose alone decides
        if self.verbose and len(exc_info) > 2:
            err_trace = exc_info[2]
        return format_error(err_type, err_value, err_trace)
|
Properly formats the current error .
|
7,977
|
def in_path(self, new_path, old_path=None):
    """Temporarily enters a path (context manager).

    NOTE(review): on exit the path is set to the old_path ARGUMENT
    (default None), not the value self.path held before entry - confirm
    callers rely on that.
    """
    self.path = new_path
    try:
        yield
    finally:
        self.path = old_path
|
Temporarily enters a path .
|
7,978
|
def display_exc(self):
    """Print the current exception, annotated with self.path if one is set."""
    errmsg = self.get_error()
    if errmsg is None:
        return
    if self.path is not None:
        # prefix with the file path and indent every non-empty line under it
        indented = [
            " " * taberrfmt + line if line else line
            for line in errmsg.splitlines()
        ]
        errmsg = "\n".join(["in " + self.path + ":"] + indented)
    printerr(errmsg)
|
Properly prints an exception in the exception context .
|
7,979
|
def show_tabulated(self, begin, middle, end):
    """Show begin padded out to info_tabulation columns, followed by middle and end."""
    internal_assert(len(begin) < info_tabulation, "info message too long", begin)
    padding = " " * (info_tabulation - len(begin))
    self.show(begin + padding + middle + " " + end)
|
Shows a tabulated message .
|
7,980
|
def log_tag(self, tag, code, multiline=False):
    """If tracing, log code (or the result of calling it) under the given tag."""
    if not self.tracing:
        return
    if callable(code):
        # allow lazy construction of expensive log messages
        code = code()
    tagstr = "[" + str(tag) + "]"
    if multiline:
        printerr(tagstr + "\n" + displayable(code))
    else:
        printerr(tagstr, ascii(code))
|
Logs a tagged message if tracing .
|
7,981
|
def log_trace(self, tag, original, loc, tokens=None, extra=None):
    """Format and display a parse trace for the given tag if tracing is enabled.

    tokens may be parse results (displayed verbatim) or an Exception
    (its message is shown instead, with brace-delimited detail elided).
    """
    if self.tracing:
        tag, original, loc = displayable(tag), displayable(original), int(loc)
        # tags containing "{" are format templates, not real trace tags; skip them
        if "{" not in tag:
            out = ["[" + tag + "]"]
            add_line_col = True
            if tokens is not None:
                if isinstance(tokens, Exception):
                    msg = displayable(str(tokens))
                    if "{" in msg:
                        # collapse the brace-delimited portion of the message to "{...}"
                        head, middle = msg.split("{", 1)
                        middle, tail = middle.rsplit("}", 1)
                        msg = head + "{...}" + tail
                    out.append(msg)
                    # exception messages already carry location info
                    add_line_col = False
                elif len(tokens) == 1 and isinstance(tokens[0], str):
                    out.append(ascii(tokens[0]))
                else:
                    out.append(ascii(tokens))
            if add_line_col:
                out.append("(line:" + str(lineno(loc, original)) + ", col:" + str(col(loc, original)) + ")")
            if extra is not None:
                out.append("from " + ascii(extra))
            printerr(*out)
|
Formats and displays a trace if tracing .
|
7,982
|
def patch_logging(self):
    """Install a getLogger shim on the logging module if it lacks one."""
    if hasattr(logging, "getLogger"):
        # real logging support is present; nothing to patch
        return

    def getLogger(name=None):
        # hand out copies of this logger under the requested name
        new_logger = Logger(self)
        if name is not None:
            new_logger.name = name
        return new_logger

    logging.getLogger = getLogger
|
Patches built - in Python logging if necessary .
|
7,983
|
def pylog(self, *args, **kwargs):
    """Display all available logging information, including any active traceback."""
    current_trace = traceback.format_exc()
    printerr(self.name, args, kwargs, current_trace)
|
Display all available logging information .
|
7,984
|
def lenient_add_filter(self, *args, **kwargs):
    """Forward to the original add_filter, but drop the raiseonerror filter."""
    if not args:
        return
    if args[0] == "raiseonerror":
        # swallow the filter that would turn errors into raises
        return
    self.original_add_filter(*args, **kwargs)
|
Disables the raiseonerror filter .
|
7,985
|
def on_modified(self, event):
    """Recompile the modified file, but only the first time we see it."""
    path = event.src_path
    if path in self.saw:
        return
    self.saw.add(path)
    self.recompile(path)
|
Handle a file modified event .
|
7,986
|
def split_function_call(tokens, loc):
    """Split call arguments into positional, star, keyword, and double-star groups.

    Raises CoconutDeferredSyntaxError at loc when argument ordering is invalid.
    """
    pos_args, star_args, kwd_args, dubstar_args = [], [], [], []
    for arg in tokens:
        argstr = "".join(arg)
        if len(arg) == 1:
            # plain positional argument; must precede all other kinds
            if star_args or kwd_args or dubstar_args:
                raise CoconutDeferredSyntaxError("positional arguments must come first", loc)
            pos_args.append(argstr)
        elif len(arg) == 2:
            if arg[0] == "*":
                # iterable unpacking; must precede keyword arguments
                if kwd_args or dubstar_args:
                    raise CoconutDeferredSyntaxError("star unpacking must come before keyword arguments", loc)
                star_args.append(argstr)
            elif arg[0] == "**":
                dubstar_args.append(argstr)
            else:
                kwd_args.append(argstr)
        else:
            raise CoconutInternalException("invalid function call argument", arg)
    return pos_args, star_args, kwd_args, dubstar_args
|
Split into positional arguments and keyword arguments .
|
7,987
|
def get_infix_items(tokens, callback=infix_error):
    """Peel apart infix tokens into a final function and its argument list.

    Each extra (func, arg) pair folds the accumulated arguments into a
    single argument via the callback.
    """
    internal_assert(len(tokens) >= 3, "invalid infix tokens", tokens)
    arg1, func, arg2 = tokens[:3]
    remaining = tokens[3:]
    args = list(arg1) + list(arg2)
    while remaining:
        # collapse everything so far into one argument of the next call
        args = [callback([args, func, []])]
        func, newarg = remaining[:2]
        remaining = remaining[2:]
        args += list(newarg)
    return func, args
|
Perform infix token processing .
|
7,988
|
def comp_pipe_handle(loc, tokens):
    """Compile function composition pipes into a _coconut_base_compose call.

    All pipe operators in one chain must point the same direction;
    backwards chains are normalized into forwards order.
    """
    internal_assert(len(tokens) >= 3 and len(tokens) % 2 == 1, "invalid composition pipe tokens", tokens)
    funcs = [tokens[0]]
    stars = []
    direction = None
    # tokens alternate: func, op, func, op, func, ...
    for op, fn in zip(tokens[1::2], tokens[2::2]):
        new_direction, star = comp_pipe_info(op)
        if direction is None:
            direction = new_direction
        elif new_direction != direction:
            raise CoconutDeferredSyntaxError("cannot mix function composition pipe operators with different directions", loc)
        funcs.append(fn)
        stars.append(star)
    if direction == "backwards":
        funcs.reverse()
        stars.reverse()
    first_func = funcs.pop(0)
    composed = ", ".join("(%s, %s)" % (f, star) for f, star in zip(funcs, stars))
    return "_coconut_base_compose(" + first_func + ", " + composed + ")"
|
Process pipe function composition .
|
7,989
|
def none_coalesce_handle(tokens):
    """Compile the None-coalescing operator into nested conditional expressions."""
    if len(tokens) == 1:
        return tokens[0]
    rest = none_coalesce_handle(tokens[1:])
    first = tokens[0]
    if first.isalnum():
        # simple name or literal: safe to repeat directly
        return "({b} if {a} is None else {a})".format(a=first, b=rest)
    # arbitrary expression: bind it once via a lambda to avoid re-evaluation
    return "(lambda {x}: {b} if {x} is None else {x})({a})".format(
        x=none_coalesce_var,
        a=first,
        b=rest,
    )
|
Process the None - coalescing operator .
|
7,990
|
def attrgetter_atom_handle(loc, tokens):
    """Compile implicit attrgetter/methodcaller literals."""
    name, args = attrgetter_atom_split(tokens)
    if args is None:
        # bare attribute access: .attr
        return '_coconut.operator.attrgetter("' + name + '")'
    if "." in name:
        raise CoconutDeferredSyntaxError("cannot have attribute access in implicit methodcaller partial", loc)
    if args == "":
        # no-argument method call: .method()
        return '_coconut.operator.methodcaller("' + tokens[0] + '")'
    return '_coconut.operator.methodcaller("' + tokens[0] + '", ' + tokens[2] + ")"
|
Process attrgetter literals .
|
7,991
|
def lazy_list_handle(tokens):
    """Compile lazy list literals into a generator over argument thunks."""
    if not tokens:
        return "_coconut.iter(())"
    thunks = ", lambda: ".join(tokens)
    # trailing comma keeps a single-element tuple a tuple
    trailing = "," if len(tokens) == 1 else ""
    return "(%s() for %s in (lambda: %s%s))" % (func_var, func_var, thunks, trailing)
|
Process lazy lists .
|
7,992
|
def infix_handle(tokens):
    """Compile an infix call into a normal function call."""
    func, args = get_infix_items(tokens, callback=infix_handle)
    return "({func})({args})".format(func=func, args=", ".join(args))
|
Process infix calls .
|
7,993
|
def op_funcdef_handle(tokens):
    """Compile an infix-style function definition into a normal call form.

    Ensures the collected arguments are comma-separated and strips any
    trailing comma from the final argument.
    """
    func, base_args = get_infix_items(tokens)
    args = []
    for arg in base_args[:-1]:
        rstrip_arg = arg.rstrip()
        if not rstrip_arg.endswith(unwrapper):
            if not rstrip_arg.endswith(","):
                # separate from the next argument
                arg += ", "
            elif arg.endswith(","):
                # already comma-terminated; just add spacing
                arg += " "
        args.append(arg)
    last_arg = base_args[-1]
    if last_arg.rstrip().endswith(","):
        # strip only the trailing comma; the original rsplit(",")[0]
        # truncated at the FIRST comma, mangling any final argument
        # that itself contained commas
        last_arg = last_arg.rsplit(",", 1)[0]
    args.append(last_arg)
    return func + "(" + "".join(args) + ")"
|
Process infix defs .
|
7,994
|
def typedef_callable_handle(tokens):
    """Compile -> inside type annotations into typing.Callable subscripts."""
    if len(tokens) == 1:
        # no argument types given: Callable[..., ret]
        return '_coconut.typing.Callable[..., ' + tokens[0] + ']'
    if len(tokens) == 2:
        return '_coconut.typing.Callable[[' + tokens[0] + '], ' + tokens[1] + ']'
    raise CoconutInternalException("invalid Callable typedef tokens", tokens)
|
Process - > to Callable inside type annotations .
|
7,995
|
def math_funcdef_handle(tokens):
    """Compile assignment-style function definitions."""
    internal_assert(len(tokens) == 2, "invalid assignment function definition tokens", tokens)
    header, body = tokens
    # only insert a space when the body continues on the same line
    separator = "" if body.startswith("\n") else " "
    return header + separator + body
|
Process assignment function definition .
|
7,996
|
def decorator_handle(tokens):
    """Compile decorators, hoisting complex decorator expressions into variables."""
    defs = []
    decorates = []
    for i, tok in enumerate(tokens):
        if "simple" in tok and len(tok) == 1:
            # simple name: usable as a decorator directly
            decorates.append("@" + tok[0])
        elif "test" in tok and len(tok) == 1:
            # arbitrary expression: assign it to a variable first
            varname = decorator_var + "_" + str(i)
            defs.append(varname + " = " + tok[0])
            decorates.append("@" + varname)
        else:
            raise CoconutInternalException("invalid decorator tokens", tok)
    return "\n".join(defs + decorates) + "\n"
|
Process decorators .
|
7,997
|
def match_handle(loc, tokens):
    """Compile a match block into matcher code that binds and checks the item."""
    cond = None
    if len(tokens) == 4:
        matches, match_type, item, stmts = tokens
    elif len(tokens) == 5:
        matches, match_type, item, cond, stmts = tokens
    else:
        raise CoconutInternalException("invalid match statement tokens", tokens)
    # "not in" inverts the sense of the generated check
    if match_type == "in":
        invert = False
    elif match_type == "not in":
        invert = True
    else:
        raise CoconutInternalException("invalid match type", match_type)
    matcher = Matcher(loc, match_check_var)
    matcher.match(matches, match_to_var)
    if cond:
        matcher.add_guard(cond)
    # bind the item to a temporary, then run the compiled match against it
    assignment = match_to_var + " = " + item + "\n"
    return assignment + matcher.build(stmts, invert=invert)
|
Process match blocks .
|
7,998
|
def except_handle(tokens):
    """Compile except statements, parenthesizing exception lists."""
    if len(tokens) == 1:
        errs, asname = tokens[0], None
    elif len(tokens) == 2:
        errs, asname = tokens
    else:
        raise CoconutInternalException("invalid except tokens", tokens)
    # a named "list" token marks multiple exception types
    errs_part = "(" + errs + ")" if "list" in tokens else errs
    out = "except " + errs_part
    if asname is not None:
        out += " as " + asname
    return out
|
Process except statements .
|
7,999
|
def subscriptgroup_handle(tokens):
    """Compile subscript groups into slice objects where necessary."""
    internal_assert(0 < len(tokens) <= 3, "invalid slice args", tokens)
    # empty slots become None, matching Python slice semantics
    args = [arg if arg else "None" for arg in tokens]
    if len(args) == 1:
        return args[0]
    return "_coconut.slice(" + ", ".join(args) + ")"
|
Process subscriptgroups .
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.