idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
53,400
def unlock(self, request, *args, **kwargs):
    """Unlock the considered topic and redirect the user to the success URL."""
    self.object = self.get_object()
    # Resolve the redirect target before mutating the object, as the original does.
    redirect_to = self.get_success_url()
    self.object.status = Topic.TOPIC_UNLOCKED
    self.object.save()
    messages.success(self.request, self.success_message)
    return HttpResponseRedirect(redirect_to)
Unlocks the considered topic and redirects the user to the success URL .
53,401
def update_type(self, request, *args, **kwargs):
    """Update the type of the considered topic and redirect the user to the success URL."""
    self.object = self.get_object()
    redirect_to = self.get_success_url()
    # self.target_type is expected to be set on the view class.
    self.object.type = self.target_type
    self.object.save()
    messages.success(self.request, self.success_message)
    return HttpResponseRedirect(redirect_to)
Updates the type of the considered topic and redirects the user to the success URL .
53,402
def approve(self, request, *args, **kwargs):
    """Approve the considered post and redirect the user to the success URL."""
    self.object = self.get_object()
    redirect_to = self.get_success_url()
    self.object.approved = True
    self.object.save()
    messages.success(self.request, self.success_message)
    return HttpResponseRedirect(redirect_to)
Approves the considered post and redirects the user to the success URL .
53,403
def disapprove(self, request, *args, **kwargs):
    """Disapprove (delete) the considered post and redirect the user to the success URL."""
    self.object = self.get_object()
    redirect_to = self.get_success_url()
    # Disapproval is implemented as deletion of the post.
    self.object.delete()
    messages.success(self.request, self.success_message)
    return HttpResponseRedirect(redirect_to)
Disapproves the considered post and redirects the user to the success URL .
53,404
def poster(self):
    """Return the considered user, resolved from the URL kwargs (404 if missing)."""
    user_model = get_user_model()
    user_pk = self.kwargs[self.user_pk_url_kwarg]
    return get_object_or_404(user_model, pk=user_pk)
Returns the considered user .
53,405
def subscribe(self, request, *args, **kwargs):
    """Subscribe the requesting user to the considered topic."""
    self.object = self.get_object()
    self.object.subscribers.add(request.user)
    messages.success(self.request, self.success_message)
    return HttpResponseRedirect(self.get_success_url())
Performs the subscribe action .
53,406
def unsubscribe(self, request, *args, **kwargs):
    """Unsubscribe the requesting user from the considered topic."""
    self.object = self.get_object()
    self.object.subscribers.remove(request.user)
    messages.success(self.request, self.success_message)
    return HttpResponseRedirect(self.get_success_url())
Performs the unsubscribe action .
53,407
def update_topic_counter(sender, topic, user, request, response, **kwargs):
    """Increment the views counter of the given topic (signal receiver)."""
    # Database-side increment via F() avoids a read-modify-write race.
    manager = topic.__class__._default_manager
    manager.filter(id=topic.id).update(views_count=F('views_count') + 1)
Handles the update of the views counter associated with topics .
53,408
def get_topic(self):
    """Return the topic to consider, fetching and caching it on first access."""
    if not hasattr(self, 'topic'):
        # select_related('forum') avoids a second query for the topic's forum.
        self.topic = get_object_or_404(
            Topic.objects.select_related('forum').all(),
            pk=self.kwargs['pk'],
        )
    return self.topic
Returns the topic to consider .
53,409
def init_attachment_cache(self):
    """Initialize the attachment cache for the current view.

    A GET request starts from a clean slate: any previously cached
    attachments are dropped. Other methods restore cached files, merge in
    the newly uploaded ones, and store the result back into the cache.
    """
    if self.request.method == 'GET':
        attachments_cache.delete(self.get_attachments_cache_key(self.request))
        return
    cache_key = self.get_attachments_cache_key(self.request)
    restored_files = attachments_cache.get(cache_key)
    if restored_files:
        # Newly uploaded files take precedence over restored ones.
        restored_files.update(self.request.FILES)
        self.request._files = restored_files
    if self.request.FILES:
        attachments_cache.set(cache_key, self.request.FILES)
Initializes the attachment cache for the current view .
53,410
def get_post_form_kwargs(self):
    """Return the keyword arguments for instantiating the post form."""
    form_kwargs = {
        'user': self.request.user,
        'forum': self.get_forum(),
        'topic': self.get_topic(),
    }
    considered_post = self.get_post()
    if considered_post:
        # Editing an existing post: bind the form to it.
        form_kwargs['instance'] = considered_post
    if self.request.method in ('POST', 'PUT'):
        form_kwargs['data'] = self.request.POST
        form_kwargs['files'] = self.request.FILES
    return form_kwargs
Returns the keyword arguments for instantiating the post form .
53,411
def get_attachment_formset(self, formset_class):
    """Return an attachment formset instance, or None if files may not be attached."""
    can_attach = self.request.forum_permission_handler.can_attach_files(
        self.get_forum(),
        self.request.user,
    )
    if can_attach:
        return formset_class(**self.get_attachment_formset_kwargs())
Returns an instance of the attachment formset to be used in the view .
53,412
def get_attachment_formset_kwargs(self):
    """Return the keyword arguments for instantiating the attachment formset."""
    formset_kwargs = {'prefix': 'attachment'}
    if self.request.method in ('POST', 'PUT'):
        formset_kwargs['data'] = self.request.POST
        formset_kwargs['files'] = self.request.FILES
    else:
        # Display mode: seed the formset with the post's existing attachments.
        considered_post = self.get_post()
        formset_kwargs['queryset'] = Attachment.objects.filter(post=considered_post)
    return formset_kwargs
Returns the keyword arguments for instantiating the attachment formset .
53,413
def get_forum(self):
    """Return the considered forum, or None when no forum pk is in the URL."""
    forum_pk = self.kwargs.get(self.forum_pk_url_kwarg, None)
    if not forum_pk:
        return
    if not hasattr(self, '_forum'):
        # Cache the lookup so repeated calls hit the DB only once.
        self._forum = get_object_or_404(Forum, pk=forum_pk)
    return self._forum
Returns the considered forum .
53,414
def get_topic(self):
    """Return the considered topic if applicable, or None when no topic pk is given."""
    topic_pk = self.kwargs.get(self.topic_pk_url_kwarg, None)
    if not topic_pk:
        return
    if not hasattr(self, '_topic'):
        # Cache the lookup so repeated calls hit the DB only once.
        self._topic = get_object_or_404(Topic, pk=topic_pk)
    return self._topic
Returns the considered topic if applicable .
53,415
def get_post(self):
    """Return the considered post if applicable, or None when no post pk is given."""
    post_pk = self.kwargs.get(self.post_pk_url_kwarg, None)
    if not post_pk:
        return
    if not hasattr(self, '_forum_post'):
        # Cache the lookup so repeated calls hit the DB only once.
        self._forum_post = get_object_or_404(Post, pk=post_pk)
    return self._forum_post
Returns the considered post if applicable .
53,416
def get_poll_option_formset(self, formset_class):
    """Return a poll option formset instance, or None if polls may not be created."""
    allowed = self.request.forum_permission_handler.can_create_polls(
        self.get_forum(),
        self.request.user,
    )
    if allowed:
        return formset_class(**self.get_poll_option_formset_kwargs())
Returns an instance of the poll option formset to be used in the view .
53,417
def get_poll_option_formset_kwargs(self):
    """Return the keyword arguments for instantiating the poll option formset."""
    formset_kwargs = {'prefix': 'poll'}
    if self.request.method in ('POST', 'PUT'):
        formset_kwargs['data'] = self.request.POST
        formset_kwargs['files'] = self.request.FILES
    else:
        # Display mode: seed the formset with the topic's existing poll options.
        considered_topic = self.get_topic()
        formset_kwargs['queryset'] = TopicPollOption.objects.filter(
            poll__topic=considered_topic)
    return formset_kwargs
Returns the keyword arguments for instantiating the poll option formset .
53,418
def _remove_exts ( self , string ) : if string . lower ( ) . endswith ( ( '.png' , '.gif' , '.jpg' , '.bmp' , '.jpeg' , '.ppm' , '.datauri' ) ) : format = string [ string . rfind ( '.' ) + 1 : len ( string ) ] if format . lower ( ) == 'jpg' : format = 'jpeg' self . format = format string = string [ 0 : string . rfind ( '.' ) ] return string
Sets the string to create the Robohash
53,419
def _get_list_of_files(self, path):
    """Choose one file from each subdirectory of *path*, driven by the hash.

    Goes through each subdirectory of ``path`` in natural sort order and
    picks one file from each, indexed by successive slots of
    ``self.hasharray`` so every choice consumes a different piece of
    randomness; ``self.iter`` advances once per directory.

    Returns the list of chosen file paths.
    """
    chosen_files = []
    directories = []
    for root, dirs, files in natsort.natsorted(os.walk(path, topdown=False)):
        for name in dirs:
            # BUGFIX: the original used `name[:1] is not '.'` — an identity
            # comparison against a string literal that only works by accident
            # of CPython interning (and is a SyntaxWarning on modern Python).
            if name[:1] != '.':
                directories.append(os.path.join(root, name))
    directories = natsort.natsorted(directories)
    for directory in directories:
        files_in_dir = []
        for imagefile in natsort.natsorted(os.listdir(directory)):
            files_in_dir.append(os.path.join(directory, imagefile))
        files_in_dir = natsort.natsorted(files_in_dir)
        element_in_list = self.hasharray[self.iter] % len(files_in_dir)
        chosen_files.append(files_in_dir[element_in_list])
        self.iter += 1
    return chosen_files
Go through each subdirectory of path and choose one file from each to use in our hash . Continue to increase self . iter so we use a different slot of randomness each time .
53,420
# Build our Robot! Assembles the robot image for this instance's hash and
# stores it on self.img (resized to sizex x sizey); also updates self.format.
# - roboset: 'any' picks a hash-driven set; unknown values fall back to sets[0];
#   'set1' additionally gets a colour suffix (explicit or hash-chosen).
# - bgset: 'any' picks a hash-driven background; None means no background.
# - Parts are layered in the order given by the '#' field of each filename.
# - 'bmp' output drops the alpha channel, since BMP has no transparency.
# NOTE(review): Image.ANTIALIAS is deprecated in recent Pillow (use
# Image.LANCZOS) — confirm the pinned Pillow version before changing.
def assemble ( self , roboset = None , color = None , format = None , bgset = None , sizex = 300 , sizey = 300 ) : if roboset == 'any' : roboset = self . sets [ self . hasharray [ 1 ] % len ( self . sets ) ] elif roboset in self . sets : roboset = roboset else : roboset = self . sets [ 0 ] if roboset == 'set1' : if color in self . colors : roboset = 'set1/' + color else : randomcolor = self . colors [ self . hasharray [ 0 ] % len ( self . colors ) ] roboset = 'set1/' + randomcolor if bgset in self . bgsets : bgset = bgset elif bgset == 'any' : bgset = self . bgsets [ self . hasharray [ 2 ] % len ( self . bgsets ) ] if format is None : format = self . format roboparts = self . _get_list_of_files ( self . resourcedir + 'sets/' + roboset ) roboparts . sort ( key = lambda x : x . split ( "#" ) [ 1 ] ) if bgset is not None : bglist = [ ] backgrounds = natsort . natsorted ( os . listdir ( self . resourcedir + 'backgrounds/' + bgset ) ) backgrounds . sort ( ) for ls in backgrounds : if not ls . startswith ( "." ) : bglist . append ( self . resourcedir + 'backgrounds/' + bgset + "/" + ls ) background = bglist [ self . hasharray [ 3 ] % len ( bglist ) ] roboimg = Image . open ( roboparts [ 0 ] ) roboimg = roboimg . resize ( ( 1024 , 1024 ) ) for png in roboparts : img = Image . open ( png ) img = img . resize ( ( 1024 , 1024 ) ) roboimg . paste ( img , ( 0 , 0 ) , img ) if format == 'bmp' : r , g , b , a = roboimg . split ( ) roboimg = Image . merge ( "RGB" , ( r , g , b ) ) if bgset is not None : bg = Image . open ( background ) bg = bg . resize ( ( 1024 , 1024 ) ) bg . paste ( roboimg , ( 0 , 0 ) , roboimg ) roboimg = bg self . img = roboimg . resize ( ( sizex , sizey ) , Image . ANTIALIAS ) self . format = format
Build our Robot! Returns the robot image itself .
53,421
def collect_members(module_to_name):
    """Collect documentable symbols from the given modules.

    Returns a dict mapping short name -> (fullname, member) for every
    function or class found in the modules, honouring each module's
    ``__all__`` when present. When the same member is reachable under
    several module names, the shortest full name wins; a genuine collision
    or an ambiguous length tie raises RuntimeError.
    """
    members = {}
    for module, module_name in module_to_name.items():
        all_names = getattr(module, "__all__", None)
        for name, member in inspect.getmembers(module):
            if not (inspect.isfunction(member) or inspect.isclass(member)):
                continue
            if _always_drop_symbol_re.match(name):
                continue
            if all_names is not None and name not in all_names:
                continue
            fullname = '%s.%s' % (module_name, name)
            if name in members:
                other_fullname, other_member = members[name]
                if member is not other_member:
                    raise RuntimeError(
                        "Short name collision between %s and %s" %
                        (fullname, other_fullname))
                if len(fullname) == len(other_fullname):
                    raise RuntimeError(
                        "Can't decide whether to use %s or %s for %s: "
                        "both full names have length %d" %
                        (fullname, other_fullname, name, len(fullname)))
                if len(fullname) > len(other_fullname):
                    # Keep the shorter existing full name.
                    continue
            members[name] = fullname, member
    return members
Collect all symbols from a list of modules .
53,422
def _get_anchor(module_to_name, fullname):
    """Turn a full member name into the shortest valid page anchor."""
    if not _anchor_re.match(fullname):
        raise ValueError("'%s' is not a valid anchor" % fullname)
    anchor = fullname
    # Strip the longest matching module prefix, keeping the shortest result.
    for module_name in module_to_name.values():
        prefix = module_name + "."
        if fullname.startswith(prefix):
            rest = fullname[len(prefix):]
            if len(anchor) > len(rest):
                anchor = rest
    return anchor
Turn a full member name into an anchor .
53,423
def write_libraries(dir, libraries):
    """Write each (filename, library) pair of *libraries* into *dir*."""
    files = [open(os.path.join(dir, name), "w") for name, _ in libraries]
    # First pass: the main markdown body for every library.
    for f, (_, library) in zip(files, libraries):
        library.write_markdown_to_file(f)
    # Second pass: leftover members, closing each file as we finish it.
    for f, (_, library) in zip(files, libraries):
        library.write_other_members(f)
        f.close()
Write a list of libraries to disk .
53,424
def write_markdown_to_file(self, f):
    """Writes this index to file `f`."""
    print("---", file=f)
    print("---", file=f)
    # BUGFIX: this string literal was unterminated in the original (missing
    # closing quote and HTML comment terminator) — a syntax error.
    print("<!-- This file is machine generated: DO NOT EDIT! -->", file=f)
    print("", file=f)
    print("# TensorFlow Python reference documentation", file=f)
    print("", file=f)
    fullname_f = lambda name: self._members[name][0]
    anchor_f = lambda name: _get_anchor(self._module_to_name, fullname_f(name))
    for filename, library in self._filename_to_library_map:
        sorted_names = sorted(library.mentioned, key=lambda x: (str.lower(x), x))
        member_names = [n for n in sorted_names if n in self._members]
        full_filename = self._path_prefix + filename
        # Strip the '.md' suffix for the link targets.
        links = ["[`%s`](%s#%s)" % (name, full_filename[:-3], anchor_f(name))
                 for name in member_names]
        if links:
            print("* **[%s](%s)**:" % (library.title, full_filename[:-3]), file=f)
            for link in links:
                print(" * %s" % link, file=f)
            print("", file=f)
Writes this index to file f .
53,425
def _should_include_member(self, name, member):
    """Return True when *name* should appear in the generated document."""
    # Drop symbols matched by the global exclusion regex first, then any
    # name explicitly excluded for this library.
    if _always_drop_symbol_re.match(name):
        return False
    return name not in self._exclude_symbols
Returns True if this member should be included in the document .
53,426
def get_imported_modules(self, module):
    """Yield (name, module) for every module imported by *module*."""
    for name, member in inspect.getmembers(module):
        if inspect.ismodule(member):
            yield name, member
Returns the list of modules imported from module .
53,427
def get_class_members(self, cls_name, cls):
    """Yield (name, (fullname, member)) for the members of *cls* to document."""
    for name, member in inspect.getmembers(cls):
        is_method = inspect.ismethod(member) or inspect.isfunction(member)
        if not (is_method or isinstance(member, property)):
            continue
        # __init__ is always documented; every other member goes through
        # the inclusion filter.
        if ((is_method and member.__name__ == "__init__")
                or self._should_include_member(name, member)):
            yield name, ("%s.%s" % (cls_name, name), member)
Returns the list of class members to document in cls .
53,428
def _generate_signature_for_function ( self , func ) : args_list = [ ] argspec = inspect . getargspec ( func ) first_arg_with_default = ( len ( argspec . args or [ ] ) - len ( argspec . defaults or [ ] ) ) for arg in argspec . args [ : first_arg_with_default ] : if arg == "self" : continue args_list . append ( arg ) if argspec . varargs == "args" and argspec . keywords == "kwds" : original_func = func . __closure__ [ 0 ] . cell_contents return self . _generate_signature_for_function ( original_func ) if argspec . defaults : for arg , default in zip ( argspec . args [ first_arg_with_default : ] , argspec . defaults ) : if callable ( default ) : args_list . append ( "%s=%s" % ( arg , default . __name__ ) ) else : args_list . append ( "%s=%r" % ( arg , default ) ) if argspec . varargs : args_list . append ( "*" + argspec . varargs ) if argspec . keywords : args_list . append ( "**" + argspec . keywords ) return "(" + ", " . join ( args_list ) + ")"
Given a function returns a string representing its args .
53,429
def _remove_docstring_indent ( self , docstring ) : docstring = docstring or "" lines = docstring . strip ( ) . split ( "\n" ) min_indent = len ( docstring ) for l in lines [ 1 : ] : l = l . rstrip ( ) if l : i = 0 while i < len ( l ) and l [ i ] == " " : i += 1 if i < min_indent : min_indent = i for i in range ( 1 , len ( lines ) ) : l = lines [ i ] . rstrip ( ) if len ( l ) >= min_indent : l = l [ min_indent : ] lines [ i ] = l return lines
Remove indenting .
53,430
# Formats the given docstring as Markdown and prints it to f.
# Walks the de-indented lines, turning "Section:"-style headers (matched by
# the module-level _section_re, and only when the next line is indented)
# into "##### Section:" headings, and "name:" argument lines (matched by
# _arg_re) into bulleted list items; everything else is echoed verbatim.
# The inner while-loop consumes a section's body until the next section
# header; `outputting_list` tracks whether we are inside a bullet list so
# continuation lines are not re-bulleted.
def _print_formatted_docstring ( self , docstring , f ) : lines = self . _remove_docstring_indent ( docstring ) i = 0 def _at_start_of_section ( ) : l = lines [ i ] match = _section_re . match ( l ) if match and i + 1 < len ( lines ) and lines [ i + 1 ] . startswith ( " " ) : return match . group ( 1 ) else : return None while i < len ( lines ) : l = lines [ i ] section_header = _at_start_of_section ( ) if section_header : if i == 0 or lines [ i - 1 ] : print ( "" , file = f ) print ( "##### " + section_header + ":" , file = f ) print ( "" , file = f ) i += 1 outputting_list = False while i < len ( lines ) : l = lines [ i ] if _at_start_of_section ( ) : break match = _arg_re . match ( l ) if match : if not outputting_list : print ( "" , file = f ) outputting_list = True suffix = l [ len ( match . group ( ) ) : ] . lstrip ( ) print ( "* <b>`" + match . group ( 1 ) + "`</b>: " + suffix , file = f ) else : outputting_list &= l . startswith ( " " ) print ( l , file = f ) i += 1 else : print ( l , file = f ) i += 1
Formats the given docstring as Markdown and prints it to f .
53,431
def _print_function(self, f, prefix, fullname, func):
    """Print the markdown heading and docstring of *func* to *f*."""
    heading = prefix + " `" + fullname
    if not isinstance(func, property):
        # Properties have no call signature to render.
        heading += self._generate_signature_for_function(func)
    heading += "` {#%s}" % _get_anchor(self._module_to_name, fullname)
    print(heading, file=f)
    print("", file=f)
    self._print_formatted_docstring(inspect.getdoc(func), f)
    print("", file=f)
Prints the given function to f .
53,432
def _write_member_markdown_to_file ( self , f , prefix , name , member ) : if ( inspect . isfunction ( member ) or inspect . ismethod ( member ) or isinstance ( member , property ) ) : print ( "- - -" , file = f ) print ( "" , file = f ) self . _print_function ( f , prefix , name , member ) print ( "" , file = f ) elif inspect . isclass ( member ) : print ( "- - -" , file = f ) print ( "" , file = f ) print ( "%s `class %s` {#%s}" % ( prefix , name , _get_anchor ( self . _module_to_name , name ) ) , file = f ) print ( "" , file = f ) self . _write_class_markdown_to_file ( f , name , member ) print ( "" , file = f ) else : raise RuntimeError ( "Member %s has unknown type %s" % ( name , type ( member ) ) )
Print member to f .
53,433
def _write_class_markdown_to_file ( self , f , name , cls ) : methods = dict ( self . get_class_members ( name , cls ) ) num_methods = len ( methods ) try : self . _write_docstring_markdown_to_file ( f , "####" , inspect . getdoc ( cls ) , methods , { } ) except ValueError as e : raise ValueError ( str ( e ) + " in class `%s`" % cls . __name__ ) any_method_called_out = ( len ( methods ) != num_methods ) if any_method_called_out : other_methods = { n : m for n , m in methods . items ( ) if n in cls . __dict__ } if other_methods : print ( "\n#### Other Methods" , file = f ) else : other_methods = methods for name in sorted ( other_methods ) : self . _write_member_markdown_to_file ( f , "####" , * other_methods [ name ] )
Write the class doc to f .
53,434
def write_markdown_to_file(self, f):
    """Prints this library to file `f`."""
    print("---", file=f)
    print("---", file=f)
    # BUGFIX: this string literal was unterminated in the original (missing
    # closing quote and HTML comment terminator) — a syntax error.
    print("<!-- This file is machine generated: DO NOT EDIT! -->", file=f)
    print("", file=f)
    print("#", self._title, file=f)
    if self._prefix:
        print(self._prefix, file=f)
    print("[TOC]", file=f)
    print("", file=f)
    if self._module is not None:
        self._write_module_markdown_to_file(f, self._module)
Prints this library to file f .
53,435
def write_other_members(self, f, catch_all=False):
    """Write any undocumented members of the library to *f*.

    With ``catch_all`` set, every known member is considered; otherwise
    only the members actually present on the library's module.
    """
    if catch_all:
        names = self._members.items()
    else:
        names = inspect.getmembers(self._module)
    leftovers = [name for name, _ in names
                 if name in self._members and name not in self._documented]
    if leftovers:
        # Operator-facing log goes to stdout; the markdown goes to f.
        print("%s: undocumented members: %d" % (self._title, len(leftovers)))
        print("\n## Other Functions and Classes", file=f)
        for name in sorted(leftovers):
            print(" %s" % name)
            self._documented.add(name)
            self._mentioned.add(name)
            self._write_member_markdown_to_file(f, "###", *self._members[name])
Writes the leftover members to f .
53,436
def assert_no_leftovers(self):
    """Generate an error if there are leftover (undocumented) members."""
    # The `name in self._members` test is trivially true when iterating the
    # members' own keys; kept for symmetry with write_other_members().
    leftovers = [name for name in self._members.keys()
                 if name in self._members and name not in self._documented]
    if leftovers:
        raise RuntimeError("%s: undocumented members: %s" %
                           (self._title, ", ".join(leftovers)))
Generate an error if there are leftover members .
53,437
def start_http_server(self, port, host='0.0.0.0', endpoint=None):
    """Expose the metrics over HTTP if should_start_http_server() allows it.

    Uses the implementation from prometheus_client rather than a Flask app;
    the `endpoint` argument is accepted for interface compatibility.
    """
    if not self.should_start_http_server():
        return
    pc_start_http_server(port, host, registry=self.registry)
Start an HTTP server for exposing the metrics if the should_start_http_server function says we should otherwise just return . Uses the implementation from prometheus_client rather than a Flask app .
53,438
def init_app(self, app):
    """Initialize *app* for use with this prometheus reporter setup."""
    if self.path:
        self.register_endpoint(self.path, app)
    if self._export_defaults:
        self.export_defaults(
            self.buckets, self.group_by, self._defaults_prefix, app)
This callback can be used to initialize an application for the use with this prometheus reporter setup .
53,439
def register_endpoint(self, path, app=None):
    """Register the metrics endpoint at *path* on the Flask application."""
    # Skip in the werkzeug reloader parent process unless explicitly forced.
    if is_running_from_reloader() and not os.environ.get('DEBUG_METRICS'):
        return
    if app is None:
        app = self.app or current_app

    @app.route(path)
    @self.do_not_track()
    def prometheus_metrics():
        # Imported lazily so multiprocess support is only touched per request.
        from prometheus_client import multiprocess, CollectorRegistry
        if 'prometheus_multiproc_dir' in os.environ:
            registry = CollectorRegistry()
        else:
            registry = self.registry
        if 'name[]' in request.args:
            registry = registry.restricted_registry(request.args.getlist('name[]'))
        if 'prometheus_multiproc_dir' in os.environ:
            multiprocess.MultiProcessCollector(registry)
        headers = {'Content-Type': CONTENT_TYPE_LATEST}
        return generate_latest(registry), 200, headers
Register the metrics endpoint on the Flask application .
53,440
def start_http_server(self, port, host='0.0.0.0', endpoint='/metrics'):
    """Serve the metrics from a dedicated Flask app on a daemon thread.

    This will be an individual Flask application, not the one registered
    with this class.
    """
    if is_running_from_reloader():
        return

    app = Flask('prometheus-flask-exporter-%d' % port)
    self.register_endpoint(endpoint, app)

    def run_app():
        app.run(host=host, port=port)

    thread = threading.Thread(target=run_app)
    # FIX: Thread.setDaemon() is deprecated (removed in Python 3.13);
    # assign the `daemon` attribute instead.
    thread.daemon = True
    thread.start()
Start an HTTP server for exposing the metrics . This will be an individual Flask application not the one registered with this class .
53,441
def histogram(self, name, description, labels=None, **kwargs):
    """Track the execution time and invocation count of the method with a Histogram."""
    return self._track(
        Histogram,
        lambda metric, time: metric.observe(time),
        kwargs, name, description, labels,
        registry=self.registry)
Use a Histogram to track the execution time and invocation count of the method .
53,442
def summary(self, name, description, labels=None, **kwargs):
    """Track the execution time and invocation count of the method with a Summary."""
    return self._track(
        Summary,
        lambda metric, time: metric.observe(time),
        kwargs, name, description, labels,
        registry=self.registry)
Use a Summary to track the execution time and invocation count of the method .
53,443
def gauge(self, name, description, labels=None, **kwargs):
    """Track the number of in-progress invocations of the method with a Gauge."""
    return self._track(
        Gauge,
        # Incremented before the call, decremented when it finishes.
        lambda metric, time: metric.dec(),
        kwargs, name, description, labels,
        registry=self.registry,
        before=lambda metric: metric.inc())
Use a Gauge to track the number of invocations in progress for the method .
53,444
def counter(self, name, description, labels=None, **kwargs):
    """Track the total number of invocations of the method with a Counter."""
    return self._track(
        Counter,
        lambda metric, time: metric.inc(),
        kwargs, name, description, labels,
        registry=self.registry)
Use a Counter to track the total number of invocations of the method .
53,445
# Internal method decorator logic shared by counter/gauge/summary/histogram.
# - metric_type: prometheus metric class to instantiate (one parent metric,
#   labelled children resolved per call).
# - metric_call: callable(metric, time) applied after the wrapped function.
# - labels: dict of {labelname: value-or-callable}; callables taking one
#   argument receive the response, zero-argument callables are invoked as-is.
# - before: optional callable(metric) run before the wrapped function
#   (used by gauge() for in-progress tracking).
# Raises TypeError when labels is not a dict. HTTP exceptions from the
# wrapped view become the response; other exceptions become a 500 response
# so the metric is still recorded before the value is returned.
def _track ( metric_type , metric_call , metric_kwargs , name , description , labels , registry , before = None ) : if labels is not None and not isinstance ( labels , dict ) : raise TypeError ( 'labels needs to be a dictionary of {labelname: callable}' ) label_names = labels . keys ( ) if labels else tuple ( ) parent_metric = metric_type ( name , description , labelnames = label_names , registry = registry , ** metric_kwargs ) def argspec ( func ) : if hasattr ( inspect , 'getfullargspec' ) : return inspect . getfullargspec ( func ) else : return inspect . getargspec ( func ) def label_value ( f ) : if not callable ( f ) : return lambda x : f if argspec ( f ) . args : return lambda x : f ( x ) else : return lambda x : f ( ) label_generator = tuple ( ( key , label_value ( call ) ) for key , call in labels . items ( ) ) if labels else tuple ( ) def get_metric ( response ) : if label_names : return parent_metric . labels ( ** { key : call ( response ) for key , call in label_generator } ) else : return parent_metric def decorator ( f ) : @ functools . wraps ( f ) def func ( * args , ** kwargs ) : if before : metric = get_metric ( None ) before ( metric ) else : metric = None start_time = default_timer ( ) try : response = f ( * args , ** kwargs ) except HTTPException as ex : response = ex except Exception as ex : response = make_response ( 'Exception: %s' % ex , 500 ) total_time = max ( default_timer ( ) - start_time , 0 ) if not metric : response_for_metric = response if not isinstance ( response , Response ) : if request . endpoint == f . __name__ : response_for_metric = make_response ( response ) metric = get_metric ( response_for_metric ) metric_call ( metric , time = total_time ) return response return func return decorator
Internal method decorator logic .
53,446
def do_not_track():
    """Decorator that marks the wrapped view as excluded from metrics collection."""
    def decorator(f):
        @functools.wraps(f)
        def func(*args, **kwargs):
            # Flag inspected by the request hooks to skip default metrics.
            request.prom_do_not_track = True
            return f(*args, **kwargs)
        return func
    return decorator
Decorator to skip the default metrics collection for the method .
53,447
def info(self, name, description, labelnames=None, labelvalues=None, **labels):
    """Report a piece of information as a Gauge with the initial value 1.

    Labels may be given either as keyword arguments or as parallel
    `labelnames` / `labelvalues` collections, but not both.
    """
    if labels and labelnames:
        raise ValueError('Cannot have labels defined as `dict` '
                         'and collections of names and values')
    if labelnames is None and labels:
        labelnames = labels.keys()
    elif labelnames and labelvalues:
        # Zip the parallel collections into the labels dict.
        for idx, label_name in enumerate(labelnames):
            labels[label_name] = labelvalues[idx]
    gauge = Gauge(name, description, labelnames or tuple(),
                  registry=self.registry)
    if labels:
        gauge = gauge.labels(**labels)
    gauge.set(1)
    return gauge
Report any information as a Prometheus metric . This will create a Gauge with the initial value of 1 .
53,448
def inve(env, command, *args, **kwargs):
    """Run *command* inside the given virtual environment.

    Temporarily rewrites the process environment (VIRTUAL_ENV, PATH) and
    removes variables that would leak the host interpreter. Returns the
    command's exit status, or 2 if the executable could not be found.
    """
    with temp_environ():
        os.environ['VIRTUAL_ENV'] = str(workon_home / env)
        os.environ['PATH'] = compute_path(env)
        unsetenv('PYTHONHOME')
        unsetenv('__PYVENV_LAUNCHER__')
        try:
            return check_call([command] + list(args), shell=windows, **kwargs)
        except OSError as e:
            if e.errno == 2:
                # ENOENT: the executable was not found on the rewritten PATH.
                err('Unable to find', command)
                return 2
            raise
Run a command in the given virtual environment .
53,449
def ls_cmd(argv):
    """List available environments."""
    parser = argparse.ArgumentParser()
    exclusive = parser.add_mutually_exclusive_group()
    exclusive.add_argument('-b', '--brief', action='store_false')
    exclusive.add_argument('-l', '--long', action='store_true')
    args = parser.parse_args(argv)
    lsvirtualenv(args.long)
List available environments .
53,450
def workon_cmd(argv):
    """List or change working virtual environments."""
    parser = argparse.ArgumentParser(prog='pew workon')
    parser.add_argument('envname', nargs='?')
    parser.add_argument(
        '-n', '--no-cd', action='store_true',
        help=('Do not change working directory to project directory after '
              'activating virtualenv.'))
    args = parser.parse_args(argv)

    def list_and_exit():
        # With no environment name: show the list and stop.
        lsvirtualenv(False)
        sys.exit(0)

    env = parse_envname([args.envname], list_and_exit)
    project_dir = get_project_dir(env)
    if project_dir is None or args.no_cd:
        project_dir = os.getcwd()
    return shell(env, cwd=project_dir)
List or change working virtual environments .
53,451
def add_cmd(argv):
    """Add (or with -d remove) directories on the active virtualenv's path.

    The paths are stored in `_virtualenv_path_extensions.pth` inside the
    environment's site-packages directory.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-d', dest='remove', action='store_true')
    parser.add_argument('dirs', nargs='+')
    args = parser.parse_args(argv)

    extra_paths = sitepackages_dir() / '_virtualenv_path_extensions.pth'
    new_paths = [os.path.abspath(d) + "\n" for d in args.dirs]
    if not extra_paths.exists():
        with extra_paths.open('w') as extra:
            # BUGFIX: the original called extra.write() with no argument,
            # which raises TypeError; write an empty string to create the file.
            extra.write('')

    def rewrite(transform):
        # Rewrite the .pth file in place through the given line transform.
        with extra_paths.open('r+') as extra:
            to_write = transform(extra.readlines())
            extra.seek(0)
            extra.truncate()
            extra.writelines(to_write)

    if args.remove:
        rewrite(lambda ls: [line for line in ls if line not in new_paths])
    else:
        # Keep the first existing line in place; insert new paths after it.
        rewrite(lambda lines: lines[0:1] + new_paths + lines[1:])
Add the specified directories to the Python path for the currently active virtualenv .
53,452
def lssitepackages_cmd(argv):
    """Show the content of the site-packages directory of the current virtualenv."""
    site = sitepackages_dir()
    print(*sorted(site.iterdir()), sep=os.linesep)
    extra_paths = site / '_virtualenv_path_extensions.pth'
    if extra_paths.exists():
        # Also show any extra paths registered via `pew add`.
        print('from _virtualenv_path_extensions.pth:')
        with extra_paths.open() as extra:
            print(''.join(extra.readlines()))
Show the content of the site - packages directory of the current virtualenv .
53,453
def toggleglobalsitepackages_cmd(argv):
    """Toggle the virtualenv between having and not having access to the global site-packages."""
    quiet = argv == ['-q']
    site = sitepackages_dir()
    # Presence of this marker file disables the global site-packages.
    ngsp_file = site.parent / 'no-global-site-packages.txt'
    if ngsp_file.exists():
        ngsp_file.unlink()
        if not quiet:
            print('Enabled global site-packages')
    else:
        with ngsp_file.open('w'):
            if not quiet:
                print('Disabled global site-packages')
Toggle the current virtualenv between having and not having access to the global site - packages .
53,454
# Duplicate the named virtualenv to make a new one: parses `source` and an
# optional `target` name, copies the environment (and its project binding)
# via copy_virtualenv_project(), then activates the copy unless
# -d/--dont-activate was given (the flag stores False into `activate`).
# NOTE(review): the help string contains literal backslashes from a
# flattened line continuation in the original source.
def cp_cmd ( argv ) : parser = argparse . ArgumentParser ( ) parser . add_argument ( 'source' ) parser . add_argument ( 'target' , nargs = '?' ) parser . add_argument ( '-d' , '--dont-activate' , action = 'store_false' , default = True , dest = 'activate' , help = "After \ creation, continue with the existing shell (don't \ activate the new environment)." ) args = parser . parse_args ( argv ) target_name = copy_virtualenv_project ( args . source , args . target ) if args . activate : shell ( target_name )
Duplicate the named virtualenv to make a new one .
53,455
def rename_cmd(argv):
    """Rename a virtualenv by copying it and removing the original."""
    parser = argparse.ArgumentParser()
    parser.add_argument('source')
    parser.add_argument('target')
    pargs = parser.parse_args(argv)
    copy_virtualenv_project(pargs.source, pargs.target)
    return rmvirtualenvs([pargs.source])
Rename a virtualenv
53,456
def setproject_cmd(argv):
    """Associate a virtualenv with a project directory."""
    # Positional args are optional: argv[0] is the env (default: the active
    # one), argv[1] the project path (default: the current directory).
    positional = dict(enumerate(argv))
    project = os.path.abspath(positional.get(1, '.'))
    env = positional.get(0, os.environ.get('VIRTUAL_ENV'))
    if not env:
        sys.exit('pew setproject [virtualenv] [project_path]')
    if not (workon_home / env).exists():
        sys.exit("Environment '%s' doesn't exist." % env)
    if not os.path.isdir(project):
        sys.exit('pew setproject: %s does not exist' % project)
    setvirtualenvproject(env, project)
Given a virtualenv directory and a project directory set the \ virtualenv up to be associated with the project .
53,457
def getproject_cmd(argv):
    """Print a virtualenv's project directory, if set."""
    parser = argparse.ArgumentParser(
        description="Print an environment's project directory.",
    )
    parser.add_argument(
        'envname', nargs='?', default=os.environ.get('VIRTUAL_ENV'),
        help=('The name of the environment to return the project directory '
              'for. If omitted, will use the currently active environment.'),
    )
    args = parser.parse_args(argv)
    if not args.envname:
        sys.exit('ERROR: no virtualenv active')
    if not (workon_home / args.envname).exists():
        sys.exit("ERROR: Environment '{0}' does not exist.".format(args.envname))
    project_dir = get_project_dir(args.envname)
    if project_dir is None:
        sys.exit("ERROR: no project directory set for Environment '{0}'".format(
            args.envname))
    print(project_dir)
Print a virtualenv's project directory if set .
53,458
# CLI: create a project directory under PROJECT_HOME together with its virtualenv,
# run any requested template_* scripts from workon_home against it, and optionally
# activate.  -l/--list short-circuits and only prints available templates.
def mkproject_cmd ( argv ) : if '-l' in argv or '--list' in argv : templates = [ t . name [ 9 : ] for t in workon_home . glob ( "template_*" ) ] print ( "Available project templates:" , * templates , sep = '\n' ) return parser = mkvirtualenv_argparser ( ) parser . add_argument ( 'envname' ) parser . add_argument ( '-t' , action = 'append' , default = [ ] , dest = 'templates' , help = 'Multiple \templates may be selected. They are applied in the order specified on the \command line.' ) parser . add_argument ( '-l' , '--list' , action = 'store_true' , help = 'List available templates.' ) args , rest = parser . parse_known_args ( argv ) projects_home = Path ( os . environ . get ( 'PROJECT_HOME' , '.' ) ) if not projects_home . exists ( ) : sys . exit ( 'ERROR: Projects directory %s does not exist. \Create it or set PROJECT_HOME to an existing directory.' % projects_home ) project = ( projects_home / args . envname ) . absolute ( ) if project . exists ( ) : sys . exit ( 'Project %s already exists.' % args . envname ) mkvirtualenv ( args . envname , args . python , args . packages , project . absolute ( ) , args . requirements , rest ) project . mkdir ( ) for template_name in args . templates : template = workon_home / ( "template_" + template_name ) inve ( args . envname , str ( template ) , args . envname , str ( project ) ) if args . activate : shell ( args . envname , cwd = str ( project ) )
Create a new project directory and its associated virtualenv .
53,459
# CLI: create a throwaway virtualenv under a random hex name and remove it when
# the (optional) shell exits.  NOTE(review): the `return` inside `finally`
# suppresses any exception raised in the try body, including KeyboardInterrupt.
def mktmpenv_cmd ( argv ) : parser = mkvirtualenv_argparser ( ) env = '.' while ( workon_home / env ) . exists ( ) : env = hex ( random . getrandbits ( 64 ) ) [ 2 : - 1 ] args , rest = parser . parse_known_args ( argv ) mkvirtualenv ( env , args . python , args . packages , requirements = args . requirements , rest = rest ) print ( 'This is a temporary environment. It will be deleted when you exit' ) try : if args . activate : shell ( env ) finally : return rmvirtualenvs ( [ env ] )
Create a temporary virtualenv .
53,460
# CLI: run the given command in every virtualenv; exits with a truthy (nonzero)
# status if any run raised CalledProcessError.
def inall_cmd ( argv ) : envs = lsenvs ( ) errors = False for env in envs : print ( "\n%s:" % env ) try : inve ( env , * argv ) except CalledProcessError as e : errors = True err ( e ) sys . exit ( errors )
Run a command in each virtualenv .
53,461
# CLI: run a command inside the named virtualenv; with only an env name given,
# falls back to `workon` behavior (activating a shell in it).
def in_cmd ( argv ) : if len ( argv ) == 1 : return workon_cmd ( argv ) parse_envname ( argv , lambda : sys . exit ( 'You must provide a valid virtualenv to target' ) ) return inve ( * argv )
Run a command in the given virtualenv .
53,462
# CLI: repair a broken virtualenv by re-running virtualenv on top of it with the
# same interpreter the env's python symlink resolves to.
def restore_cmd ( argv ) : if len ( argv ) < 1 : sys . exit ( 'You must provide a valid virtualenv to target' ) env = argv [ 0 ] path = workon_home / env py = path / env_bin_dir / ( 'python.exe' if windows else 'python' ) exact_py = py . resolve ( ) . name return check_call ( [ sys . executable , "-m" , "virtualenv" , str ( path . absolute ( ) ) , "--python=%s" % exact_py ] )
Try to restore a broken virtualenv by reinstalling the same python version on top of it
53,463
# CLI: print the filesystem path of the named virtualenv's directory.
def dir_cmd ( argv ) : env = parse_envname ( argv , lambda : sys . exit ( 'You must provide a valid virtualenv to target' ) ) print ( workon_home / env )
Print the path for the virtualenv directory
53,464
# CLI: delegate to pythonz to download/build exactly one requested Python
# version; prints (rather than fails) if it is already installed.
def install_cmd ( argv ) : installer = InstallCommand ( ) options , versions = installer . parser . parse_args ( argv ) if len ( versions ) != 1 : installer . parser . print_help ( ) sys . exit ( 1 ) else : try : actual_installer = PythonInstaller . get_installer ( versions [ 0 ] , options ) return actual_installer . install ( ) except AlreadyInstalledError as e : print ( e )
Use Pythonz to download and build the specified Python version
53,465
# CLI: print the installed pew distribution version, or 'unknown' (with a
# warning on stderr) if setuptools cannot locate the package metadata.
def version_cmd ( argv ) : import pkg_resources try : __version__ = pkg_resources . get_distribution ( 'pew' ) . version except pkg_resources . DistributionNotFound : __version__ = 'unknown' print ( 'Setuptools has some issues here, failed to get our own package.' , file = sys . stderr ) print ( __version__ )
Prints current pew version
53,466
# Parse a CSS declaration string into (property-name, formatted-value) pairs,
# sorted by property name.  Serialized through a lock stored on the function
# object itself — presumably because cssutils parsing is not thread-safe
# (TODO confirm against cssutils docs).
def csstext_to_pairs ( csstext ) : with csstext_to_pairs . _lock : return sorted ( [ ( prop . name . strip ( ) , format_value ( prop ) ) for prop in cssutils . parseStyle ( csstext ) ] , key = itemgetter ( 0 ) , )
csstext_to_pairs takes CSS text and converts it to a sorted list of (property, value) tuples.
53,467
# Merge per-(pseudo)class style pairs, later entries overriding earlier ones,
# then re-apply the original inline style on top (inline always wins).  With
# remove_unset_properties=True, properties whose value is "unset" are dropped.
# Returns a flat declaration string, or "{...} :pseudo{...}" blocks when any
# pseudo-class styles survive.
def merge_styles ( inline_style , new_styles , classes , remove_unset_properties = False ) : styles = OrderedDict ( [ ( "" , OrderedDict ( ) ) ] ) for pc in set ( classes ) : styles [ pc ] = OrderedDict ( ) for i , style in enumerate ( new_styles ) : for k , v in style : styles [ classes [ i ] ] [ k ] = v if inline_style : for k , v in csstext_to_pairs ( inline_style ) : styles [ "" ] [ k ] = v normal_styles = [ ] pseudo_styles = [ ] for pseudoclass , kv in styles . items ( ) : if remove_unset_properties : kv = OrderedDict ( ( k , v ) for ( k , v ) in kv . items ( ) if not v . lower ( ) == "unset" ) if not kv : continue if pseudoclass : pseudo_styles . append ( "%s{%s}" % ( pseudoclass , "; " . join ( "%s:%s" % ( k , v ) for k , v in kv . items ( ) ) ) ) else : normal_styles . append ( "; " . join ( "%s:%s" % ( k , v ) for k , v in kv . items ( ) ) ) if pseudo_styles : all_styles = ( ( [ "{%s}" % "" . join ( normal_styles ) ] + pseudo_styles ) if normal_styles else pseudo_styles ) else : all_styles = normal_styles return " " . join ( all_styles ) . strip ( )
This will merge all new styles, where the order is important: the last one will override the first. When that is done, the old inline style is applied again. The old inline style is always important and overrides all new ones. The inline style must be valid.
53,468
def make_important(bulk):
    """Mark every declaration in a ``;``-separated CSS string as ``!important``.

    Declarations that already end with ``!important`` are left untouched.
    """
    pieces = []
    for declaration in bulk.split(";"):
        if declaration.endswith("!important"):
            pieces.append(declaration)
        else:
            pieces.append("%s !important" % declaration)
    return ";".join(pieces)
makes every property in a string !important .
53,469
# Capitalize the 'float'/'margin' CSS property names matched by the module-level
# regex _lowercase_margin_float_rule, keeping each value and terminator intact.
def capitalize_float_margin ( css_body ) : def _capitalize_property ( match ) : return "{0}:{1}{2}" . format ( match . group ( "property" ) . capitalize ( ) , match . group ( "value" ) , match . group ( "terminator" ) , ) return _lowercase_margin_float_rule . sub ( _capitalize_property , css_body )
Capitalize float and margin CSS property names
53,470
# Load an external stylesheet body from a remote URL or a local path.
# Protocol-relative ('//') URLs inherit https only when base_url is https;
# relative local paths are resolved against base_path, and if missing are
# retried as a URL joined onto base_url before raising ExternalNotFoundError.
def _load_external ( self , url ) : if url . startswith ( "//" ) : if self . base_url and "https://" in self . base_url : url = "https:" + url else : url = "http:" + url if url . startswith ( "http://" ) or url . startswith ( "https://" ) : css_body = self . _load_external_url ( url ) else : stylefile = url if not os . path . isabs ( stylefile ) : stylefile = os . path . abspath ( os . path . join ( self . base_path or "" , stylefile ) ) if os . path . exists ( stylefile ) : with codecs . open ( stylefile , encoding = "utf-8" ) as f : css_body = f . read ( ) elif self . base_url : url = urljoin ( self . base_url , url ) return self . _load_external ( url ) else : raise ExternalNotFoundError ( stylefile ) return css_body
loads an external stylesheet from a remote url or local path
53,471
# Serialize a list of CSS rules to a string.  (selector, body) tuples are
# rendered with every property forced !important via make_important; cssutils
# rule objects get !important set per-property, skipping comments and unknown
# rules.  NOTE: mutates the cssutils rule objects in place.
def _css_rules_to_string ( self , rules ) : lines = [ ] for item in rules : if isinstance ( item , tuple ) : k , v = item lines . append ( "%s {%s}" % ( k , make_important ( v ) ) ) else : for rule in item . cssRules : if isinstance ( rule , ( cssutils . css . csscomment . CSSComment , cssutils . css . cssunknownrule . CSSUnknownRule , ) , ) : continue for key in rule . style . keys ( ) : rule . style [ key ] = ( rule . style . getPropertyValue ( key , False ) , "!important" , ) lines . append ( item . cssText ) return "\n" . join ( lines )
given a list of css rules returns a css string
53,472
# At most every 5 seconds: join and drop dead workers, and raise ProcessKilled
# if the pool has shrunk below the expected count.  NOTE(review): uses a list
# comprehension purely for the join() side effect.
def check_workers ( self ) : if time . time ( ) - self . _worker_alive_time > 5 : self . _worker_alive_time = time . time ( ) [ worker . join ( ) for worker in self . _workers if not worker . is_alive ( ) ] self . _workers = [ worker for worker in self . _workers if worker . is_alive ( ) ] if len ( self . _workers ) < self . _num_workers : raise ProcessKilled ( 'One of the workers has been killed.' )
Kill workers that have been pending for a while and check if all workers are alive .
53,473
# Tell every worker to shut down: for each message received on the backend
# socket (within a 1s poll), reply None as the stop signal, then join all
# worker processes/threads.
def kill_all ( self ) : while self . _num_workers > 0 and self . _worker_backend_socket . poll ( 1000 ) : msg = self . _worker_backend_socket . recv_pyobj ( ) self . _worker_backend_socket . send_pyobj ( None ) self . _num_workers -= 1 self . report ( f'Kill {msg[1:]}' ) [ worker . join ( ) for worker in self . _workers ]
Kill all workers
53,474
# Check whether Python module `name` is importable and, when a version spec
# (self._version, e.g. '==1.0', '>=2', '!=3') is set, whether the installed
# version satisfies it; otherwise install/reinstall via `pip install` when
# autoinstall is allowed, reloading the module after a reinstall.
# NOTE(review): `ver` may be unbound if pkg_resources lookup fails before the
# comparison — TODO confirm upstream.
def _install ( self , name , autoinstall ) : import importlib import pkg_resources spam_spec = importlib . util . find_spec ( name ) reinstall = False if spam_spec is not None : if self . _version : mod = importlib . __import__ ( name ) if hasattr ( mod , '__version__' ) : ver = mod . __version__ else : try : ver = pkg_resources . get_distribution ( name ) . version except Exception as e : env . logger . debug ( f'Failed to get version of {name}: {e}' ) env . logger . debug ( f'Comparing exiting version {ver} against requested version {self._version}' ) if self . _version . startswith ( '==' ) and pkg_resources . parse_version ( ver ) == pkg_resources . parse_version ( self . _version [ 2 : ] ) : pass elif self . _version . startswith ( '<=' ) and pkg_resources . parse_version ( ver ) <= pkg_resources . parse_version ( self . _version [ 2 : ] ) : pass elif self . _version . startswith ( '<' ) and not self . _version . startswith ( '<=' ) and pkg_resources . parse_version ( ver ) < pkg_resources . parse_version ( self . _version [ 1 : ] ) : pass elif self . _version . startswith ( '>=' ) and pkg_resources . parse_version ( ver ) >= pkg_resources . parse_version ( self . _version [ 2 : ] ) : pass elif self . _version . startswith ( '>' ) and not self . _version . startswith ( '>=' ) and pkg_resources . parse_version ( ver ) > pkg_resources . parse_version ( self . _version [ 1 : ] ) : pass elif self . _version . startswith ( '!=' ) and pkg_resources . parse_version ( ver ) != pkg_resources . parse_version ( self . _version [ 2 : ] ) : pass elif self . _version [ 0 ] not in ( '=' , '>' , '<' , '!' ) and pkg_resources . parse_version ( ver ) == pkg_resources . parse_version ( self . _version ) : pass else : env . logger . warning ( f'Version {ver} of installed {name} does not match specified version {self._version}.' 
) reinstall = True if spam_spec and not reinstall : return True if not autoinstall : return False import subprocess cmd = [ 'pip' , 'install' ] + ( [ ] if self . _version else [ '-U' ] ) + [ self . _module + ( self . _version if self . _version else '' ) if self . _autoinstall is True else self . _autoinstall ] env . logger . info ( f'Installing python module {name} with command {" ".join(cmd)}' ) ret = subprocess . call ( cmd ) if reinstall : import sys importlib . reload ( sys . modules [ name ] ) return ret == 0 and self . _install ( name , False )
Check existence of Python module and install it using command pip install if necessary .
53,475
# Execute a single or master task identified by task_id, tracking its lifecycle
# in the TaskFile: status 'running' -> 'skipped'/'completed'/'failed', or
# 'aborted' on KeyboardInterrupt/ProcessKilled.  SIGTERM is routed through
# signal_handler for the duration of execution and restored afterwards.
# Returns the task's ret_code.
def execute_task ( task_id , verbosity = None , runmode = 'run' , sigmode = None , monitor_interval = 5 , resource_monitor_interval = 60 ) : tf = TaskFile ( task_id ) tf . status = 'running' try : signal . signal ( signal . SIGTERM , signal_handler ) res = _execute_task ( task_id , verbosity , runmode , sigmode , monitor_interval , resource_monitor_interval ) except KeyboardInterrupt : tf . status = 'aborted' raise except ProcessKilled : tf . status = 'aborted' raise ProcessKilled ( 'task interrupted' ) finally : signal . signal ( signal . SIGTERM , signal . SIG_DFL ) if res [ 'ret_code' ] != 0 and 'exception' in res : with open ( os . path . join ( os . path . expanduser ( '~' ) , '.sos' , 'tasks' , task_id + '.err' ) , 'a' ) as err : err . write ( f'Task {task_id} exits with code {res["ret_code"]}' ) if res . get ( 'skipped' , False ) : tf . status = 'skipped' else : tf . add_outputs ( ) sig = res . get ( 'signature' , { } ) res . pop ( 'signature' , None ) tf . add_result ( res ) if sig : tf . add_signature ( sig ) tf . status = 'completed' if res [ 'ret_code' ] == 0 else 'failed' return res [ 'ret_code' ]
Execute single or master task return a dictionary
53,476
def textMD5(text):
    """Return the md5 hex digest of a piece of text (``str`` or ``bytes``)."""
    digest = hash_md5()
    # str input is encoded to bytes first; bytes-like input is hashed as-is
    data = text.encode() if isinstance(text, str) else text
    digest.update(data)
    return digest.hexdigest()
Get md5 of a piece of text
53,477
def objectMD5(obj):
    """Return an md5-style identifier for an object.

    Objects exposing a ``target_name`` method are identified by that name;
    anything else is hashed through its pickle representation.  Returns an
    empty string when the object cannot be pickled.
    """
    if hasattr(obj, 'target_name'):
        return obj.target_name()
    try:
        return textMD5(pickle.dumps(obj))
    except Exception:
        # was a bare `except:` — that silently swallowed KeyboardInterrupt
        # and SystemExit as well; narrow to Exception
        return ''
Get md5 of an object
53,478
def fileMD5(filename, partial=True):
    """Calculate the md5 of a file.

    With ``partial=True`` (the default), files of 16M or more are hashed
    from only their first and last 8M, which significantly reduces the time
    spent creating and comparing signatures of large bioinformatics datasets.
    Exits the process with an error message if the file cannot be read.
    """
    filesize = os.path.getsize(filename)
    md5 = hash_md5()
    block_size = 2 ** 20  # 1M read chunks
    try:
        if (not partial) or filesize < 2 ** 24:
            # small file (or full hash requested): hash everything
            with open(filename, 'rb') as f:
                while True:
                    data = f.read(block_size)
                    if not data:
                        break
                    md5.update(data)
        else:
            # hash the first 8 blocks, then jump to the last 8M of the file
            count = 16
            with open(filename, 'rb') as f:
                while True:
                    data = f.read(block_size)
                    count -= 1
                    if count == 8:
                        f.seek(-2 ** 23, 2)
                    if not data or count == 0:
                        break
                    md5.update(data)
    except IOError as e:
        # bug fix: report the actual file name instead of the literal '(unknown)'
        sys.exit(f'Failed to read {filename}: {e}')
    return md5.hexdigest()
Calculate partial MD5, basically the first and last 8M of the file for large files. This should significantly reduce the time spent on the creation and comparison of file signatures when dealing with large bioinformatics datasets.
53,479
# Return this file target's signature as (mtime, size, md5).  For an existing
# file the md5 is computed lazily and cached on self._md5; for a zapped file
# (content removed, '<name>.zapped' sidecar kept) the signature is read back
# from the sidecar's tab-separated first line.  Raises ValueError otherwise.
def target_signature ( self ) : if self . exists ( ) : if not self . _md5 : self . _md5 = fileMD5 ( self ) return ( os . path . getmtime ( self ) , os . path . getsize ( self ) , self . _md5 ) elif ( self + '.zapped' ) . is_file ( ) : with open ( self + '.zapped' ) as sig : line = sig . readline ( ) _ , mtime , size , md5 = line . strip ( ) . rsplit ( '\t' , 3 ) self . _md5 = md5 return ( float ( mtime ) , int ( size ) , md5 ) else : raise ValueError ( f'{self} does not exist.' )
Return file signature
53,480
# Check whether this file still matches its recorded signature (passed in as
# (mtime, size, md5) or read from the sig file).  Missing files are validated
# against their '.zapped' sidecar md5.  NOTE(review): when the signature is
# read from the sig file, mtime/size are strings, so the fast mtime+size
# equality shortcut never fires and the md5 is always recomputed — confirm
# whether that is intended.
def validate ( self , sig = None ) : if sig is not None : sig_mtime , sig_size , sig_md5 = sig else : try : with open ( self . sig_file ( ) ) as sig : sig_mtime , sig_size , sig_md5 = sig . read ( ) . strip ( ) . split ( ) except : return False if not self . exists ( ) : if ( self + '.zapped' ) . is_file ( ) : with open ( self + '.zapped' ) as sig : line = sig . readline ( ) return sig_md5 == line . strip ( ) . rsplit ( '\t' , 3 ) [ - 1 ] else : return False if sig_mtime == os . path . getmtime ( self ) and sig_size == os . path . getsize ( self ) : return True return fileMD5 ( self ) == sig_md5
Check if file matches its signature
53,481
# Write this file's signature (mtime, size, md5; tab-separated) to its sig
# file, computing and caching the md5 first if needed.
def write_sig ( self ) : if not self . _md5 : self . _md5 = fileMD5 ( self ) with open ( self . sig_file ( ) , 'w' ) as sig : sig . write ( f'{os.path.getmtime(self)}\t{os.path.getsize(self)}\t{self._md5}' )
Write signature to sig store
53,482
# Remove targets of a given type (or keep only the indexes listed in `kept`),
# dropping the matching labels and remapping each group's indexes to the new
# positions.  Returns self for chaining.  NOTE: parameter `type` shadows the
# builtin.
def remove_targets ( self , type , kept = None ) : if kept is None : kept = [ i for i , x in enumerate ( self . _targets ) if not isinstance ( x , type ) ] if len ( kept ) == len ( self . _targets ) : return self self . _targets = [ self . _targets [ x ] for x in kept ] self . _labels = [ self . _labels [ x ] for x in kept ] if not self . _groups : return self index_map = { o_idx : n_idx for n_idx , o_idx in zip ( range ( len ( self . _targets ) ) , kept ) } kept = set ( kept ) for idx , grp in enumerate ( self . _groups ) : self . _groups [ idx ] = _sos_group ( [ index_map [ x ] for x in grp . _indexes if x in kept ] , [ y for x , y in zip ( grp . _indexes , grp . _labels ) if x in kept ] ) . set ( ** grp . _dict ) return self
Remove targets of certain type
53,483
# Replace each `remote` target in place with a file_target built from its
# resolved (and, for strings, interpolated) path, preserving the target's
# attribute dict.  Returns self for chaining.
def resolve_remote ( self ) : for idx , target in enumerate ( self . _targets ) : if isinstance ( target , remote ) : resolved = target . resolve ( ) if isinstance ( resolved , str ) : resolved = interpolate ( resolved , env . sos_dict . dict ( ) ) self . _targets [ idx ] = file_target ( resolved ) . set ( ** target . _dict ) return self
If target is of remote type resolve it
53,484
def _handle_paired_with(self, paired_with):
    """Handle input option ``paired_with``.

    Accepts a variable name (str), a dict of name->value, or an iterable of
    variable names; names given as strings are looked up in env.sos_dict and
    paired under a leading-underscore alias.  Raises ValueError for unknown
    variables or unsupported argument types.
    """
    if paired_with is None or not paired_with:
        var_name = []
        var_value = []
    elif isinstance(paired_with, str):
        var_name = ['_' + paired_with]
        if paired_with not in env.sos_dict:
            raise ValueError(f'Variable {paired_with} does not exist.')
        var_value = [env.sos_dict[paired_with]]
    elif isinstance(paired_with, dict):
        var_name = []
        var_value = []
        for k, v in paired_with.items():
            var_name.append(k)
            var_value.append(v)
    elif isinstance(paired_with, Iterable):
        try:
            var_name = ['_' + x for x in paired_with]
        except Exception:
            # bug fix: corrected typo 'Invalud' in the error message
            raise ValueError(f'Invalid value for option paired_with {paired_with}')
        var_value = []
        for vn in var_name:
            if vn[1:] not in env.sos_dict:
                raise ValueError(f'Variable {vn[1:]} does not exist.')
            var_value.append(env.sos_dict[vn[1:]])
    else:
        raise ValueError(
            f'Unacceptable value for parameter paired_with: {paired_with}')
    for vn, vv in zip(var_name, var_value):
        self.paired_with(vn, vv)
Handle input option paired_with
53,485
def _handle_group_with(self, group_with):
    """Handle input option ``group_with``.

    Accepts a variable name (str), a dict of name->value, or an iterable of
    variable names; names given as strings are looked up in env.sos_dict and
    grouped under a leading-underscore alias.  Raises ValueError for unknown
    variables or unsupported argument types.
    """
    if group_with is None or not group_with:
        var_name = []
        var_value = []
    elif isinstance(group_with, str):
        var_name = ['_' + group_with]
        if group_with not in env.sos_dict:
            raise ValueError(f'Variable {group_with} does not exist.')
        var_value = [env.sos_dict[group_with]]
    elif isinstance(group_with, dict):
        var_name = []
        var_value = []
        for k, v in group_with.items():
            var_name.append(k)
            var_value.append(v)
    elif isinstance(group_with, Iterable):
        try:
            var_name = ['_' + x for x in group_with]
        except Exception:
            # bug fix: corrected typo 'Invalud' in the error message
            raise ValueError(f'Invalid value for option group_with {group_with}')
        var_value = []
        for vn in var_name:
            if vn[1:] not in env.sos_dict:
                raise ValueError(f'Variable {vn[1:]} does not exist.')
            var_value.append(env.sos_dict[vn[1:]])
    else:
        raise ValueError(
            f'Unacceptable value for parameter group_with: {group_with}')
    for vn, vv in zip(var_name, var_value):
        self.group_with(vn, vv)
Handle input option group_with
53,486
# Handle input option `pattern`: for each pattern string, extract named fields
# from the targets via extract_pattern, store them as attributes, and pair the
# extracted values with the targets under '_'-prefixed names.
def _handle_extract_pattern ( self , pattern ) : if pattern is None or not pattern : patterns = [ ] elif isinstance ( pattern , str ) : patterns = [ pattern ] elif isinstance ( pattern , Iterable ) : patterns = pattern else : raise ValueError ( f'Unacceptable value for parameter pattern: {pattern}' ) for pattern in patterns : res = extract_pattern ( pattern , self . _targets ) self . set ( ** res ) self . _handle_paired_with ( { '_' + x : y for x , y in res . items ( ) } )
Handle input option pattern
53,487
# Persist the step signature (input/output/dependent file info) and notify the
# controller so it is recorded in the workflow signature.  Raises ValueError if
# the output targets are still undetermined; returns True on success, or the
# False from the superclass write on failure.
def write ( self ) : if not self . output_files . valid ( ) : raise ValueError ( f'Cannot write signature with undetermined output {self.output_files}' ) else : if 'TARGET' in env . config [ 'SOS_DEBUG' ] or 'ALL' in env . config [ 'SOS_DEBUG' ] : env . log_to_file ( 'TARGET' , f'write signature {self.sig_id} with output {self.output_files}' ) ret = super ( RuntimeInfo , self ) . write ( ) if ret is False : env . logger . debug ( f'Failed to write signature {self.sig_id}' ) return ret send_message_to_controller ( [ 'step_sig' , self . sig_id , ret ] ) send_message_to_controller ( [ 'workflow_sig' , 'tracked_files' , self . sig_id , repr ( { 'input_files' : [ str ( f . resolve ( ) ) for f in self . input_files if isinstance ( f , file_target ) ] , 'dependent_files' : [ str ( f . resolve ( ) ) for f in self . dependent_files if isinstance ( f , file_target ) ] , 'output_files' : [ str ( f . resolve ( ) ) for f in self . output_files if isinstance ( f , file_target ) ] } ) ] ) return True
Write signature file with signature of script input output and dependent files . Because local input and output files can only be determined after the execution of workflow . They are not part of the construction .
53,488
# Best-effort removal of file targets in _output (or the given output list)
# when a step fails; failures to unlink are logged as warnings, not raised.
def clear_output ( output = None ) : for target in env . sos_dict [ '_output' ] if output is None else output : if isinstance ( target , file_target ) and target . exists ( ) : try : target . unlink ( ) except Exception as e : env . logger . warning ( f'Failed to remove {target}: {e}' )
Remove file targets in _output when a step fails to complete
53,489
# Add a forward (linear) mini-workflow to the DAG: analyze each section,
# thread step_output into the next step's default_input, expose changed
# variables as sos_variable provides, and extend the last step's output with
# `satisfies` targets if given.  Returns the number of sections added.
def add_forward_workflow ( self , dag , sections , satisfies = None ) : dag . new_forward_workflow ( ) if 'DAG' in env . config [ 'SOS_DEBUG' ] or 'ALL' in env . config [ 'SOS_DEBUG' ] : env . log_to_file ( 'DAG' , f'Adding mini-workflow with {len(sections)} sections' ) default_input : sos_targets = sos_targets ( [ ] ) for idx , section in enumerate ( sections ) : res = analyze_section ( section , default_input = default_input ) environ_vars = res [ 'environ_vars' ] signature_vars = res [ 'signature_vars' ] changed_vars = res [ 'changed_vars' ] environ_vars |= env . parameter_vars & signature_vars if res [ 'changed_vars' ] : if 'provides' in section . options : if isinstance ( section . options [ 'provides' ] , str ) : section . options . set ( 'provides' , [ section . options [ 'provides' ] ] ) else : section . options . set ( 'provides' , [ ] ) section . options . set ( 'provides' , section . options [ 'provides' ] + [ sos_variable ( var ) for var in changed_vars ] ) context = { '__signature_vars__' : signature_vars , '__environ_vars__' : environ_vars , '__changed_vars__' : changed_vars , '__dynamic_depends__' : res [ 'dynamic_depends' ] , '__dynamic_input__' : res [ 'dynamic_input' ] } if idx == 0 : context [ '__step_output__' ] = env . sos_dict [ '__step_output__' ] if idx == len ( sections ) - 1 and satisfies is not None : res [ 'step_output' ] . extend ( satisfies ) dag . add_step ( section . uuid , section . step_name ( ) , idx , res [ 'step_input' ] , res [ 'step_depends' ] , res [ 'step_output' ] , context = context ) default_input = res [ 'step_output' ] return len ( sections )
Add a forward - workflow return number of nodes added
53,490
# Build a DAG from static analysis of the workflow sections: add the forward
# workflow, resolve dangling targets (raising UnknownTarget if a requested
# target has no producing step), reduce to the subgraph for `targets` when
# given, reject circular dependencies, and save the DAG to the configured file.
def initialize_dag ( self , targets : Optional [ List [ str ] ] = [ ] , nested : bool = False ) -> SoS_DAG : self . reset_dict ( ) dag = SoS_DAG ( name = self . md5 ) targets = sos_targets ( targets ) self . add_forward_workflow ( dag , self . workflow . sections ) if self . resolve_dangling_targets ( dag , targets ) == 0 : if targets : raise UnknownTarget ( f'No step to generate target {targets}.' ) dag . build ( ) if targets : dag = dag . subgraph_from ( targets ) cycle = dag . circular_dependencies ( ) if cycle : raise RuntimeError ( f'Circular dependency detected {cycle}. It is likely a later step produces input of a previous step.' ) dag . save ( env . config [ 'output_dag' ] ) return dag
Create a DAG by analyzing sections statically .
53,491
# Produce a short, human-readable representation of an arbitrary object:
# truncates long strings, elides long sequences/dicts/key-views to their first
# item(s) plus a count, honors a __short_repr__ hook, and caps everything else
# at ~40 characters.  noneAsNA renders None as 'unspecified'.
def short_repr ( obj , noneAsNA = False ) : if obj is None : return 'unspecified' if noneAsNA else 'None' elif isinstance ( obj , str ) and len ( obj ) > 80 : return '{}...{}' . format ( obj [ : 60 ] . replace ( '\n' , '\\n' ) , obj [ - 20 : ] . replace ( '\n' , '\\n' ) ) elif isinstance ( obj , ( str , int , float , bool ) ) : return repr ( obj ) elif hasattr ( obj , '__short_repr__' ) : return obj . __short_repr__ ( ) elif isinstance ( obj , Sequence ) : if len ( obj ) == 0 : return '[]' elif len ( obj ) == 1 : return f'{short_repr(obj[0])}' elif len ( obj ) == 2 : return f'{short_repr(obj[0])}, {short_repr(obj[1])}' else : return f'{short_repr(obj[0])}, {short_repr(obj[1])}, ... ({len(obj)} items)' elif isinstance ( obj , dict ) : if not obj : return '' elif len ( obj ) == 1 : first_key = list ( obj . keys ( ) ) [ 0 ] return f'{short_repr(first_key)!r}:{short_repr(obj[first_key])!r}' else : first_key = list ( obj . keys ( ) ) [ 0 ] return f'{short_repr(first_key)}:{short_repr(obj[first_key])}, ... ({len(obj)} items)' elif isinstance ( obj , KeysView ) : if not obj : return '' elif len ( obj ) == 1 : return short_repr ( next ( iter ( obj ) ) ) else : return f'{short_repr(next(iter(obj)))}, ... ({len(obj)} items)' else : ret = str ( obj ) if len ( ret ) > 40 : return f'{repr(obj)[:35]}...' else : return ret
Return a short representation of obj for clarity .
53,492
# Return the last n lines of a file, growing a seek-back window estimated from
# an average line length until enough lines are read; optionally converts ANSI
# escapes to HTML.  NOTE: the non-HTML path appends a trailing newline, the
# HTML path does not.
def tail_of_file ( filename , n , ansi2html = False ) : avg_line_length = 74 to_read = n with open ( filename ) as f : while 1 : try : f . seek ( - ( avg_line_length * to_read ) , 2 ) except IOError : f . seek ( 0 ) pos = f . tell ( ) lines = f . read ( ) . splitlines ( ) if len ( lines ) >= to_read or pos == 0 : if ansi2html : return convertAnsi2html ( '\n' . join ( lines [ - to_read : ] ) ) return '\n' . join ( lines [ - to_read : ] ) + '\n' avg_line_length *= 1.3
Reads the last n lines from the named file, optionally converting ANSI escapes to HTML.
53,493
def sample_lines(lines, n):
    """Draw a sample of n lines from ``lines``, spaced roughly evenly.

    If there are n lines or fewer, all of them are returned joined together.
    """
    total = len(lines)
    if total <= n:
        return ''.join(lines)
    # pick one line from the middle of each of n equal-sized strata
    picked = []
    for idx in range(n):
        picked.append(lines[idx * total // n + total // (2 * n)])
    return ''.join(picked)
Draw a sample of n lines from filename largely evenly .
53,494
# Shortcut setter: store key -> value in the backing dict without the usual
# warning checks, emitting only VARIABLE-level debug logging (with labels when
# the value carries them).
def set ( self , key , value ) : if hasattr ( value , 'labels' ) : if 'VARIABLE' in env . config [ 'SOS_DEBUG' ] or 'ALL' in env . config [ 'SOS_DEBUG' ] : env . log_to_file ( 'VARIABLE' , f"Set {key} to {short_repr(value)} with labels {short_repr(value.labels)}" ) else : if 'VARIABLE' in env . config [ 'SOS_DEBUG' ] or 'ALL' in env . config [ 'SOS_DEBUG' ] : env . log_to_file ( 'VARIABLE' , f"Set {key} to {short_repr(value)} of type {value.__class__.__name__}" ) self . _dict [ key ] = value
A short cut to set value to key without triggering any logging or warning message .
53,495
# dict.update-style bulk assignment into the backing dict, logging each
# key/value pair when verbosity > 2.
def update ( self , obj ) : self . _dict . update ( obj ) for k , v in obj . items ( ) : if env . verbosity > 2 : self . _log ( k , v )
Redefine update to trigger logging message
53,496
# Execute one substep and push its result dict over a cached ZMQ PUSH socket.
# The socket is (re)connected only when the configured result_push_socket port
# differs from the cached one.  The asserts document the required keys in
# proc_vars and config rather than validating untrusted input.
def execute_substep ( stmt , global_def , global_vars , task = '' , task_params = '' , proc_vars = { } , shared_vars = [ ] , config = { } ) : assert not env . zmq_context . closed assert 'workflow_id' in proc_vars assert 'step_id' in proc_vars assert '_input' in proc_vars assert '_output' in proc_vars assert '_depends' in proc_vars assert 'step_output' in proc_vars assert '_index' in proc_vars assert 'result_push_socket' in config [ "sockets" ] if env . result_socket_port is not None and env . result_socket_port != config [ "sockets" ] [ "result_push_socket" ] : close_socket ( env . result_socket ) env . result_socket = None if env . result_socket is None : env . result_socket = create_socket ( env . zmq_context , zmq . PUSH ) env . result_socket_port = config [ "sockets" ] [ "result_push_socket" ] env . result_socket . connect ( f'tcp://127.0.0.1:{env.result_socket_port}' ) res = _execute_substep ( stmt = stmt , global_def = global_def , global_vars = global_vars , task = task , task_params = task_params , proc_vars = proc_vars , shared_vars = shared_vars , config = config ) env . result_socket . send_pyobj ( res )
Execute a substep with specific input etc
53,497
# List (workflow_id, tracked_files) pairs from the signature database; returns
# [] with a warning on database errors.  SECURITY NOTE(review): `eval` is used
# to deserialize the stored repr() — safe only if the database is trusted.
def files ( self ) : try : cur = self . conn . cursor ( ) cur . execute ( 'SELECT id, item FROM workflows WHERE entry_type = "tracked_files"' ) return [ ( x [ 0 ] , eval ( x [ 1 ] ) ) for x in cur . fetchall ( ) ] except sqlite3 . DatabaseError as e : env . logger . warning ( f'Failed to get files from signature database: {e}' ) return [ ]
Listing files related to workflows related to current directory
53,498
# Find a runnable DAG node: one with no status whose in-edges are all from
# completed nodes.  If none, poll signature_pending nodes until one's signature
# lock can be acquired, then release it and return that node.  Returns None if
# nothing is pending.  NOTE(review): if ActivityNotifier() itself raises,
# `notifier` is unbound when `finally: notifier.stop()` runs.
def find_executable ( self ) : if 'DAG' in env . config [ 'SOS_DEBUG' ] or 'ALL' in env . config [ 'SOS_DEBUG' ] : env . log_to_file ( 'DAG' , 'find_executable' ) for node in self . nodes ( ) : if node . _status is None : with_dependency = False for edge in self . in_edges ( node ) : if edge [ 0 ] . _status != 'completed' : with_dependency = True break if not with_dependency : return node pending_jobs = [ x for x in self . nodes ( ) if x . _status == 'signature_pending' ] if pending_jobs : try : notifier = ActivityNotifier ( f'Waiting for {len(pending_jobs)} pending job{"s: e.g." if len(pending_jobs) > 1 else ":"} output {short_repr(pending_jobs[0]._signature[0])} with signature file {pending_jobs[0]._signature[1] + "_"}. You can manually remove this lock file if you are certain that no other process is working on the output.' ) while True : for node in pending_jobs : lock = fasteners . InterProcessLock ( node . _signature [ 1 ] + '_' ) if lock . acquire ( blocking = False ) : lock . release ( ) node . _status = None return node time . sleep ( 0.1 ) except Exception as e : env . logger . error ( e ) finally : notifier . stop ( ) return None
Find an executable node, i.e. a node that has not been completed and has no unfinished input dependency.
53,499
# Classify dependencies not produced by any step: returns (missing, existing)
# where `missing` are depends/targets with no producer and no file on disk, and
# `existing` are those that exist on disk (only tracked for depends when
# trace_existing is configured).
def dangling ( self , targets : sos_targets ) : existing = [ ] missing = [ ] if env . config [ 'trace_existing' ] : for x in self . _all_depends_files . keys ( ) : if x not in self . _all_output_files : if x . target_exists ( ) : existing . append ( x ) else : missing . append ( x ) else : missing = [ x for x in self . _all_depends_files . keys ( ) if x not in self . _all_output_files and not x . target_exists ( ) ] for x in targets : if x not in self . _all_output_files : if x . target_exists ( 'target' ) : existing . append ( x ) else : missing . append ( x ) return missing , existing
returns 1 . missing targets which are missing from the DAG or from the provided targets 2 . existing targets of provided target list not in DAG