idx int64 0 63k | question stringlengths 61 4.03k | target stringlengths 6 1.23k |
|---|---|---|
53,400 | def unlock ( self , request , * args , ** kwargs ) : self . object = self . get_object ( ) success_url = self . get_success_url ( ) self . object . status = Topic . TOPIC_UNLOCKED self . object . save ( ) messages . success ( self . request , self . success_message ) return HttpResponseRedirect ( success_url ) | Unlocks the considered topic and retirects the user to the success URL . |
53,401 | def update_type ( self , request , * args , ** kwargs ) : self . object = self . get_object ( ) success_url = self . get_success_url ( ) self . object . type = self . target_type self . object . save ( ) messages . success ( self . request , self . success_message ) return HttpResponseRedirect ( success_url ) | Updates the type of the considered topic and retirects the user to the success URL . |
53,402 | def approve ( self , request , * args , ** kwargs ) : self . object = self . get_object ( ) success_url = self . get_success_url ( ) self . object . approved = True self . object . save ( ) messages . success ( self . request , self . success_message ) return HttpResponseRedirect ( success_url ) | Approves the considered post and retirects the user to the success URL . |
53,403 | def disapprove ( self , request , * args , ** kwargs ) : self . object = self . get_object ( ) success_url = self . get_success_url ( ) self . object . delete ( ) messages . success ( self . request , self . success_message ) return HttpResponseRedirect ( success_url ) | Disapproves the considered post and retirects the user to the success URL . |
53,404 | def poster ( self ) : user_model = get_user_model ( ) return get_object_or_404 ( user_model , pk = self . kwargs [ self . user_pk_url_kwarg ] ) | Returns the considered user . |
53,405 | def subscribe ( self , request , * args , ** kwargs ) : self . object = self . get_object ( ) self . object . subscribers . add ( request . user ) messages . success ( self . request , self . success_message ) return HttpResponseRedirect ( self . get_success_url ( ) ) | Performs the subscribe action . |
53,406 | def unsubscribe ( self , request , * args , ** kwargs ) : self . object = self . get_object ( ) self . object . subscribers . remove ( request . user ) messages . success ( self . request , self . success_message ) return HttpResponseRedirect ( self . get_success_url ( ) ) | Performs the unsubscribe action . |
53,407 | def update_topic_counter ( sender , topic , user , request , response , ** kwargs ) : topic . __class__ . _default_manager . filter ( id = topic . id ) . update ( views_count = F ( 'views_count' ) + 1 ) | Handles the update of the views counter associated with topics . |
53,408 | def get_topic ( self ) : if not hasattr ( self , 'topic' ) : self . topic = get_object_or_404 ( Topic . objects . select_related ( 'forum' ) . all ( ) , pk = self . kwargs [ 'pk' ] , ) return self . topic | Returns the topic to consider . |
53,409 | def init_attachment_cache ( self ) : if self . request . method == 'GET' : attachments_cache . delete ( self . get_attachments_cache_key ( self . request ) ) return attachments_cache_key = self . get_attachments_cache_key ( self . request ) restored_attachments_dict = attachments_cache . get ( attachments_cache_key ) i... | Initializes the attachment cache for the current view . |
53,410 | def get_post_form_kwargs ( self ) : kwargs = { 'user' : self . request . user , 'forum' : self . get_forum ( ) , 'topic' : self . get_topic ( ) , } post = self . get_post ( ) if post : kwargs . update ( { 'instance' : post } ) if self . request . method in ( 'POST' , 'PUT' ) : kwargs . update ( { 'data' : self . reques... | Returns the keyword arguments for instantiating the post form . |
53,411 | def get_attachment_formset ( self , formset_class ) : if ( self . request . forum_permission_handler . can_attach_files ( self . get_forum ( ) , self . request . user , ) ) : return formset_class ( ** self . get_attachment_formset_kwargs ( ) ) | Returns an instance of the attachment formset to be used in the view . |
53,412 | def get_attachment_formset_kwargs ( self ) : kwargs = { 'prefix' : 'attachment' , } if self . request . method in ( 'POST' , 'PUT' ) : kwargs . update ( { 'data' : self . request . POST , 'files' : self . request . FILES , } ) else : post = self . get_post ( ) attachment_queryset = Attachment . objects . filter ( post ... | Returns the keyword arguments for instantiating the attachment formset . |
53,413 | def get_forum ( self ) : pk = self . kwargs . get ( self . forum_pk_url_kwarg , None ) if not pk : return if not hasattr ( self , '_forum' ) : self . _forum = get_object_or_404 ( Forum , pk = pk ) return self . _forum | Returns the considered forum . |
53,414 | def get_topic ( self ) : pk = self . kwargs . get ( self . topic_pk_url_kwarg , None ) if not pk : return if not hasattr ( self , '_topic' ) : self . _topic = get_object_or_404 ( Topic , pk = pk ) return self . _topic | Returns the considered topic if applicable . |
53,415 | def get_post ( self ) : pk = self . kwargs . get ( self . post_pk_url_kwarg , None ) if not pk : return if not hasattr ( self , '_forum_post' ) : self . _forum_post = get_object_or_404 ( Post , pk = pk ) return self . _forum_post | Returns the considered post if applicable . |
53,416 | def get_poll_option_formset ( self , formset_class ) : if self . request . forum_permission_handler . can_create_polls ( self . get_forum ( ) , self . request . user , ) : return formset_class ( ** self . get_poll_option_formset_kwargs ( ) ) | Returns an instance of the poll option formset to be used in the view . |
53,417 | def get_poll_option_formset_kwargs ( self ) : kwargs = { 'prefix' : 'poll' , } if self . request . method in ( 'POST' , 'PUT' ) : kwargs . update ( { 'data' : self . request . POST , 'files' : self . request . FILES , } ) else : topic = self . get_topic ( ) poll_option_queryset = TopicPollOption . objects . filter ( po... | Returns the keyword arguments for instantiating the poll option formset . |
53,418 | def _remove_exts ( self , string ) : if string . lower ( ) . endswith ( ( '.png' , '.gif' , '.jpg' , '.bmp' , '.jpeg' , '.ppm' , '.datauri' ) ) : format = string [ string . rfind ( '.' ) + 1 : len ( string ) ] if format . lower ( ) == 'jpg' : format = 'jpeg' self . format = format string = string [ 0 : string . rfind (... | Sets the string to create the Robohash |
53,419 | def _get_list_of_files ( self , path ) : chosen_files = [ ] directories = [ ] for root , dirs , files in natsort . natsorted ( os . walk ( path , topdown = False ) ) : for name in dirs : if name [ : 1 ] is not '.' : directories . append ( os . path . join ( root , name ) ) directories = natsort . natsorted ( directorie... | Go through each subdirectory of path and choose one file from each to use in our hash . Continue to increase self . iter so we use a different slot of randomness each time . |
53,420 | def assemble ( self , roboset = None , color = None , format = None , bgset = None , sizex = 300 , sizey = 300 ) : if roboset == 'any' : roboset = self . sets [ self . hasharray [ 1 ] % len ( self . sets ) ] elif roboset in self . sets : roboset = roboset else : roboset = self . sets [ 0 ] if roboset == 'set1' : if col... | Build our Robot! Returns the robot image itself . |
53,421 | def collect_members ( module_to_name ) : members = { } for module , module_name in module_to_name . items ( ) : all_names = getattr ( module , "__all__" , None ) for name , member in inspect . getmembers ( module ) : if ( ( inspect . isfunction ( member ) or inspect . isclass ( member ) ) and not _always_drop_symbol_re... | Collect all symbols from a list of modules . |
53,422 | def _get_anchor ( module_to_name , fullname ) : if not _anchor_re . match ( fullname ) : raise ValueError ( "'%s' is not a valid anchor" % fullname ) anchor = fullname for module_name in module_to_name . values ( ) : if fullname . startswith ( module_name + "." ) : rest = fullname [ len ( module_name ) + 1 : ] if len (... | Turn a full member name into an anchor . |
53,423 | def write_libraries ( dir , libraries ) : files = [ open ( os . path . join ( dir , k ) , "w" ) for k , _ in libraries ] for f , ( _ , v ) in zip ( files , libraries ) : v . write_markdown_to_file ( f ) for f , ( _ , v ) in zip ( files , libraries ) : v . write_other_members ( f ) f . close ( ) | Write a list of libraries to disk . |
53,424 | def write_markdown_to_file ( self , f ) : print ( "---" , file = f ) print ( "---" , file = f ) print ( "<!-- This file is machine generated: DO NOT EDIT! , file = f ) print ( "" , file = f ) print ( "# TensorFlow Python reference documentation" , file = f ) print ( "" , file = f ) fullname_f = lambda name : self . _m... | Writes this index to file f . |
53,425 | def _should_include_member ( self , name , member ) : if _always_drop_symbol_re . match ( name ) : return False if name in self . _exclude_symbols : return False return True | Returns True if this member should be included in the document . |
53,426 | def get_imported_modules ( self , module ) : for name , member in inspect . getmembers ( module ) : if inspect . ismodule ( member ) : yield name , member | Returns the list of modules imported from module . |
53,427 | def get_class_members ( self , cls_name , cls ) : for name , member in inspect . getmembers ( cls ) : is_method = inspect . ismethod ( member ) or inspect . isfunction ( member ) if not ( is_method or isinstance ( member , property ) ) : continue if ( ( is_method and member . __name__ == "__init__" ) or self . _should_... | Returns the list of class members to document in cls . |
53,428 | def _generate_signature_for_function ( self , func ) : args_list = [ ] argspec = inspect . getargspec ( func ) first_arg_with_default = ( len ( argspec . args or [ ] ) - len ( argspec . defaults or [ ] ) ) for arg in argspec . args [ : first_arg_with_default ] : if arg == "self" : continue args_list . append ( arg ) if... | Given a function returns a string representing its args . |
53,429 | def _remove_docstring_indent ( self , docstring ) : docstring = docstring or "" lines = docstring . strip ( ) . split ( "\n" ) min_indent = len ( docstring ) for l in lines [ 1 : ] : l = l . rstrip ( ) if l : i = 0 while i < len ( l ) and l [ i ] == " " : i += 1 if i < min_indent : min_indent = i for i in range ( 1 , l... | Remove indenting . |
53,430 | def _print_formatted_docstring ( self , docstring , f ) : lines = self . _remove_docstring_indent ( docstring ) i = 0 def _at_start_of_section ( ) : l = lines [ i ] match = _section_re . match ( l ) if match and i + 1 < len ( lines ) and lines [ i + 1 ] . startswith ( " " ) : return match . group ( 1 ) else : return No... | Formats the given docstring as Markdown and prints it to f . |
53,431 | def _print_function ( self , f , prefix , fullname , func ) : heading = prefix + " `" + fullname if not isinstance ( func , property ) : heading += self . _generate_signature_for_function ( func ) heading += "` {#%s}" % _get_anchor ( self . _module_to_name , fullname ) print ( heading , file = f ) print ( "" , file = f... | Prints the given function to f . |
53,432 | def _write_member_markdown_to_file ( self , f , prefix , name , member ) : if ( inspect . isfunction ( member ) or inspect . ismethod ( member ) or isinstance ( member , property ) ) : print ( "- - -" , file = f ) print ( "" , file = f ) self . _print_function ( f , prefix , name , member ) print ( "" , file = f ) elif... | Print member to f . |
53,433 | def _write_class_markdown_to_file ( self , f , name , cls ) : methods = dict ( self . get_class_members ( name , cls ) ) num_methods = len ( methods ) try : self . _write_docstring_markdown_to_file ( f , "####" , inspect . getdoc ( cls ) , methods , { } ) except ValueError as e : raise ValueError ( str ( e ) + " in cla... | Write the class doc to f . |
53,434 | def write_markdown_to_file ( self , f ) : print ( "---" , file = f ) print ( "---" , file = f ) print ( "<!-- This file is machine generated: DO NOT EDIT! , file = f ) print ( "" , file = f ) print ( "#" , self . _title , file = f ) if self . _prefix : print ( self . _prefix , file = f ) print ( "[TOC]" , file = f ) p... | Prints this library to file f . |
53,435 | def write_other_members ( self , f , catch_all = False ) : if catch_all : names = self . _members . items ( ) else : names = inspect . getmembers ( self . _module ) leftovers = [ ] for name , _ in names : if name in self . _members and name not in self . _documented : leftovers . append ( name ) if leftovers : print ( ... | Writes the leftover members to f . |
53,436 | def assert_no_leftovers ( self ) : leftovers = [ ] for name in self . _members . keys ( ) : if name in self . _members and name not in self . _documented : leftovers . append ( name ) if leftovers : raise RuntimeError ( "%s: undocumented members: %s" % ( self . _title , ", " . join ( leftovers ) ) ) | Generate an error if there are leftover members . |
53,437 | def start_http_server ( self , port , host = '0.0.0.0' , endpoint = None ) : if self . should_start_http_server ( ) : pc_start_http_server ( port , host , registry = self . registry ) | Start an HTTP server for exposing the metrics if the should_start_http_server function says we should otherwise just return . Uses the implementation from prometheus_client rather than a Flask app . |
53,438 | def init_app ( self , app ) : if self . path : self . register_endpoint ( self . path , app ) if self . _export_defaults : self . export_defaults ( self . buckets , self . group_by , self . _defaults_prefix , app ) | This callback can be used to initialize an application for the use with this prometheus reporter setup . |
53,439 | def register_endpoint ( self , path , app = None ) : if is_running_from_reloader ( ) and not os . environ . get ( 'DEBUG_METRICS' ) : return if app is None : app = self . app or current_app @ app . route ( path ) @ self . do_not_track ( ) def prometheus_metrics ( ) : from prometheus_client import multiprocess , Collect... | Register the metrics endpoint on the Flask application . |
53,440 | def start_http_server ( self , port , host = '0.0.0.0' , endpoint = '/metrics' ) : if is_running_from_reloader ( ) : return app = Flask ( 'prometheus-flask-exporter-%d' % port ) self . register_endpoint ( endpoint , app ) def run_app ( ) : app . run ( host = host , port = port ) thread = threading . Thread ( target = r... | Start an HTTP server for exposing the metrics . This will be an individual Flask application not the one registered with this class . |
53,441 | def histogram ( self , name , description , labels = None , ** kwargs ) : return self . _track ( Histogram , lambda metric , time : metric . observe ( time ) , kwargs , name , description , labels , registry = self . registry ) | Use a Histogram to track the execution time and invocation count of the method . |
53,442 | def summary ( self , name , description , labels = None , ** kwargs ) : return self . _track ( Summary , lambda metric , time : metric . observe ( time ) , kwargs , name , description , labels , registry = self . registry ) | Use a Summary to track the execution time and invocation count of the method . |
53,443 | def gauge ( self , name , description , labels = None , ** kwargs ) : return self . _track ( Gauge , lambda metric , time : metric . dec ( ) , kwargs , name , description , labels , registry = self . registry , before = lambda metric : metric . inc ( ) ) | Use a Gauge to track the number of invocations in progress for the method . |
53,444 | def counter ( self , name , description , labels = None , ** kwargs ) : return self . _track ( Counter , lambda metric , time : metric . inc ( ) , kwargs , name , description , labels , registry = self . registry ) | Use a Counter to track the total number of invocations of the method . |
53,445 | def _track ( metric_type , metric_call , metric_kwargs , name , description , labels , registry , before = None ) : if labels is not None and not isinstance ( labels , dict ) : raise TypeError ( 'labels needs to be a dictionary of {labelname: callable}' ) label_names = labels . keys ( ) if labels else tuple ( ) parent_... | Internal method decorator logic . |
53,446 | def do_not_track ( ) : def decorator ( f ) : @ functools . wraps ( f ) def func ( * args , ** kwargs ) : request . prom_do_not_track = True return f ( * args , ** kwargs ) return func return decorator | Decorator to skip the default metrics collection for the method . |
53,447 | def info ( self , name , description , labelnames = None , labelvalues = None , ** labels ) : if labels and labelnames : raise ValueError ( 'Cannot have labels defined as `dict` ' 'and collections of names and values' ) if labelnames is None and labels : labelnames = labels . keys ( ) elif labelnames and labelvalues : ... | Report any information as a Prometheus metric . This will create a Gauge with the initial value of 1 . |
53,448 | def inve ( env , command , * args , ** kwargs ) : with temp_environ ( ) : os . environ [ 'VIRTUAL_ENV' ] = str ( workon_home / env ) os . environ [ 'PATH' ] = compute_path ( env ) unsetenv ( 'PYTHONHOME' ) unsetenv ( '__PYVENV_LAUNCHER__' ) try : return check_call ( [ command ] + list ( args ) , shell = windows , ** kw... | Run a command in the given virtual environment . |
53,449 | def ls_cmd ( argv ) : parser = argparse . ArgumentParser ( ) p_group = parser . add_mutually_exclusive_group ( ) p_group . add_argument ( '-b' , '--brief' , action = 'store_false' ) p_group . add_argument ( '-l' , '--long' , action = 'store_true' ) args = parser . parse_args ( argv ) lsvirtualenv ( args . long ) | List available environments . |
53,450 | def workon_cmd ( argv ) : parser = argparse . ArgumentParser ( prog = 'pew workon' ) parser . add_argument ( 'envname' , nargs = '?' ) parser . add_argument ( '-n' , '--no-cd' , action = 'store_true' , help = ( 'Do not change working directory to project directory after ' 'activating virtualenv.' ) ) args = parser . pa... | List or change working virtual environments . |
53,451 | def add_cmd ( argv ) : parser = argparse . ArgumentParser ( ) parser . add_argument ( '-d' , dest = 'remove' , action = 'store_true' ) parser . add_argument ( 'dirs' , nargs = '+' ) args = parser . parse_args ( argv ) extra_paths = sitepackages_dir ( ) / '_virtualenv_path_extensions.pth' new_paths = [ os . path . abspa... | Add the specified directories to the Python path for the currently active virtualenv . |
53,452 | def lssitepackages_cmd ( argv ) : site = sitepackages_dir ( ) print ( * sorted ( site . iterdir ( ) ) , sep = os . linesep ) extra_paths = site / '_virtualenv_path_extensions.pth' if extra_paths . exists ( ) : print ( 'from _virtualenv_path_extensions.pth:' ) with extra_paths . open ( ) as extra : print ( '' . join ( e... | Show the content of the site - packages directory of the current virtualenv . |
53,453 | def toggleglobalsitepackages_cmd ( argv ) : quiet = argv == [ '-q' ] site = sitepackages_dir ( ) ngsp_file = site . parent / 'no-global-site-packages.txt' if ngsp_file . exists ( ) : ngsp_file . unlink ( ) if not quiet : print ( 'Enabled global site-packages' ) else : with ngsp_file . open ( 'w' ) : if not quiet : prin... | Toggle the current virtualenv between having and not having access to the global site - packages . |
53,454 | def cp_cmd ( argv ) : parser = argparse . ArgumentParser ( ) parser . add_argument ( 'source' ) parser . add_argument ( 'target' , nargs = '?' ) parser . add_argument ( '-d' , '--dont-activate' , action = 'store_false' , default = True , dest = 'activate' , help = "After \ creation, continue with... | Duplicate the named virtualenv to make a new one . |
53,455 | def rename_cmd ( argv ) : parser = argparse . ArgumentParser ( ) parser . add_argument ( 'source' ) parser . add_argument ( 'target' ) pargs = parser . parse_args ( argv ) copy_virtualenv_project ( pargs . source , pargs . target ) return rmvirtualenvs ( [ pargs . source ] ) | Rename a virtualenv |
53,456 | def setproject_cmd ( argv ) : args = dict ( enumerate ( argv ) ) project = os . path . abspath ( args . get ( 1 , '.' ) ) env = args . get ( 0 , os . environ . get ( 'VIRTUAL_ENV' ) ) if not env : sys . exit ( 'pew setproject [virtualenv] [project_path]' ) if not ( workon_home / env ) . exists ( ) : sys . exit ( "Envir... | Given a virtualenv directory and a project directory set the \ virtualenv up to be associated with the project . |
53,457 | def getproject_cmd ( argv ) : parser = argparse . ArgumentParser ( description = "Print an environment's project directory." , ) parser . add_argument ( 'envname' , nargs = '?' , default = os . environ . get ( 'VIRTUAL_ENV' ) , help = ( 'The name of the environment to return the project directory ' 'for. If omitted, w... | Print a virtualenv s project directory if set . |
53,458 | def mkproject_cmd ( argv ) : if '-l' in argv or '--list' in argv : templates = [ t . name [ 9 : ] for t in workon_home . glob ( "template_*" ) ] print ( "Available project templates:" , * templates , sep = '\n' ) return parser = mkvirtualenv_argparser ( ) parser . add_argument ( 'envname' ) parser . add_argument ( '-t'... | Create a new project directory and its associated virtualenv . |
53,459 | def mktmpenv_cmd ( argv ) : parser = mkvirtualenv_argparser ( ) env = '.' while ( workon_home / env ) . exists ( ) : env = hex ( random . getrandbits ( 64 ) ) [ 2 : - 1 ] args , rest = parser . parse_known_args ( argv ) mkvirtualenv ( env , args . python , args . packages , requirements = args . requirements , rest = r... | Create a temporary virtualenv . |
53,460 | def inall_cmd ( argv ) : envs = lsenvs ( ) errors = False for env in envs : print ( "\n%s:" % env ) try : inve ( env , * argv ) except CalledProcessError as e : errors = True err ( e ) sys . exit ( errors ) | Run a command in each virtualenv . |
53,461 | def in_cmd ( argv ) : if len ( argv ) == 1 : return workon_cmd ( argv ) parse_envname ( argv , lambda : sys . exit ( 'You must provide a valid virtualenv to target' ) ) return inve ( * argv ) | Run a command in the given virtualenv . |
53,462 | def restore_cmd ( argv ) : if len ( argv ) < 1 : sys . exit ( 'You must provide a valid virtualenv to target' ) env = argv [ 0 ] path = workon_home / env py = path / env_bin_dir / ( 'python.exe' if windows else 'python' ) exact_py = py . resolve ( ) . name return check_call ( [ sys . executable , "-m" , "virtualenv" , ... | Try to restore a broken virtualenv by reinstalling the same python version on top of it |
53,463 | def dir_cmd ( argv ) : env = parse_envname ( argv , lambda : sys . exit ( 'You must provide a valid virtualenv to target' ) ) print ( workon_home / env ) | Print the path for the virtualenv directory |
53,464 | def install_cmd ( argv ) : installer = InstallCommand ( ) options , versions = installer . parser . parse_args ( argv ) if len ( versions ) != 1 : installer . parser . print_help ( ) sys . exit ( 1 ) else : try : actual_installer = PythonInstaller . get_installer ( versions [ 0 ] , options ) return actual_installer . i... | Use Pythonz to download and build the specified Python version |
53,465 | def version_cmd ( argv ) : import pkg_resources try : __version__ = pkg_resources . get_distribution ( 'pew' ) . version except pkg_resources . DistributionNotFound : __version__ = 'unknown' print ( 'Setuptools has some issues here, failed to get our own package.' , file = sys . stderr ) print ( __version__ ) | Prints current pew version |
53,466 | def csstext_to_pairs ( csstext ) : with csstext_to_pairs . _lock : return sorted ( [ ( prop . name . strip ( ) , format_value ( prop ) ) for prop in cssutils . parseStyle ( csstext ) ] , key = itemgetter ( 0 ) , ) | csstext_to_pairs takes css text and make it to list of tuple of key value . |
53,467 | def merge_styles ( inline_style , new_styles , classes , remove_unset_properties = False ) : styles = OrderedDict ( [ ( "" , OrderedDict ( ) ) ] ) for pc in set ( classes ) : styles [ pc ] = OrderedDict ( ) for i , style in enumerate ( new_styles ) : for k , v in style : styles [ classes [ i ] ] [ k ] = v if inline_sty... | This will merge all new styles where the order is important The last one will override the first When that is done it will apply old inline style again The old inline style is always important and override all new ones . The inline style must be valid . |
53,468 | def make_important ( bulk ) : return ";" . join ( "%s !important" % p if not p . endswith ( "!important" ) else p for p in bulk . split ( ";" ) ) | makes every property in a string !important . |
53,469 | def capitalize_float_margin ( css_body ) : def _capitalize_property ( match ) : return "{0}:{1}{2}" . format ( match . group ( "property" ) . capitalize ( ) , match . group ( "value" ) , match . group ( "terminator" ) , ) return _lowercase_margin_float_rule . sub ( _capitalize_property , css_body ) | Capitalize float and margin CSS property names |
53,470 | def _load_external ( self , url ) : if url . startswith ( "//" ) : if self . base_url and "https://" in self . base_url : url = "https:" + url else : url = "http:" + url if url . startswith ( "http://" ) or url . startswith ( "https://" ) : css_body = self . _load_external_url ( url ) else : stylefile = url if not os .... | loads an external stylesheet from a remote url or local path |
53,471 | def _css_rules_to_string ( self , rules ) : lines = [ ] for item in rules : if isinstance ( item , tuple ) : k , v = item lines . append ( "%s {%s}" % ( k , make_important ( v ) ) ) else : for rule in item . cssRules : if isinstance ( rule , ( cssutils . css . csscomment . CSSComment , cssutils . css . cssunknownrule .... | given a list of css rules returns a css string |
53,472 | def check_workers ( self ) : if time . time ( ) - self . _worker_alive_time > 5 : self . _worker_alive_time = time . time ( ) [ worker . join ( ) for worker in self . _workers if not worker . is_alive ( ) ] self . _workers = [ worker for worker in self . _workers if worker . is_alive ( ) ] if len ( self . _workers ) < ... | Kill workers that have been pending for a while and check if all workers are alive . |
53,473 | def kill_all ( self ) : while self . _num_workers > 0 and self . _worker_backend_socket . poll ( 1000 ) : msg = self . _worker_backend_socket . recv_pyobj ( ) self . _worker_backend_socket . send_pyobj ( None ) self . _num_workers -= 1 self . report ( f'Kill {msg[1:]}' ) [ worker . join ( ) for worker in self . _worker... | Kill all workers |
53,474 | def _install ( self , name , autoinstall ) : import importlib import pkg_resources spam_spec = importlib . util . find_spec ( name ) reinstall = False if spam_spec is not None : if self . _version : mod = importlib . __import__ ( name ) if hasattr ( mod , '__version__' ) : ver = mod . __version__ else : try : ver = pkg... | Check existence of Python module and install it using command pip install if necessary . |
53,475 | def execute_task ( task_id , verbosity = None , runmode = 'run' , sigmode = None , monitor_interval = 5 , resource_monitor_interval = 60 ) : tf = TaskFile ( task_id ) tf . status = 'running' try : signal . signal ( signal . SIGTERM , signal_handler ) res = _execute_task ( task_id , verbosity , runmode , sigmode , monit... | Execute single or master task return a dictionary |
53,476 | def textMD5 ( text ) : m = hash_md5 ( ) if isinstance ( text , str ) : m . update ( text . encode ( ) ) else : m . update ( text ) return m . hexdigest ( ) | Get md5 of a piece of text |
53,477 | def objectMD5 ( obj ) : if hasattr ( obj , 'target_name' ) : return obj . target_name ( ) try : return textMD5 ( pickle . dumps ( obj ) ) except : return '' | Get md5 of an object |
53,478 | def fileMD5 ( filename , partial = True ) : filesize = os . path . getsize ( filename ) md5 = hash_md5 ( ) block_size = 2 ** 20 try : if ( not partial ) or filesize < 2 ** 24 : with open ( filename , 'rb' ) as f : while True : data = f . read ( block_size ) if not data : break md5 . update ( data ) else : count = 16 wi... | Calculate partial MD5 basically the first and last 8M of the file for large files . This should signicicantly reduce the time spent on the creation and comparison of file signature when dealing with large bioinformat ics datasets . |
53,479 | def target_signature ( self ) : if self . exists ( ) : if not self . _md5 : self . _md5 = fileMD5 ( self ) return ( os . path . getmtime ( self ) , os . path . getsize ( self ) , self . _md5 ) elif ( self + '.zapped' ) . is_file ( ) : with open ( self + '.zapped' ) as sig : line = sig . readline ( ) _ , mtime , size , ... | Return file signature |
53,480 | def validate ( self , sig = None ) : if sig is not None : sig_mtime , sig_size , sig_md5 = sig else : try : with open ( self . sig_file ( ) ) as sig : sig_mtime , sig_size , sig_md5 = sig . read ( ) . strip ( ) . split ( ) except : return False if not self . exists ( ) : if ( self + '.zapped' ) . is_file ( ) : with ope... | Check if file matches its signature |
53,481 | def write_sig ( self ) : if not self . _md5 : self . _md5 = fileMD5 ( self ) with open ( self . sig_file ( ) , 'w' ) as sig : sig . write ( f'{os.path.getmtime(self)}\t{os.path.getsize(self)}\t{self._md5}' ) | Write signature to sig store |
53,482 | def remove_targets ( self , type , kept = None ) : if kept is None : kept = [ i for i , x in enumerate ( self . _targets ) if not isinstance ( x , type ) ] if len ( kept ) == len ( self . _targets ) : return self self . _targets = [ self . _targets [ x ] for x in kept ] self . _labels = [ self . _labels [ x ] for x in ... | Remove targets of certain type |
53,483 | def resolve_remote ( self ) : for idx , target in enumerate ( self . _targets ) : if isinstance ( target , remote ) : resolved = target . resolve ( ) if isinstance ( resolved , str ) : resolved = interpolate ( resolved , env . sos_dict . dict ( ) ) self . _targets [ idx ] = file_target ( resolved ) . set ( ** target . ... | If target is of remote type resolve it |
53,484 | def _handle_paired_with ( self , paired_with ) : if paired_with is None or not paired_with : var_name = [ ] var_value = [ ] elif isinstance ( paired_with , str ) : var_name = [ '_' + paired_with ] if paired_with not in env . sos_dict : raise ValueError ( f'Variable {paired_with} does not exist.' ) var_value = [ env . s... | Handle input option paired_with |
53,485 | def _handle_group_with ( self , group_with ) : if group_with is None or not group_with : var_name = [ ] var_value = [ ] elif isinstance ( group_with , str ) : var_name = [ '_' + group_with ] if group_with not in env . sos_dict : raise ValueError ( f'Variable {group_with} does not exist.' ) var_value = [ env . sos_dict ... | Handle input option group_with |
53,486 | def _handle_extract_pattern ( self , pattern ) : if pattern is None or not pattern : patterns = [ ] elif isinstance ( pattern , str ) : patterns = [ pattern ] elif isinstance ( pattern , Iterable ) : patterns = pattern else : raise ValueError ( f'Unacceptable value for parameter pattern: {pattern}' ) for pattern in pat... | Handle input option pattern |
53,487 | def write ( self ) : if not self . output_files . valid ( ) : raise ValueError ( f'Cannot write signature with undetermined output {self.output_files}' ) else : if 'TARGET' in env . config [ 'SOS_DEBUG' ] or 'ALL' in env . config [ 'SOS_DEBUG' ] : env . log_to_file ( 'TARGET' , f'write signature {self.sig_id} with outp... | Write signature file with signature of script input output and dependent files . Because local input and output files can only be determined after the execution of workflow . They are not part of the construction . |
53,488 | def clear_output ( output = None ) : for target in env . sos_dict [ '_output' ] if output is None else output : if isinstance ( target , file_target ) and target . exists ( ) : try : target . unlink ( ) except Exception as e : env . logger . warning ( f'Failed to remove {target}: {e}' ) | Remove file targets in _output when a step fails to complete |
53,489 | def add_forward_workflow ( self , dag , sections , satisfies = None ) : dag . new_forward_workflow ( ) if 'DAG' in env . config [ 'SOS_DEBUG' ] or 'ALL' in env . config [ 'SOS_DEBUG' ] : env . log_to_file ( 'DAG' , f'Adding mini-workflow with {len(sections)} sections' ) default_input : sos_targets = sos_targets ( [ ] )... | Add a forward - workflow return number of nodes added |
53,490 | def initialize_dag ( self , targets : Optional [ List [ str ] ] = [ ] , nested : bool = False ) -> SoS_DAG : self . reset_dict ( ) dag = SoS_DAG ( name = self . md5 ) targets = sos_targets ( targets ) self . add_forward_workflow ( dag , self . workflow . sections ) if self . resolve_dangling_targets ( dag , targets ) =... | Create a DAG by analyzing sections statically . |
53,491 | def short_repr ( obj , noneAsNA = False ) : if obj is None : return 'unspecified' if noneAsNA else 'None' elif isinstance ( obj , str ) and len ( obj ) > 80 : return '{}...{}' . format ( obj [ : 60 ] . replace ( '\n' , '\\n' ) , obj [ - 20 : ] . replace ( '\n' , '\\n' ) ) elif isinstance ( obj , ( str , int , float , b... | Return a short representation of obj for clarity . |
53,492 | def tail_of_file ( filename , n , ansi2html = False ) : avg_line_length = 74 to_read = n with open ( filename ) as f : while 1 : try : f . seek ( - ( avg_line_length * to_read ) , 2 ) except IOError : f . seek ( 0 ) pos = f . tell ( ) lines = f . read ( ) . splitlines ( ) if len ( lines ) >= to_read or pos == 0 : if an... | Reads n lines from f with an offset of offset lines .
53,493 | def sample_lines ( lines , n ) : if len ( lines ) <= n : return '' . join ( lines ) else : m = len ( lines ) return '' . join ( [ lines [ x * m // n + m // ( 2 * n ) ] for x in range ( n ) ] ) | Draw a sample of n lines from filename largely evenly . |
53,494 | def set ( self , key , value ) : if hasattr ( value , 'labels' ) : if 'VARIABLE' in env . config [ 'SOS_DEBUG' ] or 'ALL' in env . config [ 'SOS_DEBUG' ] : env . log_to_file ( 'VARIABLE' , f"Set {key} to {short_repr(value)} with labels {short_repr(value.labels)}" ) else : if 'VARIABLE' in env . config [ 'SOS_DEBUG' ] o... | A short cut to set value to key without triggering any logging or warning message . |
53,495 | def update ( self , obj ) : self . _dict . update ( obj ) for k , v in obj . items ( ) : if env . verbosity > 2 : self . _log ( k , v ) | Redefine update to trigger logging message |
53,496 | def execute_substep ( stmt , global_def , global_vars , task = '' , task_params = '' , proc_vars = { } , shared_vars = [ ] , config = { } ) : assert not env . zmq_context . closed assert 'workflow_id' in proc_vars assert 'step_id' in proc_vars assert '_input' in proc_vars assert '_output' in proc_vars assert '_depends'... | Execute a substep with specific input etc |
53,497 | def files ( self ) : try : cur = self . conn . cursor ( ) cur . execute ( 'SELECT id, item FROM workflows WHERE entry_type = "tracked_files"' ) return [ ( x [ 0 ] , eval ( x [ 1 ] ) ) for x in cur . fetchall ( ) ] except sqlite3 . DatabaseError as e : env . logger . warning ( f'Failed to get files from signature databa... | Listing files related to workflows related to current directory |
53,498 | def find_executable ( self ) : if 'DAG' in env . config [ 'SOS_DEBUG' ] or 'ALL' in env . config [ 'SOS_DEBUG' ] : env . log_to_file ( 'DAG' , 'find_executable' ) for node in self . nodes ( ) : if node . _status is None : with_dependency = False for edge in self . in_edges ( node ) : if edge [ 0 ] . _status != 'complet... | Find an executable node which means nodes that has not been completed and has no input dependency . |
53,499 | def dangling ( self , targets : sos_targets ) : existing = [ ] missing = [ ] if env . config [ 'trace_existing' ] : for x in self . _all_depends_files . keys ( ) : if x not in self . _all_output_files : if x . target_exists ( ) : existing . append ( x ) else : missing . append ( x ) else : missing = [ x for x in self .... | returns 1 . missing targets which are missing from the DAG or from the provided targets 2 . existing targets of provided target list not in DAG |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.