idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
10,200 | def filter ( self , filters ) : new_elements = [ e for e in self . elements if all ( function ( e ) for function in filters ) ] return Pileup ( self . locus , new_elements ) | Apply filters to the pileup elements and return a new Pileup with the filtered elements removed . |
10,201 | def new_task ( func ) : @ wraps ( func ) async def wrapper ( self , * args , ** kwargs ) : loop = get_event_loop ( ) loop . create_task ( func ( self , * args , ** kwargs ) ) return wrapper | Runs the decorated function in a new task |
10,202 | async def providers ( ) : for provider in settings . ANALYTICS_PROVIDERS : cls : BaseAnalytics = import_class ( provider [ 'class' ] ) yield await cls . instance ( * provider [ 'args' ] ) | Iterates over all instances of analytics provider found in configuration |
10,203 | async def page_view ( self , url : str , title : str , user_id : str , user_lang : str = '' ) -> None : raise NotImplementedError | Track the view of a page |
10,204 | def hash_user_id ( self , user_id : str ) -> str : h = sha256 ( ) h . update ( user_id . encode ( ) ) return h . hexdigest ( ) | As per the law anonymize user identifier before sending it . |
10,205 | def delete ( cls , uuid ) : to_delete = Workflow . query . get ( uuid ) db . session . delete ( to_delete ) | Delete a workflow . |
10,206 | def run_worker ( wname , data , engine_uuid_hex = None , ** kwargs ) : if 'stop_on_halt' not in kwargs : kwargs [ 'stop_on_halt' ] = False if engine_uuid_hex : engine_uuid = uuid . UUID ( hex = engine_uuid_hex ) engine = WorkflowEngine . from_uuid ( uuid = engine_uuid , ** kwargs ) else : engine = WorkflowEngine . with_name ( wname , ** kwargs ) engine . save ( ) objects = get_workflow_object_instances ( data , engine ) db . session . commit ( ) engine . process ( objects , ** kwargs ) return engine | Run a workflow by name with list of data objects . |
10,207 | def restart_worker ( uuid , ** kwargs ) : if 'stop_on_halt' not in kwargs : kwargs [ 'stop_on_halt' ] = False engine = WorkflowEngine . from_uuid ( uuid = uuid , ** kwargs ) if "data" not in kwargs : objects = workflow_object_class . query ( id_workflow = uuid ) else : data = kwargs . pop ( "data" ) if not isinstance ( data , ( list , tuple ) ) : data = [ data ] objects = get_workflow_object_instances ( data , engine ) db . session . commit ( ) engine . process ( objects , ** kwargs ) return engine | Restart workflow from beginning with given engine UUID and any data . |
10,208 | def get_workflow_object_instances ( data , engine ) : workflow_objects = [ ] data_type = engine . get_default_data_type ( ) for data_object in data : if isinstance ( data_object , workflow_object_class . _get_current_object ( ) ) : if not data_object . data_type : data_object . data_type = data_type if data_object . id : data_object . log . debug ( "Existing workflow object found for " "this object." ) if data_object . status == data_object . known_statuses . COMPLETED : data_object . status = data_object . known_statuses . INITIAL workflow_objects . append ( data_object ) else : current_obj = create_data_object_from_data ( data_object , engine , data_type ) workflow_objects . append ( current_obj ) return workflow_objects | Analyze data and create corresponding WorkflowObjects . |
10,209 | def create_data_object_from_data ( data_object , engine , data_type ) : return workflow_object_class . create ( data = data_object , id_workflow = engine . uuid , status = workflow_object_class . known_statuses . INITIAL , data_type = data_type , ) | Create a new WorkflowObject from given data and return it . |
10,210 | def _print_rst ( self , what ) : print print "Command - %s::" % what exec ( "h = self.do_%s.__doc__" % what ) h = textwrap . dedent ( h ) . replace ( "::\n\n" , "" ) h = textwrap . dedent ( h ) . replace ( "\n" , "\n " ) print h | prints the rst page of the command what |
10,211 | def load_json ( cls , data , default_rule = None , raise_error = False ) : rules = { k : _parser . parse_rule ( v , raise_error ) for k , v in json . loads ( data ) . items ( ) } return cls ( rules , default_rule ) | Allow loading of JSON rule data . |
10,212 | def from_dict ( cls , rules_dict : dict , default_rule = None , raise_error = False ) : rules = { k : _parser . parse_rule ( v , raise_error ) for k , v in rules_dict . items ( ) } return cls ( rules , default_rule ) | Allow loading of rule data from a dictionary . |
10,213 | def _set_rules ( self , rules : dict , overwrite = True ) : if not isinstance ( rules , dict ) : raise TypeError ( 'rules must be an instance of dict or Rules,' 'got %r instead' % type ( rules ) ) if overwrite : self . rules = Rules ( rules , self . default_rule ) else : self . rules . update ( rules ) | Created a new Rules object based on the provided dict of rules . |
10,214 | def load_rules ( self , force_reload = False , overwrite = True ) : if self . load_once and self . _policy_loaded : return with self . _load_lock : if self . load_once and self . _policy_loaded : return reloaded , data = _cache . read_file ( self . policy_file , force_reload = force_reload ) self . _policy_loaded = True if reloaded or not self . rules : rules = Rules . load_json ( data , self . default_rule , self . raise_error ) self . _set_rules ( rules , overwrite = overwrite ) LOG . debug ( 'Reload policy file: %s' , self . policy_file ) | Load rules from policy file or cache . |
10,215 | def enforce ( self , rule , target , creds , exc = None , * args , ** kwargs ) : self . load_rules ( ) if isinstance ( rule , checks . BaseCheck ) : result = rule ( target , creds , self , rule ) elif not self . rules : result = False else : try : result = self . rules [ rule ] ( target , creds , self , rule ) except KeyError : LOG . debug ( 'Rule [%s] does not exist' , rule ) result = False if self . raise_error and not result : if exc : raise exc ( * args , ** kwargs ) else : raise PolicyNotAuthorized ( rule , target , creds ) return result | Checks authorization of a rule against the target and credentials . |
10,216 | def get_flattened_bsp_keys_from_schema ( schema ) : keys = [ ] for key in schema . declared_fields . keys ( ) : field = schema . declared_fields [ key ] if isinstance ( field , mm . fields . Nested ) and isinstance ( field . schema , BoundSpatialPoint ) : keys . append ( "{}.{}" . format ( key , "position" ) ) return keys | Returns the flattened keys of BoundSpatialPoints in a schema |
10,217 | def lock ( self ) -> asyncio . Lock : if self . lock_key not in self . request . custom_content : self . request . custom_content [ self . lock_key ] = asyncio . Lock ( ) return self . request . custom_content [ self . lock_key ] | Return and generate if required the lock for this request . |
10,218 | async def get_value ( self ) : cc = self . request . custom_content async with self . lock : if self . content_key not in cc : cc [ self . content_key ] = await self . call_api ( ) return cc [ self . content_key ] | Get the value from the API . Make sure to use a lock in order not to fetch the value twice at the same time . |
10,219 | async def rank ( self ) -> Optional [ float ] : if not self . request . has_layer ( l . RawText ) : return tl = self . request . get_layer ( l . RawText ) matcher = Matcher ( [ tuple ( Trigram ( y ) for y in x ) for x in await self . intent . strings ( self . request ) ] ) return matcher % Trigram ( tl . text ) | If there is a text layer inside the request try to find a matching text in the specified intent . |
10,220 | def _rank_qr ( self , choices ) : from bernard . platforms . facebook import layers as fbl try : qr = self . request . get_layer ( fbl . QuickReply ) self . chosen = choices [ qr . slug ] self . slug = qr . slug if self . when is None or self . when == qr . slug : return 1.0 except KeyError : pass | Look for the QuickReply layer s slug into available choices . |
10,221 | async def _rank_text ( self , choices ) : tl = self . request . get_layer ( l . RawText ) best = 0.0 for slug , params in choices . items ( ) : strings = [ ] if params [ 'intent' ] : intent = getattr ( intents , params [ 'intent' ] ) strings += await intent . strings ( self . request ) if params [ 'text' ] : strings . append ( ( params [ 'text' ] , ) ) matcher = Matcher ( [ tuple ( Trigram ( y ) for y in x ) for x in strings ] ) score = matcher % Trigram ( await render ( tl . text , self . request ) ) if score > best : self . chosen = params self . slug = slug best = score if self . when is None or self . slug == self . when : return best | Try to match the TextLayer with choice s intents . |
10,222 | def check_recommended_attributes ( self , dataset ) : results = [ ] recommended_ctx = TestCtx ( BaseCheck . MEDIUM , 'Recommended global attributes' ) for attr in [ 'time_coverage_duration' , 'time_coverage_resolution' ] : attr_value = getattr ( dataset , attr , '' ) try : parse_duration ( attr_value ) recommended_ctx . assert_true ( True , '' ) except Exception : recommended_ctx . assert_true ( False , '{} should exist and be ISO-8601 format (example: PT1M30S), currently: {}' . format ( attr , attr_value ) ) results . append ( recommended_ctx . to_result ( ) ) return results | Feature type specific check of global recommended attributes . |
10,223 | def check_dimensions ( self , dataset ) : required_ctx = TestCtx ( BaseCheck . HIGH , 'All geophysical variables are time-series incomplete feature types' ) message = '{} must be a valid timeseries feature type. It must have dimensions of (timeSeries, time).' message += ' And all coordinates must have dimensions of (timeSeries)' for variable in util . get_geophysical_variables ( dataset ) : is_valid = util . is_multi_timeseries_incomplete ( dataset , variable ) required_ctx . assert_true ( is_valid , message . format ( variable ) ) return required_ctx . to_result ( ) | Checks that the feature types of this dataset are consitent with a time series incomplete dataset |
10,224 | def read_file ( filename : str , force_reload = False ) : if force_reload : _delete_cached_file ( filename ) reloaded = False mtime = os . path . getmtime ( filename ) cache_info = CACHE . setdefault ( filename , { } ) if not cache_info or mtime > cache_info . get ( 'mtime' , 0 ) : LOG . debug ( 'Reloading cached file %s' , filename ) with open ( filename ) as fp : cache_info [ 'data' ] = fp . read ( ) cache_info [ 'mtime' ] = mtime reloaded = True return reloaded , cache_info [ 'data' ] | Read a file if it has been modified . |
10,225 | def use_model_attr ( attr ) : def use_model_validator ( instance , attribute , value ) : getattr ( instance , attr ) ( instance , attribute , value ) return use_model_validator | Use the validator set on a separate attribute on the class . |
10,226 | def is_creation_model ( instance , attribute , value ) : creation_name = value . get ( 'name' ) if not isinstance ( creation_name , str ) : instance_name = instance . __class__ . __name__ err_str = ( "'name' must be given as a string in the '{attr}' " "parameter of a '{cls}'. Given " "'{value}'" ) . format ( attr = attribute . name , cls = instance_name , value = creation_name ) raise ModelDataError ( err_str ) | Must include at least a name key . |
10,227 | def is_manifestation_model ( instance , attribute , value ) : instance_name = instance . __class__ . __name__ is_creation_model ( instance , attribute , value ) manifestation_of = value . get ( 'manifestationOfWork' ) if not isinstance ( manifestation_of , str ) : err_str = ( "'manifestationOfWork' must be given as a string in the " "'{attr}' parameter of a '{cls}'. Given " "'{value}'" ) . format ( attr = attribute . name , cls = instance_name , value = manifestation_of ) print ( err_str ) | Must include a manifestationOfWork key . |
10,228 | def add_preprocessor ( preproc ) : def decorator ( func ) : func = ScriptAdaptor . _wrap ( func ) func . _add_preprocessor ( preproc ) return func return decorator | Define a preprocessor to run after the arguments are parsed and before the function is executed when running in console script mode . |
10,229 | def add_postprocessor ( postproc ) : def decorator ( func ) : func = ScriptAdaptor . _wrap ( func ) func . _add_postprocessor ( postproc ) return func return decorator | Define a postprocessor to run after the function is executed when running in console script mode . |
10,230 | def _setup_logging ( args ) : log_conf = getattr ( args , 'logging' , None ) if log_conf : logging . config . fileConfig ( log_conf ) else : logging . basicConfig ( ) | Set up logging for the script based on the configuration specified by the logging attribute of the command line arguments . |
10,231 | def setup_limits ( conf_file , limits_file , do_reload = True , dry_run = False , debug = False ) : if dry_run : debug = True conf = config . Config ( conf_file = conf_file ) db = conf . get_database ( ) limits_key = conf [ 'control' ] . get ( 'limits_key' , 'limits' ) control_channel = conf [ 'control' ] . get ( 'channel' , 'control' ) limits_tree = etree . parse ( limits_file ) lims = [ ] for idx , lim in enumerate ( limits_tree . getroot ( ) ) : if lim . tag != 'limit' : warnings . warn ( "Unrecognized tag %r in limits file at index %d" % ( lim . tag , idx ) ) continue try : lims . append ( parse_limit_node ( db , idx , lim ) ) except Exception as exc : warnings . warn ( "Couldn't understand limit at index %d: %s" % ( idx , exc ) ) continue if debug : print >> sys . stderr , "Installing the following limits:" for lim in lims : print >> sys . stderr , " %r" % lim if not dry_run : database . limit_update ( db , limits_key , lims ) if do_reload is False : return params = [ ] if do_reload is True : pass elif ( isinstance ( do_reload , ( int , long , float ) ) or ( isinstance ( do_reload , basestring ) and do_reload . isdigit ( ) ) ) : params = [ 'spread' , do_reload ] else : params = [ str ( do_reload ) ] if debug : cmd = [ 'reload' ] cmd . extend ( params ) print >> sys . stderr , ( "Issuing command: %s" % ' ' . join ( str ( c ) for c in cmd ) ) if not dry_run : database . command ( db , control_channel , 'reload' , * params ) | Set up or update limits in the Redis database . |
10,232 | def make_limit_node ( root , limit ) : limit_node = etree . SubElement ( root , 'limit' , { 'class' : limit . _limit_full_name } ) for attr in sorted ( limit . attrs ) : desc = limit . attrs [ attr ] attr_type = desc . get ( 'type' , str ) value = getattr ( limit , attr ) if 'default' in desc : default = ( desc [ 'default' ] ( ) if callable ( desc [ 'default' ] ) else desc [ 'default' ] ) if value == default : continue attr_node = etree . SubElement ( limit_node , 'attr' , name = attr ) if attr_type == list : for val in value : val_node = etree . SubElement ( attr_node , 'value' ) val_node . text = str ( val ) elif attr_type == dict : for key , val in sorted ( value . items ( ) , key = lambda x : x [ 0 ] ) : val_node = etree . SubElement ( attr_node , 'value' , key = key ) val_node . text = str ( val ) else : attr_node . text = str ( value ) | Given a Limit object generate an XML node . |
10,233 | def dump_limits ( conf_file , limits_file , debug = False ) : conf = config . Config ( conf_file = conf_file ) db = conf . get_database ( ) limits_key = conf [ 'control' ] . get ( 'limits_key' , 'limits' ) lims = [ limits . Limit . hydrate ( db , msgpack . loads ( lim ) ) for lim in db . zrange ( limits_key , 0 , - 1 ) ] root = etree . Element ( 'limits' ) limit_tree = etree . ElementTree ( root ) for idx , lim in enumerate ( lims ) : if debug : print >> sys . stderr , "Dumping limit index %d: %r" % ( idx , lim ) make_limit_node ( root , lim ) if limits_file == '-' : limits_file = sys . stdout if debug : print >> sys . stderr , "Dumping limits to file %r" % limits_file limit_tree . write ( limits_file , xml_declaration = True , encoding = 'UTF-8' , pretty_print = True ) | Dump the current limits from the Redis database . |
10,234 | def remote_daemon ( conf_file ) : eventlet . monkey_patch ( ) conf = config . Config ( conf_file = conf_file ) daemon = remote . RemoteControlDaemon ( None , conf ) daemon . serve ( ) | Run the external control daemon . |
10,235 | def turnstile_command ( conf_file , command , arguments = [ ] , channel = None , debug = False ) : conf = config . Config ( conf_file = conf_file ) db = conf . get_database ( ) control_channel = conf [ 'control' ] . get ( 'channel' , 'control' ) command = command . lower ( ) ts_conv = False if command == 'ping' : if arguments : channel = arguments [ 0 ] else : channel = str ( uuid . uuid4 ( ) ) arguments = [ channel ] if len ( arguments ) < 2 : arguments . append ( time . time ( ) ) ts_conv = True arguments = arguments [ : 2 ] if debug : cmd = [ command ] + arguments print >> sys . stderr , ( "Issuing command: %s" % ' ' . join ( cmd ) ) database . command ( db , control_channel , command , * arguments ) if not channel : return pubsub = db . pubsub ( ) pubsub . subscribe ( channel ) try : count = 0 for msg in pubsub . listen ( ) : if debug : formatted = pprint . pformat ( msg ) print >> sys . stderr , "Received message: %s" % formatted if ( msg [ 'type' ] not in ( 'pmessage' , 'message' ) or msg [ 'channel' ] != channel ) : continue count += 1 response = msg [ 'data' ] . split ( ':' ) if ts_conv and response [ 0 ] == 'pong' : try : rtt = ( time . time ( ) - float ( response [ 2 ] ) ) * 100 response . append ( '(RTT %.2fms)' % rtt ) except Exception : pass print "Response % 5d: %s" % ( count , ' ' . join ( response ) ) except KeyboardInterrupt : pass | Issue a command to all running control daemons . |
10,236 | def compactor_daemon ( conf_file ) : eventlet . monkey_patch ( ) conf = config . Config ( conf_file = conf_file ) compactor . compactor ( conf ) | Run the compactor daemon . |
10,237 | def _wrap ( cls , func ) : if isinstance ( func , cls ) : return func return functools . update_wrapper ( cls ( func ) , func ) | Ensures that the function is wrapped in a ScriptAdaptor object . If it is not a new ScriptAdaptor will be returned . If it is the ScriptAdaptor is returned . |
10,238 | def setup_args ( self , parser ) : for args , kwargs in self . _arguments : parser . add_argument ( * args , ** kwargs ) | Set up an argparse . ArgumentParser object by adding all the arguments taken by the function . |
10,239 | def get_kwargs ( self , args ) : kwargs = { } argspec = inspect . getargspec ( self . _func ) required = set ( argspec . args [ : - len ( argspec . defaults ) ] if argspec . defaults else argspec . args ) for arg_name in argspec . args : try : kwargs [ arg_name ] = getattr ( args , arg_name ) except AttributeError : if arg_name in required : raise if argspec . keywords : for key , value in args . __dict__ . items ( ) : if key in kwargs : continue kwargs [ key ] = value return kwargs | Given a Namespace object drawn from argparse determines the keyword arguments to pass to the underlying function . Note that if the underlying function accepts all keyword arguments the dictionary returned will contain the entire contents of the Namespace object . Also note that an AttributeError will be raised if any argument required by the function is not set in the Namespace object . |
10,240 | def console ( self ) : parser = argparse . ArgumentParser ( description = self . description ) self . setup_args ( parser ) args = parser . parse_args ( ) for proc in self . _preprocess : try : proc ( args ) except Exception as exc : if getattr ( args , 'debug' , False ) : raise return str ( exc ) result = self . safe_call ( self . get_kwargs ( args ) , args ) for proc in self . _postprocess : result = proc ( args , result ) return result | Call the function as a console script . Command line arguments are parsed preprocessors are called then the function is called . If a debug attribute is set by the command line arguments and it is True any exception raised by the underlying function will be reraised ; otherwise the return value will be either the return value of the function or the text contents of the exception . |
10,241 | def import_class ( name : Text ) -> Type : parts = name . split ( '.' ) module_name = parts [ : - 1 ] class_name = parts [ - 1 ] module_ = importlib . import_module ( '.' . join ( module_name ) ) return getattr ( module_ , class_name ) | Import a class based on its full name . |
10,242 | def make_ro ( obj : Any , forgive_type = False ) : if isinstance ( obj , ( str , bytes , int , float , bool , RoDict , RoList ) ) or obj is None : return obj elif isinstance ( obj , Mapping ) : return RoDict ( obj , forgive_type ) elif isinstance ( obj , Sequence ) : return RoList ( obj , forgive_type ) elif forgive_type : return obj else : raise ValueError ( 'Trying to make read-only an object of type "{}"' . format ( obj . __class__ . __name__ ) ) | Make a json - serializable type recursively read - only |
10,243 | def make_rw ( obj : Any ) : if isinstance ( obj , RoDict ) : return { k : make_rw ( v ) for k , v in obj . items ( ) } elif isinstance ( obj , RoList ) : return [ make_rw ( x ) for x in obj ] else : return obj | Copy a RO object into a RW structure made with standard Python classes . |
10,244 | def patch_qs ( url : Text , data : Dict [ Text , Text ] ) -> Text : qs_id = 4 p = list ( urlparse ( url ) ) qs = parse_qsl ( p [ qs_id ] ) patched_qs = list ( chain ( filter ( lambda x : x [ 0 ] not in data , qs ) , data . items ( ) , ) ) p [ qs_id ] = urlencode ( patched_qs ) return urlunparse ( p ) | Given an URL change the query string to include the values specified in the dictionary . |
10,245 | def dict_is_subset ( subset : Any , full_set : Any ) -> bool : if not isinstance ( subset , full_set . __class__ ) : return False elif isinstance ( subset , dict ) : for k , v in subset . items ( ) : if k not in full_set or not dict_is_subset ( v , full_set [ k ] ) : return False return True elif isinstance ( subset , list ) : if len ( subset ) != len ( full_set ) : return False for a , b in zip ( subset , full_set ) : if not dict_is_subset ( a , b ) : return False return True else : return subset == full_set | Checks that all keys present in subset are present and have the same value in full_set . If a key is in full_set but not in subset then True will be returned anyways . |
10,246 | def _compile ( self , expression ) : x = self . RE_PYTHON_VAR . sub ( '(?:\\1,)' , expression ) x = self . RE_SPACES . sub ( '' , x ) return re . compile ( x ) | Transform a class exp into an actual regex |
10,247 | def _make_string ( self , objects : List [ Any ] ) -> Text : return '' . join ( x . __class__ . __name__ + ',' for x in objects ) | Transforms a list of objects into a matchable string |
10,248 | def match ( self , objects : List [ Any ] ) -> bool : s = self . _make_string ( objects ) m = self . _compiled_expression . match ( s ) return m is not None | Return True if the list of objects matches the expression . |
10,249 | def get_conf ( conf , sect , opt ) : argu = getattr ( args , "mambupy_" + opt . lower ( ) ) if not argu : envir = os . environ . get ( "MAMBUPY_" + opt . upper ( ) ) if not envir : try : return conf . get ( sect , opt ) except NoSectionError : return default_configs [ opt ] return envir return argu | Gets a config opt from conf file under section sect . |
10,250 | def iso8601timestamp ( T = None , nanos = True , utc = False ) : T = time . time ( ) if T is None else T Ti = math . floor ( T ) Tn = round ( ( T - Ti ) * 1e9 ) if Tn >= 1e9 : Ti += 1 Tn = 0 s = time . gmtime ( Ti ) if utc else time . localtime ( Ti ) f = time . strftime ( "%Y%m%dT%H%M%S" , s ) n = ".{:09d}" . format ( Tn ) if nanos else "" tz = "Z" if utc else time . strftime ( "%z" , s ) return f + n + tz | Get ISO8601 - formatted timestamp string . |
10,251 | def createWorkDir ( baseDir , projName , expUUID , expNames = [ ] , nanos = True , utc = False ) : projDir = os . path . join ( baseDir , projName ) byuuidDir = os . path . join ( projDir , "by-uuid" ) bytimeDir = os . path . join ( projDir , "by-time" ) bynameDir = os . path . join ( projDir , "by-name" , * expNames ) byuuidPath = os . path . join ( byuuidDir , expUUID ) os . makedirs ( byuuidDir , mode = 0o755 , exist_ok = True ) os . makedirs ( bytimeDir , mode = 0o755 , exist_ok = True ) os . makedirs ( bynameDir , mode = 0o755 , exist_ok = True ) try : preexisting = False os . makedirs ( byuuidPath , mode = 0o755 , exist_ok = False ) except FileExistsError : preexisting = True if not preexisting : expTime = iso8601timestamp ( nanos = nanos , utc = utc ) expTimeUUID = expTime + "-" + expUUID bytimePath = os . path . join ( bytimeDir , expTimeUUID ) bynamePath = os . path . join ( bynameDir , expUUID ) os . symlink ( os . path . relpath ( byuuidPath , bytimeDir ) , bytimePath , True ) os . symlink ( os . path . relpath ( byuuidPath , bynameDir ) , bynamePath , True ) with contextlib . suppress ( OSError ) : with open ( os . path . join ( baseDir , ".rsync-filter" ) , "x" ) as f : f . write ( "#\n" "# rsync filter rules.\n" "#\n" "# When the argument -F is given to rsync, the rules within will be obeyed.\n" "#\n" ) with contextlib . suppress ( OSError ) : with open ( os . path . join ( projDir , ".rsync-filter" ) , "x" ) as f : f . write ( "#\n" "# rsync filter rules.\n" "#\n" "# When the argument -F is given to rsync, the rules within will be obeyed.\n" "#\n" ) return byuuidPath | Create working directory for experiment if not existing already . |
10,252 | def humanize_timesince ( start_time ) : if not start_time : return start_time delta = local_now ( ) - start_time if delta . total_seconds ( ) < 0 : return 'a few seconds ago' num_years = delta . days // 365 if num_years > 0 : return '{} year{} ago' . format ( * ( ( num_years , 's' ) if num_years > 1 else ( num_years , '' ) ) ) num_weeks = delta . days // 7 if num_weeks > 0 : return '{} week{} ago' . format ( * ( ( num_weeks , 's' ) if num_weeks > 1 else ( num_weeks , '' ) ) ) num_days = delta . days if num_days > 0 : return '{} day{} ago' . format ( * ( ( num_days , 's' ) if num_days > 1 else ( num_days , '' ) ) ) num_hours = delta . seconds // 3600 if num_hours > 0 : return '{} hour{} ago' . format ( * ( ( num_hours , 's' ) if num_hours > 1 else ( num_hours , '' ) ) ) num_minutes = delta . seconds // 60 if num_minutes > 0 : return '{} minute{} ago' . format ( * ( ( num_minutes , 's' ) if num_minutes > 1 else ( num_minutes , '' ) ) ) return 'a few seconds ago' | Creates a string representation of time since the given start_time . |
10,253 | def humanize_timedelta ( seconds ) : hours , remainder = divmod ( seconds , 3600 ) days , hours = divmod ( hours , 24 ) minutes , seconds = divmod ( remainder , 60 ) if days : result = '{}d' . format ( days ) if hours : result += ' {}h' . format ( hours ) if minutes : result += ' {}m' . format ( minutes ) return result if hours : result = '{}h' . format ( hours ) if minutes : result += ' {}m' . format ( minutes ) return result if minutes : result = '{}m' . format ( minutes ) if seconds : result += ' {}s' . format ( seconds ) return result return '{}s' . format ( seconds ) | Creates a string representation of timedelta . |
10,254 | def start ( self ) : if self . _http_last_send is not None : raise RuntimeError ( 'HttpMock has already been started' ) super ( HttpMock , self ) . start ( ) self . _patch_last_send ( ) | Overrides default start behaviour by raising ConnectionError instead of custom requests_mock . exceptions . NoMockAddress . |
10,255 | def unhandle ( self , handler ) : h , _ , _ = self . _extract ( handler ) key = hash ( h ) with self . _hlock : if key not in self . handlers : raise ValueError ( 'Handler "%s" was not found' % str ( h ) ) handlers = self . handlers . copy ( ) del handlers [ key ] self . handlers = handlers return self | Unregisters a handler |
10,256 | def fire ( self , * args , ** kw ) : result = [ ] with self . _hlock : handlers = self . handlers if self . threads == 0 : for k in handlers : h , m , t = handlers [ k ] try : r = self . _memoize ( h , m , t , * args , ** kw ) result . append ( tuple ( r ) ) except : result . append ( ( False , self . _error ( sys . exc_info ( ) ) , h ) ) elif self . threads > 0 : queue = Queue ( ) rlock = RLock ( ) def _execute ( * args , ** kw ) : while True : try : item = queue . get ( ) if item is None : queue . task_done ( ) break h , m , t = handlers [ item ] try : r = self . _memoize ( h , m , t , * args , ** kw ) if not self . asynch : with rlock : result . append ( tuple ( r ) ) except : if not self . asynch : with rlock : result . append ( ( False , self . _error ( sys . exc_info ( ) ) , h ) ) queue . task_done ( ) except Empty : break if handlers : threads = self . _threads ( handlers = handlers ) for _ in range ( threads ) : t = Thread ( target = _execute , args = args , kwargs = kw ) t . daemon = True t . start ( ) for k in handlers : queue . put ( k ) if self . asynch : h , _ , _ = handlers [ k ] result . append ( ( None , None , h ) ) for _ in range ( threads ) : queue . put ( None ) if not self . asynch : queue . join ( ) return tuple ( result ) or None | Stores all registered handlers in a queue for processing |
10,257 | def clear ( self ) : with self . _hlock : self . handlers . clear ( ) with self . _mlock : self . memoize . clear ( ) | Discards all registered handlers and cached results |
10,258 | def _timeout ( self , timeout , handler , * args , ** kw ) : t = spawn_thread ( target = handler , args = args , kw = kw ) t . daemon = True t . start ( ) t . join ( timeout ) if not t . is_alive ( ) : if t . exc_info : return t . exc_info return t . result else : try : msg = '[%s] Execution was forcefully terminated' raise RuntimeError ( msg % t . name ) except : return sys . exc_info ( ) | Controls the time allocated for the execution of a method |
10,259 | def _threads ( self , handlers ) : if self . threads < len ( handlers ) : return self . threads return len ( handlers ) | Calculates maximum number of threads that will be started |
10,260 | def update ( self ) : if not self . _ok : self . log_error ( "Trying to restore OK mode w/ soft reset" ) self . _ok = self . _soft_reset ( ) try : self . _bus . write_byte ( self . _i2c_add , CMD_READ_TEMP_NOHOLD ) sleep ( MEASUREMENT_WAIT_TIME ) buf_t = self . _bus . read_i2c_block_data ( self . _i2c_add , CMD_READ_TEMP_HOLD , 3 ) self . _bus . write_byte ( self . _i2c_add , CMD_READ_HUM_NOHOLD ) sleep ( MEASUREMENT_WAIT_TIME ) buf_h = self . _bus . read_i2c_block_data ( self . _i2c_add , CMD_READ_HUM_HOLD , 3 ) except OSError as exc : self . _ok = False self . log_error ( "Bad reading: %s" , exc ) return if self . _crc8check ( buf_t ) : temp = ( buf_t [ 0 ] << 8 | buf_t [ 1 ] ) & 0xFFFC self . _temperature = self . _calc_temp ( temp ) if self . _crc8check ( buf_h ) : humid = ( buf_h [ 0 ] << 8 | buf_h [ 1 ] ) & 0xFFFC rh_actual = self . _calc_humid ( humid ) rh_final = self . _temp_coefficient ( rh_actual , self . _temperature ) rh_final = 100.0 if rh_final > 100 else rh_final rh_final = 0.0 if rh_final < 0 else rh_final self . _humidity = rh_final else : self . _humidity = - 255 self . _ok = False self . log_error ( "Bad CRC error with humidity" ) else : self . _temperature = - 255 self . _ok = False self . log_error ( "Bad CRC error with temperature" ) | Read raw data and calculate temperature and humidity . |
10,261 | def get_owner_access_token ( self ) : from . database import Session db_session = Session . object_session ( self ) owner = db_session . query ( User ) . filter_by ( id_ = self . owner_id ) . first ( ) return owner . access_token | Return workflow owner access token . |
10,262 | def update_workflow_status ( db_session , workflow_uuid , status , new_logs = '' , message = None ) : try : workflow = db_session . query ( Workflow ) . filter_by ( id_ = workflow_uuid ) . first ( ) if not workflow : raise Exception ( 'Workflow {0} doesn\'t exist in database.' . format ( workflow_uuid ) ) if status : workflow . status = status if new_logs : workflow . logs = ( workflow . logs or '' ) + new_logs + '\n' db_session . commit ( ) except Exception as e : raise e | Update database workflow status . |
10,263 | def parse_server_addr ( str_addr , default_port = 26000 ) : m = ADDR_STR_RE . match ( str_addr ) if m is None : raise ValueError ( 'Bad address string "{0}"' . format ( str_addr ) ) dct = m . groupdict ( ) port = dct . get ( 'port' ) if port is None : port = default_port else : port = int ( port ) if port == 0 : raise ValueError ( "Port can't be zero" ) host = dct [ 'host' ] if dct [ 'host' ] else dct [ 'host6' ] return host , port | Parse address and returns host and port |
10,264 | def request_goto ( self , tc = None ) : if not tc : tc = TextHelper ( self . editor ) . word_under_cursor ( select_whole_word = True ) if not self . _definition or isinstance ( self . sender ( ) , QAction ) : self . select_word ( tc ) if self . _definition is not None : QTimer . singleShot ( 100 , self . _goto_def ) | Request a go to assignment . |
10,265 | def get_template ( name ) : path = os . path . join ( base_dir , name ) if path not in templates : try : templates [ path ] = Template ( path ) except IOError : return None return copy . deepcopy ( templates [ path ] ) | Return a copy of the template with the specified name . If not found or an error occurs during the load return None . |
10,266 | def set_value ( self , eid , val , idx = '*' ) : if eid in self . __element_ids : elems = self . __element_ids [ eid ] if type ( val ) in SEQ_TYPES : idx = 0 if idx == '*' : for elem in elems : self . __set_value ( eid , elem , val , idx ) elif idx < len ( elems ) : self . __set_value ( eid , elems [ idx ] , val , idx ) | Set the content of an xml element marked with the matching eid attribute . |
10,267 | def set_attribute ( self , aid , attrib , val , idx = '*' ) : if aid in self . __attrib_ids : elems = self . __attrib_ids [ aid ] if idx == '*' : for elem in elems : self . __set_attribute ( elem , attrib , val ) elif idx < len ( elems ) : elem = elems [ idx ] self . __set_attribute ( elem , attrib , val ) | Set the value of an xml attribute marked with the matching aid attribute . |
10,268 | def hide ( self , eid , index = 0 ) : elems = None if eid in self . __element_ids : elems = self . __element_ids [ eid ] elif eid in self . __repeat_ids : elems = self . __repeat_ids [ eid ] if elems and index < len ( elems ) : elem = elems [ index ] elem . parent . children . remove ( elem ) | Hide the element with the matching eid . If no match look for an element with a matching rid . |
10,269 | def repeat ( self , rid , count , index = 0 ) : elems = None if rid in self . __repeat_ids : elems = self . __repeat_ids [ rid ] elif rid in self . __element_ids : elems = self . __element_ids if elems and index < len ( elems ) : elem = elems [ index ] self . __repeat ( elem , count ) | Repeat an xml element marked with the matching rid . |
10,270 | def replace ( self , eid , replacement , index = 0 ) : if eid in self . __element_ids : elems = self . __element_ids [ eid ] elif eid in self . __repeat_ids : elems = self . __repeat_ids [ eid ] else : return if index < len ( elems ) : elem = elems [ index ] current_pos = elem . parent . children . index ( elem ) elem . parent . children . remove ( elem ) replacement_type = type ( replacement ) if replacement_type in ( Element , TextElement ) : self . check_element ( replacement , True ) elem . parent . children . insert ( current_pos , replacement ) replacement . parent = elem . parent elif replacement_type == Template : for child in replacement . root . children : elem . parent . children . insert ( current_pos , child ) child . parent = elem . parent current_pos += 1 self . __merge_ids ( self . __element_ids , replacement . __element_ids ) self . __merge_ids ( self . __attrib_ids , replacement . __attrib_ids ) self . __merge_ids ( self . __repeat_ids , replacement . __repeat_ids ) else : elem . parent . children . insert ( current_pos , TextElement ( replacement ) ) | Replace an xml element marked with the matching eid . If the replacement value is an Element or TextElement it s swapped in untouched . If it s a Template the children of the root element in the template are used . Otherwise the replacement value is wrapped with a TextElement . |
10,271 | def set_hasher ( self , hash , rounds = None ) : hash = hash . replace ( '-' , '_' ) if hash not in VALID_HASHERS : raise WrongHashAlgorithm ( WRONG_HASH_MESSAGE ) hasher = getattr ( ph , hash ) utils . test_hasher ( hasher ) default_rounds = getattr ( hasher , 'default_rounds' , 1 ) min_rounds = getattr ( hasher , 'min_rounds' , 1 ) max_rounds = getattr ( hasher , 'max_rounds' , float ( "inf" ) ) rounds = min ( max ( rounds or default_rounds , min_rounds ) , max_rounds ) op = { 'schemes' : VALID_HASHERS + DEPRECATED_HASHERS , 'deprecated' : DEPRECATED_HASHERS , 'default' : hash , hash + '__default_rounds' : rounds } self . hasher = CryptContext ( ** op ) self . hash = hash . replace ( '_' , '-' ) self . rounds = rounds | Updates the has algorithm and optionally the number of rounds to use . |
10,272 | def to_bool ( value , do_raise = True ) : value = value . lower ( ) if value . isdigit ( ) : return bool ( int ( value ) ) if value in _str_true : return True elif value in _str_false : return False if do_raise : raise ValueError ( "invalid literal for to_bool(): %r" % value ) return False | Convert a string to a boolean value . |
10,273 | async def become ( self , layer_type : Type [ L ] , request : 'Request' ) -> L : raise ValueError ( 'Cannot become "{}"' . format ( layer_type . __name__ ) ) | Transform this layer into another layer type |
10,274 | async def become ( self , layer_type : Type [ L ] , request : 'Request' ) : if layer_type != RawText : super ( Text , self ) . become ( layer_type , request ) return RawText ( await render ( self . text , request ) ) | Transforms the translatable string into an actual string and put it inside a RawText . |
10,275 | def _make_register ( self ) -> BaseRegisterStore : s = settings . REGISTER_STORE store_class = import_class ( s [ 'class' ] ) return store_class ( ** s [ 'params' ] ) | Make the register storage . |
10,276 | def _make_transitions ( self ) -> List [ Transition ] : module_name = settings . TRANSITIONS_MODULE module_ = importlib . import_module ( module_name ) return module_ . transitions | Load the transitions file . |
10,277 | def _make_allowed_states ( self ) -> Iterator [ Text ] : for trans in self . transitions : yield trans . dest . name ( ) if trans . origin : yield trans . origin . name ( ) | Sometimes we load states from the database . In order to avoid loading an arbitrary class we list here the state classes that are allowed . |
10,278 | async def _find_trigger ( self , request : Request , origin : Optional [ Text ] = None , internal : bool = False ) -> Tuple [ Optional [ BaseTrigger ] , Optional [ Type [ BaseState ] ] , Optional [ bool ] , ] : reg = request . register if not origin : origin = reg . get ( Register . STATE ) logger . debug ( 'From state: %s' , origin ) results = await asyncio . gather ( * ( x . rank ( request , origin ) for x in self . transitions if x . internal == internal ) ) if len ( results ) : score , trigger , state , dnr = max ( results , key = lambda x : x [ 0 ] ) if score >= settings . MINIMAL_TRIGGER_SCORE : return trigger , state , dnr return None , None , None | Find the best trigger for this request or go away . |
10,279 | def _confused_state ( self , request : Request ) -> Type [ BaseState ] : origin = request . register . get ( Register . STATE ) if origin in self . _allowed_states : try : return import_class ( origin ) except ( AttributeError , ImportError ) : pass return import_class ( settings . DEFAULT_STATE ) | If we re confused find which state to call . |
10,280 | async def _build_state ( self , request : Request , message : BaseMessage , responder : Responder ) -> Tuple [ Optional [ BaseState ] , Optional [ BaseTrigger ] , Optional [ bool ] , ] : trigger , state_class , dnr = await self . _find_trigger ( request ) if trigger is None : if not message . should_confuse ( ) : return None , None , None state_class = self . _confused_state ( request ) logger . debug ( 'Next state: %s (confused)' , state_class . name ( ) ) else : logger . debug ( 'Next state: %s' , state_class . name ( ) ) state = state_class ( request , responder , trigger , trigger ) return state , trigger , dnr | Build the state for this request . |
10,281 | async def _run_state ( self , responder , state , trigger , request ) -> BaseState : user_trigger = trigger try : if trigger : await state . handle ( ) else : await state . confused ( ) for i in range ( 0 , settings . MAX_INTERNAL_JUMPS + 1 ) : if i == settings . MAX_INTERNAL_JUMPS : raise MaxInternalJump ( ) trigger , state_class , dnr = await self . _find_trigger ( request , state . name ( ) , True ) if not trigger : break logger . debug ( 'Jumping to state: %s' , state_class . name ( ) ) state = state_class ( request , responder , trigger , user_trigger ) await state . handle ( ) except Exception : logger . exception ( 'Error while handling state "%s"' , state . name ( ) ) responder . clear ( ) reporter . report ( request , state . name ( ) ) await state . error ( ) return state | Execute the state or if execution fails handle it . |
10,282 | async def _build_state_register ( self , state : BaseState , request : Request , responder : Responder ) -> Dict : return { Register . STATE : state . name ( ) , Register . TRANSITION : await responder . make_transition_register ( request ) , } | Build the next register to store . |
10,283 | def runGetResults ( cmd , stdout = True , stderr = True , encoding = sys . getdefaultencoding ( ) ) : if stderr in ( 'stdout' , subprocess . STDOUT ) : stderr = subprocess . STDOUT elif stderr == True or stderr == subprocess . PIPE : stderr = subprocess . PIPE else : stderr = None if stdout == True or stdout == subprocess . STDOUT : stdout = subprocess . PIPE else : stdout = None if stderr == subprocess . PIPE : raise ValueError ( 'Cannot redirect stderr to stdout if stdout is not captured.' ) if issubclass ( cmd . __class__ , ( list , tuple ) ) : shell = False else : shell = True try : pipe = subprocess . Popen ( cmd , stdout = stdout , stderr = stderr , shell = shell ) except Exception as e : try : if shell is True : cmdStr = ' ' . join ( cmd ) else : cmdStr = cmd except : cmdStr = repr ( cmd ) raise SimpleCommandFailure ( 'Failed to execute "%s": %s' % ( cmdStr , str ( e ) ) , returnCode = 255 ) streams = [ ] fileNoToKey = { } ret = { } if stdout == subprocess . PIPE : streams . append ( pipe . stdout ) fileNoToKey [ pipe . stdout . fileno ( ) ] = 'stdout' ret [ 'stdout' ] = [ ] if stderr == subprocess . PIPE : streams . append ( pipe . stderr ) fileNoToKey [ pipe . stderr . fileno ( ) ] = 'stderr' ret [ 'stderr' ] = [ ] returnCode = None time . sleep ( .02 ) while returnCode is None or streams : returnCode = pipe . poll ( ) while True : ( readyToRead , junk1 , junk2 ) = select . select ( streams , [ ] , [ ] , .005 ) if not readyToRead : time . sleep ( .01 ) break for readyStream in readyToRead : retKey = fileNoToKey [ readyStream . fileno ( ) ] curRead = readyStream . read ( ) if curRead in ( b'' , '' ) : streams . remove ( readyStream ) continue ret [ retKey ] . append ( curRead ) for key in list ( ret . keys ( ) ) : ret [ key ] = b'' . join ( ret [ key ] ) if encoding : ret [ key ] = ret [ key ] . decode ( encoding ) ret [ 'returnCode' ] = returnCode return ret | runGetResults - Simple method to run a command and return the results of the execution as a dict . |
10,284 | def create_context_store ( name = 'default' , ttl = settings . CONTEXT_DEFAULT_TTL , store = settings . CONTEXT_STORE ) -> 'BaseContextStore' : store_class = import_class ( store [ 'class' ] ) return store_class ( name = name , ttl = ttl , ** store [ 'params' ] ) | Create a context store . By default using the default configured context store but you can use a custom class if you want to using the store setting . |
10,285 | def camelcase ( text , acronyms = None ) : words , _case , _sep = case_parse . parse_case ( text , acronyms ) if words : words [ 0 ] = words [ 0 ] . lower ( ) return '' . join ( words ) | Return text in camelCase style . |
10,286 | def dotcase ( text , acronyms = None ) : words , _case , _sep = case_parse . parse_case ( text , acronyms ) return '.' . join ( [ w . lower ( ) for w in words ] ) | Return text in dot . case style . |
10,287 | def separate_words ( text , acronyms = None ) : words , _case , _sep = case_parse . parse_case ( text , acronyms , preserve_case = True ) return ' ' . join ( words ) | Return text in seperate words style . |
10,288 | def init_db ( ) : import reana_db . models if not database_exists ( engine . url ) : create_database ( engine . url ) Base . metadata . create_all ( bind = engine ) | Initialize the DB . |
10,289 | def _load_significant_pathways_file ( path_to_file ) : feature_pathway_df = pd . read_table ( path_to_file , header = 0 , usecols = [ "feature" , "side" , "pathway" ] ) feature_pathway_df = feature_pathway_df . sort_values ( by = [ "feature" , "side" ] ) return feature_pathway_df | Read in the significant pathways file as a pandas . DataFrame . |
10,290 | def _pathway_feature_permutation ( pathway_feature_tuples , permutation_max_iters ) : pathways , features = [ list ( elements_at_position ) for elements_at_position in zip ( * pathway_feature_tuples ) ] original_pathways = pathways [ : ] random . shuffle ( pathways ) feature_block_locations = { } i = 0 while i < len ( pathways ) : starting_index = i current_feature = features [ i ] pathway_set = set ( ) while i < len ( pathways ) and features [ i ] == current_feature : if pathways [ i ] not in pathway_set : pathway_set . add ( pathways [ i ] ) else : k = 0 random_pathway = None while True : j = random . choice ( range ( 0 , len ( pathways ) ) ) random_pathway = pathways [ j ] random_feature = features [ j ] if ( random_pathway != pathways [ i ] and random_pathway not in pathway_set ) : if random_feature not in feature_block_locations : break feature_block_start , feature_block_end = feature_block_locations [ random_feature ] pathway_block = pathways [ feature_block_start : feature_block_end ] if pathways [ i ] not in pathway_block : break k += 1 if k > permutation_max_iters : print ( "Permutation step: reached the maximum " "number of iterations {0}." . format ( permutation_max_iters ) ) return None pathway_set . add ( random_pathway ) pathways [ j ] = pathways [ i ] pathways [ i ] = random_pathway i += 1 ending_index = i feature_block_locations [ current_feature ] = ( starting_index , ending_index ) if original_pathways == pathways : return None return list ( zip ( pathways , features ) ) | Permute the pathways across features for one side in the network . Used in permute_pathways_across_features |
10,291 | def weight_by_edge_odds_ratios ( self , edges_expected_weight , flag_as_significant ) : for edge_id , expected_weight in edges_expected_weight : edge_obj = self . edges [ edge_id ] edge_obj . weight /= expected_weight if edge_id in flag_as_significant : edge_obj . significant = True else : edge_obj . significant = False | Applied during the permutation test . Update the edges in the network to be weighted by their odds ratios . The odds ratio measures how unexpected the observed edge weight is based on the expected weight . |
10,292 | def aggregate ( self , merge ) : self . features = set ( ) self . n_features += merge . n_features vertex_id_conversion = self . convert_pathway_mapping ( merge . pathways ) for edge_id , edge in merge . edges . items ( ) : edge_key = self . remapped_edge ( vertex_id_conversion , edge_id ) if edge_key in self . edges : if self . edges [ edge_key ] . which_features : self . edges [ edge_key ] . which_features = [ ] self . edges [ edge_key ] . weight += edge . weight else : vertex0_id , vertex1_id = edge_key new_edge_obj = Edge ( vertex0_id , vertex1_id , [ ] ) new_edge_obj . weight = edge . weight self . edges [ edge_key ] = new_edge_obj self . _add_edge_to_vertex ( vertex0_id , new_edge_obj ) self . _add_edge_to_vertex ( vertex1_id , new_edge_obj ) | Combine this network with another network . The aggregation step takes the union of the edges in the two networks where we take the sum of weights for edges common to both networks . |
10,293 | def edge_tuple ( self , vertex0_id , vertex1_id ) : pw0 = self . __getitem__ ( vertex0_id ) pw1 = self . __getitem__ ( vertex1_id ) if not pw0 or not pw1 : return None if pw0 < pw1 : return ( vertex0_id , vertex1_id ) elif pw0 > pw1 : return ( vertex1_id , vertex0_id ) else : return None | To avoid duplicate edges where the vertex ids are reversed we maintain that the vertex ids are ordered so that the corresponding pathway names are alphabetical . |
10,294 | def add_pathway ( self , pathway ) : if pathway not in self . pathways : self . pathways [ pathway ] = self . n_pathways self . n_pathways += 1 return self . pathways [ pathway ] | Updates self . pathways and self . n_pathways . |
10,295 | def get_edge_pathways ( self , edge_id ) : vertex0_id , vertex1_id = edge_id pw0 = self . get_pathway_from_vertex_id ( vertex0_id ) pw1 = self . get_pathway_from_vertex_id ( vertex1_id ) if not pw0 or not pw1 : return None return ( pw0 , pw1 ) | Get the pathways associated with an edge . |
10,296 | def get_vertex_obj_from_pathway ( self , pathway ) : if pathway in self . pathways : vertex_id = self . pathways [ pathway ] return self . vertices [ vertex_id ] else : return None | Get the vertex object that corresponds to a pathway name |
10,297 | def get_adjacent_pathways ( self , pathway ) : vertex_id = self . pathways [ pathway ] adjacent = self . vertices [ vertex_id ] . get_adjacent_vertex_ids ( ) adjacent_pathways = [ ] for adjacent_id in adjacent : adjacent_pathways . append ( self . get_pathway_from_vertex_id ( adjacent_id ) ) return adjacent_pathways | Get the pathways adjacent to this pathway in the network |
10,298 | def to_dataframe ( self , drop_weights_below = 0 , whitelist = None ) : network_df_cols = [ "pw0" , "pw1" , "weight" ] if self . features : network_df_cols . append ( "features" ) network_df = pd . DataFrame ( columns = network_df_cols ) idx = 0 edge_pathways = set ( ) for ( v0 , v1 ) , edge_obj in self . edges . items ( ) : if ( edge_obj . weight > drop_weights_below and ( whitelist is None or ( v0 , v1 ) in whitelist ) ) : row = [ self . __getitem__ ( v0 ) , self . __getitem__ ( v1 ) , edge_obj . weight ] edge_pathways . add ( v0 ) edge_pathways . add ( v1 ) if self . features : features = edge_obj . features_to_string ( ) row . append ( features ) network_df . loc [ idx ] = row idx += 1 network_df = network_df . sort_values ( by = [ "weight" ] , ascending = False ) print ( "The pathway co-occurrence network " "contains {0} pathways." . format ( len ( edge_pathways ) ) ) return network_df | Conversion of the network to a pandas . DataFrame . |
10,299 | def _add_edge_to_vertex ( self , vertex_id , edge ) : connected_to = edge . connected_to ( vertex_id ) if vertex_id not in self . vertices : vertex_obj = Vertex ( vertex_id ) self . vertices [ vertex_id ] = vertex_obj self . vertices [ vertex_id ] . edges [ connected_to ] = edge . weight | Adds the edge to the Vertex object s edges dictionary |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.