idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
10,700
def on_presence(self, session, presence):
    """Handle an incoming XMPP presence stanza.

    Tracks member subscription state ('subscribed'/'subscribe'), and
    keeps a per-member online counter for availability stanzas.
    """
    from_jid = presence.getFrom()
    is_member = self.is_member(from_jid.getStripped())
    member = self.get_member(from_jid.getStripped()) if is_member else None
    logger.info('presence: from=%s is_member=%s type=%s'
                % (from_jid, is_member, presence.getType()))
    if presence.getType() == 'subscribed':
        # The peer accepted our subscription request.
        if is_member:
            logger.info('[%s] accepted their invitation' % (from_jid,))
            member['STATUS'] = 'ACTIVE'
    elif presence.getType() == 'subscribe':
        # Inbound subscription request: acknowledge members only.
        if is_member:
            logger.info('Acknowledging subscription request from [%s]' % (from_jid,))
            self.client.sendPresence(jid=from_jid, typ='subscribed')
            member['STATUS'] = 'ACTIVE'
            self.broadcast('%s has accepted their invitation!' % (from_jid,))
    elif presence.getType() is None:
        # Plain availability presence.
        if is_member:
            member['ONLINE'] += 1
    elif presence.getType() == 'unavailable':
        if is_member:
            member['ONLINE'] -= 1
    else:
        logger.info('Unhandled presence stanza of type [%s] from [%s]'
                    % (presence.getType(), from_jid))
Handles presence stanzas
10,701
def on_message(self, con, event):
    """Handle an incoming XMPP message stanza.

    Dispatches, in order: regex-based bot command patterns, '/cmd'-style
    'do_*' handlers, and finally broadcasts the message to every other
    member. Messages from non-members or with empty bodies are ignored.
    """
    msg_type = event.getType()
    nick = event.getFrom().getResource()
    from_jid = event.getFrom().getStripped()
    body = event.getBody()
    if msg_type == 'chat' and body is None:
        return
    logger.debug('msg_type[%s] from[%s] nick[%s] body[%s]'
                 % (msg_type, from_jid, nick, body,))
    # Fix: was filter(...) which is a lazy iterator on Python 3, breaking
    # the len()/indexing below. A list comprehension works on both.
    sender = [m for m in self.params['MEMBERS'] if m['JID'] == from_jid]
    should_process = (msg_type in ['message', 'chat', None]
                      and body is not None
                      and len(sender) == 1)
    if not should_process:
        return
    sender = sender[0]
    try:
        # 1) regex-based bot commands
        for reg, cmd in self.command_patterns:
            m = reg.match(body)
            if m:
                logger.info('pattern matched for bot command \'%s\'' % (cmd,))
                function = getattr(self, str(cmd), None)
                if function:
                    return function(sender, body, m)
        # 2) '/command arg ...' style handlers
        words = body.split(' ')
        cmd, args = words[0], words[1:]
        if cmd and cmd[0] == '/':
            cmd = cmd[1:]
            command_handler = getattr(self, 'do_' + cmd, None)
            if command_handler:
                return command_handler(sender, body, args)
        # 3) relay to everyone except the sender
        broadcast_body = '[%s] %s' % (sender['NICK'], body,)
        return self.broadcast(broadcast_body, exclude=(sender,))
    except Exception:
        # Fix: was a bare 'except:', which also swallowed SystemExit and
        # KeyboardInterrupt. Narrowed while keeping the logging behavior.
        logger.exception('Error handling message [%s] from [%s]'
                         % (body, sender['JID']))
Handles message stanzas
10,702
def activate(self):
    """Discover and invoke all activation methods on this shell.

    Collects every attribute whose name starts with 'shell_activate_'
    (first) and then 'activate_' into self.plugins, and calls each one.
    """
    names = dir(self)
    self.plugins = []
    for key in names:
        if key.startswith("shell_activate_"):
            if self.echo:
                Console.ok("Shell Activate: {0}".format(key))
            self.plugins.append(key)
    for key in names:
        if key.startswith("activate_"):
            if self.echo:
                Console.ok("Activate: {0}".format(key))
            self.plugins.append(key)
    for key in self.plugins:
        if self.echo:
            Console.ok("> {0}".format(key.replace("_", " ", 1)))
        # Fix: was exec("self.%s()" % key). getattr avoids building and
        # executing source code for what is a plain method call.
        getattr(self, key)()
method to activate all activation methods in the shell and its plugins .
10,703
def do_help(self, arg):
    """List available commands with 'help', or detailed help with 'help cmd'."""
    if arg:
        # Single-command help: prefer an explicit help_<arg> method,
        # falling back to the do_<arg> docstring.
        try:
            func = getattr(self, 'help_' + arg)
        except AttributeError:
            try:
                doc = getattr(self, 'do_' + arg).__doc__
                if doc:
                    self.stdout.write("%s\n" % str(doc))
                    return
            except AttributeError:
                pass
            self.stdout.write("%s\n" % str(self.nohelp % (arg,)))
            return
        func()
    else:
        names = self.get_names()
        cmds_doc = []
        cmds_undoc = []
        help_page = {}
        for name in names:
            if name[:5] == 'help_':
                help_page[name[5:]] = 1
        names.sort()
        prevname = ''
        for name in names:
            if name[:3] == 'do_':
                if name == prevname:
                    # dir() can report duplicates; skip repeats.
                    continue
                prevname = name
                cmd = name[3:]
                if cmd in help_page:
                    cmds_doc.append(cmd)
                    del help_page[cmd]
                elif getattr(self, name).__doc__:
                    cmds_doc.append(cmd)
                else:
                    cmds_undoc.append(cmd)
        self.stdout.write("%s\n" % str(self.doc_leader))
        self.print_topics(self.doc_header, cmds_doc, 15, 80)
        self.print_topics(self.misc_header, list(help_page.keys()), 15, 80)
        self.print_topics(self.undoc_header, cmds_undoc, 15, 80)
        # Additional per-topic groupings registered on the shell.
        for topic in self.command_topics:
            topic_cmds = self.command_topics[topic]
            self.print_topics(string.capwords(topic + " commands"),
                              topic_cmds, 15, 80)
List available commands with help or detailed help with help cmd .
10,704
def _fetch_channels(self):
    """Retrieve Ziggo channel information and cache code->name on self."""
    payload = requests.get(self._channels_url).json()
    mapping = {}
    for entry in payload['channels']:
        mapping[entry['channel']['code']] = entry['channel']['name']
    self._channels = mapping
Retrieve Ziggo channel information .
10,705
def send_keys(self, keys):
    """Send a sequence of key codes to the device's command socket.

    Performs the device handshake, then sends a key-down ("04 01 ...")
    and key-up ("04 00 ...") frame for every known key in *keys*.
    """
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        sock.settimeout(self._timeout)
        sock.connect((self._ip, self._port['cmd']))
        # Handshake: echo the version string, then exchange the fixed
        # negotiation frames the box expects.
        version_info = sock.recv(15)
        sock.send(version_info)
        sock.recv(2)
        sock.send(bytes.fromhex('01'))
        sock.recv(4)
        sock.recv(24)
        for key in keys:
            if key in self._keys:
                # key press followed by key release
                sock.send(bytes.fromhex("04 01 00 00 00 00 " + self._keys[key]))
                sock.send(bytes.fromhex("04 00 00 00 00 00 " + self._keys[key]))
    finally:
        # Fix: the socket leaked whenever an error occurred mid-session;
        # the old 'except socket.error: raise' was a no-op. Exceptions
        # still propagate to the caller unchanged.
        sock.close()
Send keys to the device .
10,706
def make_echoicefield(echoices, *args, klass_name=None, **kwargs):
    """Construct a models.Field subclass matching the EChoice value type.

    The base field class (Char/Integer/Float/BooleanField) is selected
    from the EChoice value type; EChoiceField behavior is mixed in and
    the resulting class instantiated with *args/**kwargs.
    """
    assert issubclass(echoices, EChoice)
    value_type = echoices.__getvaluetype__()
    base_fields = {
        str: models.CharField,
        int: models.IntegerField,
        float: models.FloatField,
        bool: models.BooleanField,
    }
    cls_ = base_fields.get(value_type)
    if cls_ is None:
        raise NotImplementedError(
            "Please open an issue if you wish your value type to be supported: "
            "https://github.com/mbourqui/django-echoices/issues/new")
    old_django = StrictVersion(django_version()) < StrictVersion('1.9.0')
    if klass_name and old_django:
        warnings.warn("Django < 1.9 throws an 'ImportError' if the class name is not defined in the module. "
                      "The provided klass_name will be replaced by {}".format(EChoiceField.__name__),
                      RuntimeWarning)
    if old_django:
        # The class name must exist in this module on old Django.
        klass_name = EChoiceField.__name__
    elif not klass_name:
        klass_name = "{}Field".format(echoices.__name__)
    attrs = dict(cls_.__dict__)
    attrs.update(dict(EChoiceField.__dict__))
    return type(klass_name, (cls_,), attrs)(echoices, *args, **kwargs)
Construct a subclass of a derived models . Field specific to the type of the EChoice values .
10,707
def make_dummy(instance, relations=None,
               datetime_default=dt.strptime('1901-01-01', '%Y-%m-%d'),
               varchar_default="", integer_default=0, numeric_default=0.0,
               *args, **kwargs):
    """Populate *instance* so it looks like an empty dummy row.

    Every mapped column receives either the value supplied in **kwargs
    under the column name, or a type-appropriate default. Each entry in
    *relations* sets the attribute to the first element of its value.

    Returns the same instance, mutated in place.
    """
    # Fix: was 'relations={}' — a mutable default shared across calls.
    if relations is None:
        relations = {}
    init_data = {
        'DATETIME': datetime_default,
        'VARCHAR': varchar_default,
        'INTEGER': integer_default,
        'NUMERIC(50, 10)': numeric_default,
        'TEXT': varchar_default,
    }
    table = type(instance)
    for col in table.__table__.columns:
        try:
            setattr(instance, col.name, kwargs[col.name])
        except KeyError:
            setattr(instance, col.name, init_data[str(col.type)])
    # Fix: .items() works on both Python 2 and 3 (was py2-only .iteritems()).
    for k, v in relations.items():
        setattr(instance, k, v[0])
    return instance
Make an instance to look like an empty dummy .
10,708
def set_up_network(self, genes: List[Gene], gene_filter: bool = False,
                   disease_associations: Optional[Dict] = None) -> None:
    """Set up the gene network.

    Optionally restricts the graph to the given genes, then annotates
    the vertices and logs a summary.
    """
    if gene_filter:
        self.filter_genes([g.entrez_id for g in genes])
    self._add_vertex_attributes(genes, disease_associations)
    self.print_summary("Graph of all genes")
Set up the network .
10,709
def filter_genes(self, relevant_entrez: list) -> None:
    """Delete every vertex whose name is not in *relevant_entrez*."""
    logger.info("In filter_genes()")
    to_drop = self.graph.vs.select(name_notin=relevant_entrez)
    self.graph.delete_vertices(to_drop)
Filter out the genes that are not in list relevant_entrez .
10,710
def _add_vertex_attributes(self, genes: List[Gene],
                           disease_associations: Optional[dict] = None) -> None:
    """Annotate vertices with expression data and disease associations."""
    self._set_default_vertex_attributes()
    self._add_vertex_attributes_by_genes(genes)
    # Flag differentially expressed vertices in both directions.
    up_regulated = self.get_upregulated_genes()
    down_regulated = self.get_downregulated_genes()
    for vertex_seq, flag in ((up_regulated, "up_regulated"),
                             (down_regulated, "down_regulated")):
        self.graph.vs(vertex_seq.indices)["diff_expressed"] = True
        self.graph.vs(vertex_seq.indices)[flag] = True
    self._add_disease_associations(disease_associations)
    logger.info("Number of all differentially expressed genes is: {}".format(
        len(up_regulated) + len(down_regulated)))
Add attributes to vertices .
10,711
def _set_default_vertex_attributes(self) -> None:
    """Give every vertex neutral expression attributes before annotation."""
    vertices = self.graph.vs
    vertices["l2fc"] = 0
    vertices["padj"] = 0.5
    # Default symbol is the vertex name (Entrez id) until overwritten.
    vertices["symbol"] = vertices["name"]
    vertices["diff_expressed"] = False
    vertices["up_regulated"] = False
    vertices["down_regulated"] = False
Assign default values on attributes to all vertices .
10,712
def _add_vertex_attributes_by_genes(self, genes: List[Gene]) -> None:
    """Copy per-gene expression values onto the matching vertices.

    Genes whose Entrez id has no vertex in the graph are skipped.
    """
    for gene in genes:
        try:
            idx = self.graph.vs.find(name=str(gene.entrez_id)).index
            self.graph.vs[idx]['l2fc'] = gene.log2_fold_change
            self.graph.vs[idx]['symbol'] = gene.symbol
            self.graph.vs[idx]['padj'] = gene.padj
        except ValueError:
            # Gene not present in the network.
            pass
Assign values to attributes on vertices .
10,713
def _add_disease_associations(self, disease_associations: dict) -> None:
    """Attach disease-id lists to the vertices named by each target id.

    Target ids without a matching vertex are ignored; passing None is a
    no-op.
    """
    if disease_associations is None:
        return
    for target_id, disease_id_list in disease_associations.items():
        if target_id in self.graph.vs["name"]:
            self.graph.vs.find(name=target_id)["associated_diseases"] = disease_id_list
Add disease association annotation to the network .
10,714
def get_upregulated_genes(self) -> VertexSeq:
    """Return the vertex sequence of up-regulated genes."""
    hits = self.graph.vs.select(self._is_upregulated_gene)
    logger.info(f"No. of up-regulated genes after laying on network: {len(hits)}")
    return hits
Get genes that are up - regulated .
10,715
def get_downregulated_genes(self) -> VertexSeq:
    """Return the vertex sequence of down-regulated genes."""
    hits = self.graph.vs.select(self._is_downregulated_gene)
    logger.info(f"No. of down-regulated genes after laying on network: {len(hits)}")
    return hits
Get genes that are down - regulated .
10,716
def print_summary(self, heading: str) -> None:
    """Log *heading* followed by the graph's node and edge counts."""
    logger.info(heading)
    logger.info("Number of nodes: {}".format(len(self.graph.vs)))
    logger.info("Number of edges: {}".format(len(self.graph.es)))
Print the summary of a graph .
10,717
def get_differentially_expressed_genes(self, diff_type: str) -> VertexSeq:
    """Select vertices by regulation direction.

    diff_type 'up' or 'down' selects that flag; anything else selects
    all differentially expressed vertices.
    """
    if diff_type == "up":
        return self.graph.vs.select(up_regulated_eq=True)
    if diff_type == "down":
        return self.graph.vs.select(down_regulated_eq=True)
    return self.graph.vs.select(diff_expressed_eq=True)
Get the differentially expressed genes based on diff_type .
10,718
def write_adj_list(self, path: str) -> None:
    """Write the network to *path* as an adjacency list, one node per line."""
    adjacency = self.get_adjlist()
    with open(path, mode="w") as handle:
        for node, neighbors in enumerate(adjacency):
            print(node, *neighbors, file=handle)
Write the network as an adjacency list to a file .
10,719
def get_attribute_from_indices(self, indices: list, attribute_name: str):
    """Return the attribute values of the vertices at *indices*."""
    # Fancy indexing via numpy picks out all requested positions at once.
    values = np.array(self.graph.vs[attribute_name])
    return list(values[indices])
Get attribute values for the requested indices .
10,720
def read_headers(rfile, hdict=None):
    """Read MIME headers from *rfile* into *hdict* (created when None).

    Continuation lines (starting with space/tab) extend the previous
    header's value; headers listed in comma_separated_headers are folded
    together with ', '. Raises ValueError on malformed input (callers
    should typically answer 400 Bad Request). Returns the populated dict.
    """
    if hdict is None:
        hdict = {}
    while True:
        line = rfile.readline()
        if not line:
            raise ValueError("Illegal end of headers.")
        if line == CRLF:
            # Blank line terminates the header block.
            break
        if not line.endswith(CRLF):
            raise ValueError("HTTP requires CRLF terminators")
        if line[0] in ' \t':
            # Continuation of the previous header's value.
            v = line.strip()
        else:
            try:
                k, v = line.split(":", 1)
            except ValueError:
                raise ValueError("Illegal header line.")
            k = k.strip().title()
            v = v.strip()
        hname = k
        if k in comma_separated_headers:
            existing = hdict.get(hname)
            if existing:
                v = ", ".join((existing, v))
        hdict[hname] = v
    return hdict
Read headers from the given stream into the given header dict . If hdict is None a new header dict is created . Returns the populated header dict . Headers which are repeated are folded together using a comma if their specification so dictates . This function raises ValueError when the read bytes violate the HTTP spec . You should probably return 400 Bad Request if this happens .
10,721
def parse_request(self):
    """Parse the next HTTP request start-line and message headers.

    Sends a 414 or 413 error response and returns early when either the
    request line or the headers exceed the configured maximum size; sets
    self.ready on success.
    """
    self.rfile = SizeCheckWrapper(self.conn.rfile,
                                  self.server.max_request_header_size)
    try:
        self.read_request_line()
    except MaxSizeExceeded:
        self.simple_response(
            "414 Request-URI Too Long",
            "The Request-URI sent with the request exceeds the maximum "
            "allowed bytes.")
        return
    try:
        success = self.read_request_headers()
    except MaxSizeExceeded:
        self.simple_response(
            "413 Request Entity Too Large",
            "The headers sent with the request exceed the maximum "
            "allowed bytes.")
        return
    if not success:
        return
    self.ready = True
Parse the next HTTP request start - line and message - headers .
10,722
def send_headers(self):
    """Assemble and send the HTTP status line and response headers.

    self.status and self.outheaders must be set before calling. Adds
    Transfer-Encoding/Connection/Date/Server headers as the response
    protocol requires, and drains any unread request body so the next
    request on a kept-alive connection parses cleanly.
    """
    hkeys = [key.lower() for key, value in self.outheaders]
    status = int(self.status[:3])
    if status == 413:
        # Request Entity Too Large: drop the connection afterwards.
        self.close_connection = True
    elif "content-length" not in hkeys:
        # Without Content-Length the body end must be signalled another
        # way: no body at all, chunked encoding, or connection close.
        if status < 200 or status in (204, 205, 304):
            pass
        elif self.response_protocol == 'HTTP/1.1' and self.method != 'HEAD':
            self.chunked_write = True
            self.outheaders.append(("Transfer-Encoding", "chunked"))
        else:
            self.close_connection = True
    if "connection" not in hkeys:
        if self.response_protocol == 'HTTP/1.1':
            # HTTP/1.1 defaults to persistent; only announce "close".
            if self.close_connection:
                self.outheaders.append(("Connection", "close"))
        else:
            # Pre-1.1 defaults to close; only announce "Keep-Alive".
            if not self.close_connection:
                self.outheaders.append(("Connection", "Keep-Alive"))
    if (not self.close_connection) and (not self.chunked_read):
        remaining = getattr(self.rfile, 'remaining', 0)
        if remaining > 0:
            self.rfile.read(remaining)
    if "date" not in hkeys:
        self.outheaders.append(("Date", rfc822.formatdate()))
    if "server" not in hkeys:
        self.outheaders.append(("Server", self.server.server_name))
    buf = [self.server.protocol + " " + self.status + CRLF]
    for k, v in self.outheaders:
        buf.append(k + ": " + v + CRLF)
    buf.append(CRLF)
    self.conn.wfile.sendall("".join(buf))
Assert process and send the HTTP response message - headers . You must set self . status and self . outheaders before calling this .
10,723
def start(self):
    """Start the pool: spawn self.min worker threads and wait until ready."""
    for _ in range(self.min):
        self._threads.append(WorkerThread(self.server))
    for worker in self._threads:
        worker.setName("CP Server " + worker.getName())
        worker.start()
    # Block until every worker has signalled readiness.
    for worker in self._threads:
        while not worker.ready:
            time.sleep(.1)
Start the pool of threads .
10,724
def fields(self):
    """Tuple of the attributes that define this object's equality."""
    return (
        self.locus,
        self.offset_start,
        self.offset_end,
        self.alignment_key,
    )
Fields that should be considered for our notion of object equality .
10,725
def bases(self):
    """Return the read bases aligned to this locus, as a string."""
    sequence = self.alignment.query_sequence
    # Guard against offsets past the read end; message aids debugging.
    assert self.offset_end <= len(sequence), \
        "End offset=%d > sequence length=%d. CIGAR=%s. SEQUENCE=%s" % (
            self.offset_end, len(sequence),
            self.alignment.cigarstring, sequence)
    return sequence[self.offset_start:self.offset_end]
The sequenced bases in the alignment that align to this locus in the genome as a string .
10,726
def min_base_quality(self):
    """Minimum base quality at this element.

    For a deletion (no covered bases) the minimum is taken over the
    sequenced bases immediately flanking the deletion.
    """
    try:
        return min(self.base_qualities)
    except ValueError:
        # min() on an empty sequence: this element is a deletion.
        assert self.offset_start == self.offset_end
        quals = self.alignment.query_qualities
        flanking = [quals[off]
                    for off in (self.offset_start - 1, self.offset_start)
                    if 0 <= off < len(quals)]
        return min(flanking)
The minimum of the base qualities . In the case of a deletion in which case there are no bases in this PileupElement the minimum is taken over the sequenced bases immediately before and after the deletion .
10,727
def from_pysam_alignment(locus, pileup_read):
    """Build a PileupElement for *locus* from a pysam PileupRead.

    Derives the read interval [offset_start, offset_end) from the
    alignment's aligned pairs; an empty interval marks a deletion.
    """
    assert not pileup_read.is_refskip, (
        "Can't create a PileupElement in a refskip (typically an intronic "
        "gap in an RNA alignment)")
    offset_start = None
    offset_end = len(pileup_read.alignment.query_sequence)
    for (offset, position) in pileup_read.alignment.aligned_pairs:
        if offset is not None and position is not None:
            if position == locus.position:
                offset_start = offset
            elif position > locus.position:
                # First aligned position past the locus bounds the interval.
                offset_end = offset
                break
    if offset_start is None:
        offset_start = offset_end
    # Sanity check: pysam's is_del flag must agree with an empty interval.
    assert pileup_read.is_del == (offset_end - offset_start == 0), \
        "Deletion=%s but | [%d,%d) |=%d for locus %d in: \n%s" % (
            pileup_read.is_del, offset_start, offset_end,
            offset_end - offset_start, locus.position,
            pileup_read.alignment.aligned_pairs)
    assert offset_end >= offset_start
    return PileupElement(locus, offset_start, offset_end, pileup_read.alignment)
Factory function to create a new PileupElement from a pysam PileupRead .
10,728
def safe_request(url, method=None, params=None, data=None, json=None,
                 headers=None, allow_redirects=False, timeout=30,
                 verify_ssl=True):
    """A slightly safer wrapper around requests.

    Redirects are disabled by default, a timeout is always applied, and
    the HTTP method defaults to POST when a body is supplied, else GET.
    """
    session = requests.Session()
    kwargs = {}
    if json:
        kwargs['json'] = json
        if not headers:
            headers = {}
        # JSON payloads default to the JSON content type.
        headers.setdefault('Content-Type', 'application/json')
    if data:
        kwargs['data'] = data
    if params:
        kwargs['params'] = params
    if headers:
        kwargs['headers'] = headers
    if method is None:
        method = 'POST' if (data or json) else 'GET'
    response = session.request(method=method, url=url,
                               allow_redirects=allow_redirects,
                               timeout=timeout, verify=verify_ssl,
                               **kwargs)
    return response
A slightly safer version of request .
10,729
def remote(func):
    """Mark *func* as a remote procedure call.

    In server mode the wrapped function executes locally; in client mode
    a CALL message is sent and the response decoded: ERR closes the
    connection and raises, EXC re-raises the server-side exception type,
    RES yields the result.
    """
    @functools.wraps(func)
    def wrapper(self, *args, **kwargs):
        if self.mode == 'server':
            # Server side: just run the implementation.
            return func(self, *args, **kwargs)
        if not self.conn:
            self.connect()
        self.conn.send('CALL', func.__name__, args, kwargs)
        cmd, payload = self.conn.recv()
        if cmd == 'ERR':
            self.close()
            raise Exception("Catastrophic error from server: %s" % payload[0])
        elif cmd == 'EXC':
            # Reconstruct the remote exception type and re-raise it.
            exc_type = utils.find_entrypoint(None, payload[0])
            raise exc_type(payload[1])
        elif cmd != 'RES':
            self.close()
            raise Exception("Invalid command response from server: %s" % cmd)
        return payload[0]
    # Flag consumed by the server-side dispatcher.
    wrapper._remote = True
    return wrapper
Decorator to mark a function as invoking a remote procedure call . When invoked in server mode the function will be called ; when invoked in client mode an RPC will be initiated .
10,730
def send ( self , cmd , * payload ) : if not self . _sock : raise ConnectionClosed ( "Connection closed" ) msg = json . dumps ( dict ( cmd = cmd , payload = payload ) ) + '\n' try : self . _sock . sendall ( msg ) except socket . error : e_type , e_value , e_tb = sys . exc_info ( ) self . close ( ) raise e_type , e_value , e_tb
Send a command message to the other end .
10,731
def _recvbuf_pop(self):
    """Pop the next message off the receive buffer.

    A buffered Exception instance is raised; otherwise the message's
    (cmd, payload) tuple is returned.
    """
    head = self._recvbuf.pop(0)
    if isinstance(head, Exception):
        raise head
    return head['cmd'], head['payload']
Internal helper to pop a message off the receive buffer . If the message is an Exception that exception will be raised ; otherwise a tuple of command and payload will be returned .
10,732
def ping(self):
    """Round-trip a PING to the server; return the latency in seconds."""
    if not self.conn:
        self.connect()
    self.conn.send('PING', time.time())
    cmd, payload = self.conn.recv()
    recv_ts = time.time()
    if cmd != 'PONG':
        raise Exception("Invalid response from server")
    # The payload echoes our send timestamp.
    return recv_ts - payload[0]
Ping the server . Returns the time interval in seconds required for the server to respond to the PING message .
10,733
def listen(self):
    """Accept client connections forever (switches to server mode).

    Each accepted client is served on its own green thread. The accept
    loop aborts once max_err_thresh accumulated errors are reached.
    """
    if self.mode and self.mode != 'server':
        raise ValueError("%s is not in server mode" % self.__class__.__name__)
    self.mode = 'server'
    serv = _create_server(self.host, self.port)
    err_thresh = 0
    while True:
        try:
            sock, addr = serv.accept()
        except Exception as exc:
            err_thresh += 1
            if err_thresh >= self.max_err_thresh:
                LOG.exception("Too many errors accepting "
                              "connections: %s" % str(exc))
                break
            continue
        # A successful accept decays the error counter.
        err_thresh = max(err_thresh - 1, 0)
        LOG.info("Accepted connection from %s port %s" % (addr[0], addr[1]))
        eventlet.spawn_n(self.serve, self.connection_class(sock), addr)
    with utils.ignore_except():
        serv.close()
Listen for clients . This method causes the SimpleRPC object to switch to server mode . One thread will be created for each client .
10,734
def serve(self, conn, addr, auth=False):
    """Serve a single client connection until it closes.

    Handles AUTH/PING/CALL commands; unauthenticated clients may only
    AUTH. Protocol or handler errors are reported to the client, and the
    connection is always closed on exit.
    """
    try:
        while True:
            try:
                cmd, payload = conn.recv()
            except ValueError as exc:
                conn.send('ERR', "Failed to parse command: %s" % str(exc))
                if not auth:
                    # Garbage from an unauthenticated peer: hang up.
                    return
                continue
            LOG.debug("Received command %r from %s port %s; payload: %r" %
                      (cmd, addr[0], addr[1], payload))
            if cmd == 'AUTH':
                if auth:
                    conn.send('ERR', "Already authenticated")
                elif payload[0] != self.authkey:
                    conn.send('ERR', "Invalid authentication key")
                    return
                else:
                    conn.send('OK')
                    auth = True
            elif not auth:
                conn.send('ERR', "Not authenticated")
                return
            elif cmd == 'PING':
                conn.send('PONG', *payload)
            elif cmd == 'CALL':
                try:
                    try:
                        funcname, args, kwargs = payload
                    except ValueError as exc:
                        conn.send('ERR', "Invalid payload for 'CALL' "
                                         "command: %s" % str(exc))
                        continue
                    func = self._get_remote_method(funcname)
                    result = func(*args, **kwargs)
                except Exception as exc:
                    # Ship handler failures back as module:name + message.
                    exc_name = '%s:%s' % (exc.__class__.__module__,
                                          exc.__class__.__name__)
                    conn.send('EXC', exc_name, str(exc))
                else:
                    conn.send('RES', result)
            else:
                conn.send('ERR', "Unrecognized command %r" % cmd)
    except ConnectionClosed:
        # Client went away; not an error.
        pass
    except Exception as exc:
        LOG.exception("Error serving client at %s port %s: %s" %
                      (addr[0], addr[1], str(exc)))
    finally:
        LOG.info("Closing connection from %s port %s" % (addr[0], addr[1]))
        conn.close()
Handle a single client .
10,735
def get_limits(self):
    """Return (lazily creating) the RemoteLimitData backing the limits.

    The instance proxies LimitData stored in the RemoteControlDaemon.
    """
    if not self.remote_limits:
        self.remote_limits = RemoteLimitData(self.remote)
    return self.remote_limits
Retrieve the LimitData object the middleware will use for getting the limits . This implementation returns a RemoteLimitData instance that can access the LimitData stored in the RemoteControlDaemon process .
10,736
def waitUpTo(self, timeoutSeconds, pollInterval=DEFAULT_POLL_INTERVAL):
    """Wait up to *timeoutSeconds* for the process to finish.

    Polls every *pollInterval* seconds; returns the exit code, or None
    if the process is still running when the timeout elapses.
    """
    numWaits = timeoutSeconds / float(pollInterval)
    ret = self.poll()
    if ret is None:
        waited = 0
        while waited < numWaits:
            time.sleep(pollInterval)
            ret = self.poll()
            if ret is not None:
                break
            waited += 1
    return ret
Popen . waitUpTo - Wait up to a certain number of seconds for the process to end .
10,737
def waitOrTerminate(self, timeoutSeconds, pollInterval=DEFAULT_POLL_INTERVAL,
                    terminateToKillSeconds=SUBPROCESS2_DEFAULT_TERMINATE_TO_KILL_SECONDS):
    """Wait up to *timeoutSeconds*; escalate to terminate/kill if needed.

    terminateToKillSeconds controls escalation: None terminates only,
    0 kills immediately, otherwise terminate then kill after that grace
    period. Returns {'returnCode': ..., 'actionTaken': flag bits}.
    """
    returnCode = self.waitUpTo(timeoutSeconds, pollInterval)
    actionTaken = SUBPROCESS2_PROCESS_COMPLETED
    if returnCode is None:
        if terminateToKillSeconds is None:
            # Terminate only; allow one poll interval for it to exit.
            self.terminate()
            actionTaken |= SUBPROCESS2_PROCESS_TERMINATED
            time.sleep(pollInterval)
            returnCode = self.poll()
        elif terminateToKillSeconds == 0:
            # Kill immediately; returnCode stays None by contract.
            self.kill()
            actionTaken |= SUBPROCESS2_PROCESS_KILLED
            time.sleep(.01)
            self.poll()
            returnCode = None
        else:
            # Terminate, then kill if still alive after the grace period.
            self.terminate()
            actionTaken |= SUBPROCESS2_PROCESS_TERMINATED
            returnCode = self.waitUpTo(terminateToKillSeconds, pollInterval)
            if returnCode is None:
                actionTaken |= SUBPROCESS2_PROCESS_KILLED
                self.kill()
                time.sleep(.01)
                self.poll()
    return {'returnCode': returnCode, 'actionTaken': actionTaken}
waitOrTerminate - Wait up to a certain number of seconds for the process to end .
10,738
def runInBackground(self, pollInterval=.1, encoding=False):
    """Manage this process from a background thread.

    The thread reads the process streams and performs cleanup; progress
    is exposed through the returned BackgroundTaskInfo object.
    """
    from .BackgroundTask import BackgroundTaskThread
    taskInfo = BackgroundTaskInfo(encoding)
    worker = BackgroundTaskThread(self, taskInfo, pollInterval, encoding)
    worker.start()
    return taskInfo
runInBackground - Create a background thread which will manage this process automatically read from streams and perform any cleanups
10,739
def setClients(self, *args, **kwargs):
    """Load this group's member clients into a 'clients' field.

    Returns the number of Mambu API requests performed.
    """
    requests = 0
    # 'fullDetails' is consumed here rather than forwarded blindly.
    if 'fullDetails' in kwargs:
        fullDetails = kwargs['fullDetails']
        kwargs.pop('fullDetails')
    else:
        fullDetails = True
    clients = []
    for m in self['groupMembers']:
        try:
            client = self.mambuclientclass(entid=m['clientKey'],
                                           fullDetails=fullDetails,
                                           *args, **kwargs)
        except AttributeError:
            # Lazily resolve the client class on first use.
            from .mambuclient import MambuClient
            self.mambuclientclass = MambuClient
            client = self.mambuclientclass(entid=m['clientKey'],
                                           fullDetails=fullDetails,
                                           *args, **kwargs)
        requests += 1
        clients.append(client)
    self['clients'] = clients
    return requests
Adds the clients for this group to a clients field .
10,740
def setActivities(self, *args, **kwargs):
    """Load this group's activities, sorted by timestamp, into 'activities'.

    Returns the number of Mambu API requests performed (always 1).
    """
    def activity_timestamp(activity):
        # Activities without a timestamp sort as None.
        try:
            return activity['activity']['timestamp']
        except KeyError:
            return None
    try:
        activities = self.mambuactivitiesclass(groupId=self['encodedKey'],
                                               *args, **kwargs)
    except AttributeError:
        # Lazily resolve the activities class on first use.
        from .mambuactivity import MambuActivities
        self.mambuactivitiesclass = MambuActivities
        activities = self.mambuactivitiesclass(groupId=self['encodedKey'],
                                               *args, **kwargs)
    activities.attrs = sorted(activities.attrs, key=activity_timestamp)
    self['activities'] = activities
    return 1
Adds the activities for this group to a activities field .
10,741
def set_sensitivity(self, sensitivity=DEFAULT_SENSITIVITY):
    """Set the sensor sensitivity (MTreg), clamped into [31, 254].

    Writes the high and low MTreg bit groups, power-cycling around
    the update.
    """
    # Clamp the requested value into the sensor's valid MTreg range.
    self._mtreg = min(254, max(31, sensitivity))
    self._power_on()
    self._set_mode(0x40 | (self._mtreg >> 5))   # high 3 bits
    self._set_mode(0x60 | (self._mtreg & 0x1f))  # low 5 bits
    self._power_down()
Set the sensitivity value .
10,742
def _get_result(self) -> float:
    """Read and convert the current sensor measurement to lux.

    Returns -1 on an I2C read error (and flags self._ok False).
    """
    try:
        raw = self._bus.read_word_data(self._i2c_add, self._mode)
        self._ok = True
    except OSError as exc:
        self.log_error("Bad reading in bus: %s", exc)
        self._ok = False
        return -1
    # The device returns the 16-bit word byte-swapped.
    count = raw >> 8 | (raw & 0xff) << 8
    mode2coeff = 2 if self._high_res else 1
    ratio = 1 / (1.2 * (self._mtreg / 69.0) * mode2coeff)
    return ratio * count
Return current measurement result in lx .
10,743
def _wait_for_result(self):
    """Sleep long enough for the sensor's current measurement to finish."""
    # Low-resolution mode converts much faster than high-resolution.
    basetime = 0.018 if self._low_res else 0.128
    sleep(basetime * (self._mtreg / 69.0) + self._delay)
Wait for the sensor to be ready for measurement .
10,744
def update(self):
    """Refresh self._light_level (lux), re-arming the sensor if needed."""
    needs_rearm = (not self._continuous_sampling
                   or self._light_level < 0
                   or self._operation_mode != self._mode)
    if needs_rearm:
        self._reset()
        self._set_mode(self._operation_mode)
    self._wait_for_result()
    self._light_level = self._get_result()
    if not self._continuous_sampling:
        self._power_down()
Update the measured light level in lux .
10,745
def get_token(user, secret, timestamp=None):
    """Build a timestamped one-time-use token identifying *user*.

    The token is '<user_id>$<base36 timestamp>$<truncated hmac>', keyed
    on the secret, the user's id, password hash extract, last sign-in
    and the timestamp — so changing any of those invalidates it.
    """
    timestamp = int(timestamp or time())
    secret = to_bytes(secret)
    key = '|'.join([
        hashlib.sha1(secret).hexdigest(),
        str(user.id),
        get_hash_extract(user.password),
        str(getattr(user, 'last_sign_in', 0)),
        str(timestamp),
    ])
    key = key.encode('utf8', 'ignore')
    digest = hmac.new(key, msg=None, digestmod=hashlib.sha512).hexdigest()[:50]
    return '{0}${1}${2}'.format(user.id, to36(timestamp), digest)
Make a timestamped one - time - use token that can be used to identifying the user .
10,746
def __get_user(self):
    """Fetch the real user via the auth backend and cache it on storage."""
    # object.__getattribute__ bypasses the lazy proxy's own attribute
    # hooks; '_LazyUser__storage' is the mangled private storage slot.
    storage = object.__getattribute__(self, '_LazyUser__storage')
    user = getattr(self.__auth, 'get_user')()
    setattr(storage, self.__user_name, user)
    return user
Return the real user object .
10,747
def _expand_filename(self, line):
    """Expand a leading '.' to the cwd and '~' to the user's home dir."""
    expanded = line
    if expanded.startswith("."):
        # Replace only the first '.' with the current working directory.
        expanded = expanded.replace(".", os.getcwd(), 1)
    return os.path.expanduser(expanded)
expands the filename if there is a . as leading path
10,748
def setCustomField(mambuentity, customfield="", *args, **kwargs):
    """Replace *customfield* on the entity with a richer value.

    USER_LINK / CLIENT_LINK fields are replaced by the corresponding
    MambuUser / MambuClient (returning 1 for the extra request made);
    any other data type keeps the raw value (returning 0). Raises
    MambuError when the entity lacks the custom field.
    """
    from . import mambuuser
    from . import mambuclient
    try:
        customFieldValue = mambuentity[customfield]
        # Look up the field's declared data type among the custom fields.
        datatype = [l['customField']['dataType']
                    for l in mambuentity[mambuentity.customFieldName]
                    if (l['name'] == customfield or l['id'] == customfield)][0]
    except IndexError:
        # Grouped/indexed custom fields carry an '_<index>' suffix;
        # retry the lookup with the base name.
        try:
            datatype = [l['customField']['dataType']
                        for l in mambuentity[mambuentity.customFieldName]
                        if (l['name'] == customfield.split('_')[0]
                            or l['id'] == customfield.split('_')[0])][0]
        except IndexError:
            err = MambuError("Object %s has no custom field '%s'"
                             % (mambuentity['id'], customfield))
            raise err
    except AttributeError:
        err = MambuError("Object does not have a custom field to set")
        raise err
    if datatype == "USER_LINK":
        mambuentity[customfield] = mambuuser.MambuUser(entid=customFieldValue,
                                                       *args, **kwargs)
    elif datatype == "CLIENT_LINK":
        mambuentity[customfield] = mambuclient.MambuClient(entid=customFieldValue,
                                                           *args, **kwargs)
    else:
        mambuentity[customfield] = customFieldValue
        return 0
    return 1
Modifies the customField field for the given object with something related to the value of the given field .
10,749
def serializeFields(data):
    """Recursively convert a Mambu object's attributes to strings.

    MambuStruct values delegate to serializeStruct(); lists and dicts
    are walked recursively; everything else becomes its unicode form.
    """
    if isinstance(data, MambuStruct):
        return data.serializeStruct()
    try:
        it = iter(data)
    except TypeError:
        # Not iterable: a plain scalar.
        return unicode(data)
    if type(it) == type(iter([])):
        return [MambuStruct.serializeFields(e) for e in it]
    elif type(it) == type(iter({})):
        d = {}
        for k in it:
            d[k] = MambuStruct.serializeFields(data[k])
        return d
    # Iterable but neither list- nor dict-like (e.g. a string).
    return unicode(data)
Turns every attribute of the Mambu object in to a string representation .
10,750
def init(self, attrs=None, *args, **kwargs):
    """Initialize this struct from a Mambu response dictionary.

    After pre/convert/post processing, every name in kwargs['methods']
    is invoked best-effort, and every kwargs['properties'] entry is set
    as an attribute; failures there are deliberately ignored.
    """
    # Fix: 'attrs={}' was a mutable default dict shared across every
    # call; each call now gets its own fresh dict.
    self.attrs = {} if attrs is None else attrs
    self.preprocess()
    self.convertDict2Attrs(*args, **kwargs)
    self.postprocess()
    try:
        for meth in kwargs['methods']:
            # Best-effort: a failing hook must not abort initialization.
            try:
                getattr(self, meth)()
            except Exception:
                pass
    except Exception:
        # No usable 'methods' kwarg: nothing to do.
        pass
    try:
        for propname, propval in kwargs['properties'].items():
            setattr(self, propname, propval)
    except Exception:
        # No usable 'properties' kwarg: nothing to do.
        pass
Default initialization from a dictionary responded by Mambu
10,751
def connect(self, *args, **kwargs):
    """Connect to Mambu and make the request to the REST API.

    Handles pagination (windowing by offset/limit), retries on
    communication errors, and both GET and POST/PATCH requests.
    Attribute names of the form ``self.__x`` are name-mangled private
    state of MambuStruct.
    """
    from copy import deepcopy
    # remember the call arguments for later re-use (e.g. re-init)
    if args:
        self.__args = deepcopy(args)
    if kwargs:
        for k, v in kwargs.items():
            self.__kwargs[k] = deepcopy(v)
    jsresp = {}
    if not self.__urlfunc:
        # nothing to request against
        return
    offset = self.__offset
    window = True
    jsresp = {}
    while window:
        # clamp the per-request page size to the API's maximum
        if not self.__limit or self.__limit > OUT_OF_BOUNDS_PAGINATION_LIMIT_VALUE:
            limit = OUT_OF_BOUNDS_PAGINATION_LIMIT_VALUE
        else:
            limit = self.__limit
        retries = 0
        while retries < MambuStruct.RETRIES:
            try:
                user = self.__kwargs.get('user', apiuser)
                pwd = self.__kwargs.get('pwd', apipwd)
                if self.__data:
                    # body present: POST, or PATCH when explicitly requested
                    headers = {'content-type': 'application/json'}
                    data = json.dumps(encoded_dict(self.__data))
                    url = iriToUri(self.__urlfunc(self.entid, limit=limit, offset=offset, *self.__args, **self.__kwargs))
                    if self.__method == "PATCH":
                        resp = requests.patch(url, data=data, headers=headers, auth=(user, pwd))
                    else:
                        resp = requests.post(url, data=data, headers=headers, auth=(user, pwd))
                else:
                    url = iriToUri(self.__urlfunc(self.entid, limit=limit, offset=offset, *self.__args, **self.__kwargs))
                    resp = requests.get(url, auth=(user, pwd))
                # record the request timestamp
                self.rc.add(datetime.now())
                try:
                    jsonresp = json.loads(resp.content)
                    if type(jsonresp) == list:
                        # list response: accumulate pages
                        try:
                            jsresp.extend(jsonresp)
                        except AttributeError:
                            # first page replaced the dict placeholder
                            jsresp = jsonresp
                        if len(jsonresp) < limit:
                            # short page: no more data
                            window = False
                    else:
                        # single-object response: no pagination
                        jsresp = jsonresp
                        window = False
                except ValueError as ex:
                    raise ex
                except Exception as ex:
                    raise MambuError("JSON Error: %s" % repr(ex))
                break
            except MambuError as merr:
                raise merr
            except requests.exceptions.RequestException:
                # transient network error: retry
                retries += 1
            except Exception as ex:
                raise ex
        else:
            # exhausted all retries
            raise MambuCommError("ERROR I can't communicate with Mambu")
        offset = offset + limit
        if self.__limit:
            self.__limit -= limit
            if self.__limit <= 0:
                window = False
    # restore the originally requested limit for subsequent calls
    self.__limit = self.__inilimit
    try:
        # surface Mambu-reported errors carried inside a dict response
        if u'returnCode' in jsresp and u'returnStatus' in jsresp and jsresp[u'returnCode'] != 0:
            raise MambuError(jsresp[u'returnStatus'])
    except AttributeError:
        pass
    if self.__method != "PATCH":
        self.init(attrs=jsresp, *self.__args, **self.__kwargs)
Connect to Mambu and make the request to the REST API.
10,752
def convertDict2Attrs(self, *args, **kwargs):
    """Each element of the attrs attribute gets converted to a proper Python object depending on its type."""
    # Fields kept verbatim as strings even when they look numeric
    # (ids, phone numbers, postcodes would lose leading zeros/format).
    constantFields = ['id', 'groupName', 'name', 'homePhone', 'mobilePhone1', 'phoneNumber', 'postcode', 'emailAddress']

    def convierte(data):
        # Recursively walk dicts and lists, then try casts on the result.
        try:
            it = iter(data)
            if type(it) == type(iter({})):
                d = {}
                for k in it:
                    if k in constantFields:
                        # keep constant fields untouched
                        d[k] = data[k]
                    else:
                        d[k] = convierte(data[k])
                data = d
            if type(it) == type(iter([])):
                l = []
                for e in it:
                    l.append(convierte(e))
                data = l
        except TypeError as terr:
            # not iterable: fall through to scalar conversion below
            pass
        except Exception as ex:
            raise ex
        try:
            # int only when the round-trip is lossless (rejects '01', '1.0')
            d = int(data)
            if str(d) != data:
                return data
            return d
        except (TypeError, ValueError) as tverr:
            try:
                # then try float
                return float(data)
            except (TypeError, ValueError) as tverr:
                try:
                    # then try Mambu's datetime format
                    return self.util_dateFormat(data)
                except (TypeError, ValueError) as tverr:
                    # give up: keep as-is
                    return data
        return data

    self.attrs = convierte(self.attrs)
Each element of the attrs attribute gets converted to a proper Python object depending on its type.
10,753
def util_dateFormat(self, field, formato=None):
    """Converts a datetime field to a datetime using some specified format.

    The value is first parsed with Mambu's ISO format, re-rendered with
    *formato*, and parsed again — this truncates the result to the
    precision *formato* carries (e.g. '%Y-%m-%d' drops the time of day).
    """
    if not formato:
        try:
            # per-instance default format, when one was configured
            formato = self.__formatoFecha
        except AttributeError:
            formato = "%Y-%m-%dT%H:%M:%S+0000"
    return datetime.strptime(datetime.strptime(field, "%Y-%m-%dT%H:%M:%S+0000").strftime(formato), formato)
Converts a datetime field to a datetime using some specified format .
10,754
def create(self, data, *args, **kwargs):
    """Creates an entity in Mambu.

    Refuses to run unless a subclass defined its own ``create`` (the
    module of the bound method must match the instance's module).
    Temporarily switches the name-mangled request state to POST with the
    given payload, then restores the default GET state.
    """
    if self.create.__func__.__module__ != self.__module__:
        raise Exception("Child method not implemented")
    self._MambuStruct__method = "POST"
    self._MambuStruct__data = data
    self.connect(*args, **kwargs)
    # restore defaults so later calls behave as plain reads
    self._MambuStruct__method = "GET"
    self._MambuStruct__data = None
Creates an entity in Mambu
10,755
def make(self, cmd_args, db_args):
    """Runs the bcp FORMAT command to create a format file that will assist
    in creating the bulk data file.

    Returns the path of the generated format file; the parsed result is
    also loaded into this object via self.load().
    """
    with NamedTemporaryFile(delete=True) as f:
        # borrow the temp file's unique name for the format file path
        format_file = f.name + '.bcp-format'
        format_args = cmd_args + ['format', NULL_FILE, '-c', '-f', format_file, '-t,'] + db_args
        _run_cmd(format_args)
        self.load(format_file)
    return format_file
Runs bcp FORMAT command to create a format file that will assist in creating the bulk data file
10,756
def load(self, filename=None):
    """Read a non-XML bcp FORMAT file and parse it into the ``fields``
    list used when creating the bulk data file.

    The first two lines carry the SQL version and the field count; each
    remaining line describes one field and becomes a BCPFormatRow.
    """
    with open(filename, 'r') as fmt_file:
        lines = fmt_file.read().strip().split('\n')
    self._sql_version = lines.pop(0)
    self._num_fields = int(lines.pop(0))
    # collapse runs of spaces so each row splits cleanly into columns
    self.fields = [
        BCPFormatRow(re.sub(' +', ' ', raw.strip()).split(' '))
        for raw in lines
    ]
    self.filename = filename
Reads a non - XML bcp FORMAT file and parses it into fields list used for creating bulk data file
10,757
def retrieve_content(self):
    """Fetch this resource's content from its source endpoint.

    The fetched content is cached in ``self._populated_fields`` and
    returned.
    """
    endpoint = self._construct_path_to_source_content()
    content = self._http.get(endpoint)['content']
    self._populated_fields['content'] = content
    return content
Retrieve the content of a resource .
10,758
def _update(self, **kwargs):
    """Use separate URL for updating the source file.

    'content' (when present) is PUT to the dedicated content endpoint;
    every other field is delegated to the parent class update.
    """
    if 'content' in kwargs:
        content = kwargs.pop('content')
        path = self._construct_path_to_source_content()
        self._http.put(path, json.dumps({'content': content}))
    super(Resource, self)._update(**kwargs)
Use separate URL for updating the source file .
10,759
def all():
    """Clean the dist directory and uninstall every installed cloudmesh package."""
    dir()
    cmd3()
    banner("CLEAN PREVIOUS CLOUDMESH INSTALLS")
    # count installed cloudmesh packages via pip freeze
    r = int(local("pip freeze |fgrep cloudmesh | wc -l", capture=True))
    while r > 0:
        # answer 'y' to pip's uninstall confirmation prompt
        local('echo "y\n" | pip uninstall cloudmesh')
        r = int(local("pip freeze |fgrep cloudmesh | wc -l", capture=True))
Clean the dist directory and uninstall cloudmesh.
10,760
def find(cls, text):
    """Return an iterable of matches of this element in *text*.

    A string pattern is compiled on first use and the compiled regex is
    cached back onto the class.
    """
    pattern = cls.pattern
    if isinstance(pattern, string_types):
        pattern = re.compile(pattern)
        cls.pattern = pattern
    return pattern.finditer(text)
This method should return an iterable containing matches of this element .
10,761
def main():
    """Entry point for running crony.

    Parses the CLI arguments, hands them to a CommandCenter and exits
    with the monitored command's exit status.
    """
    parser = argparse.ArgumentParser(description='Monitor your crons with cronitor.io & sentry.io', epilog='https://github.com/youversion/crony', prog='crony')
    parser.add_argument('-c', '--cronitor', action='store', help='Cronitor link identifier. This can be found in your Cronitor unique' ' ping URL right after https://cronitor.link/')
    parser.add_argument('-e', '--venv', action='store', help='Path to virtualenv to source before running script. May be passed' ' as an argument or loaded from an environment variable or config file.')
    parser.add_argument('-d', '--cd', action='store', help='If the script needs ran in a specific directory, than can be passed' ' or cd can be ran prior to running crony.')
    parser.add_argument('-l', '--log', action='store', help='Log file to direct stdout of script run to. Can be passed or ' 'defined in config file with "log_file"')
    parser.add_argument('-o', '--config', action='store', help='Path to a crony config file to use.')
    parser.add_argument('-p', '--path', action='store', help='Paths to append to the PATH environment variable before running. ' ' Can be passed as an argument or loaded from config file.')
    parser.add_argument('-s', '--dsn', action='store', help='Sentry DSN. May be passed or loaded from an environment variable ' 'or a config file.')
    parser.add_argument('-t', '--timeout', action='store', default=10, help='Timeout to use when' ' sending requests to Cronitor', type=int)
    parser.add_argument('-v', '--verbose', action='store_true', help='Increase level of verbosity' ' output by crony')
    parser.add_argument('--version', action='store_true', help='Output crony version # and exit')
    # everything after the options is the command to run and monitor
    parser.add_argument('cmd', nargs=argparse.REMAINDER, help='Command to run and monitor')
    cc = CommandCenter(parser.parse_args())
    # cc.func runs the command; cc.log reports and returns the exit status
    sys.exit(cc.log(*cc.func()))
Entry point for running crony .
10,762
def cronitor(self):
    """Wrap run with requests to cronitor.

    Pings the 'run' endpoint before executing, then 'complete' or 'fail'
    depending on the exit status. Returns (output, exit_status).
    """
    url = f'https://cronitor.link/{self.opts.cronitor}/{{}}'
    try:
        # tell Cronitor the job started
        run_url = url.format('run')
        self.logger.debug(f'Pinging {run_url}')
        requests.get(run_url, timeout=self.opts.timeout)
    except requests.exceptions.RequestException as e:
        # monitoring failures must never break the job itself
        self.logger.exception(e)
    output, exit_status = self.run()
    endpoint = 'complete' if exit_status == 0 else 'fail'
    try:
        ping_url = url.format(endpoint)
        self.logger.debug('Pinging {}'.format(ping_url))
        requests.get(ping_url, timeout=self.opts.timeout)
    except requests.exceptions.RequestException as e:
        self.logger.exception(e)
    return output, exit_status
Wrap run with requests to cronitor .
10,763
def load_config(self, custom_config):
    """Attempt to load crony configuration from a file.

    A custom path wins; otherwise ~/.cronyrc and /etc/crony.conf are
    tried in that order. Returns a human-readable status message.
    """
    self.config = configparser.ConfigParser()
    if custom_config:
        self.config.read(custom_config)
        return f'Loading config from file {custom_config}.'
    user_home = os.path.expanduser('~{}'.format(getpass.getuser()))
    candidates = (os.path.join(user_home, '.cronyrc'), '/etc/crony.conf')
    for conf_file in candidates:
        if os.path.exists(conf_file):
            self.config.read(conf_file)
            return f'Loading config from file {conf_file}.'
    # no file found: keep an empty section so lookups don't KeyError
    self.config['crony'] = {}
    return 'No config file found.'
Attempt to load config from file .
10,764
def log(self, output, exit_status):
    """Log an error when *exit_status* is non-zero; return the status unchanged."""
    if exit_status != 0:
        message = f'Error running command! Exit status: {exit_status}, {output}'
        self.logger.error(message)
    return exit_status
Log given CompletedProcess and return exit status code .
10,765
def run(self):
    """Run command and report errors.

    Streams the command's combined stdout/stderr to the logger as it is
    produced. Returns ('', 0) on success, or (collected_output,
    exit_status) on failure.
    """
    self.logger.debug(f'Running command: {self.cmd}')

    def execute(cmd):
        # Generator yielding the command's output line by line;
        # raises CalledProcessError (carrying the output) on failure.
        output = ""
        popen = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True, shell=True)
        for stdout_line in iter(popen.stdout.readline, ""):
            stdout_line = stdout_line.strip('\n')
            output += stdout_line
            yield stdout_line
        popen.stdout.close()
        return_code = popen.wait()
        if return_code:
            raise subprocess.CalledProcessError(return_code, cmd, output)

    try:
        for out in execute(self.cmd):
            self.logger.info(out)
        return "", 0
    except subprocess.CalledProcessError as e:
        return e.output, e.returncode
Run command and report errors to Sentry .
10,766
def setup_dir(self):
    """Prefix the command with ``cd`` when a working directory is configured."""
    target = self.opts.cd or self.config['crony'].get('directory')
    if not target:
        return
    self.logger.debug(f'Adding cd to {target}')
    self.cmd = f'cd {target} && {self.cmd}'
Change directory for script if necessary .
10,767
def setup_logging(self):
    """Setup python logging handler.

    Always logs to stdout; optionally also to a file ('log_file' option)
    and, when a Sentry client is configured, errors go to Sentry too.
    """
    date_format = '%Y-%m-%dT%H:%M:%S'
    log_format = '%(asctime)s %(levelname)s: %(message)s'
    if self.opts.verbose:
        lvl = logging.DEBUG
    else:
        lvl = logging.INFO
    # silence noisy urllib/requests logging below WARNING
    logging.getLogger('requests').setLevel('WARNING')
    self.logger.setLevel(lvl)
    stdout = logging.StreamHandler(sys.stdout)
    stdout.setLevel(lvl)
    formatter = logging.Formatter(log_format, date_format)
    stdout.setFormatter(formatter)
    self.logger.addHandler(stdout)
    log = self.opts.log or self.config['crony'].get('log_file')
    if log:
        # mirror the same format/level into the log file
        logfile = logging.FileHandler(log)
        logfile.setLevel(lvl)
        formatter = logging.Formatter(log_format, date_format)
        logfile.setFormatter(formatter)
        self.logger.addHandler(logfile)
    if self.sentry_client:
        # only errors and above are forwarded to Sentry
        sentry = SentryHandler(self.sentry_client)
        sentry.setLevel(logging.ERROR)
        self.logger.addHandler(sentry)
    self.logger.debug('Logging setup complete.')
Setup python logging handler .
10,768
def setup_path(self):
    """Prepend configured paths to the PATH environment variable before running."""
    extra = self.opts.path or self.config['crony'].get('path')
    if not extra:
        return
    self.logger.debug(f'Adding {extra} to PATH environment variable')
    self.cmd = f'export PATH={extra}:$PATH && {self.cmd}'
Setup PATH env var if necessary .
10,769
def setup_venv(self):
    """Prefix the command with sourcing a virtualenv when one is configured.

    Resolution order: --venv CLI option, CRONY_VENV environment
    variable, 'venv' entry in the config file.
    """
    venv = self.opts.venv
    if not venv:
        venv = os.environ.get('CRONY_VENV')
        if not venv and self.config['crony']:
            venv = self.config['crony'].get('venv')
    if venv:
        if not venv.endswith('activate'):
            # a directory was given: point at its bin/activate script
            add_path = os.path.join('bin', 'activate')
            self.logger.debug(f'Venv directory given, adding {add_path}')
            venv = os.path.join(venv, add_path)
        self.logger.debug(f'Adding sourcing virtualenv {venv}')
        self.cmd = f'. {venv} && {self.cmd}'
Setup virtualenv if necessary .
10,770
def get_repos(path):
    """Return Local repo objects for every repo directory found under *path*."""
    base = str(path)
    found = []
    if not os.path.exists(base):
        return found
    for entry in os.listdir(base):
        candidate = os.path.join(base, entry)
        if os.path.exists(candidate) and is_repo(candidate):
            found.append(Local(candidate))
    return found
Returns list of found repositories.
10,771
def get_repo_parent(path):
    """Return the repo at *path* or at the nearest ancestor directory.

    Walks up from a non-directory path looking for a repo; returns the
    input path unchanged when no repo is found.

    Fix: removed the ``_rel`` accumulator that was built on every
    iteration but never read.
    """
    if is_repo(path):
        return Local(path)
    elif not os.path.isdir(path):
        while path and path != '/':
            if is_repo(path):
                return Local(path)
            path = os.path.dirname(path)
    return path
Returns parent repo or input path if none found .
10,772
def setVersion(self, version):
    """Checkout a version of the repo (hard reset to that version's commit)."""
    try:
        sha = self.versions(version).commit.sha
        self.git.reset("--hard", sha)
    except Exception, e:  # Python 2 except syntax preserved
        raise RepoError(e)
Checkout a version of the repo .
10,773
def _commits(self, head='HEAD'):
    """Returns a list of the commits reachable from head.

    Walks the parent graph breadth-first; each commit is inserted so the
    history stays sorted by ascending commit_time, and commits reached
    through several paths are kept only once.
    """
    pending_commits = [head]
    history = []
    while pending_commits != []:
        head = pending_commits.pop(0)
        try:
            commit = self[head]
        except KeyError:
            raise KeyError(head)
        if type(commit) != Commit:
            raise TypeError(commit)
        if commit in history:
            # already visited through another parent path
            continue
        # insertion point: after every commit with an earlier-or-equal time
        i = 0
        for known_commit in history:
            if known_commit.commit_time > commit.commit_time:
                break
            i += 1
        history.insert(i, commit)
        pending_commits += commit.parents
    return history
Returns a list of the commits reachable from head .
10,774
def versions(self, version=None):
    """List of Versions of this repository.

    With *version* given, returns only that (indexed) Version; raises
    VersionError for an out-of-range index. Returns [] when the repo has
    no commits yet.
    """
    try:
        versions = [Version(self, c) for c in self._commits()]
    except Exception, e:  # Python 2 except syntax preserved
        log.debug('No versions exist')
        return []
    if version is not None and versions:
        try:
            versions = versions[version]
        except IndexError:
            raise VersionError('Version %s does not exist' % version)
    return versions
List of Versions of this repository .
10,775
def setDescription(self, desc='No description'):
    """Sets the repository description (stored in the repo's 'description' file)."""
    try:
        self._put_named_file('description', desc)
    except Exception, e:  # Python 2 except syntax preserved
        raise RepoError(e)
sets repository description
10,776
def new(self, path, desc=None, bare=True):
    """Create a new (by default bare) repo and return its Local instance.

    Initializes the repository on disk, optionally sets the description,
    and records an initial version.
    """
    if os.path.exists(path):
        raise RepoError('Path already exists: %s' % path)
    try:
        os.mkdir(path)
        if bare:
            Repo.init_bare(path)
        else:
            Repo.init(path)
        repo = Local(path)
        if desc:
            repo.setDescription(desc)
        # record an initial empty version
        version = repo.addVersion()
        version.save('Repo Initialization')
        return repo
    except Exception, e:  # Python 2 except syntax preserved
        traceback.print_exc()
        raise RepoError('Error creating repo')
Create a new bare repo . Local instance .
10,777
def branch(self, name, desc=None):
    """Create a bare branch repo named *name* under this repo's path."""
    branch_path = os.path.join(self.path, name)
    return Local.new(path=branch_path, desc=desc, bare=True)
Create a branch of this repo at name .
10,778
def addItem(self, item, message=None):
    """Add a new Item class object by saving it in a fresh Version."""
    if message is None:
        message = 'Adding item %s' % item.path
    try:
        v = Version.new(repo=self)
        v.addItem(item)
        v.save(message)
    except VersionError, e:  # Python 2 except syntax preserved
        raise RepoError(e)
add a new Item class object
10,779
def items(self, path=None, version=None):
    """Returns a list of items.

    Items of the chosen version (latest by default) are merged with the
    parent chain's items — entries from this repo shadow the parent's.
    With *path* given, only items fully matching that regex are kept.
    """
    if version is None:
        version = -1
    items = {}
    for item in self.versions(version).items():
        items[item.path] = item
    # walk up the parent chain without overwriting child entries
    parent = self.parent
    while parent:
        for item in parent.items(path=path):
            if item.path not in items.keys():
                items[item.path] = item
        parent = parent.parent
    if path is not None:
        # anchor the pattern at end-of-string so the whole path must match
        path += '$'
        regex = re.compile(path)
        return [item for path, item in items.items() if regex.match(path)]
    else:
        return items.values()
Returns a list of items .
10,780
async def set_reply_markup(msg: Dict, request: 'Request', stack: 'Stack') -> None:
    """Add the reply markup to a message from the layers.

    InlineKeyboard, ReplyKeyboard and ReplyKeyboardRemove are checked in
    that order; each present layer overwrites msg['reply_markup'], so
    the last layer found in the stack wins.
    """
    from bernard.platforms.telegram.layers import InlineKeyboard, ReplyKeyboard, ReplyKeyboardRemove
    try:
        keyboard = stack.get_layer(InlineKeyboard)
    except KeyError:
        # layer not present in the stack
        pass
    else:
        msg['reply_markup'] = await keyboard.serialize(request)
    try:
        keyboard = stack.get_layer(ReplyKeyboard)
    except KeyError:
        pass
    else:
        msg['reply_markup'] = await keyboard.serialize(request)
    try:
        remove = stack.get_layer(ReplyKeyboardRemove)
    except KeyError:
        pass
    else:
        # note: ReplyKeyboardRemove.serialize is synchronous (not awaited)
        msg['reply_markup'] = remove.serialize()
Add the reply markup to a message from the layers
10,781
def split_locale(locale: Text) -> Tuple[Text, Optional[Text]]:
    """Decompose a locale into a normalized (language, region) tuple.

    'en-US' / 'en_us' both become ('en', 'us'); a bare 'en' becomes
    ('en', None).

    Fix: pass ``maxsplit`` as a keyword — positional use of re.split's
    maxsplit argument is deprecated since Python 3.13.
    """
    items = re.split(r'[_\-]', locale.lower(), maxsplit=1)
    if len(items) == 2:
        return items[0], items[1]
    return items[0], None
Decompose the locale into a normalized tuple .
10,782
def compare_locales(a, b):
    """Return the compatibility level between two locales.

    2 = exact match (including both None), 1 = same language but
    different region, 0 = incompatible.
    """
    if a is None or b is None:
        return 2 if a == b else 0
    lang_a, region_a = split_locale(a)
    lang_b, region_b = split_locale(b)
    if (lang_a, region_a) == (lang_b, region_b):
        return 2
    if lang_a == lang_b:
        return 1
    return 0
Compares two locales to find the level of compatibility
10,783
def list_locales(self) -> List[Optional[Text]]:
    """Return the list of available locales.

    The first entry is the default locale. When no locales are known the
    list is [None].
    """
    known = list(self.dict.keys())
    return known or [None]
Returns the list of available locales . The first locale is the default locale to be used . If no locales are known then None will be the first item .
10,784
def choose_locale(self, locale: Text) -> Text:
    """Return the best matching available locale for *locale*.

    Scores every known locale with compare_locales and memoizes the
    winner; falls back to the default (first) locale on no match.
    """
    if locale in self._choice_cache:
        return self._choice_cache[locale]
    candidates = self.list_locales()
    best, best_score = candidates[0], 0
    for candidate in candidates:
        score = compare_locales(locale, candidate)
        if score > best_score:
            best, best_score = candidate, score
    self._choice_cache[locale] = best
    return best
Returns the best matching locale in what is available .
10,785
def update(self, new_data: Dict[Text, Dict[Text, Text]]):
    """Merge translation data received from a loader into the internal dict."""
    for locale, entries in new_data.items():
        # create the locale bucket on first sight, then merge keys
        self.dict.setdefault(locale, {}).update(entries)
Receive an update from a loader .
10,786
async def _make_url ( self , url : Text , request : 'Request' ) -> Text : if self . sign_webview : return await request . sign_url ( url ) return url
Signs the URL if needed
10,787
def is_sharable(self):
    """Make sure that nothing inside blocks sharing.

    NOTE(review): when self.buttons is empty/falsy this falls through
    and implicitly returns None (falsy) — confirm whether a button-less
    element is meant to be non-sharable. The chained ``and`` may also
    return self.default_action itself when it is falsy.
    """
    if self.buttons:
        return (all(b.is_sharable() for b in self.buttons)
                and self.default_action
                and self.default_action.is_sharable())
Make sure that nothing inside blocks sharing .
10,788
def check_bounds_variables(self, dataset):
    """Checks the grid boundary variables.

    For each variable referenced by a ``bounds`` attribute, verifies the
    bounds variable exists, carries the expected (or at least non-empty)
    units, and has a non-empty comment. Returns the aggregated result.

    Fix: the comment-check message previously lacked
    ``.format(variable)``, so it reported a literal '{}'.
    """
    recommended_ctx = TestCtx(BaseCheck.MEDIUM, 'Recommended variables to describe grid boundaries')
    # expected units/comments for well-known bounds variables
    bounds_map = {
        'lat_bounds': {
            'units': 'degrees_north',
            'comment': 'latitude values at the north and south bounds of each pixel.'
        },
        'lon_bounds': {
            'units': 'degrees_east',
            'comment': 'longitude values at the west and east bounds of each pixel.'
        },
        'z_bounds': {
            'comment': 'z bounds for each z value',
        },
        'time_bounds': {
            'comment': 'time bounds for each time value'
        }
    }
    # every variable name referenced through a 'bounds' attribute
    bounds_variables = [v.bounds for v in dataset.get_variables_by_attributes(bounds=lambda x: x is not None)]
    for variable in bounds_variables:
        ncvar = dataset.variables.get(variable, {})
        recommended_ctx.assert_true(ncvar != {}, 'a variable {} should exist as indicated by a bounds attribute'.format(variable))
        if ncvar == {}:
            continue
        units = getattr(ncvar, 'units', '')
        if variable in bounds_map and 'units' in bounds_map[variable]:
            recommended_ctx.assert_true(units == bounds_map[variable]['units'], 'variable {} should have units {}'.format(variable, bounds_map[variable]['units']))
        else:
            recommended_ctx.assert_true(units != '', 'variable {} should have a units attribute that is not empty'.format(variable))
        comment = getattr(ncvar, 'comment', '')
        recommended_ctx.assert_true(comment != '', 'variable {} should have a comment and not be empty'.format(variable))
    return recommended_ctx.to_result()
Checks the grid boundary variables .
10,789
def geocode(self, string, bounds=None, region=None, language=None, sensor=False):
    """Geocode an address. Please refer to the Google Maps Web API for the details of the parameters."""
    if isinstance(string, unicode):
        # Python 2: the request needs UTF-8 bytes
        string = string.encode('utf-8')
    params = {'address': self.format_string % string, 'sensor': str(sensor).lower()}
    if bounds:
        params['bounds'] = bounds
    if region:
        params['region'] = region
    if language:
        params['language'] = language
    # premier accounts must sign their request URLs
    if not self.premier:
        url = self.get_url(params)
    else:
        url = self.get_signed_url(params)
    return self.GetService_url(url)
Geocode an address. Please refer to the Google Maps Web API for the details of the parameters.
10,790
def reverse(self, point, language=None, sensor=False):
    """Reverse geocode a point. Please refer to the Google Maps Web API
    for the details of the parameters."""
    params = {'latlng': point, 'sensor': str(sensor).lower()}
    if language:
        params['language'] = language
    # premier accounts must sign their request URLs
    build_url = self.get_signed_url if self.premier else self.get_url
    return self.GetService_url(build_url(params))
Reverse geocode a point. Please refer to the Google Maps Web API for the details of the parameters.
10,791
def GetDirections(self, origin, destination, sensor=False, mode=None, waypoints=None, alternatives=None, avoid=None, language=None, units=None, region=None, departure_time=None, arrival_time=None):
    """Get Directions Service. Please refer to the Google Maps Web API for
    the details of the remaining parameters."""
    params = {'origin': origin, 'destination': destination, 'sensor': str(sensor).lower()}
    # forward only the optional parameters that were actually given
    if mode:
        params['mode'] = mode
    if waypoints:
        params['waypoints'] = waypoints
    if alternatives:
        params['alternatives'] = alternatives
    if avoid:
        params['avoid'] = avoid
    if language:
        params['language'] = language
    if units:
        params['units'] = units
    if region:
        params['region'] = region
    if departure_time:
        params['departure_time'] = departure_time
    if arrival_time:
        params['arrival_time'] = arrival_time
    # premier accounts must sign their request URLs
    if not self.premier:
        url = self.get_url(params)
    else:
        url = self.get_signed_url(params)
    return self.GetService_url(url)
Get Directions Service. Please refer to the Google Maps Web API for the details of the remaining parameters.
10,792
def get(self):
    """Convert the JSON environment variable, if set, to a list.

    When the variable is unset, writes the default back into the
    environment (so child processes see it) and returns the default.
    Exits the process with status 1 on malformed JSON.
    """
    self._cast = type([])
    source_value = os.getenv(self.env_name)
    if source_value is None:
        os.environ[self.env_name] = json.dumps(self.default)
        return self.default
    try:
        val = json.loads(source_value)
    except JSONDecodeError as e:
        click.secho(str(e), err=True, color='red')
        sys.exit(1)
    except ValueError as e:
        # older Python: JSON errors are plain ValueError with .message
        click.secho(e.message, err=True, color='red')
        sys.exit(1)
    if self.validator:
        # optional post-processing/validation hook
        val = self.validator(val)
    return val
Convert the JSON environment variable, if set, to a list.
10,793
def parse_ppi_graph(path: str, min_edge_weight: float = 0.0) -> Graph:
    """Build an undirected graph of gene interactions from an edgelist file.

    Edges lighter than *min_edge_weight* are removed, then any vertex
    left isolated is dropped as well.
    """
    logger.info("In parse_ppi_graph()")
    graph = igraph.read(os.path.expanduser(path), format="ncol", directed=False, names=True)
    graph.delete_edges(graph.es.select(weight_lt=min_edge_weight))
    graph.delete_vertices(graph.vs.select(_degree=0))
    logger.info(f"Loaded PPI network.\n"
                f"Number of proteins: {len(graph.vs)}\n"
                f"Number of interactions: {len(graph.es)}\n")
    return graph
Build an undirected graph of gene interactions from edgelist file .
10,794
def parse_excel(file_path: str, entrez_id_header, log_fold_change_header, adjusted_p_value_header, entrez_delimiter, base_mean_header=None) -> List[Gene]:
    """Read an Excel file of differential-expression values into Gene objects."""
    logger.info("In parse_excel()")
    frame = pd.read_excel(file_path)
    return handle_dataframe(
        frame,
        entrez_id_name=entrez_id_header,
        log2_fold_change_name=log_fold_change_header,
        adjusted_p_value_name=adjusted_p_value_header,
        entrez_delimiter=entrez_delimiter,
        base_mean=base_mean_header,
    )
Read an Excel file of differential expression values into Gene objects.
10,795
def parse_csv(file_path: str, entrez_id_header, log_fold_change_header, adjusted_p_value_header, entrez_delimiter, base_mean_header=None, sep=",") -> List[Gene]:
    """Read a delimited text file of differential-expression values into Gene objects."""
    logger.info("In parse_csv()")
    frame = pd.read_csv(file_path, sep=sep)
    return handle_dataframe(
        frame,
        entrez_id_name=entrez_id_header,
        log2_fold_change_name=log_fold_change_header,
        adjusted_p_value_name=adjusted_p_value_header,
        entrez_delimiter=entrez_delimiter,
        base_mean=base_mean_header,
    )
Read a CSV file of differential expression values into Gene objects.
10,796
def handle_dataframe(df: pd.DataFrame, entrez_id_name, log2_fold_change_name, adjusted_p_value_name, entrez_delimiter, base_mean=None, ) -> List[Gene]:
    """Convert a dataframe of differential expression values to Gene objects.

    Rows with nulls in the relevant columns are dropped. A row whose
    entrez-id cell holds several delimiter-separated ids yields one Gene
    per id, each sharing the row's fold change and adjusted p-value.
    """
    logger.info("In _handle_df()")
    if base_mean is not None and base_mean in df.columns:
        df = df[pd.notnull(df[base_mean])]
    df = df[pd.notnull(df[entrez_id_name])]
    df = df[pd.notnull(df[log2_fold_change_name])]
    df = df[pd.notnull(df[adjusted_p_value_name])]
    return [
        Gene(entrez_id=entrez_id, log2_fold_change=data[log2_fold_change_name], padj=data[adjusted_p_value_name])
        for _, data in df.iterrows()
        for entrez_id in str(data[entrez_id_name]).split(entrez_delimiter)
    ]
Convert a data frame of differential expression values to Gene objects.
10,797
def parse_gene_list(path: str, graph: Graph, anno_type: str = "name") -> list:
    """Parse a list of genes and return those present in the network.

    *anno_type* selects which vertex attribute ('name' or 'symbol') the
    ids are matched against; any other value raises.
    """
    genes = pd.read_csv(path, header=None)[0].tolist()
    # normalize numeric ids to plain integer strings (e.g. 7157.0 -> '7157')
    genes = [str(int(gene)) for gene in genes]
    ind = []
    if anno_type == "name":
        ind = graph.vs.select(name_in=genes).indices
    elif anno_type == "symbol":
        ind = graph.vs.select(symbol_in=genes).indices
    else:
        raise Exception("The type can either be name or symbol, {} is not "
                        "supported".format(anno_type))
    # map matched vertex indices back to their attribute values
    genes = graph.vs[ind][anno_type]
    return genes
Parse a list of genes and return them if they are in the network .
10,798
def parse_disease_ids(path: str):
    """Parse the disease identifier file into a set of IDs.

    Returns an empty set when *path* is missing or a directory.

    Fix: the missing-file branch previously returned a list while the
    normal branch returned a set — unified to ``set`` so callers get a
    consistent type.
    """
    if os.path.isdir(path) or not os.path.exists(path):
        logger.info("Couldn't find the disease identifiers file. Returning empty set.")
        return set()
    df = pd.read_csv(path, names=["ID"])
    return set(df["ID"].tolist())
Parse the disease identifier file .
10,799
def parse_disease_associations(path: str, excluded_disease_ids: set):
    """Parse the disease-drug target associations file.

    Returns a mapping target_id -> [disease_id, ...], skipping any id in
    *excluded_disease_ids*; an empty dict when the file is missing.
    """
    if os.path.isdir(path) or not os.path.exists(path):
        logger.info("Couldn't find the disease associations file. Returning empty list.")
        return {}
    disease_associations = defaultdict(list)
    with open(path) as input_file:
        for line in input_file:
            # each line: "<target_id> <disease_id>"
            target_id, disease_id = line.strip().split(" ")
            if disease_id not in excluded_disease_ids:
                disease_associations[target_id].append(disease_id)
    return disease_associations
Parse the disease - drug target associations file .