idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
55,500
def get_rows(self):
    """Return the data rows of a broadcast ratings chart.

    Skips the header row and the three trailer rows, and drops any row
    whose fourth cell has no string content.
    """
    chart_rows = self.soup.find_all('tr')[1:-3]
    return [row for row in chart_rows if row.contents[3].string]
Get the rows from a broadcast ratings chart
55,501
def fetch_entries(self):
    """Fetch and parse ratings rows into a list of broadcast entries.

    Rows missing a time inherit the most recent one seen; rows that do
    not match the current query are skipped. Stops early once the
    configured limit is exceeded.
    """
    current_time = ''
    entries = []
    for row in self.get_rows():
        if exceeded_limit(self.limit, len(entries)):
            break
        cells = row.find_all('td')
        fields = {}
        show_time = cells[0].string
        if show_time and show_time != current_time:
            current_time = show_time
        if not show_time:
            # Carry the previous time forward for continuation rows.
            show_time = current_time
        fields['time'] = show_time
        show_string = cells[1].string.split('(')
        show = show_string[0][:-1]
        net = self._get_net(show_string)
        if not self._match_query(show, net):
            continue
        fields['show'] = show
        fields['net'] = net
        fields['viewers'] = cells[3].string.strip('*')
        fields['rating'], fields['share'] = self._get_rating(cells)
        entries.append(Entry(**fields))
    return entries
Fetch data and parse it to build a list of broadcast entries .
55,502
def get_averages(self):
    """Get the broadcast network averages for the day.

    Returns a dict keyed by network name with 'viewer', 'rating' and
    'share' float values.
    """
    networks = [unescape_html(cell.string)
                for cell in self.soup.find_all('td', width='77')]
    styled_cells = self.soup.find_all('td', style=re.compile('^font'))
    # First five styled cells hold rating/share pairs, the rest viewers.
    rateshares = [c.string.split('/') for c in styled_cells[:5] if c.string]
    viewers = [c.string for c in styled_cells[5:] if c.string]
    averages = {}
    for idx, network in enumerate(networks):
        averages[network] = {
            'viewer': convert_float(unescape_html(viewers[idx])),
            'rating': convert_float(unescape_html(rateshares[idx][0])),
            'share': convert_float(unescape_html(rateshares[idx][1])),
        }
    return averages
Get the broadcast network averages for that day .
55,503
def _get_net ( self , entry ) : try : net = entry [ 1 ] return net [ net . find ( '(' ) + 1 : net . find ( ')' ) ] except IndexError : return None
Get the network for a specific row
55,504
def _get_rating ( self , entry ) : r_info = '' for string in entry [ 2 ] . strings : r_info += string rating , share = r_info . split ( '/' ) return ( rating , share . strip ( '*' ) )
Get the rating and share for a specific row
55,505
def _visit ( self , L , marked , tempmarked ) : assert not self . is_pseudo if self in tempmarked : raise Exception ( 'feature graph is cyclic' ) if self not in marked : tempmarked [ self ] = True features = list ( ) if self . siblings is not None and self . is_toplevel : features . extend ( reversed ( self . siblings ) ) if self . children is not None : features . extend ( reversed ( self . children ) ) if len ( features ) > 0 : for feature in features : feature . _visit ( L , marked , tempmarked ) marked [ self ] = True del tempmarked [ self ] L . insert ( 0 , self )
Sort features topologically .
55,506
def add_child(self, child, rangecheck=False):
    """Add a child feature, optionally validating strand and range."""
    assert self.seqid == child.seqid, (
        'seqid mismatch for feature {} ({} vs {})'.format(
            self.fid, self.seqid, child.seqid))
    if rangecheck is True:
        assert self._strand == child._strand, (
            'child of feature {} has a different strand'.format(self.fid))
        assert self._range.contains(child._range), (
            'child of feature {} is not contained within its span '
            '({}-{})'.format(self.fid, child.start, child.end))
    if self.children is None:
        self.children = list()
    self.children.append(child)
    self.children.sort()
Add a child feature to this feature .
55,507
def pseudoify(self):
    """Derive a pseudo-feature parent spanning this multi-feature."""
    assert self.is_toplevel
    assert self.is_multi
    assert len(self.multi_rep.siblings) > 0
    rep = self.multi_rep
    members = rep.siblings + [rep]
    start = min([m.start for m in members])
    end = max([m.end for m in members])
    parent = Feature(None)
    parent._pseudo = True
    parent._seqid = self._seqid
    parent.set_coord(start, end)
    parent._strand = self._strand
    for member in members:
        parent.add_child(member, rangecheck=True)
    parent.children = sorted(parent.children)
    rep.siblings = sorted(rep.siblings)
    return parent
Derive a pseudo - feature parent from the given multi - feature .
55,508
def slug(self):
    """Return a concise one-line descriptor, e.g. 'mRNA@chr1[1, 100]'.

    The start coordinate is converted to 1-based for display.
    """
    coords = (self.start + 1, self.end)
    return '{:s}@{:s}[{:d}, {:d}]'.format(self.type, self.seqid, *coords)
A concise slug for this feature .
55,509
def add_sibling(self, sibling):
    """Register *sibling* as a co-feature, making self the representative."""
    assert self.is_pseudo is False
    if self.siblings is None:
        # First sibling: this feature becomes the multi-feature rep.
        self.siblings = list()
        self.multi_rep = self
    sibling.multi_rep = self
    self.siblings.append(sibling)
Designate this a multi - feature representative and add a co - feature .
55,510
def source(self, newsource):
    """Set the source, propagating to sub-features sharing the old one."""
    oldsource = self.source
    for feat in self:
        if feat.source == oldsource:
            feat._source = newsource
When modifying source also update children with matching source .
55,511
def type(self, newtype):
    """Set the feature type; a multi-feature updates every sibling too."""
    self._type = newtype
    if self.is_multi:
        for sib in self.multi_rep.siblings:
            sib._type = newtype
If the feature is a multifeature update all entries .
55,512
def transform(self, offset, newseqid=None):
    """Shift every feature's coordinates by *offset*; optionally re-seqid."""
    for feat in self:
        feat._range.transform(offset)
        if newseqid is not None:
            feat.seqid = newseqid
Transform the feature's coordinates by the given offset.
55,513
def add_attribute(self, attrkey, attrvalue, append=False, oldvalue=None):
    """Add an attribute to this feature.

    'ID' is stored as a scalar and propagated to children (as 'Parent')
    and siblings; all other keys map to a dict of values. *append* keeps
    existing values, *oldvalue* removes a previous value first.
    """
    if attrkey == 'ID':
        if self.children is not None:
            oldid = self.get_attribute('ID')
            for child in self.children:
                child.add_attribute('Parent', attrvalue, oldvalue=oldid)
        self._attrs[attrkey] = attrvalue
        if self.is_multi:
            # Keep all co-features of a multi-feature in sync.
            self.multi_rep._attrs[attrkey] = attrvalue
            for sibling in self.multi_rep.siblings:
                sibling._attrs[attrkey] = attrvalue
        return
    if oldvalue is not None and attrkey in self._attrs:
        assert oldvalue in self._attrs[attrkey]
        del self._attrs[attrkey][oldvalue]
    if attrkey not in self._attrs or append is False:
        self._attrs[attrkey] = dict()
    self._attrs[attrkey][attrvalue] = True
Add an attribute to this feature .
55,514
def get_attribute(self, attrkey, as_string=False, as_list=False):
    """Get the value of an attribute.

    'ID' is returned as-is; other keys return a single value, a sorted
    list (*as_list*), or a comma-joined string (*as_string*).
    """
    assert not as_string or not as_list
    if attrkey not in self._attrs:
        return None
    if attrkey == 'ID':
        return self._attrs[attrkey]
    values = sorted(self._attrs[attrkey])
    if len(values) == 1 and not as_list:
        return values[0]
    if as_string:
        return ','.join(values)
    return values
Get the value of an attribute .
55,515
def parse_attributes(self, attrstring):
    """Parse a GFF3-style attribute string into a dict.

    'ID' maps to a single string; every other key maps to a dict of its
    comma-separated values. Empty/placeholder strings yield {}.
    """
    if attrstring in [None, '', '.']:
        return dict()
    attributes = dict()
    for kvp in attrstring.split(';'):
        if kvp == '':
            continue
        key, value = kvp.split('=')
        if key == 'ID':
            assert ',' not in value
            attributes[key] = value
        else:
            attributes[key] = dict((val, True) for val in value.split(','))
    return attributes
Parse an attribute string .
55,516
def attribute_crawl(self, key):
    """Collect all values of *key* across this feature and sub-features."""
    union = set()
    for feat in self:
        values = feat.get_attribute(key, as_list=True)
        if values is not None:
            union.update(set(values))
    return union
Grab all attribute values associated with the given feature .
55,517
def ncbi_geneid(self):
    """Return this feature's NCBI GeneID from Dbxref, or None."""
    values = self.get_attribute('Dbxref', as_list=True)
    if values is None:
        return None
    for value in values:
        if value.startswith('GeneID:'):
            _, geneid = value.split(':')
            return geneid
    return None
Retrieve this feature's NCBI GeneID, if it's present.
55,518
def cdslen(self):
    """Return the total CDS length of an mRNA feature, else None."""
    if self.type != 'mRNA':
        return None
    return sum(len(child) for child in self.children if child.type == 'CDS')
Translated length of this feature .
55,519
def parse_querystring(msg):
    """Parse a querystring into (key, value) pairs.

    Yields one tuple per '&'-separated component of msg.querystring,
    after stripping whitespace and a leading '?'.

    Fix: split on the first '=' only, so values that themselves contain
    '=' (e.g. base64 padding) no longer raise ValueError.
    """
    for part in msg.querystring.strip().lstrip('?').split('&'):
        key, value = part.split('=', 1)
        yield key, value
parse a querystring into keys and values
55,520
def AddClusterTags(r, tags, dry_run=False):
    """Add tags to the cluster."""
    return r.request("put", "/2/tags",
                     query={"dry-run": dry_run, "tag": tags})
Adds tags to the cluster .
55,521
def DeleteClusterTags(r, tags, dry_run=False):
    """Delete tags from the cluster."""
    return r.request("delete", "/2/tags",
                     query={"dry-run": dry_run, "tag": tags})
Deletes tags from the cluster .
55,522
def GetInstances(r, bulk=False):
    """List cluster instances; *bulk* returns full detail per instance."""
    if bulk:
        return r.request("get", "/2/instances", query={"bulk": 1})
    instances = r.request("get", "/2/instances")
    return r.applier(itemgetters("id"), instances)
Gets information about instances on the cluster .
55,523
def GetInstanceInfo(r, instance, static=None):
    """Fetch info for one instance; *static* toggles static-only data."""
    url = "/2/instances/%s/info" % instance
    if static is None:
        return r.request("get", url)
    return r.request("get", url, query={"static": static})
Gets information about an instance .
55,524
def DeleteInstance(r, instance, dry_run=False):
    """Delete an instance."""
    return r.request("delete", "/2/instances/%s" % instance,
                     query={"dry-run": dry_run})
Deletes an instance .
55,525
def ActivateInstanceDisks(r, instance, ignore_size=False):
    """Activate an instance's disks."""
    return r.request("put", "/2/instances/%s/activate-disks" % instance,
                     query={"ignore_size": ignore_size})
Activates an instance's disks.
55,526
def RecreateInstanceDisks(r, instance, disks=None, nodes=None):
    """Recreate an instance's disks, optionally on specific nodes."""
    body = {key: val
            for key, val in (("disks", disks), ("nodes", nodes))
            if val is not None}
    return r.request("post", "/2/instances/%s/recreate-disks" % instance,
                     content=body)
Recreate an instance's disks.
55,527
def GrowInstanceDisk(r, instance, disk, amount, wait_for_sync=False):
    """Grow one disk of an instance by *amount*."""
    return r.request("post",
                     "/2/instances/%s/disk/%s/grow" % (instance, disk),
                     content={"amount": amount,
                              "wait_for_sync": wait_for_sync})
Grows a disk of an instance .
55,528
def AddInstanceTags(r, instance, tags, dry_run=False):
    """Add tags to an instance."""
    return r.request("put", "/2/instances/%s/tags" % instance,
                     query={"tag": tags, "dry-run": dry_run})
Adds tags to an instance .
55,529
def DeleteInstanceTags(r, instance, tags, dry_run=False):
    """Delete tags from an instance."""
    return r.request("delete", "/2/instances/%s/tags" % instance,
                     query={"tag": tags, "dry-run": dry_run})
Deletes tags from an instance .
55,530
def RebootInstance(r, instance, reboot_type=None, ignore_secondaries=False,
                   dry_run=False):
    """Reboot an instance; *reboot_type* must be hard/soft/full if given."""
    params = {"ignore_secondaries": ignore_secondaries, "dry-run": dry_run}
    if reboot_type:
        if reboot_type not in ("hard", "soft", "full"):
            raise GanetiApiError("reboot_type must be one of 'hard',"
                                 " 'soft', or 'full'")
        params["type"] = reboot_type
    return r.request("post", "/2/instances/%s/reboot" % instance,
                     query=params)
Reboots an instance .
55,531
def ShutdownInstance(r, instance, dry_run=False, no_remember=False,
                     timeout=120):
    """Shut down an instance, waiting up to *timeout* seconds."""
    return r.request("put", "/2/instances/%s/shutdown" % instance,
                     query={"dry-run": dry_run, "no-remember": no_remember},
                     content={"timeout": timeout})
Shuts down an instance .
55,532
def StartupInstance(r, instance, dry_run=False, no_remember=False):
    """Start up an instance."""
    return r.request("put", "/2/instances/%s/startup" % instance,
                     query={"dry-run": dry_run, "no-remember": no_remember})
Starts up an instance .
55,533
def ReinstallInstance(r, instance, os=None, no_startup=False, osparams=None):
    """Reinstall an instance, using the body-based API when supported."""
    url = "/2/instances/%s/reinstall" % instance
    if INST_REINSTALL_REQV1 in r.features:
        body = {"start": not no_startup}
        if os is not None:
            body["os"] = os
        if osparams is not None:
            body["osparams"] = osparams
        return r.request("post", url, content=body)
    # Legacy servers: query-string based, no osparams support.
    if osparams:
        raise GanetiApiError("Server does not support specifying OS"
                             " parameters for instance reinstallation")
    params = {"nostartup": no_startup}
    if os:
        params["os"] = os
    return r.request("post", url, query=params)
Reinstalls an instance .
55,534
def ReplaceInstanceDisks(r, instance, disks=None, mode=REPLACE_DISK_AUTO,
                         remote_node=None, iallocator=None, dry_run=False):
    """Replace disks on an instance."""
    if mode not in REPLACE_DISK:
        raise GanetiApiError("Invalid mode %r not one of %r" %
                             (mode, REPLACE_DISK))
    params = {"mode": mode, "dry-run": dry_run}
    if disks:
        params["disks"] = ",".join(str(idx) for idx in disks)
    if remote_node:
        params["remote_node"] = remote_node
    if iallocator:
        params["iallocator"] = iallocator
    return r.request("post", "/2/instances/%s/replace-disks" % instance,
                     query=params)
Replaces disks on an instance .
55,535
def ExportInstance(r, instance, mode, destination, shutdown=None,
                   remove_instance=None, x509_key_name=None,
                   destination_x509_ca=None):
    """Export an instance to *destination*."""
    body = {"destination": destination, "mode": mode}
    optional = (("shutdown", shutdown),
                ("remove_instance", remove_instance),
                ("x509_key_name", x509_key_name),
                ("destination_x509_ca", destination_x509_ca))
    for key, val in optional:
        if val is not None:
            body[key] = val
    return r.request("put", "/2/instances/%s/export" % instance,
                     content=body)
Exports an instance .
55,536
def MigrateInstance(r, instance, mode=None, cleanup=None):
    """Migrate an instance."""
    body = {key: val
            for key, val in (("mode", mode), ("cleanup", cleanup))
            if val is not None}
    return r.request("put", "/2/instances/%s/migrate" % instance,
                     content=body)
Migrates an instance .
55,537
def FailoverInstance(r, instance, iallocator=None, ignore_consistency=False,
                     target_node=None):
    """Fail over an instance."""
    body = {"ignore_consistency": ignore_consistency}
    if iallocator is not None:
        body["iallocator"] = iallocator
    if target_node is not None:
        body["target_node"] = target_node
    return r.request("put", "/2/instances/%s/failover" % instance,
                     content=body)
Does a failover of an instance .
55,538
def RenameInstance(r, instance, new_name, ip_check, name_check=None):
    """Rename an instance."""
    body = {"ip_check": ip_check, "new_name": new_name}
    if name_check is not None:
        body["name_check"] = name_check
    return r.request("put", "/2/instances/%s/rename" % instance,
                     content=body)
Changes the name of an instance .
55,539
def WaitForJobChange(r, job_id, fields, prev_job_info, prev_log_serial):
    """Wait for changes on a job."""
    return r.request("get", "/2/jobs/%s/wait" % job_id, content={
        "fields": fields,
        "previous_job_info": prev_job_info,
        "previous_log_serial": prev_log_serial,
    })
Waits for job changes .
55,540
def CancelJob(r, job_id, dry_run=False):
    """Cancel a job."""
    return r.request("delete", "/2/jobs/%s" % job_id,
                     query={"dry-run": dry_run})
Cancels a job .
55,541
def GetNodes(r, bulk=False):
    """List cluster nodes; *bulk* returns full detail per node."""
    if bulk:
        return r.request("get", "/2/nodes", query={"bulk": 1})
    nodes = r.request("get", "/2/nodes")
    return r.applier(itemgetters("id"), nodes)
Gets all nodes in the cluster .
55,542
def EvacuateNode(r, node, iallocator=None, remote_node=None, dry_run=False,
                 early_release=False, mode=None, accept_old=False):
    """Evacuate instances from a node."""
    if iallocator and remote_node:
        raise GanetiApiError("Only one of iallocator or remote_node can"
                             " be used")
    params = {"dry-run": dry_run}
    if iallocator:
        params["iallocator"] = iallocator
    if remote_node:
        params["remote_node"] = remote_node
    if NODE_EVAC_RES1 in r.features:
        # Modern servers: evacuation options travel in the request body.
        body = {"early_release": early_release}
        if iallocator is not None:
            body["iallocator"] = iallocator
        if remote_node is not None:
            body["remote_node"] = remote_node
        if mode is not None:
            body["mode"] = mode
    else:
        # Legacy (<= 2.4) servers: query-string only, secondaries only.
        body = None
        if not accept_old:
            raise GanetiApiError("Server is version 2.4 or earlier and"
                                 " caller does not accept old-style"
                                 " results (parameter accept_old)")
        if mode is not None and mode != NODE_EVAC_SEC:
            raise GanetiApiError("Server can only evacuate secondary instances")
        if iallocator is not None:
            params["iallocator"] = iallocator
        if remote_node is not None:
            params["remote_node"] = remote_node
        if params:
            params["early_release"] = 1
    return r.request("post", "/2/nodes/%s/evacuate" % node,
                     query=params, content=body)
Evacuates instances from a Ganeti node .
55,543
def MigrateNode(r, node, mode=None, dry_run=False, iallocator=None,
                target_node=None):
    """Migrate all primary instances off a node."""
    params = {"dry-run": dry_run}
    if NODE_MIGRATE_REQV1 in r.features:
        body = {key: val
                for key, val in (("mode", mode),
                                 ("iallocator", iallocator),
                                 ("target_node", target_node))
                if val is not None}
    else:
        if target_node is not None:
            raise GanetiApiError("Server does not support specifying"
                                 " target node for node migration")
        body = None
        if mode is not None:
            params["mode"] = mode
    return r.request("post", "/2/nodes/%s/migrate" % node,
                     query=params, content=body)
Migrates all primary instances from a node .
55,544
def SetNodeRole(r, node, role, force=False, auto_promote=False):
    """Set a node's role."""
    return r.request("put", "/2/nodes/%s/role" % node,
                     query={"force": force, "auto_promote": auto_promote},
                     content=role)
Sets the role for a node .
55,545
def PowercycleNode(r, node, force=False):
    """Power-cycle a node."""
    return r.request("post", "/2/nodes/%s/powercycle" % node,
                     query={"force": force})
Powercycles a node .
55,546
def GetNodeStorageUnits(r, node, storage_type, output_fields):
    """List storage units on a node."""
    return r.request("get", "/2/nodes/%s/storage" % node,
                     query={"storage_type": storage_type,
                            "output_fields": output_fields})
Gets the storage units for a node .
55,547
def ModifyNodeStorageUnits(r, node, storage_type, name, allocatable=None):
    """Modify parameters of a storage unit on a node."""
    params = {"storage_type": storage_type, "name": name}
    if allocatable is not None:
        params["allocatable"] = allocatable
    return r.request("put", "/2/nodes/%s/storage/modify" % node,
                     query=params)
Modifies parameters of storage units on the node .
55,548
def RepairNodeStorageUnits(r, node, storage_type, name):
    """Repair a storage unit on a node."""
    return r.request("put", "/2/nodes/%s/storage/repair" % node,
                     query={"storage_type": storage_type, "name": name})
Repairs a storage unit on the node .
55,549
def AddNodeTags(r, node, tags, dry_run=False):
    """Add tags to a node."""
    # NOTE(review): unlike the other *Tags helpers, tags are sent both as
    # query parameters and as the request body here — presumably
    # intentional; confirm against the RAPI spec before changing.
    return r.request("put", "/2/nodes/%s/tags" % node,
                     query={"tag": tags, "dry-run": dry_run},
                     content=tags)
Adds tags to a node .
55,550
def DeleteNodeTags(r, node, tags, dry_run=False):
    """Delete tags from a node."""
    return r.request("delete", "/2/nodes/%s/tags" % node,
                     query={"tag": tags, "dry-run": dry_run})
Delete tags from a node .
55,551
def GetGroups(r, bulk=False):
    """List node groups; *bulk* returns full detail per group."""
    if bulk:
        return r.request("get", "/2/groups", query={"bulk": 1})
    groups = r.request("get", "/2/groups")
    return r.applier(itemgetters("name"), groups)
Gets all node groups in the cluster .
55,552
def CreateGroup(r, name, alloc_policy=None, dry_run=False):
    """Create a node group."""
    return r.request("post", "/2/groups",
                     query={"dry-run": dry_run},
                     content={"name": name, "alloc_policy": alloc_policy})
Creates a new node group .
55,553
def DeleteGroup(r, group, dry_run=False):
    """Delete a node group."""
    return r.request("delete", "/2/groups/%s" % group,
                     query={"dry-run": dry_run})
Deletes a node group .
55,554
def RenameGroup(r, group, new_name):
    """Rename a node group."""
    return r.request("put", "/2/groups/%s/rename" % group,
                     content={"new_name": new_name})
Changes the name of a node group .
55,555
def AssignGroupNodes(r, group, nodes, force=False, dry_run=False):
    """Assign nodes to a group."""
    return r.request("put", "/2/groups/%s/assign-nodes" % group,
                     query={"force": force, "dry-run": dry_run},
                     content={"nodes": nodes})
Assigns nodes to a group .
55,556
def AddGroupTags(r, group, tags, dry_run=False):
    """Add tags to a node group."""
    return r.request("put", "/2/groups/%s/tags" % group,
                     query={"dry-run": dry_run, "tag": tags})
Adds tags to a node group .
55,557
def DeleteGroupTags(r, group, tags, dry_run=False):
    """Delete tags from a node group."""
    return r.request("delete", "/2/groups/%s/tags" % group,
                     query={"dry-run": dry_run, "tag": tags})
Deletes tags from a node group .
55,558
def Query(r, what, fields, qfilter=None):
    """Query resources; sets both 'qfilter' and the legacy 'filter' key."""
    payload = {"fields": fields}
    if qfilter is not None:
        payload["qfilter"] = payload["filter"] = qfilter
    return r.request("put", "/2/query/%s" % what, content=payload)
Retrieves information about resources .
55,559
def QueryFields(r, what, fields=None):
    """Fetch the available fields for a resource type."""
    params = {}
    if fields is not None:
        params["fields"] = ",".join(fields)
    return r.request("get", "/2/query/%s/fields" % what, query=params)
Retrieves available fields for a resource .
55,560
def createalphabet(alphabetinput=None):
    """Create an alphabet (list of single-character strings).

    *alphabetinput* may be a file path (loaded via _load_alphabet), a
    comma-separated list of codepoint ranges like '48-58,65-91', or None
    for the default printable-ASCII alphabet (codepoints 32..126).

    Fix: use chr() — unichr() is Python-2-only and raises NameError on
    Python 3.
    """
    if alphabetinput and os.path.isfile(alphabetinput):
        return _load_alphabet(alphabetinput)
    if alphabetinput:
        alpha = []
        for alphaset in alphabetinput.split(','):
            first, last = alphaset.split('-')
            # NOTE: the upper bound is exclusive, matching the original.
            alpha.extend(chr(i) for i in range(int(first), int(last)))
        return alpha
    return [chr(i) for i in range(32, 127)]
Creates a sample alphabet containing printable ASCII characters
55,561
def _instant_search ( self ) : _keys = [ ] for k , v in self . searchables . iteritems ( ) : if self . string in v : _keys . append ( k ) self . candidates . append ( _keys )
Determine possible keys after a push or pop
55,562
def best_guess(self):
    """Return (key, text) of the candidate best matching self.string.

    Scoring: exact match dominates (+100); prefix and substring matches
    each add 1; a length-ratio bonus breaks ties. Returns (0, 0) when
    nothing scores above zero.

    Fix: dict.iteritems() is Python-2-only; use items().
    """
    best_guess_ever = (0, 0)
    points = defaultdict(float)
    points[0] = 0  # baseline for the (0, 0) sentinel
    if len(self.string) > 0:
        for key in self.candidate_keys:
            guess = self.searchables[key]
            if guess == self.string:
                points[key] += 100
                break
            if len(self.string) > len(guess):
                continue
            if guess.startswith(self.string):
                points[key] += 1
            if self.string in guess:
                points[key] += 1
            if points[key] > 0:
                points[key] += float(len(self.string)) / len(guess)
    for key, score in points.items():
        if points[best_guess_ever[0]] < score:
            best_guess_ever = (key, self.searchables[key])
    return best_guess_ever
Return the gnomekeyring position of the closest matching
55,563
def find_html_files(self, destination):
    """Yield the path of every .html file under *destination*."""
    for root, _dirs, filenames in os.walk(destination):
        for name in filenames:
            if name.endswith('.html'):
                yield os.path.join(root, name)
Finds all html files in the given destination .
55,564
def minify_file(self, target):
    """Minify the target HTML file in place.

    The encoding is first sniffed with chardet from the raw bytes, then
    the file is re-read as text, minified with htmlmin, and rewritten.

    Fix: the original left the binary read's file handle open (resource
    leak); it is now closed via a context manager.
    """
    with open(target, 'rb') as raw:
        enc = chardet.detect(raw.read())['encoding']
    with codecs.open(target, 'r+', enc) as f:
        result = htmlmin.minify(f.read(), **self.options)
        f.seek(0)
        f.write(result)
        f.truncate()
Minifies the target html file .
55,565
def on_after_build_all(self, builder, **extra):
    """Lektor after-build-all hook: minify all generated HTML files."""
    try:
        enabled = self.is_enabled(builder.build_flags)
    except AttributeError:
        # Older Lektor builders expose extra_flags instead of build_flags.
        enabled = self.is_enabled(builder.extra_flags)
    if not enabled:
        return
    reporter.report_generic('Starting HTML minification')
    for htmlfile in self.find_html_files(builder.destination_path):
        self.minify_file(htmlfile)
    reporter.report_generic('HTML minification finished')
after - build - all lektor event
55,566
def features(entrystream, type=None, traverse=False):
    """Yield Feature entries from the stream.

    With *traverse*, a specific *type* is required and matching
    sub-features are yielded when the top-level feature does not match.
    """
    for feature in entry_type_filter(entrystream, tag.Feature):
        if not traverse:
            if not type or type == feature.type:
                yield feature
            continue
        if type is None:
            raise ValueError('cannot traverse without a specific feature type')
        if type == feature.type:
            yield feature
        else:
            for subfeature in feature:
                if type == subfeature.type:
                    yield subfeature
Pull features out of the specified entry stream .
55,567
def window(featurestream, seqid, start=None, end=None, strict=True):
    """Yield features from the designated genomic interval.

    *strict* requires features to be fully contained in [start, end];
    otherwise any overlap suffices. Without both bounds, all features on
    *seqid* are yielded.

    Fix: test ``start``/``end`` with ``is not None`` — the original
    truthiness test silently disabled the region filter for coordinate 0.
    """
    region = None
    if start is not None and end is not None:
        region = tag.Range(start, end)
    for feature in featurestream:
        if feature.seqid != seqid:
            continue
        if region is None:
            yield feature
        elif strict:
            if region.contains(feature._range):
                yield feature
        elif region.overlap(feature._range):
            yield feature
Pull features out of the designated genomic interval .
55,568
def directives(entrystream, type=None):
    """Yield Directive entries, optionally restricted to one type."""
    for directive in entry_type_filter(entrystream, tag.Directive):
        if not type or type == directive.type:
            yield directive
Pull directives out of the specified entry stream .
55,569
def validate_driver(f):
    """Decorator: only invoke *f* when matching drivers are found."""
    def check_driver(request):
        drivers = get_all_driver()
        # NOTE(review): 'filter' is presumably a project helper narrowing
        # drivers by the request (the builtin would need swapped args).
        drivers = filter(drivers, request)
        if drivers:
            return f(request, drivers)
        raise Exception('Driver is not found')
    return check_driver
Check driver on
55,570
def strings_to_integers(strings: Iterable[str]) -> Iterable[int]:
    """Convert numeric strings to ints (via float, so '1.0' also works)."""
    to_int = lambda text: int(float(text))
    return strings_to_(strings, to_int)
Convert a list of strings to a list of integers .
55,571
def string_to_double_precision_float(s: str) -> float:
    """Convert a Fortran double-precision literal like '1.5d3' to float.

    Fortran writes doubles as x.ydz / x.yDz, which Python's float()
    cannot parse directly; the 'd' exponent is rewritten as 'e'.
    """
    matched = re.match(r"(-?\d*)\.?(-?\d*)d(-?\d+)", s, re.IGNORECASE)
    integer_part, fraction_part, exponent = matched.groups()
    return float(integer_part + '.' + fraction_part + 'e' + exponent)
Double-precision floats in Fortran files have the form x.ydz or x.yDz, which cannot be converted directly by Python's float() function, so this function performs the conversion. For example:
55,572
def string_to_general_float(s: str) -> float:
    """Convert single- or double-precision ('d'/'D') literals to float."""
    if 'D' not in s.upper():
        return float(s)
    try:
        return string_to_double_precision_float(s)
    except ValueError:
        raise ValueError(
            "The string '{0}' does not corresponds to a double precision number!".format(s))
Convert a string to corresponding single or double precision scientific number .
55,573
def match_one_string(pattern: str, s: str, *args):
    """Return the single match of *pattern* in *s*, optionally wrapped.

    Prints a diagnostic and returns None unless exactly one match is
    found. At most one wrapper callable may be supplied.
    """
    try:
        match, = re.findall(pattern, s)
    except ValueError:
        print("Pattern \"{0}\" not found, or more than one found in string {1}!".format(pattern, s))
        return None
    if not args:
        return match
    if len(args) == 1:
        wrapper, = args
        return wrapper(match)
    raise TypeError('Multiple wrappers are given! Only one should be given!')
Use this only when you are certain that zero or one string will be matched; if you are not sure, use match_one_pattern instead.
55,574
def match_one_pattern(pattern: str, s: str, *args: Optional[Callable], **flags):
    """Find all matches of *pattern* in *s*.

    Returns the raw match list, or the wrapped list when one wrapper is
    given; prints a diagnostic and returns None when nothing matches.
    """
    found: Optional[List[str]] = re.findall(pattern, s, **flags)
    if not found:
        print("Pattern \"{0}\" not found in string {1}!".format(pattern, s))
        return None
    if not args:
        return found
    if len(args) == 1:
        wrapper, = args
        return [wrapper(m) for m in found]
    raise TypeError('Multiple wrappers are given! Only one should be given!')
Find a pattern in a certain string . If found and a wrapper is given then return the wrapped matched - string ; if no wrapper is given return the pure matched string . If no match is found return None .
55,575
def all_string_like(iterable: Iterable[object]) -> bool:
    """Return True iff every element of *iterable* is string-like."""
    for item in iterable:
        if not is_string_like(item):
            return False
    return True
Return True if every element of the iterable is string-like, otherwise False.
55,576
def source_filename(self, docname: str, srcdir: str):
    """Resolve the referenced image's path relative to the doc's directory.

    Raises SphinxError when the image file does not exist.
    """
    doc_dir = Path(srcdir, docname).parent
    imgpath = doc_dir.joinpath(self.filename)
    if not imgpath.exists():
        msg = f'Image does not exist at "{imgpath}"'
        raise SphinxError(msg)
    return imgpath
Get the full filename to referenced image
55,577
def env_updated(self, kb_app, sphinx_app: Sphinx,
                sphinx_env: BuildEnvironment, resource):
    """Copy this resource's image into the Sphinx output tree."""
    docname = resource.docname
    source_imgpath = self.source_filename(docname, sphinx_app.env.srcdir)
    build_dir = sphinx_app.outdir
    parent = Path(docname).parent
    target_dir = Path(build_dir, parent)
    if not target_dir.exists():
        target_dir.mkdir(parents=True, exist_ok=True)
    target_imgpath = str(Path(build_dir, parent, self.filename))
    shutil.copy(source_imgpath, target_imgpath)
Make images and enter them in Sphinx s output writer
55,578
def catalog(self, table='', column=''):
    """Look up the values available for querying.

    No table -> the list of lookup methods; table only -> that table's
    column map; table+column -> the single column entry. Returns None
    when no lookup table is loaded.
    """
    lookup_table = self.lookup_table
    if lookup_table is None:
        return None
    if not table:
        return self.lookup_methods
    if column:
        return lookup_table[table][column.upper()]
    return lookup_table[table]
Lookup the values available for querying .
55,579
def _resolve_call ( self , table , column = '' , value = '' , ** kwargs ) : if not column : return self . catalog ( table ) elif not value : return self . catalog ( table , column ) column = column . upper ( ) value = str ( value ) . upper ( ) data = self . call_api ( table , column , value , ** kwargs ) if isinstance ( data , dict ) : data = data . values ( ) [ 0 ] return data
Internal method to resolve the API wrapper call .
55,580
def call_api(self, table, column, value, **kwargs):
    """Build the EPA API URL, fetch it, and format the XML response.

    An 'output_format' kwarg overrides the instance default; remaining
    kwargs select the row range.
    """
    output_format = kwargs.pop('output_format', self.output_format)
    rows_count = self._number_of_rows(**kwargs)
    url_string = '/'.join([self.base_url, table, column,
                           quote(value), 'rows', rows_count])
    xml_data = urlopen(url_string).read()
    return self._format_data(output_format, xml_data)
Exposed method to connect and query the EPA s API .
55,581
def _number_of_rows ( self , start = 0 , count = 100 , ** kwargs ) : first = str ( start ) last = str ( start + count ) string_format = ':' . join ( [ first , last ] ) return string_format
Internal method to format the number of rows the EPA API returns .
55,582
def resource_references(self, resource) -> Mapping[str, List[Any]]:
    """Resolve every reference label on *resource* to its target objects."""
    references = dict()
    for label in resource.props.references:
        targets = []
        for target_label in resource.props.references.get(label):
            targets.append(self.get_reference(label, target_label))
        references[label] = targets
    return references
Resolve and return reference resources pointed to by object
55,583
def start(self, retry_limit=None):
    """Connect to Twitter's streaming API, retrying up to *retry_limit*.

    Uses the public stream unless 'user_stream' is configured. A known
    tweepy AttributeError on dropped connections is swallowed so the
    loop can reconnect.
    """
    wrapper_listener = TweepyWrapperListener(listener=self.listener)
    stream = tweepy.Stream(auth=self.client.tweepy_api.auth,
                           listener=wrapper_listener)
    attempts = 0
    while retry_limit is None or attempts <= retry_limit:
        attempts += 1
        try:
            if not self.client.config.get('user_stream'):
                logging.info('Listening to public stream')
                stream.filter(follow=self.filter.follow,
                              track=self.filter.track)
            else:
                if self.filter.follow:
                    logging.warning('Follow filters won\'t be used in user stream')
                logging.info('Listening to user stream')
                stream.userstream(track=self.filter.track)
        except AttributeError as e:
            if "'NoneType' object has no attribute 'strip'" not in str(e):
                raise
Try to connect to Twitter's streaming API.
55,584
def _findProteinClusters ( protToPeps , pepToProts ) : clusters = list ( ) resolvingProteins = set ( protToPeps ) while resolvingProteins : protein = resolvingProteins . pop ( ) proteinCluster = set ( [ protein ] ) peptides = set ( protToPeps [ protein ] ) parsedPeptides = set ( ) while len ( peptides ) != len ( parsedPeptides ) : for peptide in peptides : proteinCluster . update ( pepToProts [ peptide ] ) parsedPeptides . update ( peptides ) for protein in proteinCluster : peptides . update ( protToPeps [ protein ] ) clusters . append ( proteinCluster ) resolvingProteins = resolvingProteins . difference ( proteinCluster ) return clusters
Find protein clusters in the specified protein to peptide mappings .
55,585
def _findSamesetProteins(protToPeps, proteins=None):
    """Find proteins that are mapped to an identical set of peptides.

    :param protToPeps: dict, maps proteins to sets of peptides
    :param proteins: iterable, restrict the comparison to these proteins;
        by default all proteins present in protToPeps
    :returns: list of tuples, each tuple contains the sorted proteins that
        share exactly the same peptide set
    """
    if proteins is None:
        proteins = viewkeys(protToPeps)
    # Group proteins by their peptide evidence, using the sorted peptide
    # tuple as a hashable key.
    evidenceGroups = ddict(set)
    for prot in proteins:
        evidenceGroups[tuple(sorted(protToPeps[prot]))].add(prot)
    return [tuple(sorted(group))
            for group in viewvalues(evidenceGroups) if len(group) > 1]
Find proteins that are mapped to an identical set of peptides .
55,586
def _findSubsetProteins ( proteins , protToPeps , pepToProts ) : proteinsEqual = lambda prot1 , prot2 : protToPeps [ prot1 ] == protToPeps [ prot2 ] subGroups = list ( ) for protein in proteins : peptideCounts = Counter ( ) for peptide in protToPeps [ protein ] : proteins = pepToProts [ peptide ] peptideCounts . update ( proteins ) peptideCount = peptideCounts . pop ( protein ) superGroups = set ( ) for sharingProtein , sharedPeptides in peptideCounts . most_common ( ) : if peptideCount == sharedPeptides : if not proteinsEqual ( protein , sharingProtein ) : superGroups . add ( sharingProtein ) else : break if superGroups : subGroups . append ( ( protein , superGroups ) ) return subGroups
Find proteins whose peptides are a subset of, but not identical to, another protein's peptides.
55,587
def _findRedundantProteins(protToPeps, pepToProts, proteins=None):
    """Returns a set of proteins with redundant peptide evidence.

    A protein is redundant when every one of its peptides is still covered
    by at least one other remaining protein. Candidates are processed
    greedily: proteins with fewer peptides and more frequently shared
    peptides are considered for removal first.

    Fix: removed the unused local ``protPepCounts`` (dead assignment of
    ``_getValueCounts(protToPeps)`` — its value was never read).

    :param protToPeps: dict, maps proteins (or tuples of merged proteins)
        to sets of peptides
    :param pepToProts: dict, maps peptides to sets of proteins
    :param proteins: iterable, proteins considered for removal; by default
        all proteins present in protToPeps
    :returns: set of redundant proteins
    """
    if proteins is None:
        proteins = viewkeys(protToPeps)
    # Per-peptide evidence counts (presumably proteins per peptide, via
    # _getValueCounts); decremented as proteins are marked redundant so
    # the remaining coverage stays accurate.
    pepFrequency = _getValueCounts(pepToProts)
    getCount = operator.itemgetter(1)
    getProt = operator.itemgetter(0)
    # Normalize every entry to a tuple so merged and plain proteins sort
    # together deterministically.
    proteinTuples = list()
    for protein in proteins:
        if isinstance(protein, tuple):
            proteinTuples.append(protein)
        else:
            proteinTuples.append(tuple([protein]))
    # Only proteins whose peptides are all shared (frequency > 1) can be
    # redundant. The sort value (negative peptide count, descending shared
    # frequencies) combined with reverse=True puts proteins with fewer
    # peptides first in the removal order.
    sort = list()
    for protein in sorted(proteinTuples, reverse=True):
        if len(protein) == 1:
            protein = protein[0]
        protPepFreq = [pepFrequency[pep] for pep in protToPeps[protein]]
        if min(protPepFreq) > 1:
            sortValue = (len(protPepFreq)*-1, sorted(protPepFreq, reverse=True))
            sort.append((protein, sortValue))
    sortedProteins = map(getProt, sorted(sort, key=getCount, reverse=True))
    redundantProteins = set()
    for protein in sortedProteins:
        # Remove the protein only if every peptide remains covered by at
        # least one other protein after previous removals.
        for pep in protToPeps[protein]:
            if pepFrequency[pep] <= 1:
                break
        else:
            protPepFrequency = Counter(protToPeps[protein])
            pepFrequency.subtract(protPepFrequency)
            redundantProteins.add(protein)
    return redundantProteins
Returns a set of proteins with redundant peptide evidence .
55,588
def _mergeProteinEntries ( proteinLists , protToPeps ) : mergedProtToPeps = dict ( protToPeps ) for proteins in proteinLists : for protein in proteins : peptides = mergedProtToPeps . pop ( protein ) mergedProtein = tuple ( sorted ( proteins ) ) mergedProtToPeps [ mergedProtein ] = peptides return mergedProtToPeps
Returns a new protToPeps dictionary with entries merged that are present in proteinLists .
55,589
def _reducedProtToPeps(protToPeps, proteins):
    """Return a copy of protToPeps without the entries listed in proteins.

    :param protToPeps: dict, maps proteins to sets of peptides
    :param proteins: container of protein keys to exclude
    :returns: new dict containing only the remaining entries
    """
    return {prot: peps for prot, peps in viewitems(protToPeps)
            if prot not in proteins}
Returns a new reduced protToPeps dictionary that does not contain entries present in proteins .
55,590
def _findUniqueMappingKeys(mapping):
    """Return the set of mapping keys whose value holds exactly one entry.

    :param mapping: dict, keys point to sized containers of entries
    :returns: set of keys with a single associated entry
    """
    return {key for key, entries in viewitems(mapping) if len(entries) == 1}
Find mapping keys that have only one entry (value length of 1).
55,591
def _invertMapping(mapping):
    """Invert a one-to-many mapping, e.g. turn a protein-to-peptides
    mapping into a peptide-to-proteins mapping.

    :param mapping: dict, keys point to iterables of values
    :returns: defaultdict(set), each value points to the set of keys it
        appeared under
    """
    inverted = ddict(set)
    for origKey, valueGroup in viewitems(mapping):
        for val in valueGroup:
            inverted[val].add(origKey)
    return inverted
Converts a protein to peptide or peptide to protein mapping .
55,592
def _mappingGetValueSet ( mapping , keys ) : setUnion = set ( ) for k in keys : setUnion = setUnion . union ( mapping [ k ] ) return setUnion
Return a combined set of values from the mapping .
55,593
def _flattenMergedProteins ( proteins ) : proteinSet = set ( ) for protein in proteins : if isinstance ( protein , tuple ) : proteinSet . update ( protein ) else : proteinSet . add ( protein ) return proteinSet
Return a set where merged protein entries in proteins are flattened .
55,594
def getGroups(self, proteinId):
    """Return all protein groups the given protein is associated with.

    :param proteinId: a protein identifier present in the internal
        protein-to-group mapping
    :returns: list of group objects, one per associated group id
    """
    groupIds = self._proteinToGroupIds[proteinId]
    return [self.groups[groupId] for groupId in groupIds]
Return a list of protein groups a protein is associated with .
55,595
def addProteinGroup(self, groupRepresentative):
    """Create a new protein group and return its id.

    The representative protein is also registered as a leading protein
    of the freshly created group.

    :param groupRepresentative: the protein representing the new group
    :returns: the id of the newly created group
    """
    newGroupId = self._getNextGroupId()
    self.groups[newGroupId] = ProteinGroup(newGroupId, groupRepresentative)
    self.addLeadingToGroups(groupRepresentative, newGroupId)
    return newGroupId
Adds a new protein group and returns the groupId .
55,596
def addLeadingToGroups(self, proteinIds, groupIds):
    """Register proteins as leading proteins of the specified groups.

    :param proteinIds: a protein id or an iterable of protein ids
    :param groupIds: a group id or an iterable of group ids
    """
    for gId in AUX.toList(groupIds):
        self.groups[gId].addLeadingProteins(proteinIds)
        self._addProteinIdsToGroupMapping(proteinIds, gId)
Add one or multiple leading proteins to one or multiple protein groups .
55,597
def addSubsetToGroups(self, proteinIds, groupIds):
    """Register proteins as subset proteins of the specified groups.

    :param proteinIds: a protein id or an iterable of protein ids
    :param groupIds: a group id or an iterable of group ids
    """
    for gId in AUX.toList(groupIds):
        self.groups[gId].addSubsetProteins(proteinIds)
        self._addProteinIdsToGroupMapping(proteinIds, gId)
Add one or multiple subset proteins to one or multiple protein groups .
55,598
def addSubsumableToGroups(self, proteinIds, groupIds):
    """Register proteins as subsumable proteins of the specified groups.

    :param proteinIds: a protein id or an iterable of protein ids
    :param groupIds: a group id or an iterable of group ids
    """
    for gId in AUX.toList(groupIds):
        self.groups[gId].addSubsumableProteins(proteinIds)
        self._addProteinIdsToGroupMapping(proteinIds, gId)
Add one or multiple subsumable proteins to one or multiple protein groups .
55,599
def _addProteinIdsToGroupMapping(self, proteinIds, groupId):
    """Record the group membership of one or multiple proteins in the
    internal protein-to-group-ids mapping.

    :param proteinIds: a protein id or an iterable of protein ids
    :param groupId: the group id to associate with each protein
    """
    for pId in AUX.toList(proteinIds):
        self._proteinToGroupIds[pId].add(groupId)
Add a groupId to one or multiple entries of the internal proteinToGroupId mapping .