idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
55,300
def add_list_member ( self , list_id , user_id ) : return List ( tweepy_list_to_json ( self . _client . add_list_member ( list_id = list_id , user_id = user_id ) ) )
Add a user to list
55,301
def remove_list_member ( self , list_id , user_id ) : return List ( tweepy_list_to_json ( self . _client . remove_list_member ( list_id = list_id , user_id = user_id ) ) )
Remove a user from a list
55,302
def list_members ( self , list_id ) : return [ User ( user . _json ) for user in self . _client . list_members ( list_id = list_id ) ]
List users in a list
55,303
def is_list_member ( self , list_id , user_id ) : try : return bool ( self . _client . show_list_member ( list_id = list_id , user_id = user_id ) ) except TweepError as e : if e . api_code == TWITTER_USER_IS_NOT_LIST_MEMBER_SUBSCRIBER : return False raise
Check if a user is member of a list
55,304
def subscribe_list ( self , list_id ) : return List ( tweepy_list_to_json ( self . _client . subscribe_list ( list_id = list_id ) ) )
Subscribe to a list
55,305
def unsubscribe_list ( self , list_id ) : return List ( tweepy_list_to_json ( self . _client . unsubscribe_list ( list_id = list_id ) ) )
Unsubscribe to a list
55,306
def list_subscribers ( self , list_id ) : return [ User ( user . _json ) for user in self . _client . list_subscribers ( list_id = list_id ) ]
List subscribers of a list
55,307
def is_subscribed_list ( self , list_id , user_id ) : try : return bool ( self . _client . show_list_subscriber ( list_id = list_id , user_id = user_id ) ) except TweepError as e : if e . api_code == TWITTER_USER_IS_NOT_LIST_MEMBER_SUBSCRIBER : return False raise
Check if user is a subscribed of specified list
55,308
def auth ( config ) : auth = tweepy . OAuthHandler ( config . get ( 'consumer_key' ) , config . get ( 'consumer_secret' ) ) auth . set_access_token ( config . get ( 'token_key' ) , config . get ( 'token_secret' ) ) api = tweepy . API ( auth ) try : api . verify_credentials ( ) except RateLimitError as e : raise APIQuot...
Perform authentication with Twitter and return a client instance to communicate with Twitter
55,309
def bind ( self , instance_id : str , binding_id : str , details : BindDetails ) -> Binding : instance = self . _backend . find ( instance_id ) binding = self . _backend . find ( binding_id , instance ) return self . _backend . bind ( binding , details . parameters )
Binding the instance see openbrokerapi documentation
55,310
def post ( self , path , data = { } ) : response = requests . post ( API_URL + path , data = json . dumps ( data ) , headers = self . _set_headers ( ) ) return self . _check_response ( response , self . post , path , data )
Perform POST Request
55,311
def delete ( self , path , data = { } ) : if len ( data ) != 0 : parameter_string = '' for k , v in data . items ( ) : parameter_string += '{}={}' . format ( k , v ) parameter_string += '&' path += '?' + parameter_string response = requests . delete ( API_URL + path , headers = self . _set_headers ( ) ) return self . _...
Perform DELETE Request
55,312
def parsed ( self ) : if not self . _parsed : self . _parsed = ConfigParser ( ) self . _parsed . readfp ( io . StringIO ( self . content ) ) return self . _parsed
Get the ConfigParser object which represents the content .
55,313
def create_cache ( directory , compress_level = 6 , value_type_is_binary = False , ** kwargs ) : cache = diskcache . Cache ( directory , disk = CompressedDisk , disk_compress_level = compress_level , disk_value_type_is_binary = value_type_is_binary , ** kwargs ) return cache
Create a html cache . Html string will be automatically compressed .
55,314
def timeticks ( tdiff ) : if isinstance ( tdiff , xarray . DataArray ) : tdiff = timedelta ( seconds = tdiff . values / np . timedelta64 ( 1 , 's' ) ) assert isinstance ( tdiff , timedelta ) , 'expecting datetime.timedelta' if tdiff > timedelta ( hours = 2 ) : return None , None elif tdiff > timedelta ( minutes = 20 ) ...
NOTE do NOT use interval or ticks are misaligned! use bysecond only!
55,315
def consume ( self , msg ) : self . log . info ( msg ) body = msg [ 'body' ] topic = body [ 'topic' ] repo = None if 'rawhide' in topic : arch = body [ 'msg' ] [ 'arch' ] self . log . info ( 'New rawhide %s compose ready' , arch ) repo = 'rawhide' elif 'branched' in topic : arch = body [ 'msg' ] [ 'arch' ] branch = bod...
Called with each incoming fedmsg .
55,316
def parse_addr ( text ) : "Parse a 1- to 3-part address spec." if text : parts = text . split ( ':' ) length = len ( parts ) if length == 3 : return parts [ 0 ] , parts [ 1 ] , int ( parts [ 2 ] ) elif length == 2 : return None , parts [ 0 ] , int ( parts [ 1 ] ) elif length == 1 : return None , '' , int ( parts [ 0 ] ...
Parse a 1 - to 3 - part address spec .
55,317
def start ( self ) : "Start the service" gevent . signal ( signal . SIGINT , self . _shutdown ) def _flush_impl ( ) : while 1 : gevent . sleep ( self . _stats . interval ) stats = self . _stats self . _reset_stats ( ) try : self . _sink . send ( stats ) except Exception , ex : trace = traceback . format_tb ( sys . exc_...
Start the service
55,318
def _process ( self , data ) : "Process a single packet and update the internal tables." parts = data . split ( ':' ) if self . _debug : self . error ( 'packet: %r' % data ) if not parts : return stats = self . _stats key = parts [ 0 ] . translate ( KEY_TABLE , KEY_DELETIONS ) if self . _key_prefix : key = '.' . join (...
Process a single packet and update the internal tables .
55,319
def section ( self , resources ) : section = [ p for p in self . parents ( resources ) if p . rtype == 'section' ] if section : return section [ 0 ] return None
Which section is this in if any
55,320
def in_navitem ( self , resources , nav_href ) : if nav_href . endswith ( '/index' ) : nav_href = nav_href [ : - 6 ] return self . docname . startswith ( nav_href )
Given href of nav item determine if resource is in it
55,321
def is_published ( self ) : now = datetime . now ( ) published = self . props . published if published : return published < now return False
Return true if this resource has published date in the past
55,322
def _create_driver ( self , ** kwargs ) : if self . driver is None : self . driver = self . create_driver ( ** kwargs ) self . init_driver_func ( self . driver )
Create webdriver assign it to self . driver and run webdriver initiation process which is usually used for manual login .
55,323
def deserialize_time ( data ) : parsed = parser . parse ( data ) return parsed . time ( ) . replace ( tzinfo = parsed . tzinfo )
Return a time instance based on the values of the data param
55,324
def require ( * args , ** kwargs ) : if not args and not kwargs : return freeze ( ) requirements = list ( args ) extra = [ '{}{}' . format ( kw , kwargs [ kw ] ) for kw in kwargs ] requirements . extend ( extra ) args = [ 'install' , '-q' ] args . extend ( requirements ) pip . main ( args )
Install a set of packages using pip This is designed to be an interface for IPython notebooks that replicates the requirements . txt pip format . This lets notebooks specify which versions of packages they need inside the notebook itself .
55,325
def handle ( self , * args , ** options ) : self . db = options . get ( "database" , DEFAULT_DB_ALIAS ) self . current_name = connections [ self . db ] . settings_dict [ "NAME" ] self . compare_name = options . get ( "db_name" ) self . lines = options . get ( "lines" ) self . ignore = int ( options . get ( 'ignore' ) )...
Compares current database with a migrations . Creates a temporary database applies all the migrations to it and then dumps the schema from both current and temporary diffs them then report the diffs to the user .
55,326
def render_widgets ( kb_app : kb , sphinx_app : Sphinx , doctree : doctree , fromdocname : str , ) : builder : StandaloneHTMLBuilder = sphinx_app . builder for node in doctree . traverse ( widget ) : w = sphinx_app . env . widgets . get ( node . name ) context = builder . globalcontext . copy ( ) context [ 'resources' ...
Go through docs and replace widget directive with rendering
55,327
def auth_string ( self ) : if not self . _token : self . execute ( ) if not self . _token . expired : return 'Bearer {}' . format ( self . _token . access_token ) if self . auto_refresh : self . execute ( ) return 'Bearer {}' . format ( self . _token . access_token ) raise TokenExpired ( )
Get the auth string . If the token is expired and auto refresh enabled a new token will be fetched
55,328
def admin_penalty ( self , column = None , value = None , ** kwargs ) : return self . _resolve_call ( 'PCS_ADMIN_PENALTY_ORDER' , column , value , ** kwargs )
An enforcement action that results in levying the permit holder with a penalty or fine . It is used to track judicial hearing dates penalty amounts and type of administrative penalty order .
55,329
def compliance_schedule ( self , column = None , value = None , ** kwargs ) : return self . _resolve_call ( 'PCS_CMPL_SCHD' , column , value , ** kwargs )
A sequence of activities with associated milestones which pertains to a given permit .
55,330
def compliance_violation ( self , column = None , value = None , ** kwargs ) : return self . _resolve_call ( 'PCS_CMPL_SCHD_VIOL' , column , value , ** kwargs )
A compliance schedule violation reflects the non - achievement of a given compliance schedule event including the type of violation and ty pe of resolution .
55,331
def enforcement_action ( self , column = None , value = None , ** kwargs ) : return self . _resolve_call ( 'PCS_ENFOR_ACTION' , column , value , ** kwargs )
A disciplinary action taken against a permit facility . The action may be applicable to one or more violations .
55,332
def hearing ( self , column = None , value = None , ** kwargs ) : return self . _resolve_call ( 'PCS_EVIDENTIARY_HEARING_EVENT' , column , value , ** kwargs )
An evidentiary hearing .
55,333
def industrial_user ( self , column = None , value = None , ** kwargs ) : return self . _resolve_call ( 'PCS_INDUSTRIAL_USER_INFO' , column , value , ** kwargs )
Information from the PCI_AUDIT table pertaining to industrial users i . e . the number of significant industrial users .
55,334
def permit_event ( self , column = None , value = None , ** kwargs ) : return self . _resolve_call ( 'PCS_PERMIT_EVENT' , column , value , ** kwargs )
A permit event tracks the lifecycle of a permit from issuance to expiration . Examples include Application Received and Permit Issued etc .
55,335
def pipe_schedule ( self , column = None , value = None , ** kwargs ) : return self . _resolve_call ( 'PCS_PIPE_SCHED' , column , value , ** kwargs )
Particular discharge points at a permit facility that are governed by effluent limitations and monitoring and submission requirements .
55,336
def single_violation ( self , column = None , value = None , ** kwargs ) : return self . _resolve_call ( 'PCS_SINGLE_EVENT_VIOL' , column , value , ** kwargs )
A single event violation is a one - time event that occurred on a fixed date and is associated with one permitted facility .
55,337
def typify ( value : Union [ dict , list , set , str ] ) : if type ( value ) == dict : return walk_values ( typify , value ) if type ( value ) in [ list , set ] : return list ( map ( typify , value ) ) if type ( value ) == str : if re . match ( '^\d+\.\d+ (STEEM|SBD|VESTS)$' , value ) : return keep_in_dict ( dict ( Amo...
Enhance block operation with native types .
55,338
def json_expand ( json_op ) : if type ( json_op ) == dict and 'json' in json_op : return update_in ( json_op , [ 'json' ] , safe_json_loads ) return json_op
For custom_json ops .
55,339
def delete ( self , subnet_id ) : subnet = self . client . describe_subnets ( SubnetIds = [ subnet_id ] ) . get ( 'Subnets' ) [ 0 ] vpc_id = subnet . get ( 'VpcId' ) self . client . delete_subnet ( SubnetId = subnet_id ) return self . client . delete_vpc ( VpcId = vpc_id )
This is bad delete function because one vpc can have more than one subnet . It is Ok if user only use CAL for manage cloud resource We will update ASAP .
55,340
def _clean_terminals ( self ) : new_terminals = [ ] for term in self . grammar . grammar_terminals : x_term = term . rfind ( '@' ) y_term = term . rfind ( 'A' ) if y_term > x_term : x_term = y_term ids = term [ x_term + 1 : ] . split ( ',' ) if len ( ids ) < 2 : new_terminals . append ( term ) self . grammar . grammar_...
Because of the optimization there are some non existing terminals on the generated list . Remove them by checking for terms in form Ax x
55,341
def _check_self_replicate ( self , myntr ) : find = 0 for nonterm in self . grammar . grammar_nonterminals_map : for i in self . grammar . grammar_nonterminals_map [ nonterm ] : if self . grammar . grammar_rules [ i ] [ 0 ] not in self . resolved and not isinstance ( self . grammar . grammar_rules [ i ] [ 1 ] , ( set ,...
For each Rule B - > c where c is a known terminal this function searches for B occurences in rules with the form A - > B and sets A - > c .
55,342
def describe ( self ) : lines = [ ] lines . append ( "Symbol = {}" . format ( self . name ) ) if len ( self . tags ) : tgs = ", " . join ( x . tag for x in self . tags ) lines . append ( " tagged = {}" . format ( tgs ) ) if len ( self . aliases ) : als = ", " . join ( x . alias for x in self . aliases ) lines . append...
describes a Symbol returns a string
55,343
def datatable_df ( self ) : data = self . _all_datatable_data ( ) adf = pd . DataFrame ( data ) adf . columns = self . dt_all_cols return self . _finish_df ( adf , 'ALL' )
returns the dataframe representation of the symbol s final data
55,344
def _init_datatable ( self ) : try : self . datatable = Table ( self . name , Base . metadata , autoload = True ) except NoSuchTableError : print "Creating datatable, cause it doesn't exist" self . datatable = self . _datatable_factory ( ) self . datatable . create ( ) self . datatable_exists = True
Instantiates the . datatable attribute pointing to a table in the database that stores all the cached data
55,345
def _datatable_factory ( self ) : feed_cols = [ 'feed{0:03d}' . format ( i + 1 ) for i in range ( self . n_feeds ) ] feed_cols = [ 'override_feed000' ] + feed_cols + [ 'failsafe_feed999' ] ind_sqlatyp = indexingtypes [ self . index . indimp ] . sqlatyp dat_sqlatyp = datadefs [ self . dtype . datadef ] . sqlatyp atbl = ...
creates a SQLAlchemy Table object with the appropriate number of columns given the number of feeds
55,346
def add_tags ( self , tags ) : if isinstance ( tags , ( str , unicode ) ) : tags = [ tags ] objs = object_session ( self ) tmps = [ FeedTag ( tag = t , feed = self ) for t in tags ] objs . add_all ( tmps ) objs . commit ( )
add a tag or tags to a Feed
55,347
def initiate_browser ( self ) : tempdir = os . getenv ( TEMPDIR_ENVVAR , DEFAULT_TEMPDIR ) tempsubdir = uuid4 ( ) . hex self . tempdir = os . path . join ( tempdir , tempsubdir ) try : os . makedirs ( self . tempdir ) except OSError : if not os . path . isdir ( self . tempdir ) : raise profile = webdriver . FirefoxProf...
The button for expanded detailed options . This also happens to be a good indicator as to wheter all content is loaded .
55,348
def step_I_create_logrecords_with_table ( context ) : assert context . table , "REQUIRE: context.table" context . table . require_columns ( [ "category" , "level" , "message" ] ) for row in context . table . rows : category = row [ "category" ] if category == "__ROOT__" : category = None level = LogLevel . parse_type (...
Step definition that creates one more log records by using a table .
55,349
def step_I_create_logrecord_with_table ( context ) : assert context . table , "REQUIRE: context.table" assert len ( context . table . rows ) == 1 , "REQUIRE: table.row.size == 1" step_I_create_logrecords_with_table ( context )
Create an log record by using a table to provide the parts .
55,350
def step_use_log_record_configuration ( context ) : assert context . table , "REQUIRE: context.table" context . table . require_columns ( [ "property" , "value" ] ) for row in context . table . rows : property_name = row [ "property" ] value = row [ "value" ] if property_name == "format" : context . log_record_format =...
Define log record configuration parameters .
55,351
def smart_decode ( binary , errors = "strict" ) : d = chardet . detect ( binary ) encoding = d [ "encoding" ] confidence = d [ "confidence" ] text = binary . decode ( encoding , errors = errors ) return text , encoding , confidence
Automatically find the right codec to decode binary data to string .
55,352
def decode ( self , binary , url , encoding = None , errors = "strict" ) : if encoding is None : domain = util . get_domain ( url ) if domain in self . domain_encoding_table : encoding = self . domain_encoding_table [ domain ] html = binary . decode ( encoding , errors = errors ) else : html , encoding , confidence = s...
Decode binary to string .
55,353
def modify_number_pattern ( number_pattern , ** kwargs ) : params = [ 'pattern' , 'prefix' , 'suffix' , 'grouping' , 'int_prec' , 'frac_prec' , 'exp_prec' , 'exp_plus' ] for param in params : if param in kwargs : continue kwargs [ param ] = getattr ( number_pattern , param ) return NumberPattern ( ** kwargs )
Modifies a number pattern by specified keyword arguments .
55,354
def format_currency_field ( __ , prec , number , locale ) : locale = Locale . parse ( locale ) currency = get_territory_currencies ( locale . territory ) [ 0 ] if prec is None : pattern , currency_digits = None , True else : prec = int ( prec ) pattern = locale . currency_formats [ 'standard' ] pattern = modify_number_...
Formats a currency field .
55,355
def format_float_field ( __ , prec , number , locale ) : format_ = u'0.' if prec is None : format_ += u'#' * NUMBER_DECIMAL_DIGITS else : format_ += u'0' * int ( prec ) pattern = parse_pattern ( format_ ) return pattern . apply ( number , locale )
Formats a fixed - point field .
55,356
def format_number_field ( __ , prec , number , locale ) : prec = NUMBER_DECIMAL_DIGITS if prec is None else int ( prec ) locale = Locale . parse ( locale ) pattern = locale . decimal_formats . get ( None ) return pattern . apply ( number , locale , force_frac = ( prec , prec ) )
Formats a number field .
55,357
def format_percent_field ( __ , prec , number , locale ) : prec = PERCENT_DECIMAL_DIGITS if prec is None else int ( prec ) locale = Locale . parse ( locale ) pattern = locale . percent_formats . get ( None ) return pattern . apply ( number , locale , force_frac = ( prec , prec ) )
Formats a percent field .
55,358
def format_hexadecimal_field ( spec , prec , number , locale ) : if number < 0 : number &= ( 1 << ( 8 * int ( math . log ( - number , 1 << 8 ) + 1 ) ) ) - 1 format_ = u'0%d%s' % ( int ( prec or 0 ) , spec ) return format ( number , format_ )
Formats a hexadeciaml field .
55,359
def delegate ( attribute_name , method_names ) : call_attribute_method = partial ( _call_delegated_method , attribute_name ) def decorate ( class_ ) : for method in method_names : setattr ( class_ , method , partialmethod ( call_attribute_method , method ) ) return class_ return decorate
Decorator factory to delegate methods to an attribute .
55,360
def prepare_query ( query ) : for name in query : value = query [ name ] if value is None : query [ name ] = "" elif isinstance ( value , bool ) : query [ name ] = int ( value ) elif isinstance ( value , dict ) : raise ValueError ( "Invalid query data type %r" % type ( value ) . __name__ )
Prepare a query object for the RAPI .
55,361
def itemgetters ( * args ) : f = itemgetter ( * args ) def inner ( l ) : return [ f ( x ) for x in l ] return inner
Get a handful of items from an iterable .
55,362
def stat_container ( self , container ) : LOG . debug ( 'stat_container() with %s is success.' , self . driver ) return self . driver . stat_container ( container )
Stat container metadata
55,363
def update_container ( self , container , metadata , ** kwargs ) : LOG . debug ( 'update_object() with %s is success.' , self . driver ) return self . driver . update_container ( container , metadata , ** kwargs )
Update container metadata
55,364
def stat_object ( self , container , obj ) : LOG . debug ( 'stat_object() with %s is success.' , self . driver ) return self . driver . stat_object ( container , obj )
Stat object metadata
55,365
def delete_object ( self , container , obj , ** kwargs ) : try : LOG . debug ( 'delete_object() with %s is success.' , self . driver ) return self . driver . delete_object ( container , obj , ** kwargs ) except DriverException as e : LOG . exception ( 'download_object() with %s raised\ an exc...
Delete object in container
55,366
def list_container_objects ( self , container , prefix = None , delimiter = None ) : LOG . debug ( 'list_container_objects() with %s is success.' , self . driver ) return self . driver . list_container_objects ( container , prefix , delimiter )
List container objects
55,367
def update_object ( self , container , obj , metadata , ** kwargs ) : try : LOG . debug ( 'update_object() with %s is success.' , self . driver ) return self . driver . update_object ( container , obj , metadata , ** kwargs ) except DriverException as e : LOG . exception ( 'copy_object() with %s raised\ ...
Update object metadata
55,368
def get_path_fields ( cls , base = [ ] ) : pfs = [ ] for pf in cls . TutelaryMeta . path_fields : if pf == 'pk' : pfs . append ( base + [ 'pk' ] ) else : f = cls . _meta . get_field ( pf ) if isinstance ( f , models . ForeignKey ) : pfs += get_path_fields ( f . target_field . model , base = base + [ pf ] ) else : pfs ....
Get object fields used for calculation of django - tutelary object paths .
55,369
def get_perms_object ( obj , action ) : def get_one ( pf ) : if isinstance ( pf , str ) : return pf else : return str ( reduce ( lambda o , f : getattr ( o , f ) , pf , obj ) ) return Object ( [ get_one ( pf ) for pf in obj . __class__ . TutelaryMeta . pfs ] )
Get the django - tutelary path for an object based on the fields listed in TutelaryMeta . pfs .
55,370
def permissioned_model ( cls , perm_type = None , path_fields = None , actions = None ) : if not issubclass ( cls , models . Model ) : raise DecoratorException ( 'permissioned_model' , "class '" + cls . __name__ + "' is not a Django model" ) added = False try : if not hasattr ( cls , 'TutelaryMeta' ) : if perm_type is ...
Function to set up a model for permissioning . Can either be called directly passing a class and suitable values for perm_type path_fields and actions or can be used as a class decorator taking values for perm_type path_fields and actions from the TutelaryMeta subclass of the decorated class .
55,371
def _getArrays ( items , attr , defaultValue ) : arrays = dict ( [ ( key , [ ] ) for key in attr ] ) for item in items : for key in attr : arrays [ key ] . append ( getattr ( item , key , defaultValue ) ) for key in [ _ for _ in viewkeys ( arrays ) ] : arrays [ key ] = numpy . array ( arrays [ key ] ) return arrays
Return arrays with equal size of item attributes from a list of sorted items for fast and convenient data processing .
55,372
def addMsrunContainers ( mainContainer , subContainer ) : typeToContainer = { 'rm' : 'rmc' , 'ci' : 'cic' , 'smi' : 'smic' , 'sai' : 'saic' , 'si' : 'sic' } for specfile in subContainer . info : if specfile in mainContainer . info : continue mainContainer . addSpecfile ( specfile , subContainer . info [ specfile ] [ 'p...
Adds the complete content of all specfile entries from the subContainer to the mainContainer . However if a specfile of subContainer . info is already present in mainContainer . info its contents are not added to the mainContainer .
55,373
def setPath ( self , folderpath , specfiles = None ) : if specfiles is None : specfiles = [ _ for _ in viewkeys ( self . info ) ] else : specfiles = aux . toList ( specfiles ) _containerSetPath ( self , folderpath , specfiles )
Changes the folderpath of the specified specfiles . The folderpath is used for saving and loading of mrc files .
55,374
def removeSpecfile ( self , specfiles ) : for specfile in aux . toList ( specfiles ) : for datatypeContainer in [ 'rmc' , 'cic' , 'smic' , 'saic' , 'sic' ] : dataContainer = getattr ( self , datatypeContainer ) try : del dataContainer [ specfile ] except KeyError : pass del self . info [ specfile ]
Completely removes the specified specfiles from the msrunContainer .
55,375
def _processDatatypes ( self , rm , ci , smi , sai , si ) : datatypes = list ( ) for datatype , value in [ ( 'rm' , rm ) , ( 'ci' , ci ) , ( 'smi' , smi ) , ( 'sai' , sai ) , ( 'si' , si ) ] : if value : datatypes . append ( datatype ) return datatypes
Helper function that returns a list of datatype strings depending on the parameters boolean value .
55,376
def save ( self , specfiles = None , rm = False , ci = False , smi = False , sai = False , si = False , compress = True , path = None ) : if specfiles is None : specfiles = [ _ for _ in viewkeys ( self . info ) ] else : specfiles = aux . toList ( specfiles ) datatypes = self . _processDatatypes ( rm , ci , smi , sai , ...
Writes the specified datatypes to mrc files on the hard disk .
55,377
def _writeRmc ( self , filelike , specfile ) : xmlString = ETREE . tostring ( self . rmc [ specfile ] , pretty_print = True ) filelike . write ( xmlString )
Writes the . rmc container entry of the specified specfile as an human readable and pretty formatted xml string .
55,378
def load ( self , specfiles = None , rm = False , ci = False , smi = False , sai = False , si = False ) : if specfiles is None : specfiles = [ _ for _ in viewkeys ( self . info ) ] else : specfiles = aux . toList ( specfiles ) selectedSpecfiles = list ( ) for specfile in specfiles : if specfile not in self . info : war...
Import the specified datatypes from mrc files on the hard disk .
55,379
def jsonHook ( encoded ) : if '__Ci__' in encoded : return Ci . _fromJSON ( encoded [ '__Ci__' ] ) elif '__MzmlProduct__' in encoded : return MzmlProduct . _fromJSON ( encoded [ '__MzmlProduct__' ] ) elif '__MzmlPrecursor__' in encoded : return MzmlPrecursor . _fromJSON ( encoded [ '__MzmlPrecursor__' ] ) else : return...
Custom JSON decoder that allows construction of a new Ci instance from a decoded JSON object .
55,380
def jsonHook ( encoded ) : if '__Smi__' in encoded : return Smi . _fromJSON ( encoded [ '__Smi__' ] ) elif '__MzmlScan__' in encoded : return MzmlScan . _fromJSON ( encoded [ '__MzmlScan__' ] ) elif '__MzmlProduct__' in encoded : return MzmlProduct . _fromJSON ( encoded [ '__MzmlProduct__' ] ) elif '__MzmlPrecursor__' ...
Custom JSON decoder that allows construction of a new Smi instance from a decoded JSON object .
55,381
def removeSpecfile ( self , specfiles ) : for specfile in aux . toList ( specfiles ) : del self . container [ specfile ] del self . info [ specfile ]
Completely removes the specified specfiles from the SiiContainer .
55,382
def save ( self , specfiles = None , compress = True , path = None ) : if specfiles is None : specfiles = [ _ for _ in viewkeys ( self . info ) ] else : specfiles = aux . toList ( specfiles ) for specfile in specfiles : if specfile not in self . info : warntext = 'Error while calling "SiiContainer.save()": "%s" is' ' n...
Writes the specified specfiles to siic files on the hard disk .
55,383
def calcMz ( self , specfiles = None , guessCharge = True , obsMzKey = 'obsMz' ) : _calcMass = maspy . peptidemethods . calcPeptideMass _calcMzFromMass = maspy . peptidemethods . calcMzFromMass _massProton = maspy . constants . atomicMassProton _guessCharge = lambda mass , mz : round ( mass / ( mz - _massProton ) , 0 )...
Calculate the exact mass for Sii elements from the Sii . peptide sequence .
55,384
def _writeContainer ( self , filelike , specfile , compress ) : aux . writeJsonZipfile ( filelike , self . container [ specfile ] , compress = compress )
Writes the self . container entry of the specified specfile to the fic format .
55,385
def load ( self , specfiles = None ) : if specfiles is None : specfiles = [ _ for _ in viewkeys ( self . info ) ] else : specfiles = aux . toList ( specfiles ) for specfile in specfiles : if specfile not in self . info : warntext = 'Error while calling "FiContainer.load()": "%s" is' ' not present in "FiContainer.info"!...
Imports the specified fic files from the hard disk .
55,386
def as_dict ( self ) : def conv ( v ) : if isinstance ( v , SerializableAttributesHolder ) : return v . as_dict ( ) elif isinstance ( v , list ) : return [ conv ( x ) for x in v ] elif isinstance ( v , dict ) : return { x : conv ( y ) for ( x , y ) in v . items ( ) } else : return v return { k . replace ( '_' , '-' ) :...
Returns a JSON - serializeable object representing this tree .
55,387
def from_json ( cls , data ) : assert isinstance ( data , str ) data = json . loads ( data ) assert isinstance ( data , dict ) return cls . from_dict ( data )
Decode a JSON string and inflate a node instance .
55,388
def extract_keywords ( func ) : if hasattr ( func , 'im_func' ) : func = func . im_func try : return func . func_code . co_varnames [ - len ( func . func_defaults ) : ] except ( TypeError , ValueError , IndexError ) : return tuple ( )
Parses the keywords from the given function .
55,389
def jtag_enable ( self ) : status , _ = self . bulkCommand ( _BMSG_ENABLE_JTAG ) if status == 0 : self . _jtagon = True elif status == 3 : self . _jtagon = True raise JTAGAlreadyEnabledError ( ) else : raise JTAGEnableFailedError ( "Error enabling JTAG. Error code: %s." % status )
Enables JTAG output on the controller . JTAG operations executed before this function is called will return useless data or fail .
55,390
def jtag_disable ( self ) : if not self . _jtagon : return status , _ = self . bulkCommand ( _BMSG_DISABLE_JTAG ) if status == 0 : self . _jtagon = False elif status == 3 : raise JTAGControlError ( "Error Code %s" % status ) self . close_handle ( )
Disables JTAG output on the controller . JTAG operations executed immediately after this function will return useless data or fail .
55,391
def write_tms_tdi_bits ( self , tmsdata , tdidata , return_tdo = False ) : self . _check_jtag ( ) if len ( tmsdata ) != len ( tdidata ) : raise Exception ( "TMSdata and TDIData must be the same length" ) self . _update_scanchain ( tmsdata ) count = len ( tmsdata ) t = time ( ) outdata = bitarray ( [ val for pair in zip...
Command controller to write arbitrary TDI and TMS data to the physical scan chain . Optionally return TDO bits sent back from the scan chain .
55,392
def _readFastaFile ( filepath ) : processSequences = lambda i : '' . join ( [ s . rstrip ( ) for s in i ] ) . rstrip ( '*' ) processHeaderLine = lambda line : line [ 1 : ] . rstrip ( ) with io . open ( filepath ) as openfile : try : line = next ( openfile ) while line [ 0 ] != '>' : line = next ( openfile ) header = pr...
Read a FASTA file and yields tuples of header and sequence entries .
55,393
def fastaParseSgd ( header ) : rePattern = '([\S]+)\s([\S]+).+(\".+\")' ID , name , description = re . match ( rePattern , header ) . groups ( ) info = { 'id' : ID , 'name' : name , 'description' : description } return info
Custom parser for fasta headers in the SGD format see www . yeastgenome . org .
55,394
def save ( self , path , compress = True ) : with aux . PartiallySafeReplace ( ) as msr : filename = self . info [ 'name' ] + '.proteindb' filepath = aux . joinpath ( path , filename ) with msr . open ( filepath , mode = 'w+b' ) as openfile : self . _writeContainer ( openfile , compress = compress )
Writes the . proteins and . peptides entries to the hard disk as a proteindb file .
55,395
def load ( cls , path , name ) : filepath = aux . joinpath ( path , name + '.proteindb' ) with zipfile . ZipFile ( filepath , 'r' , allowZip64 = True ) as containerZip : proteinsString = io . TextIOWrapper ( containerZip . open ( 'proteins' ) , encoding = 'utf-8' ) . read ( ) peptidesString = io . TextIOWrapper ( conta...
Imports the specified proteindb file from the hard disk .
55,396
def fetch_keywords ( codedata ) : tmp = { } language_counts = { } for index , ( language , code ) in enumerate ( codedata ) : if language not in shaman . SUPPORTING_LANGUAGES : continue if language not in tmp : tmp [ language ] = { } language_counts [ language ] = 0 language_counts [ language ] += 1 for keyword in sham...
Fetch keywords by shaman . KeywordFetcher Get average probabilities of keyword and language
55,397
def match_patterns ( codedata ) : ret = { } for index1 , pattern in enumerate ( shaman . PatternMatcher . PATTERNS ) : print ( 'Matching pattern %d "%s"' % ( index1 + 1 , pattern ) ) matcher = shaman . PatternMatcher ( pattern ) tmp = { } for index2 , ( language , code ) in enumerate ( codedata ) : if language not in s...
Match patterns by shaman . PatternMatcher Get average ratio of pattern and language
55,398
def facility ( self , column = None , value = None , ** kwargs ) : return self . _resolve_call ( 'RAD_FACILITY' , column , value , ** kwargs )
Check information related to Radiation facilities .
55,399
def geo ( self , column = None , value = None , ** kwargs ) : return self . _resolve_call ( 'RAD_GEO_LOCATION' , column , value , ** kwargs )
Locate a facility through geographic location .