idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
55,300
def add_list_member(self, list_id, user_id):
    """Add *user_id* to the list *list_id* and return the updated List."""
    raw = self._client.add_list_member(list_id=list_id, user_id=user_id)
    return List(tweepy_list_to_json(raw))
Add a user to a list
55,301
def remove_list_member(self, list_id, user_id):
    """Remove *user_id* from the list *list_id* and return the updated List."""
    raw = self._client.remove_list_member(list_id=list_id, user_id=user_id)
    return List(tweepy_list_to_json(raw))
Remove a user from a list
55,302
def list_members(self, list_id):
    """Return the Users who are members of the list *list_id*."""
    members = self._client.list_members(list_id=list_id)
    return [User(member._json) for member in members]
List users in a list
55,303
def is_list_member(self, list_id, user_id):
    """Return True if *user_id* is a member of *list_id*.

    Twitter signals non-membership with a specific API error code, which is
    translated into ``False``; any other TweepError is re-raised.
    """
    try:
        return bool(self._client.show_list_member(list_id=list_id, user_id=user_id))
    except TweepError as e:
        if e.api_code == TWITTER_USER_IS_NOT_LIST_MEMBER_SUBSCRIBER:
            return False
        raise
Check if a user is member of a list
55,304
def subscribe_list(self, list_id):
    """Subscribe the authenticated user to the list and return it."""
    raw = self._client.subscribe_list(list_id=list_id)
    return List(tweepy_list_to_json(raw))
Subscribe to a list
55,305
def unsubscribe_list(self, list_id):
    """Unsubscribe the authenticated user from the list and return it."""
    raw = self._client.unsubscribe_list(list_id=list_id)
    return List(tweepy_list_to_json(raw))
Unsubscribe from a list
55,306
def list_subscribers(self, list_id):
    """Return the Users subscribed to the list *list_id*."""
    subscribers = self._client.list_subscribers(list_id=list_id)
    return [User(subscriber._json) for subscriber in subscribers]
List subscribers of a list
55,307
def is_subscribed_list(self, list_id, user_id):
    """Return True if *user_id* subscribes to *list_id*.

    Non-subscription is reported by Twitter via a dedicated API error code
    and mapped to ``False``; other TweepErrors propagate.
    """
    try:
        return bool(self._client.show_list_subscriber(list_id=list_id, user_id=user_id))
    except TweepError as e:
        if e.api_code == TWITTER_USER_IS_NOT_LIST_MEMBER_SUBSCRIBER:
            return False
        raise
Check if a user is a subscriber of the specified list
55,308
def auth(config):
    """Authenticate with Twitter and return a ResponseBotClient.

    Raises APIQuotaError when rate-limited and AuthenticationError on any
    other credential failure.
    """
    handler = tweepy.OAuthHandler(config.get('consumer_key'), config.get('consumer_secret'))
    handler.set_access_token(config.get('token_key'), config.get('token_secret'))
    api = tweepy.API(handler)
    try:
        api.verify_credentials()
    except RateLimitError as e:
        raise APIQuotaError(e.args[0][0]['message'])
    except TweepError as e:
        raise AuthenticationError(e.args[0][0]['message'])
    # Both except branches raise, so this only runs on success.
    logging.info('Successfully authenticated as %s' % api.me().screen_name)
    return ResponseBotClient(config=config, client=api)
Perform authentication with Twitter and return a client instance to communicate with Twitter
55,309
def bind(self, instance_id: str, binding_id: str, details: BindDetails) -> Binding:
    """Bind a service instance; see the openbrokerapi documentation."""
    svc_instance = self._backend.find(instance_id)
    svc_binding = self._backend.find(binding_id, svc_instance)
    return self._backend.bind(svc_binding, details.parameters)
Bind the instance. See the openbrokerapi documentation.
55,310
def post(self, path, data=None):
    """Perform a POST request against the API.

    :param path: path appended to ``API_URL``
    :param data: JSON-serializable payload (defaults to an empty dict)
    :return: whatever ``self._check_response`` produces
    """
    # Fixed: ``data={}`` was a shared mutable default argument.
    payload = {} if data is None else data
    response = requests.post(API_URL + path,
                             data=json.dumps(payload),
                             headers=self._set_headers())
    return self._check_response(response, self.post, path, payload)
Perform POST Request
55,311
def delete(self, path, data=None):
    """Perform a DELETE request, encoding *data* as query parameters.

    :param path: path appended to ``API_URL``
    :param data: mapping rendered as ``?k=v&k2=v2`` (defaults to empty)
    """
    # Fixed: ``data={}`` was a shared mutable default, and the hand-built
    # query string carried a dangling trailing '&'.
    params = {} if data is None else data
    if params:
        # NOTE(review): values are not URL-encoded here, matching the
        # original behaviour — confirm callers only pass safe values.
        query = '&'.join('{}={}'.format(k, v) for k, v in params.items())
        path += '?' + query
    response = requests.delete(API_URL + path, headers=self._set_headers())
    return self._check_response(response, self.delete, path, params)
Perform DELETE Request
55,312
def parsed(self):
    """Lazily build and cache a ConfigParser for ``self.content``.

    :return: the (memoized) ConfigParser representing the content.
    """
    if not self._parsed:
        self._parsed = ConfigParser()
        # Fixed: readfp(io.StringIO(...)) was deprecated since Python 3.2
        # and removed in 3.12; read_string accepts the text directly.
        self._parsed.read_string(self.content)
    return self._parsed
Get the ConfigParser object which represents the content .
55,313
def create_cache(directory, compress_level=6, value_type_is_binary=False, **kwargs):
    """Create an HTML cache; stored strings are transparently compressed."""
    return diskcache.Cache(
        directory,
        disk=CompressedDisk,
        disk_compress_level=compress_level,
        disk_value_type_is_binary=value_type_is_binary,
        **kwargs
    )
Create a html cache . Html string will be automatically compressed .
55,314
def timeticks(tdiff):
    """Pick (major, minor) tick locators for a time span.

    NOTE: do NOT use interval= on the locators or ticks are misaligned;
    use bysecond/byminute only.
    """
    if isinstance(tdiff, xarray.DataArray):
        tdiff = timedelta(seconds=tdiff.values / np.timedelta64(1, 's'))
    assert isinstance(tdiff, timedelta), 'expecting datetime.timedelta'
    # Each branch's implicit upper bound is guaranteed by the earlier returns,
    # so the explicit lower-bound-only checks are equivalent to the original
    # double-ended (a < t) & (t <= b) tests.
    if tdiff > timedelta(hours=2):
        return None, None
    if tdiff > timedelta(minutes=20):
        return MinuteLocator(byminute=range(0, 60, 5)), MinuteLocator(byminute=range(0, 60, 2))
    if tdiff > timedelta(minutes=10):
        return MinuteLocator(byminute=range(0, 60, 2)), MinuteLocator(byminute=range(0, 60, 1))
    if tdiff > timedelta(minutes=5):
        return MinuteLocator(byminute=range(0, 60, 1)), SecondLocator(bysecond=range(0, 60, 30))
    if tdiff > timedelta(minutes=1):
        return SecondLocator(bysecond=range(0, 60, 30)), SecondLocator(bysecond=range(0, 60, 10))
    if tdiff > timedelta(seconds=30):
        return SecondLocator(bysecond=range(0, 60, 10)), SecondLocator(bysecond=range(0, 60, 2))
    return SecondLocator(bysecond=range(0, 60, 2)), SecondLocator(bysecond=range(0, 60, 1))
NOTE do NOT use interval or ticks are misaligned! use bysecond only!
55,315
def consume(self, msg):
    """Handle one incoming fedmsg compose message and trigger a compose run.

    Recognizes rawhide, branched and updates.fedora topics; maps them to a
    repo key and schedules ``self.compose`` on a reactor thread.
    """
    self.log.info(msg)
    body = msg['body']
    topic = body['topic']
    repo = None
    if 'rawhide' in topic:
        arch = body['msg']['arch']
        self.log.info('New rawhide %s compose ready', arch)
        repo = 'rawhide'
    elif 'branched' in topic:
        arch = body['msg']['arch']
        branch = body['msg']['branch']
        self.log.info('New %s %s branched compose ready', branch, arch)
        # Branched composes are only acted on once fully done.
        log = body['msg']['log']
        if log != 'done':
            self.log.warn('Compose not done?')
            return
        repo = branch
    elif 'updates.fedora' in topic:
        self.log.info('New Fedora %(release)s %(repo)s compose ready', body['msg'])
        repo = 'f%(release)s-%(repo)s' % body['msg']
    else:
        self.log.warn('Unknown topic: %s', topic)
    # NOTE(review): on an unknown topic repo stays None, so this lookup
    # raises KeyError — presumably unintended; confirm upstream.
    release = self.releases[repo]
    reactor.callInThread(self.compose, release)
Called with each incoming fedmsg .
55,316
def parse_addr(text):
    "Parse a 1- to 3-part address spec."
    if text:
        pieces = text.split(':')
        if len(pieces) == 3:
            return pieces[0], pieces[1], int(pieces[2])
        if len(pieces) == 2:
            return None, pieces[0], int(pieces[1])
        if len(pieces) == 1:
            return None, '', int(pieces[0])
    # Empty input or more than three parts: nothing parsed.
    return None, None, None
Parse a 1 - to 3 - part address spec .
55,317
def start(self):
    "Start the service"
    # Shut down cleanly on Ctrl-C.
    gevent.signal(signal.SIGINT, self._shutdown)

    # Background greenlet: periodically swap out the stats snapshot and
    # push it to the sink; errors are logged, never fatal.
    def _flush_impl():
        while 1:
            gevent.sleep(self._stats.interval)
            stats = self._stats
            self._reset_stats()
            try:
                self._sink.send(stats)
            except Exception, ex:
                trace = traceback.format_tb(sys.exc_info()[-1])
                self.error(''.join(trace))
    self._flush_task = gevent.spawn(_flush_impl)
    # UDP listener loop: split each datagram on newlines and feed every
    # non-empty line to the packet parser.
    self._sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
    self._sock.bind(self._bindaddr)
    while 1:
        try:
            data, _ = self._sock.recvfrom(MAX_PACKET)
            for p in data.split('\n'):
                if p:
                    self._process(p)
        except Exception, ex:
            self.error(str(ex))
Start the service
55,318
def _process(self, data):
    "Process a single packet and update the internal tables."
    # Packet format: key:value|type[|@samplerate], statsd-style.
    parts = data.split(':')
    if self._debug:
        self.error('packet: %r' % data)
    if not parts:
        return
    stats = self._stats
    # Sanitize the metric key and apply the optional global prefix.
    key = parts[0].translate(KEY_TABLE, KEY_DELETIONS)
    if self._key_prefix:
        key = '.'.join([self._key_prefix, key])
    for part in parts[1:]:
        srate = 1.0
        fields = part.split('|')
        length = len(fields)
        if length < 2:
            continue
        value = fields[0]
        stype = fields[1].strip()
        # Guard the shared tables against the flush greenlet.
        with stats_lock:
            if stype == 'ms':
                # Timer: record the raw duration sample.
                stats.timers[key].append(float(value if value else 0))
            elif stype == 'c':
                # Counter: scale by the inverse sample rate if present.
                if length == 3 and fields[2].startswith('@'):
                    srate = float(fields[2][1:])
                value = float(value if value else 1) * (1 / srate)
                stats.counts[key] += value
            elif stype == 'g':
                # Gauge: last write wins.
                value = float(value if value else 1)
                stats.gauges[key] = value
Process a single packet and update the internal tables .
55,319
def section(self, resources):
    """Return the enclosing 'section' resource for this resource, or None."""
    for parent in self.parents(resources):
        if parent.rtype == 'section':
            return parent
    return None
Which section is this in if any
55,320
def in_navitem(self, resources, nav_href):
    """Given the href of a nav item, tell whether this resource is under it."""
    target = nav_href[:-6] if nav_href.endswith('/index') else nav_href
    return self.docname.startswith(target)
Given href of nav item determine if resource is in it
55,321
def is_published(self):
    """Return True when this resource has a published date in the past."""
    published = self.props.published
    if not published:
        return False
    return published < datetime.now()
Return true if this resource has published date in the past
55,322
def _create_driver(self, **kwargs):
    """Create the webdriver (if not already present), assign it to
    ``self.driver`` and run the driver-initiation hook, which is usually
    used for manual login."""
    if self.driver is None:
        self.driver = self.create_driver(**kwargs)
        # NOTE(review): reconstructed from flattened source — the init hook
        # may originally run on every call rather than only on creation;
        # confirm against upstream history.
        self.init_driver_func(self.driver)
Create webdriver assign it to self . driver and run webdriver initiation process which is usually used for manual login .
55,323
def deserialize_time(data):
    """Parse *data* and return its time-of-day, preserving any tzinfo."""
    moment = parser.parse(data)
    return moment.time().replace(tzinfo=moment.tzinfo)
Return a time instance based on the values of the data param
55,324
def require(*args, **kwargs):
    """Install packages with pip, requirements.txt-style, from a notebook.

    With no arguments, delegates to ``freeze()``. Keyword arguments are
    rendered as ``name<spec>`` requirement strings.
    """
    if not args and not kwargs:
        return freeze()
    reqs = list(args)
    reqs.extend('{}{}'.format(name, kwargs[name]) for name in kwargs)
    pip.main(['install', '-q'] + reqs)
Install a set of packages using pip This is designed to be an interface for IPython notebooks that replicates the requirements . txt pip format . This lets notebooks specify which versions of packages they need inside the notebook itself .
55,325
def handle(self, *args, **options):
    """Diff the current database schema against a freshly-migrated one.

    Dumps the current schema, creates a scratch database, runs syncdb
    against it, dumps that schema too, and prints a unified diff.
    """
    self.db = options.get("database", DEFAULT_DB_ALIAS)
    self.current_name = connections[self.db].settings_dict["NAME"]
    self.compare_name = options.get("db_name")
    self.lines = options.get("lines")
    self.ignore = int(options.get('ignore'))
    if not self.compare_name:
        self.compare_name = "%s_compare" % self.current_name
    command = NASHVEGAS.get("dumpdb", "pg_dump -s {dbname}")
    print "Getting schema for current database..."
    # NOTE(review): first Popen uses shell=True, second splits the command —
    # inconsistent; confirm which form is intended.
    current_sql = Popen(command.format(dbname=self.current_name), shell=True, stdout=PIPE).stdout.readlines()
    print "Getting schema for fresh database..."
    self.setup_database()
    connections[self.db].close()
    # Temporarily point the connection at the scratch database.
    connections[self.db].settings_dict["NAME"] = self.compare_name
    try:
        call_command("syncdb", interactive=False, verbosity=0, migrations=False)
        new_sql = Popen(command.format(dbname=self.compare_name).split(), stdout=PIPE).stdout.readlines()
    finally:
        # Always restore the original connection and drop the scratch DB.
        connections[self.db].close()
        connections[self.db].settings_dict["NAME"] = self.current_name
        self.teardown_database()
    print "Outputing diff between the two..."
    print "".join(difflib.unified_diff(normalize_sql(current_sql, self.ignore), normalize_sql(new_sql, self.ignore), n=int(self.lines)))
Compares the current database with the migrations. Creates a temporary database, applies all the migrations to it, dumps the schema from both the current and temporary databases, diffs them, and then reports the diffs to the user.
55,326
def render_widgets(kb_app: kb, sphinx_app: Sphinx, doctree: doctree, fromdocname: str):
    """Walk the doctree and replace each widget directive with rendered HTML."""
    builder: StandaloneHTMLBuilder = sphinx_app.builder
    for node in doctree.traverse(widget):
        w = sphinx_app.env.widgets.get(node.name)
        # Render against a copy of the global context plus site data.
        context = builder.globalcontext.copy()
        context['resources'] = sphinx_app.env.resources
        context['references'] = sphinx_app.env.references
        html = w.render(sphinx_app, context)
        node.replace_self([nodes.raw('', html, format='html')])
Go through docs and replace widget directive with rendering
55,327
def auth_string(self):
    """Return the Bearer auth string, refreshing an expired token if allowed.

    Raises TokenExpired when the token is expired and auto refresh is off.
    """
    if not self._token:
        self.execute()
    if not self._token.expired:
        return 'Bearer {}'.format(self._token.access_token)
    if self.auto_refresh:
        self.execute()
        return 'Bearer {}'.format(self._token.access_token)
    raise TokenExpired()
Get the auth string . If the token is expired and auto refresh enabled a new token will be fetched
55,328
def admin_penalty(self, column=None, value=None, **kwargs):
    """Query administrative penalty orders (PCS_ADMIN_PENALTY_ORDER)."""
    return self._resolve_call('PCS_ADMIN_PENALTY_ORDER', column, value, **kwargs)
An enforcement action that results in levying the permit holder with a penalty or fine . It is used to track judicial hearing dates penalty amounts and type of administrative penalty order .
55,329
def compliance_schedule(self, column=None, value=None, **kwargs):
    """Query compliance schedules (PCS_CMPL_SCHD)."""
    return self._resolve_call('PCS_CMPL_SCHD', column, value, **kwargs)
A sequence of activities with associated milestones which pertains to a given permit .
55,330
def compliance_violation(self, column=None, value=None, **kwargs):
    """Query compliance schedule violations (PCS_CMPL_SCHD_VIOL)."""
    return self._resolve_call('PCS_CMPL_SCHD_VIOL', column, value, **kwargs)
A compliance schedule violation reflects the non-achievement of a given compliance schedule event, including the type of violation and type of resolution.
55,331
def enforcement_action(self, column=None, value=None, **kwargs):
    """Query enforcement actions (PCS_ENFOR_ACTION)."""
    return self._resolve_call('PCS_ENFOR_ACTION', column, value, **kwargs)
A disciplinary action taken against a permit facility . The action may be applicable to one or more violations .
55,332
def hearing(self, column=None, value=None, **kwargs):
    """Query evidentiary hearing events (PCS_EVIDENTIARY_HEARING_EVENT)."""
    return self._resolve_call('PCS_EVIDENTIARY_HEARING_EVENT', column, value, **kwargs)
An evidentiary hearing .
55,333
def industrial_user(self, column=None, value=None, **kwargs):
    """Query industrial-user audit info (PCS_INDUSTRIAL_USER_INFO)."""
    return self._resolve_call('PCS_INDUSTRIAL_USER_INFO', column, value, **kwargs)
Information from the PCI_AUDIT table pertaining to industrial users i . e . the number of significant industrial users .
55,334
def permit_event(self, column=None, value=None, **kwargs):
    """Query permit lifecycle events (PCS_PERMIT_EVENT)."""
    return self._resolve_call('PCS_PERMIT_EVENT', column, value, **kwargs)
A permit event tracks the lifecycle of a permit from issuance to expiration . Examples include Application Received and Permit Issued etc .
55,335
def pipe_schedule(self, column=None, value=None, **kwargs):
    """Query pipe schedules (PCS_PIPE_SCHED)."""
    return self._resolve_call('PCS_PIPE_SCHED', column, value, **kwargs)
Particular discharge points at a permit facility that are governed by effluent limitations and monitoring and submission requirements .
55,336
def single_violation(self, column=None, value=None, **kwargs):
    """Query single event violations (PCS_SINGLE_EVENT_VIOL)."""
    return self._resolve_call('PCS_SINGLE_EVENT_VIOL', column, value, **kwargs)
A single event violation is a one - time event that occurred on a fixed date and is associated with one permitted facility .
55,337
def typify(value: Union[dict, list, set, str]):
    """Enhance block operation values with native types.

    Recurses into containers; converts asset-amount strings and ISO
    timestamps, leaving everything else untouched.
    """
    kind = type(value)
    if kind == dict:
        return walk_values(typify, value)
    if kind in [list, set]:
        return list(map(typify, value))
    if kind == str:
        if re.match('^\d+\.\d+ (STEEM|SBD|VESTS)$', value):
            return keep_in_dict(dict(Amount(value)), ['amount', 'asset'])
        if re.match('^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}$', value):
            return parse_time(value)
    return value
Enhance block operation with native types .
55,338
def json_expand(json_op):
    """For custom_json ops: parse the embedded 'json' payload in place."""
    is_custom_json = type(json_op) == dict and 'json' in json_op
    if is_custom_json:
        return update_in(json_op, ['json'], safe_json_loads)
    return json_op
For custom_json ops .
55,339
def delete(self, subnet_id):
    """Delete a subnet and then its parent VPC.

    WARNING: a VPC may own several subnets; deleting the VPC here is only
    safe when this library manages the whole resource.
    """
    described = self.client.describe_subnets(SubnetIds=[subnet_id])
    parent_vpc = described.get('Subnets')[0].get('VpcId')
    self.client.delete_subnet(SubnetId=subnet_id)
    return self.client.delete_vpc(VpcId=parent_vpc)
This is a bad delete function because one VPC can have more than one subnet. It is OK if the user only uses CAL to manage cloud resources. We will update ASAP.
55,340
def _clean_terminals ( self ) : new_terminals = [ ] for term in self . grammar . grammar_terminals : x_term = term . rfind ( '@' ) y_term = term . rfind ( 'A' ) if y_term > x_term : x_term = y_term ids = term [ x_term + 1 : ] . split ( ',' ) if len ( ids ) < 2 : new_terminals . append ( term ) self . grammar . grammar_terminals = new_terminals
Because of the optimization there are some non existing terminals on the generated list . Remove them by checking for terms in form Ax x
55,341
def _check_self_replicate(self, myntr):
    """Propagate a resolved nonterminal through unit rules.

    For each rule B -> c where c is known, searches for occurrences of
    *myntr* on the right-hand side of simple rules A -> B and resolves A
    to the same value. Returns the final resolution if one completes the
    grammar, 1 if any new resolution was made, else 0.
    """
    find = 0
    for nonterm in self.grammar.grammar_nonterminals_map:
        for i in self.grammar.grammar_nonterminals_map[nonterm]:
            # Only simple (non-set/tuple) right-hand sides that equal the
            # nonterminal being propagated, and whose LHS is unresolved.
            if self.grammar.grammar_rules[i][0] not in self.resolved and not isinstance(self.grammar.grammar_rules[i][1], (set, tuple)) and self.grammar.grammar_rules[i][1] == myntr:
                self.resolved[self.grammar.grammar_rules[i][0]] = self.resolved[myntr]
                if self._checkfinal(self.grammar.grammar_rules[i][0]):
                    return self.resolved[self.grammar.grammar_rules[i][0]]
                # Queue the newly resolved symbol for further BFS expansion.
                if self.grammar.grammar_rules[i][0] not in self.bfs_queue:
                    self.bfs_queue.append(self.grammar.grammar_rules[i][0])
                find = 1
    if find == 1:
        return 1
    return 0
For each Rule B - > c where c is a known terminal this function searches for B occurences in rules with the form A - > B and sets A - > c .
55,342
def describe(self):
    """Return a multi-line, human-readable description of this Symbol."""
    out = ["Symbol = {}".format(self.name)]
    if len(self.tags):
        out.append("  tagged = {}".format(", ".join(x.tag for x in self.tags)))
    if len(self.aliases):
        out.append("  aliased = {}".format(", ".join(x.alias for x in self.aliases)))
    if len(self.feeds):
        out.append("  feeds:")
        for fed in self.feeds:
            out.append("    {}. {}".format(fed.fnum, fed.ftype))
    return "\n".join(out)
describes a Symbol returns a string
55,343
def datatable_df(self):
    """Return the DataFrame representation of the symbol's stored data."""
    frame = pd.DataFrame(self._all_datatable_data())
    frame.columns = self.dt_all_cols
    return self._finish_df(frame, 'ALL')
Returns the dataframe representation of the symbol's final data.
55,344
def _init_datatable(self):
    """Point ``self.datatable`` at the symbol's cache table in the database,
    creating the table if it does not exist yet."""
    try:
        # Reflect an existing table from the database.
        self.datatable = Table(self.name, Base.metadata, autoload=True)
    except NoSuchTableError:
        print "Creating datatable, cause it doesn't exist"
        self.datatable = self._datatable_factory()
        self.datatable.create()
    self.datatable_exists = True
Instantiates the . datatable attribute pointing to a table in the database that stores all the cached data
55,345
def _datatable_factory(self):
    """Build the SQLAlchemy Table for this symbol, sized to its feed count.

    Columns: an index primary key, a 'final' value column, and one value
    column per feed bracketed by override/failsafe feeds.
    """
    feed_cols = ['feed{0:03d}'.format(i + 1) for i in range(self.n_feeds)]
    feed_cols = ['override_feed000'] + feed_cols + ['failsafe_feed999']
    index_type = indexingtypes[self.index.indimp].sqlatyp
    value_type = datadefs[self.dtype.datadef].sqlatyp
    table = Table(self.name, Base.metadata,
                  Column('indx', index_type, primary_key=True),
                  Column('final', value_type),
                  *(Column(name, value_type) for name in feed_cols),
                  extend_existing=True)
    self.dt_feed_cols = feed_cols[:]
    self.dt_all_cols = ['indx', 'final'] + feed_cols[:]
    return table
creates a SQLAlchemy Table object with the appropriate number of columns given the number of feeds
55,346
def add_tags(self, tags):
    """Attach one tag string, or a list of them, to this Feed and commit."""
    if isinstance(tags, (str, unicode)):
        tags = [tags]
    session = object_session(self)
    session.add_all([FeedTag(tag=t, feed=self) for t in tags])
    session.commit()
add a tag or tags to a Feed
55,347
def initiate_browser(self):
    """Start a Firefox webdriver that downloads silently into a unique temp
    dir, open webbstat.av.se and switch to the detailed ("Avancerad") tab.

    The advanced-tab button also serves as a good indicator of whether all
    content has loaded.
    """
    # Unique per-session download directory.
    tempdir = os.getenv(TEMPDIR_ENVVAR, DEFAULT_TEMPDIR)
    tempsubdir = uuid4().hex
    self.tempdir = os.path.join(tempdir, tempsubdir)
    try:
        os.makedirs(self.tempdir)
    except OSError:
        # An already-existing directory is fine; anything else re-raises.
        if not os.path.isdir(self.tempdir):
            raise
    profile = webdriver.FirefoxProfile()
    # folderList=2: download into the custom dir configured below.
    profile.set_preference('browser.download.folderList', 2)
    profile.set_preference('browser.download.manager.showWhenStarting', False)
    profile.set_preference('browser.download.manager.closeWhenDone', True)
    profile.set_preference('browser.download.dir', self.tempdir)
    # Save spreadsheet MIME types without the save-as dialog.
    profile.set_preference("browser.helperApps.neverAsk.saveToDisk", "application/octet-stream;application/vnd.ms-excel")
    profile.set_preference("browser.helperApps.alwaysAsk.force", False)
    profile.set_preference("browser.download.manager.useWindow", False)
    self.browser = webdriver.Firefox(profile)
    self.browser.get('http://webbstat.av.se')
    detailed_cls = "Document_TX_GOTOTAB_Avancerad"
    # Wait for the advanced tab to appear, click it, then wait again for
    # the page to settle after the tab switch.
    WebDriverWait(self.browser, PAGELOAD_TIMEOUT).until(EC.presence_of_element_located((By.CLASS_NAME, detailed_cls)))
    self.browser.implicitly_wait(3)
    self.browser.find_element_by_class_name(detailed_cls).find_element_by_tag_name("td").click()
    WebDriverWait(self.browser, PAGELOAD_TIMEOUT).until(EC.presence_of_element_located((By.CLASS_NAME, detailed_cls)))
    self.browser.implicitly_wait(3)
The button for expanded detailed options . This also happens to be a good indicator as to wheter all content is loaded .
55,348
def step_I_create_logrecords_with_table(context):
    """Create one or more log records from a behave table with the columns
    category/level/message; '__ROOT__' selects the root logger."""
    assert context.table, "REQUIRE: context.table"
    context.table.require_columns(["category", "level", "message"])
    for row in context.table.rows:
        category = row["category"]
        if category == "__ROOT__":
            category = None
        make_log_record(category, LogLevel.parse_type(row["level"]), row["message"])
Step definition that creates one or more log records by using a table.
55,349
def step_I_create_logrecord_with_table(context):
    """Create exactly one log record from a single-row behave table."""
    assert context.table, "REQUIRE: context.table"
    assert len(context.table.rows) == 1, "REQUIRE: table.row.size == 1"
    step_I_create_logrecords_with_table(context)
Create a log record by using a table to provide the parts.
55,350
def step_use_log_record_configuration(context):
    """Store log-record 'format'/'datefmt' settings from a property/value
    table on the behave context; unknown properties raise KeyError."""
    assert context.table, "REQUIRE: context.table"
    context.table.require_columns(["property", "value"])
    for row in context.table.rows:
        name = row["property"]
        value = row["value"]
        if name == "format":
            context.log_record_format = value
        elif name == "datefmt":
            context.log_record_datefmt = value
        else:
            raise KeyError("Unknown property=%s" % name)
Define log record configuration parameters .
55,351
def smart_decode(binary, errors="strict"):
    """Detect the codec of *binary* with chardet and decode it.

    Returns (text, encoding, confidence).
    """
    detection = chardet.detect(binary)
    enc = detection["encoding"]
    confidence = detection["confidence"]
    return binary.decode(enc, errors=errors), enc, confidence
Automatically find the right codec to decode binary data to string .
55,352
def decode(self, binary, url, encoding=None, errors="strict"):
    """Decode binary to string, caching sniffed encodings per domain.

    An explicit *encoding* is used as-is; otherwise a cached per-domain
    codec is tried, and failing that the codec is auto-detected and cached.
    """
    if encoding is not None:
        return binary.decode(encoding, errors=errors)
    domain = util.get_domain(url)
    if domain in self.domain_encoding_table:
        known = self.domain_encoding_table[domain]
        return binary.decode(known, errors=errors)
    html, detected, confidence = smart_decode(binary, errors=errors)
    self.domain_encoding_table[domain] = detected
    return html
Decode binary to string .
55,353
def modify_number_pattern(number_pattern, **kwargs):
    """Return a copy of *number_pattern* with the given attributes overridden."""
    params = ['pattern', 'prefix', 'suffix', 'grouping',
              'int_prec', 'frac_prec', 'exp_prec', 'exp_plus']
    for param in params:
        # Anything not explicitly overridden is copied from the original.
        kwargs.setdefault(param, getattr(number_pattern, param))
    return NumberPattern(**kwargs)
Modifies a number pattern by specified keyword arguments .
55,354
def format_currency_field(__, prec, number, locale):
    """Format a currency field for *locale*, at optional precision *prec*."""
    locale = Locale.parse(locale)
    currency = get_territory_currencies(locale.territory)[0]
    if prec is None:
        pattern = None
        currency_digits = True
    else:
        digits = int(prec)
        pattern = modify_number_pattern(locale.currency_formats['standard'],
                                        frac_prec=(digits, digits))
        currency_digits = False
    return format_currency(number, currency, pattern, locale=locale,
                           currency_digits=currency_digits)
Formats a currency field .
55,355
def format_float_field(__, prec, number, locale):
    """Format a fixed-point field at precision *prec* (default digits when None)."""
    if prec is None:
        pattern_src = u'0.' + u'#' * NUMBER_DECIMAL_DIGITS
    else:
        pattern_src = u'0.' + u'0' * int(prec)
    return parse_pattern(pattern_src).apply(number, locale)
Formats a fixed - point field .
55,356
def format_number_field(__, prec, number, locale):
    """Format a decimal number field with the locale's default pattern."""
    digits = NUMBER_DECIMAL_DIGITS if prec is None else int(prec)
    loc = Locale.parse(locale)
    return loc.decimal_formats.get(None).apply(number, loc, force_frac=(digits, digits))
Formats a number field .
55,357
def format_percent_field(__, prec, number, locale):
    """Format a percent field with the locale's default percent pattern."""
    digits = PERCENT_DECIMAL_DIGITS if prec is None else int(prec)
    loc = Locale.parse(locale)
    return loc.percent_formats.get(None).apply(number, loc, force_frac=(digits, digits))
Formats a percent field .
55,358
def format_hexadecimal_field(spec, prec, number, locale):
    """Format a hexadecimal field.

    Negative numbers are rendered as their two's-complement value in the
    smallest whole number of bytes that fits.
    """
    if number < 0:
        nbytes = int(math.log(-number, 1 << 8) + 1)
        number &= (1 << (8 * nbytes)) - 1
    format_ = u'0%d%s' % (int(prec or 0), spec)
    return format(number, format_)
Formats a hexadecimal field.
55,359
def delegate(attribute_name, method_names):
    """Decorator factory: forward each named method to *attribute_name*."""
    forward = partial(_call_delegated_method, attribute_name)

    def decorate(class_):
        for name in method_names:
            setattr(class_, name, partialmethod(forward, name))
        return class_
    return decorate
Decorator factory to delegate methods to an attribute .
55,360
def prepare_query(query):
    """Normalize a RAPI query mapping in place.

    None becomes "", booleans become 0/1, and nested dicts raise ValueError.
    """
    for name in list(query):
        current = query[name]
        if current is None:
            query[name] = ""
        elif isinstance(current, bool):
            query[name] = int(current)
        elif isinstance(current, dict):
            raise ValueError("Invalid query data type %r" % type(current).__name__)
Prepare a query object for the RAPI .
55,361
def itemgetters(*args):
    """Return a function that maps ``itemgetter(*args)`` over an iterable."""
    getter = itemgetter(*args)

    def mapped(seq):
        return [getter(elem) for elem in seq]
    return mapped
Get a handful of items from an iterable .
55,362
def stat_container(self, container):
    """Return container metadata from the backing driver."""
    LOG.debug('stat_container() with %s is success.', self.driver)
    return self.driver.stat_container(container)
Stat container metadata
55,363
def update_container(self, container, metadata, **kwargs):
    """Update container metadata via the backing driver."""
    # Fixed: this previously logged 'update_object()' for a container update.
    LOG.debug('update_container() with %s is success.', self.driver)
    return self.driver.update_container(container, metadata, **kwargs)
Update container metadata
55,364
def stat_object(self, container, obj):
    """Return object metadata from the backing driver."""
    LOG.debug('stat_object() with %s is success.', self.driver)
    return self.driver.stat_object(container, obj)
Stat object metadata
55,365
def delete_object(self, container, obj, **kwargs):
    """Delete *obj* from *container*, logging driver exceptions.

    Driver errors are swallowed after logging (returns None in that case),
    matching the original best-effort behavior.
    """
    try:
        LOG.debug('delete_object() with %s is success.', self.driver)
        return self.driver.delete_object(container, obj, **kwargs)
    except DriverException as e:
        # Fixed: the exception log previously said 'download_object()'.
        LOG.exception('delete_object() with %s raised an exception %s.',
                      self.driver, e)
Delete object in container
55,366
def list_container_objects(self, container, prefix=None, delimiter=None):
    """List the objects in *container* via the backing driver."""
    LOG.debug('list_container_objects() with %s is success.', self.driver)
    return self.driver.list_container_objects(container, prefix, delimiter)
List container objects
55,367
def update_object(self, container, obj, metadata, **kwargs):
    """Update object metadata, logging driver exceptions.

    Driver errors are swallowed after logging (returns None in that case),
    matching the original best-effort behavior.
    """
    try:
        LOG.debug('update_object() with %s is success.', self.driver)
        return self.driver.update_object(container, obj, metadata, **kwargs)
    except DriverException as e:
        # Fixed: the exception log previously said 'copy_object()'.
        LOG.exception('update_object() with %s raised an exception %s.',
                      self.driver, e)
Update object metadata
55,368
def get_path_fields(cls, base=[]):
    """Collect the field paths used to build django-tutelary object paths.

    Foreign keys recurse into the target model; 'pk' is taken literally.
    (The mutable default *base* is never mutated, so it is safe here.)
    """
    paths = []
    for pf in cls.TutelaryMeta.path_fields:
        if pf == 'pk':
            paths.append(base + ['pk'])
            continue
        field = cls._meta.get_field(pf)
        if isinstance(field, models.ForeignKey):
            paths += get_path_fields(field.target_field.model, base=base + [pf])
        else:
            paths.append(base + [field.name])
    return paths
Get object fields used for calculation of django - tutelary object paths .
55,369
def get_perms_object(obj, action):
    """Build the django-tutelary path Object for *obj* from TutelaryMeta.pfs.

    String entries are used verbatim; list entries are attribute chains
    followed from *obj*.
    """
    def render(pf):
        if isinstance(pf, str):
            return pf
        return str(reduce(lambda acc, field: getattr(acc, field), pf, obj))
    return Object([render(pf) for pf in obj.__class__.TutelaryMeta.pfs])
Get the django - tutelary path for an object based on the fields listed in TutelaryMeta . pfs .
55,370
def permissioned_model(cls, perm_type=None, path_fields=None, actions=None):
    """Set up a Django model for django-tutelary permissioning.

    Can be called directly with a class plus perm_type/path_fields/actions,
    or used as a class decorator that reads those values from the class's
    own ``TutelaryMeta``. Registers actions, wires up permissions-object
    resolution, and rolls back a freshly-added TutelaryMeta on failure.
    """
    if not issubclass(cls, models.Model):
        raise DecoratorException('permissioned_model', "class '" + cls.__name__ + "' is not a Django model")
    added = False
    try:
        if not hasattr(cls, 'TutelaryMeta'):
            # Direct-call form: all three arguments are mandatory.
            if perm_type is None or path_fields is None or actions is None:
                raise DecoratorException('permissioned_model', ("missing argument: all of perm_type, path_fields and " + "actions must be supplied"))
            added = True
            cls.TutelaryMeta = type('TutelaryMeta', (object,), dict(perm_type=perm_type, path_fields=path_fields, actions=actions))
        cls.TutelaryMeta.pfs = ([cls.TutelaryMeta.perm_type] + get_path_fields(cls))
        perms_objs = {}
        for a in cls.TutelaryMeta.actions:
            # Actions are either a bare name or a (name, options) tuple.
            an = a
            ap = {}
            if isinstance(a, tuple):
                an = a[0]
                ap = a[1]
            Action.register(an)
            if isinstance(ap, dict) and 'permissions_object' in ap:
                po = ap['permissions_object']
                if po is not None:
                    # The permissions object must be a FK/O2O field.
                    # NOTE(review): bare except here also masks the inner
                    # PermissionObjectException deliberately re-raised below.
                    try:
                        t = cls._meta.get_field(po).__class__
                        if t not in [models.ForeignKey, models.OneToOneField]:
                            raise PermissionObjectException(po)
                    except:
                        raise PermissionObjectException(po)
                    perms_objs[an] = po
        if len(perms_objs) == 0:
            cls.get_permissions_object = get_perms_object
        else:
            cls.get_permissions_object = make_get_perms_object(perms_objs)
        return cls
    except:
        # Undo a TutelaryMeta we added before propagating the error.
        if added:
            del cls.TutelaryMeta
        raise
Function to set up a model for permissioning . Can either be called directly passing a class and suitable values for perm_type path_fields and actions or can be used as a class decorator taking values for perm_type path_fields and actions from the TutelaryMeta subclass of the decorated class .
55,371
def _getArrays ( items , attr , defaultValue ) : arrays = dict ( [ ( key , [ ] ) for key in attr ] ) for item in items : for key in attr : arrays [ key ] . append ( getattr ( item , key , defaultValue ) ) for key in [ _ for _ in viewkeys ( arrays ) ] : arrays [ key ] = numpy . array ( arrays [ key ] ) return arrays
Return arrays with equal size of item attributes from a list of sorted items for fast and convenient data processing .
55,372
def addMsrunContainers(mainContainer, subContainer):
    """Merge all specfile entries of ``subContainer`` into ``mainContainer``.

    Specfiles already present in ``mainContainer.info`` are skipped
    entirely; their contents are NOT overwritten or merged.

    :param mainContainer: target ``MsrunContainer``-like object.
    :param subContainer: source ``MsrunContainer``-like object.
    """
    # Maps datatype status keys to the container attribute holding the data.
    typeToContainer = {'rm': 'rmc', 'ci': 'cic', 'smi': 'smic',
                       'sai': 'saic', 'si': 'sic'}
    for specfile in subContainer.info:
        if specfile in mainContainer.info:
            # Existing entries take precedence; skip this specfile.
            continue
        mainContainer.addSpecfile(specfile, subContainer.info[specfile]['path'])
        for datatype, status in listitems(subContainer.info[specfile]['status']):
            if not status:
                # Datatype not loaded in the sub container; nothing to copy.
                continue
            datatypeContainer = typeToContainer[datatype]
            dataTypeContainer = getattr(mainContainer, datatypeContainer)
            # NOTE(review): the data object is shared by reference, not
            # copied — both containers point at the same entry afterwards.
            subContainerData = getattr(subContainer, datatypeContainer)[specfile]
            dataTypeContainer[specfile] = subContainerData
            mainContainer.info[specfile]['status'][datatype] = True
Adds the complete content of all specfile entries from the subContainer to the mainContainer . However if a specfile of subContainer . info is already present in mainContainer . info its contents are not added to the mainContainer .
55,373
def setPath(self, folderpath, specfiles=None):
    """Change the folder path of the specified specfiles.

    The folder path is used when saving and loading of ``mrc`` files.

    :param folderpath: new folder path to assign.
    :param specfiles: one or more specfile names; defaults to all
        specfiles present in ``self.info``.
    """
    if specfiles is None:
        targets = [name for name in viewkeys(self.info)]
    else:
        targets = aux.toList(specfiles)
    _containerSetPath(self, folderpath, targets)
Changes the folderpath of the specified specfiles . The folderpath is used for saving and loading of mrc files .
55,374
def removeSpecfile(self, specfiles):
    """Completely remove the specified specfiles from the container.

    Entries absent from a particular datatype container are ignored;
    the ``info`` entry itself must exist.

    :param specfiles: one specfile name or a list of names.
    """
    for specfile in aux.toList(specfiles):
        # Drop the specfile from every datatype container that has it.
        for attrname in ('rmc', 'cic', 'smic', 'saic', 'sic'):
            getattr(self, attrname).pop(specfile, None)
        del self.info[specfile]
Completely removes the specified specfiles from the msrunContainer .
55,375
def _processDatatypes ( self , rm , ci , smi , sai , si ) : datatypes = list ( ) for datatype , value in [ ( 'rm' , rm ) , ( 'ci' , ci ) , ( 'smi' , smi ) , ( 'sai' , sai ) , ( 'si' , si ) ] : if value : datatypes . append ( datatype ) return datatypes
Helper function that returns a list of datatype strings, selected according to each parameter's boolean value.
55,376
def save(self, specfiles=None, rm=False, ci=False, smi=False, sai=False,
         si=False, compress=True, path=None):
    """Write the specified datatypes to ``mrc`` files on the hard disk.

    :param specfiles: one or more specfile names; defaults to all
        specfiles in ``self.info``. Unknown names produce a warning.
    :param rm, ci, smi, sai, si: datatype selection flags; if none is
        set, ALL datatypes are written.
    :param compress: passed to the binary writers (not used for 'rm').
    :param path: optional output folder; defaults to each specfile's
        own recorded path.
    """
    if specfiles is None:
        specfiles = [_ for _ in viewkeys(self.info)]
    else:
        specfiles = aux.toList(specfiles)
    datatypes = self._processDatatypes(rm, ci, smi, sai, si)
    if len(datatypes) == 0:
        # No explicit selection means "save everything".
        datatypes = ['rm', 'ci', 'smi', 'sai', 'si']
    for specfile in specfiles:
        if specfile not in self.info:
            warntext = 'Error while calling "MsrunContainer.save()": "%s" ' 'is not present in "MsrunContainer.info"!' % (specfile, )
            warnings.warn(warntext)
            continue
        else:
            msrunInfo = self.info[specfile]
            specfilePath = msrunInfo['path'] if path is None else path
        # All files of one specfile are replaced atomically together.
        with aux.PartiallySafeReplace() as msr:
            for datatype in datatypes:
                filename = specfile + '.mrc_' + datatype
                filepath = aux.joinpath(specfilePath, filename)
                with msr.open(filepath, 'w+b') as openfile:
                    if datatype == 'rm':
                        self._writeRmc(openfile, specfile)
                    elif datatype == 'ci':
                        self._writeCic(openfile, specfile, compress)
                    elif datatype == 'si':
                        self._writeSic(openfile, specfile, compress)
                    elif datatype == 'smi':
                        self._writeSmic(openfile, specfile, compress)
                    elif datatype == 'sai':
                        self._writeSaic(openfile, specfile, compress)
Writes the specified datatypes to mrc files on the hard disk .
55,377
def _writeRmc(self, filelike, specfile):
    """Write the ``rmc`` entry of *specfile* as a pretty-printed xml string.

    :param filelike: binary file-like object to write to.
    :param specfile: specfile name whose run metadata is serialized.
    """
    filelike.write(ETREE.tostring(self.rmc[specfile], pretty_print=True))
Writes the . rmc container entry of the specified specfile as an human readable and pretty formatted xml string .
55,378
def load(self, specfiles=None, rm=False, ci=False, smi=False, sai=False,
         si=False):
    """Import the specified datatypes from ``mrc`` files on the hard disk.

    :param specfiles: one or more specfile names; defaults to all
        specfiles in ``self.info``. Unknown names produce a warning and
        are skipped.
    :param rm, ci, smi, sai, si: datatype selection flags; if none is
        set, ALL datatypes are loaded.
    """
    if specfiles is None:
        specfiles = [_ for _ in viewkeys(self.info)]
    else:
        specfiles = aux.toList(specfiles)
    # Keep only specfiles that are actually registered in the container.
    selectedSpecfiles = list()
    for specfile in specfiles:
        if specfile not in self.info:
            warntext = 'Error while calling "MsrunContainer.load()": "%s" ' 'not present in MsrunContainer.info' % specfile
            warnings.warn(warntext)
        else:
            selectedSpecfiles.append(specfile)
    datatypes = self._processDatatypes(rm, ci, smi, sai, si)
    if len(datatypes) == 0:
        # No explicit selection means "load everything".
        datatypes = ['rm', 'ci', 'smi', 'sai', 'si']
    for specfile in selectedSpecfiles:
        msrunInfo = self.info[specfile]
        specfilePath = msrunInfo['path']
        if 'rm' in datatypes:
            # Run metadata is plain xml.
            rmPath = aux.joinpath(specfilePath, specfile + '.mrc_rm')
            with open(rmPath, 'rb') as openfile:
                xmlString = openfile.read()
            self.rmc[specfile] = ETREE.fromstring(xmlString)
            msrunInfo['status']['rm'] = True
        if 'ci' in datatypes:
            ciPath = aux.joinpath(specfilePath, specfile + '.mrc_ci')
            self.cic[specfile] = aux.loadBinaryItemContainer(ciPath,
                                                            Ci.jsonHook)
            msrunInfo['status']['ci'] = True
        if 'smi' in datatypes:
            # smi entries are stored as a json payload inside a zip archive.
            smiPath = aux.joinpath(specfilePath, specfile + '.mrc_smi')
            with zipfile.ZipFile(smiPath, 'r') as containerZip:
                jsonString = io.TextIOWrapper(containerZip.open('data'),
                                              encoding='utf-8').read()
            self.smic[specfile] = json.loads(jsonString,
                                             object_hook=Smi.jsonHook)
            msrunInfo['status']['smi'] = True
        if 'sai' in datatypes:
            saiPath = aux.joinpath(specfilePath, specfile + '.mrc_sai')
            self.saic[specfile] = aux.loadBinaryItemContainer(saiPath,
                                                              Sai.jsonHook)
            msrunInfo['status']['sai'] = True
        if 'si' in datatypes:
            # si entries use the same zipped-json layout as smi.
            siPath = aux.joinpath(specfilePath, specfile + '.mrc_si')
            with zipfile.ZipFile(siPath, 'r') as containerZip:
                jsonString = io.TextIOWrapper(containerZip.open('data'),
                                              encoding='utf-8').read()
            self.sic[specfile] = json.loads(jsonString,
                                            object_hook=Si.jsonHook)
            msrunInfo['status']['si'] = True
Import the specified datatypes from mrc files on the hard disk .
55,379
def jsonHook(encoded):
    """Custom JSON decoder hook.

    Reconstructs a ``Ci``, ``MzmlProduct`` or ``MzmlPrecursor`` instance
    from a decoded JSON object carrying the matching type marker;
    objects without a marker are returned unchanged.
    """
    if '__Ci__' in encoded:
        return Ci._fromJSON(encoded['__Ci__'])
    if '__MzmlProduct__' in encoded:
        return MzmlProduct._fromJSON(encoded['__MzmlProduct__'])
    if '__MzmlPrecursor__' in encoded:
        return MzmlPrecursor._fromJSON(encoded['__MzmlPrecursor__'])
    return encoded
Custom JSON decoder that allows construction of a new Ci instance from a decoded JSON object .
55,380
def jsonHook(encoded):
    """Custom JSON decoder hook.

    Reconstructs a ``Smi``, ``MzmlScan``, ``MzmlProduct`` or
    ``MzmlPrecursor`` instance from a decoded JSON object carrying the
    matching type marker; objects without a marker are returned unchanged.
    """
    if '__Smi__' in encoded:
        return Smi._fromJSON(encoded['__Smi__'])
    if '__MzmlScan__' in encoded:
        return MzmlScan._fromJSON(encoded['__MzmlScan__'])
    if '__MzmlProduct__' in encoded:
        return MzmlProduct._fromJSON(encoded['__MzmlProduct__'])
    if '__MzmlPrecursor__' in encoded:
        return MzmlPrecursor._fromJSON(encoded['__MzmlPrecursor__'])
    return encoded
Custom JSON decoder that allows construction of a new Smi instance from a decoded JSON object .
55,381
def removeSpecfile(self, specfiles):
    """Completely remove the specified specfiles from the SiiContainer.

    :param specfiles: one specfile name or a list of names; each name
        must be present in both ``self.container`` and ``self.info``.
    """
    for name in aux.toList(specfiles):
        self.container.pop(name)
        self.info.pop(name)
Completely removes the specified specfiles from the SiiContainer .
55,382
def save(self, specfiles=None, compress=True, path=None):
    """Write the specified specfiles to ``siic`` files on the hard disk.

    :param specfiles: one or more specfile names; defaults to all
        specfiles in ``self.info``. Unknown names produce a warning and
        are skipped.
    :param compress: whether to compress the written container data.
    :param path: optional output folder; defaults to each specfile's
        own recorded path.
    """
    if specfiles is None:
        specfiles = [_ for _ in viewkeys(self.info)]
    else:
        specfiles = aux.toList(specfiles)
    for specfile in specfiles:
        if specfile not in self.info:
            warntext = 'Error while calling "SiiContainer.save()": "%s" is' ' not present in "SiiContainer.info"!' % (specfile, )
            warnings.warn(warntext)
            continue
        # FIX: use a local variable instead of rebinding the ``path``
        # parameter. The original rebound ``path`` inside the loop, so
        # with path=None every specfile after the first was written to
        # the FIRST specfile's directory instead of its own.
        targetPath = self.info[specfile]['path'] if path is None else path
        with aux.PartiallySafeReplace() as msr:
            filename = specfile + '.siic'
            filepath = aux.joinpath(targetPath, filename)
            with msr.open(filepath, mode='w+b') as openfile:
                self._writeContainer(openfile, specfile, compress)
Writes the specified specfiles to siic files on the hard disk .
55,383
def calcMz(self, specfiles=None, guessCharge=True, obsMzKey='obsMz'):
    """Calculate the exact m/z for Sii elements from ``Sii.peptide``.

    Sets ``sii.excMz`` on every item. If an item has no charge and
    ``guessCharge`` is true, the charge is estimated from the observed
    m/z and also stored on ``sii.charge``.

    :param specfiles: one or more specfile names; defaults to all
        specfiles in ``self.info``. Unknown names produce a warning.
    :param guessCharge: estimate the charge when ``sii.charge`` is None.
    :param obsMzKey: attribute name holding the observed m/z value.
    """
    # Bind helpers to locals — these run once per item in the hot loop.
    _calcMass = maspy.peptidemethods.calcPeptideMass
    _calcMzFromMass = maspy.peptidemethods.calcMzFromMass
    _massProton = maspy.constants.atomicMassProton
    _guessCharge = lambda mass, mz: round(mass / (mz - _massProton), 0)
    if specfiles is None:
        specfiles = [_ for _ in viewkeys(self.info)]
    else:
        specfiles = aux.toList(specfiles)
    # Cache peptide masses so each distinct sequence is computed once.
    tempMasses = dict()
    for specfile in specfiles:
        if specfile not in self.info:
            warntext = 'Error while calling "SiiContainer.calcMz()": ' '"%s" is not present in "SiiContainer.info"!' % (specfile, )
            warnings.warn(warntext)
        else:
            for sii in self.getItems(specfiles=specfile):
                peptide = sii.peptide
                if peptide not in tempMasses:
                    if hasattr(sii, 'diPeptide'):
                        # Cross-linked items: mass is the sum of both chains.
                        tempMasses[peptide] = (_calcMass(sii.peptide1) +
                                               _calcMass(sii.peptide2))
                    else:
                        tempMasses[peptide] = _calcMass(peptide)
                peptideMass = tempMasses[peptide]
                if sii.charge is not None:
                    sii.excMz = _calcMzFromMass(peptideMass, sii.charge)
                elif guessCharge:
                    guessedCharge = _guessCharge(peptideMass,
                                                 getattr(sii, obsMzKey))
                    sii.excMz = _calcMzFromMass(peptideMass, guessedCharge)
                    sii.charge = guessedCharge
                else:
                    sii.excMz = None
    del(tempMasses)
Calculate the exact mass for Sii elements from the Sii . peptide sequence .
55,384
def _writeContainer(self, filelike, specfile, compress):
    """Write the ``self.container`` entry of *specfile* in the fic format.

    :param filelike: binary file-like object to write to.
    :param specfile: specfile name whose container entry is serialized.
    :param compress: whether to compress the written json data.
    """
    aux.writeJsonZipfile(filelike, self.container[specfile],
                         compress=compress)
Writes the self . container entry of the specified specfile to the fic format .
55,385
def load(self, specfiles=None):
    """Import the specified ``fic`` files from the hard disk.

    :param specfiles: one or more specfile names; defaults to all
        specfiles in ``self.info``. Unknown names produce a warning and
        are skipped.
    """
    if specfiles is None:
        specfiles = [_ for _ in viewkeys(self.info)]
    else:
        specfiles = aux.toList(specfiles)
    for specfile in specfiles:
        if specfile not in self.info:
            warntext = 'Error while calling "FiContainer.load()": "%s" is' ' not present in "FiContainer.info"!' % (specfile, )
            warnings.warn(warntext)
            continue
        else:
            # fic files are json payloads stored inside a zip archive.
            fiPath = aux.joinpath(self.info[specfile]['path'],
                                  specfile + '.fic')
            with zipfile.ZipFile(fiPath, 'r') as containerZip:
                jsonString = io.TextIOWrapper(containerZip.open('data'),
                                              encoding='utf-8').read()
            self.container[specfile] = json.loads(jsonString,
                                                  object_hook=Fi.jsonHook)
Imports the specified fic files from the hard disk .
55,386
def as_dict(self):
    """Return a JSON-serializable dict representing this tree.

    Nested ``SerializableAttributesHolder`` values are converted
    recursively; underscores in top-level attribute names are replaced
    with hyphens.
    """
    def _convert(value):
        # Recursively reduce holder instances and containers to plain data.
        if isinstance(value, SerializableAttributesHolder):
            return value.as_dict()
        if isinstance(value, list):
            return [_convert(item) for item in value]
        if isinstance(value, dict):
            return {key: _convert(item) for key, item in value.items()}
        return value

    result = {}
    for key, value in self._attributes.items():
        result[key.replace('_', '-')] = _convert(value)
    return result
Returns a JSON - serializeable object representing this tree .
55,387
def from_json(cls, data):
    """Decode a JSON string and inflate a node instance via ``from_dict``.

    :param data: JSON document as a ``str``; must decode to an object.
    :returns: whatever ``cls.from_dict`` builds from the decoded dict.
    """
    assert isinstance(data, str)
    decoded = json.loads(data)
    assert isinstance(decoded, dict)
    return cls.from_dict(decoded)
Decode a JSON string and inflate a node instance .
55,388
def extract_keywords(func):
    """Parse the keyword (default-valued) parameter names of *func*.

    :param func: a function or (bound/unbound) method object.
    :returns: tuple of the positional parameter names that declare a
        default value, or an empty tuple when there are none or the
        object has no inspectable code.
    """
    # FIX: the original used Python-2-only attributes (im_func,
    # func_code, func_defaults), which always raise AttributeError on
    # Python 3; __func__/__code__/__defaults__ work on 2.6+ and 3.x.
    func = getattr(func, '__func__', func)
    code = getattr(func, '__code__', None)
    defaults = getattr(func, '__defaults__', None)
    if code is None or not defaults:
        return tuple()
    # FIX: restrict to co_argcount — co_varnames also contains locals,
    # so a bare [-n:] slice could return local variable names instead
    # of the trailing default-valued parameters.
    return code.co_varnames[:code.co_argcount][-len(defaults):]
Parses the keywords from the given function .
55,389
def jtag_enable(self):
    """Enable JTAG output on the controller.

    JTAG operations executed before this call will return useless data
    or fail.

    :raises JTAGAlreadyEnabledError: if the controller reports JTAG was
        already enabled (status 3); ``self._jtagon`` is still set True
        so the driver state matches the hardware.
    :raises JTAGEnableFailedError: on any other non-zero status code.
    """
    status, _ = self.bulkCommand(_BMSG_ENABLE_JTAG)
    if status == 0:
        self._jtagon = True
    elif status == 3:
        # Hardware is already in the desired state; record it, but
        # surface the condition to the caller via the dedicated error.
        self._jtagon = True
        raise JTAGAlreadyEnabledError()
    else:
        raise JTAGEnableFailedError("Error enabling JTAG. Error code: %s."
                                    % status)
Enables JTAG output on the controller . JTAG operations executed before this function is called will return useless data or fail .
55,390
def jtag_disable(self):
    """Disable JTAG output on the controller.

    No-op when JTAG is not currently enabled. JTAG operations executed
    after this call will return useless data or fail.

    :raises JTAGControlError: if the controller reports status 3.
    """
    if not self._jtagon:
        return
    status, _ = self.bulkCommand(_BMSG_DISABLE_JTAG)
    if status == 0:
        self._jtagon = False
    elif status == 3:
        raise JTAGControlError("Error Code %s" % status)
    # NOTE(review): for any other non-zero status, _jtagon stays True
    # but the handle is closed anyway — confirm this is intended.
    self.close_handle()
Disables JTAG output on the controller . JTAG operations executed immediately after this function will return useless data or fail .
55,391
def write_tms_tdi_bits(self, tmsdata, tdidata, return_tdo=False):
    """Write arbitrary TMS and TDI bit data to the physical scan chain.

    :param tmsdata: bit sequence for the TMS line.
    :param tdidata: bit sequence for the TDI line; must be the same
        length as ``tmsdata``.
    :param return_tdo: when True, read back and return the TDO bits
        produced by the scan chain.
    :returns: the TDO bits when ``return_tdo`` is True, else None.
    :raises Exception: if the two bit sequences differ in length.
    """
    self._check_jtag()
    if len(tmsdata) != len(tdidata):
        raise Exception("TMSdata and TDIData must be the same length")
    self._update_scanchain(tmsdata)
    count = len(tmsdata)
    t = time()
    # Interleave TMS/TDI into bit pairs, pad to whole bytes, and reverse
    # the byte order expected by the controller.
    outdata = bitarray([val for pair in zip(tmsdata, tdidata)
                        for val in pair])
    outdata = build_byte_align_buff(outdata).tobytes()[::-1]
    if self._scanchain and self._scanchain._print_statistics:
        print("TDI/TDI DATA PREP TIME", time() - t)
        t = time()
    # Announce the transfer (flags + little-endian bit count), then send
    # the payload.
    self.bulkCommandDefault(_BMSG_WRITE_TMS_TDI %
                            (return_tdo, count.to_bytes(4, 'little')))
    self.bulkWriteData(outdata)
    if self._scanchain and self._scanchain._print_statistics:
        print("TRANSFER TIME", time() - t)
        t = time()
    tdo_bits = self._read_tdo(count) if return_tdo else None
    if self._scanchain and self._scanchain._print_statistics:
        print("TDO READ TIME", time() - t)
    self._get_adv_trans_stats(0x0A, return_tdo)
    return tdo_bits
Command controller to write arbitrary TDI and TMS data to the physical scan chain . Optionally return TDO bits sent back from the scan chain .
55,392
def _readFastaFile ( filepath ) : processSequences = lambda i : '' . join ( [ s . rstrip ( ) for s in i ] ) . rstrip ( '*' ) processHeaderLine = lambda line : line [ 1 : ] . rstrip ( ) with io . open ( filepath ) as openfile : try : line = next ( openfile ) while line [ 0 ] != '>' : line = next ( openfile ) header = processHeaderLine ( line ) sequences = list ( ) except StopIteration : errorText = 'File does not contain fasta entries.' raise maspy . errors . FileFormatError ( errorText ) for line in openfile : if line [ 0 ] == '>' : yield header , processSequences ( sequences ) header = processHeaderLine ( line ) sequences = list ( ) else : sequences . append ( line ) if sequences : yield header , processSequences ( sequences )
Read a FASTA file and yields tuples of header and sequence entries .
55,393
def fastaParseSgd(header):
    """Parse a FASTA header in the SGD format (see www.yeastgenome.org).

    :param header: header string of an SGD fasta entry.
    :returns: dict with the keys ``'id'``, ``'name'`` and
        ``'description'`` (the description keeps its double quotes).
    """
    sgdPattern = '([\S]+)\s([\S]+).+(\".+\")'
    matchedGroups = re.match(sgdPattern, header).groups()
    ID, name, description = matchedGroups
    return {'id': ID, 'name': name, 'description': description}
Custom parser for fasta headers in the SGD format see www . yeastgenome . org .
55,394
def save(self, path, compress=True):
    """Write the ``.proteins`` and ``.peptides`` entries to the hard disk
    as a proteindb file.

    The file name is taken from ``self.info['name']`` and the write is
    performed atomically via ``PartiallySafeReplace``.

    :param path: output folder for the proteindb file.
    :param compress: whether to compress the written container data.
    """
    with aux.PartiallySafeReplace() as msr:
        filename = self.info['name'] + '.proteindb'
        filepath = aux.joinpath(path, filename)
        with msr.open(filepath, mode='w+b') as openfile:
            self._writeContainer(openfile, compress=compress)
Writes the . proteins and . peptides entries to the hard disk as a proteindb file .
55,395
def load(cls, path, name):
    """Import the specified proteindb file from the hard disk.

    :param path: folder containing the proteindb file.
    :param name: file name without the ``.proteindb`` extension.
    :returns: a new instance populated with the file's proteins,
        peptides and info entries.
    """
    filepath = aux.joinpath(path, name + '.proteindb')
    # The proteindb file is a zip archive with three utf-8 json members.
    with zipfile.ZipFile(filepath, 'r', allowZip64=True) as containerZip:
        proteinsString = io.TextIOWrapper(containerZip.open('proteins'),
                                          encoding='utf-8').read()
        peptidesString = io.TextIOWrapper(containerZip.open('peptides'),
                                          encoding='utf-8').read()
        infoString = io.TextIOWrapper(containerZip.open('info'),
                                      encoding='utf-8').read()
    newInstance = cls()
    newInstance.proteins = json.loads(proteinsString,
                                      object_hook=ProteinSequence.jsonHook)
    newInstance.peptides = json.loads(peptidesString,
                                      object_hook=PeptideSequence.jsonHook)
    newInstance.info.update(json.loads(infoString))
    return newInstance
Imports the specified proteindb file from the hard disk .
55,396
def fetch_keywords(codedata):
    """Fetch keywords via ``shaman.KeywordFetcher``.

    Counts keyword occurrences per language and normalizes each count
    by the number of code samples of that language.

    :param codedata: iterable of ``(language, code)`` pairs; languages
        not in ``shaman.SUPPORTING_LANGUAGES`` are skipped.
    :returns: dict mapping keyword -> {language: average frequency}.
    """
    # tmp: language -> {keyword: occurrence count}
    tmp = {}
    # language_counts: language -> number of processed code samples
    language_counts = {}
    for index, (language, code) in enumerate(codedata):
        if language not in shaman.SUPPORTING_LANGUAGES:
            continue
        if language not in tmp:
            tmp[language] = {}
            language_counts[language] = 0
        language_counts[language] += 1
        for keyword in shaman.KeywordFetcher.fetch(code):
            tmp[language][keyword] = tmp[language].get(keyword, 0) + 1
        # Progress indicator; '\r' keeps it on a single console line.
        print('Fetch keyword %d/%d ' % (index, len(codedata)), end='\r')
    # Invert the mapping to keyword -> language -> normalized frequency.
    ret = {}
    for language in tmp:
        for keyword, count in tmp[language].items():
            if keyword not in ret:
                ret[keyword] = {}
            ret[keyword][language] = (count / language_counts[language])
    print('Fetch keyword completed ')
    return ret
Fetch keywords with shaman.KeywordFetcher and compute, for each keyword, its average occurrence frequency per language.
55,397
def match_patterns(codedata):
    """Match patterns via ``shaman.PatternMatcher``.

    For every pattern, computes the average match ratio per language
    over the eligible code samples.

    :param codedata: iterable of ``(language, code)`` pairs; languages
        not in ``shaman.SUPPORTING_LANGUAGES`` and code shorter than 21
        or longer than 100000 characters are skipped.
    :returns: dict mapping pattern -> {language: average match ratio}.
    """
    ret = {}
    for index1, pattern in enumerate(shaman.PatternMatcher.PATTERNS):
        print('Matching pattern %d "%s"' % (index1 + 1, pattern))
        matcher = shaman.PatternMatcher(pattern)
        # tmp: language -> list of per-sample match ratios
        tmp = {}
        for index2, (language, code) in enumerate(codedata):
            if language not in shaman.SUPPORTING_LANGUAGES:
                continue
            if len(code) <= 20 or len(code) > 100000:
                # Skip samples that are too small or too large to rate.
                continue
            if language not in tmp:
                tmp[language] = []
            ratio = matcher.getratio(code)
            tmp[language].append(ratio)
            # Progress indicator; '\r' keeps it on a single console line.
            print('Matching patterns %d/%d ' % (index2, len(codedata)),
                  end='\r')
        ret[pattern] = {}
        # NOTE(review): the loop variable `data` is unused — the mean is
        # computed directly from tmp[language].
        for language, data in tmp.items():
            ret[pattern][language] = sum(tmp[language]) / max(len(tmp[language]), 1)
    print('Matching patterns completed ')
    return ret
Match patterns with shaman.PatternMatcher and compute the average match ratio of each pattern per language.
55,398
def facility(self, column=None, value=None, **kwargs):
    """Check information related to radiation facilities.

    Delegates to ``self._resolve_call`` against the ``RAD_FACILITY``
    table.

    :param column: optional column name to filter on.
    :param value: optional value to match in ``column``.
    :param kwargs: additional options forwarded to ``_resolve_call``.
    """
    return self._resolve_call('RAD_FACILITY', column, value, **kwargs)
Check information related to Radiation facilities .
55,399
def geo(self, column=None, value=None, **kwargs):
    """Locate a facility through geographic location.

    Delegates to ``self._resolve_call`` against the
    ``RAD_GEO_LOCATION`` table.

    :param column: optional column name to filter on.
    :param value: optional value to match in ``column``.
    :param kwargs: additional options forwarded to ``_resolve_call``.
    """
    return self._resolve_call('RAD_GEO_LOCATION', column, value, **kwargs)
Locate a facility through geographic location .