idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
7,300
def _calc_degreeminutes(decimal_degree):
    """Convert a decimal degree into (degree, minute, decimal_minute, second).

    The sign of the input (obtained via ``compare``) is re-applied to
    degree, minute and second; ``decimal_minute`` stays unsigned.
    """
    sign = compare(decimal_degree, 0)
    magnitude = abs(decimal_degree)
    whole_degree = magnitude // 1
    decimal_minute = (magnitude - whole_degree) * 60.
    whole_minute = decimal_minute // 1
    second = (decimal_minute - whole_minute) * 60.
    return (whole_degree * sign, whole_minute * sign, decimal_minute, second * sign)
Calculate degree minute second from decimal degree
7,301
def set_hemisphere(self, hemi_str):
    """Force the sign of degree/minute/second to match a hemisphere.

    'W' makes all three components negative, 'E' makes them positive;
    any other identifier raises ValueError.
    """
    if hemi_str == 'W':
        factor = -1
    elif hemi_str == 'E':
        factor = 1
    else:
        raise ValueError('Hemisphere identifier for longitudes must be E or W')
    self.degree = abs(self.degree) * factor
    self.minute = abs(self.minute) * factor
    self.second = abs(self.second) * factor
    self._update()
Given a hemisphere identifier set the sign of the coordinate to match that hemisphere
7,302
def project(self, projection):
    """Apply a basemap/pyproj-style projection callable to this point.

    Returns the projected (x, y) tuple.
    """
    lon_deg = self.lon.decimal_degree
    lat_deg = self.lat.decimal_degree
    return projection(lon_deg, lat_deg)
Return coordinates transformed to a given projection Projection should be a basemap or pyproj projection object or similar
7,303
def _pyproj_inv(self, other, ellipse='WGS84'):
    """Run pyproj's geodesic inverse between this point and ``other``.

    Returns a dict with 'heading_initial' and 'heading_reverse' in
    degrees and 'distance' in km, on the given reference ellipsoid.
    """
    lat1, lon1 = self.lat.decimal_degree, self.lon.decimal_degree
    lat2, lon2 = other.lat.decimal_degree, other.lon.decimal_degree
    g = pyproj.Geod(ellps=ellipse)
    heading_initial, heading_reverse, distance = g.inv(lon1, lat1, lon2, lat2, radians=False)
    distance = distance / 1000.0  # pyproj reports metres; convert to km
    # NOTE(review): when the initial heading is exactly 0.0 (due north) the
    # reverse heading is forced to 180.0 -- presumably a workaround for a
    # degenerate pyproj result; confirm against pyproj.Geod.inv behavior.
    if heading_initial == 0.0:
        heading_reverse = 180.0
    return {'heading_initial': heading_initial, 'heading_reverse': heading_reverse, 'distance': distance}
Perform Pyproj's inv operation on two LatLon objects. Returns the initial and reverse headings in degrees and the distance in km.
7,304
def to_string(self, formatter='D'):
    """Format latitude and longitude with ``formatter`` and return them
    as a (lat_string, lon_string) tuple."""
    lat_repr = self.lat.to_string(formatter)
    lon_repr = self.lon.to_string(formatter)
    return (lat_repr, lon_repr)
Return string representation of lat and lon as a 2 - element tuple using the format specified by formatter
7,305
def _sub_latlon(self, other):
    """Implement LatLon - LatLon: the GeoVector pointing from ``other``
    to this point (reverse heading of the geodesic inverse)."""
    inverse = self._pyproj_inv(other)
    return GeoVector(initial_heading=inverse['heading_reverse'],
                     distance=inverse['distance'])
Called when subtracting a LatLon object from self
7,306
def _update(self):
    """Recompute ``heading`` and ``magnitude`` from ``dx``/``dy``.

    NOTE(review): if dx and dy are both 0 the except branch leaves
    ``theta_radians`` unbound and math.degrees raises -- a zero-length
    vector is presumably never constructed; confirm.
    """
    try:
        theta_radians = math.atan(float(self.dy) / self.dx)
    except ZeroDivisionError:
        # Vertical vector: heading straight up (pi/2) or down (3pi/2).
        if self.dy > 0:
            theta_radians = 0.5 * math.pi
        elif self.dy < 0:
            theta_radians = 1.5 * math.pi
        self.magnitude = self.dy
    else:
        # Recover the hypotenuse length from dx and the angle.
        self.magnitude = 1. / (math.cos(theta_radians)) * self.dx
    theta = math.degrees(theta_radians)
    self.heading = self._angle_or_heading(theta)
Calculate heading and distance from dx and dy
7,307
def _authstr(self, auth):
    """Return a stable, hashable string form of ``auth``.

    Dicts render as '{k1:v1,k2:v2}' with keys sorted so equal dicts
    always produce the same string; anything else passes through.
    """
    if type(auth) is dict:
        pairs = ["{0}:{1}".format(key, auth[key]) for key in sorted(auth.keys())]
        return '{' + ','.join(pairs) + '}'
    return auth
Convert auth to str so that it can be hashed
7,308
def _call(self, method, auth, arg, defer, notimeout=False):
    """Invoke one Exosite One Platform RPC method.

    When ``defer`` is true the call is queued on ``self.deferred`` and
    True is returned; otherwise it is composed and sent immediately.
    """
    if defer:
        self.deferred.add(auth, method, arg, notimeout=notimeout)
        return True
    calls = self._composeCalls([(method, arg)])
    return self._callJsonRPC(auth, calls, notimeout=notimeout)
Calls the Exosite One Platform RPC API .
7,309
def create(self, auth, type, desc, defer=False):
    """Create a One Platform resource of ``type`` described by ``desc``."""
    arguments = [type, desc]
    return self._call('create', auth, arguments, defer)
Create something in Exosite .
7,310
def drop(self, auth, resource, defer=False):
    """Delete ``resource`` from the One Platform."""
    arguments = [resource]
    return self._call('drop', auth, arguments, defer)
Deletes the specified resource .
7,311
def flush(self, auth, resource, options=None, defer=False):
    """Empty ``resource`` of data, honouring optional flush constraints."""
    arguments = [resource] if options is None else [resource, options]
    return self._call('flush', auth, arguments, defer)
Empties the specified resource of data per specified constraints .
7,312
def grant(self, auth, resource, permissions, ttl=None, defer=False):
    """Grant ``permissions`` on ``resource`` and return an access token.

    When ``ttl`` is given it is forwarded as {'ttl': ttl}.
    """
    arguments = [resource, permissions]
    if ttl is not None:
        arguments = arguments + [{"ttl": ttl}]
    return self._call('grant', auth, arguments, defer)
Grant resources with specific permissions and return a token .
7,313
def info(self, auth, resource, options=None, defer=False):
    """Request creation/usage information for ``resource``.

    ``options`` selects which info sections are returned; it defaults to
    the empty dict. The default is built per call to avoid the shared
    mutable-default-argument pitfall; passing {} explicitly is unchanged.
    """
    if options is None:
        options = {}
    return self._call('info', auth, [resource, options], defer)
Request creation and usage information of specified resource according to the specified options .
7,314
def listing(self, auth, types, options=None, resource=None, defer=False):
    """List resources by type.

    Keeps backward compatibility with two deprecated call signatures;
    the current (non-deprecated) API requires both ``options`` and
    ``resource``.
    """
    if options is None:
        arguments = [types]             # oldest deprecated form
    elif resource is None:
        arguments = [types, options]    # deprecated form
    else:
        arguments = [resource, types, options]
    return self._call('listing', auth, arguments, defer)
This provides backward compatibility with two previous variants of listing . To use the non - deprecated API pass both options and resource .
7,315
def map(self, auth, resource, alias, defer=False):
    """Create ``alias`` as an alias for ``resource``."""
    arguments = ['alias', resource, alias]
    return self._call('map', auth, arguments, defer)
Creates an alias for a resource .
7,316
def move(self, auth, resource, destinationresource, options=None, defer=False):
    """Move ``resource`` under a different parent client.

    ``options`` defaults to {'aliases': True} (carry aliases along). The
    default is built per call instead of sharing one mutable dict across
    invocations (mutable-default-argument pitfall).
    """
    if options is None:
        options = {"aliases": True}
    return self._call('move', auth, [resource, destinationresource, options], defer)
Moves a resource from one parent client to another .
7,317
def revoke(self, auth, codetype, code, defer=False):
    """Revoke the entity tied to an activation code, invalidating the
    code for further use."""
    arguments = [codetype, code]
    return self._call('revoke', auth, arguments, defer)
Given an activation code the associated entity is revoked after which the activation code can no longer be used .
7,318
def share(self, auth, resource, options=None, defer=False):
    """Generate a share code for ``resource``.

    ``options`` defaults to an empty dict, created per call to avoid the
    mutable-default-argument pitfall.
    """
    if options is None:
        options = {}
    return self._call('share', auth, [resource, options], defer)
Generates a share code for the given resource .
7,319
def update(self, auth, resource, desc=None, defer=False):
    """Update the description of ``resource``.

    ``desc`` defaults to an empty dict, created per call to avoid the
    mutable-default-argument pitfall.
    """
    if desc is None:
        desc = {}
    return self._call('update', auth, [resource, desc], defer)
Updates the description of the resource .
7,320
def usage(self, auth, resource, metric, starttime, endtime, defer=False):
    """Return ``metric`` usage for a client and its subhierarchy over
    the [starttime, endtime] window."""
    window = [resource, metric, starttime, endtime]
    return self._call('usage', auth, window, defer)
Returns metric usage for client and its subhierarchy .
7,321
def wait(self, auth, resource, options, defer=False):
    """HTTP long-poll on ``resource`` until it is updated.

    Sent with notimeout=True because the request is expected to block.
    """
    arguments = [resource, options]
    return self._call('wait', auth, arguments, defer, notimeout=True)
This is a HTTP Long Polling API which allows a user to wait on specific resources to be updated .
7,322
def write(self, auth, resource, value, options=None, defer=False):
    """Write a single ``value`` to ``resource``.

    ``options`` defaults to an empty dict, created per call to avoid the
    mutable-default-argument pitfall.
    """
    if options is None:
        options = {}
    return self._call('write', auth, [resource, value, options], defer)
Writes a single value to the resource specified .
7,323
def writegroup(self, auth, entries, defer=False):
    """Write all (resource, value) entries at once, sharing one
    timestamp."""
    arguments = [entries]
    return self._call('writegroup', auth, arguments, defer)
Writes the given values for the respective resources in the list all writes have same timestamp .
7,324
def bm3_p(v, v0, k0, k0p, p_ref=0.0):
    """Pressure at volume ``v`` from the 3rd-order Birch-Murnaghan EOS
    (thin wrapper over cal_p_bm3)."""
    eos_params = [v0, k0, k0p]
    return cal_p_bm3(v, eos_params, p_ref=p_ref)
calculate pressure from the 3rd-order Birch-Murnaghan equation
7,325
def cal_p_bm3(v, k, p_ref=0.0):
    """Pressure from the 3rd-order Birch-Murnaghan EOS.

    k = [v0, k0, k0p]; ``p_ref`` is the reference pressure at v = v0.
    """
    v0, k0, k0p = k[0], k[1], k[2]
    x = (v / v0) ** (-2. / 3.)  # compression variable
    term1 = -0.5 * (3. * k0 - 5. * p_ref) * (1. - x)
    term2 = 9. / 8. * k0 * (k0p - 4. + 35. / 9. * p_ref / k0) * (1. - x) ** 2.
    return (p_ref + term1 + term2) * (v / v0) ** (-5. / 3.)
calculate pressure from 3rd order Birch - Murnaghan equation
7,326
def bm3_v_single(p, v0, k0, k0p, p_ref=0.0, min_strain=0.01):
    """Invert the BM3 EOS for volume at a single pressure ``p``.

    Uses scipy.optimize.brenth bracketed between v0 and v0*min_strain;
    not vectorized and cannot handle uncertainties. Pressures at or
    below 1e-5 short-circuit to v0.
    """
    if p <= 1.e-5:
        return v0

    def pressure_mismatch(v, v0, k0, k0p, p, p_ref=0.0):
        # Root of this function is the volume where bm3_p equals p.
        return bm3_p(v, v0, k0, k0p, p_ref=p_ref) - p

    return brenth(pressure_mismatch, v0, v0 * min_strain,
                  args=(v0, k0, k0p, p, p_ref))
find volume at given pressure using brenth in scipy . optimize this is for single p value not vectorized this cannot handle uncertainties
7,327
def bm3_k(p, v0, k0, k0p):
    """Bulk modulus at pressure ``p`` (BM3); wrapper over cal_k_bm3.
    Cannot handle uncertainties."""
    eos_params = [v0, k0, k0p]
    return cal_k_bm3(p, eos_params)
calculate bulk modulus wrapper for cal_k_bm3 cannot handle uncertainties
7,328
def cal_k_bm3(p, k):
    """Bulk modulus at pressure ``p`` for BM3 parameters k = [v0, k0, k0p]."""
    volume = cal_v_bm3(p, k)
    return cal_k_bm3_from_v(volume, k)
calculate bulk modulus
7,329
def bm3_g(p, v0, g0, g0p, k0, k0p):
    """Shear modulus at pressure ``p`` (BM3); wrapper over cal_g_bm3."""
    shear_params = [g0, g0p]
    eos_params = [v0, k0, k0p]
    return cal_g_bm3(p, shear_params, eos_params)
calculate shear modulus at given pressure . not fully tested with mdaap .
7,330
def cal_g_bm3(p, g, k):
    """Shear modulus at pressure ``p`` from finite-strain (BM3) theory.

    g = [g0, g0p], k = [v0, k0, k0p].
    """
    v = cal_v_bm3(p, k)
    v0, k0, kp = k[0], k[1], k[2]
    g0, gp = g[0], g[1]
    f = 0.5 * ((v / v0) ** (-2. / 3.) - 1.)  # Eulerian finite strain
    prefactor = (1. + 2. * f) ** (5. / 2.)
    linear = (3. * k0 * gp - 5. * g0) * f
    quadratic = (6. * k0 * gp - 24. * k0 - 14. * g0 + 9. / 2. * k0 * kp) * f ** 2.
    return prefactor * (g0 + linear + quadratic)
calculate shear modulus at given pressure
7,331
def bm3_big_F(p, v, v0):
    """Normalized pressure F for the linearized BM3 (F-f) analysis."""
    strain = bm3_small_f(v, v0)
    return cal_big_F(p, strain)
calculate big F for linearlized form not fully tested
7,332
def init_poolmanager(self, connections, maxsize, block=requests.adapters.DEFAULT_POOLBLOCK, **pool_kwargs):
    """Initialize the urllib3 pool manager with our cipher list and
    TLSv1 forced as the SSL version."""
    tls_context = create_urllib3_context(ciphers=self.CIPHERS,
                                         ssl_version=ssl.PROTOCOL_TLSv1)
    pool_kwargs['ssl_context'] = tls_context
    return super(TLSv1Adapter, self).init_poolmanager(connections, maxsize,
                                                      block, **pool_kwargs)
Initialize poolmanager with cipher and Tlsv1
7,333
def proxy_manager_for(self, proxy, **proxy_kwargs):
    """Build the proxy manager with our cipher list and TLSv1 forced."""
    tls_context = create_urllib3_context(ciphers=self.CIPHERS,
                                         ssl_version=ssl.PROTOCOL_TLSv1)
    proxy_kwargs['ssl_context'] = tls_context
    return super(TLSv1Adapter, self).proxy_manager_for(proxy, **proxy_kwargs)
Ensure cipher and Tlsv1
7,334
def parse_stdout(self, filelike):
    """Parse "<datablock> <formula>" lines from the script's stdout.

    Returns an exit code for empty or malformed output; otherwise
    attaches the datablock->formula mapping as the 'formulae' output.
    """
    from aiida.orm import Dict
    formulae = {}
    content = filelike.read().strip()
    if not content:
        return self.exit_codes.ERROR_EMPTY_OUTPUT_FILE
    try:
        for line in content.split('\n'):
            # First whitespace run separates the datablock name from the
            # formula; maxsplit keeps spaces inside the formula intact.
            # (Keyword form: positional maxsplit is deprecated in 3.13.)
            datablock, formula = re.split(r'\s+', line.strip(), maxsplit=1)
            formulae[datablock] = formula
    except Exception:
        self.logger.exception('Failed to parse formulae from the stdout file\n%s', traceback.format_exc())
        return self.exit_codes.ERROR_PARSING_OUTPUT_DATA
    else:
        self.out('formulae', Dict(dict=formulae))
    return
Parse the formulae from the content written by the script to standard out .
7,335
def make_features(context, frmat='table', str_maximal=False):
    """Build a new FeatureSystem from a context string in format
    ``frmat``."""
    cfg = Config.create(context=context, format=frmat, str_maximal=str_maximal)
    return FeatureSystem(cfg)
Return a new feature system from context string in the given format .
7,336
def vinet_p(v, v0, k0, k0p):
    """Pressure at volume ``v`` from the Vinet EOS; propagates
    uncertainties when any input carries them."""
    eos_params = [v0, k0, k0p]
    return cal_p_vinet(v, eos_params,
                       uncertainties=isuncertainties([v, v0, k0, k0p]))
calculate pressure from vinet equation
7,337
def vinet_v_single(p, v0, k0, k0p, min_strain=0.01):
    """Invert the Vinet EOS for volume at a single pressure ``p``.

    Not vectorized; pressures at or below 1e-5 short-circuit to v0.
    """
    if p <= 1.e-5:
        return v0

    def pressure_mismatch(v, v0, k0, k0p, p):
        return vinet_p(v, v0, k0, k0p) - p

    return brenth(pressure_mismatch, v0, v0 * min_strain,
                  args=(v0, k0, k0p, p))
find volume at given pressure using brenth in scipy . optimize this is for single p value not vectorized
7,338
def vinet_v(p, v0, k0, k0p, min_strain=0.01):
    """Volume at pressure ``p`` from the Vinet EOS.

    Vectorizes vinet_v_single over ``p``; wraps it for uncertainty
    propagation when any input carries uncertainties.
    """
    if isuncertainties([p, v0, k0, k0p]):
        solver = np.vectorize(uct.wrap(vinet_v_single), excluded=[1, 2, 3, 4])
    else:
        solver = np.vectorize(vinet_v_single, excluded=[1, 2, 3, 4])
    return solver(p, v0, k0, k0p, min_strain=min_strain)
find volume at given pressure
7,339
def vinet_k(p, v0, k0, k0p, numerical=False):
    """Bulk modulus at pressure ``p`` via cal_k_vinet.

    NOTE(review): ``numerical`` is accepted but never used, and the call
    is wrapped with uct.wrap even though the original description says it
    cannot handle uncertainties -- confirm the intended behavior.
    """
    f_u = uct.wrap(cal_k_vinet)
    return f_u(p, [v0, k0, k0p])
calculate bulk modulus wrapper for cal_k_vinet cannot handle uncertainties
7,340
def user_portals_picker(self):
    """Interactively prompt the user to pick one of their portals.

    Loops until a valid numeric choice is entered, then stores the
    chosen portal's name and id. Previously crashed with ValueError on
    non-numeric input; such input is now rejected with the same
    "not a valid choice" message.
    """
    portals = self.get_portals_list()
    done = False
    while not done:
        opts = [(i, p) for i, p in enumerate(portals)]
        for opt, portal in opts:
            print("\t{0} - {1}".format(opt, portal[1]))
        valid_choices = [o[0] for o in opts]
        choice = _input("Enter choice ({0}): ".format(valid_choices))
        try:
            selected = int(choice)
        except ValueError:
            selected = None  # non-numeric input falls through to the error path
        if selected in valid_choices:
            done = True
            self.set_portal_name(opts[selected][1][1])
            self.set_portal_id(opts[selected][1][0])
        else:
            print("'{0}' is not a valid choice. Please choose from {1}".format(choice, valid_choices))
This function is broken and needs to either be fixed or discarded .
7,341
def get_portal_by_name(self, portal_name):
    """Make the portal named ``portal_name`` the active one.

    Stores its name, id and cik on success and returns the portal tuple;
    returns None when no portal matches.
    """
    for portal in self.get_portals_list():
        if portal[1] != portal_name:
            continue
        self.set_portal_name(portal[1])
        self.set_portal_id(portal[0])
        self.set_portal_cik(portal[2][1]['info']['key'])
        return portal
    return None
Set active portal according to the name passed in portal_name .
7,342
def delete_device(self, rid):
    """Delete the device with resource id ``rid`` via the Portals API.

    Returns True on HTTP 204 No Content; otherwise prints the error and
    raises via raise_for_status().
    """
    headers = {'User-Agent': self.user_agent(),
               'Content-Type': self.content_type()}
    headers.update(self.headers())
    response = requests.delete(self.portals_url() + '/devices/' + rid,
                               headers=headers, auth=self.auth())
    if response.status_code == HTTP_STATUS.NO_CONTENT:
        print("Successfully deleted device with rid: {0}".format(rid))
        return True
    print("Something went wrong: <{0}>: {1}".format(response.status_code, response.reason))
    response.raise_for_status()
    return False
Deletes device object with given rid
7,343
def list_device_data_sources(self, device_rid):
    """Return the JSON list of data sources for device ``device_rid``,
    or None on a non-200 response."""
    headers = {'User-Agent': self.user_agent(), }
    headers.update(self.headers())
    url = self.portals_url() + '/devices/' + device_rid + '/data-sources'
    response = requests.get(url, headers=headers, auth=self.auth())
    if response.status_code == HTTP_STATUS.OK:
        return response.json()
    print("Something went wrong: <{0}>: {1}".format(response.status_code, response.reason))
    return None
List data sources of a portal device with rid device_rid .
7,344
def get_data_source_bulk_request(self, rids, limit=5):
    """Fetch up to ``limit`` datapoints for every data source rid in
    ``rids`` with one bulk request; returns {} on error."""
    headers = {'User-Agent': self.user_agent(),
               'Content-Type': self.content_type()}
    headers.update(self.headers())
    url = (self.portals_url() + '/data-sources/[' + ",".join(rids) +
           ']/data?limit=' + str(limit))
    response = requests.get(url, headers=headers, auth=self.auth())
    if response.status_code == HTTP_STATUS.OK:
        return response.json()
    print("Something went wrong: <{0}>: {1}".format(response.status_code, response.reason))
    return {}
This grabs each datasource and its multiple datapoints for a particular device .
7,345
def get_all_devices_in_portal(self):
    """Fetch every device object in the active portal.

    Reads the portal's alias rids, queries them 10 at a time through
    get_multiple_devices, then normalizes each device's metadata.
    """
    rids = self.get_portal_by_name(self.portal_name())[2][1]['info']['aliases']
    device_rids = [rid.strip() for rid in rids]
    devices = []
    for start in range(0, len(device_rids), 10):
        batch = device_rids[start:start + 10]
        retval = self.get_multiple_devices(batch)
        if retval is not None:
            devices.extend(retval)
        else:
            print("Not adding to device list: {!r}".format(retval))
    for device in devices:
        dictify_device_meta(device)
    return devices
This loops through the get_multiple_devices method 10 rids at a time .
7,346
def map_aliases_to_device_objects(self):
    """Attach each device's Portals aliases to the device object.

    A device object knows its rid but not its aliases; the portal object
    maps rid -> aliases. The portal lookup is loop-invariant, so it is
    now computed once instead of once per device as before.
    """
    all_devices = self.get_all_devices_in_portal()
    aliases = self.get_portal_by_name(self.portal_name())[2][1]['info']['aliases']
    for dev_o in all_devices:
        dev_o['portals_aliases'] = aliases[dev_o['rid']]
    return all_devices
A device object knows its rid but not its alias . A portal object knows its device rids and aliases .
7,347
def search_for_devices_by_serial_number(self, sn):
    """Return all portal devices whose serial number matches the regex
    ``sn`` (re.match semantics: anchored at the start)."""
    import re
    pattern = re.compile(sn)
    matches = []
    for dev_o in self.get_all_devices_in_portal():
        try:
            if pattern.match(dev_o['sn']):
                matches.append(dev_o)
        except TypeError as err:
            # Devices whose 'sn' is not a string are reported, not fatal.
            print("Problem checking device {!r}: {!r}".format(
                dev_o['info']['description']['name'], str(err)))
    return matches
Returns a list of device objects that match the serial number in param sn .
7,348
def print_device_list(self, device_list=None):
    """Print name, serial number and alias for each device.

    Falls back to all devices in the active portal when ``device_list``
    is omitted. Single-element alias lists print as the bare alias.
    """
    devices = device_list if device_list is not None else self.get_all_devices_in_portal()
    for dev in devices:
        aliases = dev['portals_aliases']
        if len(aliases) == 1:
            aliases = aliases[0]
        print('{0}\t\t{1}\t\t{2}'.format(dev['info']['description']['name'],
                                         dev['sn'], aliases))
Optional parameter is a list of device objects . If omitted will just print all portal devices objects .
7,349
def print_sorted_device_list(self, device_list=None, sort_key='sn'):
    """Print the device list sorted by 'sn', 'name' or 'portals_aliases'.

    Unknown sort keys print a warning and an empty device list.
    NOTE(review): devices whose key is None are dropped, and duplicate
    key values make every matching device reappear once per duplicate --
    confirm this is intended before relying on the output.
    """
    dev_list = device_list if device_list is not None else self.get_all_devices_in_portal()
    sorted_dev_list = []
    if sort_key == 'sn':
        sort_keys = [k[sort_key] for k in dev_list if k[sort_key] is not None]
        sort_keys = sorted(sort_keys)
        for key in sort_keys:
            sorted_dev_list.extend([d for d in dev_list if d['sn'] == key])
    elif sort_key == 'name':
        # Device name lives nested under info/description.
        sort_keys = [k['info']['description'][sort_key] for k in dev_list if k['info']['description'][sort_key] is not None]
        sort_keys = sorted(sort_keys)
        for key in sort_keys:
            sorted_dev_list.extend([d for d in dev_list if d['info']['description'][sort_key] == key])
    elif sort_key == 'portals_aliases':
        sort_keys = [k[sort_key] for k in dev_list if k[sort_key] is not None]
        sort_keys = sorted(sort_keys)
        for key in sort_keys:
            sorted_dev_list.extend([d for d in dev_list if d[sort_key] == key])
    else:
        print("Sort key {!r} not recognized.".format(sort_key))
        sort_keys = None
    self.print_device_list(device_list=sorted_dev_list)
Takes in a sort key and prints the device list according to that sort .
7,350
def get_user_id_from_email(self, email):
    """Look up a user id by email via the get-all-user-accounts Portals
    API; returns None when no account matches."""
    for account in self.get_all_user_accounts():
        if account['email'] == email:
            return account['id']
    return None
Uses the get - all - user - accounts Portals API to retrieve the user - id by supplying an email .
7,351
def get_user_permission_from_email(self, email):
    """Return the permissions object of the user with ``email``."""
    user_id = self.get_user_id_from_email(email)
    return self.get_user_permission(user_id)
Returns a user's permissions object when given the user's email.
7,352
def add_dplist_permission_for_user_on_portal(self, user_email, portal_id):
    """Grant the 'd_p_list' permission on ``portal_id`` to the user with
    ``user_email``.

    Prints the user's permissions before and after for inspection.
    """
    user_id = self.get_user_id_from_email(user_email)
    print(self.get_user_permission_from_email(user_email))
    permission = [{'access': 'd_p_list',
                   'oid': {'id': portal_id, 'type': 'Portal'}}]
    retval = self.add_user_permission(user_id, json.dumps(permission))
    print(self.get_user_permission_from_email(user_email))
    return retval
Adds the d_p_list permission to a user object when provided a user_email and portal_id .
7,353
def get_portal_cik(self, portal_name):
    """Return the cik (key) of the portal named ``portal_name``."""
    portal = self.get_portal_by_name(portal_name)
    return portal[2][1]['info']['key']
Retrieves portal object according to portal_name and returns its cik .
7,354
def init_write_index(es_write, es_write_index):
    """Drop and recreate the Elasticsearch write index with the git
    mapping.

    Deleting ignores HTTP 400/404 so a missing index is not an error.
    """
    # Lazy %s formatting instead of eager concatenation (logging best practice).
    logging.info("Initializing index: %s", es_write_index)
    es_write.indices.delete(es_write_index, ignore=[400, 404])
    es_write.indices.create(es_write_index, body=MAPPING_GIT)
Initializes ES write index
7,355
def enrich(self, column1, column2):
    """Duplicate commits whose ``column1`` and ``column2`` values differ.

    For each such commit a copy is appended in which ``column1`` is
    overwritten with the ``column2`` value; the enlarged dataframe is
    returned (original rows keep their indexes).
    """
    import pandas
    if column1 not in self.commits.columns or column2 not in self.commits.columns:
        return self.commits
    # .copy() so we mutate a real frame, not a view of self.commits
    # (the original assignment triggered SettingWithCopyWarning).
    pair_df = self.commits[self.commits[column1] != self.commits[column2]].copy()
    pair_df[column1] = list(pair_df[column2])
    # DataFrame.append was removed in pandas 2.0; concat is the equivalent.
    return pandas.concat([self.commits, pair_df])
This class splits those commits where column1 and column2 values are different
7,356
def enrich ( self , column ) : if column not in self . data : return self . data self . data [ "filetype" ] = 'Other' reg = "\.c$|\.h$|\.cc$|\.cpp$|\.cxx$|\.c\+\+$|\.cp$|\.py$|\.js$|\.java$|\.rs$|\.go$" self . data . loc [ self . data [ column ] . str . contains ( reg ) , 'filetype' ] = 'Code' return self . data
This method adds a new column depending on the extension of the file .
7,357
def enrich(self, column, projects):
    """Left-join ``projects`` onto the data by ``column``, adding a
    project column; unchanged when ``column`` is absent."""
    if column not in self.data.columns:
        return self.data
    merged = pandas.merge(self.data, projects, how='left', on=column)
    self.data = merged
    return self.data
This method adds a new column named as project that contains information about the associated project that the event in column belongs to .
7,358
def __parse_flags(self, body):
    """Extract review flags and their values from a message body.

    Each line is matched against every regex in self.FLAGS_REGEX;
    matching flag names and their captured 'value' groups are collected
    in parallel lists.

    NOTE(review): when nothing matches, the empty *string* "" is
    returned for both flags and values instead of the empty lists --
    callers appear to rely on this; confirm before changing.
    """
    flags = []
    values = []
    lines = body.split('\n')
    for l in lines:
        for name in self.FLAGS_REGEX:
            m = re.match(self.FLAGS_REGEX[name], l)
            if m:
                flags.append(name)
                values.append(m.group("value").strip())
    if flags == []:
        flags = ""
        values = ""
    return flags, values
Parse flags from a message
7,359
def enrich(self, column):
    """Add a 'domain' column derived from the email addresses found in
    ``column``; unchanged when the column is absent."""
    if column not in self.data.columns:
        return self.data
    domains = self.data[column].apply(lambda value: self.__parse_email(value))
    self.data['domain'] = domains
    return self.data
This enricher returns the same dataframe with a new column named domain . That column is the result of splitting the email address of another column . If there is not a proper email address an unknown domain is returned .
7,360
def __remove_surrogates(self, s, method='replace'):
    """Return ``s`` with surrogate escapes removed.

    Single-element lists are unwrapped and cleaned; other lists and
    non-strings collapse to "". Strings without surrogates pass through
    untouched. (Uses isinstance instead of type()== comparisons; str/list
    subclasses are now treated like their base type.)
    """
    if isinstance(s, list) and len(s) == 1:
        if self.__is_surrogate_escaped(s[0]):
            return s[0].encode('utf-8', method).decode('utf-8')
        return ""
    if isinstance(s, list):
        return ""
    if not isinstance(s, str):
        return ""
    if self.__is_surrogate_escaped(s):
        return s.encode('utf-8', method).decode('utf-8')
    return s
Remove surrogates in the specified string
7,361
def __is_surrogate_escaped(self, text):
    """Return True when ``text`` contains surrogate-escaped code points
    (i.e. it cannot be encoded to UTF-8)."""
    try:
        text.encode('utf-8')
        return False
    except UnicodeEncodeError as err:
        return err.reason == 'surrogates not allowed'
Checks if surrogate is escaped
7,362
def enrich(self, columns):
    """Re-encode the given columns to clean UTF-8 (surrogates removed).

    Returns the data unchanged if any requested column is missing.
    """
    if any(column not in self.data.columns for column in columns):
        return self.data
    for column in columns:
        cleaned = self.data[column].apply(self.__remove_surrogates)
        self.data[column] = cleaned
    return self.data
This method convert to utf - 8 the provided columns
7,363
def __parse_addr(self, addr):
    """Split an RFC 2822 address into its (realname, email) parts."""
    from email.utils import parseaddr
    realname, email_addr = parseaddr(addr)
    return realname, email_addr
Parse email addresses
7,364
def enrich(self, columns):
    """Explode list-valued ``columns`` into additional rows.

    For every row whose first listed column holds a non-empty list, a
    block of copies of that row is appended at the end of the dataframe,
    one copy per list element, with the listed columns replaced by the
    individual elements. Returns the data unchanged if any requested
    column is missing.

    NOTE(review): DataFrame.append used here was removed in pandas 2.0;
    the row-replication relies on the cells being list-like -- confirm
    before porting.
    """
    for column in columns:
        if column not in self.data.columns:
            return self.data
    first_column = list(self.data[columns[0]])
    count = 0
    append_df = pandas.DataFrame()
    for cell in first_column:
        if len(cell) >= 1:
            df = pandas.DataFrame()
            for column in columns:
                # Seed a frame with one row per element of this cell.
                df[column] = self.data.loc[count, column]
            # Replicate the full original row len(df) times...
            extra_df = pandas.DataFrame([self.data.loc[count]] * len(df))
            for column in columns:
                # ...then overwrite the exploded columns element-wise.
                extra_df[column] = list(df[column])
            append_df = append_df.append(extra_df, ignore_index=True)
            extra_df = pandas.DataFrame()
        count = count + 1
    self.data = self.data.append(append_df, ignore_index=True)
    return self.data
This method appends at the end of the dataframe as many rows as items are found in the lists of elements in the provided columns.
7,365
def enrich(self, columns, groupby):
    """Add max_<col> and min_<col> columns per ``groupby`` group (the
    usual SQL GROUP BY max/min), merged back onto every row.

    Returns the data unchanged if any requested column is missing. The
    previously duplicated max/min code paths are now one parameterized
    loop.
    """
    for column in columns:
        if column not in self.data.columns:
            return self.data
    for column in columns:
        # One aggregation pass per extreme, merged back by group key.
        for agg in ('max', 'min'):
            df_grouped = self.data.groupby([groupby]).agg({column: agg})
            df_grouped = df_grouped.reset_index()
            df_grouped.rename(columns={column: agg + '_' + column}, inplace=True)
            self.data = pandas.merge(self.data, df_grouped, how='left', on=[groupby])
    return self.data
This method calculates the maximum and minimum value of a given set of columns depending on another column . This is the usual group by clause in SQL .
7,366
def enrich(self, column):
    """Add gender columns for the first name found in ``column``.

    Looks names up through the genderize.io API (self.connection), with
    self.gender acting as a local cache. Adds gender_analyzed_name,
    gender_probability, gender and gender_count columns.
    """
    if column not in self.data.columns:
        return self.data
    # First whitespace-separated token is taken as the first name.
    splits = self.data[column].str.split(" ")
    splits = splits.str[0]
    self.data["gender_analyzed_name"] = splits.fillna("noname")
    self.data["gender_probability"] = 0
    self.data["gender"] = "Unknown"
    self.data["gender_count"] = 0
    names = list(self.data["gender_analyzed_name"].unique())
    for name in names:
        if name in self.gender.keys():
            gender_result = self.gender[name]
        else:
            try:
                # Remote lookup; on failure the defaults stay in place.
                gender_result = self.connection.get([name])[0]
            except Exception:
                continue
            self.gender[name] = gender_result
        if gender_result["gender"] is None:
            gender_result["gender"] = "NotKnown"
        self.data.loc[self.data["gender_analyzed_name"] == name, 'gender'] = gender_result["gender"]
        if "probability" in gender_result.keys():
            self.data.loc[self.data["gender_analyzed_name"] == name, 'gender_probability'] = gender_result["probability"]
            self.data.loc[self.data["gender_analyzed_name"] == name, 'gender_count'] = gender_result["count"]
    # NOTE(review): fillna is not in-place here, so its result is
    # discarded -- confirm whether `self.data = self.data.fillna(...)`
    # was intended.
    self.data.fillna("noname")
    return self.data
This method determines the gender of a given name using the genderize.io API.
7,367
def enrich(self, columns):
    """Left-join the uuids dataframe onto the data by ``columns``,
    filling missing entries with "notavailable"; unchanged when any
    requested column is missing."""
    if any(column not in self.data.columns for column in columns):
        return self.data
    merged = pandas.merge(self.data, self.uuids_df, how='left', on=columns)
    self.data = merged.fillna("notavailable")
    return self.data
Merges the original dataframe with corresponding entity uuids based on the given columns . Also merges other additional information associated to uuids provided in the uuids dataframe if any .
7,368
def echo_utc(string):
    """Echo ``string`` prefixed with the current UTC time in ISO format."""
    from datetime import datetime
    timestamp = datetime.utcnow().isoformat()
    click.echo('{} | {}'.format(timestamp, string))
Echo the string to standard out prefixed with the current date and time in UTC format .
7,369
def from_string(cls, string):
    """Parse one string of command line parameters into a new instance.

    None is treated as the empty string. Tokens are shlex-split; each
    '-'/'--' option captures the following token as its value, or True
    when the next token is another option (i.e. a bare flag).
    """
    if string is None:
        string = ''
    if not isinstance(string, six.string_types):
        raise TypeError('string has to be a string type, got: {}'.format(type(string)))
    dictionary = {}
    tokens = [token.strip() for token in shlex.split(string)]

    def list_tuples(some_iterable):
        # Pair each token with its successor; the final pair is (last, None).
        items, nexts = itertools.tee(some_iterable, 2)
        nexts = itertools.chain(itertools.islice(nexts, 1, None), [None])
        return list(zip(items, nexts))

    for token_current, token_next in list_tuples(tokens):
        if not token_current.startswith('-'):
            continue  # plain value; it was consumed as the previous flag's value
        if not token_next or token_next.startswith('-'):
            dictionary[token_current.lstrip('-')] = True  # bare flag
        else:
            dictionary[token_current.lstrip('-')] = token_next
    return cls.from_dictionary(dictionary)
Parse a single string representing all command line parameters .
7,370
def from_dictionary(cls, dictionary):
    """Build an instance from a dict of command line parameters.

    Raises TypeError for any non-dict input.
    """
    if isinstance(dictionary, dict):
        return cls(dictionary)
    raise TypeError('dictionary has to be a dict type, got: {}'.format(type(dictionary)))
Parse a dictionary representing all command line parameters .
7,371
def get_list(self):
    """Render the stored parameters as a flat command line token list.

    None values are skipped; single-character keys get '-', longer keys
    '--'. List values repeat the option; True booleans emit just the
    flag, False booleans are dropped entirely; values containing a
    space are single-quoted.
    """
    result = []
    for key, value in self.parameters.items():
        if value is None:
            continue
        if not isinstance(value, list):
            value = [value]  # normalize scalars to one-element lists
        if len(key) == 1:
            string_key = '-{}'.format(key)
        else:
            string_key = '--{}'.format(key)
        for sub_value in value:
            if isinstance(sub_value, bool) and sub_value is False:
                continue  # a False flag simply isn't emitted
            result.append(string_key)
            if not isinstance(sub_value, bool):
                # NOTE(review): the containment test assumes sub_value is
                # a string; a non-string value (e.g. int) raises TypeError
                # here -- confirm callers pre-stringify values.
                if ' ' in sub_value:
                    string_value = "'{}'".format(sub_value)
                else:
                    string_value = sub_value
                result.append(str(string_value))
    return result
Return the command line parameters as a list of options their values and arguments .
7,372
def run(self, daemon=False):
    """Launch the process, submitted to the daemon or run in-process.

    With daemon=True the process is submitted and this returns
    immediately; otherwise it blocks and the outcome (killed / failed /
    finished) is echoed, including a table of output nodes on success.
    """
    from aiida.engine import launch
    if daemon:
        node = launch.submit(self.process, **self.inputs)
        echo.echo_info('Submitted {}<{}>'.format(self.process_name, node.pk))
        return
    echo.echo_info('Running {}'.format(self.process_name))
    try:
        _, node = launch.run_get_node(self.process, **self.inputs)
    except Exception as exception:
        # NOTE(review): echo_critical presumably aborts, which is why no
        # return follows -- confirm against the echo module.
        echo.echo_critical('an exception occurred during execution: {}'.format(str(exception)))
    if node.is_killed:
        echo.echo_critical('{}<{}> was killed'.format(self.process_name, node.pk))
    elif not node.is_finished_ok:
        arguments = [self.process_name, node.pk, node.exit_status, node.exit_message]
        echo.echo_warning('{}<{}> failed with exit status {}: {}'.format(*arguments))
    else:
        output = []
        echo.echo_success('{}<{}> finished successfully\n'.format(self.process_name, node.pk))
        # Sort outgoing links by label for a stable output table.
        for triple in sorted(node.get_outgoing().all(), key=lambda triple: triple.link_label):
            output.append([triple.link_label, '{}<{}>'.format(triple.node.__class__.__name__, triple.node.pk)])
        echo.echo(tabulate.tabulate(output, headers=['Output label', 'Node']))
Launch the process with the given inputs by default running in the current interpreter .
7,373
def _xpathDict(xml, xpath, cls, parent, **kwargs):
    """Parse every node matching ``xpath`` with ``cls.parse`` and return
    the resulting children as a list."""
    return [
        cls.parse(resource=child, parent=parent, **kwargs)
        for child in xml.xpath(xpath, namespaces=XPATH_NAMESPACES)
    ]
Returns a default Dict given certain information
7,374
def _parse_structured_metadata(obj, xml):
    """Read cpt:structured-metadata children of ``xml`` into obj.metadata.

    Element tags are expanded to URIRefs; values become URIRefs when
    they look like URNs/URLs/handles, language-tagged literals when
    xml:lang is present, typed literals when rdf:datatype (or the lxml
    element's numeric type) applies, and plain literals otherwise.
    """
    for metadata in xml.xpath("cpt:structured-metadata/*", namespaces=XPATH_NAMESPACES):
        tag = metadata.tag
        if "{" in tag:
            # lxml yields '{namespace}local'; rebuild one full URI reference.
            ns, tag = tuple(tag.split("}"))
            tag = URIRef(ns[1:] + tag)
        s_m = str(metadata)
        if s_m.startswith("urn:") or s_m.startswith("http:") or s_m.startswith("https:") or s_m.startswith("hdl:"):
            obj.metadata.add(tag, URIRef(metadata))
        elif '{http://www.w3.org/XML/1998/namespace}lang' in metadata.attrib:
            obj.metadata.add(tag, s_m, lang=metadata.attrib['{http://www.w3.org/XML/1998/namespace}lang'])
        else:
            if "{http://www.w3.org/1999/02/22-rdf-syntax-ns#}datatype" in metadata.attrib:
                datatype = metadata.attrib["{http://www.w3.org/1999/02/22-rdf-syntax-ns#}datatype"]
                if not datatype.startswith("http") and ":" in datatype:
                    # Prefixed datatype (e.g. 'xsd:string'): expand via nsmap.
                    datatype = expand_namespace(metadata.nsmap, datatype)
                obj.metadata.add(tag, Literal(s_m, datatype=URIRef(datatype)))
            elif isinstance(metadata, IntElement):
                obj.metadata.add(tag, Literal(int(metadata), datatype=XSD.integer))
            elif isinstance(metadata, FloatElement):
                obj.metadata.add(tag, Literal(float(metadata), datatype=XSD.float))
            else:
                obj.metadata.add(tag, s_m)
Parse an XML object for structured metadata
7,375
def ingest(cls, resource, element=None, xpath="ti:citation"):
    """Build a (nested) citation from the first ``xpath`` match in
    ``resource``.

    Recurses into the matched node to ingest child citation levels; when
    ``element`` is already a citation, the new level is attached as its
    child. Returns the citation created at this level, or None when
    nothing matches.
    """
    results = resource.xpath(xpath, namespaces=XPATH_NAMESPACES)
    if len(results) > 0:
        citation = cls(
            name=results[0].get("label"),
            xpath=results[0].get("xpath"),
            scope=results[0].get("scope")
        )
        if isinstance(element, cls):
            element.child = citation
            cls.ingest(resource=results[0], element=element.child)
        else:
            element = citation
            cls.ingest(resource=results[0], element=element)
        return citation
    return None
Ingest xml to create a citation
7,376
def parse_metadata(cls, obj, xml):
    """Parse a CTS XML resource to feed *obj* with its metadata.

    Collects language-tagged descriptions and labels, ingests the citation
    mapping, registers "about" links and finally the structured metadata.
    """
    # description and label elements share the same extraction pattern
    for prop in ("description", "label"):
        for child in xml.xpath("ti:" + prop, namespaces=XPATH_NAMESPACES):
            lg = child.get("{http://www.w3.org/XML/1998/namespace}lang")
            if lg is not None:
                obj.set_cts_property(prop, child.text, lg)
    obj.citation = cls.CLASS_CITATION.ingest(
        xml, obj.citation, "ti:online/ti:citationMapping/ti:citation"
    )
    for child in xml.xpath("ti:about", namespaces=XPATH_NAMESPACES):
        obj.set_link(RDF_NAMESPACES.CTS.term("about"), child.get('urn'))
    _parse_structured_metadata(obj, xml)
Parse a resource to feed the object
7,377
def parse(cls, resource, parent=None):
    """Parse a textgroup XML resource into a textgroup object.

    :param resource: XML string or node describing the textgroup
    :param parent: Parent object of the textgroup
    :return: The populated textgroup instance
    """
    xml = xmlparser(resource)
    textgroup = cls(urn=xml.get("urn"), parent=parent)
    # Collect the language-tagged group names
    for child in xml.xpath("ti:groupname", namespaces=XPATH_NAMESPACES):
        lang = child.get("{http://www.w3.org/XML/1998/namespace}lang")
        if lang is not None:
            textgroup.set_cts_property("groupname", child.text, lang)
    # Register contained works, then any structured metadata
    _xpathDict(xml=xml, xpath='ti:work', cls=cls.CLASS_WORK, parent=textgroup)
    _parse_structured_metadata(textgroup, xml)
    return textgroup
Parse a textgroup resource
7,378
def install(label, plist):
    """Store *plist* under *label* as a new .plist file and load it into launchd."""
    path = launchd.plist.write(label, plist)
    launchd.load(path)
Utility function to store a new . plist file and load it
7,379
def uninstall(label):
    """Unload the launchd job named *label* and delete its .plist file.

    Does nothing when no job with that label exists.
    """
    if launchd.LaunchdJob(label).exists():
        path = launchd.plist.discover_filename(label)
        launchd.unload(path)
        os.unlink(path)
Utility function to remove a . plist file and unload it
7,380
def write_hex(fout, buf, offset, width=16):
    """Write the content of *buf* to *fout* in a hexdump style.

    Rows consisting only of zero bytes are collapsed into a single
    "skipped zeroes" note.

    :param fout: writable text stream
    :param buf: bytes-like object to dump
    :param offset: base address printed in the left-hand address column
    :param width: number of bytes rendered per row
    """
    skipped_zeroes = 0
    for i, chunk in enumerate(chunk_iter(buf, width)):
        # Collapse runs of all-zero rows into a counter
        if chunk == (b"\x00" * width):
            skipped_zeroes += 1
            continue
        elif skipped_zeroes != 0:
            fout.write(" -- skipped zeroes: {}\n".format(skipped_zeroes))
            skipped_zeroes = 0
        # Address column (the address of the row's first byte)
        fout.write("{:016x} ".format(i * width + offset))
        # Hex column: bytes grouped 8 at a time
        column = " ".join(
            [" ".join(["{:02x}".format(c) for c in subchunk])
             for subchunk in chunk_iter(chunk, 8)]
        )
        # Expected column width: 2 chars/byte + separators within and between groups
        w = width * 2 + (width - 1) + ((width // 8) - 1)
        if len(column) != w:
            column += " " * (w - len(column))
        fout.write(column)
        fout.write(" |")
        # ASCII column: printable bytes verbatim, everything else as "."
        for c in chunk:
            if c in PRINTABLE_CHARS:
                fout.write(chr(c))
            else:
                fout.write(".")
        # Pad a short final row so the closing bar lines up
        if len(chunk) < width:
            fout.write(" " * (width - len(chunk)))
        fout.write("|")
        fout.write("\n")
Write the content of buf out in a hexdump style
7,381
def _read_config(self):
    """Return the configuration as a dict.

    Reads the defaults shipped next to this module, then overlays the
    user's .ini file on top. If no user file exists yet, the defaults are
    copied into the user's home directory as a starting point and the
    user is informed on stdout.

    :raises RuntimeError: when the bundled default .ini file is missing
    """
    default_config_filepath = Path2(os.path.dirname(__file__), DEAFULT_CONFIG_FILENAME)
    log.debug("Read defaults from: '%s'" % default_config_filepath)
    if not default_config_filepath.is_file():
        raise RuntimeError(
            "Internal error: Can't locate the default .ini file here: '%s'" % default_config_filepath)
    config = self._read_and_convert(default_config_filepath, all_values=True)
    log.debug("Defaults: %s", pprint.pformat(config))
    self.ini_filepath = get_ini_filepath()
    if not self.ini_filepath:
        # No user config found: seed one from the defaults
        self.ini_filepath = get_user_ini_filepath()
        with default_config_filepath.open("r") as infile:
            with self.ini_filepath.open("w") as outfile:
                outfile.write(infile.read())
        print("\n*************************************************************")
        print("Default config file was created into your home:")
        print("\t%s" % self.ini_filepath)
        print("Change it for your needs ;)")
        print("*************************************************************\n")
    else:
        print("\nread user configuration from:")
        print("\t%s\n" % self.ini_filepath)
        # User settings override the defaults
        config.update(self._read_and_convert(self.ini_filepath, all_values=False))
    log.debug("RawConfig changed to: %s", pprint.pformat(config))
    return config
returns the config as a dict .
7,382
def bind_graph(graph=None):
    """Return *graph* (or a freshly created Graph) with the generic
    MyCapytain prefixes bound to it.

    :param graph: Optional existing graph to bind; a new one is made if None
    :return: The bound graph
    """
    graph = Graph() if graph is None else graph
    for prefix, namespace in GRAPH_BINDINGS.items():
        graph.bind(prefix, namespace, True)
    return graph
Bind a graph with generic MyCapytain prefixes
7,383
def zharkov_pel(v, temp, v0, e0, g, n, z, t_ref=300., three_r=3. * constants.R):
    """Electronic contribution to pressure for the Zharkov equation.

    The formulation can be found in Sokolova and Dorogokupets (2013).
    The contribution is evaluated at *temp* relative to *t_ref*.
    """
    v_mol = vol_uc2mol(v, z)
    compression = v / v0

    def p_el(t):
        # 1.e-9 converts the result into GPa
        return (three_r * n / 2. * e0 * np.power(compression, g)
                * np.power(t, 2.) * g / v_mol * 1.e-9)

    return p_el(temp) - p_el(t_ref)
calculate electronic contributions in pressure for the Zharkov equation; the equation can be found in Sokolova and Dorogokupets (2013)
7,384
def tsuchiya_pel(v, temp, v0, a, b, c, d, n, z, three_r=3. * constants.R, t_ref=300.):
    """Electronic contribution to pressure for the Tsuchiya equation.

    The contribution is a cubic polynomial in temperature evaluated at
    *temp* relative to *t_ref*. The volume and composition parameters are
    accepted for interface uniformity with the other thermal helpers but
    do not enter the polynomial.
    """
    def poly(t):
        return a + b * t + c * np.power(t, 2.) + d * np.power(t, 3.)

    return poly(temp) - poly(t_ref)
calculate electronic contributions in pressure for the Tsuchiya equation
7,385
def _validate_resources(self):
    """Validate the resources defined in the options.

    :raises exceptions.FeatureNotAvailable: when any parallelization
        related resource is set to a value other than 1, since this
        calculation does not support parallel execution.
    """
    resources = self.options.resources
    parallel_keys = ('num_machines', 'num_mpiprocs_per_machine', 'tot_num_mpiprocs')
    for key in parallel_keys:
        if key in resources and resources[key] != 1:
            raise exceptions.FeatureNotAvailable(
                "Cannot set resource '{}' to value '{}' for {}: parallelization is not supported, "
                "only a value of '1' is accepted.".format(key, resources[key], self.__class__.__name__))
Validate the resources defined in the options .
7,386
def prepare_for_submission(self, folder):
    """Called prior to job submission with a set of calculation input nodes.

    Builds the CodeInfo/CalcInfo datastructures that tell the engine how
    to invoke the code and which files to copy in and retrieve.

    :param folder: sandbox folder where input files may be written
    :return: the populated CalcInfo instance
    """
    from aiida_codtools.common.cli import CliParameters

    try:
        parameters = self.inputs.parameters.get_dict()
    except AttributeError:
        # the 'parameters' input node is optional
        parameters = {}

    self._validate_resources()

    # Merge user parameters over the plugin's default CLI parameters
    cli_parameters = copy.deepcopy(self._default_cli_parameters)
    cli_parameters.update(parameters)

    codeinfo = datastructures.CodeInfo()
    codeinfo.code_uuid = self.inputs.code.uuid
    codeinfo.cmdline_params = CliParameters.from_dictionary(cli_parameters).get_list()
    codeinfo.stdin_name = self.options.input_filename
    codeinfo.stdout_name = self.options.output_filename
    codeinfo.stderr_name = self.options.error_filename

    calcinfo = datastructures.CalcInfo()
    calcinfo.uuid = str(self.uuid)
    calcinfo.codes_info = [codeinfo]
    calcinfo.retrieve_list = [self.options.output_filename, self.options.error_filename]
    # The input cif node is copied into the sandbox under the expected input name
    calcinfo.local_copy_list = [(self.inputs.cif.uuid, self.inputs.cif.filename, self.options.input_filename)]
    calcinfo.remote_copy_list = []

    return calcinfo
This method is called prior to job submission with a set of calculation input nodes .
7,387
def hugoniot_p(rho, rho0, c0, s):
    """Calculate pressure along a Hugoniot.

    :param rho: density (same units as rho0)
    :param rho0: reference density
    :param c0: velocity at 1 bar
    :param s: slope of the velocity relationship
    :return: Hugoniot pressure
    """
    eta = 1. - (rho0 / rho)
    return rho0 * c0 * c0 * eta / np.power((1. - s * eta), 2.)
calculate pressure along a Hugoniot
7,388
def _dT_h_delta(T_in_kK, eta, k, threenk, c_v):
    """Internal: right-hand side for integrating temperature along a Hugoniot.

    :param T_in_kK: temperature in kilo-Kelvin
    :param eta: compression, 1 - rho0/rho
    :param k: parameter list [rho0, c0, s, gamma0, q, theta0_in_kK]
    :param threenk: 3nR/mass factor
    :param c_v: heat capacity; when 0, a Debye-model value is computed
    :return: dT/d(eta) along the Hugoniot
    """
    rho0 = k[0]
    gamma0 = k[3]
    q = k[4]
    theta0_in_kK = k[5]
    rho = rho0 / (1. - eta)
    c0 = k[1]
    s = k[2]
    # Derivative of the Hugoniot pressure with respect to eta
    dPhdelta_H = rho0 * c0 * c0 * (1. + s * eta) / np.power((1. - s * eta), 3.)
    Ph = hugoniot_p(rho, rho0, c0, s)
    # Volume dependence of the Grueneisen parameter and of the Debye temperature
    gamma = gamma0 * np.power((1. - eta), q)
    theta_in_kK = theta0_in_kK * np.exp((gamma0 - gamma) / q)
    x = theta_in_kK / T_in_kK
    debye3 = debye_E(x)
    if c_v == 0.:
        # No heat capacity supplied: use the Debye-model value
        c_v = threenk * (4. * debye3 - 3. * x / (np.exp(x) - 1.))
    dYdX = (gamma / (1. - eta) * T_in_kK) + (dPhdelta_H * eta - Ph) / (2. * c_v * rho0)
    return dYdX
internal function for calculation of temperature along a Hugoniot
7,389
def hugoniot_t_single(rho, rho0, c0, s, gamma0, q, theta0, n, mass,
                      three_r=3. * constants.R, t_ref=300., c_v=0.):
    """Calculate temperature along a Hugoniot for a single density value.

    Integrates dT/d(eta) from the reference state up to the compression
    corresponding to *rho*; temperatures are integrated in kK internally
    and converted back to K on return.
    """
    eta = 1. - rho0 / rho
    if eta == 0.0:
        # No compression: temperature stays at the reference value
        return 300.
    threenk = three_r / mass * n
    params = [rho0, c0, s, gamma0, q, theta0 / 1.e3]
    solution = odeint(_dT_h_delta, t_ref / 1.e3, [0., eta],
                      args=(params, threenk, c_v), full_output=1)
    temp_in_kK = np.squeeze(solution[0][1])
    return temp_in_kK * 1.e3
internal function to calculate temperature along a Hugoniot for a single density value
7,390
def hugoniot_t(rho, rho0, c0, s, gamma0, q, theta0, n, mass,
               three_r=3. * constants.R, t_ref=300., c_v=0.):
    """Calculate temperature along a Hugoniot.

    Vectorized over *rho*; transparently supports inputs carrying
    uncertainties by wrapping the scalar implementation with uct.wrap.
    """
    # All parameters after rho are scalars and excluded from vectorization
    excluded = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]
    if isuncertainties([rho, rho0, c0, s, gamma0, q, theta0]):
        func = np.vectorize(uct.wrap(hugoniot_t_single), excluded=excluded)
    else:
        func = np.vectorize(hugoniot_t_single, excluded=excluded)
    return func(rho, rho0, c0, s, gamma0, q, theta0, n, mass,
                three_r=three_r, t_ref=t_ref, c_v=c_v)
calculate temperature along a hugoniot
7,391
def to_str(self):
    """Consume the wrapper and return its content as a Python string.

    After this call the wrapper must not be used again: the underlying
    pointer attribute is removed and 'to_str' is replaced by a guard, so
    no separate destruction is necessary.
    """
    raw = c_backend.pystring_get_str(self._ptr)
    # Invalidate the wrapper: drop the pointer and poison further calls
    delattr(self, '_ptr')
    setattr(self, 'to_str', _dangling_pointer)
    return raw.decode("utf-8")
Consumes the wrapper and returns a Python string . Afterwards is not necessary to destruct it as it has already been consumed .
7,392
def servers(self, server='api.telldus.com', port=http.HTTPS_PORT):
    """Fetch the list of servers that can be connected to.

    :param server: host name of the assignment service
    :param port: HTTPS port of the assignment service
    :return: list of (address, port) tuples parsed from the XML response
    :raises RuntimeError: when the assignment request does not return 200 OK
    """
    logging.debug("Fetching server list from %s:%d", server, port)
    conn = http.HTTPSConnection(server, port, context=self.ssl_context())
    conn.request('GET', "/server/assign?protocolVersion=2")
    response = conn.getresponse()
    if response.status != http.OK:
        raise RuntimeError("Could not connect to {}:{}: {} {}".format(
            server, port, response.status, response.reason))
    servers = []

    def extract_servers(name, attributes):
        # Expat start-element handler: collect address/port of <server> elements
        if name == "server":
            servers.append((attributes['address'], int(attributes['port'])))

    parser = expat.ParserCreate()
    parser.StartElementHandler = extract_servers
    parser.ParseFile(response)
    logging.debug("Found %d available servers", len(servers))
    return servers
Fetch list of servers that can be connected to .
7,393
def execute(cmd):
    """Run a shell command and return its stdout decoded as UTF-8.

    :param cmd: command and arguments as a list of strings
    :return: captured standard output as text
    :raises CalledProcessError: when the command exits with a non-zero status
    """
    process = Popen(cmd, stdout=PIPE)
    output, _ = process.communicate()
    if process.returncode != 0:
        raise CalledProcessError(process.returncode, " ".join(cmd))
    return output.decode('utf8')
Run a shell command and return stdout
7,394
def isuncertainties(arg_list):
    """Check if the input list contains any elements with uncertainties.

    An argument "has uncertainties" when it is a ufloat itself, or a
    non-empty list/tuple/ndarray whose first element is a ufloat.

    :param arg_list: iterable of scalars, sequences or ndarrays
    :return: True if any argument carries uncertainties, False otherwise
    """
    for arg in arg_list:
        if isinstance(arg, (list, tuple)):
            # Guard against empty sequences before peeking at the first element
            # (the original raised IndexError on an empty list/tuple)
            if len(arg) > 0 and isinstance(arg[0], uct.UFloat):
                return True
        elif isinstance(arg, np.ndarray):
            flat = np.atleast_1d(arg)
            if flat.size > 0 and isinstance(flat[0], uct.UFloat):
                return True
        elif isinstance(arg, uct.UFloat):
            # The original's '(float, uct.UFloat)' pre-check was redundant
            return True
    return False
check if the input list contains any elements with uncertainties class
7,395
def filter_tzfiles(name_list):
    """Yield (src_name, dst_name) tuples for names that are tz data files.

    A name qualifies when its third path component is 'zoneinfo' and at
    least one component follows it; dst_name is the path starting at the
    'zoneinfo' component.
    """
    for src_name in name_list:
        components = src_name.split('/')
        if len(components) > 3 and components[2] == 'zoneinfo':
            yield src_name, '/'.join(components[2:])
Returns a list of tuples for names that are tz data files .
7,396
def setup(self, app):
    """Initialize the admin plugin on the given application.

    Registers the plugin's template folder, a few Jinja2 helpers, the
    admin home view, and — when i18n is enabled — wires up the Babel
    plugin for translations.

    :param app: the application instance to install the plugin into
    :raises PluginException: when i18n is enabled but Babel is missing
    """
    super().setup(app)
    self.handlers = OrderedDict()
    app.ps.jinja2.cfg.template_folders.append(op.join(PLUGIN_ROOT, 'templates'))

    @app.ps.jinja2.filter
    def admtest(value, a, b=None):
        # Ternary-style template filter: a when value is truthy, else b
        return a if value else b

    @app.ps.jinja2.filter
    def admeq(a, b, result=True):
        # Equality filter whose outcome can be inverted via 'result'
        return result if a == b else not result

    @app.ps.jinja2.register
    def admurl(request, prefix):
        # Rebuild the current URL dropping query params that start with prefix
        qs = {k: v for k, v in request.query.items() if not k.startswith(prefix)}
        if not qs:
            qs = {'ap': 0}
        return "%s?%s" % (request.path, urlparse.urlencode(qs))

    if self.cfg.name is None:
        self.cfg.name = "%s admin" % app.name.title()

    if not callable(self.cfg.home):
        # Fall back to a default home view that renders the home template
        def admin_home(request):
            yield from self.authorize(request)
            return app.ps.jinja2.render(self.cfg.template_home, active=None)
        self.cfg.home = admin_home

    app.register(self.cfg.prefix)(self.cfg.home)

    if not self.cfg.i18n:
        # i18n disabled: install no-op gettext helpers so templates still work
        app.ps.jinja2.env.globals.update({
            '_': lambda s: s,
            'gettext': lambda s: s,
            'ngettext': lambda s, p, n: (n != 1 and (p,) or (s,))[0],
        })
        return

    if 'babel' not in app.ps or not isinstance(app.ps.babel, BPlugin):
        raise PluginException(
            'Plugin `%s` requires for plugin `%s` to be installed to the application.' % (self.name, BPlugin))

    app.ps.babel.cfg.locales_dirs.append(op.join(PLUGIN_ROOT, 'locales'))
    if not app.ps.babel.locale_selector_func:
        app.ps.babel.locale_selector_func = app.ps.babel.select_locale_by_request
Initialize the application .
7,397
def register(self, *handlers, **params):
    """Register admin handlers.

    Bare peewee models are wrapped into an auto-generated PWAdminHandler
    subclass and registered with the application; regular handlers are
    stored by their lowercased name.
    """
    for handler in handlers:
        if issubclass(handler, PWModel):
            # Build a '<Table>Admin' handler class for the model on the fly
            handler = type(
                handler._meta.db_table.title() + 'Admin',
                (PWAdminHandler,),
                dict(model=handler, **params),
            )
            self.app.register(handler)
            continue
        self.handlers[handler.name.lower()] = handler
Register admin handlers, wrapping bare peewee models in auto-generated admin handler classes.
7,398
def authorization(self, func):
    """Decorator that installs *func* as the admin authorization coroutine.

    :param func: callable performing the authorization check
    :return: the original callable, unchanged
    :raises PluginException: when the plugin is not installed to an app yet
    """
    if self.app is None:
        raise PluginException('The plugin must be installed to application.')
    self.authorize = muffin.to_coroutine(func)
    return func
Define a authorization process .
7,399
def scandir_limited(top, limit, deep=0):
    """Scan *top* recursively, yielding only the directory entries that sit
    at the given depth *limit*; directories above the limit are descended
    into rather than yielded, and non-directories are never yielded.

    Permission errors are logged and the affected directory is skipped.
    """
    deep += 1
    try:
        entries = Path2(top).scandir()
    except PermissionError as err:
        log.error("scandir error: %s" % err)
        return
    for entry in entries:
        if entry.is_dir(follow_symlinks=False):
            if deep < limit:
                yield from scandir_limited(entry.path, limit, deep)
            else:
                yield entry
yields only directories with the given deep limit