idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
49,200
def islitlet_progress(islitlet, islitlet_max):
    """Print a compact progress indicator for a loop over slitlets.

    Every tenth slitlet prints its tens digit; every other iteration
    prints a dot.  When the last slitlet is reached, a newline is
    emitted and the stream is flushed.
    """
    marker = str(islitlet // 10) if islitlet % 10 == 0 else '.'
    sys.stdout.write(marker)
    if islitlet == islitlet_max:
        # terminate the progress line once the loop is complete
        sys.stdout.write('\n')
        sys.stdout.flush()
Auxiliary function to print out progress in loop of slitlets .
49,201
def get_corrector_f ( rinput , meta , ins , datamodel ) : from emirdrp . processing . flatfield import FlatFieldCorrector flat_info = meta [ 'master_flat' ] with rinput . master_flat . open ( ) as hdul : _logger . info ( 'loading intensity flat' ) _logger . debug ( 'flat info: %s' , flat_info ) mflat = hdul [ 0 ] . dat...
Corrector for intensity flat
49,202
def loginfo ( method ) : def loginfo_method ( self , rinput ) : klass = rinput . __class__ for key in klass . stored ( ) : val = getattr ( rinput , key ) if isinstance ( val , DataFrame ) : self . logger . debug ( "DataFrame %s" , info . gather_info_dframe ( val ) ) elif isinstance ( val , ObservationResult ) : for f i...
Log the contents of Recipe Input
49,203
def to_host(self):
    """Copy or coerce this description into a Host instance."""
    host_kwargs = {
        "alias": self.alias,
        "user": self.user,
        "keyfile": self.keyfile,
        "port": self.port,
        "extra": self.extra,
    }
    return Host(self.address, **host_kwargs)
Copy or coerce to a Host .
49,204
def get_table_info ( * table_names ) : ret = { } if table_names : for table_name in table_names : for name , inter in get_interfaces ( ) . items ( ) : if inter . has_table ( table_name ) : yield table_name , inter , inter . get_fields ( table_name ) else : for name , inter in get_interfaces ( ) . items ( ) : table_name...
Returns a dict with table_name keys mapped to the Interface that table exists in
49,205
def main_generate ( table_names , stream ) : with stream . open ( ) as fp : fp . write_line ( "from datetime import datetime, date" ) fp . write_line ( "from decimal import Decimal" ) fp . write_line ( "from prom import Orm, Field" ) fp . write_newlines ( ) for table_name , inter , fields in get_table_info ( * table_na...
This will print out valid prom python code for given tables that already exist in a database .
49,206
def exvp ( pos_x , pos_y ) : pos_x = numpy . asarray ( pos_x ) pos_y = numpy . asarray ( pos_y ) center = [ 1024.5 , 1024.5 ] cf = EMIR_PLATESCALE_RADS pos_base_x = pos_x - center [ 0 ] pos_base_y = pos_y - center [ 1 ] ra = numpy . hypot ( pos_base_x , pos_base_y ) thet = numpy . arctan2 ( pos_base_y , pos_base_x ) r ...
Convert virtual pixel to real pixel
49,207
def match_sp_sep ( first , second ) : if isinstance ( first , list ) : one = [ set ( v . split ( " " ) ) for v in first ] else : one = [ { v } for v in first . split ( " " ) ] if isinstance ( second , list ) : other = [ set ( v . split ( " " ) ) for v in second ] else : other = [ { v } for v in second . split ( " " ) ]...
Verify that all the values in first appear in second . The values can either be in the form of lists or as space separated items .
49,208
def _verify_sector_identifier ( self , request ) : si_url = request [ "sector_identifier_uri" ] try : res = self . endpoint_context . httpc . get ( si_url ) except Exception as err : logger . error ( err ) res = None if not res : raise InvalidSectorIdentifier ( "Couldn't read from sector_identifier_uri" ) logger . debu...
Verify sector_identifier_uri is reachable and that it contains redirect_uri s .
49,209
def read(self):
    """Read until EOF and return everything ``readline`` produced.

    Returns
    -------
    list
        One entry per ``readline`` call, collected until ``readline``
        signals EOF by returning a falsy value.
    """
    result = []
    line = self.readline()
    while line:
        result.append(line)
        line = self.readline()
    # result is already a fresh local list; no defensive copy is needed
    return result
Read the file until EOF and return a list of dictionaries .
49,210
# Close the SExtractor reader: close the underlying file object if one was
# opened (guarding against a double close) and mark this reader as closed.
# NOTE(review): the flattened layout makes the nesting of ``self.closed =
# True`` ambiguous — confirm against the original formatting whether it also
# runs when ``self._file`` is unset.
def close ( self ) : if self . _file : if not ( self . _file . closed ) : self . _file . close ( ) self . closed = True
Close the SExtractor file .
49,211
def get_driver ( configuration ) : resources = configuration [ "resources" ] machines = resources [ "machines" ] networks = resources [ "networks" ] oargrid_jobids = configuration . get ( "oargrid_jobids" ) if oargrid_jobids : logger . debug ( "Loading the OargridStaticDriver" ) return OargridStaticDriver ( oargrid_job...
Build an instance of the driver to interact with G5K
49,212
def ver_dec_content ( parts , sign_key = None , enc_key = None , sign_alg = 'SHA256' ) : if parts is None : return None elif len ( parts ) == 3 : timestamp , load , b64_mac = parts mac = base64 . b64decode ( b64_mac ) verifier = HMACSigner ( algorithm = sign_alg ) if verifier . verify ( load . encode ( 'utf-8' ) + time...
Verifies the value of a cookie
49,213
def make_cookie_content ( name , load , sign_key , domain = None , path = None , timestamp = "" , enc_key = None , max_age = 0 , sign_alg = 'SHA256' ) : if not timestamp : timestamp = str ( int ( time . time ( ) ) ) _cookie_value = sign_enc_payload ( load , timestamp , sign_key = sign_key , enc_key = enc_key , sign_alg...
Create and return a cookies content
49,214
def cookie_parts(name, kaka):
    """Return the '|'-separated parts of the named cookie's payload.

    Parameters
    ----------
    name : str
        Cookie name to look up.
    kaka : str or bytes
        Raw cookie header value.

    Returns
    -------
    list of str or None
        The payload split on '|', or None when the cookie is absent.
    """
    jar = SimpleCookie(as_unicode(kaka))
    morsel = jar.get(name)
    return morsel.value.split("|") if morsel else None
Give me the parts of the cookie payload
49,215
def delete_cookie(self, cookie_name=None):
    """Build a cookie that expires immediately on the client side.

    Parameters
    ----------
    cookie_name : str, optional
        Name of the cookie to kill; defaults to the configured name.
    """
    name = cookie_name if cookie_name is not None else self.default_value['name']
    return self.create_cookie("", "", cookie_name=name, kill=True)
Create a cookie that will immediately expire when it hits the other side .
49,216
def get_cookie_value ( self , cookie = None , cookie_name = None ) : if cookie_name is None : cookie_name = self . default_value [ 'name' ] if cookie is None or cookie_name is None : return None else : try : info , timestamp = parse_cookie ( cookie_name , self . sign_key , cookie , self . enc_key , self . sign_alg ) ex...
Return information stored in a Cookie
49,217
def append_cookie ( self , cookie , name , payload , typ , domain = None , path = None , timestamp = "" , max_age = 0 ) : timestamp = str ( int ( time . time ( ) ) ) try : _payload = "::" . join ( [ payload , timestamp , typ ] ) except TypeError : _payload = "::" . join ( [ payload [ 0 ] , timestamp , typ ] ) content =...
Adds a cookie to a SimpleCookie instance
49,218
def f_ac_power ( inverter , v_mp , p_mp ) : return pvlib . pvsystem . snlinverter ( v_mp , p_mp , inverter ) . flatten ( )
Calculate AC power
49,219
def f_dc_power ( effective_irradiance , cell_temp , module ) : dc = pvlib . pvsystem . sapm ( effective_irradiance , cell_temp , module ) fields = ( 'i_sc' , 'i_mp' , 'v_oc' , 'v_mp' , 'p_mp' ) return tuple ( dc [ field ] for field in fields )
Calculate DC power using Sandia Performance model
49,220
def f_effective_irradiance ( poa_direct , poa_diffuse , am_abs , aoi , module ) : Ee = pvlib . pvsystem . sapm_effective_irradiance ( poa_direct , poa_diffuse , am_abs , aoi , module ) return Ee . reshape ( 1 , - 1 )
Calculate effective irradiance for Sandia Performance model
49,221
def f_cell_temp ( poa_global , wind_speed , air_temp ) : temps = pvlib . pvsystem . sapm_celltemp ( poa_global , wind_speed , air_temp ) return temps [ 'temp_cell' ] . values , temps [ 'temp_module' ] . values
Calculate cell temperature .
49,222
def f_aoi ( surface_tilt , surface_azimuth , solar_zenith , solar_azimuth ) : return pvlib . irradiance . aoi ( surface_tilt , surface_azimuth , solar_zenith , solar_azimuth )
Calculate angle of incidence
49,223
def find_position ( edges , prow , bstart , bend , total = 5 ) : nt = total // 2 if prow - nt < 0 or prow + nt >= edges . shape [ 0 ] : return [ ] s2edges = edges [ prow - nt : prow + nt + 1 , bstart : bend ] structure = morph . generate_binary_structure ( 2 , 2 ) har , num_f = mes . label ( s2edges , structure = struc...
Find an EMIR CSU bar position in an edge image.
49,224
def calc_fwhm ( img , region , fexpand = 3 , axis = 0 ) : xpregion = expand_region ( region , fexpand , fexpand ) cslit = img [ xpregion ] pslit = cslit . mean ( axis = axis ) x2 = len ( pslit ) y1 , y2 = pslit [ 0 ] , pslit [ - 1 ] mslope = ( y2 - y1 ) / x2 backstim = mslope * numpy . arange ( x2 ) + y1 qslit = pslit ...
Compute the FWHM in the direction given by axis
49,225
def simple_prot(x, start):
    """Return the index of the first local peak at or right of ``start``.

    A peak is a strict rise followed by a non-rise.  None is returned
    when no peak exists before the last element.
    """
    for idx in range(start, len(x) - 1):
        prev_v, cur_v, next_v = x[idx - 1], x[idx], x[idx + 1]
        rising = cur_v - prev_v > 0
        not_rising_after = cur_v - next_v >= 0
        if rising and not_rising_after:
            return idx
    # exhausted the search range without finding a peak
    return None
Find the first peak to the right of start
49,226
def position_half_h ( pslit , cpix , backw = 4 ) : next_peak = simple_prot ( pslit , cpix ) if next_peak is None : raise ValueError dis_peak = next_peak - cpix wpos2 = cpix - dis_peak wpos1 = wpos2 - backw left_background = pslit [ wpos1 : wpos2 ] . min ( ) height = pslit [ next_peak ] - left_background half_height = l...
Find the position where the value is half of the peak
49,227
def locate_bar_l ( icut , epos ) : def swap_coor ( x ) : return x def swap_line ( tab ) : return tab return _locate_bar_gen ( icut , epos , transform1 = swap_coor , transform2 = swap_line )
Fine position of the left CSU bar
49,228
def locate_bar_r ( icut , epos ) : sm = len ( icut ) def swap_coor ( x ) : return sm - 1 - x def swap_line ( tab ) : return tab [ : : - 1 ] return _locate_bar_gen ( icut , epos , transform1 = swap_coor , transform2 = swap_line )
Fine position of the right CSU bar
49,229
def _locate_bar_gen ( icut , epos , transform1 , transform2 ) : epos_pix = coor_to_pix_1d ( epos ) epos_pix_s = transform1 ( epos_pix ) icut2 = transform2 ( icut ) try : res = position_half_h ( icut2 , epos_pix_s ) xint_s , next_peak_s , wpos1_s , wpos2_s , background_level , half_height = res xint = transform1 ( xint_...
Generic function for the fine position of the CSU
49,230
def overlap(intv1, intv2):
    """Return the length of the overlap between two intervals.

    Each interval is a (start, end) pair; disjoint intervals yield 0.
    """
    lo = intv1[0] if intv1[0] > intv2[0] else intv2[0]
    hi = intv1[1] if intv1[1] < intv2[1] else intv2[1]
    span = hi - lo
    return span if span > 0 else 0
Overlapping of two intervals
49,231
def exvp_scalar ( x , y , x0 , y0 , c2 , c4 , theta0 , ff ) : factor = 0.1944 * np . pi / ( 180.0 * 3600 ) r_pix = np . sqrt ( ( x - x0 * 1000 ) ** 2 + ( y - y0 * 1000 ) ** 2 ) r_rad = factor * r_pix rdist = ( 1 + c2 * 1.0E4 * r_rad ** 2 + c4 * 1.0E9 * r_rad ** 4 ) theta = np . arctan ( ( x - x0 * 1000 ) / ( y - y0 * 1...
Convert virtual pixel to real pixel .
49,232
def expected_distorted_boundaries ( islitlet , csu_bar_slit_center , borderlist , params , parmodel , numpts , deg , debugplot = 0 ) : c2 , c4 , ff , slit_gap , slit_height , theta0 , x0 , y0 , y_baseline = return_params ( islitlet , csu_bar_slit_center , params , parmodel ) xp = np . linspace ( 1 , EMIR_NAXIS1 , numpt...
Return expected SpectrumTrail instances associated to a given slitlet .
49,233
def fun_residuals ( params , parmodel , bounddict , shrinking_factor , numresolution , islitmin , islitmax , debugplot ) : global FUNCTION_EVALUATIONS global_residual = 0.0 nsummed = 0 read_slitlets = list ( bounddict [ 'contents' ] . keys ( ) ) for tmp_slitlet in read_slitlets : islitlet = int ( tmp_slitlet [ 7 : ] ) ...
Function to be minimised .
49,234
def overplot_boundaries_from_bounddict ( ax , bounddict , micolors , linetype = '-' ) : for islitlet in range ( 1 , EMIR_NBARS + 1 ) : tmpcolor = micolors [ islitlet % 2 ] tmp_slitlet = 'slitlet' + str ( islitlet ) . zfill ( 2 ) if tmp_slitlet in bounddict [ 'contents' ] . keys ( ) : read_dateobs = list ( bounddict [ '...
Overplot boundaries on current plot .
49,235
def overplot_boundaries_from_params ( ax , params , parmodel , list_islitlet , list_csu_bar_slit_center , micolors = ( 'm' , 'c' ) , linetype = '--' , labels = True , alpha_fill = None , global_offset_x_pix = 0 , global_offset_y_pix = 0 ) : xoff = float ( global_offset_x_pix ) yoff = float ( global_offset_y_pix ) list_...
Overplot boundaries computed from fitted parameters .
49,236
def bound_params_from_dict ( bound_param_dict ) : params = Parameters ( ) for mainpar in EXPECTED_PARAMETER_LIST : if mainpar not in bound_param_dict [ 'contents' ] . keys ( ) : raise ValueError ( 'Parameter ' + mainpar + ' not found!' ) if bound_param_dict [ 'meta_info' ] [ 'parmodel' ] == "longslit" : dumdict = bound...
Define ~lmfit . parameter . Parameters object from dictionary .
49,237
def transaction_start ( self , name ) : if not name : raise ValueError ( "Transaction name cannot be empty" ) self . transaction_count += 1 logger . debug ( "{}. Start transaction {}" . format ( self . transaction_count , name ) ) if self . transaction_count == 1 : self . _transaction_start ( ) else : self . _transacti...
start a transaction
49,238
def transaction_fail ( self , name ) : if not name : raise ValueError ( "Transaction name cannot be empty" ) if self . transaction_count > 0 : logger . debug ( "{}. Failing transaction {}" . format ( self . transaction_count , name ) ) if self . transaction_count == 1 : self . _transaction_fail ( ) else : self . _trans...
rollback a transaction if currently in one
49,239
def connect ( self , connection_config = None , * args , ** kwargs ) : if self . connected : return self . connected if connection_config : self . connection_config = connection_config self . connected = True try : self . _connect ( self . connection_config ) except Exception as e : self . connected = False self . rais...
connect to the interface
49,240
def close ( self ) : if not self . connected : return True self . _close ( ) self . connected = False self . log ( "Closed Connection {}" , self . connection_config . interface_name ) return True
close an open connection
49,241
def query ( self , query_str , * query_args , ** query_options ) : with self . connection ( ** query_options ) as connection : query_options [ 'connection' ] = connection return self . _query ( query_str , query_args , ** query_options )
run a raw query on the db
49,242
def set_table ( self , schema , ** kwargs ) : with self . connection ( ** kwargs ) as connection : kwargs [ 'connection' ] = connection if self . has_table ( str ( schema ) , ** kwargs ) : return True try : with self . transaction ( ** kwargs ) : self . _set_table ( schema , ** kwargs ) for index_name , index in schema...
add the table to the db
49,243
def has_table(self, table_name, **kwargs):
    """Check whether ``table_name`` exists in the connected db.

    Returns True when at least one matching table is found.
    """
    existing_connection = kwargs.get('connection', None)
    with self.connection(existing_connection) as connection:
        kwargs['connection'] = connection
        matches = self.get_tables(table_name, **kwargs)
        return bool(matches)
check to see if a table is in the db
49,244
def get_tables(self, table_name="", **kwargs):
    """Return the tables of the connected db matching ``table_name``.

    An empty ``table_name`` lists all tables.
    """
    name = str(table_name)
    with self.connection(**kwargs) as connection:
        kwargs['connection'] = connection
        return self._get_tables(name, **kwargs)
get all the tables of the currently connected db
49,245
def delete_table ( self , schema , ** kwargs ) : with self . connection ( ** kwargs ) as connection : kwargs [ 'connection' ] = connection if not self . has_table ( str ( schema ) , ** kwargs ) : return True with self . transaction ( ** kwargs ) : self . _delete_table ( schema , ** kwargs ) return True
remove a table matching schema from the db
49,246
def delete_tables ( self , ** kwargs ) : if not kwargs . get ( 'disable_protection' , False ) : raise ValueError ( 'In order to delete all the tables, pass in disable_protection=True' ) with self . connection ( ** kwargs ) as connection : kwargs [ 'connection' ] = connection self . _delete_tables ( ** kwargs )
removes all the tables from the db
49,247
def get_indexes ( self , schema , ** kwargs ) : with self . connection ( ** kwargs ) as connection : kwargs [ 'connection' ] = connection return self . _get_indexes ( schema , ** kwargs )
get all the indexes
49,248
def set_index ( self , schema , name , fields , ** index_options ) : with self . transaction ( ** index_options ) as connection : index_options [ 'connection' ] = connection self . _set_index ( schema , name , fields , ** index_options ) return True
add an index to the table
49,249
def insert ( self , schema , fields , ** kwargs ) : r = 0 with self . connection ( ** kwargs ) as connection : kwargs [ 'connection' ] = connection try : with self . transaction ( ** kwargs ) : r = self . _insert ( schema , fields , ** kwargs ) except Exception as e : exc_info = sys . exc_info ( ) if self . handle_erro...
Persist d into the db
49,250
def update ( self , schema , fields , query , ** kwargs ) : with self . connection ( ** kwargs ) as connection : kwargs [ 'connection' ] = connection try : with self . transaction ( ** kwargs ) : r = self . _update ( schema , fields , query , ** kwargs ) except Exception as e : exc_info = sys . exc_info ( ) if self . h...
Persist the query . fields into the db that match query . fields_where
49,251
def _get_query ( self , callback , schema , query = None , * args , ** kwargs ) : if not query : query = Query ( ) ret = None with self . connection ( ** kwargs ) as connection : kwargs [ 'connection' ] = connection try : if connection . in_transaction ( ) : with self . transaction ( ** kwargs ) : ret = callback ( sche...
this is just a common wrapper around all the get queries since they are all really similar in how they execute
49,252
def get_one(self, schema, query=None, **kwargs):
    """Fetch one row matching ``query``; {} when nothing matches."""
    row = self._get_query(self._get_one, schema, query, **kwargs)
    return row if row else {}
get one row from the db matching filters set in query
49,253
def get(self, schema, query=None, **kwargs):
    """Fetch all rows matching ``query``; [] when nothing matches."""
    rows = self._get_query(self._get, schema, query, **kwargs)
    return rows if rows else []
get matching rows from the db matching filters set in query
49,254
def log ( self , format_str , * format_args , ** log_options ) : if isinstance ( format_str , Exception ) : logger . exception ( format_str , * format_args ) else : log_level = log_options . get ( 'level' , logging . DEBUG ) if logger . isEnabledFor ( log_level ) : if format_args : logger . log ( log_level , format_str...
wrapper around the module's logger
49,255
def raise_error ( self , e , exc_info = None ) : if not exc_info : exc_info = sys . exc_info ( ) if not isinstance ( e , InterfaceError ) : if not hasattr ( builtins , e . __class__ . __name__ ) : e = self . _create_error ( e , exc_info ) reraise ( e . __class__ , e , exc_info [ 2 ] )
this is just a wrapper to make the passed in exception an InterfaceError
49,256
def _set_all_tables ( self , schema , ** kwargs ) : with self . transaction ( ** kwargs ) as connection : kwargs [ 'connection' ] = connection for field_name , field_val in schema . fields . items ( ) : s = field_val . schema if s : self . _set_all_tables ( s , ** kwargs ) self . set_table ( schema , ** kwargs ) return...
You can run into a problem when you are trying to set a table and it has a foreign key to a table that doesn't exist, so this method will go through all fk refs and make sure the tables exist
49,257
def _set_all_fields ( self , schema , ** kwargs ) : current_fields = self . get_fields ( schema , ** kwargs ) for field_name , field in schema . fields . items ( ) : if field_name not in current_fields : if field . required : raise ValueError ( 'Cannot safely add {} on the fly because it is required' . format ( field_n...
this will add fields that don't exist in the table if they can be set to NULL; the reason they have to be NULL is that adding NULL-able fields to Postgres is really light, but if they have a default value then it can be costly
49,258
def random_path ( instance , filename ) : uuid_hex = get_uuid ( ) return os . path . join ( uuid_hex [ : 3 ] , uuid_hex [ 3 : ] , filename )
Random path generator for uploads specify this for upload_to = argument of FileFields
49,259
def initialize ( signal_number = DEFAULT_TIMER_SIGNAL_NUMBER , update_period_s = DEFAULT_UPDATE_PERIOD_S ) : global initialized if initialized : return initialized = True uwsgi . add_timer ( signal_number , update_period_s ) uwsgi . register_signal ( signal_number , MULE , emit )
Initialize metrics must be invoked at least once prior to invoking any other method .
49,260
def emit ( _ ) : if not initialized : raise NotInitialized view = { 'version' : __version__ , 'counters' : { } , 'gauges' : { } , 'histograms' : { } , 'meters' : { } , 'timers' : { } , } for ( ty , module , name ) , metric in six . iteritems ( all_metrics ) : view [ ty ] [ '%s.%s' % ( module , name ) ] = metric . view ...
Serialize metrics to the memory mapped buffer .
49,261
def view ( ) : if not initialized : raise NotInitialized marshalled_metrics_mmap . seek ( 0 ) try : uwsgi . lock ( ) marshalled_view = marshalled_metrics_mmap . read ( MAX_MARSHALLED_VIEW_SIZE ) finally : uwsgi . unlock ( ) return marshal . loads ( marshalled_view )
Get a dictionary representation of current metrics .
49,262
def convert_out ( self , obj ) : newobj = super ( ProcessedImageProduct , self ) . convert_out ( obj ) if newobj : hdulist = newobj . open ( ) hdr = hdulist [ 0 ] . header if 'EMIRUUID' not in hdr : hdr [ 'EMIRUUID' ] = str ( uuid . uuid1 ( ) ) return newobj
Write EMIRUUID header on reduction
49,263
def verify_client ( endpoint_context , request , authorization_info ) : if not authorization_info : if 'client_id' in request and 'client_secret' in request : auth_info = ClientSecretPost ( endpoint_context ) . verify ( request ) auth_info [ 'method' ] = 'client_secret_post' elif 'client_assertion' in request : auth_in...
Initiated Guessing !
49,264
def _post_parse_request ( self , request , client_id = '' , ** kwargs ) : request = RefreshAccessTokenRequest ( ** request . to_dict ( ) ) try : keyjar = self . endpoint_context . keyjar except AttributeError : keyjar = "" request . verify ( keyjar = keyjar , opponent_id = client_id ) if "client_id" not in request : re...
This is where clients come to refresh their access tokens
49,265
def random_id(length):
    """Generate a random alphanumeric ID of the given length.

    Parameters
    ----------
    length : int
        Number of characters in the returned ID.

    NOTE: this uses the ``random`` module, which is not cryptographically
    secure — use ``secrets`` for security-sensitive tokens.
    """
    alphabet = string.ascii_letters + string.digits
    # random.choices draws all characters in one C-level call
    return "".join(random.choices(alphabet, k=length))
Generates a random ID of given length
49,266
def delayed_close(closable):
    """Suppress ``closable.close()`` for the duration of the context.

    While active, ``close`` is replaced by a no-op; on exit the real
    ``close`` is restored and then invoked once.

    NOTE(review): this is a generator meant to be wrapped by
    ``contextlib.contextmanager`` — confirm where the decorator is applied.
    """
    original_close = getattr(closable, "close", None)
    if original_close:
        def _noop_close(*args, **kw):
            pass
        setattr(closable, "close", _noop_close)
    try:
        yield closable
    finally:
        if original_close:
            # restore the real close, then actually close now
            setattr(closable, "close", original_close)
            closable.close()
Delay close until this contextmanager dies
49,267
def map_sid2uid(self, sid, uid):
    """Record the two-way link between a Session ID and a User ID."""
    # forward mapping: session -> user
    self.set('sid2uid', sid, uid)
    # reverse mapping: user -> session
    self.set('uid2sid', uid, sid)
Store the connection between a Session ID and a User ID
49,268
def map_sid2sub ( self , sid , sub ) : self . set ( 'sid2sub' , sid , sub ) self . set ( 'sub2sid' , sub , sid )
Store the connection between a Session ID and a subject ID .
49,269
def get_subs_by_uid ( self , uid ) : res = set ( ) for sid in self . get ( 'uid2sid' , uid ) : res |= set ( self . get ( 'sid2sub' , sid ) ) return res
Find all subject identifiers that is connected to a User ID .
49,270
def remove_session_id(self, sid):
    """Drop every mapping that references the Session ID ``sid``."""
    # detach the session from all users, then drop the forward map
    for user_id in self.get('sid2uid', sid):
        self.remove('uid2sid', user_id, sid)
    self.delete('sid2uid', sid)
    # detach the session from all subjects, then drop the forward map
    for subject in self.get('sid2sub', sid):
        self.remove('sub2sid', subject, sid)
    self.delete('sid2sub', sid)
Remove all references to a specific Session ID
49,271
def remove_uid ( self , uid ) : for sid in self . get ( 'uid2sid' , uid ) : self . remove ( 'sid2uid' , sid , uid ) self . delete ( 'uid2sid' , uid )
Remove all references to a specific User ID
49,272
def remove_sub ( self , sub ) : for _sid in self . get ( 'sub2sid' , sub ) : self . remove ( 'sid2sub' , _sid , sub ) self . delete ( 'sub2sid' , sub )
Remove all references to a specific Subject ID
49,273
def up ( force = True , env = None , ** kwargs ) : "Starts a new experiment" inventory = os . path . join ( os . getcwd ( ) , "hosts" ) conf = Configuration . from_dictionnary ( provider_conf ) provider = Enos_vagrant ( conf ) roles , networks = provider . init ( ) check_networks ( roles , networks ) env [ "roles" ] = ...
Starts a new experiment
49,274
def convert_args ( test_fcn , * test_args ) : def wrapper ( origfcn ) : @ functools . wraps ( origfcn ) def newfcn ( * args , ** kwargs ) : argspec = getargspec ( origfcn ) kwargs . update ( zip ( argspec . args , args ) ) for a in test_args : if a in argspec . args : kwargs [ a ] = test_fcn ( kwargs [ a ] ) return ori...
Decorator to be using in formulas to convert test_args depending on the test_fcn .
49,275
def get_public_attributes(cls, as_list=True):
    """Return class attributes that are neither private nor magic.

    When ``as_list`` is False, a lazy generator is returned instead of
    a list.
    """
    def is_public(attr_name):
        return not attr_name.startswith('_')

    public = (name for name in dir(cls) if is_public(name))
    return list(public) if as_list else public
Return class attributes that are neither private nor magic .
49,276
def register ( self , newitems , * args , ** kwargs ) : newkeys = newitems . viewkeys ( ) if any ( self . viewkeys ( ) & newkeys ) : raise DuplicateRegItemError ( self . viewkeys ( ) & newkeys ) self . update ( newitems ) kwargs . update ( zip ( self . meta_names , args ) ) for k , v in kwargs . iteritems ( ) : meta = ...
Register newitems in registry .
49,277
def unregister ( self , items ) : items = _listify ( items ) meta_names = ( m for m in vars ( self ) . iterkeys ( ) if ( not m . startswith ( '_' ) and m not in dir ( Registry ) ) ) for m in meta_names : if m not in self . meta_names : raise AttributeError ( 'Meta name %s not listed.' ) for it in items : if it in self ...
Remove items from registry .
49,278
def default ( self , o ) : if isinstance ( o , Q_ ) : return o . magnitude elif isinstance ( o , np . ndarray ) : return o . tolist ( ) else : return super ( SimKitJSONEncoder , self ) . default ( o )
JSONEncoder default method that converts NumPy arrays and quantities objects to lists .
49,279
def set_meta ( mcs , bases , attr ) : meta = attr . pop ( mcs . _meta_cls , types . ClassType ( mcs . _meta_cls , ( ) , { } ) ) meta_attrs = get_public_attributes ( meta ) for base in bases : base_meta = getattr ( base , mcs . _meta_cls , None ) if base_meta is None : continue for a in get_public_attributes ( base_meta...
Get all of the Meta classes from bases and combine them with this class .
49,280
def factory ( cls , ** kwargs ) : for name , obj in inspect . getmembers ( sys . modules [ __name__ ] ) : if inspect . isclass ( obj ) and issubclass ( obj , UserAuthnMethod ) : try : if obj . __name__ == cls : return obj ( ** kwargs ) except AttributeError : pass
Factory method that can be used to easily instantiate a class instance
49,281
def register ( self , new_formulas , * args , ** kwargs ) : kwargs . update ( zip ( self . meta_names , args ) ) super ( FormulaRegistry , self ) . register ( new_formulas , ** kwargs )
Register formula and meta data .
49,282
def create_blazar_client ( config , session ) : return blazar_client . Client ( session = session , service_type = "reservation" , region_name = os . environ [ "OS_REGION_NAME" ] )
Check the reservation and create a new one if necessary.
49,283
def reconnecting ( count = None , backoff = None ) : reconn_params = { "count" : count , "backoff" : backoff } def retry_decorator ( func ) : @ wraps ( func ) def wrapper ( self , * args , ** kwargs ) : count = reconn_params [ "count" ] backoff = reconn_params [ "backoff" ] if count is None : count = self . connection_...
this is a very specific decorator meant to be used on Interface classes . It will attempt to reconnect if the connection is closed and run the same method again .
49,284
def escape_header(val):
    """Escape ``val`` so it can be used in a MIME header.

    ASCII-representable values are percent-quoted directly; anything
    else falls back to the extended "utf-8''..." form.  None passes
    through unchanged.
    """
    if val is None:
        return None
    try:
        escaped = quote(val, encoding="ascii", safe="/ ")
    except ValueError:
        # non-ASCII input: switch to the extended-parameter encoding
        escaped = "utf-8''" + quote(val, encoding="utf-8", safe="/ ")
    return escaped
Escapes a value so that it can be used in a mime header
49,285
def make_streams ( name , value , boundary , encoding ) : filename = None mime = None if isinstance ( value , collections . Mapping ) and "name" in value and "value" in value : filename = value [ "name" ] try : mime = value [ "mime" ] except KeyError : pass value = value [ "value" ] if not filename : filename = getattr...
Generates one or more streams for each name value pair
49,286
def len(self):
    """Total number of bytes remaining across all underlying streams.

    Each stream's position is preserved: seek to the end to measure,
    then seek back.
    """
    total = 0
    for stream in self.streams:
        position = stream.tell()
        try:
            stream.seek(0, 2)  # 2 == SEEK_END
            total += stream.tell() - position
        finally:
            stream.seek(position)
    return total
Length of the data stream
49,287
def headers(self):
    """Return every header needed to issue the multipart request."""
    content_type = "multipart/form-data; boundary={}".format(self.boundary)
    return {
        "Content-Type": content_type,
        "Content-Length": str(self.len),
        "Content-Encoding": self.encoding,
    }
All headers needed to make a request
49,288
def mk_pools(things, keyfnc=lambda x: x):
    """Index ``things`` by ``keyfnc`` to construct pools of things.

    Parameters
    ----------
    things : iterable
        Items to pool.
    keyfnc : callable, optional
        Key extractor; identity by default.

    Returns
    -------
    dict
        Maps each key to the list of things sharing that key.
    """
    pools = {}
    # after sorting by key, groupby yields each key exactly once, so a
    # plain assignment replaces the setdefault/extend/list dance
    for key, group in groupby(sorted(things, key=keyfnc), key=keyfnc):
        pools[key] = list(group)
    return pools
Indexes a thing by the keyfnc to construct pools of things .
49,289
def pick_things(pools, key, n):
    """Remove and return at most ``n`` things from the pool under ``key``.

    The picked things are deleted from the pool in place; a missing or
    empty pool yields [].
    """
    pool = pools.get(key)
    if not pool:
        return []
    # grab the prefix, then slice-assign it away in one step
    picked, pool[:n] = pool[:n], []
    return picked
Picks a maximum of n things in a dict of indexed pool of things .
49,290
def listen ( room ) : def onmessage ( m ) : print ( m ) if m . admin or m . nick == r . user . name : return if "parrot" in m . msg . lower ( ) : r . post_chat ( "ayy lmao" ) elif m . msg . lower ( ) in ( "lol" , "lel" , "kek" ) : r . post_chat ( "*kok" ) else : r . post_chat ( re . sub ( r"\blain\b" , "purpleadmin" , ...
Open a volafile room and start listening to it
49,291
def average_dtu_configurations ( list_of_objects ) : result = DtuConfiguration ( ) if len ( list_of_objects ) == 0 : return result list_of_members = result . __dict__ . keys ( ) for member in list_of_members : result . __dict__ [ member ] = np . mean ( [ tmp_dtu . __dict__ [ member ] for tmp_dtu in list_of_objects ] ) ...
Return DtuConfiguration instance with averaged values .
49,292
def maxdiff_dtu_configurations ( list_of_objects ) : result = DtuConfiguration ( ) if len ( list_of_objects ) == 0 : return result list_of_members = result . __dict__ . keys ( ) for member in list_of_members : tmp_array = np . array ( [ tmp_dtu . __dict__ [ member ] for tmp_dtu in list_of_objects ] ) minval = tmp_array...
Return DtuConfiguration instance with maximum differences .
49,293
def define_from_fits ( cls , fitsobj , extnum = 0 ) : with fits . open ( fitsobj ) as hdulist : image_header = hdulist [ extnum ] . header return cls . define_from_header ( image_header )
Define class object from header information in FITS file .
49,294
def define_from_dictionary ( cls , inputdict ) : self = DtuConfiguration ( ) for item in self . __dict__ : self . __dict__ [ item ] = inputdict [ item ] return self
Define class object from dictionary .
49,295
def define_from_values ( cls , xdtu , ydtu , zdtu , xdtu_0 , ydtu_0 , zdtu_0 ) : self = DtuConfiguration ( ) self . xdtu = xdtu self . ydtu = ydtu self . zdtu = zdtu self . xdtu_0 = xdtu_0 self . ydtu_0 = ydtu_0 self . zdtu_0 = zdtu_0 return self
Define class object from from provided values .
49,296
def closeto ( self , other , abserror ) : result = ( abs ( self . xdtu - other . xdtu ) <= abserror ) and ( abs ( self . ydtu - other . ydtu ) <= abserror ) and ( abs ( self . zdtu - other . zdtu ) <= abserror ) and ( abs ( self . xdtu_0 - other . xdtu_0 ) <= abserror ) and ( abs ( self . ydtu_0 - other . ydtu_0 ) <= a...
Check that all the members are equal within provided absolute error .
49,297
def init ( self , force_deploy = False ) : machines = self . provider_conf . machines networks = self . provider_conf . networks _networks = [ ] for network in networks : ipnet = IPNetwork ( network . cidr ) _networks . append ( { "netpool" : list ( ipnet ) [ 10 : - 10 ] , "cidr" : network . cidr , "roles" : network . ...
Reserve and deploys the vagrant boxes .
49,298
def destroy ( self ) : v = vagrant . Vagrant ( root = os . getcwd ( ) , quiet_stdout = False , quiet_stderr = True ) v . destroy ( )
Destroy all vagrant box involved in the deployment .
49,299
def tick(self):
    """Mark the passage of one tick and decay the current rate."""
    instant = self.count / float(self.tick_interval_s)
    self.count = 0
    if not self.initialized:
        # first tick: seed the average with the observed rate
        self.initialized = True
        self.rate = instant
    else:
        # exponentially weighted moving-average update
        self.rate = self.rate + self.alpha * (instant - self.rate)
Mark the passage of time and decay the current rate accordingly .