idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
47,900
def reset_coord(self):
    """Reset the source location back to the initial sky coordinate.

    Converts ``self.init_skycoord`` (RA/Dec) to pixel space and stores the
    resulting pixel location.
    """
    # usepv=True: presumably applies the PV distortion terms of the WCS -- TODO confirm
    (x, y, idx) = self.world2pix(self.init_skycoord.ra, self.init_skycoord.dec, usepv=True)
    self.update_pixel_location((x, y), idx)
Reset the source location based on the init_skycoord values
47,901
def pixel_coord(self):
    """Return the coordinates of the source in the cutout reference frame."""
    return self.get_pixel_coordinates(self.reading.pix_coord, self.reading.get_ccd_num())
Return the coordinates of the source in the cutout reference frame .
47,902
def get_pixel_coordinates(self, point, ccdnum):
    """Convert a point from original-image pixel coordinates to cutout pixels.

    Takes into account that the current HDUList may be a cutout of a larger
    original image.

    :param point: (x, y) in the original FITS image; each element may carry
        astropy units (Quantity) or be a plain number.
    :param ccdnum: CCD number used to select the HDU.
    :returns: (x, y, hdulist_index) in the cutout frame.
    """
    hdulist_index = self.get_hdulist_idx(ccdnum)
    # Strip astropy units when both coordinates carry them.
    if isinstance(point[0], Quantity) and isinstance(point[1], Quantity):
        pix_point = point[0].value, point[1].value
    else:
        pix_point = point
    # Flip for inverted readings; FITS pixel coordinates are 1-based.
    if self.reading.inverted:
        pix_point = self.reading.obs.naxis1 - pix_point[0] + 1, self.reading.obs.naxis2 - pix_point[1] + 1
    (x, y) = self.hdulist[hdulist_index].converter.convert(pix_point)
    return x, y, hdulist_index
Retrieves the pixel location of a point within the current HDUList given the location in the original FITS image . This takes into account that the image may be a cutout of a larger original .
47,903
def get_observation_coordinates(self, x, y, hdulist_index):
    """Map cutout pixel (x, y) back to the original observation's coordinates.

    Inverse of get_pixel_coordinates: returns the location in the original
    image before any cutouts were done.
    """
    return self.hdulist[hdulist_index].converter.get_inverse_converter().convert((x, y))
Retrieves the location of a point using the coordinate system of the original observation i . e . the original image before any cutouts were done .
47,904
def zmag(self, ):
    """Return the photometric zeropoint (PHOTZP) of the CCD for this reading.

    Cached on first access; defaults to 0.0 when the header has no PHOTZP.
    """
    if self._zmag is None:
        hdulist_index = self.get_hdulist_idx(self.reading.get_ccd_num())
        self._zmag = self.hdulist[hdulist_index].header.get('PHOTZP', 0.0)
    return self._zmag
Return the photometric zeropoint of the CCD associated with the reading .
47,905
def apcor(self):
    """Return the aperture correction for the CCD associated with the reading.

    Lazily downloads the apcor file on first access.  If the download fails,
    falls back to a sentinel ApcorData whose 99.99 magnitudes flag the
    correction as unavailable.
    """
    if self._apcor is None:
        try:
            self._apcor = Downloader().download_apcor(self.reading.get_apcor_uri())
        except Exception:
            # Was a bare `except:`, which would also swallow KeyboardInterrupt
            # and SystemExit; the fallback is deliberate best-effort, so catch
            # Exception only.
            self._apcor = ApcorData.from_string("5 15 99.99 99.99")
    return self._apcor
Return the aperture correction for the CCD associated with the reading.
47,906
def _hdu_on_disk(self, hdulist_index):
    """Write the HDU at hdulist_index to a temporary FITS file; return its path.

    IRAF routines such as daophot need their input on disk.
    """
    if self._tempfile is None:
        self._tempfile = tempfile.NamedTemporaryFile(mode="r+b", suffix=".fits")
        self.hdulist[hdulist_index].writeto(self._tempfile.name)
        # NOTE(review): the tempfile is cached after the first call, so a later
        # call with a different hdulist_index returns the file written for the
        # first index -- confirm this is intended.
    return self._tempfile.name
IRAF routines such as daophot need input on disk .
47,907
def comparison_image_list(self):
    """Return a table of possible comparison images for the current cutout.

    Queries CADC (cone search) on first call and caches the result.  Rows are
    filtered to post-2013, exptime >= 250s, non-U-band exposures; exposures in
    the same filter as the current image are flagged with a leading '*'.
    """
    if self._comparison_image_list is not None:
        return self._comparison_image_list
    ref_ra = self.reading.ra * units.degree
    ref_dec = self.reading.dec * units.degree
    radius = self.radius is not None and self.radius or config.read('CUTOUTS.SINGLETS.RADIUS') * units.arcminute
    # typo fix in the status message: 'raidus' -> 'radius'
    print("Querying CADC for list of possible comparison images at RA: {}, DEC: {}, radius: {}".format(ref_ra, ref_dec, radius))
    query_result = storage.cone_search(ref_ra, ref_dec, radius, radius)
    print("Got {} possible images".format(len(query_result)))
    # NOTE(review): raw_input is Python 2 only; switch to input() when porting.
    ans = raw_input("Do you want to lookup IQ? (y/n)")
    print("Building table for presentation and selection")
    if ans == "y":
        print("Including getting fwhm which is a bit slow.")
    comparison_image_list = []
    if len(query_result['collectionID']) > 0:
        index = -1
        for row in query_result:
            expnum = row['collectionID']
            if expnum in self._bad_comparison_images:
                continue
            date = Time(row['mjdate'], format='mjd').mpc
            # skip exposures taken before 2013
            if Time(row['mjdate'], format='mjd') < Time('2013-01-01 00:00:00', format='iso'):
                continue
            exptime = row['exptime']
            if float(exptime) < 250:
                continue
            filter_name = row['filter']
            if 'U' in filter_name:
                continue
            # flag exposures taken in the same filter as the current image
            if filter_name.lower() in self.hdulist[-1].header['FILTER'].lower():
                filter_name = "* {:8s}".format(filter_name)
            fwhm = -1.0
            if ans == 'y':
                try:
                    fwhm = "{:5.2f}".format(float(storage.get_fwhm_tag(expnum, 22)))
                except Exception:
                    # was a bare except; best-effort FWHM lookup, keep -1.0 sentinel
                    pass
            index += 1
            comparison_image_list.append([index, expnum, date, exptime, filter_name, fwhm, None])
    self._comparison_image_list = Table(data=numpy.array(comparison_image_list),
                                        names=["ID", "EXPNUM", "DATE-OBS", "EXPTIME", "FILTER", "FWHM", "REFERENCE"])
    return self._comparison_image_list
returns a list of possible comparison images for the current cutout . Will query CADC to create the list when first called .
47,908
def retrieve_comparison_image(self):
    """Download the currently selected comparison image from the DB.

    Builds a SourceCutout for the selected row of comparison_image_list and
    stores it in that row's REFERENCE column.  On failure, clears the
    selection and logs the error.
    """
    collectionID = self.comparison_image_list[self.comparison_image_index]['EXPNUM']
    ref_ra = self.reading.ra * units.degree
    ref_dec = self.reading.dec * units.degree
    radius = self.radius is not None and self.radius or config.read('CUTOUTS.SINGLETS.RADIUS') * units.arcminute
    try:
        comparison = collectionID
        hdu_list = storage.ra_dec_cutout(storage.dbimages_uri(comparison), SkyCoord(ref_ra, ref_dec), radius)
        ccd = int(hdu_list[-1].header.get('EXTVER', 0))
        obs = Observation(str(comparison), 'p', ccdnum=ccd)
        # centre of the retrieved cutout
        x = hdu_list[-1].header.get('NAXIS1', 0) // 2.0
        y = hdu_list[-1].header.get('NAXIS2', 0) // 2.0
        # (removed a redundant recomputation of ref_ra/ref_dec -- identical values)
        reading = SourceReading(x, y, self.reading.x, self.reading.y, ref_ra, ref_dec, self.reading.x, self.reading.y, obs)
        self.comparison_image_list[self.comparison_image_index]["REFERENCE"] = SourceCutout(reading, hdu_list)
    except Exception as ex:
        # Use print() calls: the original Python-2-only print statements are a
        # syntax error under Python 3; the call form works in both.
        print(traceback.format_exc())
        print(ex)
        print("Failed to load comparison image;")
        self.comparison_image_index = None
        logger.error("{} {}".format(type(ex), str(ex)))
        logger.error(traceback.format_exc())
Search the DB for a comparison image for this cutout .
47,909
def check_password(self, passwd, group):
    """Check that passwd grants this login membership of the given group.

    Delegates to the gms group-management service.
    """
    return gms.isMember(self.login, passwd, group)
check that the passwd provided matches the required password .
47,910
def plant(expnums, ccd, rmin, rmax, ang, width, number=10, mmin=21.0, mmax=25.5, version='s', dry_run=False):
    """Plant artificial sources into the list of images provided.

    :param expnums: exposure numbers; the first sets the pixel bounds and the
        storage subdirectory for the Object.planted list.
    :param ccd: CCD number to process.
    :param rmin, rmax: rate-of-motion range for the generated KBOs.
    :param ang, width: sky motion angle and its half-width.
    :param number: how many artificial KBOs to generate.
    :param mmin, mmax: magnitude range.
    :param version: image version to fetch ('s' by default).
    :param dry_run: when True, plant locally but skip copying results back to storage.
    """
    filename = storage.get_image(expnums[0], ccd=ccd, version=version)
    header = fits.open(filename)[0].header
    # usable pixel area comes from the DATASEC keyword (with a sensible default)
    bounds = util.get_pixel_bounds_from_datasec_keyword(header.get('DATASEC', '[33:2080,1:4612]'))
    kbos = KBOGenerator.get_kbos(n=number,
                                 rate=(rmin, rmax),
                                 angle=(ang - width, ang + width),
                                 mag=(mmin, mmax),
                                 x=(bounds[0][0], bounds[0][1]),
                                 y=(bounds[1][0], bounds[1][1]),
                                 filename='Object.planted')
    for expnum in expnums:
        filename = storage.get_image(expnum, ccd, version)
        psf = storage.get_file(expnum, ccd, version, ext='psf.fits')
        # "fk" prefix marks the fake-source version of each image
        plant_kbos(filename, psf, kbos, get_shifts(expnum, ccd, version), "fk")
    if dry_run:
        return
    uri = storage.get_uri('Object', ext='planted', version='', subdir=str(expnums[0]) + "/ccd%s" % (str(ccd).zfill(2)))
    storage.copy('Object.planted', uri)
    for expnum in expnums:
        uri = storage.get_uri(expnum, ccd=ccd, version=version, ext='fits', prefix='fk')
        filename = os.path.basename(uri)
        storage.copy(filename, uri)
    return
Plant artificial sources into the list of images provided .
47,911
def _fit_radec(self):
    """Call fit_radec of BK (orbfit shared library), passing in the observations.

    Writes the observations to a temporary .mpc file, invokes the C routine,
    and keeps the resulting abg/residuals tempfiles open on self.
    """
    self.orbfit.fitradec.restype = ctypes.c_int
    self.orbfit.fitradec.argtypes = [ctypes.c_char_p, ctypes.c_char_p, ctypes.c_char_p]
    mpc_file = tempfile.NamedTemporaryFile(suffix='.mpc')
    for observation in self.observations:
        mpc_file.write("{}\n".format(str(observation)))
    mpc_file.seek(0)
    abg_file = tempfile.NamedTemporaryFile()
    res_file = tempfile.NamedTemporaryFile()
    # NOTE(review): NamedTemporaryFile defaults to binary mode and c_char_p
    # needs bytes under Python 3 -- this looks Python-2-only; confirm before porting.
    self.orbfit.fitradec(ctypes.c_char_p(mpc_file.name),
                         ctypes.c_char_p(abg_file.name),
                         ctypes.c_char_p(res_file.name))
    # keep handles alive (and rewound) so callers can read the fit outputs
    self.abg = abg_file
    self.abg.seek(0)
    self.residuals = res_file
    self.residuals.seek(0)
call fit_radec of BK passing in the observations .
47,912
def predict(self, date, obs_code=568):
    """Use the BK orbfit predict method to compute the source location on a date.

    :param date: epoch understood by astropy Time (UTC).
    :param obs_code: observatory code (568 = Mauna Kea).

    Side effects: sets self.coordinate, self.dra, self.ddec, self.pa, self.date.
    """
    time = Time(date, scale='utc', precision=6)
    jd = ctypes.c_double(time.jd)
    self.orbfit.predict.restype = ctypes.POINTER(ctypes.c_double * 5)
    self.orbfit.predict.argtypes = [ctypes.c_char_p, ctypes.c_double, ctypes.c_int]
    predict = self.orbfit.predict(ctypes.c_char_p(self.abg.name), jd, ctypes.c_int(obs_code))
    # returned 5-element array: ra, dec, dra, ddec, pa
    self.coordinate = coordinates.SkyCoord(predict.contents[0], predict.contents[1], unit=(units.degree, units.degree))
    self.dra = predict.contents[2]
    self.ddec = predict.contents[3]
    self.pa = predict.contents[4]
    self.date = str(time)
use the bk predict method to compute the location of the source on the given date .
47,913
def write(file, hdu, order=None, format=None):
    """Write a file in the MOP format given a MOP data hdu.

    :param file: output file path.
    :param hdu: dict with 'header' (keyword->value) and 'data' (column->list);
        'order' and 'format' entries are filled in with defaults if absent.
    :param order: deprecated; column order override (stored into hdu['order']).
    :param format: deprecated; per-column %-format override (hdu['format']).
    """
    if order or format:
        # fixed misspelling 'depricated' in the warning text
        warnings.warn('Use of <order> and <format> deprecated', DeprecationWarning)
    data = 'data'
    # Default column order: the data dict's keys.  list() is needed on
    # Python 3 where keys() is a view (dict.has_key is also gone, so use `in`).
    if not order:
        if 'order' not in hdu:
            hdu['order'] = list(hdu[data].keys())
    else:
        hdu['order'] = order
    # Default format: '%10s' for every column lacking an explicit format.
    if not format:
        if 'format' not in hdu:
            hdu['format'] = {}
        for o in hdu['order']:
            if o not in hdu['format']:
                hdu['format'][o] = '%10s'
    else:
        hdu['format'] = format
    f = open(file, 'w')
    # Header keywords/values are written in paired '## ' / '# ' lines,
    # six keywords per pair.
    kline = '## '
    vline = '# '
    header = 'header'
    num = 0
    for keyword in hdu[header]:
        kline += '%10s ' % (keyword,)
        vline += '%10s ' % str(hdu[header][keyword])
        num += 1
        if not (num % 6):
            num = 0
            f.write(kline + '\n')
            f.write(vline + '\n')
            kline = '## '
            vline = '# '
    if num > 0:
        f.write(kline + '\n')
        f.write(vline + '\n')
    # Column-name line, then one row per data index.
    f.write('## ')
    for column in hdu['order']:
        f.write(' %10s' % (column))
    f.write('\n')
    dlen = len(hdu[data][hdu['order'][0]])
    for i in range(dlen):
        f.write(' ')
        for column in hdu['order']:
            f.write(hdu['format'][column] % (hdu[data][column][i]))
            f.write(' ')
        f.write('\n')
    f.close()
    return
Write a file in the crazy MOP format given an mop data hdu .
47,914
def read(file):
    """Read a MOP-format file into a dict with 'data', 'header', 'format', 'order'.

    '## ' lines carry header/column keywords, '# ' lines header values,
    '#F' lines per-column formats; everything else is whitespace-separated
    data.  A synthetic 1-based 'ID' column is appended when absent.

    :param file: path of the file to read.
    :returns: {'data': col->list, 'header': kw->value, 'format': col->fmt,
        'order': column names}
    """
    f = open(file, 'r')
    lines = f.readlines()
    f.close()
    import re
    keywords = []
    values = []
    formats = {}
    header = {}
    cdata = {}
    # Fix: add_id was previously unbound until the first data/'##' line.
    add_id = False
    for line in lines:
        if re.match(r'^##', line):
            # keyword line: names for the following '# ' value line or data columns
            # (str methods replace the py2-only string.split/string.lstrip)
            m = line[2:].lstrip().split()
            if not m:
                sys.stderr.write("Ill formed header line in %s\n" % (file,))
                sys.stderr.write(line)
                continue
            keywords = m
            add_id = False
            continue
        if re.match(r'^# ', line):
            # value line: pairs with the most recent keyword line
            m = line[1:].lstrip().split()
            values = m
            if len(values) != len(keywords):
                sys.stderr.write("Ill formed header, keyword/value missmatach\n")
            for index in range(len(values)):
                header[keywords[index]] = values[index]
            keywords = []
            values = []
            continue
        if re.match(r'^#F', line):
            # format line: one %-format per column (plus one for synthetic ID)
            if not keywords:
                sys.stderr.write("Cann't have formats without column names...\n")
                continue
            fmts = line.lstrip().split()
            if add_id:
                fmts.append('%8d')
            for col in keywords:
                formats[col] = fmts.pop(0)
            # Fix: without this continue the '#F' line fell through and was
            # (harmlessly but noisily) processed as a malformed data row.
            continue
        # data line
        if not keywords:
            sys.stderr.write("No keywords for data columns, assuming x,y,mag,msky\n")
            # Fix: must be a list -- the original tuple broke keywords.append('ID').
            keywords = ['X', 'Y', 'MAG', 'MSKY']
        values = line.lstrip().split()
        if 'ID' not in keywords:
            keywords.append('ID')
            add_id = True
        if not cdata:
            for keyword in keywords:
                cdata[keyword] = []
        if add_id:
            # synthesize a 1-based row id
            values.append(len(cdata[keywords[0]]) + 1)
        if len(values) != len(keywords):
            sys.stderr.write("Keyword and value index have different length?\n")
            continue
        for index in range(len(values)):
            cdata[keywords[index]].append(values[index])
    hdu = {'data': cdata, 'header': header, 'format': formats, 'order': keywords}
    return hdu
Read in a file and create a data structure that is a hash with members header and data. The header is a hash of header keywords; the data is a hash of columns.
47,915
def default_logging_dict(*loggers: str, **kwargs: Any) -> DictStrAny:
    r"""Prepare a logging dict suitable for ``logging.config.dictConfig``.

    :param loggers: logger names to configure.
    :param kwargs: options merged into each logger's dict; ``level``
        defaults to ``'INFO'``.
    """
    # Fix: the body previously began with a stray bare name `r` (the mangled
    # remnant of this raw docstring), which raised NameError at call time.
    kwargs.setdefault('level', 'INFO')
    return {
        'version': 1,
        'disable_existing_loggers': True,
        'filters': {
            'ignore_errors': {'()': IgnoreErrorsFilter},
        },
        'formatters': {
            'default': {'format': '%(asctime)s [%(levelname)s:%(name)s] %(message)s'},
            'naked': {'format': u'%(message)s'},
        },
        'handlers': {
            # stdout carries DEBUG+ but filters out records that stderr handles
            'stdout': {
                'class': 'logging.StreamHandler',
                'filters': ['ignore_errors'],
                'formatter': 'default',
                'level': 'DEBUG',
                'stream': sys.stdout,
            },
            'stderr': {
                'class': 'logging.StreamHandler',
                'formatter': 'default',
                'level': 'WARNING',
                'stream': sys.stderr,
            },
        },
        'loggers': {
            logger: dict(handlers=['stdout', 'stderr'], **kwargs)
            for logger in loggers
        },
    }
r Prepare logging dict suitable with logging . config . dictConfig .
47,916
def update_sentry_logging(logging_dict: DictStrAny, sentry_dsn: Optional[str], *loggers: str, level: Union[str, int] = None, **kwargs: Any) -> None:
    r"""Enable Sentry logging if a Sentry DSN is passed.

    Adds a 'sentry' handler to ``logging_dict`` and appends it to the given
    loggers (or all configured loggers when none are named).  Loggers with a
    truthy ``ignore_sentry`` key are skipped (the key is popped either way).

    :param logging_dict: dictConfig-style logging dict, mutated in place.
    :param sentry_dsn: Sentry DSN; falsy value disables the whole update.
    :param loggers: logger names to attach the handler to.
    :param level: handler level, defaults to 'WARNING'.
    :param kwargs: extra options for the Sentry handler dict.
    """
    # Fix: the body previously began with a stray bare name `r` (the mangled
    # remnant of this raw docstring), which raised NameError at call time.
    if not sentry_dsn:
        return
    kwargs['class'] = 'raven.handlers.logging.SentryHandler'
    kwargs['dsn'] = sentry_dsn
    logging_dict['handlers']['sentry'] = dict(level=level or 'WARNING', **kwargs)
    loggers = tuple(logging_dict['loggers']) if not loggers else loggers
    for logger in loggers:
        logger_dict = logging_dict['loggers'].get(logger)
        if not logger_dict:
            continue
        if logger_dict.pop('ignore_sentry', False):
            continue
        handlers = list(logger_dict.setdefault('handlers', []))
        handlers.append('sentry')
        logger_dict['handlers'] = tuple(handlers)
r Enable Sentry logging if Sentry DSN passed .
47,917
def ossos_release_parser(table=False, data_release=parameters.RELEASE_VERSION):
    """Parse an OSSOS release detections file.

    The file is space-separated, so the astropy ascii reader is used for the
    table form instead of a CSV parser.

    :param table: when True return an astropy Table; otherwise a list of TNOs.
    :param data_release: release key into parameters.RELEASE_DETECTIONS.
    """
    names = ['cl', 'p', 'j', 'k', 'sh', 'object', 'mag', 'e_mag', 'Filt',
             'Hsur', 'dist', 'e_dist', 'Nobs', 'time', 'av_xres', 'av_yres',
             'max_x', 'max_y', 'a', 'e_a', 'e', 'e_e', 'i', 'e_i', 'Omega',
             'e_Omega', 'omega', 'e_omega', 'tperi', 'e_tperi', 'RAdeg',
             'DEdeg', 'JD', 'rate']
    if table:
        retval = Table.read(parameters.RELEASE_DETECTIONS[data_release],
                            format='ascii', guess=False, delimiter=' ',
                            data_start=0, comment='#', names=names,
                            header_start=None)
    else:
        retval = []
        # NOTE(review): this branch opens data_release itself as a file path,
        # while the table branch treats it as a key into RELEASE_DETECTIONS --
        # confirm which is intended.
        with open(data_release, 'r') as detectionsfile:
            for line in detectionsfile.readlines()[1:]:
                obj = TNO.from_string(line, version=parameters.RELEASE_DETECTIONS[data_release])
                retval.append(obj)
    return retval
extra fun as this is space - separated so using CSV parsers is not an option
47,918
def ossos_discoveries(directory=parameters.REAL_KBO_AST_DIR,
                      suffix='ast',
                      no_nt_and_u=False,
                      single_object=None,
                      all_objects=True,
                      data_release=None,
                      ):
    """Return TNO objects built from the astrometry files in directory.

    :param directory: where the .mpc/.ast/.DONE astrometry files live.
    :param suffix: extension used when matching files to release objects.
    :param no_nt_and_u: skip files containing 'nt' or starting with 'u'.
    :param single_object: restrict to files for one named object.
    :param all_objects: with data_release set, restrict to that release's objects.
    :param data_release: release key passed to ossos_release_parser.
    """
    retval = []
    files = [f for f in os.listdir(directory) if (f.endswith('mpc') or f.endswith('ast') or f.endswith('DONE'))]
    if single_object is not None:
        files = filter(lambda name: name.startswith(single_object), files)
    elif all_objects and data_release is not None:
        data_release = ossos_release_parser(table=True, data_release=data_release)
        objects = data_release['object']
        # strip the suffix (and trailing dot) to compare against object names
        files = filter(lambda name: name.partition(suffix)[0].rstrip('.') in objects, files)
    for filename in files:
        if no_nt_and_u and (filename.__contains__('nt') or filename.startswith('u')):
            continue
        mpc_filename = directory + filename
        # matching .abg orbit file lives in a sibling abg/ directory
        abg_filename = os.path.abspath(directory + '/../abg/') + "/" + os.path.splitext(filename)[0] + ".abg"
        obj = TNO(None, ast_filename=mpc_filename, abg_filename=abg_filename)
        retval.append(obj)
    return retval
Returns a list of objects holding orbfit . Orbfit objects with the observations in the Orbfit . observations field . Default is to return only the objects corresponding to the current Data Release .
47,919
def ossos_release_with_metadata():
    """Wrap release objects together with metadata from their discovery lines.

    Builds one parameters.tno() record per discovery, copying distance,
    discovery RA, magnitude and provisional name.
    """
    discoveries = []
    observations = ossos_discoveries()
    for obj in observations:
        # NOTE(review): obj is indexed like a (orbfit, orbit) pair here, while
        # ossos_discoveries returns TNO objects -- confirm the element type.
        discov = [n for n in obj[0].mpc_observations if n.discovery.is_discovery][0]
        tno = parameters.tno()
        tno.dist = obj[1].distance
        tno.ra_discov = discov.coordinate.ra.degrees
        tno.mag = discov.mag
        tno.name = discov.provisional_name
        discoveries.append(tno)
    return discoveries
Wrap the objects from the Version Releases together with the objects instantiated from fitting their mpc lines
47,920
def _kbos_from_survey_sym_model_input_file(model_file):
    """Load a Survey Simulator model file as ephem.EllipticalBody objects.

    :param model_file: VOSpace or local path of the model file; '#' lines
        and blank lines are skipped.
    """
    lines = storage.open_vos_or_local(model_file).read().split('\n')
    kbos = []
    for line in lines:
        if len(line) == 0 or line[0] == '#':
            continue
        kbo = ephem.EllipticalBody()
        values = line.split()
        kbo.name = values[8]
        # NOTE(review): j and k are kept as strings while the orbital elements
        # below are floats -- confirm that is intended.
        kbo.j = values[9]
        kbo.k = values[10]
        kbo._a = float(values[0])
        kbo._e = float(values[1])
        kbo._inc = float(values[2])
        kbo._Om = float(values[3])
        kbo._om = float(values[4])
        kbo._M = float(values[5])
        kbo._H = float(values[6])
        # model epoch JD 2453157.5 converted to pyephem's internal date scale
        epoch = ephem.date(2453157.50000 - ephem.julian_date(0))
        kbo._epoch_M = epoch
        kbo._epoch = epoch
        kbos.append(kbo)
    return kbos
Load a Survey Simulator model file as an array of ephem EllipticalBody objects .
47,921
def _parse_elcm_response_body_as_json(response):
    """Parse an eLCM response body as JSON data.

    The eLCM may prepend informational text separated by CRLF; only the last
    segment is decoded as JSON.

    :param response: requests response object.
    :returns: decoded JSON data.
    :raises ELCMInvalidResponse: if the body does not contain valid JSON.
    """
    # Fix: bind body before the try so the except clause cannot hit a
    # NameError (masking the real failure) if response.text itself raises.
    body = None
    try:
        body = response.text
        body_parts = body.split('\r\n')
        # str.split always yields at least one element, so this branch is
        # effectively always taken; kept for behavioral parity.
        if len(body_parts) > 0:
            return jsonutils.loads(body_parts[-1])
        else:
            return None
    except (TypeError, ValueError):
        raise ELCMInvalidResponse('eLCM response does not contain valid json '
                                  'data. Response is "%s".' % body)
parse eLCM response body as json data
47,922
def elcm_request(irmc_info, method, path, **kwargs):
    """Send an eLCM HTTP request to the iRMC server.

    :param irmc_info: dict with address, port, auth method, credentials and
        optional client timeout.
    :param method: HTTP method ('GET', 'POST', ...).
    :param path: URL path on the server.
    :param kwargs: optional 'headers', 'params', 'data'.
    :returns: requests response object.
    :raises SCCIInvalidInputError: for an unsupported port or auth method.
    :raises SCCIClientError: on transport failure or 401 response.
    """
    host = irmc_info['irmc_address']
    port = irmc_info.get('irmc_port', 443)
    auth_method = irmc_info.get('irmc_auth_method', 'basic')
    userid = irmc_info['irmc_username']
    password = irmc_info['irmc_password']
    client_timeout = irmc_info.get('irmc_client_timeout', 60)
    headers = kwargs.get('headers', {'Accept': 'application/json'})
    params = kwargs.get('params')
    data = kwargs.get('data')
    auth_obj = None
    try:
        # only ports 80/443 and basic/digest auth are supported
        protocol = {80: 'http', 443: 'https'}[port]
        auth_obj = {'basic': requests.auth.HTTPBasicAuth(userid, password),
                    'digest': requests.auth.HTTPDigestAuth(userid, password)}[auth_method.lower()]
    except KeyError:
        raise scci.SCCIInvalidInputError(("Invalid port %(port)d or " + "auth_method for method %(auth_method)s") % {'port': port, 'auth_method': auth_method})
    try:
        # verify=False: TLS certificate verification is intentionally disabled
        # (iRMC typically presents a self-signed certificate) -- security trade-off.
        r = requests.request(method,
                             protocol + '://' + host + path,
                             headers=headers,
                             params=params,
                             data=data,
                             verify=False,
                             timeout=client_timeout,
                             allow_redirects=False,
                             auth=auth_obj)
    except requests.exceptions.RequestException as requests_exception:
        raise scci.SCCIClientError(requests_exception)
    if r.status_code == 401:
        raise scci.SCCIClientError('UNAUTHORIZED')
    return r
send an eLCM request to the server
47,923
def elcm_profile_get_versions(irmc_info):
    """Query the eLCM profile store for its version information.

    :param irmc_info: node info dict.
    :returns: JSON version data.
    :raises SCCIClientError: on any non-200 response.
    """
    response = elcm_request(irmc_info,
                            method='GET',
                            path=URL_PATH_PROFILE_MGMT + 'version')
    if response.status_code != 200:
        raise scci.SCCIClientError(('Failed to get profile versions with '
                                    'error code %s' % response.status_code))
    return _parse_elcm_response_body_as_json(response)
send an eLCM request to get profile versions
47,924
def elcm_profile_get(irmc_info, profile_name):
    """Fetch a named profile's data from the eLCM profile store.

    :param irmc_info: node info dict.
    :param profile_name: profile to fetch.
    :returns: JSON profile data.
    :raises ELCMProfileNotFound: when the store has no such profile (404).
    :raises SCCIClientError: on any other failure status.
    """
    response = elcm_request(irmc_info,
                            method='GET',
                            path=URL_PATH_PROFILE_MGMT + profile_name)
    status = response.status_code
    if status == 200:
        return _parse_elcm_response_body_as_json(response)
    if status == 404:
        raise ELCMProfileNotFound('Profile "%s" not found '
                                  'in the profile store.' % profile_name)
    raise scci.SCCIClientError(('Failed to get profile "%(profile)s" with '
                                'error code %(error)s' % {'profile': profile_name,
                                                          'error': status}))
send an eLCM request to get profile data
47,925
def elcm_profile_create(irmc_info, param_path):
    """Ask the eLCM to create a profile for the given parameter path.

    :param irmc_info: node info dict.
    :param param_path: parameter path identifying what to profile.
    :returns: JSON session data (request is accepted asynchronously, 202).
    :raises SCCIClientError: on any non-202 response.
    """
    # profile creation can be slow, so use its dedicated timeout
    node_info = dict(irmc_info)
    node_info['irmc_client_timeout'] = PROFILE_CREATE_TIMEOUT
    response = elcm_request(node_info,
                            method='POST',
                            path=URL_PATH_PROFILE_MGMT + 'get',
                            params={'PARAM_PATH': param_path})
    if response.status_code != 202:
        raise scci.SCCIClientError(('Failed to create profile for path '
                                    '"%(param_path)s" with error code '
                                    '%(error)s' % {'param_path': param_path,
                                                   'error': response.status_code}))
    return _parse_elcm_response_body_as_json(response)
send an eLCM request to create profile
47,926
def elcm_profile_set(irmc_info, input_data):
    """Apply parameter values to the server via the eLCM 'set' endpoint.

    :param input_data: profile as a dict, or an already-serialized JSON string.
    :returns: JSON session data (202 accepted).
    :raises SCCIClientError: on any non-202 response.
    """
    if isinstance(input_data, dict):
        profile = input_data
        data = jsonutils.dumps(input_data)
    else:
        # Fix: the original indexed input_data['Server'] even when input_data
        # was a JSON string (explicitly supported just above), which raised
        # TypeError.  Parse it so the content-type check sees a dict.
        profile = jsonutils.loads(input_data)
        data = input_data
    _irmc_info = dict(irmc_info)
    _irmc_info['irmc_client_timeout'] = PROFILE_SET_TIMEOUT
    content_type = 'application/x-www-form-urlencoded'
    # HW-configuration profiles must be posted as JSON
    if profile['Server'].get('HWConfigurationIrmc'):
        content_type = 'application/json'
    resp = elcm_request(_irmc_info,
                        method='POST',
                        path=URL_PATH_PROFILE_MGMT + 'set',
                        headers={'Content-type': content_type},
                        data=data)
    if resp.status_code == 202:
        return _parse_elcm_response_body_as_json(resp)
    else:
        raise scci.SCCIClientError(('Failed to apply param values with '
                                    'error code %(error)s' % {'error': resp.status_code}))
send an eLCM request to set param values
47,927
def elcm_profile_delete(irmc_info, profile_name):
    """Remove a named profile from the eLCM profile store.

    :param irmc_info: node info dict.
    :param profile_name: profile to delete.
    :raises ELCMProfileNotFound: when the store has no such profile (404).
    :raises SCCIClientError: on any other failure status.
    """
    response = elcm_request(irmc_info,
                            method='DELETE',
                            path=URL_PATH_PROFILE_MGMT + profile_name)
    status = response.status_code
    if status == 404:
        raise ELCMProfileNotFound('Profile "%s" not found '
                                  'in the profile store.' % profile_name)
    if status != 200:
        raise scci.SCCIClientError(('Failed to delete profile "%(profile)s" '
                                    'with error code %(error)s' % {'profile': profile_name,
                                                                   'error': status}))
send an eLCM request to delete a profile
47,928
def elcm_session_list(irmc_info):
    """List every session known to the eLCM.

    :param irmc_info: node info dict.
    :returns: JSON session list.
    :raises SCCIClientError: on any non-200 response.
    """
    response = elcm_request(irmc_info, method='GET', path='/sessionInformation/')
    if response.status_code != 200:
        raise scci.SCCIClientError(('Failed to list sessions with '
                                    'error code %s' % response.status_code))
    return _parse_elcm_response_body_as_json(response)
send an eLCM request to list all sessions
47,929
def elcm_session_get_status(irmc_info, session_id):
    """Fetch the status record of a single eLCM session.

    :param irmc_info: node info dict.
    :param session_id: session to query.
    :returns: JSON status data.
    :raises ELCMSessionNotFound: when the session does not exist (404).
    :raises SCCIClientError: on any other failure status.
    """
    response = elcm_request(irmc_info,
                            method='GET',
                            path='/sessionInformation/%s/status' % session_id)
    status = response.status_code
    if status == 200:
        return _parse_elcm_response_body_as_json(response)
    if status == 404:
        raise ELCMSessionNotFound('Session "%s" does not exist' % session_id)
    raise scci.SCCIClientError(('Failed to get status of session '
                                '"%(session)s" with error code %(error)s' % {'session': session_id,
                                                                             'error': status}))
send an eLCM request to get session status
47,930
def elcm_session_terminate(irmc_info, session_id):
    """Ask the eLCM to terminate a session.

    :param irmc_info: node info dict.
    :param session_id: session to terminate.
    :raises ELCMSessionNotFound: when the session does not exist (404).
    :raises SCCIClientError: on any other failure status.
    """
    response = elcm_request(irmc_info,
                            method='DELETE',
                            path='/sessionInformation/%s/terminate' % session_id)
    status = response.status_code
    if status == 200:
        return
    if status == 404:
        raise ELCMSessionNotFound('Session "%s" does not exist' % session_id)
    raise scci.SCCIClientError(('Failed to terminate session '
                                '"%(session)s" with error code %(error)s' % {'session': session_id,
                                                                             'error': status}))
send an eLCM request to terminate a session
47,931
def elcm_session_delete(irmc_info, session_id, terminate=False):
    """Remove a session from the eLCM session list.

    :param irmc_info: node info dict.
    :param session_id: session to remove.
    :param terminate: when True, terminate the session first if it is still
        running or activated.
    :raises ELCMSessionNotFound: when the session does not exist (404).
    :raises SCCIClientError: on any other failure status.
    """
    if terminate:
        # a live session must be stopped before it can be removed
        session = elcm_session_get_status(irmc_info, session_id)
        status = session['Session']['Status']
        if status in ('running', 'activated'):
            elcm_session_terminate(irmc_info, session_id)
    response = elcm_request(irmc_info,
                            method='DELETE',
                            path='/sessionInformation/%s/remove' % session_id)
    code = response.status_code
    if code == 200:
        return
    if code == 404:
        raise ELCMSessionNotFound('Session "%s" does not exist' % session_id)
    raise scci.SCCIClientError(('Failed to remove session '
                                '"%(session)s" with error code %(error)s' % {'session': session_id,
                                                                             'error': code}))
send an eLCM request to remove a session from the session list
47,932
def backup_bios_config(irmc_info):
    """Backup the server's current BIOS configuration via an eLCM profile.

    :param irmc_info: node info dict (optionally with
        'irmc_bios_session_timeout').
    :returns: result of _process_session_data for the BACKUP_BIOS operation.
    """
    # remove any leftover BIOS profile so a fresh one can be created;
    # absence of the profile is fine
    try:
        elcm_profile_get(irmc_info=irmc_info, profile_name=PROFILE_BIOS_CONFIG)
        elcm_profile_delete(irmc_info=irmc_info, profile_name=PROFILE_BIOS_CONFIG)
    except ELCMProfileNotFound:
        pass
    session = elcm_profile_create(irmc_info=irmc_info, param_path=PARAM_PATH_BIOS_CONFIG)
    session_timeout = irmc_info.get('irmc_bios_session_timeout', BIOS_CONFIG_SESSION_TIMEOUT)
    return _process_session_data(irmc_info=irmc_info, operation='BACKUP_BIOS', session_id=session['Session']['Id'], session_timeout=session_timeout)
backup current bios configuration
47,933
def restore_bios_config(irmc_info, bios_config):
    """Restore a BIOS configuration onto the server.

    :param irmc_info: node info dict (optionally with
        'irmc_bios_session_timeout').
    :param bios_config: BIOS configuration as a dict or JSON string.
    :raises SCCIInvalidInputError: when bios_config is malformed.
    """
    def _process_bios_config():
        # validate the input and mark the BiosConfig section for execution
        try:
            if isinstance(bios_config, dict):
                input_data = bios_config
            else:
                input_data = jsonutils.loads(bios_config)
            bios_cfg = input_data['Server']['SystemConfig']['BiosConfig']
            bios_cfg['@Processing'] = 'execute'
            return input_data
        except (TypeError, ValueError, KeyError):
            raise scci.SCCIInvalidInputError(('Invalid input bios config "%s".' % bios_config))
    input_data = _process_bios_config()
    # clear any stale BIOS profile before applying the new configuration
    try:
        elcm_profile_get(irmc_info=irmc_info, profile_name=PROFILE_BIOS_CONFIG)
        elcm_profile_delete(irmc_info=irmc_info, profile_name=PROFILE_BIOS_CONFIG)
    except ELCMProfileNotFound:
        pass
    session = elcm_profile_set(irmc_info=irmc_info, input_data=input_data)
    session_timeout = irmc_info.get('irmc_bios_session_timeout', BIOS_CONFIG_SESSION_TIMEOUT)
    _process_session_data(irmc_info=irmc_info, operation='RESTORE_BIOS', session_id=session['Session']['Id'], session_timeout=session_timeout)
restore bios configuration
47,934
def get_secure_boot_mode(irmc_info):
    """Return whether secure boot is enabled on the server.

    Reads the flag out of a fresh BIOS-config backup.

    :raises SecureBootConfigNotFound: when the key is absent (older iRMC
        firmware does not expose it).
    """
    result = backup_bios_config(irmc_info=irmc_info)
    try:
        bioscfg = result['bios_config']['Server']['SystemConfig']['BiosConfig']
        return bioscfg['SecurityConfig']['SecureBootControlEnabled']
    except KeyError:
        msg = ("Failed to get secure boot mode from server %s. Upgrading iRMC "
               "firmware may resolve this issue." % irmc_info['irmc_address'])
        raise SecureBootConfigNotFound(msg)
Get the status if secure boot is enabled or not .
47,935
def _update_raid_input_data(target_raid_config, raid_input):
    """Fill the raid_input profile with drives from target_raid_config.

    Mutates (and returns) raid_input: marks the HW configuration for
    execution and rebuilds the adapter's LogicalDrives/Arrays sections from
    target_raid_config['logical_disks'].

    :param target_raid_config: dict with a 'logical_disks' list; each entry
        has 'raid_level', 'size_gb' and optionally 'physical_disks'.
    :param raid_input: RAID adapter profile dict to populate.
    :returns: the updated raid_input.
    """
    logical_disk_list = target_raid_config['logical_disks']
    raid_input['Server']['HWConfigurationIrmc'].update({'@Processing': 'execute'})
    array_info = raid_input['Server']['HWConfigurationIrmc']['Adapters']['RAIDAdapter'][0]
    array_info['LogicalDrives'] = {'LogicalDrive': []}
    array_info['Arrays'] = {'Array': []}
    for i, logical_disk in enumerate(logical_disk_list):
        physical_disks = logical_disk.get('physical_disks')
        if not physical_disks or logical_disk['raid_level'] in ('10', '50'):
            # no explicit disks (or spanned RAID 10/50): let the controller
            # choose; only the drive entry is created
            array_info['LogicalDrives']['LogicalDrive'].append({'@Action': 'Create', 'RaidLevel': logical_disk['raid_level'], 'InitMode': 'slow'})
            array_info['LogicalDrives']['LogicalDrive'][i].update({"@Number": i})
        else:
            # explicit disks: build an Array referencing each physical disk
            # and a LogicalDrive referencing that array
            arrays = {"@Number": i, "@ConfigurationType": "Setting", "PhysicalDiskRefs": {"PhysicalDiskRef": []}}
            lo_drive = {"@Number": i, "@Action": "Create", "RaidLevel": "", "ArrayRefs": {"ArrayRef": []}, "InitMode": "slow"}
            array_info['Arrays']['Array'].append(arrays)
            array_info['LogicalDrives']['LogicalDrive'].append(lo_drive)
            lo_drive.update({'RaidLevel': logical_disk['raid_level']})
            lo_drive['ArrayRefs']['ArrayRef'].append({"@Number": i})
            for element in logical_disk['physical_disks']:
                arrays['PhysicalDiskRefs']['PhysicalDiskRef'].append({'@Number': element})
        if logical_disk['size_gb'] != "MAX":
            # explicit size in GB; "MAX" means use all available capacity
            size = collections.OrderedDict()
            size['@Unit'] = 'GB'
            size['#text'] = logical_disk['size_gb']
            array_info['LogicalDrives']['LogicalDrive'][i]['Size'] = size
    return raid_input
Process raid input data .
47,936
def _get_existing_logical_drives ( raid_adapter ) : existing_logical_drives = [ ] logical_drives = raid_adapter [ 'Server' ] [ 'HWConfigurationIrmc' ] [ 'Adapters' ] [ 'RAIDAdapter' ] [ 0 ] . get ( 'LogicalDrives' ) if logical_drives is not None : for drive in logical_drives [ 'LogicalDrive' ] : existing_logical_drives . append ( drive [ '@Number' ] ) return existing_logical_drives
Collect existing logical drives on the server .
47,937
def _create_raid_adapter_profile(irmc_info):
    """Recreate the RAID adapter profile on the server.

    Deletes any existing profile first (absence is not an error), then
    creates a new one and waits for the session to finish.

    :param irmc_info: node info dict (optionally with
        'irmc_raid_session_timeout').
    :returns: result of _process_session_data for the CONFIG_RAID operation.
    """
    try:
        elcm_profile_delete(irmc_info, PROFILE_RAID_CONFIG)
    except ELCMProfileNotFound:
        # nothing to clean up
        pass
    session = elcm_profile_create(irmc_info, PARAM_PATH_RAID_CONFIG)
    timeout = irmc_info.get('irmc_raid_session_timeout', RAID_CONFIG_SESSION_TIMEOUT)
    return _process_session_data(irmc_info, 'CONFIG_RAID', session['Session']['Id'], timeout)
Attempt delete exist adapter then create new raid adapter on the server .
47,938
def create_raid_configuration(irmc_info, target_raid_config):
    """Apply a RAID configuration to the server.

    :param irmc_info: node info dict (optionally with
        'irmc_raid_session_timeout').
    :param target_raid_config: dict with a non-empty 'logical_disks' list.
    :raises ELCMValueError: when logical_disks is empty.
    """
    if len(target_raid_config['logical_disks']) < 1:
        raise ELCMValueError(message="logical_disks must not be empty")
    raid_adapter = get_raid_adapter(irmc_info)
    logical_drives = raid_adapter['Server']['HWConfigurationIrmc']['Adapters']['RAIDAdapter'][0].get('LogicalDrives')
    session_timeout = irmc_info.get('irmc_raid_session_timeout', RAID_CONFIG_SESSION_TIMEOUT)
    if logical_drives is not None:
        # existing drives must be removed before the new layout is created;
        # re-read the adapter afterwards to get a clean profile
        delete_raid_configuration(irmc_info)
        raid_adapter = get_raid_adapter(irmc_info)
    raid_input = _update_raid_input_data(target_raid_config, raid_adapter)
    session = elcm_profile_set(irmc_info, raid_input)
    _process_session_data(irmc_info, 'CONFIG_RAID', session['Session']['Id'], session_timeout)
Process raid_input then perform raid configuration into server .
47,939
def delete_raid_configuration(irmc_info):
    """Delete all logical drives on the server's RAID adapter.

    No-op when the adapter has no logical drives.

    :param irmc_info: node info dict (optionally with
        'irmc_raid_session_timeout').
    """
    raid_adapter = get_raid_adapter(irmc_info)
    existing_logical_drives = _get_existing_logical_drives(raid_adapter)
    if not existing_logical_drives:
        # nothing to delete
        return
    raid_adapter['Server']['HWConfigurationIrmc'].update({'@Processing': 'execute'})
    logical_drive = raid_adapter['Server']['HWConfigurationIrmc']['Adapters']['RAIDAdapter'][0]['LogicalDrives']['LogicalDrive']
    # mark every drive for deletion and apply the modified profile
    for drive in logical_drive:
        drive['@Action'] = 'Delete'
    session = elcm_profile_set(irmc_info, raid_adapter)
    session_timeout = irmc_info.get('irmc_raid_session_timeout', RAID_CONFIG_SESSION_TIMEOUT)
    _process_session_data(irmc_info, 'CONFIG_RAID', session['Session']['Id'], session_timeout)
    # drop the profile so the next read reflects the cleared state
    elcm_profile_delete(irmc_info, PROFILE_RAID_CONFIG)
Delete whole raid configuration or one of logical drive on the server .
47,940
def set_bios_configuration ( irmc_info , settings ) : bios_config_data = { 'Server' : { 'SystemConfig' : { 'BiosConfig' : { } } } } versions = elcm_profile_get_versions ( irmc_info ) server_version = versions [ 'Server' ] . get ( '@Version' ) bios_version = versions [ 'Server' ] [ 'SystemConfig' ] [ 'BiosConfig' ] . get ( '@Version' ) if server_version : bios_config_data [ 'Server' ] [ '@Version' ] = server_version if bios_version : bios_config_data [ 'Server' ] [ 'SystemConfig' ] [ 'BiosConfig' ] [ '@Version' ] = bios_version configs = { } for setting_param in settings : setting_name = setting_param . get ( "name" ) setting_value = setting_param . get ( "value" ) if isinstance ( setting_value , six . string_types ) : if setting_value . lower ( ) == "true" : setting_value = True elif setting_value . lower ( ) == "false" : setting_value = False try : type_config , config = BIOS_CONFIGURATION_DICTIONARY [ setting_name ] . split ( "_" ) if type_config in configs . keys ( ) : configs [ type_config ] [ config ] = setting_value else : configs . update ( { type_config : { config : setting_value } } ) except KeyError : raise BiosConfigNotFound ( "Invalid BIOS setting: %s" % setting_param ) bios_config_data [ 'Server' ] [ 'SystemConfig' ] [ 'BiosConfig' ] . update ( configs ) restore_bios_config ( irmc_info , bios_config_data )
Set BIOS configurations on the server .
47,941
def get_bios_settings ( irmc_info ) : bios_config = backup_bios_config ( irmc_info ) [ 'bios_config' ] bios_config_data = bios_config [ 'Server' ] [ 'SystemConfig' ] [ 'BiosConfig' ] settings = [ ] for setting_param in BIOS_CONFIGURATION_DICTIONARY : type_config , config = BIOS_CONFIGURATION_DICTIONARY [ setting_param ] . split ( "_" ) if config in bios_config_data . get ( type_config , { } ) : value = six . text_type ( bios_config_data [ type_config ] [ config ] ) settings . append ( { 'name' : setting_param , 'value' : value } ) return settings
Get the current BIOS settings on the server
47,942
def add_resource_context ( router : web . AbstractRouter , url_prefix : str = None , name_prefix : str = None ) -> Iterator [ Any ] : def add_resource ( url : str , get : View = None , * , name : str = None , ** kwargs : Any ) -> web . Resource : kwargs [ 'get' ] = get if url_prefix : url = '/' . join ( ( url_prefix . rstrip ( '/' ) , url . lstrip ( '/' ) ) ) if not name and get : name = get . __name__ if name_prefix and name : name = '.' . join ( ( name_prefix . rstrip ( '.' ) , name . lstrip ( '.' ) ) ) resource = router . add_resource ( url , name = name ) for method , handler in kwargs . items ( ) : if handler is None : continue resource . add_route ( method . upper ( ) , handler ) return resource yield add_resource
Context manager for adding resources for given router .
47,943
def calculate_focus ( self , reading ) : middle_index = len ( self . source . get_readings ( ) ) // 2 middle_reading = self . source . get_reading ( middle_index ) return self . convert_source_location ( middle_reading , reading )
Determines what the focal point of the downloaded image should be .
47,944
def plot_ossos_discoveries ( ax , discoveries , plot_discoveries , plot_colossos = False , split_plutinos = False ) : fc = [ 'b' , '#E47833' , 'k' ] alpha = [ 0.85 , 0.6 , 1. ] marker = [ 'o' , 'd' ] size = [ 7 , 25 ] plottable = [ ] for d in discoveries : for n in plot_discoveries : if d [ 'object' ] . startswith ( n ) : plottable . append ( d ) if plot_colossos : fainter = [ ] colossos = [ ] for n in plottable : if n [ 'object' ] in parameters . COLOSSOS : colossos . append ( n ) else : fainter . append ( n ) plot_ossos_points ( fainter , ax , marker [ 0 ] , size [ 0 ] , fc [ 0 ] , alpha [ 1 ] , 1 ) plot_ossos_points ( colossos , ax , marker [ 1 ] , size [ 1 ] , fc [ 2 ] , alpha [ 2 ] , 2 ) elif split_plutinos : raise NotImplementedError else : plot_ossos_points ( plottable , ax , marker [ 0 ] , size [ 0 ] , fc [ 0 ] , alpha [ 0 ] , 2 ) return
Plot OSSOS discoveries at their discovery locations, as provided by the Version Releases, in decimal degrees .
47,945
def get_irmc_firmware_version ( snmp_client ) : try : bmc_name = snmp_client . get ( BMC_NAME_OID ) irmc_firm_ver = snmp_client . get ( IRMC_FW_VERSION_OID ) return ( '%(bmc)s%(sep)s%(firm_ver)s' % { 'bmc' : bmc_name if bmc_name else '' , 'firm_ver' : irmc_firm_ver if irmc_firm_ver else '' , 'sep' : '-' if bmc_name and irmc_firm_ver else '' } ) except SNMPFailure as e : raise SNMPIRMCFirmwareFailure ( SNMP_FAILURE_MSG % ( "GET IRMC FIRMWARE VERSION" , e ) )
Get irmc firmware version of the node .
47,946
def get_bios_firmware_version ( snmp_client ) : try : bios_firmware_version = snmp_client . get ( BIOS_FW_VERSION_OID ) return six . text_type ( bios_firmware_version ) except SNMPFailure as e : raise SNMPBIOSFirmwareFailure ( SNMP_FAILURE_MSG % ( "GET BIOS FIRMWARE VERSION" , e ) )
Get bios firmware version of the node .
47,947
def get_server_model ( snmp_client ) : try : server_model = snmp_client . get ( SERVER_MODEL_OID ) return six . text_type ( server_model ) except SNMPFailure as e : raise SNMPServerModelFailure ( SNMP_FAILURE_MSG % ( "GET SERVER MODEL" , e ) )
Get server model of the node .
47,948
def _get_auth ( self ) : if self . version == SNMP_V3 : return cmdgen . UsmUserData ( self . security ) else : mp_model = 1 if self . version == SNMP_V2C else 0 return cmdgen . CommunityData ( self . community , mpModel = mp_model )
Return the authorization data for an SNMP request .
47,949
def get ( self , oid ) : try : results = self . cmd_gen . getCmd ( self . _get_auth ( ) , self . _get_transport ( ) , oid ) except snmp_error . PySnmpError as e : raise SNMPFailure ( SNMP_FAILURE_MSG % ( "GET" , e ) ) error_indication , error_status , error_index , var_binds = results if error_indication : raise SNMPFailure ( SNMP_FAILURE_MSG % ( "GET" , error_indication ) ) if error_status : raise SNMPFailure ( "SNMP operation '%(operation)s' failed: %(error)s at" " %(index)s" % { 'operation' : "GET" , 'error' : error_status . prettyPrint ( ) , 'index' : error_index and var_binds [ int ( error_index ) - 1 ] or '?' } ) name , val = var_binds [ 0 ] return val
Use PySNMP to perform an SNMP GET operation on a single object .
47,950
def get_next ( self , oid ) : try : results = self . cmd_gen . nextCmd ( self . _get_auth ( ) , self . _get_transport ( ) , oid ) except snmp_error . PySnmpError as e : raise SNMPFailure ( SNMP_FAILURE_MSG % ( "GET_NEXT" , e ) ) error_indication , error_status , error_index , var_binds = results if error_indication : raise SNMPFailure ( SNMP_FAILURE_MSG % ( "GET_NEXT" , error_indication ) ) if error_status : raise SNMPFailure ( "SNMP operation '%(operation)s' failed: %(error)s at" " %(index)s" % { 'operation' : "GET_NEXT" , 'error' : error_status . prettyPrint ( ) , 'index' : error_index and var_binds [ int ( error_index ) - 1 ] or '?' } ) return [ val for row in var_binds for name , val in row ]
Use PySNMP to perform an SNMP GET NEXT operation on a table object .
47,951
def set ( self , oid , value ) : try : results = self . cmd_gen . setCmd ( self . _get_auth ( ) , self . _get_transport ( ) , ( oid , value ) ) except snmp_error . PySnmpError as e : raise SNMPFailure ( SNMP_FAILURE_MSG % ( "SET" , e ) ) error_indication , error_status , error_index , var_binds = results if error_indication : raise SNMPFailure ( SNMP_FAILURE_MSG % ( "SET" , error_indication ) ) if error_status : raise SNMPFailure ( "SNMP operation '%(operation)s' failed: %(error)s at" " %(index)s" % { 'operation' : "SET" , 'error' : error_status . prettyPrint ( ) , 'index' : error_index and var_binds [ int ( error_index ) - 1 ] or '?' } )
Use PySNMP to perform an SNMP SET operation on a single object .
47,952
def filename ( self ) : if self . _filename is None : self . _filename = storage . get_file ( self . basename , self . ccd , ext = self . extension , version = self . type , prefix = self . prefix ) return self . _filename
Name if the MOP formatted file to parse .
47,953
def _parse ( self ) : with open ( self . filename , 'r' ) as fobj : lines = fobj . read ( ) . split ( '\n' ) self . header = MOPHeader ( self . subfmt ) . parser ( lines ) self . data = MOPDataParser ( self . header ) . parse ( lines )
read in a file and return a MOPFile object .
47,954
def table ( self ) : if self . _table is None : column_names = [ ] for fileid in self . header . file_ids : for column_name in self . header . column_names : column_names . append ( "{}_{}" . format ( column_name , fileid ) ) column_names . append ( "ZP_{}" . format ( fileid ) ) if len ( column_names ) > 0 : self . _table = Table ( names = column_names ) else : self . _table = Table ( ) return self . _table
The astropy . table . Table object that will contain the data result
47,955
def parser ( self , lines ) : while len ( lines ) > 0 : if lines [ 0 ] . startswith ( '##' ) and lines [ 1 ] . startswith ( '# ' ) : self . _header_append ( lines . pop ( 0 ) , lines . pop ( 0 ) ) elif lines [ 0 ] . startswith ( '# ' ) : self . _append_file_id ( lines . pop ( 0 ) ) elif lines [ 0 ] . startswith ( '##' ) : self . _set_column_names ( lines . pop ( 0 ) [ 2 : ] ) else : return self raise IOError ( "Failed trying to read header" )
Given a set of lines parse the into a MOP Header
47,956
def _compute_mjd ( self , kw , val ) : try : idx = kw . index ( 'MJD-OBS-CENTER' ) except ValueError : return if len ( val ) == len ( kw ) : return if len ( val ) - 2 != len ( kw ) : raise ValueError ( "convert: keyword/value lengths don't match: {}/{}" . format ( kw , val ) ) val . insert ( idx , Time ( "{} {} {}" . format ( val . pop ( idx ) , val . pop ( idx ) , val . pop ( idx ) ) , format = 'mpc' , scale = 'utc' ) . mjd ) logging . debug ( "Computed MJD: {}" . format ( val [ idx ] ) )
Sometimes the MJD - OBS - CENTER keyword maps to a three - component string instead of a single value .
47,957
def getExpInfo ( expnum ) : col_names = [ 'object' , 'e.expnum' , 'mjdate' , 'uttime' , 'filter' , 'elongation' , 'obs_iq_refccd' , 'triple' , 'qso_status' ] sql = "SELECT " sep = " " for col_name in col_names : sql = sql + sep + col_name sep = "," sql = sql + " FROM bucket.exposure e " sql = sql + " JOIN bucket.circumstance c ON e.expnum=c.expnum " sql = sql + " LEFT JOIN triple_members t ON e.expnum=t.expnum " sql = sql + " WHERE e.expnum=%d " % ( expnum ) cfeps . execute ( sql ) rows = cfeps . fetchall ( ) result = { } for idx in range ( len ( rows [ 0 ] ) ) : result [ col_names [ idx ] ] = rows [ 0 ] [ idx ] return ( result )
Return a dictionary of information about a particular exposure
47,958
def getTripInfo ( triple ) : col_names = [ 'mjdate' , 'filter' , 'elongation' , 'discovery' , 'checkup' , 'recovery' , 'iq' , 'block' ] sql = "SELECT mjdate md," sql = sql + " filter, avg(elongation), d.id, checkup.checkup, recovery.recovery , avg(obs_iq_refccd), b.qname " sql = sql + "FROM triple_members t JOIN bucket.exposure e ON t.expnum=e.expnum " sql = sql + "JOIN bucket.blocks b ON b.expnum=e.expnum " sql = sql + "JOIN bucket.circumstance c on e.expnum=c.expnum " sql = sql + "LEFT JOIN discovery d ON t.triple=d.triple " sql = sql + "LEFT JOIN checkup ON t.triple=checkup.triple " sql = sql + "LEFT JOIN recovery ON t.triple=recovery.triple " sql = sql + "WHERE t.triple=%s " sql = sql + "GROUP BY t.triple ORDER BY t.triple " cfeps . execute ( sql , ( triple , ) ) rows = cfeps . fetchall ( ) result = { } for idx in range ( len ( rows [ 0 ] ) ) : result [ col_names [ idx ] ] = rows [ 0 ] [ idx ] return result
Return a dictionary of information about a particular triple
47,959
def getExpnums ( pointing , night = None ) : if night : night = " floor(e.mjdate-0.0833)=%d " % ( night ) else : night = '' sql = "SELECT e.expnum " sql = sql + "FROM bucket.exposure e " sql = sql + "JOIN bucket.association a on e.expnum=a.expnum " sql = sql + "WHERE a.pointing=" + str ( pointing ) + " AND " + night sql = sql + " ORDER BY mjdate, uttime DESC " cfeps . execute ( sql ) return ( cfeps . fetchall ( ) )
Get all exposures of specified pointing ID .
47,960
def getTriples ( pointing ) : sql = "SELECT id FROM triples t join triple_members m ON t.id=m.triple" sql += " join bucket.exposure e on e.expnum=m.expnum " sql += " WHERE pointing=%s group by id order by e.expnum " cfeps . execute ( sql , ( pointing , ) ) return ( cfeps . fetchall ( ) )
Get all triples of a specified pointing ID .
47,961
def createNewTriples ( Win ) : win . help ( "Building list of exposures to look for triples" ) cols = ( 'e.expnum' , 'object' , 'mjdate' , 'uttime' , 'elongation' , 'filter' , 'obs_iq_refccd' , 'qso_status' ) header = '%6s %-10s%-12s%10s%10s%10s%8s%10s' % cols pointings = getNewTriples ( ) num_p = len ( pointings ) for pointing in pointings : pid = pointing [ 0 ] mjd = pointing [ 1 ] expnums = getExpnums ( pointing = pid , night = mjd ) num_p = num_p - 1 while ( 1 ) : win . help ( "Select (space) members of triplets - %d remaining" % num_p ) explist = [ ] choices = [ ] current_date = '' for expnum in expnums : info = getExpInfo ( expnum [ 0 ] ) row = ( ) if not str ( info [ 'triple' ] ) == 'None' : continue if str ( info [ 'obs_iq_refccd' ] ) == 'None' : info [ 'obs_iq_refccd' ] = - 1.0 choices . append ( '%6d %10s %15s %10s %8.2f %10s %8.2f %10s' % ( int ( info [ 'e.expnum' ] ) , str ( info [ 'object' ] ) , str ( info [ 'mjdate' ] ) , str ( info [ 'uttime' ] ) , float ( str ( info [ 'elongation' ] ) ) , str ( info [ 'filter' ] ) , float ( str ( info [ 'obs_iq_refccd' ] ) ) , str ( info [ 'qso_status' ] ) ) ) explist . append ( expnum [ 0 ] ) if len ( choices ) < 3 : break choice_list = win . list ( header , choices ) if choice_list == None : break if len ( choice_list ) != 3 : win . help ( "Must have 3 members to make a tripple" ) continue sql = "INSERT INTO triples (id, pointing ) VALUES ( NULL, %s ) " cfeps . execute ( sql , ( pid , ) ) sql = "SELECT id FROM triples WHERE pointing=%s order by id desc" cfeps . execute ( sql , ( pid , ) ) ttt = cfeps . fetchall ( ) triple = ttt [ 0 ] [ 0 ] win . help ( str ( triple ) ) sql = "INSERT INTO triple_members (triple, expnum) VALUES ( %s, %s)" win . help ( sql ) for exp in choice_list : cfeps . execute ( sql , ( triple , explist [ exp ] ) ) return ( 0 )
Add entries to the triples tables based on new images in the db
47,962
def setDiscoveryTriples ( win , table = "discovery" ) : win . help ( "Getting a list of pointings with triples from the CFEPS db" ) pointings = getPointingsWithTriples ( ) win . help ( "Select the " + table + " triple form the list..." ) import time for pointing in pointings : header = "%10s %10s %8s %10s %8s" % ( pointing [ 1 ] , 'mjdate' , 'Elongation' , 'Filter' , 'IQ' ) triples = getTriples ( pointing = pointing [ 0 ] ) choices = [ ] triplist = [ ] no_type = 0 previous_list = [ ] for triple in triples : tripinfo = getTripInfo ( triple [ 0 ] ) if not tripinfo [ table ] == None : previous_list . append ( triple [ 0 ] ) triplist . append ( triple ) if str ( tripinfo [ 'iq' ] ) == 'None' : tripinfo [ 'iq' ] = - 1.0 obs_type = ' ' if tripinfo [ 'discovery' ] : obs_type = 'D' elif tripinfo [ 'checkup' ] : obs_type = 'C' elif tripinfo [ 'recovery' ] : obs_type = 'R' if obs_type == ' ' : no_type += 1 line = ( obs_type , tripinfo [ 'mjdate' ] , tripinfo [ 'elongation' ] , tripinfo [ 'filter' ] , tripinfo [ 'iq' ] , tripinfo [ 'block' ] ) choices . append ( '%10s %10s %8.2f %10s %8.2f %8s' % line ) if len ( choices ) == 0 or no_type == 0 : continue win . help ( "Choose a " + table + " triple (space) [no choice means skip] then press enter\n (q) to exit" ) choice = win . list ( header , choices ) if choice == None : win . help ( "Loading next triple" ) break if len ( choice ) != 1 : win . help ( "Loading next triple\n" ) continue discovery_triple = triplist [ choice [ 0 ] ] for triple in previous_list : sql = "DELETE FROM " + table + " WHERE triple=%s " cfeps . execute ( sql , triple ) sql = "INSERT INTO " + table + " ( triple ) VALUES ( %s ) " cfeps . execute ( sql , discovery_triple )
Provide user with a list of triples that could be discovery triples
47,963
def fits_list ( filter , root , fnames ) : import re , pyfits , wcsutil for file in fnames : if re . match ( filter , file ) : fh = pyfits . open ( file ) for ext in fh : obj = ext . header . get ( 'OBJECT' , file ) dx = ext . header . get ( 'NAXIS1' , None ) dy = ext . header . get ( 'NAXIS2' , None ) wcs = wcsutil . WCSObject ( ext ) ( x1 , y1 ) = wcs . xy2rd ( ( 1 , 1 , ) ) ( x2 , y2 ) = wcs . xy2rd ( ( dx , dy ) ) ccds = [ x1 , y1 , x2 , y2 ] pointing = { 'label' : obj , 'camera' : ccds } return files
Get a list of files matching filter in directory root
47,964
def load_fis ( dir = None ) : if dir is None : import tkFileDialog try : dir = tkFileDialog . askdirectory ( ) except : return if dir is None : return None from os . path import walk walk ( dir , fits_list , "*.fits" )
Load fits images in a directory
47,965
def do_objs ( kbos ) : import orbfit , ephem , math import re re_string = w . FilterVar . get ( ) vlist = [ ] for name in kbos : if not re . search ( re_string , name ) : continue vlist . append ( name ) if type ( kbos [ name ] ) == type ( ephem . EllipticalBody ( ) ) : kbos [ name ] . compute ( w . date . get ( ) ) ra = kbos [ name ] . ra dec = kbos [ name ] . dec a = math . radians ( 10.0 / 3600.0 ) b = a ang = 0.0 color = 'blue' yoffset = + 10 xoffset = + 10 else : yoffset = - 10 xoffset = - 10 file = kbos [ name ] jdate = ephem . julian_date ( w . date . get ( ) ) obs = 568 try : position = orbfit . predict ( file , jdate , obs ) except : continue ra = math . radians ( position [ 0 ] ) dec = math . radians ( position [ 1 ] ) a = math . radians ( position [ 2 ] / 3600.0 ) b = math . radians ( position [ 3 ] / 3600.0 ) ang = math . radians ( position [ 4 ] ) if ( a > math . radians ( 1.0 ) ) : color = 'green' else : color = 'black' if w . show_ellipse . get ( ) == 1 : if ( a < math . radians ( 5.0 ) ) : w . create_ellipse ( ra , dec , a , b , ang ) if ( a < math . radians ( 1.0 ) ) : w . create_point ( ra , dec , size = 2 , color = color ) if w . show_labels . get ( ) == 1 : w . label ( ra , dec , name , offset = [ xoffset , yoffset ] ) vlist . sort ( ) for v in vlist : w . objList . insert ( END , v ) w . plot_pointings ( )
Draw the actual plot
47,966
def eps ( self ) : import tkFileDialog , tkMessageBox filename = tkFileDialog . asksaveasfilename ( message = "save postscript to file" , filetypes = [ 'eps' , 'ps' ] ) if filename is None : return self . postscript ( file = filename )
Print the canvas to a postscript file
47,967
def relocate ( self ) : name = self . SearchVar . get ( ) if kbos . has_key ( name ) : import orbfit , ephem , math jdate = ephem . julian_date ( w . date . get ( ) ) try : ( ra , dec , a , b , ang ) = orbfit . predict ( kbos [ name ] , jdate , 568 ) except : return ra = math . radians ( ra ) dec = math . radians ( dec ) elif mpc_objs . has_key ( name ) : ra = mpc_objs [ name ] . ra dec = mpc_objs [ name ] . dec self . recenter ( ra , dec ) self . create_point ( ra , dec , color = 'blue' , size = 4 )
Move to the position of self . SearchVar
47,968
def create_ellipse ( self , xcen , ycen , a , b , ang , resolution = 40.0 ) : import math e1 = [ ] e2 = [ ] ang = ang - math . radians ( 90 ) for i in range ( 0 , int ( resolution ) + 1 ) : x = ( - 1 * a + 2 * a * float ( i ) / resolution ) y = 1 - ( x / a ) ** 2 if y < 1E-6 : y = 1E-6 y = math . sqrt ( y ) * b ptv = self . p2c ( ( x * math . cos ( ang ) + y * math . sin ( ang ) + xcen , y * math . cos ( ang ) - x * math . sin ( ang ) + ycen ) ) y = - 1 * y ntv = self . p2c ( ( x * math . cos ( ang ) + y * math . sin ( ang ) + xcen , y * math . cos ( ang ) - x * math . sin ( ang ) + ycen ) ) e1 . append ( ptv ) e2 . append ( ntv ) e2 . reverse ( ) e1 . extend ( e2 ) self . create_line ( e1 , fill = 'red' , width = 1 )
Plot ellipse at x y with size a b and orientation ang
47,969
def create_pointing ( self , event ) : import math ( ra , dec ) = self . c2p ( ( self . canvasx ( event . x ) , self . canvasy ( event . y ) ) ) this_camera = camera ( camera = self . camera . get ( ) ) ccds = this_camera . getGeometry ( ra , dec ) items = [ ] for ccd in ccds : ( x1 , y1 ) = self . p2c ( ( ccd [ 0 ] , ccd [ 1 ] ) ) ( x2 , y2 ) = self . p2c ( ( ccd [ 2 ] , ccd [ 3 ] ) ) item = self . create_rectangle ( x1 , y1 , x2 , y2 ) items . append ( item ) label = { } label [ 'text' ] = w . plabel . get ( ) label [ 'id' ] = self . label ( this_camera . ra , this_camera . dec , label [ 'text' ] ) self . pointings . append ( { "label" : label , "items" : items , "camera" : this_camera } ) self . current_pointing ( len ( self . pointings ) - 1 )
Plot the sky coverage of pointing at event . x event . y on the canavas
47,970
def getGeometry ( self , ra = None , dec = None ) : import math , ephem ccds = [ ] if ra is None : ra = self . ra if dec is None : dec = self . dec self . ra = ephem . hours ( ra ) self . dec = ephem . degrees ( dec ) for geo in self . geometry [ self . camera ] : ycen = math . radians ( geo [ "dec" ] ) + dec xcen = math . radians ( geo [ "ra" ] ) / math . cos ( ycen ) + ra dy = math . radians ( geo [ "ddec" ] ) dx = math . radians ( geo [ "dra" ] / math . cos ( ycen ) ) ccds . append ( [ xcen - dx / 2.0 , ycen - dy / 2.0 , xcen + dx / 2.0 , ycen + dy / 2.0 ] ) return ccds
Return an array of rectangles that represent the ra dec corners of the FOV
47,971
def produce_fake_hash ( x ) : h = np . random . binomial ( 1 , 0.5 , ( x . shape [ 0 ] , 1024 ) ) packed = np . packbits ( h , axis = - 1 ) . view ( np . uint64 ) return zounds . ArrayWithUnits ( packed , [ x . dimensions [ 0 ] , zounds . IdentityDimension ( ) ] )
Produce random binary features totally irrespective of the content of x but in the same shape as x .
47,972
def _parse_raw_bytes ( raw_bytes ) : bytes_list = [ int ( x , base = 16 ) for x in raw_bytes . split ( ) ] return bytes_list [ 0 ] , bytes_list [ 1 ] , bytes_list [ 2 : ]
Convert a string of hexadecimal values to decimal values .
47,973
def _send_raw_command ( ipmicmd , raw_bytes ) : netfn , command , data = _parse_raw_bytes ( raw_bytes ) response = ipmicmd . raw_command ( netfn , command , data = data ) return response
Use IPMI command object to send raw ipmi command to BMC
47,974
def get_tpm_status ( d_info ) : ipmicmd = ipmi_command . Command ( bmc = d_info [ 'irmc_address' ] , userid = d_info [ 'irmc_username' ] , password = d_info [ 'irmc_password' ] ) try : response = _send_raw_command ( ipmicmd , GET_TPM_STATUS ) if response [ 'code' ] != 0 : raise IPMIFailure ( "IPMI operation '%(operation)s' failed: %(error)s" % { 'operation' : "GET TMP status" , 'error' : response . get ( 'error' ) } ) out = ' ' . join ( '{:02X}' . format ( x ) for x in response [ 'data' ] ) return out is not None and out [ - 5 : ] == 'C0 C0' except ipmi_exception . IpmiException as e : raise IPMIFailure ( "IPMI operation '%(operation)s' failed: %(error)s" % { 'operation' : "GET TMP status" , 'error' : e } )
Get the TPM support status .
47,975
def _pci_seq ( ipmicmd ) : for i in range ( 1 , 0xff + 1 ) : try : res = _send_raw_command ( ipmicmd , GET_PCI % hex ( i ) ) yield i , res except ipmi_exception . IpmiException as e : raise IPMIFailure ( "IPMI operation '%(operation)s' failed: %(error)s" % { 'operation' : "GET PCI device quantity" , 'error' : e } )
Get the output of the raw IPMI command together with the ordinal numbers .
47,976
def get_pci_device ( d_info , pci_device_ids ) : ipmicmd = ipmi_command . Command ( bmc = d_info [ 'irmc_address' ] , userid = d_info [ 'irmc_username' ] , password = d_info [ 'irmc_password' ] ) response = itertools . takewhile ( lambda y : ( y [ 1 ] [ 'code' ] != 0xC9 and y [ 1 ] . get ( 'error' ) is None ) , _pci_seq ( ipmicmd ) ) def _pci_count ( accm , v ) : out = v [ 1 ] [ 'data' ] pci_id = "0x{:02x}{:02x}/0x{:02x}{:02x}" . format ( out [ 7 ] , out [ 6 ] , out [ 9 ] , out [ 8 ] ) return accm + 1 if pci_id in pci_device_ids else accm device_count = functools . reduce ( _pci_count , response , 0 ) return device_count
Get quantity of PCI devices .
47,977
def defaults ( current : dict , * args : AnyMapping ) -> dict : r for data in args : for key , value in data . items ( ) : current . setdefault ( key , value ) return current
Override the current dict with default values .
47,978
def validate_func_factory ( validator_class : Any ) -> ValidateFunc : def validate_func ( schema : AnyMapping , pure_data : AnyMapping ) -> AnyMapping : return validator_class ( schema ) . validate ( pure_data ) return validate_func
Provide default function for Schema validation .
47,979
def moreData ( ra , dec , box ) : import cfhtCutout cdata = { 'ra_deg' : ra , 'dec_deg' : dec , 'radius_deg' : 0.2 } inter = cfhtCutout . find_images ( cdata , 0.2 )
Search the CFHT archive for more images of this location
47,980
def xpacheck ( ) : import os f = os . popen ( 'xpaaccess ds9' ) l = f . readline ( ) f . close ( ) if l . strip ( ) != 'yes' : logger . debug ( "\t Can't get ds9 access, xpaccess said: %s" % ( l . strip ( ) ) ) return ( False ) return ( True )
Check if xpa is running
47,981
def mark ( x , y , label = None ) : if label is not None : os . system ( "xpaset -p ds9 regions color red " ) cmd = "echo 'image; text %d %d # text={%s}' | xpaset ds9 regions " % ( x , y , label ) else : os . system ( "xpaset -p ds9 regions color blue" ) cmd = "echo 'image; circle %d %d 10 ' | xpaset ds9 regions " % ( x , y ) os . system ( cmd ) return
Mark a circle on the current image
47,982
def display ( url ) : import os oscmd = "curl --silent -g --fail --max-time 1800 --user jkavelaars '%s'" % ( url ) logger . debug ( oscmd ) os . system ( oscmd + ' | xpaset ds9 fits' ) return
Display a file in ds9
47,983
def inv ( self ) : self . x , self . y = self . y , self . x self . _x_ , self . _y_ = self . _y_ , self . _x_ self . xfac , self . yfac = 1 / self . yfac , 1 / self . xfac self . _xfac_ , self . _yfac_ = 1 / self . _yfac_ , 1 / self . _xfac_ self . _u = 1 / self . _u . conj ( )
Invert the transform .
47,984
def matrix ( self , full = False , keeppads = True ) : v = np . fft . hfft ( self . _u , n = self . N ) / self . N idx = sum ( np . ogrid [ 0 : self . N , - self . N : 0 ] ) C = v [ idx ] if keeppads : a = self . _yfac_ . copy ( ) b = self . _xfac_ . copy ( ) else : a = self . yfac . copy ( ) b = self . xfac . copy ( ) C = self . _unpad ( C , 0 , True ) C = self . _unpad ( C , 1 , False ) a = a . reshape ( - 1 , 1 ) if not full : return a , b , C else : return a * C * b
Return matrix form of the integral transform .
47,985
def _pad ( self , a , axis , extrap , out ) : assert a . shape [ axis ] == self . Nin axis %= a . ndim to_axis = [ 1 ] * a . ndim to_axis [ axis ] = - 1 Npad = self . N - self . Nin if out : _Npad , Npad_ = Npad - Npad // 2 , Npad // 2 else : _Npad , Npad_ = Npad // 2 , Npad - Npad // 2 try : _extrap , extrap_ = extrap except ( TypeError , ValueError ) : _extrap = extrap_ = extrap if isinstance ( _extrap , bool ) : if _extrap : end = np . take ( a , [ 0 ] , axis = axis ) ratio = np . take ( a , [ 1 ] , axis = axis ) / end exp = np . arange ( - _Npad , 0 ) . reshape ( to_axis ) _a = end * ratio ** exp else : _a = np . zeros ( a . shape [ : axis ] + ( _Npad , ) + a . shape [ axis + 1 : ] ) elif _extrap == 'const' : end = np . take ( a , [ 0 ] , axis = axis ) _a = np . repeat ( end , _Npad , axis = axis ) else : raise ValueError ( "left extrap not supported" ) if isinstance ( extrap_ , bool ) : if extrap_ : end = np . take ( a , [ - 1 ] , axis = axis ) ratio = end / np . take ( a , [ - 2 ] , axis = axis ) exp = np . arange ( 1 , Npad_ + 1 ) . reshape ( to_axis ) a_ = end * ratio ** exp else : a_ = np . zeros ( a . shape [ : axis ] + ( Npad_ , ) + a . shape [ axis + 1 : ] ) elif extrap_ == 'const' : end = np . take ( a , [ - 1 ] , axis = axis ) a_ = np . repeat ( end , Npad_ , axis = axis ) else : raise ValueError ( "right extrap not supported" ) return np . concatenate ( ( _a , a , a_ ) , axis = axis )
Add padding to an array .
47,986
def _unpad ( self , a , axis , out ) : assert a . shape [ axis ] == self . N Npad = self . N - self . Nin if out : _Npad , Npad_ = Npad - Npad // 2 , Npad // 2 else : _Npad , Npad_ = Npad // 2 , Npad - Npad // 2 return np . take ( a , range ( _Npad , self . N - Npad_ ) , axis = axis )
Undo padding in an array .
47,987
def check ( self , F ) : assert F . ndim == 1 , "checker only supports 1D" f = self . xfac * F fabs = np . abs ( f ) iQ1 , iQ3 = np . searchsorted ( fabs . cumsum ( ) , np . array ( [ 0.25 , 0.75 ] ) * fabs . sum ( ) ) assert 0 != iQ1 != iQ3 != self . Nin , "checker giving up" fabs_l = fabs [ : iQ1 ] . mean ( ) fabs_m = fabs [ iQ1 : iQ3 ] . mean ( ) fabs_r = fabs [ iQ3 : ] . mean ( ) if fabs_l > fabs_m : warnings . warn ( "left wing seems heavy: {:.2g} vs {:.2g}, " "change tilt and mind convergence" . format ( fabs_l , fabs_m ) , RuntimeWarning ) if fabs_m < fabs_r : warnings . warn ( "right wing seems heavy: {:.2g} vs {:.2g}, " "change tilt and mind convergence" . format ( fabs_m , fabs_r ) , RuntimeWarning ) if fabs [ 0 ] > fabs [ 1 ] : warnings . warn ( "left tail may blow up: {:.2g} vs {:.2g}, " "change tilt or avoid extrapolation" . format ( f [ 0 ] , f [ 1 ] ) , RuntimeWarning ) if fabs [ - 2 ] < fabs [ - 1 ] : warnings . warn ( "right tail may blow up: {:.2g} vs {:.2g}, " "change tilt or avoid extrapolation" . format ( f [ - 2 ] , f [ - 1 ] ) , RuntimeWarning ) if f [ 0 ] * f [ 1 ] <= 0 : warnings . warn ( "left tail looks wiggly: {:.2g} vs {:.2g}, " "avoid extrapolation" . format ( f [ 0 ] , f [ 1 ] ) , RuntimeWarning ) if f [ - 2 ] * f [ - 1 ] <= 0 : warnings . warn ( "right tail looks wiggly: {:.2g} vs {:.2g}, " "avoid extrapolation" . format ( f [ - 2 ] , f [ - 1 ] ) , RuntimeWarning )
Rough sanity checks on the input function .
47,988
def fft ( x , axis = - 1 , padding_samples = 0 ) : if padding_samples > 0 : padded = np . concatenate ( [ x , np . zeros ( ( len ( x ) , padding_samples ) , dtype = x . dtype ) ] , axis = axis ) else : padded = x transformed = np . fft . rfft ( padded , axis = axis , norm = 'ortho' ) sr = audio_sample_rate ( int ( Seconds ( 1 ) / x . dimensions [ axis ] . frequency ) ) scale = LinearScale . from_sample_rate ( sr , transformed . shape [ - 1 ] ) new_dimensions = list ( x . dimensions ) new_dimensions [ axis ] = FrequencyDimension ( scale ) return ArrayWithUnits ( transformed , new_dimensions )
Apply an FFT along the given dimension and with the specified amount of zero - padding
47,989
def set_bias ( self , bias ) : self . x_offset += ( bias - self . _bias ) self . _bias = bias self . _build_cdict ( )
Adjusts the image bias .
47,990
def set_contrast ( self , contrast ) : self . _contrast = contrast self . x_spread = 2 * ( 1.0 - contrast ) self . y_spread = 2.0 - 2 * ( 1.0 - contrast ) self . _build_cdict ( )
Adjusts the image contrast .
47,991
def _copy ( src , dst , src_is_storage , dst_is_storage ) : if src_is_storage and dst_is_storage : system_src = get_instance ( src ) system_dst = get_instance ( dst ) if system_src is system_dst : if system_src . relpath ( src ) == system_dst . relpath ( dst ) : raise same_file_error ( "'%s' and '%s' are the same file" % ( src , dst ) ) try : return system_dst . copy ( src , dst ) except ( UnsupportedOperation , ObjectException ) : pass for caller , called , method in ( ( system_dst , system_src , 'copy_from_%s' ) , ( system_src , system_dst , 'copy_to_%s' ) ) : if hasattr ( caller , method % called . storage ) : try : return getattr ( caller , method % called . storage ) ( src , dst , called ) except ( UnsupportedOperation , ObjectException ) : continue with cos_open ( src , 'rb' ) as fsrc : with cos_open ( dst , 'wb' ) as fdst : for stream in ( fsrc , fdst ) : try : buffer_size = getattr ( stream , '_buffer_size' ) break except AttributeError : continue else : buffer_size = COPY_BUFSIZE copyfileobj ( fsrc , fdst , buffer_size )
Copies file from source to destination
47,992
def copy(src, dst):
    """Copies a source file to a destination file or directory.

    Delegates to :func:`shutil.copy` when neither path refers to an
    object storage location.
    """
    src, src_is_storage = format_and_is_storage(src)
    dst, dst_is_storage = format_and_is_storage(dst)

    # Purely local copy: the standard library handles it.
    if not (src_is_storage or dst_is_storage):
        return shutil_copy(src, dst)

    with handle_os_exceptions():
        if not hasattr(dst, 'read'):
            try:
                if isdir(dst):
                    # Copying into a directory: keep the source file name.
                    dst = join(dst, basename(src))
                elif not isdir(dirname(dst)):
                    raise IOError("No such file or directory: '%s'" % dst)
            except ObjectPermissionError:
                # Unable to check the destination; let the copy itself
                # fail if the path is actually invalid.
                pass
        _copy(src, dst, src_is_storage, dst_is_storage)
Copies a source file to a destination file or directory .
47,993
def copyfile(src, dst, follow_symlinks=True):
    """Copies a source file to a destination file.

    Delegates to :func:`shutil.copyfile` when neither path refers to an
    object storage location.
    """
    src, src_is_storage = format_and_is_storage(src)
    dst, dst_is_storage = format_and_is_storage(dst)

    # Purely local copy: the standard library handles it.
    if not (src_is_storage or dst_is_storage):
        return shutil_copyfile(src, dst, follow_symlinks=follow_symlinks)

    with handle_os_exceptions():
        try:
            # The destination's parent must exist when dst is a path.
            if not hasattr(dst, 'read') and not isdir(dirname(dst)):
                raise IOError("No such file or directory: '%s'" % dst)
        except ObjectPermissionError:
            # Unable to check the parent directory; proceed optimistically.
            pass
        _copy(src, dst, src_is_storage, dst_is_storage)
Copies a source file to a destination file .
47,994
def _handle_client_error():
    """Convert boto client errors to this package's IO exceptions.

    Known error codes are re-raised as their mapped exception type;
    anything else propagates unchanged.
    """
    try:
        yield
    except _ClientError as exception:
        error = exception.response['Error']
        code = error['Code']
        if code in _ERROR_CODES:
            raise _ERROR_CODES[code](error['Message'])
        raise
Handle boto exceptions and convert them to the package's IO exceptions.
47,995
def _get_session(self):
    """S3 Boto3 session, created lazily and cached on the instance."""
    if self._session is None:
        session_kwargs = self._storage_parameters.get('session', dict())
        self._session = _boto3.session.Session(**session_kwargs)
    return self._session
S3 Boto3 Session .
47,996
def _get_client(self):
    """S3 Boto3 client built from the configured storage parameters."""
    client_kwargs = self._storage_parameters.get('client', dict())
    if self._unsecure:
        # Copy before mutating so the caller-supplied parameters dict
        # is never modified in place.
        client_kwargs = client_kwargs.copy()
        client_kwargs['use_ssl'] = False
    return self._get_session().client("s3", **client_kwargs)
S3 Boto3 client
47,997
def _handle_azure_exception():
    """Convert Azure HTTP errors to this package's IO exceptions.

    Known status codes are re-raised as their mapped exception type;
    anything else propagates unchanged.
    """
    try:
        yield
    except _AzureHttpError as exception:
        converted = _ERROR_CODES.get(exception.status_code)
        if converted is not None:
            raise converted(str(exception))
        raise
Handles Azure exceptions and converts them to the package's IO exceptions.
47,998
def _properties_model_to_dict(properties):
    """Convert a properties model object to a plain dict.

    Nested model objects (detected by "models" appearing in their
    module name) are converted recursively; ``None`` values and empty
    dicts are dropped from the result.
    """
    result = {}
    for attr in properties.__dict__:
        value = getattr(properties, attr)
        if hasattr(value, '__module__') and 'models' in value.__module__:
            # Nested model: flatten it recursively.
            value = _properties_model_to_dict(value)
        empty = value is None or (isinstance(value, dict) and not value)
        if not empty:
            result[attr] = value
    return result
Convert properties model to dict .
47,999
def _get_endpoint(self, sub_domain):
    """Build the Azure endpoint URL from the storage parameters.

    Stores the URL on ``self._endpoint`` and returns the account name
    together with the regex-escaped endpoint suffix.
    """
    parameters = self._storage_parameters or dict()
    account_name = parameters.get('account_name')
    if not account_name:
        raise ValueError('"account_name" is required for Azure storage')
    suffix = parameters.get('endpoint_suffix', 'core.windows.net')
    scheme = 'http' if self._unsecure else 'https'
    self._endpoint = '%s://%s.%s.%s' % (
        scheme, account_name, sub_domain, suffix)
    return account_name, suffix.replace('.', r'\.')
Get endpoint information from storage parameters .