idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
47,700
def change_user_name(self, usrname, newusrname, callback=None):
    """Rename an existing camera user.

    :param usrname: current user name
    :param newusrname: new user name to assign
    :param callback: optional callback forwarded to execute_command
    """
    cmd_args = dict(usrName=usrname, newUsrName=newusrname)
    return self.execute_command('changeUserName', cmd_args, callback=callback)
Change user name .
47,701
def change_password(self, usrname, oldpwd, newpwd, callback=None):
    """Change a user's password on the camera.

    :param usrname: user whose password is being changed
    :param oldpwd: current password
    :param newpwd: replacement password
    :param callback: optional callback forwarded to execute_command
    """
    cmd_args = dict(usrName=usrname, oldPwd=oldpwd, newPwd=newpwd)
    return self.execute_command('changePassword', cmd_args, callback=callback)
Change password .
47,702
def set_system_time(self, time_source, ntp_server, date_format, time_format,
                    time_zone, is_dst, dst, year, mon, day, hour, minute, sec,
                    callback=None):
    """Set the camera's system time.

    :param ntp_server: must be one of the four servers the firmware supports
    :raises ValueError: for an unsupported NTP server name
    """
    supported_servers = ('time.nist.gov', 'time.kriss.re.kr',
                         'time.windows.com', 'time.nuri.net')
    if ntp_server not in supported_servers:
        raise ValueError('Unsupported ntpServer')
    cmd_args = dict(timeSource=time_source, ntpServer=ntp_server,
                    dateFormat=date_format, timeFormat=time_format,
                    timeZone=time_zone, isDst=is_dst, dst=dst, year=year,
                    mon=mon, day=day, hour=hour, minute=minute, sec=sec)
    return self.execute_command('setSystemTime', cmd_args, callback=callback)
Set system time .
47,703
def set_dev_name(self, devname, callback=None):
    """Set the camera's device name (the firmware expects GBK encoding)."""
    encoded_name = devname.encode('gbk')
    return self.execute_command('setDevName', {'devName': encoded_name}, callback=callback)
Set camera name
47,704
def ptz_goto_preset(self, name, callback=None):
    """Slew the PTZ camera to a previously stored preset point."""
    return self.execute_command('ptzGotoPresetPoint', {'name': name}, callback=callback)
Move to preset .
47,705
def get_apcor(expnum, ccd, version='p', prefix=None):
    """Retrieve the aperture-correction values for this exposure/ccd.

    Downloads the .apcor file from VOSpace into a temporary file and parses
    its single line of whitespace-separated floats.
    """
    uri = get_uri(expnum, ccd, ext=APCOR_EXT, version=version, prefix=prefix)
    local_copy = tempfile.NamedTemporaryFile()
    client.copy(uri, local_copy.name)
    local_copy.seek(0)
    return [float(token) for token in local_copy.readline().split()]
retrieve the aperture correction for this exposure
47,706
def _link_if_missing(source, dest):
    """Create a VOSpace link, silently ignoring 'already exists' errors."""
    try:
        client.link(source, dest)
    except IOError as e:
        if e.errno != errno.EEXIST:
            raise


def populate(dataset_name, data_web_service_url=DATA_WEB_SERVICE + "CFHT"):
    """Create the dbimages directories/links for a dataset stored at CADC.

    Links the raw ('o') image, its primary header, and the processed ('p')
    CFHTSG header into the dbimages tree.

    :param dataset_name: CFHT exposure number / observation id
    :param data_web_service_url: base URL of the CADC data web service
    :return: True on success
    """
    data_dest = get_uri(dataset_name, version='o', ext=FITS_EXT)
    # BUG FIX: the original mixed %-formatting with a literal '{}' placeholder
    # and passed three arguments to two '%s' slots, raising TypeError.
    # NOTE(review): assumes FITS_EXT has no leading '.' — confirm its value.
    data_source = "%s/%so.%s" % (data_web_service_url, dataset_name, FITS_EXT)
    mkdir(os.path.dirname(data_dest))
    _link_if_missing(data_source, data_dest)

    header_dest = get_uri(dataset_name, version='o', ext='head')
    header_source = "%s/%so.fits.fz?cutout=[0]" % (data_web_service_url, dataset_name)
    _link_if_missing(header_source, header_dest)

    header_dest = get_uri(dataset_name, version='p', ext='head')
    header_source = "%s/%s/%sp.head" % (
        'http://www.cadc-ccda.hia-iha.nrc-cnrc.gc.ca/data/pub', 'CFHTSG', dataset_name)
    _link_if_missing(header_source, header_dest)
    return True
Given a dataset_name created the desired dbimages directories and links to the raw data files stored at CADC .
47,707
def get_cands_uri(field, ccd, version='p', ext='measure3.cands.astrom', prefix=None, block=None):
    """Return the nominal URI for a candidate file.

    :param field: field/pointing name; gets an '_' separator when non-empty
    :param ccd: ccd number, appended after the version letter
    :param version: processing version letter
    :param ext: file extension (a leading '.' is added when missing)
    :param prefix: optional filename prefix; gets an '_' separator
    :param block: optional survey block sub-directory under MEASURE3
    """
    if prefix is None:
        prefix = ""
    if len(prefix) > 0:
        prefix += "_"
    if len(field) > 0:
        field += "_"
    if ext is None:
        ext = ""
    if len(ext) > 0 and ext[0] != ".":
        ext = ".{}".format(ext)
    measure3_dir = MEASURE3
    if block is not None:
        # BUG FIX: the original computed this concatenation but discarded the
        # result, so the block sub-directory never made it into the URI.
        measure3_dir += "/{}".format(block)
    return "{}/{}{}{}{}{}".format(measure3_dir, prefix, field, version, ccd, ext)
return the nominal URI for a candidate file .
47,708
def get_uri(expnum, ccd=None, version='p', ext=FITS_EXT, subdir=None, prefix=None):
    """Build the URI for an OSSOS image stored in the dbimages containerNode.

    :param expnum: exposure number (also the default sub-directory name)
    :param ccd: ccd number; when given the file lives under a ccdNN directory
    :param version: processing version letter; None is treated as ''
    :param ext: file extension; a leading '.' is added when missing
    :param subdir: override for the sub-directory below DBIMAGES
    :param prefix: optional filename prefix
    """
    if subdir is None:
        subdir = str(expnum)
    if prefix is None:
        prefix = ''
    if ext is None:
        ext = ''
    elif len(ext) > 0 and ext[0] != '.':
        ext = '.' + ext
    if version is None:
        version = ''
    base_dir = os.path.join(DBIMAGES, subdir)
    if ccd is None:
        filename = '%s%s%s%s' % (prefix, str(expnum), version, ext)
        return os.path.join(base_dir, filename)
    ccd = str(ccd).zfill(2)
    filename = '%s%s%s%s%s' % (prefix, str(expnum), version, ccd, ext)
    return os.path.join(base_dir, 'ccd{}'.format(ccd), filename)
Build the uri for an OSSOS image stored in the dbimages containerNode .
47,709
def get_tag(expnum, key):
    """Given a key, return the VOSpace tag value for this exposure."""
    uri = tag_uri(key)
    # Force a refresh of the cached tag set when the key is not present.
    refresh = uri not in get_tags(expnum)
    return get_tags(expnum, force=refresh).get(uri, None)
given a key return the vospace tag value .
47,710
def get_process_tag(program, ccd, version='p'):
    """Build a per-ccd tag name: program suffixed with version and ccd."""
    ccd_label = str(ccd).zfill(2)
    return "{}_{}{}".format(program, str(version), ccd_label)
make a process tag have a suffix indicating which ccd it's for .
47,711
def get_status(task, prefix, expnum, version, ccd, return_message=False):
    """Report the status of a task by looking up its VOSpace annotation.

    :return: the raw status string when return_message is True, otherwise a
        bool indicating whether the task completed successfully.
    """
    tag_key = get_process_tag(prefix + task, ccd, version)
    status = get_tag(expnum, tag_key)
    logger.debug('%s: %s' % (tag_key, status))
    if return_message:
        return status
    return status == SUCCESS
Report back status of the given program by looking up the associated VOSpace annotation .
47,712
def set_status(task, prefix, expnum, version, ccd, status):
    """Record the processing status of a task as a VOSpace tag."""
    tag_key = get_process_tag(prefix + task, ccd, version)
    return set_tag(expnum, tag_key, status)
set the processing status of the given program .
47,713
def frame2expnum(frameid):
    """Split a standard OSSOS frameid into its expnum, version and ccd parts.

    :param frameid: e.g. '1616681p22' — a 7-digit exposure number, a
        one-character processing version, and a two-digit ccd number.
    :return: dict with 'expnum', 'version' and 'ccd' string values
    :raises AssertionError: when frameid does not match the expected pattern
    """
    # Raw string avoids invalid-escape-sequence warnings for \d and \S.
    parts = re.search(r'(?P<expnum>\d{7})(?P<type>\S)(?P<ccd>\d\d)', frameid)
    assert parts is not None
    return {'expnum': parts.group('expnum'),
            'ccd': parts.group('ccd'),
            'version': parts.group('type')}
Given a standard OSSOS frameid return the expnum version and ccdnum as a dictionary .
47,714
def reset_datasec(cutout, datasec, naxis1, naxis2):
    """Adjust a DATASEC keyword value to account for a cutout being applied.

    :param cutout: cutout string used when retrieving the image, or None
    :param datasec: the original DATASEC value, e.g. '[33:2080,1:4612]'
    :param naxis1: full image extent along axis 1
    :param naxis2: full image extent along axis 2
    :return: the DATASEC string expressed in the cutout pixel frame (the
        original string is returned unchanged when it cannot be parsed)
    """
    if cutout is None or cutout == "[*,*]":
        return datasec

    try:
        datasec = datasec_to_list(datasec)
    except Exception:
        return datasec

    cutout = cutout.replace(" ", "")
    # Expand wildcard axis specifications into explicit pixel ranges.
    cutout = cutout.replace("[-*,", "{}:1,".format(naxis1))
    cutout = cutout.replace(",-*]", ",{}:1]".format(naxis2))
    cutout = cutout.replace("[*,", "[1:{},".format(naxis1))
    # BUG FIX: the trailing wildcard covers axis 2, so it must expand to
    # naxis2 (the original mistakenly used naxis1 here).
    cutout = cutout.replace(",*]", ",1:{}]".format(naxis2))
    try:
        cutout = [int(x) for x in re.findall(r"([-+]?[*\d]+?)[:,\]]+", cutout)]
    except Exception:
        logger.debug("Failed to processes the cutout pattern: {}".format(cutout))
        return datasec
    # A leading extension number may have been captured; drop it.
    if len(cutout) == 5:
        cutout = cutout[1:]

    # Negative indices count back from the image edge.
    for idx in [0, 1]:
        if cutout[idx] < 0:
            cutout[idx] = naxis1 - cutout[idx] + 1
    for idx in [2, 3]:
        if cutout[idx] < 0:
            cutout[idx] = naxis2 - cutout[idx] + 1

    flip = cutout[0] > cutout[1]
    flop = cutout[2] > cutout[3]
    logger.debug("Working with cutout: {}".format(cutout))

    # A reversed range means the cutout flipped/flopped the image; mirror
    # both the cutout and the datasec accordingly.
    if flip:
        cutout = [naxis1 - cutout[0] + 1, naxis1 - cutout[1] + 1, cutout[2], cutout[3]]
        datasec = [naxis1 - datasec[1] + 1, naxis1 - datasec[0] + 1, datasec[2], datasec[3]]
    if flop:
        cutout = [cutout[0], cutout[1], naxis2 - cutout[2] + 1, naxis2 - cutout[3] + 1]
        datasec = [datasec[0], datasec[1], naxis2 - datasec[3] + 1, naxis2 - datasec[2] + 1]

    # Shift the datasec into the cutout frame and clip to the image bounds.
    datasec = [max(datasec[0] - cutout[0] + 1, 1),
               min(datasec[1] - cutout[0] + 1, naxis1),
               max(datasec[2] - cutout[2] + 1, 1),
               min(datasec[3] - cutout[2] + 1, naxis2)]

    return "[{}:{},{}:{}]".format(datasec[0], datasec[1], datasec[2], datasec[3])
reset the datasec to account for a possible cutout .
47,715
def get_hdu(uri, cutout=None):
    """Retrieve a FITS HDUList from VOSpace (or a local copy), optionally cut out.

    :param uri: VOSpace URI of the image
    :param cutout: optional cutout string, e.g. '[1:100,1:100]'; when given,
        only that section is downloaded
    :return: the HDUList, with a .converter and (when possible) .wcs
        attribute attached to every HDU
    """
    try:
        filename = os.path.basename(uri)
        if os.access(filename, os.F_OK) and cutout is None:
            # A full local copy already exists; open it directly.
            logger.debug("File already on disk: {}".format(filename))
            hdu_list = fits.open(filename, scale_back=True)
            hdu_list.verify('silentfix+ignore')
        else:
            logger.debug("Pulling: {}{} from VOSpace".format(uri, cutout))
            fpt = tempfile.NamedTemporaryFile(suffix='.fits')
            # py2-style 'x and a or b' idiom: treat a None cutout as "".
            cutout = cutout is not None and cutout or ""
            copy(uri + cutout, fpt.name)
            fpt.seek(0, 2)
            fpt.seek(0)
            logger.debug("Read from vospace completed. Building fits object.")
            hdu_list = fits.open(fpt, scale_back=False)
            hdu_list.verify('silentfix+ignore')
            logger.debug("Got image from vospace")
            try:
                # Rewrite DATASEC so it refers to the cutout pixel frame.
                hdu_list[0].header['DATASEC'] = reset_datasec(cutout, hdu_list[0].header['DATASEC'], hdu_list[0].header['NAXIS1'], hdu_list[0].header['NAXIS2'])
            except Exception as e:
                logging.debug("error converting datasec: {}".format(str(e)))
        for hdu in hdu_list:
            logging.debug("Adding converter to {}".format(hdu))
            hdu.converter = CoordinateConverter(0, 0)
            try:
                hdu.wcs = WCS(hdu.header)
            except Exception as ex:
                logger.error("Failed trying to initialize the WCS: {}".format(ex))
    except Exception as ex:
        raise ex
    return hdu_list
Get an HDU list at the given uri from VOSpace , possibly doing a cutout .
47,716
def get_fwhm_tag(expnum, ccd, prefix=None, version='p'):
    """Fetch the FWHM VOSpace annotation for an exposure/ccd, with caching."""
    uri = get_uri(expnum, ccd, version, ext='fwhm', prefix=prefix)
    if uri not in fwhm:
        tag_key = "fwhm_{:1s}{:02d}".format(version, int(ccd))
        fwhm[uri] = get_tag(expnum, tag_key)
    return fwhm[uri]
Get the FWHM from the VOSpace annotation .
47,717
def _get_zeropoint(expnum, ccd, prefix=None, version='p'):
    """Retrieve the zeropoint stored in the tags associated with this image.

    :param prefix: deprecated and ignored; kept for backward compatibility.
    """
    import warnings
    if prefix is not None:
        # BUG FIX: the original built a DeprecationWarning instance but never
        # issued it; actually emit the warning.
        warnings.warn("Prefix is no longer used here as the 'fk' and 's' have the same zeropoint.",
                      DeprecationWarning)
    key = "zeropoint_{:1s}{:02d}".format(version, int(ccd))
    return get_tag(expnum, key)
Retrieve the zeropoint stored in the tags associated with this image .
47,718
def get_zeropoint(expnum, ccd, prefix=None, version='p'):
    """Get the zeropoint for this exposure from the zeropoint.used file.

    Values are cached in the module-level zmag dict; when the file cannot be
    read or parsed the zeropoint deliberately falls back to 0.0.
    """
    uri = get_uri(expnum, ccd, version, ext='zeropoint.used', prefix=prefix)
    # BUG FIX: narrowed the original bare 'except:' clauses, which would also
    # swallow KeyboardInterrupt/SystemExit.
    try:
        return zmag[uri]
    except KeyError:
        pass
    try:
        zmag[uri] = float(open_vos_or_local(uri).read())
        return zmag[uri]
    except Exception:
        # Best effort: a missing/unreadable zeropoint file falls through.
        pass
    zmag[uri] = 0.0
    return zmag[uri]
Get the zeropoint for this exposure using the zeropoint . used file created during source planting .
47,719
def mkdir(dirname):
    """Create dirname (and any missing parent directories) in VOSpace."""
    # Walk upward collecting every missing level of the tree.
    missing = []
    while not client.isdir(dirname):
        missing.append(dirname)
        dirname = os.path.dirname(dirname)
    # Create them top-down; tolerate races where one appears meanwhile.
    while len(missing) > 0:
        logging.info("Creating directory: %s" % (missing[-1]))
        try:
            client.mkdir(missing.pop())
        except IOError as e:
            if e.errno == errno.EEXIST:
                pass
            else:
                raise e
make directory tree in vospace .
47,720
def vofile(filename, **kwargs):
    """Open and return a handle on a VOSpace data connection.

    A readable copy in the working directory is preferred over a download.
    """
    basename = os.path.basename(filename)
    if os.access(basename, os.R_OK):
        return open(basename, 'r')
    kwargs.setdefault('view', 'data')
    return client.open(filename, **kwargs)
Open and return a handle on a VOSpace data connection
47,721
def open_vos_or_local(path, mode="rb"):
    """Open a file living either in VOSpace or on the local filesystem.

    :raises ValueError: for a vos: path with an unsupported open mode
    """
    filename = os.path.basename(path)
    if os.access(filename, os.F_OK):
        return open(filename, mode)
    if not path.startswith("vos:"):
        return open(path, mode)
    # Translate the builtin open() mode letter into an os.O_* flag.
    mode_map = {"r": os.O_RDONLY, "w": os.O_WRONLY, "a": os.O_APPEND}
    primary_mode = mode[0]
    if primary_mode not in mode_map:
        raise ValueError("Can't open with mode %s" % mode)
    return vofile(path, mode=mode_map[primary_mode])
Opens a file which can either be in VOSpace or the local filesystem .
47,722
def copy(source, dest):
    """Copy a file from source to dest via the VOSpace service."""
    message = "copying {} -> {}".format(source, dest)
    logger.info(message)
    return client.copy(source, dest)
use the vospace service to get a file .
47,723
def vlink(s_expnum, s_ccd, s_version, s_ext, l_expnum, l_ccd, l_version, l_ext,
          s_prefix=None, l_prefix=None):
    """Create a VOSpace link from one version of a file to another.

    The s_* arguments describe the existing source file, the l_* arguments
    the link to be created.
    """
    src = get_uri(s_expnum, ccd=s_ccd, version=s_version, ext=s_ext, prefix=s_prefix)
    dst = get_uri(l_expnum, ccd=l_ccd, version=l_version, ext=l_ext, prefix=l_prefix)
    return client.link(src, dst)
make a link between two versions of a file .
47,724
def delete(expnum, ccd, version, ext, prefix=None):
    """Delete a dbimages file; missing files are not an error."""
    target_uri = get_uri(expnum, ccd=ccd, version=version, ext=ext, prefix=prefix)
    remove(target_uri)
delete a file ; no error is raised if the file does not exist
47,725
def my_glob(pattern):
    """Return a listing of files matching pattern, in VOSpace or locally."""
    if not pattern.startswith('vos:'):
        return glob(pattern)
    # VOSpace has no glob service: list the directory and fnmatch locally.
    dirname = os.path.dirname(pattern)
    matches = []
    for entry in listdir(dirname):
        full_name = '/'.join([dirname, entry])
        if fnmatch.fnmatch(full_name, pattern):
            matches.append(full_name)
    return matches
get a listing matching pattern
47,726
def has_property(node_uri, property_name, ossos_base=True):
    """Return True when the VOSpace node carries the named property."""
    return get_property(node_uri, property_name, ossos_base) is not None
Checks if a node in VOSpace has the specified property .
47,727
def get_property(node_uri, property_name, ossos_base=True):
    """Return the value of a property on a VOSpace node, or None if absent.

    :param ossos_base: when True, property_name is expanded via tag_uri()
    """
    node = client.get_node(node_uri, force=True)
    property_uri = property_name if not ossos_base else tag_uri(property_name)
    if property_uri not in node.props:
        return None
    return node.props[property_uri]
Retrieves the value associated with a property on a node in VOSpace .
47,728
def set_property(node_uri, property_name, property_value, ossos_base=True):
    """Set a property's value on a VOSpace node.

    An existing value is first cleared (set to None and pushed) before the
    new value is written, since the service keeps the old value otherwise.
    """
    node = client.get_node(node_uri)
    property_uri = tag_uri(property_name) if ossos_base else property_name
    if property_uri in node.props:
        node.props[property_uri] = None
        client.add_props(node)
    node.props[property_uri] = property_value
    client.add_props(node)
Sets the value of a property on a node in VOSpace . If the property already has a value then it is first cleared and then set .
47,729
def increment_object_counter(node_uri, epoch_field, dry_run=False):
    """Advance the base-36 counter used to build unique object identifiers.

    :return: the new, zero-padded two-character counter value
    """
    current_count = read_object_counter(node_uri, epoch_field, dry_run=dry_run)
    if current_count is None:
        # First object for this epoch.
        new_count = "01"
    else:
        new_count = coding.base36encode(coding.base36decode(current_count) + 1,
                                        pad_length=2)
    counter_tag = build_counter_tag(epoch_field, dry_run=dry_run)
    set_property(node_uri, counter_tag, new_count, ossos_base=True)
    return new_count
Increment the object counter used to create unique object identifiers .
47,730
def get_mopheader(expnum, ccd, version='p', prefix=None):
    """Retrieve the .mopheader for an exposure/ccd from cache, disk or VOSpace.

    The header is augmented with several derived keywords (FWHM, SCALE,
    NAX1/NAX2, MOPversion, MJD_OBS_CENTER, MAXCOUNT) expected by downstream
    MOP tools, then cached in the module-level mopheaders dict.
    """
    # py2-style 'x and a or b' idiom: default prefix to "".
    prefix = prefix is None and "" or prefix
    mopheader_uri = dbimages_uri(expnum=expnum, ccd=ccd, version=version, prefix=prefix, ext='.mopheader')
    if mopheader_uri in mopheaders:
        return mopheaders[mopheader_uri]
    filename = os.path.basename(mopheader_uri)
    if os.access(filename, os.F_OK):
        logger.debug("File already on disk: {}".format(filename))
        mopheader_fpt = StringIO(open(filename, 'r').read())
    else:
        mopheader_fpt = StringIO(open_vos_or_local(mopheader_uri).read())
    with warnings.catch_warnings():
        # The mopheader files routinely trigger benign astropy warnings.
        warnings.simplefilter('ignore', AstropyUserWarning)
        mopheader = fits.open(mopheader_fpt)
        header = mopheader[0].header
        try:
            header['FWHM'] = get_fwhm(expnum, ccd)
        except IOError:
            # No FWHM annotation available; fall back to a generous default.
            header['FWHM'] = 10
        header['SCALE'] = mopheader[0].header['PIXSCALE']
        header['NAX1'] = header['NAXIS1']
        header['NAX2'] = header['NAXIS2']
        header['MOPversion'] = header['MOP_VER']
        # Convert the mid-exposure MJD into an MPC-format time string.
        header['MJD_OBS_CENTER'] = str(Time(header['MJD-OBSC'], format='mjd', scale='utc', precision=5).replicate(format='mpc'))
        header['MAXCOUNT'] = MAXCOUNT
    mopheaders[mopheader_uri] = header
    mopheader.close()
    return mopheaders[mopheader_uri]
Retrieve the mopheader either from cache or from vospace
47,731
def _get_sghead(expnum):
    """Use the data web service to retrieve Stephen Gwyn's astrometric headers.

    :param expnum: exposure number
    :return: list of fits.Header objects, with a placeholder None at index 0
        so the list can be indexed by extension number
    :raises IOError: (ENOENT) when the header is not available on the server
    """
    version = 'p'
    key = "{}{}".format(expnum, version)
    if key in sgheaders:
        return sgheaders[key]
    url = "http://www.cadc-ccda.hia-iha.nrc-cnrc.gc.ca/data/pub/CFHTSG/{}{}.head".format(expnum, version)
    # requests logs chatty INFO records; silence it for this call.
    logging.getLogger("requests").setLevel(logging.ERROR)
    logging.debug("Attempting to retrieve {}".format(url))
    resp = requests.get(url)
    if resp.status_code != 200:
        raise IOError(errno.ENOENT, "Could not get {}".format(url))
    # NOTE(review): resp.content is bytes under Python 3, so this str split
    # assumes a Python 2 runtime (resp.text would be needed on py3) — confirm.
    header_str_list = re.split('END \n', resp.content)
    headers = [None]
    for header_str in header_str_list:
        headers.append(fits.Header.fromstring(header_str, sep='\n'))
        logging.debug(headers[-1].get('EXTVER', -1))
    sgheaders[key] = headers
    return sgheaders[key]
Use the data web service to retrieve Stephen's astrometric header .
47,732
def get_header(uri):
    """Pull (and cache) the FITS header of the observation at the given URI."""
    if uri not in astheaders:
        # A 1x1 cutout downloads just enough of the file to read the header.
        astheaders[uri] = get_hdu(uri, cutout="[1:1,1:1]")[0].header
    return astheaders[uri]
Pull a FITS header from observation at the given URI
47,733
def get_astheader(expnum, ccd, version='p', prefix=None):
    """Retrieve the astrometric header for a given dbimages exposure/ccd.

    Tries, in order: the CFHTSG astrometric headers (version 'p' only), the
    .fits image header, then the .fits.fz image header.  Results are cached
    in the module-level astheaders dict.
    """
    logger.debug("Getting ast header for {}".format(expnum))
    if version == 'p':
        try:
            sg_key = "{}{}".format(expnum, version)
            if sg_key not in sgheaders:
                _get_sghead(expnum)
            if sg_key in sgheaders:
                for header in sgheaders[sg_key]:
                    if header.get('EXTVER', -1) == int(ccd):
                        return header
        except Exception:
            # BUG FIX: was a bare 'except:'; narrowed so interrupts are not
            # swallowed.  Best effort: fall through to the dbimages headers.
            pass
    try:
        ast_uri = dbimages_uri(expnum, ccd, version=version, ext='.fits')
        if ast_uri not in astheaders:
            hdulist = get_image(expnum, ccd=ccd, version=version, prefix=prefix,
                                cutout="[1:1,1:1]", return_file=False, ext='.fits')
            assert isinstance(hdulist, fits.HDUList)
            astheaders[ast_uri] = hdulist[0].header
    except Exception:
        # BUG FIX: narrowed bare except.  Fall back to the fpack-compressed image.
        ast_uri = dbimages_uri(expnum, ccd, version=version, ext='.fits.fz')
        if ast_uri not in astheaders:
            hdulist = get_image(expnum, ccd=ccd, version=version, prefix=prefix,
                                cutout="[1:1,1:1]", return_file=False, ext='.fits.fz')
            assert isinstance(hdulist, fits.HDUList)
            astheaders[ast_uri] = hdulist[0].header
    return astheaders[ast_uri]
Retrieve the header for a given dbimages file .
47,734
def tag(self):
    """Return the VOSpace annotation tag string for this status target.

    NOTE(review): the second format argument is ``self`` (this object's own
    str()), which looks suspicious next to the other ``self.target.*``
    fields — presumably a task name was intended; confirm against callers
    before changing.
    """
    return "{}{}_{}{:02d}".format(self.target.prefix, self, self.target.version, self.target.ccd)
Get the string representation of the tag used to annotate the status in VOSpace .
47,735
def scramble(expnums, ccd, version='p', dry_run=False):
    """Swap MJD-OBS/EXPNUM between exposures and store them as version 's'.

    Scrambling the time order breaks any real moving-object sequence, so
    that only planted sources move consistently in the scrambled set.

    NOTE(review): the fixed permutation [0, 2, 1] assumes exactly three
    exposures in expnums — confirm callers always pass triplets.
    """
    mjds = []
    fobjs = []
    for expnum in expnums:
        filename = storage.get_image(expnum, ccd=ccd, version=version)
        fobjs.append(fits.open(filename))
        # Hold onto each exposure's original observation time.
        mjds.append(fobjs[-1][0].header['MJD-OBS'])
    order = [0, 2, 1]
    for idx in range(len(fobjs)):
        logging.info("Flipping %d to %d" % (fobjs[idx][0].header['EXPNUM'], expnums[order[idx]]))
        fobjs[idx][0].header['EXPNUM'] = expnums[order[idx]]
        fobjs[idx][0].header['MJD-OBS'] = mjds[order[idx]]
        uri = storage.get_uri(expnums[order[idx]], ccd=ccd, version='s', ext='fits')
        fname = os.path.basename(uri)
        if os.access(fname, os.F_OK):
            os.unlink(fname)
        fobjs[idx].writeto(fname)
        # In dry-run mode the scrambled file is written locally but never
        # uploaded to VOSpace.
        if dry_run:
            continue
        storage.copy(fname, uri)
    return
run the plant script on this combination of exposures
47,736
def read_cands(filename):
    """Read the contents of a cands comb file.

    :param filename: path of the .cands.comb file
    :return: dict with 'fileId' (exposure ids taken from the header lines)
        and 'cands' (a list of candidates, each a list of per-exposure
        measurement dicts keyed x/y/x_0/y_0/flux/size/max_int/elon)
    """
    # BUG FIX: the original used the Python-2-only file() builtin and
    # imported the long-deprecated (and unused) 'sre' module.
    with open(filename) as fobj:
        lines = fobj.readlines()
    exps = []
    for line in lines:
        # Header section ends at the first '##' line.
        if line[0:2] == "##":
            break
        exps.append(line[2:].strip())
    cands = []
    coo = []
    for line in lines:
        if line[0] == "#":
            continue
        if len(line.strip()) == 0:
            # A blank line separates candidates.
            if len(coo) != 0:
                cands.append(coo)
            coo = []
            continue
        vals = line.split()
        cols = ['x', 'y', 'x_0', 'y_0', 'flux', 'size', 'max_int', 'elon']
        values = {}
        # Pop names and values from the end in lockstep so each column is
        # paired with the value in the same position.
        for _ in range(len(cols)):
            col = cols.pop().strip()
            val = vals.pop().strip()
            values[col] = float(val)
        coo.append(values)
    cands.append(coo)
    return {'fileId': exps, 'cands': cands}
Read in the contents of a cands comb file
47,737
def query_for_observations(mjd, observable, runid_list):
    """TAP query for all CFHT observations in the given run ids taken after mjd.

    :param mjd: only observations starting after this MJD are returned
    :param observable: required calibrationLevel, interpolated into the ADQL
    :param runid_list: tuple of proposal ids, e.g. ('13AP05', '13BP05')
    :return: the VOTable rows, sorted by StartDate
    """
    data = {"QUERY": ("SELECT Observation.target_name as TargetName, "
                      "COORD1(CENTROID(Plane.position_bounds)) AS RA,"
                      "COORD2(CENTROID(Plane.position_bounds)) AS DEC, "
                      "Plane.time_bounds_lower AS StartDate, "
                      "Plane.time_exposure AS ExposureTime, "
                      "Observation.instrument_name AS Instrument, "
                      "Plane.energy_bandpassName AS Filter, "
                      "Observation.observationID AS dataset_name, "
                      "Observation.proposal_id AS ProposalID, "
                      "Observation.proposal_pi AS PI "
                      "FROM caom2.Observation AS Observation "
                      "JOIN caom2.Plane AS Plane ON "
                      "Observation.obsID = Plane.obsID "
                      "WHERE ( Observation.collection = 'CFHT' ) "
                      "AND Plane.time_bounds_lower > %d "
                      "AND Plane.calibrationLevel=%s "
                      "AND Observation.proposal_id IN %s ") % (mjd, observable, str(runid_list)),
            "REQUEST": "doQuery",
            "LANG": "ADQL",
            "FORMAT": "votable"}
    # NOTE(review): runid_list is interpolated via str(), relying on the tuple
    # repr matching ADQL IN syntax, and verify=False disables TLS certificate
    # checking — both worth confirming/fixing upstream.
    result = requests.get(storage.TAP_WEB_SERVICE, params=data, verify=False)
    assert isinstance(result, requests.Response)
    logging.debug("Doing TAP Query using url: %s" % (str(result.url)))
    # Spool the votable response to disk so the astropy parser can read it.
    temp_file = tempfile.NamedTemporaryFile()
    with open(temp_file.name, 'w') as outfile:
        outfile.write(result.text)
    try:
        vot = parse(temp_file.name).get_first_table()
    except Exception as ex:
        logging.error(str(ex))
        logging.error(result.text)
        raise ex
    vot.array.sort(order='StartDate')
    t = vot.array
    temp_file.close()
    logging.debug("Got {} lines from tap query".format(len(t)))
    return t
Do a QUERY on the TAP service for all observations that are part of runid where taken after mjd and have calibration observable .
47,738
def crpix(self):
    """The location of the reference coordinate in the pixel frame.

    Preferred source is the WCS (CRPIX1/CRPIX2); when that fails the values
    are reconstructed from DETSEC/NAXIS relative to the detector centre.

    :raises KeyError: when the fallback header keywords are missing too
    """
    try:
        return self.wcs.crpix1, self.wcs.crpix2
    except Exception as ex:
        logging.debug("Couldn't get CRPIX from WCS: {}".format(ex))
        logging.debug("Switching to use DATASEC for CRPIX value computation.")
    try:
        (x1, x2), (y1, y2) = util.get_pixel_bounds_from_datasec_keyword(self['DETSEC'])
        dx = float(self['NAXIS1'])
        dy = float(self['NAXIS2'])
    except KeyError as ke:
        raise KeyError("Header missing keyword: {}, required for CRPIX[12] computation".format(ke.args[0]))
    # Offset of this extension's centre from the detector centre, shifted to
    # the centre of the extension's own pixel grid.
    crpix1 = self._DET_X_CEN - (x1 + x2) / 2. + dx / 2.
    crpix2 = self._DET_Y_CEN - (y1 + y2) / 2. + dy / 2.
    return crpix1, crpix2
The location of the reference coordinate in the pixel frame .
47,739
def mjd_obsc(self):
    """Compute the MJD of the centre of exposure for a CFHT Megaprime header.

    The shutter-close time (DATE-OBS + UTCEND) is pushed back by a fixed
    0.73 s offset plus half the exposure time.

    :raises KeyError: when a required header keyword is missing
    """
    try:
        utc_end = self['UTCEND']
        exposure_time = float(self['EXPTIME'])
        date_obs = self['DATE-OBS']
    except KeyError as ke:
        raise KeyError("Header missing keyword: {}, required for MJD-OBSC computation".format(ke.args[0]))
    end_time = Time(date_obs + "T" + utc_end)
    half_exposure = TimeDelta(exposure_time / 2.0, format='sec')
    centre_time = end_time - TimeDelta(0.73, format='sec') - half_exposure
    return round(centre_time.mjd, 7)
Given a CFHT Megaprime image header compute the center of exposure .
47,740
def crval(self):
    """Get the world coordinate of the reference pixel.

    Preferred source is the WCS (CRVAL1/CRVAL2); otherwise the RA-DEG and
    DEC-DEG header keywords are used.

    :raises KeyError: when neither the WCS nor RA-DEG/DEC-DEG are available
    """
    try:
        return self.wcs.crval1, self.wcs.crval2
    except Exception as ex:
        logging.debug("Couldn't get CRVAL from WCS: {}".format(ex))
        logging.debug("Trying RA/DEC values")
    try:
        return (float(self['RA-DEG']), float(self['DEC-DEG']))
    except KeyError as ke:
        # BUG FIX: the original constructed this KeyError but never raised
        # it, so the method silently returned None on missing keywords.
        raise KeyError("Can't build CRVAL1/2 missing keyword: {}".format(ke.args[0]))
Get the world coordinate of the reference pixel .
47,741
def pixscale(self):
    """Return the detector pixel scale in arcseconds per pixel.

    Computed from the WCS by measuring the sky separation across one
    diagonal pixel at the image centre; falls back to the PIXSCAL header
    keyword when the WCS is unusable.
    """
    try:
        x_mid, y_mid = self['NAXIS1'] / 2.0, self['NAXIS2'] / 2.0
        p1 = SkyCoord(*self.wcs.xy2sky(x_mid, y_mid) * units.degree)
        p2 = SkyCoord(*self.wcs.xy2sky(x_mid + 1, y_mid + 1) * units.degree)
        separation = p1.separation(p2).to(units.arcsecond).value
        return round(separation / math.sqrt(2), 3)
    except Exception as ex:
        logging.debug("Failed to compute PIXSCALE using WCS: {}".format(ex))
        return float(self['PIXSCAL'])
Return the pixel scale of the detector in arcseconds .
47,742
def get_rates(file, au_min=25, au_max=150):
    """Use the rate.pl program to determine planting rate/angle bounds.

    Runs rate.pl at the inner (au_min) and outer (au_max) distances and
    combines the two into overall min/max rates, mean angle and maximum
    angle width.

    :return: dict with 'rmin', 'rmax', 'angle' and 'aw'
    """
    import os
    rate_command = 'rate.pl --file %s %d ' % (file, au_min)
    rate = os.popen(rate_command)
    line = rate.readline()
    print(line)
    rate.close()
    (min_rate, min_ang, min_aw, min_rmin, min_rmax) = line.split()
    # BUG FIX: the original ran rate.pl with au_min twice; the second run
    # must use au_max, otherwise the au_max parameter was ignored.
    rate_command = 'rate.pl --file %s %d ' % (file, au_max)
    rate = os.popen(rate_command)
    line = rate.readline()
    rate.close()
    (max_rate, max_ang, max_aw, max_rmin, max_rmax) = line.split()
    # BUG FIX: convert to float BEFORE comparing; the original took
    # min()/max() of strings, which compares lexicographically.
    rmin = min(float(max_rmin), float(min_rmin))
    rmax = max(float(max_rmax), float(min_rmax))
    aw = max(float(max_aw), float(min_aw))
    angle = (float(max_ang) + float(min_ang)) / 2.0
    rates = {'rmin': rmin, 'rmax': rmax, 'angle': angle, 'aw': aw}
    return rates
Use the rates program to determine the minimum and maximum bounds for planting
47,743
def kbo_gen(file, outfile='objects.list', mmin=22.5, mmax=24.5):
    """Generate artificial KBOs moving at a range of rates and angles.

    Rate/angle bounds come from get_rates(); positions are drawn uniformly
    over the image, magnitudes uniformly in [mmin, mmax].
    NOTE: Python-2 source (print statement, pyfits import).

    :return: dict with 'data' (per-object columns) and 'header' (the
        generation parameters)
    """
    header = get_rates(file)
    print header
    import pyfits
    hdulist = pyfits.open(file)
    header['xmin'] = 1
    header['xmax'] = hdulist[0].header.get('NAXIS1', 2048)
    header['ymin'] = 1
    header['aw'] = round(header['aw'], 2)
    header['angle'] = round(header['angle'], 2)
    header['ymax'] = hdulist[0].header.get('NAXIS2', 4096)
    header['pixscale'] = hdulist[0].header.get('PIXSCALE', 0.185)
    # Convert the rate bounds from arcsec/hr into pixels/hr.
    header['rmax'] = round(float(header['rmax']) / float(header['pixscale']), 2)
    header['rmin'] = round(float(header['rmin']) / float(header['pixscale']), 2)
    header['mmin'] = mmin
    header['mmax'] = mmax
    header['expnum'] = hdulist[0].header.get('EXPNUM', 1000000)
    header['chipnum'] = hdulist[0].header.get('CHIPNUM')
    import random
    # Draw 250 objects uniformly in position, rate, angle and magnitude.
    number = 250
    cdata = {'x': [], 'y': [], 'mag': [], 'pix_rate': [], 'angle': [], 'id': []}
    order = ['x', 'y', 'mag', 'pix_rate', 'angle', 'arc_rate', 'id']
    for i in range(number):
        cdata['x'].append(random.uniform(header['xmin'], header['xmax']))
        cdata['y'].append(random.uniform(header['ymin'], header['ymax']))
        cdata['pix_rate'].append(random.uniform(header['rmin'], header['rmax']))
        cdata['angle'].append(random.uniform(header['angle'] - header['aw'], header['angle'] + header['aw']))
        cdata['mag'].append(random.uniform(header['mmin'], header['mmax']))
        cdata['id'].append(i)
    hdu = {'data': cdata, 'header': header}
    return hdu
Generate a file with object moving at a range of rates and angles
47,744
def main():
    """Command-line entry point: query SSOIS for images of a family or member.

    Exactly one of --family / --member must be supplied.
    """
    parser = argparse.ArgumentParser(
        description='Run SSOIS and return the available images in a particular filter.')
    parser.add_argument("--filter", action="store", default='r', dest="filter",
                        choices=['r', 'u'], help="Passband: default is r.")
    parser.add_argument("--family", '-f', action="store", default=None,
                        help='List of objects to query.')
    parser.add_argument("--member", '-m', action="store", default=None,
                        help='Member object of family to query.')
    args = parser.parse_args()
    # Idiom fix: compare to None with 'is'/'is not' rather than ==/!=.
    if args.family is not None and args.member is None:
        get_family_info(str(args.family), args.filter)
    elif args.family is None and args.member is not None:
        get_member_info(str(args.member), args.filter)
    else:
        # BUG FIX: 'print expr' is Python-2-only syntax; use the function form.
        print("Please input either a family or single member name")
Input asteroid family filter type and image type to query SSOIS
47,745
def get_family_info(familyname, filtertype='r', imagetype='p'):
    """Query SSOIS image info for every member of an asteroid family.

    Members come from an on-disk list when available, otherwise from the
    find_family module; each member is handed to get_member_info().
    """
    family_list = '{}/{}_family.txt'.format(_FAMILY_LISTS, familyname)
    if os.path.exists(family_list):
        # A pre-computed member list exists on disk; use it.
        with open(family_list) as infile:
            object_list = infile.read().split('\n')
    elif familyname == 'all':
        object_list = find_family.get_all_families_list()
    else:
        object_list = find_family.find_family_members(familyname)
    # The final entry is skipped (an empty string from the trailing newline).
    for obj in object_list[:-1]:
        get_member_info(obj, filtertype)
Query the ssois table for images of objects in a given family . Then parse through for desired image type filter exposure time and telescope instrument
47,746
def get_member_info(object_name, filtertype='r', imagetype='p'):
    """Query SSOIS for images of one object and write matches to a text file.

    Ephemeris positions/rates for each image come from a Horizons query.
    NOTE(review): ra_list and dec_list are returned but never populated —
    presumably p_ra/p_dec were meant to be appended; confirm with callers.

    :return: (image_list, expnum_list, ra_list, dec_list)
    """
    # Map the short filter letter onto the full MegaPrime filter name.
    if filtertype.lower().__contains__('r'):
        filtertype = 'r.MP9601'
    if filtertype.lower().__contains__('u'):
        filtertype = 'u.MP9301'
    search_start_date = Time('2013-01-01', scale='utc')
    search_end_date = Time('2017-01-01', scale='utc')
    print("----- Searching for images of object {}".format(object_name))
    image_list = []
    expnum_list = []
    ra_list = []
    dec_list = []
    query = Query(object_name, search_start_date=search_start_date, search_end_date=search_end_date)
    result = query.get()
    print(result)
    try:
        objects = parse_ssois_return(query.get(), object_name, imagetype, camera_filter=filtertype)
    except IOError:
        # SSOIS can be flaky; pause and retry once.
        print("Sleeping 30 seconds")
        time.sleep(30)
        objects = parse_ssois_return(query.get(), object_name, imagetype, camera_filter=filtertype)
    print(objects)
    if len(objects) > 0:
        output = '{}/{}_object_images.txt'.format(_OUTPUT_DIR, object_name)
        with open(output, 'w') as outfile:
            outfile.write("{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\n".format("Object", "Image", "Exp_time", "RA (deg)", "Dec (deg)", "time", "filter", "RA rate (\"/hr)", "Dec rate (\"/hr)"))
        for line in objects:
            with open(output, 'a') as outfile:
                image_list.append(object_name)
                expnum_list.append(line['Exptime'])
                # Bracket the exposure with a 2-minute Horizons ephemeris.
                t_start = Time(line['MJD'], format='mjd', scale='utc') - 1.0 * units.minute
                t_stop = t_start + 2 * units.minute
                hq = horizons.Query(object_name)
                hq.start_time = t_start
                hq.stop_time = t_stop
                hq.step_size = 1 * units.minute
                p_ra = hq.table[1]['R.A._(ICRF/J2000.0']
                p_dec = hq.table[1]['DEC_(ICRF/J2000.0']
                ra_dot = hq.table[1]['dRA*cosD']
                dec_dot = hq.table[1]['dDEC/dt']
                try:
                    outfile.write("{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\n".
                                  format(object_name, line['Image'], line['Exptime'], p_ra, p_dec, Time(line['MJD'], format='mjd', scale='utc'), line['Filter'], ra_dot, dec_dot))
                except Exception as e:
                    print("Error writing to outfile: {}".format(e))
    return image_list, expnum_list, ra_list, dec_list
Query the ssois table for images of a given object . Then parse through for desired image type filter exposure time and telescope instrument
47,747
def parse_ssois_return(ssois_return, object_name, imagetype, camera_filter='r.MP9601',
                       telescope_instrument='CFHT/MegaCam'):
    """Filter an SSOIS query result down to usable MegaCam rows.

    Rows are kept when they are MegaCam images that exist in dbimages and
    whose target is not a wallpaper ('WP') field.
    """
    assert camera_filter in ['r.MP9601', 'u.MP9301']
    table_reader = ascii.get_reader(Reader=ascii.Basic)
    table_reader.inconsistent_handler = _skip_missing_data
    table_reader.header.splitter.delimiter = '\t'
    table_reader.data.splitter.delimiter = '\t'
    table = table_reader.read(ssois_return)
    ret_table = []
    good_table = 0
    for row in table:
        # (sic) 'Telescope_Insturment' is the actual SSOIS column name.
        if 'MegaCam' not in row['Telescope_Insturment']:
            continue
        if not storage.exists(storage.get_uri(row['Image'][:-1])):
            continue
        if not str(row['Image_target']).startswith('WP'):
            good_table += 1
            ret_table.append(row)
    if good_table > 0:
        print(" %d images found" % good_table)
    return ret_table
Parse through objects in ssois query and filter out images of desired filter type exposure time and instrument
47,748
def match_mopfiles ( mopfile1 , mopfile2 ) : pos1 = pos2 = numpy . array ( [ ] ) if len ( mopfile1 . data ) > 0 : X_COL = "X_{}" . format ( mopfile1 . header . file_ids [ 0 ] ) Y_COL = "Y_{}" . format ( mopfile1 . header . file_ids [ 0 ] ) pos1 = numpy . array ( [ mopfile1 . data [ X_COL ] . data , mopfile1 . data [ Y_COL ] . data ] ) . transpose ( ) if len ( mopfile2 . data ) > 0 : X_COL = "X_{}" . format ( mopfile2 . header . file_ids [ 0 ] ) Y_COL = "Y_{}" . format ( mopfile2 . header . file_ids [ 0 ] ) pos2 = numpy . array ( [ mopfile2 . data [ X_COL ] . data , mopfile2 . data [ Y_COL ] . data ] ) . transpose ( ) match_idx1 , match_idx2 = util . match_lists ( pos1 , pos2 ) mopfile1 . data . add_column ( Column ( data = match_idx1 . filled ( - 1 ) , name = "real" , length = len ( mopfile1 . data ) ) ) idx = 0 for file_id in mopfile1 . header . file_ids : idx += 1 mopfile1 . data . add_column ( Column ( data = [ file_id ] * len ( mopfile1 . data ) , name = "ID_{}" . format ( idx ) ) ) return mopfile1
Given an input list of real detections and candidate detections provide a result file that contains the measured values from candidate detections with a flag indicating if they are real or false .
47,749
def measure_mags ( measures ) : import daophot image_downloader = ImageDownloader ( ) observations = { } for measure in measures : for reading in measure : if reading . obs not in observations : observations [ reading . obs ] = { 'x' : [ ] , 'y' : [ ] , 'source' : image_downloader . download ( reading , needs_apcor = True ) } assert isinstance ( reading . obs , Observation ) observations [ reading . obs ] [ 'x' ] . append ( reading . x ) observations [ reading . obs ] [ 'y' ] . append ( reading . y ) for observation in observations : source = observations [ observation ] [ 'source' ] assert isinstance ( source , SourceCutout ) source . update_pixel_location ( observations [ observation ] [ 'x' ] , observations [ observation ] [ 'y' ] ) hdulist_index = source . get_hdulist_idx ( observation . ccdnum ) observations [ observation ] [ 'mags' ] = daophot . phot ( source . _hdu_on_disk ( hdulist_index ) , observations [ observation ] [ 'x' ] , observations [ observation ] [ 'y' ] , aperture = source . apcor . aperture , sky = source . apcor . sky , swidth = source . apcor . swidth , apcor = source . apcor . apcor , zmag = source . zmag , maxcount = 30000 , extno = 0 ) return observations
Given a list of readings compute the magnitudes for all sources in each reading .
47,750
def append ( self , item ) : if len ( self ) == 0 : self . index = 0 self . items . append ( item )
Adds a new item to the end of the collection .
47,751
def fix_tags_on_cands_missing_reals ( user_id , vos_dir , property ) : "At the moment this just checks for a single user's missing reals. Easy to generalise it to all users." con = context . get_context ( vos_dir ) user_progress = [ ] listing = con . get_listing ( tasks . get_suffix ( 'reals' ) ) mpc_listing = con . get_listing ( 'mpc' ) for filename in listing : if not filename . startswith ( 'fk' ) : user = storage . get_property ( con . get_full_path ( filename ) , property ) if ( user is not None ) : is_present = False for mpcfile in [ f for f in mpc_listing if not f . startswith ( 'fk' ) ] : if mpcfile . startswith ( filename ) : print filename , user , 'exists!' , mpcfile is_present = True if not is_present : user_progress . append ( filename ) print filename , user , 'no mpc file' storage . set_property ( con . get_full_path ( filename ) , property , None ) print 'Fixed files:' , len ( user_progress ) return
At the moment this just checks for a single user s missing reals . Easy to generalise it to all users .
47,752
def make_error ( self , message : str , * , error : Exception = None , error_class : Any = None ) -> Exception : if error_class is None : error_class = self . error_class if self . error_class else Error return error_class ( message )
Return error instantiated from given message .
47,753
def make_response ( self , data : Any = None , ** kwargs : Any ) -> Any : r if not self . _valid_request : logger . error ( 'Request not validated, cannot make response' ) raise self . make_error ( 'Request not validated before, cannot make ' 'response' ) if data is None and self . response_factory is None : logger . error ( 'Response data omit, but no response factory is used' ) raise self . make_error ( 'Response data could be omitted only when ' 'response factory is used' ) response_schema = getattr ( self . module , 'response' , None ) if response_schema is not None : self . _validate ( data , response_schema ) if self . response_factory is not None : return self . response_factory ( * ( [ data ] if data is not None else [ ] ) , ** kwargs ) return data
r Validate response data and wrap it inside response factory .
47,754
def validate_request ( self , data : Any , * additional : AnyMapping , merged_class : Type [ dict ] = dict ) -> Any : r request_schema = getattr ( self . module , 'request' , None ) if request_schema is None : logger . error ( 'Request schema should be defined' , extra = { 'schema_module' : self . module , 'schema_module_attrs' : dir ( self . module ) } ) raise self . make_error ( 'Request schema should be defined' ) if isinstance ( data , dict ) and additional : data = merged_class ( self . _merge_data ( data , * additional ) ) try : self . _validate ( data , request_schema ) finally : self . _valid_request = False self . _valid_request = True processor = getattr ( self . module , 'request_processor' , None ) return processor ( data ) if processor else data
r Validate request data against request schema from module .
47,755
def _merge_data ( self , data : AnyMapping , * additional : AnyMapping ) -> dict : r return defaults ( dict ( data ) if not isinstance ( data , dict ) else data , * ( dict ( item ) for item in additional ) )
r Merge base data and additional dicts .
47,756
def _pure_data ( self , data : Any ) -> Any : if not isinstance ( data , dict ) and not isinstance ( data , list ) : try : return dict ( data ) except TypeError : ... return data
If data is dict - like object convert it to pure dict instance so it will be possible to pass to default jsonschema . validate func .
47,757
def _validate ( self , data : Any , schema : AnyMapping ) -> Any : try : return self . validate_func ( schema , self . _pure_data ( data ) ) except self . validation_error_class as err : logger . error ( 'Schema validation error' , exc_info = True , extra = { 'schema' : schema , 'schema_module' : self . module } ) if self . error_class is None : raise raise self . make_error ( 'Validation Error' , error = err ) from err
Validate data against given schema .
47,758
def getCert ( certHost = vos . vos . SERVER , certfile = None , certQuery = "/cred/proxyCert?daysValid=" , daysValid = 2 ) : if certfile is None : certfile = os . path . join ( os . getenv ( "HOME" , "/tmp" ) , ".ssl/cadcproxy.pem" ) dirname = os . path . dirname ( certfile ) try : os . makedirs ( dirname ) except OSError as e : if os . path . isdir ( dirname ) : pass elif e . errno == 20 or e . errno == 17 : sys . stderr . write ( str ( e ) + ": %s \n" % dirname ) sys . stderr . write ( "Expected %s to be a directory.\n" % ( dirname ) ) sys . exit ( e . errno ) else : raise e password_mgr = urllib2 . HTTPPasswordMgrWithDefaultRealm ( ) ( username , passwd ) = getUserPassword ( host = certHost ) top_level_url = "http://" + certHost password_mgr . add_password ( None , top_level_url , username , passwd ) handler = urllib2 . HTTPBasicAuthHandler ( password_mgr ) opener = urllib2 . build_opener ( handler ) urllib2 . install_opener ( opener ) url = "http://" + certHost + certQuery + str ( daysValid ) r = urllib2 . urlopen ( url ) w = file ( certfile , 'w' ) while True : buf = r . read ( ) if not buf : break w . write ( buf ) w . close ( ) r . close ( ) return
Access the cadc certificate server
47,759
def to_bool ( value : Any ) -> bool : return bool ( strtobool ( value ) if isinstance ( value , str ) else value )
Convert string or other Python object to boolean .
47,760
def to_int ( value : str , default : T = None ) -> Union [ int , Optional [ T ] ] : try : return int ( value ) except ( TypeError , ValueError ) : return default
Convert given value to int .
47,761
def mk_dict ( results , description ) : rows = [ ] for row in results : row_dict = { } for idx in range ( len ( row ) ) : col = description [ idx ] [ 0 ] row_dict [ col ] = row [ idx ] rows . append ( row_dict ) return rows
Given a result list and descrition sequence return a list of dictionaries
47,762
def get_orbits ( official = '%' ) : sql = "SELECT * FROM orbits WHERE official LIKE '%s' " % ( official , ) cfeps . execute ( sql ) return mk_dict ( cfeps . fetchall ( ) , cfeps . description )
Query the orbit table for the object whose official designation matches parameter official. By default all entries are returned.
47,763
def get_astrom ( official = '%' , provisional = '%' ) : sql = "SELECT m.* FROM measure m " sql += "LEFT JOIN object o ON m.provisional LIKE o.provisional " if not official : sql += "WHERE o.official IS NULL" else : sql += "WHERE o.official LIKE '%s' " % ( official , ) sql += " AND m.provisional LIKE '%s' " % ( provisional , ) cfeps . execute ( sql ) return mk_dict ( cfeps . fetchall ( ) , cfeps . description )
Query the measure table for all measurements of a particular object . Default is to return all the astrometry in the measure table sorted by mjdate
47,764
def getData ( file_id , ra , dec ) : DATA = "www.cadc-ccda.hia-iha.nrc-cnrc.gc.ca" BASE = "http://" + DATA + "/authProxy/getData" archive = "CFHT" wcs = "corrected" import re groups = re . match ( '^(?P<file_id>\d{6}).*' , file_id ) if not groups : return None file_id = groups . group ( 'file_id' ) file_id += "p" URL = BASE + "?dataset_name=" + file_id + "&cutout=circle(" + str ( ra * 57.3 ) + "," URL += str ( dec * 57.3 ) + "," + str ( 5.0 / 60.0 ) + ")" return URL
Create a link that connects to a getData URL
47,765
def delete_event ( self , uid ) : ev_for_deletion = self . calendar . get ( uid ) ev_for_deletion . delete ( )
Delete event and sync calendar
47,766
def simple_lmdb_settings ( path , map_size = 1e9 , user_supplied_id = False ) : def decorator ( cls ) : provider = ff . UserSpecifiedIdProvider ( key = '_id' ) if user_supplied_id else ff . UuidProvider ( ) class Settings ( ff . PersistenceSettings ) : id_provider = provider key_builder = ff . StringDelimitedKeyBuilder ( '|' ) database = ff . LmdbDatabase ( path , key_builder = key_builder , map_size = map_size ) class Model ( cls , Settings ) : pass Model . __name__ = cls . __name__ Model . __module__ = cls . __module__ return Model return decorator
Creates a decorator that can be used to configure sane default LMDB persistence settings for a model
47,767
def offset ( self , index = 0 ) : eta = self . _geometry [ self . camera ] [ index ] [ "ra" ] xi = self . _geometry [ self . camera ] [ index ] [ "dec" ] ra = self . origin . ra - ( eta / math . cos ( self . dec . radian ) ) * units . degree dec = self . origin . dec - xi * units . degree + 45 * units . arcsec self . _coordinate = SkyCoord ( ra , dec )
Offset the camera pointing to be centred on a particular CCD .
47,768
def coord ( self ) : if self . _coordinate is None : self . _coordinate = SkyCoord ( self . origin . ra , self . origin . dec + 45 * units . arcsec ) return self . _coordinate
The center of the camera pointing in sky coordinates
47,769
def requires_lock ( function ) : def new_lock_requiring_function ( self , filename , * args , ** kwargs ) : if self . owns_lock ( filename ) : return function ( self , filename , * args , ** kwargs ) else : raise RequiresLockException ( ) return new_lock_requiring_function
Decorator to check if the user owns the required lock . The first argument must be the filename .
47,770
def clean ( self , suffixes = None ) : if suffixes is None : suffixes = [ DONE_SUFFIX , LOCK_SUFFIX , PART_SUFFIX ] for suffix in suffixes : listing = self . working_context . get_listing ( suffix ) for filename in listing : self . working_context . remove ( filename )
Remove all persistence - related files from the directory .
47,771
def setFigForm ( ) : fig_width_pt = 245.26 * 2 inches_per_pt = 1.0 / 72.27 golden_mean = ( math . sqrt ( 5. ) - 1.0 ) / 2.0 fig_width = fig_width_pt * inches_per_pt fig_height = fig_width * golden_mean fig_size = [ 1.5 * fig_width , fig_height ] params = { 'backend' : 'ps' , 'axes.labelsize' : 12 , 'text.fontsize' : 12 , 'legend.fontsize' : 7 , 'xtick.labelsize' : 11 , 'ytick.labelsize' : 11 , 'text.usetex' : True , 'font.family' : 'serif' , 'font.serif' : 'Times' , 'image.aspect' : 'auto' , 'figure.subplot.left' : 0.1 , 'figure.subplot.bottom' : 0.1 , 'figure.subplot.hspace' : 0.25 , 'figure.figsize' : fig_size } rcParams . update ( params )
Set the rcParams to emulate ApJ figures; column width = 245.26 pts.
47,772
def getCert ( username , password , certHost = _SERVER , certfile = None , certQuery = _PROXY ) : if certfile is None : certfile = tempfile . NamedTemporaryFile ( ) password_mgr = urllib2 . HTTPPasswordMgrWithDefaultRealm ( ) top_level_url = "http://" + certHost logging . debug ( top_level_url ) password_mgr . add_password ( None , top_level_url , username , password ) handler = urllib2 . HTTPBasicAuthHandler ( password_mgr ) logging . debug ( str ( handler ) ) opener = urllib2 . build_opener ( handler ) urllib2 . install_opener ( opener ) url = "http://" + certHost + certQuery logging . debug ( url ) r = None try : r = opener . open ( url ) except urllib2 . HTTPError as e : logging . debug ( url ) logging . debug ( str ( e ) ) return False logging . debug ( str ( r ) ) if r is not None : while True : buf = r . read ( ) logging . debug ( buf ) if not buf : break certfile . write ( buf ) r . close ( ) return certfile
Access the cadc certificate server .
47,773
def getGroupsURL ( certfile , group ) : GMS = "https://" + _SERVER + _GMS certfile . seek ( 0 ) buf = certfile . read ( ) x509 = crypto . load_certificate ( crypto . FILETYPE_PEM , buf ) sep = "" dn = "" parts = [ ] for i in x509 . get_issuer ( ) . get_components ( ) : if i [ 0 ] in parts : continue parts . append ( i [ 0 ] ) dn = i [ 0 ] + "=" + i [ 1 ] + sep + dn sep = "," return GMS + "/" + group + "/" + urllib . quote ( dn )
given a certfile load a list of groups that user is a member of
47,774
def stub ( ) : form = cgi . FieldStorage ( ) userid = form [ 'userid' ] . value password = form [ 'passwd' ] . value group = form [ 'group' ] . value
Just some left over code
47,775
def parse_pv ( header ) : order_fit = parse_order_fit ( header ) def parse_with_base ( i ) : key_base = "PV%d_" % i pvi_x = [ header [ key_base + "0" ] ] def parse_range ( lower , upper ) : for j in range ( lower , upper + 1 ) : pvi_x . append ( header [ key_base + str ( j ) ] ) if order_fit >= 1 : parse_range ( 1 , 3 ) if order_fit >= 2 : parse_range ( 4 , 6 ) if order_fit >= 3 : parse_range ( 7 , 10 ) return pvi_x return [ parse_with_base ( 1 ) , parse_with_base ( 2 ) ]
Parses the PV array from an astropy FITS header .
47,776
def safe_unit_norm ( a ) : if 1 == len ( a . shape ) : n = np . linalg . norm ( a ) if n : return a / n return a norm = np . sum ( np . abs ( a ) ** 2 , axis = - 1 ) ** ( 1. / 2 ) norm [ norm == 0 ] = - 1e12 return a / norm [ : , np . newaxis ]
Ensure that the vector or vectors have unit norm
47,777
def pad ( a , desiredlength ) : if len ( a ) >= desiredlength : return a islist = isinstance ( a , list ) a = np . array ( a ) diff = desiredlength - len ( a ) shape = list ( a . shape ) shape [ 0 ] = diff padded = np . concatenate ( [ a , np . zeros ( shape , dtype = a . dtype ) ] ) return padded . tolist ( ) if islist else padded
Pad an n - dimensional numpy array with zeros along the zero - th dimension so that it is the desired length . Return it unchanged if it is greater than or equal to the desired length
47,778
def append ( self , item ) : try : self . _data [ self . _position ] = item except IndexError : self . _grow ( ) self . _data [ self . _position ] = item self . _position += 1 return self
append a single item to the array growing the wrapped numpy array if necessary
47,779
def extend ( self , items ) : items = np . array ( items ) pos = items . shape [ 0 ] + self . logical_size if pos > self . physical_size : amt = self . _tmp_size ( ) if self . physical_size + amt < pos : amt = pos - self . physical_size self . _grow ( amt = amt ) stop = self . _position + items . shape [ 0 ] self . _data [ self . _position : stop ] = items self . _position += items . shape [ 0 ] return self
extend the numpy array with multiple items growing the wrapped array if necessary
47,780
def align ( self , cutout , reading , source ) : if not self . current_displayable : return if not self . current_displayable . aligned : focus_sky_coord = reading . reference_sky_coord self . current_displayable . pan_to ( focus_sky_coord )
Set the display center to the reference point .
47,781
def phot_mag ( * args , ** kwargs ) : try : return phot ( * args , ** kwargs ) except IndexError : raise TaskError ( "No photometric records returned for {0}" . format ( kwargs ) )
Wrapper around phot which only returns the computed magnitude directly .
47,782
def from_env ( key : str , default : T = None ) -> Union [ str , Optional [ T ] ] : return os . getenv ( key , default )
Shortcut for safely reading environment variable .
47,783
def immutable_settings ( defaults : Settings , ** optionals : Any ) -> types . MappingProxyType : r settings = { key : value for key , value in iter_settings ( defaults ) } for key , value in iter_settings ( optionals ) : settings [ key ] = value return types . MappingProxyType ( settings )
r Initialize and return immutable Settings dictionary .
47,784
def inject_settings ( mixed : Union [ str , Settings ] , context : MutableMapping [ str , Any ] , fail_silently : bool = False ) -> None : if isinstance ( mixed , str ) : try : mixed = import_module ( mixed ) except Exception : if fail_silently : return raise for key , value in iter_settings ( mixed ) : context [ key ] = value
Inject settings values to given context .
47,785
def iter_settings ( mixed : Settings ) -> Iterator [ Tuple [ str , Any ] ] : if isinstance ( mixed , types . ModuleType ) : for attr in dir ( mixed ) : if not is_setting_key ( attr ) : continue yield ( attr , getattr ( mixed , attr ) ) else : yield from filter ( lambda item : is_setting_key ( item [ 0 ] ) , mixed . items ( ) )
Iterate over settings values from settings module or dict - like instance .
47,786
def setup_locale ( lc_all : str , first_weekday : int = None , * , lc_collate : str = None , lc_ctype : str = None , lc_messages : str = None , lc_monetary : str = None , lc_numeric : str = None , lc_time : str = None ) -> str : if first_weekday is not None : calendar . setfirstweekday ( first_weekday ) locale . setlocale ( locale . LC_COLLATE , lc_collate or lc_all ) locale . setlocale ( locale . LC_CTYPE , lc_ctype or lc_all ) locale . setlocale ( locale . LC_MESSAGES , lc_messages or lc_all ) locale . setlocale ( locale . LC_MONETARY , lc_monetary or lc_all ) locale . setlocale ( locale . LC_NUMERIC , lc_numeric or lc_all ) locale . setlocale ( locale . LC_TIME , lc_time or lc_all ) return locale . setlocale ( locale . LC_ALL , lc_all )
Shortcut helper to setup locale for backend application .
47,787
def setup_timezone ( timezone : str ) -> None : if timezone and hasattr ( time , 'tzset' ) : tz_root = '/usr/share/zoneinfo' tz_filename = os . path . join ( tz_root , * ( timezone . split ( '/' ) ) ) if os . path . exists ( tz_root ) and not os . path . exists ( tz_filename ) : raise ValueError ( 'Incorrect timezone value: {0}' . format ( timezone ) ) os . environ [ 'TZ' ] = timezone time . tzset ( )
Shortcut helper to configure timezone for backend application .
47,788
def inputs ( header ) : import string , re inputs = [ ] for h in header . ascardlist ( ) : if h . key == "HISTORY" : g = h . value result = re . search ( 'imcombred: (\d{6}[bfopd])\d{2} .*' , g ) if not result : continue file_id = result . group ( 1 ) import os status = os . system ( "adInfo -a CFHT -s " + file_id ) if status == 0 : result = re . search ( '(\d{6}).*' , file_id ) if not result : continue expnum = result . group ( 1 ) inputs . append ( expnum ) if len ( inputs ) == 0 : nit = header . get ( 'IMCMB_NI' , 0 ) if nit == 0 : return ( inputs ) for nin in range ( nit ) : kwd = 'IMCMB_' + str ( nin ) . zfill ( 2 ) file = ( header . get ( kwd , '' ) ) result = re . search ( '.*(\d{6}[bfopd]).*' , g ) if not result : continue file_id = result . group ( 1 ) import os status = os . system ( "adInfo -a CFHT -s " + file_id ) if status == 0 : result = re . search ( '(\d{6}).*' , file_id ) if not result : continue expnum = result . group ( 1 ) inputs . append ( expnum ) return inputs
Read through the HISTORY cards in an image header looking for detrend input lines .
47,789
def elixir_decode ( elixir_filename ) : import re , pyfits parts_RE = re . compile ( r'([^\.\s]+)' ) dataset_name = parts_RE . findall ( elixir_filename ) if not dataset_name or len ( dataset_name ) < 5 : raise ValueError ( 'String %s does not parse as elixir filename' % elixir_filename ) comments = { 'exptime' : 'Integration time (seconds)' , 'filter' : 'Name of filter in position ' , 'crunid' : 'CFHT Q RunID' , 'obstype' : 'Observation or Exposure type' , 'imageid' : 'CCD chip number' , 'filename' : 'file name at creation of this MEF file' } keywords = { } keywords [ 'filename' ] = elixir_filename keywords [ 'runid' ] = dataset_name [ 0 ] keywords [ 'obstype' ] = dataset_name [ 1 ] keywords [ 'exptime' ] = None keywords [ 'filter' ] = None if re . match ( r'\d+' , dataset_name [ 2 ] ) : keyword [ 'exptime' ] = int ( dataset_name [ 2 ] ) else : keyword [ 'filter' ] = dataset_name [ 2 ] keywords [ 'imageid' ] = dataset_name [ 3 ] keywords [ 'version' ] = dataset_name [ 4 ] header = pyfits . Header ( ) for keyword in keywords . keys ( ) : if keywords [ keyword ] : header . update ( keyword , keywords [ keyword ] , comment = comment [ keyword ] ) return header
Takes an elixir style file name and decodes it s content .
47,790
def create_mef ( filename = None ) : import pyfits , time if not filename : import tempfile filename = tempfile . mktemp ( suffix = '.fits' ) else : import string , re filename = string . strip ( str ( filename ) ) suffix = re . match ( r'^.*.fits$' , filename ) if not suffix : filename = filename + '.fits' temp = pyfits . HDUList ( ) prihdu = pyfits . PrimaryHDU ( ) h = prihdu . header h . update ( 'EXTEND' , pyfits . TRUE , after = 'NAXIS' ) h . update ( 'NEXTEND' , 0 , after = 'EXTEND' ) h . add_comment ( 'MEF created at CADC' ) h . add_comment ( 'Created using ' + __name__ + ' ' + __Version__ ) h . add_comment ( 'Extensions may not be in CCD order' ) h . add_comment ( 'Use the EXTNAME keyword' ) h . add_history ( 'Primary HDU created on ' + time . asctime ( ) ) temp . append ( prihdu ) temp . writeto ( filename ) temp . close ( ) return ( filename )
Create a file an MEF fits file called filename . Generate a random filename if None given
47,791
def strip_pad ( hdu ) : l = hdu . header . ascardlist ( ) d = [ ] for index in range ( len ( l ) ) : if l [ index ] . key in __comment_keys and str ( l [ index ] ) == __cfht_padding : d . append ( index ) d . reverse ( ) for index in d : del l [ index ] return ( 0 )
Remove the padding lines that CFHT adds to headers
47,792
def stack ( outfile , infiles , verbose = 0 ) : import os , sys , string , tempfile , shutil import pyfits , re , time if os . access ( outfile , os . R_OK ) != 1 : if verbose : print "Creating new MEF file: " , outfile outfile = create_mef ( outfile ) out = pyfits . open ( outfile , 'append' ) hdr = out [ 0 ] . header count = 0 det_xmin = None det_xmax = None det_ymin = None det_ymax = None for infile in infiles : if verbose : print "Adding " , infile , " to " , outfile file = _open ( infile ) if not file : raise IOError ( "Cann't get the HDU for " + infile ) for hdu in file : extname = None if hdu . header . has_key ( 'EXTNAME' ) : extname = hdu . header [ 'EXTNAME' ] elif hdu . header . has_key ( 'EXTVER' ) : extname = "ccd" + string . zfill ( hdu . header . has_key ( 'EXTVER' ) , 2 ) if hdu . header . has_key ( 'EPOCH' ) : if hdu . header . has_key ( 'EQUINOX' ) : del hdu . header [ 'EPOCH' ] else : hdu . header . update ( 'EQUINOX' , hdu . header [ 'EQUINOX' ] . value , comment = hdu . header [ 'EQUINOX' ] . comment ) ahdu = pyfits . ImageHDU ( data = hdu . data , header = hdu . header , name = extname ) out . append ( ahdu ) if hdu . header . has_key ( 'DETSEC' ) : values = re . findall ( r'(\d+)' , hdu . header [ 'DETSEC' ] ) if len ( values ) == 4 : xmin = int ( values [ 0 ] ) xmax = int ( values [ 1 ] ) ymin = int ( values [ 2 ] ) ymax = int ( values [ 3 ] ) if xmin > xmax : t = xmin xmin = xmax xmax = t if ymin > ymax : t = ymin ymin = ymax ymax = t if xmin < det_xmin or not det_xmin : det_xmin = xmin if xmax > det_xmax or not det_xmax : det_xmax = xmax if ymin < det_ymin or not det_ymin : det_ymin = ymin if ymax > det_ymax or not det_ymax : det_ymax = ymax file . close ( ) detsize = '[' + str ( det_xmin ) + ':' + str ( det_xmax ) + ',' + str ( det_ymin ) + ':' + str ( det_ymax ) + ']' out [ 0 ] . header . update ( 'DETSIZE' , detsize , comment = 'Size of Mosaic' ) out . close ( ) if verbose : print "Done building MEF: " , outfile return 0
Stick infiles into outfiles as FITS extensions .
47,793
def adGet ( file_id , archive = "CFHT" , extno = None , cutout = None ) : import os , string , re , urllib proxy = "http://test.cadc-ccda.hia-iha.nrc-cnrc.gc.ca/authProxy/getData" if file_id is None : return ( - 1 ) if extno is None : filename = file_id + ".fits" else : filename = "%s%s.fits" % ( file_id , string . zfill ( extno , 2 ) ) print filename if os . access ( filename , os . R_OK ) : return filename args = { "file_id" : file_id , "archive" : archive } if extno is not None : args [ 'cutout' ] = "[" + str ( extno + 1 ) + "]" else : args [ 'cutout' ] = '' if cutout is not None : args [ 'cutout' ] = args [ 'cutout' ] + cutout argline = "" sep = "" import sys mop_data_path = os . curdir if os . environ . has_key ( 'MOP_DATA_PATH' ) : mop_data_path = os . environ [ 'MOP_DATA_PATH' ] suffix = "fits" basefile = mop_data_path + "/" + file_id + ".fits" print basefile if not os . access ( basefile , os . R_OK ) : argdict = { } argline = '' sep = '' for arg in args : if not args [ arg ] : continue argline += sep + "%s=%s" % ( arg , args [ arg ] ) sep = '&' url = proxy + "?" + argline command = "curl --silent -g --fail --max-time 1800 --user jkavelaars:newone '" + url + "' | gunzip > " + filename else : command = "imcopy %s%s %s" % ( basefile , args [ 'cutout' ] , filename ) print command try : status = os . system ( command ) except : sys . stderr . write ( "Failed to execute command: %s\n" % ( command ) ) raise TaskError , "getData failed" if status != 0 : sys . stderr . write ( "Failed while executing command: %s\n" % ( command ) ) raise TaskError , "getData failed" return filename
Get a FITS image from the CADC.
47,794
def _open ( file , mode = 'copyonwrite' ) : import pyfits try : infits = pyfits . open ( file , mode ) hdu = infits except ( ValueError , pyfits . VerifyError , pyfits . FITS_SevereError ) : import sys hdu = _open_fix ( file ) for f in hdu : strip_pad ( f ) return hdu
Opens a FITS format file and calls _open_fix if header doesn t verify correctly .
47,795
def find_proc_date ( header ) : import string , re for h in header . ascardlist ( ) : if h . key == "HISTORY" : g = h . value if ( string . find ( g , 'FLIPS 1.0 -:' ) ) : result = re . search ( 'imred: FLIPS 1.0 - \S{3} (.*) - ([\s\d]\d:\d\d:\d\d)\s*$' , g ) if result : date = result . group ( 1 ) time = result . group ( 2 ) datetime = date + " " + time return datetime return None
Search the HISTORY fields of a header looking for the FLIPS processing date .
47,796
def build_source_reading ( expnum , ccd = None , ftype = 'p' ) : logger . debug ( "Building source reading for expnum:{} ccd:{} ftype:{}" . format ( expnum , ccd , ftype ) ) return astrom . Observation ( expnum = str ( expnum ) , ftype = ftype , ccdnum = ccd )
Build an astrom . Observation object for a SourceReading
47,797
def recenter ( self ) : if self . ctype1 . find ( 'TAN' ) < 0 or self . ctype2 . find ( 'TAN' ) < 0 : print 'WCS.recenter() only supported for TAN projections.' raise TypeError if self . crpix1 == self . naxis1 / 2. and self . crpix2 == self . naxis2 / 2. : return _drz_off = 0. _cen = ( self . naxis1 / 2. + _drz_off , self . naxis2 / 2. + _drz_off ) _cenrd = self . xy2rd ( _cen ) _cd = N . array ( [ [ self . cd11 , self . cd12 ] , [ self . cd21 , self . cd22 ] ] , type = N . Float64 ) _ra0 = DEGTORAD ( self . crval1 ) _dec0 = DEGTORAD ( self . crval2 ) _ra = DEGTORAD ( _cenrd [ 0 ] ) _dec = DEGTORAD ( _cenrd [ 1 ] ) _dx = self . naxis1 / 2. - self . crpix1 _dy = self . naxis2 / 2. - self . crpix2 _dE , _dN = DEGTORAD ( N . dot ( _cd , ( _dx , _dy ) ) ) _dE_dN = 1 + N . power ( _dE , 2 ) + N . power ( _dN , 2 ) _cosdec = N . cos ( _dec ) _sindec = N . sin ( _dec ) _cosdec0 = N . cos ( _dec0 ) _sindec0 = N . sin ( _dec0 ) _n1 = N . power ( _cosdec , 2 ) + _dE * _dE + _dN * _dN * N . power ( _sindec , 2 ) _dra_dE = ( _cosdec0 - _dN * _sindec0 ) / _n1 _dra_dN = _dE * _sindec0 / _n1 _ddec_dE = - _dE * N . tan ( _dec ) / _dE_dN _ddec_dN = ( 1 / _cosdec ) * ( ( _cosdec0 / N . sqrt ( _dE_dN ) ) - ( _dN * N . sin ( _dec ) / _dE_dN ) ) _cd11n = _cosdec * ( self . cd11 * _dra_dE + self . cd21 * _dra_dN ) _cd12n = _cosdec * ( self . cd12 * _dra_dE + self . cd22 * _dra_dN ) _cd21n = self . cd11 * _ddec_dE + self . cd21 * _ddec_dN _cd22n = self . cd12 * _ddec_dE + self . cd22 * _ddec_dN _new_orient = RADTODEG ( N . arctan2 ( _cd12n , _cd22n ) ) self . crpix1 = _cen [ 0 ] self . crpix2 = _cen [ 1 ] self . crval1 = RADTODEG ( _ra ) self . crval2 = RADTODEG ( _dec ) self . rotateCD ( _new_orient ) self . cd11 = _cd11n self . cd12 = _cd12n self . cd21 = _cd21n self . cd22 = _cd22n
Reset the reference position values to correspond to the center of the reference frame . Algorithm used here developed by Colin Cox - 27 - Jan - 2004 .
47,798
def _buildNewKeyname ( self , key , prepend ) : if len ( prepend + key ) <= 8 : _new_key = prepend + key else : _new_key = str ( prepend + key ) [ : 8 ] return _new_key
Builds a new keyword based on original keyword name and a prepend string .
47,799
def ushort ( filename ) : import pyfits f = pyfits . open ( filename , mode = 'update' ) f [ 0 ] . scale ( 'int16' , '' , bzero = 32768 ) f . flush ( ) f . close ( )
Convert the image pixels to unsigned-short storage.