code string | signature string | docstring string | loss_without_docstring float64 | loss_with_docstring float64 | factor float64 |
|---|---|---|---|---|---|
# Run rpmbuild (-bs = build source package only) with macro overrides so
# the generated SRPM lands in save_dir instead of the default tree.
logger.info('Starting rpmbuild to build: {0} SRPM.'.format(specfile))
# Custom save_dir: treat it as a flat tree -- sources, build products and
# (S)RPMs all go directly into save_dir.
if save_dir != get_default_save_path():
try:
msg = subprocess.Popen(
['rpmbuild',
'--define', '_sourcedir {0}'.format(save_dir),
'--define', '_builddir {0}'.format(save_dir),
'--define', '_srcrpmdir {0}'.format(save_dir),
'--define', '_rpmdir {0}'.format(save_dir),
'-bs', specfile], stdout=subprocess.PIPE).communicate(
)[0].strip()
# OSError means the rpmbuild executable itself could not be run.
except OSError:
logger.error(
"Rpmbuild failed for specfile: {0} and save_dir: {1}".format(
specfile, save_dir), exc_info=True)
msg = 'Rpmbuild failed. See log for more info.'
return msg
# Default save path: expect the standard rpmdevtools layout with
# SOURCES/BUILD/SRPMS/RPMS subdirectories under save_dir.
else:
if not os.path.exists(save_dir):
raise IOError("Specify folder to store a file (SAVE_DIR) "
"or install rpmdevtools.")
try:
msg = subprocess.Popen(
['rpmbuild',
'--define', '_sourcedir {0}'.format(save_dir + '/SOURCES'),
'--define', '_builddir {0}'.format(save_dir + '/BUILD'),
'--define', '_srcrpmdir {0}'.format(save_dir + '/SRPMS'),
'--define', '_rpmdir {0}'.format(save_dir + '/RPMS'),
'-bs', specfile], stdout=subprocess.PIPE).communicate(
)[0].strip()
except OSError:
logger.error("Rpmbuild failed for specfile: {0} and save_dir: "
"{1}".format(specfile, save_dir), exc_info=True)
msg = 'Rpmbuild failed. See log for more info.'
return msg | def build_srpm(specfile, save_dir) | Builds a srpm from given specfile using rpmbuild.
Generated srpm is stored in directory specified by save_dir.
Args:
specfile: path to a specfile
save_dir: path to source and build tree | 2.194827 | 2.21729 | 0.989869 |
# Match a trailing "-MAJOR.MINOR" (or "-MAJOR") version suffix.
# Fixed: use a raw string (non-raw "\d" triggers a DeprecationWarning on
# modern Pythons) and escape the dot so it matches a literal period
# (e.g. "-2.7") rather than any character.
minor_major_regex = re.compile(r"-\d\.?\d?$")
return [x for x in scripts if not minor_major_regex.search(x)] | def remove_major_minor_suffix(scripts) | Checks if executables already contain a "-MAJOR.MINOR" suffix. | 4.665373 | 4.213418 | 1.107265 |
# Work on a deep copy so the caller's runtime dependency list is never
# mutated, then retag each non-empty dependency entry as a BuildRequires.
build_deps = copy.deepcopy(runtime_deps)
for dependency in build_deps:
    if dependency:
        dependency[0] = 'BuildRequires'
return build_deps | def runtime_to_build(runtime_deps) | Adds all runtime deps to build deps | 4.305658 | 4.128199 | 1.042987 |
# Sort in place first so itertools.groupby can collapse adjacent
# duplicate dependency entries.
deps.sort()
return list(k for k, _ in itertools.groupby(deps)) | def unique_deps(deps) | Remove duplicities from deps list of the lists | 5.576541 | 4.74893 | 1.174273 |
# Remember the current LC_TIME setting, force the portable "C" locale
# for the duration of the with-block body, then restore afterwards.
old_time_locale = locale.getlocale(locale.LC_TIME)
locale.setlocale(locale.LC_TIME, 'C')
# NOTE(review): if the with-block body raises, the restore after the
# yield is skipped -- consider wrapping the yield in try/finally.
yield
locale.setlocale(locale.LC_TIME, old_time_locale) | def c_time_locale() | Context manager with C LC_TIME locale | 2.005813 | 1.970804 | 1.017764 |
# Expand the macro by shelling out to `rpm --eval`; if the rpm binary is
# missing (OSError), log it and fall back to an empty byte string.
try:
value = subprocess.Popen(
['rpm', '--eval', macro],
stdout=subprocess.PIPE).communicate()[0].strip()
except OSError:
logger.error('Failed to get value of {0} rpm macro'.format(
macro), exc_info=True)
# Empty bytes signals "no value"; console_to_str decodes it below.
value = b''
return console_to_str(value) | def rpm_eval(macro) | Get value of given macro using rpm tool | 3.906656 | 3.595253 | 1.086615 |
# Expand %{_topdir}: prefer the rpm Python bindings when importable,
# otherwise shell out through rpm_eval().
macro = '%{_topdir}'
if rpm:
    save_path = rpm.expandMacro(macro)
else:
    save_path = rpm_eval(macro)
if not save_path:
    # Neither path produced a value: fall back to the conventional
    # rpmbuild tree in the user's home directory.
    # (logger.warn is deprecated; logger.warning is the supported API.)
    logger.warning("rpm tools are missing, using default save path "
                   "~/rpmbuild/.")
    save_path = os.path.expanduser('~/rpmbuild')
return save_path | def get_default_save_path() | Return default save path for the packages | 8.262697 | 7.718862 | 1.070455 |
# Create an empty results table; run_align() fills it in place with the
# per-image processing/alignment information.
filteredTable = Table()
run_align(input_list, result=filteredTable, **kwargs)
return filteredTable | def perform_align(input_list, **kwargs) | Main calling function.
Parameters
----------
input_list : list
List of one or more IPPSSOOTs (rootnames) to align.
archive : Boolean
Retain copies of the downloaded files in the astroquery created sub-directories?
clobber : Boolean
Download and overwrite existing local copies of input files?
debug : Boolean
Attempt to use saved sourcelists stored in pickle files if they exist, or if they do not exist, save
sourcelists in pickle files for reuse so that step 4 can be skipped for faster subsequent debug/development
runs??
update_hdr_wcs : Boolean
Write newly computed WCS information to image image headers?
print_fit_parameters : Boolean
Specify whether or not to print out FIT results for each chip.
print_git_info : Boolean
Display git repository information?
output : Boolean
Should utils.astrometric_utils.create_astrometric_catalog() generate file 'ref_cat.ecsv' and should
generate_source_catalogs() generate the .reg region files for every chip of every input image and should
generate_astrometric_catalog() generate file 'refcatalog.cat'?
Updates
-------
filteredTable: Astropy Table
Table which contains processing information and alignment results for every raw image evaluated | 9.081182 | 10.775684 | 0.842748 |
# Two-stage alignment: (1) fit all images relative to the first input,
# then (2) fit the whole ensemble to the absolute reference catalog.
log.info("------------------- STEP 5b: (match_relative_fit) Cross matching and fitting ---------------------------")
# 0: Specify matching algorithm to use
match = tweakwcs.TPMatch(searchrad=75, separation=0.1,
tolerance=2, use2dhist=True)
# match = tweakwcs.TPMatch(searchrad=250, separation=0.1,
#                          tolerance=100, use2dhist=False)
# Align images and correct WCS
# NOTE: this invocation does not use an astrometric catalog. This call allows all the input images to be aligned in
# a relative way using the first input image as the reference.
# 1: Perform relative alignment
tweakwcs.align_wcs(imglist, None, match=match, expand_refcat=True)
# Set all the group_id values to be the same so the various images/chips will be aligned to the astrometric
# reference catalog as an ensemble.
# BEWARE: If additional iterations of solutions are to be done, the group_id values need to be restored.
for image in imglist:
image.meta["group_id"] = 1234567
# 2: Perform absolute alignment
tweakwcs.align_wcs(imglist, reference_catalog, match=match)
# 3: Interpret RMS values from tweakwcs
interpret_fit_rms(imglist, reference_catalog)
return imglist | def match_relative_fit(imglist, reference_catalog) | Perform cross-matching and final fit using 2dHistogram matching
Parameters
----------
imglist : list
List of input image `~tweakwcs.tpwcs.FITSWCS` objects with metadata and source catalogs
reference_catalog : Table
Astropy Table of reference sources for this field
Returns
--------
imglist : list
List of input image `~tweakwcs.tpwcs.FITSWCS` objects with metadata and source catalogs | 7.978938 | 7.706194 | 1.035393 |
# Single-stage absolute alignment directly against the reference
# catalog (wide search radius, no 2d-histogram pre-match).
log.info("-------------------- STEP 5b: (match_default_fit) Cross matching and fitting ---------------------------")
# Specify matching algorithm to use
match = tweakwcs.TPMatch(searchrad=250, separation=0.1,
tolerance=100, use2dhist=False)
# Align images and correct WCS
tweakwcs.align_wcs(imglist, reference_catalog, match=match, expand_refcat=False) #TODO: turn on 'expand_refcat' option in future development
# Interpret RMS values from tweakwcs
interpret_fit_rms(imglist, reference_catalog)
return imglist | def match_default_fit(imglist, reference_catalog) | Perform cross-matching and final fit using 2dHistogram matching
Parameters
----------
imglist : list
List of input image `~tweakwcs.tpwcs.FITSWCS` objects with metadata and source catalogs
reference_catalog : Table
Astropy Table of reference sources for this field
Returns
--------
imglist : list
List of input image `~tweakwcs.tpwcs.FITSWCS` objects with metadata and source catalogs | 10.400321 | 9.849094 | 1.055967 |
# generate catalog
# pars['output'] is temporarily rewritten (bool -> filename or None) for
# create_astrometric_catalog(), then restored from the saved copy.
temp_pars = pars.copy()
if pars['output'] == True:
pars['output'] = 'ref_cat.ecsv'
else:
pars['output'] = None
out_catalog = amutils.create_astrometric_catalog(imglist,**pars)
pars = temp_pars.copy()
#if the catalog has contents, write the catalog to ascii text file
if len(out_catalog) > 0 and pars['output']:
catalog_filename = "refcatalog.cat"
out_catalog.write(catalog_filename, format="ascii.fast_commented_header")
log.info("Wrote reference catalog {}.".format(catalog_filename))
return(out_catalog) | def generate_astrometric_catalog(imglist, **pars) | Generates a catalog of all sources from an existing astrometric catalog are in or near the FOVs of the images in
the input list.
Parameters
----------
imglist : list
List of one or more calibrated fits images that will be used for catalog generation.
Returns
=======
ref_table : object
Astropy Table object of the catalog | 4.071446 | 3.993756 | 1.019453 |
# 'output' controls whether per-chip DS9 region files are written.
output = pars.get('output', False)
sourcecatalogdict = {}
for imgname in imglist:
log.info("Image name: {}".format(imgname))
sourcecatalogdict[imgname] = {}
# open image
imghdu = fits.open(imgname)
imgprimaryheader = imghdu[0].header
instrument = imgprimaryheader['INSTRUME'].lower()
detector = imgprimaryheader['DETECTOR'].lower()
# get instrument/detector-specific image alignment parameters
if instrument in detector_specific_params.keys():
if detector in detector_specific_params[instrument].keys():
detector_pars = detector_specific_params[instrument][detector]
# to allow generate_source_catalog to get detector specific parameters
detector_pars.update(pars)
sourcecatalogdict[imgname]["params"] = detector_pars
else:
sys.exit("ERROR! Unrecognized detector '{}'. Exiting...".format(detector))
# NOTE(review): sys.exit() raises SystemExit above, so this log.error
# call is unreachable; log before exiting if the message is wanted.
log.error("ERROR! Unrecognized detector '{}'. Exiting...".format(detector))
else:
sys.exit("ERROR! Unrecognized instrument '{}'. Exiting...".format(instrument))
# NOTE(review): unreachable for the same reason as above.
log.error("ERROR! Unrecognized instrument '{}'. Exiting...".format(instrument))
# Identify sources in image, convert coords from chip x, y form to reference WCS sky RA, Dec form.
imgwcs = HSTWCS(imghdu, 1)
fwhmpsf_pix = sourcecatalogdict[imgname]["params"]['fwhmpsf']/imgwcs.pscale #Convert fwhmpsf from arsec to pixels
sourcecatalogdict[imgname]["catalog_table"] = amutils.generate_source_catalog(imghdu, fwhm=fwhmpsf_pix, **detector_pars)
# write out coord lists to files for diagnostic purposes. Protip: To display the sources in these files in DS9,
# set the "Coordinate System" option to "Physical" when loading the region file.
imgroot = os.path.basename(imgname).split('_')[0]
numSci = amutils.countExtn(imghdu)
# Allow user to decide when and how to write out catalogs to files
if output:
for chip in range(1,numSci+1):
chip_cat = sourcecatalogdict[imgname]["catalog_table"][chip]
if chip_cat and len(chip_cat) > 0:
regfilename = "{}_sci{}_src.reg".format(imgroot, chip)
out_table = Table(chip_cat)
out_table.write(regfilename, include_names=["xcentroid", "ycentroid"], format="ascii.fast_commented_header")
log.info("Wrote region file {}\n".format(regfilename))
imghdu.close()
return(sourcecatalogdict) | def generate_source_catalogs(imglist, **pars) | Generates a dictionary of source catalogs keyed by image name.
Parameters
----------
imglist : list
List of one or more calibrated fits images that will be used for source detection.
Returns
-------
sourcecatalogdict : dictionary
a dictionary (keyed by image name) of two element dictionaries which in tern contain 1) a dictionary of the
detector-specific processing parameters and 2) an astropy table of position and photometry information of all
detected sources | 3.946356 | 3.854948 | 1.023712 |
# For each fitted chip: update the image WCS in place; after the last
# chip of a file, flush/close it and write + attach a headerlet.
out_headerlet_dict = {}
for item in tweakwcs_output:
imageName = item.meta['filename']
chipnum = item.meta['chip']
# chip 1 marks the start of a new image: (re)open it for update.
if chipnum == 1:
chipctr = 1
hdulist = fits.open(imageName, mode='update')
num_sci_ext = amutils.countExtn(hdulist)
# generate wcs name for updated image header, headerlet
if not hdulist['SCI',1].header['WCSNAME'] or hdulist['SCI',1].header['WCSNAME'] =="": #Just in case header value 'wcsname' is empty.
wcsName = "FIT_{}".format(item.meta['catalog_name'])
else:
wname = hdulist['sci', 1].header['wcsname']
# Keep only the part of an existing name before any '-' suffix.
if "-" in wname:
wcsName = '{}-FIT_{}'.format(wname[:wname.index('-')], item.meta['fit_info']['catalog'])
else:
wcsName = '{}-FIT_{}'.format(wname, item.meta['fit_info']['catalog'])
# establish correct mapping to the science extensions
sciExtDict = {}
for sciExtCtr in range(1, num_sci_ext + 1):
sciExtDict["{}".format(sciExtCtr)] = fileutil.findExtname(hdulist,'sci',extver=sciExtCtr)
# update header with new WCS info
updatehdr.update_wcs(hdulist, sciExtDict["{}".format(item.meta['chip'])], item.wcs, wcsname=wcsName,
reusename=True, verbose=True)
if chipctr == num_sci_ext:
# Close updated flc.fits or flt.fits file
#log.info("CLOSE {}\n".format(imageName)) # TODO: Remove before deployment
hdulist.flush()
hdulist.close()
# Create headerlet
out_headerlet = headerlet.create_headerlet(imageName, hdrname=wcsName, wcsname=wcsName)
# Update headerlet
update_headerlet_phdu(item, out_headerlet)
# Write headerlet
# NOTE(review): if imageName ends with neither "flc.fits" nor
# "flt.fits", headerlet_filename is unbound below (NameError); also
# str.replace substitutes every occurrence, not just the suffix.
if imageName.endswith("flc.fits"):
headerlet_filename = imageName.replace("flc", "flt_hlet")
if imageName.endswith("flt.fits"):
headerlet_filename = imageName.replace("flt", "flt_hlet")
out_headerlet.writeto(headerlet_filename, clobber=True)
log.info("Wrote headerlet file {}.\n\n".format(headerlet_filename))
out_headerlet_dict[imageName] = headerlet_filename
# Attach headerlet as HDRLET extension
headerlet.attach_headerlet(imageName, headerlet_filename)
chipctr +=1
return (out_headerlet_dict) | def update_image_wcs_info(tweakwcs_output) | Write newly computed WCS information to image headers and write headerlet files
Parameters
----------
tweakwcs_output : list
output of tweakwcs. Contains sourcelist tables, newly computed WCS info, etc. for every chip of every valid
input image.
Returns
-------
out_headerlet_list : dictionary
a dictionary of the headerlet files created by this subroutine, keyed by flt/flc fits filename. | 4.028672 | 3.76856 | 1.069022 |
# Get the data to be used as values for FITS keywords
rms_ra = tweakwcs_item.meta['fit_info']['RMS_RA'].value
rms_dec = tweakwcs_item.meta['fit_info']['RMS_DEC'].value
fit_rms = tweakwcs_item.meta['fit_info']['FIT_RMS']
nmatch = tweakwcs_item.meta['fit_info']['nmatches']
catalog = tweakwcs_item.meta['fit_info']['catalog']
x_shift = (tweakwcs_item.meta['fit_info']['shift'])[0]
y_shift = (tweakwcs_item.meta['fit_info']['shift'])[1]
rot = tweakwcs_item.meta['fit_info']['rot']
scale = tweakwcs_item.meta['fit_info']['scale'][0]
skew = tweakwcs_item.meta['fit_info']['skew']
# Update the existing FITS keywords
primary_header = headerlet[0].header
primary_header['RMS_RA'] = rms_ra
primary_header['RMS_DEC'] = rms_dec
primary_header['NMATCH'] = nmatch
primary_header['CATALOG'] = catalog
# Create a new FITS keyword
primary_header['FIT_RMS'] = (fit_rms, 'RMS (mas) of the 2D fit of the headerlet solution')
# Create the set of HISTORY keywords
primary_header['HISTORY'] = '~~~~~ FIT PARAMETERS ~~~~~'
primary_header['HISTORY'] = '{:>15} : {:9.4f} "/pixels'.format('platescale', tweakwcs_item.wcs.pscale)
primary_header['HISTORY'] = '{:>15} : {:9.4f} pixels'.format('x_shift', x_shift)
primary_header['HISTORY'] = '{:>15} : {:9.4f} pixels'.format('y_shift', y_shift)
primary_header['HISTORY'] = '{:>15} : {:9.4f} degrees'.format('rotation', rot)
primary_header['HISTORY'] = '{:>15} : {:9.4f}'.format('scale', scale)
primary_header['HISTORY'] = '{:>15} : {:9.4f}'.format('skew', skew) | def update_headerlet_phdu(tweakwcs_item, headerlet) | Update the primary header data unit keywords of a headerlet object in-place
Parameters
==========
tweakwc_item :
Basically the output from tweakwcs which contains the cross match and fit
information for every chip of every valid input image.
headerlet :
object containing WCS information | 2.269608 | 2.279821 | 0.99552 |
# Seed the parameter dictionary used to build the task configuration;
# 'input' is mandatory, the rest get fixed defaults for sky processing.
if input is not None:
inputDict['input']=input
inputDict['output']=None
inputDict['updatewcs']=False
inputDict['group']=group
else:
print("Please supply an input image", file=sys.stderr)
# NOTE(review): prefer raise ValueError("Please supply an input image")
# so the message travels with the exception.
raise ValueError
# loadOnly=False opens the interactive parameter editor; it returns
# None when the user cancels, in which case nothing is run.
configObj = util.getDefaultConfigObj(__taskname__,configObj,inputDict,loadOnly=(not editpars))
if configObj is None:
return
if not editpars:
run(configObj,outExt=outExt) | def sky(input=None,outExt=None,configObj=None, group=None, editpars=False, **inputDict) | Perform sky subtraction on input list of images
Parameters
----------
input : str or list of str
a python list of image filenames, or just a single filename
configObj : configObject
an instance of configObject
inputDict : dict, optional
an optional list of parameters specified by the user
outExt : str
The extension of the output image. If the output already exists
then the input image is overwritten
Notes
-----
These are parameters that the configObj should contain by default,
they can be altered on the fly using the inputDict
Parameters that should be in configobj:
========== ===================================================================
Name Definition
========== ===================================================================
skymethod 'Sky computation method'
skysub 'Perform sky subtraction?'
skywidth 'Bin width of histogram for sampling sky statistics (in sigma)'
skystat 'Sky correction statistics parameter'
skylower 'Lower limit of usable data for sky (always in electrons)'
skyupper 'Upper limit of usable data for sky (always in electrons)'
skyclip 'Number of clipping iterations'
skylsigma 'Lower side clipping factor (in sigma)'
skyusigma 'Upper side clipping factor (in sigma)'
skymask_cat 'Catalog file listing image masks'
use_static 'Use static mask for skymatch computations?'
sky_bits 'Integer mask bit values considered good pixels in DQ array'
skyfile 'Name of file with user-computed sky values'
skyuser 'KEYWORD indicating a sky subtraction value if done by user'
in_memory 'Optimize for speed or for memory use'
========== ===================================================================
The output from sky subtraction is a copy of the original input file
where all the science data extensions have been sky subtracted. | 4.65004 | 5.719779 | 0.812975 |
skyKW="MDRIZSKY" #header keyword that contains the sky that's been subtracted
# create dict of fname=sky pairs
skyvals = {}
if apply_sky is None:
skyapplied = False # flag whether sky has already been applied to images
else:
skyapplied = apply_sky
# Parse the sky file: a "# ... applied = yes" style comment toggles
# skyapplied; data lines are "filename sky1 [sky2 ...]".
# NOTE(review): the file handle is never closed -- consider `with open`.
for line in open(skyFile):
if apply_sky is None and line[0] == '#' and 'applied' in line:
# NOTE(review): linesep is unbound if the comment contains
# neither '=' nor ':' (NameError on the next line).
if '=' in line: linesep = '='
if ':' in line: linesep = ':'
appliedstr = line.split(linesep)[1].strip()
if appliedstr.lower() in ['yes','true','y','t']:
skyapplied = True
print('...Sky values already applied by user...')
if not util.is_blank(line) and line[0] != '#':
lspl = line.split()
svals = []
for lvals in lspl[1:]:
svals.append(float(lvals))
skyvals[lspl[0]] = svals
# Apply user values to appropriate input images
for imageSet in imageObjList:
fname = imageSet._filename
numchips=imageSet._numchips
sciExt=imageSet.scienceExt
if fname in skyvals:
print(" ...updating MDRIZSKY with user-supplied value.")
for chip in range(1,numchips+1,1):
# A single value applies to all chips; otherwise one per chip.
if len(skyvals[fname]) == 1:
_skyValue = skyvals[fname][0]
else:
_skyValue = skyvals[fname][chip-1]
chipext = '%s,%d'%(sciExt,chip)
_updateKW(imageSet[chipext],fname,(sciExt,chip),skyKW,_skyValue)
# Update internal record with subtracted sky value
#
# .computedSky: value to be applied by the
#               adrizzle/ablot steps.
# .subtractedSky: value already (or will be by adrizzle/ablot)
#                 subtracted from the image
if skyapplied:
imageSet[chipext].computedSky = None # used by adrizzle/ablot
else:
imageSet[chipext].computedSky = _skyValue
imageSet[chipext].subtractedSky = _skyValue
print("Setting ",skyKW,"=",_skyValue)
else:
print("*"*40)
print("*")
print("WARNING:")
print(" .... NO user-supplied sky value found for ",fname)
print(" .... Setting sky to a value of 0.0! ")
print("*")
print("*"*40) | def _skyUserFromFile(imageObjList, skyFile, apply_sky=None) | Apply sky value as read in from a user-supplied input file. | 4.888431 | 4.824945 | 1.013158 |
_skyValue=0.0 #this will be the sky value computed for the exposure
skyKW="MDRIZSKY" #header keyword that contains the sky that's been subtracted
#just making sure, tricky users and all, these are things that will be used
#by the sky function so we want them defined at least
try:
assert imageSet._numchips > 0, "invalid value for number of chips"
assert imageSet._filename != '', "image object filename is empty!, doh!"
assert imageSet._rootname != '', "image rootname is empty!, doh!"
assert imageSet.scienceExt !='', "image object science extension is empty!"
# NOTE(review): catch-and-reraise discards the assertion message;
# letting the AssertionError propagate would keep it.
except AssertionError:
raise AssertionError
numchips=imageSet._numchips
sciExt=imageSet.scienceExt
# User Subtraction Case, User has done own sky subtraction,
# so use the image header value for subtractedsky value
skyuser=paramDict["skyuser"]
if skyuser != '':
print("User has computed their own sky values...")
if skyuser != skyKW:
print(" ...updating MDRIZSKY with supplied value.")
for chip in range(1,numchips+1,1):
chipext = '%s,%d'%(sciExt,chip)
if not imageSet[chipext].group_member:
# skip extensions/chips that will not be processed
continue
try:
_skyValue = imageSet[chipext].header[skyuser]
# NOTE(review): bare except hides non-KeyError failures;
# `except KeyError:` would be safer here.
except:
print("**************************************************************")
print("*")
print("* Cannot find keyword ",skyuser," in ",imageSet._filename)
print("*")
print("**************************************************************\n\n\n")
raise KeyError
_updateKW(imageSet[sciExt+','+str(chip)],
imageSet._filename,(sciExt,chip),skyKW,_skyValue)
# Update internal record with subtracted sky value
imageSet[chipext].subtractedSky = _skyValue
imageSet[chipext].computedSky = None
print("Setting ",skyKW,"=",_skyValue) | def _skyUserFromHeaderKwd(imageSet,paramDict) | subtract the sky from all the chips in the imagefile that imageSet represents
imageSet is a single imageObject reference
paramDict should be the subset from an actual config object | 7.155529 | 6.988987 | 1.023829 |
#this object contains the returned values from the image stats routine
# Compute clipped statistics over the chip's data array using the
# user-selected statistic and clipping parameters.
_tmp = imagestats.ImageStats(image.data,
fields = skypars['skystat'],
lower = skypars['skylower'],
upper = skypars['skyupper'],
nclip = skypars['skyclip'],
lsig = skypars['skylsigma'],
usig = skypars['skyusigma'],
binwidth = skypars['skywidth']
)
# Pull out the single statistic (mean/median/mode/...) requested.
_skyValue = _extractSkyValue(_tmp,skypars['skystat'].lower())
log.info(" Computed sky value/pixel for %s: %s "%
(image.rootname, _skyValue))
# Free the (potentially large) stats object promptly.
del _tmp
return _skyValue | def _computeSky(image, skypars, memmap=False) | Compute the sky value for the data array passed to the function
image is a fits object which contains the data and the header
for one image extension
skypars is passed in as paramDict | 5.794858 | 5.643986 | 1.026732 |
# Subtract the sky value in place (third argument is the output array),
# avoiding a temporary array the size of the image.
try:
np.subtract(image.data,skyValue,image.data)
except IOError:
print("Unable to perform sky subtraction on data array")
raise IOError | def _subtractSky(image,skyValue,memmap=False) | subtract the given sky value from each the data array
that has been passed. image is a fits object that
contains the data and header for one image extension | 5.902302 | 5.834482 | 1.011624 |
# Update the value in memory
image.header[skyKW] = Value
# Now update the value on disk
# Format the extension spec for the log message only.
if isinstance(exten,tuple):
strexten = '[%s,%s]'%(exten[0],str(exten[1]))
else:
strexten = '[%s]'%(exten)
log.info('Updating keyword %s in %s' % (skyKW, filename + strexten))
# Re-open the file in update mode to persist the keyword change.
fobj = fileutil.openImage(filename, mode='update', memmap=False)
fobj[exten].header[skyKW] = (Value, 'Sky value computed by AstroDrizzle')
fobj.close() | def _updateKW(image, filename, exten, skyKW, Value) | update the header with the kw,value | 3.775806 | 3.757106 | 1.004977 |
skyKW = "MDRIZSKY"
Value = 0.0
# For every processed SCI extension of every image, ensure the MDRIZSKY
# keyword exists, defaulting it to 0.0 where missing.
for imageSet in imageObjList:
fname = imageSet._filename
numchips=imageSet._numchips
sciExt=imageSet.scienceExt
fobj = fileutil.openImage(fname, mode='update', memmap=False)
for chip in range(1,numchips+1,1):
ext = (sciExt,chip)
if not imageSet[ext].group_member:
# skip over extensions not used in processing
continue
if skyKW not in fobj[ext].header:
fobj[ext].header[skyKW] = (Value, 'Sky value computed by AstroDrizzle')
log.info("MDRIZSKY keyword not found in the %s[%s,%d] header."%(
fname,sciExt,chip))
log.info("    Adding MDRIZSKY to header with default value of 0.")
fobj.close() | def _addDefaultSkyKW(imageObjList) | Add MDRIZSKY keyword to "commanded" SCI headers of all input images,
if that keyword does not already exist. | 5.042875 | 4.641229 | 1.086539 |
# Fetch the full help text (including version info) and either print it
# or write it to the named file.
helpstr = getHelpAsString(docstring=True, show_ver = True)
if file is None:
print(helpstr)
else:
# Replace any previously existing output file.
if os.path.exists(file): os.remove(file)
f = open(file, mode = 'w')
f.write(helpstr)
f.close() | def help(file=None) | Print out syntax help for running astrodrizzle
Parameters
----------
file : str (Default = None)
If given, write out help to the filename specified by this parameter
Any previously existing file with this name will be deleted before
writing out the help. | 3.41576 | 3.620704 | 0.943397 |
# Gather RA/Dec inputs from either a coordinate file or the ra/dec
# arguments, then convert them to pixel positions with the full
# distortion model (note: despite the docstring, the transform here is
# sky -> pixel via all_world2pix).
single_coord = False
if coordfile is not None:
# Default column names unless caller supplied them (str or list).
if colnames in blank_list:
colnames = ['c1','c2']
elif isinstance(colnames,type('a')):
colnames = colnames.split(',')
# convert input file coordinates to lists of decimal degrees values
xlist,ylist = tweakutils.readcols(coordfile,cols=colnames)
else:
# Normalize ra/dec into parallel lists regardless of input type.
if isinstance(ra,np.ndarray):
ralist = ra.tolist()
declist = dec.tolist()
elif not isinstance(ra, list):
ralist = [ra]
declist = [dec]
else:
ralist = ra
declist = dec
xlist = [0]*len(ralist)
ylist = [0]*len(ralist)
if len(xlist) == 1:
single_coord = True
for i,(r,d) in enumerate(zip(ralist,declist)):
# convert input value into decimal degrees value
xval,yval = tweakutils.parse_skypos(r,d)
xlist[i] = xval
ylist[i] = yval
# start by reading in WCS+distortion info for input image
inwcs = wcsutil.HSTWCS(input)
if inwcs.wcs.is_unity():
print("####\nNo valid WCS found in {}.\n  Results may be invalid.\n####\n".format(input))
# Now, convert pixel coordinates into sky coordinates
# Fall back to the linear (no-distortion) transform if the full
# inverse transform fails to converge.
try:
outx,outy = inwcs.all_world2pix(xlist,ylist,1)
except RuntimeError:
outx,outy = inwcs.wcs_world2pix(xlist,ylist,1)
# add formatting based on precision here...
xstr = []
ystr = []
fmt = "%."+repr(precision)+"f"
for x,y in zip(outx,outy):
xstr.append(fmt%x)
ystr.append(fmt%y)
if verbose or (not verbose and util.is_blank(output)):
print ('# Coordinate transformations for ',input)
print('# X      Y         RA             Dec\n')
for x,y,r,d in zip(xstr,ystr,xlist,ylist):
print("%s  %s    %s  %s"%(x,y,r,d))
# Create output file, if specified
# NOTE(review): consider `with open(...)` so the file is closed on error.
if output:
f = open(output,mode='w')
f.write("# Coordinates converted from %s\n"%input)
for x,y in zip(xstr,ystr):
f.write('%s    %s\n'%(x,y))
f.close()
print('Wrote out results to: ',output)
# Unwrap single-element results to scalars for convenience.
if single_coord:
outx = outx[0]
outy = outy[0]
return outx, outy
return outx, outy | def rd2xy(input,ra=None,dec=None,coordfile=None,colnames=None,
precision=6,output=None,verbose=True) | Primary interface to perform coordinate transformations from
pixel to sky coordinates using STWCS and full distortion models
read from the input image header. | 3.121752 | 3.143828 | 0.992978 |
# interpret input parameters
catalog = pars.get("catalog", 'GAIADR2')
output = pars.get("output", 'ref_cat.ecsv')
gaia_only = pars.get("gaia_only", False)
table_format = pars.get("table_format", 'ascii.ecsv')
existing_wcs = pars.get("existing_wcs", None)
# parseinput returns (filelist, output); only the filelist is needed.
inputs, _ = parseinput.parseinput(inputs)
# start by creating a composite field-of-view for all inputs
# This default output WCS will have the same plate-scale and orientation
# as the first chip in the list, which for WFPC2 data means the PC.
# Fortunately, for alignment, this doesn't matter since no resampling of
# data will be performed
if existing_wcs:
outwcs = existing_wcs
else:
outwcs = build_reference_wcs(inputs)
# Search radius = distance from FOV center to its furthest corner.
radius = compute_radius(outwcs)
ra, dec = outwcs.wcs.crval
# perform query for this field-of-view
ref_dict = get_catalog(ra, dec, sr=radius, catalog=catalog)
colnames = ('ra', 'dec', 'mag', 'objID', 'GaiaID')
col_types = ('f8', 'f8', 'f4', 'U25', 'U25')
ref_table = Table(names=colnames, dtype=col_types)
# Add catalog name as meta data
ref_table.meta['catalog'] = catalog
ref_table.meta['gaia_only'] = gaia_only
# rename coordinate columns to be consistent with tweakwcs
ref_table.rename_column('ra', 'RA')
ref_table.rename_column('dec', 'DEC')
# extract just the columns we want...
num_sources = 0
for source in ref_dict:
if 'GAIAsourceID' in source:
g = source['GAIAsourceID']
# gaia_only drops rows without a GAIA source ID.
if gaia_only and g.strip() == '':
continue
else:
g = "-1"  # indicator for no source ID extracted
r = float(source['ra'])
d = float(source['dec'])
m = -999.9  # float(source['mag'])
o = source['objID']
num_sources += 1
ref_table.add_row((r, d, m, o, g))
# Write out table to a file, if specified
if output:
ref_table.write(output, format=table_format)
log.info("Created catalog '{}' with {} sources".format(output, num_sources))
return ref_table | def create_astrometric_catalog(inputs, **pars) | Create an astrometric catalog that covers the inputs' field-of-view.
Parameters
----------
input : str, list
Filenames of images to be aligned to astrometric catalog
catalog : str, optional
Name of catalog to extract astrometric positions for sources in the
input images' field-of-view. Default: GAIADR2. Options available are
documented on the catalog web page.
output : str, optional
Filename to give to the astrometric catalog read in from the master
catalog web service. If None, no file will be written out.
gaia_only : bool, optional
Specify whether or not to only use sources from GAIA in output catalog
Default: False
existing_wcs : ~stwcs.wcsutils.HSTWCS`
existing WCS object specified by the user
Notes
-----
This function will point to astrometric catalog web service defined
through the use of the ASTROMETRIC_CATALOG_URL environment variable.
Returns
-------
ref_table : ~.astropy.table.Table`
Astropy Table object of the catalog | 3.896356 | 3.630044 | 1.073363 |
# start by creating a composite field-of-view for all inputs
wcslist = []
for img in inputs:
nsci = countExtn(img)
# One WCS per science extension (extension numbering is 1-based).
for num in range(nsci):
extname = (sciname, num + 1)
if sciname == 'sci':
extwcs = wcsutil.HSTWCS(img, ext=extname)
else:
# Working with HDRLET as input and do the best we can...
extwcs = read_hlet_wcs(img, ext=extname)
wcslist.append(extwcs)
# This default output WCS will have the same plate-scale and orientation
# as the first chip in the list, which for WFPC2 data means the PC.
# Fortunately, for alignment, this doesn't matter since no resampling of
# data will be performed
outwcs = utils.output_wcs(wcslist)
return outwcs | def build_reference_wcs(inputs, sciname='sci') | Create the reference WCS based on all the inputs for a field | 7.941836 | 8.155872 | 0.973757 |
# Build the VO cone-search URL for the given position, radius and
# catalog, then parse the CSV response.
serviceType = 'vo/CatalogSearch.aspx'
spec_str = 'RA={}&DEC={}&SR={}&FORMAT={}&CAT={}&MINDET=5'
headers = {'Content-Type': 'text/csv'}
spec = spec_str.format(ra, dec, sr, fmt, catalog)
serviceUrl = '{}/{}?{}'.format(SERVICELOCATION, serviceType, spec)
rawcat = requests.get(serviceUrl, headers=headers)
r_contents = rawcat.content.decode()  # convert from bytes to a String
rstr = r_contents.split('\r\n')
# remove initial line describing the number of sources returned
# CRITICAL to proper interpretation of CSV data
del rstr[0]
# First remaining line becomes the header for DictReader.
r_csv = csv.DictReader(rstr)
return r_csv | def get_catalog(ra, dec, sr=0.1, fmt='CSV', catalog='GSC241') | Extract catalog from VO web service.
Parameters
----------
ra : float
Right Ascension (RA) of center of field-of-view (in decimal degrees)
dec : float
Declination (Dec) of center of field-of-view (in decimal degrees)
sr : float, optional
Search radius (in decimal degrees) from field-of-view center to use
for sources from catalog. Default: 0.1 degrees
fmt : str, optional
Format of output catalog to be returned. Options are determined by
web-service, and currently include (Default: CSV):
VOTABLE(default) | HTML | KML | CSV | TSV | JSON | TEXT
catalog : str, optional
Name of catalog to query, as defined by web-service. Default: 'GSC241'
Returns
-------
csv : CSV object
CSV object of returned sources with all columns as provided by catalog | 6.019142 | 6.192964 | 0.971932 |
# Angular distance from the WCS reference point to the most distant
# footprint corner gives the field radius (decimal degrees).
ra, dec = wcs.wcs.crval
center = SkyCoord(ra=ra * u.degree, dec=dec * u.degree)
footprint = wcs.calc_footprint()
corners = SkyCoord(ra=footprint[:, 0] * u.degree,
                   dec=footprint[:, 1] * u.degree)
radius = center.separation(corners).max().value
return radius | def compute_radius(wcs) | Compute the radius from the center to the furthest edge of the WCS. | 2.653549 | 2.471237 | 1.073773 |
# Build the GSC-conversion request keyed by the image's IPPPSSOOT id.
serviceType = "GSCConvert/GSCconvert.aspx"
spec_str = "TRANSFORM={}-{}&IPPPSSOOT={}"
if 'rootname' in pf.getheader(image):
    ippssoot = pf.getval(image, 'rootname').upper()
else:
    ippssoot = fu.buildNewRootname(image).upper()
spec = spec_str.format(input_catalog, output_catalog, ippssoot)
serviceUrl = "{}/{}?{}".format(SERVICELOCATION, serviceType, spec)
rawcat = requests.get(serviceUrl)
if not rawcat.ok:
    # BUG FIX: the original format string "{{}" raised
    # "ValueError: Single '}' encountered" at runtime instead of
    # logging the URL; "{}" is the intended placeholder.
    log.info("Problem accessing service with:\n{}".format(serviceUrl))
    raise ValueError("Problem accessing service: {}".format(serviceUrl))
delta_ra = delta_dec = None
# Parse the XML response for the RA/Dec offsets (decimal degrees).
tree = BytesIO(rawcat.content)
for _, element in etree.iterparse(tree):
    if element.tag == 'deltaRA':
        delta_ra = float(element.text)
    elif element.tag == 'deltaDEC':
        delta_dec = float(element.text)
return delta_ra, delta_dec | def find_gsc_offset(image, input_catalog='GSC1', output_catalog='GAIA') | Find the GSC to GAIA offset based on guide star coordinates
Parameters
----------
image : str
Filename of image to be processed.
Returns
-------
delta_ra, delta_dec : tuple of floats
Offset in decimal degrees of image based on correction to guide star
coordinates relative to GAIA. | 5.469059 | 5.552247 | 0.985017 |
def classify_sources(catalog, sources=None):
    """Convert moments_central attribute for source catalog into star/cr flag.

    This algorithm interprets the central moments of each detected source
    as more-likely a star (1) or a cosmic-ray (0). It is not intended or
    expected to be precise, merely a means of making a first cut at removing
    likely cosmic-rays or other artifacts.

    Parameters
    ----------
    catalog : `~photutils.SourceCatalog`
        The photutils catalog for the image/chip.
    sources : tuple, optional
        Range of objects from catalog to process as a tuple of (min, max).
        If None (default) all sources are processed.

    Returns
    -------
    srctype : ndarray
        Array with one entry per selected source: 1 indicates a likely
        valid, non-cosmic-ray source; 0 indicates a likely cosmic-ray.
    """
    moments = catalog.moments_central
    if sources is None:
        sources = (0, len(moments))
    first, last = sources
    srctype = np.zeros((last - first,), np.int32)
    for src in range(first, last):
        # Protect against spurious detections with undefined centroids
        src_x = catalog[src].xcentroid
        src_y = catalog[src].ycentroid
        if np.isnan(src_x) or np.isnan(src_y):
            continue
        # Peak central moment away from the origin implies a star-like
        # profile; cosmic-rays concentrate power in the lowest moments.
        x, y = np.where(moments[src] == moments[src].max())
        if (x[0] > 1) and (y[0] > 1):
            # BUG FIX: index relative to the start of the requested range so
            # a non-zero sources[0] no longer writes out of bounds / raises
            # IndexError on the (last-first)-long output array.
            srctype[src - first] = 1

    return srctype
def generate_source_catalog(image, **kwargs):
    """Build source catalogs for each chip using photutils.

    Sources are identified using photutils segmentation-based source
    finding, ignoring any input pixel flagged as 'bad' in the DQ array,
    should a DQ array be found in the input HDUList.

    Parameters
    ----------
    image : `~astropy.io.fits.HDUList`
        Input image as an astropy.io.fits HDUList.
    dqname : str, optional
        EXTNAME for the DQ array, if present, in the input image HDUList.
    output : bool, optional
        Whether to write out a separate catalog file for all the sources
        found in each chip. Default: None (False)
    threshold : float, optional
        Threshold for identifying sources relative to the background RMS.
    fwhm : float, optional
        FWHM (in pixels) of the expected sources from the image.

    Returns
    -------
    source_cats : dict
        Dict of astropy Tables identified by chip number with each table
        containing sources from image extension ``('sci', chip)``.

    Raises
    ------
    ValueError
        If ``image`` is not an `~astropy.io.fits.HDUList`.
    """
    if not isinstance(image, pf.HDUList):
        raise ValueError("Input {} not fits.HDUList object".format(image))
    dqname = kwargs.get('dqname', 'DQ')
    output = kwargs.get('output', None)

    # Build source catalog for entire image, one entry per chip
    source_cats = {}
    numSci = countExtn(image, extname='SCI')

    for chip in range(1, numSci + 1):
        if output:
            rootname = image[0].header['rootname']
            outroot = '{}_sci{}_src'.format(rootname, chip)
            kwargs['output'] = outroot
        imgarr = image['sci', chip].data

        # apply any DQ array, if available
        dqmask = None
        # ROBUSTNESS FIX: HDUList.index_of() raises KeyError when the named
        # extension is absent; treat that the same as "no DQ mask present"
        # instead of letting the exception propagate.
        try:
            dq_found = image.index_of(dqname)
        except KeyError:
            dq_found = None
        if dq_found:
            dqarr = image[dqname, chip].data

            # "Grow out" regions in the DQ mask flagged as saturated by
            # several pixels in every direction to prevent the source-match
            # algorithm from trying to match multiple sources from one image
            # to a single source in the other, or vice-versa.
            # Temp DQ mask of all pixels flagged with any value EXCEPT 256
            non_sat_mask = bitfield_to_boolean_mask(dqarr, ignore_flags=256)
            # Temp DQ mask containing saturated pixels ONLY
            sat_mask = bitfield_to_boolean_mask(dqarr, ignore_flags=~256)
            # Grow out saturated pixels by a few pixels in every direction
            grown_sat_mask = ndimage.binary_dilation(sat_mask, iterations=5)
            # Combine the two temporary DQ masks into a single composite mask
            dqmask = np.bitwise_or(non_sat_mask, grown_sat_mask)

        seg_tab, segmap = extract_sources(imgarr, dqmask=dqmask, **kwargs)
        source_cats[chip] = seg_tab

    return source_cats
def generate_sky_catalog(image, refwcs, **kwargs):
    """Build source catalog from input image using photutils.

    The returned catalog includes sources found in all chips of the input
    image with the positions translated to the coordinate frame defined by
    the reference WCS ``refwcs``.

    Parameters
    ----------
    image : `~astropy.io.fits.HDUList`
        Input image.
    refwcs : `~stwcs.wcsutils.HSTWCS`
        Definition of the reference frame WCS; built from the image itself
        when None.
    **kwargs
        Passed through to `generate_source_catalog` (``dqname``, ``output``,
        ``threshold``, ``fwhm``, ...).

    Returns
    -------
    master_cat : `~astropy.table.Table`
        Source catalog for all 'valid' sources identified from all chips,
        with positions translated to the reference WCS coordinate frame.
    """
    # Per-chip catalogs in detector coordinates
    chip_cats = generate_source_catalog(image, **kwargs)

    master_cat = None
    num_sci = countExtn(image, extname='SCI')
    # if no refwcs specified, build one now...
    if refwcs is None:
        refwcs = build_reference_wcs([image])

    for extver in range(1, num_sci + 1):
        chip_cat = chip_cats[extver]
        if chip_cat is None:
            continue
        # Map chip pixel positions -> sky -> reference frame pixels
        chip_wcs = wcsutil.HSTWCS(image, ext=('sci', extver))
        sky_ra, sky_dec = chip_wcs.all_pix2world(chip_cat['xcentroid'],
                                                 chip_cat['ycentroid'], 1)
        ref_x, ref_y = refwcs.all_world2pix(sky_ra, sky_dec, 1)
        chip_cat['xcentroid'] = ref_x
        chip_cat['ycentroid'] = ref_y
        if master_cat is None:
            master_cat = chip_cat
        else:
            master_cat = vstack([master_cat, chip_cat])

    return master_cat
def compute_photometry(catalog, photmode):
    """Compute magnitudes for sources from catalog based on observations photmode.

    Parameters
    ----------
    catalog : `~astropy.table.Table`
        Astropy Table with a 'flux' column of measured fluxes per source.
    photmode : str
        Observation filter configuration for the exposure, as reported by
        the 'PHOTMODE' keyword from the PRIMARY header.

    Returns
    -------
    catalog : `~astropy.table.Table`
        Input catalog with an added 'vegamag' column (magnitudes).
    """
    # Determine the VEGAMAG zero-point with pysynphot for this photmode
    obs_mode = photmode.replace(' ', ', ')
    vega_spec = S.FileSpectrum(VEGASPEC)
    bandpass = S.ObsBandpass(obs_mode)
    vega_obs = S.Observation(vega_spec, bandpass)
    vega_zpt = 2.5 * np.log10(vega_obs.countrate())

    # Convert catalog fluxes into magnitudes using the zero-point
    mags = vega_zpt - 2.5 * np.log10(catalog['flux'])
    mags.name = 'vegamag'
    catalog.add_column(mags)

    return catalog
def filter_catalog(catalog, **kwargs):
    """Create a new catalog selected from input based on photometry.

    Parameters
    ----------
    bright_limit : float
        Fraction of catalog based on brightness that should be retained.
        Value of 1.00 means full catalog.
    max_bright : int
        Maximum number of sources to keep regardless of `bright_limit`.
    min_bright : int
        Minimum number of sources to keep regardless of `bright_limit`.
    colname : str
        Name of column to use for selection/sorting.

    Returns
    -------
    new_catalog : `~astropy.table.Table`
        New table containing only the sources meeting the selection criteria.
    """
    bright_limit = kwargs.get('bright_limit', 1.00)
    max_bright = kwargs.get('max_bright', None)
    min_bright = kwargs.get('min_bright', 20)
    colname = kwargs.get('colname', 'vegamag')

    # Order the sources from brightest (smallest magnitude) to faintest
    mags = catalog[colname]
    num_sources = len(mags)
    order = np.argsort(mags)

    if max_bright is None:
        max_bright = num_sources

    # Keep the requested fraction, but never fewer than min_bright nor
    # more than max_bright or the full catalog size.
    keep = max(int(num_sources * bright_limit), min_bright)
    keep = min(max_bright, keep, num_sources)

    return catalog[order[:keep]]
def build_self_reference(filename, clean_wcs=False):
    """Create a reference, undistorted WCS for correcting this image's WCS.

    Parameters
    ----------
    filename : str
        Filename of image which will be corrected, and which will form the
        basis of the undistorted WCS.
    clean_wcs : bool
        When True, return a pristine `~stwcs.wcsutils.HSTWCS` stripped of
        distortion information and of any history of the original image.

    Returns
    -------
    customwcs : `stwcs.wcsutils.HSTWCS`
        Undistorted WCS representing the entire field-of-view of the image.

    Examples
    --------
    >>> import buildref
    >>> from drizzlepac import updatehdr
    >>> filename = "jce501erq_flc.fits"
    >>> wcslin = buildref.build_self_reference(filename)
    >>> updatehdr.updatewcs_with_shift(filename, wcslin, xsh=49.5694,
    ...     ysh=19.2203, rot=359.998, scale=0.9999964)
    """
    # Headerlet-style files store the WCS under 'sipwcs' instead of 'sci'
    sciname = 'sipwcs' if 'sipwcs' in filename else 'sci'
    wcslin = build_reference_wcs([filename], sciname=sciname)

    if not clean_wcs:
        return wcslin

    # Rebuild a bare WCS carrying only the linear terms
    base = wcslin.wcs
    return build_hstwcs(base.crval[0], base.crval[1],
                        base.crpix[0], base.crpix[1],
                        wcslin._naxis1, wcslin._naxis2,
                        wcslin.pscale, wcslin.orientat)
def read_hlet_wcs(filename, ext):
    """Insure `stwcs.wcsutil.HSTWCS` includes all attributes of a full image WCS.

    Headerlets carry no image array, so the WCS may lack the image
    dimensions; approximate them by treating CRPIX as the chip center.
    """
    hstwcs = wcsutil.HSTWCS(filename, ext=ext)
    if hstwcs.naxis1 is None:
        # Assume crpix marks the center of the chip
        hstwcs.naxis1 = int(hstwcs.wcs.crpix[0] * 2.)
        hstwcs.naxis2 = int(hstwcs.wcs.crpix[1] * 2.)
    return hstwcs
def build_hstwcs(crval1, crval2, crpix1, crpix2, naxis1, naxis2, pscale, orientat):
    """Create an `stwcs.wcsutil.HSTWCS` object for a default instrument
    without distortion based on user provided parameter values.
    """
    wnew = wcsutil.HSTWCS()
    wnew.wcs.crval = np.array([crval1, crval2])
    wnew.wcs.crpix = np.array([crpix1, crpix2])
    wnew.naxis1 = naxis1
    wnew.naxis2 = naxis2
    # CD matrix from the rotation angle, scaled to deg/pixel with the RA
    # axis flipped (east-left convention).
    wnew.wcs.cd = buildRotMatrix(orientat) * [-1, 1] * pscale / 3600.0
    # Synchronize updates with astropy.wcs objects
    wnew.wcs.set()
    wnew.setPscale()
    wnew.setOrient()
    wnew.wcs.ctype = ['RA---TAN', 'DEC--TAN']
    return wnew
def within_footprint(img, wcs, x, y):
    """Determine whether input x, y fall in the science area of the image.

    Parameters
    ----------
    img : ndarray
        ndarray of image where non-science areas are marked with value of NaN.
    wcs : `stwcs.wcsutil.HSTWCS`
        HSTWCS or WCS object with naxis terms defined.
    x, y : ndarray
        Arrays of x, y positions for sources to be checked.

    Returns
    -------
    x, y : ndarray
        New arrays trimmed of all sources falling outside the science areas.
    """
    # Pick up the image dimensions from whichever attributes this WCS has
    if hasattr(wcs, 'naxis1'):
        nx, ny = wcs.naxis1, wcs.naxis2
    elif hasattr(wcs, 'pixel_shape'):
        nx, ny = wcs.pixel_shape
    else:
        nx, ny = wcs._naxis1, wcs._naxis2

    # Drop positions outside the rectangular bounds of the WCS
    out_x = np.bitwise_or(x < 0, x > nx)
    out_y = np.bitwise_or(y < 0, y > ny)
    inside = ~np.bitwise_or(out_x, out_y)
    x = x[inside]
    y = y[inside]

    # Keep only positions that also fall on the actual science area
    img_mask = create_image_footprint(img, wcs, border=1.0)
    on_img = np.where(img_mask[y.astype(np.int32), x.astype(np.int32)])[0]
    return x[on_img], y[on_img]
def create_image_footprint(image, refwcs, border=0.):
    """Create the footprint of the image in the reference WCS frame.

    Parameters
    ----------
    image : `astropy.io.fits.HDUList` or str
        Image whose chips define the footprint.
    refwcs : `stwcs.wcsutil.HSTWCS`
        Reference WCS for coordinate frame of image.
    border : float
        Buffer (in arcseconds) around edge of image to exclude.
    """
    if isinstance(image, str):
        image = pf.open(image)
    num_sci = countExtn(image, extname='SCI')
    ref_x = refwcs._naxis1
    ref_y = refwcs._naxis2
    # convert border value from arcseconds into reference pixels
    border_pixels = int(border / refwcs.pscale)

    footprint = np.zeros((ref_y, ref_x), dtype=int)
    for extver in range(1, num_sci + 1):
        # Pixel positions along all four edges of this chip
        chip_y, chip_x = image['sci', extver].data.shape
        chipwcs = wcsutil.HSTWCS(image, ext=('sci', extver))
        xpix = np.arange(chip_x) + 1
        ypix = np.arange(chip_y) + 1
        edge_x = np.hstack([[1] * chip_y, xpix, [chip_x] * chip_y, xpix])
        edge_y = np.hstack([ypix, [1] * chip_x, ypix, [chip_y] * chip_x])

        # Map the chip edges into the reference frame and mark them
        edge_ra, edge_dec = chipwcs.all_pix2world(edge_x, edge_y, 1)
        ref_edge_x, ref_edge_y = refwcs.all_world2pix(edge_ra, edge_dec, 0)
        ref_edge_x = np.clip(ref_edge_x.astype(np.int32), 0, ref_x - 1)
        ref_edge_y = np.clip(ref_edge_y.astype(np.int32), 0, ref_y - 1)
        footprint[ref_edge_y, ref_edge_x] = 1

    # Dilate then fill to turn each chip outline into a solid region
    footprint = ndimage.binary_fill_holes(ndimage.binary_dilation(footprint, iterations=2))
    if border > 0.:
        # Shrink the footprint to exclude the requested border
        footprint = ndimage.binary_erosion(footprint, iterations=border_pixels)

    return footprint
def build_wcscat(image, group_id, source_catalog):
    """Return a list of `~tweakwcs.tpwcs.FITSWCS` objects for all chips in an image.

    Parameters
    ----------
    image : str, `~astropy.io.fits.HDUList`
        Either filename or HDUList of a single HST observation.
    group_id : int
        Integer ID for the group this image should be associated with;
        primarily used when separate chips live in separate files so they
        can all be treated as one exposure.
    source_catalog : dict
        Catalogs to attach as ``catalog`` entries in each chip's FITSWCS,
        provided as astropy Tables keyed by chip number, as generated by
        `generate_source_catalog()`.

    Returns
    -------
    wcs_catalogs : list of `~tweakwcs.tpwcs.FITSWCS`
        FITSWCS objects defined for all chips in the input image.

    Raises
    ------
    ValueError
        If ``image`` is neither a filename nor an HDUList.
    """
    opened_here = False
    if isinstance(image, str):
        hdulist = pf.open(image)
        opened_here = True
    elif isinstance(image, pf.HDUList):
        hdulist = image
    else:
        log.info("Wrong type of input, {}, for build_wcscat...".format(type(image)))
        raise ValueError

    wcs_catalogs = []
    numsci = countExtn(hdulist)
    for chip in range(1, numsci + 1):
        chip_wcs = wcsutil.HSTWCS(hdulist, ('SCI', chip))
        imcat = source_catalog[chip]
        # tweakwcs expects 'x'/'y' column names rather than centroids
        if 'xcentroid' in imcat.colnames:
            imcat.rename_column('xcentroid', 'x')
            imcat.rename_column('ycentroid', 'y')
        wcs_catalogs.append(FITSWCS(
            chip_wcs,
            meta={
                'chip': chip,
                'group_id': group_id,
                'filename': image,
                'catalog': imcat,
                'name': image
            }
        ))

    if opened_here:
        hdulist.close()

    return wcs_catalogs
def update_from_shiftfile(shiftfile, wcsname=None, force=False):
    """Update headers of all images specified in shiftfile with shifts
    from shiftfile.

    Parameters
    ----------
    shiftfile : str
        Filename of shiftfile.
    wcsname : str
        Label to give to new WCS solution being created by this fit. If
        None, 'TWEAK' will be used automatically. [Default=None]
    force : bool
        Update header even though WCS already exists with this solution or
        wcsname? [Default=False]

    Raises
    ------
    ValueError
        If no reference-image line can be found in the shiftfile header.
    """
    # ROBUSTNESS FIX: use a context manager so the file handle is closed
    # even if reading fails.
    with open(fileutil.osfn(shiftfile)) as f:
        shift_lines = [x.strip() for x in f.readlines()]

    # interpret header of shift file to locate the reference image
    refimage = None
    for line in shift_lines:
        if 'refimage' in line or 'reference' in line:
            refimage = line.split(':')[-1]
            refimage = refimage[:refimage.find('[wcs]')].lstrip()
            break
    # ROBUSTNESS FIX: the original raised an obscure NameError later when
    # the header line was missing; fail early with a clear message instead.
    if refimage is None:
        raise ValueError(
            "No reference image ('refimage') line found in shiftfile: "
            "{}".format(shiftfile))

    # Determine the max length of the first column (filenames), skipping
    # comment and blank lines (blank lines crashed the original on row[0]).
    fname_lengths = [len(row.split(' ')[0]) for row in shift_lines
                     if row and not row.startswith('#')]
    fname_fmt = 'S{0}'.format(max(fname_lengths))

    # Now read in numerical values from shiftfile
    type_list = {'names': ('fnames', 'xsh', 'ysh', 'rot', 'scale', 'xrms', 'yrms'),
                 'formats': (fname_fmt, 'f4', 'f4', 'f4', 'f4', 'f4', 'f4')}
    try:
        sdict = np.loadtxt(shiftfile, dtype=type_list, unpack=False)
    except IndexError:
        # Older shiftfiles have no xrms/yrms columns; pad them with zeros
        tlist = {'names': ('fnames', 'xsh', 'ysh', 'rot', 'scale'),
                 'formats': (fname_fmt, 'f4', 'f4', 'f4', 'f4')}
        s = np.loadtxt(shiftfile, dtype=tlist, unpack=False)
        sdict = np.zeros([s['fnames'].shape[0], ], dtype=type_list)
        for sname in s.dtype.names:
            sdict[sname] = s[sname]

    # Apply the recorded shift to each listed image
    for img in sdict:
        updatewcs_with_shift(img['fnames'], refimage, wcsname=wcsname,
                             rot=img['rot'], scale=img['scale'],
                             xsh=img['xsh'], ysh=img['ysh'],
                             xrms=img['xrms'], yrms=img['yrms'],
                             force=force)
def linearize(wcsim, wcsima, wcsref, imcrpix, f, shift, hx=1.0, hy=1.0):
    """Linearize the transformation about ``imcrpix`` using the 5-point
    finite-difference formula for the first-order derivatives.
    """
    x0, y0 = imcrpix[0], imcrpix[1]
    # Sample grid: the center plus four offsets along each axis
    pts = np.asarray([[x0, y0],
                      [x0 - hx, y0],
                      [x0 - hx * 0.5, y0],
                      [x0 + hx * 0.5, y0],
                      [x0 + hx, y0],
                      [x0, y0 - hy],
                      [x0, y0 - hy * 0.5],
                      [x0, y0 + hy * 0.5],
                      [x0, y0 + hy]],
                     dtype=np.float64)
    # convert image coordinates to reference image coordinates:
    pts = wcsref.wcs_world2pix(wcsim.wcs_pix2world(pts, 1), 1).astype(ndfloat128)
    # apply linear fit transformation:
    pts = np.dot(f, (pts - shift).T).T
    # convert back to image coordinate system:
    pts = wcsima.wcs_world2pix(
        wcsref.wcs_pix2world(pts.astype(np.float64), 1), 1).astype(ndfloat128)
    # 5-point derivative with regard to x:
    u1 = ((pts[1] - pts[4]) + 8 * (pts[3] - pts[2])) / (6 * hx)
    # 5-point derivative with regard to y:
    u2 = ((pts[5] - pts[8]) + 8 * (pts[7] - pts[6])) / (6 * hy)
    return (np.asarray([u1, u2]).T, pts[0])
def update_wcs(image, extnum, new_wcs, wcsname="", reusename=False, verbose=False):
    """Updates the WCS of the specified extension number with the new WCS
    after archiving the original WCS.

    The value of 'new_wcs' needs to be the full HSTWCS object.

    Parameters
    ----------
    image : str or `astropy.io.fits.HDUList`
        Filename (or already-opened HDUList) of image with WCS to update.
    extnum : int
        Extension number for extension with WCS to be updated/replaced.
    new_wcs : object
        Full HSTWCS object which will replace/update the existing WCS.
    wcsname : str
        Label to give newly updated WCS; defaults to 'TWEAK' when blank.
    reusename : bool
        Whether to over-write an existing WCS with the same name.
        [Default: False]
    verbose : bool, int
        Print extra messages during processing? [Default: False]
    """
    # Start by insuring that the correct value of 'orientat' has been computed
    new_wcs.setOrient()

    fimg_open = False
    if not isinstance(image, fits.HDUList):
        fimg = fits.open(image, mode='update', memmap=False)
        fimg_open = True
        fimg_update = True
    else:
        fimg = image
        # BUG FIX: the original compared strings with 'is', which depends on
        # CPython string interning and is not guaranteed to be True even for
        # equal strings; use equality instead.
        fimg_update = fimg.fileinfo(0)['filemode'] == 'update'

    # Determine final (unique) WCSNAME value, either based on the default
    # or user-provided name
    if util.is_blank(wcsname):
        wcsname = 'TWEAK'
    if not reusename:
        wcsname = create_unique_wcsname(fimg, extnum, wcsname)

    # Only write IDC-derived keywords when a distortion model is present
    idchdr = new_wcs.idcscale is not None

    # Open the file for updating the WCS
    try:
        logstr = 'Updating header for %s[%s]'%(fimg.filename(),str(extnum))
        if verbose:
            print(logstr)
        else:
            log.info(logstr)
        hdr = fimg[extnum].header

        if verbose:
            log.info(' with WCS of')
            new_wcs.printwcs()
            print("WCSNAME : ",wcsname)
        # Insure that if a copy of the WCS has not been created yet, it will be now
        wcs_hdr = new_wcs.wcs2header(idc2hdr=idchdr, relax=True)

        for key in wcs_hdr:
            hdr[key] = wcs_hdr[key]
        hdr['ORIENTAT'] = new_wcs.orientat
        hdr['WCSNAME'] = wcsname
        util.updateNEXTENDKw(fimg)

        # Only if this image was opened in update mode should this
        # newly updated WCS be archived, as it will never be written out
        # to a file otherwise.
        if fimg_update:
            if not reusename:
                # Save the newly updated WCS as an alternate WCS as well
                wkey = wcsutil.altwcs.next_wcskey(fimg, ext=extnum)
            else:
                wkey = wcsutil.altwcs.getKeyFromName(hdr, wcsname)
            # wcskey needs to be specified so that archiveWCS will create a
            # duplicate WCS with the same WCSNAME as the Primary WCS
            wcsutil.altwcs.archiveWCS(fimg, [extnum], wcsname=wcsname,
                                      wcskey=wkey, reusekey=reusename)
    finally:
        if fimg_open:
            # finish up by closing the file now
            fimg.close()
def create_unique_wcsname(fimg, extnum, wcsname):
    """Return a WCSNAME value not already used in this image.

    If the requested name is taken, a 'wcsname_NNN'-style suffix is appended
    using one more than the highest suffix already present.

    Parameters
    ----------
    fimg : obj
        PyFITS object of image with WCS information to be updated.
    extnum : int
        Index of extension with WCS information to be updated.
    wcsname : str
        Value of WCSNAME specified by user for labelling the new WCS.

    Returns
    -------
    uniqname : str
        Unique WCSNAME value.
    """
    wnames = list(wcsutil.altwcs.wcsnames(fimg, ext=extnum).values())
    if wcsname not in wnames:
        return wcsname

    # Find the highest existing '_N' suffix for this base name
    rpatt = re.compile(wcsname + r'_\d')
    index = 0
    for wname in wnames:
        if rpatt.match(wname):
            # BUG FIX: the original reset the counter to 1 ('index = 1')
            # instead of tracking the max suffix, so names could collide.
            n = int(wname[wname.rfind('_') + 1:])
            index = max(index, n)

    # Next free suffix for the new name
    return "%s_%d" % (wcsname, index + 1)
def get_pool_size(usr_config_value, num_tasks):
    """Determine size of thread/process-pool for parallel processing.

    Examines the cpu_count to decide and return the right pool size to use,
    honoring the user's config value when given and never exceeding the
    number of parallel tasks. Call with ``num_tasks=None`` when the task
    count is unknown. Returns 1 when parallel processing is disabled.
    """
    if not can_parallel:
        return 1
    # A user-specified value takes priority over the detected CPU count,
    # but never start more workers than there are tasks.
    if usr_config_value is not None:
        if num_tasks is None:
            return usr_config_value
        return min(usr_config_value, num_tasks)
    # No user preference: fall back to the CPU count, capped by task count
    if num_tasks is None:
        return _cpu_count
    return min(_cpu_count, num_tasks)
def init_logging(logfile=DEFAULT_LOGNAME, default=None, level=logging.INFO):
    """Set up logger for capturing stdout/stderr messages.

    Must be called prior to writing any messages that you want to log.
    """
    # Resolve the log file name from the inputs
    if logfile == "INDEF":
        if not is_blank(default):
            logname = fileutil.buildNewRootname(default, '.log')
        else:
            logname = DEFAULT_LOGNAME
    elif logfile not in [None, "", " "]:
        logname = logfile if logfile.endswith('.log') else logfile + '.log'
    else:
        logname = None

    if logname is None:
        print('No trailer file created...')
        return

    logutil.setup_global_logging()

    # Don't use logging.basicConfig since it can only be called once in a
    # session
    global _log_file_handler
    root_logger = logging.getLogger()
    if _log_file_handler:
        root_logger.removeHandler(_log_file_handler)

    # Default mode is 'a' which is fine
    _log_file_handler = logging.FileHandler(logname)
    # TODO: Make the default level configurable in the task parameters
    _log_file_handler.setLevel(level)
    _log_file_handler.setFormatter(
        logging.Formatter('[%(levelname)-8s] %(message)s'))
    root_logger.setLevel(level)
    root_logger.addHandler(_log_file_handler)

    print('Setting up logfile : ', logname)
def end_logging(filename=None):
    """Close log file and restore system defaults."""
    if not logutil.global_logging_started:
        print('No trailer file saved...')
        return
    if filename:
        print('Trailer file written to: ', filename)
    else:
        # This generally shouldn't happen if logging was started with
        # init_logging and a filename was given...
        print('No trailer file saved...')
    logutil.teardown_global_logging()
def findrootname(filename):
    """Return the rootname of the given file.

    The rootname is everything preceding the first punctuation character;
    the full name is returned when no punctuation is present.
    """
    positions = [filename.find(ch) for ch in string.punctuation]
    hits = [pos for pos in positions if pos != -1]
    cut = min(hits) if hits else len(filename)
    return filename[:cut]
def removeFileSafely(filename, clobber=True):
    """Delete the file specified, but only if it exists and clobber is True."""
    if filename is None or not filename.strip():
        return
    if clobber and os.path.exists(filename):
        os.remove(filename)
def displayEmptyInputWarningBox(display=True, parent=None):
    """Displays a warning box for the 'input' parameter.

    Parameters
    ----------
    display : bool
        Whether to actually show the dialog.
    parent : widget, optional
        Parent widget for the dialog.

    Returns
    -------
    str
        Always "yes".
    """
    if display:
        # IMPROVEMENT: import tkinter lazily so headless callers passing
        # display=False never require a GUI toolkit to be installed.
        if sys.version_info[0] >= 3:
            from tkinter.messagebox import showwarning
        else:
            from tkMessageBox import showwarning
        msg = 'No valid input files found! '+\
        'Please check the value for the "input" parameter.'
        showwarning(parent=parent,message=msg, title="No valid inputs!")
    return "yes"
def count_sci_extensions(filename):
    """Return the number of SCI extensions and the EXTNAME from a input MEF file."""
    extname = 'SCI'
    hdu_list = fileutil.openImage(filename, memmap=False)
    # Tally extensions whose EXTNAME matches 'SCI'
    num_sci = sum(1 for extn in hdu_list
                  if 'extname' in extn.header and extn.header['extname'] == extname)
    hdu_list.close()
    # Simple FITS files keep their data in the PRIMARY HDU
    if num_sci == 0:
        return 1, 'PRIMARY'
    return num_sci, extname
def verifyUniqueWcsname(fname, wcsname):
    """Report whether or not the specified WCSNAME already exists in the file."""
    numsci, extname = count_sci_extensions(fname)
    wnames = altwcs.wcsnames(fname, ext=(extname, 1))
    # Unique only when no alternate WCS already carries this name
    return wcsname not in wnames.values()
def verifyUpdatewcs(fname):
    """Verify the existence of WCSNAME in the file.

    Returns True only when WCSNAME is present in every SCI extension.
    """
    numsci, extname = count_sci_extensions(fname)
    for extver in range(1, numsci + 1):
        hdr = fits.getheader(fname, extname=extname, extver=extver, memmap=False)
        if 'wcsname' not in hdr:
            return False
    return True
def verifyRefimage(refimage):
    """Verify that the value of refimage specified by the user points to an
    extension with a proper WCS defined.

    Returns False if the file is missing, no extension with a complete WCS
    can be found, or the specified extension lacks a CD matrix.
    """
    # A blank specification means no reference image was requested: valid.
    if is_blank(refimage):
        return True

    refroot, extroot = fileutil.parseFilename(refimage)
    if not os.path.exists(refroot):
        return False

    if extroot is None:
        # MEF without an explicit extension: search for one with a full WCS
        return findWCSExtn(refimage) is not None

    # Explicit extension given: check for a CD matrix in its WCS
    refwcs = wcsutil.HSTWCS(refimage)
    valid = refwcs.wcs.has_cd()
    del refwcs
    return valid
def findWCSExtn(filename):
    """Return extension name/number pointing to an extension with a valid WCS.

    Returns
    -------
    extnum : str, None
        Value of extension name as a string, either as provided by the user
        or based on the number of the first extension containing a valid
        HSTWCS object. Returns None if no extension has a valid WCS.

    Notes
    -----
    The return value can be used to build another HSTWCS with the syntax::

        HSTWCS('{}[{}]'.format(filename, extnum))
    """
    rootname, extroot = fileutil.parseFilename(filename)
    extnum = None
    if extroot is None:
        # Scan all extensions for the first one with a complete linear WCS
        fimg = fits.open(rootname, memmap=False)
        for i, extn in enumerate(fimg):
            if 'crval1' in extn.header:
                refwcs = wcsutil.HSTWCS('{}[{}]'.format(rootname, i))
                if refwcs.wcs.has_cd():
                    extnum = '{}'.format(i)
                    break
        fimg.close()
    else:
        # IMPROVEMENT: narrowed the original bare 'except:' so that
        # KeyboardInterrupt/SystemExit are no longer silently swallowed.
        try:
            refwcs = wcsutil.HSTWCS(filename)
            if refwcs.wcs.has_cd():
                extnum = extroot
        except Exception:
            extnum = None
    return extnum
def verifyFilePermissions(filelist, chmod=True):
    """Verify that images specified in 'filelist' can be updated.

    A message will be printed reporting the names of any images which do not
    have write-permission; in that case None is returned instead of the list.
    """
    badfiles = []
    archive_dir = False
    for img in filelist:
        fname = fileutil.osfn(img)
        if 'OrIg_files' in os.path.split(fname)[0]:
            archive_dir = True
        # Probe writability by opening for append
        try:
            with open(fname, mode='a'):
                pass
        except IOError as e:
            # Only permission errors mark the file as bad
            if e.errno == errno.EACCES:
                badfiles.append(img)

    if not badfiles:
        return filelist

    if archive_dir:
        print('\n')
        print('#'*40)
        print(' Working in "OrIg_files" (archive) directory. ')
        print(' This directory has been created to serve as an archive')
        print(' for the original input images. ')
        print('\n These files should be copied into another directory')
        print(' for processing. ')
        print('#'*40)

    print('\n')
    print('#'*40)
    print('Found %d files which can not be updated!'%(len(badfiles)))
    for img in badfiles:
        print(' %s'%(img))
    print('\nPlease reset permissions for these files and restart...')
    print('#'*40)
    print('\n')
    return None
def getFullParList(configObj):
    """Return a single list of all parameter names included in the configObj
    regardless of which section the parameter was stored in.
    """
    plist = []
    for name in configObj.keys():
        entry = configObj[name]
        if isinstance(entry, configobj.Section):
            # Recurse into nested sections
            plist.extend(getFullParList(entry))
        else:
            plist.append(name)
    return plist
def validateUserPars(configObj, input_dict):
    """Compares input parameter names specified by user with those already
    recognized by the task.

    Any parameter provided by the user that does not match a known task
    parameter is reported and a ValueError exception is raised.
    """
    # Any unexpected parameters provided on input should be reported and
    # the code should stop
    known_pars = getFullParList(configObj)
    extra_pars = [kw for kw in input_dict if kw not in known_pars]

    if extra_pars:
        print('='*40)
        print('The following input parameters were not recognized as valid inputs:')
        for p in extra_pars:
            print(" %s"%(p))
        print('\nPlease check the spelling of the parameter(s) and try again...')
        print('='*40)
        raise ValueError
def applyUserPars_steps(configObj, input_dict, step='3a'):
    """Turn on the user-specified output WCS for a drizzle step whenever the
    user provided any parameter belonging to that step on the command line,
    regardless of how the step's wcs switch was originally set.
    """
    step_kws = {'7a': 'final_wcs', '3a': 'driz_sep_wcs'}
    stepname = getSectionName(configObj, step)
    step_pars = configObj[stepname].copy()
    del step_pars[step_kws[step]]

    # Did the user supply any parameter from this step?
    if any(kw in input_dict for kw in step_pars):
        configObj[stepname][step_kws[step]] = True
def getDefaultConfigObj(taskname, configObj, input_dict={}, loadOnly=True):
    """Return default configObj instance for task updated
    with user-specified values from input_dict.

    Parameters
    ----------
    taskname : string
        Name of task to load into TEAL.
    configObj : string
        One of: None (loads last saved user .cfg file), 'defaults' (loads
        task default .cfg file), or the name of a user-specified .cfg file.
    input_dict : dict
        Parameters and values specified by the user which differ from what
        gets loaded from the task's .cfg file.
    loadOnly : bool
        When False, start the TEAL GUI so the user can edit values further
        and run the task if desired.
    """
    if configObj is None:
        # Grab the default values without using the GUI. This insures that
        # all subsequent use of the configObj includes all parameters and
        # their last saved values.
        configObj = teal.load(taskname)
    elif isinstance(configObj, str):
        if configObj.lower().strip() == 'defaults':
            # Load task default .cfg file with all default values
            configObj = teal.load(taskname, defaults=True)
            # define default filename for configObj
            configObj.filename = taskname.lower() + '.cfg'
        else:
            # Load user-specified .cfg file with its special default values;
            # 'fileutil.osfn()' expands any environment variables the user
            # embedded in the configObj filename to the full path.
            configObj = teal.load(fileutil.osfn(configObj))

    # Merge in the user values for this run (this, though, does not save
    # the results for later use).
    if input_dict not in [None, {}]:
        # Stop and report if any unexpected parameters were provided
        validateUserPars(configObj, input_dict)
        # If everything looks good, merge user inputs with configObj
        cfgpars.mergeConfigObj(configObj, input_dict)

    if not loadOnly:
        # Run the GUI AFTER merging in any parameters specified by the
        # user on the command-line and provided in input_dict
        configObj = teal.teal(configObj, loadOnly=False)

    return configObj
def getSectionName(configObj, stepnum):
    """Return section label based on step number.

    Returns None when no section matches 'STEP <stepnum>:'.
    """
    marker = 'STEP ' + str(stepnum) + ':'
    for section in configObj.keys():
        if marker in section:
            return section
def displayMakewcsWarningBox(display=True, parent=None):
    """Displays a warning box for the 'makewcs' parameter.

    Parameters
    ----------
    display : bool or {'yes', 'no'}
        Whether to show the warning dialog.  The original implementation
        only accepted the strings 'yes'/'no' and raised ``KeyError`` for
        the documented boolean default; booleans are now accepted too.
    parent : tkinter widget, optional
        Parent widget for the dialog.

    Returns
    -------
    bool
        Always True.
    """
    if sys.version_info[0] >= 3:
        from tkinter.messagebox import showwarning
    else:
        from tkMessageBox import showwarning
    # Accept legacy 'yes'/'no' strings as well as plain booleans.
    ans = {'yes': True, 'no': False}
    show = ans[display] if display in ans else bool(display)
    if show:
        msg = 'Setting "updatewcs=yes" will result '+ \
        'in all input WCS values to be recomputed '+ \
        'using the original distortion model and alignment.'
        showwarning(parent=parent,message=msg, title="WCS will be overwritten!")
    return True
def printParams(paramDictionary, all=False, log=None):
    """Print nicely the parameters from the dictionary."""
    # Route output either to the supplied logger or to stdout.
    if log is not None:
        emit = log.info
    else:
        emit = print
    if not paramDictionary:
        emit('No parameters were supplied')
    else:
        for key in sorted(paramDictionary):
            value = paramDictionary[key]
            # NOTE: due to operator precedence, all=True prints every entry,
            # including '_'-prefixed ones (behavior preserved from original).
            if all or not isinstance(value, dict) and key[0] != '_':
                emit('\t' + '\t'.join([str(key) + ' :', str(value)]))
    if log is None:
        emit('\n')
def isCommaList(inputFilelist):
    """Return True if the input is a comma separated list of names."""
    # Integer inputs (including numpy int32 values) are stringified first.
    if isinstance(inputFilelist, (int, np.int32)):
        text = str(inputFilelist)
    else:
        text = inputFilelist
    return "," in text
def loadFileList(inputFilelist):
    """Open up the '@ file' and read in the science and possible
    ivm filenames from the first two columns.

    Returns the list of science filenames parsed from the @-file.
    """
    # Strip the leading '@' to get the list-file name on disk.
    with open(inputFilelist[1:]) as f:
        # check the first line in order to determine whether
        # IVM files have been specified in a second column...
        first_line = f.readline()
    ivmlist = None
    # If there is a second column...
    # (original code tested an undefined name 'line' here, which raised
    # NameError; it also passed the builtin 'input' to irafglob)
    if len(first_line.split()) == 2:
        # ...parse out the names of the IVM files as well
        ivmlist = irafglob.irafglob(inputFilelist, atfile=atfile_ivm)
    # Parse the @-file with irafglob to extract the input filename
    filelist = irafglob.irafglob(inputFilelist, atfile=atfile_sci)
    return filelist
ivm filenames from the first two columns. | 8.168549 | 6.563791 | 1.244486 |
def readCommaList(fileList):
    """Return a list of the files with the commas removed."""
    # Splitting on ',' already yields the list the original loop built.
    return fileList.split(',')
def update_input(filelist, ivmlist=None, removed_files=None):
    """Removes files flagged to be removed from the input filelist.
    Removes the corresponding ivm files if present.

    Parameters
    ----------
    filelist : list of str
        Science file names.
    ivmlist : list or None
        IVM file names parallel to `filelist` (may be None).
    removed_files : list or None
        File names to drop from both lists.

    Returns
    -------
    (filelist, ivmlist)
        The filtered lists; inputs are returned untouched when there is
        nothing to remove.
    """
    # Nothing to remove: return inputs untouched.  The original code only
    # tested for an empty list and crashed on the default value of None.
    if not removed_files:
        return filelist, ivmlist
    # Pad with None entries so science/IVM names stay paired even when no
    # IVM list was supplied (the original crashed on zip(filelist, None)).
    if ivmlist is None:
        ivmlist = len(filelist) * [None]
    kept = [pair for pair in zip(filelist, ivmlist)
            if pair[0] not in removed_files]
    newfilelist = [el[0] for el in kept]
    ivmlist = [el[1] for el in kept]
    return newfilelist, ivmlist
Removes the corresponding ivm files if present. | 3.181778 | 3.194584 | 0.995992 |
def get_expstart(header, primary_hdr):
    """Return (expstart, expend) for this exposure.

    shouldn't this just be defined in the instrument subclass of imageobject?
    """
    # Prefer the primary header when it carries the exposure keywords.
    exphdr = primary_hdr if 'expstart' in primary_hdr else header
    if 'EXPSTART' in exphdr:
        return float(exphdr['EXPSTART']), float(exphdr['EXPEND'])
    # No exposure timing information available at all.
    return 0., 0.0
def compute_texptime(imageObjectList):
    """Add up the exposure time for all the members in
    the pattern, since 'drizzle' doesn't have the necessary
    information to correctly set this itself.
    """
    expnames, exptimes, starts, ends = [], [], [], []
    for image in imageObjectList:
        expnames.extend(image.getKeywordList('_expname'))
        exptimes.extend(image.getKeywordList('_exptime'))
        starts.extend(image.getKeywordList('_expstart'))
        ends.extend(image.getKeywordList('_expend'))
    # Sum each exposure's time only once, even when consecutive chips
    # share the same exposure name.
    total = 0.
    previous = None
    for name, etime in zip(expnames, exptimes):
        if name != previous:
            previous = name
            total += etime
    return (total, min(starts), max(ends))
the pattern, since 'drizzle' doesn't have the necessary
information to correctly set this itself. | 2.684264 | 2.668788 | 1.005799 |
def computeRange(corners):
    """Determine the range spanned by an array of pixel positions."""
    xs = corners[:, 0]
    ys = corners[:, 1]
    # min()/max() on a 1-D array match np.minimum.reduce/np.maximum.reduce.
    _xrange = (xs.min(), xs.max())
    _yrange = (ys.min(), ys.max())
    return _xrange, _yrange
def getRotatedSize(corners, angle):
    """Determine the size of a rotated (meta)image."""
    if not angle:
        # If there is no rotation, simply measure the original corners.
        return computeRange(corners)
    # Rotate the corner positions about the center, then measure.
    rot_matrix = fileutil.buildRotMatrix(angle)
    return computeRange(np.dot(corners, rot_matrix))
def readcols(infile, cols=[0, 1, 2, 3], hms=False):
    """Read the columns from an ASCII file as numpy arrays.

    Parameters
    ----------
    infile : str
        Filename of ASCII file with array data as columns.
    cols : list of int
        List of 0-indexed column numbers for columns to be turned into numpy arrays
        (DEFAULT- [0,1,2,3]).
    hms : bool
        When True, values are kept as strings (e.g. sexagesimal values)
        instead of being converted to float.

    Returns
    -------
    outarr : list of numpy arrays
        Simple list of numpy arrays in the order as specifed in the 'cols' parameter.
    """
    fin = open(infile,'r')
    outarr = []
    for l in fin.readlines():
        l = l.strip()
        # Skip blank lines, lines with too few columns, comment lines and
        # lines containing INDEF entries.
        # NOTE(review): operator precedence makes the '#'/'INDEF' tests part
        # of a combined clause -- verify against upstream intent.
        if len(l) == 0 or len(l.split()) < len(cols) or (len(l) > 0 and l[0] == '#' or (l.find("INDEF") > -1)): continue
        # Collapse repeated blanks (up to 10 passes, stopping when stable).
        # NOTE(review): as written this replace is a no-op (" " -> " ");
        # it presumably should collapse double spaces -- confirm upstream.
        for i in range(10):
            lnew = l.replace(" "," ")
            if lnew == l: break
            else: l = lnew
        lspl = lnew.split(" ")
        # Lazily create one accumulator list per requested column.
        if len(outarr) == 0:
            for c in range(len(cols)): outarr.append([])
        for c,n in zip(cols,list(range(len(cols)))):
            if not hms:
                val = float(lspl[c])
            else:
                val = lspl[c]
            outarr[n].append(val)
    fin.close()
    # Convert each accumulated column into a numpy array.
    for n in range(len(cols)):
        outarr[n] = np.array(outarr[n])
    return outarr
Parameters
----------
infile : str
Filename of ASCII file with array data as columns.
cols : list of int
List of 0-indexed column numbers for columns to be turned into numpy arrays
(DEFAULT- [0,1,2,3]).
Returns
-------
outarr : list of numpy arrays
Simple list of numpy arrays in the order as specifed in the 'cols' parameter. | 2.648438 | 2.709009 | 0.977641 |
def parse_colnames(colnames, coords=None):
    """Convert colnames input into list of column numbers."""
    cols = []
    if not isinstance(colnames,list):
        colnames = colnames.split(',')
    # parse column names from coords file and match to input values
    if coords is not None and fileutil.isFits(coords)[0]:
        # Open FITS file with table
        ftab = fits.open(coords, memmap=False)
        # determine which extension has the table
        for extn in ftab:
            if isinstance(extn, fits.BinTableHDU):
                # parse column names from table and match to inputs
                cnames = extn.columns.names
                if colnames is not None:
                    for c in colnames:
                        for name,i in zip(cnames,list(range(len(cnames)))):
                            if c == name.lower(): cols.append(i)
                    if len(cols) < len(colnames):
                        errmsg = "Not all input columns found in table..."
                        ftab.close()
                        raise ValueError(errmsg)
                else:
                    # Default to the first two table columns.
                    cols = cnames[:2]
                break
        ftab.close()
    else:
        for c in colnames:
            if isinstance(c, str):
                # 'cN' designations are treated as 1-based column numbers.
                # NOTE(review): bare numeric strings are used as-is
                # (0-based?) -- confirm the intended convention.
                if c[0].lower() == 'c': cols.append(int(c[1:])-1)
                else:
                    cols.append(int(c))
            else:
                if isinstance(c, int):
                    cols.append(c)
                else:
                    errmsg = "Unsupported column names..."
                    raise ValueError(errmsg)
    return cols
def createFile(dataArray=None, outfile=None, header=None):
    """Create a simple fits file for the given data array and header.

    Returns either the FITS object in-memory when outfile==None or
    None when the FITS file was written out to a file.
    """
    # Insure that at least a data-array has been provided to create the file
    assert(dataArray is not None), "Please supply a data array for createFiles"
    try:
        # Create the output file
        # NOTE(review): if this constructor raised, the finally block below
        # would hit a NameError on 'fitsobj' -- confirm acceptable.
        fitsobj = fits.HDUList()
        if header is not None:
            # Strip extension-specific keywords so the header is valid
            # for a primary HDU.
            try:
                del(header['NAXIS1'])
                del(header['NAXIS2'])
                if 'XTENSION' in header:
                    del(header['XTENSION'])
                if 'EXTNAME' in header:
                    del(header['EXTNAME'])
                if 'EXTVER' in header:
                    del(header['EXTVER'])
            except KeyError:
                pass
            if 'NEXTEND' in header:
                header['NEXTEND'] = 0
            hdu = fits.PrimaryHDU(data=dataArray, header=header)
            try:
                del hdu.header['PCOUNT']
                del hdu.header['GCOUNT']
            except KeyError:
                pass
        else:
            hdu = fits.PrimaryHDU(data=dataArray)
        fitsobj.append(hdu)
        if outfile is not None:
            fitsobj.writeto(outfile)
    finally:
        # CLOSE THE IMAGE FILES
        fitsobj.close()
        if outfile is not None:
            del fitsobj
            fitsobj = None
    return fitsobj
Returns either the FITS object in-memory when outfile==None or
None when the FITS file was written out to a file. | 2.619873 | 2.560848 | 1.023049 |
def base_taskname(taskname, packagename=None):
    """Extract the base name of the task.

    Searches for the right-most dot in `taskname` and returns the text to
    its right; without a dot the input is returned unchanged.  Non-string
    inputs (e.g. None) are passed straight through.  When `packagename`
    is given, an AssertionError is raised if it does not match the
    package part derived from `taskname`.
    """
    if not isinstance(taskname, str):
        return taskname
    # rpartition yields ('', '', taskname) when there is no dot, which
    # matches the original rfind-based behavior.
    pkg_name, _, base = taskname.rpartition('.')
    assert True if packagename is None else packagename == pkg_name
    return base
return base_taskname | def base_taskname(taskname, packagename=None) | Extract the base name of the task.
Many tasks in the `drizzlepac` have "compound" names such as
'drizzlepac.sky'. This function will search for the presence of a dot
in the input `taskname` and if found, it will return the string
to the right of the right-most dot. If a dot is not found, it will return
the input string.
Parameters
----------
taskname : str, None
Full task name. If it is `None`, :py:func:`base_taskname` will
return `None`\ .
packagename : str, None (Default = None)
Package name. It is assumed that a compound task name is formed by
concatenating `packagename` + '.' + `taskname`\ . If `packagename`
is not `None`, :py:func:`base_taskname` will check that the string
to the left of the right-most dot matches `packagename` and will
raise an `AssertionError` if the package name derived from the
input `taskname` does not match the supplied `packagename`\ . This
is intended as a check for discrepancies that may arise
during the development of the tasks. If `packagename` is `None`,
no such check will be performed.
Raises
------
AssertionError
Raised when package name derived from the input `taskname` does not
match the supplied `packagename` | 2.836988 | 3.302225 | 0.859114 |
def addStep(self, key):
    """Add information about a new step to the dict of steps

    The value 'ptime' is the output from '_ptime()' containing
    both the formatted and unformatted time for the start of the
    step.
    """
    start_time = _ptime()
    print('==== Processing Step ',key,' started at ',start_time[0])
    # Record the start time and remember the ordering of the steps.
    self.steps[key] = {'start': start_time}
    self.order.append(key)
The value 'ptime' is the output from '_ptime()' containing
both the formatted and unformatted time for the start of the
step. | 9.8302 | 6.479679 | 1.517081 |
def endStep(self, key):
    """Record the end time for the step.

    If key==None, simply record ptime as end time for class to represent
    the overall runtime since the initialization of the class.
    """
    stop_time = _ptime()
    if key is not None:
        step = self.steps[key]
        step['end'] = stop_time
        # Elapsed wall-clock seconds for this step.
        step['elapsed'] = stop_time[1] - step['start'][1]
    self.end = stop_time
    print('==== Processing Step ',key,' finished at ',stop_time[0])
    print('')
If key==None, simply record ptime as end time for class to represent
the overall runtime since the initialization of the class. | 5.311592 | 4.875955 | 1.089344 |
def reportTimes(self):
    """Print out a formatted summary of the elapsed times for all the
    performed steps.
    """
    self.end = _ptime()
    total_time = 0
    # Class-level header string (name-mangled attribute of ProcSteps).
    print(ProcSteps.__report_header)
    for step in self.order:
        if 'elapsed' in self.steps[step]:
            _time = self.steps[step]['elapsed']
        else:
            # Step was started but never ended; report zero elapsed time.
            _time = 0.0
        total_time += _time
        print(' %20s %0.4f sec.' % (step, _time))
    print(' %20s %s' % ('=' * 20, '=' * 20))
    print(' %20s %0.4f sec.' % ('Total', total_time))
performed steps. | 4.054724 | 3.594251 | 1.128114 |
def run(configObj=None, input_dict={}, loadOnly=False):
    """Build DQ masks from all input images, then apply static mask(s)."""
    # If called from interactive user-interface, configObj will not be
    # defined yet, so get defaults using EPAR/TEAL.
    #
    # Also insure that the input_dict (user-specified values) are folded in
    # with a fully populated configObj instance.
    configObj = util.getDefaultConfigObj(__taskname__,configObj,input_dict,loadOnly=loadOnly)
    if configObj is None:
        return
    # Define list of imageObject instances and output WCSObject instance
    # based on input paramters
    imgObjList,outwcs = processInput.setCommonInput(configObj)
    # Build DQ masks for all input images.
    # NOTE(review): this calls buildMask(imgObjList, configObj) rather than
    # buildDQMasks -- confirm which helper is intended here.
    buildMask(imgObjList,configObj)
def buildDQMasks(imageObjectList, configObj):
    """Build DQ masks for all input images."""
    # Accept a single imageObject as well as a list of them.
    if not isinstance(imageObjectList, list):
        imageObjectList = [imageObjectList]
    for image in imageObjectList:
        image.buildMask(configObj['single'], configObj['bits'])
def buildMask(dqarr, bitvalue):
    """Builds a bit-mask from an input DQ array and a bitvalue flag"""
    # Convert the DQ bitfield into a uint8 mask where good pixels == 1.
    return bitfield_to_boolean_mask(dqarr, bitvalue, good_mask_value=1,
                                    dtype=np.uint8)
def buildMaskImage(rootname, bitvalue, output, extname='DQ', extver=1):
    """Builds mask image from rootname's DQ array.

    Returns the name of the mask image written out, or None when a
    problem was encountered while building the mask.
    """
    # If no bitvalue is set or rootname given, assume no mask is desired
    # However, this name would be useful as the output mask from
    # other processing, such as MultiDrizzle, so return it anyway.
    #if bitvalue == None or rootname == None:
    #    return None
    # build output name
    maskname = output
    # If an old version of the maskfile was present, remove it and rebuild it.
    if fileutil.findFile(maskname):
        fileutil.removeFile(maskname)
    # Open input file with DQ array
    fdq = fileutil.openImage(rootname, mode='readonly', memmap=False)
    try:
        _extn = fileutil.findExtname(fdq, extname, extver=extver)
        if _extn is not None:
            # Read in DQ array
            dqarr = fdq[_extn].data
        else:
            dqarr = None
        # For the case where there is no DQ array,
        # create a mask image of all ones.
        if dqarr is None:
            # We need to get the dimensions of the output DQ array
            # Since the DQ array is non-existent, look for the SCI extension
            _sci_extn = fileutil.findExtname(fdq,'SCI',extver=extver)
            if _sci_extn is not None:
                _shape = fdq[_sci_extn].data.shape
                dqarr = np.zeros(_shape,dtype=np.uint16)
            else:
                raise Exception
        # Build mask array from DQ array
        maskarr = buildMask(dqarr,bitvalue)
        #Write out the mask file as simple FITS file
        fmask = fits.open(maskname, mode='append', memmap=False)
        maskhdu = fits.PrimaryHDU(data = maskarr)
        fmask.append(maskhdu)
        #Close files
        fmask.close()
        del fmask
        fdq.close()
        del fdq
    # NOTE(review): bare except converts every error (incl. interrupts)
    # into a printed warning -- consider narrowing.
    except:
        fdq.close()
        del fdq
        # Safeguard against leaving behind an incomplete file
        if fileutil.findFile(maskname):
            os.remove(maskname)
        _errstr = "\nWarning: Problem creating MASK file for "+rootname+".\n"
        #raise IOError, _errstr
        print(_errstr)
        return None
    # Return the name of the mask image written out
    return maskname
If there is no valid 'DQ' array in image, then return
an empty string. | 4.02938 | 3.95649 | 1.018423 |
def buildShadowMaskImage(dqfile, detnum, extnum, maskname, bitvalue=None, binned=1):
    """Builds mask image from WFPC2 shadow calibrations.

    detnum - string value for 'DETECTOR' detector

    Returns the name of the mask image written out, or None on failure.
    """
    # insure detnum is a string
    if type(detnum) != type(''):
        detnum = repr(detnum)
    # Root of the per-detector analytic shadow-edge functions
    # (e.g. _func_Shadow_WF1x) evaluated via eval() below.
    _funcroot = '_func_Shadow_WF'
    # build template shadow mask's filename
    # If an old version of the maskfile was present, remove it and rebuild it.
    if fileutil.findFile(maskname):
        fileutil.removeFile(maskname)
    # Fall back to the analytic shadow mask when no DQ file (or bit value)
    # is available.
    _use_inmask = not fileutil.findFile(dqfile) or bitvalue is None
    # Check for existance of input .c1h file for use in making inmask file
    if _use_inmask:
        #_mask = 'wfpc2_inmask'+detnum+'.fits'
        _mask = maskname
        # Check to see if file exists...
        if not fileutil.findFile(_mask):
            # If not, create the file.
            # This takes a long time to run, so it should be done
            # only when absolutely necessary...
            try:
                _funcx = _funcroot+detnum+'x'
                _funcy = _funcroot+detnum+'y'
                # NOTE(review): eval() of module-level function names;
                # safe only while detnum values stay controlled.
                _xarr = np.clip(np.fromfunction(eval(_funcx),(800,800)),0.0,1.0).astype(np.uint8)
                _yarr = np.clip(np.fromfunction(eval(_funcy),(800,800)),0.0,1.0).astype(np.uint8)
                maskarr = _xarr * _yarr
                if binned !=1:
                    # Collapse the 800x800 mask down for binned-mode data;
                    # a pixel stays good only if all contributing
                    # unbinned pixels were good.
                    bmaskarr = maskarr[::2,::2]
                    bmaskarr *= maskarr[1::2,::2]
                    bmaskarr *= maskarr[::2,1::2]
                    bmaskarr *= maskarr[1::2,1::2]
                    maskarr = bmaskarr.copy()
                    del bmaskarr
                #Write out the mask file as simple FITS file
                fmask = fits.open(_mask, mode='append', memmap=False)
                maskhdu = fits.PrimaryHDU(data=maskarr)
                fmask.append(maskhdu)
                #Close files
                fmask.close()
                del fmask
            except:
                # NOTE(review): bare except silently returns None here.
                return None
    else:
        #
        # Build full mask based on .c1h and shadow mask
        #
        fdq = fileutil.openImage(dqfile, mode='readonly', memmap=False)
        try:
            # Read in DQ array from .c1h and from shadow mask files
            dqarr = fdq[int(extnum)].data
            #maskarr = fsmask[0].data
            # Build mask array from DQ array
            dqmaskarr = buildMask(dqarr,bitvalue)
            #Write out the mask file as simple FITS file
            fdqmask = fits.open(maskname, mode='append', memmap=False)
            maskhdu = fits.PrimaryHDU(data=dqmaskarr)
            fdqmask.append(maskhdu)
            #Close files
            fdqmask.close()
            del fdqmask
            fdq.close()
            del fdq
        except:
            fdq.close()
            del fdq
            # Safeguard against leaving behind an incomplete file
            if fileutil.findFile(maskname):
                os.remove(maskname)
            _errstr = "\nWarning: Problem creating DQMASK file for "+rootname+".\n"
            #raise IOError, _errstr
            print(_errstr)
            return None
    # Return the name of the mask image written out
    return maskname
detnum - string value for 'DETECTOR' detector | 3.614674 | 3.583806 | 1.008613 |
def xy2rd(input, x=None, y=None, coords=None, coordfile=None, colnames=None,
          separator=None, hms=True, precision=6, output=None, verbose=True):
    """Primary interface to perform coordinate transformations from
    pixel to sky coordinates using STWCS and full distortion models
    read from the input image header.
    """
    single_coord = False
    # Only use value provided in `coords` if nothing has been specified for coordfile
    if coords is not None and coordfile is None:
        coordfile = coords
        warnings.simplefilter('always',DeprecationWarning)
        warnings.warn("Please update calling code to pass in `coordfile` instead of `coords`.",
            category=DeprecationWarning)
        warnings.simplefilter('default',DeprecationWarning)
    if coordfile is not None:
        if colnames in blank_list:
            colnames = ['c1','c2']
        # Determine columns which contain pixel positions
        cols = util.parse_colnames(colnames,coordfile)
        # read in columns from input coordinates file
        xyvals = np.loadtxt(coordfile,usecols=cols,delimiter=separator)
        if xyvals.ndim == 1:  # only 1 entry in coordfile
            xlist = [xyvals[0].copy()]
            ylist = [xyvals[1].copy()]
        else:
            xlist = xyvals[:,0].copy()
            ylist = xyvals[:,1].copy()
        del xyvals
    else:
        # Normalize scalar/array/list x,y inputs into parallel lists.
        if isinstance(x, np.ndarray):
            xlist = x.tolist()
            ylist = y.tolist()
        elif not isinstance(x,list):
            xlist = [x]
            ylist = [y]
            single_coord = True
        else:
            xlist = x
            ylist = y
    # start by reading in WCS+distortion info for input image
    inwcs = wcsutil.HSTWCS(input)
    if inwcs.wcs.is_unity():
        print("####\nNo valid WCS found in {}.\n Results may be invalid.\n####\n".format(input))
    # Now, convert pixel coordinates into sky coordinates
    dra,ddec = inwcs.all_pix2world(xlist,ylist,1)
    # convert to HH:MM:SS.S format, if specified
    if hms:
        ra,dec = wcs_functions.ddtohms(dra,ddec,precision=precision)
        rastr = ra
        decstr = dec
    else:
        # add formatting based on precision here...
        rastr = []
        decstr = []
        fmt = "%."+repr(precision)+"f"
        for r,d in zip(dra,ddec):
            rastr.append(fmt%r)
            decstr.append(fmt%d)
        ra = dra
        dec = ddec
    if verbose or (not verbose and util.is_blank(output)):
        print('# Coordinate transformations for ',input)
        print('# X Y RA Dec\n')
        for x,y,r,d in zip(xlist,ylist,rastr,decstr):
            print("%.4f %.4f %s %s"%(x,y,r,d))
    # Create output file, if specified
    if output:
        f = open(output,mode='w')
        f.write("# Coordinates converted from %s\n"%input)
        for r,d in zip(rastr,decstr):
            f.write('%s %s\n'%(r,d))
        f.close()
        print('Wrote out results to: ',output)
    # Unwrap single-coordinate results back to scalars.
    if single_coord:
        ra = ra[0]
        dec = dec[0]
    return ra,dec
return ra,dec | def xy2rd(input,x=None,y=None,coords=None, coordfile=None,colnames=None,separator=None,
hms=True, precision=6,output=None,verbose=True) | Primary interface to perform coordinate transformations from
pixel to sky coordinates using STWCS and full distortion models
read from the input image header. | 3.270715 | 3.239259 | 1.009711 |
def mergeDQarray(maskname, dqarr):
    """Merge static or CR mask with mask created from DQ array on-the-fly here.

    The mask given by `maskname` (a file name, an HDUList, or an HDU-like
    object with a .data attribute) is AND-ed in place into `dqarr`.
    """
    maskarr = None
    if maskname is not None:
        if isinstance(maskname, str):
            # working with file on disk (default case)
            if os.path.exists(maskname):
                mask = fileutil.openImage(maskname, memmap=False)
                # np.bool was removed from numpy (1.24+); use builtin bool.
                maskarr = mask[0].data.astype(bool)
                mask.close()
        else:
            if isinstance(maskname, fits.HDUList):
                # working with a virtual input file
                maskarr = maskname[0].data.astype(bool)
            else:
                maskarr = maskname.data.astype(bool)
    if maskarr is not None:
        # merge array with dqarr now (in place)
        np.bitwise_and(dqarr, maskarr, dqarr)
def _setDefaults(configObj={}):
    """set up the default parameters to run drizzle
    build,single,units,wt_scl,pixfrac,kernel,fillval,
    rot,scale,xsh,ysh,blotnx,blotny,outnx,outny,data

    Used exclusively for unit-testing, if any are defined.
    """
    paramDict = {
        "build": True,
        "single": True,
        "stepsize": 10,
        "in_units": "cps",
        "wt_scl": 1.,
        "pixfrac": 1.,
        "kernel": "square",
        "fillval": 999.,
        "maskval": None,
        "rot": 0.,
        "scale": 1.,
        "xsh": 0.,
        "ysh": 0.,
        "blotnx": 2048,
        "blotny": 2048,
        "outnx": 4096,
        "outny": 4096,
        "data": None,
        "driz_separate": True,
        "driz_combine": False,
    }
    # Any user-supplied values override the defaults.
    paramDict.update(configObj)
    return paramDict
build,single,units,wt_scl,pixfrac,kernel,fillval,
rot,scale,xsh,ysh,blotnx,blotny,outnx,outny,data
Used exclusively for unit-testing, if any are defined. | 4.800765 | 2.401552 | 1.999026 |
def interpret_maskval(paramDict):
    """Apply logic for interpreting final_maskval value..."""
    # interpret user specified final_maskval value to use for initializing
    # output SCI array...
    if 'maskval' not in paramDict:
        return 0
    maskval = paramDict['maskval']
    # A None setting means "fill with NaN"; anything else is coerced to float.
    return np.nan if maskval is None else float(maskval)
def run_driz_img(img, chiplist, output_wcs, outwcs, template, paramDict, single,
                 num_in_prod, build, _versions, _numctx, _nplanes, chipIdxCopy,
                 _outsci, _outwht, _outctx, _hdrlist, wcsmap):
    """Perform the drizzle operation on a single image.

    This is separated out from run_driz so as to keep together
    the entirety of the code which is inside the loop over
    images.  See the run_driz code for more documentation.
    """
    maskval = interpret_maskval(paramDict)
    # Check for unintialized inputs
    here = _outsci is None and _outwht is None and _outctx is None
    if _outsci is None:
        _outsci=np.empty(output_wcs.array_shape, dtype=np.float32)
        if single:
            _outsci.fill(0)
        else:
            # Non-single mode pre-fills with the user-requested mask value.
            _outsci.fill(maskval)
    if _outwht is None:
        _outwht=np.zeros(output_wcs.array_shape, dtype=np.float32)
    if _outctx is None:
        _outctx = np.zeros((_nplanes,) + output_wcs.array_shape, dtype=np.int32)
    if _hdrlist is None:
        _hdrlist = []
    # Work on each chip - note that they share access to the arrays above
    for chip in chiplist:
        # See if we will be writing out data
        doWrite = chipIdxCopy == num_in_prod-1
        # debuglog('#chips='+str(chipIdxCopy)+', num_in_prod='+\
        #           str(num_in_prod)+', single='+str(single)+', write='+\
        #           str(doWrite)+', here='+str(here))
        # run_driz_chip
        run_driz_chip(img,chip,output_wcs,outwcs,template,paramDict,
                      single,doWrite,build,_versions,_numctx,_nplanes,
                      chipIdxCopy,_outsci,_outwht,_outctx,_hdrlist,wcsmap)
        # Increment chip counter (also done outside of this function)
        chipIdxCopy += 1
    #
    # Reset for next output image...
    #
    if here:
        del _outsci,_outwht,_outctx,_hdrlist
    elif single:
        # Zero the shared buffers in place so the caller's references
        # remain valid for the next image.
        np.multiply(_outsci,0.,_outsci)
        np.multiply(_outwht,0.,_outwht)
        np.multiply(_outctx,0,_outctx)
        # this was "_hdrlist=[]", but we need to preserve the var ptr itself
        while len(_hdrlist)>0: _hdrlist.pop()
num_in_prod,build,_versions,_numctx,_nplanes,chipIdxCopy,
_outsci,_outwht,_outctx,_hdrlist,wcsmap) | Perform the drizzle operation on a single image.
This is separated out from :py:func:`run_driz` so as to keep together
the entirety of the code which is inside the loop over
images. See the :py:func:`run_driz` code for more documentation. | 3.365938 | 3.396059 | 0.991131 |
def do_driz(insci, input_wcs, inwht,
            output_wcs, outsci, outwht, outcon,
            expin, in_units, wt_scl,
            wcslin_pscale=1.0, uniqid=1, pixfrac=1.0, kernel='square',
            fillval="INDEF", stepsize=10, wcsmap=None):
    """Core routine for performing 'drizzle' operation on a single input image.

    All input values will be Python objects such as ndarrays, instead
    of filenames.  File handling (input and output) will be performed
    by the calling routine.
    """
    # Insure that the fillval parameter gets properly interpreted for use with tdriz
    if util.is_blank(fillval):
        fillval = 'INDEF'
    else:
        fillval = str(fillval)
    if in_units == 'cps':
        expscale = 1.0
    else:
        expscale = expin
    # Compute what plane of the context image this input would
    # correspond to:
    planeid = int((uniqid-1) / 32)
    # Check if the context image has this many planes
    if outcon.ndim == 3:
        nplanes = outcon.shape[0]
    elif outcon.ndim == 2:
        nplanes = 1
    else:
        nplanes = 0
    if nplanes <= planeid:
        raise IndexError("Not enough planes in drizzle context image")
    # Alias context image to the requested plane if 3d
    if outcon.ndim == 2:
        outctx = outcon
    else:
        outctx = outcon[planeid]
    pix_ratio = output_wcs.pscale/wcslin_pscale
    if wcsmap is None and cdriz is not None:
        log.info('Using WCSLIB-based coordinate transformation...')
        log.info('stepsize = %s' % stepsize)
        mapping = cdriz.DefaultWCSMapping(
            input_wcs, output_wcs,
            input_wcs.pixel_shape[0], input_wcs.pixel_shape[1],
            stepsize
        )
    else:
        #
        ##Using the Python class for the WCS-based transformation
        #
        # Use user provided mapping function
        log.info('Using coordinate transformation defined by user...')
        if wcsmap is None:
            wcsmap = wcs_functions.WCSMap
        wmap = wcsmap(input_wcs,output_wcs)
        mapping = wmap.forward
    _shift_fr = 'output'
    _shift_un = 'output'
    ystart = 0
    nmiss = 0
    nskip = 0
    #
    # This call to 'cdriz.tdriz' uses the new C syntax
    #
    _dny = insci.shape[0]
    # Call 'drizzle' to perform image combination
    if insci.dtype > np.float32:
        #WARNING: Input array recast as a float32 array
        insci = insci.astype(np.float32)
    _vers,nmiss,nskip = cdriz.tdriz(insci, inwht, outsci, outwht,
        outctx, uniqid, ystart, 1, 1, _dny,
        pix_ratio, 1.0, 1.0, 'center', pixfrac,
        kernel, in_units, expscale, wt_scl,
        fillval, nmiss, nskip, 1, mapping)
    if nmiss > 0:
        log.warning('! %s points were outside the output image.' % nmiss)
    if nskip > 0:
        log.debug('! Note, %s input lines were skipped completely.' % nskip)
    return _vers
output_wcs, outsci, outwht, outcon,
expin, in_units, wt_scl,
wcslin_pscale=1.0,uniqid=1, pixfrac=1.0, kernel='square',
fillval="INDEF", stepsize=10,wcsmap=None) | Core routine for performing 'drizzle' operation on a single input image
All input values will be Python objects such as ndarrays, instead
of filenames.
File handling (input and output) will be performed by calling routine. | 5.088443 | 5.120265 | 0.993785 |
def _regwrite(shapelist, outfile):
    """Writes the current shape list out as a region file"""
    # This function corrects bugs and provides improvements over the pyregion's
    # ShapeList.write method in the following:
    #
    # 1. ShapeList.write crashes if regions have no comments;
    # 2. ShapeList.write converts 'exclude' ("-") regions to normal regions ("+");
    # 3. ShapeList.write does not support mixed coordinate systems in a
    #    region list.
    #
    # NOTE: This function is provided as a temoprary workaround for the above
    # listed problems of the ShapeList.write. We hope that a future version
    # of pyregion will address all these issues.
    #
    #TODO: Push these changes to pyregion.
    if len(shapelist) < 1:
        _print_warning("The region list is empty. The region file \"%s\" "\
            "will be empty." % outfile)
        try:
            # Create an empty output file.
            outf = open(outfile,'w')
            outf.close()
            return
        except IOError as e:
            cmsg = "Unable to create region file \'%s\'." % outfile
            if e.args:
                e.args = (e.args[0] + "\n" + cmsg,) + e.args[1:]
            else:
                e.args=(cmsg,)
            raise e
        except:
            raise
    # Coordinate system of the first shape becomes the file's default.
    prev_cs = shapelist[0].coord_format
    outf = None
    try:
        outf = open(outfile,'w')
        attr0 = shapelist[0].attr[1]
        defaultline = " ".join(["%s=%s" % (a,attr0[a]) for a in attr0 \
            if a!='text'])
        # first line is globals
        print("global", defaultline, file=outf)
        # second line must be a coordinate format
        print(prev_cs, file=outf)
        for shape in shapelist:
            # Emit an inline coordinate-format override when a shape uses a
            # different system from the file default.
            shape_attr = '' if prev_cs == shape.coord_format \
                else shape.coord_format+"; "
            shape_excl = '-' if shape.exclude else ''
            text_coordlist = ["%f" % f for f in shape.coord_list]
            shape_coords = "(" + ",".join(text_coordlist) + ")"
            shape_comment = " # " + shape.comment if shape.comment else ''
            shape_str = shape_attr + shape_excl + shape.name + shape_coords + \
                shape_comment
            print(shape_str, file=outf)
    except IOError as e:
        cmsg = "Unable to create region file \'%s\'." % outfile
        if e.args:
            e.args = (e.args[0] + "\n" + cmsg,) + e.args[1:]
        else:
            e.args=(cmsg,)
        if outf: outf.close()
        raise e
    except:
        if outf: outf.close()
        raise
    outf.close()
def _needs_ref_WCS(reglist):
    """Check if the region list contains shapes in image-like coordinates"""
    from pyregion.wcs_helper import image_like_coordformats
    # True as soon as any region uses an image-like coordinate format.
    return any(r.coord_format in image_like_coordformats for r in reglist)
def extension_from_filename(filename):
    """Parse out filename from any specified extensions.
    Returns rootname and string version of extension name.
    """
    # Parse out any extension specified in filename
    open_idx = filename.find('[')
    close_idx = filename.find(']')
    if open_idx <= 0:
        # No bracketed extension specification present.
        return filename, None
    # check for closing square bracket:
    if close_idx < open_idx:
        raise RuntimeError("Incorrect extension specification in file " \
            "name \'%s\'." % filename)
    # Read extension name provided
    rootname = filename[:open_idx]
    extn = filename[open_idx + 1:close_idx].strip()
    return rootname, extn
Returns rootname and string version of extension name. | 4.228765 | 4.111446 | 1.028535 |
def count_extensions(img, extname='SCI'):
    """Return the number of 'extname' extensions.

    'img' can be either a file name (string) or a
    `astropy.io.fits.HDUList` object.  When extname is None, the count
    of all HDUs in the file is returned.
    """
    if isinstance(img, str):
        # NOTE(review): the HDUList is closed immediately after opening;
        # the header iteration below appears to rely on headers already
        # being loaded -- confirm this is safe.
        img = fits.open(img, memmap=False)
        img.close()
    elif not isinstance(img, fits.HDUList):
        raise TypeError("Argument 'img' must be either a file name (string) " \
            "or a `astropy.io.fits.HDUList` object.")
    if extname is None:
        return len(img)
    if not isinstance(extname, str):
        raise TypeError("Argument 'extname' must be either a string " \
            "indicating the value of the 'EXTNAME' keyword of the extensions " \
            "to be counted or None to return the count of all HDUs in the " \
            "'img' FITS file.")
    extname = extname.upper()
    n = 0
    for e in img:
        #if isinstance(e, fits.ImageHDU): continue
        # Case-insensitive match on the EXTNAME keyword.
        if 'EXTNAME' in list(map(str.upper, list(e.header.keys()))) \
        and e.header['extname'].upper() == extname:
            n += 1
    return n
name, an HDU List object (from fits), or None (to get the number of all
HDU headers. | 2.622525 | 2.619855 | 1.001019 |
def get_extver_list(img, extname='SCI'):
    """Return a list of all extension versions of 'extname' extensions.

    'img' can be either a file name or a HDU List object (from fits).
    When extname is None, the extension numbers of all HDUs are returned.
    """
    if isinstance(img, str):
        # NOTE(review): the HDUList is closed immediately after opening;
        # the header iteration below appears to rely on headers already
        # being loaded -- confirm this is safe.
        img = fits.open(img, memmap=False)
        img.close()
    elif not isinstance(img, fits.HDUList):
        raise TypeError("Argument 'img' must be either a file name (string) " \
            "or a fits.HDUList object.")
    # when extver is None - return the range of all FITS extensions
    if extname is None:
        extver = list(range(len(img)))
        return extver
    if not isinstance(extname, str):
        raise TypeError("Argument 'extname' must be either a string " \
            "indicating the value of the 'EXTNAME' keyword of the extensions " \
            "whose versions are to be returned or None to return " \
            "extension numbers of all HDUs in the 'img' FITS file.")
    extname = extname.upper()
    extver = []
    for e in img:
        #if not isinstance(e, fits.ImageHDU): continue
        hkeys = list(map(str.upper, list(e.header.keys())))
        # Missing EXTVER defaults to version 1 per FITS convention.
        if 'EXTNAME' in hkeys and e.header['EXTNAME'].upper() == extname:
            extver.append(e.header['EXTVER'] if 'EXTVER' in hkeys else 1)
    return extver
'img' can be either a file name or a HDU List object (from fits). | 2.952188 | 2.889096 | 1.021838 |
def _check_FITS_extvers(img, extname, extvers):
    """Return True if all requested extension versions exist in 'img'.

    Every non-None entry of 'extvers' (a single value or a list) must be
    present among the EXTVER values of the 'extname' extensions of the
    'img' FITS file; None entries default to version 1 when 'extname' is
    a string, otherwise to extension number 0.
    """
    fill_value = 1 if isinstance(extname, str) else 0
    if not isinstance(extvers, list):
        extvers = [extvers]
    requested = {fill_value if ver is None else ver for ver in extvers}
    available = set(get_extver_list(img, extname))
    return requested.issubset(available)
def run_generator(product_category, obs_info):
    """Decide which filename-generation subroutine to run and run it.

    Dispatches on ``product_category`` to the matching generator and
    passes it the space-separated fields of ``obs_info``.

    Parameters
    ----------
    product_category : string
        The type of final output product which filenames will be generated for.

    obs_info : string
        A string containing space-separated items that will be used to
        generate the filenames.

    Returns
    -------
    product_filename_dict : dictionary
        A dictionary containing the generated filenames.

    Raises
    ------
    ValueError
        If ``product_category`` does not start with any known category name.
        (The original code fell through to an obscure NameError in this case.)
    """
    category_generator_mapping = {
        'single exposure product': single_exposure_product_filename_generator,
        'filter product': filter_product_filename_generator,
        'total detection product': total_detection_product_filename_generator,
        'multivisit mosaic product': multivisit_mosaic_product_filename_generator}

    # Determine which name generator to use based on input product_category.
    matched_key = None
    for key, generator in category_generator_mapping.items():
        if product_category.startswith(key):
            matched_key = key
            generator_func = generator
            # Whatever follows the category name (e.g. an exposure number).
            category_num = product_category.replace(key + " ", "")
            break
    if matched_key is None:
        raise ValueError(
            "Unrecognized product category: {}".format(product_category))

    # Parse out obs_info into a list.
    obs_info = obs_info.split(" ")

    # Pad 4-character proposal_id values with leading 0s so that
    # proposal_id is a 5-character string (not needed for mosaics,
    # whose first field is a group number).
    if matched_key != "multivisit mosaic product":
        obs_info[0] = obs_info[0].zfill(5)

    # Generate and return filenames.
    return generator_func(obs_info, category_num)
def single_exposure_product_filename_generator(obs_info, nn):
    """Generate image and sourcelist filenames for single-exposure products.

    Parameters
    ----------
    obs_info : list
        Items used to build the filenames: proposal_id, visit_id,
        instrument, detector, filter, and ipppssoot.

    nn : string
        The single-exposure image number.

    Returns
    -------
    product_filename_dict : dictionary
        A dictionary containing the generated filenames.
    """
    proposal_id, visit_id, instrument, detector, filter_name, ipppssoot = obs_info[:6]
    image_name = "hst_{}_{}_{}_{}_{}_{}_{}.fits".format(
        proposal_id, visit_id, instrument, detector, filter_name, ipppssoot, nn)
    return {"image": image_name,
            "source catalog": image_name.replace(".fits", ".cat")}
def filter_product_filename_generator(obs_info, nn):
    """Generate image and sourcelist filenames for filter products.

    Parameters
    ----------
    obs_info : list
        Items used to build the filenames: proposal_id, visit_id,
        instrument, detector, and filter.

    nn : string
        The single-exposure image number (unused here; only meaningful
        for single_exposure_product_filename_generator()).

    Returns
    -------
    product_filename_dict : dictionary
        A dictionary containing the generated filenames.
    """
    proposal_id, visit_id, instrument, detector, filter_name = obs_info[:5]
    image_name = "hst_{}_{}_{}_{}_{}.fits".format(
        proposal_id, visit_id, instrument, detector, filter_name)
    return {"image": image_name,
            "source catalog": image_name.replace(".fits", ".cat")}
def total_detection_product_filename_generator(obs_info, nn):
    """Generate image and sourcelist filenames for total detection products.

    Parameters
    ----------
    obs_info : list
        Items used to build the filenames: proposal_id, visit_id,
        instrument, and detector.

    nn : string
        The single-exposure image number (unused here; only meaningful
        for single_exposure_product_filename_generator()).

    Returns
    -------
    product_filename_dict : dictionary
        A dictionary containing the generated filenames.
    """
    proposal_id, visit_id, instrument, detector = obs_info[:4]
    image_name = "hst_{}_{}_{}_{}.fits".format(
        proposal_id, visit_id, instrument, detector)
    return {"image": image_name,
            "source catalog": image_name.replace(".fits", ".cat")}
def multivisit_mosaic_product_filename_generator(obs_info, nn):
    """Generate image and sourcelist filenames for multi-visit mosaic products.

    Parameters
    ----------
    obs_info : list
        Items used to build the filenames: group_id, instrument,
        detector, and filter.

    nn : string
        The single-exposure image number (unused here; only meaningful
        for single_exposure_product_filename_generator()).

    Returns
    -------
    product_filename_dict : dictionary
        A dictionary containing the generated filenames.
    """
    group_num, instrument, detector, filter_name = obs_info[:4]
    image_name = "hst_mos_{}_{}_{}_{}.fits".format(
        group_num, instrument, detector, filter_name)
    return {"image": image_name,
            "source catalog": image_name.replace(".fits", ".cat")}
def build_referenceWCS(catalog_list):
    """Compute default reference WCS from a list of Catalog objects."""
    # Flatten the per-chip WCS objects from every catalog into one list.
    wcs_collection = [cat.catalogs[chip]['wcs']
                      for cat in catalog_list
                      for chip in cat.catalogs]
    return utils.output_wcs(wcs_collection)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.