| sentence1 | sentence2 | label |
|---|---|---|
def _register_self(self, logfile, key=JobDetails.topkey, status=JobStatus.unknown):
"""Runs this link, captures output to logfile,
and records the job in self.jobs"""
fullkey = JobDetails.make_fullkey(self.full_linkname, key)
if fullkey in self.jobs:
job_details = self.jobs[fullkey]
job_details.status = status
else:
job_details = self._register_job(key, self.args, logfile, status)
|
Register this job in self.jobs, updating the status of an
existing entry or creating a new `JobDetails` entry.
|
entailment
|
def _archive_self(self, logfile, key=JobDetails.topkey, status=JobStatus.unknown):
"""Write info about a job run by this `Link` to the job archive"""
self._register_self(logfile, key, status)
if self._job_archive is None:
return
self._job_archive.register_jobs(self.get_jobs())
|
Write info about a job run by this `Link` to the job archive
|
entailment
|
def _set_status_self(self, key=JobDetails.topkey, status=JobStatus.unknown):
"""Set the status of this job, both in self.jobs and
in the `JobArchive` if it is present. """
fullkey = JobDetails.make_fullkey(self.full_linkname, key)
if fullkey in self.jobs:
self.jobs[fullkey].status = status
if self._job_archive:
self._job_archive.register_job(self.jobs[fullkey])
else:
self._register_self('dummy.log', key, status)
|
Set the status of this job, both in self.jobs and
in the `JobArchive` if it is present.
|
entailment
|
def _write_status_to_log(self, return_code, stream=sys.stdout):
"""Write the status of this job to a log stream.
This is used to check on job completion."""
stream.write("Timestamp: %i\n" % get_timestamp())
if return_code == 0:
stream.write("%s\n" % self._interface.string_successful)
else:
stream.write("%s %i\n" %
(self._interface.string_exited, return_code))
|
Write the status of this job to a log stream.
This is used to check on job completion.
|
entailment
|
def _finalize(self, dry_run=False):
"""Remove / compress files as requested """
for rmfile in self.files.temp_files:
if dry_run:
print("remove %s" % rmfile)
else:
os.remove(rmfile)
for gzfile in self.files.gzip_files:
if dry_run:
# print ("gzip %s" % gzfile)
pass
else:
os.system('gzip -9 %s' % gzfile)
|
Remove / compress files as requested
|
entailment
|
def update_args(self, override_args):
"""Update the argument used to invoke the application
Note that this will also update the dictionary of input and output files.
Parameters
----------
override_args : dict
Dictionary of arguments to override the current values
"""
self.args = extract_arguments(override_args, self.args)
self._latch_file_info()
scratch_dir = self.args.get('scratch', None)
if is_not_null(scratch_dir):
self._file_stage = FileStageManager(scratch_dir, '.')
|
Update the arguments used to invoke the application.
Note that this will also update the dictionary of input and output files.
Parameters
----------
override_args : dict
Dictionary of arguments to override the current values
|
entailment
|
def get_failed_jobs(self, fail_running=False, fail_pending=False):
"""Return a dictionary with the subset of jobs that are marked as failed
Parameters
----------
fail_running : `bool`
If True, consider running jobs as failed
fail_pending : `bool`
If True, consider pending jobs as failed
Returns
-------
failed_jobs : dict
Dictionary mapping from job key to `JobDetails` for the failed jobs.
"""
failed_jobs = {}
for job_key, job_details in self.jobs.items():
if job_details.status == JobStatus.failed:
failed_jobs[job_key] = job_details
elif job_details.status == JobStatus.partial_failed:
failed_jobs[job_key] = job_details
elif fail_running and job_details.status == JobStatus.running:
failed_jobs[job_key] = job_details
elif fail_pending and job_details.status <= JobStatus.pending:
failed_jobs[job_key] = job_details
return failed_jobs
|
Return a dictionary with the subset of jobs that are marked as failed
Parameters
----------
fail_running : `bool`
If True, consider running jobs as failed
fail_pending : `bool`
If True, consider pending jobs as failed
Returns
-------
failed_jobs : dict
Dictionary mapping from job key to `JobDetails` for the failed jobs.
|
entailment
|
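A minimal usage sketch; `link` is a hypothetical instance of the class these methods belong to:
failed = link.get_failed_jobs(fail_running=True)
for job_key, job_details in failed.items():
    print(job_key, job_details.status)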
def check_job_status(self, key=JobDetails.topkey,
fail_running=False,
fail_pending=False,
force_check=False):
"""Check the status of a particular job
By default this checks the status of the top-level job, but
can be made to drill into the sub-jobs.
Parameters
----------
key : str
Key associated to the job in question
fail_running : `bool`
If True, consider running jobs as failed
fail_pending : `bool`
If True, consider pending jobs as failed
force_check : `bool`
Drill into the status of individual jobs instead of using only the top-level job
Returns
-------
status : `JobStatus`
Job status flag
"""
if key in self.jobs:
status = self.jobs[key].status
if status in [JobStatus.unknown, JobStatus.ready,
JobStatus.pending, JobStatus.running] or force_check:
status = self._interface.check_job(self.jobs[key])
if status == JobStatus.running and fail_running:
status = JobStatus.failed
if status == JobStatus.pending and fail_pending:
status = JobStatus.failed
self.jobs[key].status = status
if self._job_archive:
self._job_archive.register_job(self.jobs[key])
else:
status = JobStatus.no_job
return status
|
Check the status of a particular job
By default this checks the status of the top-level job, but
can be made to drill into the sub-jobs.
Parameters
----------
key : str
Key associated to the job in question
fail_running : `bool`
If True, consider running jobs as failed
fail_pending : `bool`
If True, consider pending jobs as failed
force_check : `bool`
Drill into the status of individual jobs instead of using only the top-level job
Returns
-------
status : `JobStatus`
Job status flag
|
entailment
|
def check_jobs_status(self,
fail_running=False,
fail_pending=False):
"""Check the status of all the jobs run from this link
and return a status flag that summarizes that.
Parameters
----------
fail_running : `bool`
If True, consider running jobs as failed
fail_pending : `bool`
If True, consider pending jobs as failed
Returns
-------
status : `JobStatus`
Job status flag that summarizes the status of all the jobs.
"""
n_failed = 0
n_partial = 0
n_passed = 0
n_total = 0
for job_details in self.jobs.values():
n_total += 1
if job_details.status in [JobStatus.failed, JobStatus.partial_failed]:
n_failed += 1
elif fail_running and job_details.status == JobStatus.running:
n_failed += 1
elif fail_pending and job_details.status == JobStatus.pending:
n_failed += 1
elif job_details.status == JobStatus.done:
n_passed += 1
if n_failed > 0:
return JobStatus.failed
elif n_passed == n_total:
return JobStatus.done
elif n_passed > 0:
return JobStatus.running
return JobStatus.pending
|
Check the status of all the jobs run from this link
and return a status flag that summarizes that.
Parameters
----------
fail_running : `bool`
If True, consider running jobs as failed
fail_pending : `bool`
If True, consider pending jobs as failed
Returns
-------
status : `JobStatus`
Job status flag that summarizes the status of all the jobs.
|
entailment
|
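Worked example of the aggregation rules above, for hypothetical job status lists:
# [done, done, running]   -> n_failed=0, n_passed=2, n_total=3 -> JobStatus.running
# [done, failed, running] -> n_failed=1                        -> JobStatus.failed
# [done, done, done]      -> n_passed == n_total               -> JobStatus.done
# [pending, pending]      -> n_failed == 0, n_passed == 0      -> JobStatus.pending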
def get_jobs(self, recursive=True):
"""Return a dictionary with all the jobs
For sub-classes, if recursive is True this will include jobs
from any internal `Link`
"""
if recursive:
ret_dict = self.jobs.copy()
return ret_dict
return self.jobs
|
Return a dictionary with all the jobs
For sub-classes, if recursive is True this will include jobs
from any internal `Link`
|
entailment
|
def check_input_files(self,
return_found=True,
return_missing=True):
"""Check if input files exist.
Parameters
----------
return_found : bool
    If True, return the list of files that were found.
return_missing : bool
    If True, return the list of files that were missing.
Returns
-------
found : list
List of the found files, if requested, otherwise `None`
missing : list
List of the missing files, if requested, otherwise `None`
"""
all_input_files = self.files.chain_input_files + self.sub_files.chain_input_files
return check_files(all_input_files, self._file_stage,
return_found, return_missing)
|
Check if input files exist.
Parameters
----------
return_found : bool
    If True, return the list of files that were found.
return_missing : bool
    If True, return the list of files that were missing.
Returns
-------
found : list
List of the found files, if requested, otherwise `None`
missing : list
List of the missing files, if requested, otherwise `None`
|
entailment
|
def check_output_files(self,
return_found=True,
return_missing=True):
"""Check if output files exist.
Parameters
----------
return_found : bool
    If True, return the list of files that were found.
return_missing : bool
    If True, return the list of files that were missing.
Returns
-------
found : list
List of the found files, if requested, otherwise `None`
missing : list
List of the missing files, if requested, otherwise `None`
"""
all_output_files = self.files.chain_output_files + \
self.sub_files.chain_output_files
return check_files(all_output_files, self._file_stage,
return_found, return_missing)
|
Check if output files exist.
Parameters
----------
return_found : bool
    If True, return the list of files that were found.
return_missing : bool
    If True, return the list of files that were missing.
Returns
-------
found : list
List of the found files, if requested, otherwise `None`
missing : list
List of the missing files, if requested, otherwise `None`
|
entailment
|
def missing_input_files(self):
"""Make and return a dictionary of the missing input files.
This returns a dictionary mapping
filepath to list of `Link` that use the file as input.
"""
missing = self.check_input_files(return_found=False)
ret_dict = {}
for miss_file in missing:
ret_dict[miss_file] = [self.linkname]
return ret_dict
|
Make and return a dictionary of the missing input files.
This returns a dictionary mapping
filepath to list of `Link` that use the file as input.
|
entailment
|
def missing_output_files(self):
"""Make and return a dictionary of the missing output files.
This returns a dictionary mapping
filepath to list of links that produce the file as output.
"""
missing = self.check_output_files(return_found=False)
ret_dict = {}
for miss_file in missing:
ret_dict[miss_file] = [self.linkname]
return ret_dict
|
Make and return a dictionary of the missing output files.
This returns a dictionary mapping
filepath to list of links that produce the file as output.
|
entailment
|
def formatted_command(self):
"""Build and return the formatted command for this `Link`.
This is exactly the command as called from the Unix command line.
"""
# FIXME, this isn't really great as it forces you to have all the arguments
command_template = self.command_template()
format_dict = self.args.copy()
for key, value in format_dict.items():
# protect whitespace
if isinstance(value, list):
outstr = ""
if key == 'args':
outkey = ""
else:
outkey = "--%s "
for lval in value:
outstr += ' '
outstr += outkey
outstr += lval
format_dict[key] = '"%s"' % outstr
elif isinstance(value, str) and value.find(' ') >= 0 and key != 'args':
format_dict[key] = '"%s"' % value
elif value is None:
format_dict[key] = 'none'
command = command_template.format(**format_dict)
return command
|
Build and return the formatted command for this `Link`.
This is exactly the command as called from the Unix command line.
|
entailment
|
def run_command(self, stream=sys.stdout, dry_run=False):
"""Runs the command for this link. This method can be overridden by
sub-classes to invoke a different command
Parameters
----------
stream : `file`
Stream that this `Link` will print to,
Must have 'write' function
dry_run : bool
Print command but do not run it
Returns
-------
code : int
Return code from sub-process
"""
command = self.formatted_command()
if dry_run:
stream.write("%s\n" % command)
stream.flush()
return 0
proc = subprocess.Popen(command.split(),
stderr=stream,
stdout=stream)
proc.communicate()
return proc.returncode
|
Runs the command for this link. This method can be overridden by
sub-classes to invoke a different command
Parameters
----------
stream : `file`
Stream that this `Link` will print to,
Must have 'write' function
dry_run : bool
Print command but do not run it
Returns
-------
code : int
Return code from sub-process
|
entailment
|
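A short usage sketch: with dry_run=True the command is only printed and 0 is returned (assumes a configured `link` instance):
import sys
rc = link.run_command(stream=sys.stdout, dry_run=True)
assert rc == 0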
def run_with_log(self, dry_run=False, stage_files=True, resubmit_failed=False):
"""Runs this link with output sent to a pre-defined logfile
Parameters
----------
dry_run : bool
Print command but do not run it.
stage_files : bool
Copy files to and from scratch staging area.
resubmit_failed : bool
Flag for sub-classes to resubmit failed jobs.
"""
fullkey = JobDetails.make_fullkey(self.full_linkname)
job_details = self.jobs[fullkey]
odir = os.path.dirname(job_details.logfile)
try:
os.makedirs(odir)
except OSError:
pass
ostream = open(job_details.logfile, 'w')
self.run(ostream, dry_run, stage_files, resubmit_failed)
|
Runs this link with output sent to a pre-defined logfile
Parameters
----------
dry_run : bool
Print command but do not run it.
stage_files : bool
Copy files to and from scratch staging area.
resubmit_failed : bool
Flag for sub-classes to resubmit failed jobs.
|
entailment
|
def command_template(self):
"""Build and return a string that can be used as a template invoking
this chain from the command line.
The actual command can be obtained by using
`self.command_template().format(**self.args)`
"""
com_out = self.appname
arg_string = ""
flag_string = ""
# Loop over the key, value pairs in self.args
for key, val in self.args.items():
# Check if the value is set in self._options
# If so, get the value from there
if val is None:
opt_val = self._options[key][0]
else:
opt_val = val
opt_type = self._options[key][2]
if key == 'args':
# 'args' is special, pull it out and move it to the back
arg_string += ' {%s}' % key
elif opt_type is bool:
if opt_val:
flag_string += ' --%s' % (key)
elif opt_type is list:
if is_null(opt_val):
continue
elif isinstance(opt_val, str):
com_out += ' --%s %s' % (key, opt_val)
elif isinstance(opt_val, list):
for arg_val in opt_val:
com_out += ' --%s %s' % (key, arg_val)
else:
com_out += ' --%s {%s}' % (key, key)
com_out += flag_string
com_out += arg_string
return com_out
|
Build and return a string that can be used as a template invoking
this chain from the command line.
The actual command can be obtained by using
`self.command_template().format(**self.args)`
|
entailment
|
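A sketch of the template/format split, with a hypothetical template; formatted_command() adds quoting for whitespace and maps None to 'none' on top of this:
# command_template() might yield 'fermipy-app --irfs {irfs} --zmax {zmax}'
command = link.command_template().format(**link.args)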
def print_summary(self, stream=sys.stdout, indent="", recurse_level=2):
"""Print a summary of the activity done by this `Link`.
Parameters
----------
stream : `file`
Stream to print to, must have 'write' method.
indent : str
Indentation at start of line
recurse_level : int
Number of recursion levels to print
"""
if recurse_level < 0:
return
stream.write("%sLink: %s\n" % (indent, self.linkname))
stream.write("%sN_jobs: %s\n" % (indent, len(self.get_jobs())))
self.sub_files.print_chain_summary(stream, indent)
|
Print a summary of the activity done by this `Link`.
Parameters
----------
stream : `file`
Stream to print to, must have 'write' method.
indent : str
Indentation at start of line
recurse_level : int
Number of recursion levels to print
|
entailment
|
def distance_to_edge(geom, skydir):
"""Return the angular distance from the given direction and
the edge of the projection."""
# FIXME: We should add a pixel_size property in gammapy.maps
# FIXME: We should make this into a MapGeom method
xpix, ypix = skydir.to_pixel(geom.wcs, origin=0)
deltax = np.array((xpix - geom.center_pix[0]) * geom._cdelt[0],
ndmin=1)
deltay = np.array((ypix - geom.center_pix[1]) * geom._cdelt[1],
ndmin=1)
deltax = np.abs(deltax) - 0.5 * geom.width[0]
deltay = np.abs(deltay) - 0.5 * geom.width[1]
m0 = (deltax < 0) & (deltay < 0)
m1 = (deltax > 0) & (deltay < 0)
m2 = (deltax < 0) & (deltay > 0)
m3 = (deltax > 0) & (deltay > 0)
mx = np.abs(deltax) <= np.abs(deltay)
my = np.abs(deltay) < np.abs(deltax)
delta = np.zeros(len(deltax))
delta[(m0 & mx) | (m3 & my) | m1] = deltax[(m0 & mx) | (m3 & my) | m1]
delta[(m0 & my) | (m3 & mx) | m2] = deltay[(m0 & my) | (m3 & mx) | m2]
return delta
|
Return the angular distance from the given direction and
the edge of the projection.
|
entailment
|
def create_wcs(skydir, coordsys='CEL', projection='AIT',
cdelt=1.0, crpix=1., naxis=2, energies=None):
"""Create a WCS object.
Parameters
----------
skydir : `~astropy.coordinates.SkyCoord`
Sky coordinate of the WCS reference point.
coordsys : str
projection : str
cdelt : float or (float,float)
In the first case the same value is used for x and y axes
crpix : float or (float,float)
In the first case the same value is used for x and y axes
naxis : {2, 3}
Number of dimensions of the projection.
energies : array-like
Array of energies that defines the third dimension if naxis=3.
"""
w = WCS(naxis=naxis)
if coordsys == 'CEL':
w.wcs.ctype[0] = 'RA---%s' % (projection)
w.wcs.ctype[1] = 'DEC--%s' % (projection)
w.wcs.crval[0] = skydir.icrs.ra.deg
w.wcs.crval[1] = skydir.icrs.dec.deg
elif coordsys == 'GAL':
w.wcs.ctype[0] = 'GLON-%s' % (projection)
w.wcs.ctype[1] = 'GLAT-%s' % (projection)
w.wcs.crval[0] = skydir.galactic.l.deg
w.wcs.crval[1] = skydir.galactic.b.deg
else:
raise Exception('Unrecognized coordinate system.')
try:
w.wcs.crpix[0] = crpix[0]
w.wcs.crpix[1] = crpix[1]
except (TypeError, IndexError):
w.wcs.crpix[0] = crpix
w.wcs.crpix[1] = crpix
try:
w.wcs.cdelt[0] = cdelt[0]
w.wcs.cdelt[1] = cdelt[1]
except (TypeError, IndexError):
w.wcs.cdelt[0] = -cdelt
w.wcs.cdelt[1] = cdelt
w = WCS(w.to_header())
if naxis == 3 and energies is not None:
w.wcs.crpix[2] = 1
w.wcs.crval[2] = energies[0]
w.wcs.cdelt[2] = energies[1] - energies[0]
w.wcs.ctype[2] = 'Energy'
w.wcs.cunit[2] = 'MeV'
return w
|
Create a WCS object.
Parameters
----------
skydir : `~astropy.coordinates.SkyCoord`
Sky coordinate of the WCS reference point.
coordsys : str
projection : str
cdelt : float or (float,float)
In the first case the same value is used for x and y axes
crpix : float or (float,float)
In the first case the same value is used for x and y axes
naxis : {2, 3}
Number of dimensions of the projection.
energies : array-like
Array of energies that defines the third dimension if naxis=3.
|
entailment
|
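Usage sketch: build a 2-D galactic AIT projection centered on the Galactic center (parameter values are illustrative):
from astropy.coordinates import SkyCoord
skydir = SkyCoord(0.0, 0.0, unit='deg', frame='galactic')
w = create_wcs(skydir, coordsys='GAL', projection='AIT', cdelt=0.1, crpix=(50.5, 50.5))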
def wcs_add_energy_axis(wcs, energies):
"""Copy a WCS object, and add on the energy axis.
Parameters
----------
wcs : `~astropy.wcs.WCS`
WCS
energies : array-like
Array of energies.
"""
if wcs.naxis != 2:
raise Exception(
'wcs_add_energy_axis, input WCS naxis != 2 %i' % wcs.naxis)
w = WCS(naxis=3)
w.wcs.crpix[0] = wcs.wcs.crpix[0]
w.wcs.crpix[1] = wcs.wcs.crpix[1]
w.wcs.ctype[0] = wcs.wcs.ctype[0]
w.wcs.ctype[1] = wcs.wcs.ctype[1]
w.wcs.crval[0] = wcs.wcs.crval[0]
w.wcs.crval[1] = wcs.wcs.crval[1]
w.wcs.cdelt[0] = wcs.wcs.cdelt[0]
w.wcs.cdelt[1] = wcs.wcs.cdelt[1]
w = WCS(w.to_header())
w.wcs.crpix[2] = 1
w.wcs.crval[2] = energies[0]
w.wcs.cdelt[2] = energies[1] - energies[0]
w.wcs.ctype[2] = 'Energy'
return w
|
Copy a WCS object, and add on the energy axis.
Parameters
----------
wcs : `~astropy.wcs.WCS`
WCS
energies : array-like
Array of energies.
|
entailment
|
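Usage sketch: promote a 2-D WCS to 3-D. Only energies[1] - energies[0] sets the step, so a uniformly spaced grid is assumed (values illustrative):
import numpy as np
from astropy.coordinates import SkyCoord
w2 = create_wcs(SkyCoord(0.0, 0.0, unit='deg', frame='galactic'), coordsys='GAL')
w3 = wcs_add_energy_axis(w2, np.linspace(100., 1e4, 100))  # hypothetical MeV grid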
def offset_to_sky(skydir, offset_lon, offset_lat,
coordsys='CEL', projection='AIT'):
"""Convert a cartesian offset (X,Y) in the given projection into
a pair of spherical coordinates."""
offset_lon = np.array(offset_lon, ndmin=1)
offset_lat = np.array(offset_lat, ndmin=1)
w = create_wcs(skydir, coordsys, projection)
pixcrd = np.vstack((offset_lon, offset_lat)).T
return w.wcs_pix2world(pixcrd, 0)
|
Convert a cartesian offset (X,Y) in the given projection into
a pair of spherical coordinates.
|
entailment
|
def sky_to_offset(skydir, lon, lat, coordsys='CEL', projection='AIT'):
"""Convert sky coordinates to a projected offset. This function
is the inverse of offset_to_sky."""
w = create_wcs(skydir, coordsys, projection)
skycrd = np.vstack((lon, lat)).T
if len(skycrd) == 0:
return skycrd
return w.wcs_world2pix(skycrd, 0)
|
Convert sky coordinates to a projected offset. This function
is the inverse of offset_to_sky.
|
entailment
|
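Round-trip sketch: sky_to_offset inverts offset_to_sky for the same center and projection (the center is a hypothetical value):
import numpy as np
from astropy.coordinates import SkyCoord
center = SkyCoord(10.0, -30.0, unit='deg')
lonlat = offset_to_sky(center, [0.5], [0.5])  # offsets -> (lon, lat) in deg
offsets = sky_to_offset(center, lonlat[:, 0], lonlat[:, 1])
assert np.allclose(offsets, [[0.5, 0.5]], atol=1e-5)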
def offset_to_skydir(skydir, offset_lon, offset_lat,
coordsys='CEL', projection='AIT'):
"""Convert a cartesian offset (X,Y) in the given projection into
a SkyCoord."""
offset_lon = np.array(offset_lon, ndmin=1)
offset_lat = np.array(offset_lat, ndmin=1)
w = create_wcs(skydir, coordsys, projection)
return SkyCoord.from_pixel(offset_lon, offset_lat, w, 0)
|
Convert a cartesian offset (X,Y) in the given projection into
a SkyCoord.
|
entailment
|
def skydir_to_pix(skydir, wcs):
"""Convert skydir object to pixel coordinates.
Gracefully handles 0-d coordinate arrays.
Parameters
----------
skydir : `~astropy.coordinates.SkyCoord`
wcs : `~astropy.wcs.WCS`
Returns
-------
xp, yp : `numpy.ndarray`
The pixel coordinates
"""
if len(skydir.shape) > 0 and len(skydir) == 0:
return [np.empty(0), np.empty(0)]
return skydir.to_pixel(wcs, origin=0)
|
Convert skydir object to pixel coordinates.
Gracefully handles 0-d coordinate arrays.
Parameters
----------
skydir : `~astropy.coordinates.SkyCoord`
wcs : `~astropy.wcs.WCS`
Returns
-------
xp, yp : `numpy.ndarray`
The pixel coordinates
|
entailment
|
def pix_to_skydir(xpix, ypix, wcs):
"""Convert pixel coordinates to a skydir object.
Gracefully handles 0-d coordinate arrays.
Always returns a celestial coordinate.
Parameters
----------
xpix : `numpy.ndarray`
ypix : `numpy.ndarray`
wcs : `~astropy.wcs.WCS`
"""
xpix = np.array(xpix)
ypix = np.array(ypix)
if xpix.ndim > 0 and len(xpix) == 0:
return SkyCoord(np.empty(0), np.empty(0), unit='deg',
frame='icrs')
return SkyCoord.from_pixel(xpix, ypix, wcs,
origin=0).transform_to('icrs')
|
Convert pixel coordinates to a skydir object.
Gracefully handles 0-d coordinate arrays.
Always returns a celestial coordinate.
Parameters
----------
xpix : `numpy.ndarray`
ypix : `numpy.ndarray`
wcs : `~astropy.wcs.WCS`
|
entailment
|
def wcs_to_axes(w, npix):
"""Generate a sequence of bin edge vectors corresponding to the
axes of a WCS object."""
npix = npix[::-1]
x = np.linspace(-(npix[0]) / 2., (npix[0]) / 2.,
npix[0] + 1) * np.abs(w.wcs.cdelt[0])
y = np.linspace(-(npix[1]) / 2., (npix[1]) / 2.,
npix[1] + 1) * np.abs(w.wcs.cdelt[1])
if w.wcs.naxis == 2:
return x, y
cdelt2 = np.log10((w.wcs.cdelt[2] + w.wcs.crval[2]) / w.wcs.crval[2])
z = (np.linspace(0, npix[2], npix[2] + 1)) * cdelt2
z += np.log10(w.wcs.crval[2])
return x, y, z
|
Generate a sequence of bin edge vectors corresponding to the
axes of a WCS object.
|
entailment
|
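A worked note on the third axis above, with hypothetical header values:
# For a 3-axis WCS with w.wcs.crval[2] = 100. and w.wcs.cdelt[2] = 10.,
# each edge step is log10((100. + 10.) / 100.) ~= 0.0414 in log10(E),
# starting from log10(100.) = 2, i.e. the z edges are returned in log10 units.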
def wcs_to_coords(w, shape):
"""Generate an N x D list of pixel center coordinates where N is
the number of pixels and D is the dimensionality of the map."""
if w.naxis == 2:
y, x = wcs_to_axes(w, shape)
elif w.naxis == 3:
z, y, x = wcs_to_axes(w, shape)
else:
raise Exception("Wrong number of WCS axes %i" % w.naxis)
x = 0.5 * (x[1:] + x[:-1])
y = 0.5 * (y[1:] + y[:-1])
if w.naxis == 2:
x = np.ravel(np.ones(shape) * x[:, np.newaxis])
y = np.ravel(np.ones(shape) * y[np.newaxis, :])
return np.vstack((x, y))
z = 0.5 * (z[1:] + z[:-1])
x = np.ravel(np.ones(shape) * x[:, np.newaxis, np.newaxis])
y = np.ravel(np.ones(shape) * y[np.newaxis, :, np.newaxis])
z = np.ravel(np.ones(shape) * z[np.newaxis, np.newaxis, :])
return np.vstack((x, y, z))
|
Generate an N x D list of pixel center coordinates where N is
the number of pixels and D is the dimensionality of the map.
|
entailment
|
def get_cel_to_gal_angle(skydir):
"""Calculate the rotation angle in radians between the longitude
axes of a local projection in celestial and galactic coordinates.
Parameters
----------
skydir : `~astropy.coordinates.SkyCoord`
Direction of projection center.
Returns
-------
angle : float
Rotation angle in radians.
"""
wcs0 = create_wcs(skydir, coordsys='CEL')
wcs1 = create_wcs(skydir, coordsys='GAL')
x, y = SkyCoord.to_pixel(SkyCoord.from_pixel(1.0, 0.0, wcs0), wcs1)
return np.arctan2(y, x)
|
Calculate the rotation angle in radians between the longitude
axes of a local projection in celestial and galactic coordinates.
Parameters
----------
skydir : `~astropy.coordinates.SkyCoord`
Direction of projection center.
Returns
-------
angle : float
Rotation angle in radians.
|
entailment
|
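Usage sketch (the direction is illustrative, roughly the Crab):
from astropy.coordinates import SkyCoord
angle = get_cel_to_gal_angle(SkyCoord(83.63, 22.01, unit='deg'))  # radians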
def extract_mapcube_region(infile, skydir, outfile, maphdu=0):
"""Extract a region out of an all-sky mapcube file.
Parameters
----------
infile : str
Path to mapcube file.
skydir : `~astropy.coordinates.SkyCoord`
"""
h = fits.open(os.path.expandvars(infile))
npix = 200
shape = list(h[maphdu].data.shape)
shape[1] = npix
shape[2] = npix
wcs = WCS(h[maphdu].header)
skywcs = WCS(h[maphdu].header, naxis=[1, 2])
coordsys = get_coordsys(skywcs)
region_wcs = wcs.deepcopy()
if coordsys == 'CEL':
region_wcs.wcs.crval[0] = skydir.ra.deg
region_wcs.wcs.crval[1] = skydir.dec.deg
elif coordsys == 'GAL':
region_wcs.wcs.crval[0] = skydir.galactic.l.deg
region_wcs.wcs.crval[1] = skydir.galactic.b.deg
else:
raise Exception('Unrecognized coordinate system.')
region_wcs.wcs.crpix[0] = npix // 2 + 0.5
region_wcs.wcs.crpix[1] = npix // 2 + 0.5
from reproject import reproject_interp
data, footprint = reproject_interp(h, region_wcs.to_header(),
hdu_in=maphdu,
shape_out=shape)
hdu_image = fits.PrimaryHDU(data, header=region_wcs.to_header())
hdulist = fits.HDUList([hdu_image, h['ENERGIES']])
hdulist.writeto(outfile, overwrite=True)
|
Extract a region out of an all-sky mapcube file.
Parameters
----------
infile : str
Path to mapcube file.
skydir : `~astropy.coordinates.SkyCoord`
|
entailment
|
def distance_to_edge(self, skydir):
"""Return the angular distance from the given direction and
the edge of the projection."""
xpix, ypix = skydir.to_pixel(self.wcs, origin=0)
deltax = np.array((xpix - self._pix_center[0]) * self._pix_size[0],
ndmin=1)
deltay = np.array((ypix - self._pix_center[1]) * self._pix_size[1],
ndmin=1)
deltax = np.abs(deltax) - 0.5 * self._width[0]
deltay = np.abs(deltay) - 0.5 * self._width[1]
m0 = (deltax < 0) & (deltay < 0)
m1 = (deltax > 0) & (deltay < 0)
m2 = (deltax < 0) & (deltay > 0)
m3 = (deltax > 0) & (deltay > 0)
mx = np.abs(deltax) <= np.abs(deltay)
my = np.abs(deltay) < np.abs(deltax)
delta = np.zeros(len(deltax))
delta[(m0 & mx) | (m3 & my) | m1] = deltax[(m0 & mx) | (m3 & my) | m1]
delta[(m0 & my) | (m3 & mx) | m2] = deltay[(m0 & my) | (m3 & mx) | m2]
return delta
|
Return the angular distance from the given direction and
the edge of the projection.
|
entailment
|
def readlines(arg):
"""Read lines from a file into a list.
Removes whitespace and lines that start with '#'
"""
with open(arg) as fin:
    lines_in = fin.readlines()
lines_out = []
for line in lines_in:
line = line.strip()
if not line or line[0] == '#':
continue
lines_out.append(line)
return lines_out
|
Read lines from a file into a list.
Removes whitespace and lines that start with '#'
|
entailment
|
def create_inputlist(arglist):
"""Read lines from a file and makes a list of file names.
Removes whitespace and lines that start with '#'
Recursively read all files with the extension '.lst'
"""
lines = []
if isinstance(arglist, list):
for arg in arglist:
if os.path.splitext(arg)[1] == '.lst':
lines += readlines(arg)
else:
lines.append(arg)
elif is_null(arglist):
pass
else:
if os.path.splitext(arglist)[1] == '.lst':
lines += readlines(arglist)
else:
lines.append(arglist)
return lines
|
Read lines from a file and make a list of file names.
Removes whitespace and lines that start with '#'.
Any argument with the '.lst' extension is expanded by reading
the file names it contains, one per line.
|
entailment
|
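Usage sketch: plain names pass through, while '.lst' entries are expanded one level via readlines ('files.lst' is hypothetical):
inputs = create_inputlist(['a.fits', 'files.lst'])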
def init(self):
"""Initialize histograms."""
evclass_shape = [16, 40, 10]
evtype_shape = [16, 16, 40, 10]
evclass_psf_shape = [16, 40, 10, 100]
evtype_psf_shape = [16, 16, 40, 10, 100]
self._hists_eff = dict()
self._hists = dict(evclass_on=np.zeros(evclass_shape),
evclass_off=np.zeros(evclass_shape),
evclass_alpha=np.zeros([16, 40, 1]),
evtype_on=np.zeros(evtype_shape),
evtype_off=np.zeros(evtype_shape),
evtype_alpha=np.zeros([16, 1, 40, 1]),
evclass_psf_on=np.zeros(evclass_psf_shape),
evclass_psf_off=np.zeros(evclass_psf_shape),
evtype_psf_on=np.zeros(evtype_psf_shape),
evtype_psf_off=np.zeros(evtype_psf_shape),
)
|
Initialize histograms.
|
entailment
|
def create_hist(self, evclass, evtype, xsep, energy, ctheta,
fill_sep=False, fill_evtype=False):
"""Load into a histogram."""
nevt = len(evclass)
ebin = utils.val_to_bin(self._energy_bins, energy)
scale = self._psf_scale[ebin]
vals = [energy, ctheta]
bins = [self._energy_bins, self._ctheta_bins]
if fill_sep:
vals += [xsep]
bins += [self._xsep_bins]
if fill_evtype:
loopv = [self._evclass_bins[:-1], self._evtype_bins[:-1]]
shape = [16, 16] + [len(b) - 1 for b in bins]
else:
loopv = [self._evclass_bins[:-1]]
shape = [16] + [len(b) - 1 for b in bins]
h = np.zeros(shape)
for t in itertools.product(*loopv):
m = (evclass[:, int(t[0])] == True)
if fill_evtype:
m &= (evtype[:, int(t[1])] == True)
if not np.sum(m):
continue
z = np.vstack(vals)
z = z[:, m]
if fill_evtype:
h[int(t[0]), int(t[1])] += np.histogramdd(z.T, bins=bins)[0]
else:
h[int(t[0])] += np.histogramdd(z.T, bins=bins)[0]
return h
|
Load into a histogram.
|
entailment
|
def calc_eff(self):
"""Calculate the efficiency."""
hists = self.hists
hists_out = self._hists_eff
cth_axis_idx = dict(evclass=2, evtype=3)
for k in ['evclass', 'evtype']:
if k == 'evclass':
ns0 = hists['evclass_on'][4][None, ...]
nb0 = hists['evclass_off'][4][None, ...]
else:
ns0 = hists['evclass_on'][4][None, None, ...]
nb0 = hists['evclass_off'][4][None, None, ...]
eff, eff_var = calc_eff(ns0, nb0,
hists['%s_on' % k], hists['%s_off' % k],
hists['%s_alpha' % k])
hists_out['%s_cth_eff' % k] = eff
hists_out['%s_cth_eff_var' % k] = eff_var
eff, eff_var = calc_eff(ns0, nb0,
hists['%s_on' % k], hists['%s_off' % k],
hists['%s_alpha' % k],
sum_axes=[cth_axis_idx[k]])
hists_out['%s_eff' % k] = np.squeeze(eff)
hists_out['%s_eff_var' % k] = np.squeeze(eff_var)
|
Calculate the efficiency.
|
entailment
|
def calc_containment(self):
"""Calculate PSF containment."""
hists = self.hists
hists_out = self._hists_eff
quantiles = [0.34, 0.68, 0.90, 0.95]
cth_axis_idx = dict(evclass=2, evtype=3)
for k in ['evclass']: # ,'evtype']:
print(k)
non = hists['%s_psf_on' % k]
noff = hists['%s_psf_off' % k]
alpha = hists['%s_alpha' % k][..., None]
if k == 'evclass':
sep = self._sep_bins[None, :, None, 1:]
else:
sep = self._sep_bins[None, None, :, None, 1:]
qval, qerr = calc_quantiles(sep, non, noff, alpha, quantiles)
for i, q in enumerate(quantiles):
hists_out['%s_cth_q%2i' % (k, q * 100)] = qval[i]
hists_out['%s_cth_q%2i_err' % (k, q * 100)] = qerr[i]
non = np.sum(non, axis=cth_axis_idx[k])
noff = np.sum(noff, axis=cth_axis_idx[k])
alpha = np.squeeze(alpha, axis=cth_axis_idx[k])
sep = np.squeeze(sep, axis=cth_axis_idx[k])
qval, qerr = calc_quantiles(sep, non, noff, alpha, quantiles)
for i, q in enumerate(quantiles):
hists_out['%s_q%2i' % (k, q * 100)] = qval[i]
hists_out['%s_q%2i_err' % (k, q * 100)] = qerr[i]
|
Calculate PSF containment.
|
entailment
|
def create_default_config(schema):
"""Create a configuration dictionary from a schema dictionary.
The schema defines the valid configuration keys and their default
values. Each element of ``schema`` should be a tuple/list
containing (default value,docstring,type) or a dict containing a
nested schema."""
o = {}
for key, item in schema.items():
if isinstance(item, dict):
o[key] = create_default_config(item)
elif isinstance(item, tuple):
value, comment, item_type = item
if isinstance(item_type, tuple):
item_type = item_type[0]
if value is None and (item_type == list or item_type == dict):
value = item_type()
if key in o:
raise KeyError('Duplicate key in schema.')
o[key] = value
else:
raise TypeError('Unrecognized type for schema dict element: %s %s' %
(key, type(item)))
return o
|
Create a configuration dictionary from a schema dictionary.
The schema defines the valid configuration keys and their default
values. Each element of ``schema`` should be a tuple/list
containing (default value,docstring,type) or a dict containing a
nested schema.
|
entailment
|
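A small sketch of the schema convention described above (keys and defaults are hypothetical):
schema = {
    'fileio': {'outdir': (None, 'Path to the output directory.', str)},
    'ntrials': (10, 'Number of trials.', int),
}
cfg = create_default_config(schema)
# cfg -> {'fileio': {'outdir': None}, 'ntrials': 10}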
def update_from_schema(cfg, cfgin, schema):
"""Update configuration dictionary ``cfg`` with the contents of
``cfgin`` using the ``schema`` dictionary to determine the valid
input keys.
Parameters
----------
cfg : dict
Configuration dictionary to be updated.
cfgin : dict
New configuration dictionary that will be merged with ``cfg``.
schema : dict
Configuration schema defining the valid configuration keys and
their types.
Returns
-------
cfgout : dict
"""
cfgout = copy.deepcopy(cfg)
for k, v in schema.items():
if k not in cfgin:
continue
if isinstance(v, dict):
cfgout.setdefault(k, {})
cfgout[k] = update_from_schema(cfg[k], cfgin[k], v)
elif v[2] is dict:
cfgout[k] = utils.merge_dict(cfg[k], cfgin[k], add_new_keys=True)
else:
cfgout[k] = cfgin[k]
return cfgout
|
Update configuration dictionary ``cfg`` with the contents of
``cfgin`` using the ``schema`` dictionary to determine the valid
input keys.
Parameters
----------
cfg : dict
Configuration dictionary to be updated.
cfgin : dict
New configuration dictionary that will be merged with ``cfg``.
schema : dict
Configuration schema defining the valid configuration keys and
their types.
Returns
-------
cfgout : dict
|
entailment
|
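A sketch reusing the hypothetical schema and cfg from the previous example; only keys present in the schema are merged, so unknown keys are dropped:
cfg_out = update_from_schema(cfg, {'ntrials': 100, 'unknown': 1}, schema)
# cfg_out['ntrials'] -> 100; 'unknown' is ignored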
def write_config(self, outfile):
"""Write the configuration dictionary to an output file."""
utils.write_yaml(self.config, outfile, default_flow_style=False)
|
Write the configuration dictionary to an output file.
|
entailment
|
def create(cls, configfile):
"""Create a configuration dictionary from a yaml config file.
This function will first populate the dictionary with defaults
taken from pre-defined configuration files. The configuration
dictionary is then updated with the user-defined configuration
file. Any settings defined by the user will take precedence
over the default settings."""
# populate config dictionary with an initial set of values
# config_logging = ConfigManager.load('logging.yaml')
# start from a minimal default so the lookup below cannot fail;
# 'fileio.outdir' falls back to the directory of the config file
config = {'fileio': {'outdir': None}}
if config['fileio']['outdir'] is None:
config['fileio']['outdir'] = os.path.abspath(
os.path.dirname(configfile))
user_config = cls.load(configfile)
config = utils.merge_dict(config, user_config, True)
config['fileio']['outdir'] = os.path.abspath(
config['fileio']['outdir'])
return config
|
Create a configuration dictionary from a yaml config file.
This function will first populate the dictionary with defaults
taken from pre-defined configuration files. The configuration
dictionary is then updated with the user-defined configuration
file. Any settings defined by the user will take precedence
over the default settings.
|
entailment
|
def update_null_primary(hdu_in, hdu=None):
""" 'Update' a null primary HDU
This actually just checks hdu exists and creates it from hdu_in if it does not.
"""
if hdu is None:
hdu = fits.PrimaryHDU(header=hdu_in.header)
else:
hdu = hdu_in
hdu.header.remove('FILENAME', ignore_missing=True)
return hdu
|
'Update' a null primary HDU
This actually just checks hdu exists and creates it from hdu_in if it does not.
|
entailment
|
def update_primary(hdu_in, hdu=None):
""" 'Update' a primary HDU
This checks hdu exists and creates it from hdu_in if it does not.
If hdu does exist, this adds the data in hdu_in to hdu
"""
if hdu is None:
hdu = fits.PrimaryHDU(data=hdu_in.data, header=hdu_in.header)
else:
hdu.data += hdu_in.data
return hdu
|
'Update' a primary HDU
This checks hdu exists and creates it from hdu_in if it does not.
If hdu does exist, this adds the data in hdu_in to hdu
|
entailment
|
def update_image(hdu_in, hdu=None):
""" 'Update' an image HDU
This checks hdu exists and creates it from hdu_in if it does not.
If hdu does exist, this adds the data in hdu_in to hdu
"""
if hdu is None:
hdu = fits.ImageHDU(
data=hdu_in.data, header=hdu_in.header, name=hdu_in.name)
else:
hdu.data += hdu_in.data
return hdu
|
'Update' an image HDU
This checks hdu exists and creates it from hdu_in if it does not.
If hdu does exist, this adds the data in hdu_in to hdu
|
entailment
|
def update_ebounds(hdu_in, hdu=None):
""" 'Update' the EBOUNDS HDU
This checks hdu exists and creates it from hdu_in if it does not.
If hdu does exist, this raises an exception if it does not match hdu_in
"""
if hdu is None:
hdu = fits.BinTableHDU(
data=hdu_in.data, header=hdu_in.header, name=hdu_in.name)
else:
for col in ['CHANNEL', 'E_MIN', 'E_MAX']:
if (hdu.data[col] != hdu_in.data[col]).any():
raise ValueError("Energy bounds do not match : %s %s" %
(hdu.data[col], hdu_in.data[col]))
return hdu
|
'Update' the EBOUNDS HDU
This checks hdu exists and creates it from hdu_in if it does not.
If hdu does exist, this raises an exception if it does not match hdu_in
|
entailment
|
def merge_all_gti_data(datalist_in, nrows, first):
""" Merge together all the GTI data
Parameters
----------
datalist_in : list of `astropy.io.fits.BinTableHDU` data
The GTI data that is being merged
nrows : `~numpy.ndarray` of ints
Array with the number of nrows for each object in datalist_in
first : `astropy.io.fits.BinTableHDU`
BinTableHDU to use as a template
Returns
-------
out_hdu : `astropy.io.fits.BinTableHDU`
BinTableHDU with the merged GTIs
"""
max_row = nrows.cumsum()
min_row = max_row - nrows
out_hdu = fits.BinTableHDU.from_columns(
first.columns, header=first.header, nrows=nrows.sum())
for (imin, imax, data_in) in zip(min_row, max_row, datalist_in):
for col in first.columns:
out_hdu.data[col.name][imin:imax] = data_in[col.name]
return out_hdu
|
Merge together all the GTI data
Parameters
----------
datalist_in : list of `astropy.io.fits.BinTableHDU` data
The GTI data that is being merged
nrows : `~numpy.ndarray` of ints
Array with the number of nrows for each object in datalist_in
first : `astropy.io.fits.BinTableHDU`
BinTableHDU to use as a template
Returns
-------
out_hdu : `astropy.io.fits.BinTableHDU`
BinTableHDU with the merged GTIs
|
entailment
|
def extract_gti_data(hdu_in):
""" Extract some GTI related data
Parameters
----------
hdu_in : `astropy.io.fits.BinTableHDU`
The GTI data
Returns
-------
data : `astropy.io.fits.BinTableHDU` data
exposure : float
Exposure value taken from FITS header
tstop : float
TSTOP value taken from FITS header
"""
data = hdu_in.data
exposure = hdu_in.header['EXPOSURE']
tstop = hdu_in.header['TSTOP']
return (data, exposure, tstop)
|
Extract some GTI related data
Parameters
----------
hdu_in : `astropy.io.fits.BinTableHDU`
The GTI data
Returns
-------
data : `astropy.io.fits.BinTableHDU` data
exposure : float
Exposure value taken from FITS header
tstop : float
TSTOP value taken from FITS header
|
entailment
|
def update_hpx_skymap_allsky(map_in, map_out):
""" 'Update' a HEALPix skymap
This checks map_out exists and creates it from map_in if it does not.
If map_out does exist, this adds the data in map_in to map_out
"""
if map_out is None:
in_hpx = map_in.hpx
out_hpx = HPX.create_hpx(in_hpx.nside, in_hpx.nest, in_hpx.coordsys,
None, in_hpx.ebins, None, in_hpx.conv, None)
data_out = map_in.expanded_counts_map()
print(data_out.shape, data_out.sum())
map_out = HpxMap(data_out, out_hpx)
else:
map_out.data += map_in.expanded_counts_map()
return map_out
|
'Update' a HEALPix skymap
This checks map_out exists and creates it from map_in if it does not.
If map_out does exist, this adds the data in map_in to map_out
|
entailment
|
def merge_wcs_counts_cubes(filelist):
""" Merge all the files in filelist, assuming that they WCS counts cubes
"""
out_prim = None
out_ebounds = None
datalist_gti = []
exposure_sum = 0.
nfiles = len(filelist)
ngti = np.zeros(nfiles, int)
for i, filename in enumerate(filelist):
fin = fits.open(filename)
sys.stdout.write('.')
sys.stdout.flush()
if i == 0:
out_prim = update_primary(fin[0], out_prim)
out_ebounds = update_ebounds(fin["EBOUNDS"], out_ebounds)
(gti_data, exposure, tstop) = extract_gti_data(fin["GTI"])
datalist_gti.append(gti_data)
exposure_sum += exposure
ngti[i] = len(gti_data)
if i == 0:
first = fin
elif i == nfiles - 1:
date_end = fin[0].header['DATE-END']
else:
fin.close()
out_gti = merge_all_gti_data(datalist_gti, ngti, first['GTI'])
out_gti.header['EXPOSURE'] = exposure_sum
out_gti.header['TSTOP'] = tstop
hdulist = [out_prim, out_ebounds, out_gti]
for hdu in hdulist:
hdu.header['DATE-END'] = date_end
out_prim.update_header()
sys.stdout.write("!\n")
return fits.HDUList(hdulist)
|
Merge all the files in filelist, assuming that they are WCS counts cubes.
|
entailment
|
def merge_hpx_counts_cubes(filelist):
""" Merge all the files in filelist, assuming that they HEALPix counts cubes
"""
out_prim = None
out_skymap = None
out_ebounds = None
datalist_gti = []
exposure_sum = 0.
nfiles = len(filelist)
ngti = np.zeros(nfiles, int)
out_name = None
for i, filename in enumerate(filelist):
fin = fits.open(filename)
sys.stdout.write('.')
sys.stdout.flush()
if i == 0:
out_prim = update_null_primary(fin[0], out_prim)
out_name = fin[1].name
map_in = HpxMap.create_from_hdulist(fin)
out_skymap = update_hpx_skymap_allsky(map_in, out_skymap)
if i == 0:
try:
out_ebounds = update_ebounds(fin["EBOUNDS"], out_ebounds)
except KeyError:
out_ebounds = update_energies(fin["ENERGIES"], out_ebounds)
try:
(gti_data, exposure, tstop) = extract_gti_data(fin["GTI"])
datalist_gti.append(gti_data)
exposure_sum += exposure
ngti[i] = len(gti_data)
except KeyError:
pass
if i == 0:
first = fin
elif i == nfiles - 1:
try:
date_end = fin[0].header['DATE-END']
except KeyError:
date_end = None
else:
fin.close()
out_skymap_hdu = out_skymap.create_image_hdu("SKYMAP")
hdulist = [out_prim, out_skymap_hdu, out_ebounds]
if len(datalist_gti) > 0:
out_gti = merge_all_gti_data(datalist_gti, ngti, first['GTI'])
out_gti.header['EXPOSURE'] = exposure_sum
out_gti.header['TSTOP'] = tstop
hdulist.append(out_gti)
for hdu in hdulist:
if date_end:
hdu.header['DATE-END'] = date_end
out_prim.update_header()
sys.stdout.write("!\n")
return fits.HDUList(hdulist)
|
Merge all the files in filelist, assuming that they are HEALPix counts cubes.
|
entailment
|
def run_analysis(self, argv):
"""Run this analysis"""
args = self._parser.parse_args(argv)
obs = BinnedAnalysis.BinnedObs(irfs=args.irfs,
expCube=args.expcube,
srcMaps=args.cmap,
binnedExpMap=args.bexpmap)
like = BinnedAnalysis.BinnedAnalysis(obs,
optimizer='MINUIT',
srcModel=GtSrcmapsCatalog.NULL_MODEL,
wmap=None)
source_factory = pyLike.SourceFactory(obs.observation)
source_factory.readXml(args.srcmdl, BinnedAnalysis._funcFactory,
False, True, True)
srcNames = pyLike.StringVector()
source_factory.fetchSrcNames(srcNames)
min_idx = args.srcmin
max_idx = args.srcmax
if max_idx < 0:
max_idx = srcNames.size()
for i in range(min_idx, max_idx):
if i == min_idx:
like.logLike.saveSourceMaps(args.outfile)
pyLike.CountsMapBase.copyAndUpdateDssKeywords(args.cmap,
args.outfile,
None,
args.irfs)
srcName = srcNames[i]
source = source_factory.releaseSource(srcName)
like.logLike.addSource(source, False)
like.logLike.saveSourceMap_partial(args.outfile, source)
like.logLike.deleteSource(srcName)
if args.gzip:
os.system("gzip -9 %s" % args.outfile)
|
Run this analysis
|
entailment
|
def _make_xml_files(catalog_info_dict, comp_info_dict):
"""Make all the xml file for individual components
"""
for val in catalog_info_dict.values():
val.roi_model.write_xml(val.srcmdl_name)
for val in comp_info_dict.values():
for val2 in val.values():
val2.roi_model.write_xml(val2.srcmdl_name)
|
Make all the XML files for the individual components
|
entailment
|
def build_job_configs(self, args):
"""Hook to build job configurations
"""
job_configs = {}
components = Component.build_from_yamlfile(args['comp'])
NAME_FACTORY.update_base_dict(args['data'])
if self._comp_dict is None or self._comp_dict_file != args['library']:
self._comp_dict_file = args['library']
self._comp_dict = make_catalog_comp_dict(sources=self._comp_dict_file,
basedir=NAME_FACTORY.base_dict['basedir'])
else:
print ("Using cached catalog dict from %s" % args['library'])
catalog_info_dict = self._comp_dict['catalog_info_dict']
comp_info_dict = self._comp_dict['comp_info_dict']
n_src_per_job = args['nsrc']
if args['make_xml']:
SrcmapsCatalog_SG._make_xml_files(catalog_info_dict, comp_info_dict)
for catalog_name, catalog_info in catalog_info_dict.items():
n_cat_src = len(catalog_info.catalog.table)
n_job = int(math.ceil(float(n_cat_src) / n_src_per_job))
for comp in components:
zcut = "zmax%i" % comp.zmax
key = comp.make_key('{ebin_name}_{evtype_name}')
name_keys = dict(zcut=zcut,
sourcekey=catalog_name,
ebin=comp.ebin_name,
psftype=comp.evtype_name,
coordsys=comp.coordsys,
irf_ver=NAME_FACTORY.irf_ver(),
mktime='none',
fullpath=True)
for i_job in range(n_job):
full_key = "%s_%02i" % (key, i_job)
srcmin = i_job * n_src_per_job
srcmax = min(srcmin + n_src_per_job, n_cat_src)
outfile = NAME_FACTORY.srcmaps(
**name_keys).replace('.fits', "_%02i.fits" % (i_job))
logfile = make_nfs_path(outfile.replace('.fits', '.log'))
job_configs[full_key] = dict(cmap=NAME_FACTORY.ccube(**name_keys),
expcube=NAME_FACTORY.ltcube(**name_keys),
irfs=NAME_FACTORY.irfs(**name_keys),
bexpmap=NAME_FACTORY.bexpcube(**name_keys),
outfile=outfile,
logfile=logfile,
srcmdl=catalog_info.srcmdl_name,
evtype=comp.evtype,
srcmin=srcmin,
srcmax=srcmax)
return job_configs
|
Hook to build job configurations
|
entailment
|
def run_analysis(self, argv):
"""Run this analysis"""
args = self._parser.parse_args(argv)
exttype = splitext(args.infile)[-1]
if exttype in ['.fits', '.npy']:
castro_data = CastroData.create_from_sedfile(args.infile)
elif exttype in ['.yaml']:
castro_data = CastroData.create_from_yamlfile(args.infile)
else:
raise ValueError("Can not read file type %s for SED" % extype)
ylims = [1e-8, 1e-5]
plot = plotCastro(castro_data, ylims)
if args.outfile:
plot[0].savefig(args.outfile)
|
Run this analysis
|
entailment
|
def build_job_configs(self, args):
"""Hook to build job configurations
"""
job_configs = {}
ttype = args['ttype']
(targets_yaml, sim) = NAME_FACTORY.resolve_targetfile(args)
if targets_yaml is None:
return job_configs
targets = load_yaml(targets_yaml)
for target_name, target_list in targets.items():
for targ_prof in target_list:
name_keys = dict(target_type=ttype,
target_name=target_name,
profile=targ_prof,
fullpath=True)
targ_key = "%s_%s" % (target_name, targ_prof)
input_path = NAME_FACTORY.sedfile(**name_keys)
output_path = input_path.replace('.fits', '.png')
logfile = make_nfs_path(input_path.replace('.fits', '.log'))
job_config = dict(infile=input_path,
outfile=output_path,
logfile=logfile)
job_configs[targ_key] = job_config
return job_configs
|
Hook to build job configurations
|
entailment
|
def sed(self, name, **kwargs):
"""Generate a spectral energy distribution (SED) for a source. This
function will fit the normalization of the source in each
energy bin. By default the SED will be generated with the
analysis energy bins but a custom binning can be defined with
the ``loge_bins`` parameter.
Parameters
----------
name : str
Source name.
prefix : str
Optional string that will be prepended to all output files
(FITS and rendered images).
loge_bins : `~numpy.ndarray`
Sequence of energies in log10(E/MeV) defining the edges of
the energy bins. If this argument is None then the
analysis energy bins will be used. The energies in this
sequence must align with the bin edges of the underlying
analysis instance.
{options}
optimizer : dict
Dictionary that overrides the default optimizer settings.
Returns
-------
sed : dict
Dictionary containing output of the SED analysis.
"""
timer = Timer.create(start=True)
name = self.roi.get_source_by_name(name).name
# Create schema for method configuration
schema = ConfigSchema(self.defaults['sed'],
optimizer=self.defaults['optimizer'])
schema.add_option('prefix', '')
schema.add_option('outfile', None, '', str)
schema.add_option('loge_bins', None, '', list)
config = utils.create_dict(self.config['sed'],
optimizer=self.config['optimizer'])
config = schema.create_config(config, **kwargs)
self.logger.info('Computing SED for %s' % name)
o = self._make_sed(name, **config)
self.logger.info('Finished SED')
outfile = config.get('outfile', None)
if outfile is None:
outfile = utils.format_filename(self.workdir, 'sed',
prefix=[config['prefix'],
name.lower().replace(' ', '_')])
else:
outfile = os.path.join(self.workdir,
os.path.splitext(outfile)[0])
o['file'] = None
if config['write_fits']:
o['file'] = os.path.basename(outfile) + '.fits'
self._make_sed_fits(o, outfile + '.fits', **config)
if config['write_npy']:
np.save(outfile + '.npy', o)
if config['make_plots']:
self._plotter.make_sed_plots(o, **config)
self.logger.info('Execution time: %.2f s', timer.elapsed_time)
return o
|
Generate a spectral energy distribution (SED) for a source. This
function will fit the normalization of the source in each
energy bin. By default the SED will be generated with the
analysis energy bins but a custom binning can be defined with
the ``loge_bins`` parameter.
Parameters
----------
name : str
Source name.
prefix : str
Optional string that will be prepended to all output files
(FITS and rendered images).
loge_bins : `~numpy.ndarray`
Sequence of energies in log10(E/MeV) defining the edges of
the energy bins. If this argument is None then the
analysis energy bins will be used. The energies in this
sequence must align with the bin edges of the underlying
analysis instance.
{options}
optimizer : dict
Dictionary that overrides the default optimizer settings.
Returns
-------
sed : dict
Dictionary containing output of the SED analysis.
|
entailment
|
def run_analysis(self, argv):
"""Run this analysis"""
args = self._parser.parse_args(argv)
obs = BinnedAnalysis.BinnedObs(irfs=args.irfs,
expCube=args.expcube,
srcMaps=args.cmap,
binnedExpMap=args.bexpmap)
if args.no_psf:
performConvolution = False
else:
performConvolution = True
config = BinnedAnalysis.BinnedConfig(performConvolution=performConvolution)
like = BinnedAnalysis.BinnedAnalysis(obs,
optimizer='MINUIT',
srcModel=GtSrcmapsDiffuse.NULL_MODEL,
wmap=None,
config=config)
source_factory = pyLike.SourceFactory(obs.observation)
source_factory.readXml(args.srcmdl, BinnedAnalysis._funcFactory,
False, True, True)
source = source_factory.releaseSource(args.source)
try:
diffuse_source = pyLike.DiffuseSource.cast(source)
except TypeError:
diffuse_source = None
if diffuse_source is not None:
try:
diffuse_source.mapBaseObject().projmap().setExtrapolation(False)
except RuntimeError:
pass
like.logLike.saveSourceMap_partial(args.outfile, source, args.kmin, args.kmax)
if args.gzip:
os.system("gzip -9 %s" % args.outfile)
|
Run this analysis
|
entailment
|
def _write_xml(xmlfile, srcs):
"""Save the ROI model as an XML """
root = ElementTree.Element('source_library')
root.set('title', 'source_library')
for src in srcs:
src.write_xml(root)
with open(xmlfile, 'w') as output_file:
    output_file.write(utils.prettify_xml(root))
|
Save the ROI model as an XML
|
entailment
|
def _handle_component(sourcekey, comp_dict):
"""Make the source objects and write the xml for a component
"""
if comp_dict.comp_key is None:
fullkey = sourcekey
else:
fullkey = "%s_%s" % (sourcekey, comp_dict.comp_key)
srcdict = make_sources(fullkey, comp_dict)
if comp_dict.model_type == 'IsoSource':
print("Writing xml for %s to %s: %s %s" % (fullkey,
comp_dict.srcmdl_name,
comp_dict.model_type,
comp_dict.Spectral_Filename))
elif comp_dict.model_type == 'MapCubeSource':
print("Writing xml for %s to %s: %s %s" % (fullkey,
comp_dict.srcmdl_name,
comp_dict.model_type,
comp_dict.Spatial_Filename))
SrcmapsDiffuse_SG._write_xml(comp_dict.srcmdl_name, srcdict.values())
|
Make the source objects and write the xml for a component
|
entailment
|
def _make_xml_files(diffuse_comp_info_dict):
"""Make all the xml file for individual components
"""
try:
os.makedirs('srcmdls')
except OSError:
pass
for sourcekey in sorted(diffuse_comp_info_dict.keys()):
comp_info = diffuse_comp_info_dict[sourcekey]
if comp_info.components is None:
SrcmapsDiffuse_SG._handle_component(sourcekey, comp_info)
else:
for sub_comp_info in comp_info.components.values():
SrcmapsDiffuse_SG._handle_component(sourcekey, sub_comp_info)
|
Make all the XML files for the individual components
|
entailment
|
def build_job_configs(self, args):
"""Hook to build job configurations
"""
job_configs = {}
components = Component.build_from_yamlfile(args['comp'])
NAME_FACTORY.update_base_dict(args['data'])
ret_dict = make_diffuse_comp_info_dict(components=components,
library=args['library'],
basedir='.')
diffuse_comp_info_dict = ret_dict['comp_info_dict']
if args['make_xml']:
SrcmapsDiffuse_SG._make_xml_files(diffuse_comp_info_dict)
for diffuse_comp_info_key in sorted(diffuse_comp_info_dict.keys()):
diffuse_comp_info_value = diffuse_comp_info_dict[diffuse_comp_info_key]
no_psf = diffuse_comp_info_value.no_psf
for comp in components:
zcut = "zmax%i" % comp.zmax
key = comp.make_key('{ebin_name}_{evtype_name}')
if diffuse_comp_info_value.components is None:
sub_comp_info = diffuse_comp_info_value
else:
sub_comp_info = diffuse_comp_info_value.get_component_info(comp)
name_keys = dict(zcut=zcut,
sourcekey=sub_comp_info.sourcekey,
ebin=comp.ebin_name,
psftype=comp.evtype_name,
mktime='none',
coordsys=comp.coordsys,
irf_ver=NAME_FACTORY.irf_ver(),
fullpath=True)
kmin = 0
kmax = comp.enumbins + 1
outfile_base = NAME_FACTORY.srcmaps(**name_keys)
kstep = HPX_ORDER_TO_KSTEP[comp.hpx_order]
base_dict = dict(cmap=NAME_FACTORY.ccube(**name_keys),
expcube=NAME_FACTORY.ltcube(**name_keys),
irfs=NAME_FACTORY.irfs(**name_keys),
bexpmap=NAME_FACTORY.bexpcube(**name_keys),
srcmdl=sub_comp_info.srcmdl_name,
source=sub_comp_info.source_name,
no_psf=no_psf,
evtype=comp.evtype)
if kstep < 0:
    kstep = kmax
for k in range(kmin, kmax, kstep):
full_key = "%s_%s_%02i" % (diffuse_comp_info_key, key, k)
khi = min(kmax, k + kstep)
full_dict = base_dict.copy()
outfile = outfile_base.replace('.fits', '_%02i.fits' % k)
logfile = make_nfs_path(outfile_base.replace('.fits', '_%02i.log' % k))
full_dict.update(dict(outfile=outfile,
kmin=k, kmax=khi,
logfile=logfile))
job_configs[full_key] = full_dict
return job_configs
|
Hook to build job configurations
|
entailment
|
def fit_error_ellipse(tsmap, xy=None, dpix=3, zmin=None):
"""Fit a positional uncertainty ellipse from a TS map. The fit
will be performed over pixels in the vicinity of the peak pixel
with D < dpix OR z > zmin where D is the distance from the peak
pixel in pixel coordinates and z is the difference in amplitude
from the peak pixel.
Parameters
----------
tsmap : `~gammapy.maps.WcsMap`
xy : tuple
dpix : float
zmin : float
Returns
-------
fit : dict
Dictionary with fit results.
"""
if xy is None:
ix, iy = np.unravel_index(np.argmax(tsmap.data.T),
tsmap.data.T.shape)
else:
ix, iy = xy
pbfit0 = utils.fit_parabola(tsmap.data.T, ix, iy, dpix=1.5)
pbfit1 = utils.fit_parabola(tsmap.data.T, ix, iy, dpix=dpix,
zmin=zmin)
wcs = tsmap.geom.wcs
cdelt0 = tsmap.geom.wcs.wcs.cdelt[0]
cdelt1 = tsmap.geom.wcs.wcs.cdelt[1]
npix0 = tsmap.data.T.shape[0]
npix1 = tsmap.data.T.shape[1]
o = {}
o['fit_success'] = pbfit0['fit_success']
o['fit_inbounds'] = True
if pbfit0['fit_success']:
o['xpix'] = pbfit0['x0']
o['ypix'] = pbfit0['y0']
o['zoffset'] = pbfit0['z0']
else:
o['xpix'] = float(ix)
o['ypix'] = float(iy)
o['zoffset'] = tsmap.data.T[ix, iy]
if pbfit1['fit_success']:
sigmax = 2.0**0.5 * pbfit1['sigmax'] * np.abs(cdelt0)
sigmay = 2.0**0.5 * pbfit1['sigmay'] * np.abs(cdelt1)
theta = pbfit1['theta']
sigmax = min(sigmax, np.abs(2.0 * npix0 * cdelt0))
sigmay = min(sigmay, np.abs(2.0 * npix1 * cdelt1))
elif pbfit0['fit_success']:
sigmax = 2.0**0.5 * pbfit0['sigmax'] * np.abs(cdelt0)
sigmay = 2.0**0.5 * pbfit0['sigmay'] * np.abs(cdelt1)
theta = pbfit0['theta']
sigmax = min(sigmax, np.abs(2.0 * npix0 * cdelt0))
sigmay = min(sigmay, np.abs(2.0 * npix1 * cdelt1))
else:
pix_area = np.abs(cdelt0) * np.abs(cdelt1)
mask = get_region_mask(tsmap.data, 1.0, (ix, iy))
area = np.sum(mask) * pix_area
sigmax = (area / np.pi)**0.5
sigmay = (area / np.pi)**0.5
theta = 0.0
if (o['xpix'] <= 0 or o['xpix'] >= npix0 - 1 or
o['ypix'] <= 0 or o['ypix'] >= npix1 - 1):
o['fit_inbounds'] = False
o['xpix'] = float(ix)
o['ypix'] = float(iy)
o['peak_offset'] = np.sqrt((float(ix) - o['xpix'])**2 +
(float(iy) - o['ypix'])**2)
skydir = SkyCoord.from_pixel(o['xpix'], o['ypix'], wcs)
sigma = (sigmax * sigmay)**0.5
r68 = 2.30**0.5 * sigma
r95 = 5.99**0.5 * sigma
r99 = 9.21**0.5 * sigma
if sigmax < sigmay:
o['pos_err_semimajor'] = sigmay
o['pos_err_semiminor'] = sigmax
o['theta'] = np.fmod(2 * np.pi + np.pi / 2. + theta, np.pi)
else:
o['pos_err_semimajor'] = sigmax
o['pos_err_semiminor'] = sigmay
o['theta'] = np.fmod(2 * np.pi + theta, np.pi)
o['pos_angle'] = np.degrees(o['theta'])
o['pos_err'] = sigma
o['pos_r68'] = r68
o['pos_r95'] = r95
o['pos_r99'] = r99
o['ra'] = skydir.icrs.ra.deg
o['dec'] = skydir.icrs.dec.deg
o['glon'] = skydir.galactic.l.deg
o['glat'] = skydir.galactic.b.deg
a = o['pos_err_semimajor']
b = o['pos_err_semiminor']
o['pos_ecc'] = np.sqrt(1 - b**2 / a**2)
o['pos_ecc2'] = np.sqrt(a**2 / b**2 - 1)
o['skydir'] = skydir
if tsmap.geom.coordsys == 'GAL':
gal_cov = utils.ellipse_to_cov(o['pos_err_semimajor'],
o['pos_err_semiminor'],
o['theta'])
theta_cel = wcs_utils.get_cel_to_gal_angle(skydir)
cel_cov = utils.ellipse_to_cov(o['pos_err_semimajor'],
o['pos_err_semiminor'],
o['theta'] + theta_cel)
else:
cel_cov = utils.ellipse_to_cov(o['pos_err_semimajor'],
o['pos_err_semiminor'],
o['theta'])
theta_gal = 2 * np.pi - wcs_utils.get_cel_to_gal_angle(skydir)
gal_cov = utils.ellipse_to_cov(o['pos_err_semimajor'],
o['pos_err_semiminor'],
o['theta'] + theta_gal)
o['pos_gal_cov'] = gal_cov
o['pos_cel_cov'] = cel_cov
o['pos_gal_corr'] = utils.cov_to_correlation(gal_cov)
o['pos_cel_corr'] = utils.cov_to_correlation(cel_cov)
o['glon_err'], o['glat_err'] = np.sqrt(
gal_cov[0, 0]), np.sqrt(gal_cov[1, 1])
o['ra_err'], o['dec_err'] = np.sqrt(cel_cov[0, 0]), np.sqrt(cel_cov[1, 1])
return o
|
Fit a positional uncertainty ellipse from a TS map. The fit
will be performed over pixels in the vicinity of the peak pixel
with D < dpix OR z > zmin where D is the distance from the peak
pixel in pixel coordinates and z is the difference in amplitude
from the peak pixel.
Parameters
----------
tsmap : `~gammapy.maps.WcsMap`
xy : tuple
dpix : float
zmin : float
Returns
-------
fit : dict
Dictionary with fit results.
|
entailment
|
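A quick check on the containment scaling used above: the hard-coded factors 2.30, 5.99, and 9.21 are chi-square quantiles for 2 degrees of freedom at roughly 68%, 95%, and 99% confidence. A minimal sketch reproducing them, assuming only numpy and scipy; the sigma value is made up:

import numpy as np
from scipy.stats import chi2

sigma = 0.05  # hypothetical symmetric 1-sigma positional error in degrees
for cl in (0.68, 0.95, 0.99):
    # radius containing a fraction cl of a symmetric 2-D Gaussian of width sigma
    r = np.sqrt(chi2.ppf(cl, df=2)) * sigma
    print(cl, r)

chi2.ppf(0.95, df=2) evaluates to 5.99 and chi2.ppf(0.99, df=2) to 9.21, matching the constants above to rounding (2.30 corresponds to 68.3% containment).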
def find_peaks(input_map, threshold, min_separation=0.5):
"""Find peaks in a 2-D map object that have amplitude larger than
`threshold` and lie a distance at least `min_separation` from another
peak of larger amplitude. The implementation of this method uses
`~scipy.ndimage.filters.maximum_filter`.
Parameters
----------
input_map : `~gammapy.maps.WcsMap`
threshold : float
min_separation : float
Radius of region size in degrees. Sets the minimum allowable
separation between peaks.
Returns
-------
peaks : list
List of dictionaries containing the location and amplitude of
each peak.
"""
data = input_map.data
cdelt = max(input_map.geom.wcs.wcs.cdelt)
min_separation = max(min_separation, 2 * cdelt)
region_size_pix = int(min_separation / cdelt)
region_size_pix = max(3, region_size_pix)
deltaxy = utils.make_pixel_distance(region_size_pix * 2 + 3)
deltaxy *= max(input_map.geom.wcs.wcs.cdelt)
region = deltaxy < min_separation
local_max = maximum_filter(data, footprint=region) == data
local_max[data < threshold] = False
labeled, num_objects = scipy.ndimage.label(local_max)
slices = scipy.ndimage.find_objects(labeled)
peaks = []
for s in slices:
skydir = SkyCoord.from_pixel(s[1].start, s[0].start,
input_map.geom.wcs)
peaks.append({'ix': s[1].start,
'iy': s[0].start,
'skydir': skydir,
'amp': data[s[0].start, s[1].start]})
return sorted(peaks, key=lambda t: t['amp'], reverse=True)
|
Find peaks in a 2-D map object that have amplitude larger than
`threshold` and lie a distance at least `min_separation` from another
peak of larger amplitude. The implementation of this method uses
`~scipy.ndimage.filters.maximum_filter`.
Parameters
----------
input_map : `~gammapy.maps.WcsMap`
threshold : float
min_separation : float
Radius of region size in degrees. Sets the minimum allowable
separation between peaks.
Returns
-------
peaks : list
List of dictionaries containing the location and amplitude of
each peak.
|
entailment
|
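The core of the peak detection can be exercised on a bare numpy array, without any map geometry. A minimal sketch, assuming scipy is installed; the data, footprint size, and threshold are made up for illustration:

import numpy as np
from scipy.ndimage import maximum_filter, label, find_objects

rng = np.random.default_rng(0)
data = rng.random((64, 64))
footprint = np.ones((5, 5), dtype=bool)  # stand-in for the distance-based region
# a pixel is a local maximum if it equals the maximum over its neighborhood
local_max = maximum_filter(data, footprint=footprint) == data
local_max[data < 0.95] = False  # amplitude threshold
labeled, num_objects = label(local_max)
for s in find_objects(labeled):
    print(s[1].start, s[0].start, data[s[0].start, s[1].start])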
def estimate_pos_and_err_parabolic(tsvals):
"""Solve for the position and uncertainty of source in one dimension
assuming that you are near the maximum and the errors are parabolic
Parameters
----------
tsvals : `~numpy.ndarray`
The TS values at the maximum TS, and for each pixel on either side
Returns
-------
The position and uncertainty of the source, in pixel units
w.r.t. the center of the maximum pixel
"""
a = tsvals[2] - tsvals[0]
bc = 2. * tsvals[1] - tsvals[0] - tsvals[2]
s = a / (2 * bc)
err = np.sqrt(2 / bc)
return s, err
|
Solve for the position and uncertainty of source in one dimension
assuming that you are near the maximum and the errors are parabolic
Parameters
----------
tsvals : `~numpy.ndarray`
The TS values at the maximum TS, and for each pixel on either side
Returns
-------
The position and uncertainty of the source, in pixel units
w.r.t. the center of the maximum pixel
|
entailment
|
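The expressions follow from fitting a parabola through the three samples at x = -1, 0, 1: the vertex lies at (t2 - t0) / (2 * (2*t1 - t0 - t2)), and the 1-sigma width comes from requiring the TS to drop by one unit from the vertex. A worked example with made-up TS values:

tsvals = [96.0, 100.0, 98.0]
a = tsvals[2] - tsvals[0]                     # 2.0
bc = 2. * tsvals[1] - tsvals[0] - tsvals[2]   # 6.0
s = a / (2 * bc)                              # ~0.167 pixel to the right of the peak pixel
err = (2 / bc) ** 0.5                         # ~0.577 pixel 1-sigma uncertainty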
def refine_peak(tsmap, pix):
"""Solve for the position and uncertainty of source assuming that you
are near the maximum and the errors are parabolic
Parameters
----------
tsmap : `~numpy.ndarray`
Array with the TS data.
Returns
-------
The position and uncertainty of the source, in pixel units
w.r.t. the center of the maximum pixel
"""
# Note the annoying WCS convention
nx = tsmap.shape[1]
ny = tsmap.shape[0]
if pix[0] == 0 or pix[0] == (nx - 1):
xval = float(pix[0])
xerr = -1
else:
x_arr = tsmap[pix[1], pix[0] - 1:pix[0] + 2]
xval, xerr = estimate_pos_and_err_parabolic(x_arr)
xval += float(pix[0])
if pix[1] == 0 or pix[1] == (ny - 1):
yval = float(pix[1])
yerr = -1
else:
y_arr = tsmap[pix[1] - 1:pix[1] + 2, pix[0]]
yval, yerr = estimate_pos_and_err_parabolic(y_arr)
yval += float(pix[1])
return (xval, yval), (xerr, yerr)
|
Solve for the position and uncertainty of source assuming that you
are near the maximum and the errors are parabolic
Parameters
----------
tsmap : `~numpy.ndarray`
Array with the TS data.
Returns
-------
The position and uncertainty of the source, in pixel units
w.r.t. the center of the maximum pixel
|
entailment
|
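Usage follows the (x, y) pixel convention noted in the code. A minimal sketch on a synthetic map; the Gaussian bump parameters are made up:

import numpy as np

yy, xx = np.mgrid[0:21, 0:21]
tsmap = 100.0 * np.exp(-((xx - 10.3)**2 + (yy - 8.7)**2) / 8.0)
iy, ix = np.unravel_index(np.argmax(tsmap), tsmap.shape)
(xval, yval), (xerr, yerr) = refine_peak(tsmap, (ix, iy))
# (xval, yval) lands close to (10.3, 8.7); a peak on the map edge returns err = -1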
def create_source_table(scan_shape):
"""Create an empty source table.
Returns
-------
tab : `~astropy.table.Table`
"""
cols_dict = collections.OrderedDict()
cols_dict['Source_Name'] = dict(dtype='S48', format='%s')
cols_dict['name'] = dict(dtype='S48', format='%s')
cols_dict['class'] = dict(dtype='S32', format='%s')
cols_dict['SpectrumType'] = dict(dtype='S32', format='%s')
cols_dict['SpatialType'] = dict(dtype='S32', format='%s')
cols_dict['SourceType'] = dict(dtype='S32', format='%s')
cols_dict['SpatialModel'] = dict(dtype='S32', format='%s')
cols_dict['RAJ2000'] = dict(dtype='f8', format='%.3f', unit='deg')
cols_dict['DEJ2000'] = dict(dtype='f8', format='%.3f', unit='deg')
cols_dict['GLON'] = dict(dtype='f8', format='%.3f', unit='deg')
cols_dict['GLAT'] = dict(dtype='f8', format='%.3f', unit='deg')
cols_dict['ts'] = dict(dtype='f8', format='%.3f')
cols_dict['loglike'] = dict(dtype='f8', format='%.3f')
cols_dict['npred'] = dict(dtype='f8', format='%.3f')
cols_dict['offset'] = dict(dtype='f8', format='%.3f', unit='deg')
cols_dict['offset_ra'] = dict(dtype='f8', format='%.3f', unit='deg')
cols_dict['offset_dec'] = dict(dtype='f8', format='%.3f', unit='deg')
cols_dict['offset_glon'] = dict(dtype='f8', format='%.3f', unit='deg')
cols_dict['offset_glat'] = dict(dtype='f8', format='%.3f', unit='deg')
cols_dict['offset_roi_edge'] = dict(dtype='f8', format='%.3f', unit='deg')
cols_dict['pivot_energy'] = dict(dtype='f8', format='%.3f', unit='MeV')
cols_dict['flux_scan'] = dict(dtype='f8', format='%.3f',
shape=scan_shape)
cols_dict['eflux_scan'] = dict(dtype='f8', format='%.3f',
shape=scan_shape)
cols_dict['norm_scan'] = dict(dtype='f8', format='%.3f',
shape=scan_shape)
cols_dict['dloglike_scan'] = dict(dtype='f8', format='%.3f',
shape=scan_shape)
cols_dict['loglike_scan'] = dict(dtype='f8', format='%.3f',
shape=scan_shape)
# Add source dictionary columns
for k, v in sorted(defaults.source_output.items()):
        if k not in cols_dict:
if v[2] == float:
cols_dict[k] = dict(dtype='f8', format='%f')
elif k == 'Spectrum_Filename' or k == 'Spatial_Filename':
cols_dict[k] = dict(dtype='S128', format='%s')
elif v[2] == str:
cols_dict[k] = dict(dtype='S32', format='%s')
cols_dict['param_names'] = dict(dtype='S32', format='%s', shape=(10,))
cols_dict['param_values'] = dict(dtype='f8', format='%f', shape=(10,))
cols_dict['param_errors'] = dict(dtype='f8', format='%f', shape=(10,))
# Catalog Parameters
cols_dict['Flux_Density'] = dict(
dtype='f8', format='%.5g', unit='1 / (MeV cm2 s)')
cols_dict['Spectral_Index'] = dict(dtype='f8', format='%.3f')
cols_dict['Pivot_Energy'] = dict(dtype='f8', format='%.3f', unit='MeV')
cols_dict['beta'] = dict(dtype='f8', format='%.3f')
cols_dict['Exp_Index'] = dict(dtype='f8', format='%.3f')
cols_dict['Cutoff'] = dict(dtype='f8', format='%.3f', unit='MeV')
cols_dict['Expfactor'] = dict(dtype='f8', format='%.3f')
cols_dict['Conf_68_PosAng'] = dict(dtype='f8', format='%.3f', unit='deg')
cols_dict['Conf_68_SemiMajor'] = dict(
dtype='f8', format='%.3f', unit='deg')
cols_dict['Conf_68_SemiMinor'] = dict(
dtype='f8', format='%.3f', unit='deg')
cols_dict['Conf_95_PosAng'] = dict(dtype='f8', format='%.3f', unit='deg')
cols_dict['Conf_95_SemiMajor'] = dict(
dtype='f8', format='%.3f', unit='deg')
cols_dict['Conf_95_SemiMinor'] = dict(
dtype='f8', format='%.3f', unit='deg')
for t in ['eflux', 'eflux100', 'eflux1000', 'eflux10000']:
cols_dict[t] = dict(dtype='f8', format='%.3f', unit='MeV / (cm2 s)')
cols_dict[t + '_err'] = dict(dtype='f8',
format='%.3f', unit='MeV / (cm2 s)')
for t in ['eflux_ul95', 'eflux100_ul95', 'eflux1000_ul95', 'eflux10000_ul95']:
cols_dict[t] = dict(dtype='f8', format='%.3f', unit='MeV / (cm2 s)')
for t in ['flux', 'flux100', 'flux1000', 'flux10000']:
cols_dict[t] = dict(dtype='f8', format='%.3f', unit='1 / (cm2 s)')
cols_dict[t + '_err'] = dict(dtype='f8',
format='%.3f', unit='1 / (cm2 s)')
for t in ['flux_ul95', 'flux100_ul95', 'flux1000_ul95', 'flux10000_ul95']:
cols_dict[t] = dict(dtype='f8', format='%.3f', unit='1 / (cm2 s)')
for t in ['dnde', 'dnde100', 'dnde1000', 'dnde10000']:
cols_dict[t] = dict(dtype='f8', format='%.3f', unit='1 / (MeV cm2 s)')
cols_dict[t + '_err'] = dict(dtype='f8',
format='%.3f', unit='1 / (MeV cm2 s)')
cols = [Column(name=k, **v) for k, v in cols_dict.items()]
tab = Table(cols)
return tab
|
Create an empty source table.
Returns
-------
tab : `~astropy.table.Table`
|
entailment
|
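The scan_shape argument fixes the length of the likelihood-scan columns. A minimal usage sketch; the shape is arbitrary:

tab = create_source_table(scan_shape=(9,))
print(len(tab))                # 0: the table starts empty
print(tab['flux_scan'].shape)  # (0, 9)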
def get_skydir_distance_mask(src_skydir, skydir, dist, min_dist=None,
square=False, coordsys='CEL'):
"""Retrieve sources within a certain angular distance of an
(ra,dec) coordinate. This function supports two types of
geometric selections: circular (square=False) and square
(square=True). The circular selection finds all sources with a given
angular distance of the target position. The square selection
finds sources within an ROI-like region of size R x R where R
= 2 x dist.
Parameters
----------
src_skydir : `~astropy.coordinates.SkyCoord`
Array of sky directions.
skydir : `~astropy.coordinates.SkyCoord`
Sky direction with respect to which the selection will be applied.
dist : float
Maximum distance in degrees from the sky coordinate.
square : bool
Choose whether to apply a circular or square selection.
coordsys : str
Coordinate system to use when applying a selection with square=True.
"""
if dist is None:
dist = 180.
if not square:
dtheta = src_skydir.separation(skydir).rad
elif coordsys == 'CEL':
dtheta = get_linear_dist(skydir,
src_skydir.ra.rad,
src_skydir.dec.rad,
coordsys=coordsys)
elif coordsys == 'GAL':
dtheta = get_linear_dist(skydir,
src_skydir.galactic.l.rad,
src_skydir.galactic.b.rad,
coordsys=coordsys)
else:
raise Exception('Unrecognized coordinate system: %s' % coordsys)
msk = (dtheta < np.radians(dist))
if min_dist is not None:
msk &= (dtheta > np.radians(min_dist))
return msk
|
Retrieve sources within a certain angular distance of an
(ra,dec) coordinate. This function supports two types of
geometric selections: circular (square=False) and square
(square=True). The circular selection finds all sources with a given
angular distance of the target position. The square selection
finds sources within an ROI-like region of size R x R where R
= 2 x dist.
Parameters
----------
src_skydir : `~astropy.coordinates.SkyCoord`
Array of sky directions.
skydir : `~astropy.coordinates.SkyCoord`
Sky direction with respect to which the selection will be applied.
dist : float
Maximum distance in degrees from the sky coordinate.
square : bool
Choose whether to apply a circular or square selection.
coordsys : str
Coordinate system to use when applying a selection with square=True.
|
entailment
|
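A minimal sketch of the circular selection; the coordinates and radius are made up:

import numpy as np
import astropy.units as u
from astropy.coordinates import SkyCoord

src_skydir = SkyCoord(ra=[10., 11., 40.] * u.deg, dec=[-2., 0., 5.] * u.deg)
center = SkyCoord(ra=10. * u.deg, dec=0. * u.deg)
msk = get_skydir_distance_mask(src_skydir, center, dist=5.0)
# msk -> array([ True,  True, False]); only the first two sources lie within 5 deg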
def spectral_pars_from_catalog(cat):
"""Create spectral parameters from 3FGL catalog columns."""
spectrum_type = cat['SpectrumType']
pars = get_function_defaults(cat['SpectrumType'])
par_idxs = {k: i for i, k in
enumerate(get_function_par_names(cat['SpectrumType']))}
for k in pars:
pars[k]['value'] = cat['param_values'][par_idxs[k]]
if spectrum_type == 'PowerLaw':
pars['Index']['value'] *= -1.0
pars['Index']['scale'] = -1.0
pars['Scale']['scale'] = 1.0
pars['Index']['max'] = max(5.0, pars['Index']['value'] + 1.0)
pars['Index']['min'] = min(0.0, pars['Index']['value'] - 1.0)
pars['Prefactor'] = make_parameter_dict(pars['Prefactor'])
pars['Scale'] = make_parameter_dict(pars['Scale'], True, False)
pars['Index'] = make_parameter_dict(pars['Index'], False, False)
elif spectrum_type == 'LogParabola':
pars['norm'] = make_parameter_dict(pars['norm'], False, True)
pars['Eb'] = make_parameter_dict(pars['Eb'], True, False)
pars['alpha'] = make_parameter_dict(pars['alpha'], False, False)
pars['beta'] = make_parameter_dict(pars['beta'], False, False)
elif spectrum_type == 'PLSuperExpCutoff':
pars['Index1']['value'] *= -1.0
pars['Index1']['scale'] = -1.0
pars['Index2']['scale'] = 1.0
pars['Prefactor'] = make_parameter_dict(pars['Prefactor'])
pars['Scale'] = make_parameter_dict(pars['Scale'], True, False)
pars['Index1'] = make_parameter_dict(pars['Index1'], False, False)
pars['Index2'] = make_parameter_dict(pars['Index2'], False, False)
pars['Cutoff'] = make_parameter_dict(pars['Cutoff'], False, True)
elif spectrum_type == 'PLSuperExpCutoff2':
pars['Index1']['value'] *= -1.0
pars['Index1']['scale'] = -1.0
pars['Index2']['scale'] = 1.0
pars['Prefactor'] = make_parameter_dict(pars['Prefactor'])
pars['Scale'] = make_parameter_dict(pars['Scale'], True, False)
pars['Index1'] = make_parameter_dict(pars['Index1'], False, False)
pars['Index2'] = make_parameter_dict(pars['Index2'], False, False)
pars['Expfactor'] = make_parameter_dict(pars['Expfactor'], False, True)
else:
        raise Exception('Unsupported spectral type: ' + spectrum_type)
return pars
|
Create spectral parameters from 3FGL catalog columns.
|
entailment
|
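The sign handling for the index parameters reflects the Science Tools XML convention, in which a PowerLaw evaluates as Prefactor * (E / Scale)**(value * scale). Assuming param_values stores the signed exponent, flipping the sign of the value while setting scale = -1 keeps the fitted number positive without changing the model. A quick check with made-up numbers:

exponent = -2.1               # signed exponent as stored in param_values
value = exponent * -1.0       # 2.1, the positive photon index that gets fitted
scale = -1.0
assert value * scale == exponent  # the model sees the original exponent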
def is_free(self):
""" returns True if any of the spectral model parameters is set to free, else False
"""
return bool(np.array([int(value.get("free", False)) for key, value in self.spectral_pars.items()]).sum())
|
Returns True if any of the spectral model parameters are set to free, otherwise False.
|
entailment
|
def set_position(self, skydir):
"""
Set the position of the source.
Parameters
----------
skydir : `~astropy.coordinates.SkyCoord`
"""
if not isinstance(skydir, SkyCoord):
skydir = SkyCoord(ra=skydir[0], dec=skydir[1], unit=u.deg)
if not skydir.isscalar:
skydir = np.ravel(skydir)[0]
radec = np.array([skydir.icrs.ra.deg, skydir.icrs.dec.deg])
self._set_radec(radec)
|
Set the position of the source.
Parameters
----------
skydir : `~astropy.coordinates.SkyCoord`
|
entailment
|
def skydir(self):
"""Return a SkyCoord representation of the source position.
Returns
-------
skydir : `~astropy.coordinates.SkyCoord`
"""
return SkyCoord(self.radec[0] * u.deg, self.radec[1] * u.deg)
|
Return a SkyCoord representation of the source position.
Returns
-------
skydir : `~astropy.coordinates.SkyCoord`
|
entailment
|
def create_from_dict(cls, src_dict, roi_skydir=None, rescale=False):
"""Create a source object from a python dictionary.
Parameters
----------
src_dict : dict
Dictionary defining the properties of the source.
"""
src_dict = copy.deepcopy(src_dict)
src_dict.setdefault('SpatialModel', 'PointSource')
src_dict.setdefault('Spectrum_Filename', None)
src_dict.setdefault('SpectrumType', 'PowerLaw')
src_dict['SpatialType'] = get_spatial_type(src_dict['SpatialModel'])
spectrum_type = src_dict['SpectrumType']
spatial_type = src_dict['SpatialType']
spectral_pars = src_dict.pop('spectral_pars', {})
spatial_pars = src_dict.pop('spatial_pars', {})
if not spectral_pars:
spectral_pars = extract_pars_from_dict(spectrum_type, src_dict)
norm_par_name = get_function_norm_par_name(spectrum_type)
if norm_par_name is not None:
spectral_pars[norm_par_name].setdefault('free', True)
if not spatial_pars:
spatial_pars = extract_pars_from_dict(spatial_type, src_dict)
for k in ['RA', 'DEC', 'Prefactor']:
if k in spatial_pars:
del spatial_pars[k]
spectral_pars = create_pars_from_dict(spectrum_type, spectral_pars,
rescale)
spatial_pars = create_pars_from_dict(spatial_type, spatial_pars,
False)
if 'file' in src_dict:
src_dict['Spectrum_Filename'] = src_dict.pop('file')
if spectrum_type == 'DMFitFunction' and src_dict['Spectrum_Filename'] is None:
src_dict['Spectrum_Filename'] = os.path.join('$FERMIPY_DATA_DIR',
'gammamc_dif.dat')
src_dict['spectral_pars'] = cast_pars_dict(spectral_pars)
src_dict['spatial_pars'] = cast_pars_dict(spatial_pars)
if 'name' in src_dict:
name = src_dict['name']
src_dict['Source_Name'] = src_dict.pop('name')
elif 'Source_Name' in src_dict:
name = src_dict['Source_Name']
else:
raise Exception('Source name undefined.')
skydir = wcs_utils.get_target_skydir(src_dict, roi_skydir)
src_dict['RAJ2000'] = skydir.ra.deg
src_dict['DEJ2000'] = skydir.dec.deg
radec = np.array([skydir.ra.deg, skydir.dec.deg])
return cls(name, src_dict, radec=radec)
|
Create a source object from a python dictionary.
Parameters
----------
src_dict : dict
Dictionary defining the properties of the source.
|
entailment
|
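A minimal usage sketch, assuming the classmethod is invoked on a Source-like class and that the sky direction can be given via 'ra'/'dec' keys; all values are made up:

src_dict = {
    'name': 'testsource',
    'ra': 120.0, 'dec': -30.0,
    'SpectrumType': 'PowerLaw',
}
src = Source.create_from_dict(src_dict)
# SpatialModel defaults to 'PointSource' and the normalization parameter
# of the spectral model (Prefactor for PowerLaw) is freed by default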
def create_from_xmlfile(cls, xmlfile, extdir=None):
"""Create a Source object from an XML file.
Parameters
----------
xmlfile : str
Path to XML file.
extdir : str
Path to the extended source archive.
"""
root = ElementTree.ElementTree(file=xmlfile).getroot()
srcs = root.findall('source')
if len(srcs) == 0:
raise Exception('No sources found.')
return cls.create_from_xml(srcs[0], extdir=extdir)
|
Create a Source object from an XML file.
Parameters
----------
xmlfile : str
Path to XML file.
extdir : str
Path to the extended source archive.
|
entailment
|
def create_from_xml(root, extdir=None):
"""Create a Source object from an XML node.
Parameters
----------
root : `~xml.etree.ElementTree.Element`
XML node containing the source.
extdir : str
Path to the extended source archive.
"""
src_type = root.attrib['type']
spec = utils.load_xml_elements(root, 'spectrum')
spectral_pars = utils.load_xml_elements(root, 'spectrum/parameter')
spectral_type = spec['type']
spectral_pars = cast_pars_dict(spectral_pars)
spat = {}
spatial_pars = {}
nested_sources = []
if src_type == 'CompositeSource':
spatial_type = 'CompositeSource'
source_library = root.findall('source_library')[0]
for node in source_library.findall('source'):
nested_sources += [Source.create_from_xml(node, extdir=extdir)]
else:
spat = utils.load_xml_elements(root, 'spatialModel')
spatial_pars = utils.load_xml_elements(
root, 'spatialModel/parameter')
spatial_pars = cast_pars_dict(spatial_pars)
spatial_type = spat['type']
xml_dict = copy.deepcopy(root.attrib)
src_dict = {'catalog': xml_dict}
src_dict['Source_Name'] = xml_dict['name']
src_dict['SpectrumType'] = spectral_type
src_dict['SpatialType'] = spatial_type
src_dict['SourceType'] = src_type
src_dict['Spatial_Filename'] = None
src_dict['Spectrum_Filename'] = None
if 'file' in spat:
src_dict['Spatial_Filename'] = utils.xmlpath_to_path(spat['file'])
if not os.path.isfile(src_dict['Spatial_Filename']) \
and extdir is not None:
src_dict['Spatial_Filename'] = \
os.path.join(extdir, 'Templates',
src_dict['Spatial_Filename'])
if 'file' in spec:
src_dict['Spectrum_Filename'] = utils.xmlpath_to_path(spec['file'])
if src_type == 'PointSource':
src_dict['SpatialModel'] = 'PointSource'
elif src_type == 'CompositeSource':
src_dict['SpatialModel'] = 'CompositeSource'
elif spatial_type == 'SpatialMap':
src_dict['SpatialModel'] = 'SpatialMap'
else:
src_dict['SpatialModel'] = spatial_type
if src_type == 'PointSource' or \
spatial_type in ['SpatialMap', 'RadialGaussian', 'RadialDisk']:
if 'RA' in xml_dict:
src_dict['RAJ2000'] = float(xml_dict['RA'])
src_dict['DEJ2000'] = float(xml_dict['DEC'])
elif 'RA' in spatial_pars:
src_dict['RAJ2000'] = float(spatial_pars['RA']['value'])
src_dict['DEJ2000'] = float(spatial_pars['DEC']['value'])
else:
try:
skydir = wcs_utils.get_map_skydir(os.path.expandvars(
src_dict['Spatial_Filename']))
except Exception:
skydir = hpx_utils.get_map_skydir(os.path.expandvars(
src_dict['Spatial_Filename']))
src_dict['RAJ2000'] = skydir.ra.deg
src_dict['DEJ2000'] = skydir.dec.deg
radec = np.array([src_dict['RAJ2000'], src_dict['DEJ2000']])
src_dict['spectral_pars'] = spectral_pars
src_dict['spatial_pars'] = spatial_pars
return Source(src_dict['Source_Name'],
src_dict, radec=radec)
elif src_type == 'DiffuseSource' and spatial_type == 'ConstantValue':
return IsoSource(src_dict['Source_Name'],
{'Spectrum_Filename': spec['file'],
'spectral_pars': spectral_pars,
'spatial_pars': spatial_pars})
elif src_type == 'DiffuseSource' and spatial_type == 'MapCubeFunction':
return MapCubeSource(src_dict['Source_Name'],
{'Spatial_Filename': spat['file'],
'SpectrumType': spectral_type,
'spectral_pars': spectral_pars,
'spatial_pars': spatial_pars})
elif src_type == 'CompositeSource':
return CompositeSource(src_dict['Source_Name'],
{'SpectrumType': spectral_type,
'nested_sources': nested_sources})
else:
raise Exception(
'Unrecognized type for source: %s %s' % (src_dict['Source_Name'], src_type))
|
Create a Source object from an XML node.
Parameters
----------
root : `~xml.etree.ElementTree.Element`
XML node containing the source.
extdir : str
Path to the extended source archive.
|
entailment
|
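The node layout this parser expects follows the Fermi Science Tools source-library format. A minimal sketch building a point-source node in memory; the parameter values are made up:

from xml.etree import ElementTree

xml = '''
<source name="testsrc" type="PointSource">
  <spectrum type="PowerLaw">
    <parameter name="Prefactor" value="1e-12" scale="1.0" min="1e-5" max="1e5" free="1"/>
    <parameter name="Index" value="2.0" scale="-1.0" min="0.0" max="5.0" free="1"/>
    <parameter name="Scale" value="1000.0" scale="1.0" min="30.0" max="5e5" free="0"/>
  </spectrum>
  <spatialModel type="SkyDirFunction">
    <parameter name="RA" value="120.0" scale="1.0" min="-360." max="360." free="0"/>
    <parameter name="DEC" value="-30.0" scale="1.0" min="-90." max="90." free="0"/>
  </spatialModel>
</source>
'''
src = Source.create_from_xml(ElementTree.fromstring(xml))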
def write_xml(self, root):
"""Write this source to an XML node."""
if not self.extended:
try:
source_element = utils.create_xml_element(root, 'source',
dict(name=self['Source_Name'],
type='PointSource'))
except TypeError as msg:
                print(self['Source_Name'], self)
raise TypeError(msg)
spat_el = ElementTree.SubElement(source_element, 'spatialModel')
spat_el.set('type', 'SkyDirFunction')
elif self['SpatialType'] == 'SpatialMap':
source_element = utils.create_xml_element(root, 'source',
dict(name=self['Source_Name'],
type='DiffuseSource'))
filename = utils.path_to_xmlpath(self['Spatial_Filename'])
spat_el = utils.create_xml_element(source_element, 'spatialModel',
dict(map_based_integral='True',
type='SpatialMap',
file=filename))
else:
source_element = utils.create_xml_element(root, 'source',
dict(name=self['Source_Name'],
type='DiffuseSource'))
spat_el = utils.create_xml_element(source_element, 'spatialModel',
dict(type=self['SpatialType']))
for k, v in self.spatial_pars.items():
utils.create_xml_element(spat_el, 'parameter', v)
el = ElementTree.SubElement(source_element, 'spectrum')
stype = self['SpectrumType'].strip()
el.set('type', stype)
if self['Spectrum_Filename'] is not None:
filename = utils.path_to_xmlpath(self['Spectrum_Filename'])
el.set('file', filename)
for k, v in self.spectral_pars.items():
utils.create_xml_element(el, 'parameter', v)
|
Write this source to an XML node.
|
entailment
|
def clear(self):
"""Clear the contents of the ROI."""
self._srcs = []
self._diffuse_srcs = []
self._src_dict = collections.defaultdict(list)
self._src_radius = []
|
Clear the contents of the ROI.
|
entailment
|
def _create_diffuse_src_from_xml(self, config, src_type='FileFunction'):
"""Load sources from an XML file.
"""
diffuse_xmls = config.get('diffuse_xml')
srcs_out = []
for diffuse_xml in diffuse_xmls:
srcs_out += self.load_xml(diffuse_xml, coordsys=config.get('coordsys', 'CEL'))
return srcs_out
|
Load sources from an XML file.
|
entailment
|
def create_source(self, name, src_dict, build_index=True,
merge_sources=True, rescale=True):
"""Add a new source to the ROI model from a dictionary or an
existing source object.
Parameters
----------
name : str
src_dict : dict or `~fermipy.roi_model.Source`
Returns
-------
src : `~fermipy.roi_model.Source`
"""
src_dict = copy.deepcopy(src_dict)
if isinstance(src_dict, dict):
src_dict['name'] = name
src = Model.create_from_dict(src_dict, self.skydir,
rescale=rescale)
else:
src = src_dict
src.set_name(name)
if isinstance(src, Source):
src.set_roi_direction(self.skydir)
src.set_roi_geom(self.geom)
self.load_source(src, build_index=build_index,
merge_sources=merge_sources)
return self.get_source_by_name(name)
|
Add a new source to the ROI model from a dictionary or an
existing source object.
Parameters
----------
name : str
src_dict : dict or `~fermipy.roi_model.Source`
Returns
-------
src : `~fermipy.roi_model.Source`
|
entailment
|
def load_sources(self, sources):
"""Delete all sources in the ROI and load the input source list."""
self.clear()
for s in sources:
if isinstance(s, dict):
s = Model.create_from_dict(s)
self.load_source(s, build_index=False)
self._build_src_index()
|
Delete all sources in the ROI and load the input source list.
|
entailment
|
def load_source(self, src, build_index=True, merge_sources=True,
**kwargs):
"""
Load a single source.
Parameters
----------
src : `~fermipy.roi_model.Source`
Source object that will be added to the ROI.
merge_sources : bool
When a source matches an existing source in the model
update that source with the properties of the new source.
build_index : bool
Re-make the source index after loading this source.
"""
src = copy.deepcopy(src)
name = src.name.replace(' ', '').lower()
min_sep = kwargs.get('min_separation', None)
if min_sep is not None:
sep = src.skydir.separation(self._src_skydir).deg
if len(sep) > 0 and np.min(sep) < min_sep:
return
match_srcs = self.match_source(src)
if len(match_srcs) == 1:
# self.logger.debug('Found matching source for %s : %s',
# src.name, match_srcs[0].name)
if merge_sources:
match_srcs[0].update_from_source(src)
else:
match_srcs[0].add_name(src.name)
self._add_source_alias(src.name.replace(' ', '').lower(),
match_srcs[0])
return
        elif len(match_srcs) > 1:
raise Exception('Multiple sources with name %s' % name)
self._add_source_alias(src.name, src)
for name in src.names:
self._add_source_alias(name.replace(' ', '').lower(), src)
if isinstance(src, Source):
self._srcs.append(src)
else:
self._diffuse_srcs.append(src)
if build_index:
self._build_src_index()
|
Load a single source.
Parameters
----------
src : `~fermipy.roi_model.Source`
Source object that will be added to the ROI.
merge_sources : bool
When a source matches an existing source in the model
update that source with the properties of the new source.
build_index : bool
Re-make the source index after loading this source.
|
entailment
|
def match_source(self, src):
"""Look for source or sources in the model that match the
given source. Sources are matched by name and any association
columns defined in the assoc_xmatch_columns parameter.
"""
srcs = []
names = [src.name]
for col in self.config['assoc_xmatch_columns']:
if col in src.assoc and src.assoc[col]:
names += [src.assoc[col]]
for name in names:
name = name.replace(' ', '').lower()
if name not in self._src_dict:
continue
srcs += [s for s in self._src_dict[name] if s not in srcs]
return srcs
|
Look for source or sources in the model that match the
given source. Sources are matched by name and any association
columns defined in the assoc_xmatch_columns parameter.
|
entailment
|
def load(self, **kwargs):
"""Load both point source and diffuse components."""
coordsys = kwargs.get('coordsys', 'CEL')
extdir = kwargs.get('extdir', self.extdir)
srcname = kwargs.get('srcname', None)
self.clear()
self.load_diffuse_srcs()
for c in self.config['catalogs']:
if isinstance(c, catalog.Catalog):
self.load_existing_catalog(c)
continue
extname = os.path.splitext(c)[1]
            if extname == '.xml':
                self.load_xml(c, extdir=extdir, coordsys=coordsys)
            else:
                # Anything without an .xml extension is treated as a FITS catalog.
                self.load_fits_catalog(c, extdir=extdir, coordsys=coordsys,
                                       srcname=srcname)
for c in self.config['sources']:
if 'name' not in c:
raise Exception(
'No name field in source dictionary:\n ' + str(c))
self.create_source(c['name'], c, build_index=False)
self._build_src_index()
|
Load both point source and diffuse components.
|
entailment
|
def create_from_roi_data(cls, datafile):
"""Create an ROI model."""
        # allow_pickle is required to load an object array with modern numpy
        data = np.load(datafile, allow_pickle=True).flat[0]
roi = cls()
roi.load_sources(data['sources'].values())
return roi
|
Create an ROI model.
|
entailment
|
def create(cls, selection, config, **kwargs):
"""Create an ROIModel instance."""
if selection['target'] is not None:
return cls.create_from_source(selection['target'],
config, **kwargs)
else:
target_skydir = wcs_utils.get_target_skydir(selection)
return cls.create_from_position(target_skydir, config, **kwargs)
|
Create an ROIModel instance.
|
entailment
|
def create_from_position(cls, skydir, config, **kwargs):
"""Create an ROIModel instance centered on a sky direction.
Parameters
----------
skydir : `~astropy.coordinates.SkyCoord`
Sky direction on which the ROI will be centered.
config : dict
Model configuration dictionary.
"""
coordsys = kwargs.pop('coordsys', 'CEL')
roi = cls(config, skydir=skydir, coordsys=coordsys, **kwargs)
return roi
|
Create an ROIModel instance centered on a sky direction.
Parameters
----------
skydir : `~astropy.coordinates.SkyCoord`
Sky direction on which the ROI will be centered.
config : dict
Model configuration dictionary.
|
entailment
|
def create_from_source(cls, name, config, **kwargs):
"""Create an ROI centered on the given source."""
coordsys = kwargs.pop('coordsys', 'CEL')
roi = cls(config, src_radius=None, src_roiwidth=None,
srcname=name, **kwargs)
src = roi.get_source_by_name(name)
return cls.create_from_position(src.skydir, config,
coordsys=coordsys, **kwargs)
|
Create an ROI centered on the given source.
|
entailment
|
def get_source_by_name(self, name):
"""Return a single source in the ROI with the given name. The
input name string can match any of the strings in the names
property of the source object. Case and whitespace are
ignored when matching name strings. If no sources are found
    or multiple sources are matched, an exception is thrown.
Parameters
----------
name : str
Name string.
Returns
-------
srcs : `~fermipy.roi_model.Model`
A source object.
"""
srcs = self.get_sources_by_name(name)
if len(srcs) == 1:
return srcs[0]
elif len(srcs) == 0:
raise Exception('No source matching name: ' + name)
elif len(srcs) > 1:
raise Exception('Multiple sources matching name: ' + name)
|
Return a single source in the ROI with the given name. The
input name string can match any of the strings in the names
property of the source object. Case and whitespace are
ignored when matching name strings. If no sources are found
or multiple sources are matched, an exception is thrown.
Parameters
----------
name : str
Name string.
Returns
-------
srcs : `~fermipy.roi_model.Model`
A source object.
|
entailment
|
def get_sources_by_name(self, name):
"""Return a list of sources in the ROI matching the given
name. The input name string can match any of the strings in
the names property of the source object. Case and whitespace
are ignored when matching name strings.
Parameters
----------
name : str
Returns
-------
srcs : list
A list of `~fermipy.roi_model.Model` objects.
"""
index_name = name.replace(' ', '').lower()
if index_name in self._src_dict:
return list(self._src_dict[index_name])
else:
raise Exception('No source matching name: ' + name)
|
Return a list of sources in the ROI matching the given
name. The input name string can match any of the strings in
the names property of the source object. Case and whitespace
are ignored when matching name strings.
Parameters
----------
name : str
Returns
-------
srcs : list
A list of `~fermipy.roi_model.Model` objects.
|
entailment
|
def get_sources(self, skydir=None, distance=None, cuts=None,
minmax_ts=None, minmax_npred=None,
exclude=None, square=False, coordsys='CEL',
names=None):
"""Retrieve list of source objects satisfying the following
selections:
* Angular separation from ``skydir`` or ROI center (if
``skydir`` is None) less than ``distance``.
* Cuts on source properties defined in ``cuts`` list.
* TS and Npred in range specified by ``minmax_ts`` and ``minmax_npred``.
* Name matching a value in ``names``
Sources can be excluded from the selection by adding their
name to the ``exclude`` list.
Returns
-------
srcs : list
List of source objects.
"""
if skydir is None:
skydir = self.skydir
if exclude is None:
exclude = []
rsrc, srcs = self.get_sources_by_position(skydir,
distance,
square=square,
coordsys=coordsys)
o = []
for s in srcs + self.diffuse_sources:
if names and s.name not in names:
continue
if s.name in exclude:
continue
if not s.check_cuts(cuts):
continue
ts = s['ts']
npred = s['npred']
if not utils.apply_minmax_selection(ts, minmax_ts):
continue
if not utils.apply_minmax_selection(npred, minmax_npred):
continue
o.append(s)
return o
|
Retrieve list of source objects satisfying the following
selections:
* Angular separation from ``skydir`` or ROI center (if
``skydir`` is None) less than ``distance``.
* Cuts on source properties defined in ``cuts`` list.
* TS and Npred in range specified by ``minmax_ts`` and ``minmax_npred``.
* Name matching a value in ``names``
Sources can be excluded from the selection by adding their
name to the ``exclude`` list.
Returns
-------
srcs : list
List of source objects.
|
entailment
|
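A minimal usage sketch; the selection values are made up, and None is assumed to disable a bound as in the fermipy configuration conventions:

srcs = roi.get_sources(distance=3.0,            # within 3 deg of the ROI center
                       minmax_ts=[25.0, None],  # keep sources with TS >= 25
                       exclude=['isodiff'])     # drop a source by name
names = [s.name for s in srcs]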
def get_sources_by_position(self, skydir, dist, min_dist=None,
square=False, coordsys='CEL'):
"""Retrieve sources within a certain angular distance of a sky
coordinate. This function supports two types of geometric
selections: circular (square=False) and square (square=True).
The circular selection finds all sources with a given angular
distance of the target position. The square selection finds
sources within an ROI-like region of size R x R where R = 2 x
dist.
Parameters
----------
skydir : `~astropy.coordinates.SkyCoord`
Sky direction with respect to which the selection will be applied.
dist : float
Maximum distance in degrees from the sky coordinate.
square : bool
Choose whether to apply a circular or square selection.
coordsys : str
Coordinate system to use when applying a selection with square=True.
"""
msk = get_skydir_distance_mask(self._src_skydir, skydir, dist,
min_dist=min_dist, square=square,
coordsys=coordsys)
radius = self._src_skydir.separation(skydir).deg
radius = radius[msk]
srcs = [self._srcs[i] for i in np.nonzero(msk)[0]]
isort = np.argsort(radius)
radius = radius[isort]
srcs = [srcs[i] for i in isort]
return radius, srcs
|
Retrieve sources within a certain angular distance of a sky
coordinate. This function supports two types of geometric
selections: circular (square=False) and square (square=True).
The circular selection finds all sources with a given angular
distance of the target position. The square selection finds
sources within an ROI-like region of size R x R where R = 2 x
dist.
Parameters
----------
skydir : `~astropy.coordinates.SkyCoord`
Sky direction with respect to which the selection will be applied.
dist : float
Maximum distance in degrees from the sky coordinate.
square : bool
Choose whether to apply a circular or square selection.
coordsys : str
Coordinate system to use when applying a selection with square=True.
|
entailment
|
def load_fits_catalog(self, name, **kwargs):
"""Load sources from a FITS catalog file.
Parameters
----------
name : str
Catalog name or path to a catalog FITS file.
"""
# EAC split this function to make it easier to load an existing catalog
cat = catalog.Catalog.create(name)
self.load_existing_catalog(cat, **kwargs)
|
Load sources from a FITS catalog file.
Parameters
----------
name : str
Catalog name or path to a catalog FITS file.
|
entailment
|
def load_existing_catalog(self, cat, **kwargs):
"""Load sources from an existing catalog object.
Parameters
----------
cat : `~fermipy.catalog.Catalog`
Catalog object.
"""
coordsys = kwargs.get('coordsys', 'CEL')
extdir = kwargs.get('extdir', self.extdir)
srcname = kwargs.get('srcname', None)
m0 = get_skydir_distance_mask(cat.skydir, self.skydir,
self.config['src_radius'])
m1 = get_skydir_distance_mask(cat.skydir, self.skydir,
self.config['src_radius_roi'],
square=True, coordsys=coordsys)
m = (m0 & m1)
if srcname is not None:
m &= utils.find_rows_by_string(cat.table, [srcname],
self.src_name_cols)
offset = self.skydir.separation(cat.skydir).deg
offset_cel = wcs_utils.sky_to_offset(self.skydir,
cat.radec[:, 0], cat.radec[:, 1],
'CEL')
offset_gal = wcs_utils.sky_to_offset(self.skydir,
cat.glonlat[
:, 0], cat.glonlat[:, 1],
'GAL')
for i, (row, radec) in enumerate(zip(cat.table[m],
cat.radec[m])):
catalog_dict = catalog.row_to_dict(row)
src_dict = {'catalog': catalog_dict}
src_dict['Source_Name'] = row['Source_Name']
src_dict['SpectrumType'] = row['SpectrumType']
if row['extended']:
src_dict['SourceType'] = 'DiffuseSource'
src_dict['SpatialType'] = str(row['Spatial_Function'])
src_dict['SpatialModel'] = str(row['Spatial_Function'])
search_dirs = []
if extdir is not None:
search_dirs += [extdir, os.path.join(extdir, 'Templates')]
search_dirs += [row['extdir'],
os.path.join(row['extdir'], 'Templates')]
if src_dict['SpatialType'] == 'SpatialMap':
src_dict['Spatial_Filename'] = utils.resolve_file_path(
row['Spatial_Filename'],
search_dirs=search_dirs)
else:
src_dict['SourceType'] = 'PointSource'
src_dict['SpatialType'] = 'SkyDirFunction'
src_dict['SpatialModel'] = 'PointSource'
src_dict['spectral_pars'] = spectral_pars_from_catalog(
catalog_dict)
src_dict['spatial_pars'] = spatial_pars_from_catalog(catalog_dict)
src = Source(src_dict['Source_Name'], src_dict, radec=radec)
src.data['offset'] = offset[m][i]
src.data['offset_ra'] = offset_cel[:, 0][m][i]
src.data['offset_dec'] = offset_cel[:, 1][m][i]
src.data['offset_glon'] = offset_gal[:, 0][m][i]
src.data['offset_glat'] = offset_gal[:, 1][m][i]
self.load_source(src, False,
merge_sources=self.config['merge_sources'])
self._build_src_index()
|
Load sources from an existing catalog object.
Parameters
----------
cat : `~fermipy.catalog.Catalog`
Catalog object.
|
entailment
|
def load_xml(self, xmlfile, **kwargs):
"""Load sources from an XML file."""
extdir = kwargs.get('extdir', self.extdir)
coordsys = kwargs.get('coordsys', 'CEL')
if not os.path.isfile(xmlfile):
xmlfile = os.path.join(fermipy.PACKAGE_DATA, 'catalogs', xmlfile)
root = ElementTree.ElementTree(file=xmlfile).getroot()
diffuse_srcs = []
srcs = []
ra, dec = [], []
for s in root.findall('source'):
src = Source.create_from_xml(s, extdir=extdir)
if src.diffuse:
diffuse_srcs += [src]
else:
srcs += [src]
ra += [src['RAJ2000']]
dec += [src['DEJ2000']]
src_skydir = SkyCoord(ra=np.array(ra) * u.deg,
dec=np.array(dec) * u.deg)
radec = np.vstack((src_skydir.ra.deg, src_skydir.dec.deg)).T
glonlat = np.vstack((src_skydir.galactic.l.deg,
src_skydir.galactic.b.deg)).T
offset = self.skydir.separation(src_skydir).deg
offset_cel = wcs_utils.sky_to_offset(self.skydir,
radec[:, 0], radec[:, 1], 'CEL')
offset_gal = wcs_utils.sky_to_offset(self.skydir,
glonlat[:, 0], glonlat[:, 1], 'GAL')
m0 = get_skydir_distance_mask(src_skydir, self.skydir,
self.config['src_radius'])
m1 = get_skydir_distance_mask(src_skydir, self.skydir,
self.config['src_radius_roi'],
square=True, coordsys=coordsys)
m = (m0 & m1)
srcs = np.array(srcs)[m]
for i, s in enumerate(srcs):
s.data['offset'] = offset[m][i]
s.data['offset_ra'] = offset_cel[:, 0][m][i]
s.data['offset_dec'] = offset_cel[:, 1][m][i]
s.data['offset_glon'] = offset_gal[:, 0][m][i]
s.data['offset_glat'] = offset_gal[:, 1][m][i]
self.load_source(s, False,
merge_sources=self.config['merge_sources'])
for i, s in enumerate(diffuse_srcs):
self.load_source(s, False,
merge_sources=self.config['merge_sources'])
self._build_src_index()
return srcs
|
Load sources from an XML file.
|
entailment
|
def _build_src_index(self):
"""Build an indices for fast lookup of a source given its name
or coordinates."""
self._srcs = sorted(self._srcs, key=lambda t: t['offset'])
nsrc = len(self._srcs)
radec = np.zeros((2, nsrc))
for i, src in enumerate(self._srcs):
radec[:, i] = src.radec
self._src_skydir = SkyCoord(ra=radec[0], dec=radec[1], unit=u.deg)
self._src_radius = self._src_skydir.separation(self.skydir)
|
Build indices for fast lookup of a source given its name
or coordinates.
|
entailment
|
def write_xml(self, xmlfile, config=None):
"""Save the ROI model as an XML file."""
root = ElementTree.Element('source_library')
root.set('title', 'source_library')
for s in self._srcs:
s.write_xml(root)
if config is not None:
srcs = self.create_diffuse_srcs(config)
diffuse_srcs = {s.name: s for s in srcs}
for s in self._diffuse_srcs:
src = copy.deepcopy(diffuse_srcs.get(s.name, s))
src.update_spectral_pars(s.spectral_pars)
src.write_xml(root)
else:
for s in self._diffuse_srcs:
s.write_xml(root)
        with open(xmlfile, 'w') as output_file:
            output_file.write(utils.prettify_xml(root))
|
Save the ROI model as an XML file.
|
entailment
|
def create_table(self, names=None):
"""Create an astropy Table object with the contents of the ROI model.
"""
scan_shape = (1,)
for src in self._srcs:
scan_shape = max(scan_shape, src['dloglike_scan'].shape)
tab = create_source_table(scan_shape)
for s in self._srcs:
if names is not None and s.name not in names:
continue
s.add_to_table(tab)
return tab
|
Create an astropy Table object with the contents of the ROI model.
|
entailment
|
def write_fits(self, fitsfile):
"""Write the ROI model to a FITS file."""
tab = self.create_table()
hdu_data = fits.table_to_hdu(tab)
hdus = [fits.PrimaryHDU(), hdu_data]
fits_utils.write_hdus(hdus, fitsfile)
|
Write the ROI model to a FITS file.
|
entailment
|
    def to_ds9(self, free='box', fixed='cross', frame='fk5', color='green', header=True):
"""Returns a list of ds9 region definitions
Parameters
----------
        free : str
            one of the supported ds9 point symbols, used for free sources; see http://ds9.si.edu/doc/ref/region.html
        fixed : str
            as `free`, but for fixed sources
        frame : str
            typically fk5; more frames may be implemented later
        color : str
            color used for symbols (only ds9-compatible colors)
        header : bool
            if True, a global header line will be prepended
Returns
-------
lines : list
list of regions (and header if requested)
"""
# todo: add support for extended sources?!
        allowed_symbols = ['circle', 'box', 'diamond', 'cross', 'x', 'arrow', 'boxcircle']
        # adding some checks.
        assert free in allowed_symbols, "symbol %s not supported" % free
        assert fixed in allowed_symbols, "symbol %s not supported" % fixed
lines = []
if header:
lines.append("global color=%s"%color)
for src in self.get_sources():
# self.get_sources will return both Source, but also IsoSource and MapCube, in which case the sources
# should be ignored (since they are by construction all-sky and have no corresponding ds9 region string)
            if not isinstance(src, Source):
                continue
            # otherwise get ra, dec
            ra, dec = src.radec
            line = "%s; point( %1.5f, %1.5f) # point=%s text={%s} color=%s" % \
                (frame, ra, dec,
                 free if src.is_free else fixed,
                 src.name,
                 color)
lines.append(line)
return lines
|
Returns a list of ds9 region definitions
Parameters
----------
free : str
    one of the supported ds9 point symbols, used for free sources; see http://ds9.si.edu/doc/ref/region.html
fixed : str
    as `free`, but for fixed sources
frame : str
    typically fk5; more frames may be implemented later
color : str
    color used for symbols (only ds9-compatible colors)
header : bool
    if True, a global header line will be prepended
Returns
-------
lines : list
list of regions (and header if requested)
|
entailment
|
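A minimal usage sketch; the ROI object, source name, and coordinates are made up:

lines = roi.to_ds9(free='cross', fixed='circle', color='red')
# lines[0] -> 'global color=red'
# lines[1] -> 'fk5; point( 83.63308, 22.01450) # point=cross text={src1} color=red'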
def write_ds9region(self, region, *args, **kwargs):
"""Create a ds9 compatible region file from the ROI.
It calls the `to_ds9` method and write the result to the region file. Only the file name is required.
All other parameters will be forwarded to the `to_ds9` method, see the documentation of that method
for all accepted parameters and options.
Parameters
----------
region : str
name of the region file (string)
"""
        lines = self.to_ds9(*args, **kwargs)
        with open(region, 'w') as fo:
fo.write("\n".join(lines))
|
Create a ds9 compatible region file from the ROI.
It calls the `to_ds9` method and write the result to the region file. Only the file name is required.
All other parameters will be forwarded to the `to_ds9` method, see the documentation of that method
for all accepted parameters and options.
Parameters
----------
region : str
name of the region file (string)
|
entailment
|