code
string
signature
string
docstring
string
loss_without_docstring
float64
loss_with_docstring
float64
factor
float64
def print_summary(self, stream=sys.stdout, indent="", recurse_level=2):
    """Print a summary of the activity done by this `Link`.

    Parameters
    ----------
    stream : `file`
        Stream to print to, must have 'write' method.
    indent : str
        Indentation at start of line
    recurse_level : int
        Number of recursion levels to print
    """
    # A negative recursion level means "print nothing at this depth".
    if recurse_level < 0:
        return
    stream.write("%sLink: %s\n" % (indent, self.linkname))
    stream.write("%sN_jobs: %s\n" % (indent, len(self.get_jobs())))
    # Delegate to the file archive for the chain-level summary.
    self.sub_files.print_chain_summary(stream, indent)
4.293427
4.932304
0.870471
def distance_to_edge(geom, skydir):
    """Return the angular distance from the given direction and
    the edge of the projection.

    The result is negative for directions inside the map and positive
    outside; units follow ``geom._cdelt`` (degrees).
    """
    # FIXME: We should add a pixel_size property in gammapy.maps
    # FIXME: We should make this into a MapGeom method
    xpix, ypix = skydir.to_pixel(geom.wcs, origin=0)
    # Offsets from the map center in world units, forced to 1-D arrays.
    dx = np.array((xpix - geom.center_pix[0]) * geom._cdelt[0], ndmin=1)
    dy = np.array((ypix - geom.center_pix[1]) * geom._cdelt[1], ndmin=1)
    # Signed distance past each half-width.
    dx = np.abs(dx) - 0.5 * geom.width[0]
    dy = np.abs(dy) - 0.5 * geom.width[1]

    # Quadrant masks: inside both edges, past x only, past y only, past both.
    inside = (dx < 0) & (dy < 0)
    past_x = (dx > 0) & (dy < 0)
    past_y = (dx < 0) & (dy > 0)
    past_xy = (dx > 0) & (dy > 0)
    x_closer = np.abs(dx) <= np.abs(dy)
    y_closer = np.abs(dy) < np.abs(dx)

    dist = np.zeros(len(dx))
    sel_x = (inside & x_closer) | (past_xy & y_closer) | past_x
    sel_y = (inside & y_closer) | (past_xy & x_closer) | past_y
    dist[sel_x] = dx[sel_x]
    dist[sel_y] = dy[sel_y]
    return dist
2.273669
2.293665
0.991282
def create_wcs(skydir, coordsys='CEL', projection='AIT',
               cdelt=1.0, crpix=1., naxis=2, energies=None):
    """Create a WCS object.

    Parameters
    ----------
    skydir : `~astropy.coordinates.SkyCoord`
        Sky coordinate of the WCS reference point.
    coordsys : str
        'CEL' or 'GAL'.
    projection : str
        WCS projection code (e.g. 'AIT', 'CAR').
    cdelt : float or (float,float)
        In the first case the same value is used for x and y axes
    crpix : float or (float,float)
        In the first case the same value is used for x and y axes
    naxis : {2, 3}
        Number of dimensions of the projection.
    energies : array-like
        Array of energies that defines the third dimension if naxis=3.
    """
    w = WCS(naxis=naxis)

    if coordsys == 'CEL':
        w.wcs.ctype[0] = 'RA---%s' % (projection)
        w.wcs.ctype[1] = 'DEC--%s' % (projection)
        w.wcs.crval[0] = skydir.icrs.ra.deg
        w.wcs.crval[1] = skydir.icrs.dec.deg
    elif coordsys == 'GAL':
        w.wcs.ctype[0] = 'GLON-%s' % (projection)
        w.wcs.ctype[1] = 'GLAT-%s' % (projection)
        w.wcs.crval[0] = skydir.galactic.l.deg
        w.wcs.crval[1] = skydir.galactic.b.deg
    else:
        raise Exception('Unrecognized coordinate system.')

    # crpix/cdelt may be either a scalar or an (x, y) pair.  BUG FIX: the
    # original used bare `except:` which also swallowed KeyboardInterrupt
    # and genuine errors; catch only the failures of sequence indexing.
    try:
        w.wcs.crpix[0] = crpix[0]
        w.wcs.crpix[1] = crpix[1]
    except (TypeError, IndexError):
        w.wcs.crpix[0] = crpix
        w.wcs.crpix[1] = crpix
    try:
        w.wcs.cdelt[0] = cdelt[0]
        w.wcs.cdelt[1] = cdelt[1]
    except (TypeError, IndexError):
        # Scalar: negative x-step follows the astronomical convention
        # of longitude increasing to the left.
        w.wcs.cdelt[0] = -cdelt
        w.wcs.cdelt[1] = cdelt

    # Round-trip through the header to normalize the WCS.
    w = WCS(w.to_header())
    if naxis == 3 and energies is not None:
        # Linear energy axis in MeV defined by the first two bin values.
        w.wcs.crpix[2] = 1
        w.wcs.crval[2] = energies[0]
        w.wcs.cdelt[2] = energies[1] - energies[0]
        w.wcs.ctype[2] = 'Energy'
        w.wcs.cunit[2] = 'MeV'
    return w
1.392171
1.429448
0.973922
def wcs_add_energy_axis(wcs, energies):
    """Copy a WCS object, and add on the energy axis.

    Parameters
    ----------
    wcs : `~astropy.wcs.WCS`
        WCS
    energies : array-like
        Array of energies.
    """
    if wcs.naxis != 2:
        raise Exception(
            'wcs_add_energy_axis, input WCS naxis != 2 %i' % wcs.naxis)
    w = WCS(naxis=3)
    # Copy the two spatial axes verbatim.
    for i in (0, 1):
        w.wcs.crpix[i] = wcs.wcs.crpix[i]
        w.wcs.ctype[i] = wcs.wcs.ctype[i]
        w.wcs.crval[i] = wcs.wcs.crval[i]
        w.wcs.cdelt[i] = wcs.wcs.cdelt[i]
    # Normalize via a header round-trip, then append the energy axis.
    w = WCS(w.to_header())
    w.wcs.crpix[2] = 1
    w.wcs.crval[2] = energies[0]
    w.wcs.cdelt[2] = energies[1] - energies[0]
    w.wcs.ctype[2] = 'Energy'
    return w
1.526648
1.588339
0.96116
def offset_to_sky(skydir, offset_lon, offset_lat,
                  coordsys='CEL', projection='AIT'):
    """Convert a cartesian offset (X,Y) in the given projection into
    a pair of spherical coordinates."""
    lon = np.array(offset_lon, ndmin=1)
    lat = np.array(offset_lat, ndmin=1)
    # Build a local projection centered on skydir and de-project.
    proj = create_wcs(skydir, coordsys, projection)
    pix = np.vstack((lon, lat)).T
    return proj.wcs_pix2world(pix, 0)
2.158421
2.428865
0.888654
def sky_to_offset(skydir, lon, lat, coordsys='CEL', projection='AIT'):
    """Convert sky coordinates to a projected offset.

    This function is the inverse of offset_to_sky.
    """
    proj = create_wcs(skydir, coordsys, projection)
    coords = np.vstack((lon, lat)).T
    # Short-circuit on empty input; wcs_world2pix would choke on it.
    if len(coords) == 0:
        return coords
    return proj.wcs_world2pix(coords, 0)
3.227073
3.530896
0.913953
def offset_to_skydir(skydir, offset_lon, offset_lat,
                     coordsys='CEL', projection='AIT'):
    """Convert a cartesian offset (X,Y) in the given projection into
    a SkyCoord."""
    lon = np.array(offset_lon, ndmin=1)
    lat = np.array(offset_lat, ndmin=1)
    proj = create_wcs(skydir, coordsys, projection)
    return SkyCoord.from_pixel(lon, lat, proj, 0)
2.551423
2.749815
0.927852
def skydir_to_pix(skydir, wcs):
    """Convert skydir object to pixel coordinates.

    Gracefully handles 0-d coordinate arrays.

    Parameters
    ----------
    skydir : `~astropy.coordinates.SkyCoord`
    wcs : `~astropy.wcs.WCS`

    Returns
    -------
    xp, yp : `numpy.ndarray`
       The pixel coordinates
    """
    # An empty (but not scalar) coordinate array maps to empty pixel arrays.
    is_empty_array = len(skydir.shape) > 0 and len(skydir) == 0
    if is_empty_array:
        return [np.empty(0), np.empty(0)]
    return skydir.to_pixel(wcs, origin=0)
4.248734
4.563143
0.931098
def pix_to_skydir(xpix, ypix, wcs):
    """Convert pixel coordinates to a skydir object.

    Gracefully handles 0-d coordinate arrays.  Always returns a
    celestial (ICRS) coordinate.

    Parameters
    ----------
    xpix : `numpy.ndarray`
    ypix : `numpy.ndarray`
    wcs : `~astropy.wcs.WCS`
    """
    xpix = np.array(xpix)
    ypix = np.array(ypix)
    # Empty (non-scalar) input: return an empty ICRS coordinate directly.
    if xpix.ndim > 0 and len(xpix) == 0:
        return SkyCoord(np.empty(0), np.empty(0), unit='deg', frame='icrs')
    return SkyCoord.from_pixel(xpix, ypix, wcs, origin=0).transform_to('icrs')
2.676033
2.970307
0.900928
def wcs_to_axes(w, npix):
    """Generate a sequence of bin edge vectors corresponding to the
    axes of a WCS object.

    Returns (x, y) for a 2-D WCS and (x, y, z) for a 3-D WCS, where z
    holds log10(energy) edges.
    """
    # npix arrives in array (row-major) order; flip to axis order.
    npix = npix[::-1]
    x_edges = (np.linspace(-(npix[0]) / 2., (npix[0]) / 2., npix[0] + 1)
               * np.abs(w.wcs.cdelt[0]))
    y_edges = (np.linspace(-(npix[1]) / 2., (npix[1]) / 2., npix[1] + 1)
               * np.abs(w.wcs.cdelt[1]))
    if w.wcs.naxis == 2:
        return x_edges, y_edges
    # Energy axis: constant step in log10 space derived from the WCS.
    log_step = np.log10((w.wcs.cdelt[2] + w.wcs.crval[2]) / w.wcs.crval[2])
    z_edges = np.linspace(0, npix[2], npix[2] + 1) * log_step
    z_edges += np.log10(w.wcs.crval[2])
    return x_edges, y_edges, z_edges
2.115718
2.117183
0.999308
def wcs_to_coords(w, shape):
    """Generate an N x D list of pixel center coordinates where N is
    the number of pixels and D is the dimensionality of the map."""
    if w.naxis == 2:
        y, x = wcs_to_axes(w, shape)
    elif w.naxis == 3:
        z, y, x = wcs_to_axes(w, shape)
    else:
        raise Exception("Wrong number of WCS axes %i" % w.naxis)

    # Bin edges -> bin centers.
    x = 0.5 * (x[1:] + x[:-1])
    y = 0.5 * (y[1:] + y[:-1])

    if w.naxis == 2:
        # Broadcast each center vector over the full map and flatten.
        x = np.ravel(np.ones(shape) * x[:, np.newaxis])
        y = np.ravel(np.ones(shape) * y[np.newaxis, :])
        return np.vstack((x, y))

    z = 0.5 * (z[1:] + z[:-1])
    x = np.ravel(np.ones(shape) * x[:, np.newaxis, np.newaxis])
    y = np.ravel(np.ones(shape) * y[np.newaxis, :, np.newaxis])
    z = np.ravel(np.ones(shape) * z[np.newaxis, np.newaxis, :])
    return np.vstack((x, y, z))
1.593047
1.612385
0.988007
def get_cel_to_gal_angle(skydir):
    """Calculate the rotation angle in radians between the longitude
    axes of a local projection in celestial and galactic coordinates.

    Parameters
    ----------
    skydir : `~astropy.coordinates.SkyCoord`
        Direction of projection center.

    Returns
    -------
    angle : float
        Rotation angle in radians.
    """
    cel_proj = create_wcs(skydir, coordsys='CEL')
    gal_proj = create_wcs(skydir, coordsys='GAL')
    # Map the unit vector along the celestial longitude axis into the
    # galactic projection and measure its orientation.
    x, y = SkyCoord.to_pixel(SkyCoord.from_pixel(1.0, 0.0, cel_proj), gal_proj)
    return np.arctan2(y, x)
2.967042
3.60318
0.823451
def extract_mapcube_region(infile, skydir, outfile, maphdu=0):
    """Extract a region out of an all-sky mapcube file.

    Parameters
    ----------
    infile : str
        Path to mapcube file.
    skydir : `~astropy.coordinates.SkyCoord`
        Center of the extracted region.
    outfile : str
        Path of the output file (overwritten if it exists).
    maphdu : int
        HDU index of the map to extract.
    """
    hdulist_in = fits.open(os.path.expandvars(infile))

    # Output cutout is a fixed 200 x 200 pixel region.
    npix = 200
    shape = list(hdulist_in[maphdu].data.shape)
    shape[1] = 200
    shape[2] = 200

    wcs = WCS(hdulist_in[maphdu].header)
    skywcs = WCS(hdulist_in[maphdu].header, naxis=[1, 2])
    coordsys = get_coordsys(skywcs)

    # Re-center a copy of the WCS on the requested direction.
    region_wcs = wcs.deepcopy()
    if coordsys == 'CEL':
        region_wcs.wcs.crval[0] = skydir.ra.deg
        region_wcs.wcs.crval[1] = skydir.dec.deg
    elif coordsys == 'GAL':
        region_wcs.wcs.crval[0] = skydir.galactic.l.deg
        region_wcs.wcs.crval[1] = skydir.galactic.b.deg
    else:
        raise Exception('Unrecognized coordinate system.')

    region_wcs.wcs.crpix[0] = npix // 2 + 0.5
    region_wcs.wcs.crpix[1] = npix // 2 + 0.5

    from reproject import reproject_interp
    data, footprint = reproject_interp(hdulist_in, region_wcs.to_header(),
                                       hdu_in=maphdu, shape_out=shape)

    hdu_image = fits.PrimaryHDU(data, header=region_wcs.to_header())
    hdulist = fits.HDUList([hdu_image, hdulist_in['ENERGIES']])
    # NOTE(review): `clobber` is the legacy astropy spelling of
    # `overwrite`; kept for behavioral compatibility.
    hdulist.writeto(outfile, clobber=True)
2.007223
2.124308
0.944883
def distance_to_edge(self, skydir):
    """Return the angular distance from the given direction and
    the edge of the projection.

    Negative inside the map, positive outside.
    """
    xpix, ypix = skydir.to_pixel(self.wcs, origin=0)
    # Offsets from the projection center in world units (1-D arrays).
    dx = np.array((xpix - self._pix_center[0]) * self._pix_size[0], ndmin=1)
    dy = np.array((ypix - self._pix_center[1]) * self._pix_size[1], ndmin=1)
    # Signed distance past each half-width.
    dx = np.abs(dx) - 0.5 * self._width[0]
    dy = np.abs(dy) - 0.5 * self._width[1]

    inside = (dx < 0) & (dy < 0)
    past_x = (dx > 0) & (dy < 0)
    past_y = (dx < 0) & (dy > 0)
    past_xy = (dx > 0) & (dy > 0)
    x_closer = np.abs(dx) <= np.abs(dy)
    y_closer = np.abs(dy) < np.abs(dx)

    dist = np.zeros(len(dx))
    sel_x = (inside & x_closer) | (past_xy & y_closer) | past_x
    sel_y = (inside & y_closer) | (past_xy & x_closer) | past_y
    dist[sel_x] = dx[sel_x]
    dist[sel_y] = dy[sel_y]
    return dist
1.83262
1.828519
1.002243
def readlines(arg):
    """Read lines from a file into a list.

    Removes whitespace and lines that start with '#'.
    """
    with open(arg) as fin:
        raw = fin.readlines()
    cleaned = []
    for line in raw:
        stripped = line.strip()
        # Skip blanks and comment lines.
        if not stripped or stripped[0] == '#':
            continue
        cleaned.append(stripped)
    return cleaned
1.944148
1.97807
0.982851
def create_inputlist(arglist):
    """Read lines from a file and make a list of file names.

    Removes whitespace and lines that start with '#'.
    Recursively reads all files with the extension '.lst'.
    """
    # Normalize the input into a flat sequence of entries.
    if isinstance(arglist, list):
        entries = arglist
    elif is_null(arglist):
        entries = []
    else:
        entries = [arglist]

    lines = []
    for entry in entries:
        # '.lst' files are expanded in place; anything else is taken as-is.
        if os.path.splitext(entry)[1] == '.lst':
            lines += readlines(entry)
        else:
            lines.append(entry)
    return lines
2.195077
1.980842
1.108154
def init(self):
    """Initialize histograms.

    Allocates zeroed on/off/alpha histograms keyed by event class and
    event type, plus an empty dict for derived efficiency histograms.
    """
    # Axis layout: [evclass(, evtype), energy, costheta(, separation)].
    shape_class = [16, 40, 10]
    shape_type = [16, 16, 40, 10]
    shape_class_psf = [16, 40, 10, 100]
    shape_type_psf = [16, 16, 40, 10, 100]

    self._hists_eff = dict()
    self._hists = dict(
        evclass_on=np.zeros(shape_class),
        evclass_off=np.zeros(shape_class),
        evclass_alpha=np.zeros([16, 40, 1]),
        evtype_on=np.zeros(shape_type),
        evtype_off=np.zeros(shape_type),
        evtype_alpha=np.zeros([16, 1, 40, 1]),
        evclass_psf_on=np.zeros(shape_class_psf),
        evclass_psf_off=np.zeros(shape_class_psf),
        evtype_psf_on=np.zeros(shape_type_psf),
        evtype_psf_off=np.zeros(shape_type_psf),
    )
2.004718
1.892876
1.059086
def create_hist(self, evclass, evtype, xsep, energy, ctheta,
                fill_sep=False, fill_evtype=False):
    """Load events into a histogram.

    Builds an N-D histogram binned in energy and cos(theta) (and
    optionally separation), with a leading event-class axis and,
    when ``fill_evtype`` is set, an event-type axis as well.
    """
    # NOTE(review): nevt and scale appear unused but are kept for
    # behavioral fidelity (the indexing of _psf_scale can raise).
    nevt = len(evclass)
    ebin = utils.val_to_bin(self._energy_bins, energy)
    scale = self._psf_scale[ebin]

    vals = [energy, ctheta]
    bins = [self._energy_bins, self._ctheta_bins]
    if fill_sep:
        vals += [xsep]
        bins += [self._xsep_bins]

    if fill_evtype:
        loopv = [self._evclass_bins[:-1], self._evtype_bins[:-1]]
        shape = [16, 16] + [len(b) - 1 for b in bins]
    else:
        loopv = [self._evclass_bins[:-1]]
        shape = [16] + [len(b) - 1 for b in bins]

    h = np.zeros(shape)
    for t in itertools.product(*loopv):
        # Mask of events belonging to this class (and type) bit.
        m = (evclass[:, int(t[0])] == True)
        if fill_evtype:
            m &= (evtype[:, int(t[1])] == True)
        if not np.sum(m):
            continue
        z = np.vstack(vals)
        z = z[:, m]
        if fill_evtype:
            h[int(t[0]), int(t[1])] += np.histogramdd(z.T, bins=bins)[0]
        else:
            h[int(t[0])] += np.histogramdd(z.T, bins=bins)[0]
    return h
2.78412
2.785213
0.999608
def calc_eff(self):
    """Calculate the efficiency.

    Fills the efficiency and efficiency-variance histograms (per
    cos(theta) and summed over cos(theta)) for both event class and
    event type selections.
    """
    hists = self.hists
    hists_out = self._hists_eff
    # cos(theta) axis index differs between class and type histograms.
    cth_axis_idx = dict(evclass=2, evtype=3)

    for k in ['evclass', 'evtype']:
        # Reference on/off counts from event-class bit 4, broadcast to
        # the dimensionality of the target histogram.
        if k == 'evclass':
            ns0 = hists['evclass_on'][4][None, ...]
            nb0 = hists['evclass_off'][4][None, ...]
        else:
            ns0 = hists['evclass_on'][4][None, None, ...]
            nb0 = hists['evclass_off'][4][None, None, ...]

        # NOTE: this calls the module-level calc_eff() helper, not this
        # method (the method name is only reachable through self).
        eff, eff_var = calc_eff(ns0, nb0, hists['%s_on' % k],
                                hists['%s_off' % k], hists['%s_alpha' % k])
        hists_out['%s_cth_eff' % k] = eff
        hists_out['%s_cth_eff_var' % k] = eff_var

        eff, eff_var = calc_eff(ns0, nb0, hists['%s_on' % k],
                                hists['%s_off' % k], hists['%s_alpha' % k],
                                sum_axes=[cth_axis_idx[k]])
        hists_out['%s_eff' % k] = np.squeeze(eff)
        hists_out['%s_eff_var' % k] = np.squeeze(eff_var)
2.594971
2.553085
1.016406
def calc_containment(self):
    """Calculate PSF containment.

    Computes containment quantiles (34/68/90/95%) per cos(theta) bin
    and summed over cos(theta), and stores them in the efficiency
    histogram dict.
    """
    hists = self.hists
    hists_out = self._hists_eff
    quantiles = [0.34, 0.68, 0.90, 0.95]
    cth_axis_idx = dict(evclass=2, evtype=3)

    for k in ['evclass']:  # ,'evtype']:
        print(k)
        non = hists['%s_psf_on' % k]
        noff = hists['%s_psf_off' % k]
        alpha = hists['%s_alpha' % k][..., None]
        # Separation bin upper edges broadcast against the histogram axes.
        if k == 'evclass':
            sep = self._sep_bins[None, :, None, 1:]
        else:
            sep = self._sep_bins[None, None, :, None, 1:]

        qval, qerr = calc_quantiles(sep, non, noff, alpha, quantiles)
        for i, q in enumerate(quantiles):
            hists_out['%s_cth_q%2i' % (k, q * 100)] = qval[i]
            hists_out['%s_cth_q%2i_err' % (k, q * 100)] = qerr[i]

        # Collapse the cos(theta) axis and recompute the quantiles.
        non = np.sum(non, axis=cth_axis_idx[k])
        noff = np.sum(noff, axis=cth_axis_idx[k])
        alpha = np.squeeze(alpha, axis=cth_axis_idx[k])
        sep = np.squeeze(sep, axis=cth_axis_idx[k])

        qval, qerr = calc_quantiles(sep, non, noff, alpha, quantiles)
        for i, q in enumerate(quantiles):
            hists_out['%s_q%2i' % (k, q * 100)] = qval[i]
            hists_out['%s_q%2i_err' % (k, q * 100)] = qerr[i]
2.477345
2.454061
1.009488
def create_default_config(schema):
    """Create a configuration dictionary from a schema dictionary.

    The schema defines the valid configuration keys and their default
    values.  Each element of ``schema`` should be a tuple/list
    containing (default value, docstring, type) or a dict containing a
    nested schema.
    """
    defaults = {}
    for key, item in schema.items():
        if isinstance(item, dict):
            # Nested schema: recurse.
            defaults[key] = create_default_config(item)
        elif isinstance(item, tuple):
            value, comment, item_type = item
            # A tuple of types means "any of these"; take the first.
            if isinstance(item_type, tuple):
                item_type = item_type[0]
            # Container types default to an empty instance rather than None.
            if value is None and (item_type == list or item_type == dict):
                value = item_type()
            if key in defaults:
                raise KeyError('Duplicate key in schema.')
            defaults[key] = value
        else:
            raise TypeError('Unrecognized type for schema dict element: %s %s'
                            % (key, type(item)))
    return defaults
3.207499
3.093191
1.036955
def update_from_schema(cfg, cfgin, schema):
    """Update configuration dictionary ``cfg`` with the contents of
    ``cfgin`` using the ``schema`` dictionary to determine the valid
    input keys.

    Parameters
    ----------
    cfg : dict
        Configuration dictionary to be updated.
    cfgin : dict
        New configuration dictionary that will be merged with ``cfg``.
    schema : dict
        Configuration schema defining the valid configuration keys and
        their types.

    Returns
    -------
    cfgout : dict
    """
    cfgout = copy.deepcopy(cfg)
    for key, spec in schema.items():
        # Only keys present in the input are merged.
        if key not in cfgin:
            continue
        if isinstance(spec, dict):
            # Nested schema: merge recursively.
            cfgout.setdefault(key, {})
            cfgout[key] = update_from_schema(cfg[key], cfgin[key], spec)
        elif spec[2] is dict:
            # Free-form dict values get a deep merge with new keys allowed.
            cfgout[key] = utils.merge_dict(cfg[key], cfgin[key],
                                           add_new_keys=True)
        else:
            cfgout[key] = cfgin[key]
    return cfgout
2.381792
2.450272
0.972052
def write_config(self, outfile):
    """Write the configuration dictionary to an output file.

    Parameters
    ----------
    outfile : str
        Path of the YAML file to write.
    """
    # Block style (default_flow_style=False) keeps the YAML readable.
    utils.write_yaml(self.config, outfile, default_flow_style=False)
4.54479
4.666806
0.973855
def create(cls, configfile):
    """Create a configuration dictionary from a yaml config file.

    This function will first populate the dictionary with defaults
    taken from pre-defined configuration files.  The configuration
    dictionary is then updated with the user-defined configuration
    file.  Any settings defined by the user will take precedence over
    the default settings.
    """
    # populate config dictionary with an initial set of values
    # config_logging = ConfigManager.load('logging.yaml')
    config = {}
    # BUG FIX: the original indexed config['fileio'] on an empty dict,
    # which always raised KeyError.  Ensure the nested key exists.
    config.setdefault('fileio', {}).setdefault('outdir', None)
    if config['fileio']['outdir'] is None:
        # Default output directory: the directory containing the config file.
        config['fileio']['outdir'] = os.path.abspath(
            os.path.dirname(configfile))

    user_config = cls.load(configfile)
    # User settings override defaults; new keys are allowed.
    config = utils.merge_dict(config, user_config, True)
    config['fileio']['outdir'] = os.path.abspath(
        config['fileio']['outdir'])
    return config
4.637837
4.42007
1.049268
def update_null_primary(hdu_in, hdu=None):
    """'Update' a null primary HDU.

    This actually just checks hdu exists and creates it from hdu_in if
    it does not.

    Parameters
    ----------
    hdu_in : `astropy.io.fits.PrimaryHDU`
        Template HDU.
    hdu : `astropy.io.fits.PrimaryHDU` or None
        If None a new data-less primary HDU is built from hdu_in's
        header; otherwise hdu_in itself is returned.
    """
    if hdu is None:
        hdu = fits.PrimaryHDU(header=hdu_in.header)
    else:
        # NOTE(review): this deliberately returns hdu_in, not the hdu
        # argument — preserved from the original implementation.
        hdu = hdu_in
    # BUG FIX: Header.remove raises KeyError when the card is absent;
    # only strip FILENAME if it is actually present.
    if 'FILENAME' in hdu.header:
        hdu.header.remove('FILENAME')
    return hdu
2.808754
2.633865
1.0664
def update_primary(hdu_in, hdu=None):
    """'Update' a primary HDU.

    This checks hdu exists and creates it from hdu_in if it does not.
    If hdu does exist, this adds the data in hdu_in to hdu.
    """
    if hdu is None:
        # First file: clone data and header into a new primary HDU.
        return fits.PrimaryHDU(data=hdu_in.data, header=hdu_in.header)
    # Subsequent files: accumulate the data in place.
    hdu.data += hdu_in.data
    return hdu
1.925504
1.990896
0.967154
def update_image(hdu_in, hdu=None):
    """'Update' an image HDU.

    This checks hdu exists and creates it from hdu_in if it does not.
    If hdu does exist, this adds the data in hdu_in to hdu.
    """
    if hdu is None:
        # First file: clone data, header and extension name.
        return fits.ImageHDU(data=hdu_in.data, header=hdu_in.header,
                             name=hdu_in.name)
    # Subsequent files: accumulate the data in place.
    hdu.data += hdu_in.data
    return hdu
2.091765
2.060793
1.015029
def update_ebounds(hdu_in, hdu=None):
    """'Update' the EBOUNDS HDU.

    This checks hdu exists and creates it from hdu_in if it does not.
    If hdu does exist, this raises an exception if it does not match
    hdu_in.
    """
    if hdu is None:
        return fits.BinTableHDU(data=hdu_in.data, header=hdu_in.header,
                                name=hdu_in.name)
    # All files being merged must share identical energy binning.
    for col in ['CHANNEL', 'E_MIN', 'E_MAX']:
        if (hdu.data[col] != hdu_in.data[col]).any():
            raise ValueError("Energy bounds do not match : %s %s"
                             % (hdu.data[col], hdu_in.data[col]))
    return hdu
2.439933
2.386353
1.022453
def merge_all_gti_data(datalist_in, nrows, first):
    """Merge together all the GTI data.

    Parameters
    ----------
    datalist_in : list of `astropy.io.fits.BinTableHDU` data
        The GTI data that is being merged
    nrows : `~numpy.ndarray` of ints
        Array with the number of nrows for each object in datalist_in
    first : `astropy.io.fits.BinTableHDU`
        BinTableHDU to use as a template

    Returns
    -------
    out_hdu : `astropy.io.fits.BinTableHDU`
        BinTableHDU with the merged GTIs
    """
    # Row ranges occupied by each input table in the output table.
    last_rows = nrows.cumsum()
    first_rows = last_rows - nrows

    out_hdu = fits.BinTableHDU.from_columns(first.columns,
                                            header=first.header,
                                            nrows=nrows.sum())
    for lo, hi, data_in in zip(first_rows, last_rows, datalist_in):
        for col in first.columns:
            out_hdu.data[col.name][lo:hi] = data_in[col.name]
    return out_hdu
2.659914
2.649125
1.004072
def extract_gti_data(hdu_in):
    """Extract some GTI related data.

    Parameters
    ----------
    hdu_in : `astropy.io.fits.BinTableHDU`
        The GTI data

    Returns
    -------
    data : `astropy.io.fits.BinTableHDU` data
    exposure : float
        Exposure value taken from FITS header
    tstop : float
        TSTOP value taken from FITS header
    """
    header = hdu_in.header
    return (hdu_in.data, header['EXPOSURE'], header['TSTOP'])
2.497994
2.23074
1.119805
def update_hpx_skymap_allsky(map_in, map_out):
    """'Update' a HEALPix skymap.

    This checks map_out exists and creates it from map_in if it does
    not.  If map_out does exist, this adds the data in map_in to
    map_out.
    """
    if map_out is None:
        in_hpx = map_in.hpx
        # Build an all-sky geometry matching the input map's binning.
        out_hpx = HPX.create_hpx(in_hpx.nside, in_hpx.nest, in_hpx.coordsys,
                                 None, in_hpx.ebins, None, in_hpx.conv, None)
        data_out = map_in.expanded_counts_map()
        # FIX: removed a leftover debug print of the data shape/sum.
        map_out = HpxMap(data_out, out_hpx)
    else:
        map_out.data += map_in.expanded_counts_map()
    return map_out
3.187043
3.352259
0.950715
def merge_wcs_counts_cubes(filelist):
    """Merge all the files in filelist, assuming that they are WCS
    counts cubes.

    Returns an `astropy.io.fits.HDUList` with the summed primary image,
    the (shared) EBOUNDS table and the concatenated GTI table.
    """
    out_prim = None
    out_ebounds = None

    datalist_gti = []
    exposure_sum = 0.
    nfiles = len(filelist)
    ngti = np.zeros(nfiles, int)
    # BUG FIX: with a single input file the original never assigned
    # date_end (the i==0 branch shadowed the last-file branch), causing
    # a NameError below.  date_end is now read from the last file
    # unconditionally.
    date_end = None

    for i, filename in enumerate(filelist):
        fin = fits.open(filename)
        sys.stdout.write('.')
        sys.stdout.flush()
        if i == 0:
            out_prim = update_primary(fin[0], out_prim)
            out_ebounds = update_ebounds(fin["EBOUNDS"], out_ebounds)

        (gti_data, exposure, tstop) = extract_gti_data(fin["GTI"])
        datalist_gti.append(gti_data)
        exposure_sum += exposure
        ngti[i] = len(gti_data)

        if i == nfiles - 1:
            date_end = fin[0].header['DATE-END']
        if i == 0:
            # Keep the first file open: its GTI template is needed below.
            first = fin
        elif i != nfiles - 1:
            fin.close()

    out_gti = merge_all_gti_data(datalist_gti, ngti, first['GTI'])
    out_gti.header['EXPOSURE'] = exposure_sum
    out_gti.header['TSTOP'] = tstop

    hdulist = [out_prim, out_ebounds, out_gti]
    for hdu in hdulist:
        hdu.header['DATE-END'] = date_end

    out_prim.update_header()
    sys.stdout.write("!\n")
    return fits.HDUList(hdulist)
2.891915
2.896948
0.998263
def merge_hpx_counts_cubes(filelist):
    """Merge all the files in filelist, assuming that they are HEALPix
    counts cubes.

    Returns an `astropy.io.fits.HDUList` with the null primary HDU, the
    summed skymap, the energy binning HDU and (if present) the merged
    GTI table.
    """
    out_prim = None
    out_skymap = None
    out_ebounds = None

    datalist_gti = []
    exposure_sum = 0.
    nfiles = len(filelist)
    ngti = np.zeros(nfiles, int)

    out_name = None
    # BUG FIX: with a single input file the original never assigned
    # date_end (the i==0 branch shadowed the last-file branch), causing
    # an UnboundLocalError below.
    date_end = None

    for i, filename in enumerate(filelist):
        fin = fits.open(filename)
        sys.stdout.write('.')
        sys.stdout.flush()
        if i == 0:
            out_prim = update_null_primary(fin[0], out_prim)
            out_name = fin[1].name

        map_in = HpxMap.create_from_hdulist(fin)
        out_skymap = update_hpx_skymap_allsky(map_in, out_skymap)
        if i == 0:
            # Energy binning lives in EBOUNDS or, failing that, ENERGIES.
            try:
                out_ebounds = update_ebounds(fin["EBOUNDS"], out_ebounds)
            except KeyError:
                out_ebounds = update_energies(fin["ENERGIES"], out_ebounds)
        # GTI tables are optional.
        try:
            (gti_data, exposure, tstop) = extract_gti_data(fin["GTI"])
            datalist_gti.append(gti_data)
            exposure_sum += exposure
            ngti[i] = len(gti_data)
        except KeyError:
            pass

        if i == nfiles - 1:
            try:
                date_end = fin[0].header['DATE-END']
            except KeyError:
                date_end = None
        if i == 0:
            # Keep the first file open: its GTI template is needed below.
            first = fin
        elif i != nfiles - 1:
            fin.close()

    out_skymap_hdu = out_skymap.create_image_hdu("SKYMAP")
    hdulist = [out_prim, out_skymap_hdu, out_ebounds]

    if len(datalist_gti) > 0:
        out_gti = merge_all_gti_data(datalist_gti, ngti, first['GTI'])
        out_gti.header['EXPOSURE'] = exposure_sum
        out_gti.header['TSTOP'] = tstop
        hdulist.append(out_gti)

    for hdu in hdulist:
        if date_end:
            hdu.header['DATE-END'] = date_end

    out_prim.update_header()
    sys.stdout.write("!\n")
    return fits.HDUList(hdulist)
2.813784
2.814857
0.999619
def run_analysis(self, argv):
    """Run this analysis.

    Builds a binned likelihood from the command-line arguments, then
    writes out the source maps for the catalog sources in the index
    range [srcmin, srcmax).
    """
    args = self._parser.parse_args(argv)
    obs = BinnedAnalysis.BinnedObs(irfs=args.irfs,
                                   expCube=args.expcube,
                                   srcMaps=args.cmap,
                                   binnedExpMap=args.bexpmap)
    like = BinnedAnalysis.BinnedAnalysis(obs,
                                         optimizer='MINUIT',
                                         srcModel=GtSrcmapsCatalog.NULL_MODEL,
                                         wmap=None)

    source_factory = pyLike.SourceFactory(obs.observation)
    source_factory.readXml(args.srcmdl, BinnedAnalysis._funcFactory,
                           False, True, True)
    srcNames = pyLike.StringVector()
    source_factory.fetchSrcNames(srcNames)

    min_idx = args.srcmin
    max_idx = args.srcmax
    # A negative srcmax means "all remaining sources".
    if max_idx < 0:
        max_idx = srcNames.size()

    # BUG FIX: xrange is Python 2 only (NameError on Python 3);
    # range is the equivalent here.
    for i in range(min_idx, max_idx):
        if i == min_idx:
            # Seed the output file on the first iteration.
            like.logLike.saveSourceMaps(args.outfile)
            pyLike.CountsMapBase.copyAndUpdateDssKeywords(args.cmap,
                                                          args.outfile,
                                                          None,
                                                          args.irfs)
        srcName = srcNames[i]
        source = source_factory.releaseSource(srcName)
        like.logLike.addSource(source, False)
        like.logLike.saveSourceMap_partial(args.outfile, source)
        like.logLike.deleteSource(srcName)

    if args.gzip:
        os.system("gzip -9 %s" % args.outfile)
6.749307
6.725807
1.003494
for val in catalog_info_dict.values(): val.roi_model.write_xml(val.srcmdl_name) for val in comp_info_dict.values(): for val2 in val.values(): val2.roi_model.write_xml(val2.srcmdl_name)
def _make_xml_files(catalog_info_dict, comp_info_dict)
Make all the xml file for individual components
3.540583
3.401501
1.040888
def build_job_configs(self, args):
    """Hook to build job configurations.

    Produces one job per (catalog, component, source-chunk) triple,
    splitting each catalog into chunks of ``args['nsrc']`` sources.
    """
    job_configs = {}

    components = Component.build_from_yamlfile(args['comp'])
    NAME_FACTORY.update_base_dict(args['data'])

    # Rebuild the catalog dict only when the library file changed.
    if self._comp_dict is None or self._comp_dict_file != args['library']:
        self._comp_dict_file = args['library']
        self._comp_dict = make_catalog_comp_dict(
            sources=self._comp_dict_file,
            basedir=NAME_FACTORY.base_dict['basedir'])
    else:
        print("Using cached catalog dict from %s" % args['library'])

    catalog_info_dict = self._comp_dict['catalog_info_dict']
    comp_info_dict = self._comp_dict['comp_info_dict']
    n_src_per_job = args['nsrc']

    if args['make_xml']:
        SrcmapsCatalog_SG._make_xml_files(catalog_info_dict, comp_info_dict)

    for catalog_name, catalog_info in catalog_info_dict.items():
        n_cat_src = len(catalog_info.catalog.table)
        n_job = int(math.ceil(float(n_cat_src) / n_src_per_job))

        for comp in components:
            zcut = "zmax%i" % comp.zmax
            key = comp.make_key('{ebin_name}_{evtype_name}')
            name_keys = dict(zcut=zcut,
                             sourcekey=catalog_name,
                             ebin=comp.ebin_name,
                             psftype=comp.evtype_name,
                             coordsys=comp.coordsys,
                             irf_ver=NAME_FACTORY.irf_ver(),
                             mktime='none',
                             fullpath=True)

            for i_job in range(n_job):
                full_key = "%s_%02i" % (key, i_job)
                srcmin = i_job * n_src_per_job
                srcmax = min(srcmin + n_src_per_job, n_cat_src)
                outfile = NAME_FACTORY.srcmaps(**name_keys).replace(
                    '.fits', "_%02i.fits" % (i_job))
                logfile = make_nfs_path(outfile.replace('.fits', '.log'))
                job_configs[full_key] = dict(
                    cmap=NAME_FACTORY.ccube(**name_keys),
                    expcube=NAME_FACTORY.ltcube(**name_keys),
                    irfs=NAME_FACTORY.irfs(**name_keys),
                    bexpmap=NAME_FACTORY.bexpcube(**name_keys),
                    outfile=outfile,
                    logfile=logfile,
                    srcmdl=catalog_info.srcmdl_name,
                    evtype=comp.evtype,
                    srcmin=srcmin,
                    srcmax=srcmax)

    return job_configs
4.052053
4.066208
0.996519
def run_analysis(self, argv):
    """Run this analysis.

    Loads a CastroData SED from the input file (FITS, npy or yaml),
    plots it and optionally saves the figure.
    """
    args = self._parser.parse_args(argv)

    exttype = splitext(args.infile)[-1]
    if exttype in ['.fits', '.npy']:
        castro_data = CastroData.create_from_sedfile(args.infile)
    elif exttype in ['.yaml']:
        castro_data = CastroData.create_from_yamlfile(args.infile)
    else:
        # BUG FIX: the original referenced the misspelled name
        # 'extype' here, so this branch raised NameError instead of
        # the intended ValueError.
        raise ValueError("Can not read file type %s for SED" % exttype)

    ylims = [1e-8, 1e-5]

    plot = plotCastro(castro_data, ylims)
    if args.outfile:
        plot[0].savefig(args.outfile)
4.260242
4.275742
0.996375
def build_job_configs(self, args):
    """Hook to build job configurations.

    Produces one plotting job per (target, profile) pair listed in
    the resolved targets yaml file.
    """
    job_configs = {}

    ttype = args['ttype']
    (targets_yaml, sim) = NAME_FACTORY.resolve_targetfile(args)
    # No target file resolved: nothing to schedule.
    if targets_yaml is None:
        return job_configs

    targets = load_yaml(targets_yaml)
    for target_name, target_list in targets.items():
        for targ_prof in target_list:
            name_keys = dict(target_type=ttype,
                             target_name=target_name,
                             profile=targ_prof,
                             fullpath=True)
            targ_key = "%s_%s" % (target_name, targ_prof)
            input_path = NAME_FACTORY.sedfile(**name_keys)
            output_path = input_path.replace('.fits', '.png')
            logfile = make_nfs_path(input_path.replace('.fits', '.log'))
            job_configs[targ_key] = dict(infile=input_path,
                                         outfile=output_path,
                                         logfile=logfile)

    return job_configs
4.019923
4.000948
1.004742
def sed(self, name, **kwargs):
    """Generate a spectral energy distribution (SED) for a source.

    This function will fit the normalization of the source in each
    energy bin.  By default the SED will be generated with the analysis
    energy bins but a custom binning can be defined with the
    ``loge_bins`` parameter.

    Parameters
    ----------
    name : str
        Source name.
    prefix : str
        Optional string that will be prepended to all output files
        (FITS and rendered images).
    loge_bins : `~numpy.ndarray`
        Sequence of energies in log10(E/MeV) defining the edges of the
        energy bins.  If this argument is None then the analysis energy
        bins will be used.  The energies in this sequence must align
        with the bin edges of the underlying analysis instance.
    optimizer : dict
        Dictionary that overrides the default optimizer settings.

    Returns
    -------
    sed : dict
        Dictionary containing output of the SED analysis.
    """
    timer = Timer.create(start=True)
    # Canonicalize the source name.
    name = self.roi.get_source_by_name(name).name

    # Create schema for method configuration
    schema = ConfigSchema(self.defaults['sed'],
                          optimizer=self.defaults['optimizer'])
    schema.add_option('prefix', '')
    schema.add_option('outfile', None, '', str)
    schema.add_option('loge_bins', None, '', list)
    config = utils.create_dict(self.config['sed'],
                               optimizer=self.config['optimizer'])
    config = schema.create_config(config, **kwargs)

    self.logger.info('Computing SED for %s' % name)
    o = self._make_sed(name, **config)
    self.logger.info('Finished SED')

    outfile = config.get('outfile', None)
    if outfile is None:
        # Default filename: workdir/sed_<prefix>_<source>.<ext>
        outfile = utils.format_filename(self.workdir, 'sed',
                                        prefix=[config['prefix'],
                                                name.lower().replace(' ', '_')])
    else:
        outfile = os.path.join(self.workdir, os.path.splitext(outfile)[0])

    o['file'] = None
    if config['write_fits']:
        o['file'] = os.path.basename(outfile) + '.fits'
        self._make_sed_fits(o, outfile + '.fits', **config)

    if config['write_npy']:
        np.save(outfile + '.npy', o)

    if config['make_plots']:
        self._plotter.make_sed_plots(o, **config)

    self.logger.info('Execution time: %.2f s', timer.elapsed_time)
    return o
4.078626
3.758253
1.085245
def run_analysis(self, argv):
    """Run this analysis.

    Builds a binned likelihood for a single diffuse source and saves
    its (partial) source map for energy planes [kmin, kmax).
    """
    args = self._parser.parse_args(argv)

    obs = BinnedAnalysis.BinnedObs(irfs=args.irfs,
                                   expCube=args.expcube,
                                   srcMaps=args.cmap,
                                   binnedExpMap=args.bexpmap)

    # Skip the PSF convolution when --no_psf was given.
    performConvolution = not args.no_psf
    config = BinnedAnalysis.BinnedConfig(
        performConvolution=performConvolution)

    like = BinnedAnalysis.BinnedAnalysis(obs,
                                         optimizer='MINUIT',
                                         srcModel=GtSrcmapsDiffuse.NULL_MODEL,
                                         wmap=None,
                                         config=config)

    source_factory = pyLike.SourceFactory(obs.observation)
    source_factory.readXml(args.srcmdl, BinnedAnalysis._funcFactory,
                           False, True, True)
    source = source_factory.releaseSource(args.source)

    try:
        diffuse_source = pyLike.DiffuseSource.cast(source)
    except TypeError:
        diffuse_source = None

    if diffuse_source is not None:
        # Disable map extrapolation where the source supports it.
        try:
            diffuse_source.mapBaseObject().projmap().setExtrapolation(False)
        except RuntimeError:
            pass

    like.logLike.saveSourceMap_partial(args.outfile, source,
                                       args.kmin, args.kmax)

    if args.gzip:
        os.system("gzip -9 %s" % args.outfile)
7.607407
7.58506
1.002946
def _write_xml(xmlfile, srcs):
    """Save the ROI model as an XML file.

    Parameters
    ----------
    xmlfile : str
        Path of the XML file to write.
    srcs : iterable
        Source objects providing a ``write_xml(parent_element)`` method.
    """
    root = ElementTree.Element('source_library')
    root.set('title', 'source_library')

    for src in srcs:
        src.write_xml(root)

    # BUG FIX: the original opened the file without ever closing it;
    # use a context manager so the handle is released (and the data
    # flushed) deterministically.
    with open(xmlfile, 'w') as output_file:
        output_file.write(utils.prettify_xml(root))
3.511706
3.674078
0.955806
def _handle_component(sourcekey, comp_dict):
    """Make the source objects and write the xml for a component."""
    # Components without a sub-key use the bare sourcekey.
    if comp_dict.comp_key is None:
        fullkey = sourcekey
    else:
        fullkey = "%s_%s" % (sourcekey, comp_dict.comp_key)

    srcdict = make_sources(fullkey, comp_dict)

    # Report which spatial/spectral file backs this model.
    if comp_dict.model_type == 'IsoSource':
        print("Writing xml for %s to %s: %s %s" %
              (fullkey, comp_dict.srcmdl_name,
               comp_dict.model_type, comp_dict.Spectral_Filename))
    elif comp_dict.model_type == 'MapCubeSource':
        print("Writing xml for %s to %s: %s %s" %
              (fullkey, comp_dict.srcmdl_name,
               comp_dict.model_type, comp_dict.Spatial_Filename))

    SrcmapsDiffuse_SG._write_xml(comp_dict.srcmdl_name, srcdict.values())
3.962252
3.640046
1.088517
def _make_xml_files(diffuse_comp_info_dict):
    """Make all the xml files for individual components."""
    # Ensure the output directory exists; tolerate it already existing.
    try:
        os.makedirs('srcmdls')
    except OSError:
        pass

    for sourcekey in sorted(diffuse_comp_info_dict.keys()):
        comp_info = diffuse_comp_info_dict[sourcekey]
        if comp_info.components is None:
            # Monolithic component.
            SrcmapsDiffuse_SG._handle_component(sourcekey, comp_info)
        else:
            # Composite: handle every sub-component.
            for sub_comp_info in comp_info.components.values():
                SrcmapsDiffuse_SG._handle_component(sourcekey, sub_comp_info)
4.08764
3.94386
1.036457
def build_job_configs(self, args):
    """Hook to build job configurations.

    Produces one job per (diffuse component, analysis component,
    energy-plane chunk), where the chunk size is set by the HEALPix
    order of the component.
    """
    job_configs = {}

    components = Component.build_from_yamlfile(args['comp'])
    NAME_FACTORY.update_base_dict(args['data'])

    ret_dict = make_diffuse_comp_info_dict(components=components,
                                           library=args['library'],
                                           basedir='.')
    diffuse_comp_info_dict = ret_dict['comp_info_dict']
    if args['make_xml']:
        SrcmapsDiffuse_SG._make_xml_files(diffuse_comp_info_dict)

    for diffuse_comp_info_key in sorted(diffuse_comp_info_dict.keys()):
        diffuse_comp_info_value = diffuse_comp_info_dict[diffuse_comp_info_key]
        no_psf = diffuse_comp_info_value.no_psf

        for comp in components:
            zcut = "zmax%i" % comp.zmax
            key = comp.make_key('{ebin_name}_{evtype_name}')
            # Composite infos carry per-component sub-entries.
            if diffuse_comp_info_value.components is None:
                sub_comp_info = diffuse_comp_info_value
            else:
                sub_comp_info = diffuse_comp_info_value.get_component_info(comp)

            name_keys = dict(zcut=zcut,
                             sourcekey=sub_comp_info.sourcekey,
                             ebin=comp.ebin_name,
                             psftype=comp.evtype_name,
                             mktime='none',
                             coordsys=comp.coordsys,
                             irf_ver=NAME_FACTORY.irf_ver(),
                             fullpath=True)

            kmin = 0
            kmax = comp.enumbins + 1
            outfile_base = NAME_FACTORY.srcmaps(**name_keys)
            kstep = HPX_ORDER_TO_KSTEP[comp.hpx_order]

            base_dict = dict(cmap=NAME_FACTORY.ccube(**name_keys),
                             expcube=NAME_FACTORY.ltcube(**name_keys),
                             irfs=NAME_FACTORY.irfs(**name_keys),
                             bexpmap=NAME_FACTORY.bexpcube(**name_keys),
                             srcmdl=sub_comp_info.srcmdl_name,
                             source=sub_comp_info.source_name,
                             no_psf=no_psf,
                             evtype=comp.evtype)

            # Negative step means "all planes in one job".
            if kstep < 0:
                kstep = kmax

            for k in range(kmin, kmax, kstep):
                full_key = "%s_%s_%02i" % (diffuse_comp_info_key, key, k)
                khi = min(kmax, k + kstep)
                full_dict = base_dict.copy()
                outfile = outfile_base.replace('.fits', '_%02i.fits' % k)
                logfile = make_nfs_path(
                    outfile_base.replace('.fits', '_%02i.log' % k))
                full_dict.update(dict(outfile=outfile,
                                      kmin=k,
                                      kmax=khi,
                                      logfile=logfile))
                job_configs[full_key] = full_dict

    return job_configs
4.047031
4.040381
1.001646
def find_peaks(input_map, threshold, min_separation=0.5):
    """Find peaks in a 2-D map object that have amplitude larger than
    `threshold` and lie a distance at least `min_separation` from another
    peak of larger amplitude.  The implementation of this method uses
    `~scipy.ndimage.filters.maximum_filter`.

    Parameters
    ----------
    input_map : `~gammapy.maps.WcsMap`

    threshold : float

    min_separation : float
       Radius of region size in degrees.  Sets the minimum allowable
       separation between peaks.

    Returns
    -------
    peaks : list
       List of dictionaries containing the location and amplitude of
       each peak, sorted by decreasing amplitude.
    """
    data = input_map.data

    # Floor the separation at twice the pixel size so the footprint
    # always spans at least a few pixels.
    cdelt = max(input_map.geom.wcs.wcs.cdelt)
    min_separation = max(min_separation, 2 * cdelt)

    region_size_pix = int(min_separation / cdelt)
    region_size_pix = max(3, region_size_pix)

    # Circular footprint: pixels closer than min_separation (in deg).
    deltaxy = utils.make_pixel_distance(region_size_pix * 2 + 3)
    deltaxy *= max(input_map.geom.wcs.wcs.cdelt)
    region = deltaxy < min_separation

    # A pixel is a local maximum if it equals the maximum over the
    # footprint centered on it; then suppress sub-threshold pixels.
    local_max = maximum_filter(data, footprint=region) == data
    local_max[data < threshold] = False

    labeled, num_objects = scipy.ndimage.label(local_max)
    slices = scipy.ndimage.find_objects(labeled)

    peaks = []
    for s in slices:
        # s[1] indexes x (columns), s[0] indexes y (rows).
        skydir = SkyCoord.from_pixel(s[1].start, s[0].start,
                                     input_map.geom.wcs)
        peaks.append({'ix': s[1].start,
                      'iy': s[0].start,
                      'skydir': skydir,
                      'amp': data[s[0].start, s[1].start]})

    return sorted(peaks, key=lambda t: t['amp'], reverse=True)
Find peaks in a 2-D map object that have amplitude larger than `threshold` and lie a distance at least `min_separation` from another peak of larger amplitude. The implementation of this method uses `~scipy.ndimage.filters.maximum_filter`. Parameters ---------- input_map : `~gammapy.maps.WcsMap` threshold : float min_separation : float Radius of region size in degrees. Sets the minimum allowable separation between peaks. Returns ------- peaks : list List of dictionaries containing the location and amplitude of each peak.
2.786513
2.704358
1.030378
def estimate_pos_and_err_parabolic(tsvals):
    """Solve for the position and uncertainty of source in one dimension
    assuming that you are near the maximum and the errors are parabolic

    Parameters
    ----------
    tsvals : `~numpy.ndarray`
        The TS values at the maximum TS, and for each pixel on either side

    Returns
    -------
    The position and uncertainty of the source, in pixel units
    w.r.t. the center of the maximum pixel
    """
    left, center, right = tsvals[0], tsvals[1], tsvals[2]
    # Finite-difference slope and (negative) curvature of the parabola
    # through the three samples at x = -1, 0, +1.
    slope = right - left
    curvature = 2. * center - left - right
    position = slope / (2 * curvature)
    error = np.sqrt(2 / curvature)
    return position, error
Solve for the position and uncertainty of source in one dimension assuming that you are near the maximum and the errors are parabolic Parameters ---------- tsvals : `~numpy.ndarray` The TS values at the maximum TS, and for each pixel on either side Returns ------- The position and uncertainty of the source, in pixel units w.r.t. the center of the maximum pixel
3.747186
4.657733
0.804509
def refine_peak(tsmap, pix):
    """Solve for the position and uncertainty of source assuming that you
    are near the maximum and the errors are parabolic

    Parameters
    ----------
    tsmap : `~numpy.ndarray`
        Array with the TS data.

    pix : tuple
        (x, y) pixel indices of the maximum.

    Returns
    -------
    The position and uncertainty of the source, in pixel units
    w.r.t. the center of the maximum pixel
    """
    # Note the annoying WCS convention: axis 0 is y, axis 1 is x.
    ny, nx = tsmap.shape[0], tsmap.shape[1]
    ix, iy = pix[0], pix[1]

    # On a map edge there is no three-point stencil; flag with err = -1.
    if ix in (0, nx - 1):
        xval, xerr = float(ix), -1
    else:
        xval, xerr = estimate_pos_and_err_parabolic(tsmap[iy, ix - 1:ix + 2])
        xval += float(ix)

    if iy in (0, ny - 1):
        yval, yerr = float(iy), -1
    else:
        yval, yerr = estimate_pos_and_err_parabolic(tsmap[iy - 1:iy + 2, ix])
        yval += float(iy)

    return (xval, yval), (xerr, yerr)
Solve for the position and uncertainty of source assuming that you are near the maximum and the errors are parabolic Parameters ---------- tsmap : `~numpy.ndarray` Array with the TS data. Returns ------- The position and uncertainty of the source, in pixel units w.r.t. the center of the maximum pixel
2.144243
2.104939
1.018672
def get_skydir_distance_mask(src_skydir, skydir, dist, min_dist=None,
                             square=False, coordsys='CEL'):
    """Retrieve sources within a certain angular distance of an
    (ra,dec) coordinate.  This function supports two types of
    geometric selections: circular (square=False) and square
    (square=True).  The circular selection finds all sources with a given
    angular distance of the target position.  The square selection finds
    sources within an ROI-like region of size R x R where R = 2 x dist.

    Parameters
    ----------
    src_skydir : `~astropy.coordinates.SkyCoord`
       Array of sky directions.

    skydir : `~astropy.coordinates.SkyCoord`
       Sky direction with respect to which the selection will be applied.

    dist : float
       Maximum distance in degrees from the sky coordinate.

    square : bool
       Choose whether to apply a circular or square selection.

    coordsys : str
       Coordinate system to use when applying a selection with square=True.
    """
    # No distance means no radial cut: anything on the sky passes.
    if dist is None:
        dist = 180.

    if not square:
        dtheta = src_skydir.separation(skydir).rad
    else:
        if coordsys == 'CEL':
            lon, lat = src_skydir.ra.rad, src_skydir.dec.rad
        elif coordsys == 'GAL':
            lon, lat = src_skydir.galactic.l.rad, src_skydir.galactic.b.rad
        else:
            raise Exception('Unrecognized coordinate system: %s' % coordsys)
        dtheta = get_linear_dist(skydir, lon, lat, coordsys=coordsys)

    msk = dtheta < np.radians(dist)
    if min_dist is not None:
        msk &= dtheta > np.radians(min_dist)
    return msk
Retrieve sources within a certain angular distance of an (ra,dec) coordinate. This function supports two types of geometric selections: circular (square=False) and square (square=True). The circular selection finds all sources with a given angular distance of the target position. The square selection finds sources within an ROI-like region of size R x R where R = 2 x dist. Parameters ---------- src_skydir : `~astropy.coordinates.SkyCoord` Array of sky directions. skydir : `~astropy.coordinates.SkyCoord` Sky direction with respect to which the selection will be applied. dist : float Maximum distance in degrees from the sky coordinate. square : bool Choose whether to apply a circular or square selection. coordsys : str Coordinate system to use when applying a selection with square=True.
1.976462
2.165616
0.912656
def spectral_pars_from_catalog(cat):
    """Create spectral parameters from 3FGL catalog columns."""
    spectrum_type = cat['SpectrumType']
    pars = get_function_defaults(spectrum_type)
    par_idxs = {k: i for i, k in
                enumerate(get_function_par_names(spectrum_type))}
    # Copy the catalog parameter values into the default dicts.
    for k in pars:
        pars[k]['value'] = cat['param_values'][par_idxs[k]]

    def _finalize(name, *flags):
        # Normalize one parameter dict in place via make_parameter_dict.
        pars[name] = make_parameter_dict(pars[name], *flags)

    if spectrum_type == 'PowerLaw':
        # Flip the sign convention of the index relative to the catalog.
        pars['Index']['value'] *= -1.0
        pars['Index']['scale'] = -1.0
        pars['Scale']['scale'] = 1.0
        pars['Index']['max'] = max(5.0, pars['Index']['value'] + 1.0)
        pars['Index']['min'] = min(0.0, pars['Index']['value'] - 1.0)
        _finalize('Prefactor')
        _finalize('Scale', True, False)
        _finalize('Index', False, False)
    elif spectrum_type == 'LogParabola':
        _finalize('norm', False, True)
        _finalize('Eb', True, False)
        _finalize('alpha', False, False)
        _finalize('beta', False, False)
    elif spectrum_type in ('PLSuperExpCutoff', 'PLSuperExpCutoff2'):
        # The two cutoff forms are identical apart from the name of the
        # cutoff parameter.
        pars['Index1']['value'] *= -1.0
        pars['Index1']['scale'] = -1.0
        pars['Index2']['scale'] = 1.0
        _finalize('Prefactor')
        _finalize('Scale', True, False)
        _finalize('Index1', False, False)
        _finalize('Index2', False, False)
        if spectrum_type == 'PLSuperExpCutoff':
            _finalize('Cutoff', False, True)
        else:
            _finalize('Expfactor', False, True)
    else:
        raise Exception('Unsupported spectral type:' + spectrum_type)
    return pars
Create spectral parameters from 3FGL catalog columns.
1.702803
1.701548
1.000738
def is_free(self):
    """returns True if any of the spectral model parameters is set to free, else False"""
    # any() short-circuits and avoids the needless numpy round-trip of
    # the original bool(np.array([...]).sum()).  int() keeps the
    # original coercion of string flags such as "1"/"0".
    return any(int(value.get("free", False))
               for value in self.spectral_pars.values())
returns True if any of the spectral model parameters is set to free, else False
9.400628
5.628048
1.670318
def set_position(self, skydir):
    """Set the position of the source.

    Parameters
    ----------
    skydir : `~astropy.coordinates.SkyCoord`
        New source position; a raw (ra, dec) pair in degrees is also
        accepted.
    """
    # Promote a bare (ra, dec) pair to a SkyCoord.
    if not isinstance(skydir, SkyCoord):
        skydir = SkyCoord(ra=skydir[0], dec=skydir[1], unit=u.deg)

    # Collapse array-valued coordinates to their first element.
    if not skydir.isscalar:
        skydir = np.ravel(skydir)[0]

    icrs = skydir.icrs
    self._set_radec(np.array([icrs.ra.deg, icrs.dec.deg]))
Set the position of the source. Parameters ---------- skydir : `~astropy.coordinates.SkyCoord`
2.186761
2.470734
0.885065
def skydir(self):
    """Return a SkyCoord representation of the source position.

    Returns
    -------
    skydir : `~astropy.coordinates.SkyCoord`
    """
    ra, dec = self.radec[0], self.radec[1]
    return SkyCoord(ra * u.deg, dec * u.deg)
Return a SkyCoord representation of the source position. Returns ------- skydir : `~astropy.coordinates.SkyCoord`
3.296856
3.787069
0.870556
def create_from_dict(cls, src_dict, roi_skydir=None, rescale=False):
    """Create a source object from a python dictionary.

    Parameters
    ----------
    src_dict : dict
       Dictionary defining the properties of the source.

    roi_skydir : `~astropy.coordinates.SkyCoord`, optional
       Passed to ``wcs_utils.get_target_skydir`` when resolving the
       source position from the dictionary.

    rescale : bool
       Forwarded to ``create_pars_from_dict`` for the spectral
       parameters.
    """
    src_dict = copy.deepcopy(src_dict)

    # Fill in defaults for any missing model fields.
    src_dict.setdefault('SpatialModel', 'PointSource')
    src_dict.setdefault('Spectrum_Filename', None)
    src_dict.setdefault('SpectrumType', 'PowerLaw')
    src_dict['SpatialType'] = get_spatial_type(src_dict['SpatialModel'])

    spectrum_type = src_dict['SpectrumType']
    spatial_type = src_dict['SpatialType']

    spectral_pars = src_dict.pop('spectral_pars', {})
    spatial_pars = src_dict.pop('spatial_pars', {})

    if not spectral_pars:
        # No explicit parameter block: pull parameters from the
        # top-level dictionary and free the normalization parameter.
        spectral_pars = extract_pars_from_dict(spectrum_type, src_dict)
        norm_par_name = get_function_norm_par_name(spectrum_type)
        if norm_par_name is not None:
            spectral_pars[norm_par_name].setdefault('free', True)

    if not spatial_pars:
        spatial_pars = extract_pars_from_dict(spatial_type, src_dict)
        # These entries are not spatial-model parameters; drop them.
        for k in ['RA', 'DEC', 'Prefactor']:
            if k in spatial_pars:
                del spatial_pars[k]

    spectral_pars = create_pars_from_dict(spectrum_type, spectral_pars,
                                          rescale)
    spatial_pars = create_pars_from_dict(spatial_type, spatial_pars,
                                         False)

    # Accept 'file' as an alias for the spectrum file.
    if 'file' in src_dict:
        src_dict['Spectrum_Filename'] = src_dict.pop('file')

    if spectrum_type == 'DMFitFunction' and src_dict['Spectrum_Filename'] is None:
        src_dict['Spectrum_Filename'] = os.path.join('$FERMIPY_DATA_DIR',
                                                     'gammamc_dif.dat')

    src_dict['spectral_pars'] = cast_pars_dict(spectral_pars)
    src_dict['spatial_pars'] = cast_pars_dict(spatial_pars)

    # 'name' takes precedence over 'Source_Name'.
    if 'name' in src_dict:
        name = src_dict['name']
        src_dict['Source_Name'] = src_dict.pop('name')
    elif 'Source_Name' in src_dict:
        name = src_dict['Source_Name']
    else:
        raise Exception('Source name undefined.')

    skydir = wcs_utils.get_target_skydir(src_dict, roi_skydir)
    src_dict['RAJ2000'] = skydir.ra.deg
    src_dict['DEJ2000'] = skydir.dec.deg

    radec = np.array([skydir.ra.deg, skydir.dec.deg])

    return cls(name, src_dict, radec=radec)
Create a source object from a python dictionary. Parameters ---------- src_dict : dict Dictionary defining the properties of the source.
2.341515
2.376961
0.985088
def create_from_xmlfile(cls, xmlfile, extdir=None):
    """Create a Source object from an XML file.

    Parameters
    ----------
    xmlfile : str
        Path to XML file.

    extdir : str
        Path to the extended source archive.
    """
    tree = ElementTree.ElementTree(file=xmlfile)
    elements = tree.getroot().findall('source')
    if not elements:
        raise Exception('No sources found.')
    # Only the first <source> element is used.
    return cls.create_from_xml(elements[0], extdir=extdir)
Create a Source object from an XML file. Parameters ---------- xmlfile : str Path to XML file. extdir : str Path to the extended source archive.
2.727425
3.16678
0.861261
def write_xml(self, root):
    """Write this source to an XML node.

    Parameters
    ----------
    root : XML element
        Node that will receive the new <source> element.
    """
    if not self.extended:
        # Point source: SkyDirFunction spatial model.
        try:
            source_element = utils.create_xml_element(root, 'source',
                                                      dict(name=self['Source_Name'],
                                                           type='PointSource'))
        except TypeError as msg:
            print (self['Source_Name'], self)
            raise TypeError(msg)

        spat_el = ElementTree.SubElement(source_element, 'spatialModel')
        spat_el.set('type', 'SkyDirFunction')

    elif self['SpatialType'] == 'SpatialMap':
        # Extended source defined by a template map file.
        source_element = utils.create_xml_element(root, 'source',
                                                  dict(name=self['Source_Name'],
                                                       type='DiffuseSource'))
        filename = utils.path_to_xmlpath(self['Spatial_Filename'])
        spat_el = utils.create_xml_element(source_element, 'spatialModel',
                                           dict(map_based_integral='True',
                                                type='SpatialMap',
                                                file=filename))
    else:
        # Extended source defined by an analytic spatial function.
        source_element = utils.create_xml_element(root, 'source',
                                                  dict(name=self['Source_Name'],
                                                       type='DiffuseSource'))
        spat_el = utils.create_xml_element(source_element, 'spatialModel',
                                           dict(type=self['SpatialType']))

    # Spatial parameters.
    for k, v in self.spatial_pars.items():
        utils.create_xml_element(spat_el, 'parameter', v)

    # Spectrum element and its parameters.
    el = ElementTree.SubElement(source_element, 'spectrum')

    stype = self['SpectrumType'].strip()
    el.set('type', stype)

    if self['Spectrum_Filename'] is not None:
        filename = utils.path_to_xmlpath(self['Spectrum_Filename'])
        el.set('file', filename)

    for k, v in self.spectral_pars.items():
        utils.create_xml_element(el, 'parameter', v)
Write this source to an XML node.
2.609517
2.583923
1.009905
def clear(self):
    """Clear the contents of the ROI."""
    # Reset the source registries, the name-lookup index and the
    # per-source radius cache.
    self._srcs, self._diffuse_srcs = [], []
    self._src_dict = collections.defaultdict(list)
    self._src_radius = []
Clear the contents of the ROI.
8.443079
8.404302
1.004614
diffuse_xmls = config.get('diffuse_xml') srcs_out = [] for diffuse_xml in diffuse_xmls: srcs_out += self.load_xml(diffuse_xml, coordsys=config.get('coordsys', 'CEL')) return srcs_out
def _create_diffuse_src_from_xml(self, config, src_type='FileFunction')
Load sources from an XML file.
4.46479
4.087965
1.092179
def create_source(self, name, src_dict, build_index=True,
                  merge_sources=True, rescale=True):
    """Add a new source to the ROI model from a dictionary or an
    existing source object.

    Parameters
    ----------
    name : str

    src_dict : dict or `~fermipy.roi_model.Source`

    build_index : bool
        Rebuild the source index after loading.

    merge_sources : bool
        Merge with an existing matching source instead of duplicating.

    Returns
    -------
    src : `~fermipy.roi_model.Source`
    """
    src_dict = copy.deepcopy(src_dict)

    if isinstance(src_dict, dict):
        src_dict['name'] = name
        src = Model.create_from_dict(src_dict, self.skydir,
                                     rescale=rescale)
    else:
        src = src_dict
        src.set_name(name)

    # Only full Source objects carry ROI geometry information.
    if isinstance(src, Source):
        src.set_roi_direction(self.skydir)
        src.set_roi_geom(self.geom)

    self.load_source(src, build_index=build_index,
                     merge_sources=merge_sources)

    return self.get_source_by_name(name)
Add a new source to the ROI model from a dictionary or an existing source object. Parameters ---------- name : str src_dict : dict or `~fermipy.roi_model.Source` Returns ------- src : `~fermipy.roi_model.Source`
2.802246
2.749707
1.019107
def load_sources(self, sources):
    """Delete all sources in the ROI and load the input source list."""
    self.clear()
    for entry in sources:
        # Dictionaries are promoted to Model objects before loading.
        if isinstance(entry, dict):
            entry = Model.create_from_dict(entry)
        self.load_source(entry, build_index=False)
    # Rebuild the lookup index once after the bulk load.
    self._build_src_index()
Delete all sources in the ROI and load the input source list.
6.099519
5.950741
1.025002
def load_source(self, src, build_index=True, merge_sources=True,
                **kwargs):
    """Load a single source.

    Parameters
    ----------
    src : `~fermipy.roi_model.Source`
        Source object that will be added to the ROI.

    merge_sources : bool
        When a source matches an existing source in the model
        update that source with the properties of the new source.

    build_index : bool
        Re-make the source index after loading this source.
    """
    src = copy.deepcopy(src)
    name = src.name.replace(' ', '').lower()

    # Optionally skip sources lying too close to an existing one.
    min_sep = kwargs.get('min_separation', None)
    if min_sep is not None:
        sep = src.skydir.separation(self._src_skydir).deg
        if len(sep) > 0 and np.min(sep) < min_sep:
            return

    match_srcs = self.match_source(src)

    if len(match_srcs) == 1:
        # self.logger.debug('Found matching source for %s : %s',
        #                   src.name, match_srcs[0].name)
        if merge_sources:
            match_srcs[0].update_from_source(src)
        else:
            match_srcs[0].add_name(src.name)
        self._add_source_alias(src.name.replace(' ', '').lower(),
                               match_srcs[0])
        return
    elif len(match_srcs) > 1:
        # FIX: was ``> 2``, which silently fell through and loaded a
        # duplicate when exactly two sources matched.
        raise Exception('Multiple sources with name %s' % name)

    self._add_source_alias(src.name, src)
    for name in src.names:
        self._add_source_alias(name.replace(' ', '').lower(), src)

    if isinstance(src, Source):
        self._srcs.append(src)
    else:
        self._diffuse_srcs.append(src)

    if build_index:
        self._build_src_index()
Load a single source. Parameters ---------- src : `~fermipy.roi_model.Source` Source object that will be added to the ROI. merge_sources : bool When a source matches an existing source in the model update that source with the properties of the new source. build_index : bool Re-make the source index after loading this source.
2.753245
2.616072
1.052434
def match_source(self, src):
    """Look for source or sources in the model that match the given
    source.  Sources are matched by name and any association columns
    defined in the assoc_xmatch_columns parameter.
    """
    # Collect the candidate names: the source name plus any non-empty
    # association names from the configured cross-match columns.
    candidate_names = [src.name]
    for col in self.config['assoc_xmatch_columns']:
        if col in src.assoc and src.assoc[col]:
            candidate_names.append(src.assoc[col])

    matches = []
    for cand in candidate_names:
        key = cand.replace(' ', '').lower()
        if key not in self._src_dict:
            continue
        for s in self._src_dict[key]:
            if s not in matches:
                matches.append(s)
    return matches
Look for source or sources in the model that match the given source. Sources are matched by name and any association columns defined in the assoc_xmatch_columns parameter.
4.411351
2.933792
1.503635
def load(self, **kwargs):
    """Load both point source and diffuse components.

    Parameters
    ----------
    coordsys : str
        Coordinate system for catalog selections (default 'CEL').
    extdir : str
        Extended-source archive directory.
    srcname : str
        Restrict FITS-catalog loading to this source name.
    """
    coordsys = kwargs.get('coordsys', 'CEL')
    extdir = kwargs.get('extdir', self.extdir)
    srcname = kwargs.get('srcname', None)

    self.clear()
    self.load_diffuse_srcs()

    for c in self.config['catalogs']:
        if isinstance(c, catalog.Catalog):
            self.load_existing_catalog(c)
            continue

        # FIX: the original if/elif chain made the trailing
        # "Unrecognized catalog file extension" branch unreachable;
        # every non-XML path was (and still is) treated as FITS.
        if os.path.splitext(c)[1] == '.xml':
            self.load_xml(c, extdir=extdir, coordsys=coordsys)
        else:
            self.load_fits_catalog(c, extdir=extdir, coordsys=coordsys,
                                   srcname=srcname)

    for c in self.config['sources']:
        if 'name' not in c:
            raise Exception(
                'No name field in source dictionary:\n ' + str(c))
        self.create_source(c['name'], c, build_index=False)

    self._build_src_index()
Load both point source and diffuse components.
3.464092
3.330754
1.040032
def create_from_roi_data(cls, datafile):
    """Create an ROI model."""
    # np.load returns a 0-d object array here; flat[0] unwraps the dict.
    contents = np.load(datafile).flat[0]
    roi = cls()
    roi.load_sources(contents['sources'].values())
    return roi
Create an ROI model.
8.773428
8.119504
1.080537
def create(cls, selection, config, **kwargs):
    """Create an ROIModel instance."""
    target = selection['target']
    if target is None:
        # No named target: center the ROI on the selection coordinates.
        target_skydir = wcs_utils.get_target_skydir(selection)
        return cls.create_from_position(target_skydir, config, **kwargs)
    return cls.create_from_source(target, config, **kwargs)
Create an ROIModel instance.
3.982733
3.742224
1.064269
def create_from_position(cls, skydir, config, **kwargs):
    """Create an ROIModel instance centered on a sky direction.

    Parameters
    ----------
    skydir : `~astropy.coordinates.SkyCoord`
        Sky direction on which the ROI will be centered.

    config : dict
        Model configuration dictionary.
    """
    coordsys = kwargs.pop('coordsys', 'CEL')
    return cls(config, skydir=skydir, coordsys=coordsys, **kwargs)
Create an ROIModel instance centered on a sky direction. Parameters ---------- skydir : `~astropy.coordinates.SkyCoord` Sky direction on which the ROI will be centered. config : dict Model configuration dictionary.
3.967361
5.633594
0.704233
def create_from_source(cls, name, config, **kwargs):
    """Create an ROI centered on the given source."""
    coordsys = kwargs.pop('coordsys', 'CEL')

    # Build a throwaway ROI with no radius selection just to resolve
    # the source position by name.
    lookup_roi = cls(config, src_radius=None, src_roiwidth=None,
                     srcname=name, **kwargs)
    src = lookup_roi.get_source_by_name(name)

    return cls.create_from_position(src.skydir, config,
                                    coordsys=coordsys, **kwargs)
Create an ROI centered on the given source.
7.09056
5.76342
1.23027
def get_source_by_name(self, name):
    """Return a single source in the ROI with the given name.  The
    input name string can match any of the strings in the names
    property of the source object.  Case and whitespace are ignored
    when matching name strings.  If no sources are found or multiple
    sources then an exception is thrown.

    Parameters
    ----------
    name : str
       Name string.

    Returns
    -------
    srcs : `~fermipy.roi_model.Model`
       A source object.
    """
    matches = self.get_sources_by_name(name)
    if not matches:
        raise Exception('No source matching name: ' + name)
    if len(matches) > 1:
        raise Exception('Multiple sources matching name: ' + name)
    return matches[0]
Return a single source in the ROI with the given name. The input name string can match any of the strings in the names property of the source object. Case and whitespace are ignored when matching name strings. If no sources are found or multiple sources then an exception is thrown. Parameters ---------- name : str Name string. Returns ------- srcs : `~fermipy.roi_model.Model` A source object.
1.942195
2.056423
0.944453
def get_sources_by_name(self, name):
    """Return a list of sources in the ROI matching the given name.
    The input name string can match any of the strings in the names
    property of the source object.  Case and whitespace are ignored
    when matching name strings.

    Parameters
    ----------
    name : str

    Returns
    -------
    srcs : list
       A list of `~fermipy.roi_model.Model` objects.
    """
    key = name.replace(' ', '').lower()
    # Membership test (not indexing) -- _src_dict is a defaultdict and
    # a plain lookup would silently create an empty entry.
    if key not in self._src_dict:
        raise Exception('No source matching name: ' + name)
    return list(self._src_dict[key])
Return a list of sources in the ROI matching the given name. The input name string can match any of the strings in the names property of the source object. Case and whitespace are ignored when matching name strings. Parameters ---------- name : str Returns ------- srcs : list A list of `~fermipy.roi_model.Model` objects.
4.093591
4.327816
0.945879
def get_sources(self, skydir=None, distance=None, cuts=None,
                minmax_ts=None, minmax_npred=None, exclude=None,
                square=False, coordsys='CEL', names=None):
    """Retrieve list of source objects satisfying the following
    selections:

    * Angular separation from ``skydir`` or ROI center (if
      ``skydir`` is None) less than ``distance``.

    * Cuts on source properties defined in ``cuts`` list.

    * TS and Npred in range specified by ``minmax_ts`` and
      ``minmax_npred``.

    * Name matching a value in ``names``

    Sources can be excluded from the selection by adding their
    name to the ``exclude`` list.

    Returns
    -------
    srcs : list
        List of source objects.
    """
    if skydir is None:
        skydir = self.skydir
    if exclude is None:
        exclude = []

    rsrc, srcs = self.get_sources_by_position(skydir, distance,
                                              square=square,
                                              coordsys=coordsys)
    selected = []
    for s in srcs + self.diffuse_sources:
        # Guard clauses: skip anything failing a selection.
        if names and s.name not in names:
            continue
        if s.name in exclude:
            continue
        if not s.check_cuts(cuts):
            continue
        if not utils.apply_minmax_selection(s['ts'], minmax_ts):
            continue
        if not utils.apply_minmax_selection(s['npred'], minmax_npred):
            continue
        selected.append(s)
    return selected
Retrieve list of source objects satisfying the following selections: * Angular separation from ``skydir`` or ROI center (if ``skydir`` is None) less than ``distance``. * Cuts on source properties defined in ``cuts`` list. * TS and Npred in range specified by ``minmax_ts`` and ``minmax_npred``. * Name matching a value in ``names`` Sources can be excluded from the selection by adding their name to the ``exclude`` list. Returns ------- srcs : list List of source objects.
3.035444
3.092651
0.981502
def get_sources_by_position(self, skydir, dist, min_dist=None,
                            square=False, coordsys='CEL'):
    """Retrieve sources within a certain angular distance of a sky
    coordinate.  This function supports two types of geometric
    selections: circular (square=False) and square (square=True).
    The circular selection finds all sources with a given angular
    distance of the target position.  The square selection finds
    sources within an ROI-like region of size R x R where R = 2 x dist.

    Parameters
    ----------
    skydir : `~astropy.coordinates.SkyCoord`
        Sky direction with respect to which the selection will be applied.

    dist : float
        Maximum distance in degrees from the sky coordinate.

    square : bool
        Choose whether to apply a circular or square selection.

    coordsys : str
        Coordinate system to use when applying a selection with square=True.
    """
    msk = get_skydir_distance_mask(self._src_skydir, skydir, dist,
                                   min_dist=min_dist, square=square,
                                   coordsys=coordsys)

    sep = self._src_skydir.separation(skydir).deg[msk]
    selected = [self._srcs[i] for i in np.nonzero(msk)[0]]

    # Order sources by increasing angular separation.
    order = np.argsort(sep)
    return sep[order], [selected[i] for i in order]
Retrieve sources within a certain angular distance of a sky coordinate. This function supports two types of geometric selections: circular (square=False) and square (square=True). The circular selection finds all sources with a given angular distance of the target position. The square selection finds sources within an ROI-like region of size R x R where R = 2 x dist. Parameters ---------- skydir : `~astropy.coordinates.SkyCoord` Sky direction with respect to which the selection will be applied. dist : float Maximum distance in degrees from the sky coordinate. square : bool Choose whether to apply a circular or square selection. coordsys : str Coordinate system to use when applying a selection with square=True.
2.516547
3.160969
0.796132
def load_fits_catalog(self, name, **kwargs):
    """Load sources from a FITS catalog file.

    Parameters
    ----------
    name : str
        Catalog name or path to a catalog FITS file.
    """
    # EAC split this function to make it easier to load an existing catalog.
    # Delegating keeps FITS files and pre-built catalogs on one code path.
    self.load_existing_catalog(catalog.Catalog.create(name), **kwargs)
Load sources from a FITS catalog file. Parameters ---------- name : str Catalog name or path to a catalog FITS file.
13.249191
16.302486
0.81271
def load_existing_catalog(self, cat, **kwargs):
    """Load sources from an existing catalog object.

    Parameters
    ----------
    cat : `~fermipy.catalog.Catalog`
        Catalog object.
    """
    coordsys = kwargs.get('coordsys', 'CEL')
    extdir = kwargs.get('extdir', self.extdir)
    srcname = kwargs.get('srcname', None)

    # Combine the radial and ROI-box selections.
    m0 = get_skydir_distance_mask(cat.skydir, self.skydir,
                                  self.config['src_radius'])
    m1 = get_skydir_distance_mask(cat.skydir, self.skydir,
                                  self.config['src_radius_roi'],
                                  square=True, coordsys=coordsys)
    m = (m0 & m1)
    if srcname is not None:
        m &= utils.find_rows_by_string(cat.table, [srcname],
                                       self.src_name_cols)

    # Per-source offsets from the ROI center, in both frames.
    offset = self.skydir.separation(cat.skydir).deg
    offset_cel = wcs_utils.sky_to_offset(self.skydir,
                                         cat.radec[:, 0], cat.radec[:, 1],
                                         'CEL')
    offset_gal = wcs_utils.sky_to_offset(self.skydir,
                                         cat.glonlat[:, 0],
                                         cat.glonlat[:, 1],
                                         'GAL')

    for i, (row, radec) in enumerate(zip(cat.table[m], cat.radec[m])):

        catalog_dict = catalog.row_to_dict(row)

        src_dict = {'catalog': catalog_dict}
        src_dict['Source_Name'] = row['Source_Name']
        src_dict['SpectrumType'] = row['SpectrumType']

        if row['extended']:
            src_dict['SourceType'] = 'DiffuseSource'
            src_dict['SpatialType'] = str(row['Spatial_Function'])
            src_dict['SpatialModel'] = str(row['Spatial_Function'])

            # Search for the spatial template in the configured and
            # per-row extended-source directories.
            search_dirs = []
            if extdir is not None:
                search_dirs += [extdir, os.path.join(extdir, 'Templates')]
            search_dirs += [row['extdir'],
                            os.path.join(row['extdir'], 'Templates')]

            if src_dict['SpatialType'] == 'SpatialMap':
                src_dict['Spatial_Filename'] = utils.resolve_file_path(
                    row['Spatial_Filename'],
                    search_dirs=search_dirs)

        else:
            src_dict['SourceType'] = 'PointSource'
            src_dict['SpatialType'] = 'SkyDirFunction'
            src_dict['SpatialModel'] = 'PointSource'

        src_dict['spectral_pars'] = spectral_pars_from_catalog(
            catalog_dict)
        src_dict['spatial_pars'] = spatial_pars_from_catalog(catalog_dict)

        src = Source(src_dict['Source_Name'], src_dict, radec=radec)
        src.data['offset'] = offset[m][i]
        src.data['offset_ra'] = offset_cel[:, 0][m][i]
        src.data['offset_dec'] = offset_cel[:, 1][m][i]
        src.data['offset_glon'] = offset_gal[:, 0][m][i]
        src.data['offset_glat'] = offset_gal[:, 1][m][i]
        self.load_source(src, False,
                         merge_sources=self.config['merge_sources'])

    self._build_src_index()
Load sources from an existing catalog object. Parameters ---------- cat : `~fermipy.catalog.Catalog` Catalog object.
2.908007
2.87496
1.011495
def load_xml(self, xmlfile, **kwargs):
    """Load sources from an XML file."""
    extdir = kwargs.get('extdir', self.extdir)
    coordsys = kwargs.get('coordsys', 'CEL')
    # Fall back to the packaged catalog directory for bare filenames.
    if not os.path.isfile(xmlfile):
        xmlfile = os.path.join(fermipy.PACKAGE_DATA, 'catalogs', xmlfile)

    root = ElementTree.ElementTree(file=xmlfile).getroot()
    diffuse_srcs = []
    srcs = []
    ra, dec = [], []

    # Split point-like and diffuse sources; only the former carry
    # per-source sky positions.
    for s in root.findall('source'):
        src = Source.create_from_xml(s, extdir=extdir)
        if src.diffuse:
            diffuse_srcs += [src]
        else:
            srcs += [src]
            ra += [src['RAJ2000']]
            dec += [src['DEJ2000']]

    src_skydir = SkyCoord(ra=np.array(ra) * u.deg,
                          dec=np.array(dec) * u.deg)
    radec = np.vstack((src_skydir.ra.deg, src_skydir.dec.deg)).T
    glonlat = np.vstack((src_skydir.galactic.l.deg,
                         src_skydir.galactic.b.deg)).T

    offset = self.skydir.separation(src_skydir).deg
    offset_cel = wcs_utils.sky_to_offset(self.skydir,
                                         radec[:, 0], radec[:, 1], 'CEL')
    offset_gal = wcs_utils.sky_to_offset(self.skydir,
                                         glonlat[:, 0], glonlat[:, 1],
                                         'GAL')

    # Keep only sources inside the radial and ROI-box selections.
    m0 = get_skydir_distance_mask(src_skydir, self.skydir,
                                  self.config['src_radius'])
    m1 = get_skydir_distance_mask(src_skydir, self.skydir,
                                  self.config['src_radius_roi'],
                                  square=True, coordsys=coordsys)
    m = (m0 & m1)
    srcs = np.array(srcs)[m]
    for i, s in enumerate(srcs):
        s.data['offset'] = offset[m][i]
        s.data['offset_ra'] = offset_cel[:, 0][m][i]
        s.data['offset_dec'] = offset_cel[:, 1][m][i]
        s.data['offset_glon'] = offset_gal[:, 0][m][i]
        s.data['offset_glat'] = offset_gal[:, 1][m][i]
        self.load_source(s, False,
                         merge_sources=self.config['merge_sources'])

    # Diffuse sources bypass the spatial selection.
    for i, s in enumerate(diffuse_srcs):
        self.load_source(s, False,
                         merge_sources=self.config['merge_sources'])

    self._build_src_index()

    return srcs
Load sources from an XML file.
2.321053
2.292014
1.012669
def _build_src_index(self):
    """Build indices for fast lookup of a source given its name or
    coordinates."""
    # Keep point sources ordered by offset from the ROI center.
    self._srcs = sorted(self._srcs, key=lambda s: s['offset'])

    radec = np.zeros((2, len(self._srcs)))
    for i, src in enumerate(self._srcs):
        radec[:, i] = src.radec

    self._src_skydir = SkyCoord(ra=radec[0], dec=radec[1], unit=u.deg)
    self._src_radius = self._src_skydir.separation(self.skydir)
Build indices for fast lookup of a source given its name or coordinates.
2.848096
2.716297
1.048522
def write_xml(self, xmlfile, config=None):
    """Save the ROI model as an XML file.

    Parameters
    ----------
    xmlfile : str
        Output file path.

    config : dict, optional
        When given, the diffuse sources are regenerated from this
        configuration and updated with the spectral parameters of the
        current diffuse sources before being written.
    """
    root = ElementTree.Element('source_library')
    root.set('title', 'source_library')

    for s in self._srcs:
        s.write_xml(root)

    if config is not None:
        srcs = self.create_diffuse_srcs(config)
        diffuse_srcs = {s.name: s for s in srcs}
        for s in self._diffuse_srcs:
            src = copy.deepcopy(diffuse_srcs.get(s.name, s))
            src.update_spectral_pars(s.spectral_pars)
            src.write_xml(root)
    else:
        for s in self._diffuse_srcs:
            s.write_xml(root)

    # FIX: use a context manager so the file handle is closed even if
    # serialization raises (the original leaked the open handle).
    with open(xmlfile, 'w') as output_file:
        output_file.write(utils.prettify_xml(root))
Save the ROI model as an XML file.
2.940224
2.921923
1.006263
def create_table(self, names=None):
    """Create an astropy Table object with the contents of the ROI model."""
    # The scan columns must accommodate the largest likelihood scan
    # among all sources.
    scan_shape = (1,)
    for src in self._srcs:
        scan_shape = max(scan_shape, src['dloglike_scan'].shape)

    tab = create_source_table(scan_shape)
    if names is None:
        selected = self._srcs
    else:
        selected = [s for s in self._srcs if s.name in names]
    for s in selected:
        s.add_to_table(tab)
    return tab
Create an astropy Table object with the contents of the ROI model.
5.460429
5.374691
1.015952
def write_fits(self, fitsfile):
    """Write the ROI model to a FITS file."""
    table_hdu = fits.table_to_hdu(self.create_table())
    fits_utils.write_hdus([fits.PrimaryHDU(), table_hdu], fitsfile)
Write the ROI model to a FITS file.
3.488774
3.329422
1.047862
def to_ds9(self, free='box', fixed='cross', frame='fk5', color='green',
           header=True):
    """Returns a list of ds9 region definitions

    Parameters
    ----------
    free : str
        one of the supported ds9 point symbols, used for free sources,
        see here: http://ds9.si.edu/doc/ref/region.html

    fixed : str
        as free but for fixed sources

    frame : str
        typically fk5, more to be implemented

    color : str
        color used for symbols (only ds9 compatible colors)

    header : bool
        if True, will prepend a global header line.

    Returns
    -------
    lines : list
        list of regions (and header if requested)
    """
    # todo: add support for extended sources?!
    allowed_symbols = ['circle', 'box', 'diamond', 'cross', 'x',
                       'arrow', 'boxcircle']
    # adding some checks.
    assert free in allowed_symbols, "symbol %s not supported" % free
    assert fixed in allowed_symbols, "symbol %s not supported" % fixed

    lines = ["global color=%s" % color] if header else []
    for src in self.get_sources():
        # self.get_sources will return both Source, but also IsoSource
        # and MapCube, in which case the sources should be ignored
        # (since they are by construction all-sky and have no
        # corresponding ds9 region string)
        if not isinstance(src, Source):
            continue
        # otherwise get ra, dec
        ra, dec = src.radec
        symbol = free if src.is_free else fixed
        lines.append(
            "%s; point( %1.5f, %1.5f) # point=%s text={%s} color=%s"
            % (frame, ra, dec, symbol, src.name, color))
    return lines
Returns a list of ds9 region definitions Parameters ---------- free: bool one of the supported ds9 point symbols, used for free sources, see here: http://ds9.si.edu/doc/ref/region.html fixed: bool as free but for fixed sources frame: str typically fk5, more to be implemented color: str color used for symbols (only ds9 compatible colors) header: bool if True, will prepend a global header line. Returns ------- lines : list list of regions (and header if requested)
6.837582
6.290016
1.087053
def write_ds9region(self, region, *args, **kwargs):
    """Create a ds9 compatible region file from the ROI.

    It calls the `to_ds9` method and write the result to the region
    file.  Only the file name is required.  All other parameters will
    be forwarded to the `to_ds9` method, see the documentation of that
    method for all accepted parameters and options.

    Parameters
    ----------
    region : str
        name of the region file (string)
    """
    content = "\n".join(self.to_ds9(*args, **kwargs))
    with open(region, 'w') as handle:
        handle.write(content)
Create a ds9 compatible region file from the ROI. It calls the `to_ds9` method and write the result to the region file. Only the file name is required. All other parameters will be forwarded to the `to_ds9` method, see the documentation of that method for all accepted parameters and options. Parameters ---------- region : str name of the region file (string)
4.036776
5.034608
0.801805
def select_extended(cat_table):
    """Select only rows representing extended sources from a catalog table"""
    try:
        # Newer catalog format: a row is extended when it carries a
        # non-blank extended-source name.
        flags = [len(entry.strip()) > 0
                 for entry in cat_table['Extended_Source_Name'].data]
        return np.array(flags, bool)
    except KeyError:
        # Older catalog format: a dedicated boolean column.
        return cat_table['Extended']
Select only rows representing extended sources from a catalog table
6.179445
5.081521
1.216062
def make_mask(cat_table, cut):
    """Make a bit mask selecting the rows that pass a selection

    Parameters
    ----------
    cat_table : table-like
        Catalog table indexed by column name.

    cut : dict
        Selection with keys 'cut_var' and optional 'min_val'/'max_val'.
    """
    column = cut['cut_var']
    lo = cut.get('min_val', None)
    hi = cut.get('max_val', None)
    nrows = len(cat_table)

    # A missing bound passes every row.
    lo_mask = np.ones((nrows), bool) if lo is None else cat_table[column] >= lo
    hi_mask = np.ones((nrows), bool) if hi is None else cat_table[column] <= hi
    return lo_mask * hi_mask
Make a bit mask selecting the rows that pass a selection
1.951599
1.955782
0.997861
def select_sources(cat_table, cuts):
    """Select only rows passing a set of cuts from catalog table"""
    full_mask = np.ones((len(cat_table)), bool)
    # AND together the masks for every cut; string keys select the
    # special extended-source cuts.
    for cut in cuts:
        if cut == 'mask_extended':
            full_mask *= mask_extended(cat_table)
        elif cut == 'select_extended':
            full_mask *= select_extended(cat_table)
        else:
            full_mask *= make_mask(cat_table, cut)
    # Strip whitespace so names compare cleanly downstream.
    return [name.strip() for name in cat_table['Source_Name'][full_mask]]
Select only rows passing a set of cuts from catalog table
3.219094
3.224649
0.998278
def make_catalog_comp_dict(**kwargs):
    """Build and return the information about the catalog components

    Parameters
    ----------
    library : str, optional
        Path to the yaml library file (default 'models/library.yaml');
        None or 'None' skips reading it.

    CatalogSourceManager : `CatalogSourceManager`, optional
        Existing manager; built from the remaining kwargs when absent.

    Returns
    -------
    dict with keys 'catalog_info_dict', 'comp_info_dict' and
    'CatalogSourceManager'.
    """
    library_yamlfile = kwargs.pop('library', 'models/library.yaml')
    csm = kwargs.pop('CatalogSourceManager', CatalogSourceManager(**kwargs))
    if library_yamlfile is None or library_yamlfile == 'None':
        yamldict = {}
    else:
        # FIX: close the file handle (the original passed a bare
        # open() to yaml.safe_load and leaked it).
        with open(library_yamlfile) as fin:
            yamldict = yaml.safe_load(fin)
    catalog_info_dict, comp_info_dict = csm.make_catalog_comp_info_dict(yamldict)
    return dict(catalog_info_dict=catalog_info_dict,
                comp_info_dict=comp_info_dict,
                CatalogSourceManager=csm)
Build and return the information about the catalog components
3.30913
3.203256
1.033052
def read_catalog_info_yaml(self, splitkey):
    """Read the yaml file for a particular split key

    Parameters
    ----------
    splitkey : str
        Key identifying the catalog split.

    Returns
    -------
    dict : yaml file contents with the 'catalog_file' and
        'catalog_extdir' entries expanded for environment variables.
    """
    catalog_info_yaml = self._name_factory.catalog_split_yaml(
        sourcekey=splitkey, fullpath=True)
    # FIX: close the file handle (the original passed a bare open()
    # to yaml.safe_load and leaked it).
    with open(catalog_info_yaml) as fin:
        yaml_dict = yaml.safe_load(fin)
    # resolve env vars
    yaml_dict['catalog_file'] = os.path.expandvars(yaml_dict['catalog_file'])
    yaml_dict['catalog_extdir'] = os.path.expandvars(yaml_dict['catalog_extdir'])
    return yaml_dict
Read the yaml file for a particular split key
3.882908
3.788199
1.025001
def build_catalog_info(self, catalog_info):
    """Build a CatalogInfo object

    Parameters
    ----------
    catalog_info : dict
        Keyword arguments for the `CatalogInfo` constructor; augmented
        here with the catalog object, its table, a fermipy ROI model
        and the source-model XML name.
    """
    cat = SourceFactory.build_catalog(**catalog_info)
    catalog_info['catalog'] = cat
    # catalog_info['catalog_table'] =
    # Table.read(catalog_info['catalog_file'])
    catalog_info['catalog_table'] = cat.table
    catalog_info['roi_model'] =\
        SourceFactory.make_fermipy_roi_model_from_catalogs([cat])
    catalog_info['srcmdl_name'] =\
        self._name_factory.srcmdl_xml(sourcekey=catalog_info['catalog_name'])
    return CatalogInfo(**catalog_info)
Build a CatalogInfo object
5.869689
5.775517
1.016305
def catalog_components(self, catalog_name, split_ver):
    """Return the set of merged components for a particular split key"""
    split_dict = self._split_comp_info_dicts["%s_%s" % (catalog_name,
                                                        split_ver)]
    return sorted(split_dict.keys())
Return the set of merged components for a particular split key
7.391747
6.44178
1.14747
def split_comp_info(self, catalog_name, split_ver, split_key):
    """Return the info for a particular split key"""
    full_key = "%s_%s" % (catalog_name, split_ver)
    return self._split_comp_info_dicts[full_key][split_key]
Return the info for a particular split key
4.626615
4.080379
1.133869
def make_catalog_comp_info(self, full_cat_info, split_key, rule_key,
                           rule_val, sources):
    """Make the information about a single merged component

    Parameters
    ----------
    full_cat_info : `_model_component.CatalogInfo`
        Information about the full catalog

    split_key : str
        Key identifying the version of the spliting used

    rule_key : str
        Key identifying the specific rule for this component

    rule_val : list
        List of the cuts used to define this component

    sources : list
        List of the names of the sources in this component

    Returns `CompositeSourceInfo` or `CatalogSourcesInfo`
    """
    merge = rule_val.get('merge', True)
    sourcekey = "%s_%s_%s" % (
        full_cat_info.catalog_name, split_key, rule_key)
    srcmdl_name = self._name_factory.srcmdl_xml(sourcekey=sourcekey)
    srcmdl_name = self._name_factory.fullpath(localpath=srcmdl_name)
    kwargs = dict(source_name="%s_%s" % (full_cat_info.catalog_name,
                                         rule_key),
                  source_ver=split_key,
                  sourcekey=sourcekey,
                  srcmdl_name=srcmdl_name,
                  source_names=sources,
                  catalog_info=full_cat_info,
                  # ROI model restricted to the sources of this component.
                  roi_model=SourceFactory.copy_selected_sources(
                      full_cat_info.roi_model, sources))
    if merge:
        # Merged: the selected sources become one composite source.
        return CompositeSourceInfo(**kwargs)
    return CatalogSourcesInfo(**kwargs)
Make the information about a single merged component Parameters ---------- full_cat_info : `_model_component.CatalogInfo` Information about the full catalog split_key : str Key identifying the version of the spliting used rule_key : str Key identifying the specific rule for this component rule_val : list List of the cuts used to define this component sources : list List of the names of the sources in this component Returns `CompositeSourceInfo` or `CatalogSourcesInfo`
4.682714
4.015873
1.166051
catalog_ret_dict = {} split_ret_dict = {} for key, value in catalog_sources.items(): if value is None: continue if value['model_type'] != 'catalog': continue versions = value['versions'] for version in versions: ver_key = "%s_%s" % (key, version) source_dict = self.read_catalog_info_yaml(ver_key) try: full_cat_info = catalog_ret_dict[key] except KeyError: full_cat_info = self.build_catalog_info(source_dict) catalog_ret_dict[key] = full_cat_info try: all_sources = [x.strip() for x in full_cat_info.catalog_table[ 'Source_Name'].astype(str).tolist()] except KeyError: print(full_cat_info.catalog_table.colnames) used_sources = [] rules_dict = source_dict['rules_dict'] split_dict = {} for rule_key, rule_val in rules_dict.items(): # full_key =\ # self._name_factory.merged_sourcekey(catalog=ver_key, # rulekey=rule_key) sources = select_sources( full_cat_info.catalog_table, rule_val['cuts']) used_sources.extend(sources) split_dict[rule_key] = self.make_catalog_comp_info( full_cat_info, version, rule_key, rule_val, sources) # Now deal with the remainder for source in used_sources: try: all_sources.remove(source) except ValueError: continue rule_val = dict(cuts=[], merge=source_dict['remainder'].get('merge', False)) split_dict['remain'] = self.make_catalog_comp_info( full_cat_info, version, 'remain', rule_val, all_sources) # Merge in the info for this version of splits split_ret_dict[ver_key] = split_dict self._catalog_comp_info_dicts.update(catalog_ret_dict) self._split_comp_info_dicts.update(split_ret_dict) return (catalog_ret_dict, split_ret_dict)
def make_catalog_comp_info_dict(self, catalog_sources)
Make the information about the catalog components Parameters ---------- catalog_sources : dict Dictionary with catalog source defintions Returns ------- catalog_ret_dict : dict Dictionary mapping catalog_name to `model_component.CatalogInfo` split_ret_dict : dict Dictionary mapping sourcekey to `model_component.ModelComponentInfo`
3.194093
2.971369
1.074957
inhdulist = fits.open(infile) wcs = pywcs.WCS(inhdulist[0].header) map_shape = inhdulist[0].data.shape t_eng = Table.read(infile, "EBOUNDS") t_scan = Table.read(infile, "SCANDATA") t_fit = Table.read(infile, "FITDATA") n_ebin = len(t_eng) energies = np.ndarray((n_ebin + 1)) energies[0:-1] = t_eng["E_MIN"] energies[-1] = t_eng["E_MAX"][-1] cube_shape = (n_ebin, map_shape[1], map_shape[0]) wcs_cube = wcs_utils.wcs_add_energy_axis(wcs, energies) outhdulist = [inhdulist[0], inhdulist["EBOUNDS"]] FIT_COLNAMES = ['FIT_TS', 'FIT_STATUS', 'FIT_NORM', 'FIT_NORM_ERR', 'FIT_NORM_ERRP', 'FIT_NORM_ERRN'] SCAN_COLNAMES = ['TS', 'BIN_STATUS', 'NORM', 'NORM_UL', 'NORM_ERR', 'NORM_ERRP', 'NORM_ERRN', 'LOGLIKE'] for c in FIT_COLNAMES: data = t_fit[c].data.reshape(map_shape) hdu = fits.ImageHDU(data, wcs.to_header(), name=c) outhdulist.append(hdu) for c in SCAN_COLNAMES: data = t_scan[c].data.swapaxes(0, 1).reshape(cube_shape) hdu = fits.ImageHDU(data, wcs_cube.to_header(), name=c) outhdulist.append(hdu) hdulist = fits.HDUList(outhdulist) hdulist.writeto(outfile, clobber=True) return hdulist
def extract_images_from_tscube(infile, outfile)
Extract data from table HDUs in TSCube file and convert them to FITS images
2.475406
2.477782
0.999041
slices = [] for i in range(array1.ndim): xmin = 0 xmax = array1.shape[i] dxlo = array1.shape[i] // 2 dxhi = array1.shape[i] - dxlo if position[i] - dxlo < 0: xmin = max(dxlo - position[i], 0) if position[i] + dxhi > array2.shape[i]: xmax = array1.shape[i] - (position[i] + dxhi - array2.shape[i]) xmax = max(xmax, 0) slices += [slice(xmin, xmax)] return array1[slices]
def truncate_array(array1, array2, position)
Truncate array1 by finding the overlap with array2 when the array1 center is located at the given position in array2.
2.217948
2.246735
0.987187
def wrapper(*args, **kwargs): v = 0 new_args = _cast_args_to_list(args) for arg in zip(*new_args): v += fn(*arg, **kwargs) return v return wrapper
def _sum_wrapper(fn)
Wrapper to perform row-wise aggregation of list arguments and pass them to a function. The return value of the function is summed over the argument groups. Non-list arguments will be automatically cast to a list.
3.813072
3.29349
1.15776
if isinstance(counts, list): counts = np.concatenate([t.flat for t in counts]) bkg = np.concatenate([t.flat for t in bkg]) model = np.concatenate([t.flat for t in model]) s_model = np.sum(model) s_counts = np.sum(counts) sn = bkg / model imin = np.argmin(sn) sn_min = sn[imin] c_min = counts[imin] b_min = c_min / s_model - sn_min b_max = s_counts / s_model - sn_min return max(b_min, 0), b_max
def _amplitude_bounds(counts, bkg, model)
Compute bounds for the root of `_f_cash_root_cython`. Parameters ---------- counts : `~numpy.ndarray` Count map. bkg : `~numpy.ndarray` Background map. model : `~numpy.ndarray` Source template (multiplied with exposure).
2.655742
2.720466
0.976209
return np.sum(model * (counts / (x * model + bkg) - 1.0))
def _f_cash_root(x, counts, bkg, model)
Function to find root of. Described in Appendix A, Stewart (2009). Parameters ---------- x : float Model amplitude. counts : `~numpy.ndarray` Count map slice, where model is defined. bkg : `~numpy.ndarray` Background map slice, where model is defined. model : `~numpy.ndarray` Source template (multiplied with exposure).
5.950015
13.426268
0.443162
# Compute amplitude bounds and assert counts > 0 amplitude_min, amplitude_max = _amplitude_bounds(counts, bkg, model) if not np.sum(counts) > 0: return amplitude_min, 0 args = (counts, bkg, model) if root_fn(0.0, *args) < 0: return 0.0, 1 with warnings.catch_warnings(): warnings.simplefilter("ignore") try: result = brentq(root_fn, amplitude_min, amplitude_max, args=args, maxiter=MAX_NITER, full_output=True, rtol=1E-4) return result[0], result[1].iterations except (RuntimeError, ValueError): # Where the root finding fails NaN is set as amplitude return np.nan, MAX_NITER
def _root_amplitude_brentq(counts, bkg, model, root_fn=_f_cash_root)
Fit amplitude by finding roots using Brent algorithm. See Appendix A Stewart (2009). Parameters ---------- counts : `~numpy.ndarray` Slice of count map. bkg : `~numpy.ndarray` Slice of background map. model : `~numpy.ndarray` Model template to fit. Returns ------- amplitude : float Fitted flux amplitude. niter : int Number of function evaluations needed for the fit.
3.478649
3.453561
1.007264
loglike = np.array(model) m = counts > 0 loglike[m] -= counts[m] * np.log(model[m]) return loglike
def poisson_log_like(counts, model)
Compute the Poisson log-likelihood function for the given counts and model arrays.
3.68415
4.073438
0.904433
return 2.0 * poisson_log_like(counts, bkg + x * model)
def f_cash(x, counts, bkg, model)
Wrapper for cash statistics, that defines the model function. Parameters ---------- x : float Model amplitude. counts : `~numpy.ndarray` Count map slice, where model is defined. bkg : `~numpy.ndarray` Background map slice, where model is defined. model : `~numpy.ndarray` Source template (multiplied with exposure).
8.906507
13.244131
0.672487
extract_fn = _collect_wrapper(extract_large_array) truncate_fn = _collect_wrapper(extract_small_array) # Get data slices counts_slice = extract_fn(counts, model, position) bkg_slice = extract_fn(bkg, model, position) C_0_slice = extract_fn(C_0_map, model, position) model_slice = truncate_fn(model, counts, position) # Flattened Arrays counts_ = np.concatenate([t.flat for t in counts_slice]) bkg_ = np.concatenate([t.flat for t in bkg_slice]) model_ = np.concatenate([t.flat for t in model_slice]) C_0_ = np.concatenate([t.flat for t in C_0_slice]) C_0 = np.sum(C_0_) root_fn = _sum_wrapper(_f_cash_root) amplitude, niter = _root_amplitude_brentq(counts_, bkg_, model_, root_fn=_f_cash_root) if niter > MAX_NITER: print('Exceeded maximum number of function evaluations!') return np.nan, amplitude, niter with np.errstate(invalid='ignore', divide='ignore'): C_1 = f_cash_sum(amplitude, counts_, bkg_, model_) # Compute and return TS value return (C_0 - C_1) * np.sign(amplitude), amplitude, niter
def _ts_value(position, counts, bkg, model, C_0_map)
Compute TS value at a given pixel position using the approach described in Stewart (2009). Parameters ---------- position : tuple Pixel position. counts : `~numpy.ndarray` Count map. bkg : `~numpy.ndarray` Background map. model : `~numpy.ndarray` Source model map. Returns ------- TS : float TS value at the given pixel position.
3.477217
3.721219
0.93443
extract_fn = _collect_wrapper(extract_large_array) truncate_fn = _collect_wrapper(extract_small_array) # Get data slices counts_slice = extract_fn(counts, model, position) bkg_slice = extract_fn(bkg, model, position) C_0_map_slice = extract_fn(C_0_map, model, position) model_slice = truncate_fn(model, counts, position) # Mask of pixels with > 0 counts mask = [c > 0 for c in counts_slice] # Sum of background and model in empty pixels bkg_sum = np.sum(np.array([np.sum(t[~m]) for t, m in zip(bkg_slice, mask)])) model_sum = np.sum(np.array([np.sum(t[~m]) for t, m in zip(model_slice, mask)])) # Flattened Arrays counts_ = np.concatenate([t[m].flat for t, m in zip(counts_slice, mask)]) bkg_ = np.concatenate([t[m].flat for t, m in zip(bkg_slice, mask)]) model_ = np.concatenate([t[m].flat for t, m in zip(model_slice, mask)]) C_0 = np.sum(np.array([np.sum(t) for t in C_0_map_slice])) amplitude, niter = _fit_amplitude_newton(counts_, bkg_, model_, model_sum) if niter > MAX_NITER: print('Exceeded maximum number of function evaluations!') return np.nan, amplitude, niter with np.errstate(invalid='ignore', divide='ignore'): C_1 = f_cash_sum(amplitude, counts_, bkg_, model_, bkg_sum, model_sum) # Compute and return TS value return (C_0 - C_1) * np.sign(amplitude), amplitude, niter
def _ts_value_newton(position, counts, bkg, model, C_0_map)
Compute TS value at a given pixel position using the newton method. Parameters ---------- position : tuple Pixel position. counts : `~numpy.ndarray` Count map. bkg : `~numpy.ndarray` Background map. model : `~numpy.ndarray` Source model map. Returns ------- TS : float TS value at the given pixel position. amp : float Best-fit amplitude of the test source. niter : int Number of fit iterations.
2.908997
2.934825
0.9912
timer = Timer.create(start=True) schema = ConfigSchema(self.defaults['tsmap']) schema.add_option('loglevel', logging.INFO) schema.add_option('map_skydir', None, '', astropy.coordinates.SkyCoord) schema.add_option('map_size', 1.0) schema.add_option('threshold', 1E-2, '', float) schema.add_option('use_pylike', True, '', bool) schema.add_option('outfile', None, '', str) config = schema.create_config(self.config['tsmap'], **kwargs) # Defining default properties of test source model config['model'].setdefault('Index', 2.0) config['model'].setdefault('SpectrumType', 'PowerLaw') config['model'].setdefault('SpatialModel', 'PointSource') self.logger.log(config['loglevel'], 'Generating TS map') o = self._make_tsmap_fast(prefix, **config) if config['make_plots']: plotter = plotting.AnalysisPlotter(self.config['plotting'], fileio=self.config['fileio'], logging=self.config['logging']) plotter.make_tsmap_plots(o, self.roi) self.logger.log(config['loglevel'], 'Finished TS map') outfile = config.get('outfile', None) if outfile is None: outfile = utils.format_filename(self.workdir, 'tsmap', prefix=[o['name']]) else: outfile = os.path.join(self.workdir, os.path.splitext(outfile)[0]) if config['write_fits']: o['file'] = os.path.basename(outfile) + '.fits' self._make_tsmap_fits(o, outfile + '.fits') if config['write_npy']: np.save(outfile + '.npy', o) self.logger.log(config['loglevel'], 'Execution time: %.2f s', timer.elapsed_time) return o
def tsmap(self, prefix='', **kwargs)
Generate a spatial TS map for a source component with properties defined by the `model` argument. The TS map will have the same geometry as the ROI. The output of this method is a dictionary containing `~fermipy.skymap.Map` objects with the TS and amplitude of the best-fit test source. By default this method will also save maps to FITS files and render them as image files. This method uses a simplified likelihood fitting implementation that only fits for the normalization of the test source. Before running this method it is recommended to first optimize the ROI model (e.g. by running :py:meth:`~fermipy.gtanalysis.GTAnalysis.optimize`). Parameters ---------- prefix : str Optional string that will be prepended to all output files. {options} Returns ------- tsmap : dict A dictionary containing the `~fermipy.skymap.Map` objects for TS and source amplitude.
3.889718
3.624578
1.07315