code string | signature string | docstring string | loss_without_docstring float64 | loss_with_docstring float64 | factor float64 |
|---|---|---|---|---|---|
srcs = np.load(npfile).flat[0]['sources']
roi = ROIModel()
roi.load_sources(srcs.values())
return roi.create_table() | def read_sources_from_numpy_file(npfile) | Open a numpy pickle file and read all the new sources into a dictionary
Parameters
----------
npfile : file name
The input numpy pickle file
Returns
-------
tab : `~astropy.table.Table` | 9.495911 | 13.980116 | 0.679244 |
f = open(yamlfile)
dd = yaml.load(f)
srcs = dd['sources']
f.close()
roi = ROIModel()
roi.load_sources(srcs.values())
return roi.create_table() | def read_sources_from_yaml_file(yamlfile) | Open a yaml file and read all the new sources into a dictionary
Parameters
----------
yaml : file name
The input yaml file
Returns
-------
tab : `~astropy.table.Table` | 5.550993 | 6.213456 | 0.893382 |
if roi_idx is not None and 'roi' not in tab.columns:
tab.add_column(Column(name='roi', data=len(tab) * [roi_idx]))
remove_rows = []
for i, row in enumerate(tab):
if not all_sources and row['name'].find("PS") != 0:
remove_rows += [i]
continue
sname = "%s%s%s" % (prefix, row['name'], suffix)
row['name'] = sname
tab.remove_rows(remove_rows)
if src_tab is None:
src_tab = tab
else:
src_tab = vstack([src_tab, tab], join_type='outer')
return src_tab | def merge_source_tables(src_tab, tab, all_sources=False, prefix="", suffix="",
roi_idx=None) | Append the sources in a table into another table.
Parameters
----------
src_tab : `~astropy.table.Table`
Master source table that will be appended with the sources in
``tab``.
tab : `~astropy.table.Table`
Table to be merged into ``src_tab``.
all_sources : bool
If true, then all the sources get added to the table.
if false, then only the sources that start with 'PS' get added
prefix : str
Prepended to all source names
suffix : str
Appended to all source names
Returns
-------
tab : `~astropy.table.Table` | 2.705176 | 2.428923 | 1.113735 |
name = self.roi.get_source_by_name(name).name
# Create schema for method configuration
schema = ConfigSchema(self.defaults['lightcurve'],
optimizer=self.defaults['optimizer'])
schema.add_option('prefix', '')
config = utils.create_dict(self.config['lightcurve'],
optimizer=self.config['optimizer'])
config = schema.create_config(config, **kwargs)
self.logger.info('Computing Lightcurve for %s' % name)
o = self._make_lc(name, **config)
filename = utils.format_filename(self.workdir, 'lightcurve',
prefix=[config['prefix'],
name.lower().replace(' ', '_')])
o['file'] = None
if config['write_fits']:
o['file'] = os.path.basename(filename) + '.fits'
self._make_lc_fits(o, filename + '.fits', **config)
if config['write_npy']:
np.save(filename + '.npy', o)
self.logger.info('Finished Lightcurve')
return o | def lightcurve(self, name, **kwargs) | Generate a lightcurve for the named source. The function will
complete the basic analysis steps for each bin and perform a
likelihood fit for each bin. Extracted values (along with
errors) are Integral Flux, spectral model, Spectral index, TS
value, pred. # of photons. Note: successful calculation of
TS:subscript:`var` requires at least one free background
parameter and a previously optimized ROI model.
Parameters
---------
name: str
source name
{options}
Returns
---------
LightCurve : dict
Dictionary containing output of the LC analysis | 4.681553 | 4.65143 | 1.006476 |
usage = "usage: %(prog)s [options] "
description = "Merge a set of Fermi-LAT files."
parser = argparse.ArgumentParser(usage=usage, description=description)
parser.add_argument('-o', '--output', default=None, type=str,
help='Output file.')
parser.add_argument('--ccube', default=None, type=str,
help='Input counts cube file .')
parser.add_argument('--bexpcube', default=None, type=str,
help='Input binned exposure cube.')
parser.add_argument('--hpx_order', default=None, type=int,
help='Order of output map: default = counts map order')
parser.add_argument('--clobber', action='store_true',
help='Overwrite output file')
args = parser.parse_args()
ccube = HpxMap.create_from_fits(args.ccube, hdu='SKYMAP')
bexpcube = HpxMap.create_from_fits(args.bexpcube, hdu='HPXEXPOSURES')
if args.hpx_order:
hpx_order = args.hpx_order
else:
hpx_order = ccube.hpx.order
out_cube = intensity_cube(ccube, bexpcube, hpx_order)
out_cube.hpx.write_fits(out_cube.data, args.output, clobber=args.clobber) | def main() | Main function for command line usage | 2.808326 | 2.780621 | 1.009964 |
if not re.search('\.txt?', ltfile) is None:
files = np.loadtxt(ltfile, unpack=True, dtype='str')
elif not isinstance(ltfile, list):
files = glob.glob(ltfile)
ltc = cls.create_from_fits(files[0])
for f in files[1:]:
ltc.load_ltfile(f)
return ltc | def create(cls, ltfile) | Create a livetime cube from a single file or list of
files. | 4.239477 | 3.674552 | 1.15374 |
cth_edges = np.linspace(0, 1.0, 41)
domega = utils.edge_to_width(cth_edges) * 2.0 * np.pi
hpx = HPX(nside, True, 'CEL', ebins=cth_edges)
data = np.ones((len(cth_edges) - 1, hpx.npix)) * fill
return cls(data, hpx, cth_edges, tstart=tstart, tstop=tstop) | def create_empty(cls, tstart, tstop, fill=0.0, nside=64) | Create an empty livetime cube. | 4.229065 | 4.148274 | 1.019476 |
ra = skydir.ra.deg
dec = skydir.dec.deg
npts = 1
bins = utils.split_bin_edges(cth_bins, npts)
center = edge_to_center(bins)
width = edge_to_width(bins)
ipix = hp.ang2pix(self.hpx.nside, np.pi / 2. - np.radians(dec),
np.radians(ra), nest=self.hpx.nest)
lt = np.histogram(self._cth_center,
weights=self.data[:, ipix], bins=bins)[0]
lt = np.sum(lt.reshape(-1, npts), axis=1)
return lt | def get_skydir_lthist(self, skydir, cth_bins) | Get the livetime distribution (observing profile) for a given sky
direction with binning in incidence angle defined by
``cth_bins``.
Parameters
----------
skydir : `~astropy.coordinates.SkyCoord`
Sky coordinate for which the observing profile will be
computed.
cth_bins : `~numpy.ndarray`
Bin edges in cosine of the incidence angle. | 3.787341 | 4.304321 | 0.879893 |
skydir = SkyCoord(np.array([skydir.ra.deg]),
np.array([skydir.dec.deg]), unit='deg')
lt, lt_wt = fill_livetime_hist(skydir, tab_sc, tab_gti, zmax,
self.costh_edges)
ipix = self.hpx.skydir_to_pixel(skydir)
lt_scale = np.ones_like(lt)
lt_wt_scale = np.ones_like(lt_wt)
m = self.data[:, ipix] > 0.0
lt_scale[m] = lt[m] / self.data[:, ipix][m]
lt_wt_scale[m] = lt_wt[m] / self._data_wt[:, ipix][m]
data = self.data * lt_scale
data_wt = self._data_wt * lt_wt_scale
return LTCube(data, copy.deepcopy(self.hpx), self.costh_edges,
# tstart=np.min(tab_gti_t0),
# tstop=np.max(tab_gti_t1),
zmax=zmax, data_wt=data_wt) | def create_skydir_ltcube(self, skydir, tab_sc, tab_gti, zmax) | Create a new livetime cube by scaling this one by the
observing profile ratio in the direction ``skydir``. This
method can be used to generate an approximate livetime cube
that is accurate in the vicinity of ``skydir``.
Parameters
----------
skydir : `~astropy.coordinates.SkyCoord`
tab_sc : `~astropy.table.Table`
Spacecraft (FT2) table.
tab_gti : `~astropy.table.Table`
Table of GTIs.
zmax : float
Zenith angle cut. | 3.223228 | 3.373505 | 0.955454 |
hdu_pri = fits.PrimaryHDU()
hdu_exp = self._create_exp_hdu(self.data)
hdu_exp.name = 'EXPOSURE'
hdu_exp_wt = self._create_exp_hdu(self._data_wt)
hdu_exp_wt.name = 'WEIGHTED_EXPOSURE'
cols = [Column(name='CTHETA_MIN', dtype='f4',
data=self.costh_edges[:-1][::-1]),
Column(name='CTHETA_MAX', dtype='f4',
data=self.costh_edges[1:][::-1]), ]
hdu_bnds = fits.table_to_hdu(Table(cols))
hdu_bnds.name = 'CTHETABOUNDS'
hdu_gti = fits.table_to_hdu(self._tab_gti)
hdu_gti.name = 'GTI'
hdus = [hdu_pri, hdu_exp, hdu_exp_wt,
hdu_bnds, hdu_gti]
for hdu in hdus:
hdu.header['TSTART'] = self.tstart
hdu.header['TSTOP'] = self.tstop
with fits.HDUList(hdus) as hdulist:
hdulist.writeto(outfile, clobber=True) | def write(self, outfile) | Write the livetime cube to a FITS file. | 2.491333 | 2.442776 | 1.019878 |
usage = "usage: %(prog)s [options] "
description = "Merge a set of Fermi-LAT files."
parser = argparse.ArgumentParser(usage=usage, description=description)
parser.add_argument('-o', '--output', default=None, type=str,
help='Output file.')
parser.add_argument('--clobber', default=False, action='store_true',
help='Overwrite output file.')
parser.add_argument('--hdu', default=None, type=str,
help='HDU name.')
parser.add_argument('--gzip', action='store_true',
help='Compress output file')
parser.add_argument('--rm', action='store_true',
help='Remove input files.')
parser.add_argument('files', nargs='+', default=None,
help='List of input files.')
args = parser.parse_args()
hpx_map = merge_utils.stack_energy_planes_hpx(args.files, hdu=args.hdu)
if args.output:
hpx_map.hpx.write_fits(hpx_map.counts, args.output,
extname=args.hdu, clobber=args.clobber)
if args.gzip:
os.system('gzip -9 %s' % args.output)
if args.rm:
for farg in args.files:
flist = glob.glob(farg)
for ffound in flist:
os.path.unlink(ffound) | def main() | Main function for command line usage | 2.730846 | 2.708363 | 1.008301 |
# FIXME: This functionality should be moved into a slice method of
# gammapy.maps
axis = map_in.geom.axes[0]
i0 = utils.val_to_edge(axis.edges, 10**loge_bounds[0])[0]
i1 = utils.val_to_edge(axis.edges, 10**loge_bounds[1])[0]
new_axis = map_in.geom.axes[0].slice(slice(i0, i1))
geom = map_in.geom.to_image()
geom = geom.to_cube([new_axis])
map_out = WcsNDMap(geom, map_in.data[slice(i0, i1), ...].copy())
return map_out | def make_cube_slice(map_in, loge_bounds) | Extract a slice from a map cube object. | 3.01526 | 2.997004 | 1.006091 |
ax = kwargs.pop('ax', plt.gca())
cmap = kwargs.get('cmap', 'BuGn')
annotate_name(sed, ax=ax)
SEDPlotter.plot_flux_points(sed, **kwargs)
if np.any(sed['ts'] > 9.):
if 'model_flux' in sed:
SEDPlotter.plot_model(sed['model_flux'],
noband=showlnl, **kwargs)
if showlnl:
SEDPlotter.plot_lnlscan(sed, **kwargs)
ax.set_yscale('log')
ax.set_xscale('log')
ax.set_xlabel('Energy [MeV]')
ax.set_ylabel('E$^{2}$dN/dE [MeV cm$^{-2}$ s$^{-1}$]') | def plot_sed(sed, showlnl=False, **kwargs) | Render a plot of a spectral energy distribution.
Parameters
----------
showlnl : bool
Overlay a map of the delta-loglikelihood values vs. flux
in each energy bin.
cmap : str
Colormap that will be used for the delta-loglikelihood
map.
llhcut : float
Minimum delta-loglikelihood value.
ul_ts_threshold : float
TS threshold that determines whether the MLE or UL
is plotted in each energy bin. | 3.761976 | 3.851547 | 0.976744 |
prefix = kwargs.get('prefix', 'test')
format = kwargs.get('format', self.config['format'])
loge_bounds = [None] + self.config['loge_bounds']
for x in loge_bounds:
self.make_roi_plots(gta, mcube_map, loge_bounds=x,
**kwargs)
imfile = utils.format_filename(self.config['fileio']['workdir'],
'counts_spectrum', prefix=[prefix],
extension=format)
make_counts_spectrum_plot(gta._roi_data, gta.roi,
gta.log_energies,
imfile, **kwargs) | def run(self, gta, mcube_map, **kwargs) | Make all plots. | 5.980796 | 5.60312 | 1.067405 |
kwargs.setdefault('graticule_radii', self.config['graticule_radii'])
kwargs.setdefault('label_ts_threshold',
self.config['label_ts_threshold'])
kwargs.setdefault('cmap', self.config['cmap'])
kwargs.setdefault('catalogs', self.config['catalogs'])
fmt = kwargs.get('format', self.config['format'])
figsize = kwargs.get('figsize', self.config['figsize'])
workdir = kwargs.pop('workdir', self.config['fileio']['workdir'])
suffix = kwargs.pop('suffix', 'tsmap')
zoom = kwargs.pop('zoom', None)
if 'ts' not in maps:
return
sigma_levels = [3, 5, 7] + list(np.logspace(1, 3, 17))
prefix = maps['name']
fig = plt.figure(figsize=figsize)
p = ROIPlotter(maps['sqrt_ts'], roi=roi, **kwargs)
p.plot(vmin=0, vmax=5, levels=sigma_levels,
cb_label='Sqrt(TS) [$\sigma$]', interpolation='bicubic',
zoom=zoom)
plt.savefig(utils.format_filename(workdir,
'%s_sqrt_ts' % suffix,
prefix=[prefix],
extension=fmt))
plt.close(fig)
fig = plt.figure(figsize=figsize)
p = ROIPlotter(maps['npred'], roi=roi, **kwargs)
p.plot(vmin=0, cb_label='NPred [Counts]', interpolation='bicubic',
zoom=zoom)
plt.savefig(utils.format_filename(workdir,
'%s_npred' % suffix,
prefix=[prefix],
extension=fmt))
plt.close(fig)
# make and draw histogram
fig, ax = plt.subplots(figsize=figsize)
bins = np.linspace(0, 25, 101)
data = np.nan_to_num(maps['ts'].data.T)
data[data > 25.0] = 25.0
data[data < 0.0] = 0.0
n, bins, patches = ax.hist(data.flatten(), bins, density=True,
histtype='stepfilled',
facecolor='green', alpha=0.75)
# ax.plot(bins,(1-chi2.cdf(x,dof))/2.,**kwargs)
ax.plot(bins, 0.5 * chi2.pdf(bins, 1.0), color='k',
label=r"$\chi^2_{1} / 2$")
ax.set_yscale('log')
ax.set_ylim(1E-4)
ax.legend(loc='upper right', frameon=False)
# labels and such
ax.set_xlabel('TS')
ax.set_ylabel('Probability')
plt.savefig(utils.format_filename(workdir,
'%s_ts_hist' % suffix,
prefix=[prefix],
extension=fmt))
plt.close(fig) | def make_tsmap_plots(self, maps, roi=None, **kwargs) | Make plots from the output of
`~fermipy.gtanalysis.GTAnalysis.tsmap` or
`~fermipy.gtanalysis.GTAnalysis.tscube`. This method
generates a 2D sky map for the best-fit test source in
sqrt(TS) and Npred.
Parameters
----------
maps : dict
Output dictionary of
`~fermipy.gtanalysis.GTAnalysis.tsmap` or
`~fermipy.gtanalysis.GTAnalysis.tscube`.
roi : `~fermipy.roi_model.ROIModel`
ROI Model object. Generate markers at the positions of
the sources in this ROI.
zoom : float
Crop the image by this factor. If None then no crop is
applied. | 2.731901 | 2.626494 | 1.040132 |
# format = kwargs.get('format', self.config['plotting']['format'])
if loge_bounds is None:
loge_bounds = (self.energies[0], self.energies[-1])
name = src['name'].lower().replace(' ', '_')
esuffix = '_%.3f_%.3f' % (loge_bounds[0], loge_bounds[1])
p = ExtensionPlotter(src, self.roi, '',
self.config['fileio']['workdir'],
loge_bounds=loge_bounds)
fig = plt.figure()
p.plot(0)
plt.gca().set_xlim(-2, 2)
ROIPlotter.setup_projection_axis(0)
annotate(src=src, loge_bounds=loge_bounds)
plt.savefig(os.path.join(self.config['fileio']['workdir'],
'%s_%s_extension_xproj%s.png' % (
prefix, name, esuffix)))
plt.close(fig)
fig = plt.figure()
p.plot(1)
plt.gca().set_xlim(-2, 2)
ROIPlotter.setup_projection_axis(1)
annotate(src=src, loge_bounds=loge_bounds)
plt.savefig(os.path.join(self.config['fileio']['workdir'],
'%s_%s_extension_yproj%s.png' % (
prefix, name, esuffix)))
plt.close(fig)
for i, c in enumerate(self.components):
suffix = '_%02i' % i
p = ExtensionPlotter(src, self.roi, suffix,
self.config['fileio']['workdir'],
loge_bounds=loge_bounds)
fig = plt.figure()
p.plot(0)
ROIPlotter.setup_projection_axis(0, loge_bounds=loge_bounds)
annotate(src=src, loge_bounds=loge_bounds)
plt.gca().set_xlim(-2, 2)
plt.savefig(os.path.join(self.config['fileio']['workdir'],
'%s_%s_extension_xproj%s%s.png' % (
prefix, name, esuffix, suffix)))
plt.close(fig)
fig = plt.figure()
p.plot(1)
plt.gca().set_xlim(-2, 2)
ROIPlotter.setup_projection_axis(1, loge_bounds=loge_bounds)
annotate(src=src, loge_bounds=loge_bounds)
plt.savefig(os.path.join(self.config['fileio']['workdir'],
'%s_%s_extension_yproj%s%s.png' % (
prefix, name, esuffix, suffix)))
plt.close(fig) | def _plot_extension(self, gta, prefix, src, loge_bounds=None, **kwargs) | Utility function for generating diagnostic plots for the
extension analysis. | 1.849433 | 1.840417 | 1.004899 |
out_dict = {}
if keys is None:
keys = pil.keys()
for key in keys:
try:
out_dict[key] = pil[key]
except ValueError:
out_dict[key] = None
return out_dict | def extract_parameters(pil, keys=None) | Extract and return parameter names and values from a pil object
Parameters
----------
pil : `Pil` object
keys : list
List of parameter names, if None, extact all parameters
Returns
-------
out_dict : dict
Dictionary with parameter name, value pairs | 1.915985 | 2.418546 | 0.792205 |
for key, val in kwargs.items():
if key in ['pfiles', 'scratch']:
continue
if val is None:
continue
try:
gtapp[key] = val
except ValueError:
raise ValueError(
"gtapp failed to set parameter %s %s" % (key, val))
except KeyError:
raise KeyError("gtapp failed to set parameter %s %s" % (key, val)) | def update_gtapp(gtapp, **kwargs) | Update the parameters of the object that can run ScienceTools applications
Parameters
----------
gtapp : `GtApp.GtApp`
Object that will run the application in question
kwargs : arguments used to invoke the application | 3.193921 | 3.27604 | 0.974934 |
pfiles_orig = os.environ['PFILES']
pfiles = kwargs.get('pfiles', None)
if pfiles:
if dry_run:
print("mkdir %s" % pfiles)
else:
try:
os.makedirs(pfiles)
except OSError:
pass
pfiles = "%s:%s" % (pfiles, pfiles_orig)
os.environ['PFILES'] = pfiles
return pfiles_orig | def _set_pfiles(dry_run, **kwargs) | Set the PFILES env var
Parameters
----------
dry_run : bool
Don't actually run
Keyword arguments
-----------------
pfiles : str
Value to set PFILES
Returns
-------
pfiles_orig : str
Current value of PFILES envar | 2.255384 | 2.234685 | 1.009263 |
pfiles_orig = _set_pfiles(dry_run, **kwargs)
gtapp = GtApp.GtApp(appname)
update_gtapp(gtapp, **kwargs)
_reset_pfiles(pfiles_orig)
return gtapp | def build_gtapp(appname, dry_run, **kwargs) | Build an object that can run ScienceTools application
Parameters
----------
appname : str
Name of the application (e.g., gtbin)
dry_run : bool
Print command but do not run it
kwargs : arguments used to invoke the application
Returns `GtApp.GtApp` object that will run the application in question | 4.88552 | 5.152593 | 0.948167 |
if stream is None:
stream = sys.stdout
pfiles_orig = _set_pfiles(dry_run, **kwargs)
update_gtapp(gtapp, **kwargs)
stream.write("%s\n" % gtapp.command())
stream.flush()
if dry_run:
_reset_pfiles(pfiles_orig)
return 0
try:
stdin, stdout = gtapp.runWithOutput(print_command=False)
for line in stdout:
stream.write(line.strip())
stream.flush()
return_code = 0
except:
stream.write('Exited with exit code -1\n')
return_code = -1
_reset_pfiles(pfiles_orig)
return return_code | def run_gtapp(gtapp, stream, dry_run, **kwargs) | Runs one on the ScienceTools apps
Taken from fermipy.gtanalysis.run_gtapp by Matt Wood
Parameters
----------
gtapp : `GtApp.GtApp` object
The application (e.g., gtbin)
stream : stream object
Must have 'write' function
dry_run : bool
Print command but do not run it
kwargs : arguments used to invoke the application | 3.79558 | 4.143952 | 0.915932 |
Link.update_args(self, override_args)
dry_run = override_args.get('dry_run', False)
if self.__app is None:
self.__app = build_gtapp(self.appname, dry_run, **self.args)
#except:
# raise ValueError("Failed to build link %s %s %s" %
# (self.linkname, self.appname, self.args))
else:
update_gtapp(self.__app, **self.args) | def update_args(self, override_args) | Update the argument used to invoke the application
See help for `chain.Link` for details
This calls the base class function then fills the parameters of the GtApp object | 4.677923 | 4.097535 | 1.141643 |
return run_gtapp(self.__app, stream, dry_run, **self.args) | def run_command(self, stream=sys.stdout, dry_run=False) | Runs the command for this link. This method can be overridden by
sub-classes to invoke a different command
Parameters
-----------
stream : `file`
Must have 'write' function
dry_run : bool
Print command but do not run it | 16.682783 | 38.9744 | 0.428045 |
com_out = self.appname
for key, val in self.args.items():
if key in self._options:
com_out += ' %s={%s}' % (key, key)
else:
com_out += ' %s=%s' % (key, val)
return com_out | def command_template(self) | Build and return a string that can be used as a template invoking
this chain from the command line.
The actual command can be obtainted by using
`self.command_template().format(**self.args)` | 3.679121 | 3.464532 | 1.061939 |
args = self._parser.parse_args(argv)
components = Component.build_from_yamlfile(args.comp)
NAME_FACTORY.update_base_dict(args.data)
model_dict = make_library(**args.__dict__)
model_manager = model_dict['ModelManager']
models = load_yaml(args.models)
data = args.data
hpx_order = args.hpx_order
for modelkey in models:
model_manager.make_srcmap_manifest(modelkey, components, data)
model_manager.make_fermipy_config_yaml(modelkey, components, data,
hpx_order=hpx_order,
irf_ver=NAME_FACTORY.irf_ver()) | def run_analysis(self, argv) | Build the manifest for all the models | 7.140605 | 6.676538 | 1.069507 |
sys.stdout.write(" Copying counts cube from %s to %s\n" % (ccube, outsrcmap))
try:
hdulist_in = fits.open(ccube)
except IOError:
hdulist_in = fits.open("%s.gz" % ccube)
hpx_order_in = hdulist_in[1].header['ORDER']
if hpx_order_in > hpx_order:
hpxmap = HpxMap.create_from_hdulist(hdulist_in)
hpxmap_out = hpxmap.ud_grade(hpx_order, preserve_counts=True)
hpxlist_out = hdulist_in
#hpxlist_out['SKYMAP'] = hpxmap_out.create_image_hdu()
hpxlist_out[1] = hpxmap_out.create_image_hdu()
hpxlist_out[1].name = 'SKYMAP'
hpxlist_out.writeto(outsrcmap)
return hpx_order
else:
os.system('cp %s %s' % (ccube, outsrcmap))
#os.system('cp %s.gz %s.gz' % (ccube, outsrcmap))
#os.system('gunzip -f %s.gz' % (outsrcmap))
return None | def copy_ccube(ccube, outsrcmap, hpx_order) | Copy a counts cube into outsrcmap file
reducing the HEALPix order to hpx_order if needed. | 2.383839 | 2.299653 | 1.036608 |
sys.stdout.write(" Extracting %i sources from %s" % (len(source_names), srcmap_file))
try:
hdulist_in = fits.open(srcmap_file)
except IOError:
try:
hdulist_in = fits.open('%s.gz' % srcmap_file)
except IOError:
sys.stdout.write(" Missing file %s\n" % srcmap_file)
return
for source_name in source_names:
sys.stdout.write('.')
sys.stdout.flush()
if hpx_order is None:
hdulist.append(hdulist_in[source_name])
else:
try:
hpxmap = HpxMap.create_from_hdulist(hdulist_in, hdu=source_name)
except IndexError:
print(" Index error on source %s in file %s" % (source_name, srcmap_file))
continue
except KeyError:
print(" Key error on source %s in file %s" % (source_name, srcmap_file))
continue
hpxmap_out = hpxmap.ud_grade(hpx_order, preserve_counts=True)
hdulist.append(hpxmap_out.create_image_hdu(name=source_name))
sys.stdout.write("\n")
hdulist.flush()
hdulist_in.close() | def append_hdus(hdulist, srcmap_file, source_names, hpx_order) | Append HEALPix maps to a list
Parameters
----------
hdulist : list
The list being appended to
srcmap_file : str
Path to the file containing the HDUs
source_names : list of str
Names of the sources to extract from srcmap_file
hpx_order : int
Maximum order for maps | 2.122545 | 2.113101 | 1.004469 |
sys.stdout.write("Working on component %s\n" % compname)
ccube = compinfo['ccube']
outsrcmap = compinfo['outsrcmap']
source_dict = compinfo['source_dict']
hpx_order = AssembleModel.copy_ccube(ccube, outsrcmap, hpx_order)
hdulist = AssembleModel.open_outsrcmap(outsrcmap)
for comp_name in sorted(source_dict.keys()):
source_info = source_dict[comp_name]
source_names = source_info['source_names']
srcmap_file = source_info['srcmap_file']
AssembleModel.append_hdus(hdulist, srcmap_file,
source_names, hpx_order)
sys.stdout.write("Done!\n") | def assemble_component(compname, compinfo, hpx_order) | Assemble the source map file for one binning component
Parameters
----------
compname : str
The key for this component (e.g., E0_PSF3)
compinfo : dict
Information about this component
hpx_order : int
Maximum order for maps | 3.511224 | 3.362717 | 1.044163 |
args = self._parser.parse_args(argv)
manifest = yaml.safe_load(open(args.input))
compname = args.compname
value = manifest[compname]
self.assemble_component(compname, value, args.hpx_order) | def run_analysis(self, argv) | Assemble the source map file for one binning component
FIXME | 7.39155 | 6.036437 | 1.224489 |
job_configs = {}
components = Component.build_from_yamlfile(args['comp'])
NAME_FACTORY.update_base_dict(args['data'])
models = load_yaml(args['models'])
for modelkey in models:
manifest = os.path.join('analysis', 'model_%s' % modelkey,
'srcmap_manifest_%s.yaml' % modelkey)
for comp in components:
key = comp.make_key('{ebin_name}_{evtype_name}')
fullkey = "%s_%s" % (modelkey, key)
outfile = NAME_FACTORY.merged_srcmaps(modelkey=modelkey,
component=key,
coordsys=comp.coordsys,
mktime='none',
irf_ver=NAME_FACTORY.irf_ver())
logfile = make_nfs_path(outfile.replace('.fits', '.log'))
job_configs[fullkey] = dict(input=manifest,
compname=key,
logfile=logfile)
return job_configs | def build_job_configs(self, args) | Hook to build job configurations | 7.738604 | 7.761982 | 0.996988 |
data = input_dict.get('data')
comp = input_dict.get('comp')
library = input_dict.get('library')
models = input_dict.get('models')
hpx_order = input_dict.get('hpx_order_fitting')
dry_run = input_dict.get('dry_run', False)
self._set_link('init-model', InitModel,
comp=comp, data=data,
library=library,
models=models,
hpx_order=hpx_order,
dry_run=dry_run)
self._set_link('assemble-model', AssembleModel_SG,
comp=comp, data=data,
models=models) | def _map_arguments(self, input_dict) | Map from the top-level arguments to the arguments provided to
the indiviudal links | 3.657781 | 3.488697 | 1.048466 |
args = self._parser.parse_args(argv)
if not HAVE_ST:
raise RuntimeError(
"Trying to run fermipy analysis, but don't have ST")
gta = GTAnalysis(args.config, logging={'verbosity': 3},
fileio={'workdir_regex': '\.xml$|\.npy$'})
gta.setup(overwrite=False)
baseline_roi_fit(gta, make_plots=args.make_plots,
minmax_npred=[1e3, np.inf])
localize_sources(gta, nstep=5, dtheta_max=0.5, update=True,
prefix='base', make_plots=args.make_plots)
gta.find_sources(sqrt_ts_threshold=5.0, search_skydir=gta.roi.skydir,
search_minmax_radius=[1.0, np.nan])
gta.optimize()
gta.print_roi()
gta.print_params()
gta.free_sources(skydir=gta.roi.skydir, distance=1.0, pars='norm')
gta.fit(covar=True)
gta.print_roi()
gta.print_params()
gta.write_roi(args.roi_baseline, make_plots=args.make_plots) | def run_analysis(self, argv) | Run this analysis | 6.526775 | 6.496283 | 1.004694 |
args = self._parser.parse_args(argv)
if not HAVE_ST:
raise RuntimeError(
"Trying to run fermipy analysis, but don't have ST")
if is_null(args.skydirs):
skydir_dict = None
else:
skydir_dict = load_yaml(args.skydirs)
gta = GTAnalysis(args.config,
logging={'verbosity': 3},
fileio={'workdir_regex': '\.xml$|\.npy$'})
#gta.setup(overwrite=False)
gta.load_roi(args.roi_baseline)
gta.print_roi()
basedir = os.path.dirname(args.config)
# This should be a no-op, b/c it was done in the baseline analysis
for profile in args.profiles:
if skydir_dict is None:
skydir_keys = [None]
else:
skydir_keys = sorted(skydir_dict.keys())
for skydir_key in skydir_keys:
if skydir_key is None:
pkey, psrc_name, pdict = build_profile_dict(basedir, profile)
else:
skydir_val = skydir_dict[skydir_key]
pkey, psrc_name, pdict = build_profile_dict(basedir, profile)
pdict['ra'] = skydir_val['ra']
pdict['dec'] = skydir_val['dec']
pkey += "_%06i" % skydir_key
outfile = "sed_%s.fits" % pkey
# Add the source and get the list of correlated soruces
correl_dict, test_src_name = add_source_get_correlated(gta, psrc_name,
pdict, correl_thresh=0.25,
non_null_src=args.non_null_src)
# Write the list of correlated sources
correl_yaml = os.path.join(basedir, "correl_%s.yaml" % pkey)
write_yaml(correl_dict, correl_yaml)
gta.free_sources(False)
for src_name in correl_dict.keys():
gta.free_source(src_name, pars='norm')
# build the SED
if args.non_null_src:
gta.update_source(test_src_name, reoptimize=True)
gta.write_roi("base_%s"% pkey, make_plots=False)
gta.sed(test_src_name, prefix=pkey, outfile=outfile, make_plots=args.make_plots)
# remove the source
gta.delete_source(test_src_name)
# put the ROI back to how it was
gta.load_xml(args.roi_baseline)
return gta | def run_analysis(self, argv) | Run this analysis | 4.242467 | 4.24235 | 1.000028 |
job_configs = {}
ttype = args['ttype']
(targets_yaml, sim) = NAME_FACTORY.resolve_targetfile(args)
if sim is not None:
raise ValueError("Found 'sim' argument on AnalyzeROI_SG config.")
if targets_yaml is None:
return job_configs
config_yaml = 'config.yaml'
config_override = args.get('config')
if is_not_null(config_override):
config_yaml = config_override
targets = load_yaml(targets_yaml)
base_config = dict(roi_baseline=args['roi_baseline'],
make_plots=args['make_plots'])
for target_name in targets.keys():
name_keys = dict(target_type=ttype,
target_name=target_name,
fullpath=True)
target_dir = NAME_FACTORY.targetdir(**name_keys)
config_path = os.path.join(target_dir, config_yaml)
logfile = make_nfs_path(os.path.join(
target_dir, "%s_%s.log" % (self.linkname, target_name)))
job_config = base_config.copy()
job_config.update(dict(config=config_path,
logfile=logfile))
job_configs[target_name] = job_config
return job_configs | def build_job_configs(self, args) | Hook to build job configurations | 4.340509 | 4.360435 | 0.99543 |
result = list()
data = ctypes.create_string_buffer(8)
while size > 0:
length = min(size, 8)
n = hidapi.hid_read_timeout(self.device, data, length, 100)
if n <= 0:
raise IOError(
'pywws.device_ctypes_hidapi.USBDevice.read_data failed')
for i in range(n):
result.append(ord(data[i]))
size -= n
return result | def read_data(self, size) | Receive data from the device.
If the read fails for any reason, an :obj:`IOError` exception
is raised.
:param size: the number of bytes to read.
:type size: int
:return: the data received.
:rtype: list(int) | 4.477587 | 4.389065 | 1.020169 |
data = ''.join(map(chr, buf))
size = len(data)
if hidapi.hid_write(self.device, ctypes.c_char_p(data), size) != size:
raise IOError(
'pywws.device_ctypes_hidapi.USBDevice.write_data failed')
return True | def write_data(self, buf) | Send data to the device.
:param buf: the data to send.
:type buf: list(int)
:return: success status.
:rtype: bool | 6.546818 | 7.009404 | 0.934005 |
north = literal_eval(params.get('Zambretti', 'north', 'True'))
baro_upper = float(params.get('Zambretti', 'baro upper', '1050.0'))
baro_lower = float(params.get('Zambretti', 'baro lower', '950.0'))
if not hourly_data['rel_pressure']:
return ''
if hourly_data['wind_ave'] is None or hourly_data['wind_ave'] < 0.3:
wind = None
else:
wind = hourly_data['wind_dir']
if hourly_data['pressure_trend'] is None:
trend = 0.0
else:
trend = hourly_data['pressure_trend'] / 3.0
# normalise pressure
pressure = 950.0 + (
(1050.0 - 950.0) * (hourly_data['rel_pressure'] - baro_lower) /
(baro_upper - baro_lower))
# adjust pressure for wind direction
if wind is not None:
if not isinstance(wind, int):
wind = int(wind + 0.5) % 16
if not north:
# southern hemisphere, so add 180 degrees
wind = (wind + 8) % 16
pressure += ( 5.2, 4.2, 3.2, 1.05, -1.1, -3.15, -5.2, -8.35,
-11.5, -9.4, -7.3, -5.25, -3.2, -1.15, 0.9, 3.05)[wind]
# compute base forecast from pressure and trend (hPa / hour)
summer = north == (hourly_data['idx'].month >= 4 and
hourly_data['idx'].month <= 9)
if trend >= 0.1:
# rising pressure
if summer:
pressure += 3.2
F = 0.1740 * (1031.40 - pressure)
LUT = ('A', 'B', 'B', 'C', 'F', 'G', 'I', 'J', 'L', 'M', 'M', 'Q', 'T',
'Y')
elif trend <= -0.1:
# falling pressure
if summer:
pressure -= 3.2
F = 0.1553 * (1029.95 - pressure)
LUT = ('B', 'D', 'H', 'O', 'R', 'U', 'V', 'X', 'X', 'Z')
else:
# steady
F = 0.2314 * (1030.81 - pressure)
LUT = ('A', 'B', 'B', 'B', 'E', 'K', 'N', 'N', 'P', 'P', 'S', 'W', 'W',
'X', 'X', 'X', 'Z')
# clip to range of lookup table
F = min(max(int(F + 0.5), 0), len(LUT) - 1)
# convert to letter code
return LUT[F] | def zambretti_code(params, hourly_data) | Simple implementation of Zambretti forecaster algorithm.
Inspired by beteljuice.com Java algorithm, as converted to Python by
honeysucklecottage.me.uk, and further information
from http://www.meteormetrics.com/zambretti.htm | 3.25153 | 3.255066 | 0.998914 |
buf = [
self.ReadCommand,
address // 256,
address % 256,
self.EndMark,
self.ReadCommand,
address // 256,
address % 256,
self.EndMark,
]
if not self.dev.write_data(buf):
return None
return self.dev.read_data(32) | def read_block(self, address) | Read 32 bytes from the weather station.
If the read fails for any reason, :obj:`None` is returned.
:param address: address to read from.
:type address: int
:return: the data from the weather station.
:rtype: list(int) | 3.443517 | 3.617422 | 0.951926 |
buf = [
self.WriteCommandWord,
address // 256,
address % 256,
self.EndMark,
self.WriteCommandWord,
data,
0,
self.EndMark,
]
if not self.dev.write_data(buf):
return False
buf = self.dev.read_data(8)
if buf is None:
return False
for byte in buf:
if byte != 0xA5:
return False
return True | def write_byte(self, address, data) | Write a single byte to the weather station.
:param address: address to write to.
:type address: int
:param data: the value to write.
:type data: int
:return: success status.
:rtype: bool | 3.732126 | 4.161317 | 0.896862 |
result = ptr + self.reading_len[self.ws_type]
if result >= 0x10000:
result = self.data_start
return result | def inc_ptr(self, ptr) | Get next circular buffer data pointer. | 9.783109 | 8.276117 | 1.182089 |
result = ptr - self.reading_len[self.ws_type]
if result < self.data_start:
result = 0x10000 - self.reading_len[self.ws_type]
return result | def dec_ptr(self, ptr) | Get previous circular buffer data pointer. | 5.961271 | 5.650049 | 1.055083 |
if unbuffered:
# invalidate the single-block cache so fresh data is always read
self._data_pos = None
# round down ptr to a 'block boundary'
idx = ptr - (ptr % 0x20)
ptr -= idx
count = self.reading_len[self.ws_type]
if self._data_pos == idx:
# cache contains useful data
result = self._data_block[ptr:ptr + count]
if len(result) >= count:
return result
else:
result = list()
# a reading may straddle two 32-byte blocks
if ptr + count > 0x20:
# need part of next block, which may be in cache
if self._data_pos != idx + 0x20:
self._data_pos = idx + 0x20
self._data_block = self._read_block(self._data_pos)
result += self._data_block[0:ptr + count - 0x20]
if len(result) >= count:
return result
# read current block
self._data_pos = idx
self._data_block = self._read_block(self._data_pos)
result = self._data_block[ptr:ptr + count] + result
return result | def get_raw_data(self, ptr, unbuffered=False) | Get raw data from circular buffer.
If unbuffered is false then a cached value that was obtained
earlier may be returned. | 3.579849 | 3.567626 | 1.003426 |
result = _decode(self.get_raw_data(ptr, unbuffered),
self._reading_format[self.ws_type])
return result | def get_data(self, ptr, unbuffered=False) | Get decoded data from circular buffer.
If unbuffered is false then a cached value that was obtained
earlier may be returned. | 11.485029 | 13.936128 | 0.824119 |
# read the pointer to the most recent reading from the fixed block
new_ptr = _decode(
self._read_fixed_block(0x0020), self.lo_fix_format['current_pos'])
if new_ptr == self._current_ptr:
return self._current_ptr
# the pointer should only ever advance by one reading at a time
if self._current_ptr and new_ptr != self.inc_ptr(self._current_ptr):
logger.error(
'unexpected ptr change %06x -> %06x', self._current_ptr, new_ptr)
self._current_ptr = new_ptr
return self._current_ptr | def current_pos(self) | Get circular buffer location where current data is being written. | 4.941401 | 4.795695 | 1.030383 |
if unbuffered or not self._fixed_block:
self._fixed_block = self._read_fixed_block()
return self._fixed_block | def get_raw_fixed_block(self, unbuffered=False) | Get the raw "fixed block" of settings and min/max data. | 3.349566 | 3.046399 | 1.099517 |
if unbuffered or not self._fixed_block:
self._fixed_block = self._read_fixed_block()
format = self.fixed_format
# navigate down list of keys to get to wanted data
for key in keys:
format = format[key]
return _decode(self._fixed_block, format) | def get_fixed_block(self, keys=[], unbuffered=False) | Get the decoded "fixed block" of settings and min/max data.
A subset of the entire block can be selected by keys. | 5.2544 | 5.39675 | 0.973623 |
# send data
for ptr, value in data:
self._write_byte(ptr, value)
# set 'data changed'
self._write_byte(self.fixed_format['data_changed'][0], 0xAA)
# wait for station to clear 'data changed'
while True:
ack = _decode(
self._read_fixed_block(0x0020), self.fixed_format['data_changed'])
if ack == 0:
break
logger.debug('write_data waiting for ack')
# poll interval: the station can take several seconds to acknowledge
time.sleep(6) | def write_data(self, data) | Write a set of single bytes to the weather station. Data must be an
array of (ptr, value) pairs. | 6.269672 | 5.437662 | 1.153009 |
for bus in usb.busses():
for device in bus.devices:
if (device.idVendor == idVendor and
device.idProduct == idProduct):
return device
return None | def _find_device(self, idVendor, idProduct) | Find a USB device by product and vendor id. | 2.210955 | 2.138659 | 1.033805 |
result = self.devh.interruptRead(0x81, size, 1200)
if result is None or len(result) < size:
raise IOError('pywws.device_libusb.USBDevice.read_data failed')
return list(result) | def read_data(self, size) | Receive data from the device.
If the read fails for any reason, an :obj:`IOError` exception
is raised.
:param size: the number of bytes to read.
:type size: int
:return: the data received.
:rtype: list(int) | 8.400145 | 8.066225 | 1.041397 |
# HID SET_REPORT style class-specific control transfer to the interface
result = self.devh.controlMsg(
usb.ENDPOINT_OUT + usb.TYPE_CLASS + usb.RECIP_INTERFACE,
usb.REQ_SET_CONFIGURATION, buf, value=0x200, timeout=50)
if result != len(buf):
raise IOError('pywws.device_libusb.USBDevice.write_data failed')
return True | def write_data(self, buf) | Send data to the device.
If the write fails for any reason, an :obj:`IOError` exception
is raised.
:param buf: the data to send.
:type buf: list(int)
:return: success status.
:rtype: bool | 7.64634 | 8.215351 | 0.930738 |
try:
# treat the (naive) datetime as UTC and convert to seconds since
# the Unix epoch
ts = int(
(dt.replace(tzinfo=pytz.utc)
- datetime(1970,1,1,tzinfo=pytz.utc)
).total_seconds()
)
except (OverflowError,OSError):
# clamp out-of-range values (e.g. datetime.min / datetime.max)
if dt < datetime.now():
ts = 0
else:
ts = 2**63-1
return ts | def _adapt_WSDateTime(dt) | Return unix timestamp of the datetime like input.
If conversion overflows high, return sint64_max ,
if underflows, return 0 | 2.754739 | 2.669872 | 1.031787 |
# a slice selects a range of records; a single datetime selects one
if isinstance(i, slice):
if i.step is not None:
raise TypeError("Slice step not permitted")
if ( (i.start is not None and not isinstance(i.start, datetime))
or (i.stop is not None and not isinstance(i.stop, datetime))
):
raise TypeError(
"Slice indices must be {} or None".format(datetime)
)
if i.start is not None and i.stop is not None:
if i.start > i.stop:
raise ValueError(
"Start index is greater than the End index"
)
else:
# Substitution of the key coloumn, but the
# parameters themselves will be substituted by sqlite3
predicate = "WHERE {} BETWEEN :start AND :stop".format(
self._keycol
)
elif i.start is not None:
# i.stop will also be None
predicate = "WHERE {} >= :start".format(self._keycol)
elif i.stop is not None:
# i.start will also be None
predicate = "WHERE {} <= :stop".format(self._keycol)
else:
# both are None, so equivelent to wanting everything
predicate = ""
multi = True
pred = {"start": i.start, "stop": i.stop}
elif isinstance(i, datetime):
# Substitution of the key coloumn, but the
# parameters themselves will be substituted by sqlite3
predicate = "WHERE {} = :key".format(self._keycol)
multi = False
pred = {"key": i}
else:
# not a slice or a datetime object
raise TypeError("List indices must be {}".format(datetime))
# predicate is the end of the query string.
# multi is a boolean indicating whether the result should be iterable
# or not. pred is a dict of the parameters for substitution
return (predicate, multi, pred) | def _predicate(self, i) | Given a valid datetime or slace, return the predicate portion
of the SQL query, a boolean indicating whether multiple items are
expected from the result, and a dictionary of parameters for the query | 3.069026 | 2.836308 | 1.08205 |
key_list = self.key_list
# template record with every column set to None
keynone = {key:None for key in key_list}
# Generator which fills in missing data from the original iterator
def datagen(i):
for datum in i:
tmp = keynone.copy()
tmp.update(datum)
yield tmp
with self._connection as con:
con.executemany(
# NOTE(review): the SQL string literal before .format appears to have
# been stripped from this listing (likely an INSERT OR REPLACE
# statement) -- confirm against the original source
.format(table=self.table,
keylist=", ".join(self.key_list),
vallist=", :".join(self.key_list)
), datagen(i)) | def update(self, i) | D.update(E) -> None. Update D from iterable E with pre-existing
items being overwritten.
Elements in E are assumed to be dicts containing the primary key to
allow the equivelent of:
for k in E: D[k.primary_key] = k | 6.023016 | 5.395626 | 1.116278 |
if not isinstance(i, datetime):
raise TypeError("'{}' is not a datetime object".format(i))
else:
# NOTE(review): the SQL string literal before .format appears to have
# been stripped from this listing -- confirm against the original source
result = self._connection.execute(
.format(
selkeycol=self.selkeycol,
table=self.table,
keycol=self._keycol
), {"key":i}).fetchone()
return result[self._keycol] if result is not None else None | def before(self, i) | Return datetime of newest existing data record whose datetime
is < idx. If no such record exists, return None. | 5.5006 | 4.940899 | 1.113279 |
# NOTE(review): the SQL string literal before .format appears to have
# been stripped from this listing (likely a SELECT of the key column)
# -- confirm against the original source
return set(
row[self._keycol] for row in self._connection.execute(
.format(
self.selkeycol,
self.table,
self._keycol
)
)
) | def keys(self) | D.keys() -> a set-like object providing a view on D's keys | 7.955917 | 7.220626 | 1.101832 |
keycol = self._keycol
for row in self.__iter__():
yield (row[keycol], dict(row)) | def items(self) | D.items() -> a set-like object providing a view on D's items | 8.516015 | 8.797652 | 0.967987 |
with self._connection as con:
con.execute("DELETE FROM {};".format(self.table)) | def clear(self) | S.clear() -> None -- remove all items from S | 7.782333 | 7.001096 | 1.111588 |
try:
value = next(iter(self))
key = value[self._keycol]
except StopIteration:
raise KeyError
del self[key]
return key, value | def popitem(self) | D.popitem() -> (k, v)
Remove and return some (key, value) pair
as a 2-tuple; but raise KeyError if D is empty. | 4.492609 | 3.983054 | 1.127931 |
# get the default locale
lc, encoding = locale.getdefaultlocale()
try:
if '.' in lang:
locale.setlocale(locale.LC_ALL, lang)
else:
locale.setlocale(locale.LC_ALL, (lang, encoding))
except locale.Error:
return False
return True | def set_locale(lang) | Set the 'locale' used by a program.
This affects the entire application, changing the way dates,
currencies and numbers are represented. It should not be called
from a library routine that may be used in another program.
The ``lang`` parameter can be any string that is recognised by
``locale.setlocale()``, for example ``en``, ``en_GB`` or ``en_GB.UTF-8``.
:param lang: language code.
:type lang: string
:return: success status.
:rtype: bool | 2.539789 | 2.698339 | 0.941242 |
global translation
# make list of possible languages, in order of preference
langs = list()
if lang:
if '.' in lang:
lang = lang.split('.')[0]
langs += [lang, lang[:2]]
# get translation object
path = pkg_resources.resource_filename('pywws', 'lang')
codeset = locale.getpreferredencoding()
if codeset == 'ASCII':
# prefer UTF-8 over a bare ASCII default
codeset = 'UTF-8'
try:
translation = gettext.translation(
'pywws', path, languages=langs, codeset=codeset)
# Python 3 translations don't have a ugettext method
if not hasattr(translation, 'ugettext'):
translation.ugettext = translation.gettext
except IOError:
return False
return True | def set_translation(lang) | Set the translation used by (some) pywws modules.
This sets the translation object ``pywws.localisation.translation``
to use a particular language.
The ``lang`` parameter can be any string of the form ``en``,
``en_GB`` or ``en_GB.UTF-8``. Anything after a ``.`` character is
ignored. In the case of a string such as ``en_GB``, the routine
will search for an ``en_GB`` language file before searching for an
``en`` one.
:param lang: language code.
:type lang: string
:return: success status.
:rtype: bool | 3.43955 | 3.28985 | 1.045504 |
lang = params.get('config', 'language', None)
if lang:
set_locale(lang)
set_translation(lang) | def set_application_language(params) | Set the locale and translation for a pywws program.
This function reads the language from the configuration file, then
calls :func:`set_locale` and :func:`set_translation`.
:param params: a :class:`pywws.storage.params` object.
:type params: object | 5.814351 | 5.885209 | 0.98796 |
result = list()
while size > 0:
count = min(size, 8)
buf = self.hid.read(count)
if len(buf) < count:
raise IOError(
'pywws.device_cython_hidapi.USBDevice.read_data failed')
result += buf
size -= count
return result | def read_data(self, size) | Receive data from the device.
If the read fails for any reason, an :obj:`IOError` exception
is raised.
:param size: the number of bytes to read.
:type size: int
:return: the data received.
:rtype: list(int) | 6.637438 | 6.622933 | 1.00219 |
# hid.write returns the number of bytes sent; a short write is an error
if self.hid.write(buf) != len(buf):
raise IOError(
'pywws.device_cython_hidapi.USBDevice.write_data failed')
return True | def write_data(self, buf) | Send data to the device.
:param buf: the data to send.
:type buf: list(int)
:return: success status.
:rtype: bool | 15.985442 | 16.259792 | 0.983127 |
if dt.tzinfo is None:
dt = dt.replace(tzinfo=self.utc)
return dt.astimezone(self.local) | def to_local(self, dt) | Convert any timestamp to local time (with tzinfo). | 2.488616 | 2.216964 | 1.122534 |
if dt.tzinfo is None:
return dt.replace(tzinfo=self.utc)
return dt.astimezone(self.utc) | def to_utc(self, dt) | Convert any timestamp to UTC (with tzinfo). | 2.381537 | 2.221143 | 1.072212 |
if dt.tzinfo is None:
return dt
return dt.astimezone(self.utc).replace(tzinfo=None) | def to_naive(self, dt) | Convert any timestamp to pywws (utc, no tzinfo). | 2.848346 | 2.920513 | 0.97529 |
# convert to local standard time, then apply DST if in effect
local_time = dt + self.standard_offset
if use_dst:
dst_offset = self.dst(local_time)
if dst_offset:
local_time += dst_offset
adjusted_time = local_time.replace(**kwds)
# _recurse guards against stepping back more than one extra day
if adjusted_time > local_time and not _recurse:
return self.local_replace(
dt - DAY, use_dst=use_dst, _recurse=True, **kwds)
adjusted_time -= dst_offset
if self.dst(adjusted_time):
return adjusted_time - self.standard_offset
adjusted_time = local_time.replace(**kwds)
if use_dst:
dst_offset = self.dst(adjusted_time)
adjusted_time -= dst_offset
if adjusted_time > local_time and not _recurse:
return self.local_replace(
dt - DAY, use_dst=use_dst, _recurse=True, **kwds)
return adjusted_time - self.standard_offset | def local_replace(self, dt, use_dst=True, _recurse=False, **kwds) | Return pywws timestamp (utc, no tzinfo) for the most recent
local time before the pywws timestamp dt, with datetime replace
applied. | 2.029835 | 2.067228 | 0.981911 |
with self._lock:
if not self._config.has_option(section, option):
if default is not None:
self._set(section, option, default)
return default
return self._config.get(section, option) | def get(self, section, option, default=None) | Get a parameter value and return a string.
If default is specified and section or option are not defined
in the file, they are created and set to default, which is
then the return value. | 2.230612 | 2.327912 | 0.958203 |
# delegate to _set while holding the lock for thread safety
with self._lock:
self._set(section, option, value) | def set(self, section, option, value) | Set option in section to string value. | 4.545612 | 4.813723 | 0.944303 |
with self._lock:
if not self._config.has_section(section):
return
# drop the option, then the section itself if nothing is left in it
if self._config.has_option(section, option):
self._config.remove_option(section, option)
self._dirty = True
if not self._config.options(section):
self._config.remove_section(section)
self._dirty = True | def unset(self, section, option) | Remove option from section. | 1.819779 | 1.818556 | 1.000672 |
for key in keys:
if not self.params[key]:
raise RuntimeError('"{}" not set in weather.ini'.format(key)) | def check_params(self, *keys) | Ensure user has set required values in weather.ini.
Normally the :py:data:`~ServiceBase.config` names with
``required`` set are checked, but if your uploader has a
``register`` method you may need to check for other data.
:param str keys: the :py:data:`~ServiceBase.config` names to
verify. | 6.995913 | 4.410091 | 1.586342 |
count = 0
for x in i:
count+=1
if count % 10000 == 0:
logger.info("%d records so far, current record is %s",
count, x["idx"])
yield x | def monitor(i) | Given an iterator, yields data from it
but prints progress every 10,000 records | 5.29813 | 4.10686 | 1.290068 |
# find first raw record not yet calibrated
start = calib_data.before(datetime.max)
if start is None:
start = datetime.min
start = raw_data.after(start + SECOND)
if start is None:
return start
del calib_data[start:]
calibrator = Calib(params, raw_data)
def calibgen(inputdata):
count = 0
for data in inputdata:
idx = data['idx']
count += 1
if count % 10000 == 0:
logger.info("calib: %s", idx.isoformat(' '))
elif count % 500 == 0:
logger.debug("calib: %s", idx.isoformat(' '))
# for/else: only yield when all essential fields are present
for key in ('rain', 'abs_pressure', 'temp_in'):
if data[key] is None:
logger.error('Ignoring invalid data at %s', idx.isoformat(' '))
break
else:
yield calibrator.calib(data)
calib_data.update(calibgen(raw_data[start:]))
return start | def calibrate_data(params, raw_data, calib_data) | Calibrate' raw data, using a user-supplied function. | 4.258312 | 4.282339 | 0.994389 |
# resume from the last stored hourly summary, or from process_from
start = hourly_data.before(datetime.max)
if start is None:
start = datetime.min
start = calib_data.after(start + SECOND)
if process_from:
if start:
start = min(start, process_from)
else:
start = process_from
if start is None:
return start
# set start of hour in local time (not all time offsets are integer hours)
start += timezone.standard_offset
start = start.replace(minute=0, second=0)
start -= timezone.standard_offset
del hourly_data[start:]
# preload pressure history, and find last valid rain
prev = None
pressure_history = deque()
last_rain = None
for data in calib_data[start - HOURx3:start]:
if data['rel_pressure']:
pressure_history.append((data['idx'], data['rel_pressure']))
if data['rain'] is not None:
last_rain = data['rain']
prev = data
# iterate over data in one hour chunks
stop = calib_data.before(datetime.max)
acc = HourAcc(last_rain)
def hourlygen(inputdata, prev):
hour_start = start
count = 0
while hour_start <= stop:
count += 1
if count % 1008 == 0:
logger.info("hourly: %s", hour_start.isoformat(' '))
elif count % 24 == 0:
logger.debug("hourly: %s", hour_start.isoformat(' '))
hour_end = hour_start + HOUR
acc.reset()
for data in inputdata[hour_start:hour_end]:
if data['rel_pressure']:
pressure_history.append((data['idx'], data['rel_pressure']))
if prev:
# warn about gaps that don't match the logging interval
err = data['idx'] - prev['idx']
if abs(err - timedelta(minutes=data['delay'])) > TIME_ERR:
logger.info('unexpected data interval %s %s',
data['idx'].isoformat(' '), str(err))
acc.add_raw(data)
prev = data
new_data = acc.result()
if new_data and (new_data['idx'] - hour_start) >= timedelta(minutes=9):
# compute pressure trend
new_data['pressure_trend'] = None
if new_data['rel_pressure']:
target = new_data['idx'] - HOURx3
while (len(pressure_history) >= 2 and
abs(pressure_history[0][0] - target) >
abs(pressure_history[1][0] - target)):
pressure_history.popleft()
if (pressure_history and
abs(pressure_history[0][0] - target) < HOUR):
new_data['pressure_trend'] = (
new_data['rel_pressure'] - pressure_history[0][1])
# store new hourly data
yield new_data
hour_start = hour_end
hourly_data.update(hourlygen(calib_data, prev))
return start | def generate_hourly(calib_data, hourly_data, process_from) | Generate hourly summaries from calibrated data. | 3.635947 | 3.628565 | 1.002034 |
# resume from the last stored daily summary, or from process_from
start = daily_data.before(datetime.max)
if start is None:
start = datetime.min
start = calib_data.after(start + SECOND)
if process_from:
if start:
start = min(start, process_from)
else:
start = process_from
if start is None:
return start
# round to start of this day, in local time
start = timezone.local_replace(
start, use_dst=use_dst, hour=day_end_hour, minute=0, second=0)
del daily_data[start:]
stop = calib_data.before(datetime.max)
acc = DayAcc()
def dailygen(inputdata):
day_start = start
count = 0
while day_start <= stop:
count += 1
if count % 30 == 0:
logger.info("daily: %s", day_start.isoformat(' '))
else:
logger.debug("daily: %s", day_start.isoformat(' '))
day_end = day_start + DAY
if use_dst:
# day might be 23 or 25 hours long
day_end = timezone.local_replace(
day_end + HOURx3, use_dst=use_dst, hour=day_end_hour)
acc.reset()
# accumulate both raw and hourly records for the day
for data in inputdata[day_start:day_end]:
acc.add_raw(data)
for data in hourly_data[day_start:day_end]:
acc.add_hourly(data)
new_data = acc.result()
if new_data:
new_data['start'] = day_start
yield new_data
day_start = day_end
daily_data.update(dailygen(calib_data))
return start | def generate_daily(day_end_hour, use_dst,
calib_data, hourly_data, daily_data, process_from) | Generate daily summaries from calibrated and hourly data. | 3.323752 | 3.353147 | 0.991234 |
# resume from the last stored monthly summary, or from process_from
start = monthly_data.before(datetime.max)
if start is None:
start = datetime.min
start = daily_data.after(start + SECOND)
if process_from:
if start:
start = min(start, process_from)
else:
start = process_from
if start is None:
return start
# set start to start of first day of month (local time)
start = timezone.local_replace(
start, use_dst=use_dst, day=1, hour=day_end_hour, minute=0, second=0)
if day_end_hour >= 12:
# month actually starts on the last day of previous month
start -= DAY
del monthly_data[start:]
stop = daily_data.before(datetime.max)
if stop is None:
return None
acc = MonthAcc(rain_day_threshold)
def monthlygen(inputdata):
month_start = start
count = 0
while month_start <= stop:
count += 1
if count % 12 == 0:
logger.info("monthly: %s", month_start.isoformat(' '))
else:
logger.debug("monthly: %s", month_start.isoformat(' '))
# advance one calendar month (the week offset avoids creating
# an invalid day-of-month when incrementing the month number)
month_end = month_start + WEEK
if month_end.month < 12:
month_end = month_end.replace(month=month_end.month+1)
else:
month_end = month_end.replace(month=1, year=month_end.year+1)
month_end = month_end - WEEK
if use_dst:
# month might straddle summer time start or end
month_end = timezone.local_replace(
month_end + HOURx3, use_dst=use_dst, hour=day_end_hour)
acc.reset()
for data in inputdata[month_start:month_end]:
acc.add_daily(data)
new_data = acc.result()
if new_data:
new_data['start'] = month_start
yield new_data
month_start = month_end
monthly_data.update(monthlygen(daily_data))
return start | def generate_monthly(rain_day_threshold, day_end_hour, use_dst,
daily_data, monthly_data, process_from) | Generate monthly summaries from daily data. | 3.190984 | 3.202855 | 0.996294 |
logger.info('Generating summary data')
# get time of last record
last_raw = context.raw_data.before(datetime.max)
if last_raw is None:
raise IOError('No data found. Check data directory parameter.')
# get daytime end hour (in local time)
day_end_hour, use_dst = get_day_end_hour(context.params)
# get other config
rain_day_threshold = float(
context.params.get('config', 'rain day threshold', '0.2'))
# each stage returns the timestamp it (re)started from, which is
# passed on so the next stage reprocesses the same period
# calibrate raw data
start = calibrate_data(context.params, context.raw_data, context.calib_data)
# generate hourly data
start = generate_hourly(context.calib_data, context.hourly_data, start)
# generate daily data
start = generate_daily(day_end_hour, use_dst,
context.calib_data, context.hourly_data, context.daily_data, start)
# generate monthly data
generate_monthly(rain_day_threshold, day_end_hour, use_dst,
context.daily_data, context.monthly_data, start)
return 0 | def process_data(context) | Generate summaries from raw weather station data.
The meteorological day end (typically 2100 or 0900 local time) is
set in the preferences file ``weather.ini``. The default value is
2100 (2200 during DST), following the historical convention for
weather station readings. | 4.543636 | 4.202493 | 1.081176 |
result = self.dev.bulkRead(0x81, size, timeout=1200)
if not result or len(result) < size:
raise IOError('pywws.device_libusb1.USBDevice.read_data failed')
# Python2 libusb1 version 1.5 and earlier returns a string
if not isinstance(result[0], int):
result = map(ord, result)
return list(result) | def read_data(self, size) | Receive data from the device.
If the read fails for any reason, an :obj:`IOError` exception
is raised.
:param size: the number of bytes to read.
:type size: int
:return: the data received.
:rtype: list(int) | 7.240601 | 7.33171 | 0.987573 |
# libusb1 expects a str buffer on Python 2 and bytes on Python 3
if sys.version_info[0] < 3:
str_buf = ''.join(map(chr, buf))
else:
str_buf = bytes(buf)
# HID SET_REPORT style class-specific control transfer to the interface
result = self.dev.controlWrite(
libusb1.LIBUSB_ENDPOINT_OUT | libusb1.LIBUSB_TYPE_CLASS |
libusb1.LIBUSB_RECIPIENT_INTERFACE,
libusb1.LIBUSB_REQUEST_SET_CONFIGURATION,
0x200, 0, str_buf, timeout=50)
if result != len(buf):
raise IOError('pywws.device_libusb1.USBDevice.write_data failed')
return True | def write_data(self, buf) | Send data to the device.
If the write fails for any reason, an :obj:`IOError` exception
is raised.
:param buf: the data to send.
:type buf: list(int)
:return: success status.
:rtype: bool | 2.975235 | 3.040844 | 0.978424 |
if not isinstance(idx, datetime):
raise TypeError("'%s' is not %s" % (idx, datetime))
# search backwards, one day's cache at a time
day = min(idx.date(), self._hi_limit - DAY)
while day >= self._lo_limit:
if day < self._rd_cache.lo or day >= self._rd_cache.hi:
self._load(self._rd_cache, day)
self._rd_cache.set_ptr(idx)
if self._rd_cache.ptr > 0:
# the record just before the pointer is the newest one < idx
return self._rd_cache.data[self._rd_cache.ptr - 1]['idx']
day = self._rd_cache.lo - DAY
return None | def before(self, idx) | Return datetime of newest existing data record whose
datetime is < idx.
Might not even be in the same year! If no such record exists,
return None. | 4.488735 | 4.382602 | 1.024217 |
if not isinstance(idx, datetime):
raise TypeError("'%s' is not %s" % (idx, datetime))
# search forwards, one day's cache at a time
day = max(idx.date(), self._lo_limit)
while day < self._hi_limit:
if day < self._rd_cache.lo or day >= self._rd_cache.hi:
self._load(self._rd_cache, day)
self._rd_cache.set_ptr(idx)
if self._rd_cache.ptr < len(self._rd_cache.data):
# the record at the pointer is the oldest one >= idx
return self._rd_cache.data[self._rd_cache.ptr]['idx']
day = self._rd_cache.hi
return None | def after(self, idx) | Return datetime of oldest existing data record whose
datetime is >= idx.
Might not even be in the same year! If no such record exists,
return None. | 3.929273 | 3.755083 | 1.046388 |
hi = self.after(idx)
lo = self.before(idx)
if hi is None:
return lo
if lo is None:
return hi
if abs(hi - idx) < abs(lo - idx):
return hi
return lo | def nearest(self, idx) | Return datetime of record whose datetime is nearest idx. | 2.526642 | 2.256732 | 1.119602 |
# remove every cache file, then the (now empty) directories themselves
for root, dirs, files in os.walk(self._root_dir, topdown=False):
for file in files:
os.unlink(os.path.join(root, file))
os.rmdir(root)
# Get the root dir back and re-initialise to start again
root_dir = os.path.abspath(
os.path.join(self._root_dir, os.pardir))
self.__init__(root_dir) | def clear(self) | Clears all data from the data store permanently | 2.913328 | 2.833663 | 1.028114 |
result = self.dev.read(0x81, size, timeout=1200)
if not result or len(result) < size:
raise IOError('pywws.device_pyusb1.USBDevice.read_data failed')
return list(result) | def read_data(self, size) | Receive data from the device.
If the read fails for any reason, an :obj:`IOError` exception
is raised.
:param size: the number of bytes to read.
:type size: int
:return: the data received.
:rtype: list(int) | 7.554928 | 7.778715 | 0.971231 |
# class-specific OUT request to the interface (HID SET_REPORT style)
bmRequestType = usb.util.build_request_type(
usb.util.ENDPOINT_OUT,
usb.util.CTRL_TYPE_CLASS,
usb.util.CTRL_RECIPIENT_INTERFACE
)
result = self.dev.ctrl_transfer(
bmRequestType=bmRequestType,
bRequest=usb.REQ_SET_CONFIGURATION,
data_or_wLength=buf,
wValue=0x200,
timeout=50)
if result != len(buf):
raise IOError('pywws.device_pyusb1.USBDevice.write_data failed')
return True | def write_data(self, buf) | Send data to the device.
If the write fails for any reason, an :obj:`IOError` exception
is raised.
:param buf: the data to send.
:type buf: list(int)
:return: success status.
:rtype: bool | 3.827161 | 4.194482 | 0.912428 |
_ = pywws.localisation.translation.ugettext
if trend > 6.0:
return _(u'rising very rapidly')
elif trend > 3.5:
return _(u'rising quickly')
elif trend > 1.5:
return _(u'rising')
elif trend >= 0.1:
return _(u'rising slowly')
elif trend < -6.0:
return _(u'falling very rapidly')
elif trend < -3.5:
return _(u'falling quickly')
elif trend < -1.5:
return _(u'falling')
elif trend <= -0.1:
return _(u'falling slowly')
return _(u'steady') | def pressure_trend_text(trend) | Convert pressure trend to a string, as used by the UK met
office. | 2.483397 | 2.433238 | 1.020614 |
wind_filter = pywws.process.WindFilter()
count = 0
for item in data:
wind_filter.add(item)
if item['wind_dir'] is not None:
count += 1
if count < min_count:
return None
speed, direction = wind_filter.result()
if speed is None or speed < threshold:
return None
return direction * 22.5 | def winddir_average(data, threshold, min_count, decay=1.0) | Compute average wind direction (in degrees) for a slice of data.
The wind speed and direction of each data item is converted to a
vector before averaging, so the result reflects the dominant wind
direction during the time period covered by the data.
Setting the ``decay`` parameter converts the filter from a simple
averager to one where the most recent sample carries the highest
weight, and earlier samples have a lower weight according to how
long ago they were.
This process is an approximation of "exponential smoothing". See
`Wikipedia <http://en.wikipedia.org/wiki/Exponential_smoothing>`_
for a detailed discussion.
The parameter ``decay`` corresponds to the value ``(1 - alpha)``
in the Wikipedia description. Because the weather data being
smoothed may not be at regular intervals this parameter is the
decay over 5 minutes. Weather data at other intervals will have
its weight scaled accordingly.
:note: The return value is in degrees, not the 0..15 range used
elsewhere in pywws.
:param data: a slice of pywws raw/calib or hourly data.
:type data: pywws.storage.CoreStore
:param threshold: minimum average windspeed for there to be a
valid wind direction.
:type threshold: float
:param min_count: minimum number of data items for there to be a
valid wind direction.
:type min_count: int
:param decay: filter coefficient decay rate.
:type decay: float
:rtype: float | 3.839134 | 3.934796 | 0.975688 |
"Convert wind direction from 0..15 to compass point text"
global _winddir_text_array
if pts is None:
return None
if not isinstance(pts, int):
pts = int(pts + 0.5) % 16
if not _winddir_text_array:
_ = pywws.localisation.translation.ugettext
_winddir_text_array = (
_(u'N'), _(u'NNE'), _(u'NE'), _(u'ENE'),
_(u'E'), _(u'ESE'), _(u'SE'), _(u'SSE'),
_(u'S'), _(u'SSW'), _(u'SW'), _(u'WSW'),
_(u'W'), _(u'WNW'), _(u'NW'), _(u'NNW'),
)
return _winddir_text_array[pts] | def winddir_text(pts) | Convert wind direction from 0..15 to compass point text | 2.467983 | 2.23741 | 1.103054 |
"Convert wind from metres per second to Beaufort scale"
if ms is None:
return None
for bft in range(len(_bft_threshold)):
if ms < _bft_threshold[bft]:
return bft
return len(_bft_threshold) | def wind_bft(ms) | Convert wind from metres per second to Beaufort scale | 4.397243 | 3.528929 | 1.246056 |
if temp is None or hum is None:
return None
a = 17.27
b = 237.7
gamma = ((a * temp) / (b + temp)) + math.log(float(hum) / 100.0)
return (b * gamma) / (a - gamma) | def dew_point(temp, hum) | Compute dew point, using formula from
http://en.wikipedia.org/wiki/Dew_point. | 2.456407 | 2.456545 | 0.999944 |
"Calculate Humidity Index as per Canadian Weather Standards"
if temp is None or humidity is None:
return None
# Formulas are adapted to not use e^(...) with no appreciable
# change in accuracy (0.0227%)
saturation_pressure = (6.112 * (10.0**(7.5 * temp / (237.7 + temp))) *
float(humidity) / 100.0)
return temp + (0.555 * (saturation_pressure - 10.0)) | def cadhumidex(temp, humidity) | Calculate Humidity Index as per Canadian Weather Standards | 8.051044 | 6.529001 | 1.23312 |
if temp is None or humidity is None:
return None
if dew is None:
dew = dew_point(temp, humidity)
# formula is only valid for hot, humid conditions (see docstring)
if temp < 26.7 or humidity < 40 or dew < 12.0:
return temp
# work in Fahrenheit; convert the result back to Celsius at the end
T = (temp * 1.8) + 32.0
R = humidity
# regression coefficients from the reference cited in the docstring
c_1 = -42.379
c_2 = 2.04901523
c_3 = 10.14333127
c_4 = -0.22475541
c_5 = -0.00683783
c_6 = -0.05481717
c_7 = 0.00122874
c_8 = 0.00085282
c_9 = -0.00000199
return ((c_1 + (c_2 * T) + (c_3 * R) + (c_4 * T * R) + (c_5 * (T**2)) +
(c_6 * (R**2)) + (c_7 * (T**2) * R) + (c_8 * T * (R**2)) +
(c_9 * (T**2) * (R**2))) - 32.0) / 1.8 | def usaheatindex(temp, humidity, dew=None) | Calculate Heat Index as per USA National Weather Service Standards
See http://en.wikipedia.org/wiki/Heat_index, formula 1. The
formula is not valid for T < 26.7C, Dew Point < 12C, or RH < 40% | 1.629765 | 1.546266 | 1.054 |
def wind_chill(temp, wind):
    """Compute wind chill, using formula from
    http://en.wikipedia.org/wiki/wind_chill

    ``temp`` is in degrees C, ``wind`` in metres per second.
    """
    if temp is None or wind is None:
        return None
    wind_kph = wind * 3.6
    # The formula only applies for winds above 4.8 km/h and temps <= 10 C.
    if wind_kph <= 4.8 or temp > 10.0:
        return temp
    chill = (13.12 + (0.6215 * temp) +
             ((0.3965 * temp) - 11.37) * (wind_kph ** 0.16))
    # Wind chill should never exceed the actual temperature.
    return min(chill, temp)
http://en.wikipedia.org/wiki/wind_chill | 3.39263 | 3.184128 | 1.065482 |
def apparent_temp(temp, rh, wind):
    """Compute apparent temperature (real feel), using formula from
    http://www.bom.gov.au/info/thermal_stress/

    ``temp`` is in degrees C, ``rh`` relative humidity in percent,
    ``wind`` in metres per second.
    """
    if temp is None or rh is None or wind is None:
        return None
    # Water vapour pressure (hPa) from relative humidity and temperature.
    e_hpa = 6.105 * math.exp(17.27 * temp / (237.7 + temp)) * (float(rh) / 100.0)
    return temp + 0.33 * e_hpa - 0.70 * wind - 4.00
http://www.bom.gov.au/info/thermal_stress/ | 2.998497 | 3.124303 | 0.959733 |
def cloud_base(temp, hum):
    """Calculate cumulus cloud base in metres, using formula from
    https://en.wikipedia.org/wiki/Cloud_base or
    https://de.wikipedia.org/wiki/Kondensationsniveau#Konvektionskondensationsniveau
    """
    if temp is None or hum is None:
        return None
    # Roughly 125 m of altitude per degree C of temperature/dew-point spread.
    spread = float(temp) - dew_point(temp, hum)
    return spread * 125.0
https://en.wikipedia.org/wiki/Cloud_base or
https://de.wikipedia.org/wiki/Kondensationsniveau#Konvektionskondensationsniveau | 4.771029 | 4.740037 | 1.006538 |
def upload(training_dir, algorithm_id=None, writeup=None, api_key=None,
           ignore_open_monitors=False):
    """Upload the results of training (as automatically recorded by your
    env's monitor) to OpenAI Gym.

    Args:
        training_dir (str): A directory containing the results of a
            training run.
        algorithm_id (Optional[str]): An algorithm id indicating the
            particular version of the algorithm (including choices of
            parameters) you are running.
        writeup (Optional[str]): A Gist URL (of the form
            https://gist.github.com/<user>/<id>) containing your writeup
            for this evaluation.
        api_key (Optional[str]): Your OpenAI API key. Can also be provided
            as an environment variable (OPENAI_GYM_API_KEY).
        ignore_open_monitors (bool): If True, skip the check for monitors
            still open on the training directory.

    Returns:
        The created ``resource.Evaluation`` object.

    Raises:
        error.Error: If a monitor is still open, or if the training
            directory contains no recorded training data.
    """
    if not ignore_open_monitors:
        open_monitors = monitoring._open_monitors()
        if len(open_monitors) > 0:
            envs = [m.env.spec.id if m.env.spec else '(unknown)' for m in open_monitors]
            raise error.Error("Still have an open monitor on {}. You must run 'env.monitor.close()' before uploading.".format(', '.join(envs)))

    env_info, training_episode_batch, training_video = upload_training_data(training_dir, api_key=api_key)
    env_id = env_info['env_id']
    training_episode_batch_id = training_video_id = None
    if training_episode_batch:
        training_episode_batch_id = training_episode_batch.id
    if training_video:
        training_video_id = training_video.id

    # BUG FIX: the "no training data" check was previously nested inside the
    # `logger.level <= logging.INFO` branch, so with quieter log levels the
    # error was silently skipped and an empty evaluation got created.
    if training_episode_batch_id is None and training_video_id is None:
        # BUG FIX: the original message mixed '%s' with str.format, leaving a
        # literal '[%s]' in the rendered error text.
        raise error.Error("[{}] You didn't have any recorded training data in {}. Once you've used 'env.monitor.start(training_dir)' to start recording, you need to actually run some rollouts. Please join the community chat on https://gym.openai.com if you have any issues.".format(env_id, training_dir))

    if logger.level <= logging.INFO:
        if training_episode_batch_id is not None and training_video_id is not None:
            logger.info('[%s] Creating evaluation object from %s with learning curve and training video', env_id, training_dir)
        elif training_episode_batch_id is not None:
            logger.info('[%s] Creating evaluation object from %s with learning curve', env_id, training_dir)
        else:
            logger.info('[%s] Creating evaluation object from %s with training video', env_id, training_dir)

    evaluation = resource.Evaluation.create(
        training_episode_batch=training_episode_batch_id,
        training_video=training_video_id,
        env=env_info['env_id'],
        algorithm={
            'id': algorithm_id,
        },
        writeup=writeup,
        gym_version=env_info['gym_version'],
        api_key=api_key,
        env_info=env_info,
    )

    # BUG FIX: the original logger.info call had lost its format-string
    # literal (a bare `.rstrip()` remained), which is a syntax error.
    logger.info(
        '[%s] Successfully uploaded your evaluation! You can find it at: %s',
        env_id, evaluation.web_url())

    return evaluation
env's monitor) to OpenAI Gym.
Args:
training_dir (Optional[str]): A directory containing the results of a training run.
algorithm_id (Optional[str]): An algorithm id indicating the particular version of the algorithm (including choices of parameters) you are running (visit https://gym.openai.com/algorithms to create an id)
writeup (Optional[str]): A Gist URL (of the form https://gist.github.com/<user>/<id>) containing your writeup for this evaluation.
api_key (Optional[str]): Your OpenAI API key. Can also be provided as an environment variable (OPENAI_GYM_API_KEY). | 3.318058 | 3.305734 | 1.003728 |
def load_user_envs(self):
    """Loads downloaded user envs from filesystem cache on `import gym`."""
    installed_packages = self._list_packages()
    # Tag the core envs so they are distinguishable from user envs.
    if 'gym' in installed_packages:
        gym_package = 'gym ({})'.format(installed_packages['gym'])
    else:
        gym_package = 'gym'
    for spec in registry.all():
        spec.source = 'OpenAI Gym Core Package'
        spec.package = gym_package
    # Without a cache file there are no user envs to load.
    if not os.path.isfile(self.cache_path):
        return
    with open(self.cache_path) as cache:
        for line in cache:
            user_package, registered_envs = self._load_package(
                line.rstrip('\n'), installed_packages)
            if logger.level <= logging.DEBUG:
                logger.debug('Installed %d user environments from package "%s"',
                             len(registered_envs), user_package['name'])
    if self.cache_needs_update:
        self._update_cache()
    if len(self.env_ids) > 0:
        logger.info('Found and registered %d user environments.', len(self.env_ids))
# Parses one line of the env cache file: a JSON record naming a user
# package.  Importing (or reloading) the package registers its envs; the
# registry is diffed before/after to discover what was added.
if len(json_line) == 0:
    # Blank cache line: nothing to load.
    return {}, set([])
valid_json = False
try:
    user_package = json.loads(json_line)
    valid_json = True
except ValueError:
    user_package = {}
package_name = user_package['name'] if 'name' in user_package else None
# pip package names use '-', importable module names use '_'.
module_name = package_name.replace('-', '_') if package_name is not None else ''
# Snapshot current env ids so newly registered envs can be diffed out below.
envs_before = set(registry.list())
if not valid_json or package_name is None:
    # Corrupt cache line: flag the cache for a rewrite and skip this line.
    self.cache_needs_update = True
    logger.warn('Unable to load user environments. Try deleting your cache '
                'file "%s" if this problem persists. \n\nLine: %s', self.cache_path, json_line)
    return {}, set([])
elif package_name not in installed_packages:
    # Package was uninstalled since it was cached: drop it from the cache.
    self.cache_needs_update = True
    logger.warn('The package "%s" does not seem to be installed anymore. User environments from this '
                'package will not be registered, and the package will no longer be loaded on `import gym`', package_name)
elif module_name in sys.modules:
    # Module already imported this session: re-registering needs a reload.
    self.cache_needs_update = True
    try:
        reload_module(sys.modules[module_name])
    except ImportError:
        if 'gym' in package_name: # To avoid uninstalling failing dependencies
            logger.warn('Unable to reload the module "%s" from package "%s" (%s). This is usually caused by a '
                        'invalid pip package. The package will be uninstalled and no longer be loaded on `import gym`.\n',
                        module_name, package_name, installed_packages[package_name])
            traceback.print_exc(file=sys.stdout)
            sys.stdout.write('\n')
            self._run_cmd('{} uninstall -y {}'.format(pip_exec, package_name))
else:
    # First import of this package in the current session.
    try:
        __import__(module_name)
    except ImportError:
        if 'gym' in package_name: # To avoid uninstalling failing dependencies
            self.cache_needs_update = True
            logger.warn('Unable to import the module "%s" from package "%s" (%s). This is usually caused by a '
                        'invalid pip package. The package will be uninstalled and no longer be loaded on `import gym`.\n',
                        module_name, package_name, installed_packages[package_name])
            traceback.print_exc(file=sys.stdout)
            sys.stdout.write('\n')
            self._run_cmd('{} uninstall -y {}'.format(pip_exec, package_name))
# Anything registered during the import/reload belongs to this package.
envs_after = set(registry.list())
registered_envs = envs_after - envs_before
if len(registered_envs) > 0:
    self.user_packages[package_name] = user_package
    for new_env in registered_envs:
        new_spec = registry.spec(new_env)
        # Tag each new spec with its origin, for display purposes.
        new_spec.source = user_package['source']
        new_spec.package = '{} ({})'.format(user_package['name'], user_package['version'])
        self.env_ids.add(new_env.lower())
return user_package, registered_envs
def make(self):
    """Instantiates an instance of the environment with appropriate kwargs."""
    if self._entry_point is None:
        raise error.Error('Attempting to make deprecated env {}. (HINT: is there a newer registered version of this env?)'.format(self.id))
    env_cls = load(self._entry_point)
    env = env_cls(**self._kwargs)
    # Make the environment aware of which spec it came from.
    env.spec = self
    return env.build(extra_wrappers=self._wrappers)
# Support specifying another host via hdfs://host:port/path syntax
# We ignore the scheme and piece together the query and fragment
# Note that HDFS URIs are not URL encoded, so a '?' or a '#' in the URI is part of the
# path
parts = urlsplit(path, allow_fragments=False)
if not parts.path.startswith('/'):
raise ValueError("Path must be absolute, was given {}".format(path))
if parts.scheme not in ('', 'hdfs', 'hftp', 'webhdfs'):
warnings.warn("Unexpected scheme {}".format(parts.scheme))
assert not parts.fragment
path = parts.path
if parts.query:
path += '?' + parts.query
if parts.netloc:
hosts = self._parse_hosts(parts.netloc)
else:
hosts = self.hosts
return hosts, path | def _parse_path(self, path) | Return (hosts, path) tuple | 4.902878 | 4.486319 | 1.092851 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.