sentence1 stringlengths 52 3.87M | sentence2 stringlengths 1 47.2k | label stringclasses 1 value |
|---|---|---|
def observe(self, dataset, kind, components=None, distance=1.0, l3=0.0, **kwargs):
"""
TODO: add documentation
Integrate over visible surface elements and return a dictionary of observable values
distance (m)
"""
meshes = self.meshes
if kind=='lp':
def sv(p, p0, w):
# Subsidiary variable:
return (p0-p)/(w/2)
def lorentzian(sv):
return 1-1./(1+sv**2)
def gaussian(sv):
return 1-np.exp(-np.log(2)*sv**2)
profile_func = kwargs.get('profile_func')
profile_rest = kwargs.get('profile_rest')
profile_sv = kwargs.get('profile_sv')
wavelengths = kwargs.get('wavelengths')
if profile_func == 'gaussian':
func = gaussian
elif profile_func == 'lorentzian':
func = lorentzian
else:
raise NotImplementedError("profile_func='{}' not supported".format(profile_func))
visibilities = meshes.get_column_flat('visibilities', components)
abs_intensities = meshes.get_column_flat('abs_intensities:{}'.format(dataset), components)
# mus here will be from the tnormals of the triangle and will not
# be weighted by the visibility of the triangle
mus = meshes.get_column_flat('mus', components)
areas = meshes.get_column_flat('areas_si', components)
ldint = meshes.get_column_flat('ldint:{}'.format(dataset), components)
rvs = (meshes.get_column_flat("rvs:{}".format(dataset), components)*u.solRad/u.d).to(u.m/u.s).value
dls = rvs*profile_rest/c.c.si.value
line = func(sv(wavelengths, profile_rest, profile_sv))
lines = np.array([np.interp(wavelengths, wavelengths+dl, line) for dl in dls])
if not np.any(visibilities):
avg_line = np.full_like(wavelengths, np.nan)
else:
avg_line = np.average(lines, axis=0, weights=abs_intensities*areas*mus*ldint*visibilities)
return {'flux_densities': avg_line}
elif kind=='rv':
visibilities = meshes.get_column_flat('visibilities', components)
if np.all(visibilities==0):
# then no triangles are visible, so we should return nan
return {'rv': np.nan}
rvs = meshes.get_column_flat("rvs:{}".format(dataset), components)
abs_intensities = meshes.get_column_flat('abs_intensities:{}'.format(dataset), components)
# mus here will be from the tnormals of the triangle and will not
# be weighted by the visibility of the triangle
mus = meshes.get_column_flat('mus', components)
areas = meshes.get_column_flat('areas_si', components)
ldint = meshes.get_column_flat('ldint:{}'.format(dataset), components)
# NOTE: don't need ptfarea because its a float (same for all
# elements, regardless of component)
# NOTE: the intensities are already projected but are per unit area
# so we need to multiply by the /projected/ area of each triangle (thus the extra mu)
return {'rv': np.average(rvs, weights=abs_intensities*areas*mus*ldint*visibilities)}
elif kind=='lc':
visibilities = meshes.get_column_flat('visibilities')
if np.all(visibilities==0):
# then no triangles are visible, so we should return nan -
# probably shouldn't ever happen for lcs
return {'flux': np.nan}
intensities = meshes.get_column_flat("intensities:{}".format(dataset), components)
mus = meshes.get_column_flat('mus', components)
areas = meshes.get_column_flat('areas_si', components)
ldint = meshes.get_column_flat('ldint:{}'.format(dataset), components)
# assume that all bodies are using the same passband and therefore
# will have the same ptfarea. If this assumption is ever a problem -
# then we will need to build a flat column based on the component
# of each element so that ptfarea is an array with the same shape
# as those above
if isinstance(self.bodies[0], Envelope):
# for envelopes, we'll make the same assumption and just grab
# that value stored in the first "half"
ptfarea = self.bodies[0]._halves[0].get_ptfarea(dataset)
else:
ptfarea = self.bodies[0].get_ptfarea(dataset)
# intens_proj is the intensity in the direction of the observer per unit surface area of the triangle
# areas is the area of each triangle
# areas*mus is the area of each triangle projected in the direction of the observer
# visibilities is 0 for hidden, 0.5 for partial, 1.0 for visible
# areas*mus*visibilities is the visibile projected area of each triangle (ie half the area for a partially-visible triangle)
# so, intens_proj*areas*mus*visibilities is the intensity in the direction of the observer per the observed projected area of that triangle
# and the sum of these values is the observed flux
# note that the intensities are already projected but are per unit area
# so we need to multiply by the /projected/ area of each triangle (thus the extra mu)
return {'flux': np.sum(intensities*areas*mus*visibilities)*ptfarea/(distance**2)+l3}
else:
raise NotImplementedError("observe for dataset with kind '{}' not implemented".format(kind)) | TODO: add documentation
Integrate over visible surface elements and return a dictionary of observable values
distance (m) | entailment |
def instantaneous_maxr(self):
"""
Recall the maximum r (triangle furthest from the center of the star) of
this star at the given time
:return: maximum r
:rtype: float
"""
logger.debug("{}.instantaneous_maxr".format(self.component))
if 'maxr' not in self.inst_vals.keys():
logger.debug("{}.instantaneous_maxr COMPUTING".format(self.component))
self.inst_vals['maxr'] = max(self.mesh.rs.centers*self._scale)
return self.inst_vals['maxr'] | Recall the maximum r (triangle furthest from the center of the star) of
this star at the given time
:return: maximum r
:rtype: float | entailment |
def _get_mass_by_index(self, index):
"""
where index can either by an integer or a list of integers (returns some of masses)
"""
if hasattr(index, '__iter__'):
return sum([self.masses[i] for i in index])
else:
return self.masses[index] | where index can either by an integer or a list of integers (returns some of masses) | entailment |
def _get_coords_by_index(self, coords_array, index):
"""
where index can either by an integer or a list of integers (returns some of masses)
coords_array should be a single array (xs, ys, or zs)
"""
if hasattr(index, '__iter__'):
# then we want the center-of-mass coordinates
# TODO: clean this up
return np.average([_value(coords_array[i]) for i in index],
weights=[self._get_mass_by_index(i) for i in index])
else:
return coords_array[index] | where index can either by an integer or a list of integers (returns some of masses)
coords_array should be a single array (xs, ys, or zs) | entailment |
def get_standard_mesh(self, scaled=True):
"""
TODO: add documentation
"""
# TODO: allow this to take etheta and retreive a mesh at that true anomaly
theta = 0.0
protomesh = self._standard_meshes[theta] #.copy() # if theta in self._standard_meshes.keys() else self.mesh.copy()
if scaled:
# TODO: be careful about self._scale... we may want self._instantaneous_scale
return mesh.ScaledProtoMesh.from_proto(protomesh, self._scale)
else:
return protomesh.copy() | TODO: add documentation | entailment |
def reset_time(self, time, true_anom, elongan, eincl):
"""
TODO: add documentation
"""
self.true_anom = true_anom
self.elongan = elongan
self.eincl = eincl
self.time = time
self.populated_at_time = []
self.reset()
return | TODO: add documentation | entailment |
def update_position(self, time,
xs, ys, zs, vxs, vys, vzs,
ethetas, elongans, eincls,
ds=None, Fs=None,
ignore_effects=False,
component_com_x=None,
**kwargs):
"""
Update the position of the star into its orbit
:parameter float time: the current time
:parameter list xs: a list/array of x-positions of ALL COMPONENTS in the :class:`System`
:parameter list ys: a list/array of y-positions of ALL COMPONENTS in the :class:`System`
:parameter list zs: a list/array of z-positions of ALL COMPONENTS in the :class:`System`
:parameter list vxs: a list/array of x-velocities of ALL COMPONENTS in the :class:`System`
:parameter list vys: a list/array of y-velocities of ALL COMPONENTS in the :class:`System`
:parameter list vzs: a list/array of z-velocities of ALL COMPONENTS in the :class:`System`
:parameter list ethetas: a list/array of euler-thetas of ALL COMPONENTS in the :class:`System`
:parameter list elongans: a list/array of euler-longans of ALL COMPONENTS in the :class:`System`
:parameter list eincls: a list/array of euler-incls of ALL COMPONENTS in the :class:`System`
:parameter list ds: (optional) a list/array of instantaneous distances of ALL COMPONENTS in the :class:`System`
:parameter list Fs: (optional) a list/array of instantaneous syncpars of ALL COMPONENTS in the :class:`System`
"""
self.reset_time(time, ethetas[self.ind_self], elongans[self.ind_self], eincls[self.ind_self])
#-- Get current position/euler information
# TODO: get rid of this ugly _value stuff
pos = (_value(xs[self.ind_self]), _value(ys[self.ind_self]), _value(zs[self.ind_self]))
vel = (_value(vxs[self.ind_self_vel]), _value(vys[self.ind_self_vel]), _value(vzs[self.ind_self_vel]))
euler = (_value(ethetas[self.ind_self]), _value(elongans[self.ind_self]), _value(eincls[self.ind_self]))
euler_vel = (_value(ethetas[self.ind_self_vel]), _value(elongans[self.ind_self_vel]), _value(eincls[self.ind_self_vel]))
# TODO: eventually pass etheta to has_standard_mesh
# TODO: implement reprojection as an option based on a nearby standard?
if self.needs_remesh or not self.has_standard_mesh():
logger.debug("{}.update_position: remeshing at t={}".format(self.component, time))
# track whether we did the remesh or not, so we know if we should
# compute local quantities if not otherwise necessary
did_remesh = True
# TODO: allow time dependence on d and F from dynamics
# d = _value(ds[self.ind_self])
# F = _value(Fs[self.ind_self])
new_mesh_dict, scale = self._build_mesh(mesh_method=self.mesh_method)
if self.mesh_method != 'wd':
new_mesh_dict = self._offset_mesh(new_mesh_dict)
# We only need the gradients where we'll compute local
# quantities which, for a marching mesh, is at the vertices.
new_mesh_dict['normgrads'] = new_mesh_dict.pop('vnormgrads', np.array([]))
# And lastly, let's fill the velocities column - with zeros
# at each of the vertices
new_mesh_dict['velocities'] = np.zeros(new_mesh_dict['vertices'].shape if self.mesh_method != 'wd' else new_mesh_dict['centers'].shape)
new_mesh_dict['tareas'] = np.array([])
# TODO: need to be very careful about self.sma vs self._scale - maybe need to make a self._instantaneous_scale???
# self._scale = scale
if not self.has_standard_mesh():
# then we only computed this because we didn't already have a
# standard_mesh... so let's save this for future use
# TODO: eventually pass etheta to save_as_standard_mesh
protomesh = mesh.ProtoMesh(**new_mesh_dict)
self.save_as_standard_mesh(protomesh)
# Here we'll build a scaledprotomesh directly from the newly
# marched mesh
# NOTE that we're using scale from the new
# mesh rather than self._scale since the instantaneous separation
# has likely changed since periastron
scaledprotomesh = mesh.ScaledProtoMesh(scale=scale, **new_mesh_dict)
else:
logger.debug("{}.update_position: accessing standard mesh at t={}".format(self.component, self.time))
# track whether we did the remesh or not, so we know if we should
# compute local quantities if not otherwise necessary
did_remesh = False
# We still need to go through scaledprotomesh instead of directly
# to mesh since features may want to process the body-centric
# coordinates before placing in orbit
# TODO: eventually pass etheta to get_standard_mesh
scaledprotomesh = self.get_standard_mesh(scaled=True)
# TODO: can we avoid an extra copy here?
if not ignore_effects and len(self.features):
logger.debug("{}.update_position: processing features at t={}".format(self.component, self.time))
# First allow features to edit the coords_for_computations (pvertices).
# Changes here WILL affect future computations for logg, teff,
# intensities, etc. Note that these WILL NOT affect the
# coords_for_observations automatically - those should probably be
# perturbed as well, unless there is a good reason not to.
for feature in self.features:
# NOTE: these are ALWAYS done on the protomesh
coords_for_observations = feature.process_coords_for_computations(scaledprotomesh.coords_for_computations, s=self.polar_direction_xyz, t=self.time)
if scaledprotomesh._compute_at_vertices:
scaledprotomesh.update_columns(pvertices=coords_for_observations)
else:
scaledprotomesh.update_columns(centers=coords_for_observations)
raise NotImplementedError("areas are not updated for changed mesh")
for feature in self.features:
coords_for_observations = feature.process_coords_for_observations(scaledprotomesh.coords_for_computations, scaledprotomesh.coords_for_observations, s=self.polar_direction_xyz, t=self.time)
if scaledprotomesh._compute_at_vertices:
scaledprotomesh.update_columns(vertices=coords_for_observations)
# TODO [DONE?]: centers either need to be supported or we need to report
# vertices in the frontend as x, y, z instead of centers
updated_props = libphoebe.mesh_properties(scaledprotomesh.vertices,
scaledprotomesh.triangles,
tnormals=True,
areas=True)
scaledprotomesh.update_columns(**updated_props)
else:
scaledprotomesh.update_columns(centers=coords_for_observations)
raise NotImplementedError("areas are not updated for changed mesh")
# TODO NOW [OPTIMIZE]: get rid of the deepcopy here - but without it the
# mesh velocities build-up and do terrible things. It may be possible
# to just clear the velocities in get_standard_mesh()?
logger.debug("{}.update_position: placing in orbit, Mesh.from_scaledproto at t={}".format(self.component, self.time))
self._mesh = mesh.Mesh.from_scaledproto(scaledprotomesh.copy(),
pos, vel, euler, euler_vel,
self.polar_direction_xyz*self.freq_rot*self._scale,
component_com_x)
# Lastly, we'll recompute physical quantities (not observables) if
# needed for this time-step.
# TODO [DONE?]: make sure features smartly trigger needs_recompute_instantaneous
# TODO: get rid of the or True here... the problem is that we're saving the standard mesh before filling local quantities
if self.needs_recompute_instantaneous or did_remesh:
logger.debug("{}.update_position: calling compute_local_quantities at t={}".format(self.component, self.time))
self.compute_local_quantities(xs, ys, zs, ignore_effects)
return | Update the position of the star into its orbit
:parameter float time: the current time
:parameter list xs: a list/array of x-positions of ALL COMPONENTS in the :class:`System`
:parameter list ys: a list/array of y-positions of ALL COMPONENTS in the :class:`System`
:parameter list zs: a list/array of z-positions of ALL COMPONENTS in the :class:`System`
:parameter list vxs: a list/array of x-velocities of ALL COMPONENTS in the :class:`System`
:parameter list vys: a list/array of y-velocities of ALL COMPONENTS in the :class:`System`
:parameter list vzs: a list/array of z-velocities of ALL COMPONENTS in the :class:`System`
:parameter list ethetas: a list/array of euler-thetas of ALL COMPONENTS in the :class:`System`
:parameter list elongans: a list/array of euler-longans of ALL COMPONENTS in the :class:`System`
:parameter list eincls: a list/array of euler-incls of ALL COMPONENTS in the :class:`System`
:parameter list ds: (optional) a list/array of instantaneous distances of ALL COMPONENTS in the :class:`System`
:parameter list Fs: (optional) a list/array of instantaneous syncpars of ALL COMPONENTS in the :class:`System` | entailment |
def populate_observable(self, time, kind, dataset, **kwargs):
"""
TODO: add documentation
"""
if kind in ['mesh', 'orb']:
return
if time==self.time and dataset in self.populated_at_time and 'pblum' not in kind:
# then we've already computed the needed columns
# TODO: handle the case of intensities already computed by
# /different/ dataset (ie RVs computed first and filling intensities
# and then lc requesting intensities with SAME passband/atm)
return
new_mesh_cols = getattr(self, '_populate_{}'.format(kind.lower()))(dataset, **kwargs)
for key, col in new_mesh_cols.items():
self.mesh.update_columns_dict({'{}:{}'.format(key, dataset): col})
self.populated_at_time.append(dataset) | TODO: add documentation | entailment |
def from_bundle(cls, b, component, compute=None,
mesh_init_phi=0.0, datasets=[], **kwargs):
"""
Build a star from the :class:`phoebe.frontend.bundle.Bundle` and its
hierarchy.
Usually it makes more sense to call :meth:`System.from_bundle` directly.
:parameter b: the :class:`phoebe.frontend.bundle.Bundle`
:parameter str component: label of the component in the bundle
:parameter str compute: name of the computeoptions in the bundle
:parameter list datasets: list of names of datasets
:parameter **kwargs: temporary overrides for computeoptions
:return: an instantiated :class:`Star` object
"""
# TODO [DONE?]: handle overriding options from kwargs
# TODO [DONE?]: do we need dynamics method???
hier = b.hierarchy
if not len(hier.get_value()):
raise NotImplementedError("Star meshing requires a hierarchy to exist")
label_self = component
label_sibling = hier.get_stars_of_sibling_of(component)
label_orbit = hier.get_parent_of(component)
starrefs = hier.get_stars()
ind_self = starrefs.index(label_self)
# for the sibling, we may need to handle a list of stars (ie in the case of a hierarchical triple)
ind_sibling = starrefs.index(label_sibling) if isinstance(label_sibling, str) else [starrefs.index(l) for l in label_sibling]
comp_no = ['primary', 'secondary'].index(hier.get_primary_or_secondary(component))+1
self_ps = b.filter(component=component, context='component', check_visible=False)
requiv = self_ps.get_value('requiv', unit=u.solRad)
masses = [b.get_value('mass', component=star, context='component', unit=u.solMass) for star in starrefs]
if b.hierarchy.get_parent_of(component) is not None:
sma = b.get_value('sma', component=label_orbit, context='component', unit=u.solRad)
ecc = b.get_value('ecc', component=label_orbit, context='component')
is_single = False
else:
# single star case
sma = 1.0
ecc = 0.0
is_single = True
incl = b.get_value('incl', component=label_orbit, context='component', unit=u.rad)
long_an = b.get_value('long_an', component=label_orbit, context='component', unit=u.rad)
# NOTE: these may not be used when not visible for contact systems, so
# Star_roche_envelope_half should ignore and override with
# aligned/synchronous
incl_star = self_ps.get_value('incl', unit=u.rad, check_visible=False)
long_an_star = self_ps.get_value('long_an', unit=u.rad, check_visible=False)
polar_direction_uvw = mesh.spin_in_system(incl_star, long_an_star)
# freq_rot for contacts will be provided by that subclass as 2*pi/P_orb since they're always synchronous
freq_rot = self_ps.get_value('freq', unit=u.rad/u.d)
t0 = b.get_value('t0', context='system', unit=u.d)
teff = b.get_value('teff', component=component, context='component', unit=u.K)
gravb_bol= b.get_value('gravb_bol', component=component, context='component')
abun = b.get_value('abun', component=component, context='component', check_visible=False)
irrad_frac_refl = b.get_value('irrad_frac_refl_bol', component=component, context='component')
try:
rv_grav_override = kwargs.pop('rv_grav', None)
do_rv_grav = b.get_value('rv_grav', component=component, compute=compute, check_visible=False, rv_grav=rv_grav_override) if compute is not None else False
except ValueError:
# rv_grav may not have been copied to this component if no rvs are attached
do_rv_grav = False
mesh_method_override = kwargs.pop('mesh_method', None)
mesh_method = b.get_value('mesh_method', component=component, compute=compute, mesh_method=mesh_method_override) if compute is not None else 'marching'
if mesh_method == 'marching':
ntriangles_override = kwargs.pop('ntriangle', None)
kwargs['ntriangles'] = b.get_value('ntriangles', component=component, compute=compute, ntriangles=ntriangles_override) if compute is not None else 1000
distortion_method_override = kwargs.pop('distortion_method', None)
kwargs['distortion_method'] = b.get_value('distortion_method', component=component, compute=compute, distortion_method=distortion_method_override) if compute is not None else 'roche'
elif mesh_method == 'wd':
gridsize_override = kwargs.pop('gridsize', None)
kwargs['gridsize'] = b.get_value('gridsize', component=component, compute=compute, gridsize=gridsize_override) if compute is not None else 30
else:
raise NotImplementedError
features = []
for feature in b.filter(component=component).features:
feature_ps = b.filter(feature=feature, component=component)
feature_cls = globals()[feature_ps.kind.title()]
features.append(feature_cls.from_bundle(b, feature))
if conf.devel:
mesh_offset_override = kwargs.pop('mesh_offset', None)
do_mesh_offset = b.get_value('mesh_offset', compute=compute, mesh_offset=mesh_offset_override)
else:
do_mesh_offset = True
datasets_intens = [ds for ds in b.filter(kind=['lc', 'rv', 'lp'], context='dataset').datasets if ds != '_default']
datasets_lp = [ds for ds in b.filter(kind='lp', context='dataset').datasets if ds != '_default']
atm_override = kwargs.pop('atm', None)
atm = b.get_value('atm', compute=compute, component=component, atm=atm_override) if compute is not None else 'blackbody'
passband_override = kwargs.pop('passband', None)
passband = {ds: b.get_value('passband', dataset=ds, passband=passband_override) for ds in datasets_intens}
intens_weighting_override = kwargs.pop('intens_weighting', None)
intens_weighting = {ds: b.get_value('intens_weighting', dataset=ds, intens_weighting=intens_weighting_override) for ds in datasets_intens}
ld_func_override = kwargs.pop('ld_func', None)
ld_func = {ds: b.get_value('ld_func', dataset=ds, component=component, ld_func=ld_func_override) for ds in datasets_intens}
ld_coeffs_override = kwargs.pop('ld_coeffs', None)
ld_coeffs = {ds: b.get_value('ld_coeffs', dataset=ds, component=component, check_visible=False, ld_coeffs=ld_coeffs_override) for ds in datasets_intens}
ld_func_bol_override = kwargs.pop('ld_func_bol', None)
ld_func['bol'] = b.get_value('ld_func_bol', component=component, context='component', check_visible=False, ld_func_bol=ld_func_bol_override)
ld_coeffs_bol_override = kwargs.pop('ld_coeffs_bol', None)
ld_coeffs['bol'] = b.get_value('ld_coeffs_bol', component=component, context='component', check_visible=False, ld_coeffs_bol=ld_coeffs_bol_override)
profile_rest_override = kwargs.pop('profile_rest', None)
lp_profile_rest = {ds: b.get_value('profile_rest', dataset=ds, unit=u.nm, profile_rest=profile_rest_override) for ds in datasets_lp}
# we'll pass kwargs on here so they can be overridden by the classmethod
# of any subclass and then intercepted again by the __init__ by the
# same subclass. Note: kwargs also hold meshing kwargs which are used
# by Star.__init__
return cls(component, comp_no, ind_self, ind_sibling,
masses, ecc,
incl, long_an, t0,
do_mesh_offset,
mesh_init_phi,
atm,
datasets,
passband,
intens_weighting,
ld_func,
ld_coeffs,
lp_profile_rest,
requiv,
sma,
polar_direction_uvw,
freq_rot,
teff,
gravb_bol,
abun,
irrad_frac_refl,
mesh_method,
is_single,
do_rv_grav,
features,
**kwargs
) | Build a star from the :class:`phoebe.frontend.bundle.Bundle` and its
hierarchy.
Usually it makes more sense to call :meth:`System.from_bundle` directly.
:parameter b: the :class:`phoebe.frontend.bundle.Bundle`
:parameter str component: label of the component in the bundle
:parameter str compute: name of the computeoptions in the bundle
:parameter list datasets: list of names of datasets
:parameter **kwargs: temporary overrides for computeoptions
:return: an instantiated :class:`Star` object | entailment |
def polar_direction_xyz(self):
"""
get current polar direction in Roche (xyz) coordinates
"""
return mesh.spin_in_roche(self.polar_direction_uvw,
self.true_anom, self.elongan, self.eincl).astype(float) | get current polar direction in Roche (xyz) coordinates | entailment |
def get_target_volume(self, etheta=0.0, scaled=False):
"""
TODO: add documentation
get the volume that the Star should have at a given euler theta
"""
# TODO: make this a function of d instead of etheta?
logger.debug("determining target volume at t={}, theta={}".format(self.time, etheta))
# TODO: eventually this could allow us to "break" volume conservation
# and have volume be a function of d, with some scaling factor provided
# by the user as a parameter. Until then, we'll assume volume is
# conserved which means the volume should always be the same
volume = 4./3 * np.pi * self.requiv**3
if not scaled:
return volume / self._scale**3
else:
return volume | TODO: add documentation
get the volume that the Star should have at a given euler theta | entailment |
def north_pole_uvw(self):
"""location of the north pole in the global/system frame"""
# TODO: is this rpole scaling true for all distortion_methods??
rpole = self.instantaneous_rpole*self.sma
return self.polar_direction_uvw*rpole+self.mesh._pos | location of the north pole in the global/system frame | entailment |
def instantaneous_tpole(self):
"""
compute the instantaenous temperature at the pole to achieve the mean
effective temperature (teff) provided by the user
"""
logger.debug("{}.instantaneous_tpole".format(self.component))
if 'tpole' not in self.inst_vals.keys():
logger.debug("{}.instantaneous_tpole COMPUTING".format(self.component))
if self.mesh is None:
raise ValueError("mesh must be computed before determining tpole")
# Convert from mean to polar by dividing total flux by gravity darkened flux (Ls drop out)
# see PHOEBE Legacy scientific reference eq 5.20
self.inst_vals['tpole'] = self.teff*(np.sum(self.mesh.areas) / np.sum(self.mesh.gravs.centers*self.mesh.areas))**(0.25)
return self.inst_vals['tpole'] | compute the instantaenous temperature at the pole to achieve the mean
effective temperature (teff) provided by the user | entailment |
def _fill_loggs(self, mesh=None, ignore_effects=False):
"""
TODO: add documentation
Calculate local surface gravity
GMSunNom = 1.3271244e20 m**3 s**-2
RSunNom = 6.597e8 m
"""
logger.debug("{}._fill_loggs".format(self.component))
if mesh is None:
mesh = self.mesh
loggs = np.log10(mesh.normgrads.for_computations * g_rel_to_abs(self.masses[self.ind_self], self.sma))
if not ignore_effects:
for feature in self.features:
if feature.proto_coords:
loggs = feature.process_loggs(loggs, mesh.roche_coords_for_computations, s=self.polar_direction_xyz, t=self.time)
else:
loggs = feature.process_loggs(loggs, mesh.coords_for_computations, s=self.polar_direction_xyz, t=self.time)
mesh.update_columns(loggs=loggs)
if not self.needs_recompute_instantaneous:
logger.debug("{}._fill_loggs: copying loggs to standard mesh".format(self.component))
theta = 0.0
self._standard_meshes[theta].update_columns(loggs=loggs) | TODO: add documentation
Calculate local surface gravity
GMSunNom = 1.3271244e20 m**3 s**-2
RSunNom = 6.597e8 m | entailment |
def _fill_gravs(self, mesh=None, **kwargs):
"""
TODO: add documentation
requires _fill_loggs to have been called
"""
logger.debug("{}._fill_gravs".format(self.component))
if mesh is None:
mesh = self.mesh
# TODO: rename 'gravs' to 'gdcs' (gravity darkening corrections)
gravs = ((mesh.normgrads.for_computations * g_rel_to_abs(self.masses[self.ind_self], self.sma))/self.instantaneous_gpole)**self.gravb_bol
mesh.update_columns(gravs=gravs)
if not self.needs_recompute_instantaneous:
logger.debug("{}._fill_gravs: copying gravs to standard mesh".format(self.component))
theta = 0.0
self._standard_meshes[theta].update_columns(gravs=gravs) | TODO: add documentation
requires _fill_loggs to have been called | entailment |
def _fill_teffs(self, mesh=None, ignore_effects=False, **kwargs):
r"""
requires _fill_loggs and _fill_gravs to have been called
Calculate local temperature of a Star.
"""
logger.debug("{}._fill_teffs".format(self.component))
if mesh is None:
mesh = self.mesh
# Now we can compute the local temperatures.
# see PHOEBE Legacy scientific reference eq 5.23
teffs = self.instantaneous_tpole*mesh.gravs.for_computations**0.25
if not ignore_effects:
for feature in self.features:
if feature.proto_coords:
teffs = feature.process_teffs(teffs, mesh.roche_coords_for_computations, s=self.polar_direction_xyz, t=self.time)
else:
teffs = feature.process_teffs(teffs, mesh.coords_for_computations, s=self.polar_direction_xyz, t=self.time)
mesh.update_columns(teffs=teffs)
if not self.needs_recompute_instantaneous:
logger.debug("{}._fill_teffs: copying teffs to standard mesh".format(self.component))
theta = 0.0
self._standard_meshes[theta].update_columns(teffs=teffs) | r"""
requires _fill_loggs and _fill_gravs to have been called
Calculate local temperature of a Star. | entailment |
def _fill_abuns(self, mesh=None, abun=0.0):
"""
TODO: add documentation
"""
logger.debug("{}._fill_abuns".format(self.component))
if mesh is None:
mesh = self.mesh
# TODO: support from frontend
mesh.update_columns(abuns=abun)
if not self.needs_recompute_instantaneous:
logger.debug("{}._fill_abuns: copying abuns to standard mesh".format(self.component))
theta = 0.0
self._standard_meshes[theta].update_columns(abuns=abun) | TODO: add documentation | entailment |
def _fill_albedos(self, mesh=None, irrad_frac_refl=0.0):
"""
TODO: add documentation
"""
logger.debug("{}._fill_albedos".format(self.component))
if mesh is None:
mesh = self.mesh
mesh.update_columns(irrad_frac_refl=irrad_frac_refl)
if not self.needs_recompute_instantaneous:
logger.debug("{}._fill_albedos: copying albedos to standard mesh".format(self.component))
theta = 0.0
self._standard_meshes[theta].update_columns(irrad_frac_refl=irrad_frac_refl) | TODO: add documentation | entailment |
def compute_pblum_scale(self, dataset, pblum, **kwargs):
"""
intensities should already be computed for this dataset at the time for which pblum is being provided
TODO: add documentation
"""
logger.debug("{}.compute_pblum_scale(dataset={}, pblum={})".format(self.component, dataset, pblum))
abs_luminosity = self.compute_luminosity(dataset, **kwargs)
# We now want to remember the scale for all intensities such that the
# luminosity in relative units gives the provided pblum
pblum_scale = pblum / abs_luminosity
self.set_pblum_scale(dataset, pblum_scale) | intensities should already be computed for this dataset at the time for which pblum is being provided
TODO: add documentation | entailment |
def _populate_lp(self, dataset, **kwargs):
"""
Populate columns necessary for an LP dataset
This should not be called directly, but rather via :meth:`Body.populate_observable`
or :meth:`System.populate_observables`
"""
logger.debug("{}._populate_lp(dataset={})".format(self.component, dataset))
profile_rest = kwargs.get('profile_rest', self.lp_profile_rest.get(dataset))
rv_cols = self._populate_rv(dataset, **kwargs)
cols = rv_cols
# rvs = (rv_cols['rvs']*u.solRad/u.d).to(u.m/u.s).value
# cols['dls'] = rv_cols['rvs']*profile_rest/c.c.si.value
return cols | Populate columns necessary for an LP dataset
This should not be called directly, but rather via :meth:`Body.populate_observable`
or :meth:`System.populate_observables` | entailment |
def _populate_rv(self, dataset, **kwargs):
"""
Populate columns necessary for an RV dataset
This should not be called directly, but rather via :meth:`Body.populate_observable`
or :meth:`System.populate_observables`
"""
logger.debug("{}._populate_rv(dataset={})".format(self.component, dataset))
# We need to fill all the flux-related columns so that we can weigh each
# triangle's rv by its flux in the requested passband.
lc_cols = self._populate_lc(dataset, **kwargs)
# rv per element is just the z-component of the velocity vectory. Note
# the change in sign from our right-handed system to rv conventions.
# These will be weighted by the fluxes when integrating
rvs = -1*self.mesh.velocities.for_computations[:,2]
# Gravitational redshift
if self.do_rv_grav:
rv_grav = c.G*(self.mass*u.solMass)/(self.instantaneous_rpole*u.solRad)/c.c
# rvs are in solRad/d internally
rv_grav = rv_grav.to('solRad/d').value
rvs += rv_grav
cols = lc_cols
cols['rvs'] = rvs
return cols | Populate columns necessary for an RV dataset
This should not be called directly, but rather via :meth:`Body.populate_observable`
or :meth:`System.populate_observables` | entailment |
def _populate_lc(self, dataset, **kwargs):
"""
Populate columns necessary for an LC dataset
This should not be called directly, but rather via :meth:`Body.populate_observable`
or :meth:`System.populate_observables`
:raises NotImplementedError: if lc_method is not supported
"""
logger.debug("{}._populate_lc(dataset={})".format(self.component, dataset))
lc_method = kwargs.get('lc_method', 'numerical') # TODO: make sure this is actually passed
passband = kwargs.get('passband', self.passband.get(dataset, None))
intens_weighting = kwargs.get('intens_weighting', self.intens_weighting.get(dataset, None))
ld_func = kwargs.get('ld_func', self.ld_func.get(dataset, None))
ld_coeffs = kwargs.get('ld_coeffs', self.ld_coeffs.get(dataset, None)) if ld_func != 'interp' else None
atm = kwargs.get('atm', self.atm)
boosting_method = kwargs.get('boosting_method', self.boosting_method)
pblum = kwargs.get('pblum', 4*np.pi)
if lc_method=='numerical':
pb = passbands.get_passband(passband)
if intens_weighting=='photon':
ptfarea = pb.ptf_photon_area/pb.h/pb.c
else:
ptfarea = pb.ptf_area
self.set_ptfarea(dataset, ptfarea)
ldint = pb.ldint(Teff=self.mesh.teffs.for_computations,
logg=self.mesh.loggs.for_computations,
abun=self.mesh.abuns.for_computations,
atm=atm,
ld_func=ld_func,
ld_coeffs=ld_coeffs,
photon_weighted=intens_weighting=='photon')
# abs_normal_intensities are the normal emergent passband intensities:
abs_normal_intensities = pb.Inorm(Teff=self.mesh.teffs.for_computations,
logg=self.mesh.loggs.for_computations,
abun=self.mesh.abuns.for_computations,
atm=atm,
ldint=ldint,
photon_weighted=intens_weighting=='photon')
# abs_intensities are the projected (limb-darkened) passband intensities
# TODO: why do we need to use abs(mus) here?
abs_intensities = pb.Imu(Teff=self.mesh.teffs.for_computations,
logg=self.mesh.loggs.for_computations,
abun=self.mesh.abuns.for_computations,
mu=abs(self.mesh.mus_for_computations),
atm=atm,
ldint=ldint,
ld_func=ld_func,
ld_coeffs=ld_coeffs,
photon_weighted=intens_weighting=='photon')
# Beaming/boosting
if boosting_method == 'none':
boost_factors = 1.0
elif boosting_method == 'linear':
bindex = pb.bindex(Teff=self.mesh.teffs.for_computations,
logg=self.mesh.loggs.for_computations,
abun=self.mesh.abuns.for_computations,
mu=abs(self.mesh.mus_for_computations),
atm=atm,
photon_weighted=intens_weighting=='photon')
boost_factors = 1.0 + bindex * self.mesh.velocities.for_computations[:,2]/37241.94167601236
else:
raise NotImplementedError("boosting_method='{}' not supported".format(self.boosting_method))
# boosting is aspect dependent so we don't need to correct the
# normal intensities
abs_intensities *= boost_factors
# Handle pblum - distance and l3 scaling happens when integrating (in observe)
# we need to scale each triangle so that the summed normal_intensities over the
# entire star is equivalent to pblum / 4pi
normal_intensities = abs_normal_intensities * self.get_pblum_scale(dataset)
intensities = abs_intensities * self.get_pblum_scale(dataset)
elif lc_method=='analytical':
raise NotImplementedError("analytical fluxes not yet supported")
# TODO: this probably needs to be moved into observe or backends.phoebe
# (assuming it doesn't result in per-triangle quantities)
else:
raise NotImplementedError("lc_method '{}' not recognized".format(lc_method))
# TODO: do we really need to store all of these if store_mesh==False?
# Can we optimize by only returning the essentials if we know we don't need them?
return {'abs_normal_intensities': abs_normal_intensities,
'normal_intensities': normal_intensities,
'abs_intensities': abs_intensities,
'intensities': intensities,
'ldint': ldint,
'boost_factors': boost_factors} | Populate columns necessary for an LC dataset
This should not be called directly, but rather via :meth:`Body.populate_observable`
or :meth:`System.populate_observables`
:raises NotImplementedError: if lc_method is not supported | entailment |
def needs_remesh(self):
"""
whether the star needs to be re-meshed (for any reason)
"""
# TODO: may be able to get away with removing the features check and just doing for pulsations, etc?
# TODO: what about dpdt, deccdt, dincldt, etc?
return len(self.features) > 0 or self.is_misaligned or self.ecc != 0 or self.dynamics_method != 'keplerian' | whether the star needs to be re-meshed (for any reason) | entailment |
def _build_mesh(self, mesh_method, **kwargs):
"""
this function takes mesh_method and kwargs that came from the generic Body.intialize_mesh and returns
the grid... intialize mesh then takes care of filling columns and rescaling to the correct units, etc
"""
# need the sma to scale between Roche and real units
sma = kwargs.get('sma', self.sma) # Rsol (same units as coordinates)
mesh_args = self.instantaneous_mesh_args
if mesh_method == 'marching':
# TODO: do this during mesh initialization only and then keep delta fixed in time??
ntriangles = kwargs.get('ntriangles', self.ntriangles)
# we need the surface area of the lobe to estimate the correct value
# to pass for delta to marching. We will later need the volume to
# expose its value
logger.debug("libphoebe.roche_area_volume{}".format(mesh_args))
av = libphoebe.roche_area_volume(*mesh_args,
choice=2,
larea=True,
lvolume=True)
delta = _estimate_delta(ntriangles, av['larea'])
logger.debug("libphoebe.roche_marching_mesh{}".format(mesh_args))
new_mesh = libphoebe.roche_marching_mesh(*mesh_args,
delta=delta,
choice=2,
full=True,
max_triangles=ntriangles*2,
vertices=True,
triangles=True,
centers=True,
vnormals=True,
tnormals=True,
cnormals=False,
vnormgrads=True,
cnormgrads=False,
areas=True,
volume=False,
init_phi=self.mesh_init_phi)
# In addition to the values exposed by the mesh itself, let's report
# the volume and surface area of the lobe. The lobe area is used
# if mesh_offseting is required, and the volume is optionally exposed
# to the user.
new_mesh['volume'] = av['lvolume'] # * sma**3
new_mesh['area'] = av['larea'] # * sma**2
scale = sma
elif mesh_method == 'wd':
N = int(kwargs.get('gridsize', self.gridsize))
# unpack mesh_args
q, F, d, Phi = mesh_args
the_grid = mesh_wd.discretize_wd_style(N, q, F, d, Phi)
new_mesh = mesh.wd_grid_to_mesh_dict(the_grid, q, F, d)
scale = sma
else:
raise NotImplementedError("mesh_method '{}' is not supported".format(mesh_method))
return new_mesh, scale | this function takes mesh_method and kwargs that came from the generic Body.intialize_mesh and returns
the grid... intialize mesh then takes care of filling columns and rescaling to the correct units, etc | entailment |
def _build_mesh(self, mesh_method, **kwargs):
"""
this function takes mesh_method and kwargs that came from the generic Body.intialize_mesh and returns
the grid... intialize mesh then takes care of filling columns and rescaling to the correct units, etc
"""
# if we don't provide instantaneous masses or smas, then assume they are
# not time dependent - in which case they were already stored in the init
sma = kwargs.get('sma', self.sma) # Rsol (same units as coordinates)
mesh_args = self.instantaneous_mesh_args
if mesh_method == 'marching':
ntriangles = kwargs.get('ntriangles', self.ntriangles)
av = libphoebe.sphere_area_volume(*mesh_args,
larea=True,
lvolume=True)
delta = _estimate_delta(ntriangles, av['larea'])
new_mesh = libphoebe.sphere_marching_mesh(*mesh_args,
delta=delta,
full=True,
max_triangles=ntriangles*2,
vertices=True,
triangles=True,
centers=True,
vnormals=True,
tnormals=True,
cnormals=False,
vnormgrads=True,
cnormgrads=False,
areas=True,
volume=True,
init_phi=self.mesh_init_phi)
# In addition to the values exposed by the mesh itself, let's report
# the volume and surface area of the lobe. The lobe area is used
# if mesh_offseting is required, and the volume is optionally exposed
# to the user.
new_mesh['volume'] = av['lvolume']
new_mesh['area'] = av['larea']
scale = sma
else:
raise NotImplementedError("mesh_method '{}' is not supported".format(mesh_method))
return new_mesh, scale | this function takes mesh_method and kwargs that came from the generic Body.intialize_mesh and returns
the grid... intialize mesh then takes care of filling columns and rescaling to the correct units, etc | entailment |
def populate_observable(self, time, kind, dataset, **kwargs):
"""
TODO: add documentation
"""
for half in self._halves:
half.populate_observable(time, kind, dataset, **kwargs) | TODO: add documentation | entailment |
def from_bundle(cls, b, feature):
"""
Initialize a Spot feature from the bundle.
"""
feature_ps = b.get_feature(feature)
colat = feature_ps.get_value('colat', unit=u.rad)
longitude = feature_ps.get_value('long', unit=u.rad)
if len(b.hierarchy.get_stars())>=2:
star_ps = b.get_component(feature_ps.component)
orbit_ps = b.get_component(b.hierarchy.get_parent_of(feature_ps.component))
syncpar = star_ps.get_value('syncpar')
period = orbit_ps.get_value('period')
dlongdt = (syncpar - 1) / period * 2 * np.pi
else:
star_ps = b.get_component(feature_ps.component)
dlongdt = star_ps.get_value('freq', unit=u.rad/u.d)
longitude = np.pi/2
radius = feature_ps.get_value('radius', unit=u.rad)
relteff = feature_ps.get_value('relteff', unit=u.dimensionless_unscaled)
t0 = b.get_value('t0', context='system', unit=u.d)
return cls(colat, longitude, dlongdt, radius, relteff, t0) | Initialize a Spot feature from the bundle. | entailment |
def pointing_vector(self, s, time):
"""
s is the spin vector in roche coordinates
time is the current time
"""
t = time - self._t0
longitude = self._longitude + self._dlongdt * t
# define the basis vectors in the spin (primed) coordinates in terms of
# the Roche coordinates.
# ez' = s
# ex' = (ex - s(s.ex)) /|i - s(s.ex)|
# ey' = s x ex'
ex = np.array([1., 0., 0.])
ezp = s
exp = (ex - s*np.dot(s,ex))
eyp = np.cross(s, exp)
return np.sin(self._colat)*np.cos(longitude)*exp +\
np.sin(self._colat)*np.sin(longitude)*eyp +\
np.cos(self._colat)*ezp | s is the spin vector in roche coordinates
time is the current time | entailment |
def process_teffs(self, teffs, coords, s=np.array([0., 0., 1.]), t=None):
"""
Change the local effective temperatures for any values within the
"cone" defined by the spot. Any teff within the spot will have its
current value multiplied by the "relteff" factor
:parameter array teffs: array of teffs for computations
:parameter array coords: array of coords for computations
:t float: current time
"""
if t is None:
# then assume at t0
t = self._t0
pointing_vector = self.pointing_vector(s,t)
logger.debug("spot.process_teffs at t={} with pointing_vector={} and radius={}".format(t, pointing_vector, self._radius))
cos_alpha_coords = np.dot(coords, pointing_vector) / np.linalg.norm(coords, axis=1)
cos_alpha_spot = np.cos(self._radius)
filter_ = cos_alpha_coords > cos_alpha_spot
teffs[filter_] = teffs[filter_] * self._relteff
return teffs | Change the local effective temperatures for any values within the
"cone" defined by the spot. Any teff within the spot will have its
current value multiplied by the "relteff" factor
:parameter array teffs: array of teffs for computations
:parameter array coords: array of coords for computations
:t float: current time | entailment |
def from_bundle(cls, b, feature):
"""
Initialize a Pulsation feature from the bundle.
"""
feature_ps = b.get_feature(feature)
freq = feature_ps.get_value('freq', unit=u.d**-1)
radamp = feature_ps.get_value('radamp', unit=u.dimensionless_unscaled)
l = feature_ps.get_value('l', unit=u.dimensionless_unscaled)
m = feature_ps.get_value('m', unit=u.dimensionless_unscaled)
teffext = feature_ps.get_value('teffext')
GM = c.G.to('solRad3 / (solMass d2)').value*b.get_value(qualifier='mass', component=feature_ps.component, context='component', unit=u.solMass)
R = b.get_value(qualifier='rpole', component=feature_ps.component, section='component', unit=u.solRad)
tanamp = GM/R**3/freq**2
return cls(radamp, freq, l, m, tanamp, teffext) | Initialize a Pulsation feature from the bundle. | entailment |
def project_onto_potential(r, pot_name, *args):
"""
TODO: add documentation
"""
pot = globals()[pot_name]
dpdx = globals()['d%sdx'%(pot_name)]
dpdy = globals()['d%sdy'%(pot_name)]
dpdz = globals()['d%sdz'%(pot_name)]
dpdr = globals()['d%sdr'%(pot_name)]
n_iter = 0
rmag, rmag0 = np.sqrt((r*r).sum()), 0
lam, nu = r[0]/rmag, r[2]/rmag
dc = np.array((lam, np.sqrt(1-lam*lam-nu*nu), nu)) # direction cosines -- must not change during reprojection
D, q, F, p0 = args
while np.abs(rmag-rmag0) > 1e-12 and n_iter < 100:
rmag0 = rmag
rmag = rmag0 - pot(rmag0*dc, *args)/dpdr(rmag0*dc, *args[:-1])
n_iter += 1
if n_iter == 100:
logger.warning('projection did not converge')
r = rmag*dc
return MeshVertex(r, dpdx, dpdy, dpdz, *args[:-1]) | TODO: add documentation | entailment |
def discretize_wd_style(N, q, F, d, Phi):
"""
TODO: add documentation
New implementation. I'll make this work first, then document.
"""
DEBUG = False
Ts = []
potential = 'BinaryRoche'
r0 = libphoebe.roche_pole(q, F, d, Phi)
# The following is a hack that needs to go!
pot_name = potential
dpdx = globals()['d%sdx'%(pot_name)]
dpdy = globals()['d%sdy'%(pot_name)]
dpdz = globals()['d%sdz'%(pot_name)]
if DEBUG:
import matplotlib.pyplot as plt
from matplotlib.path import Path
import matplotlib.patches as patches
fig = plt.figure()
ax1 = fig.add_subplot(131)
ax2 = fig.add_subplot(132)
ax3 = fig.add_subplot(133)
ax1.set_xlim(-0.3, 0.3) # -1.6 1.6
ax1.set_ylim(-0.3, 0.3)
ax2.set_xlim(-0.3, 0.3)
ax2.set_ylim(-0.3, 0.3)
ax3.set_xlim(-0.3, 0.3)
ax3.set_ylim(-0.3, 0.3)
ax1.set_xlabel('x')
ax1.set_ylabel('y')
ax2.set_xlabel('x')
ax2.set_ylabel('z')
ax3.set_xlabel('y')
ax3.set_ylabel('z')
# Rectangle centers:
theta = np.array([np.pi/2*(k-0.5)/N for k in range(1, N+2)])
phi = np.array([[np.pi*(l-0.5)/Mk for l in range(1, Mk+1)] for Mk in np.array(1 + 1.3*N*np.sin(theta), dtype=int)])
for t in range(len(theta)-1):
dtheta = theta[t+1]-theta[t]
for i in range(len(phi[t])):
dphi = phi[t][1]-phi[t][0]
# Project the vertex onto the potential; this will be our center point:
rc = np.array((r0*sin(theta[t])*cos(phi[t][i]), r0*sin(theta[t])*sin(phi[t][i]), r0*cos(theta[t])))
vc = project_onto_potential(rc, potential, d, q, F, Phi).r
# Next we need to find the tangential plane, which we'll get by finding the normal,
# which is the negative of the gradient:
nc = np.array((-dpdx(vc, d, q, F), -dpdy(vc, d, q, F), -dpdz(vc, d, q, F)))
# Then we need to find the intercontext of +/-dtheta/dphi-deflected
# radius vectors with the tangential plane. We do that by solving
#
# d = [(p0 - l0) \dot n] / (l \dot n),
#
# where p0 and l0 are reference points on the plane and on the line,
# respectively, n is the normal vector, and l is the line direction
# vector. For convenience l0 can be set to 0, and p0 is just vc. d
# then measures the distance from the origin along l.
l1 = np.array((sin(theta[t]-dtheta/2)*cos(phi[t][i]-dphi/2), sin(theta[t]-dtheta/2)*sin(phi[t][i]-dphi/2), cos(theta[t]-dtheta/2)))
l2 = np.array((sin(theta[t]-dtheta/2)*cos(phi[t][i]+dphi/2), sin(theta[t]-dtheta/2)*sin(phi[t][i]+dphi/2), cos(theta[t]-dtheta/2)))
l3 = np.array((sin(theta[t]+dtheta/2)*cos(phi[t][i]+dphi/2), sin(theta[t]+dtheta/2)*sin(phi[t][i]+dphi/2), cos(theta[t]+dtheta/2)))
l4 = np.array((sin(theta[t]+dtheta/2)*cos(phi[t][i]-dphi/2), sin(theta[t]+dtheta/2)*sin(phi[t][i]-dphi/2), cos(theta[t]+dtheta/2)))
r1 = np.dot(vc, nc) / np.dot(l1, nc) * l1
r2 = np.dot(vc, nc) / np.dot(l2, nc) * l2
r3 = np.dot(vc, nc) / np.dot(l3, nc) * l3
r4 = np.dot(vc, nc) / np.dot(l4, nc) * l4
# This sorts out the vertices, now we need to fudge the surface
# area. WD does not take curvature of the equipotential at vc
# into account, so the surface area computed from these vertex-
# delimited surfaces will generally be different from what WD
# computes. Thus, we compute the surface area the same way WD
# does it and assign it to each element even though that isn't
# quite its area:
#
# dsigma = || r^2 sin(theta)/cos(gamma) dtheta dphi ||,
#
# where gamma is the angle between l and n.
cosgamma = np.dot(vc, nc)/np.sqrt(np.dot(vc, vc))/np.sqrt(np.dot(nc, nc))
dsigma = np.abs(np.dot(vc, vc)*np.sin(theta[t])/cosgamma*dtheta*dphi)
# Temporary addition: triangle areas: ######################
side1 = sqrt((r1[0]-r2[0])**2 + (r1[1]-r2[1])**2 + (r1[2]-r2[2])**2)
side2 = sqrt((r1[0]-r3[0])**2 + (r1[1]-r3[1])**2 + (r1[2]-r3[2])**2)
side3 = sqrt((r2[0]-r3[0])**2 + (r2[1]-r3[1])**2 + (r2[2]-r3[2])**2)
s = 0.5*(side1 + side2 + side3)
dsigma_t_sq = s*(s-side1)*(s-side2)*(s-side3)
dsigma_t = sqrt(dsigma_t_sq) if dsigma_t_sq > 0 else 0.0
############################################################
if DEBUG:
fc = 'orange'
verts = [(r1[0], r1[1]), (r2[0], r2[1]), (r3[0], r3[1]), (r4[0], r4[1]), (r1[0], r1[1])]
codes = [Path.MOVETO, Path.LINETO, Path.LINETO, Path.LINETO, Path.CLOSEPOLY]
path = Path(verts, codes)
patch = patches.PathPatch(path, facecolor=fc, lw=2)
ax1.add_patch(patch)
verts = [(r1[0], r1[2]), (r2[0], r2[2]), (r3[0], r3[2]), (r4[0], r4[2]), (r1[0], r1[2])]
codes = [Path.MOVETO, Path.LINETO, Path.LINETO, Path.LINETO, Path.CLOSEPOLY]
path = Path(verts, codes)
patch = patches.PathPatch(path, facecolor=fc, lw=2)
ax2.add_patch(patch)
verts = [(r1[1], r1[2]), (r2[1], r2[2]), (r3[1], r3[2]), (r4[1], r4[2]), (r1[1], r1[2])]
codes = [Path.MOVETO, Path.LINETO, Path.LINETO, Path.LINETO, Path.CLOSEPOLY]
path = Path(verts, codes)
patch = patches.PathPatch(path, facecolor=fc, lw=2)
ax3.add_patch(patch)
# Ts.append(np.array((vc[0], vc[1], vc[2], dsigma/2, r1[0], r1[1], r1[2], r2[0], r2[1], r2[2], r3[0], r3[1], r3[2], nc[0], nc[1], nc[2])))
# Ts.append(np.array((vc[0], vc[1], vc[2], dsigma/2, r3[0], r3[1], r3[2], r4[0], r4[1], r4[2], r1[0], r1[1], r1[2], nc[0], nc[1], nc[2])))
# # Instead of recomputing all quantities, just reflect over the y- and z-directions.
# Ts.append(np.array((vc[0], -vc[1], vc[2], dsigma/2, r1[0], -r1[1], r1[2], r2[0], -r2[1], r2[2], r3[0], -r3[1], r3[2], nc[0], -nc[1], nc[2])))
# Ts.append(np.array((vc[0], -vc[1], vc[2], dsigma/2, r3[0], -r3[1], r3[2], r4[0], -r4[1], r4[2], r1[0], -r1[1], r1[2], nc[0], -nc[1], nc[2])))
# Ts.append(np.array((vc[0], vc[1], -vc[2], dsigma/2, r1[0], r1[1], -r1[2], r2[0], r2[1], -r2[2], r3[0], r3[1], -r3[2], nc[0], nc[1], -nc[2])))
# Ts.append(np.array((vc[0], vc[1], -vc[2], dsigma/2, r3[0], r3[1], -r3[2], r4[0], r4[1], -r4[2], r1[0], r1[1], -r1[2], nc[0], nc[1], -nc[2])))
# Ts.append(np.array((vc[0], -vc[1], -vc[2], dsigma/2, r1[0], -r1[1], -r1[2], r2[0], -r2[1], -r2[2], r3[0], -r3[1], -r3[2], nc[0], -nc[1], -nc[2])))
# Ts.append(np.array((vc[0], -vc[1], -vc[2], dsigma/2, r3[0], -r3[1], -r3[2], r4[0], -r4[1], -r4[2], r1[0], -r1[1], -r1[2], nc[0], -nc[1], -nc[2])))
# FOR TESTING - report theta/phi for each triangle
# uncomment the above original version eventually
Ts.append(np.array((vc[0], vc[1], vc[2], dsigma/2, r1[0], r1[1], r1[2], r2[0], r2[1], r2[2], r3[0], r3[1], r3[2], nc[0], nc[1], nc[2], theta[t], phi[t][0], dsigma_t)))
Ts.append(np.array((vc[0], vc[1], vc[2], dsigma/2, r3[0], r3[1], r3[2], r4[0], r4[1], r4[2], r1[0], r1[1], r1[2], nc[0], nc[1], nc[2], theta[t], phi[t][0], dsigma_t)))
# Instead of recomputing all quantities, just reflect over the y- and z-directions.
Ts.append(np.array((vc[0], -vc[1], vc[2], dsigma/2, r1[0], -r1[1], r1[2], r2[0], -r2[1], r2[2], r3[0], -r3[1], r3[2], nc[0], -nc[1], nc[2], theta[t], -phi[t][0], dsigma_t)))
Ts.append(np.array((vc[0], -vc[1], vc[2], dsigma/2, r3[0], -r3[1], r3[2], r4[0], -r4[1], r4[2], r1[0], -r1[1], r1[2], nc[0], -nc[1], nc[2], theta[t], -phi[t][0], dsigma_t)))
Ts.append(np.array((vc[0], vc[1], -vc[2], dsigma/2, r1[0], r1[1], -r1[2], r2[0], r2[1], -r2[2], r3[0], r3[1], -r3[2], nc[0], nc[1], -nc[2], np.pi-theta[t], phi[t][0], dsigma_t)))
Ts.append(np.array((vc[0], vc[1], -vc[2], dsigma/2, r3[0], r3[1], -r3[2], r4[0], r4[1], -r4[2], r1[0], r1[1], -r1[2], nc[0], nc[1], -nc[2], np.pi-theta[t], phi[t][0], dsigma_t)))
Ts.append(np.array((vc[0], -vc[1], -vc[2], dsigma/2, r1[0], -r1[1], -r1[2], r2[0], -r2[1], -r2[2], r3[0], -r3[1], -r3[2], nc[0], -nc[1], -nc[2], np.pi-theta[t], -phi[t][0], dsigma_t)))
Ts.append(np.array((vc[0], -vc[1], -vc[2], dsigma/2, r3[0], -r3[1], -r3[2], r4[0], -r4[1], -r4[2], r1[0], -r1[1], -r1[2], nc[0], -nc[1], -nc[2], np.pi-theta[t], -phi[t][0], dsigma_t)))
if DEBUG:
plt.show()
# Assemble a mesh table:
table = np.array(Ts)
return table | TODO: add documentation
New implementation. I'll make this work first, then document. | entailment |
def open(cls, filename):
"""Open a new bundle.
Open a bundle from a JSON-formatted PHOEBE 2 file.
This is a constructor so should be called as:
>>> b = Bundle.open('test.phoebe')
:parameter str filename: relative or full path to the file
:return: instantiated :class:`Bundle` object
"""
filename = os.path.expanduser(filename)
logger.debug("importing from {}".format(filename))
f = open(filename, 'r')
data = json.load(f, object_pairs_hook=parse_json)
f.close()
b = cls(data)
version = b.get_value('phoebe_version')
phoebe_version_import = StrictVersion(version if version != 'devel' else '2.1.2')
phoebe_version_this = StrictVersion(__version__ if __version__ != 'devel' else '2.1.2')
logger.debug("importing from PHOEBE v {} into v {}".format(phoebe_version_import, phoebe_version_this))
# update the entry in the PS, so if this is saved again it will have the new version
b.set_value('phoebe_version', __version__)
if phoebe_version_import == phoebe_version_this:
return b
elif phoebe_version_import > phoebe_version_this:
warning = "importing from a newer version ({}) of PHOEBE, this may or may not work, consider updating".format(phoebe_version_import)
print("WARNING: {}".format(warning))
logger.warning(warning)
return b
if phoebe_version_import < StrictVersion("2.1.2"):
b._import_before_v211 = True
warning = "Importing from an older version ({}) of PHOEBE which did not support constraints in solar units. All constraints will remain in SI, but calling set_hierarchy will likely fail.".format(phoebe_version_import)
print("WARNING: {}".format(warning))
logger.warning(warning)
if phoebe_version_import < StrictVersion("2.1.0"):
logger.warning("importing from an older version ({}) of PHOEBE into version {}".format(phoebe_version_import, phoebe_version_this))
def _ps_dict(ps):
return {p.qualifier: p.get_quantity() if hasattr(p, 'get_quantity') else p.get_value() for p in ps.to_list()}
# rpole -> requiv: https://github.com/phoebe-project/phoebe2/pull/300
dict_stars = {}
for star in b.hierarchy.get_stars():
ps_star = b.filter(context='component', component=star)
dict_stars[star] = _ps_dict(ps_star)
# TODO: actually do the translation
rpole = dict_stars[star].pop('rpole', 1.0*u.solRad).to(u.solRad).value
# PHOEBE 2.0 didn't have syncpar for contacts
if len(b.filter('syncpar', component=star)):
F = b.get_value('syncpar', component=star, context='component')
else:
F = 1.0
parent_orbit = b.hierarchy.get_parent_of(star)
component = b.hierarchy.get_primary_or_secondary(star, return_ind=True)
sma = b.get_value('sma', component=parent_orbit, context='component', unit=u.solRad)
q = b.get_value('q', component=parent_orbit, context='component')
d = 1 - b.get_value('ecc', component=parent_orbit)
logger.info("roche.rpole_to_requiv_aligned(rpole={}, sma={}, q={}, F={}, d={}, component={})".format(rpole, sma, q, F, d, component))
dict_stars[star]['requiv'] = roche.rpole_to_requiv_aligned(rpole, sma, q, F, d, component=component)
b.remove_component(star)
for star, dict_star in dict_stars.items():
logger.info("attempting to update component='{}' to new version requirements".format(star))
b.add_component('star', component=star, check_label=False, **dict_star)
dict_envs = {}
for env in b.hierarchy.get_envelopes():
ps_env = b.filter(context='component', component=env)
dict_envs[env] = _ps_dict(ps_env)
b.remove_component(env)
for env, dict_env in dict_envs.items():
logger.info("attempting to update component='{}' to new version requirements".format(env))
b.add_component('envelope', component=env, check_label=False, **dict_env)
# TODO: this probably will fail once more than one contacts are
# supported, but will never need that for 2.0->2.1 since
# multiples aren't supported (yet) call b.set_hierarchy() to
# reset all hieararchy-dependent constraints (including
# pot<->requiv)
b.set_hierarchy()
primary = b.hierarchy.get_stars()[0]
b.flip_constraint('pot', component=env, solve_for='requiv@{}'.format(primary), check_nan=False)
b.set_value('pot', component=env, context='component', value=dict_env['pot'])
b.flip_constraint('requiv', component=primary, solve_for='pot', check_nan=False)
# reset all hieararchy-dependent constraints
b.set_hierarchy()
# mesh datasets: https://github.com/phoebe-project/phoebe2/pull/261, https://github.com/phoebe-project/phoebe2/pull/300
for dataset in b.filter(context='dataset', kind='mesh').datasets:
logger.info("attempting to update mesh dataset='{}' to new version requirements".format(dataset))
ps_mesh = b.filter(context='dataset', kind='mesh', dataset=dataset)
dict_mesh = _ps_dict(ps_mesh)
# NOTE: we will not remove (or update) the dataset from any existing models
b.remove_dataset(dataset, context=['dataset', 'constraint', 'compute'])
if len(b.filter(dataset=dataset, context='model')):
logger.warning("existing model for dataset='{}' models={} will not be removed, but likely will not work with new plotting updates".format(dataset, b.filter(dataset=dataset, context='model').models))
b.add_dataset('mesh', dataset=dataset, check_label=False, **dict_mesh)
# vgamma definition: https://github.com/phoebe-project/phoebe2/issues/234
logger.info("updating vgamma to new version requirements")
b.set_value('vgamma', -1*b.get_value('vgamma'))
# remove phshift parameter: https://github.com/phoebe-project/phoebe2/commit/1fa3a4e1c0f8d80502101e1b1e750f5fb14115cb
logger.info("removing any phshift parameters for new version requirements")
b.remove_parameters_all(qualifier='phshift')
# colon -> long: https://github.com/phoebe-project/phoebe2/issues/211
logger.info("removing any colon parameters for new version requirements")
b.remove_parameters_all(qualifier='colon')
# make sure constraints are updated according to conf.interactive_constraints
b.run_delayed_constraints()
return b | Open a new bundle.
Open a bundle from a JSON-formatted PHOEBE 2 file.
This is a constructor so should be called as:
>>> b = Bundle.open('test.phoebe')
:parameter str filename: relative or full path to the file
:return: instantiated :class:`Bundle` object | entailment |
def from_server(cls, bundleid, server='http://localhost:5555',
as_client=True):
"""Load a new bundle from a server.
[NOT IMPLEMENTED]
Load a bundle from a phoebe server. This is a constructor so should be
called as:
>>> b = Bundle.from_server('asdf', as_client=False)
:parameter str bundleid: the identifier given to the bundle by the
server
:parameter str server: the host (and port) of the server
:parameter bool as_client: whether to attach in client mode
(default: True)
"""
if not conf.devel:
raise NotImplementedError("'from_server' not officially supported for this release. Enable developer mode to test.")
# TODO: run test message on server, if localhost and fails, attempt to
# launch?
url = "{}/{}/json".format(server, bundleid)
logger.info("downloading bundle from {}".format(url))
r = requests.get(url, timeout=5)
rjson = r.json()
b = cls(rjson['data'])
if as_client:
b.as_client(as_client, server=server,
bundleid=rjson['meta']['bundleid'])
logger.warning("This bundle is in client mode, meaning all\
computations will be handled by the server at {}. To disable\
client mode, call as_client(False) or in the future pass\
as_client=False to from_server".format(server))
return b | Load a new bundle from a server.
[NOT IMPLEMENTED]
Load a bundle from a phoebe server. This is a constructor so should be
called as:
>>> b = Bundle.from_server('asdf', as_client=False)
:parameter str bundleid: the identifier given to the bundle by the
server
:parameter str server: the host (and port) of the server
:parameter bool as_client: whether to attach in client mode
(default: True) | entailment |
def from_legacy(cls, filename, add_compute_legacy=True, add_compute_phoebe=True):
"""Load a bundle from a PHOEBE 1.0 Legacy file.
This is a constructor so should be called as:
>>> b = Bundle.from_legacy('myfile.phoebe')
:parameter str filename: relative or full path to the file
:return: instantiated :class:`Bundle` object
"""
logger.warning("importing from legacy is experimental until official 1.0 release")
filename = os.path.expanduser(filename)
return io.load_legacy(filename, add_compute_legacy, add_compute_phoebe) | Load a bundle from a PHOEBE 1.0 Legacy file.
This is a constructor so should be called as:
>>> b = Bundle.from_legacy('myfile.phoebe')
:parameter str filename: relative or full path to the file
:return: instantiated :class:`Bundle` object | entailment |
def default_star(cls, starA='starA', force_build=False):
"""
For convenience, this function is available at the top-level as
<phoebe.default_star> as well as <phoebe.frontend.bundle.Bundle.default_star>.
sun
This is a constructor, so should be called as:
>>> b = Bundle.default_binary()
Arguments
-----------
* `starA` (string, optional, default='starA'): the label to be set for
starA.
* `force_build` (bool, optional, default=False): whether to force building
the bundle from scratch. If False, pre-cached files will be loaded
whenever possible to save time.
Returns
-----------
* an instantiated <phoebe.frontend.bundle.Bundle> object.
"""
if not force_build and not conf.devel:
b = cls.open(os.path.join(_bundle_cache_dir, 'default_star.bundle'))
if starA != 'starA':
b.rename_component('starA', starA)
return b
b = cls()
# IMPORTANT NOTE: if changing any of the defaults for a new release,
# make sure to update the cached files (see frontend/default_bundles
# directory for script to update all cached bundles)
b.add_star(component=starA)
b.set_hierarchy(_hierarchy.component(b[starA]))
b.add_compute(distortion_method='rotstar', irrad_method='none')
return b | For convenience, this function is available at the top-level as
<phoebe.default_star> as well as <phoebe.frontend.bundle.Bundle.default_star>.
sun
This is a constructor, so should be called as:
>>> b = Bundle.default_binary()
Arguments
-----------
* `starA` (string, optional, default='starA'): the label to be set for
starA.
* `force_build` (bool, optional, default=False): whether to force building
the bundle from scratch. If False, pre-cached files will be loaded
whenever possible to save time.
Returns
-----------
* an instantiated <phoebe.frontend.bundle.Bundle> object. | entailment |
def default_binary(cls, starA='primary', starB='secondary', orbit='binary',
contact_binary=False, force_build=False):
"""
For convenience, this function is available at the top-level as
<phoebe.default_binary> as well as
<phoebe.frontend.bundle.Bundle.default_binary>.
primary - secondary
This is a constructor, so should be called as:
>>> b = Bundle.default_binary()
Arguments
-----------
* `starA` (string, optional, default='primary'): the label to be set for
the primary component.
* `starB` (string, optional, default='secondary'): the label to be set for
the secondary component.
* `orbit` (string, optional, default='binary'): the label to be set for
the binary component.
* `contact_binary` (bool, optional, default=False): whether to also
add an envelope (with component='contact_envelope') and set the
hierarchy to a contact binary system.
* `force_build` (bool, optional, default=False): whether to force building
the bundle from scratch. If False, pre-cached files will be loaded
whenever possible to save time.
Returns
-----------
* an instantiated <phoebe.frontend.bundle.Bundle> object.
"""
if not force_build and not conf.devel:
if contact_binary:
b = cls.open(os.path.join(_bundle_cache_dir, 'default_contact_binary.bundle'))
else:
b = cls.open(os.path.join(_bundle_cache_dir, 'default_binary.bundle'))
secondary = 'secondary'
if starA != 'primary':
if starA == 'secondary':
secondary = 'temp_secondary'
b.rename_component('secondary', secondary)
b.rename_component('primary', starA)
if starB != 'secondary':
b.rename_component(secondary, starB)
if orbit != 'binary':
b.rename_component('binary', 'orbit')
return b
b = cls()
# IMPORTANT NOTE: if changing any of the defaults for a new release,
# make sure to update the cached files (see frontend/default_bundles
# directory for script to update all cached bundles)
if contact_binary:
orbit_defaults = {'sma': 3.35, 'period': 0.5}
star_defaults = {'requiv': 1.5}
else:
orbit_defaults = {'sma': 5.3, 'period': 1.0}
star_defaults = {'requiv': 1.0}
b.add_star(component=starA, **star_defaults)
b.add_star(component=starB, **star_defaults)
b.add_orbit(component=orbit, **orbit_defaults)
if contact_binary:
b.add_component('envelope', component='contact_envelope')
b.set_hierarchy(_hierarchy.binaryorbit,
b[orbit],
b[starA],
b[starB],
b['contact_envelope'])
else:
b.set_hierarchy(_hierarchy.binaryorbit,
b[orbit],
b[starA],
b[starB])
b.add_compute()
return b | For convenience, this function is available at the top-level as
<phoebe.default_binary> as well as
<phoebe.frontend.bundle.Bundle.default_binary>.
primary - secondary
This is a constructor, so should be called as:
>>> b = Bundle.default_binary()
Arguments
-----------
* `starA` (string, optional, default='primary'): the label to be set for
the primary component.
* `starB` (string, optional, default='secondary'): the label to be set for
the secondary component.
* `orbit` (string, optional, default='binary'): the label to be set for
the binary component.
* `contact_binary` (bool, optional, default=False): whether to also
add an envelope (with component='contact_envelope') and set the
hierarchy to a contact binary system.
* `force_build` (bool, optional, default=False): whether to force building
the bundle from scratch. If False, pre-cached files will be loaded
whenever possible to save time.
Returns
-----------
* an instantiated <phoebe.frontend.bundle.Bundle> object. | entailment |
def default_triple(cls, inner_as_primary=True, inner_as_overcontact=False,
starA='starA', starB='starB', starC='starC',
inner='inner', outer='outer',
contact_envelope='contact_envelope'):
"""Load a bundle with a default triple system.
Set inner_as_primary based on what hierarchical configuration you want.
inner_as_primary = True:
starA - starB -- starC
inner_as_primary = False:
starC -- starA - starB
This is a constructor, so should be called as:
>>> b = Bundle.default_triple_primary()
:parameter bool inner_as_primary: whether the inner-binary should be
the primary component of the outer-orbit
:return: instantiated :class:`Bundle` object
"""
if not conf.devel:
raise NotImplementedError("'default_triple' not officially supported for this release. Enable developer mode to test.")
b = cls()
b.add_star(component=starA)
b.add_star(component=starB)
b.add_star(component=starC)
b.add_orbit(component=inner, period=1)
b.add_orbit(component=outer, period=10)
if inner_as_overcontact:
b.add_envelope(component=contact_envelope)
inner_hier = _hierarchy.binaryorbit(b[inner],
b[starA],
b[starB],
b[contact_envelope])
else:
inner_hier = _hierarchy.binaryorbit(b[inner], b[starA], b[starB])
if inner_as_primary:
hierstring = _hierarchy.binaryorbit(b[outer], inner_hier, b[starC])
else:
hierstring = _hierarchy.binaryorbit(b[outer], b[starC], inner_hier)
b.set_hierarchy(hierstring)
b.add_constraint(constraint.keplers_third_law_hierarchical,
outer, inner)
# TODO: does this constraint need to be rebuilt when things change?
# (ie in set_hierarchy)
b.add_compute()
return b | Load a bundle with a default triple system.
Set inner_as_primary based on what hierarchical configuration you want.
inner_as_primary = True:
starA - starB -- starC
inner_as_primary = False:
starC -- starA - starB
This is a constructor, so should be called as:
>>> b = Bundle.default_triple_primary()
:parameter bool inner_as_primary: whether the inner-binary should be
the primary component of the outer-orbit
:return: instantiated :class:`Bundle` object | entailment |
def save(self, filename, clear_history=True, incl_uniqueid=False,
compact=False):
"""Save the bundle to a JSON-formatted ASCII file.
:parameter str filename: relative or full path to the file
:parameter bool clear_history: whether to clear history log
items before saving (default: True)
:parameter bool incl_uniqueid: whether to including uniqueids in the
file (only needed if its necessary to maintain the uniqueids when
reloading)
:parameter bool compact: whether to use compact file-formatting (maybe
be quicker to save/load, but not as easily readable)
:return: the filename
"""
if clear_history:
# TODO: let's not actually clear history,
# but rather skip the context when saving
self.remove_history()
# TODO: add option for clear_models, clear_feedback
# NOTE: PS.save will handle os.path.expanduser
return super(Bundle, self).save(filename, incl_uniqueid=incl_uniqueid,
compact=compact) | Save the bundle to a JSON-formatted ASCII file.
:parameter str filename: relative or full path to the file
:parameter bool clear_history: whether to clear history log
items before saving (default: True)
:parameter bool incl_uniqueid: whether to including uniqueids in the
file (only needed if its necessary to maintain the uniqueids when
reloading)
:parameter bool compact: whether to use compact file-formatting (maybe
be quicker to save/load, but not as easily readable)
:return: the filename | entailment |
def export_legacy(self, filename):
"""
TODO: add docs
"""
logger.warning("exporting to legacy is experimental until official 1.0 release")
filename = os.path.expanduser(filename)
return io.pass_to_legacy(self, filename) | TODO: add docs | entailment |
def _on_socket_push_updates(self, resp):
"""
[NOT IMPLEMENTED]
"""
# TODO: check to make sure resp['meta']['bundleid']==bundleid ?
# TODO: handle added parameters
# TODO: handle removed (isDeleted) parameters
for item in resp['data']:
if item['id'] in self.uniqueids:
# then we're updating something in the parameter (or deleting)
param = self.get_parameter(uniqueid=item['id'])
for attr, value in item['attributes'].items():
if hasattr(param, "_{}".format(attr)):
logger.info("updates from server: setting {}@{}={}".
format(attr, param.twig, value))
setattr(param, "_{}".format(attr), value)
else:
self._attach_param_from_server(item) | [NOT IMPLEMENTED] | entailment |
def _attach_param_from_server(self, item):
"""
[NOT IMPLEMENTED]
"""
if isinstance(item, list):
for itemi in item:
self._attach_param_from_server(itemi)
else:
# then we need to add a new parameter
d = item['attributes']
d['uniqueid'] = item['id']
param = parameters.parameter_from_json(d, bundle=self)
metawargs = {}
self._attach_params([param], **metawargs) | [NOT IMPLEMENTED] | entailment |
def as_client(self, as_client=True, server='http://localhost:5555',
bundleid=None):
"""
[NOT IMPLEMENTED]
"""
if as_client:
if not _can_client:
raise ImportError("dependencies to support client mode not met - see docs")
server_running = self._test_server(server=server,
start_if_fail=True)
if not server_running:
raise ValueError("server {} is not running".format(server))
server_split = server.split(':')
host = ':'.join(server_split[:-1])
port = int(float(server_split[-1] if len(server_split) else 8000))
self._socketio = SocketIO(host, port, BaseNamespace)
self._socketio.on('connect', self._on_socket_connect)
self._socketio.on('disconnect', self._on_socket_disconnect)
self._socketio.on('push updates', self._on_socket_push_updates)
if not bundleid:
upload_url = "{}/upload".format(server)
logger.info("uploading bundle to server {}".format(upload_url))
data = json.dumps(self.to_json(incl_uniqueid=True))
r = requests.post(upload_url, data=data, timeout=5)
bundleid = r.json()['meta']['bundleid']
self._socketio.emit('subscribe bundle', {'bundleid': bundleid})
self._bundleid = bundleid
self._is_client = server
logger.info("connected as client to server at {}:{}".
format(host, port))
else:
logger.warning("This bundle is now permanently detached from the instance\
on the server and will not receive future updates. To start a client\
in sync with the version on the server or other clients currently \
subscribed, you must instantiate a new bundle with Bundle.from_server.")
if hasattr(self, '_socketIO') and self._socketIO is not None:
self._socketio.emit('unsubscribe bundle', {'bundleid': bundleid})
self._socketIO.disconnect()
self._socketIO = None
self._bundleid = None
self._is_client = False | [NOT IMPLEMENTED] | entailment |
def client_update(self):
"""
[NOT IMPLEMENTED]
"""
if not self.is_client:
raise ValueError("Bundle is not in client mode, cannot update")
logger.info("updating client...")
# wait briefly to pickup any missed messages, which should then fire
# the corresponding callbacks and update the bundle
self._socketio.wait(seconds=1)
self._last_client_update = datetime.now() | [NOT IMPLEMENTED] | entailment |
def _default_label(self, base, context, **kwargs):
"""
Determine a default label given a base label and the passed kwargs
this simply counts the current number of matches on metawargs and
appends that number to the base
:parameter str base: the base string for the label
:parameter str context: name of the context (where the label is going)
:parameter **kwargs: the kwargs to run a filter on. The returned label
will be "{}{:02d}".format(base, number_of_results_with_kwargs+1)
:return: label
"""
kwargs['context'] = context
params = len(getattr(self.filter(check_visible=False,**kwargs), '{}s'.format(context)))
return "{}{:02d}".format(base, params+1) | Determine a default label given a base label and the passed kwargs
this simply counts the current number of matches on metawargs and
appends that number to the base
:parameter str base: the base string for the label
:parameter str context: name of the context (where the label is going)
:parameter **kwargs: the kwargs to run a filter on. The returned label
will be "{}{:02d}".format(base, number_of_results_with_kwargs+1)
:return: label | entailment |
def get_setting(self, twig=None, **kwargs):
"""
Filter in the 'setting' context
:parameter str twig: the twig used for filtering
:parameter **kwargs: any other tags to do the filter (except tag or
context)
:return: :class:`phoebe.parameters.parameters.ParameterSet`
"""
if twig is not None:
kwargs['twig'] = twig
kwargs['context'] = 'setting'
return self.filter_or_get(**kwargs) | Filter in the 'setting' context
:parameter str twig: the twig used for filtering
:parameter **kwargs: any other tags to do the filter (except tag or
context)
:return: :class:`phoebe.parameters.parameters.ParameterSet` | entailment |
def _add_history(self, redo_func, redo_kwargs, undo_func, undo_kwargs,
**kwargs):
"""
Add a new log (undo/redoable) to this history context
:parameter str redo_func: function to redo the action, must be a
method of :class:`Bundle`
:parameter dict redo_kwargs: kwargs to pass to the redo_func. Each
item must be serializable (float or str, not objects)
:parameter str undo_func: function to undo the action, must be a
method of :class:`Bundle`
:parameter dict undo_kwargs: kwargs to pass to the undo_func. Each
item must be serializable (float or str, not objects)
:parameter str history: label of the history parameter
:raises ValueError: if the label for this history item is forbidden or
already exists
"""
if not self.history_enabled:
return
param = HistoryParameter(self, redo_func, redo_kwargs,
undo_func, undo_kwargs)
metawargs = {'context': 'history',
'history': kwargs.get('history', self._default_label('hist', **{'context': 'history'}))}
self._check_label(metawargs['history'])
self._attach_params([param], **metawargs) | Add a new log (undo/redoable) to this history context
:parameter str redo_func: function to redo the action, must be a
method of :class:`Bundle`
:parameter dict redo_kwargs: kwargs to pass to the redo_func. Each
item must be serializable (float or str, not objects)
:parameter str undo_func: function to undo the action, must be a
method of :class:`Bundle`
:parameter dict undo_kwargs: kwargs to pass to the undo_func. Each
item must be serializable (float or str, not objects)
:parameter str history: label of the history parameter
:raises ValueError: if the label for this history item is forbidden or
already exists | entailment |
def get_history(self, i=None):
"""
Get a history item by index.
You can toggle whether history is recorded using
* :meth:`enable_history`
* :meth:`disable_history`
:parameter int i: integer for indexing (can be positive or
negative). If i is None or not provided, the entire list
of history items will be returned
:return: :class:`phoebe.parameters.parameters.Parameter` if i is
an int, or :class:`phoebe.parameters.parameters.ParameterSet` if i
is not provided
:raises ValueError: if no history items have been recorded.
"""
ps = self.filter(context='history')
# if not len(ps):
# raise ValueError("no history recorded")
if i is not None:
return ps.to_list()[i]
else:
return ps | Get a history item by index.
You can toggle whether history is recorded using
* :meth:`enable_history`
* :meth:`disable_history`
:parameter int i: integer for indexing (can be positive or
negative). If i is None or not provided, the entire list
of history items will be returned
:return: :class:`phoebe.parameters.parameters.Parameter` if i is
an int, or :class:`phoebe.parameters.parameters.ParameterSet` if i
is not provided
:raises ValueError: if no history items have been recorded. | entailment |
def remove_history(self, i=None):
"""
Remove a history item from the bundle by index.
You can toggle whether history is recorded using
* :meth:`enable_history`
* :meth:`disable_history`
:parameter int i: integer for indexing (can be positive or
negative). If i is None or not provided, the entire list
of history items will be removed
:raises ValueError: if no history items have been recorded.
"""
if i is None:
self.remove_parameters_all(context='history')
else:
param = self.get_history(i=i)
self.remove_parameter(uniqueid=param.uniqueid) | Remove a history item from the bundle by index.
You can toggle whether history is recorded using
* :meth:`enable_history`
* :meth:`disable_history`
:parameter int i: integer for indexing (can be positive or
negative). If i is None or not provided, the entire list
of history items will be removed
:raises ValueError: if no history items have been recorded. | entailment |
def undo(self, i=-1):
"""
Undo an item in the history logs
:parameter int i: integer for indexing (can be positive or
negative). Defaults to -1 if not provided (the latest
recorded history item)
:raises ValueError: if no history items have been recorded
"""
_history_enabled = self.history_enabled
param = self.get_history(i)
self.disable_history()
param.undo()
# TODO: do we really want to remove this? then what's the point of redo?
self.remove_parameter(uniqueid=param.uniqueid)
if _history_enabled:
self.enable_history() | Undo an item in the history logs
:parameter int i: integer for indexing (can be positive or
negative). Defaults to -1 if not provided (the latest
recorded history item)
:raises ValueError: if no history items have been recorded | entailment |
def set_hierarchy(self, *args, **kwargs):
"""
Set the hierarchy of the system.
See tutorial on building a system.
TODO: provide documentation
args can be
- string representation (preferably passed through hierarchy already)
- func and strings/PSs/params to pass to function
"""
if self._import_before_v211:
raise ValueError("This bundle was created before constraints in solar units were supported and therefore cannot call set_hierarchy. Either downgrade PHOEBE or re-create this system from scratch if you need to change the hierarchy.")
# need to run any constraints since some may be deleted and rebuilt
changed_params = self.run_delayed_constraints()
_old_param = self.get_hierarchy()
if len(args) == 1 and isinstance(args[0], str):
repr_ = args[0]
kind = None
elif len(args) == 0:
if 'value' in kwargs.keys():
repr_ = kwargs['value']
kind = None
else:
repr_ = self.get_hierarchy().get_value()
kind = None
else:
func = _get_add_func(hierarchy, args[0])
func_args = args[1:]
repr_ = func(*func_args)
kind = func.func_name
hier_param = HierarchyParameter(value=repr_,
description='Hierarchy representation')
self.remove_parameters_all(qualifier='hierarchy', context='system')
metawargs = {'context': 'system'}
self._attach_params([hier_param], **metawargs)
# cache hierarchy param so we don't need to do a filter everytime we
# want to access it in is_visible, etc
self._hierarchy_param = hier_param
self._handle_pblum_defaults()
# self._handle_dataset_selectparams()
# Handle inter-PS constraints
starrefs = hier_param.get_stars()
# user_interactive_constraints = conf.interactive_constraints
# conf.interactive_constraints_off()
for component in self.hierarchy.get_envelopes():
# we need two of the three [comp_env] + self.hierarchy.get_siblings_of(comp_env) to have constraints
logger.debug('re-creating requiv constraints')
existing_requiv_constraints = self.filter(constraint_func='requiv_to_pot', component=[component]+self.hierarchy.get_siblings_of(component))
if len(existing_requiv_constraints) == 2:
# do we need to rebuild these?
continue
elif len(existing_requiv_constraints)==0:
for component_requiv in self.hierarchy.get_siblings_of(component):
pot_parameter = self.get_parameter(qualifier='pot', component=self.hierarchy.get_envelope_of(component_requiv), context='component')
requiv_parameter = self.get_parameter(qualifier='requiv', component=component_requiv, context='component')
if len(pot_parameter.constrained_by):
solve_for = requiv_parameter.uniquetwig
else:
solve_for = pot_parameter.uniquetwig
self.add_constraint(constraint.requiv_to_pot, component_requiv,
constraint=self._default_label('requiv_to_pot', context='constraint'),
solve_for=solve_for)
else:
raise NotImplementedError("expected 0 or 2 existing requiv_to_pot constraints")
logger.debug('re-creating fillout_factor (contact) constraint for {}'.format(component))
if len(self.filter(context='constraint',
constraint_func='fillout_factor',
component=component)):
constraint_param = self.get_constraint(constraint_func='fillout_factor',
component=component)
self.remove_constraint(constraint_func='fillout_factor',
component=component)
self.add_constraint(constraint.fillout_factor, component,
solve_for=constraint_param.constrained_parameter.uniquetwig,
constraint=constraint_param.constraint)
else:
self.add_constraint(constraint.fillout_factor, component,
constraint=self._default_label('fillout_factor', context='constraint'))
logger.debug('re-creating pot_min (contact) constraint for {}'.format(component))
if len(self.filter(context='constraint',
constraint_func='potential_contact_min',
component=component)):
constraint_param = self.get_constraint(constraint_func='potential_contact_min',
component=component)
self.remove_constraint(constraint_func='potential_contact_min',
component=component)
self.add_constraint(constraint.potential_contact_min, component,
solve_for=constraint_param.constrained_parameter.uniquetwig,
constraint=constraint_param.constraint)
else:
self.add_constraint(constraint.potential_contact_min, component,
constraint=self._default_label('pot_min', context='constraint'))
logger.debug('re-creating pot_max (contact) constraint for {}'.format(component))
if len(self.filter(context='constraint',
constraint_func='potential_contact_max',
component=component)):
constraint_param = self.get_constraint(constraint_func='potential_contact_max',
component=component)
self.remove_constraint(constraint_func='potential_contact_max',
component=component)
self.add_constraint(constraint.potential_contact_max, component,
solve_for=constraint_param.constrained_parameter.uniquetwig,
constraint=constraint_param.constraint)
else:
self.add_constraint(constraint.potential_contact_max, component,
constraint=self._default_label('pot_max', context='constraint'))
for component in self.hierarchy.get_stars():
if len(starrefs)==1:
pass
# we'll do the potential constraint either way
else:
logger.debug('re-creating mass constraint for {}'.format(component))
# TODO: will this cause problems if the constraint has been flipped?
if len(self.filter(context='constraint',
constraint_func='mass',
component=component)):
constraint_param = self.get_constraint(constraint_func='mass',
component=component)
self.remove_constraint(constraint_func='mass',
component=component)
self.add_constraint(constraint.mass, component,
solve_for=constraint_param.constrained_parameter.uniquetwig,
constraint=constraint_param.constraint)
else:
self.add_constraint(constraint.mass, component,
constraint=self._default_label('mass', context='constraint'))
logger.debug('re-creating comp_sma constraint for {}'.format(component))
# TODO: will this cause problems if the constraint has been flipped?
if len(self.filter(context='constraint',
constraint_func='comp_sma',
component=component)):
constraint_param = self.get_constraint(constraint_func='comp_sma',
component=component)
self.remove_constraint(constraint_func='comp_sma',
component=component)
self.add_constraint(constraint.comp_sma, component,
solve_for=constraint_param.constrained_parameter.uniquetwig,
constraint=constraint_param.constraint)
else:
self.add_constraint(constraint.comp_sma, component,
constraint=self._default_label('comp_sma', context='constraint'))
logger.debug('re-creating rotation_period constraint for {}'.format(component))
# TODO: will this cause problems if the constraint has been flipped?
if len(self.filter(context='constraint',
constraint_func='rotation_period',
component=component)):
constraint_param = self.get_constraint(constraint_func='rotation_period',
component=component)
self.remove_constraint(constraint_func='rotation_period',
component=component)
self.add_constraint(constraint.rotation_period, component,
solve_for=constraint_param.constrained_parameter.uniquetwig,
constraint=constraint_param.constraint)
else:
self.add_constraint(constraint.rotation_period, component,
constraint=self._default_label('rotation_period', context='constraint'))
if self.hierarchy.is_contact_binary(component):
# then we're in a contact binary and need to create pot<->requiv constraints
# NOTE: pot_min and pot_max are handled above at the envelope level
logger.debug('re-creating requiv_detached_max (contact) constraint for {}'.format(component))
if len(self.filter(context='constraint',
constraint_func='requiv_detached_max',
component=component)):
# then we're changing from detached to contact so should remove the detached constraint first
self.remove_constraint(constraint_func='requiv_detached_max', component=component)
logger.debug('re-creating requiv_contact_max (contact) constraint for {}'.format(component))
if len(self.filter(context='constraint',
constraint_func='requiv_contact_max',
component=component)):
constraint_param = self.get_constraint(constraint_func='requiv_contact_max',
component=component)
self.remove_constraint(constraint_func='requiv_contact_max',
component=component)
self.add_constraint(constraint.requiv_contact_max, component,
solve_for=constraint_param.constrained_parameter.uniquetwig,
constraint=constraint_param.constraint)
else:
self.add_constraint(constraint.requiv_contact_max, component,
constraint=self._default_label('requiv_max', context='constraint'))
logger.debug('re-creating requiv_contact_min (contact) constraint for {}'.format(component))
if len(self.filter(context='constraint',
constraint_func='requiv_contact_min',
component=component)):
constraint_param = self.get_constraint(constraint_func='requiv_contact_min',
component=component)
self.remove_constraint(constraint_func='requiv_contact_min',
component=component)
self.add_constraint(constraint.requiv_contact_min, component,
solve_for=constraint_param.constrained_parameter.uniquetwig,
constraint=constraint_param.constraint)
else:
self.add_constraint(constraint.requiv_contact_min, component,
constraint=self._default_label('requiv_min', context='constraint'))
else:
# then we're in a detached/semi-detached system
# let's make sure we remove any requiv_to_pot constraints
if len(self.filter(context='constraint',
constraint_func='requiv_to_pot',
component=component)):
self.remove_constraint(constraint_func='requiv_to_pot', component=component)
logger.debug('re-creating requiv_max (detached) constraint for {}'.format(component))
if len(self.filter(context='constraint',
constraint_func='requiv_contact_max',
component=component)):
# then we're changing from contact to detached so should remove the detached constraint first
self.remove_constraint(constraint_func='requiv_contact_max', component=component)
logger.debug('re-creating requiv_detached_max (detached) constraint for {}'.format(component))
if len(self.filter(context='constraint',
constraint_func='requiv_detached_max',
component=component)):
constraint_param = self.get_constraint(constraint_func='requiv_detached_max',
component=component)
self.remove_constraint(constraint_func='requiv_detached_max',
component=component)
self.add_constraint(constraint.requiv_detached_max, component,
solve_for=constraint_param.constrained_parameter.uniquetwig,
constraint=constraint_param.constraint)
else:
self.add_constraint(constraint.requiv_detached_max, component,
constraint=self._default_label('requiv_max', context='constraint'))
logger.debug('re-creating pitch constraint for {}'.format(component))
# TODO: will this cause problems if the constraint has been flipped?
# TODO: what if the user disabled/removed this constraint?
if len(self.filter(context='constraint',
constraint_func='pitch',
component=component)):
constraint_param = self.get_constraint(constraint_func='pitch',
component=component)
self.remove_constraint(constraint_func='pitch',
component=component)
self.add_constraint(constraint.pitch, component,
solve_for=constraint_param.constrained_parameter.uniquetwig,
constraint=constraint_param.constraint)
else:
self.add_constraint(constraint.pitch, component,
constraint=self._default_label('pitch', context='constraint'))
logger.debug('re-creating yaw constraint for {}'.format(component))
# TODO: will this cause problems if the constraint has been flipped?
# TODO: what if the user disabled/removed this constraint?
if len(self.filter(context='constraint',
constraint_func='yaw',
component=component)):
constraint_param = self.get_constraint(constraint_func='yaw',
component=component)
self.remove_constraint(constraint_func='yaw',
component=component)
self.add_constraint(constraint.yaw, component,
solve_for=constraint_param.constrained_parameter.uniquetwig,
constraint=constraint_param.constraint)
else:
self.add_constraint(constraint.yaw, component,
constraint=self._default_label('yaw', context='constraint'))
# if user_interactive_constraints:
# conf.interactive_constraints_on()
# self.run_delayed_constraints()
redo_kwargs = {k: v for k, v in hier_param.to_dict().items()
if v not in [None, ''] and
k not in ['uniqueid', 'uniquetwig', 'twig',
'Class', 'context', 'qualifier',
'description']}
if _old_param is None:
# this will fake the undo-ability to raise an error saying it
# cannot be undone
undo_kwargs = {'uniqueid': None}
else:
undo_kwargs = {k: v for k, v in _old_param.to_dict().items()
if v not in [None, ''] and
k not in ['uniqueid', 'uniquetwig', 'twig',
'Class', 'context', 'qualifier',
'description']}
self._add_history(redo_func='set_hierarchy',
redo_kwargs=redo_kwargs,
undo_func='set_hierarchy',
undo_kwargs=undo_kwargs)
return | Set the hierarchy of the system.
See tutorial on building a system.
TODO: provide documentation
args can be
- string representation (preferably passed through hierarchy already)
- func and strings/PSs/params to pass to function | entailment |
def get_system(self, twig=None, **kwargs):
"""
Filter in the 'system' context
:parameter str twig: twig to use for filtering
:parameter **kwargs: any other tags to do the filter
(except twig or context)
:return: :class:`phoebe.parameters.parameters.Parameter` or
:class:`phoebe.parameters.parameters.ParameterSet`
"""
if twig is not None:
kwargs['twig'] = twig
kwargs['context'] = 'system'
return self.filter(**kwargs) | Filter in the 'system' context
:parameter str twig: twig to use for filtering
:parameter **kwargs: any other tags to do the filter
(except twig or context)
:return: :class:`phoebe.parameters.parameters.Parameter` or
:class:`phoebe.parameters.parameters.ParameterSet` | entailment |
def run_checks(self, **kwargs):
"""
Check to see whether the system is expected to be computable.
This is called by default for each set_value but will only raise a
logger warning if fails. This is also called immediately when calling
:meth:`run_compute`.
kwargs are passed to override currently set values as if they were
sent to :meth:`run_compute`.
:return: True if passed, False if failed and a message
"""
# make sure all constraints have been run
changed_params = self.run_delayed_constraints()
hier = self.hierarchy
if hier is None:
return True, ''
for component in hier.get_stars():
kind = hier.get_kind_of(component)
comp_ps = self.get_component(component)
if not len(comp_ps):
return False, "component '{}' in the hierarchy is not in the bundle".format(component)
parent = hier.get_parent_of(component)
parent_ps = self.get_component(parent)
if kind in ['star']:
# ignore the single star case
if parent:
# contact systems MUST by synchronous
if hier.is_contact_binary(component):
if self.get_value(qualifier='syncpar', component=component, context='component', **kwargs) != 1.0:
return False,\
'contact binaries must by synchronous, but syncpar@{}!=1'.format(component)
if self.get_value(qualifier='ecc', component=parent, context='component', **kwargs) != 0.0:
return False,\
'contact binaries must by circular, but ecc@{}!=0'.format(component)
if self.get_value(qualifier='pitch', component=component, context='component', **kwargs) != 0.0:
return False,\
'contact binaries must be aligned, but pitch@{}!=0'.format(component)
if self.get_value(qualifier='yaw', component=component, context='component', **kwargs) != 0.0:
return False,\
'contact binaries must be aligned, but yaw@{}!=0'.format(component)
# MUST NOT be overflowing at PERIASTRON (d=1-ecc, etheta=0)
requiv = comp_ps.get_value('requiv', unit=u.solRad, **kwargs)
requiv_max = comp_ps.get_value('requiv_max', unit=u.solRad, **kwargs)
if hier.is_contact_binary(component):
if np.isnan(requiv) or requiv > requiv_max:
return False,\
'{} is overflowing at L2/L3 (requiv={}, requiv_max={})'.format(component, requiv, requiv_max)
requiv_min = comp_ps.get_value('requiv_min')
if np.isnan(requiv) or requiv <= requiv_min:
return False,\
'{} is underflowing at L1 and not a contact system (requiv={}, requiv_min={})'.format(component, requiv, requiv_min)
elif requiv <= requiv_min * 1.001:
return False,\
'requiv@{} is too close to requiv_min (within 0.1% of critical). Use detached/semidetached model instead.'.format(component)
else:
if requiv > requiv_max:
return False,\
'{} is overflowing at periastron (requiv={}, requiv_max={})'.format(component, requiv, requiv_max)
else:
raise NotImplementedError("checks not implemented for type '{}'".format(kind))
# we also need to make sure that stars don't overlap each other
# so we'll check for each pair of stars (see issue #70 on github)
# TODO: rewrite overlap checks
for orbitref in []: #hier.get_orbits():
if len(hier.get_children_of(orbitref)) == 2:
q = self.get_value(qualifier='q', component=orbitref, context='component', **kwargs)
ecc = self.get_value(qualifier='ecc', component=orbitref, context='component', **kwargs)
starrefs = hier.get_children_of(orbitref)
if hier.get_kind_of(starrefs[0]) != 'star' or hier.get_kind_of(starrefs[1]) != 'star':
# print "***", hier.get_kind_of(starrefs[0]), hier.get_kind_of(starrefs[1])
continue
if self.get_value(qualifier='pitch', component=starrefs[0])!=0.0 or \
self.get_value(qualifier='pitch', component=starrefs[1])!=0.0 or \
self.get_value(qualifier='yaw', component=starrefs[0])!=0.0 or \
self.get_value(qualifier='yaw', component=starrefs[1])!=0.0:
# we cannot run this test for misaligned cases
continue
comp0 = hier.get_primary_or_secondary(starrefs[0], return_ind=True)
comp1 = hier.get_primary_or_secondary(starrefs[1], return_ind=True)
q0 = roche.q_for_component(q, comp0)
q1 = roche.q_for_component(q, comp1)
F0 = self.get_value(qualifier='syncpar', component=starrefs[0], context='component', **kwargs)
F1 = self.get_value(qualifier='syncpar', component=starrefs[1], context='component', **kwargs)
pot0 = self.get_value(qualifier='pot', component=starrefs[0], context='component', **kwargs)
pot0 = roche.pot_for_component(pot0, q0, comp0)
pot1 = self.get_value(qualifier='pot', component=starrefs[1], context='component', **kwargs)
pot1 = roche.pot_for_component(pot1, q1, comp1)
xrange0 = libphoebe.roche_xrange(q0, F0, 1.0-ecc, pot0+1e-6, choice=0)
xrange1 = libphoebe.roche_xrange(q1, F1, 1.0-ecc, pot1+1e-6, choice=0)
if xrange0[1]+xrange1[1] > 1.0-ecc:
return False,\
'components in {} are overlapping at periastron (change ecc@{}, syncpar@{}, or syncpar@{}).'.format(orbitref, orbitref, starrefs[0], starrefs[1])
# check to make sure passband supports the selected atm
for pbparam in self.filter(qualifier='passband').to_list():
pb = pbparam.get_value()
pbatms = _pbtable[pb]['atms']
# NOTE: atms are not attached to datasets, but per-compute and per-component
for atmparam in self.filter(qualifier='atm', kind='phoebe').to_list():
atm = atmparam.get_value()
if atm not in pbatms:
return False, "'{}' passband ({}) does not support atm='{}' ({}).".format(pb, pbparam.twig, atm, atmparam.twig)
# check length of ld_coeffs vs ld_func and ld_func vs atm
def ld_coeffs_len(ld_func, ld_coeffs):
# current choices for ld_func are:
# ['interp', 'uniform', 'linear', 'logarithmic', 'quadratic', 'square_root', 'power', 'claret', 'hillen', 'prsa']
if ld_func == 'interp':
return True,
elif ld_func in ['linear'] and len(ld_coeffs)==1:
return True,
elif ld_func in ['logarithmic', 'square_root', 'quadratic'] and len(ld_coeffs)==2:
return True,
elif ld_func in ['power'] and len(ld_coeffs)==4:
return True,
else:
return False, "ld_coeffs={} wrong length for ld_func='{}'.".format(ld_coeffs, ld_func)
for component in self.hierarchy.get_stars():
# first check ld_coeffs_bol vs ld_func_bol
ld_func = str(self.get_value(qualifier='ld_func_bol', component=component, context='component', check_visible=False, **kwargs))
ld_coeffs = np.asarray(self.get_value(qualifier='ld_coeffs_bol', component=component, context='component', check_visible=False, **kwargs))
check = ld_coeffs_len(ld_func, ld_coeffs)
if not check[0]:
return check
if ld_func != 'interp':
check = libphoebe.ld_check(ld_func, ld_coeffs)
if not check:
return False, 'ld_coeffs_bol={} not compatible for ld_func_bol=\'{}\'.'.format(ld_coeffs, ld_func)
for dataset in self.datasets:
if dataset=='_default' or self.get_dataset(dataset=dataset, kind='*dep').kind not in ['lc_dep', 'rv_dep']:
continue
ld_func = str(self.get_value(qualifier='ld_func', dataset=dataset, component=component, context='dataset', **kwargs))
ld_coeffs = np.asarray(self.get_value(qualifier='ld_coeffs', dataset=dataset, component=component, context='dataset', check_visible=False, **kwargs))
if ld_coeffs is not None:
check = ld_coeffs_len(ld_func, ld_coeffs)
if not check[0]:
return check
if ld_func != 'interp':
check = libphoebe.ld_check(ld_func, ld_coeffs)
if not check:
return False, 'ld_coeffs={} not compatible for ld_func=\'{}\'.'.format(ld_coeffs, ld_func)
if ld_func=='interp':
for compute in kwargs.get('computes', self.computes):
atm = self.get_value(qualifier='atm', component=component, compute=compute, context='compute', **kwargs)
if atm != 'ck2004':
return False, "ld_func='interp' only supported by atm='ck2004'. Either change atm@{} or ld_func@{}@{}".format(component, component, dataset)
# mesh-consistency checks
for compute in self.computes:
mesh_methods = [p.get_value() for p in self.filter(qualifier='mesh_method', compute=compute, force_ps=True).to_list()]
if 'wd' in mesh_methods:
if len(set(mesh_methods)) > 1:
return False, "all (or none) components must use mesh_method='wd'."
#### WARNINGS ONLY ####
# let's check teff vs gravb_bol and irrad_frac_refl_bol
for component in self.hierarchy.get_stars():
teff = self.get_value(qualifier='teff', component=component, context='component', unit=u.K, **kwargs)
gravb_bol = self.get_value(qualifier='gravb_bol', component=component, context='component', **kwargs)
if teff >= 8000. and gravb_bol < 0.9:
return None, "'{}' probably has a radiative atm (teff={:.0f}K>8000K), for which gravb_bol=1.00 might be a better approx than gravb_bol={:.2f}.".format(component, teff, gravb_bol)
elif teff <= 6600. and gravb_bol >= 0.9:
return None, "'{}' probably has a convective atm (teff={:.0f}K<6600K), for which gravb_bol=0.32 might be a better approx than gravb_bol={:.2f}.".format(component, teff, gravb_bol)
elif gravb_bol < 0.32 or gravb_bol > 1.00:
return None, "'{}' has intermittent temperature (6600K<teff={:.0f}K<8000K), gravb_bol might be better between 0.32-1.00 than gravb_bol={:.2f}.".format(component, teff, gravb_bol)
for component in self.hierarchy.get_stars():
teff = self.get_value(qualifier='teff', component=component, context='component', unit=u.K, **kwargs)
irrad_frac_refl_bol = self.get_value(qualifier='irrad_frac_refl_bol', component=component, context='component', **kwargs)
if teff >= 8000. and irrad_frac_refl_bol < 0.8:
return None, "'{}' probably has a radiative atm (teff={:.0f}K>8000K), for which irrad_frac_refl_bol=1.00 might be a better approx than irrad_frac_refl_bol={:.2f}.".format(component, teff, irrad_frac_refl_bol)
elif teff <= 6600. and irrad_frac_refl_bol >= 0.75:
return None, "'{}' probably has a convective atm (teff={:.0f}K<6600K), for which irrad_frac_refl_bol=0.6 might be a better approx than irrad_frac_refl_bol={:.2f}.".format(component, teff, irrad_frac_refl_bol)
elif irrad_frac_refl_bol < 0.6:
return None, "'{}' has intermittent temperature (6600K<teff={:.0f}K<8000K), irrad_frac_refl_bol might be better between 0.6-1.00 than irrad_frac_refl_bol={:.2f}.".format(component, teff, irrad_frac_refl_bol)
# TODO: add other checks
# - make sure all ETV components are legal
# - check for conflict between dynamics_method and mesh_method (?)
# we've survived all tests
return True, '' | Check to see whether the system is expected to be computable.
This is called by default for each set_value but will only raise a
logger warning if fails. This is also called immediately when calling
:meth:`run_compute`.
kwargs are passed to override currently set values as if they were
sent to :meth:`run_compute`.
:return: True if passed, False if failed and a message | entailment |
def add_feature(self, kind, component=None, **kwargs):
"""
Add a new feature (spot, etc) to a component in the system. If not
provided, 'feature' (the name of the new feature) will be created
for you and can be accessed by the 'feature' attribute of the returned
ParameterSet
>>> b.add_feature(feature.spot, component='mystar')
or
>>> b.add_feature('spot', 'mystar', colat=90)
Available kinds include:
* :func:`phoebe.parameters.feature.spot`
:parameter kind: function to call that returns a
ParameterSet or list of parameters. This must either be
a callable function that accepts nothing but default values,
or the name of a function (as a string) that can be found in the
:mod:`phoebe.parameters.feature` module (ie. 'spot')
:type kind: str or callable
:parameter str component: name of the component to attach the feature
:parameter str feature: (optional) name of the newly-created feature
:parameter **kwargs: default value for any of the newly-created
parameters
:return: :class:`phoebe.parameters.parameters.ParameterSet` of
all parameters that have been added
:raises NotImplementedError: if required constraint is not implemented
"""
func = _get_add_func(_feature, kind)
if kwargs.get('feature', False) is None:
# then we want to apply the default below, so let's pop for now
_ = kwargs.pop('feature')
kwargs.setdefault('feature',
self._default_label(func.func_name,
**{'context': 'feature',
'kind': func.func_name}))
self._check_label(kwargs['feature'])
if component is None:
stars = self.hierarchy.get_meshables()
if len(stars) == 1:
component = stars[0]
else:
raise ValueError("must provide component")
if component not in self.components:
raise ValueError('component not recognized')
component_kind = self.filter(component=component, context='component').kind
if not _feature._component_allowed_for_feature(func.func_name, component_kind):
raise ValueError("{} does not support component with kind {}".format(func.func_name, component_kind))
params, constraints = func(**kwargs)
metawargs = {'context': 'feature',
'component': component,
'feature': kwargs['feature'],
'kind': func.func_name}
self._attach_params(params, **metawargs)
redo_kwargs = deepcopy(kwargs)
redo_kwargs['func'] = func.func_name
self._add_history(redo_func='add_feature',
redo_kwargs=redo_kwargs,
undo_func='remove_feature',
undo_kwargs={'feature': kwargs['feature']})
for constraint in constraints:
self.add_constraint(*constraint)
#return params
# NOTE: we need to call get_ in order to make sure all metawargs are applied
return self.get_feature(**metawargs) | Add a new feature (spot, etc) to a component in the system. If not
provided, 'feature' (the name of the new feature) will be created
for you and can be accessed by the 'feature' attribute of the returned
ParameterSet
>>> b.add_feature(feature.spot, component='mystar')
or
>>> b.add_feature('spot', 'mystar', colat=90)
Available kinds include:
* :func:`phoebe.parameters.feature.spot`
:parameter kind: function to call that returns a
ParameterSet or list of parameters. This must either be
a callable function that accepts nothing but default values,
or the name of a function (as a string) that can be found in the
:mod:`phoebe.parameters.feature` module (ie. 'spot')
:type kind: str or callable
:parameter str component: name of the component to attach the feature
:parameter str feature: (optional) name of the newly-created feature
:parameter **kwargs: default value for any of the newly-created
parameters
:return: :class:`phoebe.parameters.parameters.ParameterSet` of
all parameters that have been added
:raises NotImplementedError: if required constraint is not implemented | entailment |
def get_feature(self, feature=None, **kwargs):
"""
Filter in the 'proerty' context
:parameter str feature: name of the feature (optional)
:parameter **kwargs: any other tags to do the filter
(except component or context)
:return: :class:`phoebe.parameters.parameters.ParameterSet`
"""
if feature is not None:
kwargs['feature'] = feature
kwargs['context'] = 'feature'
return self.filter(**kwargs) | Filter in the 'proerty' context
:parameter str feature: name of the feature (optional)
:parameter **kwargs: any other tags to do the filter
(except component or context)
:return: :class:`phoebe.parameters.parameters.ParameterSet` | entailment |
def remove_feature(self, feature=None, **kwargs):
"""
[NOT IMPLEMENTED]
Remove a 'feature' from the bundle
:raises NotImplementedError: because this isn't implemented yet
"""
self._kwargs_checks(kwargs)
# Let's avoid deleting ALL features from the matching contexts
if feature is None and not len(kwargs.items()):
raise ValueError("must provide some value to filter for features")
kwargs['feature'] = feature
# Let's avoid the possibility of deleting a single parameter
kwargs['qualifier'] = None
# Let's also avoid the possibility of accidentally deleting system
# parameters, etc
kwargs.setdefault('context', ['feature'])
self.remove_parameters_all(**kwargs)
self._add_history(redo_func='remove_feature',
redo_kwargs=kwargs,
undo_func=None,
undo_kwargs={})
return | [NOT IMPLEMENTED]
Remove a 'feature' from the bundle
:raises NotImplementedError: because this isn't implemented yet | entailment |
def rename_feature(self, old_feature, new_feature):
"""
Change the label of a feature attached to the Bundle
:parameter str old_feature: the current name of the feature
(must exist)
:parameter str new_feature: the desired new name of the feature
(must not exist)
:return: None
:raises ValueError: if the new_feature is forbidden
"""
# TODO: raise error if old_feature not found?
self._check_label(new_feature)
self._rename_label('feature', old_feature, new_feature) | Change the label of a feature attached to the Bundle
:parameter str old_feature: the current name of the feature
(must exist)
:parameter str new_feature: the desired new name of the feature
(must not exist)
:return: None
:raises ValueError: if the new_feature is forbidden | entailment |
def add_spot(self, component=None, feature=None, **kwargs):
"""
Shortcut to :meth:`add_feature` but with kind='spot'
"""
if component is None:
if len(self.hierarchy.get_stars())==1:
component = self.hierarchy.get_stars()[0]
else:
raise ValueError("must provide component for spot")
kwargs.setdefault('component', component)
kwargs.setdefault('feature', feature)
return self.add_feature('spot', **kwargs) | Shortcut to :meth:`add_feature` but with kind='spot' | entailment |
def get_spot(self, feature=None, **kwargs):
"""
Shortcut to :meth:`get_feature` but with kind='spot'
"""
kwargs.setdefault('kind', 'spot')
return self.get_feature(feature, **kwargs) | Shortcut to :meth:`get_feature` but with kind='spot' | entailment |
def remove_spot(self, feature=None, **kwargs):
"""
[NOT IMPLEMENTED]
Shortcut to :meth:`remove_feature` but with kind='spot'
"""
kwargs.setdefault('kind', 'spot')
return self.remove_feature(feature, **kwargs) | [NOT IMPLEMENTED]
Shortcut to :meth:`remove_feature` but with kind='spot' | entailment |
def add_component(self, kind, **kwargs):
"""
Add a new component (star or orbit) to the system. If not provided,
'component' (the name of the new star or orbit) will be created for
you and can be accessed by the 'component' attribute of the returned
ParameterSet.
>>> b.add_component(component.star)
or
>>> b.add_component('orbit', period=2.5)
Available kinds include:
* :func:`phoebe.parameters.component.star`
* :func:`phoebe.parameters.component.orbit`
:parameter kind: function to call that returns a
ParameterSet or list of parameters. This must either be
a callable function that accepts nothing but default
values, or the name of a function (as a string) that can
be found in the :mod:`phoebe.parameters.component` module
(ie. 'star', 'orbit')
:type kind: str or callable
:parameter str component: (optional) name of the newly-created
component
:parameter **kwargs: default values for any of the newly-created
parameters
:return: :class:`phoebe.parameters.parameters.ParameterSet` of
all parameters that have been added
:raises NotImplementedError: if required constraint is not implemented
"""
func = _get_add_func(component, kind)
if kwargs.get('component', False) is None:
# then we want to apply the default below, so let's pop for now
_ = kwargs.pop('component')
kwargs.setdefault('component',
self._default_label(func.func_name,
**{'context': 'component',
'kind': func.func_name}))
if kwargs.pop('check_label', True):
self._check_label(kwargs['component'])
params, constraints = func(**kwargs)
metawargs = {'context': 'component',
'component': kwargs['component'],
'kind': func.func_name}
self._attach_params(params, **metawargs)
redo_kwargs = deepcopy(kwargs)
redo_kwargs['func'] = func.func_name
self._add_history(redo_func='add_component',
redo_kwargs=redo_kwargs,
undo_func='remove_component',
undo_kwargs={'component': kwargs['component']})
for constraint in constraints:
self.add_constraint(*constraint)
# since we've already processed (so that we can get the new qualifiers),
# we'll only raise a warning
self._kwargs_checks(kwargs, warning_only=True)
# return params
return self.get_component(**metawargs) | Add a new component (star or orbit) to the system. If not provided,
'component' (the name of the new star or orbit) will be created for
you and can be accessed by the 'component' attribute of the returned
ParameterSet.
>>> b.add_component(component.star)
or
>>> b.add_component('orbit', period=2.5)
Available kinds include:
* :func:`phoebe.parameters.component.star`
* :func:`phoebe.parameters.component.orbit`
:parameter kind: function to call that returns a
ParameterSet or list of parameters. This must either be
a callable function that accepts nothing but default
values, or the name of a function (as a string) that can
be found in the :mod:`phoebe.parameters.component` module
(ie. 'star', 'orbit')
:type kind: str or callable
:parameter str component: (optional) name of the newly-created
component
:parameter **kwargs: default values for any of the newly-created
parameters
:return: :class:`phoebe.parameters.parameters.ParameterSet` of
all parameters that have been added
:raises NotImplementedError: if required constraint is not implemented | entailment |
def get_component(self, component=None, **kwargs):
"""
Filter in the 'component' context
:parameter str component: name of the component (optional)
:parameter **kwargs: any other tags to do the filter
(except component or context)
:return: :class:`phoebe.parameters.parameters.ParameterSet`
"""
if component is not None:
kwargs['component'] = component
kwargs['context'] = 'component'
return self.filter(**kwargs) | Filter in the 'component' context
:parameter str component: name of the component (optional)
:parameter **kwargs: any other tags to do the filter
(except component or context)
:return: :class:`phoebe.parameters.parameters.ParameterSet` | entailment |
def remove_component(self, component, **kwargs):
"""
[NOT IMPLEMENTED]
Remove a 'component' from the bundle
:raises NotImplementedError: because this isn't implemented yet
"""
# NOTE: run_checks will check if an entry is in the hierarchy but has no parameters
kwargs['component'] = component
# NOTE: we do not remove from 'model' by default
kwargs['context'] = ['component', 'constraint', 'dataset', 'compute']
self.remove_parameters_all(**kwargs) | [NOT IMPLEMENTED]
Remove a 'component' from the bundle
:raises NotImplementedError: because this isn't implemented yet | entailment |
def rename_component(self, old_component, new_component):
"""
Change the label of a component attached to the Bundle
:parameter str old_component: the current name of the component
(must exist)
:parameter str new_component: the desired new name of the component
(must not exist)
:return: None
:raises ValueError: if the new_component is forbidden
"""
# TODO: raise error if old_component not found?
# even though _rename_tag will call _check_label again, we should
# do it first so that we can raise any errors BEFORE we start messing
# with the hierarchy
self._check_label(new_component)
# changing hierarchy must be called first since it needs to access
# the kind of old_component
if len([c for c in self.components if new_component in c]):
logger.warning("hierarchy may not update correctly with new component")
self.hierarchy.rename_component(old_component, new_component)
self._rename_label('component', old_component, new_component)
self._handle_dataset_selectparams() | Change the label of a component attached to the Bundle
:parameter str old_component: the current name of the component
(must exist)
:parameter str new_component: the desired new name of the component
(must not exist)
:return: None
:raises ValueError: if the new_component is forbidden | entailment |
def add_orbit(self, component=None, **kwargs):
"""
Shortcut to :meth:`add_component` but with kind='orbit'
"""
kwargs.setdefault('component', component)
return self.add_component('orbit', **kwargs) | Shortcut to :meth:`add_component` but with kind='orbit' | entailment |
def get_orbit(self, component=None, **kwargs):
"""
Shortcut to :meth:`get_component` but with kind='star'
"""
kwargs.setdefault('kind', 'orbit')
return self.get_component(component, **kwargs) | Shortcut to :meth:`get_component` but with kind='star' | entailment |
def remove_orbit(self, component=None, **kwargs):
"""
[NOT IMPLEMENTED]
Shortcut to :meth:`remove_component` but with kind='star'
"""
kwargs.setdefault('kind', 'orbit')
return self.remove_component(component, **kwargs) | [NOT IMPLEMENTED]
Shortcut to :meth:`remove_component` but with kind='star' | entailment |
def add_star(self, component=None, **kwargs):
"""
Shortcut to :meth:`add_component` but with kind='star'
"""
kwargs.setdefault('component', component)
return self.add_component('star', **kwargs) | Shortcut to :meth:`add_component` but with kind='star' | entailment |
def get_star(self, component=None, **kwargs):
"""
Shortcut to :meth:`get_component` but with kind='star'
"""
kwargs.setdefault('kind', 'star')
return self.get_component(component, **kwargs) | Shortcut to :meth:`get_component` but with kind='star' | entailment |
def remove_star(self, component=None, **kwargs):
"""
[NOT IMPLEMENTED]
Shortcut to :meth:`remove_component` but with kind='star'
"""
kwargs.setdefault('kind', 'star')
return self.remove_component(component, **kwargs) | [NOT IMPLEMENTED]
Shortcut to :meth:`remove_component` but with kind='star' | entailment |
def add_envelope(self, component=None, **kwargs):
"""
[NOT SUPPORTED]
Shortcut to :meth:`add_component` but with kind='envelope'
"""
kwargs.setdefault('component', component)
return self.add_component('envelope', **kwargs) | [NOT SUPPORTED]
Shortcut to :meth:`add_component` but with kind='envelope' | entailment |
def get_envelope(self, component=None, **kwargs):
"""
[NOT SUPPORTED]
Shortcut to :meth:`get_component` but with kind='envelope'
"""
kwargs.setdefault('kind', 'envelope')
return self.get_component(component, **kwargs) | [NOT SUPPORTED]
Shortcut to :meth:`get_component` but with kind='envelope' | entailment |
def remove_envelope(self, component=None, **kwargs):
"""
[NOT SUPPORTED]
[NOT IMPLEMENTED]
Shortcut to :meth:`remove_component` but with kind='envelope'
"""
kwargs.setdefault('kind', 'envelope')
return self.remove_component(component, **kwargs) | [NOT SUPPORTED]
[NOT IMPLEMENTED]
Shortcut to :meth:`remove_component` but with kind='envelope' | entailment |
def get_ephemeris(self, component=None, t0='t0_supconj', **kwargs):
"""
Get the ephemeris of a component (star or orbit)
:parameter str component: name of the component. If not given,
component will default to the top-most level of the current
hierarchy
:parameter t0: qualifier of the parameter to be used for t0
:type t0: str
:parameter **kwargs: any value passed through kwargs will override the
ephemeris retrieved by component (ie period, t0, dpdt).
Note: be careful about units - input values will not be converted.
:return: dictionary containing period, t0 (t0_supconj if orbit),
dpdt (as applicable)
:rtype: dict
"""
if component is None:
component = self.hierarchy.get_top()
if kwargs.get('shift', False):
raise ValueError("support for phshift was removed as of 2.1. Please pass t0 instead.")
ret = {}
ps = self.filter(component=component, context='component')
if ps.kind in ['orbit']:
ret['period'] = ps.get_value(qualifier='period', unit=u.d)
if isinstance(t0, str):
ret['t0'] = ps.get_value(qualifier=t0, unit=u.d)
elif isinstance(t0, float) or isinstance(t0, int):
ret['t0'] = t0
else:
raise ValueError("t0 must be string (qualifier) or float")
ret['dpdt'] = ps.get_value(qualifier='dpdt', unit=u.d/u.d)
elif ps.kind in ['star']:
# TODO: consider renaming period to prot
ret['period'] = ps.get_value(qualifier='period', unit=u.d)
else:
raise NotImplementedError
for k,v in kwargs.items():
ret[k] = v
return ret | Get the ephemeris of a component (star or orbit)
:parameter str component: name of the component. If not given,
component will default to the top-most level of the current
hierarchy
:parameter t0: qualifier of the parameter to be used for t0
:type t0: str
:parameter **kwargs: any value passed through kwargs will override the
ephemeris retrieved by component (ie period, t0, dpdt).
Note: be careful about units - input values will not be converted.
:return: dictionary containing period, t0 (t0_supconj if orbit),
dpdt (as applicable)
:rtype: dict | entailment |
def to_phase(self, time, component=None, t0='t0_supconj', **kwargs):
"""
Get the phase(s) of a time(s) for a given ephemeris
:parameter time: time to convert to phases (should be in same system
as t0s)
:type time: float, list, or array
:parameter t0: qualifier of the parameter to be used for t0
:type t0: str
:parameter str component: component for which to get the ephemeris.
If not given, component will default to the top-most level of the
current hierarchy
:parameter **kwargs: any value passed through kwargs will override the
ephemeris retrieved by component (ie period, t0, dpdt).
Note: be careful about units - input values will not be converted.
:return: phase (float) or phases (array)
"""
if kwargs.get('shift', False):
raise ValueError("support for phshift was removed as of 2.1. Please pass t0 instead.")
ephem = self.get_ephemeris(component=component, t0=t0, **kwargs)
if isinstance(time, list):
time = np.array(time)
elif isinstance(time, Parameter):
time = time.get_value(u.d)
elif isinstance(time, str):
time = self.get_value(time, u.d)
t0 = ephem.get('t0', 0.0)
period = ephem.get('period', 1.0)
dpdt = ephem.get('dpdt', 0.0)
if dpdt != 0:
phase = np.mod(1./dpdt * np.log(period + dpdt*(time-t0)), 1.0)
else:
phase = np.mod((time-t0)/period, 1.0)
if isinstance(phase, float):
if phase > 0.5:
phase -= 1
else:
# then should be an array
phase[phase > 0.5] -= 1
return phase | Get the phase(s) of a time(s) for a given ephemeris
:parameter time: time to convert to phases (should be in same system
as t0s)
:type time: float, list, or array
:parameter t0: qualifier of the parameter to be used for t0
:type t0: str
:parameter str component: component for which to get the ephemeris.
If not given, component will default to the top-most level of the
current hierarchy
:parameter **kwargs: any value passed through kwargs will override the
ephemeris retrieved by component (ie period, t0, dpdt).
Note: be careful about units - input values will not be converted.
:return: phase (float) or phases (array) | entailment |
def to_time(self, phase, component=None, t0='t0_supconj', **kwargs):
"""
Get the time(s) of a phase(s) for a given ephemeris
:parameter phase: phase to convert to times (should be in
same system as t0s)
:type phase: float, list, or array
` :parameter str component: component for which to get the ephemeris.
If not given, component will default to the top-most level of the
current hierarchy
:parameter t0: qualifier of the parameter to be used for t0
:type t0: str
:parameter **kwargs: any value passed through kwargs will override the
ephemeris retrieved by component (ie period, t0, dpdt).
Note: be careful about units - input values will not be converted.
:return: time (float) or times (array)
"""
if kwargs.get('shift', False):
raise ValueError("support for phshift was removed as of 2.1. Please pass t0 instead.")
ephem = self.get_ephemeris(component=component, t0=t0, **kwargs)
if isinstance(phase, list):
phase = np.array(phase)
t0 = ephem.get('t0', 0.0)
period = ephem.get('period', 1.0)
dpdt = ephem.get('dpdt', 0.0)
# if changing this, also see parameters.constraint.time_ephem
if dpdt != 0:
time = t0 + 1./dpdt*(np.exp(dpdt*(phase))-period)
else:
time = t0 + (phase)*period
return time | Get the time(s) of a phase(s) for a given ephemeris
:parameter phase: phase to convert to times (should be in
same system as t0s)
:type phase: float, list, or array
` :parameter str component: component for which to get the ephemeris.
If not given, component will default to the top-most level of the
current hierarchy
:parameter t0: qualifier of the parameter to be used for t0
:type t0: str
:parameter **kwargs: any value passed through kwargs will override the
ephemeris retrieved by component (ie period, t0, dpdt).
Note: be careful about units - input values will not be converted.
:return: time (float) or times (array) | entailment |
def add_dataset(self, kind, component=None, **kwargs):
"""
Add a new dataset to the bundle. If not provided,
'dataset' (the name of the new dataset) will be created for
you and can be accessed by the 'dataset' attribute of the returned
ParameterSet.
For light curves, the light curve will be generated for the entire system.
For radial velocities, you need to provide a list of components
for which values should be computed.
Available kinds include:
* :func:`phoebe.parameters.dataset.lc`
* :func:`phoebe.parameters.dataset.rv`
* :func:`phoebe.parameters.dataset.etv`
* :func:`phoebe.parameters.dataset.orb`
* :func:`phoebe.parameters.dataset.mesh`
* :func:`phoebe.parameters.dataset.lp`
:parameter kind: function to call that returns a
ParameterSet or list of parameters. This must either be
a callable function that accepts nothing but default
values, or the name of a function (as a string) that can
be found in the :mod:`phoebe.parameters.dataset` module
:type kind: str or callable
:parameter component: a list of
components for which to compute the observables. For
light curves this should be left at None to always compute
the light curve for the entire system. For most other
types, you need to provide at least one component.
:type component: str or list of strings or None
:parameter str dataset: (optional) name of the newly-created dataset
:parameter **kwargs: default values for any of the newly-created
parameters
:return: :class:`phoebe.parameters.parameters.ParameterSet` of
all parameters that have been added
:raises NotImplementedError: if required constraint is not implemented
"""
sing_plural = {}
sing_plural['time'] = 'times'
sing_plural['flux'] = 'fluxes'
sing_plural['sigma'] = 'sigmas'
sing_plural['rv'] = 'rvs'
func = _get_add_func(_dataset, kind.lower()
if isinstance(kind, str)
else kind)
kwargs.setdefault('dataset',
self._default_label(func.func_name,
**{'context': 'dataset',
'kind': func.func_name}))
if kwargs.pop('check_label', True):
self._check_label(kwargs['dataset'])
kind = func.func_name
# Let's remember if the user passed components or if they were automatically assigned
user_provided_components = component or kwargs.get('components', False)
if kind == 'lc':
allowed_components = [None]
default_components = allowed_components
elif kind in ['rv', 'orb']:
allowed_components = self.hierarchy.get_stars() # + self.hierarchy.get_orbits()
default_components = self.hierarchy.get_stars()
# TODO: how are we going to handle overcontacts dynamical vs flux-weighted
elif kind in ['mesh']:
# allowed_components = self.hierarchy.get_meshables()
allowed_components = [None]
# allowed_components = self.hierarchy.get_stars()
# TODO: how will this work when changing hierarchy to add/remove the common envelope?
default_components = allowed_components
elif kind in ['etv']:
hier = self.hierarchy
stars = hier.get_stars()
# only include components in which the sibling is also a star that
# means that the companion in a triple cannot be timed, because how
# do we know who it's eclipsing?
allowed_components = [s for s in stars if hier.get_sibling_of(s) in stars]
default_components = allowed_components
elif kind in ['lp']:
# TODO: need to think about what this should be for contacts...
allowed_components = self.hierarchy.get_stars() + self.hierarchy.get_orbits()
default_components = [self.hierarchy.get_top()]
else:
allowed_components = [None]
default_components = [None]
# Let's handle the case where the user accidentally sends components
# instead of component
if kwargs.get('components', None) and component is None:
logger.warning("assuming you meant 'component' instead of 'components'")
components = kwargs.pop('components')
else:
components = component
if isinstance(components, str):
components = [components]
elif hasattr(components, '__iter__'):
components = components
elif components is None:
components = default_components
else:
raise NotImplementedError
# Let's handle the case where the user accidentally sends singular
# instead of plural (since we used to have this)
# TODO: use parameter._singular_to_plural?
for singular, plural in sing_plural.items():
if kwargs.get(singular, None) is not None and kwargs.get(plural, None) is None:
logger.warning("assuming you meant '{}' instead of '{}'".format(plural, singular))
kwargs[plural] = kwargs.pop(singular)
if not np.all([component in allowed_components
for component in components]):
raise ValueError("'{}' not a recognized/allowable component".format(component))
obs_metawargs = {'context': 'dataset',
'kind': kind,
'dataset': kwargs['dataset']}
if kind in ['lp']:
# then times needs to be passed now to duplicate and tag the Parameters
# correctly
obs_kwargs = {'times': kwargs.pop('times', [])}
else:
obs_kwargs = {}
obs_params, constraints = func(**obs_kwargs)
self._attach_params(obs_params, **obs_metawargs)
for constraint in constraints:
# TODO: tricky thing here will be copying the constraints
self.add_constraint(*constraint)
dep_func = _get_add_func(_dataset, "{}_dep".format(kind))
dep_metawargs = {'context': 'dataset',
'kind': '{}_dep'.format(kind),
'dataset': kwargs['dataset']}
dep_params = dep_func()
self._attach_params(dep_params, **dep_metawargs)
# Now we need to apply any kwargs sent by the user. There are a few
# scenarios (and each kwargs could fall into different ones):
# times = [0,1,2]
# in this case, we want to apply time across all of the components that
# are applicable for this dataset kind AND to _default so that any
# future components added to the system are copied appropriately
# times = [0,1,2], components=['primary', 'secondary']
# in this case, we want to apply the value for time across components
# but time@_default should remain empty (it will not copy for components
# added in the future)
# times = {'primary': [0,1], 'secondary': [0,1,2]}
# here, regardless of the components, we want to apply these to their
# individually requested parameters. We won't touch _default unless
# its included in the dictionary
# this needs to happen before kwargs get applied so that the default
# values can be overridden by the supplied kwargs
self._handle_pblum_defaults()
self._handle_dataset_selectparams()
for k, v in kwargs.items():
if isinstance(v, dict):
for component, value in v.items():
logger.debug("setting value of dataset parameter: qualifier={}, dataset={}, component={}, value={}".format(k, kwargs['dataset'], component, value))
try:
self.set_value_all(qualifier=k,
dataset=kwargs['dataset'],
component=component,
value=value,
check_visible=False,
ignore_none=True)
except:
self.remove_dataset(dataset=kwargs['dataset'])
raise ValueError("could not set value for {}={}, dataset has not been added".format(k, value))
elif k in ['dataset']:
pass
else:
# for dataset kinds that include passband dependent AND
# independent parameters, we need to carefully default on
# what component to use when passing the defaults
if kind in ['rv', 'lp'] and k in ['ld_func', 'ld_coeffs',
'passband', 'intens_weighting',
'profile_rest', 'profile_func', 'profile_sv']:
# passband-dependent (ie lc_dep) parameters do not have
# assigned components
components_ = None
elif components == [None]:
components_ = None
elif user_provided_components:
components_ = components
else:
components_ = components+['_default']
logger.debug("setting value of dataset parameter: qualifier={}, dataset={}, component={}, value={}".format(k, kwargs['dataset'], components_, v))
try:
self.set_value_all(qualifier=k,
dataset=kwargs['dataset'],
component=components_,
value=v,
check_visible=False,
ignore_none=True)
except:
self.remove_dataset(dataset=kwargs['dataset'])
raise ValueError("could not set value for {}={}, dataset has not been added".format(k, v))
redo_kwargs = deepcopy({k:v if not isinstance(v, nparray.ndarray) else v.to_json() for k,v in kwargs.items()})
redo_kwargs['func'] = func.func_name
self._add_history(redo_func='add_dataset',
redo_kwargs=redo_kwargs,
undo_func='remove_dataset',
undo_kwargs={'dataset': kwargs['dataset']})
# since we've already processed (so that we can get the new qualifiers),
# we'll only raise a warning
self._kwargs_checks(kwargs, warning_only=True)
return self.filter(dataset=kwargs['dataset']) | Add a new dataset to the bundle. If not provided,
'dataset' (the name of the new dataset) will be created for
you and can be accessed by the 'dataset' attribute of the returned
ParameterSet.
For light curves, the light curve will be generated for the entire system.
For radial velocities, you need to provide a list of components
for which values should be computed.
Available kinds include:
* :func:`phoebe.parameters.dataset.lc`
* :func:`phoebe.parameters.dataset.rv`
* :func:`phoebe.parameters.dataset.etv`
* :func:`phoebe.parameters.dataset.orb`
* :func:`phoebe.parameters.dataset.mesh`
* :func:`phoebe.parameters.dataset.lp`
:parameter kind: function to call that returns a
ParameterSet or list of parameters. This must either be
a callable function that accepts nothing but default
values, or the name of a function (as a string) that can
be found in the :mod:`phoebe.parameters.dataset` module
:type kind: str or callable
:parameter component: a list of
components for which to compute the observables. For
light curves this should be left at None to always compute
the light curve for the entire system. For most other
types, you need to provide at least one component.
:type component: str or list of strings or None
:parameter str dataset: (optional) name of the newly-created dataset
:parameter **kwargs: default values for any of the newly-created
parameters
:return: :class:`phoebe.parameters.parameters.ParameterSet` of
all parameters that have been added
:raises NotImplementedError: if required constraint is not implemented | entailment |
def get_dataset(self, dataset=None, **kwargs):
"""
Filter in the 'dataset' context
:parameter str dataset: name of the dataset (optional)
:parameter **kwargs: any other tags to do the filter
(except dataset or context)
:return: :class:`phoebe.parameters.parameters.ParameterSet`
"""
if dataset is not None:
kwargs['dataset'] = dataset
kwargs['context'] = 'dataset'
if 'kind' in kwargs.keys():
# since we switched how dataset kinds are named, let's just
# automatically handle switching to lowercase
kwargs['kind'] = kwargs['kind'].lower()
return self.filter(**kwargs) | Filter in the 'dataset' context
:parameter str dataset: name of the dataset (optional)
:parameter **kwargs: any other tags to do the filter
(except dataset or context)
:return: :class:`phoebe.parameters.parameters.ParameterSet` | entailment |
def remove_dataset(self, dataset=None, **kwargs):
""" Remove a dataset from the Bundle.
This removes all matching Parameters from the dataset, model, and
constraint contexts (by default if the context tag is not provided).
You must provide some sort of filter or this will raise an Error (so
that all Parameters are not accidentally removed).
:parameter str dataset: name of the dataset
:parameter **kwargs: any other tags to do the filter (except qualifier
and dataset)
:raises ValueError: if no filter is provided
"""
self._kwargs_checks(kwargs)
# Let's avoid deleting ALL parameters from the matching contexts
if dataset is None and not len(kwargs.items()):
raise ValueError("must provide some value to filter for datasets")
# let's handle deps if kind was passed
kind = kwargs.get('kind', None)
if kind is not None:
if isinstance(kind, str):
kind = [kind]
kind_deps = []
for kind_i in kind:
dep = '{}_dep'.format(kind_i)
if dep not in kind:
kind_deps.append(dep)
kind = kind + kind_deps
kwargs['kind'] = kind
if dataset is None:
# then let's find the list of datasets that match the filter,
# we'll then use dataset to do the removing. This avoids leaving
# pararameters behind that don't specifically match the filter
# (ie if kind is passed as 'rv' we still want to remove parameters
# with datasets that are RVs but belong to a different kind in
# another context like compute)
dataset = self.filter(**kwargs).datasets
kwargs['kind'] = None
kwargs['dataset'] = dataset
# Let's avoid the possibility of deleting a single parameter
kwargs['qualifier'] = None
# Let's also avoid the possibility of accidentally deleting system
# parameters, etc
kwargs.setdefault('context', ['dataset', 'model', 'constraint', 'compute'])
# ps = self.filter(**kwargs)
# logger.info('removing {} parameters (this is not undoable)'.\
# format(len(ps)))
# print "*** kwargs", kwargs, len(ps)
self.remove_parameters_all(**kwargs)
# not really sure why we need to call this twice, but it seems to do
# the trick
self.remove_parameters_all(**kwargs)
self._handle_dataset_selectparams()
# TODO: check to make sure that trying to undo this
# will raise an error saying this is not undo-able
self._add_history(redo_func='remove_dataset',
redo_kwargs={'dataset': dataset},
undo_func=None,
undo_kwargs={})
return | Remove a dataset from the Bundle.
This removes all matching Parameters from the dataset, model, and
constraint contexts (by default if the context tag is not provided).
You must provide some sort of filter or this will raise an Error (so
that all Parameters are not accidentally removed).
:parameter str dataset: name of the dataset
:parameter **kwargs: any other tags to do the filter (except qualifier
and dataset)
:raises ValueError: if no filter is provided | entailment |
def rename_dataset(self, old_dataset, new_dataset):
"""
Change the label of a dataset attached to the Bundle
:parameter str old_dataset: the current name of the dataset
(must exist)
:parameter str new_dataset: the desired new name of the dataset
(must not exist)
:return: None
:raises ValueError: if the new_dataset is forbidden
"""
# TODO: raise error if old_component not found?
self._check_label(new_dataset)
self._rename_label('dataset', old_dataset, new_dataset)
self._handle_dataset_selectparams() | Change the label of a dataset attached to the Bundle
:parameter str old_dataset: the current name of the dataset
(must exist)
:parameter str new_dataset: the desired new name of the dataset
(must not exist)
:return: None
:raises ValueError: if the new_dataset is forbidden | entailment |
def enable_dataset(self, dataset=None, **kwargs):
"""
Enable a 'dataset'. Datasets that are enabled will be computed
during :meth:`run_compute` and included in the cost function
during :meth:`run_fitting`.
If compute is not provided, the dataset will be enabled across all
compute options.
:parameter str dataset: name of the dataset
:parameter **kwargs: any other tags to do the filter
(except dataset or context)
:return: :class:`phoebe.parameters.parameters.ParameterSet`
of the enabled dataset
"""
kwargs['context'] = 'compute'
kwargs['dataset'] = dataset
kwargs['qualifier'] = 'enabled'
self.set_value_all(value=True, **kwargs)
self._add_history(redo_func='enable_dataset',
redo_kwargs={'dataset': dataset},
undo_func='disable_dataset',
undo_kwargs={'dataset': dataset})
return self.get_dataset(dataset=dataset) | Enable a 'dataset'. Datasets that are enabled will be computed
during :meth:`run_compute` and included in the cost function
during :meth:`run_fitting`.
If compute is not provided, the dataset will be enabled across all
compute options.
:parameter str dataset: name of the dataset
:parameter **kwargs: any other tags to do the filter
(except dataset or context)
:return: :class:`phoebe.parameters.parameters.ParameterSet`
of the enabled dataset | entailment |
def disable_dataset(self, dataset=None, **kwargs):
"""
Disable a 'dataset'. Datasets that are enabled will be computed
during :meth:`run_compute` and included in the cost function
during :meth:`run_fitting`.
If compute is not provided, the dataset will be disabled across all
compute options.
:parameter str dataset: name of the dataset
:parameter **kwargs: any other tags to do the filter
(except dataset or context)
:return: :class:`phoebe.parameters.parameters.ParameterSet`
of the disabled dataset
"""
kwargs['context'] = 'compute'
kwargs['dataset'] = dataset
kwargs['qualifier'] = 'enabled'
self.set_value_all(value=False, **kwargs)
self._add_history(redo_func='disable_dataset',
redo_kwargs={'dataset': dataset},
undo_func='enable_dataset',
undo_kwargs={'dataset': dataset})
return self.get_dataset(dataset=dataset) | Disable a 'dataset'. Datasets that are enabled will be computed
during :meth:`run_compute` and included in the cost function
during :meth:`run_fitting`.
If compute is not provided, the dataset will be disabled across all
compute options.
:parameter str dataset: name of the dataset
:parameter **kwargs: any other tags to do the filter
(except dataset or context)
:return: :class:`phoebe.parameters.parameters.ParameterSet`
of the disabled dataset | entailment |
def add_constraint(self, *args, **kwargs):
"""
TODO: add documentation
args can be string representation (length 1)
func and strings to pass to function
"""
# TODO: be smart enough to take kwargs (especially for undoing a
# remove_constraint) for kind, value (expression),
redo_kwargs = deepcopy(kwargs)
if len(args) == 1 and \
isinstance(args[0], str) and \
not _get_add_func(_constraint, args[0],
return_none_if_not_found=True):
# then only the expression has been passed,
# we just need to pass it on to constraints.custom
func = constraint.custom
func_args = args
elif len(args) == 2 and \
all([isinstance(arg, Parameter) or
isinstance(arg, ConstraintParameter) for arg in args]):
# then we have 2 constraint expressions
func = constraint.custom
func_args = args
elif len(args) == 0:
# then everything is passed through kwargs
if 'kind' in kwargs.keys():
func = _get_add_func(_constraint, kwargs['kind'])
elif 'func' in kwargs.keys():
func = _get_add_func(_constraint, kwargs['func'])
elif 'constraint_func' in kwargs.keys():
func = _get_add_func(_constraint, kwargs['constraint_func'])
else:
func = constraint.custom
func_args = []
# constraint_param = ConstraintParameter(self, **kwargs)
else:
# then we've been passed the function in constraints and its
# arguments
func = _get_add_func(_constraint, args[0])
func_args = args[1:]
if 'solve_for' in kwargs.keys():
# solve_for is a twig, we need to pass the parameter
kwargs['solve_for'] = self.get_parameter(kwargs['solve_for'])
lhs, rhs, constraint_kwargs = func(self, *func_args, **kwargs)
# NOTE that any component parameters required have already been
# created by this point
constraint_param = ConstraintParameter(self,
qualifier=lhs.qualifier,
component=lhs.component,
dataset=lhs.dataset,
feature=lhs.feature,
kind=lhs.kind,
model=lhs.model,
constraint_func=func.__name__,
constraint_kwargs=constraint_kwargs,
in_solar_units=func.__name__ not in constraint.list_of_constraints_requiring_si,
value=rhs,
default_unit=lhs.default_unit,
description='expression that determines the constraint')
newly_constrained_param = constraint_param.get_constrained_parameter()
check_kwargs = {k:v for k,v in newly_constrained_param.meta.items() if k not in ['context', 'twig', 'uniquetwig']}
check_kwargs['context'] = 'constraint'
if len(self._bundle.filter(**check_kwargs)):
raise ValueError("'{}' is already constrained".format(newly_constrained_param.twig))
metawargs = {'context': 'constraint',
'kind': func.func_name}
params = ParameterSet([constraint_param])
constraint_param._update_bookkeeping()
self._attach_params(params, **metawargs)
redo_kwargs['func'] = func.func_name
self._add_history(redo_func='add_constraint',
redo_kwargs=redo_kwargs,
undo_func='remove_constraint',
undo_kwargs={'uniqueid': constraint_param.uniqueid})
# we should run it now to make sure everything is in-sync
if conf.interactive_constraints:
self.run_constraint(uniqueid=constraint_param.uniqueid, skip_kwargs_checks=True)
else:
self._delayed_constraints.append(constraint_param.uniqueid)
return params | TODO: add documentation
args can be string representation (length 1)
func and strings to pass to function | entailment |
def get_constraint(self, twig=None, **kwargs):
"""
Filter in the 'constraint' context
:parameter str constraint: name of the constraint (optional)
:parameter **kwargs: any other tags to do the filter
(except constraint or context)
:return: :class:`phoebe.parameters.parameters.ParameterSet`
"""
if twig is not None:
kwargs['twig'] = twig
kwargs['context'] = 'constraint'
return self.get(**kwargs) | Filter in the 'constraint' context
:parameter str constraint: name of the constraint (optional)
:parameter **kwargs: any other tags to do the filter
(except constraint or context)
:return: :class:`phoebe.parameters.parameters.ParameterSet` | entailment |
def remove_constraint(self, twig=None, **kwargs):
"""
Remove a 'constraint' from the bundle
:parameter str twig: twig to filter for the constraint
:parameter **kwargs: any other tags to do the filter
(except twig or context)
"""
# let's run delayed constraints first to ensure that we get the same
# results in interactive and non-interactive modes as well as to make
# sure we don't have delayed constraints for the constraint we're
# about to remove. This could perhaps be optimized by searching
# for this/these constraints and only running/removing those, but
# probably isn't worth the savings.
changed_params = self.run_delayed_constraints()
kwargs['twig'] = twig
redo_kwargs = deepcopy(kwargs)
kwargs['context'] = 'constraint'
# we'll get the constraint so that we can undo the bookkeeping
# and also reproduce an undo command
constraint = self.get_parameter(**kwargs)
# undo parameter bookkeeping
constraint._remove_bookkeeping()
# and finally remove it
self.remove_parameter(**kwargs)
undo_kwargs = {k: v for k, v in constraint.to_dict().items()
if v is not None and
k not in ['uniqueid', 'uniquetwig', 'twig',
'Class', 'context']}
self._add_history(redo_func='remove_constraint',
redo_kwargs=redo_kwargs,
undo_func='add_constraint',
undo_kwargs=undo_kwargs) | Remove a 'constraint' from the bundle
:parameter str twig: twig to filter for the constraint
:parameter **kwargs: any other tags to do the filter
(except twig or context) | entailment |
def flip_constraint(self, twig=None, solve_for=None, **kwargs):
"""
Flip an existing constraint to solve for a different parameter
:parameter str twig: twig to filter the constraint
:parameter solve_for: twig or actual parameter object of the new
parameter which this constraint should constraint (solve for).
:type solve_for: str or :class:`phoebe.parameters.parameters.Parameter
:parameter **kwargs: any other tags to do the filter
(except twig or context)
"""
self._kwargs_checks(kwargs, additional_allowed_keys=['check_nan'])
kwargs['twig'] = twig
redo_kwargs = deepcopy(kwargs)
undo_kwargs = deepcopy(kwargs)
changed_params = self.run_delayed_constraints()
param = self.get_constraint(**kwargs)
if kwargs.pop('check_nan', True) and np.any(np.isnan([p.get_value() for p in param.vars.to_list()])):
raise ValueError("cannot flip constraint while the value of {} is nan".format([p.twig for p in param.vars.to_list() if np.isnan(p.get_value())]))
if solve_for is None:
return param
if isinstance(solve_for, Parameter):
solve_for = solve_for.uniquetwig
redo_kwargs['solve_for'] = solve_for
undo_kwargs['solve_for'] = param.constrained_parameter.uniquetwig
logger.info("flipping constraint '{}' to solve for '{}'".format(param.uniquetwig, solve_for))
param.flip_for(solve_for)
result = self.run_constraint(uniqueid=param.uniqueid, skip_kwargs_checks=True)
self._add_history(redo_func='flip_constraint',
redo_kwargs=redo_kwargs,
undo_func='flip_constraint',
undo_kwargs=undo_kwargs)
return param | Flip an existing constraint to solve for a different parameter
:parameter str twig: twig to filter the constraint
:parameter solve_for: twig or actual parameter object of the new
parameter which this constraint should constraint (solve for).
:type solve_for: str or :class:`phoebe.parameters.parameters.Parameter
:parameter **kwargs: any other tags to do the filter
(except twig or context) | entailment |
def run_constraint(self, twig=None, return_parameter=False, **kwargs):
"""
Run a given 'constraint' now and set the value of the constrained
parameter. In general, there shouldn't be any need to manually
call this - constraints should automatically be run whenever a
dependent parameter's value is change.
:parameter str twig: twig to filter for the constraint
:parameter **kwargs: any other tags to do the filter
(except twig or context)
:return: the resulting value of the constraint
:rtype: float or units.Quantity
"""
if not kwargs.get('skip_kwargs_checks', False):
self._kwargs_checks(kwargs)
kwargs['twig'] = twig
kwargs['context'] = 'constraint'
# kwargs['qualifier'] = 'expression'
kwargs['check_visible'] = False
kwargs['check_default'] = False
# print "***", kwargs
expression_param = self.get_parameter(**kwargs)
kwargs = {}
kwargs['twig'] = None
# TODO: this might not be the case, we just know its not in constraint
kwargs['qualifier'] = expression_param.qualifier
kwargs['component'] = expression_param.component
kwargs['dataset'] = expression_param.dataset
kwargs['feature'] = expression_param.feature
kwargs['context'] = []
if kwargs['component'] is not None:
kwargs['context'] += ['component']
if kwargs['dataset'] is not None:
kwargs['context'] += ['dataset']
if kwargs['feature'] is not None:
kwargs['context'] += ['feature']
kwargs['check_visible'] = False
kwargs['check_default'] = False
constrained_param = self.get_parameter(**kwargs)
result = expression_param.result
constrained_param.set_value(result, force=True, run_constraints=True)
logger.debug("setting '{}'={} from '{}' constraint".format(constrained_param.uniquetwig, result, expression_param.uniquetwig))
if return_parameter:
return constrained_param
else:
return result | Run a given 'constraint' now and set the value of the constrained
parameter. In general, there shouldn't be any need to manually
call this - constraints should automatically be run whenever a
dependent parameter's value is change.
:parameter str twig: twig to filter for the constraint
:parameter **kwargs: any other tags to do the filter
(except twig or context)
:return: the resulting value of the constraint
:rtype: float or units.Quantity | entailment |
def compute_pblums(self, compute=None, **kwargs):
"""
Compute the passband luminosities that will be applied to the system,
following all coupling, etc, as well as all relevant compute options
(ntriangles, distortion_method, etc). The exposed passband luminosities
(and any coupling) are computed at t0@system.
This method is only for convenience and will be recomputed internally
within run_compute. Alternatively, you can create a mesh dataset
and request any specific pblum to be exposed (per-time).
:parameter str compute: label of the compute options (note required if
only one is attached to the bundle)
:parameter component: (optional) label of the component(s) requested
:type component: str or list of strings
:parameter dataset: (optional) label of the dataset(s) requested
:type dataset: str or list of strings
:parameter component: (optional) label of the component(s) requested
:type component: str or list of strings
:return: dictionary with keys <component>@<dataset> and computed pblums
as values (as quantity objects, default units of W)
"""
datasets = kwargs.pop('dataset', self.datasets)
components = kwargs.pop('component', self.components)
# don't allow things like model='mymodel', etc
forbidden_keys = parameters._meta_fields_filter
self._kwargs_checks(kwargs, additional_forbidden_keys=forbidden_keys)
if compute is None:
if len(self.computes)==1:
compute = self.computes[0]
else:
raise ValueError("must provide compute")
system = backends.PhoebeBackend()._create_system_and_compute_pblums(self, compute, **kwargs)
pblums = {}
for component, star in system.items():
if component not in components:
continue
for dataset in star._pblum_scale.keys():
if dataset not in datasets:
continue
pblums["{}@{}".format(component, dataset)] = float(star.compute_luminosity(dataset)) * u.W
return pblums | Compute the passband luminosities that will be applied to the system,
following all coupling, etc, as well as all relevant compute options
(ntriangles, distortion_method, etc). The exposed passband luminosities
(and any coupling) are computed at t0@system.
This method is only for convenience and will be recomputed internally
within run_compute. Alternatively, you can create a mesh dataset
and request any specific pblum to be exposed (per-time).
:parameter str compute: label of the compute options (note required if
only one is attached to the bundle)
:parameter component: (optional) label of the component(s) requested
:type component: str or list of strings
:parameter dataset: (optional) label of the dataset(s) requested
:type dataset: str or list of strings
:parameter component: (optional) label of the component(s) requested
:type component: str or list of strings
:return: dictionary with keys <component>@<dataset> and computed pblums
as values (as quantity objects, default units of W) | entailment |
def add_compute(self, kind=compute.phoebe, **kwargs):
"""
Add a set of computeoptions for a given backend to the bundle.
The label ('compute') can then be sent to :meth:`run_compute`.
If not provided, 'compute' will be created for you and can be
accessed by the 'compute' attribute of the returned
ParameterSet.
Available kinds include:
* :func:`phoebe.parameters.compute.phoebe`
* :func:`phoebe.parameters.compute.legacy`
* :func:`phoebe.parameters.compute.photodynam`
* :func:`phoebe.parameters.compute.jktebop`
:parameter kind: function to call that returns a
ParameterSet or list of parameters. This must either be
a callable function that accepts nothing but default
values, or the name of a function (as a string) that can
be found in the :mod:`phoebe.parameters.compute` module
:type kind: str or callable
:parameter str compute: (optional) name of the newly-created
compute optins
:parameter **kwargs: default values for any of the newly-created
parameters
:return: :class:`phoebe.parameters.parameters.ParameterSet` of
all parameters that have been added
:raises NotImplementedError: if required constraint is not implemented
"""
func = _get_add_func(_compute, kind)
kwargs.setdefault('compute',
self._default_label(func.func_name,
**{'context': 'compute',
'kind': func.func_name}))
self._check_label(kwargs['compute'])
params = func(**kwargs)
# TODO: similar kwargs logic as in add_dataset (option to pass dict to
# apply to different components this would be more complicated here if
# allowing to also pass to different datasets
metawargs = {'context': 'compute',
'kind': func.func_name,
'compute': kwargs['compute']}
logger.info("adding {} '{}' compute to bundle".format(metawargs['kind'], metawargs['compute']))
self._attach_params(params, **metawargs)
redo_kwargs = deepcopy(kwargs)
redo_kwargs['func'] = func.func_name
self._add_history(redo_func='add_compute',
redo_kwargs=redo_kwargs,
undo_func='remove_compute',
undo_kwargs={'compute': kwargs['compute']})
# since we've already processed (so that we can get the new qualifiers),
# we'll only raise a warning
self._kwargs_checks(kwargs, warning_only=True)
return self.get_compute(**metawargs) | Add a set of computeoptions for a given backend to the bundle.
The label ('compute') can then be sent to :meth:`run_compute`.
If not provided, 'compute' will be created for you and can be
accessed by the 'compute' attribute of the returned
ParameterSet.
Available kinds include:
* :func:`phoebe.parameters.compute.phoebe`
* :func:`phoebe.parameters.compute.legacy`
* :func:`phoebe.parameters.compute.photodynam`
* :func:`phoebe.parameters.compute.jktebop`
:parameter kind: function to call that returns a
ParameterSet or list of parameters. This must either be
a callable function that accepts nothing but default
values, or the name of a function (as a string) that can
be found in the :mod:`phoebe.parameters.compute` module
:type kind: str or callable
:parameter str compute: (optional) name of the newly-created
compute optins
:parameter **kwargs: default values for any of the newly-created
parameters
:return: :class:`phoebe.parameters.parameters.ParameterSet` of
all parameters that have been added
:raises NotImplementedError: if required constraint is not implemented | entailment |
def get_compute(self, compute=None, **kwargs):
"""
Filter in the 'compute' context
:parameter str compute: name of the compute options (optional)
:parameter **kwargs: any other tags to do the filter
(except compute or context)
:return: :class:`phoebe.parameters.parameters.ParameterSet`
"""
if compute is not None:
kwargs['compute'] = compute
kwargs['context'] = 'compute'
return self.filter(**kwargs) | Filter in the 'compute' context
:parameter str compute: name of the compute options (optional)
:parameter **kwargs: any other tags to do the filter
(except compute or context)
:return: :class:`phoebe.parameters.parameters.ParameterSet` | entailment |
def remove_compute(self, compute, **kwargs):
"""
Remove a 'compute' from the bundle
:parameter str compute: name of the compute options
:parameter **kwargs: any other tags to do the filter
(except twig or context)
:raise NotImplementedError: because it isn't
"""
kwargs['compute'] = compute
kwargs['context'] = 'comute'
self.remove_parameters_all(**kwargs) | Remove a 'compute' from the bundle
:parameter str compute: name of the compute options
:parameter **kwargs: any other tags to do the filter
(except twig or context)
:raise NotImplementedError: because it isn't | entailment |
def rename_compute(self, old_compute, new_compute):
"""
Change the label of a compute attached to the Bundle
:parameter str old_compute: the current name of the compute options
(must exist)
:parameter str new_compute: the desired new name of the compute options
(must not exist)
:return: None
:raises ValueError: if the new_compute is forbidden
"""
# TODO: raise error if old_compute not found?
self._check_label(new_compute)
self._rename_label('compute', old_compute, new_compute) | Change the label of a compute attached to the Bundle
:parameter str old_compute: the current name of the compute options
(must exist)
:parameter str new_compute: the desired new name of the compute options
(must not exist)
:return: None
:raises ValueError: if the new_compute is forbidden | entailment |
def run_compute(self, compute=None, model=None, detach=False,
times=None, **kwargs):
"""
Run a forward model of the system on the enabled dataset using
a specified set of compute options.
To attach and set custom values for compute options, including choosing
which backend to use, see:
* :meth:`add_compute`
To define the dataset types and times at which the model should be
computed see:
* :meth:`add_dataset`
To disable or enable existing datasets see:
* :meth:`enable_dataset`
* :meth:`disable_dataset`
:parameter str compute: (optional) name of the compute options to use.
If not provided or None, run_compute will use an existing set of
attached compute options if only 1 exists. If more than 1 exist,
then compute becomes a required argument. If no compute options
exist, then this will use default options and create and attach
a new set of compute options with a default label.
:parameter str model: (optional) name of the resulting model. If not
provided this will default to 'latest'. NOTE: existing models
with the same name will be overwritten - including 'latest'
:parameter bool datach: [EXPERIMENTAL] whether to detach from the computation run,
or wait for computations to complete. If detach is True, see
:meth:`get_model` and :meth:`phoebe.parameters.parameters.JobParameter`
for details on how to check the job status and retrieve the results.
Alternatively, you can provide the server location (host and port) as
a string to detach and the bundle will temporarily enter client mode,
submit the job to the server, and leave client mode. The resulting
:meth:`phoebe.parameters.parameters.JobParameter` will then contain
the necessary information to pull the results from the server at anytime
in the future.
:parameter list times: [EXPERIMENTAL] override the times at which to compute the model.
NOTE: this only (temporarily) replaces the time array for datasets
with times provided (ie empty time arrays are still ignored). So if
you attach a rv to a single component, the model will still only
compute for that single component. ALSO NOTE: this option is ignored
if detach=True (at least for now).
:parameter **kwargs: any values in the compute options to temporarily
override for this single compute run (parameter values will revert
after run_compute is finished)
:return: :class:`phoebe.parameters.parameters.ParameterSet` of the
newly-created model containing the synthetic data.
"""
if isinstance(detach, str):
# then we want to temporarily go in to client mode
self.as_client(server=detach)
self.run_compute(compute=compute, model=model, time=time, **kwargs)
self.as_client(False)
return self.get_model(model)
# protomesh and pbmesh were supported kwargs in 2.0.x but are no longer
# so let's raise an error if they're passed here
if 'protomesh' in kwargs.keys():
raise ValueError("protomesh is no longer a valid option")
if 'pbmesh' in kwargs.keys():
raise ValueError("pbmesh is no longer a valid option")
if model is None:
model = 'latest'
if model in self.models:
logger.warning("overwriting model: {}".format(model))
self.remove_model(model)
self._check_label(model)
if isinstance(times, float) or isinstance(times, int):
times = [times]
# handle case where compute is not provided
if compute is None:
computes = self.get_compute(**kwargs).computes
if len(computes)==0:
# NOTE: this doesn't take **kwargs since we want those to be
# temporarily overriden as is the case when the compute options
# are already attached
self.add_compute()
computes = self.get_compute().computes
# now len(computes) should be 1 and will trigger the next
# if statement
if len(computes)==1:
compute = computes[0]
elif len(computes)>1:
raise ValueError("must provide label of compute options since more than one are attached")
# handle the ability to send multiple compute options/backends - here
# we'll just always send a list of compute options
if isinstance(compute, str):
computes = [compute]
else:
computes = compute
# if interactive mode was ever off, let's make sure all constraints
# have been run before running system checks or computing the model
changed_params = self.run_delayed_constraints()
# any kwargs that were used just to filter for get_compute should be
# removed so that they aren't passed on to all future get_value(...
# **kwargs) calls
for k in parameters._meta_fields_filter:
if k in kwargs.keys():
dump = kwargs.pop(k)
# we'll wait to here to run kwargs and system checks so that
# add_compute is already called if necessary
self._kwargs_checks(kwargs, ['skip_checks', 'jobid'])
if not kwargs.get('skip_checks', False):
passed, msg = self.run_checks(computes=computes, **kwargs)
if passed is None:
# then just raise a warning
logger.warning(msg)
if passed is False:
# then raise an error
raise ValueError("system failed to pass checks: {}".format(msg))
# let's first make sure that there is no duplication of enabled datasets
datasets = []
# compute_ so we don't write over compute which we need if detach=True
for compute_ in computes:
# TODO: filter by value instead of if statement once implemented
for enabled_param in self.filter(qualifier='enabled',
compute=compute_,
context='compute').to_list():
if enabled_param.get_value():
item = (enabled_param.dataset, enabled_param.component)
if item in datasets:
raise ValueError("dataset {}@{} is enabled in multiple compute options".format(item[0], item[1]))
datasets.append(item)
# now if we're supposed to detach we'll just prepare the job for submission
# either in another subprocess or through some queuing system
if detach and mpi.within_mpirun:
logger.warning("cannot detach when within mpirun, ignoring")
detach = False
if (detach or mpi.enabled) and not mpi.within_mpirun:
if detach:
logger.warning("detach support is EXPERIMENTAL")
if times is not None:
# TODO: support overriding times with detached - issue here is
# that it isn't necessarilly trivially to send this array
# through the script. May need to convert to list first to
# avoid needing to import numpy?
logger.warning("overriding time is not supported within detach - ignoring")
# we'll track everything through the model name as well as
# a random string, to avoid any conflicts
jobid = kwargs.get('jobid', parameters._uniqueid())
# we'll build a python script that can replicate this bundle as it
# is now, run compute, and then save the resulting model
script_fname = "_{}.py".format(jobid)
f = open(script_fname, 'w')
f.write("import os; os.environ['PHOEBE_ENABLE_PLOTTING'] = 'FALSE'; os.environ['PHOEBE_ENABLE_SYMPY'] = 'FALSE'; os.environ['PHOEBE_ENABLE_ONLINE_PASSBANDS'] = 'FALSE';\n")
f.write("import phoebe; import json\n")
# TODO: can we skip the history context? And maybe even other models
# or datasets (except times and only for run_compute but not run_fitting)
f.write("bdict = json.loads(\"\"\"{}\"\"\")\n".format(json.dumps(self.to_json())))
f.write("b = phoebe.Bundle(bdict)\n")
# TODO: make sure this works with multiple computes
compute_kwargs = kwargs.items()+[('compute', compute), ('model', model)]
compute_kwargs_string = ','.join(["{}={}".format(k,"\'{}\'".format(v) if isinstance(v, str) else v) for k,v in compute_kwargs])
f.write("model_ps = b.run_compute({})\n".format(compute_kwargs_string))
f.write("model_ps.save('_{}.out', incl_uniqueid=True)\n".format(jobid))
f.close()
script_fname = os.path.abspath(script_fname)
cmd = mpi.detach_cmd.format(script_fname)
# TODO: would be nice to catch errors caused by the detached script...
# but that would probably need to be the responsibility of the
# jobparam to return a failed status and message
subprocess.call(cmd, shell=True)
# create model parameter and attach (and then return that instead of None)
job_param = JobParameter(self,
location=os.path.dirname(script_fname),
status_method='exists',
retrieve_method='local',
uniqueid=jobid)
metawargs = {'context': 'model', 'model': model}
self._attach_params([job_param], **metawargs)
if isinstance(detach, str):
self.save(detach)
if not detach:
return job_param.attach()
else:
logger.info("detaching from run_compute. Call get_model('{}').attach() to re-attach".format(model))
# return self.get_model(model)
return job_param
for compute in computes:
computeparams = self.get_compute(compute=compute)
if not computeparams.kind:
raise KeyError("could not recognize backend from compute: {}".format(compute))
logger.info("running {} backend to create '{}' model".format(computeparams.kind, model))
compute_class = getattr(backends, '{}Backend'.format(computeparams.kind.title()))
# compute_func = getattr(backends, computeparams.kind)
metawargs = {'compute': compute, 'model': model, 'context': 'model'} # dataset, component, etc will be set by the compute_func
params = compute_class().run(self, compute, times=times, **kwargs)
# average over any exposure times before attaching parameters
if computeparams.kind == 'phoebe':
# TODO: we could eventually do this for all backends - we would
# just need to copy the computeoption parameters into each backend's
# compute PS, and include similar logic for oversampling that is
# currently in backends._extract_info_from_bundle_by_time into
# backends._extract_info_from_bundle_by_dataset. We'd also
# need to make sure that exptime is not being passed to any
# alternate backend - and ALWAYS handle it here
for dataset in params.datasets:
# not all dataset-types currently support exposure times.
# Once they do, this ugly if statement can be removed
if len(self.filter(dataset=dataset, qualifier='exptime')):
exptime = self.get_value(qualifier='exptime', dataset=dataset, context='dataset', unit=u.d)
if exptime > 0:
if self.get_value(qualifier='fti_method', dataset=dataset, compute=compute, context='compute', **kwargs)=='oversample':
times_ds = self.get_value(qualifier='times', dataset=dataset, context='dataset')
# exptime = self.get_value(qualifier='exptime', dataset=dataset, context='dataset', unit=u.d)
fti_oversample = self.get_value(qualifier='fti_oversample', dataset=dataset, compute=compute, context='compute', check_visible=False, **kwargs)
# NOTE: this is hardcoded for LCs which is the
# only dataset that currently supports oversampling,
# but this will need to be generalized if/when
# we expand that support to other dataset kinds
fluxes = np.zeros(times_ds.shape)
# the oversampled times and fluxes will be
# sorted according to times this may cause
# exposures to "overlap" each other, so we'll
# later need to determine which times (and
# therefore fluxes) belong to which datapoint
times_oversampled_sorted = params.get_value('times', dataset=dataset)
fluxes_oversampled = params.get_value('fluxes', dataset=dataset)
for i,t in enumerate(times_ds):
# rebuild the unsorted oversampled times - see backends._extract_from_bundle_by_time
# TODO: try to optimize this by having these indices returned by the backend itself
times_oversampled_this = np.linspace(t-exptime/2., t+exptime/2., fti_oversample)
sample_inds = np.searchsorted(times_oversampled_sorted, times_oversampled_this)
fluxes[i] = np.mean(fluxes_oversampled[sample_inds])
params.set_value(qualifier='times', dataset=dataset, value=times_ds)
params.set_value(qualifier='fluxes', dataset=dataset, value=fluxes)
self._attach_params(params, **metawargs)
redo_kwargs = deepcopy(kwargs)
redo_kwargs['compute'] = computes if len(computes)>1 else computes[0]
redo_kwargs['model'] = model
self._add_history(redo_func='run_compute',
redo_kwargs=redo_kwargs,
undo_func='remove_model',
undo_kwargs={'model': model})
return self.get_model(model) | Run a forward model of the system on the enabled dataset using
a specified set of compute options.
To attach and set custom values for compute options, including choosing
which backend to use, see:
* :meth:`add_compute`
To define the dataset types and times at which the model should be
computed see:
* :meth:`add_dataset`
To disable or enable existing datasets see:
* :meth:`enable_dataset`
* :meth:`disable_dataset`
:parameter str compute: (optional) name of the compute options to use.
If not provided or None, run_compute will use an existing set of
attached compute options if only 1 exists. If more than 1 exist,
then compute becomes a required argument. If no compute options
exist, then this will use default options and create and attach
a new set of compute options with a default label.
:parameter str model: (optional) name of the resulting model. If not
provided this will default to 'latest'. NOTE: existing models
with the same name will be overwritten - including 'latest'
:parameter bool datach: [EXPERIMENTAL] whether to detach from the computation run,
or wait for computations to complete. If detach is True, see
:meth:`get_model` and :meth:`phoebe.parameters.parameters.JobParameter`
for details on how to check the job status and retrieve the results.
Alternatively, you can provide the server location (host and port) as
a string to detach and the bundle will temporarily enter client mode,
submit the job to the server, and leave client mode. The resulting
:meth:`phoebe.parameters.parameters.JobParameter` will then contain
the necessary information to pull the results from the server at anytime
in the future.
:parameter list times: [EXPERIMENTAL] override the times at which to compute the model.
NOTE: this only (temporarily) replaces the time array for datasets
with times provided (ie empty time arrays are still ignored). So if
you attach a rv to a single component, the model will still only
compute for that single component. ALSO NOTE: this option is ignored
if detach=True (at least for now).
:parameter **kwargs: any values in the compute options to temporarily
override for this single compute run (parameter values will revert
after run_compute is finished)
:return: :class:`phoebe.parameters.parameters.ParameterSet` of the
newly-created model containing the synthetic data. | entailment |
def get_model(self, model=None, **kwargs):
"""
Filter in the 'model' context
:parameter str model: name of the model (optional)
:parameter **kwargs: any other tags to do the filter
(except model or context)
:return: :class:`phoebe.parameters.parameters.ParameterSet`
"""
if model is not None:
kwargs['model'] = model
kwargs['context'] = 'model'
return self.filter(**kwargs) | Filter in the 'model' context
:parameter str model: name of the model (optional)
:parameter **kwargs: any other tags to do the filter
(except model or context)
:return: :class:`phoebe.parameters.parameters.ParameterSet` | entailment |
def remove_model(self, model, **kwargs):
"""
Remove a 'model' from the bundle
:parameter str twig: twig to filter for the model
:parameter **kwargs: any other tags to do the filter
(except twig or context)
"""
kwargs['model'] = model
kwargs['context'] = 'model'
self.remove_parameters_all(**kwargs) | Remove a 'model' from the bundle
:parameter str twig: twig to filter for the model
:parameter **kwargs: any other tags to do the filter
(except twig or context) | entailment |
def rename_model(self, old_model, new_model):
"""
Change the label of a model attached to the Bundle
:parameter str old_model: the current name of the model
(must exist)
:parameter str new_model: the desired new name of the model
(must not exist)
:return: None
:raises ValueError: if the new_model is forbidden
"""
# TODO: raise error if old_feature not found?
self._check_label(new_model)
self._rename_label('model', old_model, new_model) | Change the label of a model attached to the Bundle
:parameter str old_model: the current name of the model
(must exist)
:parameter str new_model: the desired new name of the model
(must not exist)
:return: None
:raises ValueError: if the new_model is forbidden | entailment |
def add_prior(self, twig=None, **kwargs):
"""
[NOT IMPLEMENTED]
:raises NotImplementedError: because it isn't
"""
raise NotImplementedError
param = self.get_parameter(twig=twig, **kwargs)
# TODO: make sure param is a float parameter?
func = _get_add_func(_distributions, 'prior')
# TODO: send smart defaults for priors based on limits of parameter
params = func(**kwargs)
metawargs = {k: v for k, v in params.meta.items()
if k not in ['uniqueid', 'uniquetwig', 'twig']}
metawargs['context'] = 'prior'
logger.info("adding prior on '{}' parameter".format(param.uniquetwig))
self._attach_params(params, **metawargs)
redo_kwargs = deepcopy(kwargs)
redo_kwargs['func'] = func.func_name
self._add_history(redo_func='add_prior',
redo_kwargs=redo_kwargs,
undo_func='remove_prior',
undo_kwargs={'twig': param.uniquetwig})
# return params
return self.get_prior(**metawargs) | [NOT IMPLEMENTED]
:raises NotImplementedError: because it isn't | entailment |
def get_prior(self, twig=None, **kwargs):
"""
[NOT IMPLEMENTED]
:raises NotImplementedError: because it isn't
"""
raise NotImplementedError
kwargs['context'] = 'prior'
return self.filter(twig=twig, **kwargs) | [NOT IMPLEMENTED]
:raises NotImplementedError: because it isn't | entailment |
def get_fitting(self, fitting=None, **kwargs):
"""
[NOT IMPLEMENTED]
:raises NotImplementedError: because it isn't
"""
raise NotImplementedError
if fitting is not None:
kwargs['fitting'] = fitting
kwargs['context'] = 'fitting'
return self.filter(**kwargs) | [NOT IMPLEMENTED]
:raises NotImplementedError: because it isn't | entailment |
def get_posterior(self, twig=None, feedback=None, **kwargs):
"""
[NOT IMPLEMENTED]
:raises NotImplementedError: because it isn't
"""
raise NotImplementedError
kwargs['context'] = 'posterior'
return self.filter(twig=twig, **kwargs) | [NOT IMPLEMENTED]
:raises NotImplementedError: because it isn't | entailment |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.