code string | signature string | docstring string | loss_without_docstring float64 | loss_with_docstring float64 | factor float64 |
|---|---|---|---|---|---|
data = data or {}
return self.post(vehicle_id, 'command/%s' % name, data,
wake_if_asleep=wake_if_asleep) | def command(self, vehicle_id, name, data=None, wake_if_asleep=True) | Post name command to the vehicle_id.
Parameters
----------
vehicle_id : string
Identifier for the car on the owner-api endpoint. Confusingly it
is not the vehicle_id field for identifying the car across
different endpoints.
https://tesla-api.timdorr.com/api-basics/vehicles#vehicle_id-vs-id
name : string
Tesla API command. https://tesla-api.timdorr.com/vehicle/commands
data : dict
Optional parameters.
wake_if_asleep : bool
Function for underlying api call for whether a failed response
should wake up the vehicle or retry.
Returns
-------
dict
Tesla json object. | 2.876119 | 4.485562 | 0.641195 |
cur_time = time.time()
with self.__lock:
# Update the online cars using get_vehicles()
last_update = self._last_attempted_update_time
if (force or cur_time - last_update > self.update_interval):
cars = self.get_vehicles()
for car in cars:
self.car_online[car['id']] = (car['state'] == 'online')
self._last_attempted_update_time = cur_time
# Only update online vehicles that haven't been updated recently
        # The throttling is per car's last successful update
# Note: This separate check is because there may be individual cars
# to update.
update_succeeded = False
for id_, value in self.car_online.items():
# If specific car_id provided, only update match
if (car_id is not None and car_id != id_):
continue
if (value and # pylint: disable=too-many-boolean-expressions
(id_ in self.__update and self.__update[id_]) and
(force or id_ not in self._last_update_time or
((cur_time - self._last_update_time[id_]) >
self.update_interval))):
# Only update cars with update flag on
try:
data = self.get(id_, 'data', wake_if_asleep)
except TeslaException:
data = None
if data and data['response']:
response = data['response']
self.__climate[car_id] = response['climate_state']
self.__charging[car_id] = response['charge_state']
self.__state[car_id] = response['vehicle_state']
self.__driving[car_id] = response['drive_state']
self.__gui[car_id] = response['gui_settings']
self.car_online[car_id] = (response['state']
== 'online')
self._last_update_time[car_id] = time.time()
update_succeeded = True
return update_succeeded | def update(self, car_id=None, wake_if_asleep=False, force=False) | Update all vehicle attributes in the cache.
This command will connect to the Tesla API and first update the list of
online vehicles assuming no attempt for at least the [update_interval].
It will then update all the cached values for cars that are awake
assuming no update has occurred for at least the [update_interval].
Args:
inst (Controller): The instance of a controller
car_id (string): The vehicle to update. If None, all cars are updated.
wake_if_asleep (bool): Keyword arg to force a vehicle awake. This is
processed by the wake_up decorator.
force (bool): Keyword arg to force a vehicle update regardless of the
update_interval
Returns:
True if any update succeeded for any vehicle else false
Throws:
RetryLimitError | 3.45146 | 3.260448 | 1.058585 |
self._controller.update(self._id, wake_if_asleep=False)
data = self._controller.get_charging_params(self._id)
if data:
self.__battery_level = data['battery_level']
self.__charging_state = data['charging_state'] | def update(self) | Update the battery state. | 4.780776 | 4.015455 | 1.190594 |
self._controller.update(self._id, wake_if_asleep=False)
data = self._controller.get_charging_params(self._id)
if data:
self.__battery_range = data['battery_range']
self.__est_battery_range = data['est_battery_range']
self.__ideal_battery_range = data['ideal_battery_range']
data = self._controller.get_gui_params(self._id)
if data:
if data['gui_distance_units'] == "mi/hr":
self.measurement = 'LENGTH_MILES'
else:
self.measurement = 'LENGTH_KILOMETERS'
self.__rated = (data['gui_range_display'] == "Rated") | def update(self) | Update the battery range state. | 4.130232 | 3.752042 | 1.100796 |
# pylint: disable=protected-access
return (not self._controller.car_online[self.id()] and
(self._controller._last_update_time[self.id()] -
self._controller._last_wake_up_time[self.id()] >
self._controller.update_interval)) | def assumed_state(self) | Return whether the data is from an online vehicle. | 7.226022 | 5.52518 | 1.307835 |
self._controller.update(self._id, wake_if_asleep=False)
data = self._controller.get_drive_params(self._id)
if data:
self.__longitude = data['longitude']
self.__latitude = data['latitude']
self.__heading = data['heading']
if self.__longitude and self.__latitude and self.__heading:
self.__location = {'longitude': self.__longitude,
'latitude': self.__latitude,
'heading': self.__heading} | def update(self) | Update the current GPS location. | 3.462159 | 3.172534 | 1.091291 |
self._controller.update(self._id, wake_if_asleep=False)
data = self._controller.get_state_params(self._id)
if data:
self.__odometer = data['odometer']
data = self._controller.get_gui_params(self._id)
if data:
if data['gui_distance_units'] == "mi/hr":
self.measurement = 'LENGTH_MILES'
else:
self.measurement = 'LENGTH_KILOMETERS'
self.__rated = (data['gui_range_display'] == "Rated") | def update(self) | Update the odometer and the unit of measurement based on GUI. | 5.455713 | 4.978058 | 1.095952 |
obs_params = []
syn_params, constraints = lc_syn(syn=False, **kwargs)
obs_params += syn_params.to_list()
#obs_params += lc_dep(**kwargs).to_list()
#~ obs_params += [FloatArrayParameter(qualifier='flag', value=kwargs.get('flag', []), default_unit=None, description='Signal flag')]
#~ obs_params += [FloatArrayParameter(qualifier='weight', value=kwargs.get('weight', []), default_unit=None, description='Signal weight')]
#~ obs_params += [FloatParameter(qualifier='timeoffset', value=kwargs.get('timeoffset', 0.0), default_unit=u.d, description='Zeropoint date offset for observations')]
#~ obs_params += [FloatParameter(qualifier='statweight', value=kwargs.get('statweight', 0.0), default_unit=None, description='Statistical weight in overall fitting')]
return ParameterSet(obs_params), constraints | def lc(**kwargs) | Create parameters for a new light curve dataset.
Generally, this will be used as an input to the kind argument in
:meth:`phoebe.frontend.bundle.Bundle.add_dataset`
:parameter **kwargs: defaults for the values of any of the parameters
:return: a :class:`phoebe.parameters.parameters.ParameterSet` of all newly
created :class:`phoebe.parameters.parameters.Parameter`s | 4.15586 | 4.095257 | 1.014798 |
obs_params = []
#obs_params += [FloatParameter(qualifier='statweight', value = kwargs.get('statweight', 1.0), default_unit=u.dimensionless_unscaled, description='Statistical weight in overall fitting')]
syn_params, constraints = rv_syn(syn=False, **kwargs)
obs_params += syn_params.to_list()
#obs_params += rv_dep(**kwargs).to_list()
return ParameterSet(obs_params), constraints | def rv(**kwargs) | Create parameters for a new radial velocity dataset.
Generally, this will be used as an input to the kind argument in
:meth:`phoebe.frontend.bundle.Bundle.add_dataset`
:parameter **kwargs: defaults for the values of any of the parameters
:return: a :class:`phoebe.parameters.parameters.ParameterSet` of all newly
created :class:`phoebe.parameters.parameters.Parameter`s | 8.534063 | 8.416806 | 1.013931 |
obs_params = []
#obs_params += [FloatParameter(qualifier='statweight', value = kwargs.get('statweight', 1.0), default_unit=u.dimensionless_unscaled, description='Statistical weight in overall fitting')]
syn_params, constraints = lp_syn(syn=False, **kwargs)
obs_params += syn_params.to_list()
#obs_params += rv_dep(**kwargs).to_list()
return ParameterSet(obs_params), constraints | def lp(**kwargs) | Create parameters for a new line profile dataset.
Generally, this will be used as an input to the kind argument in
:meth:`phoebe.frontend.bundle.Bundle.add_dataset`
:parameter **kwargs: defaults for the values of any of the parameters
:return: a :class:`phoebe.parameters.parameters.ParameterSet` of all newly
created :class:`phoebe.parameters.parameters.Parameter`s | 9.181373 | 9.225317 | 0.995237 |
if not conf.devel:
raise NotImplementedError("'etv' dataset not officially supported for this release. Enable developer mode to test.")
obs_params = []
syn_params, constraints = etv_syn(syn=False, **kwargs)
obs_params += syn_params.to_list()
#obs_params += etv_dep(**kwargs).to_list()
return ParameterSet(obs_params), constraints | def etv(**kwargs) | Create parameters for a new eclipse timing variations dataset.
Generally, this will be used as an input to the kind argument in
:meth:`phoebe.frontend.bundle.Bundle.add_dataset`
:parameter **kwargs: defaults for the values of any of the ParameterSet
:return: a :class:`phoebe.parameters.parameters.ParameterSet` of all newly
created :class:`phoebe.parameters.parameters.Parameter`s | 11.240553 | 10.875401 | 1.033576 |
obs_params = []
#~ obs_params += [FloatArrayParameter(qualifier='exptime', value=kwargs.get('exptime', []), default_unit=u.s, description='Signal exposure time')]
#~ obs_params += [FloatArrayParameter(qualifier='flag', value=kwargs.get('flag', []), default_unit=None, description='Signal flag')]
#~ obs_params += [FloatArrayParameter(qualifier='weight', value=kwargs.get('weight', []), default_unit=None, description='Signal weight')]
#~ obs_params += [FloatParameter(qualifier='timeoffset', value=kwargs.get('timeoffset', 0.0), default_unit=u.d, description='Zeropoint date offset for observations')]
#~ obs_params += [FloatParameter(qualifier='statweight', value=kwargs.get('statweight', 0.0), default_unit=None, description='Statistical weight in overall fitting')]
syn_params, constraints = orb_syn(syn=False, **kwargs)
obs_params += syn_params.to_list()
#obs_params += orb_dep(**kwargs).to_list()
return ParameterSet(obs_params), [] | def orb(**kwargs) | Create parameters for a new orbit dataset.
Generally, this will be used as an input to the kind argument in
:meth:`phoebe.frontend.bundle.Bundle.add_dataset`
:parameter **kwargs: defaults for the values of any of the parameters
:return: a :class:`phoebe.parameters.parameters.ParameterSet` of all newly
created :class:`phoebe.parameters.parameters.Parameter`s | 3.606253 | 3.623717 | 0.995181 |
obs_params = []
syn_params, constraints = mesh_syn(syn=False, **kwargs)
obs_params += syn_params.to_list()
obs_params += [SelectParameter(qualifier='include_times', value=kwargs.get('include_times', []), description='append to times from the following datasets/time standards', choices=['t0@system'])]
obs_params += [SelectParameter(qualifier='columns', value=kwargs.get('columns', []), description='columns to expose within the mesh', choices=_mesh_columns)]
#obs_params += mesh_dep(**kwargs).to_list()
return ParameterSet(obs_params), constraints | def mesh(**kwargs) | Create parameters for a new mesh dataset.
Generally, this will be used as an input to the kind argument in
:meth:`phoebe.frontend.bundle.Bundle.add_dataset`
:parameter **kwargs: defaults for the values of any of the parameters
:return: a :class:`phoebe.parameters.parameters.ParameterSet` of all newly
created :class:`phoebe.parameters.parameters.Parameter`s | 10.474075 | 9.929634 | 1.05483 |
if not conf.devel:
raise NotImplementedError("'photodynam' backend not officially supported for this release. Enable developer mode to test.")
params = []
params += [BoolParameter(qualifier='enabled', copy_for={'context': 'dataset', 'kind': ['lc', 'rv', 'orb'], 'dataset': '*'}, dataset='_default', value=kwargs.get('enabled', True), description='Whether to create synthetics in compute/fitting run')]
params += [FloatParameter(qualifier='stepsize', value=kwargs.get('stepsize', 0.01), default_unit=None, description='blah')]
params += [FloatParameter(qualifier='orbiterror', value=kwargs.get('orbiterror', 1e-20), default_unit=None, description='blah')]
# TODO: remove this option and instead use time0@system
#params += [FloatParameter(qualifier='time0', value=kwargs.get('time0', 0.0), default_unit=u.d, description='Time to start the integration')]
return ParameterSet(params) | def photodynam(**kwargs) | Compute options for using Josh Carter's 'photodynam' code as a
backend (must be installed).
Generally, this will be used as an input to the kind argument in
:meth:`phoebe.frontend.bundle.Bundle.add_compute`
Please see :func:`phoebe.backend.backends.photodynam` for a list of sources to
cite when using this backend.
:parameter **kwargs: defaults for the values of any of the parameters
:return: a :class:`phoebe.parameters.parameters.ParameterSet` of all newly
created :class:`phoebe.parameters.parameters.Parameter`s | 7.905137 | 7.653488 | 1.03288 |
if not conf.devel:
raise NotImplementedError("'jktebop' backend not officially supported for this release. Enable developer mode to test.")
params = []
params += [BoolParameter(qualifier='enabled', copy_for={'context': 'dataset', 'kind': ['lc'], 'dataset': '*'}, dataset='_default', value=kwargs.get('enabled', True), description='Whether to create synthetics in compute/fitting run')]
params += [FloatParameter(qualifier='ringsize', value=kwargs.get('ringsize', 5), default_unit=u.deg, description='Integ Ring Size')]
return ParameterSet(params) | def jktebop(**kwargs) | Compute options for using John Southworth's 'jktebop' code as a
backend (must be installed).
Generally, this will be used as an input to the kind argument in
:meth:`phoebe.frontend.bundle.Bundle.add_compute`
Please see :func:`phoebe.backend.backends.jktebop` for a list of sources to
cite when using this backend.
:parameter **kwargs: defaults for the values of any of the parameters
:return: a :class:`phoebe.parameters.parameters.ParameterSet` of all newly
created :class:`phoebe.parameters.parameters.Parameter`s | 16.790848 | 16.086046 | 1.043814 |
def decorator(view_func):
@functools.wraps(view_func, assigned=available_attrs(view_func))
def _wrapped_view(request, *args, **kwargs):
slug = kwargs.pop("slug", None)
if not getattr(request, "team", None):
request.team = get_object_or_404(Team, slug=slug)
return view_func(request, *args, **kwargs)
return _wrapped_view
if func:
return decorator(func)
    return decorator | def team_required(func=None) | Decorator for views that require a team be supplied either via a slug in the
url pattern or already set on the request object from the TeamMiddleware | 1.700316 | 1.688685 | 1.006888 |
def decorator(view_func):
@team_required
@login_required
@functools.wraps(view_func, assigned=available_attrs(view_func))
def _wrapped_view(request, *args, **kwargs):
role = request.team.role_for(request.user)
if role not in [Membership.ROLE_MANAGER, Membership.ROLE_OWNER]:
raise Http404()
return view_func(request, *args, **kwargs)
return _wrapped_view
if func:
return decorator(func)
return decorator | def manager_required(func=None) | Decorator for views that require not only a team but also that a user be
logged in and be the manager or owner of that team. | 2.109724 | 1.961935 | 1.075328 |
if self.use_main_ns:
self.namespace = __main__.__dict__
if state == 0:
self.matches = self.attr_matches(text)
try:
return self.matches[state]
except IndexError:
return None | def complete(self, text, state) | Return the next possible completion for 'text'.
This is called successively with state == 0, 1, 2, ... until it
returns None. The completion should begin with 'text'. | 4.897698 | 4.349617 | 1.126007 |
def _method_or_attr(thisobject, item):
# decide whether to append a '(' to the end of the attr based
# on whether its callable
if hasattr(getattr(thisobject, item), '__call__'):
return item + '('
else:
return item
tb_compl_commands = {
'.': {},
'[': {},
'.get(': {},
'.set(': {},
'.filter(': {},
'.filter_or_get(': {},
'.get_parameter(': {},
'.remove_parameter(': {},
'.remove_parameters_all(': {},
'.get_value(': {},
'.set_value(': {},
'.set_value_all(': {},
# TODO: default_unit, adjust, prior, posterior, enabled?
'.get_history(': {'context': 'history'},
'.remove_history(': {'context': 'history'},
'.get_component(': {'context': 'system'},
'.remove_component(': {'context': 'system'},
'.get_mesh(': {'context': 'mesh'},
'.remove_mesh(': {'context': 'mesh'},
'.get_constraint(': {'context': 'constraint'},
'.remove_constraint(': {'context': 'constraint'},
'.flip_constraint(': {'context': 'constraint'},
'.run_constraint(': {'context': 'constraint'},
'.get_compute(': {'context': 'compute'},
'.remove_compute(': {'context': 'compute'},
'.run_compute(': {'context': 'compute'},
'.get_prior(': {'context': 'prior'}, # TODO: remove_prior, run_prior, enable_prior, disable_prior
'.get_fitting(': {'context': 'fitting'},
'.remove_fitting(': {'context': 'fitting'},
'.run_fitting(': {'context': 'fitting'},
'.get_posterior(': {'context': 'posterior'}, # TODO: remove_posterior, draw_from_posterior
'.get_feedback(': {'context': 'feedback'},
'.remove_feedback(': {'context': 'feedback'},
# TODO: plots, plugins
}
expr = None
for cmd,filter_kwargs in tb_compl_commands.items():
if cmd in text:
expr, attr = text.rsplit(cmd, 1)
#~ if len(attr)==0:
#~ return []
if attr[0] not in ["'",'"'] and cmd != '.':
return []
else:
if cmd == '.':
# then we're just looking for attributes and don't
# need to offset for the ' or "
stringchar = ''
attr = attr
else:
# then we're the first argument of some method
# and need to account for the starting ' or "
stringchar = attr[0]
attr = attr[1:]
break
if expr is None:
# then we haven't found a match
return []
try:
thisobject = eval(expr, self.namespace)
except Exception:
return []
if cmd == '.':
# then we're looking for attributes of thisobject (PS or bundle) that start with attr
words = [_method_or_attr(thisobject, item) for item in dir(thisobject) if item[:len(attr)] == attr]
else:
# then we're looking to autocomplete the twig attr for thisobject (PS or bundle)
words = thisobject.filter_or_get(attr, autocomplete=True, **filter_kwargs)
matches = []
n = len(attr)
for word in words:
matches.append('{}{}{}{}'.format(expr,cmd,stringchar,word))
return matches | def attr_matches(self, text) | Compute matches when text contains a dot.
Assuming the text is of the form NAME.NAME....[NAME], and is
evaluable in self.namespace, it will be evaluated and its attributes
(as revealed by dir()) are used as possible completions. (For class
instances, class members are also considered.)
WARNING: this can still invoke arbitrary C code, if an object
with a __getattr__ hook is evaluated. | 4.295833 | 4.212386 | 1.01981 |
# needs_mesh = {info['dataset']: info['kind'] for info in needed_syns if info['needs_mesh']}
params = []
for needed_syn in needed_syns:
# print "*** _create_syns needed_syn", needed_syn
# used to be {}_syn
syn_kind = '{}'.format(needed_syn['kind'])
# if needed_syn['kind']=='mesh':
# parameters.dataset.mesh will handle creating the necessary columns
# needed_syn['dataset_fields'] = needs_mesh
# needed_syn['columns'] = b.get_value(qualifier='columns', dataset=needed_syn['dataset'], context='dataset')
# datasets = b.get_value(qualifier='datasets', dataset=needed_syn['dataset'], context='dataset')
# needed_syn['datasets'] = {ds: b.filter(datset=ds, context='dataset').exclude(kind='*_dep').kind for ds in datasets}
# phoebe will compute everything sorted - even if the input times array
# is out of order, so let's make sure the exposed times array is in
# the correct (sorted) order
if 'times' in needed_syn.keys():
needed_syn['times'].sort()
needed_syn['empty_arrays_len'] = len(needed_syn['times'])
these_params, these_constraints = getattr(_dataset, "{}_syn".format(syn_kind.lower()))(**needed_syn)
# TODO: do we need to handle constraints?
these_params = these_params.to_list()
for param in these_params:
if param._dataset is None:
# dataset may be set for mesh columns
param._dataset = needed_syn['dataset']
param._kind = syn_kind
param._component = needed_syn['component']
# reset copy_for... model Parameters should never copy
param._copy_for = {}
# context, model, etc will be handle by the bundle once these are returned
params += these_params
return ParameterSet(params) | def _create_syns(b, needed_syns) | Create empty synthetics
:parameter b: the :class:`phoebe.frontend.bundle.Bundle`
:parameter list needed_syns: list of dictionaries containing kwargs to access
the dataset (dataset, component, kind)
:return: :class:`phoebe.parameters.parameters.ParameterSet` of all new parameters | 7.845247 | 7.099594 | 1.105028 |
packet = {'dataset': kwargs.get('dataset', info['dataset']),
'component': kwargs.get('component', info['component']),
'kind': kwargs.get('kind', info['kind']),
'qualifier': qualifier,
'value': value,
'time': time
}
return packet | def _make_packet(qualifier, value, time, info, **kwargs) | where kwargs overrides info | 2.867164 | 2.762894 | 1.03774 |
raise NotImplementedError("run_checks is not implemented by the {} backend".format(self.__class__.__name__)) | def run_checks(self, b, compute, times=[], **kwargs) | run any sanity checks to make sure the parameters and options are legal
for this backend. If they are not, raise an error here to avoid errors
within the workers.
Any physics-checks that are backend-independent should be in
Bundle.run_checks, and don't need to be repeated here.
This should be subclassed by all backends, otherwise will throw a
NotImplementedError | 5.99157 | 4.913984 | 1.21929 |
packet, new_syns = self._get_packet_and_syns(b, compute, times, **kwargs)
for k,v in kwargs.items():
packet[k] = v
packet['b'] = b.to_json() if mpi.enabled else b
packet['compute'] = compute
packet['backend'] = self.__class__.__name__
return packet, new_syns | def get_packet_and_syns(self, b, compute, times=[], **kwargs) | get_packet is called by the master and must get all information necessary
to send to all workers. The returned packet will be passed on as
_run_chunk(**packet) with the following exceptions:
* b: the bundle will be included in the packet serialized
* compute: the label of the compute options will be included in the packet
* backend: the class name will be passed on in the packet so the worker can call the correct backend
* all kwargs will be passed on verbatim | 3.505471 | 3.342998 | 1.048601 |
# TODO: move to BaseBackendByDataset or BaseBackend?
logger.debug("rank:{}/{} {}._fill_syns".format(mpi.myrank, mpi.nprocs, self.__class__.__name__))
for packetlists in rpacketlists_per_worker:
# single worker
for packetlist in packetlists:
# single time/dataset
for packet in packetlist:
# single parameter
new_syns.set_value(**packet)
return new_syns | def _fill_syns(self, new_syns, rpacketlists_per_worker) | rpacket_per_worker is a list of packetlists as returned by _run_chunk | 7.83201 | 7.765925 | 1.00851 |
self.run_checks(b, compute, times, **kwargs)
logger.debug("rank:{}/{} calling get_packet_and_syns".format(mpi.myrank, mpi.nprocs))
packet, new_syns = self.get_packet_and_syns(b, compute, times, **kwargs)
if mpi.enabled:
# broadcast the packet to ALL workers
mpi.comm.bcast(packet, root=0)
# now even the master can become a worker and take on a chunk
packet['b'] = b
rpacketlists = self._run_chunk(**packet)
# now receive all packetlists
rpacketlists_per_worker = mpi.comm.gather(rpacketlists, root=0)
else:
rpacketlists_per_worker = [self._run_chunk(**packet)]
return self._fill_syns(new_syns, rpacketlists_per_worker) | def run(self, b, compute, times=[], **kwargs) | if within mpirun, workers should call _run_worker instead of run | 5.627324 | 5.409694 | 1.04023 |
# the volume of a slanted triangular cone is A_triangle * (r_vec dot norm_vec) / 3.
# TODO: implement normalizing normals into meshing routines (or at least have them supply normal_mags to the mesh)
# TODO: remove this function - should now be returned by the meshing algorithm itself
# although wd method may currently use this
normal_mags = np.linalg.norm(normals, axis=1) #np.sqrt((normals**2).sum(axis=1))
return np.sum(sizes*((centers*normals).sum(axis=1)/normal_mags)/3) | def compute_volume(sizes, centers, normals) | Compute the numerical volume of a convex mesh
:parameter array sizes: array of sizes of triangles
:parameter array centers: array of centers of triangles (x,y,z)
:parameter array normals: array of normals of triangles (will normalize if not already)
:return: the volume (float) | 13.846994 | 13.729475 | 1.00856 |
s1 = sin(eincl);
c1 = cos(eincl);
s2 = sin(elongan);
c2 = cos(elongan);
s3 = sin(etheta);
c3 = cos(etheta);
c1s3 = c1*s3;
c1c3 = c1*c3;
return np.array([
[-c2*c3+s2*c1s3, c2*s3+s2*c1c3, -s2*s1],
[-s2*c3-c2*c1s3, s2*s3-c2*c1c3, c2*s1],
[s1*s3, s1*c3, c1]
]) | def euler_trans_matrix(etheta, elongan, eincl) | Get the transformation matrix R to translate/rotate a mesh according to
euler angles.
The matrix is
R(long,incl,theta) =
Rz(pi).Rz(long).Rx(incl).Rz(theta)
Rz(long).Rx(-incl).Rz(theta).Rz(pi)
where
Rx(u) = 1, 0, 0
0, cos(u), -sin(u)
0, sin(u), cos(u)
Ry(u) = cos(u), 0, sin(u)
0, 1, 0
-sin(u), 0, cos(u)
Rz(u) = cos(u), -sin(u), 0
sin(u), cos(u), 0
0, 0, 1
Rz(pi) = reflection across z-axis
Note:
R(0,0,0) = -1, 0, 0
0, -1, 0
0, 0, 1
:parameter float etheta: euler theta angle
:parameter float elongan: euler long of asc node angle
:parameter float eincl: euler inclination angle
:return: matrix with size 3x3 | 1.836013 | 1.842296 | 0.996589 |
#print "*** spin_in_system", incl, long_an, np.dot(Rz(long_an), np.dot(Rx(-incl), np.array([0,0,1])))
# Rz(long_an) Rx(incl) [0, 0, 1]
return np.dot(Rz(long_an), np.dot(Rx(-incl), np.array([0.,0.,1.]))) | def spin_in_system(incl, long_an) | Spin in the plane of sky of a star given its inclination and "long_an"
incl - inclination of the star in the plane of sky
long_an - longitude of ascending node (equator) of the star in the plane of sky
Return:
spin - in plane of sky | 3.07282 | 3.429322 | 0.896043 |
def spin_in_roche(s, etheta, elongan, eincl):
    """Transform a plane-of-sky spin vector s into the Roche frame.

    etheta -- true anomaly
    elongan -- longitude of ascending node
    eincl -- inclination
    """
    # the transpose of the euler matrix applies the inverse rotation
    rot = euler_trans_matrix(etheta, elongan, eincl)
    return np.dot(rot.T, s)
def general_rotation_matrix(theta, phi, alpha):
    """Rotation by angle alpha around the unit vector
    u = (sin(theta)cos(phi), sin(theta)sin(phi), cos(theta)).

    Ref: http://ksuweb.kennesaw.edu/~plaval//math4490/rotgen.pdf

    :parameter float theta: polar angle of the rotation axis
    :parameter float phi: azimuthal angle of the rotation axis
    :parameter float alpha: rotation angle
    :return: 3x3 numpy array
    """
    ca = cos(alpha)
    sa = sin(alpha)
    omc = 1 - ca  # the (1 - cos) factor of the Rodrigues formula
    ux = sin(theta)*cos(phi)
    uy = sin(theta)*sin(phi)
    uz = cos(theta)
    return np.array([
        [omc*ux**2 + ca, omc*ux*uy - sa*uz, omc*ux*uz + sa*uy],
        [omc*ux*uy + sa*uz, omc*uy**2 + ca, omc*uy*uz - sa*ux],
        [omc*ux*uz - sa*uy, omc*uy*uz + sa*ux, omc*uz**2 + ca]
    ])
def transform_position_array(array, pos, euler, is_normal, reverse=False):
    """Rotate an Nx3 array by the euler angles and offset it by 'pos'.

    :parameter array array: Nx3 positions in the original (star) frame
    :parameter array pos: length-3 cartesian offset applied to all entries
    :parameter array euler: euler angles (etheta, elongan, eincl) in radians
    :parameter bool is_normal: if True the entries are direction vectors
        (normals) and the 'pos' offset is skipped
    :parameter bool reverse: if True apply the non-transposed matrix
    :return: transformed array with the same shape as 'array'
    """
    rot = euler_trans_matrix(*euler)
    if not reverse:
        rot = rot.T

    if isinstance(array, ComputedColumn):
        array = array.for_computations

    rotated = np.dot(np.asarray(array), rot)
    # normals are directions: never offset them by the position
    return rotated if is_normal else rotated + np.asarray(pos)
def transform_velocity_array(array, pos_array, vel, euler, rotation_vel=(0,0,0)):
    """Rotate an Nx3 velocity array into the system frame.

    Adds the solid-body rotation term (omega x r) per element, rotates by
    the euler angles, then offsets by the center-of-mass velocity 'vel'.

    :parameter array array: Nx3 velocities in the original (star) frame
    :parameter array pos_array: element positions in the star frame, same
        shape as 'array'
    :parameter array vel: length-3 velocity offset in the system frame
    :parameter array euler: euler angles (etheta, elongan, eincl) in radians
    :parameter array rotation_vel: rotation vector of the star in the star frame
    :return: transformed velocities with the same shape as 'array'
    """
    rot = euler_trans_matrix(*euler)
    # v_{rot,i} = omega x r_i with omega = rotation_vel
    spin_term = np.cross(rotation_vel, pos_array, axisb=1)
    com_term = np.asarray(vel)

    if isinstance(array, ComputedColumn):
        array = array.for_computations

    return np.dot(np.asarray(array) + spin_term, rot.T) + com_term
def wd_grid_to_mesh_dict(the_grid, q, F, d):
    """Convert a WD-style grid into the mesh dictionary used by PHOEBE.

    Translates the Nx9 packed triangle vertices into an Nx3 vertex array
    plus an Nx3 index array, and fills the per-triangle quantities.

    :parameter record-array the_grid: output from discretize_wd_style
    :parameter float q: mass-ratio (M_this/M_sibling)
    :parameter float F: syncpar
    :parameter float d: instantaneous unitless separation
    :return: dict in PHOEBE's format to be passed to a Mesh class
    """
    # WD returns a list of triangles with 9 coordinates (v1x, v1y, v1z, v2x, ...)
    triangles_9N = the_grid[:,4:13]

    new_mesh = {}
    # force the mesh to be computed at centers rather than the PHOEBE default
    # of computing at vertices and averaging for the centers.  This will
    # propagate to all ComputedColumns, meaning those quantities
    # (ie normgrads, velocities) are filled per-triangle.
    new_mesh['compute_at_vertices'] = False
    # PHOEBE's mesh structure stores vertices in an Nx3 array
    new_mesh['vertices'] = triangles_9N.reshape(-1,3)
    # and triangles as indices pointing to each of the 3 vertices (Nx3)
    new_mesh['triangles'] = np.arange(len(triangles_9N)*3).reshape(-1,3)
    new_mesh['centers'] = the_grid[:,0:3]

    tnormals = the_grid[:,13:16]
    norms = np.linalg.norm(tnormals, axis=1)
    # the norm of the (un-normalized) WD normal serves as the potential gradient
    new_mesh['normgrads'] = norms
    # normalize each row (vectorized; replaces the old per-row list comprehension)
    new_mesh['tnormals'] = tnormals / norms[:,np.newaxis]
    # NOTE: there are no vnormals in a wd-style mesh

    new_mesh['areas'] = the_grid[:,3]
    new_mesh['tareas'] = the_grid[:,18]

    # lat/long of each triangle -- currently used to test WD-style eclipse
    # detection (determining which triangles share a "strip")
    new_mesh['thetas'] = the_grid[:,16]
    new_mesh['phis'] = the_grid[:,17]

    new_mesh['volume'] = compute_volume(new_mesh['areas'], new_mesh['centers'], new_mesh['tnormals'])

    new_mesh['velocities'] = np.zeros(new_mesh['centers'].shape)

    return new_mesh
def averages(self):
    """Mean of the per-vertex values for each triangle.

    Returns None when the quantities are defined at centers instead of
    vertices.  Also see :method:`centers`.

    :return: numpy array or None
    """
    if self.mesh._compute_at_vertices:
        return np.mean(self.vertices_per_triangle, axis=1)
    return None
def weighted_averages(self):
    """Weighted mean of the per-vertex values for each triangle.

    Weights come from mesh.weights (1/3 per vertex by default, in which
    case this matches :meth:`averages`); libphoebe's eclipse detection
    overrides them for partially visible triangles.  Returns None when
    quantities are defined at centers instead of vertices.

    :return: numpy array or None
    """
    if not self.mesh._compute_at_vertices:
        return None

    vpt = self.vertices_per_triangle
    if vpt.ndim == 2:
        return np.sum(vpt * self.mesh.weights, axis=1)
    if vpt.ndim == 3:
        return np.sum(vpt * self.mesh.weights[:,np.newaxis], axis=1)
    raise NotImplementedError
def set_for_computations(self, value):
    """Store the quantities at vertices or centers, as dictated by
    mesh._compute_at_vertices."""
    target = '_vertices' if self.mesh._compute_at_vertices else '_centers'
    setattr(self, target, value)
def update_columns_dict(self, kwargs):
    """Update the value of one or more columns by passing them as a dict.

    For observable columns, provide the label of the observable itself and
    it will be found (so long as it does not conflict with an existing
    non-observable column).

    :parameter dict kwargs: column name -> new value; float values are
        broadcast to full-length arrays (except for scalar fields)
    """
    # make sure to do the geometric things that are needed for some of the
    # ComputedColumns first
    for key in ('triangles', 'vertices', 'centers', 'vnormals', 'tnormals'):
        if key in kwargs.keys():
            self.__setitem__(key, kwargs.pop(key))

    for k, v in kwargs.items():
        if isinstance(v, float) and k not in self._scalar_fields:
            # Then let's make an array with the correct length full of this
            # scalar
            # NOTE: this won't work for Nx3's, but that
            # really shouldn't ever happen since they should be set
            # within the init.
            if self._compute_at_vertices:
                v = np.full(self.Nvertices, v)
            else:
                v = np.full(self.Ntriangles, v)

        self.__setitem__(k, v)

        if isinstance(v, ComputedColumn):
            # then let's update the mesh instance to correctly handle
            # inheritance
            self.__getitem__(k)._mesh = self
def coords_for_observations(self):
    """Coordinates of each element relative to the star center, after any
    perturbations (features or volume-conservation offsets).

    NOTE: this is NOT necessarily where the physical parameters were
    computed, but IS where eclipse detection, etc, is handled.
    """
    base = self.vertices if self._compute_at_vertices else self.centers
    return base - self._pos_center
def coords_for_computations(self):
    """Coordinates of each element (vertices or centers, depending on the
    mesh setting) relative to the star center."""
    # TODO: need to subtract the position offset if a Mesh (in orbit)
    if self._compute_at_vertices:
        # prefer the perturbed vertices when available
        base = self.pvertices if self.pvertices is not None else self.vertices
    else:
        base = self.centers
    return base - self._pos_center
def rs(self):
    """Distance of each element (vertices or centers, per the mesh setting)
    from the center of the star.

    NOTE: unscaled

    (ComputedColumn)
    """
    distances = np.linalg.norm(self.coords_for_computations, axis=1)
    return ComputedColumn(self, distances)
def rprojs(self):
    """Projected (xy/uv-plane) distance of each element from the center
    of the star.

    NOTE: unscaled

    (ComputedColumn)
    """
    # TODO: should this be moved to Mesh?  Projecting in x,y doesn't make
    # much sense without LOS orientation.
    projected = np.linalg.norm(self.coords_for_computations[:,:2], axis=1)
    return ComputedColumn(self, projected)
def cosbetas(self):
    """Cosine of the angle between each element's normal and its radial
    direction from the star center (see the commented formula below).

    (ComputedColumn)
    """
    coords = self.coords_for_computations
    norms = self.normals_for_computations

    # equivalent python: cos(beta) = (c . n) / (|c| |n|) per element;
    # computed in C by libphoebe for speed:
    # cosbetas = np.array([np.dot(c,n) / (np.linalg.norm(c)*np.linalg.norm(n)) for c,n in zip(coords, norms)])
    cosbetas = libphoebe.scalproj_cosangle(
        np.ascontiguousarray(coords),
        np.ascontiguousarray(norms)
    )

    return ComputedColumn(self, cosbetas)
def areas_si(self):
    """Element areas converted from solRad**2 to m**2 (None when areas
    are unset)."""
    if self._areas is None:
        return None
    return (self.areas*u.solRad**2).to(u.m**2).value
def from_proto(cls, proto_mesh, scale):
    """Build a scaled mesh from a ProtoMesh.

    :parameter proto_mesh: the unscaled protomesh
    :parameter float scale: linear scaling factor
    """
    scaled = cls(**proto_mesh.items())
    scaled._copy_roche_values()
    scaled._scale_mesh(scale=scale)
    return scaled
def _scale_mesh(self, scale):
    """Scale the mesh by the linear factor 'scale'.

    Positions scale linearly, areas by scale**2, the volume by scale**3.
    """
    pos_ks = ['vertices', 'centers']

    # TODO: scale velocities???

    # handle scale
    self.update_columns_dict({k: self[k]*scale for k in pos_ks})

    self.update_columns(areas=self.areas*(scale**2))
    self._volume *= scale**3

    if self._area is not None:
        # self._area is None for wd meshes
        # BUGFIX: area scales multiplicatively like the per-element areas
        # (was 'self._area += scale**2')
        self._area *= scale**2
def from_proto(cls, proto_mesh, scale,
               pos, vel, euler, euler_vel,
               rotation_vel=(0,0,0),
               component_com_x=None):
    """Turn a ProtoMesh into a Mesh: scale it and place it in orbit.

    All geometry fields are updated from the proto reference frame into
    the current system frame given the position, velocity, euler angles,
    and rotational velocity of THIS mesh.

    :parameter list pos: current position (x, y, z)
    :parameter list vel: current velocity (vx, vy, vz)
    :parameter list euler: current euler angles (etheta, elongan, eincl)
    :parameter list rotation_vel: rotation velocity vector (polar_dir*freq_rot)
    """
    placed = cls(**proto_mesh.items())
    placed._copy_roche_values()
    placed._scale_mesh(scale=scale)
    placed._place_in_orbit(pos, vel, euler, euler_vel, rotation_vel, component_com_x)
    return placed
def from_scaledproto(cls, scaledproto_mesh,
                     pos, vel, euler, euler_vel,
                     rotation_vel=(0,0,0),
                     component_com_x=None):
    """Turn an already-scaled ScaledProtoMesh into a Mesh placed in orbit."""
    placed = cls(**scaledproto_mesh.items())
    # roche coordinates have already been copied,
    # so _copy_roche_values() must NOT be called here
    placed._place_in_orbit(pos, vel, euler, euler_vel, rotation_vel, component_com_x)
    return placed
def _place_in_orbit(self, pos, vel, euler, euler_vel, rotation_vel=(0,0,0), component_com_x=None):
    """Rotate/translate this mesh from the star frame into the system frame.

    :parameter list pos: current position (x, y, z)
    :parameter list vel: current velocity (vx, vy, vz)
    :parameter list euler: current euler angles (etheta, elongan, eincl)
    :parameter list euler_vel: euler angles applied to the velocities
    :parameter list rotation_vel: rotation velocity vector of the star
    :parameter component_com_x: x-coordinate of this component's center of
        mass (non-zero/None presumably marks the secondary -- see below)
    """
    # TODO: store pos, vel, euler so that INCREMENTAL changes are allowed
    # if passing new values (and then make this a public method). See note
    # below!
    pos_ks = ['vertices', 'pvertices', 'centers']
    norm_ks = ['vnormals', 'tnormals'] #, 'cnormals']
    vel_ks = ['velocities']

    # NOTE: we do velocities first since they require the positions WRT
    # the star (not WRT the system). Will need to keep this in mind if we
    # eventually support incremental transformations.
    # pos_array = self.vertices if self._compute_at_vertices else self.centers
    pos_array = self.roche_vertices if self._compute_at_vertices else self.roche_centers
    if component_com_x is not None and component_com_x != 0.0:
        # then we're the secondary component and need to do 1-x and then flip the rotation component vxs
        pos_array = np.array([component_com_x, 0.0, 0.0]) - pos_array
    self.update_columns_dict({k: transform_velocity_array(self[k], pos_array, vel, euler_vel, rotation_vel) for k in vel_ks if self[k] is not None})
    # TODO: handle velocity from mesh reprojection during volume conservation

    # handle rotation/displacement
    # NOTE: mus will automatically be updated on-the-fly
    self.update_columns_dict({k: transform_position_array(self[k], pos, euler, False) for k in pos_ks if self[k] is not None})
    # normals are directions, so they are rotated with reverse=True and
    # never offset by 'pos'
    self.update_columns_dict({k: transform_position_array(self[k], pos, euler, True) for k in norm_ks if self[k] is not None})

    # let's store the position. This is both useful for "undoing" the
    # orbit-offset, and also eventually to allow incremental changes.
    self._pos = pos
    if component_com_x is not None and component_com_x != 0.0:
        self._pos_center = transform_position_array(np.array([component_com_x, 0.0, 0.0]), pos, euler, False)
    else:
        self._pos_center = pos
    self._euler = euler
def visibilities(self):
    """Per-triangle visibility fractions, each between 0 (hidden) and
    1 (fully visible); defaults to all-visible.

    (Nx1)
    """
    if self._visibilities is None:
        return np.ones(self.Ntriangles)
    return self._visibilities
def weights(self):
    """Per-vertex weights for each triangle; defaults to 1/3 per vertex.

    (Nx3)
    """
    if self._weights is None or not len(self._weights):
        return np.full((self.Ntriangles, 3), 1./3)
    return self._weights
def update_columns_dict(self, kwargs):
    """Update columns and invalidate caches tied to the triangulation."""
    super(Mesh, self).update_columns_dict(kwargs)

    if kwargs.get('triangles', None) is not None:
        # a new triangulation invalidates the cached visibilities and
        # velocities; they will be rebuilt when next queried
        self.update_columns(visibilities=None, velocities=None)
def update_columns(self, field, value_dict, inds=None, computed_type=None):
    """Update the column 'field' of all meshes.

    :parameter str field: name of the mesh columnname
    :parameter value_dict: dictionary with component as keys and new
        data as values. If value_dict is not a dictionary,
        it will be applied to all components
    :type value_dict: dict or value (array or float)
    :parameter inds: (not supported) indices to set within the column
    :parameter computed_type: 'vertices' or 'triangles'; when given, a fresh
        ComputedColumn is created so compute_at_vertices is overridden
    :raises NotImplementedError: if 'inds' is provided
    """
    if not isinstance(value_dict, dict):
        # broadcast the single value to every component
        value_dict = {comp_no: value_dict for comp_no in self._dict.keys()}

    for comp, value in value_dict.items():
        if computed_type is not None:
            # then create the ComputedColumn now to override the default value of compute_at_vertices
            self._dict[comp]._observables[field] = ComputedColumn(self._dict[comp], compute_at_vertices=computed_type=='vertices')
        #print "***", comp, field, inds, value
        if inds:
            raise NotImplementedError('setting column with indices not yet ported to new meshing')
            # self._dict[comp][field][inds] = value
        else:
            if comp in self._dict.keys():
                self._dict[comp][field] = value
            else:
                # component not stored directly: delegate through its
                # parent envelope's meshes
                meshes = self._dict[self._parent_envelope_of[comp]]
                meshes[comp][field] = value
def get_column(self, field, components=None, computed_type='for_observations'):
    """Return a single column as a dict of component -> array.

    :parameter str field: name of the mesh columnname
    :parameter components: component label(s) to include (default: all)
    :parameter computed_type: attribute to pull from a ComputedColumn
        (e.g. 'for_observations')
    :return: dict with component labels as keys and column arrays as values
    """
    def get_field(c, field, computed_type):
        # component not stored directly: delegate through its parent envelope
        if c not in self._dict.keys() and self._parent_envelope_of[c] in self._dict.keys():
            mesh = self._dict[self._parent_envelope_of[c]]
            return mesh.get_column_flat(field, components, computed_type)

        mesh = self._dict[c]
        if isinstance(mesh, Meshes):
            # then do this recursively for all components in the Meshes object
            # but don't allow nesting in the dictionary, instead combine
            # all subcomponents into one entry with the current component
            return mesh.get_column_flat(field, mesh._components, computed_type)

        f = mesh[field]
        if isinstance(f, ComputedColumn):
            # pull the requested representation off the ComputedColumn
            col = getattr(f, computed_type)
        else:
            col = f
        return col

    if components:
        if isinstance(components, str):
            components = [components]
    else:
        components = self.keys()

    return {c: get_field(c, field, computed_type) for c in components}
def get_column_flat(self, field, components=None, computed_type='for_observations'):
    """Return a single merged (hstacked) array of 'field' across meshes.

    :parameter str field: name of the mesh columnname
    :parameter components: component label(s) to include (default: all)
    """
    per_component = self.get_column(field, components, computed_type)
    # triangle index columns must be offset so they index the merged
    # vertex array
    return self.pack_column_flat(per_component,
                                 components,
                                 offset=field=='triangles')
def pack_column_flat(self, value, components=None, offset=False):
    """Stack per-component column arrays into one flat array.

    :parameter value: dict of component -> array, or list of arrays
        (matched to components in order)
    :parameter components: component label(s); inferred from 'value'
        when omitted
    :parameter bool offset: if True, offset each component's (index)
        values by the cumulative vertex count of the preceding
        components (used for 'triangles')
    :return: hstacked (1d) or vstacked (Nx3) numpy array
    :raises TypeError: if components is neither a list nor a string
    """
    if components:
        if isinstance(components, str):
            components = [components]
        elif not isinstance(components, list):
            raise TypeError("components should be list or string, not {}".format(type(components)))
    elif isinstance(value, dict):
        # BUGFIX: materialize to a list so components[0] below works on
        # python 3 (dict.keys() views are not indexable)
        components = list(value.keys())
    elif isinstance(value, list):
        # BUGFIX: list() for the same reason as above
        components = list(self._dict.keys())
        value = {c: v for c, v in zip(components, value)}

    if offset:
        values = []
        offsetN = 0
        for c in components:
            values.append(value[c]+offsetN)
            offsetN += len(self[c]['vertices'])
    else:
        values = [value[c] for c in components]

    if len(value[components[0]].shape) > 1:
        return np.vstack(values)
    else:
        return np.hstack(values)
def unpack_column_flat(self, value, components=None, offset=False, computed_type=None):
    """Split a flat (hstacked) array back into a per-component dict.

    Inverse of :meth:`pack_column_flat`.

    :parameter value: flat array covering all requested components
    :parameter components: component label(s) (default: all)
    :parameter bool offset: if True, remove the cumulative length offset
        from each component's values (used for 'triangles')
    :parameter computed_type: 'vertices' forces per-vertex slice lengths;
        otherwise each mesh's _compute_at_vertices decides

    TODO: needs testing
    """
    if components:
        if isinstance(components, str):
            components = [components]
    else:
        components = self._dict.keys()

    # TODO: add this
    # we need to split the flat array by the lengths of each mesh
    N_lower = 0
    N_upper = 0
    # NOTE(review): offsetN starts as a float, so the subtraction below
    # casts integer index columns to float -- confirm this is intended
    offsetN = 0.0
    value_dict = {}
    for comp in components:
        if isinstance(self[comp], Meshes):
            # then we need to recursively extract to the underlying meshes
            # pass
            meshes = self[comp]._dict
        else:
            meshes = {comp: self[comp]}

        for c, mesh in meshes.items():
            # length of this mesh's slice in the flat array
            if computed_type=='vertices' or (computed_type is None and mesh._compute_at_vertices):
                N = mesh.Nvertices
            else:
                N = mesh.Ntriangles

            N_upper += N
            value_dict[c] = value[N_lower:N_upper] - offsetN
            if offset:
                offsetN += N
            N_lower += N

    return value_dict
def set_column_flat(self, field, value, components=None, computed_type=None):
    """Split a flat array back per-component and write it into 'field'.

    TODO: needs testing
    """
    per_component = self.unpack_column_flat(value, components, computed_type=computed_type)
    self.update_columns(field, per_component, computed_type=computed_type)
def replace_elements(self, inds, new_submesh, component):
    """Replace the elements of 'component' selected by boolean mask
    'inds' with 'new_submesh' (the unselected elements are kept).

    TODO: remove this method???
    """
    self._dict[component] = np.hstack([self._dict[component][~inds], new_submesh])
r
# Initial value
Fn = M + ecc*sin(M) + ecc**2/2.*sin(2*M)
# Iterative solving of the transcendent Kepler's equation
for i in range(itermax):
F = Fn
Mn = F-ecc*sin(F)
Fn = F+(M-Mn)/(1.-ecc*cos(F))
keep = F!=0 # take care of zerodivision
if hasattr(F,'__iter__'):
if np.all(abs((Fn-F)[keep]/F[keep])<0.00001):
break
elif (abs((Fn-F)/F)<0.00001):
break
# relationship between true anomaly (theta) and eccentric anomaly (Fn)
true_an = 2.*arctan(sqrt((1.+ecc)/(1.-ecc))*tan(Fn/2.))
return Fn,true_an | def _true_anomaly(M,ecc,itermax=8) | r"""
Calculation of true and eccentric anomaly in Kepler orbits.
``M`` is the phase of the star, ``ecc`` is the eccentricity
See p.39 of Hilditch, 'An Introduction To Close Binary Stars':
Kepler's equation:
.. math::
E - e\sin E = \frac{2\pi}{P}(t-T)
with :math:`E` the eccentric anomaly. The right hand size denotes the
observed phase :math:`M`. This function returns the true anomaly, which is
the position angle of the star in the orbit (:math:`\theta` in Hilditch'
book). The relationship between the eccentric and true anomaly is as
follows:
.. math::
\tan(\theta/2) = \sqrt{\frac{1+e}{1-e}} \tan(E/2)
@parameter M: phase
@type M: float
@parameter ecc: eccentricity
@type ecc: float
@keyword itermax: maximum number of iterations
@type itermax: integer
@return: eccentric anomaly (E), true anomaly (theta)
@rtype: float,float | 4.823923 | 4.936828 | 0.97713 |
def spot(feature, **kwargs):
    """Create parameters for a spot feature.

    Generally, this will be used as input to the method argument in
    :meth:`phoebe.frontend.bundle.Bundle.add_feature`

    :parameter **kwargs: defaults for the values of any of the parameters
    :return: a :class:`phoebe.parameters.parameters.ParameterSet` of all
        newly created parameters and a (currently empty) list of constraints
    """
    params = []

    # spot geometry: position on the star and angular size
    params += [FloatParameter(qualifier="colat", value=kwargs.get('colat', 0.0), default_unit=u.deg, description='Colatitude of the center of the spot wrt spin axes')]
    params += [FloatParameter(qualifier="long", value=kwargs.get('long', 0.0), default_unit=u.deg, description='Longitude of the center of the spot wrt spin axis')]
    params += [FloatParameter(qualifier='radius', value=kwargs.get('radius', 1.0), default_unit=u.deg, description='Angular radius of the spot')]
    # params += [FloatParameter(qualifier='area', value=kwargs.get('area', 1.0), default_unit=u.solRad, description='Surface area of the spot')]
    # temperature is expressed relative to the local intrinsic temperature
    params += [FloatParameter(qualifier='relteff', value=kwargs.get('relteff', 1.0), limits=(0.,None), default_unit=u.dimensionless_unscaled, description='Temperature of the spot relative to the intrinsic temperature')]
    # params += [FloatParameter(qualifier='teff', value=kwargs.get('teff', 10000), default_unit=u.K, description='Temperature of the spot')]

    constraints = []

    return ParameterSet(params), constraints
def pulsation(feature, **kwargs):
    """Create parameters for a pulsation feature.

    Generally, this will be used as input to the method argument in
    :meth:`phoebe.frontend.bundle.Bundle.add_feature`

    :parameter **kwargs: defaults for the values of any of the parameters
    :return: a :class:`phoebe.parameters.parameters.ParameterSet` of all
        newly created parameters and a (currently empty) list of constraints
    :raises NotImplementedError: unless developer mode is enabled
    """
    if not conf.devel:
        raise NotImplementedError("'pulsation' feature not officially supported for this release.  Enable developer mode to test.")

    params = []

    params += [FloatParameter(qualifier='radamp', value=kwargs.get('radamp', 0.1), default_unit=u.dimensionless_unscaled, description='Relative radial amplitude of the pulsations')]
    params += [FloatParameter(qualifier='freq', value=kwargs.get('freq', 1.0), default_unit=u.d**-1, description='Frequency of the pulsations')]
    # spherical-harmonic mode numbers
    params += [IntParameter(qualifier='l', value=kwargs.get('l', 0), default_unit=u.dimensionless_unscaled, description='Non-radial degree l')]
    params += [IntParameter(qualifier='m', value=kwargs.get('m', 0), default_unit=u.dimensionless_unscaled, description='Azimuthal order m')]
    params += [BoolParameter(qualifier='teffext', value=kwargs.get('teffext', False), description='Switch to denote whether Teffs are provided by the external code')]

    constraints = []

    return ParameterSet(params), constraints
def anim_to_html(anim):
    """Convert a matplotlib animation into an embeddable HTML5 video tag.

    Adapted from:
    http://jakevdp.github.io/blog/2013/05/12/embedding-matplotlib-animations/
    Requires ffmpeg to build the intermediate mp4 file.  The encoded video
    is cached on the animation object, so repeated display is cheap.

    To get these to display automatically, set
    animation.Animation._repr_html_ = plotlib.anim_to_html
    (this is done on your behalf by PHOEBE).
    """
    import base64

    if not hasattr(anim, '_encoded_video'):
        with NamedTemporaryFile(suffix='.mp4') as f:
            anim.save(f.name, fps=20, extra_args=['-vcodec', 'libx264'])
            # BUGFIX: close the file handle and use the base64 module --
            # bytes.encode("base64") only existed on python 2
            with open(f.name, "rb") as fh:
                video = fh.read()
        anim._encoded_video = base64.b64encode(video).decode('ascii')

    return VIDEO_TAG.format(anim._encoded_video)
def load_lc_data(filename, indep, dep, indweight=None, mzero=None, dir='./'):
    """Load light-curve data from file into a dict of phoebe_lc_* arrays.

    :parameter str filename: data file, optionally with a path component
    :parameter indep: independent-variable label (unused here)
    :parameter dep: dependent-variable label; 'Magnitude' triggers a
        magnitude-to-flux conversion with zeropoint 'mzero'
    :parameter indweight: 'Standard deviation' or 'Standard weight'
    :parameter dir: fallback directory when 'filename' has no path
        (NOTE: shadows the builtin 'dir')
    :return: dict with keys phoebe_lc_time, phoebe_lc_flux and, when a
        third column is present, phoebe_lc_sigmalc
    """
    if '/' in filename:
        path, filename = os.path.split(filename)
    else:
        # TODO: this needs to change to be directory of the .phoebe file
        path = dir
    load_file = os.path.join(path, filename)
    lcdata = np.loadtxt(load_file)
    ncol = len(lcdata[0])
    if dep == 'Magnitude':
        # flux = 10**(-0.4*(m - m0))
        mag = lcdata[:,1]
        flux = 10**(-0.4*(mag-mzero))
        lcdata[:,1] = flux

    d = {}
    d['phoebe_lc_time'] = lcdata[:,0]
    d['phoebe_lc_flux'] = lcdata[:,1]

    if indweight=="Standard deviation":
        if ncol >= 3:
            d['phoebe_lc_sigmalc'] = lcdata[:,2]
        else:
            logger.warning('A sigma column was mentioned in the .phoebe file but is not present in the lc data file')
    elif indweight =="Standard weight":
        if ncol >= 3:
            # convert weight w into sigma = sqrt(1/w)
            sigma = np.sqrt(1/lcdata[:,2])
            d['phoebe_lc_sigmalc'] = sigma
            logger.warning('Standard weight has been converted to Standard deviation.')
        else:
            logger.warning('A sigma column was mentioned in the .phoebe file but is not present in the lc data file')
    else:
        logger.warning('Phoebe 2 currently only supports standard deviaton')

    # dataset.set_value(check_visible=False, **d)
    return d
def load_rv_data(filename, indep, dep, indweight=None, dir='./'):
    """Load radial-velocity data from file into a dict of phoebe_rv_* arrays.

    :parameter str filename: data file, optionally with a path component
    :parameter indep: independent-variable label (unused here)
    :parameter dep: dependent-variable label (unused here)
    :parameter indweight: 'Standard deviation' or 'Standard weight'
    :parameter dir: fallback directory when 'filename' has no path
        (NOTE: shadows the builtin 'dir')
    :return: dict with keys phoebe_rv_time, phoebe_rv_vel and, when a
        third column is present, phoebe_rv_sigmarv
    """
    if '/' in filename:
        path, filename = os.path.split(filename)
    else:
        path = dir
    load_file = os.path.join(path, filename)
    rvdata = np.loadtxt(load_file)
    d ={}
    d['phoebe_rv_time'] = rvdata[:,0]
    d['phoebe_rv_vel'] = rvdata[:,1]
    ncol = len(rvdata[0])
    if indweight=="Standard deviation":
        if ncol >= 3:
            d['phoebe_rv_sigmarv'] = rvdata[:,2]
        else:
            logger.warning('A sigma column is mentioned in the .phoebe file but is not present in the rv data file')
    elif indweight =="Standard weight":
        if ncol >= 3:
            # convert weight w into sigma = sqrt(1/w)
            sigma = np.sqrt(1/rvdata[:,2])
            d['phoebe_rv_sigmarv'] = sigma
            logger.warning('Standard weight has been converted to Standard deviation.')
        else:
            logger.warning('Phoebe 2 currently only supports standard deviaton')
    return d
def det_dataset(eb, passband, dataid, comp, time):
    """Determine (create) the RV dataset to which parameters will be added.

    RV datasets can hold values for each component in phoebe2 but are
    component-specific in phoebe1, so a dataset must be chosen/created
    per-component when importing.

    :parameter eb: the phoebe2 bundle
    :parameter passband: passband of the phoebe1 RV curve (currently unused
        -- the matching logic below is commented out)
    :parameter dataid: requested dataset label, or 'Undefined'
    :parameter comp: component the phoebe1 curve belongs to (unused while
        the matching logic is commented out)
    :parameter time: time array of the phoebe1 curve (unused, see above)
    :return: the newly added RV dataset
    """
    rvs = eb.get_dataset(kind='rv').datasets
    #first check to see if there are currently in RV datasets
    if dataid == 'Undefined':
        dataid = None
#    if len(rvs) == 0:
        #if there isn't we add one the easy part
    # fall back to a default label when the requested one is invalid
    try:
        eb._check_label(dataid)
        rv_dataset = eb.add_dataset('rv', dataset=dataid, times=[])
    except ValueError:
        logger.warning("The name picked for the radial velocity curve is forbidden. Applying default name instead")
        rv_dataset = eb.add_dataset('rv', times=[])

#    else:
#     #now we have to determine if we add to an existing dataset or make a new one
#        rvs = eb.get_dataset(kind='rv').datasets
#        found = False
#        #set the component of the companion
#
#        if comp == 'primary':
#            comp_o = 'primary'
#        else:
#            comp_o = 'secondary'
#        for x in rvs:
#            test_dataset = eb.get_dataset(x, check_visible=False)
#
#
#            if len(test_dataset.get_value(qualifier='rvs', component=comp_o, check_visible=False)) == 0: #so at least it has an empty spot now check against filter and length
#                # removing reference to time_o. If there are no rvs there should be no times
#                # time_o = test_dataset.get_value('times', component=comp_o)
#                passband_o = test_dataset.get_value('passband')
#
#                # if np.all(time_o == time) and (passband == passband_o):
#                if (passband == passband_o):
#                    rv_dataset = test_dataset
#                    found = True
#
#        if not found:
#            try:
#                eb._check_label(dataid)
#
#                rv_dataset = eb.add_dataset('rv', dataset=dataid, times=[])
#
#            except ValueError:
#
#                logger.warning("The name picked for the lightcurve is forbidden. Applying default name instead")
#                rv_dataset = eb.add_dataset('rv', times=[])

    return rv_dataset
def N_to_Ntriangles(N):
    """Convert a WD-style gridsize N to the number of surface triangles.

    @N: WD style gridsize

    Returns: number of triangles.
    """
    # latitude rows at the midpoints of N equal bands in [0, pi/2]
    theta = np.pi/2 * (np.arange(1, N+1) - 0.5)/N
    # number of longitude cells per row grows with sin(theta)
    Mk = np.array(1 + 1.3*N*np.sin(theta), dtype=int)
    # BUGFIX: previously a ragged nested np.array of phi values was built
    # just to count its rows, which raises ValueError on numpy >= 1.24;
    # count directly from Mk instead (2 triangles per surface element)
    Ntri = 2*int(Mk.sum())

    return Ntri
def pot_for_component(pot, q, component=1, reverse=False):
    """Convert a surface potential between component reference frames.

    q for secondaries should already be flipped (via q_for_component).

    :parameter bool reverse: apply the inverse conversion for component 2
    :raises NotImplementedError: for components other than 1 and 2
    """
    # currently only used by legacy wrapper: consider moving/removing
    if component == 1:
        return pot
    if component == 2:
        return pot/q + 0.5*(q-1)/q if reverse else q*pot - 0.5 * (q-1)
    raise NotImplementedError
def roche_misaligned_critical_requiv(q, F, d, s, scale=1.0):
    """Critical (overflow) equivalent radius of a misaligned roche lobe.

    NOTE: output is in units of scale (so constraints will use SI)
    NOTE: q should already be flipped (i.e. the output of q_for_component) if necessary
    NOTE: s should be in roche coordinates at the applicable time/true anomaly
    """
    volume_critical = libphoebe.roche_misaligned_critical_volume(q, F, d, s)
    logger.debug("libphoebe.roche_misaligned_critical_volume(q={}, F={}, d={}, s={}) => {}".format(q, F, d, s, volume_critical))

    # requiv of the sphere with the critical volume: V = 4/3 pi r**3
    requiv_critical = scale * (volume_critical * 3./4 * 1./np.pi)**(1./3)
    # logger.debug("roche.roche_misaligned_critical_requiv = {}".format(requiv_critical))

    return requiv_critical
def rpole_to_pot_aligned(rpole, sma, q, F, d, component=1):
    """Transform polar radius to surface potential (aligned roche).

    :parameter float rpole: polar radius, same units as sma
    :parameter float sma: semi-major axis used to unscale rpole
    """
    q = q_for_component(q, component=component)
    rpole_ = np.array([0, 0, rpole/sma])
    logger.debug("libphoebe.roche_Omega(q={}, F={}, d={}, rpole={})".format(q, F, d, rpole_))
    pot = libphoebe.roche_Omega(q, F, d, rpole_)
    # BUGFIX: q was previously passed positionally into the 'component'
    # slot of pot_for_component, leaving component at its default of 1 and
    # returning the potential unconverted for secondaries
    return pot_for_component(pot, q, component=component, reverse=True)
def pot_to_rpole_aligned(pot, sma, q, F, d, component=1):
    """Transform surface potential to polar radius (aligned roche)."""
    q = q_for_component(q, component=component)
    Phi = pot_for_component(pot, q, component=component)
    # NOTE(review): 'Phi' is computed but never used -- roche_pole is
    # called with the untransformed 'pot'; confirm which potential is
    # intended before changing
    logger.debug("libphobe.roche_pole(q={}, F={}, d={}, Omega={})".format(q, F, d, pot))
    return libphoebe.roche_pole(q, F, d, pot) * sma
def BinaryRoche (r, D, q, F, Omega=0.0):
    r"""Value of the asynchronous, eccentric Roche potential at r.

    If :envvar:`Omega` is passed, the difference is returned.

    The potential is given by [Wilson1979]_:

    .. math::

        \Omega = \frac{1}{\sqrt{x^2 + y^2 + z^2}} + q\left(\frac{1}{\sqrt{(x-D)^2+y^2+z^2}} - \frac{x}{D^2}\right) + \frac{1}{2}F^2(1+q)(x^2+y^2)

    @param r: relative radius vector (3 components)
    @type r: 3-tuple
    @param D: instantaneous separation
    @type D: float
    @param q: mass ratio
    @type q: float
    @param F: synchronicity parameter
    @type F: float
    @param Omega: value of the potential to subtract
    @type Omega: float
    """
    x, y, z = r[0], r[1], r[2]
    star_term = 1.0/sqrt(x*x + y*y + z*z)
    companion_term = q*(1.0/sqrt((x-D)*(x-D) + y*y + z*z) - x/D/D)
    centrifugal_term = 0.5*F*F*(1+q)*(x*x + y*y)
    return star_term + companion_term + centrifugal_term - Omega
return -r[0]*(r[0]*r[0]+r[1]*r[1]+r[2]*r[2])**-1.5 -q*(r[0]-D)*((r[0]-D)*(r[0]-D)+r[1]*r[1]+r[2]*r[2])**-1.5 -q/D/D + F*F*(1+q)*r[0] | def dBinaryRochedx (r, D, q, F) | Computes a derivative of the potential with respect to x.
@param r: relative radius vector (3 components)
@param D: instantaneous separation
@param q: mass ratio
@param F: synchronicity parameter | 3.16837 | 3.255437 | 0.973255 |
return (2*r[0]*r[0]-r[1]*r[1]-r[2]*r[2])/(r[0]*r[0]+r[1]*r[1]+r[2]*r[2])**2.5 +\
q*(2*(r[0]-D)*(r[0]-D)-r[1]*r[1]-r[2]*r[2])/((r[0]-D)*(r[0]-D)+r[1]*r[1]+r[2]*r[2])**2.5 +\
F*F*(1+q) | def d2BinaryRochedx2(r, D, q, F) | Computes second derivative of the potential with respect to x.
@param r: relative radius vector (3 components)
@param D: instantaneous separation
@param q: mass ratio
@param F: synchronicity parameter | 2.230533 | 2.236046 | 0.997534 |
return -r[1]*(r[0]*r[0]+r[1]*r[1]+r[2]*r[2])**-1.5 -q*r[1]*((r[0]-D)*(r[0]-D)+r[1]*r[1]+r[2]*r[2])**-1.5 + F*F*(1+q)*r[1] | def dBinaryRochedy (r, D, q, F) | Computes a derivative of the potential with respect to y.
@param r: relative radius vector (3 components)
@param D: instantaneous separation
@param q: mass ratio
@param F: synchronicity parameter | 3.158472 | 3.211875 | 0.983373 |
return -r[2]*(r[0]*r[0]+r[1]*r[1]+r[2]*r[2])**-1.5 -q*r[2]*((r[0]-D)*(r[0]-D)+r[1]*r[1]+r[2]*r[2])**-1.5 | def dBinaryRochedz(r, D, q, F) | Computes a derivative of the potential with respect to z.
@param r: relative radius vector (3 components)
@param D: instantaneous separation
@param q: mass ratio
@param F: synchronicity parameter | 2.843805 | 2.765394 | 1.028354 |
# squared norm and norm of the radius vector
r2 = (r*r).sum()
r1 = np.sqrt(r2)
return -1./r2 - q*(r1-r[0]/r1*D)/((r[0]-D)*(r[0]-D)+r[1]*r[1]+r[2]*r[2])**1.5 - q*r[0]/r1/D/D + F*F*(1+q)*(1-r[2]*r[2]/r2)*r1 | def dBinaryRochedr (r, D, q, F) | Computes a derivative of the potential with respect to r.
@param r: relative radius vector (3 components)
@param D: instantaneous separation
@param q: mass ratio
@param F: synchronicity parameter | 5.37695 | 5.191531 | 1.035716 |
# build the list of bundle-level settings parameters; any default below can
# be overridden by passing the matching qualifier through kwargs
params = []
params += [StringParameter(qualifier='phoebe_version', value=kwargs.get('phoebe_version', __version__), description='Version of PHOEBE - change with caution')]
params += [BoolParameter(qualifier='log_history', value=kwargs.get('log_history', False), description='Whether to log history (undo/redo)')]
params += [DictParameter(qualifier='dict_filter', value=kwargs.get('dict_filter', {}), description='Filters to use when using dictionary access')]
params += [BoolParameter(qualifier='dict_set_all', value=kwargs.get('dict_set_all', False), description='Whether to set all values for dictionary access that returns more than 1 result')]
# params += [ChoiceParameter(qualifier='plotting_backend', value=kwargs.get('plotting_backend', 'mpl'), choices=['mpl', 'mpld3', 'mpl2bokeh', 'bokeh'] if conf.devel else ['mpl'], description='Default backend to use for plotting')]
# problem with try_sympy parameter: it can't be used during initialization... so this may need to be a phoebe-level setting
# params += [BoolParameter(qualifier='try_sympy', value=kwargs.get('try_sympy', True), description='Whether to use sympy if installed for constraints')]
# This could be complicated - because then we'll have to specifically watch to see when its enabled and then run all constraints - not sure if that is worth the time savings
# params += [BoolParameter(qualifier='run_constraints', value=kwargs.get('run_constraints', True), description='Whether to run_constraints whenever a parameter changes (warning: turning off will disable constraints until enabled at which point all constraints will be run)')]
return ParameterSet(params) | def settings(**kwargs) | Generally, this will automatically be added to a newly initialized
:class:`phoebe.frontend.bundle.Bundle`
:parameter **kwargs: defaults for the values of any of the parameters
:return: a :class:`phoebe.parameters.parameters.ParameterSet` of all newly
created :class:`phoebe.parameters.parameters.Parameter`s | 5.923712 | 5.787376 | 1.023557 |
@functools.wraps(fctn)
def _send_if_client(self, *args, **kwargs):
# map frontend method names to their server-side equivalents
fctn_map = {'set_quantity': 'set_value'}
b = self._bundle
if b is not None and b.is_client:
# TODO: self._filter???
# TODO: args???
method = fctn_map.get(fctn.__name__, fctn.__name__)
# send this PS's filter (or bare twig) so the server can locate the target
d = self._filter if hasattr(self, '_filter') \
else {'twig': self.twig}
d['bundleid'] = b._bundleid
# forward all keyword arguments as part of the payload
for k, v in kwargs.items():
d[k] = v
logger.info('emitting to {}({}) to server'.format(method, d))
b._socketio.emit(method, d)
if fctn.__name__ in ['run_compute', 'run_fitting']:
# then we're expecting a quick response with an added jobparam
# let's add that now
self._bundle.client_update()
else:
# not in client mode: run the wrapped function locally
return fctn(self, *args, **kwargs)
return _send_if_client | def send_if_client(fctn) | Intercept and send to the server if bundle is in client mode. | 5.145051 | 5.008632 | 1.027237 |
@functools.wraps(fctn)
def _update_if_client(self, *args, **kwargs):
b = self._bundle
# no bundle (or not a Bundle at all): nothing to sync, just run locally
if b is None or not hasattr(b, 'is_client'):
return fctn(self, *args, **kwargs)
# in client mode, sync from the server first, throttled to once per second
elif b.is_client and \
(b._last_client_update is None or
(datetime.now() - b._last_client_update).seconds > 1):
b.client_update()
return fctn(self, *args, **kwargs)
return _update_if_client | def update_if_client(fctn) | Intercept and check updates from server if bundle is in client mode. | 2.543285 | 2.298718 | 1.106393 |
return ''.join(random.SystemRandom().choice(
string.ascii_uppercase + string.ascii_lowercase)
for _ in range(n)) | def _uniqueid(n=30) | Return a unique string with length n.
:parameter int N: number of character in the uniqueid
:return: the uniqueid
:rtype: str | 2.898296 | 3.723406 | 0.778399 |
# accept either an already-parsed dict or a JSON string
if isinstance(dictionary, str):
dictionary = json.loads(dictionary, object_pairs_hook=parse_json)
# NOTE(review): pop mutates the passed dictionary -- confirm callers do not reuse it
classname = dictionary.pop('Class')
# only classes listed in _parameter_class_that_require_bundle receive the bundle
if classname not in _parameter_class_that_require_bundle:
bundle = None
# now let's do some dirty magic and get the actual class itself
# from THIS module. __name__ is a string to lookup this module
# from the sys.modules dictionary
cls = getattr(sys.modules[__name__], classname)
return cls._from_json(bundle, **dictionary) | def parameter_from_json(dictionary, bundle=None) | Load a single parameter from a JSON dictionary.
:parameter dict dictionary: the dictionary containing the parameter
information
:parameter bundle: (optional)
:return: instantiated :class:`Parameter` object | 7.730249 | 8.421145 | 0.917957 |
return OrderedDict([(k, getattr(self, k))
for k in _meta_fields_twig
if k not in ignore]) | def get_meta(self, ignore=['uniqueid']) | Dictionary of all meta-tags, with option to ignore certain tags.
See all the meta-tag properties that are shared by ALL Parameters.
If a given value is 'None', that means that it is not shared
among ALL Parameters. To see the different values among the
Parameters, you can access that attribute.
:parameter list ignore: list of keys to exclude from the returned
dictionary
:return: an ordered dictionary of tag properties | 10.492384 | 13.506797 | 0.776823 |
# apply each requested tag to every parameter in this ParameterSet
for param in self.to_list():
for k, v in kwargs.items():
# Here we'll set the attributes (_context, _qualifier, etc),
# but only when the attribute is currently unset (None)
if getattr(param, '_{}'.format(k)) is None:
setattr(param, '_{}'.format(k), v) | def set_meta(self, **kwargs) | Set the value of tags for all Parameters in this ParameterSet. | 6.839505 | 5.547543 | 1.232889 |
ret = {}
# build a plural key (e.g. 'contexts') for each filterable meta-field
for typ in _meta_fields_twig:
# these fields are not exposed as filterable tag lists
if typ in ['uniqueid', 'plugin', 'feedback', 'fitting', 'history', 'twig', 'uniquetwig']:
continue
k = '{}s'.format(typ)
ret[k] = getattr(self, k)
return ret | def tags(self) | Returns a dictionary that lists all available tags that can be used
for further filtering | 15.129073 | 14.96407 | 1.011027 |
return "@".join([getattr(self, k) for k in _meta_fields_twig if self.meta.get(k) is not None]) | def common_twig(self) | The twig that is common between all items in this ParameterSet.
This twig gives a single string which can point back to this ParameterSet
(but may include other entries as well)
see also :meth:`uniquetwig`
:return: twig (full) of this Parameter | 10.881373 | 16.796669 | 0.647829 |
# we want to set meta-fields that are shared by ALL params in the PS
for field in _meta_fields_twig:
# distinct non-None values this field takes across the PS
keys_for_this_field = set([getattr(p, field)
for p in self.to_list()
if getattr(p, field) is not None])
# a single shared value becomes the PS-level tag; otherwise it is None
if len(keys_for_this_field)==1:
setattr(self, '_'+field, list(keys_for_this_field)[0])
else:
setattr(self, '_'+field, None) | def _set_meta(self) | set the meta fields of the ParameterSet as those that are shared
by ALL parameters in the ParameterSet. For any fields that are
not | 4.870366 | 3.810383 | 1.278183 |
for_this_param = self.filter(twig, check_visible=False)
metawargs = {}
# NOTE: self.contexts is INCREDIBLY expensive
# if len(self.contexts) and 'context' not in force_levels:
if 'context' not in force_levels:
# then let's force context to be included
force_levels.append('context')
for k in force_levels:
metawargs[k] = getattr(for_this_param, k)
prev_count = len(self)
# just to fake in case no metawargs are passed at all
ps_for_this_search = []
for k in _meta_fields_twig:
metawargs[k] = getattr(for_this_param, k)
if getattr(for_this_param, k) is None:
continue
ps_for_this_search = self.filter(check_visible=False, **metawargs)
if len(ps_for_this_search) < prev_count and k not in force_levels:
prev_count = len(ps_for_this_search)
elif k not in force_levels:
# this didn't help us
metawargs[k] = None
if len(ps_for_this_search) != 1:
# TODO: after fixing regex in twig (t0type vs t0)
# change this to raise Error instead of return
return twig
# now we go in the other direction and try to remove each to make sure
# the count goes up
for k in _meta_fields_twig:
if metawargs[k] is None or k in force_levels:
continue
ps_for_this_search = self.filter(check_visible=False,
**{ki: metawargs[k]
for ki in _meta_fields_twig
if ki != k})
if len(ps_for_this_search) == 1:
# then we didn't need to use this tag
metawargs[k] = None
# and lastly, we make sure that the tag corresponding to the context
# is present
context = for_this_param.context
if hasattr(for_this_param, context):
metawargs[context] = getattr(for_this_param, context)
return "@".join([metawargs[k]
for k in _meta_fields_twig
if metawargs[k] is not None]) | def _uniquetwig(self, twig, force_levels=['qualifier']) | get the least unique twig for the parameter given by twig that
will return this single result for THIS PS
:parameter str twig: a twig that will return a single Parameter from
THIS PS
:parameter list force_levels: (optional) a list of "levels"
(eg. context) that should be included whether or not they are
necessary
:return: the unique twig
:rtype: str | 4.306929 | 4.207338 | 1.023671 |
# accept either a ParameterSet or a plain list of Parameters
lst = params.to_list() if isinstance(params, ParameterSet) else params
for param in lst:
# give each parameter a reference back to this bundle/PS
param._bundle = self
for k, v in kwargs.items():
# Here we'll set the attributes (_context, _qualifier, etc),
# but only when the attribute is currently unset (None)
if getattr(param, '_{}'.format(k)) is None:
setattr(param, '_{}'.format(k), v)
self._params.append(param)
# create any copies required by parameters that carry a copy_for spec
self._check_copy_for()
return | def _attach_params(self, params, **kwargs) | Attach a list of parameters (or ParameterSet) to this ParameterSet.
:parameter list params: list of parameters, or ParameterSet
:parameter **kwargs: attributes to set for each parameter (ie tags) | 6.044359 | 5.603226 | 1.078728 |
if not self._bundle:
return
# read the following at your own risk - I just wrote it and it still
# confuses me and baffles me that it works
for param in self.to_list():
if param.copy_for:
# copy_for tells us how to filter and what set of attributes
# needs a copy of this parameter
#
# copy_for = {'kind': ['star', 'disk', 'custombody'], 'component': '*'}
# means that this should exist for each component (since that has a wildcard) which
# has a kind in [star, disk, custombody]
#
# copy_for = {'kind': ['rv_dep'], 'component': '*', 'dataset': '*'}
# means that this should exist for each component/dataset pair with the
# rv_dep kind
attrs = [k for k,v in param.copy_for.items() if '*' in v]
# attrs is a list of the attributes for which we need a copy of
# this parameter for any pair
ps = self._bundle.filter(check_visible=False, check_default=False, force_ps=True, **param.copy_for)
metawargs = {k:v for k,v in ps.meta.items() if v is not None and k in attrs}
for k,v in param.meta.items():
if k not in ['twig', 'uniquetwig'] and k not in attrs:
metawargs[k] = v
# metawargs is a list of the shared tags that will be used to filter for
# existing parameters so that we know whether they already exist or
# still need to be created
# logger.debug("_check_copy_for {}: attrs={}".format(param.twig, attrs))
# visit every combination of values taken by the wildcarded attrs
for attrvalues in itertools.product(*(getattr(ps, '{}s'.format(attr)) for attr in attrs)):
# logger.debug("_check_copy_for {}: attrvalues={}".format(param.twig, attrvalues))
# for each attrs[i] (ie component), attrvalues[i] (star01)
# we need to look for this parameter, and if it does not exist
# then create it by copying param
for attr, attrvalue in zip(attrs, attrvalues):
#if attrvalue=='_default' and not getattr(param, attr):
# print "SKIPPING", attr, attrvalue
# continue
metawargs[attr] = attrvalue
# logger.debug("_check_copy_for {}: metawargs={}".format(param.twig, metawargs))
if not len(self._bundle.filter(check_visible=False, **metawargs)):
# then we need to make a new copy
logger.debug("copying '{}' parameter for {}".format(param.qualifier, {attr: attrvalue for attr, attrvalue in zip(attrs, attrvalues)}))
newparam = param.copy()
for attr, attrvalue in zip(attrs, attrvalues):
setattr(newparam, '_{}'.format(attr), attrvalue)
# the copy is a concrete parameter, not a template to copy again
newparam._copy_for = False
if newparam._visible_if and newparam._visible_if.lower() == 'false':
newparam._visible_if = None
newparam._bundle = self._bundle
self._params.append(newparam)
# Now we need to handle copying constraints. This can't be
# in the previous if statement because the parameters can be
# copied before constraints are ever attached.
if hasattr(param, 'is_constraint') and param.is_constraint:
param_constraint = param.is_constraint
copied_param = self._bundle.get_parameter(check_visible=False, check_default=False, **metawargs)
if not copied_param.is_constraint:
constraint_kwargs = param_constraint.constraint_kwargs.copy()
for attr, attrvalue in zip(attrs, attrvalues):
if attr in constraint_kwargs.keys():
constraint_kwargs[attr] = attrvalue
logger.debug("copying constraint '{}' parameter for {}".format(param_constraint.constraint_func, {attr: attrvalue for attr, attrvalue in zip(attrs, attrvalues)}))
self.add_constraint(func=param_constraint.constraint_func, **constraint_kwargs)
return | def _check_copy_for(self) | Check the value of copy_for and make appropriate copies. | 4.69283 | 4.64307 | 1.010717 |
if not isinstance(label, str):
label = str(label)
if label.lower() in _forbidden_labels:
raise ValueError("'{}' is forbidden to be used as a label"
.format(label))
if not re.match("^[a-z,A-Z,0-9,_]*$", label):
raise ValueError("label '{}' is forbidden - only alphabetic, numeric, and '_' characters are allowed in labels".format(label))
if len(self.filter(twig=label, check_visible=False)):
raise ValueError("label '{}' is already in use".format(label))
if label[0] in ['_']:
raise ValueError("first character of label is a forbidden character") | def _check_label(self, label) | Check to see if the label is allowed. | 4.346813 | 4.23489 | 1.026429 |
# allow '~' in user-supplied paths
filename = os.path.expanduser(filename)
# use a context manager so the handle is closed even if parsing raises
# (the original leaked the open file on error)
with open(filename, 'r') as f:
    if _can_ujson:
        # NOTE: this will not parse the unicode. Bundle.open always calls
        # json instead of ujson for this reason.
        data = ujson.load(f)
    else:
        data = json.load(f, object_pairs_hook=parse_json)
return cls(data) | def open(cls, filename) | Open a ParameterSet from a JSON-formatted file.
This is a constructor so should be called as:
>>> b = ParameterSet.open('test.json')
:parameter str filename: relative or full path to the file
:return: instantiated :class:`ParameterSet` object | 5.209931 | 6.485722 | 0.803292 |
# allow '~' in user-supplied paths
filename = os.path.expanduser(filename)
# use a context manager so the handle is closed even if serialization raises
# (the original leaked the open file on error)
with open(filename, 'w') as f:
    if compact:
        if _can_ujson:
            ujson.dump(self.to_json(incl_uniqueid=incl_uniqueid), f,
                       sort_keys=False, indent=0)
        else:
            logger.warning("for faster compact saving, install ujson")
            json.dump(self.to_json(incl_uniqueid=incl_uniqueid), f,
                      sort_keys=False, indent=0)
    else:
        # non-compact: stable key order and explicit separators for readability
        json.dump(self.to_json(incl_uniqueid=incl_uniqueid), f,
                  sort_keys=True, indent=0, separators=(',', ': '))
return filename | def save(self, filename, incl_uniqueid=False, compact=False) | Save the ParameterSet to a JSON-formatted ASCII file
:parameter str filename: relative or fullpath to the file
:parameter bool incl_uniqueid: whether to including uniqueids in the
file (only needed if its necessary to maintain the uniqueids when
reloading)
:parameter bool compact: whether to use compact file-formatting (maybe
be quicker to save/load, but not as easily readable)
:return: filename
:rtype: str | 2.342294 | 2.40492 | 0.973959 |
# the web UI can only point at a bundle being served (client mode)
if self._bundle is None or not self._bundle.is_client:
raise ValueError("bundle must be in client mode")
# narrow first if extra filter kwargs were provided
if len(kwargs):
return self.filter(**kwargs).ui(client=client)
# encode this PS's filter as the URL query string
querystr = "&".join(["{}={}".format(k, v)
for k, v in self._filter.items()])
# print self._filter
url = "{}/{}?{}".format(client, self._bundle._bundleid, querystr)
logger.info("opening {} in browser".format(url))
webbrowser.open(url)
return url | def ui(self, client='http://localhost:4200', **kwargs) | [NOT IMPLEMENTED]
The bundle must be in client mode in order to open the web-interface.
See :meth:`Bundle:as_client` to switch to client mode.
:parameter str client: URL of the running client which must be connected
to the same server as the bundle
:return: URL of the parameterset of this bundle in the client (will also
attempt to open webbrowser)
:rtype: str | 4.353191 | 3.587796 | 1.213333 |
if kwargs:
return self.filter(**kwargs).to_list()
return self._params | def to_list(self, **kwargs) | Convert the :class:`ParameterSet` to a list of :class:`Parameter`s
:return: list of class:`Parameter` objects | 8.384675 | 9.655508 | 0.868383 |
if kwargs:
return self.filter(**kwargs).to_list_of_dicts()
return [param.to_dict() for param in self._params] | def to_list_of_dicts(self, **kwargs) | Convert the :class:`ParameterSet` to a list of the dictionary representation
of each :class:`Parameter`
:return: list of dicts | 4.117989 | 4.09875 | 1.004694 |
if kwargs:
return self.filter(**kwargs).to_flat_dict()
return {param.uniquetwig: param for param in self._params} | def to_flat_dict(self, **kwargs) | Convert the :class:`ParameterSet` to a flat dictionary, with keys being
uniquetwigs to access the parameter and values being the :class:`Parameter`
objects themselves.
:return: dict of :class:`Parameter`s | 12.750867 | 5.720257 | 2.229072 |
# narrow first when filter kwargs are provided
if kwargs:
return self.filter(**kwargs).to_dict(field=field)
# explicit field: key the dict on that field's unique (non-None) values
if field is not None:
keys_for_this_field = set([getattr(p, field)
for p in self.to_list()
if getattr(p, field) is not None])
return {k: self.filter(check_visible=False, **{field: k}) for k in keys_for_this_field}
# we want to find the first level (from the bottom) in which filtering
# further would shorten the list (ie there are more than one unique
# item for that field)
# so let's go through the fields in reverse (context up to (but not
# including) qualifier)
for field in reversed(_meta_fields_twig[1:]):
# then get the unique keys in this field among the params in this
# PS
keys_for_this_field = set([getattr(p, field)
for p in self.to_list()
if getattr(p, field) is not None])
# and if there are more than one, then return a dictionary with
# those keys and the ParameterSet of the matching items
if len(keys_for_this_field) > 1:
self._next_field = field
return {k: self.filter(check_visible=False, **{field: k})
for k in keys_for_this_field}
# if we've survived, then we're at the bottom and only have times or
# qualifier left
if self.context in ['hierarchy']:
self._next_field = 'qualifier'
return {param.qualifier: param for param in self._params}
else:
self._next_field = 'time'
return {param.time: param for param in self._params} | def to_dict(self, field=None, **kwargs) | Convert the ParameterSet to a structured (nested) dictionary
to allow traversing the structure from the bottom up
:parameter str field: (optional) build the dictionary with keys at
a given level/field. Can be any of the keys in
:func:`meta`. If None, the keys will be the lowest
level in which Parameters have different values.
:return: dict of :class:`Parameter`s or :class:`ParameterSet`s | 4.754591 | 4.47779 | 1.061816 |
twig = key
method = None
# split the twig on any delimiter; the first token may name a setter attribute
twigsplit = re.findall(r"[\w']+", twig)
if twigsplit[0] == 'value':
twig = '@'.join(twigsplit[1:])
method = 'set_value'
elif twigsplit[0] == 'quantity':
twig = '@'.join(twigsplit[1:])
method = 'set_quantity'
elif twigsplit[0] in ['unit', 'default_unit']:
twig = '@'.join(twigsplit[1:])
method = 'set_default_unit'
elif twigsplit[0] in ['timederiv']:
twig = '@'.join(twigsplit[1:])
method = 'set_timederiv'
elif twigsplit[0] in ['description']:
# descriptions are read-only via dictionary access
raise KeyError("cannot set {} of {}".format(twigsplit[0], '@'.join(twigsplit[1:])))
# honor the 'dict_set_all' setting: apply to every match instead of one
if self._bundle is not None and self._bundle.get_setting('dict_set_all').get_value() and len(self.filter(twig=twig, **kwargs)) > 1:
# then we need to loop through all the returned parameters and call set on them
for param in self.filter(twig=twig, **kwargs).to_list():
self.set('{}@{}'.format(method, param.twig) if method is not None else param.twig, value)
else:
if method is None:
return self.set_value(twig=twig, value=value, **kwargs)
else:
param = self.get_parameter(twig=twig, **kwargs)
return getattr(param, method)(value) | def set(self, key, value, **kwargs) | Set the value of a Parameter in the ParameterSet.
If :func:`get` would retrieve a Parameter, this will set the
value of that parameter.
Or you can provide 'value@...' or 'default_unit@...', etc
to specify what attribute to set.
:parameter str key: the twig (called key here to be analagous
to a normal dict)
:parameter value: value to set
:parameter **kwargs: other filter parameters (must result in
returning a single :class:`Parameter`)
:return: the value of the :class:`Parameter` after setting the
new value (including converting units if applicable) | 3.23939 | 3.052106 | 1.061362 |
lst = []
# serialize context-by-context so the output ordering is deterministic
for context in _contexts:
lst += [v.to_json(incl_uniqueid=incl_uniqueid)
for v in self.filter(context=context,
check_visible=False,
check_default=False).to_list()]
return lst | def to_json(self, incl_uniqueid=False) | Convert the ParameterSet to a json-compatible dictionary
:return: list of dictionaries | 6.184937 | 6.24769 | 0.989956 |
# delegate to filter_or_get, forcing a ParameterSet return (force_ps)
kwargs['check_visible'] = check_visible
kwargs['check_default'] = check_default
kwargs['force_ps'] = True
return self.filter_or_get(twig=twig, **kwargs) | def filter(self, twig=None, check_visible=True, check_default=True, **kwargs) | Filter the ParameterSet based on the meta-tags of the Parameters
and return another ParameterSet.
Because another ParameterSet is returned, these filter calls are
chainable.
>>> b.filter(context='component').filter(component='starA')
:parameter str twig: (optional) the search twig - essentially a single
string with any delimiter (ie '@') that will be parsed
into any of the meta-tags. Example: instead of
b.filter(context='component', component='starA'), you
could do b.filter('starA@component').
:parameter bool check_visible: whether to hide invisible
parameters. These are usually parameters that do not
play a role unless the value of another parameter meets
some condition.
:parameter bool check_default: whether to exclude parameters which
have a _default tag (these are parameters which solely exist
to provide defaults for when new parameters or datasets are
added and the parameter needs to be copied appropriately).
Defaults to True.
:parameter **kwargs: meta-tags to search (ie. 'context', 'component',
'model', etc). See :func:`meta` for all possible options.
:return: the resulting :class:`ParameterSet` | 3.700725 | 5.797086 | 0.638377 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.