code string | signature string | docstring string | loss_without_docstring float64 | loss_with_docstring float64 | factor float64 |
|---|---|---|---|---|---|
# TODO: check to make sure resp['meta']['bundleid']==bundleid ?
# TODO: handle added parameters
# TODO: handle removed (isDeleted) parameters
for item in resp['data']:
if item['id'] in self.uniqueids:
# then we're updating something in the parameter (or deleting)
param = self.get_parameter(uniqueid=item['id'])
for attr, value in item['attributes'].items():
if hasattr(param, "_{}".format(attr)):
logger.info("updates from server: setting {}@{}={}".
format(attr, param.twig, value))
setattr(param, "_{}".format(attr), value)
else:
self._attach_param_from_server(item) | def _on_socket_push_updates(self, resp) | [NOT IMPLEMENTED] | 7.732287 | 7.565006 | 1.022112 |
if isinstance(item, list):
for itemi in item:
self._attach_param_from_server(itemi)
else:
# then we need to add a new parameter
d = item['attributes']
d['uniqueid'] = item['id']
param = parameters.parameter_from_json(d, bundle=self)
metawargs = {}
self._attach_params([param], **metawargs) | def _attach_param_from_server(self, item) | [NOT IMPLEMENTED] | 6.398202 | 6.246858 | 1.024227 |
if as_client:
if not _can_client:
raise ImportError("dependencies to support client mode not met - see docs")
server_running = self._test_server(server=server,
start_if_fail=True)
if not server_running:
raise ValueError("server {} is not running".format(server))
server_split = server.split(':')
host = ':'.join(server_split[:-1])
port = int(float(server_split[-1] if len(server_split) else 8000))
self._socketio = SocketIO(host, port, BaseNamespace)
self._socketio.on('connect', self._on_socket_connect)
self._socketio.on('disconnect', self._on_socket_disconnect)
self._socketio.on('push updates', self._on_socket_push_updates)
if not bundleid:
upload_url = "{}/upload".format(server)
logger.info("uploading bundle to server {}".format(upload_url))
data = json.dumps(self.to_json(incl_uniqueid=True))
r = requests.post(upload_url, data=data, timeout=5)
bundleid = r.json()['meta']['bundleid']
self._socketio.emit('subscribe bundle', {'bundleid': bundleid})
self._bundleid = bundleid
self._is_client = server
logger.info("connected as client to server at {}:{}".
format(host, port))
else:
logger.warning("This bundle is now permanently detached from the instance\
on the server and will not receive future updates. To start a client\
in sync with the version on the server or other clients currently \
subscribed, you must instantiate a new bundle with Bundle.from_server.")
if hasattr(self, '_socketIO') and self._socketIO is not None:
self._socketio.emit('unsubscribe bundle', {'bundleid': bundleid})
self._socketIO.disconnect()
self._socketIO = None
self._bundleid = None
self._is_client = False | def as_client(self, as_client=True, server='http://localhost:5555',
bundleid=None) | [NOT IMPLEMENTED] | 3.950258 | 3.930831 | 1.004942 |
if not self.is_client:
raise ValueError("Bundle is not in client mode, cannot update")
logger.info("updating client...")
# wait briefly to pickup any missed messages, which should then fire
# the corresponding callbacks and update the bundle
self._socketio.wait(seconds=1)
self._last_client_update = datetime.now() | def client_update(self) | [NOT IMPLEMENTED] | 11.104641 | 10.921695 | 1.016751 |
kwargs['context'] = context
params = len(getattr(self.filter(check_visible=False,**kwargs), '{}s'.format(context)))
return "{}{:02d}".format(base, params+1) | def _default_label(self, base, context, **kwargs) | Determine a default label given a base label and the passed kwargs
this simply counts the current number of matches on metawargs and
appends that number to the base
:parameter str base: the base string for the label
:parameter str context: name of the context (where the label is going)
:parameter **kwargs: the kwargs to run a filter on. The returned label
will be "{}{:02d}".format(base, number_of_results_with_kwargs+1)
:return: label | 13.504495 | 9.175381 | 1.471818 |
if twig is not None:
kwargs['twig'] = twig
kwargs['context'] = 'setting'
return self.filter_or_get(**kwargs) | def get_setting(self, twig=None, **kwargs) | Filter in the 'setting' context
:parameter str twig: the twig used for filtering
:parameter **kwargs: any other tags to do the filter (except tag or
context)
:return: :class:`phoebe.parameters.parameters.ParameterSet` | 4.830521 | 5.047553 | 0.957003 |
if not self.history_enabled:
return
param = HistoryParameter(self, redo_func, redo_kwargs,
undo_func, undo_kwargs)
metawargs = {'context': 'history',
'history': kwargs.get('history', self._default_label('hist', **{'context': 'history'}))}
self._check_label(metawargs['history'])
self._attach_params([param], **metawargs) | def _add_history(self, redo_func, redo_kwargs, undo_func, undo_kwargs,
**kwargs) | Add a new log (undo/redoable) to this history context
:parameter str redo_func: function to redo the action, must be a
method of :class:`Bundle`
:parameter dict redo_kwargs: kwargs to pass to the redo_func. Each
item must be serializable (float or str, not objects)
:parameter str undo_func: function to undo the action, must be a
method of :class:`Bundle`
:parameter dict undo_kwargs: kwargs to pass to the undo_func. Each
item must be serializable (float or str, not objects)
:parameter str history: label of the history parameter
:raises ValueError: if the label for this history item is forbidden or
already exists | 7.500757 | 6.889145 | 1.088779 |
ps = self.filter(context='history')
# if not len(ps):
# raise ValueError("no history recorded")
if i is not None:
return ps.to_list()[i]
else:
return ps | def get_history(self, i=None) | Get a history item by index.
You can toggle whether history is recorded using
* :meth:`enable_history`
* :meth:`disable_history`
:parameter int i: integer for indexing (can be positive or
negative). If i is None or not provided, the entire list
of history items will be returned
:return: :class:`phoebe.parameters.parameters.Parameter` if i is
an int, or :class:`phoebe.parameters.parameters.ParameterSet` if i
is not provided
:raises ValueError: if no history items have been recorded. | 7.336706 | 6.531147 | 1.123341 |
if i is None:
self.remove_parameters_all(context='history')
else:
param = self.get_history(i=i)
self.remove_parameter(uniqueid=param.uniqueid) | def remove_history(self, i=None) | Remove a history item from the bundle by index.
You can toggle whether history is recorded using
* :meth:`enable_history`
* :meth:`disable_history`
:parameter int i: integer for indexing (can be positive or
negative). If i is None or not provided, the entire list
of history items will be removed
:raises ValueError: if no history items have been recorded. | 6.30842 | 7.474507 | 0.843991 |
_history_enabled = self.history_enabled
param = self.get_history(i)
self.disable_history()
param.undo()
# TODO: do we really want to remove this? then what's the point of redo?
self.remove_parameter(uniqueid=param.uniqueid)
if _history_enabled:
self.enable_history() | def undo(self, i=-1) | Undo an item in the history logs
:parameter int i: integer for indexing (can be positive or
negative). Defaults to -1 if not provided (the latest
recorded history item)
:raises ValueError: if no history items have been recorded | 6.971136 | 7.212523 | 0.966532 |
if twig is not None:
kwargs['twig'] = twig
kwargs['context'] = 'system'
return self.filter(**kwargs) | def get_system(self, twig=None, **kwargs) | Filter in the 'system' context
:parameter str twig: twig to use for filtering
:parameter **kwargs: any other tags to do the filter
(except twig or context)
:return: :class:`phoebe.parameters.parameters.Parameter` or
:class:`phoebe.parameters.parameters.ParameterSet` | 4.125021 | 4.131818 | 0.998355 |
func = _get_add_func(_feature, kind)
if kwargs.get('feature', False) is None:
# then we want to apply the default below, so let's pop for now
_ = kwargs.pop('feature')
kwargs.setdefault('feature',
self._default_label(func.func_name,
**{'context': 'feature',
'kind': func.func_name}))
self._check_label(kwargs['feature'])
if component is None:
stars = self.hierarchy.get_meshables()
if len(stars) == 1:
component = stars[0]
else:
raise ValueError("must provide component")
if component not in self.components:
raise ValueError('component not recognized')
component_kind = self.filter(component=component, context='component').kind
if not _feature._component_allowed_for_feature(func.func_name, component_kind):
raise ValueError("{} does not support component with kind {}".format(func.func_name, component_kind))
params, constraints = func(**kwargs)
metawargs = {'context': 'feature',
'component': component,
'feature': kwargs['feature'],
'kind': func.func_name}
self._attach_params(params, **metawargs)
redo_kwargs = deepcopy(kwargs)
redo_kwargs['func'] = func.func_name
self._add_history(redo_func='add_feature',
redo_kwargs=redo_kwargs,
undo_func='remove_feature',
undo_kwargs={'feature': kwargs['feature']})
for constraint in constraints:
self.add_constraint(*constraint)
#return params
# NOTE: we need to call get_ in order to make sure all metawargs are applied
return self.get_feature(**metawargs) | def add_feature(self, kind, component=None, **kwargs) | Add a new feature (spot, etc) to a component in the system. If not
provided, 'feature' (the name of the new feature) will be created
for you and can be accessed by the 'feature' attribute of the returned
ParameterSet
>>> b.add_feature(feature.spot, component='mystar')
or
>>> b.add_feature('spot', 'mystar', colat=90)
Available kinds include:
* :func:`phoebe.parameters.feature.spot`
:parameter kind: function to call that returns a
ParameterSet or list of parameters. This must either be
a callable function that accepts nothing but default values,
or the name of a function (as a string) that can be found in the
:mod:`phoebe.parameters.feature` module (ie. 'spot')
:type kind: str or callable
:parameter str component: name of the component to attach the feature
:parameter str feature: (optional) name of the newly-created feature
:parameter **kwargs: default value for any of the newly-created
parameters
:return: :class:`phoebe.parameters.parameters.ParameterSet` of
all parameters that have been added
:raises NotImplementedError: if required constraint is not implemented | 4.841142 | 4.993235 | 0.96954 |
if feature is not None:
kwargs['feature'] = feature
kwargs['context'] = 'feature'
return self.filter(**kwargs) | def get_feature(self, feature=None, **kwargs) | Filter in the 'proerty' context
:parameter str feature: name of the feature (optional)
:parameter **kwargs: any other tags to do the filter
(except component or context)
:return: :class:`phoebe.parameters.parameters.ParameterSet` | 5.02972 | 5.295334 | 0.94984 |
self._kwargs_checks(kwargs)
# Let's avoid deleting ALL features from the matching contexts
if feature is None and not len(kwargs.items()):
raise ValueError("must provide some value to filter for features")
kwargs['feature'] = feature
# Let's avoid the possibility of deleting a single parameter
kwargs['qualifier'] = None
# Let's also avoid the possibility of accidentally deleting system
# parameters, etc
kwargs.setdefault('context', ['feature'])
self.remove_parameters_all(**kwargs)
self._add_history(redo_func='remove_feature',
redo_kwargs=kwargs,
undo_func=None,
undo_kwargs={})
return | def remove_feature(self, feature=None, **kwargs) | [NOT IMPLEMENTED]
Remove a 'feature' from the bundle
:raises NotImplementedError: because this isn't implemented yet | 8.998795 | 9.177519 | 0.980526 |
# TODO: raise error if old_feature not found?
self._check_label(new_feature)
self._rename_label('feature', old_feature, new_feature) | def rename_feature(self, old_feature, new_feature) | Change the label of a feature attached to the Bundle
:parameter str old_feature: the current name of the feature
(must exist)
:parameter str new_feature: the desired new name of the feature
(must not exist)
:return: None
:raises ValueError: if the new_feature is forbidden | 6.005316 | 5.968893 | 1.006102 |
if component is None:
if len(self.hierarchy.get_stars())==1:
component = self.hierarchy.get_stars()[0]
else:
raise ValueError("must provide component for spot")
kwargs.setdefault('component', component)
kwargs.setdefault('feature', feature)
return self.add_feature('spot', **kwargs) | def add_spot(self, component=None, feature=None, **kwargs) | Shortcut to :meth:`add_feature` but with kind='spot' | 3.649571 | 3.313789 | 1.101329 |
kwargs.setdefault('kind', 'spot')
return self.get_feature(feature, **kwargs) | def get_spot(self, feature=None, **kwargs) | Shortcut to :meth:`get_feature` but with kind='spot' | 4.99134 | 2.384169 | 2.093534 |
kwargs.setdefault('kind', 'spot')
return self.remove_feature(feature, **kwargs) | def remove_spot(self, feature=None, **kwargs) | [NOT IMPLEMENTED]
Shortcut to :meth:`remove_feature` but with kind='spot' | 4.637294 | 3.000345 | 1.545587 |
func = _get_add_func(component, kind)
if kwargs.get('component', False) is None:
# then we want to apply the default below, so let's pop for now
_ = kwargs.pop('component')
kwargs.setdefault('component',
self._default_label(func.func_name,
**{'context': 'component',
'kind': func.func_name}))
if kwargs.pop('check_label', True):
self._check_label(kwargs['component'])
params, constraints = func(**kwargs)
metawargs = {'context': 'component',
'component': kwargs['component'],
'kind': func.func_name}
self._attach_params(params, **metawargs)
redo_kwargs = deepcopy(kwargs)
redo_kwargs['func'] = func.func_name
self._add_history(redo_func='add_component',
redo_kwargs=redo_kwargs,
undo_func='remove_component',
undo_kwargs={'component': kwargs['component']})
for constraint in constraints:
self.add_constraint(*constraint)
# since we've already processed (so that we can get the new qualifiers),
# we'll only raise a warning
self._kwargs_checks(kwargs, warning_only=True)
# return params
return self.get_component(**metawargs) | def add_component(self, kind, **kwargs) | Add a new component (star or orbit) to the system. If not provided,
'component' (the name of the new star or orbit) will be created for
you and can be accessed by the 'component' attribute of the returned
ParameterSet.
>>> b.add_component(component.star)
or
>>> b.add_component('orbit', period=2.5)
Available kinds include:
* :func:`phoebe.parameters.component.star`
* :func:`phoebe.parameters.component.orbit`
:parameter kind: function to call that returns a
ParameterSet or list of parameters. This must either be
a callable function that accepts nothing but default
values, or the name of a function (as a string) that can
be found in the :mod:`phoebe.parameters.component` module
(ie. 'star', 'orbit')
:type kind: str or callable
:parameter str component: (optional) name of the newly-created
component
:parameter **kwargs: default values for any of the newly-created
parameters
:return: :class:`phoebe.parameters.parameters.ParameterSet` of
all parameters that have been added
:raises NotImplementedError: if required constraint is not implemented | 5.63756 | 5.647592 | 0.998224 |
if component is not None:
kwargs['component'] = component
kwargs['context'] = 'component'
return self.filter(**kwargs) | def get_component(self, component=None, **kwargs) | Filter in the 'component' context
:parameter str component: name of the component (optional)
:parameter **kwargs: any other tags to do the filter
(except component or context)
:return: :class:`phoebe.parameters.parameters.ParameterSet` | 4.895505 | 4.450275 | 1.100045 |
# NOTE: run_checks will check if an entry is in the hierarchy but has no parameters
kwargs['component'] = component
# NOTE: we do not remove from 'model' by default
kwargs['context'] = ['component', 'constraint', 'dataset', 'compute']
self.remove_parameters_all(**kwargs) | def remove_component(self, component, **kwargs) | [NOT IMPLEMENTED]
Remove a 'component' from the bundle
:raises NotImplementedError: because this isn't implemented yet | 18.875694 | 21.945759 | 0.860107 |
# TODO: raise error if old_component not found?
# even though _rename_tag will call _check_label again, we should
# do it first so that we can raise any errors BEFORE we start messing
# with the hierarchy
self._check_label(new_component)
# changing hierarchy must be called first since it needs to access
# the kind of old_component
if len([c for c in self.components if new_component in c]):
logger.warning("hierarchy may not update correctly with new component")
self.hierarchy.rename_component(old_component, new_component)
self._rename_label('component', old_component, new_component)
self._handle_dataset_selectparams() | def rename_component(self, old_component, new_component) | Change the label of a component attached to the Bundle
:parameter str old_component: the current name of the component
(must exist)
:parameter str new_component: the desired new name of the component
(must not exist)
:return: None
:raises ValueError: if the new_component is forbidden | 9.255911 | 9.515734 | 0.972695 |
kwargs.setdefault('component', component)
return self.add_component('orbit', **kwargs) | def add_orbit(self, component=None, **kwargs) | Shortcut to :meth:`add_component` but with kind='orbit' | 4.823622 | 3.570292 | 1.351044 |
kwargs.setdefault('kind', 'orbit')
return self.get_component(component, **kwargs) | def get_orbit(self, component=None, **kwargs) | Shortcut to :meth:`get_component` but with kind='star' | 4.877348 | 2.912368 | 1.674702 |
kwargs.setdefault('kind', 'orbit')
return self.remove_component(component, **kwargs) | def remove_orbit(self, component=None, **kwargs) | [NOT IMPLEMENTED]
Shortcut to :meth:`remove_component` but with kind='star' | 4.324577 | 3.596582 | 1.202413 |
kwargs.setdefault('component', component)
return self.add_component('star', **kwargs) | def add_star(self, component=None, **kwargs) | Shortcut to :meth:`add_component` but with kind='star' | 4.978135 | 3.662079 | 1.359374 |
kwargs.setdefault('kind', 'star')
return self.get_component(component, **kwargs) | def get_star(self, component=None, **kwargs) | Shortcut to :meth:`get_component` but with kind='star' | 4.633916 | 2.41396 | 1.919632 |
kwargs.setdefault('kind', 'star')
return self.remove_component(component, **kwargs) | def remove_star(self, component=None, **kwargs) | [NOT IMPLEMENTED]
Shortcut to :meth:`remove_component` but with kind='star' | 4.677809 | 2.954049 | 1.583524 |
kwargs.setdefault('component', component)
return self.add_component('envelope', **kwargs) | def add_envelope(self, component=None, **kwargs) | [NOT SUPPORTED]
Shortcut to :meth:`add_component` but with kind='envelope' | 4.655909 | 4.08298 | 1.140321 |
kwargs.setdefault('kind', 'envelope')
return self.get_component(component, **kwargs) | def get_envelope(self, component=None, **kwargs) | [NOT SUPPORTED]
Shortcut to :meth:`get_component` but with kind='envelope' | 4.928976 | 2.87895 | 1.712074 |
kwargs.setdefault('kind', 'envelope')
return self.remove_component(component, **kwargs) | def remove_envelope(self, component=None, **kwargs) | [NOT SUPPORTED]
[NOT IMPLEMENTED]
Shortcut to :meth:`remove_component` but with kind='envelope' | 4.647039 | 3.213755 | 1.445984 |
if component is None:
component = self.hierarchy.get_top()
if kwargs.get('shift', False):
raise ValueError("support for phshift was removed as of 2.1. Please pass t0 instead.")
ret = {}
ps = self.filter(component=component, context='component')
if ps.kind in ['orbit']:
ret['period'] = ps.get_value(qualifier='period', unit=u.d)
if isinstance(t0, str):
ret['t0'] = ps.get_value(qualifier=t0, unit=u.d)
elif isinstance(t0, float) or isinstance(t0, int):
ret['t0'] = t0
else:
raise ValueError("t0 must be string (qualifier) or float")
ret['dpdt'] = ps.get_value(qualifier='dpdt', unit=u.d/u.d)
elif ps.kind in ['star']:
# TODO: consider renaming period to prot
ret['period'] = ps.get_value(qualifier='period', unit=u.d)
else:
raise NotImplementedError
for k,v in kwargs.items():
ret[k] = v
return ret | def get_ephemeris(self, component=None, t0='t0_supconj', **kwargs) | Get the ephemeris of a component (star or orbit)
:parameter str component: name of the component. If not given,
component will default to the top-most level of the current
hierarchy
:parameter t0: qualifier of the parameter to be used for t0
:type t0: str
:parameter **kwargs: any value passed through kwargs will override the
ephemeris retrieved by component (ie period, t0, dpdt).
Note: be careful about units - input values will not be converted.
:return: dictionary containing period, t0 (t0_supconj if orbit),
dpdt (as applicable)
:rtype: dict | 3.737424 | 3.157461 | 1.18368 |
if kwargs.get('shift', False):
raise ValueError("support for phshift was removed as of 2.1. Please pass t0 instead.")
ephem = self.get_ephemeris(component=component, t0=t0, **kwargs)
if isinstance(time, list):
time = np.array(time)
elif isinstance(time, Parameter):
time = time.get_value(u.d)
elif isinstance(time, str):
time = self.get_value(time, u.d)
t0 = ephem.get('t0', 0.0)
period = ephem.get('period', 1.0)
dpdt = ephem.get('dpdt', 0.0)
if dpdt != 0:
phase = np.mod(1./dpdt * np.log(period + dpdt*(time-t0)), 1.0)
else:
phase = np.mod((time-t0)/period, 1.0)
if isinstance(phase, float):
if phase > 0.5:
phase -= 1
else:
# then should be an array
phase[phase > 0.5] -= 1
return phase | def to_phase(self, time, component=None, t0='t0_supconj', **kwargs) | Get the phase(s) of a time(s) for a given ephemeris
:parameter time: time to convert to phases (should be in same system
as t0s)
:type time: float, list, or array
:parameter t0: qualifier of the parameter to be used for t0
:type t0: str
:parameter str component: component for which to get the ephemeris.
If not given, component will default to the top-most level of the
current hierarchy
:parameter **kwargs: any value passed through kwargs will override the
ephemeris retrieved by component (ie period, t0, dpdt).
Note: be careful about units - input values will not be converted.
:return: phase (float) or phases (array) | 3.182177 | 2.858218 | 1.113343 |
if kwargs.get('shift', False):
raise ValueError("support for phshift was removed as of 2.1. Please pass t0 instead.")
ephem = self.get_ephemeris(component=component, t0=t0, **kwargs)
if isinstance(phase, list):
phase = np.array(phase)
t0 = ephem.get('t0', 0.0)
period = ephem.get('period', 1.0)
dpdt = ephem.get('dpdt', 0.0)
# if changing this, also see parameters.constraint.time_ephem
if dpdt != 0:
time = t0 + 1./dpdt*(np.exp(dpdt*(phase))-period)
else:
time = t0 + (phase)*period
return time | def to_time(self, phase, component=None, t0='t0_supconj', **kwargs) | Get the time(s) of a phase(s) for a given ephemeris
:parameter phase: phase to convert to times (should be in
same system as t0s)
:type phase: float, list, or array
` :parameter str component: component for which to get the ephemeris.
If not given, component will default to the top-most level of the
current hierarchy
:parameter t0: qualifier of the parameter to be used for t0
:type t0: str
:parameter **kwargs: any value passed through kwargs will override the
ephemeris retrieved by component (ie period, t0, dpdt).
Note: be careful about units - input values will not be converted.
:return: time (float) or times (array) | 5.094913 | 4.475576 | 1.138382 |
if dataset is not None:
kwargs['dataset'] = dataset
kwargs['context'] = 'dataset'
if 'kind' in kwargs.keys():
# since we switched how dataset kinds are named, let's just
# automatically handle switching to lowercase
kwargs['kind'] = kwargs['kind'].lower()
return self.filter(**kwargs) | def get_dataset(self, dataset=None, **kwargs) | Filter in the 'dataset' context
:parameter str dataset: name of the dataset (optional)
:parameter **kwargs: any other tags to do the filter
(except dataset or context)
:return: :class:`phoebe.parameters.parameters.ParameterSet` | 7.798203 | 7.935127 | 0.982745 |
self._kwargs_checks(kwargs)
# Let's avoid deleting ALL parameters from the matching contexts
if dataset is None and not len(kwargs.items()):
raise ValueError("must provide some value to filter for datasets")
# let's handle deps if kind was passed
kind = kwargs.get('kind', None)
if kind is not None:
if isinstance(kind, str):
kind = [kind]
kind_deps = []
for kind_i in kind:
dep = '{}_dep'.format(kind_i)
if dep not in kind:
kind_deps.append(dep)
kind = kind + kind_deps
kwargs['kind'] = kind
if dataset is None:
# then let's find the list of datasets that match the filter,
# we'll then use dataset to do the removing. This avoids leaving
# pararameters behind that don't specifically match the filter
# (ie if kind is passed as 'rv' we still want to remove parameters
# with datasets that are RVs but belong to a different kind in
# another context like compute)
dataset = self.filter(**kwargs).datasets
kwargs['kind'] = None
kwargs['dataset'] = dataset
# Let's avoid the possibility of deleting a single parameter
kwargs['qualifier'] = None
# Let's also avoid the possibility of accidentally deleting system
# parameters, etc
kwargs.setdefault('context', ['dataset', 'model', 'constraint', 'compute'])
# ps = self.filter(**kwargs)
# logger.info('removing {} parameters (this is not undoable)'.\
# format(len(ps)))
# print "*** kwargs", kwargs, len(ps)
self.remove_parameters_all(**kwargs)
# not really sure why we need to call this twice, but it seems to do
# the trick
self.remove_parameters_all(**kwargs)
self._handle_dataset_selectparams()
# TODO: check to make sure that trying to undo this
# will raise an error saying this is not undo-able
self._add_history(redo_func='remove_dataset',
redo_kwargs={'dataset': dataset},
undo_func=None,
undo_kwargs={})
return | def remove_dataset(self, dataset=None, **kwargs) | Remove a dataset from the Bundle.
This removes all matching Parameters from the dataset, model, and
constraint contexts (by default if the context tag is not provided).
You must provide some sort of filter or this will raise an Error (so
that all Parameters are not accidentally removed).
:parameter str dataset: name of the dataset
:parameter **kwargs: any other tags to do the filter (except qualifier
and dataset)
:raises ValueError: if no filter is provided | 7.279708 | 6.746999 | 1.078955 |
# TODO: raise error if old_component not found?
self._check_label(new_dataset)
self._rename_label('dataset', old_dataset, new_dataset)
self._handle_dataset_selectparams() | def rename_dataset(self, old_dataset, new_dataset) | Change the label of a dataset attached to the Bundle
:parameter str old_dataset: the current name of the dataset
(must exist)
:parameter str new_dataset: the desired new name of the dataset
(must not exist)
:return: None
:raises ValueError: if the new_dataset is forbidden | 10.746205 | 10.772905 | 0.997522 |
kwargs['context'] = 'compute'
kwargs['dataset'] = dataset
kwargs['qualifier'] = 'enabled'
self.set_value_all(value=True, **kwargs)
self._add_history(redo_func='enable_dataset',
redo_kwargs={'dataset': dataset},
undo_func='disable_dataset',
undo_kwargs={'dataset': dataset})
return self.get_dataset(dataset=dataset) | def enable_dataset(self, dataset=None, **kwargs) | Enable a 'dataset'. Datasets that are enabled will be computed
during :meth:`run_compute` and included in the cost function
during :meth:`run_fitting`.
If compute is not provided, the dataset will be enabled across all
compute options.
:parameter str dataset: name of the dataset
:parameter **kwargs: any other tags to do the filter
(except dataset or context)
:return: :class:`phoebe.parameters.parameters.ParameterSet`
of the enabled dataset | 4.910004 | 5.101038 | 0.96255 |
kwargs['context'] = 'compute'
kwargs['dataset'] = dataset
kwargs['qualifier'] = 'enabled'
self.set_value_all(value=False, **kwargs)
self._add_history(redo_func='disable_dataset',
redo_kwargs={'dataset': dataset},
undo_func='enable_dataset',
undo_kwargs={'dataset': dataset})
return self.get_dataset(dataset=dataset) | def disable_dataset(self, dataset=None, **kwargs) | Disable a 'dataset'. Datasets that are enabled will be computed
during :meth:`run_compute` and included in the cost function
during :meth:`run_fitting`.
If compute is not provided, the dataset will be disabled across all
compute options.
:parameter str dataset: name of the dataset
:parameter **kwargs: any other tags to do the filter
(except dataset or context)
:return: :class:`phoebe.parameters.parameters.ParameterSet`
of the disabled dataset | 5.007776 | 5.159258 | 0.970639 |
# TODO: be smart enough to take kwargs (especially for undoing a
# remove_constraint) for kind, value (expression),
redo_kwargs = deepcopy(kwargs)
if len(args) == 1 and \
isinstance(args[0], str) and \
not _get_add_func(_constraint, args[0],
return_none_if_not_found=True):
# then only the expression has been passed,
# we just need to pass it on to constraints.custom
func = constraint.custom
func_args = args
elif len(args) == 2 and \
all([isinstance(arg, Parameter) or
isinstance(arg, ConstraintParameter) for arg in args]):
# then we have 2 constraint expressions
func = constraint.custom
func_args = args
elif len(args) == 0:
# then everything is passed through kwargs
if 'kind' in kwargs.keys():
func = _get_add_func(_constraint, kwargs['kind'])
elif 'func' in kwargs.keys():
func = _get_add_func(_constraint, kwargs['func'])
elif 'constraint_func' in kwargs.keys():
func = _get_add_func(_constraint, kwargs['constraint_func'])
else:
func = constraint.custom
func_args = []
# constraint_param = ConstraintParameter(self, **kwargs)
else:
# then we've been passed the function in constraints and its
# arguments
func = _get_add_func(_constraint, args[0])
func_args = args[1:]
if 'solve_for' in kwargs.keys():
# solve_for is a twig, we need to pass the parameter
kwargs['solve_for'] = self.get_parameter(kwargs['solve_for'])
lhs, rhs, constraint_kwargs = func(self, *func_args, **kwargs)
# NOTE that any component parameters required have already been
# created by this point
constraint_param = ConstraintParameter(self,
qualifier=lhs.qualifier,
component=lhs.component,
dataset=lhs.dataset,
feature=lhs.feature,
kind=lhs.kind,
model=lhs.model,
constraint_func=func.__name__,
constraint_kwargs=constraint_kwargs,
in_solar_units=func.__name__ not in constraint.list_of_constraints_requiring_si,
value=rhs,
default_unit=lhs.default_unit,
description='expression that determines the constraint')
newly_constrained_param = constraint_param.get_constrained_parameter()
check_kwargs = {k:v for k,v in newly_constrained_param.meta.items() if k not in ['context', 'twig', 'uniquetwig']}
check_kwargs['context'] = 'constraint'
if len(self._bundle.filter(**check_kwargs)):
raise ValueError("'{}' is already constrained".format(newly_constrained_param.twig))
metawargs = {'context': 'constraint',
'kind': func.func_name}
params = ParameterSet([constraint_param])
constraint_param._update_bookkeeping()
self._attach_params(params, **metawargs)
redo_kwargs['func'] = func.func_name
self._add_history(redo_func='add_constraint',
redo_kwargs=redo_kwargs,
undo_func='remove_constraint',
undo_kwargs={'uniqueid': constraint_param.uniqueid})
# we should run it now to make sure everything is in-sync
if conf.interactive_constraints:
self.run_constraint(uniqueid=constraint_param.uniqueid, skip_kwargs_checks=True)
else:
self._delayed_constraints.append(constraint_param.uniqueid)
return params | def add_constraint(self, *args, **kwargs) | TODO: add documentation
args can be string representation (length 1)
func and strings to pass to function | 4.96143 | 4.972768 | 0.99772 |
if twig is not None:
kwargs['twig'] = twig
kwargs['context'] = 'constraint'
return self.get(**kwargs) | def get_constraint(self, twig=None, **kwargs) | Filter in the 'constraint' context
:parameter str constraint: name of the constraint (optional)
:parameter **kwargs: any other tags to do the filter
(except constraint or context)
:return: :class:`phoebe.parameters.parameters.ParameterSet` | 4.152358 | 4.372728 | 0.949604 |
# let's run delayed constraints first to ensure that we get the same
# results in interactive and non-interactive modes as well as to make
# sure we don't have delayed constraints for the constraint we're
# about to remove. This could perhaps be optimized by searching
# for this/these constraints and only running/removing those, but
# probably isn't worth the savings.
changed_params = self.run_delayed_constraints()
kwargs['twig'] = twig
redo_kwargs = deepcopy(kwargs)
kwargs['context'] = 'constraint'
# we'll get the constraint so that we can undo the bookkeeping
# and also reproduce an undo command
constraint = self.get_parameter(**kwargs)
# undo parameter bookkeeping
constraint._remove_bookkeeping()
# and finally remove it
self.remove_parameter(**kwargs)
undo_kwargs = {k: v for k, v in constraint.to_dict().items()
if v is not None and
k not in ['uniqueid', 'uniquetwig', 'twig',
'Class', 'context']}
self._add_history(redo_func='remove_constraint',
redo_kwargs=redo_kwargs,
undo_func='add_constraint',
undo_kwargs=undo_kwargs) | def remove_constraint(self, twig=None, **kwargs) | Remove a 'constraint' from the bundle
:parameter str twig: twig to filter for the constraint
:parameter **kwargs: any other tags to do the filter
(except twig or context) | 7.799607 | 7.783887 | 1.00202 |
self._kwargs_checks(kwargs, additional_allowed_keys=['check_nan'])
kwargs['twig'] = twig
redo_kwargs = deepcopy(kwargs)
undo_kwargs = deepcopy(kwargs)
changed_params = self.run_delayed_constraints()
param = self.get_constraint(**kwargs)
if kwargs.pop('check_nan', True) and np.any(np.isnan([p.get_value() for p in param.vars.to_list()])):
raise ValueError("cannot flip constraint while the value of {} is nan".format([p.twig for p in param.vars.to_list() if np.isnan(p.get_value())]))
if solve_for is None:
return param
if isinstance(solve_for, Parameter):
solve_for = solve_for.uniquetwig
redo_kwargs['solve_for'] = solve_for
undo_kwargs['solve_for'] = param.constrained_parameter.uniquetwig
logger.info("flipping constraint '{}' to solve for '{}'".format(param.uniquetwig, solve_for))
param.flip_for(solve_for)
result = self.run_constraint(uniqueid=param.uniqueid, skip_kwargs_checks=True)
self._add_history(redo_func='flip_constraint',
redo_kwargs=redo_kwargs,
undo_func='flip_constraint',
undo_kwargs=undo_kwargs)
return param | def flip_constraint(self, twig=None, solve_for=None, **kwargs) | Flip an existing constraint to solve for a different parameter
:parameter str twig: twig to filter the constraint
:parameter solve_for: twig or actual parameter object of the new
parameter which this constraint should constraint (solve for).
:type solve_for: str or :class:`phoebe.parameters.parameters.Parameter
:parameter **kwargs: any other tags to do the filter
(except twig or context) | 4.760085 | 4.339385 | 1.096949 |
if not kwargs.get('skip_kwargs_checks', False):
self._kwargs_checks(kwargs)
kwargs['twig'] = twig
kwargs['context'] = 'constraint'
# kwargs['qualifier'] = 'expression'
kwargs['check_visible'] = False
kwargs['check_default'] = False
# print "***", kwargs
expression_param = self.get_parameter(**kwargs)
kwargs = {}
kwargs['twig'] = None
# TODO: this might not be the case, we just know its not in constraint
kwargs['qualifier'] = expression_param.qualifier
kwargs['component'] = expression_param.component
kwargs['dataset'] = expression_param.dataset
kwargs['feature'] = expression_param.feature
kwargs['context'] = []
if kwargs['component'] is not None:
kwargs['context'] += ['component']
if kwargs['dataset'] is not None:
kwargs['context'] += ['dataset']
if kwargs['feature'] is not None:
kwargs['context'] += ['feature']
kwargs['check_visible'] = False
kwargs['check_default'] = False
constrained_param = self.get_parameter(**kwargs)
result = expression_param.result
constrained_param.set_value(result, force=True, run_constraints=True)
logger.debug("setting '{}'={} from '{}' constraint".format(constrained_param.uniquetwig, result, expression_param.uniquetwig))
if return_parameter:
return constrained_param
else:
return result | def run_constraint(self, twig=None, return_parameter=False, **kwargs) | Run a given 'constraint' now and set the value of the constrained
parameter. In general, there shouldn't be any need to manually
call this - constraints should automatically be run whenever a
dependent parameter's value is change.
:parameter str twig: twig to filter for the constraint
:parameter **kwargs: any other tags to do the filter
(except twig or context)
:return: the resulting value of the constraint
:rtype: float or units.Quantity | 3.771159 | 3.730603 | 1.010871 |
datasets = kwargs.pop('dataset', self.datasets)
components = kwargs.pop('component', self.components)
# don't allow things like model='mymodel', etc
forbidden_keys = parameters._meta_fields_filter
self._kwargs_checks(kwargs, additional_forbidden_keys=forbidden_keys)
if compute is None:
if len(self.computes)==1:
compute = self.computes[0]
else:
raise ValueError("must provide compute")
system = backends.PhoebeBackend()._create_system_and_compute_pblums(self, compute, **kwargs)
pblums = {}
for component, star in system.items():
if component not in components:
continue
for dataset in star._pblum_scale.keys():
if dataset not in datasets:
continue
pblums["{}@{}".format(component, dataset)] = float(star.compute_luminosity(dataset)) * u.W
return pblums | def compute_pblums(self, compute=None, **kwargs) | Compute the passband luminosities that will be applied to the system,
following all coupling, etc, as well as all relevant compute options
(ntriangles, distortion_method, etc). The exposed passband luminosities
(and any coupling) are computed at t0@system.
This method is only for convenience and will be recomputed internally
within run_compute. Alternatively, you can create a mesh dataset
and request any specific pblum to be exposed (per-time).
:parameter str compute: label of the compute options (note required if
only one is attached to the bundle)
:parameter component: (optional) label of the component(s) requested
:type component: str or list of strings
:parameter dataset: (optional) label of the dataset(s) requested
:type dataset: str or list of strings
:parameter component: (optional) label of the component(s) requested
:type component: str or list of strings
:return: dictionary with keys <component>@<dataset> and computed pblums
as values (as quantity objects, default units of W) | 6.223084 | 5.420818 | 1.147997 |
func = _get_add_func(_compute, kind)
kwargs.setdefault('compute',
self._default_label(func.func_name,
**{'context': 'compute',
'kind': func.func_name}))
self._check_label(kwargs['compute'])
params = func(**kwargs)
# TODO: similar kwargs logic as in add_dataset (option to pass dict to
# apply to different components this would be more complicated here if
# allowing to also pass to different datasets
metawargs = {'context': 'compute',
'kind': func.func_name,
'compute': kwargs['compute']}
logger.info("adding {} '{}' compute to bundle".format(metawargs['kind'], metawargs['compute']))
self._attach_params(params, **metawargs)
redo_kwargs = deepcopy(kwargs)
redo_kwargs['func'] = func.func_name
self._add_history(redo_func='add_compute',
redo_kwargs=redo_kwargs,
undo_func='remove_compute',
undo_kwargs={'compute': kwargs['compute']})
# since we've already processed (so that we can get the new qualifiers),
# we'll only raise a warning
self._kwargs_checks(kwargs, warning_only=True)
return self.get_compute(**metawargs) | def add_compute(self, kind=compute.phoebe, **kwargs) | Add a set of computeoptions for a given backend to the bundle.
The label ('compute') can then be sent to :meth:`run_compute`.
If not provided, 'compute' will be created for you and can be
accessed by the 'compute' attribute of the returned
ParameterSet.
Available kinds include:
* :func:`phoebe.parameters.compute.phoebe`
* :func:`phoebe.parameters.compute.legacy`
* :func:`phoebe.parameters.compute.photodynam`
* :func:`phoebe.parameters.compute.jktebop`
:parameter kind: function to call that returns a
ParameterSet or list of parameters. This must either be
a callable function that accepts nothing but default
values, or the name of a function (as a string) that can
be found in the :mod:`phoebe.parameters.compute` module
:type kind: str or callable
:parameter str compute: (optional) name of the newly-created
compute optins
:parameter **kwargs: default values for any of the newly-created
parameters
:return: :class:`phoebe.parameters.parameters.ParameterSet` of
all parameters that have been added
:raises NotImplementedError: if required constraint is not implemented | 7.996303 | 8.149592 | 0.98119 |
if compute is not None:
kwargs['compute'] = compute
kwargs['context'] = 'compute'
return self.filter(**kwargs) | def get_compute(self, compute=None, **kwargs) | Filter in the 'compute' context
:parameter str compute: name of the compute options (optional)
:parameter **kwargs: any other tags to do the filter
(except compute or context)
:return: :class:`phoebe.parameters.parameters.ParameterSet` | 5.41982 | 4.627429 | 1.171238 |
kwargs['compute'] = compute
kwargs['context'] = 'comute'
self.remove_parameters_all(**kwargs) | def remove_compute(self, compute, **kwargs) | Remove a 'compute' from the bundle
:parameter str compute: name of the compute options
:parameter **kwargs: any other tags to do the filter
(except twig or context)
:raise NotImplementedError: because it isn't | 19.667137 | 23.015213 | 0.854528 |
# TODO: raise error if old_compute not found?
self._check_label(new_compute)
self._rename_label('compute', old_compute, new_compute) | def rename_compute(self, old_compute, new_compute) | Change the label of a compute attached to the Bundle
:parameter str old_compute: the current name of the compute options
(must exist)
:parameter str new_compute: the desired new name of the compute options
(must not exist)
:return: None
:raises ValueError: if the new_compute is forbidden | 5.789261 | 5.963466 | 0.970788 |
if model is not None:
kwargs['model'] = model
kwargs['context'] = 'model'
return self.filter(**kwargs) | def get_model(self, model=None, **kwargs) | Filter in the 'model' context
:parameter str model: name of the model (optional)
:parameter **kwargs: any other tags to do the filter
(except model or context)
:return: :class:`phoebe.parameters.parameters.ParameterSet` | 5.433151 | 4.547546 | 1.194743 |
kwargs['model'] = model
kwargs['context'] = 'model'
self.remove_parameters_all(**kwargs) | def remove_model(self, model, **kwargs) | Remove a 'model' from the bundle
:parameter str twig: twig to filter for the model
:parameter **kwargs: any other tags to do the filter
(except twig or context) | 10.157162 | 11.862724 | 0.856225 |
# TODO: raise error if old_feature not found?
self._check_label(new_model)
self._rename_label('model', old_model, new_model) | def rename_model(self, old_model, new_model) | Change the label of a model attached to the Bundle
:parameter str old_model: the current name of the model
(must exist)
:parameter str new_model: the desired new name of the model
(must not exist)
:return: None
:raises ValueError: if the new_model is forbidden | 7.757757 | 7.594406 | 1.021509 |
raise NotImplementedError
param = self.get_parameter(twig=twig, **kwargs)
# TODO: make sure param is a float parameter?
func = _get_add_func(_distributions, 'prior')
# TODO: send smart defaults for priors based on limits of parameter
params = func(**kwargs)
metawargs = {k: v for k, v in params.meta.items()
if k not in ['uniqueid', 'uniquetwig', 'twig']}
metawargs['context'] = 'prior'
logger.info("adding prior on '{}' parameter".format(param.uniquetwig))
self._attach_params(params, **metawargs)
redo_kwargs = deepcopy(kwargs)
redo_kwargs['func'] = func.func_name
self._add_history(redo_func='add_prior',
redo_kwargs=redo_kwargs,
undo_func='remove_prior',
undo_kwargs={'twig': param.uniquetwig})
# return params
return self.get_prior(**metawargs) | def add_prior(self, twig=None, **kwargs) | [NOT IMPLEMENTED]
:raises NotImplementedError: because it isn't | 7.724042 | 7.792254 | 0.991246 |
raise NotImplementedError
kwargs['context'] = 'prior'
return self.filter(twig=twig, **kwargs) | def get_prior(self, twig=None, **kwargs) | [NOT IMPLEMENTED]
:raises NotImplementedError: because it isn't | 9.921452 | 15.442401 | 0.642481 |
raise NotImplementedError
if fitting is not None:
kwargs['fitting'] = fitting
kwargs['context'] = 'fitting'
return self.filter(**kwargs) | def get_fitting(self, fitting=None, **kwargs) | [NOT IMPLEMENTED]
:raises NotImplementedError: because it isn't | 6.044824 | 8.630509 | 0.700402 |
raise NotImplementedError
kwargs['context'] = 'posterior'
return self.filter(twig=twig, **kwargs) | def get_posterior(self, twig=None, feedback=None, **kwargs) | [NOT IMPLEMENTED]
:raises NotImplementedError: because it isn't | 9.084705 | 13.963616 | 0.650598 |
raise NotImplementedError
if feedback is not None:
kwargs['feedback'] = feedback
kwargs['context'] = 'feedback'
return self.filter(**kwargs) | def get_feedback(self, feedback=None, **kwargs) | [NOT IMPLEMENTED]
:raises NotImplementedError: because it isn't | 5.805709 | 8.510282 | 0.682199 |
res = bundle.filter(twig=twig, force_ps=True, check_visible=False, check_default=False, **kwargs)
# we force_ps instead of checking is_instance(res, ParameterSet) to avoid
# having to import from parameters
if len(res) == 1:
#~ print "twighelpers._twig_to_uniqueid len(res): {}, res: {}".format(len(res), res)
return res.to_list()[0].uniqueid
else:
raise ValueError("did not return a single unique match to a parameter for '{}'".format(twig)) | def _twig_to_uniqueid(bundle, twig, **kwargs) | kwargs are passed on to filter | 8.727749 | 8.476537 | 1.029636 |
self._user_label = _uniqueid_to_uniquetwig(self._bundle, self.unique_label)
self._set_curly_label() | def update_user_label(self) | finds this parameter and gets the least_unique_twig from the bundle | 28.898323 | 14.124516 | 2.045969 |
if not self.is_param:
raise ValueError("this var does not point to a parameter")
# this is quite expensive, so let's cache the parameter object so we only
# have to filter on the first time this is called
if self._parameter is None:
self._parameter = self._bundle.get_parameter(uniqueid=self.unique_label, check_visible=False, check_default=False)
return self._parameter | def get_parameter(self) | get the parameter object from the system for this var
needs to be backend safe (not passing or storing bundle) | 7.718111 | 6.019178 | 1.282253 |
acceptable_keys = ['unit', 'pad', 'lim', 'label']
# if direction in ['s']:
# acceptable_keys += ['mode']
processed_kwargs = {}
for k,v in kwargs.items():
if k.startswith(direction):
processed_key = k.lstrip(direction)
else:
processed_key = k
if processed_key in acceptable_keys:
processed_kwargs[processed_key] = v
return processed_kwargs | def _process_dimension_kwargs(direction, kwargs) | process kwargs for AxDimension instances by stripping off the prefix
for the appropriate direction | 3.509826 | 3.308924 | 1.060715 |
def _z(call):
if isinstance(call.z.value, np.ndarray):
return np.mean(call.z.value.flatten())
elif isinstance(call.z.value, float) or isinstance(call.z.value, int):
return call.z.value
else:
# put it at the back
return -np.inf
calls = self._calls
zs = np.array([_z(c) for c in calls])
sorted_inds = zs.argsort()
# TODO: ugh, this is ugly. Test to find the optimal way to sort
# while still ending up with a list
return _call.make_callgroup(np.array(calls)[sorted_inds].tolist()) | def calls_sorted(self) | calls sorted in z | 5.284419 | 5.010552 | 1.054658 |
if len(self.calls) == 0:
return True, ''
msg = []
if not _consistent_allow_none(call._axorder, self._axorder):
msg.append('inconsistent axorder, {} != {}'.format(call.axorder, self.axorder))
if not _consistent_allow_none(call._axpos, self._axpos):
msg.append('inconsistent axpos, {} != {}'.format(call.axpos, self.axpos))
if call._axorder == self._axorder and call._axorder is not None:
# then despite other conflicts, attempt to put on same axes
return True, ''
if call._axpos == self._axpos and call._axpos is not None:
# then despite other conflicts, attempt to put on same axes
return True, ''
# TODO: include s, c, fc, ec, etc and make these checks into loops
if call.x.unit.physical_type != self.x.unit.physical_type:
msg.append('inconsitent xunit, {} != {}'.format(call.x.unit, self.x.unit))
if call.y.unit.physical_type != self.y.unit.physical_type:
msg.append('inconsitent yunit, {} != {}'.format(call.y.unit, self.y.unit))
if call.z.unit.physical_type != self.z.unit.physical_type:
msg.append('inconsitent zunit, {} != {}'.format(call.z.unit, self.z.unit))
if call.i.unit.physical_type != self.i.unit.physical_type:
msg.append('inconsistent iunit, {} != {}'.format(call.i.unit, self.i.unit))
if call.i.is_reference or self.i.is_reference:
if call.i.reference != self.i.reference:
msg.append('inconsistent i reference, {} != {}'.format(call.i.reference, self.i.reference))
if not _consistent_allow_none(call.title, self.title):
msg.append('inconsistent axes title, {} != {}'.format(call.title, self.title))
# here we send the protected _label so that we get None instead of empty string
if not _consistent_allow_none(call.x._label, self.x._label):
msg.append('inconsitent xlabel, {} != {}'.format(call.x.label, self.x.label))
if not _consistent_allow_none(call.y._label, self.y._label):
msg.append('inconsitent ylabel, {} != {}'.format(call.y.label, self.y.label))
if not _consistent_allow_none(call.z._label, self.z._label):
msg.append('inconsitent zlabel, {} != {}'.format(call.z.label, self.z.label))
if len(msg):
return False, ', '.join(msg)
else:
return True, '' | def consistent_with_call(self, call) | check to see if a new call would be consistent to add to this Axes instance
checks include:
* compatible units in all directions
* compatible independent-variable (if applicable) | 2.101239 | 2.082106 | 1.009189 |
if lim is None:
self._lim = lim
return
typeerror_msg = "lim must be of type tuple, float, None, or in ['fixed', 'symmetric', 'frame', 'sliding']"
if isinstance(lim, str):
if lim in ['fixed', 'symmetric', 'frame', 'sliding']:
self._lim = lim
return
else:
raise ValueError(typeerror_msg)
if isinstance(lim, int):
lim = float(lim)
if isinstance(lim, float):
if lim <= 0.0:
raise ValueError("lim cannot be <= 0")
self._lim = lim
return
if not isinstance(lim, tuple):
try:
lim = tuple(lim)
except:
raise TypeError(typeerror_msg)
if not len(lim)==2:
raise ValueError('lim must have length 2')
for l in lim:
if not (isinstance(l, float) or isinstance(l, int) or l is None):
raise ValueError("each item in limit must be of type float, int, or None")
self._lim = lim | def lim(self, lim) | set lim (limits) | 2.504817 | 2.492169 | 1.005075 |
if self.value is None:
return None
if i is None:
return np.median(self.value)
if indeps is None:
indeps_all_calls = list(set(np.concatenate([common.tolist(call.i.get_value(unit=self.axes.i.unit)) for call in self.axes.calls])))
indeps = np.linspace(np.nanmin(indeps_all_calls),
np.nanmax(indeps_all_calls),
len(self.value))
if len(indeps) != len(self.value):
raise ValueError("indeps and value must have same length")
return np.interp(i, indeps, self.value) | def get_value(self, i, indeps=None) | access the interpolated value at a give value of i (independent-variable)
if indeps is not passed, then the entire range of indeps over all calls is assumed | 3.666862 | 3.304054 | 1.109807 |
params = []
params += [FloatParameter(qualifier='t0', value=kwargs.get('t0', 0.0), default_unit=u.d, description='Time at which all values are provided')]
# TODO: re-enable these once they're incorporated into orbits (dynamics) correctly.
params += [FloatParameter(qualifier='ra', value=kwargs.get('ra', 0.0), default_unit=u.deg, description='Right ascension')]
params += [FloatParameter(qualifier='dec', value=kwargs.get('dec', 0.0), default_unit=u.deg, description='Declination')]
params += [StringParameter(qualifier='epoch', value=kwargs.get('epoch', 'J2000'), description='Epoch of coordinates')]
#params += [FloatParameter(qualifier='pmra', value=kwargs.get('pmra', 0.0), default_unit=u.mas/u.yr, description='Proper motion in right ascension')]
#params += [FloatParameter(qualifier='pmdec', value=kwargs.get('pmdec', 0.0), default_unit=u.mas/u.yr, description='Proper motion in declination')]
params += [FloatParameter(qualifier='distance', value=kwargs.get('distance', 1.0), default_unit=u.m, description='Distance to the system')]
params += [FloatParameter(qualifier='vgamma', value=kwargs.get('vgamma', 0.0), default_unit=u.km/u.s, description='Systemic velocity (in the direction of positive RV or negative vz)')]
return ParameterSet(params) | def system(**kwargs) | Generally, this will automatically be added to a newly initialized
:class:`phoebe.frontend.bundle.Bundle`
:parameter **kwargs: defaults for the values of any of the parameters
:return: a :class:`phoebe.parameters.parameters.ParameterSet` of all newly
created :class:`phoebe.parameters.parameters.Parameter`s | 2.486566 | 2.554943 | 0.973237 |
for field_name, access_types in access.items():
id_field = "{}_id".format(field_name)
# Check that the team is associated with the object via a FK...
if hasattr(obj, id_field) and getattr(obj, id_field) is None:
# ...and there is no existing related team.
# TODO - the team name needs to be able to create a unique
# slug that's < 50 characters long.
# TODO - this is just a workaround:
next_pk = next(iter(instance.pk for instance in obj.__class__.objects.order_by("-pk")), 0) + 1 # this is a thing a beauty. ;-)
team_name = u"{} for {} {}".format(
field_name, obj._meta.model_name, next_pk)
new_team = Team(
name=team_name,
member_access=access_types[0],
manager_access=access_types[1],
creator=user)
new_team.save()
setattr(obj, field_name, new_team)
return obj | def create_teams(obj, user, access) | Will create new teams associated with the referenced obj and set the
resulting relation to the correct attribute.
The naming convention for team foreign keys is pluralname_team (for
example, instructors_team).
This function will take the access dictionary and apply the specified
access types as follows:
access = {
'trainees_team': ('open', 'add someone'),
}
Where the key name is the team name and the tuple contains the access
types for member access and manager access respectively.
If the foreign key already has a value associated with it, this function
will NOT create a new team to replace it. | 4.741585 | 4.562275 | 1.039303 |
def _to_str(obj):
return "{}:{}".format(obj.kind, obj.component)
if isinstance(obj, str):
# TODO: check to see if valid?, use allow_hierarchy
# TODO: when passed labels this is going to give the wrong thing, but that might be fixable in the HierarchyParameter set_value check
#~ raise NotImplementedError # need to get object from bundle and pass to _to_str
return obj
elif isinstance(obj, ParameterSet):
# TODO: be smarter about this and don't assume only 1 will be returned
if 'repr' in obj.to_flat_dict().keys():
return obj.get_value(qualifier='repr')
else:
# make sure we only have things in the 'component' context
obj = obj.filter(context='component')
return _to_str(obj)
elif isinstance(obj, Parameter):
if obj.qualifier == 'repr':
return obj.get_value()
else:
return _to_str(obj)
else:
raise NotImplementedError("could not parse {}".format(obj)) | def _to_component(obj, allow_hierarchy=True) | takes either a string, ParameterSet, Parameter, or the string representation (component or nested hierarchy) | 7.304729 | 6.784614 | 1.076661 |
if envelope:
return '{}({}, {}, {})'.format(_to_component(orbit, False), _to_component(comp1), _to_component(comp2), _to_component(envelope, False))
else:
return '{}({}, {})'.format(_to_component(orbit, False), _to_component(comp1), _to_component(comp2)) | def binaryorbit(orbit, comp1, comp2, envelope=None) | Build the string representation of a hierarchy containing a binary
orbit with 2 components.
Generally, this will be used as an input to the kind argument in
:meth:`phoebe.frontend.bundle.Bundle.set_hierarchy`
:parameter comp1: an existing hierarchy string, Parameter, or ParameterSet
:parameter comp2: an existing hierarchy string, Parameter, or ParameterSet
:return: the string representation of the hierarchy | 2.524828 | 2.588336 | 0.975464 |
uniques = [np.unique(column, return_inverse=True) for column in grid_pars]
#[0] are the unique values, [1] the indices for these to recreate the original array
# we need to copy the values of the unique axes explicitly into new arrays
# otherwise we can get issues with the interpolator
axis_values = []
for uniques_ in uniques:
this_axes = np.zeros(len(uniques_[0]))
this_axes[:] = uniques_[0]
axis_values.append(this_axes)
#axis_values = [uniques_[0] for uniques_ in uniques]
#axis_values = [np.require(uniques_[0],requirements=['A','O','W','F']) for uniques_ in uniques]
unique_val_indices = [uniques_[1] for uniques_ in uniques]
data_dim = np.shape(grid_data)[0]
par_dims = [len(uv[0]) for uv in uniques]
par_dims.append(data_dim)
pixelgrid = np.ones(par_dims)
# We put np.inf as default value. If we get an inf, that means we tried to access
# a region of the pixelgrid that is not populated by the data table
pixelgrid[pixelgrid==1] = np.inf
# now populate the multiDgrid
indices = [uv[1] for uv in uniques]
pixelgrid[indices] = grid_data.T
return tuple(axis_values), pixelgrid | def create_pixeltypegrid(grid_pars, grid_data) | Creates pixelgrid and arrays of axis values.
Starting from:
- grid_pars: 2D numpy array, 1 column per parameter, unlimited number of
cols
- grid_data: 2D numpy array, 1 column per variable, data corresponding
to the rows in grid_pars
The grid should be rectangular and complete, i.e. every combination of the unique values in the
parameter columns should exist. If not, a nan value will be inserted.
@param grid_pars: Npar x Ngrid array of parameters
@type grid_pars: array
@param grid_data: Ndata x Ngrid array of data
@type grid_data: array
@return: axis values and pixelgrid
@rtype: array, array | 5.275874 | 4.998085 | 1.055579 |
# convert requested parameter combination into a coordinate
p_ = np.array([np.searchsorted(av_,val) for av_, val in zip(axis_values,p)])
lowervals_stepsize = np.array([[av_[p__-1], av_[p__]-av_[p__-1]] \
for av_, p__ in zip(axis_values,p_)])
p_coord = (p-lowervals_stepsize[:,0])/lowervals_stepsize[:,1] + p_-1
# interpolate
if order > 1:
prefilter = False
else:
prefilter = False
return [ndimage.map_coordinates(pixelgrid[...,i],p_coord, order=order,
prefilter=prefilter, mode=mode, cval=cval) \
for i in range(np.shape(pixelgrid)[-1])] | def interpolate(p, axis_values, pixelgrid, order=1, mode='constant', cval=0.0) | Interpolates in a grid prepared by create_pixeltypegrid().
p is an array of parameter arrays
@param p: Npar x Ninterpolate array
@type p: array
@return: Ndata x Ninterpolate array
@rtype: array | 4.223725 | 4.689966 | 0.900588 |
res = libphoebe.interp(p, axis_values, pixelgrid)
return res | def cinterpolate(p, axis_values, pixelgrid) | Interpolates in a grid prepared by create_pixeltypegrid().
Does a similar thing as :py:func:`interpolate`, but does everything in C.
p is an array of parameter arrays.
Careful, the shape of input :envvar:`p` and output is the transpose of
:py:func:`interpolate`.
@param p: Ninterpolate X Npar array
@type p: array
@return: Ninterpolate X Ndata array
@rtype: array | 9.999516 | 16.406754 | 0.609476 |
if mpi.within_mpirun and mpi.myrank == 0:
# tell the workers to invoke the same logger
mpi.comm.bcast({'worker_command': 'logger', 'args': args, 'kwargs': kwargs}, root=0)
return _utils.get_basic_logger(*args, **kwargs) | def logger(*args, **kwargs) | shortcut to :func:`utils.get_basic_logger` | 8.863122 | 6.943461 | 1.27647 |
name = ""
#-- define formats
if style=='default':
format = '%(asctime)s %(name)-12s %(levelname)-7s %(message)s'
datefmt = '%a, %d %b %Y %H:%M'
elif style=='grandpa':
format = '# %(levelname)-7s %(message)s'
datefmt = '%a, %d %b %Y %H:%M'
elif style=='minimal':
format = ''
datefmt = '%a, %d %b %Y %H:%M'
if style=='trace':
formatter = MyFormatter()
else:
formatter = logging.Formatter(fmt=format,datefmt=datefmt)
if clevel: clevel = logging.__dict__[clevel.upper()]
if flevel: flevel = logging.__dict__[flevel.upper()]
#-- set up basic configuration.
# The basicConfig sets up one default logger. If you give a filename, it's
# a FileHandler, otherwise a StreamHandler.
#-- If we want console and filename, first set up a basic FileHandler, then
# add terminal StreamHandler
if filename is not None:
if flevel is None:
level = clevel
else:
level = flevel
logging.basicConfig(level=level,
format=format,datefmt=datefmt,
filename=filename,filemode=filemode)
fh = logging.FileHandler(filename)
fh.setLevel(flevel)
fh.setFormatter(formatter)
logging.getLogger(name).addHandler(fh)
if filename is not None and clevel:
# define a Handler which writes INFO messages or higher to the sys.stderr
ch = logging.StreamHandler()
ch.setLevel(clevel)
# tell the handler to use this format
ch.setFormatter(formatter)
logging.getLogger(name).addHandler(ch)
#-- If we only want a console:
else:
logging.basicConfig(level=clevel,format=format,datefmt=datefmt,
filename=filename,filemode=filemode)
#-- fix filename logging
if filename is not None:
logging.getLogger(name).handlers[0].level = flevel
return logging.getLogger(name) | def get_basic_logger(clevel='WARNING',flevel='DEBUG',
style="default",filename=None,filemode='w') | Return a basic logger via a log file and/or terminal.
Example 1: log only to the console, accepting levels "INFO" and above
>>> logger = utils.get_basic_logger()
Example 2: log only to the console, accepting levels "DEBUG" and above
>>> logger = utils.get_basic_logger(clevel='DEBUG')
Example 3: log only to a file, accepting levels "DEBUG" and above
>>> logger = utils.get_basic_logger(clevel=None,filename='mylog.log')
Example 4: log only to a file, accepting levels "INFO" and above
>>> logger = utils.get_basic_logger(clevel=None,flevel='INFO',filename='mylog.log')
Example 5: log to the terminal (INFO and above) and file (DEBUG and above)
>>> logger = utils.get_basic_logger(filename='mylog.log')
The different logging styles are:
C{style='default'}::
Wed, 13 Feb 2013 08:47 root INFO Some information
C{style='grandpa'}::
# INFO Some information
C{style='minimal'}::
Some information
@param style: logger style
@type style: str, one of 'default','grandpa','minimal' | 2.783135 | 2.738067 | 1.01646 |
new_pairs = []
for key, value in pairs:
if isinstance(value, unicode):
value = value.encode('utf-8')
if isinstance(key, unicode):
key = key.encode('utf-8')
new_pairs.append((key, value))
return dict(new_pairs) | def parse_json(pairs) | modified from:
https://stackoverflow.com/questions/956867/how-to-get-string-objects-instead-of-unicode-from-json#34796078
pass this to the object_pairs_hook kwarg of json.load/loads | 1.790049 | 1.634029 | 1.095482 |
if not isinstance(direction, str):
raise TypeError("direction must be of type str")
accepted_values = ['i', 'x', 'y', 'z', 's', 'c']
if direction not in accepted_values:
raise ValueError("must be one of: {}".format(accepted_values))
self._direction = direction | def direction(self, direction) | set the direction | 3.044632 | 3.074126 | 0.990406 |
if isinstance(self.call.i._value, float):
if self.call.i._value==i:
return self._to_unit(self._value, unit)
else:
return None
# we can't call i._value here because that may point to a string, and
# we want this to resolve the array
i_value = self.call.i.get_value(linebreak=False, sort_by_indep=False)
if len(i_value) != len(self._value):
raise ValueError("length mismatch with independent-variable")
sort_inds = i_value.argsort()
indep_value = i_value[sort_inds]
this_value = self._value[sort_inds]
return self._to_unit(np.interp(i, indep_value, this_value, left=np.nan, right=np.nan), unit) | def interpolate_at_i(self, i, unit=None) | access the interpolated value at a give value of i (independent-variable) | 4.649086 | 4.308866 | 1.078958 |
if sort_by_indep is None:
# TODO: add property of the call?
sort_by_indep = True
indep_array = self.call.i.get_value(i=i,
unit=iunit,
uncover=uncover,
trail=trail,
linebreak=False,
sort_by_indep=False)
this_array = getattr(self, func)(i=i,
unit=unit,
uncover=uncover,
trail=trail,
linebreak=False,
sort_by_indep=False)
if not (isinstance(indep_array, np.ndarray) and len(indep_array)==len(this_array)):
sort_by_indep = False
if sort_by_indep:
# TODO: it might be nice to buffer this at the call level, so making
# multiple get_value calls doesn't have to recompute the sort-order
sort_inds = indep_array.argsort()
return this_array[sort_inds]
else:
return this_array | def _sort_by_indep(self, func='get_value', i=None, iunit=None, unit=None,
uncover=None, trail=None, linebreak=None,
sort_by_indep=None) | must be called before (or within) _do_linebreak | 3.001215 | 3.023423 | 0.992655 |
if value is None:
self._value = value
return
# handle casting to acceptable types
if isinstance(value, list) or isinstance(value, tuple):
value = np.array(value)
if isinstance(value, int):
value = float(value)
if isinstance(value, u.Quantity):
if self.unit == u.dimensionless_unscaled:
# then take the unit from quantity and apply it
self.unit = value.unit
value = value.value
else:
# then convert to the requested unit
value = value.to(self.unit).value
# handle setting based on type
if isinstance(value, np.ndarray):
# if len(value.shape) != 1:
# raise ValueError("value must be a flat array")
self._value = value
elif isinstance(value, float):
# TODO: do we want to cast to np.array([value])??
# this will most likely be used for axhline/axvline
self._value = value
# elif isinstance(value, str):
# TODO: then need to pull from the bundle??? Or will this happen
# at a higher level
elif self.direction=='c' and isinstance(value, str):
self._value = common.coloralias.map(value)
else:
raise TypeError("value must be of type array (or similar)") | def _set_value(self, value) | set the value | 4.937238 | 4.891614 | 1.009327 |
return self.get_value(i=i, unit=unit,
uncover=uncover, trail=trail,
linebreak=linebreak, sort_by_indep=sort_by_indep,
attr='_error') | def get_error(self, i=None, unit=None,
uncover=None, trail=None,
linebreak=None, sort_by_indep=None) | access the error for a given value of i (independent-variable) depending
on which effects (i.e. uncover) are enabled. | 2.194437 | 2.459888 | 0.892088 |
# TODO: check length with value?
# TODO: type checks (similar to value)
if self.direction not in ['x', 'y', 'z'] and error is not None:
raise ValueError("error only accepted for x, y, z dimensions")
if isinstance(error, u.Quantity):
error = error.to(self.unit).value
self._error = error | def error(self, error) | set the error | 6.565934 | 6.225363 | 1.054707 |
if self.direction in ['i'] and label is not None:
raise ValueError("label not accepted for indep dimension")
if label is None:
self._label = label
return
if not isinstance(label, str):
try:
label = str(label)
except:
raise TypeError("label must be of type str")
self._label = label | def label(self, label) | set the label | 4.39883 | 4.360176 | 1.008865 |
if isinstance(self._value, str):
dimension = self._value
return getattr(self.call, dimension).value
return super(CallDimensionI, self)._get_value() | def value(self) | access the value | 11.867401 | 10.949653 | 1.083815 |
# for the indep direction we also allow a string which points to one
# of the other available dimensions
# TODO: support c, fc, ec?
if isinstance(value, common.basestring) and value in ['x', 'y', 'z']:
# we'll cast just to get rid of any python2 unicodes
self._value = str(value)
dimension = value
self._unit = getattr(self.call, dimension).unit
return
# NOTE: cannot do super on setter directly, see this python
# bug: https://bugs.python.org/issue14965 and discussion:
# https://mail.python.org/pipermail/python-dev/2010-April/099672.html
super(CallDimensionI, self)._set_value(value) | def value(self, value) | set the value | 10.025843 | 10.126737 | 0.990037 |
if solar_units:
omega = rotfreq / (2*np.pi) / np.sqrt(c.GM_sun.to(u.solRad**3/u.d**2).value/scale**3)
else:
# then SI units
omega = rotfreq / (2*np.pi) / np.sqrt(c.GM_sun.value/scale**3)
# print "*** rotstar.rotfreq_to_omega", rotfreq, scale, solar_units, omega
return omega | def rotfreq_to_omega(rotfreq, scale=c.R_sun.si.value, solar_units=False) | TODO: add documentation
NOTE: everything MUST be in consistent units according to solar_units bool | 3.708354 | 3.758464 | 0.986668 |
if isinstance(item, tuple):
return item
elif isinstance(item, list):
return tuple(item)
elif isinstance(item, np.ndarray):
return tuple(item.tolist())
else:
raise NotImplementedError | def _ensure_tuple(item) | Simply ensure that the passed item is a tuple. If it is not, then
convert it if possible, or raise a NotImplementedError
Args:
item: the item that needs to become a tuple
Returns:
the item casted as a tuple
Raises:
NotImplementedError: if converting the given item to a tuple
is not implemented. | 1.952951 | 2.138718 | 0.913141 |
stepsize = 0.01
orbiterror = 1e-16
computeps = b.get_compute(compute, check_visible=False, force_ps=True)
ltte = computeps.get_value('ltte', check_visible=False, **kwargs)
hier = b.hierarchy
starrefs = hier.get_stars()
orbitrefs = hier.get_orbits()
def mean_anom(t0, t0_perpass, period):
# TODO: somehow make this into a constraint where t0 and mean anom
# are both in the compute options if dynamic_method==nbody
# (one is constrained from the other and the orbit.... nvm, this gets ugly)
return 2 * np.pi * (t0 - t0_perpass) / period
masses = [b.get_value('mass', u.solMass, component=component, context='component') * c.G.to('AU3 / (Msun d2)').value for component in starrefs] # GM
smas = [b.get_value('sma', u.AU, component=component, context='component') for component in orbitrefs]
eccs = [b.get_value('ecc', component=component, context='component') for component in orbitrefs]
incls = [b.get_value('incl', u.rad, component=component, context='component') for component in orbitrefs]
per0s = [b.get_value('per0', u.rad, component=component, context='component') for component in orbitrefs]
long_ans = [b.get_value('long_an', u.rad, component=component, context='component') for component in orbitrefs]
t0_perpasses = [b.get_value('t0_perpass', u.d, component=component, context='component') for component in orbitrefs]
periods = [b.get_value('period', u.d, component=component, context='component') for component in orbitrefs]
vgamma = b.get_value('vgamma', context='system', unit=u.solRad/u.d)
t0 = b.get_value('t0', context='system', unit=u.d)
# mean_anoms = [mean_anom(t0, t0_perpass, period) for t0_perpass, period in zip(t0_perpasses, periods)]
mean_anoms = [b.get_value('mean_anom', u.rad, component=component, context='component') for component in orbitrefs]
return dynamics_bs(times, masses, smas, eccs, incls, per0s, long_ans, \
mean_anoms, t0, vgamma, stepsize, orbiterror, ltte,
return_roche_euler=return_roche_euler) | def dynamics_from_bundle_bs(b, times, compute=None, return_roche_euler=False, **kwargs) | Parse parameters in the bundle and call :func:`dynamics`.
See :func:`dynamics` for more detailed information.
NOTE: you must either provide compute (the label) OR all relevant options
as kwargs (ltte)
Args:
b: (Bundle) the bundle with a set hierarchy
times: (list or array) times at which to run the dynamics
stepsize: (float, optional) stepsize for the integration
[default: 0.01]
orbiterror: (float, optional) orbiterror for the integration
[default: 1e-16]
ltte: (bool, default False) whether to account for light travel time effects.
Returns:
t, xs, ys, zs, vxs, vys, vzs. t is a numpy array of all times,
the remaining are a list of numpy arrays (a numpy array per
star - in order given by b.hierarchy.get_stars()) for the cartesian
positions and velocities of each star at those same times. | 3.823373 | 3.432931 | 1.113734 |
# if visibility == 0, it should remain 0
# if visibility == 0.5, it should stay 0.5 if mu > 0 else it should become 0
# if visibility == 1, it should stay 1 if mu > 0 else it should become 0
# this can all by easily done by multiplying by int(mu>0) (1 if visible, 0 if hidden)
return {comp_no: mesh.visibilities * (mesh.mus > 0).astype(int) for comp_no, mesh in meshes.items()}, None, None | def only_horizon(meshes, xs, ys, zs, expose_horizon=False) | Check all visible or partial triangles to see if they're behind the horizon,
by checking the direction of the z-component of the normals (ie hidden if mu<0) | 6.445913 | 5.91162 | 1.09038 |
centers_flat = meshes.get_column_flat('centers')
vertices_flat = meshes.get_column_flat('vertices')
triangles_flat = meshes.get_column_flat('triangles') # should handle offset automatically
if horizon_method=='boolean':
normals_flat = meshes.get_column_flat('tnormals')
elif horizon_method=='linear':
normals_flat = meshes.get_column_flat('vnormals')
else:
raise NotImplementedError
# viewing_vector is defined as star -> earth
# NOTE: this will need to flip if we change the convention on the z-direction
viewing_vector = np.array([0., 0., 1.])
# we need to send in ALL vertices but only the visible triangle information
info = libphoebe.mesh_visibility(viewing_vector,
vertices_flat,
triangles_flat,
normals_flat,
tvisibilities=True,
taweights=True,
method=horizon_method,
horizon=expose_horizon)
visibilities = meshes.unpack_column_flat(info['tvisibilities'], computed_type='triangles')
weights = meshes.unpack_column_flat(info['taweights'], computed_type='triangles')
if expose_horizon:
horizons = info['horizon']
# TODO: we need to do this per component and somehow return them in
# a predictable order or as a dictionary a la the other returned quantities
horizons = [vertices_flat[horizon_i] for horizon_i in horizons]
else:
horizons = None
return visibilities, weights, horizons | def native(meshes, xs, ys, zs, expose_horizon=False, horizon_method='boolean') | TODO: add documentation
this is the new eclipse detection method in libphoebe | 5.684752 | 5.445904 | 1.043858 |
# TODO: make this a decorator?
if isinstance(item, list) and len(item)==1:
item = item[0]
if isinstance(item, ParameterSet):
return item.filter(context=context, check_visible=False)
elif isinstance(item, str):
return b.filter(item, context=context, check_visible=False)
else:
logger.debug("_get_system_ps got {}".format(item))
raise NotImplementedError("_get_system_ps does not support item with type: {}".format(type(item))) | def _get_system_ps(b, item, context='component') | parses the input arg (either twig or PS) to retrieve the actual parametersets | 3.483934 | 3.311276 | 1.052142 |
return ConstraintParameter(q._bundle, "requiv_L1(%s, %d)" % (", ".join(["{%s}" % (param.uniquetwig if hasattr(param, 'uniquetwig') else param.expr) for param in (q, syncpar, ecc, sma, incl_star, long_an_star, incl_orb, long_an_orb)]), compno)) | def roche_requiv_L1(q, syncpar, ecc, sma, incl_star, long_an_star, incl_orb, long_an_orb, compno=1) | TODO: add documentation | 7.319535 | 7.023833 | 1.0421 |
return ConstraintParameter(q._bundle, "requiv_contact_L1(%s, %d)" % (", ".join(["{%s}" % (param.uniquetwig if hasattr(param, 'uniquetwig') else param.expr) for param in (q, sma)]), compno)) | def roche_requiv_contact_L1(q, sma, compno=1) | TODO: add documentation | 14.889975 | 13.680387 | 1.088418 |
return ConstraintParameter(requiv._bundle, "requiv_to_pot_contact({}, {}, {}, {})".format(_get_expr(requiv), _get_expr(q), _get_expr(sma), compno)) | def requiv_to_pot_contact(requiv, q, sma, compno=1) | TODO: add documentation | 6.564673 | 5.978806 | 1.097991 |
return ConstraintParameter(pot._bundle, "pot_to_requiv_contact({}, {}, {}, {})".format(_get_expr(pot), _get_expr(q), _get_expr(sma), compno)) | def pot_to_requiv_contact(pot, q, sma, compno=1) | TODO: add documentation | 6.781385 | 6.046932 | 1.121459 |
return ConstraintParameter(ecc._bundle, "esinw2per0({}, {})".format(_get_expr(ecc), _get_expr(esinw))) | def esinw2per0(ecc, esinw) | TODO: add documentation | 11.916354 | 9.943285 | 1.198432 |
# print "***", "ecosw2per0({}, {})".format(_get_expr(ecc), _get_expr(ecosw))
return ConstraintParameter(ecc._bundle, "ecosw2per0({}, {})".format(_get_expr(ecc), _get_expr(ecosw))) | def ecosw2per0(ecc, ecosw) | TODO: add documentation | 6.383268 | 6.147469 | 1.038357 |
return ConstraintParameter(t0_perpass._bundle, "t0_perpass_to_supconj({}, {}, {}, {})".format(_get_expr(t0_perpass), _get_expr(period), _get_expr(ecc), _get_expr(per0))) | def t0_perpass_to_supconj(t0_perpass, period, ecc, per0) | TODO: add documentation | 4.326012 | 3.973647 | 1.088675 |
return ConstraintParameter(t0_supconj._bundle, "t0_supconj_to_perpass({}, {}, {}, {})".format(_get_expr(t0_supconj), _get_expr(period), _get_expr(ecc), _get_expr(per0))) | def t0_supconj_to_perpass(t0_supconj, period, ecc, per0) | TODO: add documentation | 4.418787 | 4.085656 | 1.081537 |
return ConstraintParameter(t0_ref._bundle, "t0_ref_to_supconj({}, {}, {}, {})".format(_get_expr(t0_ref), _get_expr(period), _get_expr(ecc), _get_expr(per0))) | def t0_ref_to_supconj(t0_ref, period, ecc, per0) | TODO: add documentation | 4.556489 | 4.200365 | 1.084784 |
orbit_ps = _get_system_ps(b, orbit)
# We want to get the parameters in THIS orbit, but calling through
# the bundle in case we need to create it.
# To do that, we need to know the search parameters to get items from this PS.
metawargs = orbit_ps.meta
metawargs.pop('qualifier')
# Now we'll define the parameters in case they don't exist and need to be created
sma_def = FloatParameter(qualifier='sma', value=8.0, default_unit=u.solRad, description='Semi major axis')
incl_def = FloatParameter(qualifier='incl', value=90.0, default_unit=u.deg, description='Orbital inclination angle')
asini_def = FloatParameter(qualifier='asini', value=8.0, default_unit=u.solRad, description='Projected semi major axis')
# And now call get_or_create on the bundle
sma, created = b.get_or_create('sma', sma_def, **metawargs)
incl, created = b.get_or_create('incl', incl_def, **metawargs)
asini, created = b.get_or_create('asini', asini_def, **metawargs)
if solve_for in [None, asini]:
lhs = asini
rhs = sma * sin(incl)
elif solve_for == sma:
lhs = sma
rhs = asini / sin(incl)
elif solve_for == incl:
lhs = incl
rhs = arcsin(asini/sma)
else:
raise NotImplementedError
#- return lhs, rhs, args_as_pss
return lhs, rhs, {'orbit': orbit} | def asini(b, orbit, solve_for=None) | Create a constraint for asini in an orbit.
If any of the required parameters ('asini', 'sma', 'incl') do not
exist in the orbit, they will be created.
:parameter b: the :class:`phoebe.frontend.bundle.Bundle`
:parameter str orbit: the label of the orbit in which this
constraint should be built
:parameter str solve_for: if 'asini' should not be the derived/constrained
parameter, provide which other parameter should be derived
(ie 'sma' or 'incl')
:returns: lhs (Parameter), rhs (ConstraintParameter), args (list of arguments
that were passed to this function) | 4.521835 | 4.060493 | 1.113617 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.