code
string
signature
string
docstring
string
loss_without_docstring
float64
loss_with_docstring
float64
factor
float64
def get(self, twig=None, check_visible=True, check_default=True, **kwargs):
    """Return the single :class:`Parameter` matching the search.

    Works exactly like :meth:`filter` except exactly one result must match,
    and the Parameter itself (not a ParameterSet) is returned.
    Also see :meth:`get_parameter` (an alias of this method).

    :parameter str twig: (optional) the search twig
    :parameter bool check_visible: whether to hide invisible parameters
    :parameter bool check_default: whether to exclude parameters tagged _default
    :parameter **kwargs: meta-tags to search
    :return: the resulting :class:`Parameter`
    :raises ValueError: if either 0 or more than 1 results are found
    """
    kwargs['check_visible'] = check_visible
    kwargs['check_default'] = check_default
    matches = self.filter(twig=twig, **kwargs)
    n_found = len(matches)
    if n_found == 0:
        # TODO: custom exception?
        raise ValueError("0 results found")
    if n_found != 1:
        # TODO: custom exception?
        raise ValueError("{} results found: {}".format(n_found, matches.twigs))
    # exactly one match -- hand back the Parameter itself
    return matches._params[0]
4.657818
4.155999
1.120746
def exclude(self, twig=None, check_visible=True, **kwargs):
    """Exclude the results of this filter from the current ParameterSet.

    See :meth:`filter` for the available search options.
    """
    matched = self.filter(twig=twig, check_visible=check_visible, **kwargs)
    return self - matched
5.183196
5.197235
0.997299
def get_or_create(self, qualifier, new_parameter, **kwargs):
    """Get a :class:`Parameter` by qualifier, creating/attaching it if missing.

    Note: running this on a ParameterSet that is NOT a
    :class:`phoebe.frontend.bundle.Bundle` will NOT add the Parameter to the
    bundle, only to this temporary ParameterSet.

    :parameter str qualifier: the qualifier of the Parameter (not the twig)
    :parameter new_parameter: the :class:`Parameter` to attach if no result found
    :parameter **kwargs: meta-tags to search; also applied to new_parameter
        when it is attached
    :return: (Parameter, created)
    :rtype: :class:`Parameter`, bool
    :raises ValueError: if more than 1 result was found
    """
    existing = self.filter_or_get(qualifier=qualifier, **kwargs)
    if isinstance(existing, Parameter):
        # exactly one match -- nothing was created
        return existing, False
    if len(existing):
        # TODO: custom exception?
        raise ValueError("more than 1 result was found")
    # no match at all: attach the provided parameter, then fetch it back
    self._attach_params(ParameterSet([new_parameter]), **kwargs)
    logger.debug("creating and attaching new parameter: {}".format(new_parameter.qualifier))
    return self.filter_or_get(qualifier=qualifier, **kwargs), True
4.836788
4.20376
1.150586
# TODO: check to see if protected (required by a current constraint or # by a backend) self._params = [p for p in self._params if p != param]
def _remove_parameter(self, param)
Remove a Parameter from the ParameterSet :parameter param: the :class:`Parameter` object to be removed :type param: :class:`Parameter`
16.2873
18.035191
0.903084
def remove_parameter(self, twig=None, **kwargs):
    """Remove a single :class:`Parameter` from the ParameterSet.

    Note: removing Parameters from a ParameterSet will not remove them from
    any parent ParameterSets (including the Bundle).

    :parameter str twig: the twig to search for the parameter
    :parameter **kwargs: meta-tags to search
    :raises ValueError: if 0 or more than 1 results are found
    """
    target = self.get(twig=twig, **kwargs)
    self._remove_parameter(target)
5.499566
7.468978
0.736321
def remove_parameters_all(self, twig=None, **kwargs):
    """Remove all :class:`Parameter`s matching the search from the ParameterSet.

    Any Parameter that a :func:`filter` call with the same arguments would
    return is removed.  Note: this does not remove them from any parent
    ParameterSets (including the Bundle).

    :parameter str twig: the twig to search for the parameter
    :parameter **kwargs: meta-tags to search
    """
    # search with both visibility and default checks disabled so nothing is missed
    doomed = self.filter(twig=twig, check_visible=False, check_default=False, **kwargs)
    for victim in doomed.to_list():
        self._remove_parameter(victim)
5.376429
7.068092
0.760662
def get_quantity(self, twig=None, unit=None, default=None, t=None, **kwargs):
    """Return the quantity of a matching :class:`Parameter`.

    :parameter str twig: the twig to search for the parameter
    :parameter unit: units for the returned result (if applicable)
    :parameter default: value to return if no parameter matches the search;
        if None (the default) a failed search raises instead
    :parameter t: time at which to compute the quantity (passed through;
        not fully supported yet)
    :parameter **kwargs: meta-tags to search; if the matched parameter's own
        qualifier appears in kwargs, that value is returned as an override
    :return: the quantity of the matching Parameter
    """
    # TODO: for time derivatives will need to use t instead of time (time
    # gets passed to twig filtering)

    # BUGFIX: the original condition read `default is not None is not None`,
    # which chains to `(default is not None) and (None is not None)` and is
    # always False -- the provided default was silently ignored.
    if default is not None:
        # then we need to do a filter first to see if parameter exists
        if not len(self.filter(twig=twig, **kwargs)):
            return default

    param = self.get_parameter(twig=twig, **kwargs)

    if param.qualifier in kwargs.keys():
        # then we have an "override" value that was passed, and we should
        # just return that.
        # Example b.get_value('teff', teff=6000) returns 6000
        return kwargs.get(param.qualifier)

    return param.get_quantity(unit=unit, t=t)
7.718192
7.470244
1.033191
def set_quantity(self, twig=None, value=None, **kwargs):
    """Set the quantity of the single matching :class:`Parameter`.

    :parameter str twig: the twig to search for the parameter
    :parameter value: the quantity to set
    :parameter **kwargs: meta-tags to search
    """
    # TODO: handle twig having parameter key (value@, default_unit@, adjust@, etc)
    # TODO: does this return anything (update the docstring)?
    target = self.get_parameter(twig=twig, **kwargs)
    return target.set_quantity(value=value, **kwargs)
17.217459
15.522491
1.109194
def get_value(self, twig=None, unit=None, default=None, t=None, **kwargs):
    """Get the value of a :class:`Parameter` in this ParameterSet.

    :parameter str twig: the twig to search for the parameter
    :parameter unit: units for the returned result (if applicable); if None,
        the value is returned in the Parameter's default_unit
    :parameter default: what to return if the parameter cannot be found; if
        None (default) an error is raised instead (units of default are NOT
        converted)
    :parameter t: time at which to compute the value (time-dependent
        parameters only; not fully supported yet, use with caution)
    :parameter **kwargs: meta-tags to search
    :return: value (type depending on the type of the Parameter)
    """
    # TODO: for time derivatives will need to use t instead of time (time
    # gets passed to twig filtering)
    if default is not None:
        # check for existence first so the caller gets the default instead
        # of an exception
        if not len(self.filter(twig=twig, **kwargs)):
            return default

    param = self.get_parameter(twig=twig, **kwargs)

    # if hasattr(param, 'default_unit'):  # This breaks for constraint parameters
    is_float_like = isinstance(param, FloatParameter) or \
        isinstance(param, FloatArrayParameter)
    if is_float_like:
        # only float-like parameters accept unit/time arguments
        return param.get_value(unit=unit, t=t, **kwargs)
    return param.get_value(**kwargs)
6.837676
6.233988
1.096838
def set_value(self, twig=None, value=None, **kwargs):
    """Set the value of a :class:`Parameter` in this ParameterSet.

    Note: setting the value of a Parameter in a ParameterSet WILL change that
    Parameter across any parent ParameterSets (including the Bundle).

    :parameter set twig: the twig to search for the parameter; if no twig
        matches and no value was given, the first positional argument is
        treated as the value itself
    :parameter value: the value to set (send a Quantity to provide units)
    :parameter **kwargs: meta-tags to search; may include 'index' or 'time'
        for array-valued parameters
    :raises ValueError: if 0 or more than 1 results are found
    """
    # TODO: handle twig having parameter key (value@, default_unit@, adjust@, etc)
    # TODO: does this return anything (update the docstring)?
    if twig is not None and value is None:
        # then try to support value as the first argument if no matches with twigs
        if not isinstance(twig, str):
            value = twig
            twig = None
        # BUGFIX: the original passed check_default=check_default, but
        # check_default is not a parameter of this method -- that raised a
        # NameError whenever this branch was reached.  Any caller-supplied
        # check_default still flows through **kwargs.
        elif not len(self.filter(twig=twig, **kwargs)):
            value = twig
            twig = None

    if "index" in kwargs.keys():
        # delegate to set_index_value for a single entry of an array parameter
        return self.get_parameter(twig=twig,
                                  **kwargs).set_index_value(value=value,
                                                            **kwargs)

    if "time" in kwargs.keys():
        if not len(self.filter(**kwargs)):
            # then let's try filtering without time and seeing if we get a
            # FloatArrayParameter so that we can use set_index_value instead
            time = kwargs.pop("time")
            param = self.get_parameter(twig=twig, **kwargs)
            if not isinstance(param, FloatArrayParameter):
                raise TypeError

            # TODO: do we need to be more clever about time qualifier for
            # ETV datasets? TODO: is this robust enough... this won't search
            # for times outside the existing ParameterSet. We could also
            # try param.get_parent_ps().get_parameter('time'), but this
            # won't work when outside the bundle (which is used within
            # backends.py to set fluxes, etc)
            # print "*** get_parameter(qualifier='times', **kwargs)", {k:v for k,v in kwargs.items() if k not in ['qualifier']}
            time_param = self.get_parameter(qualifier='times',
                                            **{k: v for k, v in kwargs.items()
                                               if k not in ['qualifier']})
            index = np.where(time_param.get_value() == time)[0]
            return param.set_index_value(value=value, index=index, **kwargs)

    return self.get_parameter(twig=twig,
                              **kwargs).set_value(value=value,
                                                  **kwargs)
6.768817
6.440697
1.050945
def set_value_all(self, twig=None, value=None, check_default=False, **kwargs):
    """Set the value of every matching :class:`Parameter` in this ParameterSet.

    Any Parameter that a :func:`filter` call with the same arguments would
    return has its value set.  Note: setting the value WILL change that
    Parameter across any parent ParameterSets (including the Bundle).

    :parameter str twig: the twig to search for the parameter; if no twig
        matches and no value was given, the first positional argument is
        treated as the value itself
    :parameter value: the value to set (send a Quantity to provide units)
    :parameter bool check_default: whether to exclude any default values;
        defaults to False (unlike all filtering)
    :parameter **kwargs: meta-tags to search
    """
    if twig is not None and value is None:
        # support calling with the value as the only positional argument
        if not isinstance(twig, str):
            value, twig = twig, None
        elif not len(self.filter(twig=twig, check_default=check_default, **kwargs)):
            value, twig = twig, None

    matched = self.filter(twig=twig, check_default=check_default, **kwargs).to_list()

    if not kwargs.pop('ignore_none', False) and not len(matched):
        raise ValueError("no parameters found")

    for matched_param in matched:
        if "index" in kwargs.keys():
            # array-entry assignment delegates to set_index_value instead
            return self.get_parameter(twig=twig,
                                      **kwargs).set_index_value(value=value,
                                                                **kwargs)
        matched_param.set_value(value=value, **kwargs)
4.236095
4.202339
1.008033
def get_default_unit(self, twig=None, **kwargs):
    """Return the default unit of the single matching :class:`Parameter`.

    :parameter str twig: the twig to search for the parameter
    :parameter **kwargs: meta-tags to search
    """
    target = self.get_parameter(twig=twig, **kwargs)
    return target.get_default_unit()
5.28401
3.942966
1.340111
def set_default_unit(self, twig=None, unit=None, **kwargs):
    """Set the default unit of the single matching :class:`Parameter`.

    :parameter str twig: the twig to search for the parameter; if no twig
        matches and no unit was given, the first positional argument is
        treated as the unit itself
    :parameter unit: the new default unit
    :parameter **kwargs: meta-tags to search
    """
    if twig is not None and unit is None:
        # then try to support unit as the first argument if no matches with twigs
        # NOTE: unit is None here, so `isinstance(unit, u.Unit)` can never be
        # True -- kept for fidelity with the sibling *_all variant
        if isinstance(unit, u.Unit) or not isinstance(twig, str):
            unit = twig
            twig = None
        # BUGFIX: the original passed check_default=check_default, but
        # check_default is not a parameter of this method -- NameError when
        # this branch was reached.  A caller-supplied check_default still
        # flows through **kwargs.
        elif not len(self.filter(twig=twig, **kwargs)):
            unit = twig
            twig = None

    return self.get_parameter(twig=twig, **kwargs).set_default_unit(unit)
6.370314
6.200592
1.027372
def set_default_unit_all(self, twig=None, unit=None, **kwargs):
    """Set the default unit of every matching :class:`Parameter`.

    :parameter str twig: the twig to search for the parameter; if no twig
        matches and no unit was given, the first positional argument is
        treated as the unit itself
    :parameter unit: the new default unit
    :parameter **kwargs: meta-tags to search
    """
    if twig is not None and unit is None:
        # then try to support unit as the first argument if no matches with twigs
        if isinstance(unit, u.Unit) or not isinstance(twig, str):
            unit = twig
            twig = None
        # BUGFIX: the original passed check_default=check_default, but
        # check_default is not a parameter of this method -- NameError when
        # this branch was reached.  A caller-supplied check_default still
        # flows through **kwargs.
        elif not len(self.filter(twig=twig, **kwargs)):
            unit = twig
            twig = None

    for param in self.filter(twig=twig, **kwargs).to_list():
        param.set_default_unit(unit)
6.187865
6.01194
1.029263
def get_description(self, twig=None, **kwargs):
    """Return the description of the single matching :class:`Parameter`.

    :parameter str twig: the twig to search for the parameter
    :parameter **kwargs: meta-tags to search
    """
    target = self.get_parameter(twig=twig, **kwargs)
    return target.get_description()
6.217199
4.581664
1.356974
def _show_or_save(self, save, show, animate, draw_sidebars=True,
                  draw_title=True, tight_layout=False, subplot_grid=None,
                  **kwargs):
    """Draw/animate and show and/or save an autofig plot.

    :parameter save: filename to save to (or False/None to skip saving)
    :parameter show: whether to display the figure
    :parameter animate: whether to animate over times instead of drawing a
        single frame
    :parameter bool draw_sidebars: passed through to autofig
    :parameter bool draw_title: passed through to autofig
    :parameter bool tight_layout: passed through to autofig
    :parameter subplot_grid: passed through to autofig
    :parameter **kwargs: 'times'/'time' (frames or single frame to draw) and
        'save_kwargs' (dict forwarded to the animation writer)
    :return: (autofig figure, matplotlib animation-or-figure)
    """
    if animate and not show and not save:
        # without this the animation would be computed and then discarded
        logger.warning("setting show to True since animate=True and save not provided")
        show = True

    if animate:
        # prefer times over time
        times = kwargs.get('times', kwargs.get('time', None))
        save_kwargs = kwargs.get('save_kwargs', {})

        if times is None:
            # then let's try to get all SYNTHETIC times
            # it would be nice to only do ENABLED, but then we have to worry about compute
            # it would also be nice to worry about models... but then you should filter first
            logger.info("no times were providing, so defaulting to animate over all dataset times")
            times = []
            for dataset in self.datasets:
                ps = self.filter(dataset=dataset, context='model')
                if len(ps.times):
                    # for the case of meshes/spectra
                    times += [float(t) for t in ps.times]
                else:
                    for param in ps.filter(qualifier='times').to_list():
                        times += list(param.get_value())
            # de-duplicate and order the frames
            times = sorted(list(set(times)))

        logger.info("calling autofig.animate(i={}, draw_sidebars={}, draw_title={}, tight_layout={}, save={}, show={}, save_kwargs={})".format(times, draw_sidebars, draw_title, tight_layout, save, show, save_kwargs))
        mplanim = self.gcf().animate(i=times,
                                     draw_sidebars=draw_sidebars,
                                     draw_title=draw_title,
                                     tight_layout=tight_layout,
                                     subplot_grid=subplot_grid,
                                     save=save,
                                     show=show,
                                     save_kwargs=save_kwargs)

        afig = self.gcf()
        # clear the autofig figure
        self.clf()

        return afig, mplanim
    else:
        time = kwargs.get('time', None)
        if isinstance(time, str):
            # a twig was passed for time -- resolve it to a float first
            time = self.get_value(time, context=['component', 'system'])
        logger.info("calling autofig.draw(i={}, draw_sidebars={}, draw_title={}, tight_layout={}, save={}, show={})".format(time, draw_sidebars, draw_title, tight_layout, save, show))
        fig = self.gcf().draw(i=time,
                              draw_sidebars=draw_sidebars,
                              draw_title=draw_title,
                              tight_layout=tight_layout,
                              subplot_grid=subplot_grid,
                              save=save, show=show)
        # clear the figure so next call will start over and future shows will work
        afig = self.gcf()
        self.clf()

        return afig, fig
3.992759
3.923128
1.017749
def show(self, **kwargs):
    """Draw and show the plot.

    All kwargs are forwarded to :meth:`_show_or_save`, with show=True,
    save=False, animate=False used as defaults.
    """
    for key, default in (('show', True), ('save', False), ('animate', False)):
        kwargs.setdefault(key, default)
    return self._show_or_save(**kwargs)
4.100742
3.590332
1.142162
def savefig(self, filename, **kwargs):
    """Draw and save the plot.

    :parameter str filename: filename to save to.  Be careful of extensions
        here... matplotlib accepts many different image formats while other
        backends will only export to html.
    """
    expanded = os.path.expanduser(filename)
    kwargs.setdefault('show', False)
    kwargs.setdefault('animate', False)
    kwargs.setdefault('save', expanded)
    return self._show_or_save(**kwargs)
4.540197
5.766174
0.787385
def copy(self):
    """Deepcopy the parameter (with a new uniqueid).

    All other tags remain the same, so some other tag should be changed
    before attaching the copy back to a ParameterSet or Bundle.

    :return: the copied :class:`Parameter` object
    """
    serialized = self.to_json()
    duplicate = parameter_from_json(serialized)
    # TODO: may need to subclass for Parameters that require bundle by using this line instead:
    # cpy = parameter_from_json(s, bundle=self._bundle)
    duplicate.set_uniqueid(_uniqueid())
    return duplicate
12.585782
9.167743
1.372833
def to_string_short(self):
    """Return a short, abbreviated string representation of the parameter.

    Constrained parameters are prefixed with '* '.  See also :meth:`to_string`.
    """
    shown = self.get_quantity() if hasattr(self, 'quantity') else self.get_value()
    is_constrained = hasattr(self, 'constrained_by') and len(self.constrained_by) > 0
    if is_constrained:
        return "* {:>30}: {}".format(self.uniquetwig_trunc, shown)
    return "{:>32}: {}".format(self.uniquetwig_trunc, shown)
4.519995
4.698044
0.962101
def save(self, filename, incl_uniqueid=False):
    """Save the Parameter to a JSON-formatted ASCII file.

    :parameter str filename: relative or fullpath to the file
    :parameter bool incl_uniqueid: whether to include the uniqueid in the
        serialized output
    :return: filename
    :rtype: str
    """
    filename = os.path.expanduser(filename)
    # use a context manager so the file handle is closed even if json.dump
    # raises (the original open()/close() pair leaked it on error)
    with open(filename, 'w') as f:
        json.dump(self.to_json(incl_uniqueid=incl_uniqueid), f,
                  sort_keys=True, indent=0, separators=(',', ': '))
    return filename
2.437037
2.642322
0.922309
def to_json(self, incl_uniqueid=False):
    """Return a JSON-ready dictionary holding all information for this parameter.

    :parameter bool incl_uniqueid: whether to include the 'uniqueid' entry
        (always included for detached_job parameters)
    :return: dict of meta-fields mapped to JSON-serializable values
    :raises NotImplementedError: if some field value cannot be coerced to a
        string
    """
    def _parse(k, v):
        # coerce a single (field, value) pair into a JSON-serializable value
        if k=='value':
            if isinstance(self._value, nparray.ndarray):
                # serialize the nparray object itself (converted to the
                # default unit when one is attached)
                if self._value.unit is not None and hasattr(self, 'default_unit'):
                    v = self._value.to(self.default_unit).to_dict()
                else:
                    v = self._value.to_dict()
            if isinstance(v, u.Quantity):
                v = self.get_value() # force to be in default units
            if isinstance(v, np.ndarray):
                v = v.tolist()
            return v
        elif k=='limits':
            # strip units off each limit (None entries pass through)
            return [vi.value if hasattr(vi, 'value') else vi for vi in v]
        elif v is None:
            return v
        elif isinstance(v, str):
            return v
        elif isinstance(v, dict):
            return v
        elif isinstance(v, float) or isinstance(v, int) or isinstance(v, list):
            return v
        elif _is_unit(v):
            return str(v.to_string())
        else:
            # last resort: stringify anything else
            try:
                return str(v)
            except:
                raise NotImplementedError("could not parse {} of '{}' to json".format(k, self.uniquetwig))

    # derived twig fields are omitted; uniqueid only on request (or for
    # detached jobs, which need it to be recoverable)
    return {k: _parse(k, v) for k,v in self.to_dict().items()
            if (v is not None and
                k not in ['twig', 'uniquetwig', 'quantity'] and
                (k!='uniqueid' or incl_uniqueid or self.qualifier=='detached_job'))}
3.813682
3.813151
1.000139
def get_meta(self, ignore=['uniqueid']):
    """Return all the meta-tag properties for this Parameter.

    :parameter list ignore: list of keys to exclude from the returned dict
    :return: an ordered dictionary of tag properties
    """
    wanted = [field for field in _meta_fields_all if field not in ignore]
    return OrderedDict((field, getattr(self, field)) for field in wanted)
5.415915
7.617987
0.710938
def uniquetwig(self, ps=None):
    """Return the shortest twig that uniquely points to this Parameter.

    See also :meth:`twig`.

    :parameter ps: :class:`ParameterSet` in which the returned uniquetwig
        will point to this Parameter; defaults to the parent Bundle when
        available
    :return: uniquetwig
    :rtype: str
    """
    parent = ps if ps is not None else self._bundle
    if parent is None:
        # nothing to disambiguate against -- the full twig is the best we can do
        return self.twig
    return parent._uniquetwig(self.twig)
6.078891
4.774642
1.273162
return "@".join([getattr(self, k) for k in _meta_fields_twig if getattr(self, k) is not None])
def twig(self)
The twig of a Parameter is a single string with the individual :meth:`meta` tags separated by '@' symbols. This twig gives a single string which can point back to this Parameter. see also :meth:`uniquetwig` :return: twig (full) of this Parameter
9.361527
11.462349
0.81672
def is_visible(self):
    """Return whether this parameter is currently visible.

    see also :meth:`visible_if`

    :return: whether this parameter is currently visible (and therefore
        shown in ParameterSets and visible to :meth:`ParameterSet.filter`)
    :rtype: bool
    """
    def is_visible_single(visible_if):
        # evaluate one comma-separated clause of self.visible_if
        # visible_if syntax: [ignore,these]qualifier:value
        if visible_if.lower() == 'false':
            return False

        # otherwise we need to find the parameter we're referencing and check its value
        if visible_if[0]=='[':
            # leading [a,b] lists meta-tags to drop when locating the
            # referenced parameter
            remove_metawargs, visible_if = visible_if[1:].split(']')
            remove_metawargs = remove_metawargs.split(',')
        else:
            remove_metawargs = []

        qualifier, value = visible_if.split(':')

        if 'hierarchy.' in qualifier:
            # TODO: set specific syntax (hierarchy.get_meshables:2)
            # then this needs to do some logic on the hierarchy
            hier = self._bundle.hierarchy
            if not len(hier.get_value()):
                # then hierarchy hasn't been set yet, so we can't do any
                # of these tests
                return True

            method = qualifier.split('.')[1]

            if value in ['true', 'True']:
                value = True
            elif value in ['false', 'False']:
                value = False

            return getattr(hier, method)(self.component) == value

        else:
            # the parameter needs to have all the same meta data except qualifier
            # TODO: switch this to use self.get_parent_ps ?
            metawargs = {k:v for k,v in self.get_meta(ignore=['twig', 'uniquetwig', 'uniqueid']+remove_metawargs).items() if v is not None}
            metawargs['qualifier'] = qualifier
            # metawargs['twig'] = None
            # metawargs['uniquetwig'] = None
            # metawargs['uniqueid'] = None
            # if metawargs.get('component', None) == '_default':
            #     metawargs['component'] = None

            try:
                # this call is quite expensive and bloats every get_parameter(check_visible=True)
                param = self._bundle.get_parameter(check_visible=False, check_default=False, **metawargs)
            except ValueError:
                # let's not let this hold us up - sometimes this can happen when copying
                # parameters (from copy_for) in order that the visible_if parameter
                # happens later
                logger.debug("parameter not found when trying to determine if visible, {}".format(metawargs))
                return True

            #~ print "***", qualifier, param.qualifier, param.get_value(), value

            if isinstance(param, BoolParameter):
                # normalize string booleans before comparing
                if value in ['true', 'True']:
                    value = True
                elif value in ['false', 'False']:
                    value = False

            if isinstance(value, str) and value[0] in ['!', '~']:
                # leading ! or ~ negates the comparison
                return param.get_value() != value[1:]
            elif value=='<notempty>':
                # special token: visible only when the referenced value is non-empty
                return len(param.get_value()) > 0
            else:
                return param.get_value() == value

    if self.visible_if is None:
        return True

    if not self._bundle:
        # then we may not be able to do the check, for now let's just return True
        return True

    # all comma-separated clauses must pass
    return np.all([is_visible_single(visible_if_i) for visible_if_i in self.visible_if.split(',')])
5.773333
5.725249
1.008399
def get_parent_ps(self):
    """Return a ParameterSet of all Parameters sharing this one's meta-tags.

    All Parameters in the same :class:`phoebe.frontend.bundle.Bundle` which
    share the same meta-tags (except qualifier, twig, uniquetwig) are
    included.

    :return: the parent :class:`ParameterSet`, or None when not attached to
        a bundle
    """
    if self._bundle is None:
        return None

    shared_tags = {k: v for k, v in self.meta.items()
                   if k not in ['qualifier', 'twig', 'uniquetwig']}
    return self._bundle.filter(**shared_tags)
12.858848
4.233209
3.037612
def get_value(self, *args, **kwargs):
    """Base implementation of get_value; subclasses override this.

    Handles only the "override" shortcut shared by all subclasses: if the
    parameter's own qualifier is present in kwargs, that value is returned
    directly (e.g. teff_param.get_value('teff', teff=6000) returns 6000).
    Otherwise returns None; see the individual subclass implementations
    (FloatParameter.get_value, ArrayParameter.get_value, etc.) for the
    real casting/unit handling.
    """
    has_override = self.qualifier in kwargs.keys()
    if has_override:
        return kwargs.get(self.qualifier)
    return None
8.382403
10.210029
0.820997
def expand_value(self, **kwargs):
    """Expand the selection, resolving wildcard patterns against choices.

    Each entry of the current value is matched (exactly or via fnmatch
    wildcards) against self.choices; matching choices are collected in
    the order the value entries are encountered.
    """
    expanded = []
    for pattern in self.get_value(**kwargs):
        for candidate in self.choices:
            if candidate in expanded:
                continue
            if pattern == candidate or fnmatch(candidate, pattern):
                expanded.append(candidate)
    return expanded
3.739032
3.081684
1.213308
def remove_not_valid_selections(self):
    """Update the value to remove any selections that are (no longer) valid."""
    kept = []
    for selection in self.get_value():
        if self.valid_selection(selection):
            kept.append(selection)
    self.set_value(kept)
4.564979
3.30033
1.383188
def within_limits(self, value):
    """Check whether a value falls within the set limits.

    :parameter value: float or Quantity to test.  If value is a float, it
        is assumed that it has the same units as default_units.
    """
    lower = self.limits[0]
    upper = self.limits[1]
    # a None bound is treated as unbounded on that side
    below_ok = lower is None or value >= lower
    above_ok = upper is None or value <= upper
    return below_ok and above_ok
2.617558
2.805488
0.933014
def within_limits(self, value):
    """Check whether a value falls within the set limits.

    :parameter value: float or Quantity to test.  If value is a float, it
        is assumed that it has the same units as default_units.
    """
    if isinstance(value, int) or isinstance(value, float):
        # bare numbers are interpreted in this parameter's default units
        value = value * self.default_unit

    lower = self.limits[0]
    upper = self.limits[1]
    below_ok = lower is None or value >= lower
    above_ok = upper is None or value <= upper
    return below_ok and above_ok
2.605259
2.405729
1.08294
def is_constraint(self):
    """Return the constraint parameter that constrains this parameter.

    Returns None when this parameter is not constrained.
    """
    constraint_uid = self._is_constraint
    if constraint_uid is None:
        return None
    return self._bundle.get_parameter(context='constraint',
                                      uniqueid=constraint_uid)
12.801581
8.615189
1.485932
def constrained_by(self):
    """Return the list of parameters that constrain this parameter.

    Empty when this parameter is not constrained.
    """
    if self._is_constraint is None:
        return []
    others = []
    for var in self.is_constraint._vars:
        candidate = var.get_parameter()
        # exclude this parameter itself from the list
        if candidate.uniqueid != self.uniqueid:
            others.append(candidate)
    return others
5.727289
4.551027
1.258461
def in_constraints(self):
    """Return the constraint expressions in which this parameter constrains another."""
    return [self._bundle.get_parameter(context='constraint', uniqueid=uid)
            for uid in self._in_constraints]
10.17793
8.094599
1.257373
def constrains(self):
    """Return the list of parameters that are constrained by this parameter."""
    constrained = []
    for constraint in self.in_constraints:
        for var in constraint._vars:
            candidate = var.get_parameter()
            # only the constraint's own target (same component+qualifier) counts
            if candidate.component == constraint.component and \
                    candidate.qualifier == constraint.qualifier:
                if candidate not in constrained and \
                        candidate.uniqueid != self.uniqueid:
                    constrained.append(candidate)
    return constrained
5.105208
4.434852
1.151157
def related_to(self):
    """Return all parameters that either constrain or are constrained by this one."""
    related = []
    # NOTE: assumes in_constraints returns a fresh list each call, since we
    # append to it -- TODO confirm against the in_constraints implementation
    constraints = self.in_constraints
    if self.is_constraint is not None:
        constraints.append(self.is_constraint)

    for constraint in constraints:
        for var in constraint._vars:
            candidate = var.get_parameter()
            if candidate not in related and candidate.uniqueid != self.uniqueid:
                related.append(candidate)
    return related
4.352293
3.278968
1.327336
def to_string_short(self):
    """Return a short, abbreviated string representation of the parameter.

    Temporarily tightens numpy's global print options so the array is
    truncated to fit; see also :meth:`to_string`.
    """
    opt = np.get_printoptions()
    np.set_printoptions(threshold=8, edgeitems=3,
                        linewidth=opt['linewidth']-len(self.uniquetwig)-2)
    try:
        str_ = super(FloatArrayParameter, self).to_string_short()
    finally:
        # always restore global numpy print options, even if the parent
        # formatting raises (the original left them mutated on error)
        np.set_printoptions(**opt)
    return str_
6.53434
6.377415
1.024606
def interp_value(self, **kwargs):
    """Interpolate a value in THIS array given a value from ANOTHER array.

    Both arrays must live in the same parent :class:`ParameterSet`.  Only
    simple 1d linear interpolation (numpy.interp) is supported, and no check
    is made that the interpolation is over an independent parameter -- use
    with caution.  Example: b['flux@lc01@model'].interp_value(times=10.2).

    NOTE: interpolation by phase is not supported (convert via
    Bundle.to_time first) and units are not supported (provide/receive
    values in default units).

    :parameter **kwargs: a single qualifier=value pair to interpolate with
        (usually time=value or wavelength=value)
    :raises KeyError: if more than one qualifier is passed, if the qualifier
        is not in the parent ParameterSet, or if it does not point to a
        FloatArrayParameter
    """
    # TODO: add support for units
    # TODO: add support for non-linear interpolation (probably would need to use scipy)?
    # TODO: add support for interpolating in phase_space
    if len(kwargs.keys()) > 1:
        raise KeyError("interp_value only takes a single qualifier-value pair")

    # BUGFIX: kwargs.items()[0] is Python-2-only (dict views are not
    # subscriptable in py3); list(...) works on both
    qualifier, qualifier_interp_value = list(kwargs.items())[0]

    if isinstance(qualifier_interp_value, str):
        # then assume its a twig and try to resolve
        # for example: time='t0_supconj'
        qualifier_interp_value = self._bundle.get_value(qualifier_interp_value, context=['system', 'component'])

    parent_ps = self.get_parent_ps()

    if qualifier not in parent_ps.qualifiers:
        # TODO: handle plural to singular (having to say
        # interp_value(times=5) is awkward)
        raise KeyError("'{}' not valid qualifier (must be one of {})".format(qualifier, parent_ps.qualifiers))

    qualifier_parameter = parent_ps.get(qualifier=qualifier)

    if not isinstance(qualifier_parameter, FloatArrayParameter):
        raise KeyError("'{}' does not point to a FloatArrayParameter".format(qualifier))

    return np.interp(qualifier_interp_value, qualifier_parameter.get_value(), self.get_value())
6.065485
4.612261
1.315078
def set_property(self, **kwargs):
    """Set any property of the underlying nparray object.

    :parameter **kwargs: property names mapped to the values to assign
    :raises ValueError: if the current value is not an nparray object
    """
    if not isinstance(self._value, nparray.ndarray):
        raise ValueError("value is not a nparray object")

    for prop_name, prop_value in kwargs.items():
        setattr(self._value, prop_name, prop_value)
4.916221
3.223011
1.525351
repr_ = self.get_value() repr_str = '["{}"]'.format(repr_.replace(', ', '", "').replace('(', '", ["').replace(')', '"]')).replace(']"', '"]').replace('""', '"').replace(']"', ']') return json.loads(repr_str)
def _parse_repr(self)
turn something like "orbit:outer(orbit:inner(star:starA, star:starB), star:starC)" into ['orbit:outer', ['orbit:inner', ['star:starA', 'star:starB'], 'star:starC']]
7.183147
5.438761
1.320732
try: i = structure.index(item) except ValueError: for j,substructure in enumerate(structure): if isinstance(substructure, list): return self._recurse_find_trace(substructure, item, trace+[j]) else: return trace+[i]
def _recurse_find_trace(self, structure, item, trace=[])
given a nested structure from _parse_repr and find the trace route to get to item
2.336532
2.27704
1.026127
for i in trace: structure = structure[i] return structure
def _get_by_trace(self, structure, trace)
retrieve an item from the nested structure from _parse_repr given a trace (probably modified from _recurse_find_trace)
8.723035
5.319206
1.639913
def get_stars(self):
    """Return the 'component' label of all stars, primary -> secondary order."""
    tokens = re.findall(r"[\w']+", self.get_value())
    # in the flattened token stream each 'star' kind token is immediately
    # followed by its component label
    return [tokens[i+1] for i, token in enumerate(tokens) if token == 'star']
11.570756
9.691164
1.193949
def get_orbits(self):
    """Return the 'component' label of all orbits, primary -> secondary order.

    Orbits are discovered as the (deduplicated) parents of the stars.
    """
    orbits = []
    for star in self.get_stars():
        parent = self.get_parent_of(star)
        # skip top-level/placeholder parents and duplicates
        if parent is None or parent == 'component' or parent in orbits:
            continue
        orbits.append(parent)
    return orbits
6.367996
5.590562
1.139062
def get_stars_of_sibling_of(self, component):
    """Like get_sibling_of, but expand an orbit sibling into its stars.

    If the sibling is itself a star it is returned directly; if it is an
    orbit, the tree is followed recursively and all stars under that orbit
    are returned.
    """
    sibling = self.get_sibling_of(component)
    if sibling in self.get_stars():
        # the sibling is a star itself
        return sibling

    # the sibling is an orbit -- collect every star beneath it
    # TODO: do we need to make sure there aren't duplicates?
    # return list(set(stars))
    return [star for star in self.get_stars_of_children_of(sibling)]
4.528728
4.009229
1.129576
def get_children_of(self, component, kind=None):
    """Return the component labels of the children of a given component.

    :parameter component: label of the parent component
    :parameter kind: optionally restrict to one kind (str) or list of kinds
    :return: list of child labels ([] for non-orbit components)
    """
    structure, trace, item = self._get_structure_and_trace(component)
    item_kind, item_label = item.split(':')

    if isinstance(kind, str):
        kind = [kind]

    if item_kind not in ['orbit']:
        # only orbits have children
        # return None
        return []

    # the children live in the structure entry immediately after this orbit
    children_entry = self._get_by_trace(structure, trace[:-1]+[trace[-1]+1])
    # we want to ignore suborbits
    # NOTE(review): py2-era check -- `unicode` distinguishes plain labels
    # from nested sub-lists; this will NameError on py3
    #return [str(ch.split(':')[-1]) for ch in items if isinstance(ch, unicode)]
    return [str(ch.split(':')[-1]) for ch in children_entry
            if isinstance(ch, unicode) and
            (kind is None or ch.split(':')[0] in kind)]
4.793083
4.629704
1.035289
def get_stars_of_children_of(self, component):
    """Like :meth:`get_children_of`, but recursively descends through any
    child orbits, returning all (grand)children stars under ``component``.
    Children that are neither stars nor orbits (e.g. envelopes) are skipped.
    """
    all_stars = self.get_stars()
    all_orbits = self.get_orbits()
    found = []
    for child in self.get_children_of(component):
        if child in all_stars:
            found.append(child)
        elif child in all_orbits:
            found += self.get_stars_of_children_of(child)
        # else: envelope/spot/etc — ignored
    return found
def get_child_of(self, component, ind, kind=None):
    """Return the child at index ``ind`` of ``component`` (optionally
    filtered by ``kind``), or None if the component has no children.
    """
    kids = self.get_children_of(component, kind=kind)
    return None if kids is None else kids[ind]
def get_primary_or_secondary(self, component, return_ind=False):
    """Return whether ``component`` is the 'primary' or 'secondary' member
    of its parent orbit.

    :parameter bool return_ind: if True, return the 1-based index (1 or 2)
        instead of the string label
    :return: 'primary'/'secondary' (or 1/2), or None if the component is
        beyond the second child; single components default to 'primary'
    """
    parent = self.get_parent_of(component)
    if parent is None:
        # a lone component (not in a binary) is treated as primary
        return 'primary'
    position = self.get_children_of(parent).index(component)
    if position > 1:
        return None
    if return_ind:
        return position + 1
    return ['primary', 'secondary'][position]
def get_meshables(self):
    """Return component labels that are meshable (stars and envelopes),
    excluding any star that has a sibling envelope (the envelope is meshed
    in its place for contact binaries).
    """
    tokens = re.findall(r"[\w']+", self.get_value())
    candidates = [tokens[idx + 1] for idx, tok in enumerate(tokens)
                  if tok in ['star', 'envelope']]
    # drop stars that are represented by a sibling envelope
    shadowed = [c for c in candidates if self.get_sibling_of(c, kind='envelope')]
    return [c for c in candidates if c not in shadowed]
def is_contact_binary(self, component):
    """Tell whether ``component`` (star or envelope) is part of a
    contact binary, by checking its siblings for an envelope.
    Especially useful for constraints.  Result is cached; the cache is
    rebuilt on a miss.
    """
    if component not in self._is_contact_binary:
        self._update_cache()
    return self._is_contact_binary.get(component)
def is_binary(self, component):
    """Tell whether ``component`` (star or envelope) is part of a binary,
    by checking its parent.  Especially useful for constraints.
    Result is cached; the cache is rebuilt on a miss.
    """
    if component not in self._is_binary:
        self._update_cache()
    return self._is_binary.get(component)
def vars(self):
    """Return a ParameterSet of all variables in this constraint.

    The ParameterSet is built lazily from self._vars on first access and
    cached in self._var_params thereafter.
    """
    if self._var_params is None:
        self._var_params = ParameterSet([v.get_parameter() for v in self._vars])
    return self._var_params
def get_parameter(self, twig=None, **kwargs):
    """Return a parameter from among this constraint's variables.

    :parameter str twig: search twig
    :raises KeyError: if no matching variable is found
    """
    kwargs['twig'] = twig
    kwargs['check_default'] = False
    kwargs['check_visible'] = False
    matches = self.vars.filter(**kwargs)
    if len(matches) == 1:
        return matches.get(check_visible=False, check_default=False)
    if len(matches) > 1:
        # Some constraints list a parameter twice; taking the first entry is
        # assumed safe here (all duplicates should share a uniqueid).
        return matches.to_list()[0]
    raise KeyError("no result found")
def flip_for(self, twig=None, expression=None, **kwargs):
    """Flip the constraint to solve for any of the parameters in the
    expression.

    :parameter str twig: twig of the parameter to newly solve for
    :parameter str expression: new expression (optional if sympy is
        available, required otherwise)
    :raises ValueError: if the target parameter is already constrained, or
        if no expression can be derived and sympy is unavailable
    """
    _orig_expression = self.get_value()

    # resolve the parameter we want to newly solve for
    kwargs['twig'] = twig
    newly_constrained_var = self._get_var(**kwargs)
    newly_constrained_param = self.get_parameter(**kwargs)

    check_kwargs = {k: v for k, v in newly_constrained_param.meta.items()
                    if k not in ['context', 'twig', 'uniquetwig']}
    check_kwargs['context'] = 'constraint'
    if len(self._bundle.filter(**check_kwargs)):
        raise ValueError("'{}' is already constrained".format(newly_constrained_param.twig))

    currently_constrained_var = self._get_var(qualifier=self.qualifier, component=self.component)
    currently_constrained_param = currently_constrained_var.get_parameter()

    import constraint
    if self.constraint_func is not None and hasattr(constraint, self.constraint_func):
        # the named built-in constraint may be able to re-derive itself with
        # the new solve_for target
        # NOTE(review): original guarded this with a bare `if True:` in place
        # of a try/except NotImplementedError — preserved as-is
        if True:
            lhs, rhs, constraint_kwargs = getattr(constraint, self.constraint_func)(
                self._bundle,
                solve_for=newly_constrained_param,
                **self.constraint_kwargs)
        expression = rhs._value

    if expression is not None:
        expression = expression
    elif _use_sympy:
        # solve symbolically for the new target
        eq_safe = "({}) - {}".format(self._value, currently_constrained_var.safe_label)
        expression = sympy.solve(eq_safe, newly_constrained_var.safe_label)[0]
    else:
        raise ValueError("must either have sympy installed or provide a new expression")

    self._qualifier = newly_constrained_param.qualifier
    self._component = newly_constrained_param.component
    self._kind = newly_constrained_param.kind
    self._value = str(expression)

    # reset the default_unit so set_default_unit doesn't complain about
    # incompatible units
    self._default_unit = None
    self.set_default_unit(newly_constrained_param.default_unit)

    self._update_bookkeeping()
    self._add_history(redo_func='flip_constraint',
                      redo_kwargs={'expression': expression,
                                   'uniqueid': newly_constrained_param.uniqueid},
                      undo_func='flip_constraint',
                      undo_kwargs={'expression': _orig_expression,
                                   'uniqueid': currently_constrained_param.uniqueid})
def get_status(self):
    """Return the current job status, checking a remote server or the local
    filesystem as appropriate, and cache the checked value in self._value.
    """
    if self._value == 'loaded':
        status = 'loaded'
    elif not _is_server and self._bundle is not None and self._server_status is not None:
        if not _can_requests:
            raise ImportError("requests module required for external jobs")
        if self._value in ['complete']:
            # terminal state — no need to re-query the server
            status = self._value
        else:
            url = self._server_status
            logger.info("checking job status on server from {}".format(url))
            resp = requests.get(url, timeout=5)
            try:
                payload = resp.json()
            except ValueError:
                # server returned non-JSON; keep the last-known status
                status = self._value
            else:
                status = payload['data']['attributes']['value']
    else:
        if self.status_method == 'exists':
            # a local job signals completion by writing an output file
            if os.path.isfile("_{}.out".format(self.uniqueid)):
                status = 'complete'
            else:
                status = 'unknown'
        else:
            raise NotImplementedError

    # remember the latest CHECKED status (for JSON export / clients);
    # get_value still calls this so it always reflects the CURRENT value
    self._value = status
    return status
def get_unbound_form(self):
    """Build the form without binding it to request data.

    Overrides the behavior of FormView.get_form_kwargs when the method is
    POST or PUT by stripping the fields that would bind the form.
    """
    kwargs = self.get_form_kwargs()
    # dropping these keeps the instantiated form unbound
    for bound_field in ("data", "files"):
        kwargs.pop(bound_field, None)
    return self.get_form_class()(**kwargs)
def get_form_success_data(self, form):
    """Build the JSON payload returned on a valid form submission.

    Always includes a fresh (unbound) invite form; when a membership is in
    play, also appends its rendered fragment to the appropriate list on the
    page (applicants / invitees / owners / managers / members).
    """
    data = {
        "html": render_to_string(
            "pinax/teams/_invite_form.html",
            {
                "invite_form": self.get_unbound_form(),
                "team": self.team
            },
            request=self.request
        )
    }
    membership = self.membership
    if membership is not None:
        # pick the page fragment matching the membership's state/role
        if membership.state == Membership.STATE_APPLIED:
            fragment_class = ".applicants"
        elif membership.state == Membership.STATE_INVITED:
            fragment_class = ".invitees"
        elif membership.state in (Membership.STATE_AUTO_JOINED, Membership.STATE_ACCEPTED):
            fragment_class = {
                Membership.ROLE_OWNER: ".owners",
                Membership.ROLE_MANAGER: ".managers",
                Membership.ROLE_MEMBER: ".members"
            }[membership.role]
        data.update({
            "append-fragments": {
                fragment_class: render_to_string(
                    "pinax/teams/_membership.html",
                    {
                        "membership": membership,
                        "team": self.team
                    },
                    request=self.request
                )
            }
        })
    return data
def _value(obj):
    """Strip quantity-like objects down to bare floats/arrays.

    Unwraps anything exposing ``.value`` (e.g. astropy quantities), maps
    element-wise over ndarrays of such objects, recurses into other
    iterables, and passes plain values through untouched.
    """
    if hasattr(obj, 'value'):
        return obj.value
    if isinstance(obj, np.ndarray):
        return np.array([item.value for item in obj])
    if hasattr(obj, '__iter__'):
        return [_value(item) for item in obj]
    return obj
def _estimate_delta(ntriangles, area):
    """Estimate the marching-mesh ``delta`` that yields roughly
    ``ntriangles`` triangles over a surface of the given ``area``
    (treating each triangle as approximately equilateral).
    """
    mean_triangle_area = float(area) / float(ntriangles)
    return np.sqrt(4. / np.sqrt(3) * mean_triangle_area)
def from_bundle(cls, b, compute=None, datasets=[], **kwargs):
    """Build a System from a Bundle and its hierarchy.

    :parameter b: the :class:`phoebe.frontend.bundle.Bundle`
    :parameter str compute: name of the computeoptions in the bundle (or a
        compute ParameterSet directly)
    :parameter list datasets: list of dataset names
    :parameter **kwargs: temporary overrides for computeoptions
    :return: an instantiated System, including its children Bodies
    """
    hier = b.hierarchy
    if not len(hier.get_value()):
        raise NotImplementedError("Meshing requires a hierarchy to exist")

    # pull general compute options (fall back to safe defaults if no
    # compute options were provided)
    if compute is not None:
        if isinstance(compute, str):
            compute_ps = b.get_compute(compute, check_visible=False)
        else:
            # assume compute is already a ParameterSet
            compute_ps = compute
        eclipse_method = compute_ps.get_value(qualifier='eclipse_method', **kwargs)
        horizon_method = compute_ps.get_value(qualifier='horizon_method', check_visible=False, **kwargs)
        dynamics_method = compute_ps.get_value(qualifier='dynamics_method', **kwargs)
        irrad_method = compute_ps.get_value(qualifier='irrad_method', **kwargs)
        boosting_method = compute_ps.get_value(qualifier='boosting_method', **kwargs)
        if conf.devel:
            mesh_init_phi = compute_ps.get_value(qualifier='mesh_init_phi', unit=u.rad, **kwargs)
        else:
            mesh_init_phi = 0.0
    else:
        eclipse_method = 'native'
        horizon_method = 'boolean'
        dynamics_method = 'keplerian'
        irrad_method = 'none'
        boosting_method = 'none'
        mesh_init_phi = 0.0

    # dynamics_method was already consumed above; drop it so it isn't
    # passed through twice
    if 'dynamics_method' in kwargs.keys():
        _dump = kwargs.pop('dynamics_method')

    meshables = hier.get_meshables()

    def get_distortion_method(hier, compute_ps, component, **kwargs):
        # envelopes and WD-style meshes are always roche-distorted
        if hier.get_kind_of(component) in ['envelope']:
            return 'roche'
        if compute_ps.get_value('mesh_method', component=component, **kwargs) == 'wd':
            return 'roche'
        return compute_ps.get_value('distortion_method', component=component, **kwargs)

    # NOTE: globals()[classname] is used because getattr doesn't work in
    # the current module; eventually the classname could be Disk, Spot, etc.
    bodies_dict = {comp: globals()[_get_classname(hier.get_kind_of(comp),
                                                  get_distortion_method(hier, compute_ps, comp, **kwargs))
                                   ].from_bundle(b, comp, compute,
                                                 dynamics_method=dynamics_method,
                                                 mesh_init_phi=mesh_init_phi,
                                                 datasets=datasets,
                                                 **kwargs)
                   for comp in meshables}

    # envelopes need to know their relationships with the underlying stars
    parent_envelope_of = {}
    for meshable in meshables:
        if hier.get_kind_of(meshable) == 'envelope':
            for starref in hier.get_siblings_of(meshable):
                parent_envelope_of[starref] = meshable

    return cls(bodies_dict,
               eclipse_method=eclipse_method,
               horizon_method=horizon_method,
               dynamics_method=dynamics_method,
               irrad_method=irrad_method,
               boosting_method=boosting_method,
               parent_envelope_of=parent_envelope_of)
def get_body(self, component):
    """Return the Body for ``component``.

    If the component is not a top-level body, it is assumed to be a child
    star of a contact_binary envelope, and the corresponding half of that
    envelope is returned.
    """
    if component in self._bodies:
        return self._bodies[component]
    # child star of a contact-binary envelope
    envelope = self._parent_envelope_of[component]
    return self._bodies[envelope].get_half(component)
def update_positions(self, time, xs, ys, zs, vxs, vys, vzs,
                     ethetas, elongans, eincls,
                     ds=None, Fs=None, ignore_effects=False):
    """Propagate new positions/orientations to every body.

    All arrays are for the current ``time`` and iterable over all bodies.
    """
    self.xs = np.array(_value(xs))
    self.ys = np.array(_value(ys))
    self.zs = np.array(_value(zs))

    for starref, body in self.items():
        body.update_position(time, xs, ys, zs, vxs, vys, vzs,
                             ethetas, elongans, eincls,
                             ds=ds, Fs=Fs, ignore_effects=ignore_effects)
def populate_observables(self, time, kinds, datasets, ignore_effects=False):
    """Populate observable columns for each (kind, dataset) pair on every
    body.

    :parameter time: current time (passed through to each body)
    :parameter list kinds: dataset kinds, zipped pairwise with ``datasets``
    :parameter list datasets: dataset labels
    :parameter bool ignore_effects: whether to skip reflection and features
        (useful for computing luminosities)
    """
    # BUGFIX: original used `is not 'none'`, an identity comparison against
    # a string literal whose result is implementation-dependent; equality
    # is the intended check.
    if self.irrad_method != 'none' and not ignore_effects:
        # TODO: only for kinds that require intensities (i.e. not orbit or
        # dynamical RVs, etc)
        self.handle_reflection()

    for kind, dataset in zip(kinds, datasets):
        for starref, body in self.items():
            body.populate_observable(time, kind, dataset)
def handle_eclipses(self, expose_horizon=False, **kwargs):
    """Detect the triangles at the horizon and the eclipsed triangles.

    :parameter str eclipse_method: algorithm for detecting the horizon or
        eclipses (defaults to the value set by computeoptions)
    :parameter str horizon_method: horizon algorithm (defaults likewise)
    :return: the computed horizon (analytic horizons are handled in
        backends.py since they don't depend on the mesh)
    """
    eclipse_method = kwargs.get('eclipse_method', self.eclipse_method)
    horizon_method = kwargs.get('horizon_method', self.horizon_method)

    # First check whether eclipses are even possible at these positions,
    # using the conservative max_r of each body: if no pair of bodies can
    # overlap on the sky, only the horizon needs computing.
    possible_eclipse = False
    if len(self.bodies) == 1:
        if self.bodies[0].__class__.__name__ == 'Envelope':
            possible_eclipse = True
        else:
            possible_eclipse = False
    else:
        logger.debug("system.handle_eclipses: determining if eclipses are possible from instantaneous_maxr")
        max_rs = [body.instantaneous_maxr for body in self.bodies]
        for i in range(0, len(max_rs)-1):
            for j in range(i+1, len(max_rs)):
                proj_sep_sq = sum([(c[i]-c[j])**2 for c in (self.xs, self.ys)])
                max_sep_ecl = max_rs[i] + max_rs[j]
                if proj_sep_sq < (1.05*max_sep_ecl)**2:
                    # this pair has the potential for eclipsing triangles
                    possible_eclipse = True
                    break

    if not possible_eclipse and not expose_horizon and horizon_method == 'boolean':
        eclipse_method = 'only_horizon'

    # meshes allows us to access and update columns in the meshes
    # *in memory*: update_columns propagates back to each body's mesh.
    meshes = self.meshes

    # Reset all visibilities to fully visible before re-detecting.
    # BUGFIX: the column name was misspelled 'visiblities', so the reset
    # never touched the real 'visibilities' column used below.
    meshes.update_columns('visibilities', 1.0)

    ecl_func = getattr(eclipse, eclipse_method)
    if eclipse_method == 'native':
        ecl_kwargs = {'horizon_method': horizon_method}
    else:
        ecl_kwargs = {}

    logger.debug("system.handle_eclipses: possible_eclipse={}, expose_horizon={}, calling {} with kwargs {}".format(possible_eclipse, expose_horizon, eclipse_method, ecl_kwargs))
    visibilities, weights, horizon = ecl_func(meshes,
                                              self.xs, self.ys, self.zs,
                                              expose_horizon=expose_horizon,
                                              **ecl_kwargs)

    # visibilities is a dict keyed by component label; the columns are
    # applied respectively.
    meshes.update_columns('visibilities', visibilities)
    if weights is not None:
        meshes.update_columns('weights', weights)

    return horizon
def instantaneous_maxr(self):
    """Return the maximum r (triangle furthest from the star's center) at
    the current time, computing and caching it on first access.

    :return: maximum r
    :rtype: float
    """
    logger.debug("{}.instantaneous_maxr".format(self.component))
    if 'maxr' not in self.inst_vals.keys():
        logger.debug("{}.instantaneous_maxr COMPUTING".format(self.component))
        self.inst_vals['maxr'] = max(self.mesh.rs.centers * self._scale)
    return self.inst_vals['maxr']
def _get_mass_by_index(self, index):
    """Return the mass at ``index``, or the summed masses when ``index``
    is an iterable of indices.
    """
    if not hasattr(index, '__iter__'):
        return self.masses[index]
    return sum([self.masses[i] for i in index])
def _get_coords_by_index(self, coords_array, index):
    """Return the coordinate at ``index``, or the mass-weighted
    center-of-mass coordinate when ``index`` is an iterable of indices.

    ``coords_array`` is a single axis array (xs, ys, or zs).
    """
    if not hasattr(index, '__iter__'):
        return coords_array[index]
    # center-of-mass along this axis, weighted by component masses
    return np.average([_value(coords_array[i]) for i in index],
                      weights=[self._get_mass_by_index(i) for i in index])
def get_standard_mesh(self, scaled=True):
    """Return (a copy of) the standard protomesh stored at theta=0.

    :parameter bool scaled: if True, return a ScaledProtoMesh scaled by
        self._scale; otherwise return an unscaled copy.
    """
    # TODO: allow taking etheta and retrieving a mesh at that true anomaly
    theta = 0.0
    proto = self._standard_meshes[theta]
    if scaled:
        # TODO: careful about self._scale vs an instantaneous scale
        return mesh.ScaledProtoMesh.from_proto(proto, self._scale)
    return proto.copy()
def reset_time(self, time, true_anom, elongan, eincl):
    """Store the new time and orientation angles, clear the record of
    which datasets have been populated, and reset cached state.
    """
    self.true_anom = true_anom
    self.elongan = elongan
    self.eincl = eincl
    self.time = time
    self.populated_at_time = []
    self.reset()
    return
def populate_observable(self, time, kind, dataset, **kwargs):
    """Fill the mesh columns needed for ``dataset`` of the given ``kind``
    at ``time``, dispatching to the matching _populate_<kind> method.
    Mesh/orbit kinds need no columns; already-populated datasets are
    skipped (except pblum-related kinds).
    """
    if kind in ['mesh', 'orb']:
        return

    if time == self.time and dataset in self.populated_at_time and 'pblum' not in kind:
        # columns already computed for this dataset at this time
        # TODO: handle intensities already computed by a /different/
        # dataset (e.g. RVs first, then an lc with the SAME passband/atm)
        return

    new_cols = getattr(self, '_populate_{}'.format(kind.lower()))(dataset, **kwargs)
    for key, col in new_cols.items():
        self.mesh.update_columns_dict({'{}:{}'.format(key, dataset): col})

    self.populated_at_time.append(dataset)
def polar_direction_xyz(self):
    """Return the current polar direction in Roche (xyz) coordinates, by
    transforming the system-frame (uvw) polar direction through the
    current orientation angles.
    """
    direction = mesh.spin_in_roche(self.polar_direction_uvw,
                                   self.true_anom, self.elongan, self.eincl)
    return direction.astype(float)
def get_target_volume(self, etheta=0.0, scaled=False):
    """Return the volume the Star should have at euler theta ``etheta``.

    Volume conservation is assumed, so the result does not actually depend
    on ``etheta`` yet.
    TODO: eventually volume could vary with d via a user-supplied scaling.
    """
    # TODO: make this a function of d instead of etheta?
    logger.debug("determining target volume at t={}, theta={}".format(self.time, etheta))

    volume = 4./3 * np.pi * self.requiv**3
    if scaled:
        return volume
    return volume / self._scale**3
def north_pole_uvw(self):
    """Return the location of the north pole in the global/system frame.

    # TODO: is this rpole scaling true for all distortion_methods?
    """
    rpole_scaled = self.instantaneous_rpole * self.sma
    return self.polar_direction_uvw * rpole_scaled + self.mesh._pos
def instantaneous_tpole(self):
    """Compute (and cache) the instantaneous polar temperature that yields
    the user-provided mean effective temperature (teff).

    Converts mean to polar by dividing total flux by the gravity-darkened
    flux (luminosities drop out); see PHOEBE Legacy scientific reference
    eq 5.20.
    """
    logger.debug("{}.instantaneous_tpole".format(self.component))
    if 'tpole' not in self.inst_vals.keys():
        logger.debug("{}.instantaneous_tpole COMPUTING".format(self.component))
        if self.mesh is None:
            raise ValueError("mesh must be computed before determining tpole")
        ratio = np.sum(self.mesh.areas) / np.sum(self.mesh.gravs.centers * self.mesh.areas)
        self.inst_vals['tpole'] = self.teff * ratio**(0.25)
    return self.inst_vals['tpole']
def _fill_loggs(self, mesh=None, ignore_effects=False):
    """Compute local surface gravity (log g) for each mesh element and
    store it in the mesh's 'loggs' column, applying any features unless
    ``ignore_effects``.
    """
    logger.debug("{}._fill_loggs".format(self.component))
    if mesh is None:
        mesh = self.mesh

    loggs = np.log10(mesh.normgrads.for_computations *
                     g_rel_to_abs(self.masses[self.ind_self], self.sma))

    if not ignore_effects:
        for feature in self.features:
            # features may operate in proto (Roche) or current coordinates
            if feature.proto_coords:
                loggs = feature.process_loggs(loggs, mesh.roche_coords_for_computations,
                                              s=self.polar_direction_xyz, t=self.time)
            else:
                loggs = feature.process_loggs(loggs, mesh.coords_for_computations,
                                              s=self.polar_direction_xyz, t=self.time)

    mesh.update_columns(loggs=loggs)

    if not self.needs_recompute_instantaneous:
        logger.debug("{}._fill_loggs: copying loggs to standard mesh".format(self.component))
        theta = 0.0
        self._standard_meshes[theta].update_columns(loggs=loggs)
def _fill_gravs(self, mesh=None, **kwargs):
    """Compute gravity-darkening corrections ('gravs') for each mesh
    element.  Requires _fill_loggs to have been called.
    """
    logger.debug("{}._fill_gravs".format(self.component))
    if mesh is None:
        mesh = self.mesh

    # TODO: rename 'gravs' to 'gdcs' (gravity darkening corrections)
    abs_grads = mesh.normgrads.for_computations * g_rel_to_abs(self.masses[self.ind_self], self.sma)
    gravs = (abs_grads / self.instantaneous_gpole)**self.gravb_bol

    mesh.update_columns(gravs=gravs)

    if not self.needs_recompute_instantaneous:
        logger.debug("{}._fill_gravs: copying gravs to standard mesh".format(self.component))
        theta = 0.0
        self._standard_meshes[theta].update_columns(gravs=gravs)
def _fill_teffs(self, mesh=None, ignore_effects=False, **kwargs):
    r"""Compute the local effective temperature of each mesh element.

    Requires _fill_loggs and _fill_gravs to have been called.  See PHOEBE
    Legacy scientific reference eq 5.23.
    """
    logger.debug("{}._fill_teffs".format(self.component))
    if mesh is None:
        mesh = self.mesh

    teffs = self.instantaneous_tpole * mesh.gravs.for_computations**0.25

    if not ignore_effects:
        for feature in self.features:
            # features may operate in proto (Roche) or current coordinates
            if feature.proto_coords:
                teffs = feature.process_teffs(teffs, mesh.roche_coords_for_computations,
                                              s=self.polar_direction_xyz, t=self.time)
            else:
                teffs = feature.process_teffs(teffs, mesh.coords_for_computations,
                                              s=self.polar_direction_xyz, t=self.time)

    mesh.update_columns(teffs=teffs)

    if not self.needs_recompute_instantaneous:
        logger.debug("{}._fill_teffs: copying teffs to standard mesh".format(self.component))
        theta = 0.0
        self._standard_meshes[theta].update_columns(teffs=teffs)
def _fill_abuns(self, mesh=None, abun=0.0):
    """Fill the mesh's 'abuns' column with a uniform abundance value.

    TODO: support per-element abundances from the frontend.
    """
    logger.debug("{}._fill_abuns".format(self.component))
    if mesh is None:
        mesh = self.mesh

    mesh.update_columns(abuns=abun)

    if not self.needs_recompute_instantaneous:
        logger.debug("{}._fill_abuns: copying abuns to standard mesh".format(self.component))
        theta = 0.0
        self._standard_meshes[theta].update_columns(abuns=abun)
def _fill_albedos(self, mesh=None, irrad_frac_refl=0.0):
    """Fill the mesh's 'irrad_frac_refl' column with a uniform albedo."""
    logger.debug("{}._fill_albedos".format(self.component))
    if mesh is None:
        mesh = self.mesh

    mesh.update_columns(irrad_frac_refl=irrad_frac_refl)

    if not self.needs_recompute_instantaneous:
        logger.debug("{}._fill_albedos: copying albedos to standard mesh".format(self.component))
        theta = 0.0
        self._standard_meshes[theta].update_columns(irrad_frac_refl=irrad_frac_refl)
def compute_pblum_scale(self, dataset, pblum, **kwargs):
    """Set the intensity scale for ``dataset`` such that the luminosity
    in relative units equals the provided ``pblum``.

    Intensities must already be computed for this dataset at this time.
    """
    logger.debug("{}.compute_pblum_scale(dataset={}, pblum={})".format(self.component, dataset, pblum))
    abs_luminosity = self.compute_luminosity(dataset, **kwargs)
    # scale all intensities so relative luminosity matches pblum
    self.set_pblum_scale(dataset, pblum / abs_luminosity)
def _populate_lp(self, dataset, **kwargs):
    """Populate columns necessary for an LP (line profile) dataset.

    Should not be called directly; use Body.populate_observable or
    System.populate_observables.  Currently just delegates to the RV
    columns (Doppler shifts are computed downstream).
    """
    logger.debug("{}._populate_lp(dataset={})".format(self.component, dataset))

    profile_rest = kwargs.get('profile_rest', self.lp_profile_rest.get(dataset))

    cols = self._populate_rv(dataset, **kwargs)
    # rvs = (cols['rvs']*u.solRad/u.d).to(u.m/u.s).value
    # cols['dls'] = cols['rvs']*profile_rest/c.c.si.value
    return cols
def _populate_rv(self, dataset, **kwargs):
    """Populate columns necessary for an RV dataset.

    Should not be called directly; use Body.populate_observable or
    System.populate_observables.  Fills flux columns first so each
    triangle's rv can be flux-weighted when integrating.
    """
    logger.debug("{}._populate_rv(dataset={})".format(self.component, dataset))

    cols = self._populate_lc(dataset, **kwargs)

    # rv per element is the z-component of the velocity vector; sign is
    # flipped from our right-handed system to rv conventions
    rvs = -1 * self.mesh.velocities.for_computations[:, 2]

    # gravitational redshift
    if self.do_rv_grav:
        rv_grav = c.G*(self.mass*u.solMass)/(self.instantaneous_rpole*u.solRad)/c.c
        # rvs are in solRad/d internally
        rvs += rv_grav.to('solRad/d').value

    cols['rvs'] = rvs
    return cols
def needs_remesh(self):
    """Whether the star needs to be re-meshed (for any reason): features
    present, misalignment, eccentricity, or non-keplerian dynamics.

    # TODO: could possibly drop the features check (only pulsations need it?)
    # TODO: what about dpdt, deccdt, dincldt, etc?
    """
    return (len(self.features) > 0
            or self.is_misaligned
            or self.ecc != 0
            or self.dynamics_method != 'keplerian')
def _build_mesh(self, mesh_method, **kwargs):
    """Build the raw (unscaled) roche mesh grid.

    Takes mesh_method and kwargs from the generic Body.intialize_mesh;
    intialize_mesh then fills columns and rescales to correct units.

    :return: (new_mesh dict, scale)
    """
    # sma scales between Roche and real units (Rsol, same as coordinates)
    sma = kwargs.get('sma', self.sma)

    mesh_args = self.instantaneous_mesh_args

    if mesh_method == 'marching':
        # TODO: do this during mesh initialization only and keep delta
        # fixed in time??
        ntriangles = kwargs.get('ntriangles', self.ntriangles)

        # surface area of the lobe is needed to estimate delta for
        # marching; the volume is later exposed to the user
        logger.debug("libphoebe.roche_area_volume{}".format(mesh_args))
        av = libphoebe.roche_area_volume(*mesh_args,
                                         choice=2,
                                         larea=True,
                                         lvolume=True)

        delta = _estimate_delta(ntriangles, av['larea'])

        logger.debug("libphoebe.roche_marching_mesh{}".format(mesh_args))
        new_mesh = libphoebe.roche_marching_mesh(*mesh_args,
                                                 delta=delta,
                                                 choice=2,
                                                 full=True,
                                                 max_triangles=ntriangles*2,
                                                 vertices=True,
                                                 triangles=True,
                                                 centers=True,
                                                 vnormals=True,
                                                 tnormals=True,
                                                 cnormals=False,
                                                 vnormgrads=True,
                                                 cnormgrads=False,
                                                 areas=True,
                                                 volume=False,
                                                 init_phi=self.mesh_init_phi)

        # also report the lobe volume and area: area is used for mesh
        # offsetting, volume is optionally exposed to the user
        new_mesh['volume'] = av['lvolume']  # * sma**3
        new_mesh['area'] = av['larea']  # * sma**2

        scale = sma

    elif mesh_method == 'wd':
        N = int(kwargs.get('gridsize', self.gridsize))

        q, F, d, Phi = mesh_args  # unpack

        the_grid = mesh_wd.discretize_wd_style(N, q, F, d, Phi)
        new_mesh = mesh.wd_grid_to_mesh_dict(the_grid, q, F, d)
        scale = sma

    else:
        raise NotImplementedError("mesh_method '{}' is not supported".format(mesh_method))

    return new_mesh, scale
def _build_mesh(self, mesh_method, **kwargs):
    """Build the raw (unscaled) spherical mesh grid.

    Takes mesh_method and kwargs from the generic Body.intialize_mesh;
    intialize_mesh then fills columns and rescales to correct units.
    If instantaneous masses/smas aren't provided, they are assumed
    time-independent and already stored in init.

    :return: (new_mesh dict, scale)
    """
    sma = kwargs.get('sma', self.sma)  # Rsol (same units as coordinates)

    mesh_args = self.instantaneous_mesh_args

    if mesh_method != 'marching':
        raise NotImplementedError("mesh_method '{}' is not supported".format(mesh_method))

    ntriangles = kwargs.get('ntriangles', self.ntriangles)

    av = libphoebe.sphere_area_volume(*mesh_args,
                                      larea=True,
                                      lvolume=True)
    delta = _estimate_delta(ntriangles, av['larea'])

    new_mesh = libphoebe.sphere_marching_mesh(*mesh_args,
                                              delta=delta,
                                              full=True,
                                              max_triangles=ntriangles*2,
                                              vertices=True,
                                              triangles=True,
                                              centers=True,
                                              vnormals=True,
                                              tnormals=True,
                                              cnormals=False,
                                              vnormgrads=True,
                                              cnormgrads=False,
                                              areas=True,
                                              volume=True,
                                              init_phi=self.mesh_init_phi)

    # also report the lobe volume and area: area is used for mesh
    # offsetting, volume is optionally exposed to the user
    new_mesh['volume'] = av['lvolume']
    new_mesh['area'] = av['larea']

    return new_mesh, sma
def populate_observable(self, time, kind, dataset, **kwargs):
    """Delegate observable population to each half of the envelope."""
    for half in self._halves:
        half.populate_observable(time, kind, dataset, **kwargs)
def from_bundle(cls, b, feature):
    """Initialize a Spot feature from the bundle.

    For stars in orbits, the spot drift rate follows from the sync
    parameter and orbital period; for a single star it follows from the
    rotation frequency (and longitude is fixed at pi/2).
    """
    feature_ps = b.get_feature(feature)

    colat = feature_ps.get_value('colat', unit=u.rad)
    longitude = feature_ps.get_value('long', unit=u.rad)

    if len(b.hierarchy.get_stars()) >= 2:
        star_ps = b.get_component(feature_ps.component)
        orbit_ps = b.get_component(b.hierarchy.get_parent_of(feature_ps.component))
        # drift rate relative to the co-rotating frame
        syncpar = star_ps.get_value('syncpar')
        period = orbit_ps.get_value('period')
        dlongdt = (syncpar - 1) / period * 2 * np.pi
    else:
        star_ps = b.get_component(feature_ps.component)
        dlongdt = star_ps.get_value('freq', unit=u.rad/u.d)
        longitude = np.pi/2

    radius = feature_ps.get_value('radius', unit=u.rad)
    relteff = feature_ps.get_value('relteff', unit=u.dimensionless_unscaled)

    t0 = b.get_value('t0', context='system', unit=u.d)

    return cls(colat, longitude, dlongdt, radius, relteff, t0)
def pointing_vector(self, s, time):
    """Return the unit-ish vector pointing at the spot center at ``time``.

    :parameter s: spin vector in Roche coordinates
    :parameter time: current time

    Basis vectors of the spin (primed) frame in Roche coordinates:
    ez' = s;  ex' built from ex with the s-component removed;  ey' = s x ex'.
    NOTE(review): the original does NOT normalize ex' despite the comment
    suggesting it should — preserved as-is.
    """
    elapsed = time - self._t0
    longitude = self._longitude + self._dlongdt * elapsed

    ex = np.array([1., 0., 0.])
    ezp = s
    exp = (ex - s*np.dot(s, ex))
    eyp = np.cross(s, exp)

    sin_colat = np.sin(self._colat)
    return (sin_colat*np.cos(longitude)*exp
            + sin_colat*np.sin(longitude)*eyp
            + np.cos(self._colat)*ezp)
def process_teffs(self, teffs, coords, s=np.array([0., 0., 1.]), t=None):
    """Scale the local teffs of all elements falling inside the spot.

    Any teff whose element lies within the "cone" defined by the spot
    (angular radius around the pointing vector) is multiplied by the
    spot's relteff factor.  The ``teffs`` array is modified in place and
    also returned.

    :parameter array teffs: array of teffs for computations
    :parameter array coords: array of coords for computations
    :parameter array s: spin vector in roche coordinates
    :parameter float t: current time (defaults to the spot's t0)
    :return: the (modified) teffs array
    """
    if t is None:
        # no time provided: evaluate the spot at its reference time t0
        t = self._t0

    spot_center = self.pointing_vector(s, t)
    logger.debug("spot.process_teffs at t={} with pointing_vector={} and radius={}".format(t, spot_center, self._radius))

    # cosine of the angle between each element and the spot center;
    # elements closer than the spot's angular radius are "inside"
    cos_angle = np.dot(coords, spot_center) / np.linalg.norm(coords, axis=1)
    inside = cos_angle > np.cos(self._radius)

    teffs[inside] = teffs[inside] * self._relteff
    return teffs
Change the local effective temperatures for any values within the "cone" defined by the spot. Any teff within the spot will have its current value multiplied by the "relteff" factor :parameter array teffs: array of teffs for computations :parameter array coords: array of coords for computations :t float: current time
4.384933
3.790871
1.156708
def from_bundle(cls, b, feature):
    """Initialize a Pulsation feature from the bundle.

    :parameter b: the :class:`phoebe.frontend.bundle.Bundle`
    :parameter str feature: label of the pulsation feature
    :return: an instantiated Pulsation feature
    """
    puls_ps = b.get_feature(feature)

    freq = puls_ps.get_value('freq', unit=u.d**-1)
    radamp = puls_ps.get_value('radamp', unit=u.dimensionless_unscaled)
    l = puls_ps.get_value('l', unit=u.dimensionless_unscaled)
    m = puls_ps.get_value('m', unit=u.dimensionless_unscaled)
    teffext = puls_ps.get_value('teffext')

    # G converted to solar units so that GM/R^3 comes out in d^-2,
    # matching freq^2
    GM = c.G.to('solRad3 / (solMass d2)').value * \
        b.get_value(qualifier='mass', component=puls_ps.component,
                    context='component', unit=u.solMass)
    R = b.get_value(qualifier='rpole', component=puls_ps.component,
                    section='component', unit=u.solRad)

    tanamp = GM/R**3/freq**2

    return cls(radamp, freq, l, m, tanamp, teffext)
Initialize a Pulsation feature from the bundle.
4.855508
4.754161
1.021317
def project_onto_potential(r, pot_name, *args):
    """Project the point ``r`` onto the equipotential surface ``pot_name``.

    Rescales ``r`` along its own direction with Newton steps on the
    potential until the potential value converges (or 100 iterations are
    reached).

    :parameter array r: initial 3D point
    :parameter str pot_name: basename of the potential; the functions
        ``<pot_name>`` and ``d<pot_name>d{x,y,z,r}`` must exist in this
        module's globals
    :parameter *args: potential parameters (D, q, F, p0)
    :return: a :class:`MeshVertex` on the equipotential surface
    """
    # look up the potential and its partial derivatives by name
    pot = globals()[pot_name]
    dpdx = globals()['d%sdx' % (pot_name)]
    dpdy = globals()['d%sdy' % (pot_name)]
    dpdz = globals()['d%sdz' % (pot_name)]
    dpdr = globals()['d%sdr' % (pot_name)]

    rmag, rmag0 = np.sqrt((r*r).sum()), 0
    lam, nu = r[0]/rmag, r[2]/rmag
    # direction cosines -- must not change during reprojection
    dc = np.array((lam, np.sqrt(1-lam*lam-nu*nu), nu))
    D, q, F, p0 = args  # documents the expected potential parameters

    # Newton iteration on the radius along the fixed direction dc
    n_iter = 0
    while np.abs(rmag-rmag0) > 1e-12 and n_iter < 100:
        rmag0 = rmag
        rmag = rmag0 - pot(rmag0*dc, *args)/dpdr(rmag0*dc, *args[:-1])
        n_iter += 1

    if n_iter == 100:
        logger.warning('projection did not converge')

    return MeshVertex(rmag*dc, dpdx, dpdy, dpdz, *args[:-1])
TODO: add documentation
4.040298
3.995276
1.011269
def from_server(cls, bundleid, server='http://localhost:5555', as_client=True):
    """Load a new bundle from a phoebe server. [NOT IMPLEMENTED]

    This is a constructor so should be called as:

    >>> b = Bundle.from_server('asdf', as_client=False)

    :parameter str bundleid: the identifier given to the bundle by the
        server
    :parameter str server: the host (and port) of the server
    :parameter bool as_client: whether to attach in client mode
        (default: True)
    :raises NotImplementedError: unless developer mode is enabled
    """
    if not conf.devel:
        raise NotImplementedError("'from_server' not officially supported for this release. Enable developer mode to test.")

    # TODO: run test message on server, if localhost and fails, attempt to
    # launch?
    url = "{}/{}/json".format(server, bundleid)
    logger.info("downloading bundle from {}".format(url))
    resp = requests.get(url, timeout=5)
    payload = resp.json()

    b = cls(payload['data'])

    if as_client:
        b.as_client(as_client, server=server,
                    bundleid=payload['meta']['bundleid'])
        logger.warning("This bundle is in client mode, meaning all\ computations will be handled by the server at {}. To disable\ client mode, call as_client(False) or in the future pass\ as_client=False to from_server".format(server))

    return b
Load a new bundle from a server. [NOT IMPLEMENTED] Load a bundle from a phoebe server. This is a constructor so should be called as: >>> b = Bundle.from_server('asdf', as_client=False) :parameter str bundleid: the identifier given to the bundle by the server :parameter str server: the host (and port) of the server :parameter bool as_client: whether to attach in client mode (default: True)
6.786339
7.37197
0.92056
def from_legacy(cls, filename, add_compute_legacy=True, add_compute_phoebe=True):
    """Load a bundle from a PHOEBE 1.0 Legacy file.

    This is a constructor so should be called as:

    >>> b = Bundle.from_legacy('myfile.phoebe')

    :parameter str filename: relative or full path to the file
    :parameter bool add_compute_legacy: passed through to
        :func:`io.load_legacy`
    :parameter bool add_compute_phoebe: passed through to
        :func:`io.load_legacy`
    :return: instantiated :class:`Bundle` object
    """
    logger.warning("importing from legacy is experimental until official 1.0 release")
    return io.load_legacy(os.path.expanduser(filename),
                          add_compute_legacy, add_compute_phoebe)
Load a bundle from a PHOEBE 1.0 Legacy file. This is a constructor so should be called as: >>> b = Bundle.from_legacy('myfile.phoebe') :parameter str filename: relative or full path to the file :return: instantiated :class:`Bundle` object
5.410957
6.668587
0.81141
def default_star(cls, starA='starA', force_build=False):
    """Load a bundle with a single default star.

    For convenience, this function is available at the top-level as
    <phoebe.default_star> as well as
    <phoebe.frontend.bundle.Bundle.default_star>.

    This is a constructor, so should be called as:

    >>> b = Bundle.default_star()

    Arguments
    -----------
    * `starA` (string, optional, default='starA'): the label to be set for
        starA.
    * `force_build` (bool, optional, default=False): whether to force
        building the bundle from scratch.  If False, pre-cached files will
        be loaded whenever possible to save time.

    Returns
    -----------
    * an instantiated <phoebe.frontend.bundle.Bundle> object.
    """
    if not force_build and not conf.devel:
        # fast path: load the pre-built bundle shipped with phoebe and
        # rename the star if requested
        bundle = cls.open(os.path.join(_bundle_cache_dir, 'default_star.bundle'))

        if starA != 'starA':
            bundle.rename_component('starA', starA)

        return bundle

    bundle = cls()
    # IMPORTANT NOTE: if changing any of the defaults for a new release,
    # make sure to update the cached files (see frontend/default_bundles
    # directory for script to update all cached bundles)
    bundle.add_star(component=starA)
    bundle.set_hierarchy(_hierarchy.component(bundle[starA]))
    bundle.add_compute(distortion_method='rotstar', irrad_method='none')
    return bundle
For convenience, this function is available at the top-level as <phoebe.default_star> as well as <phoebe.frontend.bundle.Bundle.default_star>. Load a bundle with a single default star (the sun). This is a constructor, so should be called as: >>> b = Bundle.default_star() Arguments ----------- * `starA` (string, optional, default='starA'): the label to be set for starA. * `force_build` (bool, optional, default=False): whether to force building the bundle from scratch. If False, pre-cached files will be loaded whenever possible to save time. Returns ----------- * an instantiated <phoebe.frontend.bundle.Bundle> object.
10.651378
10.872824
0.979633
def default_binary(cls, starA='primary', starB='secondary', orbit='binary',
                   contact_binary=False, force_build=False):
    """Load a bundle with a default binary as the system.

    primary - secondary

    For convenience, this function is available at the top-level as
    <phoebe.default_binary> as well as
    <phoebe.frontend.bundle.Bundle.default_binary>.

    This is a constructor, so should be called as:

    >>> b = Bundle.default_binary()

    Arguments
    -----------
    * `starA` (string, optional, default='primary'): the label to be set for
        the primary component.
    * `starB` (string, optional, default='secondary'): the label to be set for
        the secondary component.
    * `orbit` (string, optional, default='binary'): the label to be set for
        the binary component.
    * `contact_binary` (bool, optional, default=False): whether to also add
        an envelope (with component='contact_envelope') and set the
        hierarchy to a contact binary system.
    * `force_build` (bool, optional, default=False): whether to force
        building the bundle from scratch.  If False, pre-cached files will
        be loaded whenever possible to save time.

    Returns
    -----------
    * an instantiated <phoebe.frontend.bundle.Bundle> object.
    """
    if not force_build and not conf.devel:
        # fast path: load the appropriate pre-built bundle and rename
        # components as requested
        if contact_binary:
            b = cls.open(os.path.join(_bundle_cache_dir, 'default_contact_binary.bundle'))
        else:
            b = cls.open(os.path.join(_bundle_cache_dir, 'default_binary.bundle'))

        secondary = 'secondary'
        if starA != 'primary':
            if starA == 'secondary':
                # avoid a name collision while swapping the two labels
                secondary = 'temp_secondary'
                b.rename_component('secondary', secondary)
            b.rename_component('primary', starA)
        if starB != 'secondary':
            b.rename_component(secondary, starB)
        if orbit != 'binary':
            # BUGFIX: previously renamed to the literal string 'orbit',
            # ignoring the user-supplied orbit label
            b.rename_component('binary', orbit)

        return b

    b = cls()
    # IMPORTANT NOTE: if changing any of the defaults for a new release,
    # make sure to update the cached files (see frontend/default_bundles
    # directory for script to update all cached bundles)
    if contact_binary:
        orbit_defaults = {'sma': 3.35, 'period': 0.5}
        star_defaults = {'requiv': 1.5}
    else:
        orbit_defaults = {'sma': 5.3, 'period': 1.0}
        star_defaults = {'requiv': 1.0}
    b.add_star(component=starA, **star_defaults)
    b.add_star(component=starB, **star_defaults)
    b.add_orbit(component=orbit, **orbit_defaults)
    if contact_binary:
        b.add_component('envelope', component='contact_envelope')
        b.set_hierarchy(_hierarchy.binaryorbit,
                        b[orbit], b[starA], b[starB],
                        b['contact_envelope'])
    else:
        b.set_hierarchy(_hierarchy.binaryorbit,
                        b[orbit], b[starA], b[starB])

    b.add_compute()

    return b
For convenience, this function is available at the top-level as <phoebe.default_binary> as well as <phoebe.frontend.bundle.Bundle.default_binary>. primary - secondary This is a constructor, so should be called as: >>> b = Bundle.default_binary() Arguments ----------- * `starA` (string, optional, default='primary'): the label to be set for the primary component. * `starB` (string, optional, default='secondary'): the label to be set for the secondary component. * `orbit` (string, optional, default='binary'): the label to be set for the binary component. * `contact_binary` (bool, optional, default=False): whether to also add an envelope (with component='contact_envelope') and set the hierarchy to a contact binary system. * `force_build` (bool, optional, default=False): whether to force building the bundle from scratch. If False, pre-cached files will be loaded whenever possible to save time. Returns ----------- * an instantiated <phoebe.frontend.bundle.Bundle> object.
3.661016
3.44029
1.064159
def default_triple(cls, inner_as_primary=True, inner_as_overcontact=False,
                   starA='starA', starB='starB', starC='starC',
                   inner='inner', outer='outer',
                   contact_envelope='contact_envelope'):
    """Load a bundle with a default triple system.

    Set inner_as_primary based on what hierarchical configuration you
    want.

    `inner_as_primary = True`: starA - starB -- starC

    `inner_as_primary = False`: starC -- starA - starB

    This is a constructor, so should be called as:

    >>> b = Bundle.default_triple_primary()

    :parameter bool inner_as_primary: whether the inner-binary should be
        the primary component of the outer-orbit
    :parameter bool inner_as_overcontact: whether the inner pair should
        share a contact envelope
    :return: instantiated :class:`Bundle` object
    :raises NotImplementedError: unless developer mode is enabled
    """
    if not conf.devel:
        raise NotImplementedError("'default_triple' not officially supported for this release. Enable developer mode to test.")

    bundle = cls()
    bundle.add_star(component=starA)
    bundle.add_star(component=starB)
    bundle.add_star(component=starC)
    bundle.add_orbit(component=inner, period=1)
    bundle.add_orbit(component=outer, period=10)

    if inner_as_overcontact:
        # the inner pair shares a common envelope
        bundle.add_envelope(component=contact_envelope)
        inner_hier = _hierarchy.binaryorbit(bundle[inner],
                                            bundle[starA],
                                            bundle[starB],
                                            bundle[contact_envelope])
    else:
        inner_hier = _hierarchy.binaryorbit(bundle[inner],
                                            bundle[starA],
                                            bundle[starB])

    if inner_as_primary:
        hierstring = _hierarchy.binaryorbit(bundle[outer], inner_hier, bundle[starC])
    else:
        hierstring = _hierarchy.binaryorbit(bundle[outer], bundle[starC], inner_hier)
    bundle.set_hierarchy(hierstring)

    bundle.add_constraint(constraint.keplers_third_law_hierarchical,
                          outer, inner)
    # TODO: does this constraint need to be rebuilt when things change?
    # (ie in set_hierarchy)

    bundle.add_compute()

    return bundle
Load a bundle with a default triple system. Set inner_as_primary based on what hierarchical configuration you want. inner_as_primary = True: starA - starB -- starC inner_as_primary = False: starC -- starA - starB This is a constructor, so should be called as: >>> b = Bundle.default_triple_primary() :parameter bool inner_as_primary: whether the inner-binary should be the primary component of the outer-orbit :return: instantiated :class:`Bundle` object
4.256108
4.491623
0.947566
def save(self, filename, clear_history=True, incl_uniqueid=False,
         compact=False):
    """Save the bundle to a JSON-formatted ASCII file.

    :parameter str filename: relative or full path to the file
    :parameter bool clear_history: whether to clear history log items
        before saving (default: True)
    :parameter bool incl_uniqueid: whether to include uniqueids in the
        file (only needed if its necessary to maintain the uniqueids
        when reloading)
    :parameter bool compact: whether to use compact file-formatting
        (may be quicker to save/load, but not as easily readable)
    :return: the filename
    """
    if clear_history:
        # TODO: let's not actually clear history, but rather skip the
        # context when saving
        self.remove_history()

    # TODO: add option for clear_models, clear_feedback
    # NOTE: PS.save will handle os.path.expanduser
    return super(Bundle, self).save(filename,
                                    incl_uniqueid=incl_uniqueid,
                                    compact=compact)
Save the bundle to a JSON-formatted ASCII file. :parameter str filename: relative or full path to the file :parameter bool clear_history: whether to clear history log items before saving (default: True) :parameter bool incl_uniqueid: whether to including uniqueids in the file (only needed if its necessary to maintain the uniqueids when reloading) :parameter bool compact: whether to use compact file-formatting (maybe be quicker to save/load, but not as easily readable) :return: the filename
11.436285
12.772573
0.895378
def export_legacy(self, filename):
    """Export the bundle to a PHOEBE 1.0 legacy file.

    :parameter str filename: relative or full path of the file to write
    :return: the result of :func:`io.pass_to_legacy`
    """
    logger.warning("exporting to legacy is experimental until official 1.0 release")
    return io.pass_to_legacy(self, os.path.expanduser(filename))
TODO: add docs
9.28906
8.509773
1.091575