_id
stringlengths 2
7
| title
stringlengths 1
88
| partition
stringclasses 3
values | text
stringlengths 75
19.8k
| language
stringclasses 1
value | meta_information
dict |
|---|---|---|---|---|---|
q16900
|
Context.block_view
|
train
|
def block_view(self, mri):
    # type: (str) -> Block
    """Get a view of a block.

    Args:
        mri: The mri of the controller hosting the block

    Returns:
        Block: The block we control
    """
    # Hand the controller a weak proxy of ourselves so the view does not
    # keep this context alive
    hosting_controller = self.get_controller(mri)
    return hosting_controller.block_view(weakref.proxy(self))
|
python
|
{
"resource": ""
}
|
q16901
|
Context.set_notify_dispatch_request
|
train
|
def set_notify_dispatch_request(self, notify_dispatch_request, *args):
    """Set function to call just before requests are dispatched.

    Args:
        notify_dispatch_request (callable): function will be called
            with request as single arg just before request is dispatched
    """
    # Remember both the hook itself and the extra args to pass it
    self._notify_args = args
    self._notify_dispatch_request = notify_dispatch_request
|
python
|
{
"resource": ""
}
|
q16902
|
Context.ignore_stops_before_now
|
train
|
def ignore_stops_before_now(self):
    """Ignore any stops received before this point"""
    # A fresh sentinel object: any stop queued before it is stale
    sentinel = object()
    self._sentinel_stop = sentinel
    self._q.put(sentinel)
|
python
|
{
"resource": ""
}
|
q16903
|
Context.put
|
train
|
def put(self, path, value, timeout=None, event_timeout=None):
""""Puts a value to a path and returns when it completes
Args:
path (list): The path to put to
value (object): The value to set
timeout (float): time in seconds to wait for responses, wait forever
if None
event_timeout: maximum time in seconds to wait between each response
event, wait forever if None
Returns:
The value after the put completes
"""
future = self.put_async(path, value)
self.wait_all_futures(
future, timeout=timeout, event_timeout=event_timeout)
return future.result()
|
python
|
{
"resource": ""
}
|
q16904
|
Context.put_async
|
train
|
def put_async(self, path, value):
    """Puts a value to a path and returns immediately

    Args:
        path (list): The path to put to
        value (object): The value to set

    Returns:
        Future: A single Future which will resolve to the result
    """
    # (Fixed: docstring previously opened with four quotes, leaving a
    # stray '"' at the start of the documentation text)
    request = Put(self._get_next_id(), path, value)
    # Responses come back on our queue for servicing in wait_all_futures
    request.set_callback(self._q.put)
    future = self._dispatch_request(request)
    return future
|
python
|
{
"resource": ""
}
|
q16905
|
Context.post
|
train
|
def post(self, path, params=None, timeout=None, event_timeout=None):
    """Synchronously calls a method

    Args:
        path (list): The path to post to
        params (dict): parameters for the call
        timeout (float): time in seconds to wait for responses, wait
            forever if None
        event_timeout: maximum time in seconds to wait between each response
            event, wait forever if None

    Returns:
        the result from 'method'
    """
    result_future = self.post_async(path, params)
    self.wait_all_futures(
        result_future, timeout=timeout, event_timeout=event_timeout)
    return result_future.result()
|
python
|
{
"resource": ""
}
|
q16906
|
Context.post_async
|
train
|
def post_async(self, path, params=None):
    """Asynchronously calls a function on a child block

    Args:
        path (list): The path to post to
        params (dict): parameters for the call

    Returns:
        Future: a single Future that will resolve to the result
    """
    # Build the request, route its response back to our queue, dispatch
    request = Post(self._get_next_id(), path, params)
    request.set_callback(self._q.put)
    return self._dispatch_request(request)
|
python
|
{
"resource": ""
}
|
q16907
|
Context.unsubscribe
|
train
|
def unsubscribe(self, future):
    """Terminates the subscription given by a future

    Args:
        future (Future): The future of the original subscription

    Raises:
        AssertionError: If the future has already been unsubscribed from
    """
    assert future not in self._pending_unsubscribes, \
        "%r has already been unsubscribed from" % \
        self._pending_unsubscribes[future]
    subscribe = self._requests[future]
    # Remember we asked, so a repeated unsubscribe for this future fails
    self._pending_unsubscribes[future] = subscribe
    # Clear out the subscription
    self._subscriptions.pop(subscribe.id)
    # Unsubscribe reuses the id of the original Subscribe request
    request = Unsubscribe(subscribe.id)
    request.set_callback(self._q.put)
    try:
        # The controller is looked up by the first element of the path (mri)
        controller = self.get_controller(subscribe.path[0])
    except ValueError:
        # Controller has already gone, probably during tearDown
        pass
    else:
        self.handle_request(controller, request)
|
python
|
{
"resource": ""
}
|
q16908
|
Context.unsubscribe_all
|
train
|
def unsubscribe_all(self, callback=False):
    """Send an unsubscribe for all active subscriptions

    Args:
        callback (bool): If True, log and perform each unsubscribe via
            cothread.Callback, otherwise unsubscribe directly
    """
    # Materialize the list up front: the original generator expression made
    # the emptiness guard always-true (a generator object is truthy even
    # when it would yield nothing)
    active = [(f, r) for f, r in self._requests.items()
              if isinstance(r, Subscribe)
              and f not in self._pending_unsubscribes]
    for future, request in active:
        if callback:
            # warn() is a deprecated alias of warning()
            log.warning("Unsubscribing from %s", request.path)
            cothread.Callback(self.unsubscribe, future)
        else:
            self.unsubscribe(future)
|
python
|
{
"resource": ""
}
|
q16909
|
Context.when_matches
|
train
|
def when_matches(self, path, good_value, bad_values=None, timeout=None,
                 event_timeout=None):
    """Resolve when a path value equals value

    Args:
        path (list): The path to wait to
        good_value (object): the value to wait for
        bad_values (list): values to raise an error on
        timeout (float): time in seconds to wait for responses, wait
            forever if None
        event_timeout: maximum time in seconds to wait between each response
            event, wait forever if None
    """
    match_future = self.when_matches_async(path, good_value, bad_values)
    self.wait_all_futures(
        match_future, timeout=timeout, event_timeout=event_timeout)
|
python
|
{
"resource": ""
}
|
q16910
|
Context.when_matches_async
|
train
|
def when_matches_async(self, path, good_value, bad_values=None):
    """Wait for an attribute to become a given value

    Args:
        path (list): The path to wait to
        good_value: If it is a callable then expect it to return
            True if we are satisfied and raise on error. If it is not
            callable then compare each value against this one and return
            if it matches.
        bad_values (list): values to raise an error on

    Returns:
        Future: a single Future that will resolve when the path matches
            good_value or bad_values
    """
    matcher = When(good_value, bad_values)
    subscription = self.subscribe(path, matcher)
    # Give the matcher the future to resolve and a weak proxy of us so it
    # does not keep the context alive
    matcher.set_future_context(subscription, weakref.proxy(self))
    return subscription
|
python
|
{
"resource": ""
}
|
q16911
|
Context.wait_all_futures
|
train
|
def wait_all_futures(self, futures, timeout=None, event_timeout=None):
    # type: (Union[List[Future], Future, None], float, float) -> None
    """Services all futures until the list 'futures' are all done
    then returns. Calls relevant subscription callbacks as they
    come off the queue and raises an exception on abort

    Args:
        futures: a `Future` or list of all futures that the caller
            wants to wait for
        timeout: maximum total time in seconds to wait for responses, wait
            forever if None
        event_timeout: maximum time in seconds to wait between each response
            event, wait forever if None
    """
    # Absolute deadline for the whole call (None means no overall limit)
    if timeout is None:
        end = None
    else:
        end = time.time() + timeout
    # Normalize: a single Future or None is also accepted
    if not isinstance(futures, list):
        if futures:
            futures = [futures]
        else:
            futures = []
    # Drop futures that are already done, re-raising any stored failure
    filtered_futures = []
    for f in futures:
        if f.done():
            if f.exception() is not None:
                raise f.exception()
        else:
            filtered_futures.append(f)
    # Service events until nothing is left pending.
    # NOTE(review): this presumes _service_futures removes completed
    # entries from the list it is given — confirm in its implementation
    while filtered_futures:
        if event_timeout is not None:
            until = time.time() + event_timeout
            if end is not None:
                # Use whichever deadline comes first
                until = min(until, end)
        else:
            until = end
        self._service_futures(filtered_futures, until)
|
python
|
{
"resource": ""
}
|
q16912
|
Context.sleep
|
train
|
def sleep(self, seconds):
    """Services all futures while waiting

    Args:
        seconds (float): Time to wait
    """
    deadline = time.time() + seconds
    while True:
        try:
            # Keep servicing the queue until the deadline fires
            self._service_futures([], deadline)
        except TimeoutError:
            return
|
python
|
{
"resource": ""
}
|
q16913
|
Spawned.get
|
train
|
def get(self, timeout=None):
    # type: (float) -> T
    """Return the result or raise the error the function has produced"""
    self.wait(timeout)
    outcome = self._result
    # An Exception result means the spawned function failed: re-raise it
    if isinstance(outcome, Exception):
        raise outcome
    return outcome
|
python
|
{
"resource": ""
}
|
q16914
|
RunnableController.update_configure_params
|
train
|
def update_configure_params(self, part=None, info=None):
    # type: (Part, ConfigureParamsInfo) -> None
    """Tell controller part needs different things passed to Configure

    Args:
        part: The part whose configure parameter info changed, or None to
            just recalculate from the parts we already know about
        info: The new ConfigureParamsInfo for that part
    """
    with self.changes_squashed:
        # Update the dict
        if part:
            self.part_configure_params[part] = info
        # No process yet, so don't do this yet
        if self.process is None:
            return
        # Get the model of our configure method as the starting point
        configure_model = MethodModel.from_callable(self.configure)
        # These will not be inserted as they already exist
        ignored = tuple(ConfigureHook.call_types)
        # Re-calculate the following
        required = []
        takes_elements = OrderedDict()
        defaults = OrderedDict()
        # First do the required arguments
        for k in configure_model.takes.required:
            required.append(k)
            takes_elements[k] = configure_model.takes.elements[k]
        # Then required arguments contributed by each part
        for part in self.parts.values():
            try:
                info = self.part_configure_params[part]
            except KeyError:
                continue
            for k in info.required:
                if k not in required and k not in ignored:
                    required.append(k)
                    takes_elements[k] = info.metas[k]
        # Now the default and optional
        for k in configure_model.takes.elements:
            if k not in required:
                takes_elements[k] = configure_model.takes.elements[k]
        # And optional arguments (with any defaults) from each part
        for part in self.parts.values():
            try:
                info = self.part_configure_params[part]
            except KeyError:
                continue
            for k in info.metas:
                if k not in required and k not in ignored:
                    takes_elements[k] = info.metas[k]
                    if k in info.defaults:
                        defaults[k] = info.defaults[k]
        # Set the values
        configure_model.takes.set_elements(takes_elements)
        configure_model.takes.set_required(required)
        configure_model.set_defaults(defaults)
        # Update methods from the new metas
        self._block.configure.set_takes(configure_model.takes)
        self._block.configure.set_defaults(configure_model.defaults)
        # Now make a validate model with returns: same takes/defaults as
        # configure, plus a read-only copy of takes as the returns
        validate_model = MethodModel.from_dict(configure_model.to_dict())
        returns = MapMeta.from_dict(validate_model.takes.to_dict())
        for v in returns.elements.values():
            v.set_writeable(False)
        self._block.validate.set_takes(validate_model.takes)
        self._block.validate.set_defaults(validate_model.defaults)
        self._block.validate.set_returns(returns)
|
python
|
{
"resource": ""
}
|
q16915
|
RunnableController.validate
|
train
|
def validate(self, generator, axesToMove=None, **kwargs):
    # type: (AGenerator, AAxesToMove, **Any) -> AConfigureParams
    """Validate configuration parameters and return validated parameters.

    Doesn't take device state into account so can be run in any state
    """
    # Allow up to 10 rounds of parameter tweaking before giving up
    iterations = 10
    # We will return this, so make sure we fill in defaults
    for k, default in self._block.configure.defaults.items():
        if k not in kwargs:
            kwargs[k] = default
    # The validated parameters we will eventually return
    params = ConfigureParams(generator, axesToMove, **kwargs)
    # Make some tasks just for validate
    part_contexts = self.create_part_contexts()
    # Get any status from all parts
    status_part_info = self.run_hooks(
        ReportStatusHook(p, c) for p, c in part_contexts.items())
    while iterations > 0:
        # Try up to 10 times to get a valid set of parameters
        iterations -= 1
        # Validate the params with all the parts
        # NOTE(review): the generator expression below rebinds "kwargs",
        # shadowing the function's **kwargs — looks intentional (per-part
        # params from _part_params), but worth confirming
        validate_part_info = self.run_hooks(
            ValidateHook(p, c, status_part_info, **kwargs)
            for p, c, kwargs in self._part_params(part_contexts, params))
        tweaks = ParameterTweakInfo.filter_values(validate_part_info)
        if tweaks:
            # Some part wants a parameter changed: apply each tweak after
            # re-validating it against the configure meta
            for tweak in tweaks:
                deserialized = self._block.configure.takes.elements[
                    tweak.parameter].validate(tweak.value)
                setattr(params, tweak.parameter, deserialized)
                self.log.debug(
                    "Tweaking %s to %s", tweak.parameter, deserialized)
        else:
            # Consistent set, just return the params
            return params
    raise ValueError("Could not get a consistent set of parameters")
|
python
|
{
"resource": ""
}
|
q16916
|
RunnableController.abort
|
train
|
def abort(self):
    # type: () -> None
    """Abort the current operation and block until aborted

    Normally it will return in Aborted state. If something goes wrong it
    will return in Fault state. If the user disables then it will return in
    Disabled state.
    """
    pending_resume = self.resume_queue
    if pending_resume:
        # Tell _call_do_run not to resume
        pending_resume.put(False)
    self.try_aborting_function(ss.ABORTING, ss.ABORTED, self.do_abort)
|
python
|
{
"resource": ""
}
|
q16917
|
RunnableController.resume
|
train
|
def resume(self):
    # type: () -> None
    """Resume a paused scan.

    Normally it will return in Running state. If something goes wrong it
    will return in Fault state.
    """
    # Move to Running first, then release the task waiting on resume_queue
    self.transition(ss.RUNNING)
    self.resume_queue.put(True)
|
python
|
{
"resource": ""
}
|
q16918
|
ConfigureHook.create_info
|
train
|
def create_info(cls, configure_func):
    # type: (Callable) -> ConfigureParamsInfo
    """Create a `ConfigureParamsInfo` describing the extra parameters
    that should be passed at configure"""
    call_types = getattr(configure_func, "call_types",
                         {})  # type: Dict[str, Anno]
    metas = OrderedDict()
    required = []
    defaults = OrderedDict()
    for name, anno in call_types.items():
        if name in cls.call_types:
            # Standard configure parameter, not an extra one
            continue
        # Convert the Anno into the matching VMeta subclass
        meta_cls = VMeta.lookup_annotype_converter(anno)
        metas[name] = meta_cls.from_annotype(anno, writeable=True)
        if anno.default is NO_DEFAULT:
            required.append(name)
        elif anno.default is not None:
            defaults[name] = anno.default
    return ConfigureParamsInfo(metas, required, defaults)
|
python
|
{
"resource": ""
}
|
q16919
|
Hookable.register_hooked
|
train
|
def register_hooked(self,
                    hooks,  # type: Union[Type[Hook], Sequence[Type[Hook]]]
                    func,  # type: Hooked
                    args_gen=None  # type: Optional[ArgsGen]
                    ):
    # type: (Type[Hook], Callable, Optional[Callable]) -> None
    """Register func to be run when any of the hooks are run by parent

    Args:
        hooks: A Hook class or list of Hook classes of interest
        func: The callable that should be run on that Hook
        args_gen: Optionally specify the argument names that should be
            passed to func. If not given then use func.call_types.keys
    """
    if args_gen is None:
        # Default to asking func which argument names it wants
        args_gen = getattr(func, "call_types", {}).keys
    if not isinstance(hooks, Sequence):
        hooks = [hooks]
    if self.hooked is None:
        self.hooked = {}
    registration = (func, args_gen)
    for hook_cls in hooks:
        self.hooked[hook_cls] = registration
|
python
|
{
"resource": ""
}
|
q16920
|
Hookable.on_hook
|
train
|
def on_hook(self, hook):
    # type: (Hook) -> None
    """Takes a hook, and optionally calls hook.run on a function"""
    try:
        # TypeError covers self.hooked being None (nothing registered)
        func, args_gen = self.hooked[type(hook)]
    except (KeyError, TypeError):
        return
    hook(func, args_gen())
|
python
|
{
"resource": ""
}
|
q16921
|
StateSet.transition_allowed
|
train
|
def transition_allowed(self, initial_state, target_state):
    # type: (str, str) -> bool
    """Check if a transition between two states is allowed"""
    allowed = self._allowed
    assert initial_state in allowed, \
        "%s is not in %s" % (initial_state, list(allowed))
    return target_state in allowed[initial_state]
|
python
|
{
"resource": ""
}
|
q16922
|
StateSet.set_allowed
|
train
|
def set_allowed(self, initial_state, *allowed_states):
    # type: (str, *str) -> None
    """Add an allowed transition from initial_state to allowed_states"""
    targets = list(allowed_states)
    self._allowed.setdefault(initial_state, set()).update(targets)
    # Record any states we haven't seen before, keeping insertion order
    for state in targets + [initial_state]:
        if state not in self.possible_states:
            self.possible_states.append(state)
|
python
|
{
"resource": ""
}
|
q16923
|
cmd_string
|
train
|
def cmd_string(name, cmd):
    # type: (AName, ACmd) -> ADefine
    """Define a string parameter coming from a shell command to be used within
    this YAML file. Trailing newlines will be stripped."""
    # universal_newlines=True makes check_output return str on Python 3
    # (it otherwise returns bytes, so rstrip("\n") would raise TypeError);
    # on Python 2 it already returned str, so behavior is unchanged there
    value = subprocess.check_output(
        cmd, shell=True, universal_newlines=True).rstrip("\n")
    return Define(name, value)
|
python
|
{
"resource": ""
}
|
q16924
|
export_env_string
|
train
|
def export_env_string(name, value):
    # type: (AEnvName, AEnvValue) -> ADefine
    """Exports an environment variable with the given value"""
    # Publish to the process environment, then record it as a Define
    os.environ[name] = value
    define = Define(name, value)
    return define
|
python
|
{
"resource": ""
}
|
q16925
|
WebsocketClientComms.on_message
|
train
|
def on_message(self, message):
    """Pass response from server to process receive queue

    Args:
        message(str): Received message
    """
    # Called in tornado loop
    try:
        self.log.debug("Got message %s", message)
        d = json_decode(message)
        response = deserialize_object(d, Response)
        if isinstance(response, (Return, Error)):
            # Terminal response: this request is finished, remove it
            request = self._request_lookup.pop(response.id)
            if isinstance(response, Error):
                # Make the message an exception so it can be raised
                response.message = ResponseError(response.message)
        else:
            # Non-terminal response (a subscription update, presumably):
            # keep the request registered for further responses
            request = self._request_lookup[response.id]
        # Transfer the work of the callback to cothread
        cothread.Callback(request.callback, response)
    except Exception:
        # If we don't catch the exception here, tornado will spew odd
        # error messages about 'HTTPRequest' object has no attribute 'path'
        self.log.exception("on_message(%r) failed", message)
|
python
|
{
"resource": ""
}
|
q16926
|
Info.filter_parts
|
train
|
def filter_parts(cls, part_info):
    # type: (Type[T], PartInfo) -> Dict[str, List[T]]
    """Filter the part_info dict looking for instances of our class

    Args:
        part_info (dict): {part_name: [Info] or None} as returned from
            Controller.run_hook()

    Returns:
        dict: {part_name: [info]} where info is a subclass of cls
    """
    filtered = OrderedDict()
    for part_name, info_list in part_info.items():
        # Skip parts that returned nothing or raised
        if info_list is None or isinstance(info_list, Exception):
            continue
        matches = [info for info in info_list if isinstance(info, cls)]
        if matches:
            filtered[part_name] = matches
    return filtered
|
python
|
{
"resource": ""
}
|
q16927
|
Info.filter_values
|
train
|
def filter_values(cls, part_info):
    # type: (Type[T], PartInfo) -> List[T]
    """Filter the part_info dict list looking for instances of our class

    Args:
        part_info (dict): {part_name: [Info] or None} as returned from
            Controller.run_hook()

    Returns:
        list: [info] where info is a subclass of cls
    """
    # Flatten the per-part lists into one list
    values = []
    for info_list in cls.filter_parts(part_info).values():
        values.extend(info_list)
    return values
|
python
|
{
"resource": ""
}
|
q16928
|
Info.filter_single_value
|
train
|
def filter_single_value(cls, part_info, error_msg=None):
    # type: (Type[T], PartInfo, str) -> T
    """Filter the part_info dict list looking for a single instance of our
    class

    Args:
        part_info (dict): {part_name: [Info] or None} as returned from
            Controller.run_hook()
        error_msg (str, optional): Specific error message to show if
            there isn't a single value

    Returns:
        info subclass of cls
    """
    values = cls.filter_values(part_info)
    if len(values) == 1:
        return values[0]
    if error_msg is None:
        error_msg = "Expected a single %s, got %s of them" % \
            (cls.__name__, len(values))
    raise BadValueError(error_msg)
|
python
|
{
"resource": ""
}
|
q16929
|
PvaServerComms.disconnect_pv_clients
|
train
|
def disconnect_pv_clients(self, mris):
    # type: (List[str]) -> None
    """Disconnect anyone listening to any of the given mris"""
    for mri in mris:
        # Remove this mri's pvs (if any) and close each one
        pv_dict = self._pvs.pop(mri, {})
        for pv in pv_dict.values():
            # Close pv with force destroy on, this will call
            # onLastDisconnect
            pv.close(destroy=True, sync=True, timeout=1.0)
|
python
|
{
"resource": ""
}
|
q16930
|
Port.with_source_port_tag
|
train
|
def with_source_port_tag(self, tags, connected_value):
    """Add a Source Port tag to the tags list, removing any other Source
    Ports"""
    result = []
    for tag in tags:
        # Drop any existing Source Port tags
        if not tag.startswith("sourcePort:"):
            result.append(tag)
    result.append(self.source_port_tag(connected_value))
    return result
|
python
|
{
"resource": ""
}
|
q16931
|
Port.port_tag_details
|
train
|
def port_tag_details(cls, tags):
    # type: (Sequence[str]) -> Union[Tuple[bool, Port, str], None]
    """Search tags for port info, returning it

    Args:
        tags: A list of tags to check

    Returns:
        None or (is_source, port, connected_value|disconnected_value)
        where port is one of the Enum entries of Port
    """
    for tag in tags:
        match = port_tag_re.match(tag)
        if not match:
            continue
        source_sink, port, extra = match.groups()
        is_source = source_sink == "source"
        # Returns on the first tag that looks like a port tag
        return is_source, cls(port), extra
|
python
|
{
"resource": ""
}
|
q16932
|
StatefulController.transition
|
train
|
def transition(self, state, message=""):
    """Change to a new state if the transition is allowed

    Args:
        state (str): State to transition to
        message (str): Message if the transition is to a fault state

    Raises:
        TypeError: If the transition is not allowed
    """
    with self.changes_squashed:
        initial_state = self.state.value
        if self.state_set.transition_allowed(
                initial_state=initial_state, target_state=state):
            self.log.debug(
                "%s: Transitioning from %s to %s",
                self.mri, initial_state, state)
            # Pick the alarm severity to publish alongside the new state
            if state == ss.DISABLED:
                alarm = Alarm.invalid("Disabled")
            elif state == ss.FAULT:
                alarm = Alarm.major(message)
            else:
                alarm = Alarm()
            self.update_health(self, HealthInfo(alarm))
            self.state.set_value(state)
            self.state.set_alarm(alarm)
            # Update the writeable flag of each child for the new state
            for child, writeable in self._children_writeable[state].items():
                if isinstance(child, AttributeModel):
                    child.meta.set_writeable(writeable)
                elif isinstance(child, MethodModel):
                    child.set_writeable(writeable)
        else:
            raise TypeError("Cannot transition from %s to %s" %
                            (initial_state, state))
|
python
|
{
"resource": ""
}
|
q16933
|
wait_for_stateful_block_init
|
train
|
def wait_for_stateful_block_init(context, mri, timeout=DEFAULT_TIMEOUT):
    """Wait until a Block backed by a StatefulController has initialized

    Args:
        context (Context): The context to use to make the child block
        mri (str): The mri of the child block
        timeout (float): The maximum time to wait
    """
    state_path = [mri, "state", "value"]
    # Fault and Disabled will never progress to Ready, so error out on them
    failure_states = [StatefulStates.FAULT, StatefulStates.DISABLED]
    context.when_matches(
        state_path, StatefulStates.READY,
        bad_values=failure_states, timeout=timeout)
|
python
|
{
"resource": ""
}
|
q16934
|
Future.exception
|
train
|
def exception(self, timeout=None):
    """Return the exception raised by the call that the future represents.

    Args:
        timeout: The number of seconds to wait for the exception if the
            future isn't done. If None, then there is no limit on the wait
            time.

    Returns:
        The exception raised by the call that the future represents or None
        if the call completed without raising.

    Raises:
        TimeoutError: If the future didn't finish executing before the given
            timeout.
    """
    # Only need to block if the future is still running
    still_running = self._state == self.RUNNING
    if still_running:
        self._context.wait_all_futures([self], timeout)
    return self._exception
|
python
|
{
"resource": ""
}
|
q16935
|
Future.set_result
|
train
|
def set_result(self, result):
    """Sets the return value of work associated with the future.

    Should only be used by Task and unit tests.
    """
    # Store the value first, then mark the future finished
    self._result = result
    self._state = self.FINISHED
|
python
|
{
"resource": ""
}
|
q16936
|
Future.set_exception
|
train
|
def set_exception(self, exception):
    """Sets the result of the future as being the given exception.

    Should only be used by Task and unit tests.
    """
    # Guard against storing a non-exception, which get() would not raise
    assert isinstance(exception, Exception), \
        "%r should be an Exception" % exception
    self._exception = exception
    self._state = self.FINISHED
|
python
|
{
"resource": ""
}
|
q16937
|
Request.return_response
|
train
|
def return_response(self, value=None):
    # type: (Any) -> Tuple[Callback, Return]
    """Create a Return Response object to signal a return value"""
    # Pair the response with the callback that should receive it
    return self.callback, Return(id=self.id, value=value)
|
python
|
{
"resource": ""
}
|
q16938
|
Request.error_response
|
train
|
def error_response(self, exception):
    # type: (Exception) -> Tuple[Callback, Error]
    """Create an Error Response object to signal an error"""
    response = Error(id=self.id, message=exception)
    # Record the traceback of the exception we are responding with
    log.exception("Exception raised for request %s", self)
    return self.callback, response
|
python
|
{
"resource": ""
}
|
q16939
|
make_view
|
train
|
def make_view(controller, context, data):
    # type: (Controller, Context, Any) -> Any
    """Make a View subclass containing properties specific for given data

    Args:
        controller (Controller): The child controller that hosts the data
        context (Context): The context the parent has made that the View
            should use for manipulating the data
        data (Model): The actual data that context will be manipulating

    Returns:
        View: A View subclass instance that provides a user-focused API to
            the given data
    """
    # Order matters: check the most specific Model subclasses first
    if isinstance(data, BlockModel):
        return _make_view_subclass(Block, controller, context, data)
    if isinstance(data, AttributeModel):
        return Attribute(controller, context, data)
    if isinstance(data, MethodModel):
        return Method(controller, context, data)
    if isinstance(data, Model):
        # Any other Model gets a generic View
        return _make_view_subclass(View, controller, context, data)
    if isinstance(data, dict):
        # Recurse into mappings, preserving key order
        views = OrderedDict()
        for key, item in data.items():
            views[key] = make_view(controller, context, item)
        return views
    if isinstance(data, list):
        # Recurse into lists
        return [make_view(controller, context, item) for item in data]
    # Just return the data unwrapped as it should be immutable
    return data
|
python
|
{
"resource": ""
}
|
q16940
|
Attribute.put_value
|
train
|
def put_value(self, value, timeout=None):
    """Put a value to the Attribute and wait for completion"""
    # The writeable endpoint is the "value" leaf under our data path
    value_path = self._data.path + ["value"]
    self._context.put(value_path, value, timeout=timeout)
|
python
|
{
"resource": ""
}
|
q16941
|
ExposureDeadtimeInfo.calculate_exposure
|
train
|
def calculate_exposure(self, duration):
    # type: (float) -> float
    """Calculate the exposure to set the detector to given the duration of
    the frame and the readout_time and frequency_accuracy"""
    # frequency_accuracy is in ppm, so scale the duration by it
    accuracy_slack = self.frequency_accuracy * duration / 1000000.0
    exposure = duration - accuracy_slack - self.readout_time
    assert exposure > 0.0, \
        "Exposure time %s too small when deadtime taken into account" % (
            exposure,)
    return exposure
|
python
|
{
"resource": ""
}
|
q16942
|
Model.set_notifier_path
|
train
|
def set_notifier_path(self, notifier, path):
    # type: (Union[Notifier, DummyNotifier], List[str]) -> None
    """Sets the notifier, and the path from the block root

    Args:
        notifier (Notifier): The Notifier to tell when endpoint data changes
        path (list): The absolute path to get to this object
    """
    # This function should either change from the DummyNotifier or to
    # the DummyNotifier, never between two valid notifiers
    assert self.notifier is Model.notifier or notifier is Model.notifier, \
        "Already have a notifier %s path %s" % (self.notifier, self.path)
    self.notifier = notifier
    self.path = path
    # Tell all our children too
    for name, ct in self.call_types.items():
        if ct.is_mapping:
            child = getattr(self, name)
            # Only recurse into dicts whose values are Models
            if child and issubclass(ct.typ[1], Model):
                for k, v in child.items():
                    v.set_notifier_path(notifier, self.path + [name, k])
        elif issubclass(ct.typ, Model):
            assert not ct.is_array, \
                "Can't deal with Arrays of Models %s" % ct
            child = getattr(self, name)
            child.set_notifier_path(notifier, self.path + [name])
|
python
|
{
"resource": ""
}
|
q16943
|
Model.apply_change
|
train
|
def apply_change(self, path, *args):
    # type: (List[str], Any) -> None
    """Take a single change from a Delta and apply it to this model"""
    if len(path) > 1:
        # Delegate to the child named by the head of the path
        self[path[0]].apply_change(path[1:], *args)
    else:
        # The change targets one of our own endpoints
        assert len(path) == 1 and len(args) == 1, \
            "Cannot process change %s" % ([self.path + path] + list(args))
        setter = getattr(self, "set_%s" % path[0])
        setter(args[0])
|
python
|
{
"resource": ""
}
|
q16944
|
VMeta.create_attribute_model
|
train
|
def create_attribute_model(self, initial_value=None):
    # type: (Any) -> AttributeModel
    """Make an AttributeModel instance of the correct type for this Meta

    Args:
        initial_value: The initial value the Attribute should take

    Returns:
        AttributeModel: The created attribute model instance
    """
    # attribute_class is chosen by the concrete VMeta subclass
    return self.attribute_class(meta=self, value=initial_value)
|
python
|
{
"resource": ""
}
|
q16945
|
VMeta.from_annotype
|
train
|
def from_annotype(cls, anno, writeable, **kwargs):
    # type: (Anno, bool, **Any) -> VMeta
    """Return an instance of this class from an Anno"""
    meta = cls(description=anno.description, writeable=writeable, **kwargs)
    default = meta.default_widget()
    if default != Widget.NONE:
        # Tag with the default widget so GUIs know how to render it
        meta.set_tags([default.tag()])
    return meta
|
python
|
{
"resource": ""
}
|
q16946
|
VMeta.register_annotype_converter
|
train
|
def register_annotype_converter(cls, types, is_array=False,
                                is_mapping=False):
    # type: (Union[Sequence[type], type], bool, bool) -> Any
    """Register this class as a converter for Anno instances"""
    if not isinstance(types, Sequence):
        types = [types]

    def decorator(subclass):
        # Map each (type, is_array, is_mapping) key to the converter
        for typ in types:
            cls._annotype_lookup[(typ, is_array, is_mapping)] = subclass
        return subclass

    return decorator
|
python
|
{
"resource": ""
}
|
q16947
|
VMeta.lookup_annotype_converter
|
train
|
def lookup_annotype_converter(cls, anno):
    # type: (Anno) -> Type[VMeta]
    """Look up a vmeta based on an Anno"""
    typ = anno.typ
    if hasattr(typ, "__bases__"):
        # A real class: consider its whole MRO, most specific first
        candidates = inspect.getmro(typ)
    else:
        # Not a class (a numpy dtype, presumably): only itself
        candidates = [typ]
    is_array = bool(anno.is_array)
    is_mapping = bool(anno.is_mapping)
    for candidate in candidates:
        try:
            return cls._annotype_lookup[(candidate, is_array, is_mapping)]
        except KeyError:
            continue
    raise KeyError(anno)
|
python
|
{
"resource": ""
}
|
q16948
|
AttributeModel.set_value
|
train
|
def set_value(self, value, set_alarm_ts=True, alarm=None, ts=None):
    # type: (Any, bool, Alarm, TimeStamp) -> Any
    """Set value, calculating alarm and ts if requested"""
    value = self.meta.validate(value)
    if not set_alarm_ts:
        # Just the value, leave alarm and timeStamp alone
        self.set_endpoint_data("value", value)
        return self.value
    # Fill in defaults, deserializing anything caller-supplied
    if alarm is None:
        alarm = Alarm.ok
    else:
        alarm = deserialize_object(alarm, Alarm)
    if ts is None:
        ts = TimeStamp()
    else:
        ts = deserialize_object(ts, TimeStamp)
    self.set_value_alarm_ts(value, alarm, ts)
    return self.value
|
python
|
{
"resource": ""
}
|
q16949
|
AttributeModel.set_value_alarm_ts
|
train
|
def set_value_alarm_ts(self, value, alarm, ts):
    # type: (Any, Alarm, TimeStamp) -> None
    """Set value with pre-validated alarm and timeStamp"""
    with self.notifier.changes_squashed:
        # Assume they are of the right format
        self.value = value
        self.notifier.add_squashed_change(self.path + ["value"], value)
        # Only publish the alarm if it is a different object (identity,
        # not equality, check)
        if alarm is not self.alarm:
            self.alarm = alarm
            self.notifier.add_squashed_change(self.path + ["alarm"], alarm)
        # The timeStamp always changes, so always publish it
        self.timeStamp = ts
        self.notifier.add_squashed_change(self.path + ["timeStamp"], ts)
|
python
|
{
"resource": ""
}
|
q16950
|
PandABlocksClient.send_recv
|
train
|
def send_recv(self, message, timeout=10.0):
    """Send a message to a PandABox and wait for the response

    Args:
        message (str): The message to send
        timeout (float): How long to wait before raising queue.Empty

    Returns:
        str: The response
    """
    # send() gives us back the queue the reply will arrive on
    reply_queue = self.send(message)
    return self.recv(reply_queue, timeout)
|
python
|
{
"resource": ""
}
|
q16951
|
PandABlocksClient._send_loop
|
train
|
def _send_loop(self):
"""Service self._send_queue, sending requests to server"""
while True:
message, response_queue = self._send_queue.get()
if message is self.STOP:
break
try:
self._response_queues.put(response_queue)
self._socket.send(message)
except Exception: # pylint:disable=broad-except
log.exception("Exception sending message %s", message)
|
python
|
{
"resource": ""
}
|
q16952
|
PandABlocksClient._respond
|
train
|
def _respond(self, resp):
"""Respond to the person waiting"""
response_queue = self._response_queues.get(timeout=0.1)
response_queue.put(resp)
self._completed_response_lines = []
self._is_multiline = None
|
python
|
{
"resource": ""
}
|
q16953
|
PandABlocksClient._recv_loop
|
train
|
def _recv_loop(self):
    """Service socket recv, returning responses to the correct queue"""
    # Parsing state for the current (possibly multiline) response
    self._completed_response_lines = []
    self._is_multiline = None
    lines_iterator = self._get_lines()
    while True:
        try:
            line = next(lines_iterator)
            if self._is_multiline is None:
                # First line of a response: multiline responses start
                # with "!" or are terminated by a bare "."
                self._is_multiline = line.startswith("!") or line == "."
            if line.startswith("ERR"):
                # Error response: deliver it as a raisable ValueError
                self._respond(ValueError(line))
            elif self._is_multiline:
                if line == ".":
                    # "." terminates the multiline response
                    self._respond(self._completed_response_lines)
                else:
                    assert line[0] == "!", \
                        "Multiline response {} doesn't start with !" \
                        .format(repr(line))
                    # Accumulate the line with its "!" prefix stripped
                    self._completed_response_lines.append(line[1:])
            else:
                # Single-line response
                self._respond(line)
        except StopIteration:
            # Line source exhausted (socket closed): exit cleanly
            return
        except Exception:
            log.exception("Exception receiving message")
            raise
|
python
|
{
"resource": ""
}
|
q16954
|
PandABlocksClient.parameterized_send
|
train
|
def parameterized_send(self, request, parameter_list):
"""Send batched requests for a list of parameters
Args:
request (str): Request to send, like "%s.*?\n"
parameter_list (list): parameters to format with, like
["TTLIN", "TTLOUT"]
Returns:
dict: {parameter: response_queue}
"""
response_queues = OrderedDict()
for parameter in parameter_list:
response_queues[parameter] = self.send(request % parameter)
return response_queues
|
python
|
{
"resource": ""
}
|
q16955
|
ChildPart.notify_dispatch_request
|
train
|
def notify_dispatch_request(self, request):
    # type: (Request) -> None
    """Will be called when a context passed to a hooked function is about
    to dispatch a request

    Only Puts addressed to our own child (request.path[0] == self.mri)
    are of interest; anything else is ignored.
    """
    if isinstance(request, Put) and request.path[0] == self.mri:
        # This means the context we were passed has just made a Put request
        # so mark the field as "we_modified" so it doesn't screw up the
        # modified led
        # path[-1] is the "value" leaf, so path[-2] is the attribute name
        attribute_name = request.path[-2]
        if attribute_name not in self.no_save:
            self.log.warning(
                "Part %s tried to set '%s' that is not in self.no_save. "
                "This will stop the 'modified' attribute from working.",
                self, attribute_name)
|
python
|
{
"resource": ""
}
|
q16956
|
ChildPart.sever_sink_ports
|
train
|
def sever_sink_ports(self, context, ports, connected_to=None):
    # type: (AContext, APortMap, str) -> None
    """Conditionally sever Sink Ports of the child. If connected_to
    is then None then sever all, otherwise restrict to connected_to's
    Source Ports

    Args:
        context (Context): The context to use
        ports (dict): {part_name: [PortInfo]}
        connected_to (str): Restrict severing to this part
    """
    # Find the Source Ports to connect to
    if connected_to:
        # Calculate a lookup of the Source Port "name" to type
        source_port_lookup = self._source_port_lookup(
            ports.get(connected_to, []))
    else:
        # Sentinel True means "sever every sink port unconditionally"
        source_port_lookup = True
    # Find our Sink Ports
    sink_ports = self._get_flowgraph_ports(ports, SinkPortInfo)
    # If we have Sunk Ports that need to be disconnected then do so
    if sink_ports and source_port_lookup:
        child = context.block_view(self.mri)
        attribute_values = {}
        for name, port_info in sink_ports.items():
            # Either severing everything, or the sink's current value maps
            # to a source port of the matching type on connected_to
            if source_port_lookup is True or source_port_lookup.get(
                    child[name].value, None) == port_info.port:
                attribute_values[name] = port_info.disconnected_value
        # Single batched put keeps the child consistent
        child.put_attribute_values(attribute_values)
|
python
|
{
"resource": ""
}
|
q16957
|
ChildPart.calculate_part_visibility
|
train
|
def calculate_part_visibility(self, ports):
    # type: (APortMap) -> None
    """Calculate what is connected to what

    Args:
        ports: {part_name: [PortInfo]} from other ports

    Updates self.part_visibility: both ends of every live connection are
    marked visible unless an entry already exists for that part.
    """
    # Calculate a lookup of Source Port connected_value to part_name
    source_port_lookup = {}
    for part_name, port_infos in SourcePortInfo.filter_parts(ports).items():
        for port_info in port_infos:
            source_port_lookup[port_info.connected_value] = (
                part_name, port_info.port)
    # Look through all the Sink Ports, and set both ends of the
    # connection to visible if they aren't specified
    for part_name, port_infos in SinkPortInfo.filter_parts(
            ports).items():
        for port_info in port_infos:
            # value != disconnected_value means this sink is wired up
            if port_info.value != port_info.disconnected_value:
                conn_part, port = source_port_lookup.get(
                    port_info.value, (None, None))
                # Only count it when the port types match at both ends
                if conn_part and port == port_info.port:
                    if conn_part not in self.part_visibility:
                        self.part_visibility[conn_part] = True
                    if part_name not in self.part_visibility:
                        self.part_visibility[part_name] = True
|
python
|
{
"resource": ""
}
|
q16958
|
Notifier.handle_subscribe
|
train
|
def handle_subscribe(self, request):
    # type: (Subscribe) -> CallbackResponses
    """Handle a Subscribe request from outside. Called with lock taken"""
    # Drop the block-name element (path[0]) before delegating to the tree
    ret = self._tree.handle_subscribe(request, request.path[1:])
    # Remember the request so a later Unsubscribe can look it up by key
    self._subscription_keys[request.generate_key()] = request
    return ret
|
python
|
{
"resource": ""
}
|
q16959
|
Notifier.handle_unsubscribe
|
train
|
def handle_unsubscribe(self, request):
    # type: (Unsubscribe) -> CallbackResponses
    """Handle a Unsubscribe request from outside. Called with lock taken"""
    # Recover the original Subscribe (raises KeyError if never subscribed)
    subscribe = self._subscription_keys.pop(request.generate_key())
    ret = self._tree.handle_unsubscribe(subscribe, subscribe.path[1:])
    return ret
|
python
|
{
"resource": ""
}
|
q16960
|
Notifier.add_squashed_change
|
train
|
def add_squashed_change(self, path, data):
    # type: (List[str], Any) -> None
    """Register a squashed change to a particular path

    Args:
        path (list): The path of what has changed, relative from Block
        data (object): The new data
    """
    # Only legal inside a changes_squashed section; the counter tracks it
    assert self._squashed_count, "Called while not squashing changes"
    # Store the path relative to the tree root (drop the block element)
    self._squashed_changes.append([path[1:], data])
|
python
|
{
"resource": ""
}
|
q16961
|
NotifierNode.notify_changes
|
train
|
def notify_changes(self, changes):
    # type: (List[List]) -> CallbackResponses
    """Set our data and notify anyone listening

    Args:
        changes (list): [[path, optional data]] where path is the path to
            what has changed, and data is the unserialized object that has
            changed

    Returns:
        list: [(callback, Response)] that need to be called
    """
    ret = []
    child_changes = {}
    for change in changes:
        # Add any changes that our children need to know about
        self._add_child_change(change, child_changes)
    # If we have update subscribers, serialize at this level
    if self.update_requests:
        serialized = serialize_object(self.data)
        for request in self.update_requests:
            ret.append(request.update_response(serialized))
    # If we have delta subscribers, serialize the changes
    if self.delta_requests:
        for change in changes:
            change[-1] = serialize_object(change[-1])
        for request in self.delta_requests:
            ret.append(request.delta_response(changes))
    # Now notify our children. Use a distinct loop variable here: the
    # original code rebound the name child_changes to each value while
    # iterating the dict of the same name, which worked but obscured
    # which object was which.
    for name, changes_for_child in child_changes.items():
        ret += self.children[name].notify_changes(changes_for_child)
    return ret
|
python
|
{
"resource": ""
}
|
q16962
|
NotifierNode._update_data
|
train
|
def _update_data(self, data):
# type: (Any) -> Dict[str, List]
"""Set our data and notify any subscribers of children what has changed
Args:
data (object): The new data
Returns:
dict: {child_name: [path_list, optional child_data]} of the change
that needs to be passed to a child as a result of this
"""
self.data = data
child_change_dict = {}
# Reflect change of data to children
for name in self.children:
child_data = getattr(data, name, None)
if child_data is None:
# Deletion
child_change_dict[name] = [[]]
else:
# Change
child_change_dict[name] = [[], child_data]
return child_change_dict
|
python
|
{
"resource": ""
}
|
q16963
|
NotifierNode.handle_subscribe
|
train
|
def handle_subscribe(self, request, path):
    # type: (Subscribe, List[str]) -> CallbackResponses
    """Add to the list of request to notify, and notify the initial value of
    the data held

    Args:
        request (Subscribe): The subscribe request
        path (list): The relative path from ourself

    Returns:
        list: [(callback, Response)] that need to be called
    """
    ret = []
    if path:
        # Recurse down
        name = path[0]
        if name not in self.children:
            # Lazily create a child node, even if we hold no data for it
            # yet; it will be populated when data arrives
            self.children[name] = NotifierNode(
                getattr(self.data, name, None), self)
        ret += self.children[name].handle_subscribe(request, path[1:])
    else:
        # This is for us
        serialized = serialize_object(self.data)
        if request.delta:
            # Delta subscriber: initial response is one whole-value delta
            self.delta_requests.append(request)
            ret.append(request.delta_response([[[], serialized]]))
        else:
            # Update subscriber: initial response is the current value
            self.update_requests.append(request)
            ret.append(request.update_response(serialized))
    return ret
|
python
|
{
"resource": ""
}
|
q16964
|
NotifierNode.handle_unsubscribe
|
train
|
def handle_unsubscribe(self, request, path):
    # type: (Subscribe, List[str]) -> CallbackResponses
    """Remove from the notifier list and send a return

    Args:
        request (Subscribe): The original subscribe request
        path (list): The relative path from ourself

    Returns:
        list: [(callback, Response)] that need to be called
    """
    ret = []
    if path:
        # Recurse down
        name = path[0]
        child = self.children[name]
        ret += child.handle_unsubscribe(request, path[1:])
        # Prune the child node once nothing references it any more
        if not child.children and not child.update_requests \
                and not child.delta_requests:
            del self.children[name]
    else:
        # This is for us; the request lives in exactly one of the lists
        if request in self.update_requests:
            self.update_requests.remove(request)
        else:
            self.delta_requests.remove(request)
        ret.append(request.return_response())
    return ret
|
python
|
{
"resource": ""
}
|
q16965
|
string
|
train
|
def string(name, description, default=None):
    # type: (AName, ADescription, AStringDefault) -> AAnno
    """Add a string parameter to be passed when instantiating this YAML file"""
    # common_args handles the shared name/default packaging for all
    # parameter factory functions in this module
    args = common_args(name, default)
    return Anno(description, typ=str, **args)
|
python
|
{
"resource": ""
}
|
q16966
|
float64
|
train
|
def float64(name, description, default=None):
    # type: (AName, ADescription, AFloat64Default) -> AAnno
    """Add a float64 parameter to be passed when instantiating this YAML file"""
    # Same shape as string()/int32(); only the typ differs
    args = common_args(name, default)
    return Anno(description, typ=float, **args)
|
python
|
{
"resource": ""
}
|
q16967
|
int32
|
train
|
def int32(name, description, default=None):
    # type: (AName, ADescription, AInt32Default) -> AAnno
    """Add an int32 parameter to be passed when instantiating this YAML file"""
    # Same shape as string()/float64(); only the typ differs
    args = common_args(name, default)
    return Anno(description, typ=int, **args)
|
python
|
{
"resource": ""
}
|
q16968
|
make_block_creator
|
train
|
def make_block_creator(yaml_path, filename=None):
    # type: (str, str) -> Callable[..., List[Controller]]
    """Make a collection function that will create a list of blocks

    Args:
        yaml_path (str): File path to YAML file, or a file in the same dir
        filename (str): If give, use this filename as the last element in
            the yaml_path (so yaml_path can be __file__)

    Returns:
        function: A collection function decorated with @takes. This can be
            used in other blocks or instantiated by the process. If the
            YAML text specified controllers or parts then a block instance
            with the given name will be instantiated. If there are any
            blocks listed then they will be called. All created blocks
            by this or any sub collection will be returned
    """
    # Parse the YAML once at definition time; block_creator closes over it
    sections, yamlname, docstring = Section.from_yaml(yaml_path, filename)
    yamldir = os.path.dirname(yaml_path)
    # Check we have only one controller
    controller_sections = [s for s in sections if s.section == "controllers"]
    assert len(controller_sections) == 1, \
        "Expected exactly 1 controller, got %s" % (controller_sections,)
    controller_section = controller_sections[0]

    def block_creator(kwargs):
        # Create the param dict of the static defined arguments
        defines = _create_defines(sections, yamlname, yamldir, kwargs)
        controllers, parts = _create_blocks_and_parts(sections, defines)
        # Make the controller
        controller = controller_section.instantiate(defines)
        for part in parts:
            controller.add_part(part)
        # Our controller goes last, after any sub-block controllers
        controllers.append(controller)
        return controllers

    # Give the creator a signature/docstring derived from the YAML params
    creator = creator_with_nice_signature(
        block_creator, sections, yamlname, yaml_path, docstring)
    return creator
|
python
|
{
"resource": ""
}
|
q16969
|
Section.instantiate
|
train
|
def instantiate(self, substitutions):
    """Keep recursing down from base using dotted name, then call it with
    self.params and args

    Args:
        substitutions (dict): Substitutions to make to self.param_dict

    Returns:
        The found object called with (*args, map_from_d)

    E.g. if ob is malcolm.parts, and name is "ca.CADoublePart", then the
    object will be malcolm.parts.ca.CADoublePart
    """
    param_dict = self.substitute_params(substitutions)
    # Split "pkg.path.Ident" into module path and final identifier
    pkg, ident = self.name.rsplit(".", 1)
    pkg = "malcolm.modules.%s" % pkg
    try:
        ob = importlib.import_module(pkg)
    except ImportError as e:
        # Re-raise with the YAML file/line so the user can find the entry
        raise_with_traceback(
            ImportError("\n%s:%d:\n%s" % (
                self.filename, self.lineno, e)))
    try:
        ob = getattr(ob, ident)
    except AttributeError:
        raise_with_traceback(
            ImportError("\n%s:%d:\nPackage %r has no ident %r" % (
                self.filename, self.lineno, pkg, ident)))
    try:
        # Validate the YAML params against the callable's signature
        model = MethodModel.from_callable(ob, returns=False)
        args = model.validate(param_dict)
        ret = ob(**args)
    except Exception as e:
        # Point at both the YAML entry and the target's source location
        sourcefile = inspect.getsourcefile(ob)
        lineno = inspect.getsourcelines(ob)[1]
        raise_with_traceback(
            YamlError("\n%s:%d:\n%s:%d:\n%s" % (
                self.filename, self.lineno, sourcefile, lineno, e)))
    else:
        return ret
|
python
|
{
"resource": ""
}
|
q16970
|
Section.from_yaml
|
train
|
def from_yaml(cls, yaml_path, filename=None):
    """Split a dictionary into parameters controllers parts blocks defines

    Args:
        yaml_path (str): File path to YAML file, or a file in the same dir
        filename (str): If give, use this filename as the last element in
            the yaml_path (so yaml_path can be __file__)

    Returns:
        tuple: (sections, yamlname, docstring) where sections is a
            list of created sections
    """
    if filename:
        # different filename to support passing __file__
        yaml_path = os.path.join(os.path.dirname(yaml_path), filename)
    assert yaml_path.endswith(".yaml"), \
        "Expected a/path/to/<yamlname>.yaml, got %r" % yaml_path
    # Basename minus the ".yaml" extension (5 chars)
    yamlname = os.path.basename(yaml_path)[:-5]
    log.debug("Parsing %s", yaml_path)
    with open(yaml_path) as f:
        text = f.read()
    # First separate them into their relevant sections
    # RoundTripLoader keeps line/column info for error reporting below
    ds = yaml.load(text, Loader=yaml.RoundTripLoader)
    docstring = None
    sections = []
    for d in ds:
        # Each top-level list entry must be a single-key mapping
        assert len(d) == 1, \
            "Expected section length 1, got %d" % len(d)
        lineno = d._yaml_line_col.line + 1
        name = list(d)[0]
        sections.append(cls(
            yaml_path, lineno, name, d[name]))
        if name == "builtin.defines.docstring":
            # Special section carrying the module docstring
            docstring = d[name]["value"]
    return sections, yamlname, docstring
|
python
|
{
"resource": ""
}
|
q16971
|
Section.substitute_params
|
train
|
def substitute_params(self, substitutions):
    """Substitute param values in our param_dict from params

    Args:
        substitutions (Map or dict): Values to substitute. E.g. Map of
            {"name": "me"}

    Returns:
        dict: a new dict; self.param_dict itself is not mutated

    E.g. if self.param_dict is:
        {"name": "$(name):pos", "exposure": 1.0}
    And substitutions is:
        {"name": "me"}
    After the call self.param_dict will be:
        {"name": "me:pos", "exposure": 1.0}
    """
    param_dict = {}
    # TODO: this should be yaml.add_implicit_resolver()
    for k, v in self.param_dict.items():
        param_dict[k] = replace_substitutions(v, substitutions)
    return param_dict
|
python
|
{
"resource": ""
}
|
q16972
|
MotorInfo.make_velocity_profile
|
train
|
def make_velocity_profile(self, v1, v2, distance, min_time):
    """Calculate PVT points that will perform the move within motor params

    Args:
        v1 (float): Starting velocity in EGUs/s
        v2 (float): Ending velocity in EGUs/s
        distance (float): Relative distance to travel in EGUs
        min_time (float): The minimum time the move should take

    Returns:
        tuple: (time_list, position_list) where time_list is a list of
            relative time points in seconds, and position_list is the
            position in EGUs that the motor should be
    """
    # Take off the settle time and distance
    if min_time > 0:
        min_time -= self.velocity_settle
        # During settle the motor travels at the final velocity v2
        distance -= self.velocity_settle * v2
    # The ramp time and distance of a continuous ramp from v1 to v2
    ramp_time = self.acceleration_time(v1, v2)
    ramp_distance = self.ramp_distance(v1, v2, ramp_time)
    remaining_distance = distance - ramp_distance
    # Check if we need to stretch in time
    if min_time > ramp_time:
        # Check how fast we would need to be going so that the total move
        # completes in min_time
        pad_velocity = remaining_distance / (min_time - ramp_time)
        if pad_velocity > max(v1, v2):
            # Can't just pad the ramp, make a hat pointing up
            it = self._make_hat(
                v1, v2, self.acceleration, distance, min_time)
        elif pad_velocity < min(v1, v2):
            # Can't just pad the ramp, make a hat pointing down
            it = self._make_hat(
                v1, v2, -self.acceleration, distance, min_time)
        else:
            # Make a padded ramp
            it = self._make_padded_ramp(v1, v2, pad_velocity, min_time)
    elif remaining_distance < 0:
        # Make a hat pointing down
        it = self._make_hat(v1, v2, -self.acceleration, distance, min_time)
    else:
        # Make a hat pointing up
        it = self._make_hat(v1, v2, self.acceleration, distance, min_time)
    # Create the time and velocity arrays
    time_array = [0.0]
    velocity_array = [v1]
    for t, v in it:
        assert t >= 0, "Got negative t %s" % t
        if t == 0:
            # Zero-duration segments may only repeat the current velocity
            assert v == velocity_array[-1], \
                "Can't move velocity in zero time"
            continue
        if v * velocity_array[-1] < 0:
            # Crossed zero, put in an explicit zero velocity
            fraction = velocity_array[-1] / (velocity_array[-1] - v)
            time_array.append(time_array[-1] + fraction * t)
            velocity_array.append(0)
            t -= fraction * t
        time_array.append(time_array[-1] + t)
        velocity_array.append(v)
    # Add on the settle time
    if self.velocity_settle > 0:
        time_array.append(time_array[-1] + self.velocity_settle)
        velocity_array.append(v2)
    return time_array, velocity_array
|
python
|
{
"resource": ""
}
|
q16973
|
MotorInfo.cs_axis_mapping
|
train
|
def cs_axis_mapping(cls,
                    part_info,  # type: Dict[str, Optional[Sequence]]
                    axes_to_move  # type: Sequence[str]
                    ):
    # type: (...) -> Tuple[str, Dict[str, MotorInfo]]
    """Given the motor infos for the parts, filter those with scannable
    names in axes_to_move, check they are all in the same CS, and return
    the cs_port and mapping of cs_axis to MotorInfo"""
    cs_ports = set()  # type: Set[str]
    axis_mapping = {}  # type: Dict[str, MotorInfo]
    for motor_info in cls.filter_values(part_info):
        if motor_info.scannable in axes_to_move:
            # Motor must be assigned a real CS axis letter (1-1 mapping)
            assert motor_info.cs_axis in cs_axis_names, \
                "Can only scan 1-1 mappings, %r is %r" % \
                (motor_info.scannable, motor_info.cs_axis)
            cs_ports.add(motor_info.cs_port)
            axis_mapping[motor_info.scannable] = motor_info
    # Every requested axis must have produced a MotorInfo
    missing = list(set(axes_to_move) - set(axis_mapping))
    assert not missing, \
        "Some scannables %s are not in the CS mapping %s" % (
            missing, axis_mapping)
    # All moving motors must live in a single coordinate system
    assert len(cs_ports) == 1, \
        "Requested axes %s are in multiple CS numbers %s" % (
            axes_to_move, list(cs_ports))
    cs_axis_counts = Counter([x.cs_axis for x in axis_mapping.values()])
    # Any cs_axis defs that are used for more that one raw motor
    overlap = [k for k, v in cs_axis_counts.items() if v > 1]
    assert not overlap, \
        "CS axis defs %s have more that one raw motor attached" % overlap
    return cs_ports.pop(), axis_mapping
|
python
|
{
"resource": ""
}
|
q16974
|
ManagerController.set_layout
|
train
|
def set_layout(self, value):
    """Set the layout table value. Called on attribute put"""
    # Can't do this with changes_squashed as it will call update_modified
    # from another thread and deadlock. Need RLock.is_owned() from update_*
    # Ask every part (visible or not) where it sits in the layout
    part_info = self.run_hooks(
        LayoutHook(p, c, self.port_info, value)
        for p, c in self.create_part_contexts(only_visible=False).items())
    with self.changes_squashed:
        layout_parts = LayoutInfo.filter_parts(part_info)
        # Rebuild the layout table columns from the parts' LayoutInfos
        name, mri, x, y, visible = [], [], [], [], []
        for part_name, layout_infos in layout_parts.items():
            for layout_info in layout_infos:
                name.append(part_name)
                mri.append(layout_info.mri)
                x.append(layout_info.x)
                y.append(layout_info.y)
                visible.append(layout_info.visible)
        layout_table = LayoutTable(name, mri, x, y, visible)
        try:
            # Compare the Array seq to get at the numpy array
            np.testing.assert_equal(
                layout_table.visible.seq, self.layout.value.visible.seq)
        except AssertionError:
            visibility_changed = True
        else:
            visibility_changed = False
        self.layout.set_value(layout_table)
        if self.saved_visibility is None:
            # First write of table, set layout and exports saves
            self.saved_visibility = layout_table.visible
            self.saved_exports = self.exports.value.to_dict()
            # Force visibility changed so we update_block_endpoints
            # even if there weren't any visible
            visibility_changed = True
        if visibility_changed:
            self.update_modified()
            self.update_exportable()
            # Part visibility changed, might have attributes or methods
            # that we need to hide or show
            self.update_block_endpoints()
|
python
|
{
"resource": ""
}
|
q16975
|
ManagerController.save
|
train
|
def save(self, designName=""):
# type: (ASaveDesign) -> None
"""Save the current design to file"""
self.try_stateful_function(
ss.SAVING, ss.READY, self.do_save, designName)
|
python
|
{
"resource": ""
}
|
q16976
|
ManagerController._validated_config_filename
|
train
|
def _validated_config_filename(self, name):
"""Make config dir and return full file path and extension
Args:
name (str): Filename without dir or extension
Returns:
str: Full path including extension
"""
dir_name = self._make_config_dir()
filename = os.path.join(dir_name, name.split(".json")[0] + ".json")
return filename
|
python
|
{
"resource": ""
}
|
q16977
|
ManagerController.do_load
|
train
|
def do_load(self, design, init=False):
    # type: (str, bool) -> None
    """Load a design name, running the child LoadHooks.

    Args:
        design: Name of the design json file, without extension
        init: Passed to the LoadHook to tell the children if this is being
            run at Init or not
    """
    if design:
        filename = self._validated_config_filename(design)
        with open(filename, "r") as f:
            text = f.read()
        structure = json_decode(text)
    else:
        # Empty design name means reset to an empty structure
        structure = {}
    # Attributes and Children used to be merged, support this
    attributes = structure.get("attributes", structure)
    children = structure.get("children", structure)
    # Set the layout table
    name, mri, x, y, visible = [], [], [], [], []
    for part_name, d in attributes.get("layout", {}).items():
        name.append(part_name)
        # mri is recalculated by set_layout, so saved files omit it
        mri.append("")
        x.append(d["x"])
        y.append(d["y"])
        visible.append(d["visible"])
    self.set_layout(LayoutTable(name, mri, x, y, visible))
    # Set the exports table
    source, export = [], []
    for source_name, export_name in attributes.get("exports", {}).items():
        source.append(source_name)
        export.append(export_name)
    self.exports.set_value(ExportTable(source, export))
    # Set other attributes
    our_values = {k: v for k, v in attributes.items()
                  if k in self.our_config_attributes}
    block = self.block_view()
    block.put_attribute_values(our_values)
    # Run the load hook to get parts to load their own structure
    self.run_hooks(
        LoadHook(p, c, children.get(p.name, {}), init)
        for p, c in self.create_part_contexts(only_visible=False).items())
    # Record the design as the clean baseline for "modified" tracking
    self._mark_clean(design, init)
|
python
|
{
"resource": ""
}
|
q16978
|
FieldRegistry.add_method_model
|
train
|
def add_method_model(self,
                     func,  # type: Callable
                     name=None,  # type: Optional[str]
                     description=None,  # type: Optional[str]
                     owner=None,  # type: object
                     ):
    # type: (...) -> MethodModel
    """Register a function to be added to the block

    If name is not given the function's own __name__ is used; the
    MethodModel signature is introspected from the callable.
    """
    if name is None:
        name = func.__name__
    method = MethodModel.from_callable(func, description)
    self._add_field(owner, name, method, func)
    return method
|
python
|
{
"resource": ""
}
|
q16979
|
PartRegistrar.add_method_model
|
train
|
def add_method_model(self,
                     func,  # type: Callable
                     name=None,  # type: Optional[str]
                     description=None,  # type: Optional[str]
                     ):
    # type: (...) -> MethodModel
    """Register a function to be added to the Block as a MethodModel"""
    # Delegate to the field registry, recording our part as the owner
    return self._field_registry.add_method_model(
        func, name, description, self._part)
|
python
|
{
"resource": ""
}
|
q16980
|
PartRegistrar.add_attribute_model
|
train
|
def add_attribute_model(self,
                        name,  # type: str
                        attr,  # type: AttributeModel
                        writeable_func=None,  # type: Optional[Callable]
                        ):
    # type: (...) -> AttributeModel
    """Register a pre-existing AttributeModel to be added to the Block"""
    # Delegate to the field registry, recording our part as the owner
    return self._field_registry.add_attribute_model(
        name, attr, writeable_func, self._part)
|
python
|
{
"resource": ""
}
|
q16981
|
MOProblem.objective_bounds
|
train
|
def objective_bounds(self):
    """
    Return objective bounds

    Returns
    -------
    lower : list of floats
        Lower boundaries for the objectives
    Upper : list of floats
        Upper boundaries for the objectives
    """
    # Guard clause: both bounds must already be known, as computing
    # them on demand is not implemented
    if not (self.ideal and self.nadir):
        raise NotImplementedError(
            "Ideal and nadir value calculation is not yet implemented"
        )
    return self.ideal, self.nadir
|
python
|
{
"resource": ""
}
|
q16982
|
_centroids
|
train
|
def _centroids(n_clusters: int, points: List[List[float]]) -> List[List[float]]:
    """ Return n_clusters centroids of points

    Runs k-means, then snaps each cluster centre to the nearest actual
    member of *points*, so every returned "centroid" is an existing point.
    """
    k_means = KMeans(n_clusters=n_clusters)
    k_means.fit(points)
    # Index of the point closest to each cluster centre
    closest, _ = pairwise_distances_argmin_min(k_means.cluster_centers_, points)
    return list(map(list, np.array(points)[closest.tolist()]))
|
python
|
{
"resource": ""
}
|
q16983
|
new_points
|
train
|
def new_points(
    factory: IterationPointFactory, solution, weights: List[List[float]] = None
) -> List[Tuple[np.ndarray, List[float]]]:
    """Generate approximate set of points

    Generate set of Pareto optimal solutions projecting from the Pareto optimal solution
    using weights to determine the direction.

    Parameters
    ----------
    factory:
        IterationPointFactory with suitable optimization problem

    solution:
        Current solution from which new solutions are projected

    weights:
        Direction of the projection, if not given generate with
        :func:random_weights
    """
    # Imported here to avoid a circular import at module load time
    from desdeo.preference.direct import DirectSpecification

    points = []
    nof = factory.optimization_method.optimization_problem.problem.nof_objectives()
    if not weights:
        # Default: 50 random directions per objective
        weights = random_weights(nof, 50 * nof)
    for pref in map(
        lambda w: DirectSpecification(factory.optimization_method, np.array(w)), weights
    ):
        points.append(factory.result(pref, solution))
    return points
|
python
|
{
"resource": ""
}
|
q16984
|
as_minimized
|
train
|
def as_minimized(values: List[float], maximized: List[bool]) -> List[float]:
    """ Return vector values as minimized

    Components flagged as maximized are negated so the whole vector can
    be treated as a minimization problem.
    """
    result = []
    for value, is_max in zip(values, maximized):
        result.append(value * -1. if is_max else value)
    return result
|
python
|
{
"resource": ""
}
|
q16985
|
_prompt_wrapper
|
train
|
def _prompt_wrapper(message, default=None, validator=None):
    """ Handle references piped from file

    Prompts interactively when a terminal is available, otherwise reads
    one line from stdin. Exits via sys.exit if the answer contains "q".
    """

    # Minimal stand-in for prompt_toolkit's Document, so validators can
    # be run against piped input too
    class MockDocument:
        def __init__(self, text):
            self.text = text

    if HAS_INPUT:
        ret = prompt(message, default=default, validator=validator)
    else:
        # Piped mode: consume a line and echo it for the transcript
        ret = sys.stdin.readline().strip()
        print(message, ret)
        if validator:
            validator.validate(MockDocument(ret))
    # NOTE(review): this triggers on any answer containing the letter "q",
    # not only the literal "q" command — presumably intentional, confirm.
    if "q" in ret:
        # NOTE(review): prints only when HAS_OUTPUT is false — looks
        # inverted, verify against the Tee/output handling elsewhere
        if not HAS_OUTPUT:
            print("User exit")
        sys.exit("User exit")
    return ret
|
python
|
{
"resource": ""
}
|
q16986
|
init_nautilus
|
train
|
def init_nautilus(method):
    """Initialize nautilus method

    Parameters
    ----------
    method
        Interactive method used for the process

    Returns
    -------
    PreferenceInformation subclass to be initialized
    """
    print("Preference elicitation options:")
    print("\t1 - Percentages")
    print("\t2 - Relative ranks")
    print("\t3 - Direct")
    # Index into this list is (user selection - 1)
    PREFCLASSES = [PercentageSpecifictation, RelativeRanking, DirectSpecification]
    pref_sel = int(
        _prompt_wrapper(
            "Reference elicitation ",
            default=u"%s" % (1),
            validator=NumberValidator([1, 3]),
        )
    )
    preference_class = PREFCLASSES[pref_sel - 1]

    print("Nadir: %s" % method.problem.nadir)
    print("Ideal: %s" % method.problem.ideal)

    # Work out how many iterations have already been consumed
    if method.current_iter - method.user_iters:
        finished_iter = method.user_iters - method.current_iter
    else:
        finished_iter = 0

    # Ask for the number of remaining iterations and update the totals
    new_iters = int(
        _prompt_wrapper(
            u"Ni: ", default=u"%s" % (method.current_iter), validator=NumberValidator()
        )
    )
    method.current_iter = new_iters
    method.user_iters = finished_iter + new_iters
    return preference_class
|
python
|
{
"resource": ""
}
|
q16987
|
iter_nautilus
|
train
|
def iter_nautilus(method):
    """ Iterate NAUTILUS method either interactively, or using given preferences if given

    Parameters
    ----------
    method : instance of NAUTILUS subclass
       Fully initialized NAUTILUS method instance

    Returns
    -------
    The last iteration point reached (or the point at which the user
    stopped), or None if no iterations were run.
    """
    solution = None
    while method.current_iter:
        # (Re-)elicit preference style and iteration counts
        preference_class = init_nautilus(method)
        pref = preference_class(method, None)
        default = ",".join(map(str, pref.default_input()))
        while method.current_iter:
            method.print_current_iteration()
            pref_input = _prompt_wrapper(
                u"Preferences: ",
                default=default,
                validator=VectorValidator(method, pref),
            )
            # A command letter instead of a vector stops this inner loop
            cmd = _check_cmd(pref_input)
            if cmd:
                solution = method.zh
                break
            pref = preference_class(
                method, np.fromstring(pref_input, dtype=np.float, sep=",")
            )
            default = ",".join(map(str, pref.pref_input))
            solution, _ = method.next_iteration(pref)
        # "c" command means stop completely rather than re-eliciting
        if cmd and list(cmd)[0] == "c":
            break
    return solution
|
python
|
{
"resource": ""
}
|
q16988
|
isin
|
train
|
def isin(value, values):
    """ Check that value is in values

    Args:
        value: candidate vector, checked component-wise
        values: 2-D array-like of allowed vectors; component i of *value*
            must appear somewhere in column i of *values*

    Returns:
        bool: True if every component appears in its column
    """
    # Hoist the array conversion out of the loop: it is loop-invariant
    # and was previously rebuilt on every component
    values_arr = np.array(values)
    for i, v in enumerate(value):
        if v not in values_arr[:, i]:
            return False
    return True
|
python
|
{
"resource": ""
}
|
q16989
|
NIMBUS.between
|
train
|
def between(self, objs1: List[float], objs2: List[float], n=1):
    """
    Generate `n` solutions which attempt to trade-off `objs1` and `objs2`.

    Parameters
    ----------
    objs1
        First boundary point for desired objective function values

    objs2
        Second boundary point for desired objective function values

    n
        Number of solutions to generate
    """
    # Imported here to avoid a circular import at module load time
    from desdeo.preference.base import ReferencePoint

    objs1_arr = np.array(objs1)
    objs2_arr = np.array(objs2)

    # n interior points divide the segment into n+1 pieces
    segments = n + 1

    diff = objs2_arr - objs1_arr
    solutions = []
    for x in range(1, segments):
        # Linear interpolation between the two boundary points
        btwn_obj = objs1_arr + float(x) / segments * diff
        solutions.append(
            self._get_ach().result(ReferencePoint(self, btwn_obj), None)
        )
    return ResultSet(solutions)
|
python
|
{
"resource": ""
}
|
q16990
|
find_focusable
|
train
|
def find_focusable(node):
    """
    Search for the first focusable window within the node tree

    Walks the focus chain downwards; a leaf node is focusable. Returns
    None if a container has children but no focus history.
    """
    current = node
    while current.children:
        if not current.focus:
            return None
        # focus[0] is the id of the most recently focused child
        current = current.children_dict[current.focus[0]]
    return current
|
python
|
{
"resource": ""
}
|
q16991
|
find_parent_split
|
train
|
def find_parent_split(node, orientation):
    """
    Find the first parent split relative to the given node
    according to the desired orientation

    Returns None when no suitable split exists below the workspace level.
    """
    current = node
    while current:
        # A usable split has the wanted orientation and more than one child
        if current.orientation == orientation and len(current.children) > 1:
            return current
        # Never search above the workspace
        if current.type == "workspace":
            return None
        current = current.parent
    return None
|
python
|
{
"resource": ""
}
|
q16992
|
cycle_windows
|
train
|
def cycle_windows(tree, direction):
    """
    Cycle through windows of the current workspace

    Args:
        tree: i3 layout tree with a .focused node
        direction (str): one of "up", "down", "left", "right"

    Returns:
        The next focusable node in that direction, or None if the focused
        node has no parent split with the matching orientation.
    """
    # Map the direction onto a split orientation and a step of +/-1
    wanted = {
        "orientation": ("vertical" if direction in ("up", "down")
                        else "horizontal"),
        "direction": (1 if direction in ("down", "right")
                      else -1),
    }
    split = find_parent_split(tree.focused.parent, wanted["orientation"])
    if split:
        # Get the next child given the direction
        child_ids = [child.id for child in split.children]
        focus_idx = child_ids.index(split.focused_child.id)
        # Modulo wraps around at either end of the split
        next_idx = (focus_idx + wanted['direction']) % len(child_ids)
        next_node = split.children[next_idx]

        return find_focusable(next_node)
    return None
|
python
|
{
"resource": ""
}
|
q16993
|
cycle_outputs
|
train
|
def cycle_outputs(tree, direction):
    """
    Cycle through directions

    Args:
        tree: i3 layout tree
        direction (str): "next" for forward, anything else for backward

    Returns:
        The first focusable node on the next/previous output.
    """
    direction = 1 if direction == "next" else -1
    # Skip i3's internal scratchpad output "__i3"
    outputs = [output for output in tree.root.children
               if output.name != "__i3"]
    focus_idx = outputs.index(tree.root.focused_child)
    # Modulo wraps around past the last/first output
    next_idx = (focus_idx + direction) % len(outputs)
    next_output = outputs[next_idx]

    return find_focusable(next_output)
|
python
|
{
"resource": ""
}
|
q16994
|
NIMBUSClassification.with_class
|
train
|
def with_class(self, cls):
    """ Return functions with the class

    Collects the keys whose classification tuple starts with *cls*,
    preserving insertion order.
    """
    return [key for key, value in self._classification.items()
            if value[0] == cls]
|
python
|
{
"resource": ""
}
|
q16995
|
NIMBUSClassification._as_reference_point
|
train
|
def _as_reference_point(self) -> np.ndarray:
""" Return classification information as reference point
"""
ref_val = []
for fn, f in self._classification.items():
if f[0] == "<":
ref_val.append(self._method.problem.ideal[fn])
elif f[0] == "<>":
ref_val.append(self._method.problem.nadir[fn])
else:
ref_val.append(f[1])
return np.array(ref_val)
|
python
|
{
"resource": ""
}
|
q16996
|
main
|
train
|
def main(logfile=False):
    """ Solve River Pollution problem with NAUTILUS V1 and E-NAUTILUS Methods

    Args:
        logfile: If truthy, duplicate console output into this log file.
    """

    # Duplicate output to log file

    # Validator accepting only "a" (apriori) or "o" (optimization)
    class NAUTILUSOptionValidator(Validator):
        def validate(self, document):
            if document.text not in "ao":
                raise ValidationError(
                    message="Please select a for apriori or o for optimization option",
                    cursor_position=0,
                )

    if logfile:
        Tee(logfile)

    first = True
    current_iter = 0
    # Outer loop: restart the whole process while iterations remain
    while first or current_iter:
        # SciPy breaks box constraints
        nautilus_v1 = NAUTILUSv1(RiverPollution(), SciPyDE)
        if not first:
            # Resume from where the previous round stopped
            nautilus_v1.current_iter = current_iter
        first = False
        nadir = nautilus_v1.problem.nadir
        ideal = nautilus_v1.problem.ideal
        solution = tui.iter_nautilus(nautilus_v1)

        current_iter = nautilus_v1.current_iter

        # TODO: Move to tui module
        method_e = None
        if current_iter > 0:
            option = _prompt_wrapper(
                "select a for apriori or o for optimization option: ",
                default="o",
                validator=NAUTILUSOptionValidator(),
            )
            if option.lower() == "a":
                # Apriori: generate a pre-computed point set to search
                wi = _prompt_wrapper(
                    "Number of PO solutions (10 or 20): ",
                    default="20",
                    validator=tui.NumberValidator(),
                )
                weights = WEIGHTS[wi]

                factory = IterationPointFactory(
                    SciPyDE(NautilusAchievementProblem(RiverPollution()))
                )
                points = misc.new_points(factory, solution, weights=weights)

                method_e = ENAUTILUS(PreGeneratedProblem(points=points), PointSearch)
                method_e.zh_prev = solution
            else:
                # Optimization: continue on the live problem
                method_e = ENAUTILUS(RiverPollution(), SciPyDE)
            # method_e.zh = solution
            # Carry the iteration budget over to the E-NAUTILUS phase
            method_e.current_iter = nautilus_v1.current_iter
            method_e.user_iters = nautilus_v1.user_iters

            print(
                "E-NAUTILUS\nselected iteration point: %s:"
                % ",".join(map(str, solution))
            )

        while method_e and method_e.current_iter > 0:
            if solution is None:
                solution = method_e.problem.nadir

            # Restore bounds captured from the NAUTILUS v1 problem
            method_e.problem.nadir = nadir
            method_e.problem.ideal = ideal

            cmd = tui.iter_enautilus(
                method_e, initial_iterpoint=solution, initial_bound=method_e.fh_lo
            )
            if cmd:
                print(method_e.current_iter)
                current_iter = method_e.current_iter
                break
    if tui.HAS_INPUT:
        input("Press ENTER to exit")
|
python
|
{
"resource": ""
}
|
q16997
|
setup
|
train
|
def setup(app):
    """Setup connects events to the sitemap builder

    Registers the site_url/html_baseurl config values and hooks the
    sitemap collection and creation callbacks into the build.
    """
    app.add_config_value(
        'site_url',
        default=None,
        rebuild=False
    )
    try:
        app.add_config_value(
            'html_baseurl',
            default=None,
            rebuild=False
        )
    except Exception:
        # Newer Sphinx versions already define html_baseurl, in which case
        # re-adding it raises; that is safe to ignore. Catch Exception
        # rather than everything: the previous bare except also swallowed
        # SystemExit/KeyboardInterrupt.
        pass
    app.connect('html-page-context', add_html_link)
    app.connect('build-finished', create_sitemap)
    app.sitemap_links = []
    app.locales = []
|
python
|
{
"resource": ""
}
|
q16998
|
OptimizationMethod.search
|
train
|
def search(self, max=False, **params) -> Tuple[np.ndarray, List[float]]:
    """
    Search for the optimal solution

    This sets up the search for the optimization and calls the _search method

    Parameters
    ----------
    max : bool (default False)
        If true find maximum of the objective function instead of minimum

    **params : dict [optional]
        Parameters for single objective optimization method
    """
    self._max = max
    # Maximization is implemented by flipping the objective's sign
    self._coeff = -1.0 if max else 1.0
    return self._search(**params)
|
python
|
{
"resource": ""
}
|
q16999
|
NAUTILUSv1.next_iteration
|
train
|
def next_iteration(self, preference=None):
    """
    Return next iteration bounds

    Args:
        preference: Optional new preference information; when given it
            replaces self.preference before iterating.

    Returns:
        tuple: (fh_lo, zh) — the lower bounds and the new iteration point
    """
    if preference:
        self.preference = preference
    print(("Given preference: %s" % self.preference.pref_input))
    self._update_fh()
    # tmpzh = list(self.zh)
    self._update_zh(self.zh, self.fh)
    # self.zh = list(np.array(self.zh) / 2. + np.array(self.zh_prev) / 2.)
    # self.zh_prev = tmpzh
    # No need to recompute bounds on the final iteration
    if self.current_iter != 1:
        self.fh_lo = list(self.lower_bounds_factory.result(self.zh_prev))
    self.current_iter -= 1
    return self.fh_lo, self.zh
|
python
|
{
"resource": ""
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.