code
string
signature
string
docstring
string
loss_without_docstring
float64
loss_with_docstring
float64
factor
float64
def on_hook(self, hook):
    # type: (Hook) -> None
    """Takes a hook, and optionally calls hook.run on a function"""
    try:
        registered_func, make_args = self.hooked[type(hook)]
    except (KeyError, TypeError):
        # Nothing registered for this hook type (or no lookup table at all)
        return
    hook(registered_func, make_args())
6.259832
7.048492
0.888109
def transition_allowed(self, initial_state, target_state):
    # type: (str, str) -> bool
    """Check if a transition between two states is allowed"""
    assert initial_state in self._allowed, \
        "%s is not in %s" % (initial_state, list(self._allowed))
    reachable = self._allowed[initial_state]
    return target_state in reachable
2.938083
3.045439
0.964748
def set_allowed(self, initial_state, *allowed_states):
    # type: (str, *str) -> None
    """Add an allowed transition from initial_state to allowed_states"""
    targets = list(allowed_states)
    transitions = self._allowed.setdefault(initial_state, set())
    transitions.update(targets)
    # Record any states we haven't seen before, preserving insertion order
    for state in targets + [initial_state]:
        if state not in self.possible_states:
            self.possible_states.append(state)
2.789681
2.705711
1.031034
def cmd_string(name, cmd):
    # type: (AName, ACmd) -> ADefine
    """Define a string parameter coming from a shell command to be used
    within this YAML file. Trailing newlines will be stripped.
    """
    # NOTE(review): shell=True runs cmd through the shell, so cmd must come
    # from a trusted source (here: the YAML file itself, not external input)
    value = subprocess.check_output(cmd, shell=True).rstrip("\n")
    return Define(name, value)
8.581947
7.966574
1.077244
def export_env_string(name, value):
    # type: (AEnvName, AEnvValue) -> ADefine
    """Exports an environment variable with the given value"""
    # Visible to this process and any children spawned after this call
    os.environ[name] = value
    return Define(name, value)
8.572281
9.974096
0.859454
def module_path(name, path):
    # type: (AModuleName, AModulePath) -> ADefine
    """Load an external malcolm module (e.g. ADCore/etc/malcolm)"""
    # Record the original name/path pair before the name is qualified below
    define = Define(name, path)
    assert os.path.isdir(path), "%r doesn't exist" % path
    # Import the package under the malcolm.modules namespace
    name = "malcolm.modules.%s" % name
    import_package_from_path(name, path)
    return define
7.305158
7.04708
1.036622
def on_message(self, message):
    """Pass response from server to process receive queue

    Args:
        message(str): Received message
    """
    # Called in tornado loop
    try:
        self.log.debug("Got message %s", message)
        d = json_decode(message)
        response = deserialize_object(d, Response)
        if isinstance(response, (Return, Error)):
            # Terminal response: this request is finished, drop the lookup
            request = self._request_lookup.pop(response.id)
            if isinstance(response, Error):
                # Make the message an exception so it can be raised
                response.message = ResponseError(response.message)
        else:
            # Non-terminal (e.g. subscription update): keep the lookup entry
            request = self._request_lookup[response.id]
        # Transfer the work of the callback to cothread
        cothread.Callback(request.callback, response)
    except Exception:
        # If we don't catch the exception here, tornado will spew odd
        # error messages about 'HTTPRequest' object has no attribute 'path'
        self.log.exception("on_message(%r) failed", message)
6.020989
6.116374
0.984405
def sync_proxy(self, mri, block):
    """Abstract method telling the ClientComms to sync this proxy Block
    with its remote counterpart. Should wait until it is connected

    Args:
        mri (str): The mri for the remote block
        block (BlockModel): The local proxy Block to keep in sync
    """
    # Send a root Subscribe to the server
    subscribe = Subscribe(path=[mri], delta=True)
    done_queue = Queue()

    def handle_response(response):
        # Called from tornado
        if not isinstance(response, Delta):
            # Return or Error is the end of our subscription, log and ignore
            self.log.debug("Proxy got response %r", response)
            done_queue.put(None)
        else:
            # Hand the Delta over to cothread to apply to the local block
            cothread.Callback(
                self._handle_response, response, block, done_queue)

    subscribe.set_callback(handle_response)
    IOLoopHelper.call(self._send_request, subscribe)
    # Block until the first response has been processed (or timeout)
    done_queue.get(timeout=DEFAULT_TIMEOUT)
8.068145
8.517938
0.947195
def send_put(self, mri, attribute_name, value):
    """Dispatch a Put to the server and wait for its response

    Args:
        mri (str): The mri of the Block
        attribute_name (str): The name of the Attribute within the Block
        value: The value to put
    """
    response_queue = Queue()
    request = Put(
        path=[mri, attribute_name, "value"], value=value)
    request.set_callback(response_queue.put)
    IOLoopHelper.call(self._send_request, request)
    response = response_queue.get()
    if isinstance(response, Error):
        # Server reported a problem; message holds the exception to raise
        raise response.message
    return response.value
4.268103
4.724199
0.903455
def send_post(self, mri, method_name, **params):
    """Dispatch a Post to the server and wait for its result

    Args:
        mri (str): The mri of the Block
        method_name (str): The name of the Method within the Block
        params: The parameters to send

    Returns:
        The return results from the server
    """
    response_queue = Queue()
    request = Post(
        path=[mri, method_name], parameters=params)
    request.set_callback(response_queue.put)
    IOLoopHelper.call(self._send_request, request)
    response = response_queue.get()
    if isinstance(response, Error):
        # Server reported a problem; message holds the exception to raise
        raise response.message
    return response.value
4.680954
5.256094
0.890577
def filter_parts(cls, part_info):
    # type: (Type[T], PartInfo) -> Dict[str, List[T]]
    """Filter the part_info dict looking for instances of our class

    Args:
        part_info (dict): {part_name: [Info] or None} as returned from
            Controller.run_hook()

    Returns:
        dict: {part_name: [info]} where info is a subclass of cls
    """
    filtered = OrderedDict()
    for part_name, info_list in part_info.items():
        # Skip parts that produced nothing or raised an exception
        if info_list is None or isinstance(info_list, Exception):
            continue
        matching = [info for info in info_list if isinstance(info, cls)]
        if matching:
            filtered[part_name] = matching
    return filtered
2.654097
3.156587
0.840812
def filter_values(cls, part_info):
    # type: (Type[T], PartInfo) -> List[T]
    """Filter the part_info dict list looking for instances of our class

    Args:
        part_info (dict): {part_name: [Info] or None} as returned from
            Controller.run_hook()

    Returns:
        list: [info] where info is a subclass of cls
    """
    # Flatten the per-part lists into one, keeping part order
    filtered = []
    for info_list in cls.filter_parts(part_info).values():
        filtered.extend(info_list)
    return filtered
4.598463
8.274275
0.555754
def filter_single_value(cls, part_info, error_msg=None):
    # type: (Type[T], PartInfo, str) -> T
    """Filter the part_info dict list looking for a single instance of
    our class

    Args:
        part_info (dict): {part_name: [Info] or None} as returned from
            Controller.run_hook()
        error_msg (str, optional): Specific error message to show if
            there isn't a single value

    Returns:
        info subclass of cls
    """
    matches = cls.filter_values(part_info)
    if len(matches) != 1:
        if error_msg is None:
            error_msg = "Expected a single %s, got %s of them" % \
                (cls.__name__, len(matches))
        raise BadValueError(error_msg)
    return matches[0]
2.890009
4.101336
0.70465
def disconnect_pv_clients(self, mris):
    # type: (List[str]) -> None
    """Disconnect anyone listening to any of the given mris"""
    for mri in mris:
        pvs_for_mri = self._pvs.pop(mri, {})
        for pv in pvs_for_mri.values():
            # Close pv with force destroy on, this will call
            # onLastDisconnect
            pv.close(destroy=True, sync=True, timeout=1.0)
7.747232
8.06942
0.960073
def with_source_port_tag(self, tags, connected_value):
    """Add a Source Port tag to the tags list, removing any other
    Source Ports
    """
    # Drop any existing sourcePort tags, then append the new one
    kept = [tag for tag in tags if not tag.startswith("sourcePort:")]
    kept.append(self.source_port_tag(connected_value))
    return kept
3.026642
3.104641
0.974877
def port_tag_details(cls, tags):
    # type: (Sequence[str]) -> Union[Tuple[bool, Port, str], None]
    """Search tags for port info, returning it

    Args:
        tags: A list of tags to check

    Returns:
        None or (is_source, port, connected_value|disconnected_value)
        where port is one of the Enum entries of Port
    """
    for tag in tags:
        match = port_tag_re.match(tag)
        if match:
            # First matching tag wins; implicit None if nothing matches
            source_sink, port, extra = match.groups()
            return source_sink == "source", cls(port), extra
4.699965
5.640651
0.833231
def transition(self, state, message=""):
    """Change to a new state if the transition is allowed

    Args:
        state (str): State to transition to
        message (str): Message if the transition is to a fault state
    """
    with self.changes_squashed:
        initial_state = self.state.value
        if self.state_set.transition_allowed(
                initial_state=initial_state, target_state=state):
            self.log.debug(
                "%s: Transitioning from %s to %s",
                self.mri, initial_state, state)
            # Pick the alarm severity matching the destination state
            if state == ss.DISABLED:
                alarm = Alarm.invalid("Disabled")
            elif state == ss.FAULT:
                alarm = Alarm.major(message)
            else:
                alarm = Alarm()
            self.update_health(self, HealthInfo(alarm))
            self.state.set_value(state)
            self.state.set_alarm(alarm)
            # Update writeable flags of children for the new state
            for child, writeable in self._children_writeable[state].items():
                if isinstance(child, AttributeModel):
                    child.meta.set_writeable(writeable)
                elif isinstance(child, MethodModel):
                    child.set_writeable(writeable)
        else:
            raise TypeError("Cannot transition from %s to %s" % (
                initial_state, state))
4.325396
4.286952
1.008968
def sync_proxy(self, mri, block):
    """Abstract method telling the ClientComms to sync this proxy Block
    with its remote counterpart. Should wait until it is connected

    Args:
        mri (str): The mri for the remote block
        block (BlockModel): The local proxy Block to keep in sync
    """
    done_queue = Queue()
    self._queues[mri] = done_queue
    # Names of fields seen so far; empty means "(re)generate from scratch"
    update_fields = set()

    def callback(value=None):
        if isinstance(value, Exception):
            # Disconnect or Cancelled or RemoteError
            if isinstance(value, Disconnected):
                # We will get a reconnect with a whole new structure
                update_fields.clear()
                block.health.set_value(
                    value="pvAccess disconnected",
                    alarm=Alarm.disconnected("pvAccess disconnected")
                )
        else:
            with block.notifier.changes_squashed:
                if not update_fields:
                    self.log.debug("Regenerating from %s", list(value))
                    self._regenerate_block(block, value, update_fields)
                    done_queue.put(None)
                else:
                    self._update_block(block, value, update_fields)

    m = self._ctxt.monitor(mri, callback, notify_disconnect=True)
    self._monitors.add(m)
    # Block until the first full structure has been applied (or timeout)
    done_queue.get(timeout=DEFAULT_TIMEOUT)
6.677158
6.902632
0.967335
def send_put(self, mri, attribute_name, value):
    """Abstract method to dispatch a Put to the server

    Args:
        mri (str): The mri of the Block
        attribute_name (str): The name of the Attribute within the Block
        value: The value to put
    """
    path = attribute_name + ".value"
    typ, value = convert_to_type_tuple_value(serialize_object(value))
    if isinstance(typ, tuple):
        # Structure, make into a Value
        _, typeid, fields = typ
        value = Value(Type(fields, typeid), value)
    try:
        self._ctxt.put(mri, {path: value}, path)
    except RemoteError:
        if attribute_name == "exports":
            # TODO: use a tag instead of a name
            # This will change the structure of the block
            # Wait for reconnect
            self._queues[mri].get(timeout=DEFAULT_TIMEOUT)
        else:
            # Not expected, raise
            raise
11.331691
11.271976
1.005298
def send_post(self, mri, method_name, **params):
    """Abstract method to dispatch a Post to the server

    Args:
        mri (str): The mri of the Block
        method_name (str): The name of the Method within the Block
        params: The parameters to send

    Returns:
        The return results from the server
    """
    typ, parameters = convert_to_type_tuple_value(serialize_object(params))
    uri = NTURI(typ[2])
    # Wrap the parameters in an NTURI addressed at mri.method_name
    uri = uri.wrap(
        path="%s.%s" % (mri, method_name),
        kws=parameters,
        scheme="pva"
    )
    # No timeout: the method may legitimately run for a long time
    value = self._ctxt.rpc(mri, uri, timeout=None)
    return convert_value_to_dict(value)
12.351474
13.995219
0.88255
def make_view(self, context, data, child_name):
    # type: (Context, Model, str) -> Any
    """Make a child View of data[child_name]"""
    with self._lock:
        # Look up and wrap the child under the lock so the structure
        # can't change underneath us
        child = data[child_name]
        return make_view(self, context, child)
4.910954
4.480222
1.096141
def _handle_get(self, request):
    # type: (Get) -> CallbackResponses
    """Called with the lock taken"""
    data = self._block
    # Walk down the request path (first element is the mri, already used)
    for i, endpoint in enumerate(request.path[1:]):
        try:
            data = data[endpoint]
        except KeyError:
            if hasattr(data, "typeid"):
                typ = data.typeid
            else:
                typ = type(data)
            raise UnexpectedError(
                "Object %s of type %r has no attribute %r" % (
                    request.path[:i+1], typ, endpoint))
    # Important to serialize now with the lock so we get a consistent set
    serialized = serialize_object(data)
    ret = [request.return_response(serialized)]
    return ret
6.793257
6.792539
1.000106
def _handle_put(self, request):
    # type: (Put) -> CallbackResponses
    """Called with the lock taken"""
    attribute_name = request.path[1]
    attribute = self._block[attribute_name]
    assert isinstance(attribute, AttributeModel), \
        "Cannot Put to %s which is a %s" % (attribute.path, type(attribute))
    self.check_field_writeable(attribute)
    put_function = self.get_put_function(attribute_name)
    value = attribute.meta.validate(request.value)
    # Release the lock while user code runs
    with self.lock_released:
        result = put_function(value)
    if request.get and result is None:
        # We asked for a Get, and didn't get given a return, so do return
        # the current value. Don't serialize here as value is immutable
        # (as long as we don't try too hard to break the rules)
        result = self._block[attribute_name].value
    elif not request.get:
        # We didn't ask for a Get, so throw result away
        result = None
    ret = [request.return_response(result)]
    return ret
7.047996
7.057737
0.99862
def _handle_post(self, request):
    # type: (Post) -> CallbackResponses
    """Called with the lock taken"""
    method_name = request.path[1]
    method = self._block[method_name]
    assert isinstance(method, MethodModel), \
        "Cannot Post to %s which is a %s" % (method.path, type(method))
    self.check_field_writeable(method)
    post_function = self.get_post_function(method_name)
    args = method.validate(request.parameters)
    # Release the lock while user code runs
    with self.lock_released:
        result = post_function(**args)
    # Don't need to serialize as the result should be immutable
    ret = [request.return_response(result)]
    return ret
7.984484
7.934143
1.006345
def wait_for_stateful_block_init(context, mri, timeout=DEFAULT_TIMEOUT):
    """Wait until a Block backed by a StatefulController has initialized

    Args:
        context (Context): The context to use to make the child block
        mri (str): The mri of the child block
        timeout (float): The maximum time to wait
    """
    # Fail fast if the block lands in FAULT or DISABLED instead of READY
    context.when_matches(
        [mri, "state", "value"], StatefulStates.READY,
        bad_values=[StatefulStates.FAULT, StatefulStates.DISABLED],
        timeout=timeout)
13.074094
14.352221
0.910946
def result(self, timeout=None):
    """Return the result of the call that the future represents.

    Args:
        timeout: The number of seconds to wait for the result if the future
            isn't done. If None, then there is no limit on the wait time.

    Returns:
        The result of the call that the future represents.

    Raises:
        TimeoutError: If the future didn't finish executing before the
            given timeout.
        exceptions.Exception: If the call raised then that exception will
            be raised.
    """
    if self._state == self.RUNNING:
        # Block until done (or timeout elapses)
        self._context.wait_all_futures([self], timeout)
    # Name-mangled helper on the class raises if the call failed
    return self.__get_result()
9.328039
10.605557
0.879543
def exception(self, timeout=None):
    """Return the exception raised by the call that the future represents.

    Args:
        timeout: The number of seconds to wait for the exception if the
            future isn't done. If None, then there is no limit on the wait
            time.

    Returns:
        The exception raised by the call that the future represents or None
        if the call completed without raising.

    Raises:
        TimeoutError: If the future didn't finish executing before the
            given timeout.
    """
    still_running = self._state == self.RUNNING
    if still_running:
        # Block until the future completes (or timeout elapses)
        self._context.wait_all_futures([self], timeout)
    return self._exception
9.477594
10.929092
0.86719
def set_result(self, result):
    """Sets the return value of work associated with the future.

    Should only be used by Task and unit tests.
    """
    # Store the result before publishing the FINISHED state
    self._result = result
    self._state = self.FINISHED
7.958906
7.051369
1.128704
def set_exception(self, exception):
    """Sets the result of the future as being the given exception.

    Should only be used by Task and unit tests.
    """
    assert isinstance(exception, Exception), \
        "%r should be an Exception" % exception
    # Store the exception before publishing the FINISHED state
    self._exception = exception
    self._state = self.FINISHED
4.901731
4.656266
1.052717
def set_logger(self, **fields):
    """Change the name of the logger that log.* should call

    Args:
        **fields: Extra fields to be logged. Logger name will be:
            ".".join([<module_name>, <cls_name>] + fields_sorted_on_key)
    """
    name_parts = [self.__module__, self.__class__.__name__]
    name_parts += [value for _, value in sorted(fields.items())]
    # names should be something like this for one field:
    # ["malcolm.modules.scanning.controllers.runnablecontroller",
    #  "RunnableController", "BL45P-ML-SCAN-01"]
    self.log = logging.getLogger(".".join(name_parts))
    if fields:
        self.log.addFilter(FieldFilter(fields))
    return self.log
6.268492
5.503864
1.138926
def return_response(self, value=None):
    # type: (Any) -> Tuple[Callback, Return]
    """Create a Return Response object to signal a return value"""
    response = Return(id=self.id, value=value)
    return self.callback, response
7.70317
8.197108
0.939742
def error_response(self, exception):
    # type: (Exception) -> Tuple[Callback, Error]
    """Create an Error Response object to signal an error"""
    response = Error(id=self.id, message=exception)
    # Log with traceback so the failure is visible server-side too
    log.exception("Exception raised for request %s", self)
    return self.callback, response
7.352543
8.170385
0.899902
def make_view(controller, context, data):
    # type: (Controller, Context, Any) -> Any
    """Make a View subclass containing properties specific for given data

    Args:
        controller (Controller): The child controller that hosts the data
        context (Context): The context the parent has made that the View
            should use for manipulating the data
        data (Model): The actual data that context will be manipulating

    Returns:
        View: A View subclass instance that provides a user-focused API to
            the given data
    """
    # Dispatch on the model type; order matters as BlockModel etc. are
    # themselves Model subclasses
    if isinstance(data, BlockModel):
        # Make an Block View
        view = _make_view_subclass(Block, controller, context, data)
    elif isinstance(data, AttributeModel):
        # Make an Attribute View
        view = Attribute(controller, context, data)
    elif isinstance(data, MethodModel):
        # Make a Method View
        view = Method(controller, context, data)
    elif isinstance(data, Model):
        # Make a generic View
        view = _make_view_subclass(View, controller, context, data)
    elif isinstance(data, dict):
        # Make a dict of Views
        d = OrderedDict()
        for k, v in data.items():
            d[k] = make_view(controller, context, v)
        view = d
    elif isinstance(data, list):
        # Need to recurse down
        view = [make_view(controller, context, x) for x in data]
    else:
        # Just return the data unwrapped as it should be immutable
        view = data
    return view
2.572921
2.744723
0.937407
def put_value(self, value, timeout=None):
    """Put a value to the Attribute and wait for completion"""
    self._context.put(self._data.path + ["value"], value, timeout=timeout)
11.15112
10.295312
1.083126
def calculate_exposure(self, duration):
    # type: (float) -> float
    """Calculate the exposure to set the detector to given the duration of
    the frame and the readout_time and frequency_accuracy
    """
    # Deadtime = clock-accuracy slice of the frame (ppm) plus the fixed
    # readout time
    clock_error = self.frequency_accuracy * duration / 1000000.0
    exposure = duration - clock_error - self.readout_time
    assert exposure > 0.0, \
        "Exposure time %s too small when deadtime taken into account" % (
            exposure,)
    return exposure
7.979671
6.560277
1.216362
def set_notifier_path(self, notifier, path):
    # type: (Union[Notifier, DummyNotifier], List[str]) -> None
    """Sets the notifier, and the path from the path from block root

    Args:
        notifier (Notifier): The Notifier to tell when endpoint data changes
        path (list): The absolute path to get to this object
    """
    # This function should either change from the DummyNotifier or to
    # the DummyNotifier, never between two valid notifiers
    assert self.notifier is Model.notifier or notifier is Model.notifier, \
        "Already have a notifier %s path %s" % (self.notifier, self.path)
    self.notifier = notifier
    self.path = path
    # Tell all our children too
    for name, ct in self.call_types.items():
        if ct.is_mapping:
            child = getattr(self, name)
            # Only recurse if the mapping's value type is a Model
            if child and issubclass(ct.typ[1], Model):
                for k, v in child.items():
                    v.set_notifier_path(notifier, self.path + [name, k])
        elif issubclass(ct.typ, Model):
            assert not ct.is_array, \
                "Can't deal with Arrays of Models %s" % ct
            child = getattr(self, name)
            child.set_notifier_path(notifier, self.path + [name])
4.568337
4.741
0.963581
def apply_change(self, path, *args):
    # type: (List[str], Any) -> None
    """Take a single change from a Delta and apply it to this model"""
    if len(path) > 1:
        # Change belongs to a child model; recurse down
        self[path[0]].apply_change(path[1:], *args)
        return
    # Change is for us: must be a single endpoint with a single value
    assert len(path) == 1 and len(args) == 1, \
        "Cannot process change %s" % ([self.path + path] + list(args))
    setter = getattr(self, "set_%s" % path[0])
    setter(args[0])
3.530807
3.582433
0.985589
def create_attribute_model(self, initial_value=None):
    # type: (Any) -> AttributeModel
    """Make an AttributeModel instance of the correct type for this Meta

    Args:
        initial_value: The initial value the Attribute should take

    Returns:
        AttributeModel: The created attribute model instance
    """
    # attribute_class is declared on the concrete Meta subclass
    attr = self.attribute_class(meta=self, value=initial_value)
    return attr
6.041494
9.01438
0.670206
def from_annotype(cls, anno, writeable, **kwargs):
    # type: (Anno, bool, **Any) -> VMeta
    """Return an instance of this class from an Anno"""
    ret = cls(description=anno.description, writeable=writeable, **kwargs)
    # Tag with the default widget for this meta type (if there is one)
    widget = ret.default_widget()
    if widget != Widget.NONE:
        ret.set_tags([widget.tag()])
    return ret
7.45753
7.672043
0.97204
def register_annotype_converter(cls, types, is_array=False, is_mapping=False):
    # type: (Union[Sequence[type], type], bool, bool) -> Any
    """Register this class as a converter for Anno instances"""
    if not isinstance(types, Sequence):
        # Allow a single type as well as a sequence of them
        types = [types]

    def decorator(subclass):
        # One lookup entry per (type, array-ness, mapping-ness) triple
        for typ in types:
            cls._annotype_lookup[(typ, is_array, is_mapping)] = subclass
        return subclass
    return decorator
3.127639
3.395081
0.921227
def lookup_annotype_converter(cls, anno):
    # type: (Anno) -> Type[VMeta]
    """Look up a vmeta based on an Anno"""
    if hasattr(anno.typ, "__bases__"):
        # This is a proper type
        bases = inspect.getmro(anno.typ)
    else:
        # This is a numpy dtype
        bases = [anno.typ]
    # Walk from most specific type to least; first registered match wins
    for typ in bases:
        key = (typ, bool(anno.is_array), bool(anno.is_mapping))
        try:
            return cls._annotype_lookup[key]
        except KeyError:
            pass
    raise KeyError(anno)
4.496309
3.845541
1.169227
def set_value(self, value, set_alarm_ts=True, alarm=None, ts=None):
    # type: (Any, bool, Alarm, TimeStamp) -> Any
    """Set value, calculating alarm and ts if requested"""
    value = self.meta.validate(value)
    if set_alarm_ts:
        # Default to "ok" alarm and "now" timestamp if not supplied
        if alarm is None:
            alarm = Alarm.ok
        else:
            alarm = deserialize_object(alarm, Alarm)
        if ts is None:
            ts = TimeStamp()
        else:
            ts = deserialize_object(ts, TimeStamp)
        self.set_value_alarm_ts(value, alarm, ts)
    else:
        # Only the value changes; alarm and timeStamp are left alone
        self.set_endpoint_data("value", value)
    return self.value
3.29024
3.189674
1.031529
def set_value_alarm_ts(self, value, alarm, ts):
    # type: (Any, Alarm, TimeStamp) -> None
    """Set value with pre-validated alarm and timeStamp"""
    with self.notifier.changes_squashed:
        # Assume they are of the right format
        self.value = value
        self.notifier.add_squashed_change(self.path + ["value"], value)
        # Only publish the alarm if it is a different object
        if alarm is not self.alarm:
            self.alarm = alarm
            self.notifier.add_squashed_change(self.path + ["alarm"], alarm)
        self.timeStamp = ts
        self.notifier.add_squashed_change(self.path + ["timeStamp"], ts)
3.845447
3.702192
1.038695
def greet(self, name, sleep=0):
    # type: (AName, ASleep) -> AGreeting
    """Optionally sleep <sleep> seconds, then return a greeting to <name>"""
    print("Manufacturing greeting...")
    # sleep_for is cooperative, so other tasks keep running while we wait
    sleep_for(sleep)
    greeting = "Hello %s" % name
    return greeting
10.406839
9.925767
1.048467
def get_version():
    """Extracts the version number from the version.py file.

    Returns:
        str: The version string, e.g. "1.2.3"

    Raises:
        RuntimeError: If no __version__ assignment is found in the file
    """
    VERSION_FILE = '../malcolm/version.py'
    # Use a context manager so the file handle is closed promptly; the
    # original open(...).read() left closing to the garbage collector
    with open(VERSION_FILE, 'rt') as f:
        contents = f.read()
    mo = re.search(r'^__version__ = [\'"]([^\'"]*)[\'"]', contents, re.M)
    if mo:
        return mo.group(1)
    raise RuntimeError(
        'Unable to find version string in {0}.'.format(VERSION_FILE))
2.376271
2.224155
1.068392
def add_nexus_nodes(generator, vds_file_path):
    """Add in the additional information to make this into a standard nexus
    format file:-
    (a) create the standard structure under the 'entry' group with a
        subgroup for each dataset. 'set_bases' lists the data sets we make
        here.
    (b) save a dataset for each axis in each of the dimensions of the scan
        representing the demand position at every point in the scan.
    """
    # create the axes dimensions attribute, a comma separated list giving
    # size of the axis dimensions padded with . for the detector dimensions
    # and multidimensional dimensions
    pad_dims = []
    for d in generator.dimensions:
        if len(d.axes) == 1:
            pad_dims.append("%s_set" % d.axes[0])
        else:
            pad_dims.append(".")
    # assume a 2 dimensional detector
    pad_dims += ["."] * 2
    with h5py.File(vds_file_path, 'r+', libver="latest") as vds:
        # set_data/set_bases are module-level lists of dataset paths and
        # their entry group names
        for data, node in zip(set_data, set_bases):
            # create a group for this entry
            vds.require_group(node)
            # points to the axis demand data sets
            vds[node].attrs["axes"] = pad_dims
            vds[node].attrs["NX_class"] = ['NXdata']
            # points to the detector dataset for this entry
            vds[node].attrs["signal"] = data.split('/')[-1]
            # a hard link from this entry 'signal' to the actual data
            vds[node + data] = vds[data]
            # NOTE(review): the comments below talk about sharing the demand
            # list between entries, which would need axis_sets to outlive
            # this loop — confirm intended nesting against the original file
            axis_sets = {}
            # iterate the axes in each dimension of the generator to create
            # the axis information nodes
            for i, d in enumerate(generator.dimensions):
                for axis in d.axes:
                    # add signal data dimension for axis
                    axis_indices = '{}_set_indices'.format(axis)
                    vds[node].attrs[axis_indices] = i
                    # demand positions for axis
                    axis_set = '{}_set'.format(axis)
                    if axis_sets.get(axis_set):
                        # link to the first entry's demand list
                        vds[node + axis_set] = axis_sets[axis_set]
                    else:
                        # create the demand list for the first entry only
                        axis_demands = d.get_positions(axis)
                        vds.create_dataset(
                            node + axis_set, data=axis_demands)
                        vds[node + axis_set].attrs["units"] = \
                            generator.units[axis]
                    axis_sets[axis_set] = vds[node + axis_set]
        vds['entry'].attrs["NX_class"] = ['NXentry']
4.812738
4.491046
1.07163
def send_recv(self, message, timeout=10.0):
    """Send a message to a PandABox and wait for the response

    Args:
        message (str): The message to send
        timeout (float): How long to wait before raising queue.Empty

    Returns:
        str: The response
    """
    # send() hands back the queue that the response will arrive on
    return self.recv(self.send(message), timeout)
4.371173
6.144516
0.711394
def _send_loop(self):
    """Service self._send_queue, sending requests to server"""
    while True:
        message, response_queue = self._send_queue.get()
        if message is self.STOP:
            # Sentinel pushed at shutdown
            break
        try:
            # Register the response queue before sending so _recv_loop can
            # pair the reply with its waiter
            self._response_queues.put(response_queue)
            self._socket.send(message)
        except Exception:  # pylint:disable=broad-except
            log.exception("Exception sending message %s", message)
3.63375
3.354136
1.083364
response_queue = self._response_queues.get(timeout=0.1) response_queue.put(resp) self._completed_response_lines = [] self._is_multiline = None
def _respond(self, resp)
Respond to the person waiting
7.622832
7.862885
0.96947
def _recv_loop(self):
    """Service socket recv, returning responses to the correct queue"""
    self._completed_response_lines = []
    self._is_multiline = None
    lines_iterator = self._get_lines()
    while True:
        try:
            line = next(lines_iterator)
            if self._is_multiline is None:
                # First line of a response decides single vs multiline
                self._is_multiline = line.startswith("!") or line == "."
            if line.startswith("ERR"):
                # Protocol-level error; deliver as an exception object
                self._respond(ValueError(line))
            elif self._is_multiline:
                if line == ".":
                    # "." terminates a multiline response
                    self._respond(self._completed_response_lines)
                else:
                    assert line[0] == "!", \
                        "Multiline response {} doesn't start with !" \
                        .format(repr(line))
                    self._completed_response_lines.append(line[1:])
            else:
                self._respond(line)
        except StopIteration:
            # Socket closed: exit the loop cleanly
            return
        except Exception:
            log.exception("Exception receiving message")
            raise
3.175354
3.052706
1.040177
def parameterized_send(self, request, parameter_list):
    """Send batched requests for a list of parameters

    Args:
        request (str): Request to send, like "%s.*?\\n"
        parameter_list (list): parameters to format with, like
            ["TTLIN", "TTLOUT"]

    Returns:
        dict: {parameter: response_queue}
    """
    # Fire off one request per parameter, remembering each response queue
    response_queues = OrderedDict(
        (parameter, self.send(request % parameter))
        for parameter in parameter_list)
    return response_queues
5.087246
3.672064
1.385391
def get_readout_time(self, child, duration):
    """Calculate the readout time of the detector from the EPICS driver:
        - Set exposure and acquire period to same value
        - Acquire period will be set to lowest acceptable value
        - Difference will be readout time (this value is affected by
          detector settings)
    """
    # Ask for exposure == acquirePeriod; the driver clamps acquirePeriod up
    for attribute in (child.exposure, child.acquirePeriod):
        attribute.put_value(duration)
    measured_readout = child.acquirePeriod.value - child.exposure.value
    # It seems that the difference between acquirePeriod and exposure
    # doesn't tell the whole story, we seem to need an additional bit
    # of readout (or something) time on top
    fudge_factor = duration * 0.004 + 0.001
    return measured_readout + fudge_factor
6.684188
5.727947
1.166943
def notify_dispatch_request(self, request):
    # type: (Request) -> None
    """Will be called when a context passed to a hooked function is about
    to dispatch a request
    """
    if isinstance(request, Put) and request.path[0] == self.mri:
        # This means the context we were passed has just made a Put request
        # so mark the field as "we_modified" so it doesn't screw up the
        # modified led
        attribute_name = request.path[-2]
        if attribute_name not in self.no_save:
            # NOTE(review): this only warns; the "we_modified" bookkeeping
            # presumably happens elsewhere — confirm against the caller
            self.log.warning(
                "Part %s tried to set '%s' that is not in self.no_save. "
                "This will stop the 'modified' attribute from working.",
                self, attribute_name)
9.774898
10.277311
0.951114
def sever_sink_ports(self, context, ports, connected_to=None):
    # type: (AContext, APortMap, str) -> None
    """Conditionally sever Sink Ports of the child. If connected_to is
    then None then sever all, otherwise restrict to connected_to's
    Source Ports

    Args:
        context (Context): The context to use
        ports (dict): {part_name: [PortInfo]}
        connected_to (str): Restrict severing to this part
    """
    # Find the Source Ports to connect to
    if connected_to:
        # Calculate a lookup of the Source Port "name" to type
        source_port_lookup = self._source_port_lookup(
            ports.get(connected_to, []))
    else:
        # True acts as a wildcard: sever every Sink Port
        source_port_lookup = True
    # Find our Sink Ports
    sink_ports = self._get_flowgraph_ports(ports, SinkPortInfo)
    # If we have Sunk Ports that need to be disconnected then do so
    if sink_ports and source_port_lookup:
        child = context.block_view(self.mri)
        attribute_values = {}
        for name, port_info in sink_ports.items():
            if source_port_lookup is True or source_port_lookup.get(
                    child[name].value, None) == port_info.port:
                attribute_values[name] = port_info.disconnected_value
        child.put_attribute_values(attribute_values)
4.866657
4.952299
0.982707
def calculate_part_visibility(self, ports):
    # type: (APortMap) -> None
    """Calculate what is connected to what

    Args:
        ports: {part_name: [PortInfo]} from other ports
    """
    # Calculate a lookup of Source Port connected_value to part_name
    source_port_lookup = {}
    for part_name, port_infos in SourcePortInfo.filter_parts(
            ports).items():
        for port_info in port_infos:
            source_port_lookup[port_info.connected_value] = (
                part_name, port_info.port)
    # Look through all the Sink Ports, and set both ends of the
    # connection to visible if they aren't specified
    for part_name, port_infos in SinkPortInfo.filter_parts(
            ports).items():
        for port_info in port_infos:
            if port_info.value != port_info.disconnected_value:
                conn_part, port = source_port_lookup.get(
                    port_info.value, (None, None))
                # Only a match if the port types agree at both ends
                if conn_part and port == port_info.port:
                    if conn_part not in self.part_visibility:
                        self.part_visibility[conn_part] = True
                    if part_name not in self.part_visibility:
                        self.part_visibility[part_name] = True
3.40332
3.509213
0.969824
def handle_subscribe(self, request):
    # type: (Subscribe) -> CallbackResponses
    """Handle a Subscribe request from outside. Called with lock taken"""
    responses = self._tree.handle_subscribe(request, request.path[1:])
    # Remember the request so a later Unsubscribe can find it
    self._subscription_keys[request.generate_key()] = request
    return responses
11.824522
13.045299
0.90642
def handle_unsubscribe(self, request):
    # type: (Unsubscribe) -> CallbackResponses
    """Handle a Unsubscribe request from outside. Called with lock taken"""
    # Find (and forget) the original Subscribe this request cancels
    subscribe = self._subscription_keys.pop(request.generate_key())
    return self._tree.handle_unsubscribe(subscribe, subscribe.path[1:])
14.835044
15.8172
0.937906
def add_squashed_change(self, path, data):
    # type: (List[str], Any) -> None
    """Register a squashed change to a particular path

    Args:
        path (list): The path of what has changed, relative from Block
        data (object): The new data
    """
    assert self._squashed_count, "Called while not squashing changes"
    # Store the path relative to our root (drop the leading Block name)
    relative_path = path[1:]
    self._squashed_changes.append([relative_path, data])
6.203625
11.265969
0.550652
def notify_changes(self, changes):
    # type: (List[List]) -> CallbackResponses
    """Set our data and notify anyone listening

    Args:
        changes (list): [[path, optional data]] where path is the path to
            what has changed, and data is the unserialized object that has
            changed

    Returns:
        list: [(callback, Response)] that need to be called
    """
    ret = []
    child_changes = {}
    for change in changes:
        # Add any changes that our children need to know about
        self._add_child_change(change, child_changes)
    # If we have update subscribers, serialize at this level
    if self.update_requests:
        serialized = serialize_object(self.data)
        for request in self.update_requests:
            ret.append(request.update_response(serialized))
    # If we have delta subscribers, serialize the changes
    if self.delta_requests:
        for change in changes:
            change[-1] = serialize_object(change[-1])
        for request in self.delta_requests:
            ret.append(request.delta_response(changes))
    # Now notify our children
    for name, child_changes in child_changes.items():
        ret += self.children[name].notify_changes(child_changes)
    return ret
3.874648
4.067752
0.952528
# type: (Any) -> Dict[str, List] self.data = data child_change_dict = {} # Reflect change of data to children for name in self.children: child_data = getattr(data, name, None) if child_data is None: # Deletion child_change_dict[name] = [[]] else: # Change child_change_dict[name] = [[], child_data] return child_change_dict
def _update_data(self, data)
Set our data and notify any subscribers of children what has changed Args: data (object): The new data Returns: dict: {child_name: [path_list, optional child_data]} of the change that needs to be passed to a child as a result of this
4.059129
3.783309
1.072904
# type: (Subscribe, List[str]) -> CallbackResponses ret = [] if path: # Recurse down name = path[0] if name not in self.children: self.children[name] = NotifierNode( getattr(self.data, name, None), self) ret += self.children[name].handle_subscribe(request, path[1:]) else: # This is for us serialized = serialize_object(self.data) if request.delta: self.delta_requests.append(request) ret.append(request.delta_response([[[], serialized]])) else: self.update_requests.append(request) ret.append(request.update_response(serialized)) return ret
def handle_subscribe(self, request, path)
Add to the list of requests to notify, and notify the initial value of the data held Args: request (Subscribe): The subscribe request path (list): The relative path from ourself Returns: list: [(callback, Response)] that need to be called
4.752551
4.833102
0.983334
# type: (Subscribe, List[str]) -> CallbackResponses ret = [] if path: # Recurse down name = path[0] child = self.children[name] ret += child.handle_unsubscribe(request, path[1:]) if not child.children and not child.update_requests \ and not child.delta_requests: del self.children[name] else: # This is for us if request in self.update_requests: self.update_requests.remove(request) else: self.delta_requests.remove(request) ret.append(request.return_response()) return ret
def handle_unsubscribe(self, request, path)
Remove from the notifier list and send a return Args: request (Subscribe): The original subscribe request path (list): The relative path from ourself Returns: list: [(callback, Response)] that need to be called
3.738715
3.836564
0.974496
# type: (AName, ADescription, AStringDefault) -> AAnno args = common_args(name, default) return Anno(description, typ=str, **args)
def string(name, description, default=None)
Add a string parameter to be passed when instantiating this YAML file
11.537047
14.268586
0.808563
# type: (AName, ADescription, AFloat64Default) -> AAnno args = common_args(name, default) return Anno(description, typ=float, **args)
def float64(name, description, default=None)
Add a float64 parameter to be passed when instantiating this YAML file
10.033057
12.558155
0.798928
# type: (AName, ADescription, AInt32Default) -> AAnno args = common_args(name, default) return Anno(description, typ=int, **args)
def int32(name, description, default=None)
Add an int32 parameter to be passed when instantiating this YAML file
10.122073
12.479377
0.811104
# type: (str, str) -> Callable[..., List[Controller]] sections, yamlname, docstring = Section.from_yaml(yaml_path, filename) yamldir = os.path.dirname(yaml_path) # Check we have only one controller controller_sections = [s for s in sections if s.section == "controllers"] assert len(controller_sections) == 1, \ "Expected exactly 1 controller, got %s" % (controller_sections,) controller_section = controller_sections[0] def block_creator(kwargs): # Create the param dict of the static defined arguments defines = _create_defines(sections, yamlname, yamldir, kwargs) controllers, parts = _create_blocks_and_parts(sections, defines) # Make the controller controller = controller_section.instantiate(defines) for part in parts: controller.add_part(part) controllers.append(controller) return controllers creator = creator_with_nice_signature( block_creator, sections, yamlname, yaml_path, docstring) return creator
def make_block_creator(yaml_path, filename=None)
Make a collection function that will create a list of blocks Args: yaml_path (str): File path to YAML file, or a file in the same dir filename (str): If given, use this filename as the last element in the yaml_path (so yaml_path can be __file__) Returns: function: A collection function decorated with @takes. This can be used in other blocks or instantiated by the process. If the YAML text specified controllers or parts then a block instance with the given name will be instantiated. If there are any blocks listed then they will be called. All created blocks by this or any sub collection will be returned
4.686965
5.067832
0.924846
param_dict = self.substitute_params(substitutions) pkg, ident = self.name.rsplit(".", 1) pkg = "malcolm.modules.%s" % pkg try: ob = importlib.import_module(pkg) except ImportError as e: raise_with_traceback( ImportError("\n%s:%d:\n%s" % ( self.filename, self.lineno, e))) try: ob = getattr(ob, ident) except AttributeError: raise_with_traceback( ImportError("\n%s:%d:\nPackage %r has no ident %r" % ( self.filename, self.lineno, pkg, ident))) try: model = MethodModel.from_callable(ob, returns=False) args = model.validate(param_dict) ret = ob(**args) except Exception as e: sourcefile = inspect.getsourcefile(ob) lineno = inspect.getsourcelines(ob)[1] raise_with_traceback( YamlError("\n%s:%d:\n%s:%d:\n%s" % ( self.filename, self.lineno, sourcefile, lineno, e))) else: return ret
def instantiate(self, substitutions)
Keep recursing down from base using dotted name, then call it with self.params and args Args: substitutions (dict): Substitutions to make to self.param_dict Returns: The found object called with (*args, map_from_d) E.g. if ob is malcolm.parts, and name is "ca.CADoublePart", then the object will be malcolm.parts.ca.CADoublePart
2.920048
2.963283
0.98541
if filename: # different filename to support passing __file__ yaml_path = os.path.join(os.path.dirname(yaml_path), filename) assert yaml_path.endswith(".yaml"), \ "Expected a/path/to/<yamlname>.yaml, got %r" % yaml_path yamlname = os.path.basename(yaml_path)[:-5] log.debug("Parsing %s", yaml_path) with open(yaml_path) as f: text = f.read() # First separate them into their relevant sections ds = yaml.load(text, Loader=yaml.RoundTripLoader) docstring = None sections = [] for d in ds: assert len(d) == 1, \ "Expected section length 1, got %d" % len(d) lineno = d._yaml_line_col.line + 1 name = list(d)[0] sections.append(cls( yaml_path, lineno, name, d[name])) if name == "builtin.defines.docstring": docstring = d[name]["value"] return sections, yamlname, docstring
def from_yaml(cls, yaml_path, filename=None)
Split a dictionary into parameters controllers parts blocks defines Args: yaml_path (str): File path to YAML file, or a file in the same dir filename (str): If given, use this filename as the last element in the yaml_path (so yaml_path can be __file__) Returns: tuple: (sections, yamlname, docstring) where sections is a list of created sections
4.352071
3.975188
1.094809
param_dict = {} # TODO: this should be yaml.add_implicit_resolver() for k, v in self.param_dict.items(): param_dict[k] = replace_substitutions(v, substitutions) return param_dict
def substitute_params(self, substitutions)
Substitute param values in our param_dict from params Args: substitutions (Map or dict): Values to substitute. E.g. Map of {"name": "me"} E.g. if self.param_dict is: {"name": "$(name):pos", "exposure": 1.0} And substitutions is: {"name": "me"} After the call self.param_dict will be: {"name": "me:pos", "exposure": 1.0}
4.259785
4.231594
1.006662
# The time taken to ramp from v1 to pad_velocity t1 = self.acceleration_time(v1, pad_velocity) # Then on to v2 t2 = self.acceleration_time(pad_velocity, v2) # The distance during the pad tp = total_time - t1 - t2 # Yield the points yield t1, pad_velocity yield tp, pad_velocity yield t2, v2
def _make_padded_ramp(self, v1, v2, pad_velocity, total_time)
Makes a ramp that looks like this: v1 \______ pad_velocity | |\ | | \v2 t1 tp t2 Such that whole section takes total_time
4.240659
4.066552
1.042814
if min_time > 0: # We are trying to meet time constraints # Solve quadratic to give vm b = v1 + v2 + min_time * acceleration c = distance * acceleration + (v1*v1 + v2*v2) / 2 op = b*b - 4 * c if np.isclose(op, 0): # Might have a negative number as rounding error... op = 0 elif op < 0: # Can't do this, set something massive to fail vm check... op = 10000000000 def get_times(vm): t1 = (vm - v1) / acceleration t2 = (vm - v2) / acceleration tm = min_time - t1 - t2 assert -self.max_velocity <= vm <= self.max_velocity assert t1 >= 0 and t2 >= 0 and tm >= 0 return t1, tm, t2 try: # Try negative root vm = (b - np.sqrt(op)) / 2 t1, tm, t2 = get_times(vm) except AssertionError: try: # Try positive root vm = (b + np.sqrt(op)) / 2 t1, tm, t2 = get_times(vm) except AssertionError: # If vm is out of range or any segment takes negative time, # we can't do it in min_time, so act as if unconstrained t1, tm, t2, vm = self._calculate_hat_params( v1, v2, acceleration, distance) else: t1, tm, t2, vm = self._calculate_hat_params( v1, v2, acceleration, distance) # If middle segment needs to be negative time then we need to cap # vm and spend no time at vm if tm < 0: # Solve the quadratic to work out how long to spend accelerating vm = np.sqrt( (2 * acceleration * distance + v1 * v1 + v2 * v2) / 2) if acceleration < 0: vm = -vm t1 = self.acceleration_time(v1, vm) t2 = self.acceleration_time(vm, v2) tm = 0 # Yield the result yield t1, vm yield tm, vm yield t2, v2
def _make_hat(self, v1, v2, acceleration, distance, min_time)
Make a hat that looks like this: ______ vm v1 /| | \ d1| dm|d2\ v2 | | t1 tm t2 Such that the area under the graph (d1+dm+d2) is distance and t1+tm+t2 >= min_time
3.905376
3.821874
1.021848
# Take off the settle time and distance if min_time > 0: min_time -= self.velocity_settle distance -= self.velocity_settle * v2 # The ramp time and distance of a continuous ramp from v1 to v2 ramp_time = self.acceleration_time(v1, v2) ramp_distance = self.ramp_distance(v1, v2, ramp_time) remaining_distance = distance - ramp_distance # Check if we need to stretch in time if min_time > ramp_time: # Check how fast we would need to be going so that the total move # completes in min_time pad_velocity = remaining_distance / (min_time - ramp_time) if pad_velocity > max(v1, v2): # Can't just pad the ramp, make a hat pointing up it = self._make_hat( v1, v2, self.acceleration, distance, min_time) elif pad_velocity < min(v1, v2): # Can't just pad the ramp, make a hat pointing down it = self._make_hat( v1, v2, -self.acceleration, distance, min_time) else: # Make a padded ramp it = self._make_padded_ramp(v1, v2, pad_velocity, min_time) elif remaining_distance < 0: # Make a hat pointing down it = self._make_hat(v1, v2, -self.acceleration, distance, min_time) else: # Make a hat pointing up it = self._make_hat(v1, v2, self.acceleration, distance, min_time) # Create the time and velocity arrays time_array = [0.0] velocity_array = [v1] for t, v in it: assert t >= 0, "Got negative t %s" % t if t == 0: assert v == velocity_array[-1], \ "Can't move velocity in zero time" continue if v * velocity_array[-1] < 0: # Crossed zero, put in an explicit zero velocity fraction = velocity_array[-1] / (velocity_array[-1] - v) time_array.append(time_array[-1] + fraction * t) velocity_array.append(0) t -= fraction * t time_array.append(time_array[-1] + t) velocity_array.append(v) # Add on the settle time if self.velocity_settle > 0: time_array.append(time_array[-1] + self.velocity_settle) velocity_array.append(v2) return time_array, velocity_array
def make_velocity_profile(self, v1, v2, distance, min_time)
Calculate PVT points that will perform the move within motor params Args: v1 (float): Starting velocity in EGUs/s v2 (float): Ending velocity in EGUs/s distance (float): Relative distance to travel in EGUs min_time (float): The minimum time the move should take Returns: tuple: (time_list, velocity_list) where time_list is a list of relative time points in seconds, and velocity_list is the velocity in EGUs/s that the motor should be moving at
2.732194
2.780425
0.982653
# type: (...) -> Tuple[str, Dict[str, MotorInfo]] cs_ports = set() # type: Set[str] axis_mapping = {} # type: Dict[str, MotorInfo] for motor_info in cls.filter_values(part_info): if motor_info.scannable in axes_to_move: assert motor_info.cs_axis in cs_axis_names, \ "Can only scan 1-1 mappings, %r is %r" % \ (motor_info.scannable, motor_info.cs_axis) cs_ports.add(motor_info.cs_port) axis_mapping[motor_info.scannable] = motor_info missing = list(set(axes_to_move) - set(axis_mapping)) assert not missing, \ "Some scannables %s are not in the CS mapping %s" % ( missing, axis_mapping) assert len(cs_ports) == 1, \ "Requested axes %s are in multiple CS numbers %s" % ( axes_to_move, list(cs_ports)) cs_axis_counts = Counter([x.cs_axis for x in axis_mapping.values()]) # Any cs_axis defs that are used for more that one raw motor overlap = [k for k, v in cs_axis_counts.items() if v > 1] assert not overlap, \ "CS axis defs %s have more that one raw motor attached" % overlap return cs_ports.pop(), axis_mapping
def cs_axis_mapping(cls, part_info, # type: Dict[str, Optional[Sequence]] axes_to_move # type: Sequence[str] )
Given the motor infos for the parts, filter those with scannable names in axes_to_move, check they are all in the same CS, and return the cs_port and a mapping of scannable name to MotorInfo
3.099205
2.688817
1.152628
# Can't do this with changes_squashed as it will call update_modified # from another thread and deadlock. Need RLock.is_owned() from update_* part_info = self.run_hooks( LayoutHook(p, c, self.port_info, value) for p, c in self.create_part_contexts(only_visible=False).items()) with self.changes_squashed: layout_parts = LayoutInfo.filter_parts(part_info) name, mri, x, y, visible = [], [], [], [], [] for part_name, layout_infos in layout_parts.items(): for layout_info in layout_infos: name.append(part_name) mri.append(layout_info.mri) x.append(layout_info.x) y.append(layout_info.y) visible.append(layout_info.visible) layout_table = LayoutTable(name, mri, x, y, visible) try: # Compare the Array seq to get at the numpy array np.testing.assert_equal( layout_table.visible.seq, self.layout.value.visible.seq) except AssertionError: visibility_changed = True else: visibility_changed = False self.layout.set_value(layout_table) if self.saved_visibility is None: # First write of table, set layout and exports saves self.saved_visibility = layout_table.visible self.saved_exports = self.exports.value.to_dict() # Force visibility changed so we update_block_endpoints # even if there weren't any visible visibility_changed = True if visibility_changed: self.update_modified() self.update_exportable() # Part visibility changed, might have attributes or methods # that we need to hide or show self.update_block_endpoints()
def set_layout(self, value)
Set the layout table value. Called on attribute put
6.080414
5.937319
1.024101
# type: (ASaveDesign) -> None self.try_stateful_function( ss.SAVING, ss.READY, self.do_save, designName)
def save(self, designName="")
Save the current design to file
21.133207
20.692057
1.02132
dir_name = self._make_config_dir() filename = os.path.join(dir_name, name.split(".json")[0] + ".json") return filename
def _validated_config_filename(self, name)
Make config dir and return full file path and extension Args: name (str): Filename without dir or extension Returns: str: Full path including extension
4.014728
4.104994
0.978011
# type: (str, bool) -> None if design: filename = self._validated_config_filename(design) with open(filename, "r") as f: text = f.read() structure = json_decode(text) else: structure = {} # Attributes and Children used to be merged, support this attributes = structure.get("attributes", structure) children = structure.get("children", structure) # Set the layout table name, mri, x, y, visible = [], [], [], [], [] for part_name, d in attributes.get("layout", {}).items(): name.append(part_name) mri.append("") x.append(d["x"]) y.append(d["y"]) visible.append(d["visible"]) self.set_layout(LayoutTable(name, mri, x, y, visible)) # Set the exports table source, export = [], [] for source_name, export_name in attributes.get("exports", {}).items(): source.append(source_name) export.append(export_name) self.exports.set_value(ExportTable(source, export)) # Set other attributes our_values = {k: v for k, v in attributes.items() if k in self.our_config_attributes} block = self.block_view() block.put_attribute_values(our_values) # Run the load hook to get parts to load their own structure self.run_hooks( LoadHook(p, c, children.get(p.name, {}), init) for p, c in self.create_part_contexts(only_visible=False).items()) self._mark_clean(design, init)
def do_load(self, design, init=False)
Load a design name, running the child LoadHooks. Args: design: Name of the design json file, without extension init: Passed to the LoadHook to tell the children if this is being run at Init or not
4.321133
4.401826
0.981668
# type: (...) -> MethodModel if name is None: name = func.__name__ method = MethodModel.from_callable(func, description) self._add_field(owner, name, method, func) return method
def add_method_model(self, func, # type: Callable name=None, # type: Optional[str] description=None, # type: Optional[str] owner=None, # type: object )
Register a function to be added to the block
3.949174
4.626916
0.853522
# type: (...) -> MethodModel return self._field_registry.add_method_model( func, name, description, self._part)
def add_method_model(self, func, # type: Callable name=None, # type: Optional[str] description=None, # type: Optional[str] )
Register a function to be added to the Block as a MethodModel
7.663483
8.563643
0.894886
# type: (...) -> AttributeModel return self._field_registry.add_attribute_model( name, attr, writeable_func, self._part)
def add_attribute_model(self, name, # type: str attr, # type: AttributeModel writeable_func=None, # type: Optional[Callable] )
Register a pre-existing AttributeModel to be added to the Block
6.512339
8.597579
0.757462
if self.ideal and self.nadir: return self.ideal, self.nadir raise NotImplementedError( "Ideal and nadir value calculation is not yet implemented" )
def objective_bounds(self)
Return objective bounds Returns ------- lower : list of floats Lower boundaries for the objectives Upper : list of floats Upper boundaries for the objectives
7.14355
8.085835
0.883465
if isinstance(variables, Variable): addvars = copy.deepcopy([variables]) else: addvars = copy.deepcopy(variables) if index is None: self.variables.extend(addvars) else: self.variables[index:index] = addvars
def add_variables( self, variables: Union[List["Variable"], "Variable"], index: int = None ) -> None
Parameters ---------- variable : list of variables or single variable Add variables as problem variables index : int Location to add variables, if None add to the end
2.763579
2.805632
0.985011
k_means = KMeans(n_clusters=n_clusters) k_means.fit(points) closest, _ = pairwise_distances_argmin_min(k_means.cluster_centers_, points) return list(map(list, np.array(points)[closest.tolist()]))
def _centroids(n_clusters: int, points: List[List[float]]) -> List[List[float]]
Return n_clusters centroids of points
2.752652
2.67324
1.029706
# Initial wector space as per # Miettinen, K. Nonlinear Multiobjective Optimization # Kluwer Academic Publishers, 1999 wspace = 50 * nobj while wspace < nweight: wspace *= 2 weights = np.random.rand(wspace, nobj) return _centroids(nobj, weights)
def random_weights(nobj: int, nweight: int) -> List[List[float]]
Generate nweight random weight vectors for nobj objectives as per Tchebycheff method [SteCho83]_ .. [SteCho83] Steuer, R. E. & Choo, E.-U. An interactive weighted Tchebycheff procedure for multiple objective programming, Mathematical programming, Springer, 1983, 26, 326-344 Parameters ---------- nobj: Number of objective functions nweight: Number of weight vectors to be generated Returns ------- List[List[float]] nobj x nweight matrix of weight vectors
9.593456
10.271955
0.933946
from desdeo.preference.direct import DirectSpecification points = [] nof = factory.optimization_method.optimization_problem.problem.nof_objectives() if not weights: weights = random_weights(nof, 50 * nof) for pref in map( lambda w: DirectSpecification(factory.optimization_method, np.array(w)), weights ): points.append(factory.result(pref, solution)) return points
def new_points( factory: IterationPointFactory, solution, weights: List[List[float]] = None ) -> List[Tuple[np.ndarray, List[float]]]
Generate approximate set of points Generate set of Pareto optimal solutions projecting from the Pareto optimal solution using weights to determine the direction. Parameters ---------- factory: IterationPointFactory with suitable optimization problem solution: Current solution from which new solutions are projected weights: Direction of the projection, if not given generate with :func:random_weights
8.506451
9.389501
0.905953
return [v * -1. if m else v for v, m in zip(values, maximized)]
def as_minimized(values: List[float], maximized: List[bool]) -> List[float]
Return vector values as minimized
3.934039
3.806908
1.033395
class MockDocument: def __init__(self, text): self.text = text if HAS_INPUT: ret = prompt(message, default=default, validator=validator) else: ret = sys.stdin.readline().strip() print(message, ret) if validator: validator.validate(MockDocument(ret)) if "q" in ret: if not HAS_OUTPUT: print("User exit") sys.exit("User exit") return ret
def _prompt_wrapper(message, default=None, validator=None)
Handle references piped from file
4.686713
4.686631
1.000017
print("Preference elicitation options:") print("\t1 - Percentages") print("\t2 - Relative ranks") print("\t3 - Direct") PREFCLASSES = [PercentageSpecifictation, RelativeRanking, DirectSpecification] pref_sel = int( _prompt_wrapper( "Reference elicitation ", default=u"%s" % (1), validator=NumberValidator([1, 3]), ) ) preference_class = PREFCLASSES[pref_sel - 1] print("Nadir: %s" % method.problem.nadir) print("Ideal: %s" % method.problem.ideal) if method.current_iter - method.user_iters: finished_iter = method.user_iters - method.current_iter else: finished_iter = 0 new_iters = int( _prompt_wrapper( u"Ni: ", default=u"%s" % (method.current_iter), validator=NumberValidator() ) ) method.current_iter = new_iters method.user_iters = finished_iter + new_iters return preference_class
def init_nautilus(method)
Initialize nautilus method Parameters ---------- method Interactive method used for the process Returns ------- PreferenceInformation subclass to be initialized
5.348863
5.208892
1.026872
solution = None while method.current_iter: preference_class = init_nautilus(method) pref = preference_class(method, None) default = ",".join(map(str, pref.default_input())) while method.current_iter: method.print_current_iteration() pref_input = _prompt_wrapper( u"Preferences: ", default=default, validator=VectorValidator(method, pref), ) cmd = _check_cmd(pref_input) if cmd: solution = method.zh break pref = preference_class( method, np.fromstring(pref_input, dtype=np.float, sep=",") ) default = ",".join(map(str, pref.pref_input)) solution, _ = method.next_iteration(pref) if cmd and list(cmd)[0] == "c": break return solution
def iter_nautilus(method)
Iterate the NAUTILUS method either interactively, or non-interactively using the given preferences Parameters ---------- method : instance of NAUTILUS subclass Fully initialized NAUTILUS method instance
6.453835
6.46004
0.999039
for i, v in enumerate(value): if v not in np.array(values)[:, i]: return False return True
def isin(value, values)
Check that value is in values
5.197502
4.760292
1.091845
from desdeo.preference.base import ReferencePoint objs1_arr = np.array(objs1) objs2_arr = np.array(objs2) segments = n + 1 diff = objs2_arr - objs1_arr solutions = [] for x in range(1, segments): btwn_obj = objs1_arr + float(x) / segments * diff solutions.append( self._get_ach().result(ReferencePoint(self, btwn_obj), None) ) return ResultSet(solutions)
def between(self, objs1: List[float], objs2: List[float], n=1)
Generate `n` solutions which attempt to trade-off `objs1` and `objs2`. Parameters ---------- objs1 First boundary point for desired objective function values objs2 Second boundary point for desired objective function values n Number of solutions to generate
7.060525
7.225166
0.977213
if not node.children: return node if node.focus: return find_focusable(node.children_dict[node.focus[0]])
def find_focusable(node)
Search for the first focusable window within the node tree
4.725528
4.451286
1.06161
if (node and node.orientation == orientation and len(node.children) > 1): return node if not node or node.type == "workspace": return None return find_parent_split(node.parent, orientation)
def find_parent_split(node, orientation)
Find the first parent split relative to the given node according to the desired orientation
4.05599
4.718299
0.85963
wanted = { "orientation": ("vertical" if direction in ("up", "down") else "horizontal"), "direction": (1 if direction in ("down", "right") else -1), } split = find_parent_split(tree.focused.parent, wanted["orientation"]) if split: # Get the next child given the direction child_ids = [child.id for child in split.children] focus_idx = child_ids.index(split.focused_child.id) next_idx = (focus_idx + wanted['direction']) % len(child_ids) next_node = split.children[next_idx] return find_focusable(next_node) return None
def cycle_windows(tree, direction)
Cycle through windows of the current workspace
3.870706
3.851007
1.005115
direction = 1 if direction == "next" else -1 outputs = [output for output in tree.root.children if output.name != "__i3"] focus_idx = outputs.index(tree.root.focused_child) next_idx = (focus_idx + direction) % len(outputs) next_output = outputs[next_idx] return find_focusable(next_output)
def cycle_outputs(tree, direction)
Cycle through outputs
3.940932
3.979763
0.990243
parser = ArgumentParser() parser.add_argument("direction", choices=( "up", "down", "left", "right", "next", "prev" ), help="Direction to put the focus on") args = parser.parse_args() tree = i3Tree() con = None if args.direction in ("next", "prev"): con = cycle_outputs(tree, args.direction) else: con = cycle_windows(tree, args.direction) if con: i3.focus(con_id=con.id)
def main()
Entry point
4.016697
3.996678
1.005009
rcls = [] for key, value in self._classification.items(): if value[0] == cls: rcls.append(key) return rcls
def with_class(self, cls)
Return functions with the class
4.374184
4.378155
0.999093
ref_val = [] for fn, f in self._classification.items(): if f[0] == "<": ref_val.append(self._method.problem.ideal[fn]) elif f[0] == "<>": ref_val.append(self._method.problem.nadir[fn]) else: ref_val.append(f[1]) return np.array(ref_val)
def _as_reference_point(self) -> np.ndarray
Return classification information as reference point
4.403984
3.738463
1.17802
# Duplicate output to log file class NAUTILUSOptionValidator(Validator): def validate(self, document): if document.text not in "ao": raise ValidationError( message="Please select a for apriori or o for optimization option", cursor_position=0, ) if logfile: Tee(logfile) first = True current_iter = 0 while first or current_iter: # SciPy breaks box constraints nautilus_v1 = NAUTILUSv1(RiverPollution(), SciPyDE) if not first: nautilus_v1.current_iter = current_iter first = False nadir = nautilus_v1.problem.nadir ideal = nautilus_v1.problem.ideal solution = tui.iter_nautilus(nautilus_v1) current_iter = nautilus_v1.current_iter # TODO: Move to tui module method_e = None if current_iter > 0: option = _prompt_wrapper( "select a for apriori or o for optimization option: ", default="o", validator=NAUTILUSOptionValidator(), ) if option.lower() == "a": wi = _prompt_wrapper( "Number of PO solutions (10 or 20): ", default="20", validator=tui.NumberValidator(), ) weights = WEIGHTS[wi] factory = IterationPointFactory( SciPyDE(NautilusAchievementProblem(RiverPollution())) ) points = misc.new_points(factory, solution, weights=weights) method_e = ENAUTILUS(PreGeneratedProblem(points=points), PointSearch) method_e.zh_prev = solution else: method_e = ENAUTILUS(RiverPollution(), SciPyDE) # method_e.zh = solution method_e.current_iter = nautilus_v1.current_iter method_e.user_iters = nautilus_v1.user_iters print( "E-NAUTILUS\nselected iteration point: %s:" % ",".join(map(str, solution)) ) while method_e and method_e.current_iter > 0: if solution is None: solution = method_e.problem.nadir method_e.problem.nadir = nadir method_e.problem.ideal = ideal cmd = tui.iter_enautilus( method_e, initial_iterpoint=solution, initial_bound=method_e.fh_lo ) if cmd: print(method_e.current_iter) current_iter = method_e.current_iter break if tui.HAS_INPUT: input("Press ENTER to exit")
def main(logfile=False)
Solve River Pollution problem with NAUTILUS V1 and E-NAUTILUS Methods
6.211401
5.62298
1.104646
app.add_config_value( 'site_url', default=None, rebuild=False ) try: app.add_config_value( 'html_baseurl', default=None, rebuild=False ) except: pass app.connect('html-page-context', add_html_link) app.connect('build-finished', create_sitemap) app.sitemap_links = [] app.locales = []
def setup(app)
Setup connects events to the sitemap builder
3.08068
2.979303
1.034027