sentence1
stringlengths
52
3.87M
sentence2
stringlengths
1
47.2k
label
stringclasses
1 value
def get_nice_info_dict_string(self, info, level='\t', overview=None): """ Inserts all elements of a notification info-dictionary of gtkmvc3 or a Signal into one string and indicates levels of calls defined by 'kwargs'. Additionally, the elements get structured into a dict that holds all levels of the general notification key-value pairs in faster accessible lists. The dictionary has the element 'type' and the general elements {'model': [], 'prop_name': [], 'instance': [], 'method_name': [], 'args': [], 'kwargs': []}) plus specific elements according the type. Type is always one of the following list ['before', 'after', 'signal']. """ def get_nice_meta_signal_msg_tuple_string(meta_signal_msg_tuple, level, overview): meta_signal_dict = {} # origin s = "\n{0}origin={1}".format(level + "\t", meta_signal_msg_tuple.origin) meta_signal_dict['origin'] = meta_signal_msg_tuple.origin # change s += "\n{0}change={1}".format(level + "\t", meta_signal_msg_tuple.change) meta_signal_dict['change'] = meta_signal_msg_tuple.change # affects_children s += "\n{0}affects_children={1}".format(level + "\t", meta_signal_msg_tuple.affects_children) meta_signal_dict['affects_children'] = meta_signal_msg_tuple.affects_children overview['signal'].append(meta_signal_dict) # notification (tuple) notification_dict = {} meta_signal_dict['notification'] = notification_dict if meta_signal_msg_tuple.notification is None: s += "\n{0}notification={1}".format(level + "\t", meta_signal_msg_tuple.notification) else: s += "\n{0}notification=Notification(".format(level + "\t") # model notification_dict['model'] = meta_signal_msg_tuple.notification.model s += "\n{0}model={1}".format(level + "\t\t", meta_signal_msg_tuple.notification.model) # prop_name notification_dict['prop_name'] = meta_signal_msg_tuple.notification.prop_name s += "\n{0}prop_name={1}".format(level + "\t\t", meta_signal_msg_tuple.notification.prop_name) # info notification_dict['info'] = meta_signal_msg_tuple.notification.info 
overview['kwargs'].append(meta_signal_msg_tuple.notification.info) s += "\n{0}info=\n{1}{0}\n".format(level + "\t\t", self.get_nice_info_dict_string(meta_signal_msg_tuple.notification.info, level+'\t\t\t', overview)) return s def get_nice_action_signal_msg_tuple_string(meta_signal_msg_tuple, level, overview): meta_signal_dict = {} # after s = "\n{0}after={1}".format(level + "\t", meta_signal_msg_tuple.after) meta_signal_dict['after'] = meta_signal_msg_tuple.after # action s += "\n{0}action={1}".format(level + "\t", meta_signal_msg_tuple.action) meta_signal_dict['action'] = meta_signal_msg_tuple.action # origin s += "\n{0}origin={1}".format(level + "\t", meta_signal_msg_tuple.origin) meta_signal_dict['origin'] = meta_signal_msg_tuple.origin # origin s += "\n{0}action_parent_m={1}".format(level + "\t", meta_signal_msg_tuple.action_parent_m) meta_signal_dict['action_parent_m'] = meta_signal_msg_tuple.origin # change s += "\n{0}affected_models={1}".format(level + "\t", meta_signal_msg_tuple.affected_models) meta_signal_dict['affected_models'] = meta_signal_msg_tuple.affected_models if meta_signal_msg_tuple.after: s += "\n{0}result={1}".format(level + "\t", meta_signal_msg_tuple.result) meta_signal_dict['result'] = meta_signal_msg_tuple.result return s overview_was_none = False if overview is None: overview_was_none = True overview = dict({'model': [], 'prop_name': [], 'instance': [], 'method_name': [], 'args': [], 'kwargs': []}) overview['others'] = [] overview['info'] = [] if 'before' in info: overview['type'] = 'before' elif 'after' in info: overview['type'] = 'after' overview['result'] = [] else: # 'signal' in info: overview['type'] = 'signal' overview['signal'] = [] if ('after' in info or 'before' in info or 'signal' in info) and 'model' in info: if 'before' in info: s = "{0}'before': {1}".format(level, info['before']) elif 'after' in info: s = "{0}'after': {1}".format(level, info['after']) else: s = "{0}'signal': {1}".format(level, info['signal']) else: return 
str(info) overview['info'].append(info) # model s += "\n{0}'model': {1}".format(level, info['model']) overview['model'].append(info['model']) # prop_name s += "\n{0}'prop_name': {1}".format(level, info['prop_name']) overview['prop_name'].append(info['prop_name']) if not overview['type'] == 'signal': # instance s += "\n{0}'instance': {1}".format(level, info['instance']) overview['instance'].append(info['instance']) # method_name s += "\n{0}'method_name': {1}".format(level, info['method_name']) overview['method_name'].append(info['method_name']) # args s += "\n{0}'args': {1}".format(level, info['args']) overview['args'].append(info['args']) overview['kwargs'].append(info['kwargs']) if overview['type'] == 'after': overview['result'].append(info['result']) # kwargs s += "\n{0}'kwargs': {1}".format(level, self.get_nice_info_dict_string(info['kwargs'], level + "\t", overview)) if overview['type'] == 'after': s += "\n{0}'result': {1}".format(level, info['result']) # additional elements not created by gtkmvc3 or common function calls overview['others'].append({}) for key, value in info.items(): if key in ['before', 'after', 'model', 'prop_name', 'instance', 'method_name', 'args', 'kwargs', 'result']: pass else: s += "\n{0}'{2}': {1}".format(level, info[key], key) overview['others'][len(overview['others'])-1][key] = info[key] else: overview['kwargs'].append({}) # print(info) # print(info['arg']) if isinstance(info['arg'], MetaSignalMsg): overview['signal'].append(info['arg']) s += "\n{0}'arg': MetaSignalMsg({1}".format(level, get_nice_meta_signal_msg_tuple_string(info['arg'], level, overview)) elif isinstance(info['arg'], ActionSignalMsg): overview['instance'].append(info['arg'].action_parent_m.core_element) overview['method_name'].append(info['arg'].action) overview['signal'].append(info['arg']) overview['kwargs'].append(info['arg'].kwargs) # TODO check again this stuff args = [info['arg'].action_parent_m.core_element, ] args.extend(info['arg'].kwargs.values()) 
overview['args'].append(args) s += "\n{0}'arg': ActionSignalMsg({1}".format(level, get_nice_action_signal_msg_tuple_string(info['arg'], level, overview)) else: raise str(info) if overview_was_none: return s, overview else: return s
Inserts all elements of a notification info-dictionary of gtkmvc3 or a Signal into one string and indicates levels of calls defined by 'kwargs'. Additionally, the elements get structured into a dict that holds all levels of the general notification key-value pairs in faster accessible lists. The dictionary has the element 'type' and the general elements {'model': [], 'prop_name': [], 'instance': [], 'method_name': [], 'args': [], 'kwargs': []}) plus specific elements according the type. Type is always one of the following list ['before', 'after', 'signal'].
entailment
def register_view(self, view): """Called when the View was registered Can be used e.g. to connect signals. Here, the destroy signal is connected to close the application """ super(StateOutcomesListController, self).register_view(view) if isinstance(view, StateOutcomesTreeView): self.connect_signal(view['to_state_combo'], "edited", self.on_to_state_edited) self.connect_signal(view['to_outcome_combo'], "edited", self.on_to_outcome_edited) if isinstance(self.model.state, LibraryState) or self.model.state.get_next_upper_library_root_state(): view['id_cell'].set_property('editable', False) view['name_cell'].set_property('editable', False) self._apply_value_on_edited_and_focus_out(view['name_cell'], self.apply_new_outcome_name) self.update(initiator='"register view"')
Called when the View was registered Can be used e.g. to connect signals. Here, the destroy signal is connected to close the application
entailment
def apply_new_outcome_name(self, path, new_name): """Apply the newly entered outcome name it is was changed :param str path: The path string of the renderer :param str new_name: Newly entered outcome name """ # Don't do anything if outcome name didn't change if new_name == self.list_store[path][self.NAME_STORAGE_ID]: return outcome = self.list_store[path][self.CORE_STORAGE_ID] try: outcome.name = new_name logger.debug("Outcome name changed to '{0}'".format(outcome.name)) except (ValueError, TypeError) as e: logger.warning("The name of the outcome could not be changed: {0}".format(e)) self.list_store[path][self.NAME_STORAGE_ID] = outcome.name
Apply the newly entered outcome name it is was changed :param str path: The path string of the renderer :param str new_name: Newly entered outcome name
entailment
def on_to_state_edited(self, renderer, path, new_state_identifier): """Connects the outcome with a transition to the newly set state :param Gtk.CellRendererText renderer: The cell renderer that was edited :param str path: The path string of the renderer :param str new_state_identifier: An identifier for the new state that was selected """ def do_self_transition_check(t_id, new_state_identifier): # add self transition meta data if 'self' in new_state_identifier.split('.'): insert_self_transition_meta_data(self.model, t_id, 'outcomes_widget', combined_action=True) outcome_id = self.list_store[path][self.ID_STORAGE_ID] if outcome_id in self.dict_to_other_state or outcome_id in self.dict_to_other_outcome: transition_parent_state = self.model.parent.state if outcome_id in self.dict_to_other_state: t_id = self.dict_to_other_state[outcome_id][2] else: t_id = self.dict_to_other_outcome[outcome_id][2] if new_state_identifier is not None: to_state_id = new_state_identifier.split('.')[1] if not transition_parent_state.transitions[t_id].to_state == to_state_id: try: transition_parent_state.transitions[t_id].modify_target(to_state=to_state_id) do_self_transition_check(t_id, new_state_identifier) except ValueError as e: logger.warning("The target of transition couldn't be modified: {0}".format(e)) else: try: transition_parent_state.remove_transition(t_id) except AttributeError as e: logger.warning("The transition couldn't be removed: {0}".format(e)) else: # there is no transition till now if new_state_identifier is not None and not self.model.state.is_root_state: transition_parent_state = self.model.parent.state to_state_id = new_state_identifier.split('.')[1] try: t_id = transition_parent_state.add_transition(from_state_id=self.model.state.state_id, from_outcome=outcome_id, to_state_id=to_state_id, to_outcome=None, transition_id=None) do_self_transition_check(t_id, new_state_identifier) except (ValueError, TypeError) as e: logger.warning("The transition couldn't be added: 
{0}".format(e)) return else: logger.debug("outcome-editor got None in to_state-combo-change no transition is added")
Connects the outcome with a transition to the newly set state :param Gtk.CellRendererText renderer: The cell renderer that was edited :param str path: The path string of the renderer :param str new_state_identifier: An identifier for the new state that was selected
entailment
def on_to_outcome_edited(self, renderer, path, new_outcome_identifier): """Connects the outcome with a transition to the newly set outcome :param Gtk.CellRendererText renderer: The cell renderer that was edited :param str path: The path string of the renderer :param str new_outcome_identifier: An identifier for the new outcome that was selected """ if self.model.parent is None: return outcome_id = self.list_store[path][self.ID_STORAGE_ID] transition_parent_state = self.model.parent.state if outcome_id in self.dict_to_other_state or outcome_id in self.dict_to_other_outcome: if outcome_id in self.dict_to_other_state: t_id = self.dict_to_other_state[outcome_id][2] else: t_id = self.dict_to_other_outcome[outcome_id][2] if new_outcome_identifier is not None: new_to_outcome_id = int(new_outcome_identifier.split('.')[2]) if not transition_parent_state.transitions[t_id].to_outcome == new_to_outcome_id: to_state_id = self.model.parent.state.state_id try: transition_parent_state.transitions[t_id].modify_target(to_state=to_state_id, to_outcome=new_to_outcome_id) except ValueError as e: logger.warning("The target of transition couldn't be modified: {0}".format(e)) else: transition_parent_state.remove_transition(t_id) else: # there is no transition till now if new_outcome_identifier is not None: to_outcome = int(new_outcome_identifier.split('.')[2]) try: self.model.parent.state.add_transition(from_state_id=self.model.state.state_id, from_outcome=outcome_id, to_state_id=self.model.parent.state.state_id, to_outcome=to_outcome, transition_id=None) except (ValueError, TypeError) as e: logger.warning("The transition couldn't be added: {0}".format(e)) else: logger.debug("outcome-editor got None in to_outcome-combo-change no transition is added")
Connects the outcome with a transition to the newly set outcome :param Gtk.CellRendererText renderer: The cell renderer that was edited :param str path: The path string of the renderer :param str new_outcome_identifier: An identifier for the new outcome that was selected
entailment
def remove_core_element(self, model): """Remove respective core element of handed outcome model :param OutcomeModel model: Outcome model which core element should be removed :return: """ assert model.outcome.parent is self.model.state gui_helper_state_machine.delete_core_element_of_model(model)
Remove respective core element of handed outcome model :param OutcomeModel model: Outcome model which core element should be removed :return:
entailment
def register_view(self, view): """Called when the View was registered Can be used e.g. to connect signals. Here, the destroy signal is connected to close the application """ super(StateOutcomesEditorController, self).register_view(view) if isinstance(view, StateOutcomesEditorView): self.connect_signal(view['add_button'], "clicked", self.oc_list_ctrl.on_add) self.connect_signal(view['remove_button'], "clicked", self.oc_list_ctrl.on_remove) if isinstance(self.model.state, LibraryState) or self.model.state.get_next_upper_library_root_state(): view['add_button'].set_sensitive(False) view['remove_button'].set_sensitive(False)
Called when the View was registered Can be used e.g. to connect signals. Here, the destroy signal is connected to close the application
entailment
def paste_action_callback(self, *event): """Callback method for paste action""" if react_to_event(self.view, self.oc_list_ctrl.tree_view, event) and self.oc_list_ctrl.active_entry_widget is None: global_clipboard.paste(self.model, limited=['outcomes']) return True
Callback method for paste action
entailment
def bring_tab_to_the_top(self, tab_label): """Find tab with label tab_label in list of notebook's and set it to the current page. :param tab_label: String containing the label of the tab to be focused """ page = self.page_dict[tab_label] for notebook in self.notebook_names: page_num = self[notebook].page_num(page) if not page_num == -1: self[notebook].set_current_page(page_num) break
Find tab with label tab_label in list of notebook's and set it to the current page. :param tab_label: String containing the label of the tab to be focused
entailment
def on_config_value_changed(self, config_m, prop_name, info): """Callback when a config value has been changed :param ConfigModel config_m: The config model that has been changed :param str prop_name: Should always be 'config' :param dict info: Information e.g. about the changed config key """ config_key = info['args'][1] if "LOGGING" in config_key: self.update_log_button_state()
Callback when a config value has been changed :param ConfigModel config_m: The config model that has been changed :param str prop_name: Should always be 'config' :param dict info: Information e.g. about the changed config key
entailment
def compare_variables(tree_model, iter1, iter2, user_data=None): """Triggered upon updating the list of global variables Helper method to sort global variables alphabetically. :param tree_model: Tree model implementing the Gtk.TreeSortable interface. :param iter1: Points at a row. :param iter2: Points at a row. """ path1 = tree_model.get_path(iter1)[0] path2 = tree_model.get_path(iter2)[0] # get key of first variable name1 = tree_model[path1][0] # get key of second variable name2 = tree_model[path2][0] name1_as_bits = ' '.join(format(ord(x), 'b') for x in name1) name2_as_bits = ' '.join(format(ord(x), 'b') for x in name2) if name1_as_bits == name2_as_bits: return 0 elif name1_as_bits > name2_as_bits: return 1 else: return -1
Triggered upon updating the list of global variables Helper method to sort global variables alphabetically. :param tree_model: Tree model implementing the Gtk.TreeSortable interface. :param iter1: Points at a row. :param iter2: Points at a row.
entailment
def has_dirty_state_machine(self): """Checks if one of the registered sm has the marked_dirty flag set to True (i.e. the sm was recently modified, without being saved) :return: True if contains state machine that is marked dirty, False otherwise. """ for sm in self.state_machines.values(): if sm.marked_dirty: return True return False
Checks if one of the registered sm has the marked_dirty flag set to True (i.e. the sm was recently modified, without being saved) :return: True if contains state machine that is marked dirty, False otherwise.
entailment
def reset_dirty_flags(self): """Set all marked_dirty flags of the state machine to false.""" for sm_id, sm in self.state_machines.items(): sm.marked_dirty = False
Set all marked_dirty flags of the state machine to false.
entailment
def add_state_machine(self, state_machine): """Add a state machine to the list of managed state machines. If there is no active state machine set yet, then set as active state machine. :param state_machine: State Machine Object :raises exceptions.AttributeError: if the passed state machine was already added of is of a wrong type """ if not isinstance(state_machine, StateMachine): raise AttributeError("State machine must be of type StateMachine") if state_machine.file_system_path is not None: if self.is_state_machine_open(state_machine.file_system_path): raise AttributeError("The state machine is already open {0}".format(state_machine.file_system_path)) logger.debug("Add new state machine with id {0}".format(state_machine.state_machine_id)) self._state_machines[state_machine.state_machine_id] = state_machine return state_machine.state_machine_id
Add a state machine to the list of managed state machines. If there is no active state machine set yet, then set as active state machine. :param state_machine: State Machine Object :raises exceptions.AttributeError: if the passed state machine was already added of is of a wrong type
entailment
def remove_state_machine(self, state_machine_id): """Remove the state machine for a specified state machine id from the list of registered state machines. :param state_machine_id: the id of the state machine to be removed """ import rafcon.core.singleton as core_singletons removed_state_machine = None if state_machine_id in self._state_machines: logger.debug("Remove state machine with id {0}".format(state_machine_id)) removed_state_machine = self._state_machines.pop(state_machine_id) else: logger.error("There is no state_machine with state_machine_id: %s" % state_machine_id) return removed_state_machine # destroy execution history removed_state_machine.destroy_execution_histories() return removed_state_machine
Remove the state machine for a specified state machine id from the list of registered state machines. :param state_machine_id: the id of the state machine to be removed
entailment
def get_active_state_machine(self): """Return a reference to the active state-machine """ if self._active_state_machine_id in self._state_machines: return self._state_machines[self._active_state_machine_id] else: return None
Return a reference to the active state-machine
entailment
def get_open_state_machine_of_file_system_path(self, file_system_path): """Return a reference to the state machine with respective path if open """ for sm in self.state_machines.values(): if sm.file_system_path == file_system_path: return sm
Return a reference to the state machine with respective path if open
entailment
def reset_title(self, title, notebook_identifier): """Triggered whenever a notebook tab is switched in the left bar. Resets the title of the un-docked window to the format 'upper_open_tab / lower_open_tab' :param title: The name of the newly selected tab :param notebook: string taking one of two values 'upper' or 'lower' indicating which notebook was changed """ current_title = self.get_top_widget().get_title() upper_title = current_title.split('/')[0].strip() lower_title = current_title.split('/')[1].strip() if notebook_identifier == 'upper': new_title = title + ' / ' + lower_title else: new_title = upper_title + ' / ' + title self['headerbar'].props.title = new_title
Triggered whenever a notebook tab is switched in the left bar. Resets the title of the un-docked window to the format 'upper_open_tab / lower_open_tab' :param title: The name of the newly selected tab :param notebook: string taking one of two values 'upper' or 'lower' indicating which notebook was changed
entailment
def negative_check_for_model_in_expected_future_models(target_state_m, model, msg, delete=True, with_logger=None): """ Checks if the expected future models list/set includes still a specific model Return False if the handed model is still in and also creates a warning message as feedback. :param StateModel target_state_m: The state model which expected_future_models attribute should be checked :param Model model: Model to check for. :param str msg: Message for the logger if a model is still in. :param bool delete: Flag to delete respective model from list/set. :param with_logger: A optional logger to use in case of logging messages :rtype: bool :return: True if empty and False if still model in set/list """ if with_logger is None: with_logger = logger # check that the model in the list expected_future_model was used if model in target_state_m.expected_future_models: with_logger.warning("{0} -> still in is: {1} Please inform the developer how to reproduce this." "".format(msg, model)) if delete: # TODO think about to destroy this models target_state_m.expected_future_models.remove(model) return False return True
Checks if the expected future models list/set includes still a specific model Return False if the handed model is still in and also creates a warning message as feedback. :param StateModel target_state_m: The state model which expected_future_models attribute should be checked :param Model model: Model to check for. :param str msg: Message for the logger if a model is still in. :param bool delete: Flag to delete respective model from list/set. :param with_logger: A optional logger to use in case of logging messages :rtype: bool :return: True if empty and False if still model in set/list
entailment
def check_expected_future_model_list_is_empty(target_state_m, msg, delete=True, with_logger=None): """ Checks if the expected future models list/set is empty Return False if there are still elements in and also creates a warning message as feedback. :param StateModel target_state_m: The state model which expected_future_models attribute should be checked :param str msg: Message for the logger if a model is still in. :param bool delete: Flag to delete respective model from list/set. :param with_logger: A optional logger to use in case of logging messages :rtype: bool :return: True if empty and False if still model in set/list """ if with_logger is None: with_logger = logger # check that the model in the list expected_future_model was used if target_state_m.expected_future_models: with_logger.warning("{0} -> still in are: {1} Please inform the developer how to reproduce this." "".format(msg, target_state_m.expected_future_models)) if delete: # TODO think about to destroy this models target_state_m.expected_future_models.clear() return False return True
Checks if the expected future models list/set is empty Return False if there are still elements in and also creates a warning message as feedback. :param StateModel target_state_m: The state model which expected_future_models attribute should be checked :param str msg: Message for the logger if a model is still in. :param bool delete: Flag to delete respective model from list/set. :param with_logger: A optional logger to use in case of logging messages :rtype: bool :return: True if empty and False if still model in set/list
entailment
def update_models_recursively(state_m, expected=True): """ If a state model is reused the model depth maybe is to low. Therefore this method checks if all library state models are created with reliable depth :param bool expected: Define newly generated library models as expected or triggers logger warnings if False """ assert isinstance(state_m, AbstractStateModel) if isinstance(state_m, LibraryStateModel): if not state_m.state_copy_initialized: if not expected: logger.warning("State {0} generates unexpected missing state copy models.".format(state_m)) state_m.recursive_generate_models(load_meta_data=False) import rafcon.gui.helpers.meta_data as gui_helper_meta_data gui_helper_meta_data.scale_library_content(state_m) if isinstance(state_m, ContainerStateModel): for child_state_m in state_m.states.values(): update_models_recursively(child_state_m, expected)
If a state model is reused the model depth maybe is to low. Therefore this method checks if all library state models are created with reliable depth :param bool expected: Define newly generated library models as expected or triggers logger warnings if False
entailment
def add_state(container_state_m, state_type): """Add a state to a container state Adds a state of type state_type to the given container_state :param rafcon.gui.models.container_state.ContainerState container_state_m: A model of a container state to add the new state to :param rafcon.core.enums.StateType state_type: The type of state that should be added :return: True if successful, False else """ if container_state_m is None: logger.error("Cannot add a state without a parent.") return False if not isinstance(container_state_m, StateModel) or \ (isinstance(container_state_m, StateModel) and not isinstance(container_state_m, ContainerStateModel)): logger.error("Parent state must be a container, for example a Hierarchy State." + str(container_state_m)) return False state_class = state_type_to_state_class_dict.get(state_type, None) if state_class is None: logger.error("Cannot create state of type {0}".format(state_type)) return False new_state = state_class() from rafcon.gui.models.abstract_state import get_state_model_class_for_state new_state_m = get_state_model_class_for_state(new_state)(new_state) gui_helper_meta_data.put_default_meta_on_state_m(new_state_m, container_state_m) container_state_m.expected_future_models.add(new_state_m) container_state_m.state.add_state(new_state) return True
Add a state to a container state Adds a state of type state_type to the given container_state :param rafcon.gui.models.container_state.ContainerState container_state_m: A model of a container state to add the new state to :param rafcon.core.enums.StateType state_type: The type of state that should be added :return: True if successful, False else
entailment
def create_new_state_from_state_with_type(source_state, target_state_class): """The function duplicates/transforms a state to a new state type. If the source state type and the new state type both are ContainerStates the new state will have not transitions to force the user to explicitly re-order the logical flow according the paradigm of the new state type. :param source_state: previous/original state that is to transform into a new state type (target_state_class) :param target_state_class: the final state class type :return: """ current_state_is_container = isinstance(source_state, ContainerState) new_state_is_container = issubclass(target_state_class, ContainerState) if current_state_is_container and new_state_is_container: # TRANSFORM from CONTAINER- TO CONTAINER-STATE # by default all transitions are left out if the new and original state are container states # -> because switch from Barrier, Preemptive or Hierarchy has always different rules state_transitions = {} state_start_state_id = None logger.info("Type change from %s to %s" % (type(source_state).__name__, target_state_class.__name__)) # decider state is removed because it is unique for BarrierConcurrencyState if isinstance(source_state, BarrierConcurrencyState): source_state.remove_state(UNIQUE_DECIDER_STATE_ID, force=True) assert UNIQUE_DECIDER_STATE_ID not in source_state.states # separate state-elements from source state data_flows = dict(source_state.data_flows) source_state.data_flows = {} input_data_ports = dict(source_state.input_data_ports) output_data_ports = dict(source_state.output_data_ports) scoped_variables = dict(source_state.scoped_variables) income = source_state.income outcomes = dict(source_state.outcomes) source_state.input_data_ports = {} source_state.output_data_ports = {} source_state.scoped_variables = {} source_state.transitions = {} # before remove of outcomes related transitions should be gone source_state.income = Income() source_state.outcomes = {} states = 
dict(source_state.states) # TODO check why next line can not be performed # source_state.states = {} new_state = target_state_class(name=source_state.name, state_id=source_state.state_id, input_data_ports=input_data_ports, output_data_ports=output_data_ports, scoped_variables=scoped_variables, income=income, outcomes=outcomes, transitions=state_transitions, data_flows=data_flows, states=states, start_state_id=state_start_state_id) else: # TRANSFORM from EXECUTION- TO CONTAINER-STATE or FROM CONTAINER- TO EXECUTION-STATE # in case the new state is an execution state remove of child states (for observable notifications) if current_state_is_container and issubclass(target_state_class, ExecutionState): if isinstance(source_state, BarrierConcurrencyState): source_state.remove_state(UNIQUE_DECIDER_STATE_ID, force=True) assert UNIQUE_DECIDER_STATE_ID not in source_state.states for state_id in list(source_state.states.keys()): source_state.remove_state(state_id) # separate state-elements from source state input_data_ports = dict(source_state.input_data_ports) output_data_ports = dict(source_state.output_data_ports) income = source_state.income outcomes = dict(source_state.outcomes) source_state.input_data_ports = {} source_state.output_data_ports = {} source_state.income = Income() source_state.outcomes = {} new_state = target_state_class(name=source_state.name, state_id=source_state.state_id, input_data_ports=input_data_ports, output_data_ports=output_data_ports, income=income, outcomes=outcomes) if source_state.description is not None and len(source_state.description) > 0: new_state.description = source_state.description new_state.semantic_data = Vividict(source_state.semantic_data) return new_state
The function duplicates/transforms a state to a new state type. If the source state type and the new state type both are ContainerStates the new state will have not transitions to force the user to explicitly re-order the logical flow according the paradigm of the new state type. :param source_state: previous/original state that is to transform into a new state type (target_state_class) :param target_state_class: the final state class type :return:
entailment
def extract_child_models_of_state(state_m, new_state_class): """Retrieve child models of state model The function extracts the child state and state element models of the given state model into a dict. It only extracts those properties that are required for a state of type `new_state_class`. Transitions are always left out. :param state_m: state model of which children are to be extracted from :param new_state_class: The type of the new class :return: """ # check if root state and which type of state assert isinstance(state_m, StateModel) assert issubclass(new_state_class, State) orig_state = state_m.state # only here to get the input parameter of the Core-function current_state_is_container = isinstance(orig_state, ContainerState) new_state_is_container = issubclass(new_state_class, ContainerState) # define which model references to hold for new state required_model_properties = ['input_data_ports', 'output_data_ports', 'outcomes', 'income'] obsolete_model_properties = [] if current_state_is_container and new_state_is_container: # hold some additional references # transition are removed when changing the state type, thus do not copy them required_model_properties.extend(['states', 'data_flows', 'scoped_variables']) obsolete_model_properties.append('transitions') elif current_state_is_container: obsolete_model_properties.extend(['states', 'transitions', 'data_flows', 'scoped_variables']) def get_element_list(state_m, prop_name): if prop_name == 'income': return [state_m.income] wrapper = getattr(state_m, prop_name) # ._obj is needed as gaphas wraps observable lists and dicts into a gaphas.support.ObsWrapper list_or_dict = wrapper._obj if isinstance(list_or_dict, list): return list_or_dict[:] # copy list return list(list_or_dict.values()) # dict required_child_models = {} for prop_name in required_model_properties: required_child_models[prop_name] = get_element_list(state_m, prop_name) obsolete_child_models = {} for prop_name in obsolete_model_properties: 
obsolete_child_models[prop_name] = get_element_list(state_m, prop_name) # Special handling of BarrierState, which includes the DeciderState that always becomes obsolete if isinstance(state_m, ContainerStateModel): decider_state_m = state_m.states.get(UNIQUE_DECIDER_STATE_ID, None) if decider_state_m: if new_state_is_container: required_child_models['states'].remove(decider_state_m) obsolete_child_models['states'] = [decider_state_m] return required_child_models, obsolete_child_models
Retrieve child models of state model The function extracts the child state and state element models of the given state model into a dict. It only extracts those properties that are required for a state of type `new_state_class`. Transitions are always left out. :param state_m: state model of which children are to be extracted from :param new_state_class: The type of the new class :return:
entailment
def create_state_model_for_state(new_state, meta, state_element_models): """Create a new state model with the defined properties A state model is created for a state of the type of new_state. All child models in state_element_models ( model list for port, connections and states) are added to the new model. :param StateModel new_state: The new state object with the correct type :param Vividict meta: Meta data for the state model :param list state_element_models: All state element and child state models of the original state model :return: New state model for new_state with all childs of state_element_models """ from rafcon.gui.models.abstract_state import get_state_model_class_for_state state_m_class = get_state_model_class_for_state(new_state) new_state_m = state_m_class(new_state, meta=meta, load_meta_data=False, expected_future_models=state_element_models) error_msg = "New state has not re-used all handed expected future models." check_expected_future_model_list_is_empty(new_state_m, msg=error_msg) return new_state_m
Create a new state model with the defined properties A state model is created for a state of the type of new_state. All child models in state_element_models ( model list for port, connections and states) are added to the new model. :param StateModel new_state: The new state object with the correct type :param Vividict meta: Meta data for the state model :param list state_element_models: All state element and child state models of the original state model :return: New state model for new_state with all childs of state_element_models
entailment
def prepare_state_m_for_insert_as(state_m_to_insert, previous_state_size): """Prepares and scales the meta data to fit into actual size of the state.""" # TODO check how much code is duplicated or could be reused for library fit functionality meta data helper # TODO DO REFACTORING !!! and move maybe the hole method to meta data and rename it if isinstance(state_m_to_insert, AbstractStateModel) and \ not gui_helper_meta_data.model_has_empty_meta(state_m_to_insert): if isinstance(state_m_to_insert, ContainerStateModel): # print("TARGET1", state_m_to_insert.state.state_element_attrs) models_dict = {'state': state_m_to_insert} for state_element_key in state_m_to_insert.state.state_element_attrs: state_element_list = getattr(state_m_to_insert, state_element_key) # Some models are hold in a gtkmvc3.support.wrappers.ObsListWrapper, not a list if hasattr(state_element_list, 'keys'): state_element_list = state_element_list.values() models_dict[state_element_key] = {elem.core_element.core_element_id: elem for elem in state_element_list} resize_factor = gui_helper_meta_data.scale_meta_data_according_state(models_dict, as_template=True) gui_helper_meta_data.resize_income_of_state_m(state_m_to_insert, resize_factor) elif isinstance(state_m_to_insert, StateModel): # print("TARGET2", state_m_to_insert.state.state_element_attrs) if previous_state_size: current_size = state_m_to_insert.get_meta_data_editor()['size'] factor = gui_helper_meta_data.divide_two_vectors(current_size, previous_state_size) state_m_to_insert.set_meta_data_editor('size', previous_state_size) factor = (min(*factor), min(*factor)) gui_helper_meta_data.resize_state_meta(state_m_to_insert, factor) else: logger.debug("For insert as template of {0} no resize of state meta data is performed because " "the meta data has empty fields.".format(state_m_to_insert)) # library state is not resize because its ports became resized indirectly -> see was resized flag elif not isinstance(state_m_to_insert, LibraryStateModel): 
raise TypeError("For insert as template of {0} no resize of state meta data is performed because " "state model type is not ContainerStateModel or StateModel".format(state_m_to_insert)) else: logger.info("For insert as template of {0} no resize of state meta data is performed because the meta data has " "empty fields.".format(state_m_to_insert))
Prepares and scales the meta data to fit into actual size of the state.
entailment
def insert_state_as(target_state_m, state, as_template): """ Add a state into a target state In case the state to be insert is a LibraryState it can be chosen to be insert as template. :param rafcon.gui.models.container_state.ContainerStateModel target_state_m: State model of the target state :param rafcon.core.states.State state: State to be insert as template or not :param bool as_template: The flag determines if a handed state of type LibraryState is insert as template :return: """ if not isinstance(target_state_m, ContainerStateModel) or \ not isinstance(target_state_m.state, ContainerState): logger.error("States can only be inserted in container states") return False state_m = get_state_model_class_for_state(state)(state) if not as_template: gui_helper_meta_data.put_default_meta_on_state_m(state_m, target_state_m) # If inserted as template, we have to extract the state_copy and respective model else: assert isinstance(state, LibraryState) old_lib_state_m = state_m state_m = state_m.state_copy previous_state_size = state_m.get_meta_data_editor()['size'] gui_helper_meta_data.put_default_meta_on_state_m(state_m, target_state_m) # TODO check if the not as template case maybe has to be run with the prepare call prepare_state_m_for_insert_as(state_m, previous_state_size) old_lib_state_m.prepare_destruction(recursive=False) # explicit secure that there is no state_id conflict within target state child states while state_m.state.state_id in target_state_m.state.states: state_m.state.change_state_id() target_state_m.expected_future_models.add(state_m) target_state_m.state.add_state(state_m.state) # secure possible missing models to be generated update_models_recursively(state_m, expected=False)
Add a state into a target state In case the state to be insert is a LibraryState it can be chosen to be insert as template. :param rafcon.gui.models.container_state.ContainerStateModel target_state_m: State model of the target state :param rafcon.core.states.State state: State to be insert as template or not :param bool as_template: The flag determines if a handed state of type LibraryState is insert as template :return:
entailment
def substitute_state(target_state_m, state_m_to_insert, as_template=False): """ Substitutes the target state Both, the state to be replaced (the target state) and the state to be inserted (the new state) are passed via parameters. The new state adapts the size and position of the target state. State elements of the new state are resized but kepp their proportion. :param rafcon.gui.models.container_state.AbstractStateModel target_state_m: State Model of state to be substituted :param rafcon.gui.models.container_state.StateModel state_m_to_insert: State Model of state to be inserted :return: """ # print("substitute_state") state_to_insert = state_m_to_insert.state action_parent_m = target_state_m.parent old_state_m = target_state_m old_state = old_state_m.state state_id = old_state.state_id # BEFORE MODEL tmp_meta_data = {'transitions': {}, 'data_flows': {}, 'state': None} old_state_m = action_parent_m.states[state_id] # print("EMIT-BEFORE ON OLD_STATE ", state_id) old_state_m.action_signal.emit(ActionSignalMsg(action='substitute_state', origin='model', action_parent_m=action_parent_m, affected_models=[old_state_m, ], after=False, kwargs={'state_id': state_id, 'state': state_to_insert})) related_transitions, related_data_flows = action_parent_m.state.related_linkage_state(state_id) tmp_meta_data['state'] = old_state_m.meta # print("old state meta", old_state_m.meta) for t in related_transitions['external']['ingoing'] + related_transitions['external']['outgoing']: tmp_meta_data['transitions'][t.transition_id] = action_parent_m.get_transition_m(t.transition_id).meta for df in related_data_flows['external']['ingoing'] + related_data_flows['external']['outgoing']: tmp_meta_data['data_flows'][df.data_flow_id] = action_parent_m.get_data_flow_m(df.data_flow_id).meta action_parent_m.substitute_state.__func__.tmp_meta_data_storage = tmp_meta_data action_parent_m.substitute_state.__func__.old_state_m = old_state_m # put old state size and rel_pos onto new state 
previous_state_size = state_m_to_insert.get_meta_data_editor()['size'] state_m_to_insert.set_meta_data_editor('size', old_state_m.get_meta_data_editor()['size']) state_m_to_insert.set_meta_data_editor('rel_pos', old_state_m.get_meta_data_editor()['rel_pos']) # scale the meta data according new size prepare_state_m_for_insert_as(state_m_to_insert, previous_state_size) # CORE new_state = e = None # print("state to insert", state_to_insert) try: # if as_template: # TODO remove this work around if the models are loaded correctly # # the following enforce the creation of a new model (in needed depth) and transfer of meta data # import rafcon.gui.action # meta_dict = rafcon.gui.action.get_state_element_meta(state_m_to_insert) # new_state = action_parent_m.state.substitute_state(state_id, state_to_insert) # sm_m = action_parent_m.get_state_machine_m() # rafcon.gui.action.insert_state_meta_data(meta_dict, sm_m.get_state_model_by_path(new_state.get_path())) # else: action_parent_m.expected_future_models.add(state_m_to_insert) new_state = action_parent_m.state.substitute_state(state_id, state_to_insert) # assert new_state.state_id is state_id assert new_state is state_to_insert except Exception as e: logger.exception("State substitution failed") if new_state: # AFTER MODEL # print("AFTER MODEL", new_state) new_state_m = action_parent_m.states[new_state.state_id] update_models_recursively(state_m=new_state_m) tmp_meta_data = action_parent_m.substitute_state.__func__.tmp_meta_data_storage old_state_m = action_parent_m.substitute_state.__func__.old_state_m changed_models = [] new_state_m.meta = tmp_meta_data['state'] changed_models.append(new_state_m) for t_id, t_meta in tmp_meta_data['transitions'].items(): if action_parent_m.get_transition_m(t_id) is not None: action_parent_m.get_transition_m(t_id).meta = t_meta changed_models.append(action_parent_m.get_transition_m(t_id)) elif t_id in action_parent_m.state.substitute_state.__func__.re_create_io_going_t_ids: 
logger.warning("Transition model with id {0} to set meta data could not be found.".format(t_id)) for df_id, df_meta in tmp_meta_data['data_flows'].items(): if action_parent_m.get_data_flow_m(df_id) is not None: action_parent_m.get_data_flow_m(df_id).meta = df_meta changed_models.append(action_parent_m.get_data_flow_m(df_id)) elif df_id in action_parent_m.state.substitute_state.__func__.re_create_io_going_df_ids: logger.warning("Data flow model with id {0} to set meta data could not be found.".format(df_id)) msg = ActionSignalMsg(action='substitute_state', origin='model', action_parent_m=action_parent_m, affected_models=changed_models, after=True, result=e) # print("EMIT-AFTER OLDSTATE", msg) old_state_m.action_signal.emit(msg) del action_parent_m.substitute_state.__func__.tmp_meta_data_storage del action_parent_m.substitute_state.__func__.old_state_m
Substitutes the target state Both, the state to be replaced (the target state) and the state to be inserted (the new state) are passed via parameters. The new state adapts the size and position of the target state. State elements of the new state are resized but kepp their proportion. :param rafcon.gui.models.container_state.AbstractStateModel target_state_m: State Model of state to be substituted :param rafcon.gui.models.container_state.StateModel state_m_to_insert: State Model of state to be inserted :return:
entailment
def substitute_state_as(target_state_m, state, as_template, keep_name=False): """ Substitute a target state with a handed state The method generates a state model for the state to be inserted and use function substitute_state to finally substitute the state. In case the state to be inserted is a LibraryState it can be chosen to be inserted as template. It can be chosen that the inserted state keeps the name of the target state. :param rafcon.gui.models.state.AbstractStateModel target_state_m: State model of the state to be substituted :param rafcon.core.states.State state: State to be inserted :param bool as_template: The flag determines if a handed state of type LibraryState is insert as template :param bool keep_name: The flag to keep the name of the target state :return: """ state_m = get_state_model_class_for_state(state)(state) # If inserted as template, we have to extract the state_copy and model otherwise keep original name if as_template: assert isinstance(state_m, LibraryStateModel) state_m = state_m.state_copy state_m.state.parent = None if keep_name: state_m.state.name = target_state_m.state.name assert target_state_m.parent.states[target_state_m.state.state_id] is target_state_m substitute_state(target_state_m, state_m, as_template)
Substitute a target state with a handed state The method generates a state model for the state to be inserted and use function substitute_state to finally substitute the state. In case the state to be inserted is a LibraryState it can be chosen to be inserted as template. It can be chosen that the inserted state keeps the name of the target state. :param rafcon.gui.models.state.AbstractStateModel target_state_m: State model of the state to be substituted :param rafcon.core.states.State state: State to be inserted :param bool as_template: The flag determines if a handed state of type LibraryState is insert as template :param bool keep_name: The flag to keep the name of the target state :return:
entailment
def orify(e, changed_callback): """Add another event to the multi_event :param e: the event to be added to the multi_event :param changed_callback: a method to call if the event status changes, this method has access to the multi_event :return: """ if not hasattr(e, "callbacks"): # Event has not been orified yet e._set = e.set e._clear = e.clear e.set = lambda: or_set(e) e.clear = lambda: or_clear(e) e.callbacks = list() # Keep track of one callback per multi event e.callbacks.append(changed_callback)
Add another event to the multi_event :param e: the event to be added to the multi_event :param changed_callback: a method to call if the event status changes, this method has access to the multi_event :return:
entailment
def create(*events): """Creates a new multi_event The multi_event listens to all events passed in the "events" parameter. :param events: a list of threading.Events :return: The multi_event :rtype: threading.Event """ or_event = threading.Event() def changed(): if any([event.is_set() for event in events]): or_event.set() else: or_event.clear() for e in events: orify(e, changed) changed() return or_event
Creates a new multi_event The multi_event listens to all events passed in the "events" parameter. :param events: a list of threading.Events :return: The multi_event :rtype: threading.Event
entailment
def model_changed(self, model, prop_name, info): """ React to configuration changes Update internal hold enable state, propagates it to view and refresh the text buffer.""" current_enables = self._get_config_enables() if not self._enables == current_enables: # check if filtered buffer update needed filtered_buffer_update_needed = True if all(self._enables[key] == current_enables[key] for key in ['VERBOSE', 'DEBUG', 'INFO', 'WARNING', 'ERROR']): follow_mode_key = 'CONSOLE_FOLLOW_LOGGING' only_follow_mode_changed = self._enables[follow_mode_key] != current_enables[follow_mode_key] filtered_buffer_update_needed = not only_follow_mode_changed self._enables = current_enables self.view.set_enables(self._enables) if filtered_buffer_update_needed: self.update_filtered_buffer() else: self.view.scroll_to_cursor_onscreen()
React to configuration changes Update internal hold enable state, propagates it to view and refresh the text buffer.
entailment
def _handle_double_click(self, event): """ Double click with left mouse button focuses the state and toggles the collapse status""" if event.get_button()[1] == 1: # Left mouse button path_info = self.view.tree_view.get_path_at_pos(int(event.x), int(event.y)) if path_info: # Valid entry was clicked on path = path_info[0] item_iter = self.tree_store.get_iter(path) # Toggle collapse status if applicable for this kind of state if self.view.tree_view.row_expanded(path): self.view.tree_view.collapse_row(path) else: if self.tree_store.iter_has_child(item_iter): self.view.tree_view.expand_to_path(path)
Double click with left mouse button focuses the state and toggles the collapse status
entailment
def create_path(path): """Creates a absolute path in the file system. :param path: The path to be created """ import os if not os.path.exists(path): os.makedirs(path)
Creates a absolute path in the file system. :param path: The path to be created
entailment
def get_md5_file_hash(filename): """Calculates the MD5 hash of a file :param str filename: The filename (including the path) of the file :return: Md5 hash of the file :rtype: str """ import hashlib BLOCKSIZE = 65536 hasher = hashlib.md5() with open(filename, 'rb') as afile: buf = afile.read(BLOCKSIZE) while len(buf) > 0: hasher.update(buf) buf = afile.read(BLOCKSIZE) return hasher.hexdigest()
Calculates the MD5 hash of a file :param str filename: The filename (including the path) of the file :return: Md5 hash of the file :rtype: str
entailment
def file_needs_update(target_file, source_file): """Checks if target_file is not existing or differing from source_file :param target_file: File target for a copy action :param source_file: File to be copied :return: True, if target_file not existing or differing from source_file, else False :rtype: False """ if not os.path.isfile(target_file) or get_md5_file_hash(target_file) != get_md5_file_hash(source_file): return True return False
Checks if target_file is not existing or differing from source_file :param target_file: File target for a copy action :param source_file: File to be copied :return: True, if target_file not existing or differing from source_file, else False :rtype: False
entailment
def copy_file_if_update_required(source_file, target_file): """Copies source_file to target_file if latter one in not existing or outdated :param source_file: Source file of the copy operation :param target_file: Target file of the copy operation """ if file_needs_update(target_file, source_file): shutil.copy(source_file, target_file)
Copies source_file to target_file if latter one in not existing or outdated :param source_file: Source file of the copy operation :param target_file: Target file of the copy operation
entailment
def read_file(file_path, filename=None): """ Open file by path and optional filename If no file name is given the path is interpreted as direct path to the file to be read. If there is no file at location the return value will be None to offer a option for case handling. :param str file_path: Path string. :param str filename: File name of the file to be read. :return: None or str """ file_path = os.path.realpath(file_path) if filename: file_path = os.path.join(file_path, filename) file_content = None if os.path.isfile(file_path): with open(file_path, 'r') as file_pointer: file_content = file_pointer.read() return file_content
Open file by path and optional filename If no file name is given the path is interpreted as direct path to the file to be read. If there is no file at location the return value will be None to offer a option for case handling. :param str file_path: Path string. :param str filename: File name of the file to be read. :return: None or str
entailment
def clean_file_system_paths_from_not_existing_paths(file_system_paths): """Cleans list of paths from elements that do not exist If a path is no more valid/existing, it is removed from the list. :param list[str] file_system_paths: list of file system paths to be checked for existing """ paths_to_delete = [] for path in file_system_paths: if not os.path.exists(path): paths_to_delete.append(path) for path in paths_to_delete: file_system_paths.remove(path)
Cleans list of paths from elements that do not exist If a path is no more valid/existing, it is removed from the list. :param list[str] file_system_paths: list of file system paths to be checked for existing
entailment
def model_changed(self, model, prop_name, info): """This method notifies the model lists and the parent state about changes The method is called each time, the model is changed. This happens, when the state itself changes or one of its children (outcomes, ports) changes. Changes of the children cannot be observed directly, therefore children notify their parent about their changes by calling this method. This method then checks, what has been changed by looking at the method that caused the change. In the following, it notifies the list in which the change happened about the change. E.g. one input data port changes its name. The model of the port observes itself and notifies the parent ( i.e. the state model) about the change by calling this method with the information about the change. This method recognizes that the method "modify_input_data_port" caused the change and therefore triggers a notify on the list if input data port models. "_notify_method_before" is used as trigger method when the changing function is entered and "_notify_method_after" is used when the changing function returns. This changing function in the example would be "modify_input_data_port". :param model: The model that was changed :param prop_name: The property that was changed :param info: Information about the change (e.g. 
the name of the changing function) """ # If this model has been changed (and not one of its child states), then we have to update all child models # This must be done before notifying anybody else, because other may relay on the updated models if 'after' in info and self.state == info['instance']: self.update_models(model, prop_name, info) # mark the state machine this state belongs to as dirty no_save_change = info["method_name"] in BY_EXECUTION_TRIGGERED_OBSERVABLE_STATE_METHODS if isinstance(model, AbstractStateModel) and prop_name == "state" and no_save_change: # do not track the active flag when marking the sm dirty pass else: # if the state_execution state is changed the sm must not be marked dirty if "after" in info and info["method_name"] not in BY_EXECUTION_TRIGGERED_OBSERVABLE_STATE_METHODS: self._mark_state_machine_as_dirty() changed_list = None cause = None if isinstance(model, DataPortModel) and model.parent is self: if model in self.input_data_ports: changed_list = self.input_data_ports cause = "input_data_port_change" elif model in self.output_data_ports: changed_list = self.output_data_ports cause = "output_data_port_change" elif isinstance(info.instance, Income) and self is model.parent: changed_list = self.income cause = "income_change" elif isinstance(info.instance, Outcome) and self is model.parent: changed_list = self.outcomes cause = "outcome_change" if not (cause is None or cause is "income_change" or changed_list is None): if 'before' in info: changed_list._notify_method_before(self.state, cause, (self.state,), info) elif 'after' in info: changed_list._notify_method_after(self.state, cause, None, (self.state,), info) # Notifies parent state super(StateModel, self).model_changed(model, prop_name, info)
This method notifies the model lists and the parent state about changes The method is called each time, the model is changed. This happens, when the state itself changes or one of its children (outcomes, ports) changes. Changes of the children cannot be observed directly, therefore children notify their parent about their changes by calling this method. This method then checks, what has been changed by looking at the method that caused the change. In the following, it notifies the list in which the change happened about the change. E.g. one input data port changes its name. The model of the port observes itself and notifies the parent ( i.e. the state model) about the change by calling this method with the information about the change. This method recognizes that the method "modify_input_data_port" caused the change and therefore triggers a notify on the list if input data port models. "_notify_method_before" is used as trigger method when the changing function is entered and "_notify_method_after" is used when the changing function returns. This changing function in the example would be "modify_input_data_port". :param model: The model that was changed :param prop_name: The property that was changed :param info: Information about the change (e.g. the name of the changing function)
entailment
def update_models(self, model, name, info): """ This method is always triggered when the core state changes It keeps the following models/model-lists consistent: input-data-port models output-data-port models outcome models """ if info.method_name in ["add_input_data_port", "remove_input_data_port", "input_data_ports"]: (model_list, data_list, model_name, model_class, model_key) = self.get_model_info("input_data_port") elif info.method_name in ["add_output_data_port", "remove_output_data_port", "output_data_ports"]: (model_list, data_list, model_name, model_class, model_key) = self.get_model_info("output_data_port") elif info.method_name in ["add_income", "remove_income", "income"]: (model_list, data_list, model_name, model_class, model_key) = self.get_model_info("income") elif info.method_name in ["add_outcome", "remove_outcome", "outcomes"]: (model_list, data_list, model_name, model_class, model_key) = self.get_model_info("outcome") else: return if "add" in info.method_name: self.add_missing_model(model_list, data_list, model_name, model_class, model_key) elif "remove" in info.method_name: destroy = info.kwargs.get('destroy', True) self.remove_specific_model(model_list, info.result, model_key, destroy) elif info.method_name in ["input_data_ports", "output_data_ports", "income", "outcomes"]: self.re_initiate_model_list(model_list, data_list, model_name, model_class, model_key)
This method is always triggered when the core state changes It keeps the following models/model-lists consistent: input-data-port models output-data-port models outcome models
entailment
def _load_input_data_port_models(self): """Reloads the input data port models directly from the the state """ self.input_data_ports = [] for input_data_port in self.state.input_data_ports.values(): self._add_model(self.input_data_ports, input_data_port, DataPortModel)
Reloads the input data port models directly from the the state
entailment
def _load_output_data_port_models(self): """Reloads the output data port models directly from the the state """ self.output_data_ports = [] for output_data_port in self.state.output_data_ports.values(): self._add_model(self.output_data_ports, output_data_port, DataPortModel)
Reloads the output data port models directly from the the state
entailment
def _load_income_model(self): """ Create income model from core income """ self._add_model(self.income, self.state.income, IncomeModel)
Create income model from core income
entailment
def _load_outcome_models(self): """ Create outcome models from core outcomes """ self.outcomes = [] for outcome in self.state.outcomes.values(): self._add_model(self.outcomes, outcome, OutcomeModel)
Create outcome models from core outcomes
entailment
def re_initiate_model_list(self, model_list_or_dict, core_objects_dict, model_name, model_class, model_key): """Recreate model list The method re-initiate a handed list or dictionary of models with the new dictionary of core-objects. :param model_list_or_dict: could be a list or dictionary of one model type :param core_objects_dict: new dictionary of one type of core-elements (rafcon.core) :param model_name: prop_name for the core-element hold by the model, this core-element is covered by the model :param model_class: model-class of the elements that should be insert :param model_key: if model_list_or_dict is a dictionary the key is the id of the respective element (e.g. 'state_id') :return: """ if model_name == "income": if self.income.income != self.state.income: self._add_model(self.income, self.state.income, IncomeModel) return for _ in range(len(model_list_or_dict)): self.remove_additional_model(model_list_or_dict, core_objects_dict, model_name, model_key) if core_objects_dict: for _ in core_objects_dict: self.add_missing_model(model_list_or_dict, core_objects_dict, model_name, model_class, model_key)
Recreate model list The method re-initiate a handed list or dictionary of models with the new dictionary of core-objects. :param model_list_or_dict: could be a list or dictionary of one model type :param core_objects_dict: new dictionary of one type of core-elements (rafcon.core) :param model_name: prop_name for the core-element hold by the model, this core-element is covered by the model :param model_class: model-class of the elements that should be insert :param model_key: if model_list_or_dict is a dictionary the key is the id of the respective element (e.g. 'state_id') :return:
entailment
def _add_model(self, model_list_or_dict, core_element, model_class, model_key=None, load_meta_data=True):
    """Adds one model for a given core element.

    The method will add a model for a given core object and checks if there is a corresponding model object
    in the future expected model list. The method does not check if an object with corresponding model has
    already been inserted.

    :param model_list_or_dict: could be a list or dictionary of one model type
    :param core_element: the core element to add a model for, can be state or state element
    :param model_class: model-class of the elements that should be inserted
    :param model_key: if model_list_or_dict is a dictionary the key is the id of the respective element
        (e.g. 'state_id')
    :param load_meta_data: specific argument for loading meta data (only forwarded in the dict case)
    :return:
    """
    found_model = self._get_future_expected_model(core_element)
    if found_model:
        # reuse a model that was prepared for this core element and re-parent it to self
        found_model.parent = self
    if model_class is IncomeModel:
        # the income is a singular attribute and is not stored in model_list_or_dict
        self.income = found_model if found_model else IncomeModel(core_element, self)
        return
    if model_key is None:
        model_list_or_dict.append(found_model if found_model else model_class(core_element, self))
    else:
        model_list_or_dict[model_key] = found_model if found_model else model_class(core_element, self,
                                                                                    load_meta_data=load_meta_data)
Adds one model for a given core element. The method will add a model for a given core object and checks if there is a corresponding model object in the future expected model list. The method does not check if an object with corresponding model has already been inserted. :param model_list_or_dict: could be a list or dictionary of one model type :param core_element: the core element to a model for, can be state or state element :param model_class: model-class of the elements that should be insert :param model_key: if model_list_or_dict is a dictionary the key is the id of the respective element (e.g. 'state_id') :param load_meta_data: specific argument for loading meta data :return:
entailment
def add_missing_model(self, model_list_or_dict, core_elements_dict, model_name, model_class, model_key):
    """Adds one missing model

    The method will search for the first core-object out of core_elements_dict not represented in the list or
    dict of models handed by model_list_or_dict, adds it and returns without continuing to search for more
    objects which may be missing in model_list_or_dict with respect to the core_elements_dict.

    :param model_list_or_dict: could be a list or dictionary of one model type
    :param core_elements_dict: dictionary of one type of core-elements (rafcon.core)
    :param model_name: prop_name for the core-element held by the model, this core-element is covered by the model
    :param model_class: model-class of the elements that should be inserted
    :param model_key: if model_list_or_dict is a dictionary the key is the id of the respective element
        (e.g. 'state_id')
    :return: True, if a new model was added, False else (the "income" branch returns None)
    :rtype: bool
    """
    def core_element_has_model(core_object):
        # a core object is represented if some model's `model_name` attribute is this very object (identity check)
        for model_or_key in model_list_or_dict:
            model = model_or_key if model_key is None else model_list_or_dict[model_or_key]
            if core_object is getattr(model, model_name):
                return True
        return False

    if model_name == "income":
        # singular income model; no missing/present bookkeeping needed
        self._add_model(self.income, self.state.income, IncomeModel)
        return

    for core_element in core_elements_dict.values():
        if core_element_has_model(core_element):
            continue
        # get expected model and connect it to self or create a new model
        new_model = self._get_future_expected_model(core_element)
        if new_model:
            new_model.parent = self
        else:
            if type_helpers.type_inherits_of_type(model_class, StateModel):
                # state models consume the expected-future-models set themselves; take back what is left
                new_model = model_class(core_element, self, expected_future_models=self.expected_future_models)
                self.expected_future_models = new_model.expected_future_models  # update reused models
                new_model.expected_future_models = set()  # clean the field because should not be used further
            else:
                new_model = model_class(core_element, self)
        # insert new model into list or dict
        if model_key is None:
            model_list_or_dict.append(new_model)
        else:
            model_list_or_dict[getattr(core_element, model_key)] = new_model
        return True
    return False
Adds one missing model The method will search for the first core-object out of core_object_dict not represented in the list or dict of models handed by model_list_or_dict, adds it and returns without continuing to search for more objects which may be missing in model_list_or_dict with respect to the core_object_dict. :param model_list_or_dict: could be a list or dictionary of one model type :param core_elements_dict: dictionary of one type of core-elements (rafcon.core) :param model_name: prop_name for the core-element held by the model, this core-element is covered by the model :param model_class: model-class of the elements that should be inserted :param model_key: if model_list_or_dict is a dictionary the key is the id of the respective element (e.g. 'state_id') :return: True, if a new model was added, False else :rtype: bool
entailment
def remove_additional_model(self, model_list_or_dict, core_objects_dict, model_name, model_key, destroy=True):
    """Remove one unnecessary model

    The method will search for the first model-object out of model_list_or_dict that represents no core-object
    in the dictionary of core-objects handed by core_objects_dict, remove it and return without continuing to
    search for more model-objects which may be unnecessary, too.

    :param model_list_or_dict: could be a list or dictionary of one model type
    :param core_objects_dict: dictionary of one type of core-elements (rafcon.core)
    :param model_name: prop_name for the core-element held by the model, this core-element is covered by the model
    :param model_key: if model_list_or_dict is a dictionary the key is the id of the respective element
        (e.g. 'state_id')
    :param bool destroy: whether ``prepare_destruction()`` is called on the removed model
    :return:
    """
    if model_name == "income":
        # singular income model is always removed (`destroy` is not consulted here)
        self.income.prepare_destruction()
        self.income = None
        return
    for model_or_key in model_list_or_dict:
        model = model_or_key if model_key is None else model_list_or_dict[model_or_key]
        found = False
        for core_object in core_objects_dict.values():
            if core_object is getattr(model, model_name):
                found = True
                break
        if not found:
            # remove at most one model, then stop (mutating while iterating is safe because we return)
            if model_key is None:
                if destroy:
                    model.prepare_destruction()
                model_list_or_dict.remove(model)
            else:
                if destroy:
                    model_list_or_dict[model_or_key].prepare_destruction()
                del model_list_or_dict[model_or_key]
            return
Remove one unnecessary model The method will search for the first model-object out of model_list_or_dict that represents no core-object in the dictionary of core-objects handed by core_objects_dict, remove it and return without continue to search for more model-objects which maybe are unnecessary, too. :param model_list_or_dict: could be a list or dictionary of one model type :param core_objects_dict: dictionary of one type of core-elements (rafcon.core) :param model_name: prop_name for the core-element hold by the model, this core-element is covered by the model :param model_key: if model_list_or_dict is a dictionary the key is the id of the respective element (e.g. 'state_id') :return:
entailment
def _get_future_expected_model(self, core_element): """Hand model for an core element from expected model list and remove the model from this list""" for model in self.expected_future_models: if model.core_element is core_element: # print("expected_future_model found -> remove model:", model, [model], id(model)) self.expected_future_models.remove(model) return model return None
Hand model for an core element from expected model list and remove the model from this list
entailment
def as_dict(self, use_preliminary=False):
    """Create a copy of the config in form of a dict

    :param bool use_preliminary: Whether to include the preliminary config
    :return: A dict with the copy of the config
    :rtype: dict
    """
    config = dict()
    # NOTE(review): `self.config.keys` is read as an attribute, not called -- assumes the config object
    # exposes an iterable `keys` attribute; confirm it is not meant to be `keys()`
    for key in self.config.keys:
        if use_preliminary and key in self.preliminary_config:
            # pending (not yet applied) values take precedence when requested
            value = self.preliminary_config[key]
        else:
            value = self.config.get_config_value(key)
        config[key] = value
    return config
Create a copy of the config in form of a dict :param bool use_preliminary: Whether to include the preliminary config :return: A dict with the copy of the config :rtype: dict
entailment
def update_config(self, config_dict, config_file):
    """Update the content and reference of the config

    :param dict config_dict: The new configuration
    :param str config_file: The new file reference
    """
    self.config.config_file_path = config_file
    self.config.path = path.dirname(config_file)
    # stage only the values that actually differ from the stored configuration
    for key, new_value in config_dict.items():
        if new_value != self.config.get_config_value(key):
            self.set_preliminary_config_value(key, new_value)
Update the content and reference of the config :param dict config_dict: The new configuration :param str config_file: The new file reference
entailment
def get_current_config_value(self, config_key, use_preliminary=True, default=None):
    """Returns the current config value for the given config key

    :param str config_key: Config key whose value is requested
    :param bool use_preliminary: Whether the preliminary config should be queried first
    :param default: The value to return if config key does not exist
    :return: Copy of the config value
    """
    if not use_preliminary or config_key not in self.preliminary_config:
        return copy(self.config.get_config_value(config_key, default))
    # a pending (preliminary) value shadows the stored one
    return copy(self.preliminary_config[config_key])
Returns the current config value for the given config key :param str config_key: Config key who's value is requested :param bool use_preliminary: Whether the preliminary config should be queried first :param default: The value to return if config key does not exist :return: Copy of the config value
entailment
def set_preliminary_config_value(self, config_key, config_value):
    """Stores a config value as preliminary new value

    The config value is not yet applied to the configuration. If the value is identical to the one from
    the configuration, the entry is deleted from the preliminary config.

    :param str config_key: Key of the entry
    :param config_value: New value
    """
    if config_value == self.config.get_config_value(config_key):
        # reverted to the stored value -> drop any pending change (no-op if there is none)
        self.preliminary_config.pop(config_key, None)
    else:
        self.preliminary_config[config_key] = config_value
Stores a config value as preliminary new value The config value is not yet applied to the configuration. If the value is identical to the one from the configuration, the entry is deleted from the preliminary config. :param str config_key: Key of the entry :param config_value: New value
entailment
def apply_preliminary_config(self, save=True):
    """Applies the preliminary config to the configuration

    :param bool save: Whether the config file is to be written to the file system
    :return: Whether the applied changes require a refresh of the state machines
    :rtype: bool
    """
    state_machine_refresh_required = False
    for config_key, config_value in self.preliminary_config.items():
        self.config.set_config_value(config_key, config_value)
        if config_key in self.config.keys_requiring_state_machine_refresh:
            state_machine_refresh_required = True
        elif config_key in self.config.keys_requiring_restart:
            self.changed_keys_requiring_restart.add(config_key)
        if config_key == 'AUTO_RECOVERY_LOCK_ENABLED':
            # toggling the auto-recovery lock immediately creates/removes the instance lock file
            import rafcon.gui.models.auto_backup
            if config_value:
                rafcon.gui.models.auto_backup.generate_rafcon_instance_lock_file()
            else:
                rafcon.gui.models.auto_backup.remove_rafcon_instance_lock_file()
    self.preliminary_config.clear()
    if save:
        self.config.save_configuration()
    return state_machine_refresh_required
Applies the preliminary config to the configuration :param bool save: Whether the config file is be be written to the file system :return: Whether the applied changes require a refresh of the state machines :rtype: bool
entailment
def parent(self, parent):
    """Setter for the parent state of the state element

    :param rafcon.core.states.state.State parent: Parent state or None
    """
    if parent is None:
        self._parent = None
    else:
        from rafcon.core.states.state import State
        assert isinstance(parent, State)
        old_parent = self.parent
        # parent is held as a weak reference to avoid reference cycles
        self._parent = ref(parent)
        valid, message = self._check_validity()
        if not valid:
            # revert to the previous parent before raising
            if not old_parent:
                self._parent = None
            else:
                self._parent = ref(old_parent)
            class_name = self.__class__.__name__
            if global_config.get_config_value("LIBRARY_RECOVERY_MODE") is True:
                do_delete_item = True
                # In case of just the data type is wrong raise an Exception but keep the data flow
                if "not have matching data types" in message:
                    do_delete_item = False
                    self._parent = ref(parent)
                raise RecoveryModeException("{0} invalid within state \"{1}\" (id {2}): {3}".format(
                    class_name, parent.name, parent.state_id, message), do_delete_item=do_delete_item)
            else:
                raise ValueError("{0} invalid within state \"{1}\" (id {2}): {3} {4}".format(
                    class_name, parent.name, parent.state_id, message, self))
Setter for the parent state of the state element :param rafcon.core.states.state.State parent: Parent state or None
entailment
def _change_property_with_validity_check(self, property_name, value):
    """Helper method to change a property and reset it if the validity check fails

    :param str property_name: The name of the property to be changed, e.g. '_data_flow_id'
    :param value: The new desired value for this property
    :raises exceptions.ValueError: if a property could not be changed
    """
    assert isinstance(property_name, string_types)
    old_value = getattr(self, property_name)
    setattr(self, property_name, value)
    valid, message = self._check_validity()
    if not valid:
        # roll the attribute back before signalling the failure
        setattr(self, property_name, old_value)
        class_name = self.__class__.__name__
        # property_name[1:] strips the leading underscore for the user-facing message
        raise ValueError("The {2}'s '{0}' could not be changed: {1}".format(property_name[1:], message, class_name))
Helper method to change a property and reset it if the validity check fails :param str property_name: The name of the property to be changed, e.g. '_data_flow_id' :param value: The new desired value for this property :raises exceptions.ValueError: if a property could not be changed
entailment
def _check_validity(self):
    """Checks the validity of the state element's properties

    Some validity checks can only be performed by the parent. Thus, the existence of a parent and a check
    function must be ensured and this function be queried.

    :return: validity and messages
    :rtype: bool, str
    """
    from rafcon.core.states.state import State
    current_parent = self.parent
    if not current_parent:
        return True, "no parent"
    if isinstance(current_parent, State):
        return current_parent.check_child_validity(self)
    return True, "no parental check"
Checks the validity of the state element's properties Some validity checks can only be performed by the parent. Thus, the existence of a parent and a check function must be ensured and this function be queried. :return: validity and messages :rtype: bool, str
entailment
def register_new_state_machines(self, model, prop_name, info):
    """Registers self as observer of newly added StateMachineModels after those were added to the list of
    state_machines held by the observed StateMachineManagerModel. The method is registered as observer of the
    observable StateMachineManagerModel.state_machines."""
    if info['method_name'] == '__setitem__':
        # info['args'][1] is the newly inserted StateMachineModel
        self.observe_model(info['args'][1])
        self.logger.info(NotificationOverview(info))
    elif info['method_name'] == '__delitem__':
        pass  # removal is handled by relieve_state_machines
    else:
        self.logger.warning(NotificationOverview(info))
The method register self as observer newly added StateMachineModels after those were added to the list of state_machines hold by observed StateMachineMangerModel. The method register as observer of observable StateMachineMangerModel.state_machines.
entailment
def relieve_state_machines(self, model, prop_name, info):
    """Relieves observed models before those get removed from the list of state_machines held by the observed
    StateMachineManagerModel. The method is registered as observer of the observable
    StateMachineManagerModel.state_machines."""
    if info['method_name'] == '__setitem__':
        pass  # additions are handled by register_new_state_machines
    elif info['method_name'] == '__delitem__':
        # info['args'][0] is the key of the state machine model about to be removed
        self.relieve_model(self.state_machine_manager_model.state_machines[info['args'][0]])
        self.logger.info(NotificationOverview(info))
    else:
        self.logger.warning(NotificationOverview(info))
The method relieves observed models before those get removed from the list of state_machines hold by observed StateMachineMangerModel. The method register as observer of observable StateMachineMangerModel.state_machines.
entailment
def all_after_notification(self, model, prop_name, info):
    """Logs all changes that are notified recursively through the hierarchies of the states after the change
    occurs in the rafcon.core object. The method is registered as observer of the observable
    StateMachineModel.state_machine of any observed StateMachineModel.

    :param model: StateMachineModel that represents the state_machine which has been changed
    :param prop_name: Name of property that notifies -> here always 'state_machine'
    :param info: Dictionary that holds recursive notification information like models, property and method names
    :return:
    """
    self.logger.debug(NotificationOverview(info))
The method logs all changes that are notified recursively through the hierarchies of the states after the change occurs in the rafcon.core object. The method is registered as observer of the observable StateMachineModel.state_machine of any observed StateMachineModel. :param model: StateMachineModel that represents the state_machine which has been changed :param prop_name: Name of property that notifies -> here always 'state_machine' :param info: Dictionary that holds recursive notification information like models, property and method names :return:
entailment
def relieve_state_machines(self, model, prop_name, info):
    """Relieves observed models before those get removed from the list of state_machines held by the observed
    StateMachineManagerModel. The method is registered as observer of the observable
    StateMachineManagerModel.state_machines."""
    if info['method_name'] == '__setitem__':
        pass  # additions are handled elsewhere
    elif info['method_name'] == '__delitem__':
        self.relieve_model(self.state_machine_manager_model.state_machines[info['args'][0]])
        # the root state model is relieved too, if still assigned
        if self.state_machine_manager_model.state_machines[info['args'][0]].root_state:
            self.relieve_model(self.state_machine_manager_model.state_machines[info['args'][0]].root_state)
        # otherwise relieved by root_state assign notification
        # self.logger.info(NotificationOverview(info))
    else:
        self.logger.warning(NotificationOverview(info))
The method relieves observed models before those get removed from the list of state_machines hold by observed StateMachineMangerModel. The method register as observer of observable StateMachineMangerModel.state_machines.
entailment
def observe_root_state_assignments(self, model, prop_name, info):
    """Relieves observed root_state models and observes newly assigned root_state models."""
    if info['old']:
        self.relieve_model(info['old'])
    if info['new']:
        self.observe_model(info['new'])
        self.logger.info("Exchange observed old root_state model with newly assigned one. sm_id: {}"
                         "".format(info['new'].state.parent.state_machine_id))
The method relieves observed root_state models and observes newly assigned root_state models.
entailment
def observe_meta_signal_changes(self, changed_model, prop_name, info):
    """The method prints the structure of all meta_signal-notifications as log-messages."""
    self.logger.info(NotificationOverview(info))
The method prints the structure of all meta_signal-notifications as log-messages.
entailment
def set_dict(self, new_dict):
    """Sets the dictionary of the Vividict

    The method is able to handle nested dictionaries, as nested dicts are wrapped into Vividicts
    (whose constructor applies this method recursively).

    :param new_dict: The dict that will be added to the own dict
    """
    for key, value in new_dict.items():
        key_str = str(key)  # all keys are stored as strings
        if isinstance(value, dict):
            self[key_str] = Vividict(value)
        else:
            self[key_str] = value
Sets the dictionary of the Vividict The method is able to handle nested dictionaries, by calling the method recursively. :param new_dict: The dict that will be added to the own dict
entailment
def vividict_to_dict(vividict):
    """Helper method to create Python dicts from arbitrary Vividict objects

    :param Vividict vividict: A Vividict to be converted
    :return: A Python dict
    :rtype: dict
    """
    try:
        from numpy import ndarray
    except ImportError:
        # without numpy, alias ndarray to dict so the isinstance check below becomes a no-op
        # (the dict case is already handled by the first condition)
        ndarray = dict
    dictionary = {}

    def np_to_native(np_val):
        """Recursively convert numpy values to native Python values

        - Converts matrices to lists
        - Converts numpy.dtypes to float/int etc

        :param np_val: value to convert
        :return: value as native Python value
        """
        if isinstance(np_val, dict):
            for key, value in np_val.items():
                np_val[key] = np_to_native(value)
        # The following condition cannot hold true if no numpy is installed, as ndarray is set to dict, which was
        # already handled in the previous condition
        elif isinstance(np_val, ndarray):
            # noinspection PyUnresolvedReferences
            np_val = np_val.tolist()
        if isinstance(np_val, (list, tuple)):
            native_list = [np_to_native(val) for val in np_val]
            if isinstance(np_val, tuple):
                return tuple(native_list)
            return native_list
        if not hasattr(np_val, 'dtype'):
            # Nothing to convert
            return np_val
        return np_val.item()  # Get the float/int etc value

    for key, value in vividict.items():
        # Convert numpy values to native Python values
        value = np_to_native(value)
        if isinstance(value, Vividict):
            value = Vividict.vividict_to_dict(value)
        dictionary[key] = value
    return dictionary
Helper method to create Python dicts from arbitrary Vividict objects :param Vividict vividict: A Vividict to be converted :return: A Python dict :rtype: dict
entailment
def to_yaml(cls, dumper, vividict):
    """Represent a Vividict as a YAML mapping.

    Implementation for the abstract method of the base class YAMLObject.
    """
    plain_dict = cls.vividict_to_dict(vividict)
    return dumper.represent_mapping(cls.yaml_tag, plain_dict)
Implementation for the abstract method of the base class YAMLObject
entailment
def register_view(self, view):
    """Called when the View was registered

    Can be used e.g. to connect signals. Here, the destroy signal is connected to close the application.

    :param rafcon.gui.views.state_editor.semantic_data_editor.SemanticDataEditorView view: A view to show all
        semantic data of a state
    """
    super(SemanticDataEditorController, self).register_view(view)
    # semantic data of library states (or states nested inside a library) must not be edited
    if isinstance(self.model.state, LibraryState) or self.model.state.get_next_upper_library_root_state():
        self.set_editor_lock(True)
    view['open_externally'].connect('clicked', self.open_externally_clicked)
    # the boolean user data distinguishes "add value entry" (False) from "add dict entry" (True)
    view['new_entry'].connect('clicked', self.on_add, False)
    view['new_dict_entry'].connect('clicked', self.on_add, True)
    view['delete_entry'].connect('clicked', self.on_remove)
    self._apply_value_on_edited_and_focus_out(self.widget_columns[view.KEY_COLUMN_ID].get_cells()[0],
                                              self.key_edited)
    self._apply_value_on_edited_and_focus_out(self.widget_columns[view.VALUE_COLUMN_ID].get_cells()[0],
                                              self.value_edited)
    self.reload_tree_store_data()
Called when the View was registered Can be used e.g. to connect signals. Here, the destroy signal is connected to close the application :param rafcon.gui.views.state_editor.semantic_data_editor.SemanticDataEditorView view: An view to show all semantic data of a state
entailment
def set_editor_lock(self, locked=True):
    """Enable or disable editing of the semantic data widgets.

    Implements the abstract method of the ExternalEditor class.

    :param bool locked: True disables the add/delete buttons and cell editing
    """
    editable = not locked
    for button_name in ('new_entry', 'new_dict_entry', 'delete_entry'):
        self.view[button_name].set_sensitive(editable)
    # self.view['open_externally'].set_sensitive(editable)
    for tree_column in self.view['semantic_data_tree_view'].get_columns():
        tree_column.get_cells()[0].set_property('editable', editable)
Implements the abstract method of the ExternalEditor class.
entailment
def get_selected_object(self):
    """Gets the selected object in the treeview

    :return: (tree iter, path) of the single selected row, or (None, paths) if zero or multiple rows are selected
    """
    model, paths = self.tree_view.get_selection().get_selected_rows()
    if len(paths) != 1:
        return None, paths
    selected_path = paths[0]
    return self.tree_store.get_iter(selected_path), selected_path
Gets the selected object in the treeview :return:
entailment
def on_add(self, widget, new_dict=False):
    """Adds a new entry to the semantic data of a state. Reloads the tree store.

    :param widget: The source widget of the action
    :param bool new_dict: A flag to indicate if the new value is of type dict
    :return:
    """
    self.semantic_data_counter += 1
    treeiter, path = self.get_selected_object()
    value = dict() if new_dict else "New Value"
    # get target dict path
    if treeiter:
        target_dict_path_as_list = self.tree_store[path][self.ID_STORAGE_ID]
        if not self.tree_store[path][self.IS_DICT_STORAGE_ID]:
            # selected row is a leaf value -> insert into its parent dict instead
            target_dict_path_as_list.pop()
    else:
        target_dict_path_as_list = []
    # generate key
    target_dict = self.model.state.get_semantic_data(target_dict_path_as_list)
    new_key_string = generate_semantic_data_key(list(target_dict.keys()))
    self.model.state.add_semantic_data(target_dict_path_as_list, value, new_key_string)
    self.reload_tree_store_data()
    # jump to new element
    self.select_entry(target_dict_path_as_list + [new_key_string])
    logger.debug("Added new semantic data entry!")
    return True
Adds a new entry to the semantic data of a state. Reloads the tree store. :param widget: The source widget of the action :param bool new_dict: A flag to indicate if the new value is of type dict :return:
entailment
def add_action_callback(self, key_value, modifier_mask, a_dict=False):
    """Callback method for add action

    :param key_value: key value of the triggering shortcut event
    :param modifier_mask: modifier mask of the triggering shortcut event
    :param bool a_dict: whether a dict entry (instead of a plain value entry) should be added
    """
    # only react if the widget has the focus and no cell is currently being edited
    if react_to_event(self.view, self.tree_view, event=(key_value, modifier_mask)) \
            and self.active_entry_widget is None:
        self.on_add(None, a_dict)
        return True
Callback method for add action
entailment
def on_remove(self, widget, data=None):
    """Removes an entry of semantic data of a state.

    :param widget: The source widget of the action
    :param data: Optional additional data handed by the signal
    :return:
    """
    treeiter, path = self.get_selected_object()
    if not treeiter:
        return  # check if an element is selected
    dict_path_as_list = self.tree_store[path][self.ID_STORAGE_ID]
    logger.debug("Deleting semantic data entry with name {}!".format(dict_path_as_list[-1]))
    self.model.state.remove_semantic_data(dict_path_as_list)
    self.reload_tree_store_data()
    # hold cursor position where the last element was removed
    try:
        self.select_entry(self.tree_store[path][self.ID_STORAGE_ID])
    except IndexError:
        # the removed row was the last one at this tree level -> fall back to a neighbour/parent row
        if len(self.tree_store):
            if len(path) > 1:
                possible_before_path = tuple(list(path[:-1]) + [path[-1] - 1])
                if possible_before_path[-1] > -1:
                    # previous sibling
                    self.select_entry(self.tree_store[possible_before_path][self.ID_STORAGE_ID])
                else:
                    # no sibling left -> parent row
                    self.select_entry(self.tree_store[path[:-1]][self.ID_STORAGE_ID])
            else:
                # top-level row -> previous top-level row
                self.select_entry(self.tree_store[path[0] - 1][self.ID_STORAGE_ID])
    return True
Removes an entry of semantic data of a state. :param widget: :return:
entailment
def add_items_to_tree_iter(self, input_dict, treeiter, parent_dict_path=None):
    """Adds all values of the input dict to self.tree_store

    :param input_dict: The input dictionary holds all values, which are going to be added.
    :param treeiter: The pointer inside the tree store to add the input dict
    :param parent_dict_path: list of keys describing the dict path of ``input_dict`` within the semantic data
    :return:
    """
    if parent_dict_path is None:
        parent_dict_path = []
    # NOTE(review): the return value of get_view_selection() is discarded -- looks like a leftover call; confirm
    self.get_view_selection()
    for key, value in sorted(input_dict.items()):
        element_dict_path = copy.copy(parent_dict_path) + [key]
        if isinstance(value, dict):
            # dict rows carry an empty value string and the is-dict flag; children are added recursively
            new_iter = self.tree_store.append(treeiter, [key, "", True, element_dict_path])
            self.add_items_to_tree_iter(value, new_iter, element_dict_path)
        else:
            self.tree_store.append(treeiter, [key, value, False, element_dict_path])
Adds all values of the input dict to self.tree_store :param input_dict: The input dictionary holds all values, which are going to be added. :param treeiter: The pointer inside the tree store to add the input dict :return:
entailment
def reload_tree_store_data(self):
    """Reloads the data of the tree store

    :return:
    """
    model, paths = self.tree_view.get_selection().get_selected_rows()
    self.tree_store.clear()
    self.add_items_to_tree_iter(self.model.state.semantic_data, None)
    self.tree_view.expand_all()
    # restore the previous selection where the paths still exist
    try:
        for path in paths:
            self.tree_view.get_selection().select_path(path)
    except ValueError:
        pass  # best effort: a previously selected path may no longer be valid
Reloads the data of the tree store :return:
entailment
def copy_action_callback(self, *event):
    """Add a copy of all selected row dict value pairs to the clipboard"""
    if react_to_event(self.view, self.tree_view, event) and self.active_entry_widget is None:
        _, dict_paths = self.get_view_selection()
        selected_data_list = []
        for dict_path_as_list in dict_paths:
            # walk the semantic data down to the selected entry
            value = self.model.state.semantic_data
            for path_element in dict_path_as_list:
                value = value[path_element]
            # NOTE(review): path_element is the last key of the walk; it is undefined if dict_path_as_list
            # is empty -- confirm empty paths cannot occur here
            selected_data_list.append((path_element, value))
        rafcon.gui.clipboard.global_clipboard.set_semantic_dictionary_list(selected_data_list)
Add a copy of all selected row dict value pairs to the clipboard
entailment
def paste_action_callback(self, *event):
    """Add clipboard key value pairs into all selected sub-dictionaries"""
    if react_to_event(self.view, self.tree_view, event) and self.active_entry_widget is None:
        _, dict_paths = self.get_view_selection()
        selected_data_list = rafcon.gui.clipboard.global_clipboard.get_semantic_dictionary_list()
        # enforce paste on root level if semantic data empty or nothing is selected
        if not dict_paths and not self.model.state.semantic_data:
            dict_paths = [[]]
        for target_dict_path_as_list in dict_paths:
            # walk to the selected entry, keeping the previous (parent) dict at hand
            prev_value = self.model.state.semantic_data
            value = self.model.state.semantic_data
            for path_element in target_dict_path_as_list:
                prev_value = value
                value = value[path_element]
            if not isinstance(value, dict) and len(dict_paths) <= 1:
                # if one selection take parent
                target_dict_path_as_list.pop(-1)
                value = prev_value
            if isinstance(value, dict):
                for key_to_paste, value_to_add in selected_data_list:
                    self.model.state.add_semantic_data(target_dict_path_as_list, value_to_add, key_to_paste)
        self.reload_tree_store_data()
Add clipboard key value pairs into all selected sub-dictionary
entailment
def cut_action_callback(self, *event):
    """Add a copy of all selected row dict value pairs to the clipboard and remove them from the state"""
    if react_to_event(self.view, self.tree_view, event) and self.active_entry_widget is None:
        _, dict_paths = self.get_view_selection()
        stored_data_list = []
        for dict_path_as_list in dict_paths:
            if dict_path_as_list:
                # walk down to the selected entry; path_element ends up as its key
                value = self.model.state.semantic_data
                for path_element in dict_path_as_list:
                    value = value[path_element]
                stored_data_list.append((path_element, value))
                self.model.state.remove_semantic_data(dict_path_as_list)
        rafcon.gui.clipboard.global_clipboard.set_semantic_dictionary_list(stored_data_list)
        self.reload_tree_store_data()
Add a copy and cut all selected row dict value pairs to the clipboard
entailment
def key_edited(self, path, new_key_str):
    """Edits the key of a semantic data entry

    :param path: The path inside the tree store to the target entry (string paths are converted first)
    :param str new_key_str: The new value of the target cell
    :return:
    """
    tree_store_path = self.create_tree_store_path_from_key_string(path) if isinstance(path, string_types) else path
    if self.tree_store[tree_store_path][self.KEY_STORAGE_ID] == new_key_str:
        return  # nothing changed
    dict_path = self.tree_store[tree_store_path][self.ID_STORAGE_ID]
    # re-keying is implemented as remove + re-add under the new key
    old_value = self.model.state.get_semantic_data(dict_path)
    self.model.state.remove_semantic_data(dict_path)
    if new_key_str == "":
        # empty key -> auto-generate a unique key within the parent dict
        target_dict = self.model.state.semantic_data
        for element in dict_path[0:-1]:
            target_dict = target_dict[element]
        new_key_str = generate_semantic_data_key(list(target_dict.keys()))
    new_dict_path = self.model.state.add_semantic_data(dict_path[0:-1], old_value, key=new_key_str)
    self._changed_id_to = {':'.join(dict_path): new_dict_path}  # use hashable key (workaround for tree view ctrl)
    self.reload_tree_store_data()
Edits the key of a semantic data entry :param path: The path inside the tree store to the target entry :param str new_key_str: The new value of the target cell :return:
entailment
def value_edited(self, path, new_value_str):
    """Set a new value for the semantic data entry addressed by the given tree store path

    :param path: The path inside the tree store to the target entry
    :param str new_value_str: The new value of the target cell
    :return:
    """
    if isinstance(path, string_types):
        tree_store_path = self.create_tree_store_path_from_key_string(path)
    else:
        tree_store_path = path
    row = self.tree_store[tree_store_path]
    # nothing to do when the cell content did not change
    if row[self.VALUE_STORAGE_ID] == new_value_str:
        return
    dict_path = row[self.ID_STORAGE_ID]
    # re-add the entry under the same key with the new value
    self.model.state.add_semantic_data(dict_path[:-1], new_value_str, key=dict_path[-1])
    self.reload_tree_store_data()
Adds the value of the semantic data entry :param path: The path inside the tree store to the target entry :param str new_value_str: The new value of the target cell :return:
entailment
def get_path_for_core_element(self, core_element_id):
    """Get the path to the row representing the core element with the handed core_element_id

    :param list core_element_id: Core element identifier used in the respective list store column
    :rtype: tuple
    :return: path of the first matching row, or None if no row matches
    """
    matches = []

    def collect_matching_rows(row_iter, accumulator):
        # compare length first so ids of different depth are never treated as equal
        row_id = self.tree_store.get_value(row_iter, self.ID_STORAGE_ID)
        if len(row_id) == len(core_element_id) and row_id == core_element_id:
            accumulator.append(self.tree_store.get_path(row_iter))

    self.iter_tree_with_handed_function(collect_matching_rows, matches)
    return matches[0] if matches else None
Get path to the row representing core element described by handed core_element_id :param list core_element_id: Core element identifier used in the respective list store column :rtype: tuple :return: path
entailment
def save_file_data(self, path):
    """Implements the abstract method of the ExternalEditor class."""
    script_file = os.path.join(path, storage.SCRIPT_FILE)
    semantic_data_file = os.path.join(path, storage.SEMANTIC_DATA_FILE)
    try:
        # create a file with empty text first; this also creates the whole directory path to the file
        filesystem.write_file(script_file, "", create_full_path=True)
        storage_utils.write_dict_to_json(self.model.state.semantic_data, semantic_data_file)
    except IOError as e:
        # Only happens if the file doesn't exist yet and would be written to the temp folder.
        # The method write_file doesn't create the path
        logger.error('The operating system raised an error: {}'.format(e))
Implements the abstract method of the ExternalEditor class.
entailment
def load_and_set_file_content(self, file_system_path):
    """Implements the abstract method of the ExternalEditor class."""
    semantic_data_file = os.path.join(file_system_path, storage.SEMANTIC_DATA_FILE)
    self.model.state.semantic_data = load_data_file(semantic_data_file)
Implements the abstract method of the ExternalEditor class.
entailment
def store_widget_properties(self, widget, widget_name):
    """Sets configuration values for widgets

    If the widget is a window, then the size and position are stored. If the widget is a pane, then only the
    position is stored. If the window is maximized the last insert position before being maximized is kept in
    the config and the maximized flag set to True. The maximized state and the last size and position are
    strictly separated by this.

    :param widget: The widget, for which the position (and possibly the size) will be stored.
    :param widget_name: The window or widget name of the widget, which constitutes a part of its key in the
                        configuration file.
    """
    if not isinstance(widget, Gtk.Window):
        # Gtk.Paned: only the handle position is relevant
        self.set_config_value('{0}_POS'.format(widget_name), widget.get_position())
        return
    is_maximized = bool(widget.is_maximized())
    self.set_config_value('{0}_MAXIMIZED'.format(widget_name), is_maximized)
    if is_maximized:
        # keep the last un-maximized size/position untouched in the config
        return
    self.set_config_value('{0}_SIZE'.format(widget_name), tuple(widget.get_size()))
    self.set_config_value('{0}_POS'.format(widget_name), tuple(widget.get_position()))
Sets configuration values for widgets If the widget is a window, then the size and position are stored. If the widget is a pane, then only the position is stored. If the window is maximized the last insert position before being maximized is keep in the config and the maximized flag set to True. The maximized state and the last size and position are strictly separated by this. :param widget: The widget, for which the position (and possibly the size) will be stored. :param widget_name: The window or widget name of the widget, which constitutes a part of its key in the configuration file.
entailment
def update_recently_opened_state_machines_with(self, state_machine):
    """Update the recently opened list with the file system path of the handed state machine

    Inserts the state machine's file system path at the front of the recently opened state machines,
    or moves it to the front if the list already contains it.

    :param rafcon.core.state_machine.StateMachine state_machine: State machine to check
    :return:
    """
    # state machines that were never saved have no path and cannot be re-opened
    if not state_machine.file_system_path:
        return
    recently_opened = self.get_config_value('recently_opened_state_machines', [])
    # move the path to the front of the list (most recently used first);
    # list.remove replaces the two-pass `del lst[lst.index(x)]` idiom
    if state_machine.file_system_path in recently_opened:
        recently_opened.remove(state_machine.file_system_path)
    recently_opened.insert(0, state_machine.file_system_path)
    self.set_config_value('recently_opened_state_machines', recently_opened)
Update recently opened list with file system path of handed state machine Inserts the handed state machine's file system path into the recently opened state machines or moves it to be the first element in the list. :param rafcon.core.state_machine.StateMachine state_machine: State machine to check :return:
entailment
def extend_recently_opened_by_current_open_state_machines(self):
    """Add every state machine currently open in the state machine manager to the recently opened list"""
    from rafcon.gui.singleton import state_machine_manager_model
    for state_machine_m in state_machine_manager_model.state_machines.values():
        self.update_recently_opened_state_machines_with(state_machine_m.state_machine)
Update list with all in the state machine manager opened state machines
entailment
def prepare_recently_opened_state_machines_list_for_storage(self):
    """Truncate the recently opened state machine paths to the limit set in the gui config"""
    from rafcon.gui.singleton import global_gui_config
    limit = global_gui_config.get_config_value('NUMBER_OF_RECENT_OPENED_STATE_MACHINES_STORED')
    paths = self.get_config_value('recently_opened_state_machines', [])
    self.set_config_value('recently_opened_state_machines', paths[:limit])
Reduce number of paths in the recent opened state machines to limit from gui config
entailment
def clean_recently_opened_state_machines(self):
    """Drop recently opened state machine entries whose file system path no longer exists"""
    paths = self.get_config_value('recently_opened_state_machines', [])
    # the helper mutates the handed list in place
    filesystem.clean_file_system_paths_from_not_existing_paths(paths)
    self.set_config_value('recently_opened_state_machines', paths)
Remove state machines whose file system path does not exist
entailment
def pause(self):
    """Set the execution mode to paused"""
    if self.state_machine_manager.active_state_machine_id is None:
        logger.info("'Pause' is not a valid action to initiate state machine execution.")
        return
    active_state_machine = self.state_machine_manager.get_active_state_machine()
    if active_state_machine is not None:
        active_state_machine.root_state.recursively_pause_states()
    logger.debug("Pause execution ...")
    self.set_execution_mode(StateMachineExecutionStatus.PAUSED)
Set the execution mode to paused
entailment
def finished_or_stopped(self):
    """Condition check on finished or stopped status

    The method returns a value which is equivalent with not 'active' status of the current state machine.

    :return: outcome of condition check stopped or finished
    :rtype: bool
    """
    return self._status.execution_mode in (StateMachineExecutionStatus.STOPPED,
                                           StateMachineExecutionStatus.FINISHED)
Condition check on finished or stopped status The method returns a value which is equivalent with not 'active' status of the current state machine. :return: outcome of condition check stopped or finished :rtype: bool
entailment
def start(self, state_machine_id=None, start_state_path=None):
    """ Start state machine

    If no state machine is running start a specific state machine. If no state machine is provided the
    currently active state machine is started. If there is already a state machine running, just resume it
    without taking the passed state_machine_id argument into account.

    :param state_machine_id: The id if the state machine to be started
    :param start_state_path: The path of the state in the state machine, from which the execution will start
    :return:
    """
    if not self.finished_or_stopped():
        # a state machine is paused or stepping -> resume it instead of starting a new one
        logger.debug("Resume execution engine ...")
        self.run_to_states = []
        if self.state_machine_manager.get_active_state_machine() is not None:
            self.state_machine_manager.get_active_state_machine().root_state.recursively_resume_states()
        # the state_machine_id argument is ignored while resuming; inform the user if it differs
        if isinstance(state_machine_id, int) and \
                state_machine_id != self.state_machine_manager.get_active_state_machine().state_machine_id:
            logger.info("Resumed state machine with id {0} but start of state machine id {1} was requested."
                        "".format(self.state_machine_manager.get_active_state_machine().state_machine_id,
                                  state_machine_id))
        self.set_execution_mode(StateMachineExecutionStatus.STARTED)
    else:
        # do not start another state machine before the old one did not finish its execution
        if self.state_machine_running:
            logger.warning("An old state machine is still running! Make sure that it terminates,"
                           " before you can start another state machine! \n{0}".format(self))
            return
        logger.debug("Start execution engine ...")
        if state_machine_id is not None:
            self.state_machine_manager.active_state_machine_id = state_machine_id
        if not self.state_machine_manager.active_state_machine_id:
            logger.error("There exists no active state machine!")
            return
        self.set_execution_mode(StateMachineExecutionStatus.STARTED)
        # build the list of path prefixes leading to the start state, e.g. "a/b/c" -> ["a", "a/b", "a/b/c"]
        self.start_state_paths = []
        if start_state_path:
            path_list = start_state_path.split("/")
            cur_path = ""
            for path in path_list:
                if cur_path == "":
                    cur_path = path
                else:
                    cur_path = cur_path + "/" + path
                self.start_state_paths.append(cur_path)
        self._run_active_state_machine()
Start state machine If no state machine is running start a specific state machine. If no state machine is provided the currently active state machine is started. If there is already a state machine running, just resume it without taking the passed state_machine_id argument into account. :param state_machine_id: The id if the state machine to be started :param start_state_path: The path of the state in the state machine, from which the execution will start :return:
entailment
def stop(self):
    """Set the execution mode to stopped"""
    logger.debug("Stop the state machine execution ...")
    active_state_machine = self.state_machine_manager.get_active_state_machine()
    if active_state_machine is not None:
        active_state_machine.root_state.recursively_preempt_states()
    self.__set_execution_mode_to_stopped()
    # Notifies states waiting in step mode or those that are paused about execution stop
    with self._status.execution_condition_variable:
        self._status.execution_condition_variable.notify_all()
    self.__running_state_machine = None
Set the execution mode to stopped
entailment
def join(self, timeout=None):
    """Blocking wait for the execution to finish

    :param float timeout: Maximum time to wait or None for infinitely
    :return: True if the execution finished, False if no state machine was started or a timeout occurred
    :rtype: bool
    """
    if self.__wait_for_finishing_thread:
        if not timeout:
            # join in short slices: signal handlers won't work if timeout is None and the thread is joined
            while True:
                self.__wait_for_finishing_thread.join(0.5)
                # is_alive(): the camelCase isAlive() was deprecated and removed in Python 3.9
                if not self.__wait_for_finishing_thread.is_alive():
                    break
        else:
            self.__wait_for_finishing_thread.join(timeout)
        return not self.__wait_for_finishing_thread.is_alive()
    else:
        logger.warning("Cannot join as state machine was not started yet.")
        return False
Blocking wait for the execution to finish :param float timeout: Maximum time to wait or None for infinitely :return: True if the execution finished, False if no state machine was started or a timeout occurred :rtype: bool
entailment
def _run_active_state_machine(self):
    """Store the running state machine, start it and observe its status in a separate thread"""
    self.__running_state_machine = self.state_machine_manager.get_active_state_machine()
    if not self.__running_state_machine:
        # guard before any dereference: the original dereferenced root_state of None,
        # raising AttributeError before this fallback could run
        logger.warning("Currently no active state machine! Please create a new state machine.")
        self.set_execution_mode(StateMachineExecutionStatus.STOPPED)
        return
    # Create new concurrency queue for root state to be able to synchronize with the execution
    self.__running_state_machine.root_state.concurrency_queue = queue.Queue(maxsize=0)
    self.__running_state_machine.start()
    self.__wait_for_finishing_thread = threading.Thread(target=self._wait_for_finishing)
    self.__wait_for_finishing_thread.start()
Store running state machine and observe its status
entailment
def _wait_for_finishing(self):
    """Observe running state machine and stop engine if execution has finished"""
    # flag guards against starting a second state machine while this one is still running
    self.state_machine_running = True
    self.__running_state_machine.join()
    self.__set_execution_mode_to_finished()
    self.state_machine_manager.active_state_machine_id = None
    # give plugins the chance to react to the finished execution
    plugins.run_on_state_machine_execution_finished()
    # self.__set_execution_mode_to_stopped()
    self.state_machine_running = False
Observe running state machine and stop engine if execution has finished
entailment
def backward_step(self):
    """Take a backward step for all active states in the state machine"""
    self.run_to_states = []
    logger.debug("Executing backward step ...")
    self.set_execution_mode(StateMachineExecutionStatus.BACKWARD)
Take a backward step for all active states in the state machine
entailment
def step_mode(self, state_machine_id=None):
    """Set the execution mode to stepping mode. Transitions are only triggered if a new step is triggered

    :param state_machine_id: Optional id of the state machine to activate before entering step mode
    """
    logger.debug("Activate step mode")
    if state_machine_id is not None:
        self.state_machine_manager.active_state_machine_id = state_machine_id
    self.run_to_states = []
    # if nothing is running yet, the state machine has to be launched after switching the mode
    needs_launch = self.finished_or_stopped()
    self.set_execution_mode(StateMachineExecutionStatus.STEP_MODE)
    if needs_launch:
        self._run_active_state_machine()
Set the execution mode to stepping mode. Transitions are only triggered if a new step is triggered
entailment
def step_into(self):
    """Take a forward step (into) for all active states in the state machine"""
    logger.debug("Execution step into ...")
    self.run_to_states = []
    # if nothing is running yet, the state machine has to be launched after switching the mode
    needs_launch = self.finished_or_stopped()
    self.set_execution_mode(StateMachineExecutionStatus.FORWARD_INTO)
    if needs_launch:
        self._run_active_state_machine()
Take a forward step (into) for all active states in the state machine
entailment
def step_over(self):
    """Take a forward step (over) for all active states in the state machine"""
    logger.debug("Execution step over ...")
    self.run_to_states = []
    # if nothing is running yet, the state machine has to be launched after switching the mode
    needs_launch = self.finished_or_stopped()
    self.set_execution_mode(StateMachineExecutionStatus.FORWARD_OVER)
    if needs_launch:
        self._run_active_state_machine()
Take a forward step (over) for all active states in the state machine
entailment
def step_out(self):
    """Take a forward step (out) for all active states in the state machine"""
    logger.debug("Execution step out ...")
    self.run_to_states = []
    # if nothing is running yet, the state machine has to be launched after switching the mode
    needs_launch = self.finished_or_stopped()
    self.set_execution_mode(StateMachineExecutionStatus.FORWARD_OUT)
    if needs_launch:
        self._run_active_state_machine()
Take a forward step (out) for all active states in the state machine
entailment