code
string
signature
string
docstring
string
loss_without_docstring
float64
loss_with_docstring
float64
factor
float64
def apply_new_global_variable_type(self, path, new_data_type_as_string):
    """Change the data type of the global variable in the edited row.

    Updates the global variable data type only if different. The old value is
    converted into the new type; if that conversion fails, the variable is
    reset to the default-constructed value of the new type.

    :param path: The path identifying the edited global variable tree view row, can be str, int or tuple.
    :param str new_data_type_as_string: New global variable data type as string
    """
    # no-op if the displayed type did not actually change
    if self.list_store[path][self.DATA_TYPE_AS_STRING_STORAGE_ID] == new_data_type_as_string:
        return
    gv_name = self.list_store[path][self.NAME_STORAGE_ID]
    if not self.global_variable_is_editable(gv_name, 'Type change'):
        return
    old_value = self.model.global_variable_manager.get_representation(gv_name)

    # check if valid data type string
    try:
        new_data_type = type_helpers.convert_string_to_type(new_data_type_as_string)
    except (AttributeError, ValueError) as e:
        logger.error("Could not change data type to '{0}': {1}".format(new_data_type_as_string, e))
        return
    assert isinstance(new_data_type, type)

    # convert old value
    if issubclass(new_data_type, type(None)):
        # NoneType keeps the old value untouched
        new_value = old_value
    else:  # new_data_type in [str, float, int, list, dict, tuple, bool]:
        try:
            new_value = new_data_type(old_value)
        except (ValueError, TypeError) as e:
            # conversion failed -> fall back to the type's default value
            new_value = new_data_type()
            logger.warning("Old value '{}' of global variable '{}' could not be parsed to new type '{}' and is "
                           "therefore resetted: {}".format(old_value, gv_name, new_data_type.__name__, e))

    # set value in global variable manager
    try:
        self.model.global_variable_manager.set_variable(gv_name, new_value, data_type=new_data_type)
    except (ValueError, RuntimeError, TypeError) as e:
        logger.error("Could not set new value unexpected failure '{0}' to value '{1}' -> Exception: {2}"
                     "".format(gv_name, new_value, e))
3.226514
3.135681
1.028968
def assign_notification_from_gvm(self, model, prop_name, info):
    """Handle a gtkmvc3 notification from the global variable manager.

    Calls update of whole list store in case a new variable was added or one
    was deleted. Avoids running updates without reasonable change. Holds the
    tree store and updates single row elements if the is-locked flag or the
    global variable value changes.

    :param model: The observed global variable manager model
    :param prop_name: The property that was changed
    :param info: Information about the change (method name, args, kwargs, result)
    """
    # NOTE(review): `info['result'] is Exception` only matches the Exception
    # class object itself, never an exception *instance* -- presumably an
    # isinstance check was intended; confirm against the notification contract.
    if info['method_name'] in ['set_locked_variable'] or info['result'] is Exception:
        return

    if info['method_name'] in ['lock_variable', 'unlock_variable']:
        # key may arrive positionally (args[1]) or as keyword
        key = info.kwargs.get('key', info.args[1]) if len(info.args) > 1 else info.kwargs['key']
        if key in self.list_store_iterators:
            gv_row_path = self.list_store.get_path(self.list_store_iterators[key])
            self.list_store[gv_row_path][self.IS_LOCKED_AS_STRING_STORAGE_ID] = \
                str(self.model.global_variable_manager.is_locked(key))
    elif info['method_name'] in ['set_variable', 'delete_variable']:
        if info['method_name'] == 'set_variable':
            key = info.kwargs.get('key', info.args[1]) if len(info.args) > 1 else info.kwargs['key']
            if key in self.list_store_iterators:
                # known variable -> update the affected row in place
                gv_row_path = self.list_store.get_path(self.list_store_iterators[key])
                self.list_store[gv_row_path][self.VALUE_AS_STRING_STORAGE_ID] = \
                    str(self.model.global_variable_manager.get_representation(key))
                self.list_store[gv_row_path][self.DATA_TYPE_AS_STRING_STORAGE_ID] = \
                    self.model.global_variable_manager.get_data_type(key).__name__
                return
        # new or deleted variable -> rebuild the whole list store
        self.update_global_variables_list_store()
    else:
        logger.warning('Notification that is not handled')
2.681235
2.441365
1.098252
def update_global_variables_list_store(self):
    """Rebuild the global variable list store from scratch.

    Triggered after creation or deletion of a variable has taken place.
    Re-populates the rows in alphabetical key order and refreshes the
    key-to-iterator lookup table.
    """
    gvm = self.model.global_variable_manager
    self.list_store_iterators = {}
    self.list_store.clear()
    for key in sorted(gvm.get_all_keys()):
        row_iter = self.list_store.append([key,
                                           gvm.get_data_type(key).__name__,
                                           str(gvm.get_representation(key)),
                                           str(gvm.is_locked(key))])
        self.list_store_iterators[key] = row_iter
3.137583
3.110155
1.008819
def convert_libraries_in_path(config_path, lib_path, target_path=None):
    """This function resaves all libraries found at the specified path

    Recurses into sub folders until a folder containing a statemachine file
    is found, which is then converted in place or into the mirrored
    target_path tree.

    :param config_path: path to the configuration used for the conversion
    :param lib_path: the path to look for libraries
    :param target_path: optional destination root; the library tree is mirrored below it
    :return:
    """
    for lib in os.listdir(lib_path):
        if os.path.isdir(os.path.join(lib_path, lib)) and not '.' == lib[0]:
            # a folder holding a statemachine.yaml/json is a state machine root -> convert it
            if os.path.exists(os.path.join(os.path.join(lib_path, lib), "statemachine.yaml")) or \
                    os.path.exists(os.path.join(os.path.join(lib_path, lib), "statemachine.json")):
                if not target_path:
                    convert(config_path, os.path.join(lib_path, lib))
                else:
                    convert(config_path, os.path.join(lib_path, lib), os.path.join(target_path, lib))
            # otherwise recurse into the sub folder
            else:
                if not target_path:
                    convert_libraries_in_path(config_path, os.path.join(lib_path, lib))
                else:
                    convert_libraries_in_path(config_path, os.path.join(lib_path, lib),
                                              os.path.join(target_path, lib))
        else:
            # hidden folders (leading dot) are skipped on purpose
            if os.path.isdir(os.path.join(lib_path, lib)) and '.' == lib[0]:
                logger.debug("lib_root_path/lib_path .*-folder are ignored if within lib_path, "
                             "e.g. -> {0} -> full path is {1}".format(lib, os.path.join(lib_path, lib)))
2.171739
2.192644
0.990466
def get_view_for_id(self, view_class, element_id, parent_item=None):
    """Search and return the view of the given type whose model carries the given id.

    :param view_class: The view type to search for
    :param element_id: The id of element of the searched view
    :param gaphas.item.Item parent_item: Restrict the search to this parent item
    :return: The view for the given id or None if not found
    """
    from rafcon.gui.mygaphas.items.state import StateView
    from rafcon.gui.mygaphas.items.connection import DataFlowView, TransitionView

    # either search globally or only within the given parent
    candidates = self.get_all_items() if parent_item is None else self.get_children(parent_item)

    for candidate in candidates:
        if view_class is StateView and isinstance(candidate, StateView):
            if candidate.model.state.state_id == element_id:
                return candidate
        elif view_class is TransitionView and isinstance(candidate, TransitionView):
            if candidate.model.transition.transition_id == element_id:
                return candidate
        elif view_class is DataFlowView and isinstance(candidate, DataFlowView):
            if candidate.model.data_flow.data_flow_id == element_id:
                return candidate
    return None
2.647126
2.553217
1.036781
def wait_for_update(self, trigger_update=False):
    """Update canvas and handle all events in the gtk queue

    :param bool trigger_update: Whether to call update_now() or not
    """
    if trigger_update:
        self.update_now()

    from gi.repository import Gtk
    from gi.repository import GLib
    from threading import Event
    event = Event()

    # Handle all events from gaphas, but not from gtkmvc3
    # Make use of the priority, which is higher for gaphas then for gtkmvc3
    def priority_handled(event):
        event.set()
    # NOTE(review): under Python 3 this division yields a float while GLib
    # priorities are integers -- confirm GLib.idle_add accepts it.
    priority = (GLib.PRIORITY_HIGH_IDLE + GLib.PRIORITY_DEFAULT_IDLE) / 2
    # idle_add is necessary here, as we do not want to block the user from interacting with the GUI
    # while gaphas is redrawing
    GLib.idle_add(priority_handled, event, priority=priority)
    # drive the GTK main loop until our idle handler ran, i.e. the queue is drained
    while not event.is_set():
        Gtk.main_iteration()
6.926869
6.535037
1.059959
def _get_value(self):
    """Return the handle point transformed into the target item's coordinate system.

    The transformed coordinates are cached in ``self._px``/``self._py`` and
    returned as an (x, y) tuple.
    """
    source_item = self._item_point
    i2i_matrix = source_item.canvas.get_matrix_i2i(source_item, self._item_target)
    px, py = i2i_matrix.transform_point(self._point.x, self._point.y)
    self._px, self._py = px, py
    return px, py
6.491399
5.550013
1.169619
def convert_string_to_type(string_value):
    """Converts a string into a type or class

    :param string_value: the string to be converted, e.g. "int"; a type/class
        passed in directly is returned unchanged
    :return: The type derived from string_value, e.g. int
    :raises ValueError: if no type or class can be derived from the string
    """
    # "None" and "NoneType" both map to the NoneType
    if string_value in ['None', type(None).__name__]:
        return type(None)
    # If the parameter is already a type or class, return it
    if isinstance(string_value, type) or isclass(string_value):
        return string_value

    # Get object associated with string
    # First check whether we are having a built in type (int, str, etc)
    if sys.version_info >= (3,):
        import builtins as builtins23
    else:
        import __builtin__ as builtins23
    if hasattr(builtins23, string_value):
        obj = getattr(builtins23, string_value)
        if type(obj) is type:
            return obj

    # If not, try to locate the module via its dotted path
    try:
        obj = locate(string_value)
    except ErrorDuringImport as e:
        raise ValueError("Unknown type '{0}'".format(e))

    # Check whether the located object is a type or a class
    # (bug fix: return the already located object instead of calling the
    # expensive locate() a second time)
    if type(obj) is type or isclass(obj):
        return obj

    # Raise error if none is the case
    raise ValueError("Unknown type '{0}'".format(string_value))
3.601211
3.490487
1.031722
def convert_string_value_to_type_value(string_value, data_type):
    """Helper function to convert a given string to a given data type

    :param str string_value: the string to convert
    :param type data_type: the target data type
    :return: the converted value
    :raises AttributeError: if the conversion fails
    """
    from ast import literal_eval
    try:
        if data_type in (str, type(None)):
            converted_value = str(string_value)
        elif data_type == int:
            converted_value = int(string_value)
        elif data_type == float:
            converted_value = float(string_value)
        elif data_type == bool:
            converted_value = bool(literal_eval(string_value))
        elif data_type in (list, dict, tuple):
            converted_value = literal_eval(string_value)
            # literal_eval may yield any literal type -> enforce the requested one
            if type(converted_value) != data_type:
                raise ValueError("Invalid syntax: {0}".format(string_value))
        elif data_type == object:
            try:
                converted_value = literal_eval(string_value)
            except (ValueError, SyntaxError):
                # treat unparsable input as a plain string literal
                converted_value = literal_eval('"' + string_value + '"')
        elif isinstance(data_type, type):  # Try native type conversion
            converted_value = data_type(string_value)
        elif isclass(data_type):  # Call class constructor
            converted_value = data_type(string_value)
        else:
            # bug fix: the second placeholder was '{0}' as well, so the data
            # type name never showed up in the message
            raise ValueError("No conversion from string '{0}' to data type '{1}' defined".format(
                string_value, data_type.__name__))
    except (ValueError, SyntaxError, TypeError) as e:
        raise AttributeError("Can't convert '{0}' to type '{1}': {2}".format(string_value, data_type.__name__, e))
    return converted_value
2.046653
2.08637
0.980963
def type_inherits_of_type(inheriting_type, base_type):
    """Checks whether inheriting_type inherits from base_type

    :param type inheriting_type: the type whose ancestry is examined
    :param type base_type: the candidate base type
    :return: True if base_type is a (possibly indirect) base of inheriting_type
    """
    assert isinstance(inheriting_type, type) or isclass(inheriting_type)
    assert isinstance(base_type, type) or isclass(base_type)
    if inheriting_type == base_type:
        return True
    # bug fix: walk *all* base classes; the old code returned False for any
    # class using multiple inheritance, even when one base matched.
    # any() over an empty __bases__ (i.e. at `object`) terminates the recursion.
    return any(type_inherits_of_type(base, base_type) for base in inheriting_type.__bases__)
1.840659
2.069887
0.889256
def clear_results_db(session):
    """Used to clear the result tables in the OEDB.

    Caution! This deletes EVERY RESULT SET!

    Asks for interactive confirmation twice before deleting, then removes all
    rows from every result table and commits.

    :param session: SQLAlchemy session connected to the OEDB
    """
    from egoio.db_tables.model_draft import EgoGridPfHvResultBus as BusResult,\
        EgoGridPfHvResultBusT as BusTResult,\
        EgoGridPfHvResultStorage as StorageResult,\
        EgoGridPfHvResultStorageT as StorageTResult,\
        EgoGridPfHvResultGenerator as GeneratorResult,\
        EgoGridPfHvResultGeneratorT as GeneratorTResult,\
        EgoGridPfHvResultLine as LineResult,\
        EgoGridPfHvResultLineT as LineTResult,\
        EgoGridPfHvResultLoad as LoadResult,\
        EgoGridPfHvResultLoadT as LoadTResult,\
        EgoGridPfHvResultTransformer as TransformerResult,\
        EgoGridPfHvResultTransformerT as TransformerTResult,\
        EgoGridPfHvResultMeta as ResultMeta
    # double interactive confirmation before anything is deleted
    print('Are you sure that you want to clear all results in the OEDB?')
    choice = ''
    while choice not in ['y', 'n']:
        choice = input('(y/n): ')
    if choice == 'y':
        print('Are you sure?')
        choice2 = ''
        while choice2 not in ['y', 'n']:
            choice2 = input('(y/n): ')
        if choice2 == 'y':
            print('Deleting all results...')
            session.query(BusResult).delete()
            session.query(BusTResult).delete()
            session.query(StorageResult).delete()
            session.query(StorageTResult).delete()
            session.query(GeneratorResult).delete()
            session.query(GeneratorTResult).delete()
            session.query(LoadResult).delete()
            session.query(LoadTResult).delete()
            session.query(LineResult).delete()
            session.query(LineTResult).delete()
            session.query(TransformerResult).delete()
            session.query(TransformerTResult).delete()
            session.query(ResultMeta).delete()
            session.commit()
        else:
            print('Deleting aborted!')
    else:
        print('Deleting aborted!')
2.412887
2.031622
1.187665
def run_sql_script(conn, scriptname='results_md2grid.sql'):
    """This function runs .sql scripts in the folder 'sql_scripts'

    :param conn: SQLAlchemy connection the script is executed on
    :param str scriptname: file name of the script inside 'sql_scripts'
    :return: None
    """
    script_dir = os.path.abspath(
        os.path.join(os.path.dirname(__file__), 'sql_scripts'))
    # bug fix: close the file handle instead of leaking it
    with open(os.path.join(script_dir, scriptname)) as script_file:
        script_str = script_file.read()
    conn.execution_options(autocommit=True).execute(script_str)
    return
2.306512
2.295202
1.004928
def extension(network, session, version, scn_extension, start_snapshot, end_snapshot, **kwargs):
    """Add an additional (overlay) network to the existing network container.

    The new network can include every PyPSA-component (e.g. buses, lines,
    links). To connect it to the existing network, transformers are needed.
    All components and their timeseries need to be present in the fitting
    'model_draft.ego_grid_pf_hv_extension_' tables, with scn_name labeled
    'extension_' + scn_name (e.g. 'extension_nep2035'). Available scenarios so
    far: 'nep2035_confirmed', 'nep2035_b2' and 'BE_NO_NEP 2035'.

    Parameters
    ----------
    network : The existing network container (e.g. scenario 'NEP 2035')
    session : session-data
    version : grid version; None selects the model_draft tables
    scn_extension : Name of the additional scenario (WITHOUT 'extension_')
    start_snapshot, end_snapshot : Simulation time

    Returns
    -------
    network : Network container including existing and additional network
    """
    if version is None:
        ormcls_prefix = 'EgoGridPfHvExtension'
    else:
        ormcls_prefix = 'EgoPfHvExtension'

    # Adding overlay-network to existing network
    scenario = NetworkScenario(session,
                               version=version,
                               prefix=ormcls_prefix,
                               method=kwargs.get('method', 'lopf'),
                               start_snapshot=start_snapshot,
                               end_snapshot=end_snapshot,
                               scn_name='extension_' + scn_extension)
    network = scenario.build_network(network)

    # Allow lossless links to conduct bidirectional
    network.links.loc[network.links.efficiency == 1.0, 'p_min_pu'] = -1

    # Set coordinates for new buses (extracted from their WKB/WKT geometry)
    extension_buses = network.buses[network.buses.scn_name == 'extension_' + scn_extension]
    for idx, row in extension_buses.iterrows():
        wkt_geom = to_shape(row['geom'])
        network.buses.loc[idx, 'x'] = wkt_geom.x
        network.buses.loc[idx, 'y'] = wkt_geom.y

    return network
6.186745
4.959028
1.247572
def decommissioning(network, session, args, **kwargs):
    """Remove components of a decommissioning-scenario from the network container.

    Currently, only lines can be decommissioned. All components of the
    decommissioning scenario need to be present in the fitting
    'model_draft.ego_grid_pf_hv_extension_' table, with scn_name labeled
    'decommissioning_' + scn_name (e.g. 'decommissioning_nep2035').

    Parameters
    ----------
    network : The existing network container (e.g. scenario 'NEP 2035')
    session : session-data
    args : settings dict; reads 'gridversion', 'scn_decommissioning' and
        'branch_capacity_factor'

    Returns
    -------
    network : Network container with decommissioned lines removed
    """
    if args['gridversion'] == None:
        ormclass = getattr(import_module('egoio.db_tables.model_draft'),
                           'EgoGridPfHvExtensionLine')
    else:
        ormclass = getattr(import_module('egoio.db_tables.grid'),
                           'EgoPfHvExtensionLine')

    query = session.query(ormclass).filter(
        ormclass.scn_name == 'decommissioning_' + args['scn_decommissioning'])

    df_decommisionning = pd.read_sql(query.statement,
                                     session.bind,
                                     index_col='line_id')
    df_decommisionning.index = df_decommisionning.index.astype(str)

    # scale s_nom_min of extension lines that replace a decommissioned line,
    # depending on the voltage level of the decommissioned line
    for idx, row in network.lines.iterrows():
        if (row['s_nom_min'] != 0) & (
                row['scn_name'] == 'extension_' + args['scn_decommissioning']):
            v_nom_dec = df_decommisionning['v_nom'][(
                df_decommisionning.project == row['project']) & (
                df_decommisionning.project_id == row['project_id'])]
            # NOTE(review): the right-hand side multiplies the *whole*
            # s_nom_min series, and chained indexing may not write back --
            # presumably `... * row['s_nom_min']` via .loc was intended; verify.
            if (v_nom_dec == 110).any():
                network.lines.s_nom_min[network.lines.index == idx] \
                    = args['branch_capacity_factor']['HV'] * network.lines.s_nom_min
            else:
                network.lines.s_nom_min[network.lines.index == idx] = \
                    args['branch_capacity_factor']['eHV'] * network.lines.s_nom_min

    # Drop decommissioning-lines from existing network
    network.lines = network.lines[~network.lines.index.isin(df_decommisionning.index)]

    return network
4.704024
3.906228
1.204237
def distance(x0, x1, y0, y1):
    """Calculate the square of the distance between two points (Pythagoras).

    Parameters
    ----------
    x0 : x - coordinate of point 0
    x1 : x - coordinate of point 1
    y0 : y - coordinate of point 0
    y1 : y - coordinate of point 1

    Returns
    -------
    distance : float
        square of distance (no square root is taken)
    """
    dx = x1.values - x0.values
    dy = y1.values - y0.values
    return dx * dx + dy * dy
3.430827
3.683234
0.931471
def calc_nearest_point(bus1, network):
    """Find the geographically nearest bus in a network relative to a given bus.

    Buses that are directly connected to bus1 via lines or links (and bus1
    itself) are excluded from the search.

    Parameters
    ----------
    bus1 : float
        id of bus
    network : Pypsa network container
        network including the comparable buses

    Returns
    -------
    bus0 : float
        bus_id of nearest point
    """
    bus1_index = network.buses.index[network.buses.index == bus1]

    # collect bus1 itself plus every bus directly connected to it -- these
    # may not be chosen as "nearest" neighbour
    forbidden_buses = np.append(
        bus1_index.values, network.lines.bus1[network.lines.bus0 == bus1].values)
    forbidden_buses = np.append(
        forbidden_buses, network.lines.bus0[network.lines.bus1 == bus1].values)
    forbidden_buses = np.append(
        forbidden_buses, network.links.bus0[network.links.bus1 == bus1].values)
    forbidden_buses = np.append(
        forbidden_buses, network.links.bus1[network.links.bus0 == bus1].values)

    x0 = network.buses.x[network.buses.index.isin(bus1_index)]
    y0 = network.buses.y[network.buses.index.isin(bus1_index)]

    comparable_buses = network.buses[~network.buses.index.isin(forbidden_buses)]
    x1 = comparable_buses.x
    y1 = comparable_buses.y

    # squared euclidean distance in coordinate space (no square root needed
    # for finding the minimum)
    distance = (x1.values - x0.values) * (x1.values - x0.values) + \
        (y1.values - y0.values) * (y1.values - y0.values)
    min_distance = distance.min()

    # select the bus at minimal distance; ties are resolved to the max index
    bus0 = comparable_buses[(((x1.values - x0.values) * (x1.values - x0.values
                                                        ) + (y1.values - y0.values) * (y1.values - y0.values)) == min_distance)]
    bus0 = bus0.index[bus0.index == bus0.index.max()]
    bus0 = ''.join(bus0.values)

    return bus0
1.983712
1.969637
1.007146
def map_ormclass(self, name):
    """Populate the ``_mapped`` attribute with the orm class for *name*.

    Prints a warning instead of raising when the relation does not exist.

    Parameters
    ----------
    name : str
        Component part of orm class name. Concatenated with _prefix.
    """
    qualified_name = self._prefix + name
    if hasattr(self._pkg, qualified_name):
        self._mapped[name] = getattr(self._pkg, qualified_name)
    else:
        print('Warning: Relation %s does not exist.' % name)
7.450035
5.081833
1.466013
def configure_timeindex(self):
    """Construct a DateTimeIndex with the queried temporal resolution,
    start- and end_snapshot and store it in ``self.timeindex``.
    """
    try:
        ormclass = self._mapped['TempResolution']
        if self.version:
            tr = self.session.query(ormclass).filter(
                ormclass.temp_id == self.temp_id).filter(
                ormclass.version == self.version).one()
        else:
            tr = self.session.query(ormclass).filter(
                ormclass.temp_id == self.temp_id).one()
    except (KeyError, NoResultFound):
        print('temp_id %s does not exist.' % self.temp_id)

    # bug fix: the DatetimeIndex(start=..., periods=..., freq=...) constructor
    # was deprecated in pandas 0.24 and removed in 1.0; pd.date_range is the
    # documented, behavior-identical replacement
    timeindex = pd.date_range(start=tr.start_time,
                              periods=tr.timesteps,
                              freq=tr.resolution)

    # snapshots are 1-based in the database, hence the -1
    self.timeindex = timeindex[self.start_snapshot - 1: self.end_snapshot]
3.908154
3.389949
1.152865
def fetch_by_relname(self, name):
    """Construct DataFrame with component data from filtered table data.

    Parameters
    ----------
    name : str
        Component name.

    Returns
    -------
    pd.DataFrame
        Component data.
    """
    ormclass = self._mapped[name]
    query = self.session.query(ormclass)

    # the carrier table is scenario independent; all others are filtered
    if name != carr_ormclass:
        query = query.filter(ormclass.scn_name == self.scn_name)

    if self.version:
        query = query.filter(ormclass.version == self.version)

    # TODO: Naming is not consistent. Change in database required.
    if name == 'Transformer':
        name = 'Trafo'

    df = pd.read_sql(query.statement,
                     self.session.bind,
                     index_col=name.lower() + '_id')

    if name == 'Link':
        # bus ids arrive with a non-int dtype -> normalize for PyPSA
        df['bus0'] = df.bus0.astype(int)
        df['bus1'] = df.bus1.astype(int)

    if 'source' in df:
        df.source = df.source.map(self.id_to_source())

    return df
4.71945
4.831366
0.976836
def series_fetch_by_relname(self, name, column):
    """Construct DataFrame with component timeseries data from filtered table data.

    Parameters
    ----------
    name : str
        Component name.
    column : str
        Component field with timevarying data.

    Returns
    -------
    pd.DataFrame
        Component data.
    """
    ormclass = self._mapped[name]

    # TODO: This is implemented in a not very robust way.
    # derive the id column from the first CamelCase token of the component
    # name, e.g. 'BusT' -> 'bus_id'
    id_column = re.findall(r'[A-Z][^A-Z]*', name)[0] + '_' + 'id'
    id_column = id_column.lower()

    # slice the array-valued column to the requested snapshot window directly
    # in SQL
    query = self.session.query(
        getattr(ormclass, id_column),
        getattr(ormclass, column)[self.start_snapshot: self.end_snapshot].
        label(column)).filter(and_(
            ormclass.scn_name == self.scn_name,
            ormclass.temp_id == self.temp_id))

    if self.version:
        query = query.filter(ormclass.version == self.version)

    df = pd.io.sql.read_sql(query.statement,
                            self.session.bind,
                            columns=[column],
                            index_col=id_column)

    df.index = df.index.astype(str)

    # change of format to fit pypsa (snapshots as rows, components as columns)
    df = df[column].apply(pd.Series).transpose()

    try:
        assert not df.empty
        df.index = self.timeindex
    except AssertionError:
        print("No data for %s in column %s." % (name, column))

    return df
4.227688
4.321485
0.978295
def build_network(self, network=None, *args, **kwargs):
    """Core method to construct PyPSA Network object.

    Imports all configured components (and their timeseries) from the mapped
    orm classes into a new or given PyPSA network and stores it in
    ``self.network``.

    :param network: optional existing PyPSA network to extend; a new one with
        this scenario's snapshots is created otherwise
    :return: the populated PyPSA network
    """
    # TODO: build_network takes care of divergences in database design and
    # future PyPSA changes from PyPSA's v0.6 on. This concept should be
    # replaced, when the oedb has a revision system in place, because
    # sometime this will break!!!
    if network != None:
        network = network
    else:
        network = pypsa.Network()
        network.set_snapshots(self.timeindex)

    timevarying_override = False

    # column renames that adapt the database naming to the PyPSA version in use
    if pypsa.__version__ == '0.11.0':
        old_to_new_name = {'Generator': {'p_min_pu_fixed': 'p_min_pu',
                                         'p_max_pu_fixed': 'p_max_pu',
                                         'source': 'carrier',
                                         'dispatch': 'former_dispatch'},
                           'Bus': {'current_type': 'carrier'},
                           'Transformer': {'trafo_id': 'transformer_id'},
                           'Storage': {'p_min_pu_fixed': 'p_min_pu',
                                       'p_max_pu_fixed': 'p_max_pu',
                                       'soc_cyclic': 'cyclic_state_of_charge',
                                       'soc_initial': 'state_of_charge_initial',
                                       'source': 'carrier'}}
        timevarying_override = True
    else:
        old_to_new_name = {'Storage': {'soc_cyclic': 'cyclic_state_of_charge',
                                       'soc_initial': 'state_of_charge_initial'}}

    for comp, comp_t_dict in self.config.items():
        # TODO: This is confusing, should be fixed in db
        pypsa_comp_name = 'StorageUnit' if comp == 'Storage' else comp

        df = self.fetch_by_relname(comp)

        if comp in old_to_new_name:
            tmp = old_to_new_name[comp]
            df.rename(columns=tmp, inplace=True)

        network.import_components_from_dataframe(df, pypsa_comp_name)

        if comp_t_dict:
            for comp_t, columns in comp_t_dict.items():
                for col in columns:
                    df_series = self.series_fetch_by_relname(comp_t, col)

                    # TODO: VMagPuSet is not implemented.
                    # for fixed-dispatch generators the timeseries must not
                    # override the static value (PyPSA 0.11.0 only)
                    if timevarying_override and comp == 'Generator' \
                            and not df_series.empty:
                        idx = df[df.former_dispatch == 'flexible'].index
                        idx = [i for i in idx if i in df_series.columns]
                        df_series.drop(idx, axis=1, inplace=True)

                    try:
                        pypsa.io.import_series_from_dataframe(
                            network, df_series, pypsa_comp_name, col)
                    except (ValueError, AttributeError):
                        print("Series %s of component %s could not be "
                              "imported" % (col, pypsa_comp_name))

    # populate carrier attribute in PyPSA network
    network.import_components_from_dataframe(
        self.fetch_by_relname(carr_ormclass), 'Carrier')

    self.network = network

    return network
4.603702
4.466286
1.030767
def run(self):
    """This defines the sequence of actions that are taken when the preemptive
    concurrency state is executed

    Starts all child states concurrently, waits for the first one to finish,
    preempts and joins the remaining ones and finalizes with the transition
    derived from the finishing state's outcome.

    :return:
    """
    logger.debug("Starting execution of {0}{1}".format(self, " (backwards)" if self.backward_execution else ""))
    self.setup_run()

    try:
        concurrency_history_item = self.setup_forward_or_backward_execution()
        concurrency_queue = self.start_child_states(concurrency_history_item)

        #######################################################
        # wait for the first threads to finish
        #######################################################
        finished_thread_id = concurrency_queue.get()
        finisher_state = self.states[finished_thread_id]
        finisher_state.join()

        # preempt all child states
        if not self.backward_execution:
            for state_id, state in self.states.items():
                state.recursively_preempt_states()

        # join all states
        for history_index, state in enumerate(self.states.values()):
            self.join_state(state, history_index, concurrency_history_item)
            self.add_state_execution_output_to_scoped_data(state.output_data, state)
            self.update_scoped_variables_with_output_dictionary(state.output_data, state)

        # add the data of the first state now to overwrite data of the preempted states
        self.add_state_execution_output_to_scoped_data(finisher_state.output_data, finisher_state)
        self.update_scoped_variables_with_output_dictionary(finisher_state.output_data, finisher_state)

        #######################################################
        # handle backward execution case
        #######################################################
        if self.states[finished_thread_id].backward_execution:
            return self.finalize_backward_execution()
        else:
            self.backward_execution = False

        #######################################################
        # handle no transition
        #######################################################
        transition = self.get_transition_for_outcome(self.states[finished_thread_id],
                                                     self.states[finished_thread_id].final_outcome)
        if transition is None:
            # final outcome is set here
            transition = self.handle_no_transition(self.states[finished_thread_id])
        # it the transition is still None, then the state was preempted or aborted, in this case return
        if transition is None:
            self.output_data["error"] = RuntimeError("state aborted")
        else:
            if 'error' in self.states[finished_thread_id].output_data:
                self.output_data["error"] = self.states[finished_thread_id].output_data['error']
            self.final_outcome = self.outcomes[transition.to_outcome]
        return self.finalize_concurrency_state(self.final_outcome)

    except Exception as e:
        logger.error("{0} had an internal error: {1}\n{2}".format(self, str(e), str(traceback.format_exc())))
        self.output_data["error"] = e
        self.state_execution_status = StateExecutionStatus.WAIT_FOR_NEXT_STATE
        return self.finalize(Outcome(-1, "aborted"))
3.731566
3.680545
1.013862
def _check_transition_validity(self, check_transition):
    """Check transition validity for a preemptive concurrency state.

    On top of the ContainerState conditions (checked by the super class),
    only transitions leading back to the parent state itself are permitted;
    start transitions are forbidden in the ConcurrencyState.

    :param check_transition: the transition to check for validity
    :return: (validity, message) tuple
    """
    valid, message = super(PreemptiveConcurrencyState, self)._check_transition_validity(check_transition)
    if not valid:
        return False, message
    if check_transition.to_state == self.state_id:
        return True, message
    # Only transitions to the parent state are allowed
    return False, "Only transitions to the parent state are allowed"
4.646904
4.052197
1.146761
def prepare_destruction(self):
    """Prepares the model for destruction

    Un-registers itself as observer from the state machine and the root state
    and tears down any left-over active action.
    """
    try:
        self.relieve_model(self.state_machine_model)
        assert self.__buffered_root_state_model is self.state_machine_model.root_state
        self.relieve_model(self.__buffered_root_state_model)
        self.state_machine_model = None
        self.__buffered_root_state_model = None
        self.modifications.prepare_destruction()
    except KeyError:  # Might happen if the observer was already unregistered
        pass
    if self.active_action:
        try:
            self.active_action.prepare_destruction()
        except Exception as e:
            logger.exception("The modification history has had left over an active-action and "
                             "could not destroy it {0}.".format(e))
        # always drop the reference, even if destruction of the action failed
        self.active_action = None
5.79204
5.121108
1.131013
def recover_specific_version(self, pointer_on_version_to_recover):
    """Recovers a specific version of the all_time_history element by doing
    several undos and redos.

    :param pointer_on_version_to_recover: the id of the list element which is to recover
    :return:
    """
    # search for traceable path -> list of action to undo and list of action to redo
    logger.info("Going to history status #{0}".format(pointer_on_version_to_recover))
    undo_redo_list = self.modifications.get_undo_redo_list_from_active_trail_history_item_to_version_id(
        pointer_on_version_to_recover)
    logger.debug("Multiple undo and redo to reach modification history element of version {0} "
                 "-> undo-redo-list is: {1}".format(pointer_on_version_to_recover, undo_redo_list))
    # the storage lock guards the whole replay so no save happens in between
    # logger.debug("acquire lock 1 - for multiple action {0}".format(self.modifications.trail_pointer))
    self.state_machine_model.storage_lock.acquire()
    # logger.debug("acquired lock 1 - for multiple action {0}".format(self.modifications.trail_pointer))
    for elem in undo_redo_list:
        if elem[1] == 'undo':
            # do undo
            self._undo(elem[0])
        else:
            # do redo
            self._redo(elem[0])
    self.modifications.reorganize_trail_history_for_version_id(pointer_on_version_to_recover)
    self.change_count += 1
    # logger.debug("release lock 1 - for multiple action {0}".format(self.modifications.trail_pointer))
    self.state_machine_model.storage_lock.release()
3.948186
3.862692
1.022133
def assign_notification_states_after(self, model, prop_name, info):
    """This method is called, when any state, transition, data flow, etc. within
    the state machine modifications. This then typically requires a redraw of the
    graphical editor, to display these modifications immediately.

    :param model: The state machine model
    :param prop_name: The property that was changed
    :param info: Information about the change
    """
    # logger.verbose("states_after: " + str(NotificationOverview(info, False, self.__class__.__name__)))
    # ignore notifications while busy or those triggered by execution itself
    if self.busy or info.method_name == 'state_change' and \
            info.kwargs.prop_name == 'state' and \
            info.kwargs.method_name in BY_EXECUTION_TRIGGERED_OBSERVABLE_STATE_METHODS:
        return
    else:
        # logger.debug("History states_AFTER")  # \n%s \n%s \n%s" % (model, prop_name, info))

        # avoid to vast computation time
        if 'kwargs' in info and 'method_name' in info['kwargs'] and \
                info['kwargs']['method_name'] in BY_EXECUTION_TRIGGERED_OBSERVABLE_STATE_METHODS:
            return

        overview = NotificationOverview(info, self.with_verbose, self.__class__.__name__)

        # handle interrupts of action caused by exceptions
        if overview['result'][-1] == "CRASH in FUNCTION" or isinstance(overview['result'][-1], Exception):
            if self.count_before == 1:
                return self._interrupt_active_action(info)
            pass

        # modifications of parent are not observed
        if not overview['method_name'][0] == 'state_change' or overview['method_name'][-1] == 'parent':
            return

        # logger.debug("History states_AFTER {0}".format(overview))

        # decrease counter and finish action if count_before = 0
        if self.locked:
            self.after_count()
            if self.count_before == 0:
                self.finish_new_action(overview)
                if self.with_verbose:
                    logger.verbose("HISTORY COUNT WAS OF SUCCESS")
        else:
            logger.error("HISTORY after not count [states] -> For every before there should be a after.")
8.167478
8.238979
0.991322
def get_undo_redo_list_from_active_trail_history_item_to_version_id(self, version_id):
    """Perform fast search from the currently active branch to a specific
    version_id and collect all recovery steps.

    :param version_id: id of the target version to reach
    :return: list of (version_id, 'undo'|'redo') tuples to be executed in order
    """
    all_trail_action = [a.version_id for a in self.single_trail_history() if a is not None]
    all_active_action = self.get_all_active_actions()
    undo_redo_list = []
    _undo_redo_list = []

    intermediate_version_id = version_id
    if self.with_verbose:
        logger.verbose("Version_id : {0} in".format(intermediate_version_id))
        logger.verbose("Active actions: {0} in: {1}".format(all_active_action,
                                                            intermediate_version_id in all_active_action))
        logger.verbose("Trail actions : {0} in: {1}".format(all_trail_action,
                                                            intermediate_version_id in all_trail_action))

    if intermediate_version_id not in all_trail_action:
        # get undo to come from version_id to trail_action
        # (walk backwards until the trail is reached; remember redos to replay afterwards)
        while intermediate_version_id not in all_trail_action:
            _undo_redo_list.insert(0, (intermediate_version_id, 'redo'))
            intermediate_version_id = self.all_time_history[intermediate_version_id].prev_id
        intermediate_goal_version_id = intermediate_version_id
    else:
        intermediate_goal_version_id = version_id
    intermediate_version_id = self.trail_history[self.trail_pointer].version_id

    if self.with_verbose:
        logger.verbose("Version_id : {0} {1}".format(intermediate_goal_version_id, intermediate_version_id))
        logger.verbose("Active actions: {0} in: {1}".format(all_active_action,
                                                            intermediate_version_id in all_active_action))
        logger.verbose("Trail actions : {0} in: {1}".format(all_trail_action,
                                                            intermediate_version_id in all_trail_action))

    # collect undo and redo on trail
    if intermediate_goal_version_id in all_active_action:
        # collect needed undo to reach intermediate version
        while not intermediate_version_id == intermediate_goal_version_id:
            undo_redo_list.append((intermediate_version_id, 'undo'))
            intermediate_version_id = self.all_time_history[intermediate_version_id].prev_id
    elif intermediate_goal_version_id in all_trail_action:
        # collect needed redo to reach intermediate version
        while not intermediate_version_id == intermediate_goal_version_id:
            intermediate_version_id = self.all_time_history[intermediate_version_id].next_id
            undo_redo_list.append((intermediate_version_id, 'redo'))

    # finally replay the redos that lead from the trail to the requested version
    for elem in _undo_redo_list:
        undo_redo_list.append(elem)
    return undo_redo_list
2.268956
2.256384
1.005572
def set_pane_position(self, config_id):
    """Adjust the position of a GTK pane from the runtime config file.

    Falls back to the pane's default position when no value was stored.

    :param config_id: The pane identifier saved in the runtime config file
    """
    fallback_position = constants.DEFAULT_PANE_POS[config_id]
    stored_position = global_runtime_config.get_config_value(config_id, fallback_position)
    self.view[constants.PANE_ID[config_id]].set_position(stored_position)
4.236751
4.413372
0.95998
def model_changed(self, model, prop_name, info):
    """Highlight buttons according actual execution status.

    Furthermore it triggers the label redraw of the active state machine.
    """
    # TODO: find nice solution
    # this is only required if the GUI is terminated via Ctrl+C signal
    if not self.view:  # this means that the main window is currently under destruction
        return
    execution_engine = rafcon.core.singleton.state_machine_execution_engine
    # strip the enum prefix so only the plain mode name is shown in the label
    label_string = str(execution_engine.status.execution_mode)
    label_string = label_string.replace("STATE_MACHINE_EXECUTION_STATUS.", "")
    self.view['execution_status_label'].set_text(label_string)
    current_execution_mode = execution_engine.status.execution_mode
    if current_execution_mode is StateMachineExecutionStatus.STARTED:
        self.view['step_buttons'].hide()
        self._set_single_button_active('button_start_shortcut')
    elif current_execution_mode is StateMachineExecutionStatus.PAUSED:
        self.view['step_buttons'].hide()
        self._set_single_button_active('button_pause_shortcut')
    elif execution_engine.finished_or_stopped():
        self.view['step_buttons'].hide()
        self._set_single_button_active('button_stop_shortcut')
    else:  # all step modes
        self.view['step_buttons'].show()
        self._set_single_button_active('button_step_mode_shortcut')
4.560692
4.220871
1.08051
def focus_notebook_page_of_controller(self, controller):
    """Put the focus on the notebook tab that belongs to the given child controller

    Implements the focus pattern of the notebooks in the left side-bar of the main window:

    * the Execution-History tab is brought to the top every time it is requested (on state-machine start
      and stop),
    * the Modification-History tab is brought to the top at most once while and once after execution.

    :param controller: The controller which requests to be focused.
    """
    # TODO think about to may substitute Controller- by View-objects it is may the better design
    if controller not in self.get_child_controllers():
        return
    # logger.info("focus controller {0}".format(controller))
    view_available = self.view is not None
    if view_available and isinstance(controller, ModificationHistoryTreeController) \
            and not self.modification_history_was_focused:
        self.view.bring_tab_to_the_top('history')
        self.modification_history_was_focused = True
    if view_available and isinstance(controller, ExecutionHistoryTreeController):
        self.view.bring_tab_to_the_top('execution_history')
        self.modification_history_was_focused = False
6.491298
5.421757
1.197268
def undock_sidebar(self, window_key, widget=None, event=None):
    """Undock/separate sidebar into independent window

    The sidebar is undocked and put into a separate new window. The sidebar is hidden in the main-window by
    triggering the method on_[widget_name]_hide_clicked(). Triggering this method shows the
    [widget_name]_return_button in the main-window, which does not serve any purpose when the bar is undocked.
    This button is therefore deliberately hidden. The undock button, which is also part of the sidebar, is
    hidden, because the re-dock button is included in the top_tool_bar of the newly opened window. Not hiding
    it would result in two re-dock buttons visible in the new window. The new window size and position are
    loaded from runtime_config, if they exist.

    :param str window_key: upper-case identifier of the sidebar window used in constants and the runtime config
    :param widget: unused; allows this method to be connected as a signal handler
    :param event: unused; allows this method to be connected as a signal handler
    """
    undocked_window_name = window_key.lower() + '_window'
    widget_name = window_key.lower()
    undocked_window_view = getattr(self.view, undocked_window_name)
    undocked_window = undocked_window_view.get_top_widget()
    if os.getenv("RAFCON_START_MINIMIZED", False):
        undocked_window.iconify()
    gui_helper_label.set_window_size_and_position(undocked_window, window_key)
    # move the sidebar widget from the main window into the new window
    self.view[widget_name].reparent(undocked_window_view['central_eventbox'])
    self.view['undock_{}_button'.format(widget_name)].hide()
    getattr(self, 'on_{}_hide_clicked'.format(widget_name))(None)
    self.view['{}_return_button'.format(widget_name)].hide()
    main_window = self.view.get_top_widget()
    # remember the handler id so redock_sidebar() can disconnect it again
    state_handler = main_window.connect('window-state-event', self.undock_window_callback, undocked_window)
    self.handler_ids[undocked_window_name] = {"state": state_handler}
    undocked_window.set_transient_for(main_window)
    main_window.grab_focus()
    global_runtime_config.set_config_value(window_key + '_WINDOW_UNDOCKED', True)
4.197514
3.753184
1.118387
def redock_sidebar(self, window_key, sidebar_name, controller_name, widget, event=None):
    """Redock/embed sidebar into main window

    The size & position of the open window are saved to the runtime_config file, the sidebar is redocked back
    to the main-window, and the left-bar window is hidden. The undock button of the bar is made visible again.

    :param str window_key: upper-case identifier of the sidebar window used in the runtime config
    :param str sidebar_name: name of the main-window container the sidebar widget is packed back into
    :param str controller_name: name of the child controller whose undocked window gets hidden
    :param widget: unused; allows this method to be connected as a signal handler
    :param event: unused; allows this method to be connected as a signal handler
    :return: True, to stop further propagation of the triggering signal
    """
    config_parameter_undocked = window_key + '_WINDOW_UNDOCKED'
    config_id_for_pane_position = window_key + '_DOCKED_POS'
    undocked_window_name = window_key.lower() + '_window'
    widget_name = window_key.lower()
    undocked_window_view = getattr(self.view, undocked_window_name)
    # disconnect the window-state handler installed by undock_sidebar()
    self.view['main_window'].disconnect(self.handler_ids[undocked_window_name]['state'])
    getattr(self, 'on_{}_return_clicked'.format(widget_name))(None)
    # move the sidebar widget back from the undocked window into the main window
    undocked_window_view['central_eventbox'].remove(self.view[widget_name])
    self.view[sidebar_name].pack_start(self.view[widget_name], True, True, 0)
    self.get_controller(controller_name).hide_window()
    self.view['undock_{}_button'.format(widget_name)].show()
    # restore the position of the pane
    self.set_pane_position(config_id_for_pane_position)
    global_runtime_config.set_config_value(config_parameter_undocked, False)
    return True
4.819772
4.671369
1.031769
def on_notebook_tab_switch(self, notebook, page, page_num, title_label, window, notebook_identifier):
    """Triggered whenever a left-bar notebook tab is changed.

    Updates the title of the corresponding notebook and updates the title of the left-bar window in case
    it is un-docked.

    :param notebook: The GTK notebook where a tab-change occurred
    :param page: The newly selected notebook page (unused)
    :param page_num: The page number of the currently-selected tab
    :param title_label: The label holding the notebook's title
    :param window: The left-bar window, for which the title should be changed
    :param notebook_identifier: A string identifying whether the notebook is the upper or the lower one
    """
    new_title = gui_helper_label.set_notebook_title(notebook, page_num, title_label)
    window.reset_title(new_title, notebook_identifier)
    self.on_switch_page_check_collapse_button(notebook, page_num)
7.939572
8.668283
0.915934
def _on_key_press(self, widget, event):
    """Updates the set of currently pressed keys

    In addition, the sidebars are toggled if <Ctrl><Tab> is pressed.

    :param Gtk.Widget widget: The main window
    :param Gdk.Event event: The key press event
    """
    self.currently_pressed_keys.add(event.keyval)
    tab_pressed = event.keyval in (Gdk.KEY_Tab, Gdk.KEY_ISO_Left_Tab)
    ctrl_held = bool(event.state & Gdk.ModifierType.CONTROL_MASK)
    if tab_pressed and ctrl_held:
        self.toggle_sidebars()
3.277093
2.604164
1.258405
def prepare_destruction(self):
    """Saves current configuration of windows and panes to the runtime config file, before RAFCON is closed."""
    plugins.run_hook("pre_destruction")
    logger.debug("Saving runtime config to {0}".format(global_runtime_config.config_file_path))
    # store pane last positions
    for key, widget_name in constants.PANE_ID.items():
        global_runtime_config.store_widget_properties(self.view[widget_name], key.replace('_POS', ''))
    # store hidden or undocked widget flags correctly -> make them independent for restoring
    for window_key in constants.UNDOCKABLE_WINDOW_KEYS:
        hidden = False
        # an undocked window is not considered hidden, regardless of its state in the main window
        if not global_runtime_config.get_config_value(window_key + "_WINDOW_UNDOCKED"):
            hidden = getattr(self, window_key.lower() + '_hidden')
        global_runtime_config.set_config_value(window_key + '_HIDDEN', hidden)
    global_runtime_config.save_configuration()
    # state-editor will relieve its model => it won't observe the state machine manager any more
    self.get_controller('states_editor_ctrl').prepare_destruction()  # avoid new state editor TODO tbd (deleted)
    rafcon.core.singleton.state_machine_manager.delete_all_state_machines()
    rafcon.core.singleton.library_manager.prepare_destruction()
    # gtkmvc installs a global glade custom handler that holds a reference to the last created View class,
    # preventing it from being destructed. By installing a dummy callback handler, after all views have been
    # created, the old handler is being removed and with it the reference, allowing all Views to be destructed.
    # Gtk TODO: check if necessary and search for replacement
    # try:
    #     from gtk import glade
    #     def dummy(*args, **kwargs):
    #         pass
    #     glade.set_custom_handler(dummy)
    # except ImportError:
    #     pass
    # Recursively destroys the main window
    self.destroy()
    from rafcon.gui.clipboard import global_clipboard
    global_clipboard.destroy()
    gui_singletons.main_window_controller = None
9.521328
8.798313
1.082177
def setup_forward_or_backward_execution(self):
    """Sets up the execution of the concurrency state dependent on whether it is executed forward or backward.

    :return: the concurrency history item holding the execution histories for all child branches
    """
    if self.backward_execution:
        # pop the return item of this concurrency state to get the correct scoped data
        last_history_item = self.execution_history.pop_last_item()
        assert isinstance(last_history_item, ReturnItem)
        self.scoped_data = last_history_item.scoped_data
        # get the concurrency item for the children execution histories
        concurrency_history_item = self.execution_history.get_last_history_item()
        assert isinstance(concurrency_history_item, ConcurrencyItem)
    else:  # forward_execution
        self.execution_history.push_call_history_item(self, CallType.CONTAINER, self, self.input_data)
        concurrency_history_item = self.execution_history.push_concurrency_history_item(self, len(self.states))
    return concurrency_history_item
5.151146
5.024583
1.025189
def start_child_states(self, concurrency_history_item, do_not_start_state=None):
    """Utility function to start all child states of the concurrency state.

    :param concurrency_history_item: each concurrent child branch gets an execution history stack of this
      concurrency history item
    :param do_not_start_state: optionally a state can be passed, that must not be started (e.g. in the case of
      the barrier concurrency state the decider state)
    :return: the queue the child states report their termination on
    """
    self.state_execution_status = StateExecutionStatus.EXECUTE_CHILDREN
    # actually the queue is not needed in the barrier concurrency case
    # to avoid code duplication both concurrency states have the same start child function
    concurrency_queue = queue.Queue(maxsize=0)  # infinite Queue size
    for index, state in enumerate(self.states.values()):
        if state is not do_not_start_state:
            state.input_data = self.get_inputs_for_state(state)
            state.output_data = self.create_output_dictionary_for_state(state)
            state.concurrency_queue = concurrency_queue
            state.concurrency_queue_id = index
            state.generate_run_id()
            if not self.backward_execution:
                # care for the history items; this item is only for execution visualization
                concurrency_history_item.execution_histories[index].push_call_history_item(
                    state, CallType.EXECUTE, self, state.input_data)
            else:  # backward execution
                last_history_item = concurrency_history_item.execution_histories[index].pop_last_item()
                assert isinstance(last_history_item, ReturnItem)
            state.start(concurrency_history_item.execution_histories[index], self.backward_execution, False)
    return concurrency_queue
6.080011
5.879165
1.034162
def join_state(self, state, history_index, concurrency_history_item):
    """A utility function to join a state

    :param state: the state to join
    :param history_index: the index of the execution history stack in the concurrency history item for the
      given state
    :param concurrency_history_item: the concurrency history item that stores the execution history stacks of
      all children
    """
    state.join()
    # a child running backwards switches the whole concurrency state into backward execution
    if state.backward_execution:
        self.backward_execution = True
    state.state_execution_status = StateExecutionStatus.INACTIVE
    # care for the history items
    if not self.backward_execution:
        state.concurrency_queue = None
        # add the data of all child states to the scoped data and the scoped variables
        state.execution_history.push_return_history_item(state, CallType.EXECUTE, self, state.output_data)
    else:
        last_history_item = concurrency_history_item.execution_histories[history_index].pop_last_item()
        assert isinstance(last_history_item, CallItem)
9.835636
8.660387
1.135704
def finalize_backward_execution(self):
    """Utility function to finalize the backward execution of the concurrency state.

    :return: the result of self.finalize()
    """
    # backward_execution needs to be True to signal the parent container state the backward execution
    self.backward_execution = True
    # pop the ConcurrencyItem as we are leaving the barrier concurrency state
    last_history_item = self.execution_history.pop_last_item()
    assert isinstance(last_history_item, ConcurrencyItem)
    last_history_item = self.execution_history.pop_last_item()
    assert isinstance(last_history_item, CallItem)
    # this copy is convenience and not required here
    self.scoped_data = last_history_item.scoped_data
    self.state_execution_status = StateExecutionStatus.WAIT_FOR_NEXT_STATE
    return self.finalize()
7.945756
7.530684
1.055118
def finalize_concurrency_state(self, outcome):
    """Utility function to finalize the forward execution of the concurrency state.

    :param outcome: the outcome the state terminated with; replaced by a preempted outcome if the state
      was preempted
    :return: the result of self.finalize()
    """
    self.write_output_data()
    self.check_output_data_type()
    self.execution_history.push_return_history_item(self, CallType.CONTAINER, self, self.output_data)
    self.state_execution_status = StateExecutionStatus.WAIT_FOR_NEXT_STATE
    singleton.state_machine_execution_engine._modify_run_to_states(self)
    final_outcome = Outcome(-2, "preempted") if self.preempted else outcome
    return self.finalize(final_outcome)
9.692798
9.775061
0.991584
def _head_length(self, port):
    """Distance from the center of the port to the perpendicular waypoint

    :param port: the port the connection is attached to; falsy values yield a zero length
    :return: the head length as float
    """
    if not port:
        return 0.
    if self.get_parent_state_v() is port.parent:
        # the port belongs to the connection's parent state
        return port.port_size[1]
    return max(port.port_size[1] * 1.5, self._calc_line_width() / 1.3)
7.158689
6.750638
1.060446
def register_view(self, view):
    """Called when the View was registered

    Wires up the tree view columns of the data port list: name, data type and (outside the linkage overview)
    the default/runtime value column. For library states a "Use Runtime Value" toggle column is added.
    """
    super(DataPortListController, self).register_view(view)
    view['name_col'].add_attribute(view['name_text'], 'text', self.NAME_STORAGE_ID)
    view['data_type_col'].add_attribute(view['data_type_text'], 'text', self.DATA_TYPE_NAME_STORAGE_ID)
    # ports are only editable for non-library states that are not inside a library
    if not isinstance(self.model.state, LibraryState) and \
            self.model.state.get_next_upper_library_root_state() is None:
        view['name_text'].set_property("editable", True)
        view['data_type_text'].set_property("editable", True)
    # in the linkage overview the default value is not shown
    if isinstance(view, InputPortsListView) or isinstance(view, OutputPortsListView):
        view['default_value_col'].add_attribute(view['default_value_text'], 'text', self.DEFAULT_VALUE_STORAGE_ID)
        self._apply_value_on_edited_and_focus_out(view['default_value_text'],
                                                  self._apply_new_data_port_default_value)
        view['default_value_col'].set_cell_data_func(view['default_value_text'],
                                                     self._default_value_cell_data_func)
        if isinstance(self.model.state, LibraryState):
            view['default_value_col'].set_title("Used value")
        if self.model.state.get_next_upper_library_root_state() is None:  # never enabled means it is disabled
            view['default_value_text'].set_property("editable", True)
    self._apply_value_on_edited_and_focus_out(view['name_text'], self._apply_new_data_port_name)
    self._apply_value_on_edited_and_focus_out(view['data_type_text'], self._apply_new_data_port_type)
    if isinstance(self.model.state, LibraryState):
        # extra column with a toggle that switches between default and runtime value
        view['use_runtime_value_toggle'] = Gtk.CellRendererToggle()
        view['use_runtime_value_col'] = Gtk.TreeViewColumn("Use Runtime Value")
        view['use_runtime_value_col'].set_property("sizing", Gtk.TreeViewColumnSizing.AUTOSIZE)
        view.get_top_widget().append_column(view['use_runtime_value_col'])
        view['use_runtime_value_col'].pack_start(view['use_runtime_value_toggle'], True)
        view['use_runtime_value_col'].add_attribute(view['use_runtime_value_toggle'], 'active',
                                                    self.USE_RUNTIME_VALUE_STORAGE_ID)
        if self.model.state.get_next_upper_library_root_state() is None:
            view['use_runtime_value_toggle'].set_property("activatable", True)
            view['use_runtime_value_toggle'].connect("toggled", self.on_use_runtime_value_toggled)
    self._reload_data_port_list_store()
2.553553
2.558073
0.998233
def on_use_runtime_value_toggled(self, widget, path):
    """Try to set the use runtime value flag to the newly entered one

    :param widget: the toggle cell renderer emitting the signal
    :param path: path of the toggled row in the list store
    """
    try:
        self.toggle_runtime_value_usage(self.list_store[path][self.ID_STORAGE_ID])
    except TypeError:
        logger.exception("Error while trying to change the use_runtime_value flag")
6.598111
5.816149
1.134447
def _default_value_cell_data_func(self, tree_view_column, cell, model, iter, data=None):
    """Set renderer properties for every single cell independently

    Controls the editable flag and the color scheme for every cell in the default value column according to
    the use_runtime_value flag; only library states are affected.

    :param tree_view_column: the Gtk.TreeViewColumn to be rendered
    :param cell: the current CellRenderer
    :param model: the Gtk.ListStore or TreeStore that is the model for TreeView
    :param iter: an iterator over the rows of the TreeStore/Gtk.ListStore Model
    :param data: optional user data, unused
    """
    if not isinstance(self.model.state, LibraryState):
        return
    if model.get_value(iter, self.USE_RUNTIME_VALUE_STORAGE_ID):
        cell.set_property("editable", True)
        cell.set_property('text', model.get_value(iter, self.RUNTIME_VALUE_STORAGE_ID))
        cell.set_property('foreground', "white")
    else:
        cell.set_property("editable", False)
        cell.set_property('text', model.get_value(iter, self.DEFAULT_VALUE_STORAGE_ID))
        cell.set_property('foreground', "dark grey")
2.597966
2.28954
1.134711
def _reload_data_port_list_store(self):
    """Reloads the input data port list store from the data port models"""
    tmp = self._get_new_list_store()
    for data_port_m in self.data_port_model_list:
        data_port_id = data_port_m.data_port.data_port_id
        data_type = data_port_m.data_port.data_type
        # get name of type (e.g. ndarray)
        data_type_name = data_type.__name__
        # get module of type, e.g. numpy
        data_type_module = data_type.__module__
        # if the type is not a builtin type, also show the module
        if data_type_module not in ['__builtin__', 'builtins']:
            data_type_name = data_type_module + '.' + data_type_name
        if data_port_m.data_port.default_value is None:
            default_value = "None"
        else:
            default_value = data_port_m.data_port.default_value
        if not isinstance(self.model.state, LibraryState):
            tmp.append([data_port_m.data_port.name, data_type_name, str(default_value), data_port_id,
                        None, None, data_port_m])
        else:
            # library states additionally carry the use_runtime_value flag and the runtime value
            use_runtime_value, runtime_value = self.get_data_port_runtime_configuration(data_port_id)
            tmp.append([data_port_m.data_port.name, data_type_name, str(default_value), data_port_id,
                        bool(use_runtime_value), str(runtime_value), data_port_m, ])
    # sort the rows by port name before copying them into the actual list store
    tms = Gtk.TreeModelSort(model=tmp)
    tms.set_sort_column_id(0, Gtk.SortType.ASCENDING)
    tms.set_sort_func(0, compare_variables)
    tms.sort_column_changed()
    tmp = tms
    self.list_store.clear()
    for elem in tmp:
        self.list_store.append(elem[:])
2.700651
2.630518
1.026661
def _apply_new_data_port_name(self, path, new_name):
    """Applies the new name of the data port defined by path

    :param str path: The path identifying the edited data port
    :param str new_name: New name
    """
    try:
        port_id = self.list_store[path][self.ID_STORAGE_ID]
        data_port = self.state_data_port_dict[port_id]
        if data_port.name != new_name:
            data_port.name = new_name
    except (TypeError, ValueError):
        logger.exception("Error while trying to change data port name")
3.629204
4.033229
0.899826
def _apply_new_data_port_type(self, path, new_data_type_str):
    """Applies the new data type of the data port defined by path

    :param str path: The path identifying the edited data port
    :param str new_data_type_str: New data type as str
    """
    try:
        port_id = self.list_store[path][self.ID_STORAGE_ID]
        data_port = self.state_data_port_dict[port_id]
        if data_port.data_type.__name__ != new_data_type_str:
            data_port.change_data_type(new_data_type_str)
    except ValueError:
        logger.exception("Error while changing data type")
3.366891
3.416393
0.98551
def _apply_new_data_port_default_value(self, path, new_default_value_str):
    """Applies the new default value of the data port defined by path

    :param str path: The path identifying the edited variable
    :param str new_default_value_str: New default value as string
    """
    try:
        port_id = self.list_store[path][self.ID_STORAGE_ID]
        if isinstance(self.model.state, LibraryState):
            # the runtime value column is only editable when the use_runtime_value flag is set,
            # so this check always holds for edited cells
            if self.list_store[path][self.USE_RUNTIME_VALUE_STORAGE_ID]:
                self.set_data_port_runtime_value(port_id, new_default_value_str)
        else:
            data_port = self.state_data_port_dict[port_id]
            if str(data_port.default_value) != new_default_value_str:
                data_port.default_value = new_default_value_str
    except (TypeError, AttributeError):
        logger.exception("Error while changing default value")
3.706137
3.78034
0.980371
def _data_ports_changed(self, model):
    """Reload list store and restore the selection when the model was changed

    :param model: the model that triggered the change; ignored unless it is an AbstractStateModel
    """
    if not isinstance(model, AbstractStateModel):
        return
    # remember the ids of the currently selected ports
    selected_port_ids = []
    if self.view is not None:
        _, selected_paths = self.tree_view.get_selection().get_selected_rows()
        if selected_paths:
            selected_port_ids = [self.list_store[p[0]][self.ID_STORAGE_ID] for p in selected_paths]
    self._reload_data_port_list_store()
    # recover the previous port selection
    for port_id in selected_port_ids:
        self.select_entry(port_id, False)
4.241735
3.730609
1.137009
def runtime_values_changed(self, model, prop_name, info):
    """Handle cases for the library runtime values

    Triggers a data port reload when an input runtime value of the observed model was modified.
    """
    if self.model is not model:
        return
    method = info.method_name
    if "_input_runtime_value" in method or \
            method in ['use_runtime_value_input_data_ports', 'input_data_port_runtime_values']:
        self._data_ports_changed(model)
8.809652
8.717129
1.010614
def add_new_data_port(self):
    """Add a new port with default values and select it"""
    try:
        created_ids = gui_helper_state_machine.add_data_port_to_selected_states('OUTPUT', int, [self.model])
        if created_ids:
            self.select_entry(created_ids[self.model.state])
    except ValueError:
        pass
7.509953
7.251656
1.035619
def add_callback_for_action(self, action, callback):
    """Adds a callback function to an action

    The method checks whether the callback is callable. If so, the callback is added to the list of functions
    called when the action is triggered. If the callback is bound to a controller (directly or wrapped via
    functools.partial), it is additionally registered per controller, so that all callbacks of a controller
    can be removed at once.

    :param str action: An action like 'add', 'copy', 'info'
    :param callback: A callback function, which is called when action is triggered. It retrieves the event as
      parameter
    :return: True if the parameters are valid and the callback is registered, False else
    :rtype: bool
    """
    if not callable(callback):
        # bugfix: previously fell through and implicitly returned None instead of the documented False
        return False
    if action not in self.__action_to_callbacks:
        self.__action_to_callbacks[action] = []
    self.__action_to_callbacks[action].append(callback)
    # determine the owning controller of a bound-method callback, if any
    controller = None
    try:
        controller = callback.__self__
    except AttributeError:
        try:
            # Needed when callback was wrapped using functools.partial
            controller = callback.func.__self__
        except AttributeError:
            pass
    if controller:
        if controller not in self.__controller_action_callbacks:
            self.__controller_action_callbacks[controller] = {}
        if action not in self.__controller_action_callbacks[controller]:
            self.__controller_action_callbacks[controller][action] = []
        self.__controller_action_callbacks[controller][action].append(callback)
    return True
2.22535
2.266219
0.981966
def remove_callback_for_action(self, action, callback):
    """Remove a callback for a specific action

    This is mainly for cleanup purposes or for a plugin that replaces a GUI widget.

    :param str action: the action of which the callback is going to be removed
    :param callback: the callback to be removed
    """
    registered = self.__action_to_callbacks.get(action)
    if registered and callback in registered:
        registered.remove(callback)
2.19158
2.856265
0.767289
def trigger_action(self, action, key_value, modifier_mask):
    """Calls the appropriate callback function(s) for the given action

    :param str action: The name of the action that was triggered
    :param key_value: The key value of the shortcut that caused the trigger
    :param modifier_mask: The modifier mask of the shortcut that caused the trigger
    :return: Whether a callback was triggered
    :rtype: bool
    """
    result = False
    for callback_function in self.__action_to_callbacks.get(action, []):
        try:
            returned = callback_function(key_value, modifier_mask)
        except Exception as e:
            logger.exception('Exception while calling callback methods for action "{0}": {1}'.format(action, e))
        else:
            # If at least one controller returns True, the whole result becomes True
            result |= (False if returned is None else returned)
    return result
4.096416
3.91554
1.046194
def check_info_on_no_update_flags(self, info):
    """Stop updates while multi-actions

    Sets/clears the internal no-update flags while composite operations (group/ungroup, state type changes,
    state removal) are in progress, so the widget does not refresh on intermediate model states.

    :param info: the notification info of the observed method call
    """
    # TODO that could need a second clean up
    # avoid updates because of state destruction
    if 'before' in info and info['method_name'] == "remove_state":
        if info.instance is self.model.state:
            self.no_update_state_destruction = True
        else:
            # if the state itself is removed lock the widget to never run updates and relieve all models
            removed_state_id = info.args[1] if len(info.args) > 1 else info.kwargs['state_id']
            if removed_state_id == self.model.state.state_id or \
                    not self.model.state.is_root_state and removed_state_id == self.model.parent.state.state_id:
                self.no_update_self_or_parent_state_destruction = True
                self.relieve_all_models()
    elif 'after' in info and info['method_name'] == "remove_state":
        if info.instance.state_id == self.model.state.state_id:
            self.no_update_state_destruction = False
    # reduce NotificationOverview generations by the fact that after could cause False and before could cause True
    if not self.no_update_state_destruction and not self.no_update_self_or_parent_state_destruction and \
            (not self.no_update and 'before' in info or 'after' in info and self.no_update):
        return
    overview = NotificationOverview(info, False, self.__class__.__name__)
    # The method causing the change raised an exception, thus nothing was changed and updates are allowed
    if 'after' in info and isinstance(overview['result'][-1], Exception):
        self.no_update = False
        self.no_update_state_destruction = False
        # self.no_update_self_or_parent_state_destruction = False
        return
    if overview['method_name'][-1] in ['group_states', 'ungroup_state', "change_state_type",
                                       "change_root_state_type"]:
        instance_is_self = self.model.state is overview['instance'][-1]
        instance_is_parent = self.model.parent and self.model.parent.state is overview['instance'][-1]
        instance_is_parent_parent = self.model.parent and self.model.parent.parent and \
            self.model.parent.parent.state is overview['instance'][-1]
        # print("no update flag: ", True if 'before' in info and
        #       (instance_is_self or instance_is_parent or instance_is_parent_parent) else False)
        if instance_is_self or instance_is_parent or instance_is_parent_parent:
            self.no_update = True if 'before' in info else False
    if overview['prop_name'][-1] == 'state' and overview['method_name'][-1] in ["change_state_type"] and \
            self.model.get_state_machine_m() is not None:
        # observe the model of the state whose type is being changed
        changed_model = self.model.get_state_machine_m().get_state_model_by_path(
            overview['args'][-1][1].get_path())
        if changed_model not in self._model_observed:
            self.observe_model(changed_model)
4.024997
4.000334
1.006165
def before_notification_state_machine_observation_control(self, model, prop_name, info):
    """Check for multi-actions and set respective no update flags."""
    # execution status updates never require structural handling
    if is_execution_status_update_notification_from_state_machine_model(prop_name, info):
        return
    # do not update while multi-actions
    self.check_info_on_no_update_flags(info)
14.933337
9.465046
1.577735
def store_value(self, name, value, parameters=None):
    """Stores the value of a certain variable

    The value of a variable with name 'name' is stored together with the parameters that were used for the
    calculation.

    :param str name: The name of the variable
    :param value: The value to be cached
    :param dict parameters: The parameters on which the value depends; None is treated as "no parameters"
    :raises TypeError: If parameters is neither None nor a dict
    """
    if parameters is None:
        # bugfix: the default value None used to fail the isinstance check below, making the default unusable
        parameters = {}
    if not isinstance(parameters, dict):
        raise TypeError("parameters must be a dict")
    param_hash = self._parameter_hash(parameters)
    if name not in self._cache:
        self._cache[name] = {}
    self._cache[name][param_hash.hexdigest()] = value
3.25289
3.659101
0.888986
def get_value(self, name, parameters=None):
    """Return the value of a cached variable if applicable

    The value of the variable 'name' is returned, if no parameters are passed or if all parameters are
    identical to the ones stored for the variable.

    :param str name: Name of the variable
    :param dict parameters: Current parameters or None if parameters do not matter
    :return: The cached value of the variable or None if the parameters differ
    :raises TypeError: If parameters is neither None nor a dict
    """
    if parameters is None:
        # bugfix: the docstring allows None, but the isinstance check below used to raise for the default
        parameters = {}
    if not isinstance(parameters, dict):
        raise TypeError("parameters must be a dict")  # also fixes message typo "must a dict"
    if name not in self._cache:
        return None
    param_hash = self._parameter_hash(parameters)
    return self._cache[name].get(param_hash.hexdigest(), None)
4.326558
4.395818
0.984244
def _normalize_number_values(self, parameters):
    """Assures equal precision for all number values

    Replaces every int/float value in `parameters` in place with its normalized Decimal string
    representation.

    :param dict parameters: the parameter dict to normalize
    """
    numeric_keys = [key for key, value in parameters.items() if isinstance(value, (int, float))]
    for key in numeric_keys:
        parameters[key] = str(Decimal(parameters[key]).normalize(self._context))
3.628039
2.937323
1.235151
def register_view(self, view):
    """Register callbacks for button press events and selection changed"""
    super(AbstractTreeViewController, self).register_view(view)
    self.signal_handlers.append((self._tree_selection,
                                 self._tree_selection.connect('changed', self.selection_changed)))
    # self.handler_ids.append((self.tree_view,
    #                          self.tree_view.connect('key-press-event', self.tree_view_keypress_callback)))
    self.tree_view.connect('key-release-event', self.on_key_release_event)
    self.tree_view.connect('button-release-event', self.tree_view_keypress_callback)
    # key press is needed for tab motion but needs to be registered already here TODO why?
    self.tree_view.connect('key-press-event', self.tree_view_keypress_callback)
    self._tree_selection.set_mode(Gtk.SelectionMode.MULTIPLE)
    self.update_selection_sm_prior()
3.990283
3.869623
1.031181
def get_view_selection(self):
    """Get actual tree selection object and all respective models of selected rows

    :return: tuple of (tree selection, list of selected models); (None, None) if the widget has no
      MODEL_STORAGE_ID
    """
    if not self.MODEL_STORAGE_ID:
        return None, None
    selected_paths = []
    # avoid selection requests on empty tree views -> causes warnings in gtk3
    if len(self.store) > 0:
        _, selected_paths = self._tree_selection.get_selected_rows()
    # collect the model stored in each selected row
    selected_models = [self.store[p][self.MODEL_STORAGE_ID] for p in selected_paths]
    return self._tree_selection, selected_models
6.697413
5.595521
1.196924
def get_selections(self):
    """Get current model selection status

    Combines the state machine selection (filtered according to the purpose of the widget) with the
    tree selection of the widget.

    :return: tuple of (tree selection, selected view models, state machine selection, filtered selected
      state machine models)
    """
    sm_selection, sm_filtered_models = self.get_state_machine_selection()
    view_selection, view_models = self.get_view_selection()
    return view_selection, view_models, sm_selection, sm_filtered_models
5.119807
3.490244
1.466891
def state_machine_selection_changed(self, state_machine_m, signal_name, signal_msg):
    """Notify tree view about state machine selection"""
    affected_classes = signal_msg.arg.affected_core_element_classes
    if self.CORE_ELEMENT_CLASS in affected_classes:
        self.update_selection_sm_prior()
22.633049
23.24556
0.97365
def add_action_callback(self, *event):
    """Callback method for add action

    Delegates to ``on_add`` only when the event targets this widget and no entry
    widget is currently being edited (so typing does not trigger the action).
    """
    if react_to_event(self.view, self.tree_view, event) and self.active_entry_widget is None:
        self.on_add(None)
        return True
9.393428
9.904023
0.948446
def remove_action_callback(self, *event):
    """Callback method for remove action

    The method checks whether a shortcut ('Delete') is in the gui config model which
    shadows the delete functionality of a maybe active entry widget. If an entry
    widget is active and the key is not 'Delete', the remove callback returns None.
    """
    if react_to_event(self.view, self.tree_view, event) and \
            not (self.active_entry_widget and not is_event_of_key_string(event, 'Delete')):
        self.on_remove(None)
        return True
10.405983
9.180543
1.133482
def copy_action_callback(self, *event):
    """Callback method for copy action

    Copies the widget-specific filtered state machine selection to the clipboard.
    """
    if react_to_event(self.view, self.tree_view, event) and self.active_entry_widget is None:
        sm_selection, sm_selected_model_list = self.get_state_machine_selection()
        # only list specific elements are copied by widget
        if sm_selection is not None:
            sm_selection.set(sm_selected_model_list)
            global_clipboard.copy(sm_selection)
            return True
9.05398
9.150001
0.989506
def cut_action_callback(self, *event):
    """Callback method for cut action

    Cuts the widget-specific filtered state machine selection into the clipboard.
    """
    if react_to_event(self.view, self.tree_view, event) and self.active_entry_widget is None:
        sm_selection, sm_selected_model_list = self.get_state_machine_selection()
        # only list specific elements are cut by widget
        if sm_selection is not None:
            sm_selection.set(sm_selected_model_list)
            global_clipboard.cut(sm_selection)
            return True
9.316975
9.528921
0.977758
def tree_view_keypress_callback(self, widget, event):
    """General method to adapt widget view and controller behavior according the key
    press/release and button release events

    Here the scrollbar motion to follow key cursor motions in an editable is
    implemented: while an entry widget is edited, the horizontal scrollbar of the
    surrounding scrolled window is moved so the text cursor stays visible.

    :param Gtk.TreeView widget: The tree view the controller uses
    :param Gdk.Event event: The key press event
    :return:
    """
    current_row_path, current_focused_column = self.tree_view.get_cursor()
    if isinstance(widget, Gtk.TreeView) and not self.active_entry_widget:
        # avoid jumps for active entry widget; cursor motion/selection changes
        # (e.g. also by button release event) need no scrollbar adaptation here
        pass
    elif isinstance(widget, Gtk.Entry) and self.view.scrollbar_widget is not None:
        # calculate the position of the scrollbar to be always centered with the entry widget cursor
        # TODO check how to get sufficient the scroll-offset in the entry widget -> some times zero when not
        # TODO the scrollbar is one step behind cursor -> so jump from pos1 to end works not perfect
        entry_widget_scroll_offset, entry_widget_cursor_position, entry_widget_text_length = \
            widget.get_properties(*["scroll-offset", "cursor-position", "text-length"])
        cell_rect_of_entry_widget = widget.get_allocation()
        horizontal_scroll_bar = self.view.scrollbar_widget.get_hscrollbar()
        # entry_widget_text_length must be greater than zero otherwise DivisionByZero Exception
        if horizontal_scroll_bar is not None and float(entry_widget_text_length) > 0:
            adjustment = horizontal_scroll_bar.get_adjustment()
            layout_pixel_width = widget.get_layout().get_pixel_size()[0]
            # cursor position in scroller coordinates: cell origin minus the entry's own
            # scroll offset plus the fraction of the rendered text before the cursor
            rel_pos = cell_rect_of_entry_widget.x - entry_widget_scroll_offset + \
                int(layout_pixel_width*float(entry_widget_cursor_position)/float(entry_widget_text_length))
            # optimize rel_pos for better user support
            bounds = widget.get_selection_bounds()
            if bounds and bounds[1] - bounds[0] == len(widget.get_text()):
                # if text is fully selected stay in front as far as possible
                rel_pos = cell_rect_of_entry_widget.x
                if self._horizontal_scrollbar_stay_in_front_if_possible():
                    return True
            else:
                # try to stay long at the beginning of the columns if the columns fully fit in
                rel_space = adjustment.get_page_size() - cell_rect_of_entry_widget.x
                if cell_rect_of_entry_widget.x + widget.get_layout().get_pixel_size()[0] < \
                        adjustment.get_page_size():
                    rel_pos = 0.
                elif rel_space and rel_pos <= rel_space:
                    # accelerate the showing of the first columns
                    rel_pos = rel_pos + rel_pos*3.*(rel_pos - rel_space)/adjustment.get_page_size()
                    rel_pos = 0. if rel_pos <= 0 else rel_pos
                else:
                    # and jump to the end of the scroller space if close to the upper limit
                    rel_pos = adjustment.get_upper() if rel_pos + 2*entry_widget_scroll_offset > adjustment.get_upper() else rel_pos
            self._put_horizontal_scrollbar_onto_rel_pos(rel_pos)
5.102525
5.069104
1.006593
def register_view(self, view):
    """Register callbacks for button press events and selection changed

    :param view: The view whose tree view widget the mouse handler is attached to
    """
    super(ListViewController, self).register_view(view)
    self.tree_view.connect('button_press_event', self.mouse_click)
5.843558
4.996548
1.169519
def on_remove(self, widget, data=None):
    """Removes respective selected core elements and select the next one

    :param widget: The widget that triggered the removal (unused)
    :param data: Optional user data (unused)
    :return: True when elements were removed, otherwise None
    """
    path_list = None
    if self.view is not None:
        model, path_list = self.tree_view.get_selection().get_selected_rows()
    # remember the cursor path so a neighbouring row can be selected afterwards
    old_path = self.get_path()
    models = [self.list_store[path][self.MODEL_STORAGE_ID] for path in path_list] if path_list else []
    if models:
        try:
            self.remove_core_elements(models)
        except AttributeError as e:
            self._logger.warning("The respective core element of {1}.list_store couldn't be removed. -> {0}"
                                 "".format(e, self.__class__.__name__))
        # select the row that took the removed row's place (or the last row)
        if len(self.list_store) > 0:
            self.tree_view.set_cursor(min(old_path[0], len(self.list_store) - 1))
        return True
    else:
        self._logger.warning("Please select an element to be removed.")
4.303499
3.889465
1.10645
def get_state_machine_selection(self):
    """An abstract getter method for state machine selection

    The method maybe has to be re-implemented by inheriting classes and generally hands
    a filtered set of selected elements.

    :return: selection object itself, filtered set of selected elements
    :rtype: rafcon.gui.selection.Selection, set
    """
    state_machine_m = self.model.get_state_machine_m()
    if not state_machine_m:
        return None, set()
    selection = state_machine_m.selection
    if not selection:
        return selection, set()
    return selection, selection.get_selected_elements_of_core_class(self.CORE_ELEMENT_CLASS)
6.636998
5.440664
1.219887
def _handle_double_click(self, event):
    """Double click with left mouse button focuses the element

    :param Gdk.Event event: The button event carrying the click coordinates
    """
    if event.get_button()[1] == 1:  # Left mouse button
        path_info = self.tree_view.get_path_at_pos(int(event.x), int(event.y))
        if path_info:  # Valid entry was clicked on
            path = path_info[0]
            iter = self.list_store.get_iter(path)
            model = self.list_store.get_value(iter, self.MODEL_STORAGE_ID)
            # focus the clicked model within the state machine selection
            selection = self.model.get_state_machine_m().selection
            selection.focus = model
4.889108
4.648222
1.051823
def update_selection_sm_prior(self):
    """State machine prior update of tree selection

    Mirrors the state machine selection into the tree view selection; the
    ``_do_selection_update`` flag guards against re-entrant updates triggered by
    the selection-changed signal this method itself causes.
    """
    if self._do_selection_update:
        return
    self._do_selection_update = True
    tree_selection, selected_model_list, sm_selection, sm_selected_model_list = self.get_selections()
    if tree_selection is not None:
        for path, row in enumerate(self.list_store):
            model = row[self.MODEL_STORAGE_ID]
            # deselect rows no longer in the sm selection, select newly selected ones
            if model not in sm_selected_model_list and model in selected_model_list:
                tree_selection.unselect_path(Gtk.TreePath.new_from_indices([path]))
            if model in sm_selected_model_list and model not in selected_model_list:
                tree_selection.select_path(Gtk.TreePath.new_from_indices([path]))
    self._do_selection_update = False
2.667987
2.614482
1.020465
def update_selection_self_prior(self):
    """Tree view prior update of state machine selection

    Pushes the widget's tree selection into the state machine selection; the
    ``_do_selection_update`` flag guards against re-entrant updates.
    """
    if self._do_selection_update:
        return
    self._do_selection_update = True
    tree_selection, selected_model_list, sm_selection, sm_selected_model_list = self.get_selections()
    if isinstance(sm_selection, Selection):
        sm_selection.handle_prepared_selection_of_core_class_elements(self.CORE_ELEMENT_CLASS, selected_model_list)
    self._do_selection_update = False
5.941355
5.644749
1.052545
def select_entry(self, core_element_id, by_cursor=True):
    """Selects the row entry belonging to the given core_element_id by cursor or tree selection

    :param core_element_id: Core element identifier stored in the id column
    :param bool by_cursor: Whether to move the cursor instead of only selecting the row
    """
    matching_rows = (idx for idx, row in enumerate(self.list_store)
                     if row[self.ID_STORAGE_ID] == core_element_id)
    row_num = next(matching_rows, None)
    if row_num is None:
        return
    if by_cursor:
        self.tree_view.set_cursor(row_num)
    else:
        self.tree_view.get_selection().select_path((row_num, ))
4.24907
4.142933
1.025619
def get_path_for_core_element(self, core_element_id):
    """Get path to the row representing core element described by handed core_element_id

    :param core_element_id: Core element identifier used in the respective list store column
    :rtype: tuple
    :return: path, or None when no row matches
    """
    matches = [idx for idx, row in enumerate(self.list_store)
               if row[self.ID_STORAGE_ID] == core_element_id]
    if matches:
        return (matches[0],)
8.966764
8.033739
1.116138
def iter_tree_with_handed_function(self, function, *function_args):
    """Iterate tree view with condition check function

    Applies ``function(row_iter, *function_args)`` to every row of the tree store,
    depth-first; children are visited in reverse order.

    :param function: Callable applied to each row iterator
    :param function_args: Extra positional arguments forwarded to ``function``
    """
    def iter_all_children(row_iter, function, function_args):
        # recursive helper: apply function to the row, then recurse into children
        if isinstance(row_iter, Gtk.TreeIter):
            function(row_iter, *function_args)
            for n in reversed(range(self.tree_store.iter_n_children(row_iter))):
                child_iter = self.tree_store.iter_nth_child(row_iter, n)
                iter_all_children(child_iter, function, function_args)
        else:
            self._logger.warning("Iter has to be TreeIter -> handed argument is: {0}".format(row_iter))

    # iter on root level of tree
    next_iter = self.tree_store.get_iter_first()
    while next_iter:
        iter_all_children(next_iter, function, function_args)
        next_iter = self.tree_store.iter_next(next_iter)
2.725703
2.692082
1.012489
def update_selection_sm_prior_condition(self, state_row_iter, selected_model_list, sm_selected_model_list):
    """State machine prior update of tree selection for one tree model row

    :param Gtk.TreeIter state_row_iter: Iterator of the row to synchronize
    :param selected_model_list: Models currently selected in the tree view
    :param sm_selected_model_list: Models currently selected in the state machine
    """
    selected_path = self.tree_store.get_path(state_row_iter)
    tree_model_row = self.tree_store[selected_path]
    model = tree_model_row[self.MODEL_STORAGE_ID]
    if model not in sm_selected_model_list and model in selected_model_list:
        self._tree_selection.unselect_iter(state_row_iter)
    elif model in sm_selected_model_list and model not in selected_model_list:
        # expand ancestors so the newly selected row is visible
        self.tree_view.expand_to_path(selected_path)
        self._tree_selection.select_iter(state_row_iter)
2.550776
2.325239
1.096995
def update_selection_self_prior_condition(self, state_row_iter, sm_selected_model_set, selected_model_list):
    """Tree view prior update of one model in the state machine selection

    :param Gtk.TreeIter state_row_iter: Iterator of the row to synchronize
    :param set sm_selected_model_set: State machine selection set, updated in place
    :param selected_model_list: Models currently selected in the tree view
    """
    row_path = self.tree_store.get_path(state_row_iter)
    model = self.tree_store[row_path][self.MODEL_STORAGE_ID]
    in_tree_selection = model in selected_model_list
    in_sm_selection = model in sm_selected_model_set
    if in_sm_selection and not in_tree_selection:
        sm_selected_model_set.discard(model)
    elif in_tree_selection and not in_sm_selection:
        sm_selected_model_set.add(model)
2.343041
2.211264
1.059593
def update_selection_self_prior(self):
    """Tree view prior update of state machine selection

    Walks the whole tree, folds the tree view selection into the state machine
    selection set and hands the result to the selection object. The
    ``_do_selection_update`` flag guards against re-entrant updates.
    """
    if self._do_selection_update:
        return
    self._do_selection_update = True
    tree_selection, selected_model_list, sm_selection, sm_selected_model_set = self.get_selections()
    if isinstance(sm_selection, Selection):
        # current sm_selected_model_set will be updated and handed back
        self.iter_tree_with_handed_function(self.update_selection_self_prior_condition,
                                            sm_selected_model_set, selected_model_list)
        sm_selection.handle_prepared_selection_of_core_class_elements(self.CORE_ELEMENT_CLASS,
                                                                      sm_selected_model_set)
        # TODO check if we can solve the difference that occurs e.g. while complex actions?, or same state paths!
        # -> models in selection for core element not in the tree; the iter-tree condition tolerates this
        if not set(selected_model_list) == sm_selected_model_set:
            self._logger.verbose("Difference between tree view selection: \n{0} \nand state machine selection: "
                                 "\n{1}".format(set(selected_model_list), sm_selected_model_set))
        # TODO check why sometimes not consistent with sm selection, e.g. while modification history test
        if self.check_selection_consistency(sm_check=False):
            self.update_selection_sm_prior()
    self._do_selection_update = False
10.39471
9.976997
1.041868
def update_selection_sm_prior(self):
    """State machine prior update of tree selection

    Walks the whole tree and aligns each row's selected state with the state
    machine selection. The ``_do_selection_update`` flag guards against
    re-entrant updates.
    """
    if self._do_selection_update:
        return
    self._do_selection_update = True
    tree_selection, selected_model_list, sm_selection, sm_selected_model_list = self.get_selections()
    if tree_selection is not None:
        self.iter_tree_with_handed_function(self.update_selection_sm_prior_condition,
                                            selected_model_list, sm_selected_model_list)
        self.check_selection_consistency()
    self._do_selection_update = False
4.636228
4.527913
1.023922
def select_entry(self, core_element_id, by_cursor=True):
    """Selects the row entry belonging to the given core_element_id by cursor or tree selection

    :param core_element_id: Core element identifier stored in the id column
    :param bool by_cursor: Whether to move the cursor instead of only selecting the row
    """
    row_path = self.get_path_for_core_element(core_element_id)
    if not row_path:
        self._logger.warning("Path not valid: {0} (by_cursor {1})".format(str(core_element_id), str(by_cursor)))
        return
    if by_cursor:
        self.tree_view.set_cursor(row_path)
    else:
        self.tree_view.get_selection().select_path(row_path)
2.6224
2.486602
1.054612
def contains_geometric_info(var):
    """Check whether the passed variable is a tuple with two floats or integers"""
    if not isinstance(var, tuple) or len(var) != 2:
        return False
    return all(isinstance(value, (int, float)) for value in var)
2.884372
2.009351
1.435474
def generate_default_state_meta_data(parent_state_m, canvas=None, num_child_state=None, gaphas_editor=True):
    """Generate default meta data for a child state according its parent state

    The function could work with the handed num_child_state if all child states are
    not drawn, till now. The method checks if the parent's meta data is consistent in
    canvas state view and model if a canvas instance is handed.

    :param rafcon.gui.models.container_state.ContainerStateModel parent_state_m: Model of the state
        the child should be drawn into
    :param rafcon.gui.mygaphas.canvas.MyCanvas canvas: canvas instance the state will be drawn into
    :param int num_child_state: Number of child states already drawn in canvas parent state view
    :return: child relative pos (tuple) in parent and its size (tuple)
    """
    parent_size = parent_state_m.get_meta_data_editor()['size']
    if not contains_geometric_info(parent_size):
        raise ValueError("Invalid state size: {}".format(parent_size))
    # use handed number of child states and otherwise take number of child states from parent state model
    num_child_state = len(parent_state_m.states) if num_child_state is None else num_child_state
    # check if respective canvas is handed if meta data of parent canvas view is consistent with model
    if canvas is not None:
        parent_state_v = canvas.get_view_for_model(parent_state_m)
        if not (parent_state_v.width, parent_state_v.height) == parent_size:
            logger.warning("Size meta data of model {0} is different to gaphas {1}"
                           "".format((parent_state_v.width, parent_state_v.height), parent_size))
    # Calculate default positions for the child states; make the inset from the top left corner
    parent_state_width, parent_state_height = parent_size
    new_state_side_size = min(parent_state_width * 0.2, parent_state_height * 0.2)
    child_width = new_state_side_size
    child_height = new_state_side_size
    child_size = (child_width, child_height)
    child_spacing = max(child_size) * 1.2
    parent_margin = cal_margin(parent_size)
    # lay children out on a grid of max_cols x max_rows cells inside the parent margin
    max_cols = (parent_state_width - 2*parent_margin) // child_spacing
    (row, col) = divmod(num_child_state, max_cols)
    max_rows = (parent_state_height - 2*parent_margin - 0.5*child_spacing) // (1.5*child_spacing)
    # once the grid is full, start over with a small diagonal offset per "overlapping" round
    (overlapping, row) = divmod(row, max_rows)
    overlapping_step = 0.5*parent_margin
    max_overlaps_x = (parent_state_width - 2*parent_margin - child_width -
                      (parent_margin + (max_cols - 1) * child_spacing + child_spacing - child_width)) // overlapping_step
    max_overlaps_y = (parent_state_height - 2*parent_margin - child_height -
                      child_spacing * (1.5 * (max_rows - 1) + 1)) // overlapping_step
    # handle case of less space TODO check again not perfect, maybe that can be done more simple
    max_overlaps_x = 0 if max_overlaps_x < 0 else max_overlaps_x
    max_overlaps_y = 0 if max_overlaps_y < 0 else max_overlaps_y
    max_overlaps = min(max_overlaps_x, max_overlaps_y) + 1
    overlapping = divmod(overlapping, max_overlaps)[1]
    child_rel_pos_x = parent_margin + col * child_spacing + child_spacing - child_width + overlapping*overlapping_step
    child_rel_pos_y = child_spacing * (1.5 * row + 1.) + overlapping*overlapping_step
    return (child_rel_pos_x, child_rel_pos_y), (new_state_side_size, new_state_side_size)
3.309534
3.111191
1.063751
def add_boundary_clearance(left, right, top, bottom, frame, clearance=0.1):
    """Increase boundary size

    The function increases the space between top and bottom and between left and right
    parameters. The increase is performed on the bigger of boundary size and frame size,
    so max(size boundary, size frame). Left and top are clamped at zero.

    :param float left: lower x-axis value
    :param float right: upper x-axis value
    :param float top: lower y-axis value
    :param float bottom: upper y-axis value
    :param dict frame: Dictionary with size and rel_pos tuples
    :param float clearance: Percentage 0.01 = 1% of clearance
    :return: widened (left, right, top, bottom)
    """
    width = max(right - left, frame['size'][0])
    left = max(left - 0.5 * clearance * width, 0)
    right += 0.5 * clearance * width
    height = max(bottom - top, frame['size'][1])
    top = max(top - 0.5 * clearance * height, 0)
    bottom += 0.5 * clearance * height
    return left, right, top, bottom
1.925711
1.980811
0.972183
def cal_frame_according_boundaries(left, right, top, bottom, parent_size, gaphas_editor=True, group=True):
    """Generate margin and relative position and size from handed boundary parameters and parent size

    :param float left: lower x-axis value of the covered boundary
    :param float right: upper x-axis value of the covered boundary
    :param float top: lower y-axis value of the covered boundary
    :param float bottom: upper y-axis value of the covered boundary
    :param tuple parent_size: Size of the parent state
    :param bool gaphas_editor: Whether the gaphas editor is used (currently unused here)
    :param bool group: Whether the frame is for a grouped state (adds margin around the boundary)
    :return: margin, relative position tuple, size tuple
    """
    margin = cal_margin(parent_size)
    # Add margin and ensure that the upper left corner is within the state
    if group:
        # frame of grouped state
        rel_pos = max(left - margin, 0), max(top - margin, 0)
        # Add margin and ensure that the lower right corner is within the state
        size = (min(right - left + 2 * margin, parent_size[0] - rel_pos[0]),
                min(bottom - top + 2 * margin, parent_size[1] - rel_pos[1]))
    else:
        # frame inside of state
        rel_pos = left, top
        size = right - left, bottom - top
    return margin, rel_pos, size
3.218989
3.072284
1.047751
def offset_rel_pos_of_all_models_in_dict(models_dict, pos_offset, gaphas_editor=True):
    """Add position offset to all handed models in dict

    Shifts the relative positions of states, scoped variables (OpenGL editor only) and
    the waypoints of transitions/data flows so their absolute positions stay unchanged
    when the parent frame moves by ``pos_offset``.

    :param dict models_dict: Dict with state-attribute-consistent keys holding element models
    :param tuple pos_offset: (x, y) offset added to every relative position
    :param bool gaphas_editor: Whether the gaphas editor meta data is used
    """
    # hoisted out of the waypoint loop: the import is loop-invariant
    from rafcon.gui.models.data_flow import DataFlowModel

    # Update relative position of states within the container in order to maintain
    # their absolute position
    for child_state_m in models_dict['states'].values():
        old_rel_pos = child_state_m.get_meta_data_editor(for_gaphas=gaphas_editor)['rel_pos']
        child_state_m.set_meta_data_editor('rel_pos', add_pos(old_rel_pos, pos_offset), from_gaphas=gaphas_editor)

    # Do the same for scoped variables (only relevant outside the gaphas editor)
    if not gaphas_editor:
        for scoped_variable_m in models_dict['scoped_variables'].values():
            old_rel_pos = scoped_variable_m.get_meta_data_editor(for_gaphas=gaphas_editor)['inner_rel_pos']
            scoped_variable_m.set_meta_data_editor('inner_rel_pos', add_pos(old_rel_pos, pos_offset), gaphas_editor)

    # Do the same for all connections (transitions and data flows)
    connection_models = list(models_dict['transitions'].values()) + list(models_dict['data_flows'].values())
    for connection_m in connection_models:
        old_waypoints = connection_m.get_meta_data_editor(for_gaphas=gaphas_editor)['waypoints']
        # data flow waypoints use a flipped y-axis in the gaphas editor
        flip_y = gaphas_editor and isinstance(connection_m, DataFlowModel)
        effective_offset = (pos_offset[0], -pos_offset[1]) if flip_y else pos_offset
        new_waypoints = [add_pos(waypoint, effective_offset) for waypoint in old_waypoints]
        connection_m.set_meta_data_editor('waypoints', new_waypoints, from_gaphas=gaphas_editor)
2.469538
2.495083
0.989762
def scale_library_ports_meta_data(state_m, gaphas_editor=True):
    """Scale the ports of library model accordingly relative to state_copy meta size.

    The function assumes that the meta data of ports of the state_copy of the library was
    copied to respective elements in the library and that those were not adjusted before.

    :param state_m: LibraryStateModel whose port meta data is scaled
    :param bool gaphas_editor: Whether the gaphas editor is used (scaling itself always
        uses gaphas meta data here)
    """
    if state_m.meta_data_was_scaled:
        # already scaled once -> nothing to do
        return
    state_m.income.set_meta_data_editor('rel_pos', state_m.state_copy.income.get_meta_data_editor()['rel_pos'])
    # scale factor between outer library size and inner state_copy size
    factor = divide_two_vectors(state_m.get_meta_data_editor()['size'],
                                state_m.state_copy.get_meta_data_editor()['size'])
    if contains_geometric_info(factor):
        resize_state_port_meta(state_m, factor, True)
        state_m.meta_data_was_scaled = True
    else:
        logger.info("Skip resize of library ports meta data {0}".format(state_m))
4.172741
3.839067
1.086916
def scale_library_content(library_state_m, gaphas_editor=True):
    """Scales the meta data of the content of a LibraryState

    The contents of the `LibraryStateModel` `library_state_m` (i.e., the `state_copy` and
    all its children/state elements) are scaled to fit the current size of the
    `LibraryStateModel`.

    :param LibraryStateModel library_state_m: The library whose content is to be resized
    :param bool gaphas_editor: Whether to use the meta data for the GraphicalEditor using
        gaphas (default: True)
    """
    assert isinstance(library_state_m, LibraryStateModel)
    # For library states with an ExecutionState as state_copy, scaling does not make sense
    if not isinstance(library_state_m.state_copy, ContainerStateModel):
        return

    # use same size for state copy and put rel_pos to zero
    library_meta = library_state_m.get_meta_data_editor(gaphas_editor)
    state_copy_meta = library_state_m.state_copy.set_meta_data_editor('size', library_meta['size'], gaphas_editor)
    library_state_m.state_copy.set_meta_data_editor('rel_pos', (0., 0.), from_gaphas=gaphas_editor)
    # work around that gaphas draws in state_copy coordinates (which is not shown) -> reduce state copy size
    if gaphas_editor:
        library_state_margin = cal_margin(state_copy_meta['size'])
        state_copy_size = subtract_pos(state_copy_meta['size'], (2*library_state_margin, 2*library_state_margin))
        library_state_m.state_copy.set_meta_data_editor('size', state_copy_size, gaphas_editor)

    # if meta data has empty fields put default data on state meta data
    if model_has_empty_meta(library_state_m.state_copy) and \
            not put_default_meta_data_on_state_m_recursively(library_state_m.state_copy, library_state_m,
                                                             only_child_states=True):
        return

    # prepare resize by collecting all state elements in the models_dict; do resize with respect
    # to state copy (opengl: same size as library state; gaphas: reduced by library state margin)
    models_dict = {'state': library_state_m.state_copy}
    for state_element_key in library_state_m.state_copy.state.state_element_attrs:
        state_element_list = getattr(library_state_m.state_copy, state_element_key)
        # Some models are held in a gtkmvc3.support.wrappers.ObsListWrapper, not a list
        if hasattr(state_element_list, 'keys'):
            state_element_list = state_element_list.values()
        models_dict[state_element_key] = {elem.core_element.core_element_id: elem for elem in state_element_list}

    # perform final resize
    resize_factor = (1., 1.)
    try:
        if not models_dict['states'] and (not models_dict['scoped_variables'] or gaphas_editor):
            logger.info("Skip scaling for empty root state {0}.".format(library_state_m.state))
        else:
            resize_factor = scale_meta_data_according_state(models_dict, fill_up=True)
    except Exception:  # was a bare except; do not swallow SystemExit/KeyboardInterrupt
        logger.exception("Scale library content of {0} cause a problem.".format(library_state_m.state))
    finally:
        resize_income_of_state_m(library_state_m.state_copy, resize_factor, gaphas_editor)
5.058358
4.796364
1.054623
def _resize_port_models_list(port_models, rel_pos_key, factor, gaphas_editor=True):
    """Resize relative positions of a list of (data or logical) port models

    :param port_models: Port models whose relative positions are scaled
    :param str rel_pos_key: Meta data key of the relative position field
    :param tuple factor: (x, y) scale factor
    :param bool gaphas_editor: Whether the gaphas editor meta data is used
    """
    for port_m in port_models:
        current_rel_pos = port_m.get_meta_data_editor(for_gaphas=gaphas_editor)[rel_pos_key]
        scaled_rel_pos = mult_two_vectors(factor, current_rel_pos)
        port_m.set_meta_data_editor(rel_pos_key, scaled_rel_pos, from_gaphas=gaphas_editor)
4.19843
4.106029
1.022504
def _resize_connection_models_list(connection_models, factor, gaphas_editor=True):
    """Resize relative positions of way points of a list of connection/linkage models

    :param connection_models: Transition/data flow models whose waypoints are scaled
    :param tuple factor: (x, y) scale factor
    :param bool gaphas_editor: Whether the gaphas editor meta data is used
    """
    for connection_m in connection_models:
        waypoints = connection_m.get_meta_data_editor(for_gaphas=gaphas_editor)['waypoints']
        scaled_waypoints = [mult_two_vectors(factor, waypoint) for waypoint in waypoints]
        connection_m.set_meta_data_editor('waypoints', scaled_waypoints, from_gaphas=gaphas_editor)
3.331422
3.201525
1.040573
def resize_state_port_meta(state_m, factor, gaphas_editor=True):
    """Resize data and logical ports relative positions

    :param state_m: State model whose port meta data is scaled
    :param tuple factor: (x, y) scale factor
    :param bool gaphas_editor: Whether the gaphas editor meta data is used
    """
    if not gaphas_editor and isinstance(state_m, ContainerStateModel):
        # OpenGL editor: outcomes have no meta positions, scoped variables do
        port_models = state_m.input_data_ports[:] + state_m.output_data_ports[:] + state_m.scoped_variables[:]
    else:
        port_models = state_m.input_data_ports[:] + state_m.output_data_ports[:] + state_m.outcomes[:]
        # NOTE(review): reconstruction assumes this line belongs to the else branch —
        # the first branch already includes scoped variables, so adding them here only
        # for the gaphas/container case; confirm against upstream source
        port_models += state_m.scoped_variables[:] if isinstance(state_m, ContainerStateModel) else []
    _resize_port_models_list(port_models, 'rel_pos' if gaphas_editor else 'inner_rel_pos', factor, gaphas_editor)
    resize_income_of_state_m(state_m, factor, gaphas_editor)
3.457237
3.423993
1.009709
def resize_state_meta(state_m, factor, gaphas_editor=True):
    """Resize state meta data recursively, which also includes LibraryStateModels' meta
    data and their internal state_copy

    :param state_m: State model whose meta data is scaled
    :param tuple factor: (x, y) scale factor
    :param bool gaphas_editor: Whether the gaphas editor meta data is used
    """
    # scale the state's own position and size
    old_rel_pos = state_m.get_meta_data_editor(for_gaphas=gaphas_editor)['rel_pos']
    state_m.set_meta_data_editor('rel_pos', mult_two_vectors(factor, old_rel_pos), from_gaphas=gaphas_editor)
    old_size = state_m.get_meta_data_editor(for_gaphas=gaphas_editor)['size']
    state_m.set_meta_data_editor('size', mult_two_vectors(factor, old_size), from_gaphas=gaphas_editor)
    if gaphas_editor:
        # gaphas also stores a separate name label geometry
        old_rel_pos = state_m.get_meta_data_editor(for_gaphas=gaphas_editor)['name']['rel_pos']
        state_m.set_meta_data_editor('name.rel_pos', mult_two_vectors(factor, old_rel_pos), from_gaphas=gaphas_editor)
        old_size = state_m.get_meta_data_editor(for_gaphas=gaphas_editor)['name']['size']
        state_m.set_meta_data_editor('name.size', mult_two_vectors(factor, old_size), from_gaphas=gaphas_editor)
    if isinstance(state_m, LibraryStateModel):
        if gaphas_editor and state_m.state_copy_initialized:
            if state_m.meta_data_was_scaled:
                resize_state_port_meta(state_m, factor, gaphas_editor)
            else:
                scale_library_ports_meta_data(state_m, gaphas_editor)
        if state_m.state_copy_initialized:
            # recurse into the hidden state copy of the library
            resize_state_meta(state_m.state_copy, factor, gaphas_editor)
    else:
        resize_state_port_meta(state_m, factor, gaphas_editor)
        # NOTE(review): container handling reconstructed as part of the else branch; a
        # ContainerStateModel is never a LibraryStateModel, so behavior is equivalent either way
        if isinstance(state_m, ContainerStateModel):
            _resize_connection_models_list(state_m.transitions[:] + state_m.data_flows[:], factor, gaphas_editor)
            for child_state_m in state_m.states.values():
                resize_state_meta(child_state_m, factor, gaphas_editor)
2.231805
2.145092
1.040424
def offset_rel_pos_of_models_meta_data_according_parent_state(models_dict):
    """Offset meta data of state elements according the area used indicated by the state meta data.

    Offsets the position of all handed elements in the dictionary by the parent
    state's relative position.

    :param models_dict: dict that holds lists of meta data with state attribute consistent keys
    :return: True
    """
    parent_rel_pos = models_dict['state'].get_meta_data_editor()['rel_pos']
    offset_rel_pos_of_all_models_in_dict(models_dict, pos_offset=parent_rel_pos)
    return True
5.319411
7.329438
0.72576
def scale_meta_data_according_states(models_dict):
    """Offset meta data of state elements according the area used indicated by the states
    and maybe scoped variables (in case of OpenGL editor) meta data.

    Method is used by group states to set the offset for the elements in the new container
    state. The method needs some generalisation to create methods to easily scale meta data
    according new parents or views (e.g. to show inner elements of a library state).

    :param models_dict: dictionary that holds lists of meta data with state attribute consistent keys
    :return: True
    """
    left, right, top, bottom = get_boundaries_of_elements_in_dict(models_dict=models_dict)
    parent_size = models_dict['state'].parent.get_meta_data_editor()['size']
    _, rel_pos, size = cal_frame_according_boundaries(left, right, top, bottom, parent_size)
    # Set size and position of new container state
    models_dict['state'].set_meta_data_editor('rel_pos', rel_pos)
    models_dict['state'].set_meta_data_editor('size', size)
    # shift children so their absolute positions stay unchanged within the new frame
    offset = mult_two_vectors((-1., -1.), rel_pos)
    offset_rel_pos_of_all_models_in_dict(models_dict, offset)
    return True
5.346093
5.206895
1.026733
def get_closest_sibling_state(state_m, from_logical_port=None):
    """Calculate the closest sibling also from optional logical port of handed state model

    :param StateModel state_m: Reference state model the closest sibling state should be found for
    :param str from_logical_port: The logical port ("income"/"outcome") of the handed state model
        to be used as reference point instead of the state frame
    :rtype: tuple
    :return: distance, StateModel of closest state (or None when there is no parent/sibling)
    """
    if not state_m.parent:
        logger.warning("A state can not have a closest sibling state if it has not parent as {0}".format(state_m))
        return
    margin = cal_margin(state_m.parent.get_meta_data_editor()['size'])
    pos = state_m.get_meta_data_editor()['rel_pos']
    size = state_m.get_meta_data_editor()['size']
    # otherwise measure from reference state itself
    if from_logical_port in ["outcome", "income"]:
        # a logical port is treated as a margin-sized box at the port position
        size = (margin, margin)
    if from_logical_port == "outcome":
        outcomes_m = [outcome_m for outcome_m in state_m.outcomes if outcome_m.outcome.outcome_id >= 0]
        # prefer an outcome without a transition attached; fall back to the first outcome
        free_outcomes_m = [oc_m for oc_m in outcomes_m
                           if not state_m.state.parent.get_transition_for_outcome(state_m.state, oc_m.outcome)]
        if free_outcomes_m:
            outcome_m = free_outcomes_m[0]
        else:
            outcome_m = outcomes_m[0]
        pos = add_pos(pos, outcome_m.get_meta_data_editor()['rel_pos'])
    elif from_logical_port == "income":
        pos = add_pos(pos, state_m.income.get_meta_data_editor()['rel_pos'])
    min_distance = None
    for sibling_state_m in state_m.parent.states.values():
        if sibling_state_m is state_m:
            continue
        sibling_pos = sibling_state_m.get_meta_data_editor()['rel_pos']
        sibling_size = sibling_state_m.get_meta_data_editor()['size']
        distance = geometry.cal_dist_between_2_coord_frame_aligned_boxes(pos, size, sibling_pos, sibling_size)
        if not min_distance or min_distance[0] > distance:
            min_distance = (distance, sibling_state_m)
    return min_distance
3.194667
3.176892
1.005595
def get_action_arguments(self, target_state_m):
    """Collect argument attributes for action signal

    Use non empty list dict to create arguments for action signal msg and logger messages.
    The action parent model can be different from the target state model because logical and
    data port changes may also influence the linkage, see action-module (undo/redo).

    :param rafcon.gui.models.abstract_state.AbstractStateModel target_state_m: State model of
        target of action
    :return: dict with lists of elements part of the action, action parent model
    """
    non_empty_lists_dict = {key: elems for key, elems in self.model_copies.items() if elems}
    port_keys = ('input_data_ports', 'output_data_ports', 'scoped_variables', 'outcomes')
    pastes_a_port = any(key in non_empty_lists_dict for key in port_keys)
    action_parent_m = target_state_m
    if pastes_a_port and target_state_m.parent:
        action_parent_m = target_state_m.parent
    return non_empty_lists_dict, action_parent_m
6.026086
5.282622
1.140738
def copy(self, selection, smart_selection_adaption=True):
    """Copy all selected items to the clipboard, using smart selection adaptation by default.

    :param Selection selection: the current selection (must be a Selection instance)
    :param bool smart_selection_adaption: flag to enable smart selection adaptation mode
    :return: None
    """
    assert isinstance(selection, Selection)
    # delegate to the shared copy/cut implementation that stores the copies on the clipboard
    self.__create_core_and_model_object_copies(selection, smart_selection_adaption)
def cut(self, selection, smart_selection_adaption=False):
    """Cut all selected items: copy them to the clipboard and remove them from the model.

    Refuses to operate on elements inside of a library state. Emits an action
    signal before and after the removal so the operation can be tracked
    (undo/redo).

    :param selection: the current selection
    :param bool smart_selection_adaption: flag to enable smart selection adaptation mode
    :return: None
    """
    assert isinstance(selection, Selection)
    import rafcon.gui.helpers.state_machine as gui_helper_state_machine

    # elements inside a library state must not be modified
    if gui_helper_state_machine.is_selection_inside_of_library_state(selected_elements=selection.get_all()):
        logger.warning("Cut is not performed because elements inside of a library state are selected.")
        return

    copied_models_dict, parent_m = self.__create_core_and_model_object_copies(
        selection, smart_selection_adaption)
    removed_elements_dict, action_parent_m = self.get_action_arguments(parent_m or None)

    # announce the upcoming removal (after=False) with the elements to be removed
    action_parent_m.action_signal.emit(ActionSignalMsg(action='cut', origin='clipboard',
                                                       action_parent_m=action_parent_m,
                                                       affected_models=[], after=False,
                                                       kwargs={'remove': removed_elements_dict}))

    # destroy the core elements of all models that were copied into the clipboard
    for model_list in copied_models_dict.values():
        gui_helper_state_machine.delete_core_elements_of_models(model_list, destroy=True, recursive=True,
                                                                force=False)

    # signal completion with the flat list of all affected models (after=True)
    affected = [model for model_list in removed_elements_dict.values() for model in model_list]
    action_parent_m.action_signal.emit(ActionSignalMsg(action='cut', origin='clipboard',
                                                       action_parent_m=action_parent_m,
                                                       affected_models=affected, after=True))
def reset_clipboard(self):
    """Reset the clipboard so old elements do not pollute the next copy operation.

    :return: None
    """
    # empty the copied-model list for every state element type (in place,
    # keeping the existing model_copies dict object)
    for element_attr in ContainerState.state_element_attrs:
        self.model_copies[element_attr] = []
    # forget which parent state the copied elements were taken from
    self.copy_parent_state_id = None
    self.reset_clipboard_mapping_dicts()