code
string
signature
string
docstring
string
loss_without_docstring
float64
loss_with_docstring
float64
factor
float64
def on_close_clicked(self, event, state_machine_m, result, force=False):
    """Triggered when the close button of a state machine tab is clicked.

    Closes the state machine if it is saved. Otherwise the user is given the option to
    'Close without Saving' or to 'Cancel the Close Operation'.

    :param event: Button event; holding Shift+Ctrl forces the close
    :param state_machine_m: The selected state machine model.
    :param result: Unused
    :param bool force: Close without asking, even if there are unsaved changes
    :return: True if the state machine was closed, False otherwise
    """
    from rafcon.core.singleton import state_machine_execution_engine, state_machine_manager

    # Shift+Ctrl modifiers on the click event force the close operation
    if event is not None and hasattr(event, 'state') \
            and event.get_state() & Gdk.ModifierType.SHIFT_MASK \
            and event.get_state() & Gdk.ModifierType.CONTROL_MASK:
        force = True

    def remove_state_machine_m():
        # Remove the state machine from the manager if it is still registered there
        sm_id = state_machine_m.state_machine.state_machine_id
        if sm_id in self.model.state_machine_manager.state_machines:
            self.model.state_machine_manager.remove_state_machine(sm_id)

    def push_sm_running_dialog():
        # Ask whether a still-running state machine should be stopped and closed
        message_string = "The state machine is still running. Are you sure you want to close?"
        dialog = RAFCONButtonDialog(message_string, ["Stop and close", "Cancel"],
                                    message_type=Gtk.MessageType.QUESTION, parent=self.get_root_window())
        response_id = dialog.run()
        dialog.destroy()
        if response_id == 1:
            logger.debug("State machine execution is being stopped")
            state_machine_execution_engine.stop()
            state_machine_execution_engine.join()
            # wait for gui is needed; otherwise the signals related to the execution engine cannot
            # be processed properly by the state machine under destruction
            rafcon.gui.utils.wait_for_gui()
            remove_state_machine_m()
            return True
        elif response_id == 2:
            logger.debug("State machine execution will keep running")
            return False

    def push_sm_dirty_dialog():
        # Ask whether unsaved changes may be discarded
        sm_id = state_machine_m.state_machine.state_machine_id
        root_state_name = state_machine_m.root_state.state.name
        message_string = "There are unsaved changes in the state machine '{0}' with id {1}. \nDo you want to close " \
                         "the state machine anyway?".format(root_state_name, sm_id)
        dialog = RAFCONButtonDialog(message_string, ["Close without saving", "Cancel"],
                                    message_type=Gtk.MessageType.QUESTION, parent=self.get_root_window())
        response_id = dialog.run()
        dialog.destroy()
        if response_id == 1:  # Close without saving pressed
            remove_state_machine_m()
            return True
        logger.debug("Closing of state machine canceled")
        return False

    # sm running
    if not state_machine_execution_engine.finished_or_stopped() and \
            state_machine_manager.active_state_machine_id == state_machine_m.state_machine.state_machine_id:
        return push_sm_running_dialog()
    # close is forced -> sm not saved
    elif force:
        remove_state_machine_m()
        return True
    # sm dirty -> save sm request dialog
    elif state_machine_m.state_machine.marked_dirty:
        return push_sm_dirty_dialog()
    else:
        remove_state_machine_m()
        return True
3.162879
3.189078
0.991785
def close_all_pages(self):
    """Closes all tabs of the state machines editor."""
    # snapshot the models first, since closing mutates self.tabs
    models = [tab['state_machine_m'] for tab in self.tabs.values()]
    for sm_m in models:
        self.on_close_clicked(None, sm_m, None, force=True)
5.27834
3.94169
1.339106
def refresh_state_machines(self, state_machine_ids):
    """Refresh the list of state machine tabs.

    Closes the given tabs, reloads the corresponding state machines from the file system and
    restores tab arrangement and selection.

    :param list state_machine_ids: List of state machine ids to be refreshed
    :return:
    """
    # remember current selected state machine id
    currently_selected_sm_id = None
    if self.model.get_selected_state_machine_model():
        currently_selected_sm_id = self.model.get_selected_state_machine_model().state_machine.state_machine_id

    # create a dictionary from state machine id to state machine path and one for tab page number for recovery
    state_machine_path_by_sm_id = {}
    page_num_by_sm_id = {}
    for sm_id, sm in self.model.state_machine_manager.state_machines.items():
        # the sm.base_path is only None if the state machine has never been loaded or saved before
        if sm_id in state_machine_ids and sm.file_system_path is not None:
            state_machine_path_by_sm_id[sm_id] = sm.file_system_path
            page_num_by_sm_id[sm_id] = self.get_page_num(sm_id)

    # close all state machine in list and remember if one was not closed
    for sm_id in state_machine_ids:
        was_closed = self.on_close_clicked(None, self.model.state_machines[sm_id], None, force=True)
        if not was_closed and sm_id in page_num_by_sm_id:
            logger.info("State machine with id {0} will not be re-open because was not closed.".format(sm_id))
            del state_machine_path_by_sm_id[sm_id]
            del page_num_by_sm_id[sm_id]

    # reload state machines from file system
    try:
        self.model.state_machine_manager.open_state_machines(state_machine_path_by_sm_id)
    except AttributeError as e:
        logger.warning("Not all state machines were re-open because {0}".format(e))
    import rafcon.gui.utils
    rafcon.gui.utils.wait_for_gui()  # TODO check again this is needed to secure that all sm-models are generated

    # recover tab arrangement
    self.rearrange_state_machines(page_num_by_sm_id)

    # recover initial selected state machine and case handling if now state machine is open anymore
    if currently_selected_sm_id:
        # case if only unsaved state machines are open
        if currently_selected_sm_id in self.model.state_machine_manager.state_machines:
            self.set_active_state_machine(currently_selected_sm_id)
3.665006
3.688939
0.993512
def refresh_all_state_machines(self):
    """Refreshes all state machine tabs."""
    all_sm_ids = list(self.model.state_machine_manager.state_machines.keys())
    self.refresh_state_machines(all_sm_ids)
5.888175
5.463801
1.07767
def switch_state_machine_execution_engine(self, new_state_machine_execution_engine):
    """Switch the state machine execution engine the main window controller listens to.

    :param new_state_machine_execution_engine: the new state machine execution engine for this controller
    :return:
    """
    # stop observing the old engine model before swapping in the new one
    self.relieve_model(self.state_machine_execution_model)
    self.state_machine_execution_model = new_state_machine_execution_engine
    self.observe_model(self.state_machine_execution_model)
3.954865
4.096571
0.965409
def execution_engine_model_changed(self, model, prop_name, info):
    """Highlight the tab of the active state machine.

    Adds the execution-running style class to the active state machine's tab label, or removes it
    from all tab labels when no state machine is active.
    """
    notebook = self.view['notebook']
    active_sm_id = self.model.state_machine_manager.active_state_machine_id
    if active_sm_id is None:
        # un-mark all state machine tabs that carry the execution-running style class
        for tab in self.tabs.values():
            label = notebook.get_tab_label(tab['page']).get_child().get_children()[0]
            style_context = label.get_style_context()
            if style_context.has_class(constants.execution_running_style_class):
                style_context.remove_class(constants.execution_running_style_class)
    else:
        # mark the active state machine's tab with the execution-running style class
        page = self.get_page_for_state_machine_id(active_sm_id)
        if page:
            label = notebook.get_tab_label(page).get_child().get_children()[0]
            label.get_style_context().add_class(constants.execution_running_style_class)
2.909643
2.758177
1.054915
def get_state_tuple(state, state_m=None):
    """Generates a tuple that holds the state as json-string and its meta data in a dictionary.

    The tuple consists of:
    [0] json_str for state, [1] dict of child_state tuples, [2] dict of model meta-data of self and elements,
    [3] path of state in state machine, [4] script_text, [5] file system path, [6] semantic data.

    :param rafcon.core.states.state.State state: The state that should be stored
    :param state_m: Optional state model used to extract meta data
    :return: state_tuple tuple
    """
    state_str = json.dumps(state, cls=JSONObjectEncoder, indent=4, check_circular=False, sort_keys=True)
    if isinstance(state, ContainerState):
        # recursively capture all child states
        state_tuples_dict = {child_id: get_state_tuple(child) for child_id, child in state.states.items()}
    else:
        state_tuples_dict = {}
    state_meta_dict = {} if state_m is None else get_state_element_meta(state_m)
    script_content = state.script.script if isinstance(state, ExecutionState) else None
    return (state_str, state_tuples_dict, state_meta_dict, state.get_path(), script_content,
            state.file_system_path, copy.deepcopy(state.semantic_data))
3.590248
2.850234
1.259633
def meta_dump_or_deepcopy(meta):
    """Deep-copy meta data, optionally running the reference-check debug hook first.

    Single point to observe and debug the meta data vivi-dict copy process.
    """
    if DEBUG_META_REFERENCES:  # debug copy
        from rafcon.gui.helpers.meta_data import meta_data_reference_check
        meta_data_reference_check(meta)
    return copy.deepcopy(meta)
10.443487
8.482962
1.231113
# logger.verbose("#H# STATE_MACHINE_UNDO STARTED") state = self.state_machine.root_state self.set_root_state_to_version(state, self.before_storage)
def undo(self)
General Undo, that takes all elements in the parent and :return:
24.483152
24.038605
1.018493
def get_cached_image(self, width, height, zoom, parameters=None, clear=False):
    """Get ImageSurface object, if possible, cached.

    If size and parameters match the last rendered image and `clear` is not set, the cached image is
    returned. Otherwise a new ImageSurface with the specified dimensions is created and cached.

    :param width: The width of the image
    :param height: The height of the image
    :param zoom: The current scale/zoom factor
    :param parameters: The parameters used for the image
    :param clear: If True, the cache is emptied, thus the image won't be retrieved from cache
    :returns: flag that is True when the image comes from the cache; the image surface; the zoom the
        image was stored with
    :rtype: bool, ImageSurface, float
    """
    global MAX_ALLOWED_AREA
    if not parameters:
        parameters = {}
    if self.__compare_parameters(width, height, zoom, parameters) and not clear:
        # cache hit: return the stored surface together with the zoom it was rendered at
        return True, self.__image, self.__zoom

    # Restrict image surface size to prevent excessive use of memory
    while True:
        try:
            self.__limiting_multiplicator = 1
            area = width * zoom * self.__zoom_multiplicator * height * zoom * self.__zoom_multiplicator
            if area > MAX_ALLOWED_AREA:
                self.__limiting_multiplicator = sqrt(MAX_ALLOWED_AREA / area)
            image = ImageSurface(self.__format,
                                 int(ceil(width * zoom * self.multiplicator)),
                                 int(ceil(height * zoom * self.multiplicator)))
            break  # the surface was successfully allocated
        except Error:
            # allocation failed: shrink the allowed area and retry with a smaller surface
            MAX_ALLOWED_AREA *= 0.8

    self.__set_cached_image(image, width, height, zoom, parameters)
    return False, self.__image, zoom
4.473712
4.20215
1.064625
def copy_image_to_context(self, context, position, rotation=0, zoom=None):
    """Draw a cached image on the context.

    :param context: The Cairo context to draw on
    :param position: The position of the image
    :param rotation: Rotation applied before painting
    :param zoom: Zoom factor; defaults to the zoom the image was cached with
    """
    if not zoom:
        zoom = self.__zoom
    zoom_multiplicator = zoom * self.multiplicator
    context.save()
    # scale down so the pre-scaled cached image maps 1:1 onto the target context
    context.scale(1. / zoom_multiplicator, 1. / zoom_multiplicator)
    image_position = round(position[0] * zoom_multiplicator), round(position[1] * zoom_multiplicator)
    context.translate(*image_position)
    context.rotate(rotation)
    context.set_source_surface(self.__image, 0, 0)
    context.paint()
    context.restore()
2.395099
2.590511
0.924566
def get_context_for_image(self, zoom):
    """Creates a temporary cairo context for the image surface.

    :param zoom: The current scaling factor
    :return: Cairo context to draw on
    """
    scale_factor = zoom * self.multiplicator
    cairo_context = Context(self.__image)
    cairo_context.scale(scale_factor, scale_factor)
    return cairo_context
4.890357
4.253887
1.149621
def __compare_parameters(self, width, height, zoom, parameters):
    """Compare parameters for equality with the cached image.

    Checks whether a cached image exists, the dimensions agree and the drawing parameters are equal.

    :param width: The width of the image
    :param height: The height of the image
    :param zoom: The current scale/zoom factor
    :param parameters: The parameters used for the image
    :return: True if all parameters are equal, False else
    """
    if not global_gui_config.get_config_value('ENABLE_CACHING', True):
        return False  # Deactivated caching
    if not self.__image:
        return False  # Empty cache
    if self.__width != width or self.__height != height:
        return False  # Changed image size
    if zoom > self.__zoom * self.__zoom_multiplicator:
        return False  # Current zoom greater than prepared zoom
    if zoom < self.__zoom / self.__zoom_multiplicator:
        # Current zoom much smaller than prepared zoom, causes high memory usage and imperfect anti-aliasing
        return False
    # Changed drawing parameter
    for key in parameters:
        try:
            if key not in self.__last_parameters or self.__last_parameters[key] != parameters[key]:
                return False
        except (AttributeError, ValueError):
            # Some values cannot be compared and raise an exception on comparison (e.g. numpy.ndarray). In this
            # case, just return False and do not cache.
            try:
                # Catch at least the ndarray-case, as this could occure relatively often
                import numpy
                if isinstance(self.__last_parameters[key], numpy.ndarray):
                    # NOTE(review): this decides based on a single ndarray key and skips remaining keys — confirm
                    return numpy.array_equal(self.__last_parameters[key], parameters[key])
            except ImportError:
                return False
            return False
    return True
5.005902
4.823674
1.037778
def post_setup_plugins(parser_result):
    """Calls the post init hubs

    :param dict parser_result: Dictionary with the parsed arguments
    """
    # accept both a plain dict and an argparse.Namespace
    arguments = parser_result if isinstance(parser_result, dict) else vars(parser_result)
    plugins.run_post_inits(arguments)
4.37417
5.253285
0.832654
def setup_environment():
    """Ensures that the environmental variable RAFCON_LIB_PATH is existent"""
    # prefer the XDG user data directory reported by GLib; fall back to ~/.local/share
    try:
        from gi.repository import GLib
        user_data_folder = GLib.get_user_data_dir()
    except ImportError:
        user_data_folder = join(os.path.expanduser("~"), ".local", "share")

    rafcon_root_path = dirname(realpath(rafcon.__file__))
    user_library_folder = join(user_data_folder, "rafcon", "libraries")

    # The RAFCON_LIB_PATH points to a path with common RAFCON libraries
    # If the env variable is not set, we have to determine it. In the future, this should always be
    # ~/.local/share/rafcon/libraries, but for backward compatibility, also a relative RAFCON path is supported
    if not os.environ.get('RAFCON_LIB_PATH', None):
        if exists(user_library_folder):
            os.environ['RAFCON_LIB_PATH'] = user_library_folder
        else:
            os.environ['RAFCON_LIB_PATH'] = join(dirname(dirname(rafcon_root_path)), 'share', 'libraries')

    # Install dummy _ builtin function in case i18.setup_l10n() is not called
    if sys.version_info >= (3,):
        import builtins as builtins23
    else:
        import __builtin__ as builtins23
    if "_" not in builtins23.__dict__:
        builtins23.__dict__["_"] = lambda s: s
3.89986
3.664934
1.064101
def parse_state_machine_path(path):
    """Parser for argparse checking for a proper state machine path

    :param str path: Input path from the user
    :return: The path
    :raises argparse.ArgumentTypeError: if the path does not contain a statemachine.json file
    """
    # accept both the current and the legacy state machine file name
    for sm_file_name in (storage.STATEMACHINE_FILE, storage.STATEMACHINE_FILE_OLD):
        if exists(join(path, sm_file_name)):
            return path
    raise argparse.ArgumentTypeError("Failed to open {0}: {1} not found in path".format(
        path, storage.STATEMACHINE_FILE))
3.961509
3.941555
1.005062
def setup_argument_parser():
    """Sets up the parser with the required arguments

    :return: The parser object
    """
    default_config_path = filesystem.get_default_config_path()
    filesystem.create_path(default_config_path)
    parser = core_singletons.argument_parser
    parser.add_argument('-o', '--open', type=parse_state_machine_path, dest='state_machine_path', metavar='path',
                        nargs='+',
                        help="specify directories of state-machines that shall be opened. The path must "
                             "contain a statemachine.json file")
    parser.add_argument('-c', '--config', type=config_path, metavar='path', dest='config_path',
                        default=default_config_path, nargs='?', const=default_config_path,
                        help="path to the configuration file config.yaml. Use 'None' to prevent the generation of "
                             "a config file and use the default configuration. Default: {0}".format(
                                 default_config_path))
    parser.add_argument('-r', '--remote', action='store_true', help="remote control mode")
    parser.add_argument('-s', '--start_state_path', metavar='path', dest='start_state_path', default=None, nargs='?',
                        help="path within a state machine to the state that should be launched. The state path "
                             "consists of state ids (e.g. QPOXGD/YVWJKZ whereof QPOXGD is the root state and YVWJKZ "
                             "it's child state to start from).")
    return parser
5.127609
5.257357
0.975321
def setup_configuration(config_path):
    """Loads the core configuration from the specified path and uses its content for further setup

    :param config_path: Path to the core config file
    """
    if config_path is None:
        # no explicit path: load defaults
        global_config.load(path=config_path)
    else:
        folder_path, config_file = filesystem.separate_folder_path_and_file_name(config_path)
        global_config.load(config_file=config_file, path=folder_path)
    # Initialize libraries
    core_singletons.library_manager.initialize()
4.373589
4.666557
0.93722
def open_state_machine(state_machine_path):
    """Load the specified state machine and register it with the state machine manager.

    :param str state_machine_path: The file path to the state machine
    :return StateMachine: The loaded state machine
    """
    state_machine = storage.load_state_machine_from_path(state_machine_path)
    core_singletons.state_machine_manager.add_state_machine(state_machine)
    return state_machine
4.978054
7.835473
0.635323
def wait_for_state_machine_finished(state_machine):
    """Wait for a state machine to finish its execution.

    :param state_machine: the statemachine to synchronize with
    :return:
    """
    global _user_abort
    from rafcon.core.states.execution_state import ExecutionState
    if not isinstance(state_machine.root_state, ExecutionState):
        # wait until the root state has produced at least one execution history entry
        while len(state_machine.execution_histories[0]) < 1:
            time.sleep(0.1)
    else:
        time.sleep(0.5)

    while state_machine.root_state.state_execution_status is not StateExecutionStatus.INACTIVE:
        try:
            state_machine.root_state.concurrency_queue.get(timeout=1)
            # this check triggers if the state machine could not be stopped in the signal handler
            if _user_abort:
                return
        except Empty:
            pass
        # no logger output here to make it easier for the parser
        logger.verbose("RAFCON live signal")
7.28126
7.343215
0.991563
def stop_reactor_on_state_machine_finish(state_machine):
    """Wait for a state machine to be finished and stops the reactor

    :param state_machine: the state machine to synchronize with
    """
    wait_for_state_machine_finished(state_machine)

    from twisted.internet import reactor
    if reactor.running:
        plugins.run_hook("pre_destruction")
        # stop() must run on the reactor thread
        reactor.callFromThread(reactor.stop)
5.435555
6.232108
0.872186
async def wait_until_ready(
    self, timeout: Optional[float] = None, no_raise: bool = False
) -> bool:
    """Waits for the underlying node to become ready.

    If no_raise is set, returns False when a timeout occurs instead of propagating TimeoutError.
    A timeout of None means to wait indefinitely.
    """
    if self.node.ready.is_set():
        # fast path: already ready
        return True
    try:
        return await self.node.wait_until_ready(timeout=timeout)
    except asyncio.TimeoutError:
        if not no_raise:
            raise
        return False
3.018954
2.395461
1.260281
async def connect(self):
    """Connects to the voice channel associated with this Player."""
    guild_id = self.channel.guild.id
    channel_id = self.channel.id
    await self.node.join_voice_channel(guild_id, channel_id)
8.047158
3.864904
2.082111
async def move_to(self, channel: discord.VoiceChannel):
    """Moves this player to a voice channel.

    Parameters
    ----------
    channel : discord.VoiceChannel
    """
    # moving across guilds is not supported — reconnect in the same guild only
    if channel.guild != self.channel.guild:
        raise TypeError("Cannot move to a different guild.")

    self.channel = channel
    await self.connect()
5.267554
5.234927
1.006233
async def disconnect(self, requested=True):
    """Disconnects this player from its voice channel."""
    if self.state == PlayerState.DISCONNECTING:
        # already on its way out
        return

    await self.update_state(PlayerState.DISCONNECTING)

    if not requested:
        log.debug(
            f"Forcing player disconnect for guild {self.channel.guild.id}"
            f" due to player manager request."
        )

    guild_id = self.channel.guild.id
    voice_ws = self.node.get_voice_ws(guild_id)
    if not voice_ws.closed:
        # clear the voice state before destroying the guild player on the node
        await voice_ws.voice_state(guild_id, None)
    await self.node.destroy_guild(guild_id)
    await self.close()

    self.manager.remove_player(self)
3.612024
3.45063
1.046773
async def handle_event(self, event: "node.LavalinkEvents", extra):
    """Handles various Lavalink Events.

    If the event is TRACK END, extra will be TrackEndReason.
    If the event is TRACK EXCEPTION, extra will be the string reason.
    If the event is TRACK STUCK, extra will be the threshold ms.

    Parameters
    ----------
    event : node.LavalinkEvents
    extra
    """
    if event != LavalinkEvents.TRACK_END:
        return
    if extra == TrackEndReason.FINISHED:
        # track finished normally: advance to the next queued track
        await self.play()
    else:
        self._is_playing = False
4.710038
4.393629
1.072015
async def handle_player_update(self, state: "node.PlayerState"):
    """Handles player updates from lavalink.

    Parameters
    ----------
    state : websocket.PlayerState
    """
    # a forward-moving position means lavalink is actively playing
    if state.position > self.position:
        self._is_playing = True
    self.position = state.position
6.915217
6.914993
1.000032
def add(self, requester: discord.User, track: Track):
    """Adds a track to the queue.

    Parameters
    ----------
    requester : discord.User
        User who requested the track.
    track : Track
        Result from any of the lavalink track search methods.
    """
    # tag the track with who asked for it before enqueueing
    track.requester = requester
    self.queue.append(track)
8.319868
7.334311
1.134376
async def play(self):
    """Starts playback from lavalink."""
    # with repeat enabled, the finished track is re-queued at the end
    if self.repeat and self.current is not None:
        self.queue.append(self.current)

    self.current = None
    self.position = 0
    self._paused = False

    if not self.queue:
        await self.stop()
        return

    self._is_playing = True
    if self.shuffle:
        track = self.queue.pop(randrange(len(self.queue)))
    else:
        track = self.queue.pop(0)
    self.current = track
    log.debug("Assigned current.")
    await self.node.play(self.channel.guild.id, track)
3.177421
2.895143
1.0975
async def stop(self):
    """Stops playback from lavalink.

    .. important::

        This method will clear the queue.
    """
    await self.node.stop(self.channel.guild.id)
    # reset all playback state
    self.queue = []
    self.current = None
    self.position = 0
    self._paused = False
6.02166
4.473517
1.346068
async def pause(self, pause: bool = True):
    """Pauses the current song.

    Parameters
    ----------
    pause : bool
        Set to ``False`` to resume.
    """
    # record the local flag before notifying the node
    self._paused = pause
    await self.node.pause(self.channel.guild.id, pause)
9.041479
9.16879
0.986115
async def set_volume(self, volume: int):
    """Sets the volume of Lavalink.

    Parameters
    ----------
    volume : int
        Between 0 and 150
    """
    # clamp into the supported range before sending to the node
    self._volume = min(max(volume, 0), 150)
    await self.node.volume(self.channel.guild.id, self.volume)
6.123977
4.447842
1.376842
async def seek(self, position: int):
    """If the track allows it, seeks to a position.

    Parameters
    ----------
    position : int
        Between 0 and track length.
    """
    # Bug fix: previously this raised AttributeError when no track was loaded
    # (self.current is None). Now it is a silent no-op, matching the
    # "not seekable" behavior.
    if self.current is None or not self.current.seekable:
        return
    # clamp into the track's valid range
    position = max(min(position, self.current.length), 0)
    await self.node.seek(self.channel.guild.id, position)
5.499233
4.985107
1.103132
async def create_player(self, channel: discord.VoiceChannel) -> Player:
    """Connects to a discord voice channel.

    This function is safe to repeatedly call as it will return an existing
    player if there is one.

    Parameters
    ----------
    channel

    Returns
    -------
    Player
        The created Player object.
    """
    if self._already_in_guild(channel):
        # reuse the existing player and just move it to the requested channel
        player = self.get_player(channel.guild.id)
        await player.move_to(channel)
    else:
        player = Player(self, channel)
        await player.connect()
        self._player_dict[channel.guild.id] = player
        await self.refresh_player_state(player)
    return player
3.141396
3.329324
0.943554
def get_player(self, guild_id: int) -> Player:
    """Gets a Player object from a guild ID.

    Parameters
    ----------
    guild_id : int
        Discord guild ID.

    Returns
    -------
    Player

    Raises
    ------
    KeyError
        If that guild does not have a Player, e.g. is not connected to any
        voice channel.
    """
    if guild_id not in self._player_dict:
        raise KeyError("No such player for that guild.")
    return self._player_dict[guild_id]
4.217839
4.822807
0.874561
async def disconnect(self):
    """Disconnects all players."""
    # iterate over a snapshot, since disconnecting removes players from the collection
    for player in tuple(self.players):
        await player.disconnect(requested=False)
    log.debug("Disconnected players.")
12.0665
9.052072
1.33301
def resource_filename(package_or_requirement, resource_name):
    """Similar to pkg_resources.resource_filename but if the resource is not found via pkg_resources
    it also looks in a predefined list of paths in order to find the resource

    :param package_or_requirement: the module in which the resource resides
    :param resource_name: the name of the resource
    :return: the path to the resource
    :rtype: str
    :raises RuntimeError: if the resource cannot be found anywhere
    """
    if pkg_resources.resource_exists(package_or_requirement, resource_name):
        return pkg_resources.resource_filename(package_or_requirement, resource_name)
    path = _search_in_share_folders(package_or_requirement, resource_name)
    if path:
        return path
    # Bug fix: the format arguments were swapped, producing e.g.
    # "Resource <package> not found in <resource_name>"
    raise RuntimeError("Resource {} not found in {}".format(resource_name, package_or_requirement))
2.379328
2.574177
0.924306
def resource_exists(package_or_requirement, resource_name):
    """Similar to pkg_resources.resource_exists but if the resource is not found via pkg_resources
    it also looks in a predefined list of paths in order to find the resource

    :param package_or_requirement: the module in which the resource resides
    :param resource_name: the name of the resource
    :return: a flag if the file exists
    :rtype: bool
    """
    if pkg_resources.resource_exists(package_or_requirement, resource_name):
        return True
    # fall back to the predefined share-folder search
    return bool(_search_in_share_folders(package_or_requirement, resource_name))
3.766976
3.89901
0.966136
def resource_string(package_or_requirement, resource_name):
    """Similar to pkg_resources.resource_string but if the resource is not found via pkg_resources
    it also looks in a predefined list of paths in order to find the resource

    :param package_or_requirement: the module in which the resource resides
    :param resource_name: the name of the resource
    :return: the file content
    :rtype: str
    """
    resource_path = resource_filename(package_or_requirement, resource_name)
    with open(resource_path, 'r') as resource_file:
        return resource_file.read()
2.16634
2.849715
0.760196
def resource_listdir(package_or_requirement, relative_path):
    """Similar to pkg_resources.resource_listdir but if the resource is not found via pkg_resources
    it also looks in a predefined list of paths in order to find the resource

    :param package_or_requirement: the module in which the resource resides
    :param relative_path: the relative path to the resource
    :return: a list of all files residing in the target path
    :rtype: list
    """
    path = resource_filename(package_or_requirement, relative_path)
    # directories are filtered out — only plain files are returned
    return [entry for entry in listdir(path) if isfile(join(path, entry))]
2.266669
3.243679
0.698796
def load_plugins():
    """Loads all plugins specified in the RAFCON_PLUGIN_PATH environment variable"""
    global plugin_dict
    # RAFCON_PLUGIN_PATH is an os.pathsep-separated list of plugin directories
    plugin_paths = set(os.environ.get('RAFCON_PLUGIN_PATH', '').split(os.pathsep))
    for plugin_path in plugin_paths:
        if not plugin_path:
            continue
        plugin_path = os.path.expandvars(os.path.expanduser(plugin_path)).strip()
        if not os.path.exists(plugin_path):
            logger.error("The specified plugin path does not exist: {}".format(plugin_path))
            continue
        dir_name, plugin_name = os.path.split(plugin_path.rstrip('/'))
        logger.info("Found plugin '{}' at {}".format(plugin_name, plugin_path))
        # make the plugin's parent directory importable
        sys.path.insert(0, dir_name)
        if plugin_name in plugin_dict:
            logger.error("Plugin '{}' already loaded".format(plugin_name))
            continue
        try:
            module = importlib.import_module(plugin_name)
            plugin_dict[plugin_name] = module
            logger.info("Successfully loaded plugin '{}'".format(plugin_name))
        except ImportError as e:
            logger.error("Could not import plugin '{}': {}\n{}".format(plugin_name, e,
                                                                       str(traceback.format_exc())))
2.030944
1.827513
1.111316
def run_hook(hook_name, *args, **kwargs):
    """Runs the passed hook on all registered plugins

    The function checks whether the hook is available in the plugin.

    :param hook_name: Name of the hook, corresponds to the function name being called
    :param args: Arguments
    :param kwargs: Keyword arguments
    """
    for module in plugin_dict.values():
        if not hasattr(module, "hooks"):
            continue
        hook = getattr(module.hooks, hook_name, None)
        if callable(hook):
            hook(*args, **kwargs)
3.102901
3.29991
0.940299
def get_state_model_class_for_state(state):
    """Determines the model class required for the given state instance

    :param state: Instance of a state (ExecutionState, BarrierConcurrencyState, ...)
    :return: The model class required for holding such a state instance, or None
    """
    from rafcon.gui.models.state import StateModel
    from rafcon.gui.models.container_state import ContainerStateModel
    from rafcon.gui.models.library_state import LibraryStateModel
    # order matters: container and library states are also States
    if isinstance(state, ContainerState):
        return ContainerStateModel
    if isinstance(state, LibraryState):
        return LibraryStateModel
    if isinstance(state, State):
        return StateModel
    logger.warning("There is not model for state of type {0} {1}".format(type(state), state))
    return None
2.700167
3.032603
0.890379
def update_is_start(self):
    """Updates the `is_start` property of the state

    A state is a start state if it is the root state, it has no parent, the parent is a LibraryState,
    or the state's state_id equals the start_state_id of its parent ContainerState.
    """
    # short-circuit order matters: the last term dereferences self.state.parent
    self.is_start = self.state.is_root_state \
        or self.parent is None \
        or isinstance(self.parent.state, LibraryState) \
        or self.state.state_id == self.state.parent.start_state_id
5.240649
2.783693
1.882625
def prepare_destruction(self, recursive=True):
    """Prepares the model for destruction

    Recursively un-registers all observers and removes references to child models
    """
    if self.state is None:
        logger.verbose("Multiple calls of prepare destruction for {0}".format(self))
    self.destruction_signal.emit()
    try:
        self.unregister_observer(self)
    except KeyError:
        # Might happen if the observer was already unregistered
        logger.verbose("Observer already unregistered!")
    if recursive:
        if self.income:
            self.income.prepare_destruction()
        for port in self.input_data_ports[:] + self.output_data_ports[:] + self.outcomes[:]:
            port.prepare_destruction()
    # clear the element lists, then drop all references
    del self.input_data_ports[:]
    del self.output_data_ports[:]
    del self.outcomes[:]
    self.state = None
    self.input_data_ports = None
    self.output_data_ports = None
    self.income = None
    self.outcomes = None
    # History TODO: these are needed by the modification history
    # self.action_signal = None
    # self.meta_signal = None
    # self.destruction_signal = None
    self.observe = None
    super(AbstractStateModel, self).prepare_destruction()
4.354419
4.217768
1.032399
def get_state_machine_m(self, two_factor_check=True):
    """Get respective state machine model

    Get a reference of the state machine model the state model belongs to. As long as the root state model
    has no direct reference to its state machine model, the state machine manager model is checked for the
    respective model.

    :param bool two_factor_check: Also verify that this model is the one registered for its state path
    :rtype: rafcon.gui.models.state_machine.StateMachineModel
    :return: respective state machine model
    """
    from rafcon.gui.singleton import state_machine_manager_model
    state_machine = self.state.get_state_machine()
    if state_machine and state_machine.state_machine_id in state_machine_manager_model.state_machines:
        sm_m = state_machine_manager_model.state_machines[state_machine.state_machine_id]
        if not two_factor_check or sm_m.get_state_model_by_path(self.state.get_path()) is self:
            return sm_m
        logger.debug("State model requesting its state machine model parent seems to be obsolete. "
                     "This is a hint to duplicated models and dirty coding")
    return None
5.583745
4.182235
1.33511
def get_input_data_port_m(self, data_port_id):
    """Returns the input data port model for the given data port id

    :param data_port_id: The data port id to search for
    :return: The model of the data port with the given id, None if not found
    """
    return next((port_m for port_m in self.input_data_ports
                 if port_m.data_port.data_port_id == data_port_id), None)
2.195073
2.48081
0.884821
def get_output_data_port_m(self, data_port_id):
    """Returns the output data port model for the given data port id

    :param data_port_id: The data port id to search for
    :return: The model of the data port with the given id, None if not found
    """
    return next((port_m for port_m in self.output_data_ports
                 if port_m.data_port.data_port_id == data_port_id), None)
2.160776
2.426957
0.890323
def get_data_port_m(self, data_port_id):
    """Searches and returns the model of a data port of a given state

    The input and the output data ports of the state model are searched for a port with the given id.

    :param data_port_id: The data port id to be searched
    :return: The model of the data port or None if it is not found
    """
    from itertools import chain
    for port_m in chain(self.input_data_ports, self.output_data_ports):
        if port_m.data_port.data_port_id == data_port_id:
            return port_m
    return None
2.194075
2.292182
0.957199
def get_outcome_m(self, outcome_id):
    """Return the outcome model for the given outcome id

    :param outcome_id: The outcome id to search for
    :return: The model of the outcome with the given id, or None if not found
    """
    for outcome_m in self.outcomes:
        if outcome_m.outcome.outcome_id == outcome_id:
            return outcome_m
    # Return None on a miss (previously False) for consistency with the data port
    # getters; both sentinels are falsy, so existing truthiness checks keep working.
    return None
def get_outcome_m(self, outcome_id)
Returns the outcome model for the given outcome id :param outcome_id: The outcome id to search for :return: The model of the outcome with the given id
3.305897
3.733655
0.885432
msg = info.arg # print("action_signal_triggered state: ", self.state.state_id, model, prop_name, info) if msg.action.startswith('sm_notification_'): return # # affected child propagation from state # if hasattr(self, 'states'): # for m in info['arg'].affected_models: # print(m, self.states) # print([m is mm for mm in self.states.values()], [m in self for m in info['arg'].affected_models], \) # [m in self.states.values() for m in info['arg'].affected_models] if any([m in self for m in info['arg'].affected_models]): if not msg.action.startswith('parent_notification_'): new_msg = msg._replace(action='parent_notification_' + msg.action) else: new_msg = msg for m in info['arg'].affected_models: # print('???propagate it to', m, m.parent) if isinstance(m, AbstractStateModel) and m in self: # print('!!!propagate it from {0} to {1} {2}'.format(self.state.state_id, m.state.state_id, m)) m.action_signal.emit(new_msg) if msg.action.startswith('parent_notification_'): return # recursive propagation of action signal TODO remove finally if self.parent is not None: # Notify parent about change of meta data info.arg = msg # print("DONE1", self.state.state_id, msg) self.parent.action_signal_triggered(model, prop_name, info) # print("FINISH DONE1", self.state.state_id, msg) # state machine propagation of action signal (indirect) TODO remove finally elif not msg.action.startswith('sm_notification_'): # Prevent recursive call # If we are the root state, inform the state machine model by emitting our own meta signal. # To make the signal distinguishable for a change of meta data to our state, the change property of # the message is prepended with 'sm_notification_' # print("DONE2", self.state.state_id, msg) new_msg = msg._replace(action='sm_notification_' + msg.action) self.action_signal.emit(new_msg) # print("FINISH DONE2", self.state.state_id, msg) else: # print("DONE3 NOTHING") pass
def action_signal_triggered(self, model, prop_name, info)
This method notifies the parent state and child state models about complex actions
4.442173
4.336092
1.024465
msg = info.arg # print("meta_changed state: ", model, prop_name, info) if msg.notification is None: # Meta data of this state was changed, add information about notification to the signal message notification = Notification(model, prop_name, info) msg = msg._replace(notification=notification) # print("DONE0 ", msg) if self.parent is not None: # Notify parent about change of meta data info.arg = msg self.parent.meta_changed(model, prop_name, info) # print("DONE1 ", msg) elif not msg.change.startswith('sm_notification_'): # Prevent recursive call # If we are the root state, inform the state machine model by emitting our own meta signal. # To make the signal distinguishable for a change of meta data to our state, the change property of # the message is prepended with 'sm_notification_' msg = msg._replace(change='sm_notification_' + msg.change) self.meta_signal.emit(msg) # print("DONE2 ", msg) else: # print("DONE3 NOTHING") pass
def meta_changed(self, model, prop_name, info)
This method notifies the parent state about changes made to the meta data
6.018604
5.765118
1.043969
# TODO: for an Execution state this method is called for each hierarchy level again and again, still?? check it! # print("1AbstractState_load_meta_data: ", path, not path) if not path: path = self.state.file_system_path # print("2AbstractState_load_meta_data: ", path) if path is None: self.meta = Vividict({}) return False path_meta_data = os.path.join(path, storage.FILE_NAME_META_DATA) # TODO: Should be removed with next minor release if not os.path.exists(path_meta_data): logger.debug("Because meta data was not found in {0} use backup option {1}" "".format(path_meta_data, os.path.join(path, storage.FILE_NAME_META_DATA_OLD))) path_meta_data = os.path.join(path, storage.FILE_NAME_META_DATA_OLD) # TODO use the following logger message to debug meta data load process and to avoid maybe repetitive loads # if not os.path.exists(path_meta_data): # logger.info("path not found {0}".format(path_meta_data)) try: # print("try to load meta data from {0} for state {1}".format(path_meta_data, self.state)) tmp_meta = storage.load_data_file(path_meta_data) except ValueError as e: # if no element which is newly generated log a warning # if os.path.exists(os.path.dirname(path)): # logger.debug("Because '{1}' meta data of {0} was not loaded properly.".format(self, e)) if not path.startswith(constants.RAFCON_TEMP_PATH_STORAGE) and not os.path.exists(os.path.dirname(path)): logger.debug("Because '{1}' meta data of {0} was not loaded properly.".format(self, e)) tmp_meta = {} # JSON returns a dict, which must be converted to a Vividict tmp_meta = Vividict(tmp_meta) if tmp_meta: self._parse_for_element_meta_data(tmp_meta) # assign the meta data to the state self.meta = tmp_meta self.meta_signal.emit(MetaSignalMsg("load_meta_data", "all", True)) return True else: # print("nothing to parse", tmp_meta) return False
def load_meta_data(self, path=None)
Load meta data of state model from the file system The meta data of the state model is loaded from the file system and stored in the meta property of the model. Existing meta data is removed. Also the meta data of all state elements (data ports, outcomes, etc) are loaded, as those stored in the same file as the meta data of the state. This is either called on the __init__ of a new state model or if a state model for a container state is created, which then calls load_meta_data for all its children. :param str path: Optional file system path to the meta data file. If not given, the path will be derived from the state's path on the filesystem :return: if meta data file was loaded True otherwise False :rtype: bool
4.793679
4.515299
1.061653
def store_meta_data(self, copy_path=None):
    """Save meta data of the state model to the file system

    Collects the meta data of the state together with the meta data of all its state elements
    (data ports, outcomes, etc.) and writes it as JSON. Must be called after the core data was
    stored, otherwise the file system path may be unset.

    :param str copy_path: Optional state machine copy path; if given, the meta data is written
        into the relative state folder below that path instead of the state's own path
    """
    if copy_path:
        target_path = os.path.join(copy_path, self.state.get_storage_path(), storage.FILE_NAME_META_DATA)
    elif self.state.file_system_path is None:
        logger.error("Meta data of {0} can be stored temporary arbitrary but by default first after the "
                     "respective state was stored and a file system path is set.".format(self))
        return
    else:
        target_path = os.path.join(self.state.file_system_path, storage.FILE_NAME_META_DATA)
    # Work on a copy so that element meta data injection does not alter the live model
    meta_data_to_store = deepcopy(self.meta)
    self._generate_element_meta_data(meta_data_to_store)
    storage_utils.write_dict_to_json(meta_data_to_store, target_path)
def store_meta_data(self, copy_path=None)
Save meta data of state model to the file system This method generates a dictionary of the meta data of the state together with the meta data of all state elements (data ports, outcomes, etc.) and stores it on the filesystem. Secure that the store meta data method is called after storing the core data otherwise the last_stored_path is maybe wrong or None. The copy path is considered to be a state machine file system path but not the current one but e.g. of a as copy saved state machine. The meta data will be stored in respective relative state folder in the state machine hierarchy. This folder has to exist. Dues the core elements of the state machine has to be stored first. :param str copy_path: Optional copy path if meta data is not stored to the file system path of state machine
4.530929
4.363619
1.038342
def copy_meta_data_from_state_m(self, source_state_m):
    """Dismiss current meta data and copy meta data from the given state model

    The meta data of the given state model is deep-copied onto this state and onto all of its
    state elements (data ports, outcomes, income).

    :param source_state_m: State model to load the meta data from
    """
    self.meta = deepcopy(source_state_m.meta)
    for port_m in self.input_data_ports:
        source_port_m = source_state_m.get_input_data_port_m(port_m.data_port.data_port_id)
        port_m.meta = deepcopy(source_port_m.meta)
    for port_m in self.output_data_ports:
        source_port_m = source_state_m.get_output_data_port_m(port_m.data_port.data_port_id)
        port_m.meta = deepcopy(source_port_m.meta)
    for outcome_m in self.outcomes:
        source_outcome_m = source_state_m.get_outcome_m(outcome_m.outcome.outcome_id)
        outcome_m.meta = deepcopy(source_outcome_m.meta)
    self.income.meta = deepcopy(source_state_m.income.meta)

    self.meta_signal.emit(MetaSignalMsg("copy_state_m", "all", True))
def copy_meta_data_from_state_m(self, source_state_m)
Dismiss current meta data and copy meta data from given state model The meta data of the given state model is used as meta data for this state. Also the meta data of all state elements (data ports, outcomes, etc.) is overwritten with the meta data of the elements of the given state. :param source_state_m: State model to load the meta data from
1.901546
1.806491
1.052618
# print("_parse meta data", meta_data) for data_port_m in self.input_data_ports: self._copy_element_meta_data_from_meta_file_data(meta_data, data_port_m, "input_data_port", data_port_m.data_port.data_port_id) for data_port_m in self.output_data_ports: self._copy_element_meta_data_from_meta_file_data(meta_data, data_port_m, "output_data_port", data_port_m.data_port.data_port_id) for outcome_m in self.outcomes: self._copy_element_meta_data_from_meta_file_data(meta_data, outcome_m, "outcome", outcome_m.outcome.outcome_id) if "income" in meta_data: if "gui" in meta_data and "editor_gaphas" in meta_data["gui"] and \ "income" in meta_data["gui"]["editor_gaphas"]: # chain necessary to prevent key generation del meta_data["gui"]["editor_gaphas"]["income"] elif "gui" in meta_data and "editor_gaphas" in meta_data["gui"] and \ "income" in meta_data["gui"]["editor_gaphas"]: # chain necessary to prevent key generation in meta data meta_data["income"]["gui"]["editor_gaphas"] = meta_data["gui"]["editor_gaphas"]["income"] del meta_data["gui"]["editor_gaphas"]["income"] self._copy_element_meta_data_from_meta_file_data(meta_data, self.income, "income", "")
def _parse_for_element_meta_data(self, meta_data)
Load meta data for state elements The meta data of the state meta data file also contains the meta data for state elements (data ports, outcomes, etc). This method parses the loaded meta data for each state element model. The meta data of the elements is removed from the passed dictionary. :param meta_data: Dictionary of loaded meta data
2.3915
2.290008
1.04432
def _copy_element_meta_data_from_meta_file_data(meta_data, element_m, element_name, element_id):
    """Assign the meta data of one element from the loaded meta data dictionary

    The entry is looked up under the key ``element_name + str(element_id)``, assigned to the
    element model, and removed from the dictionary.

    :param meta_data: The loaded meta data
    :param element_m: The element model that is supposed to retrieve the meta data
    :param element_name: The name string of the element type in the dictionary
    :param element_id: The id of the element
    """
    element_key = element_name + str(element_id)
    # print(element_key, element_m, meta_data[element_key])
    element_m.meta = meta_data[element_key]
    del meta_data[element_key]
def _copy_element_meta_data_from_meta_file_data(meta_data, element_m, element_name, element_id)
Helper method to assign the meta of the given element The method assigns the meta data of the elements from the given meta data dictionary. The copied meta data is then removed from the dictionary. :param meta_data: The loaded meta data :param element_m: The element model that is supposed to retrieve the meta data :param element_name: The name string of the element type in the dictionary :param element_id: The id of the element
2.238967
2.329868
0.960985
def _generate_element_meta_data(self, meta_data):
    """Generate meta data for state elements and add it to the given dictionary

    Retrieves the meta data of the state elements (data ports, outcomes, income) and adds it
    to the given meta data dictionary.

    :param meta_data: Dictionary of meta data
    """
    element_entries = [(port_m, "input_data_port", port_m.data_port.data_port_id)
                       for port_m in self.input_data_ports]
    element_entries += [(port_m, "output_data_port", port_m.data_port.data_port_id)
                        for port_m in self.output_data_ports]
    element_entries += [(outcome_m, "outcome", outcome_m.outcome.outcome_id)
                        for outcome_m in self.outcomes]
    element_entries.append((self.income, "income", ""))
    for element_m, element_name, element_id in element_entries:
        self._copy_element_meta_data_to_meta_file_data(meta_data, element_m, element_name, element_id)
def _generate_element_meta_data(self, meta_data)
Generate meta data for state elements and add it to the given dictionary This method retrieves the meta data of the state elements (data ports, outcomes, etc) and adds it to the given meta data dictionary. :param meta_data: Dictionary of meta data
2.059892
1.948444
1.057199
def save_file_data(self, path):
    """Implements the abstract method of the ExternalEditor class.

    Writes the current editor text to the script file inside *path*, but only if the state
    machine has unsaved changes and the initial save was not performed yet.

    :param str path: Directory the script file is written to
    """
    sm = self.model.state.get_state_machine()
    if sm.marked_dirty and not self.saved_initial:
        try:
            # Save the file before opening it to update the applied changes. Use option create_full_path=True
            # to assure that temporary state_machines' script files are saved to
            # (their path doesnt exist when not saved)
            filesystem.write_file(os.path.join(path, storage.SCRIPT_FILE), self.view.get_text(),
                                  create_full_path=True)
        except IOError as e:
            # Only happens if the file doesnt exist yet and would be written to the temp folder.
            # The method write_file doesnt create the path
            logger.error('The operating system raised an error: {}'.format(e))
        # NOTE(review): saved_initial is set even when the write above failed — confirm intended
        self.saved_initial = True
def save_file_data(self, path)
Implements the abstract method of the ExternalEditor class.
11.530882
11.378546
1.013388
def load_and_set_file_content(self, file_system_path):
    """Implements the abstract method of the ExternalEditor class.

    Reads the script file from the given path and, if it could be read, sets it as editor text.

    :param str file_system_path: Directory the script file is read from
    """
    script_text = filesystem.read_file(file_system_path, storage.SCRIPT_FILE)
    if script_text is None:
        return
    self.set_script_text(script_text)
def load_and_set_file_content(self, file_system_path)
Implements the abstract method of the ExternalEditor class.
6.060289
5.733503
1.056996
def apply_clicked(self, button):
    """Triggered when the Apply button in the source editor is clicked.

    Runs pylint over the current script (if the linter check is enabled) and either applies
    the text directly or asks the user for confirmation when errors were found.

    :param button: The Apply button widget
    """
    if isinstance(self.model.state, LibraryState):
        logger.warning("It is not allowed to modify libraries.")
        self.view.set_text("")
        return

    # Ugly workaround to give user at least some feedback about the parser
    # Without the loop, this function would block the GTK main loop and the log message would appear after the
    # function has finished
    # TODO: run parser in separate thread
    while Gtk.events_pending():
        Gtk.main_iteration_do(False)

    # get script
    current_text = self.view.get_text()

    # Directly apply script if linter was deactivated
    if not self.view['pylint_check_button'].get_active():
        self.set_script_text(current_text)
        return

    logger.debug("Parsing execute script...")
    with open(self.tmp_file, "w") as text_file:
        text_file.write(current_text)

    # clear astroid module cache, see http://stackoverflow.com/questions/22241435/pylint-discard-cached-file-state
    MANAGER.astroid_cache.clear()
    lint_config_file = resource_filename(rafcon.__name__, "pylintrc")
    args = ["--rcfile={}".format(lint_config_file)]  # put your own here
    with contextlib.closing(StringIO()) as dummy_buffer:
        json_report = JSONReporter(dummy_buffer.getvalue())
        try:
            lint.Run([self.tmp_file] + args, reporter=json_report, exit=False)
        except:  # NOTE(review): bare except — swallows even SystemExit from pylint; kept as-is
            logger.exception("Could not run linter to check script")
    os.remove(self.tmp_file)

    if json_report.messages:
        def on_message_dialog_response_signal(widget, response_id):
            # response 1 -> "Save with errors"
            if response_id == 1:
                self.set_script_text(current_text)
            else:
                logger.debug("The script was not saved")
            widget.destroy()

        message_string = "Are you sure that you want to save this file?\n\nThe following errors were found:"

        line = None
        for message in json_report.messages:
            (error_string, line) = self.format_error_string(message)
            message_string += "\n\n" + error_string

        # focus line of error (the last reported one)
        if line:
            tbuffer = self.view.get_buffer()
            start_iter = tbuffer.get_start_iter()
            start_iter.set_line(int(line)-1)
            tbuffer.place_cursor(start_iter)
            message_string += "\n\nThe line was focused in the source editor."
            self.view.scroll_to_cursor_onscreen()

        # select state to show source editor
        sm_m = state_machine_manager_model.get_state_machine_model(self.model)
        if sm_m.selection.get_selected_state() is not self.model:
            sm_m.selection.set(self.model)

        dialog = RAFCONButtonDialog(message_string, ["Save with errors", "Do not save"],
                                    on_message_dialog_response_signal,
                                    message_type=Gtk.MessageType.WARNING, parent=self.get_root_window())
        result = dialog.run()
    else:
        self.set_script_text(current_text)
def apply_clicked(self, button)
Triggered when the Apply button in the source editor is clicked.
5.207549
5.11613
1.017869
def split_text(text_to_split):
    """Split a debug text into its parts

    Splits the text into 'Time', 'LogLevel + Module Name' and 'Debug message'. If the text
    does not follow the expected "time: source:message" shape, the whole text becomes the
    message part.

    :param text_to_split: Text to split
    :return: Tuple of the stripped parts (time, source, message)
    """
    assert isinstance(text_to_split, string_types)
    try:
        time_part, remainder = text_to_split.split(': ', 1)
        source_part, message_part = remainder.split(':', 1)
    except ValueError:
        # Unexpected format -> treat everything as the message
        time_part = source_part = ""
        message_part = text_to_split
    return time_part.strip(), source_part.strip(), message_part.strip()
def split_text(text_to_split)
Split text Splits the debug text into its different parts: 'Time', 'LogLevel + Module Name', 'Debug message' :param text_to_split: Text to split :return: List containing the content of text_to_split split up
3.611386
3.536308
1.021231
def update_auto_scroll_mode(self):
    """Register or un-register the size-allocate handler used for follow mode"""
    follow_enabled = self._enables['CONSOLE_FOLLOW_LOGGING']
    handler_registered = self._auto_scroll_handler_id is not None
    if follow_enabled and not handler_registered:
        self._auto_scroll_handler_id = self.text_view.connect("size-allocate", self._auto_scroll)
    elif not follow_enabled and handler_registered:
        self.text_view.disconnect(self._auto_scroll_handler_id)
        self._auto_scroll_handler_id = None
def update_auto_scroll_mode(self)
Register or un-register signals for follow mode
2.926714
2.553895
1.14598
def _auto_scroll(self, *args):
    """Scroll the text view to its very end"""
    vadjustment = self['scrollable'].get_vadjustment()
    lower_edge = vadjustment.get_upper() - vadjustment.get_page_size()
    vadjustment.set_value(lower_edge)
def _auto_scroll(self, *args)
Scroll to the end of the text view
3.58897
3.504152
1.024205
def get_line_number_next_to_cursor_with_string_within(self, s):
    """Find the closest occurrence of a string with respect to the cursor position in the text view

    :param str s: String to search for
    :return: Tuple of (line number of the closest matching line or None, distance in lines or None)
    """
    line_number, _ = self.get_cursor_position()
    text_buffer = self.text_view.get_buffer()
    line_iter = text_buffer.get_iter_at_line(line_number)
    # find closest before line with string within
    before_line_number = None
    while line_iter.backward_line():
        if s in self.get_text_of_line(line_iter):
            before_line_number = line_iter.get_line()
            break
    # find closest after line with string within
    # NOTE(review): the forward scan reuses the same iterator, i.e. it starts where the
    # backward scan stopped (a match or the buffer start), not at the cursor — confirm intended
    after_line_number = None
    while line_iter.forward_line():
        if s in self.get_text_of_line(line_iter):
            after_line_number = line_iter.get_line()
            break
    # take closest one to current position
    if after_line_number is not None and before_line_number is None:
        return after_line_number, after_line_number - line_number
    elif before_line_number is not None and after_line_number is None:
        return before_line_number, line_number - before_line_number
    elif after_line_number is not None and before_line_number is not None:
        after_distance = after_line_number - line_number
        before_distance = line_number - before_line_number
        if after_distance < before_distance:
            return after_line_number, after_distance
        else:
            return before_line_number, before_distance
    else:
        # no occurrence in either direction
        return None, None
def get_line_number_next_to_cursor_with_string_within(self, s)
Find the closest occurrence of a string with respect to the cursor position in the text view
1.727145
1.718945
1.00477
key = str(key) # Ensure that we have the same string type for all keys (under Python2 and 3!) if self.variable_exist(key): if data_type is None: data_type = self.__global_variable_type_dictionary[key] else: if data_type is None: data_type = type(None) assert isinstance(data_type, type) self.check_value_and_type(value, data_type) with self.__global_lock: unlock = True if self.variable_exist(key): if self.is_locked(key) and self.__access_keys[key] != access_key: raise RuntimeError("Wrong access key for accessing global variable") elif self.is_locked(key): unlock = False else: access_key = self.lock_variable(key, block=True) else: self.__variable_locks[key] = Lock() access_key = self.lock_variable(key, block=True) # --- variable locked if per_reference: self.__global_variable_dictionary[key] = value self.__global_variable_type_dictionary[key] = data_type self.__variable_references[key] = True else: self.__global_variable_dictionary[key] = copy.deepcopy(value) self.__global_variable_type_dictionary[key] = data_type self.__variable_references[key] = False # --- release variable if unlock: self.unlock_variable(key, access_key) logger.debug("Global variable '{}' was set to value '{}' with type '{}'".format(key, value, data_type.__name__))
def set_variable(self, key, value, per_reference=False, access_key=None, data_type=None)
Sets a global variable :param key: the key of the global variable to be set :param value: the new value of the global variable :param per_reference: a flag to decide if the variable should be stored per reference or per value :param access_key: if the variable was explicitly locked with the rafcon.state lock_variable :raises exceptions.RuntimeError: if a wrong access key is passed
2.958574
2.863571
1.033176
def get_variable(self, key, per_reference=None, access_key=None, default=None):
    """Fetches the value of a global variable

    :param key: the key of the global variable to be fetched
    :param bool per_reference: a flag to decide if the variable should be returned per reference or per value;
        None means "per reference if possible"
    :param access_key: if the variable was explicitly locked with the rafcon.state lock_variable
    :param default: a value to be returned if the key does not exist
    :return: The value stored in the global variable key
    :raises exceptions.RuntimeError: if a wrong access key is passed or the variable cannot be accessed
        by reference
    """
    key = str(key)
    if self.variable_exist(key):
        unlock = True
        if self.is_locked(key):
            if self.__access_keys[key] == access_key:
                # Caller holds the lock -> keep it locked afterwards
                unlock = False
            else:
                if not access_key:
                    access_key = self.lock_variable(key, block=True)
                else:
                    raise RuntimeError("Wrong access key for accessing global variable")
        else:
            access_key = self.lock_variable(key, block=True)
        # --- variable locked
        if self.variable_can_be_referenced(key):
            if per_reference or per_reference is None:
                return_value = self.__global_variable_dictionary[key]
            else:
                return_value = copy.deepcopy(self.__global_variable_dictionary[key])
        else:
            if per_reference:
                # release the lock before raising, otherwise the variable stays locked
                self.unlock_variable(key, access_key)
                raise RuntimeError("Variable cannot be accessed by reference")
            else:
                return_value = copy.deepcopy(self.__global_variable_dictionary[key])
        # --- release variable
        if unlock:
            self.unlock_variable(key, access_key)
        return return_value
    else:
        # logger.warning("Global variable '{0}' not existing, returning default value".format(key))
        return default
def get_variable(self, key, per_reference=None, access_key=None, default=None)
Fetches the value of a global variable :param key: the key of the global variable to be fetched :param bool per_reference: a flag to decide if the variable should be stored per reference or per value :param access_key: if the variable was explicitly locked with the rafcon.state lock_variable :param default: a value to be returned if the key does not exist :return: The value stored at in the global variable key :raises exceptions.RuntimeError: if a wrong access key is passed or the variable cannot be accessed by reference
3.066973
2.801844
1.094627
def variable_can_be_referenced(self, key):
    """Checks whether the value of the variable can be returned by reference

    :param str key: Name of the variable
    :return: True if the value of the variable can be returned by reference, False else
    """
    return self.__variable_references.get(str(key), False)
def variable_can_be_referenced(self, key)
Checks whether the value of the variable can be returned by reference :param str key: Name of the variable :return: True if value of variable can be returned by reference, False else
4.965984
8.651166
0.574025
def delete_variable(self, key):
    """Deletes a global variable

    :param key: the key of the global variable to be deleted
    :raises exceptions.RuntimeError: if the global variable is currently locked
    :raises exceptions.AttributeError: if the global variable does not exist
    """
    key = str(key)
    if self.is_locked(key):
        raise RuntimeError("Global variable is locked")
    with self.__global_lock:
        if key in self.__global_variable_dictionary:
            # briefly lock the variable so no concurrent access happens during deletion
            access_key = self.lock_variable(key, block=True)
            del self.__global_variable_dictionary[key]
            self.unlock_variable(key, access_key)
            del self.__variable_locks[key]
            del self.__variable_references[key]
        else:
            raise AttributeError("Global variable %s does not exist!" % str(key))
    logger.debug("Global variable %s was deleted!" % str(key))
def delete_variable(self, key)
Deletes a global variable :param key: the key of the global variable to be deleted :raises exceptions.AttributeError: if the global variable does not exist
3.407458
3.385591
1.006459
key = str(key) # watch out for releasing the __dictionary_lock properly try: if key in self.__variable_locks: # acquire without arguments is blocking lock_successful = self.__variable_locks[key].acquire(False) if lock_successful or block: if (not lock_successful) and block: # case: lock could not be acquired => wait for it as block=True duration = 0. loop_time = 0.1 while not self.__variable_locks[key].acquire(False): time.sleep(loop_time) duration += loop_time if int(duration*10) % 20 == 0: # while loops informs the user about long locked variables logger.verbose("Variable '{2}' is locked and thread {0} waits already {1} seconds to " "access it.".format(currentThread(), duration, key)) access_key = global_variable_id_generator() self.__access_keys[key] = access_key return access_key else: logger.warning("Global variable {} already locked".format(str(key))) return False else: logger.error("Global variable key {} does not exist".format(str(key))) return False except Exception as e: logger.error("Exception thrown: {}".format(str(e))) return False
def lock_variable(self, key, block=False)
Locks a global variable :param key: the key of the global variable to be locked :param block: a flag to specify if to wait for locking the variable in blocking mode
5.31869
5.483051
0.970024
def unlock_variable(self, key, access_key, force=False):
    """Unlocks a global variable

    :param key: the key of the global variable to be unlocked
    :param access_key: the access key to be able to unlock the global variable
    :param force: if the variable should be unlocked forcefully (ignoring the access key)
    :return: True if the variable was unlocked, False if it was not locked
    :raises exceptions.AttributeError: if the global variable does not exist
    :raises exceptions.RuntimeError: if the wrong access key is passed
    """
    key = str(key)
    if self.__access_keys[key] == access_key or force:
        if key in self.__variable_locks:
            if self.is_locked(key):
                self.__variable_locks[key].release()
                return True
            else:
                logger.error("Global variable {} is not locked, thus cannot unlock it".format(str(key)))
                return False
        else:
            raise AttributeError("Global variable %s does not exist!" % str(key))
    else:
        raise RuntimeError("Wrong access key for accessing global variable")
def unlock_variable(self, key, access_key, force=False)
Unlocks a global variable :param key: the key of the global variable to be unlocked :param access_key: the access key to be able to unlock the global variable :param force: if the variable should be unlocked forcefully :raises exceptions.AttributeError: if the global variable does not exist :raises exceptions.RuntimeError: if the wrong access key is passed
3.484616
3.36745
1.034794
def set_locked_variable(self, key, access_key, value):
    """Set an already locked global variable

    :param key: the key of the global variable to be set
    :param access_key: the access key to the already locked global variable
    :param value: the new value of the global variable
    """
    # Delegate to set_variable; per_reference=False keeps value (copy) semantics
    return self.set_variable(key, value, access_key=access_key, per_reference=False)
def set_locked_variable(self, key, access_key, value)
Set an already locked global variable :param key: the key of the global variable to be set :param access_key: the access key to the already locked global variable :param value: the new value of the global variable
6.34553
10.404436
0.609887
def get_locked_variable(self, key, access_key):
    """Returns the value of a global variable that is already locked

    :param key: the key of the global variable
    :param access_key: the access_key to the global variable that is already locked
    """
    # Delegate to get_variable; per_reference=False returns a copy of the value
    return self.get_variable(key, access_key=access_key, per_reference=False)
def get_locked_variable(self, key, access_key)
Returns the value of an global variable that is already locked :param key: the key of the global variable :param access_key: the access_key to the global variable that is already locked
6.962701
9.806458
0.710012
def is_locked(self, key):
    """Returns the lock status of a global variable

    :param key: the unique key of the global variable
    :return: True if the variable exists and is currently locked, False else
    """
    key = str(key)
    variable_lock = self.__variable_locks.get(key)
    return variable_lock.locked() if variable_lock is not None else False
def is_locked(self, key)
Returns the status of the lock of a global variable :param key: the unique key of the global variable :return:
4.596767
5.784407
0.794683
def get_all_keys_starting_with(self, start_key):
    """Returns all keys matching the pattern given in start_key

    NOTE(review): despite the name, the original implementation matches keys that *contain*
    start_key anywhere (``in``), not only at the beginning; that behavior is kept.
    Empty keys are skipped.

    :param start_key: The start pattern to search all keys for.
    :return: list of matching keys
    """
    pattern = str(start_key)
    return [g_key for g_key in self.__global_variable_dictionary.keys() if g_key and pattern in g_key]
def get_all_keys_starting_with(self, start_key)
Returns all keys, which start with a certain pattern defined in :param start_key. :param start_key: The start pattern to search all keys for. :return:
3.077601
3.622811
0.849506
def global_variable_dictionary(self):
    """Property for the _global_variable_dictionary field

    Returns a copy of the variable dictionary: values stored per reference are shared,
    all other values are deep-copied.
    """
    return {var_key: (var_value if self.__variable_references.get(var_key) else copy.deepcopy(var_value))
            for var_key, var_value in self.__global_variable_dictionary.items()}
def global_variable_dictionary(self)
Property for the _global_variable_dictionary field
2.751724
2.725008
1.009804
def check_value_and_type(value, data_type):
    """Checks if a given value is of a specific type

    None values and a NoneType data type are always accepted.

    :param value: the value to check
    :param data_type: the type to be checked upon
    :raises exceptions.TypeError: if the value does not conform to the data type
    """
    if value is None or data_type is type(None):
        return
    # if not isinstance(value, data_type):
    if not type_inherits_of_type(data_type, type(value)):
        raise TypeError(
            "Value: '{0}' is not of data type: '{1}', value type: {2}".format(value, data_type, type(value)))
def check_value_and_type(value, data_type)
Checks if a given value is of a specific type :param value: the value to check :param data_type: the type to be checked upon :return:
3.536984
3.828858
0.92377
def glue(self, pos):
    """Calculates the distance between the given position and the port

    :param (float, float) pos: Distance to this position is calculated
    :return: Tuple of the port position and the distance to it
    """
    # Distance between border of rectangle and point
    # Equation from http://stackoverflow.com/a/18157551/3568069
    half_width = self.width / 2.
    half_height = self.height / 2.
    dx = max(self.point.x - half_width - pos[0], 0, pos[0] - (self.point.x + half_width))
    dy = max(self.point.y - half_height - pos[1], 0, pos[1] - (self.point.y + half_height))
    return self.point, sqrt(dx ** 2 + dy ** 2)
def glue(self, pos)
Calculates the distance between the given position and the port :param (float, float) pos: Distance to this position is calculated :return: Distance to port :rtype: float
2.966478
3.070012
0.966276
def format_time(time):
    """Formats the given time into a HH:MM:SS string

    The input is divided by 1000 first (presumably milliseconds — inferred from the divisor).
    """
    total_seconds = time / 1000
    hours, remainder = divmod(total_seconds, 3600)
    minutes, seconds = divmod(remainder, 60)
    return "%02d:%02d:%02d" % (hours, minutes, seconds)
def format_time(time)
Formats the given time into HH:MM:SS
2.122725
2.101938
1.00989
def __destroy(self):
    """Remove controller from parent controller and/or destroy it self."""
    parent_ctrl = self.parent
    if parent_ctrl:
        parent_ctrl.remove_controller(self)
    else:
        self.destroy()
def __destroy(self)
Remove controller from parent controller and/or destroy it self.
4.958224
2.556584
1.939394
def register_view(self, view):
    """Called when the View was registered

    Connects all widget signals, makes the value cell renderers editable, assigns the list
    stores to the tree views and triggers an initial update of all displayed data.
    """
    super(PreferencesWindowController, self).register_view(view)
    # library add/remove buttons
    self.view['add_library_button'].connect('clicked', self._on_add_library)
    self.view["remove_library_button"].connect('clicked', self._on_remove_library)
    # toggle boolean config values on row click
    self.view['config_tree_view'].connect("button_press_event", self._on_row_clicked_trigger_toggle_of_boolean,
                                          self.core_config_model, self.core_list_store)
    self.view['gui_tree_view'].connect("button_press_event", self._on_row_clicked_trigger_toggle_of_boolean,
                                       self.gui_config_model, self.gui_list_store)
    # editable value cells for core, gui, shortcut and library settings
    self.view['core_config_value_renderer'].set_property('editable', True)
    self.view['core_config_value_renderer'].connect('edited', self._on_config_value_changed, self.core_config_model,
                                                    self.core_list_store)
    self.view['gui_config_value_renderer'].set_property('editable', True)
    self.view['gui_config_value_renderer'].connect('edited', self._on_config_value_changed, self.gui_config_model,
                                                   self.gui_list_store)
    self.view['shortcut_config_value_renderer'].set_property('editable', True)
    self.view['shortcut_config_value_renderer'].connect('edited', self._on_shortcut_changed)
    self.view['library_config_key_renderer'].set_property('editable', True)
    self.view['library_config_key_renderer'].connect('edited', self._on_library_name_changed)
    self.view['library_config_value_renderer'].set_property('editable', True)
    self.view['library_config_value_renderer'].connect('edited', self._on_library_path_changed)
    # assign list stores to the tree views
    self.view['config_tree_view'].set_model(self.core_list_store)
    self.view['library_tree_view'].set_model(self.library_list_store)
    self.view['gui_tree_view'].set_model(self.gui_list_store)
    self.view['shortcut_tree_view'].set_model(self.shortcut_list_store)
    # window-level buttons and close handling
    self.view['apply_button'].connect("clicked", self._on_apply_button_clicked)
    self.view['ok_button'].connect('clicked', self._on_ok_button_clicked)
    self.view['cancel_button'].connect('clicked', self._on_cancel_button_clicked)
    self.view['import_button'].connect("clicked", self._on_import_config)
    self.view['export_button'].connect('clicked', self._on_export_config)
    self.view['preferences_window'].connect('delete_event', self._on_delete_event)
    self.update_all()
def register_view(self, view)
Called when the View was registered
1.767664
1.758684
1.005106
def on_config_value_changed(self, config_m, prop_name, info):
    """Callback when a config value has been changed

    Only collects information, delegates handling further to _handle_config_update.

    :param ConfigModel config_m: The config model that has been changed
    :param str prop_name: Should always be 'config'
    :param dict info: Information e.g. about the changed config key
    """
    if "key" in info['kwargs']:
        config_key = info['kwargs']['key']
    else:
        config_key = info['args'][1]
    # config_value = info['args'][-1] if "value" not in info['kwargs'] else info['kwargs']['value']
    self._handle_config_update(config_m, config_key)
def on_config_value_changed(self, config_m, prop_name, info)
Callback when a config value has been changed Only collects information, delegates handling further to _handle_config_update :param ConfigModel config_m: The config model that has been changed :param str prop_name: Should always be 'config' :param dict info: Information e.g. about the changed config key
3.464991
3.404156
1.017871
def on_preliminary_config_changed(self, config_m, prop_name, info):
    """Callback when a preliminary config value has been changed

    Mainly collects information, delegates handling further to _handle_config_update.

    :param ConfigModel config_m: The config model that has been changed
    :param str prop_name: Should always be 'preliminary_config'
    :param dict info: Information e.g. about the changed config key
    """
    self.check_for_preliminary_config()

    method_name = info['method_name']  # __setitem__, __delitem__, clear, ...
    if method_name in ('__setitem__', '__delitem__'):
        self._handle_config_update(config_m, info['args'][0])
        return
    # Probably the preliminary config has been cleared, update corresponding list stores
    if config_m is self.core_config_model:
        self.update_core_config_list_store()
        self.update_libraries_list_store()
    else:
        self.update_gui_config_list_store()
        self.update_shortcut_settings()
def on_preliminary_config_changed(self, config_m, prop_name, info)
Callback when a preliminary config value has been changed Mainly collects information, delegates handling further to _handle_config_update :param ConfigModel config_m: The config model that has been changed :param str prop_name: Should always be 'preliminary_config' :param dict info: Information e.g. about the changed config key
5.139226
4.912641
1.046123
if config_key == "LIBRARY_PATHS": self.update_libraries_list_store() if config_key == "SHORTCUTS": self.update_shortcut_settings() else: self.update_config_value(config_m, config_key)
def _handle_config_update(self, config_m, config_key)
Handles changes in config values The method ensures that the correct list stores are updated with the new values. :param ConfigModel config_m: The config model that has been changed :param config_key: The config key whose value has been changed :return:
4.800889
4.741705
1.012482
def update_all(self):
    """Shorthand method to update all collected information."""
    # Refresh every view element in the same order as before.
    for refresh in (self.update_path_labels,
                    self.update_core_config_list_store,
                    self.update_gui_config_list_store,
                    self.update_libraries_list_store,
                    self.update_shortcut_settings,
                    self.check_for_preliminary_config):
        refresh()
def update_all(self)
Shorthand method to update all collection information
6.060045
5.950545
1.018402
def check_for_preliminary_config(self):
    """Activates the 'Apply' button if there are preliminary changes."""
    # The button is sensitive iff either config model holds unapplied changes.
    has_pending_changes = bool(self.model.preliminary_config) or bool(self.gui_config_model.preliminary_config)
    self.view['apply_button'].set_sensitive(has_pending_changes)
def check_for_preliminary_config(self)
Activates the 'Apply' button if there are preliminary changes
4.064503
3.045071
1.334781
def update_path_labels(self):
    """Update the labels showing the config file paths."""
    label_specs = (('core_label', "Core Config Path: ", self.core_config_model),
                   ('gui_label', "GUI Config Path: ", self.gui_config_model))
    for label_name, prefix, config_m in label_specs:
        self.view[label_name].set_text(prefix + str(config_m.config.config_file_path))
def update_path_labels(self)
Update labels showing config paths
3.441797
2.866642
1.200637
def update_config_value(self, config_m, config_key):
    """Updates the corresponding list store of a changed config value

    :param ConfigModel config_m: The config model that has been changed
    :param str config_key: The config key whose value has been changed
    """
    # Fetch the value first (preserves call order of the original implementation).
    new_value = config_m.get_current_config_value(config_key)
    if config_m is self.core_config_model:
        target_store = self.core_list_store
    elif config_m is self.gui_config_model:
        target_store = self.gui_list_store
    else:
        # Not a model shown in this window — nothing to update.
        return
    self._update_list_store_entry(target_store, config_key, new_value)
def update_config_value(self, config_m, config_key)
Updates the corresponding list store of a changed config value :param ConfigModel config_m: The config model that has been changed :param str config_key: The config key whose value has been changed
2.789558
2.68596
1.03857
for row_num, row in enumerate(list_store): if row[self.KEY_STORAGE_ID] == config_key: row[self.VALUE_STORAGE_ID] = str(config_value) row[self.TOGGLE_VALUE_STORAGE_ID] = config_value return row_num
def _update_list_store_entry(self, list_store, config_key, config_value)
Helper method to update a list store :param Gtk.ListStore list_store: List store to be updated :param str config_key: Config key to search for :param config_value: New config value :returns: Row of list store that has been updated :rtype: int
3.270243
3.290931
0.993714
ignore_keys = [] if ignore_keys is None else ignore_keys list_store.clear() for config_key in sorted(config_m.config.keys): if config_key in ignore_keys: continue config_value = config_m.get_current_config_value(config_key) # (config_key, text, text_visible, toggle_activatable, toggle_visible, text_editable, toggle_state) if isinstance(config_value, bool): list_store.append((str(config_key), str(config_value), False, True, True, False, config_value)) else: list_store.append((str(config_key), str(config_value), True, False, False, True, config_value))
def _update_list_store(config_m, list_store, ignore_keys=None)
Generic method to create list store for a given config model :param ConfigModel config_m: Config model to read into list store :param Gtk.ListStore list_store: List store to be filled :param list ignore_keys: List of keys that should be ignored
2.56191
2.754755
0.929996
def update_libraries_list_store(self):
    """Creates the list store for the libraries."""
    self.library_list_store.clear()
    libraries = self.core_config_model.get_current_config_value("LIBRARY_PATHS",
                                                                use_preliminary=True, default={})
    # One (name, path) row per library, alphabetically by name.
    for library_name in sorted(libraries):
        self.library_list_store.append((library_name, libraries[library_name]))
def update_libraries_list_store(self)
Creates the list store for the libraries
3.27192
3.132276
1.044582
def update_shortcut_settings(self):
    """Creates the list store for the shortcuts."""
    self.shortcut_list_store.clear()
    shortcuts = self.gui_config_model.get_current_config_value("SHORTCUTS",
                                                               use_preliminary=True, default={})
    # One (action, keys) row per shortcut, alphabetically by action.
    for action in sorted(shortcuts):
        self.shortcut_list_store.append((str(action), str(shortcuts[action])))
def update_shortcut_settings(self)
Creates the list store for the shortcuts
5.061658
4.413725
1.1468
for row_num, iter_elem in enumerate(list_store): if iter_elem[column] == value: tree_view.set_cursor(row_num) return row_num
def _select_row_by_column_value(tree_view, list_store, column, value)
Helper method to select a tree view row :param Gtk.TreeView tree_view: Tree view whose row is to be selected :param Gtk.ListStore list_store: List store of the tree view :param int column: Column in which the value is searched :param value: Value to search for :returns: Row of list store that has been selected :rtype: int
2.962962
3.487005
0.849715
def _on_add_library(self, *event):
    """Callback method handling the addition of a new library."""
    self.view['library_tree_view'].grab_focus()
    if not react_to_event(self.view, self.view['library_tree_view'], event):
        return
    # Insert a unique placeholder entry the user can rename afterwards.
    placeholder_name = "<LIB_NAME_%s>" % self._lib_counter
    self._lib_counter += 1
    library_config = self.core_config_model.get_current_config_value("LIBRARY_PATHS",
                                                                     use_preliminary=True,
                                                                     default={})
    library_config[placeholder_name] = "<LIB_PATH>"
    self.core_config_model.set_preliminary_config_value("LIBRARY_PATHS", library_config)
    # Put the cursor on the freshly added row.
    self._select_row_by_column_value(self.view['library_tree_view'], self.library_list_store,
                                     self.KEY_STORAGE_ID, placeholder_name)
    return True
def _on_add_library(self, *event)
Callback method handling the addition of a new library
4.796769
4.662862
1.028718
def _on_remove_library(self, *event):
    """Callback method handling the removal of an existing library."""
    self.view['library_tree_view'].grab_focus()
    if not react_to_event(self.view, self.view['library_tree_view'], event):
        return
    path = self.view["library_tree_view"].get_cursor()[0]
    if path is not None:
        # Drop the selected library from the (preliminary) LIBRARY_PATHS dict.
        library_name = self.library_list_store[int(path[0])][0]
        library_config = self.core_config_model.get_current_config_value("LIBRARY_PATHS",
                                                                         use_preliminary=True,
                                                                         default={})
        del library_config[library_name]
        self.core_config_model.set_preliminary_config_value("LIBRARY_PATHS", library_config)
        # Keep the cursor on a valid row after the deletion.
        if len(self.library_list_store) > 0:
            self.view['library_tree_view'].set_cursor(min(path[0], len(self.library_list_store) - 1))
    return True
def _on_remove_library(self, *event)
Callback method handling the removal of an existing library
3.254746
3.167314
1.027605
config_key = config_list_store[int(path)][self.KEY_STORAGE_ID] config_value = bool(config_list_store[int(path)][self.TOGGLE_VALUE_STORAGE_ID]) config_value ^= True config_m.set_preliminary_config_value(config_key, config_value)
def _on_checkbox_toggled(self, renderer, path, config_m, config_list_store)
Callback method handling a config toggle event :param Gtk.CellRenderer renderer: Cell renderer that has been toggled :param path: Path within the list store :param ConfigModel config_m: The config model related to the toggle option :param Gtk.ListStore config_list_store: The list store related to the toggle option
4.341711
4.621735
0.939411
def _on_import_config(self, *args):
    """Callback for the import button

    Shows a dialog allowing the user to import an existing configuration file
    (core or GUI config, distinguished by its "TYPE" entry).
    Fixed: the dialog title contained a duplicated word ("Import Config Config from").
    """

    def handle_import(dialog_text, path_name):
        # NOTE(review): FileChooserAction.SAVE looks odd for an import dialog —
        # OPEN would be conventional; confirm before changing behavior.
        chooser = Gtk.FileChooserDialog(dialog_text, None, Gtk.FileChooserAction.SAVE,
                                        (Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL,
                                         Gtk.STOCK_OPEN, Gtk.ResponseType.ACCEPT))
        chooser.set_current_folder(path_name)
        response = chooser.run()
        if response == Gtk.ResponseType.ACCEPT:
            # get_filename() returns the whole file path including the filename
            config_file = chooser.get_filename()
            config_path = dirname(config_file)
            self._last_path = config_path
            config_dict = yaml_configuration.config.load_dict_from_yaml(config_file)
            # The "TYPE" entry decides which config model receives the data.
            config_type = config_dict.get("TYPE")
            if config_type == "SM_CONFIG":
                del config_dict["TYPE"]
                self.core_config_model.update_config(config_dict, config_file)
                logger.info("Imported Core Config from {0}".format(config_file))
            elif config_type == "GUI_CONFIG":
                del config_dict["TYPE"]
                self.gui_config_model.update_config(config_dict, config_file)
                logger.info("Imported GUI Config from {0}".format(config_file))
            else:
                logger.error("{0} is not a valid config file".format(config_file))
        elif response == Gtk.ResponseType.CANCEL:
            logger.info("Import of configuration cancelled")
        chooser.destroy()

    handle_import("Import Config from", self._last_path)
    self.check_for_preliminary_config()
    self.update_path_labels()
def _on_import_config(self, *args)
Callback method for when the import button is clicked Shows a dialog allowing the user to import an existing configuration file
2.848809
2.80027
1.017334
def _on_export_config(self, *args):
    """Callback for the export button

    Shows dialogs allowing the user to export the configurations into separate files.
    Fixed: the ".yaml" suffix check used a substring test (``".yaml" not in config_file``),
    which wrongly skipped appending the suffix for names like "config.yaml.bak"; it now
    uses ``str.endswith``.
    """
    response = self._config_chooser_dialog("Export configuration",
                                           "Please select the configuration file(s) to be exported:")
    if response == Gtk.ResponseType.REJECT:
        return

    def handle_export(dialog_text, path, config_m):
        chooser = Gtk.FileChooserDialog(dialog_text, None, Gtk.FileChooserAction.SAVE,
                                        (Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL,
                                         Gtk.STOCK_SAVE_AS, Gtk.ResponseType.ACCEPT))
        chooser.set_current_folder(path)
        response = chooser.run()
        if response == Gtk.ResponseType.ACCEPT:
            config_file = chooser.get_filename()
            if not config_file:
                logger.error("Configuration could not be exported! Invalid file name!")
            else:
                # Ensure the exported file carries the .yaml suffix.
                if not config_file.endswith(".yaml"):
                    config_file += ".yaml"
                if config_m.preliminary_config:
                    logger.warning("There are changes in the configuration that have not yet been applied. These "
                                   "changes will not be exported.")
                self._last_path = dirname(config_file)
                # Export a copy so the live config object keeps its original path.
                config_dict = config_m.as_dict()
                config_copy = yaml_configuration.config.DefaultConfig(str(config_dict))
                config_copy.config_file_path = config_file
                config_copy.path = self._last_path
                try:
                    config_copy.save_configuration()
                    logger.info("Configuration exported to {}" .format(config_file))
                except IOError:
                    logger.error("Cannot open file '{}' for writing".format(config_file))
        elif response == Gtk.ResponseType.CANCEL:
            logger.warning("Export Config canceled!")
        chooser.destroy()

    if self._core_checkbox.get_active():
        handle_export("Select file for core configuration", self._last_path, self.core_config_model)
    if self._gui_checkbox.get_active():
        handle_export("Select file for GUI configuration.", self._last_path, self.gui_config_model)
def _on_export_config(self, *args)
Callback method for when the export button is clicked Shows dialogs allowing the user to export the configurations into separate files
3.20253
3.159597
1.013588
if self.core_config_model.preliminary_config or self.gui_config_model.preliminary_config: self._on_apply_button_clicked() self.__destroy()
def _on_ok_button_clicked(self, *args)
OK button clicked: Applies the configurations and closes the window
8.127923
5.96606
1.36236
self.core_config_model.preliminary_config.clear() self.gui_config_model.preliminary_config.clear() self.__destroy()
def _on_cancel_button_clicked(self, *args)
Cancel button clicked: Dismiss preliminary config and close the window
8.691566
4.929112
1.763312