sentence1 stringlengths 52 3.87M | sentence2 stringlengths 1 47.2k | label stringclasses 1 value |
|---|---|---|
def handle(self, event):
"""
Handle the event by calling each tool until the event is handled
or grabbed.
If a tool is returning True on a button press event, the motion and
button release events are also passed to this
"""
# Allow to handle a subset of events while having a grabbed tool (between a button press & release event)
suppressed_grabbed_tool = None
if event.type in (Gdk.EventType.SCROLL, Gdk.EventType.KEY_PRESS, Gdk.EventType.KEY_RELEASE):
suppressed_grabbed_tool = self._grabbed_tool
self._grabbed_tool = None
rt = super(ToolChain, self).handle(event)
if suppressed_grabbed_tool:
self._grabbed_tool = suppressed_grabbed_tool
return rt | Handle the event by calling each tool until the event is handled
or grabbed.
If a tool is returning True on a button press event, the motion and
button release events are also passed to this | entailment |
def movable_items(self):
"""Filter selection
Filter items of selection that cannot be moved (i.e. are not instances of `Item`) and return the rest.
"""
view = self.view
if self._move_name_v:
yield InMotion(self._item, view)
else:
selected_items = set(view.selected_items)
for item in selected_items:
if not isinstance(item, Item):
continue
yield InMotion(item, view) | Filter selection
Filter items of selection that cannot be moved (i.e. are not instances of `Item`) and return the rest. | entailment |
def on_button_press(self, event):
"""Select items
When the mouse button is pressed, the selection is updated.
:param event: The button event
"""
if event.get_button()[1] not in self._buttons:
return False # Only handle events for registered buttons (left mouse clicks)
if event.get_state()[1] & constants.RUBBERBAND_MODIFIER:
return False # Mouse clicks with pressed shift key are handled in another tool
# Special case: moving the NameView
# This is only allowed, if the hovered item is a NameView and the Ctrl-key is pressed and the only selected
# item is the parental StateView. In this case, the selection and _item will no longer be looked at,
# but only _move_name_v
self._item = self.get_item()
if isinstance(self._item, NameView):
selected_items = self.view.selected_items
if event.get_state()[1] & Gdk.ModifierType.CONTROL_MASK and len(selected_items) == 1 and next(iter(selected_items)) is \
self._item.parent:
self._move_name_v = True
else:
self._item = self._item.parent
if not self._move_name_v:
self._old_selection = self.view.selected_items
if self._item not in self.view.selected_items:
# When items are to be moved, a button-press should not cause any deselection.
# However, the selection is stored, in case no move operation is performed.
self.view.handle_new_selection(self._item)
if not self.view.is_focus():
self.view.grab_focus()
return True | Select items
When the mouse button is pressed, the selection is updated.
:param event: The button event | entailment |
def on_button_release(self, event):
"""Write back changes
If one or more items have been moved, the new position are stored in the corresponding meta data and a signal
notifying the change is emitted.
:param event: The button event
"""
affected_models = {}
for inmotion in self._movable_items:
inmotion.move((event.x, event.y))
rel_pos = gap_helper.calc_rel_pos_to_parent(self.view.canvas, inmotion.item,
inmotion.item.handles()[NW])
if isinstance(inmotion.item, StateView):
state_v = inmotion.item
state_m = state_v.model
self.view.canvas.request_update(state_v)
if state_m.get_meta_data_editor()['rel_pos'] != rel_pos:
state_m.set_meta_data_editor('rel_pos', rel_pos)
affected_models[state_m] = ("position", True, state_v)
elif isinstance(inmotion.item, NameView):
state_v = inmotion.item
state_m = self.view.canvas.get_parent(state_v).model
self.view.canvas.request_update(state_v)
if state_m.get_meta_data_editor()['name']['rel_pos'] != rel_pos:
state_m.set_meta_data_editor('name.rel_pos', rel_pos)
affected_models[state_m] = ("name_position", False, state_v)
elif isinstance(inmotion.item, TransitionView):
transition_v = inmotion.item
transition_m = transition_v.model
self.view.canvas.request_update(transition_v)
current_waypoints = gap_helper.get_relative_positions_of_waypoints(transition_v)
old_waypoints = transition_m.get_meta_data_editor()['waypoints']
if current_waypoints != old_waypoints:
transition_m.set_meta_data_editor('waypoints', current_waypoints)
affected_models[transition_m] = ("waypoints", False, transition_v)
if len(affected_models) == 1:
model = next(iter(affected_models))
change, affects_children, view = affected_models[model]
self.view.graphical_editor.emit('meta_data_changed', model, change, affects_children)
elif len(affected_models) > 1:
# if more than one item has been moved, we need to call the meta_data_changed signal on a common parent
common_parents = None
for change, affects_children, view in affected_models.values():
parents_of_view = set(self.view.canvas.get_ancestors(view))
if common_parents is None:
common_parents = parents_of_view
else:
common_parents = common_parents.intersection(parents_of_view)
assert len(common_parents) > 0, "The selected elements do not have common parent element"
for state_v in common_parents:
# Find most nested state_v
children_of_state_v = self.view.canvas.get_all_children(state_v)
if any(common_parent in children_of_state_v for common_parent in common_parents):
continue
self.view.graphical_editor.emit('meta_data_changed', state_v.model, "positions", True)
break
if not affected_models and self._old_selection is not None:
# The selection is handled differently depending on whether states were moved or not
# If no move operation was performed, we reset the selection to that is was before the button-press event
# and let the state machine selection handle the selection
self.view.unselect_all()
self.view.select_item(self._old_selection)
self.view.handle_new_selection(self._item)
self._move_name_v = False
self._old_selection = None
return super(MoveItemTool, self).on_button_release(event) | Write back changes
If one or more items have been moved, the new position are stored in the corresponding meta data and a signal
notifying the change is emitted.
:param event: The button event | entailment |
def _filter_library_state(self, items):
"""Filters out child elements of library state when they cannot be hovered
Checks if hovered item is within a LibraryState
* if not, the list is returned unfiltered
* if so, STATE_SELECTION_INSIDE_LIBRARY_STATE_ENABLED is checked
* if enabled, the library is selected (instead of the state copy)
* if not, the upper most library is selected
:param list items: Sorted list of items beneath the cursor
:return: filtered items
:rtype: list
"""
if not items:
return items
top_most_item = items[0]
# If the hovered item is e.g. a connection, we need to get the parental state
top_most_state_v = top_most_item if isinstance(top_most_item, StateView) else top_most_item.parent
state = top_most_state_v.model.state
global_gui_config = gui_helper_state_machine.global_gui_config
if global_gui_config.get_config_value('STATE_SELECTION_INSIDE_LIBRARY_STATE_ENABLED'):
# select the library state instead of the library_root_state because it is hidden
if state.is_root_state_of_library:
new_topmost_item = self.view.canvas.get_view_for_core_element(state.parent)
return self.dismiss_upper_items(items, new_topmost_item)
return items
else:
# Find state_copy of uppermost LibraryState
library_root_state = state.get_uppermost_library_root_state()
# If the hovered element is a child of a library, make the library the hovered_item
if library_root_state:
library_state = library_root_state.parent
library_state_v = self.view.canvas.get_view_for_core_element(library_state)
return self.dismiss_upper_items(items, library_state_v)
return items | Filters out child elements of library state when they cannot be hovered
Checks if hovered item is within a LibraryState
* if not, the list is returned unfiltered
* if so, STATE_SELECTION_INSIDE_LIBRARY_STATE_ENABLED is checked
* if enabled, the library is selected (instead of the state copy)
* if not, the upper most library is selected
:param list items: Sorted list of items beneath the cursor
:return: filtered items
:rtype: list | entailment |
def _filter_hovered_items(self, items, event):
"""Filters out items that cannot be hovered
:param list items: Sorted list of items beneath the cursor
:param Gtk.Event event: Motion event
:return: filtered items
:rtype: list
"""
items = self._filter_library_state(items)
if not items:
return items
top_most_item = items[0]
second_top_most_item = items[1] if len(items) > 1 else None
# States/Names take precedence over connections if the connections are on the same hierarchy and if there is
# a port beneath the cursor
first_state_v = next(filter(lambda item: isinstance(item, (NameView, StateView)), items))
first_state_v = first_state_v.parent if isinstance(first_state_v, NameView) else first_state_v
if first_state_v:
# There can be several connections above the state/name skip those and find the first non-connection-item
for item in items:
if isinstance(item, ConnectionView):
# connection is on the same hierarchy level as the state/name, thus we dismiss it
if self.view.canvas.get_parent(top_most_item) is not first_state_v:
continue
break
# Connections are only dismissed, if there is a port beneath the cursor. Search for ports here:
port_beneath_cursor = False
state_ports = first_state_v.get_all_ports()
position = self.view.get_matrix_v2i(first_state_v).transform_point(event.x, event.y)
i2v_matrix = self.view.get_matrix_i2v(first_state_v)
for port_v in state_ports:
item_distance = port_v.port.glue(position)[1]
view_distance = i2v_matrix.transform_distance(item_distance, 0)[0]
if view_distance == 0:
port_beneath_cursor = True
break
if port_beneath_cursor:
items = self.dismiss_upper_items(items, item)
top_most_item = items[0]
second_top_most_item = items[1] if len(items) > 1 else None
# NameView can only be hovered if it or its parent state is selected
if isinstance(top_most_item, NameView):
state_v = second_top_most_item # second item in the list must be the parent state of the NameView
if state_v not in self.view.selected_items and top_most_item not in self.view.selected_items:
items = items[1:]
return items | Filters out items that cannot be hovered
:param list items: Sorted list of items beneath the cursor
:param Gtk.Event event: Motion event
:return: filtered items
:rtype: list | entailment |
def on_button_release(self, event):
"""Select or deselect rubber banded groups of items
The selection of elements is prior and never items are selected or deselected at the same time.
"""
self.queue_draw(self.view)
x0, y0, x1, y1 = self.x0, self.y0, self.x1, self.y1
rectangle = (min(x0, x1), min(y0, y1), abs(x1 - x0), abs(y1 - y0))
selected_items = self.view.get_items_in_rectangle(rectangle, intersect=False)
self.view.handle_new_selection(selected_items)
return True | Select or deselect rubber banded groups of items
The selection of elements is prior and never items are selected or deselected at the same time. | entailment |
def on_button_press(self, event):
"""Handle button press events.
If the (mouse) button is pressed on top of a Handle (item.Handle), that handle is grabbed and can be
dragged around.
"""
if not event.get_button()[1] == 1: # left mouse button
return False
view = self.view
if isinstance(view.hovered_item, StateView):
distance = view.hovered_item.border_width / 2.
item, handle = HandleFinder(view.hovered_item, view).get_handle_at_point((event.x, event.y), distance)
else:
item, handle = HandleFinder(view.hovered_item, view).get_handle_at_point((event.x, event.y))
if not handle:
return False
# Only move ports when the MOVE_PORT_MODIFIER key is pressed
if isinstance(item, (StateView, PortView)) and \
handle in [port.handle for port in item.get_all_ports()] and \
not (event.get_state()[1] & constants.MOVE_PORT_MODIFIER):
return False
# Do not move from/to handles of connections (only their waypoints)
if isinstance(item, ConnectionView) and handle in item.end_handles(include_waypoints=True):
return False
if handle:
view.hovered_item = item
self.motion_handle = None
self.grab_handle(item, handle)
return True | Handle button press events.
If the (mouse) button is pressed on top of a Handle (item.Handle), that handle is grabbed and can be
dragged around. | entailment |
def _set_motion_handle(self, event):
"""Sets motion handle to currently grabbed handle
"""
item = self.grabbed_item
handle = self.grabbed_handle
pos = event.x, event.y
self.motion_handle = HandleInMotion(item, handle, self.view)
self.motion_handle.GLUE_DISTANCE = self._parent_state_v.border_width
self.motion_handle.start_move(pos) | Sets motion handle to currently grabbed handle | entailment |
def _create_temporary_connection(self):
"""Creates a placeholder connection view
:return: New placeholder connection
:rtype: rafcon.gui.mygaphas.items.connection.ConnectionPlaceholderView
"""
if self._is_transition:
self._connection_v = TransitionPlaceholderView(self._parent_state_v.hierarchy_level)
else:
self._connection_v = DataFlowPlaceholderView(self._parent_state_v.hierarchy_level)
self.view.canvas.add(self._connection_v, self._parent_state_v) | Creates a placeholder connection view
:return: New placeholder connection
:rtype: rafcon.gui.mygaphas.items.connection.ConnectionPlaceholderView | entailment |
def _handle_temporary_connection(self, old_sink, new_sink, of_target=True):
"""Connect connection to new_sink
If new_sink is set, the connection origin or target will be set to new_sink. The connection to old_sink is
being removed.
:param gaphas.aspect.ConnectionSink old_sink: Old sink (if existing)
:param gaphas.aspect.ConnectionSink new_sink: New sink (if existing)
:param bool of_target: Whether the origin or target will be reconnected
:return:
"""
def sink_set_and_differs(sink_a, sink_b):
if not sink_a:
return False
if not sink_b:
return True
if sink_a.port != sink_b.port:
return True
return False
if sink_set_and_differs(old_sink, new_sink):
sink_port_v = old_sink.port.port_v
self._disconnect_temporarily(sink_port_v, target=of_target)
if sink_set_and_differs(new_sink, old_sink):
sink_port_v = new_sink.port.port_v
self._connect_temporarily(sink_port_v, target=of_target) | Connect connection to new_sink
If new_sink is set, the connection origin or target will be set to new_sink. The connection to old_sink is
being removed.
:param gaphas.aspect.ConnectionSink old_sink: Old sink (if existing)
:param gaphas.aspect.ConnectionSink new_sink: New sink (if existing)
:param bool of_target: Whether the origin or target will be reconnected
:return: | entailment |
def _connect_temporarily(self, port_v, target=True):
"""Set a connection between the current connection and the given port
:param rafcon.gui.mygaphas.items.ports.PortView port_v: The port to be connected
:param bool target: Whether the connection origin or target should be connected
"""
if target:
handle = self._connection_v.to_handle()
else:
handle = self._connection_v.from_handle()
port_v.add_connected_handle(handle, self._connection_v, moving=True)
port_v.tmp_connect(handle, self._connection_v)
self._connection_v.set_port_for_handle(port_v, handle)
# Redraw state of port to make hover state visible
self._redraw_port(port_v) | Set a connection between the current connection and the given port
:param rafcon.gui.mygaphas.items.ports.PortView port_v: The port to be connected
:param bool target: Whether the connection origin or target should be connected | entailment |
def _disconnect_temporarily(self, port_v, target=True):
"""Removes a connection between the current connection and the given port
:param rafcon.gui.mygaphas.items.ports.PortView port_v: The port that was connected
:param bool target: Whether the connection origin or target should be disconnected
"""
if target:
handle = self._connection_v.to_handle()
else:
handle = self._connection_v.from_handle()
port_v.remove_connected_handle(handle)
port_v.tmp_disconnect()
self._connection_v.reset_port_for_handle(handle)
# Redraw state of port to make hover state visible
self._redraw_port(port_v) | Removes a connection between the current connection and the given port
:param rafcon.gui.mygaphas.items.ports.PortView port_v: The port that was connected
:param bool target: Whether the connection origin or target should be disconnected | entailment |
def on_button_press(self, event):
"""Handle button press events.
If the (mouse) button is pressed on top of a Handle (item.Handle), that handle is grabbed and can be
dragged around.
"""
if not event.get_button()[1] == 1: # left mouse button
return False
view = self.view
item, handle = HandleFinder(view.hovered_item, view).get_handle_at_point((event.x, event.y))
if not handle: # Require a handle
return False
# Connection handle must belong to a port and the MOVE_PORT_MODIFIER must not be pressed
if not isinstance(item, StateView) or handle not in [port.handle for port in item.get_all_ports()] or (
event.get_state()[1] & constants.MOVE_PORT_MODIFIER):
return False
for port in item.get_all_ports():
if port.handle is handle:
self._start_port_v = port
if port in item.get_logic_ports():
self._is_transition = True
if port is item.income or isinstance(port, InputPortView) or port in item.scoped_variables:
self._parent_state_v = port.parent
elif port.parent.parent:
self._parent_state_v = port.parent.parent
else: # Outgoing port of the root state was clicked on, no connection can be drawn here
self._parent_state_v = None
return True | Handle button press events.
If the (mouse) button is pressed on top of a Handle (item.Handle), that handle is grabbed and can be
dragged around. | entailment |
def on_button_press(self, event):
"""Handle button press events.
If the (mouse) button is pressed on top of a Handle (item.Handle), that handle is grabbed and can be
dragged around.
"""
if not event.get_button()[1] == 1: # left mouse button
return False
view = self.view
item, handle = HandleFinder(view.hovered_item, view).get_handle_at_point((event.x, event.y))
# Handle must be the end handle of a connection
if not handle or not isinstance(item, ConnectionView) or handle not in item.end_handles():
return False
if handle is item.from_handle():
self._start_port_v = item.from_port
else:
self._start_port_v = item.to_port
self._parent_state_v = item.parent
self._end_handle = handle
if isinstance(item, TransitionView):
self._is_transition = True
self._connection_v = item
return True | Handle button press events.
If the (mouse) button is pressed on top of a Handle (item.Handle), that handle is grabbed and can be
dragged around. | entailment |
def register_view(self, view):
"""Called when the View was registered"""
super(ToolBarController, self).register_view(view)
self.view['button_new'].connect('clicked', self.on_button_new_clicked)
self.view['button_open'].connect('clicked', self.on_button_open_clicked)
self.view['button_save'].connect('clicked', self.on_button_save_clicked)
self.view['button_refresh'].connect('clicked', self.on_button_refresh_clicked)
self.view['button_refresh_selected'].connect('clicked', self.on_button_refresh_selected_clicked)
self.view['button_refresh_libs'].connect('clicked', self.on_button_refresh_libs_clicked)
self.view['button_bake_state_machine'].connect('clicked', self.on_button_bake_state_machine_clicked) | Called when the View was registered | entailment |
def initialize(self):
"""Initializes the library manager
It searches through all library paths given in the config file for libraries, and loads the states.
This cannot be done in the __init__ function as the library_manager can be compiled and executed by
singleton.py before the state*.pys are loaded
"""
logger.debug("Initializing LibraryManager: Loading libraries ... ")
self._libraries = {}
self._library_root_paths = {}
self._replaced_libraries = {}
self._skipped_states = []
self._skipped_library_roots = []
# 1. Load libraries from config.yaml
for library_root_key, library_root_path in config.global_config.get_config_value("LIBRARY_PATHS").items():
library_root_path = self._clean_path(library_root_path)
if os.path.exists(library_root_path):
logger.debug("Adding library root key '{0}' from path '{1}'".format(
library_root_key, library_root_path))
self._load_libraries_from_root_path(library_root_key, library_root_path)
else:
logger.warning("Configured path for library root key '{}' does not exist: {}".format(
library_root_key, library_root_path))
# 2. Load libraries from RAFCON_LIBRARY_PATH
library_path_env = os.environ.get('RAFCON_LIBRARY_PATH', '')
library_paths = set(library_path_env.split(os.pathsep))
for library_root_path in library_paths:
if not library_root_path:
continue
library_root_path = self._clean_path(library_root_path)
if not os.path.exists(library_root_path):
logger.warning("The library specified in RAFCON_LIBRARY_PATH does not exist: {}".format(library_root_path))
continue
_, library_root_key = os.path.split(library_root_path)
if library_root_key in self._libraries:
if os.path.realpath(self._library_root_paths[library_root_key]) == os.path.realpath(library_root_path):
logger.info("The library root key '{}' and root path '{}' exists multiple times in your environment"
" and will be skipped.".format(library_root_key, library_root_path))
else:
logger.warning("The library '{}' is already existing and will be overridden with '{}'".format(
library_root_key, library_root_path))
self._load_libraries_from_root_path(library_root_key, library_root_path)
else:
self._load_libraries_from_root_path(library_root_key, library_root_path)
logger.debug("Adding library '{1}' from {0}".format(library_root_path, library_root_key))
self._libraries = OrderedDict(sorted(self._libraries.items()))
logger.debug("Initialization of LibraryManager done") | Initializes the library manager
It searches through all library paths given in the config file for libraries, and loads the states.
This cannot be done in the __init__ function as the library_manager can be compiled and executed by
singleton.py before the state*.pys are loaded | entailment |
def _clean_path(path):
"""Create a fully fissile absolute system path with no symbolic links and environment variables"""
path = path.replace('"', '')
path = path.replace("'", '')
# Replace ~ with /home/user
path = os.path.expanduser(path)
# Replace environment variables
path = os.path.expandvars(path)
# If the path is relative, assume it is relative to the config file directory
if not os.path.isabs(path):
path = os.path.join(config.global_config.path, path)
# Clean path, e.g. replace /./ with /
path = os.path.abspath(path)
# Eliminate symbolic links
path = os.path.realpath(path)
return path | Create a fully fissile absolute system path with no symbolic links and environment variables | entailment |
def _load_nested_libraries(self, library_path, target_dict):
"""Recursively load libraries within path
Adds all libraries specified in a given path and stores them into the provided library dictionary. The library
entries in the dictionary consist only of the path to the library in the file system.
:param library_path: the path to add all libraries from
:param target_dict: the target dictionary to store all loaded libraries to
"""
for library_name in os.listdir(library_path):
library_folder_path, library_name = self.check_clean_path_of_library(library_path, library_name)
full_library_path = os.path.join(library_path, library_name)
if os.path.isdir(full_library_path) and library_name[0] != '.':
if os.path.exists(os.path.join(full_library_path, storage.STATEMACHINE_FILE)) \
or os.path.exists(os.path.join(full_library_path, storage.STATEMACHINE_FILE_OLD)):
target_dict[library_name] = full_library_path
else:
target_dict[library_name] = {}
self._load_nested_libraries(full_library_path, target_dict[library_name])
target_dict[library_name] = OrderedDict(sorted(target_dict[library_name].items())) | Recursively load libraries within path
Adds all libraries specified in a given path and stores them into the provided library dictionary. The library
entries in the dictionary consist only of the path to the library in the file system.
:param library_path: the path to add all libraries from
:param target_dict: the target dictionary to store all loaded libraries to | entailment |
def get_os_path_to_library(self, library_path, library_name, allow_user_interaction=True):
"""Find library_os_path of library
This function retrieves the file system library_os_path of a library specified by a library_path and a
library_name. In case the library does not exist any more at its original location, the user has to specify
an alternative location.
:param str library_path: The library_path of the library, that must be relative and within a library_root_path
given in the config.yaml by LIBRARY_PATHS
:param str library_name: The name of the library
:param bool allow_user_interaction: Whether the user may be asked to specify library location
:return: library_os_path within filesystem, library_path, library_name
:rtype: str, str, str
:raises rafcon.core.custom_exceptions.LibraryNotFoundException: if the cannot be found
"""
original_path_and_name = os.path.join(library_path, library_name)
library_path_root = library_path.split(os.sep)[0]
if library_path.split(os.sep) and \
(library_path.startswith(os.sep) or library_path.endswith(os.sep) or os.sep + os.sep in library_path):
raise LibraryNotFoundException("A library_path is not considered to start or end with {2} or to have two "
"path separators {2}{2} in a row like '{0}' with library name {1}"
"".format(library_path, library_name, os.sep))
if not self._library_root_paths:
raise LibraryNotFoundException("There are no libraries registered")
# skip already skipped states
if original_path_and_name in self._skipped_states or library_path_root in self._skipped_library_roots:
# if an already skipped state shall be loaded again, directly raise the exception to jump over this state
raise LibraryNotFoundException("Library '{0}' not found in subfolder {1}".format(library_name,
library_path))
# replace already replaced states
if original_path_and_name in self._replaced_libraries:
new_library_os_path = self._replaced_libraries[original_path_and_name][0]
new_library_path = self._replaced_libraries[original_path_and_name][1]
# only show debug message if a state is automatically replaced by the appropriate library state
# chosen by the user before
if not self._replaced_libraries[original_path_and_name][2]:
logger.debug("The library with library_path \"{0}\" and name \"{1}\" "
"is automatically replaced by the library "
"with file system library_os_path \"{2}\" and library_path \"{3}\""
"".format(library_path, library_name, new_library_os_path, new_library_path))
return new_library_os_path, new_library_path, library_name
# a boolean to indicate if a state was regularly found or by the help of the user
regularly_found = True
library_os_path = self._get_library_os_path_from_library_dict_tree(library_path, library_name)
while library_os_path is None: # until the library is found or the user aborts
regularly_found = False
new_library_os_path = None
if allow_user_interaction:
notice = "Cannot find library '{0}' in library_path '{1}' in any of the library root paths. " \
"Please check your library root paths configuration in config.yaml " \
"LIBRARY_PATHS and environment variable RAFCON_LIBRARY_PATH. " \
"If your library_path is correct and the library was moved, please " \
"select the new root/library_os_path folder of the library which should be situated within a "\
"loaded library_root_path. If not, please abort.".format(library_name, library_path)
interface.show_notice_func(notice)
new_library_os_path = interface.open_folder_func("Select root folder for library name '{0}'"
"".format(original_path_and_name))
if new_library_os_path is None:
# User clicked cancel => cancel library search
# If the library root path is existent (e.g. "generic") and only the specific library state is not (
# e.g. "generic/wait", then the state is added to the skipped states.
# If the library root path is not existing, we ignore the whole library, preventing the user from
# endless dialogs for each missing library state.
if library_path_root not in self.libraries:
self._skipped_library_roots.append(library_path_root)
else:
self._skipped_states.append(original_path_and_name)
raise LibraryNotFoundException("Library '{0}' not found in sub-folder {1}".format(library_name,
library_path))
if not os.path.exists(new_library_os_path):
logger.error('Specified library_os_path does not exist')
continue
# check if valid library_path and library_name can be created
library_path, library_name = self.get_library_path_and_name_for_os_path(new_library_os_path)
if library_path is None:
logger.error("Specified library_os_path not within loaded library_root_path list or your config.yaml "
"file LIBRARY_PATHS or in the list of paths in environment variable RAFCON_LIBRARY_PATH")
continue # Allow the user to change the directory
# verification if library is also in library tree
library_os_path = self._get_library_os_path_from_library_dict_tree(library_path, library_name)
if library_os_path is not None:
assert library_os_path == new_library_os_path
# save the replacement in order that for a future occurrence the correct library_os_path can be used,
# without asking the user for the correct library_os_path
self._replaced_libraries[original_path_and_name] = (library_os_path, library_path, regularly_found)
return library_os_path, library_path, library_name | Find library_os_path of library
This function retrieves the file system library_os_path of a library specified by a library_path and a
library_name. In case the library does not exist any more at its original location, the user has to specify
an alternative location.
:param str library_path: The library_path of the library, that must be relative and within a library_root_path
given in the config.yaml by LIBRARY_PATHS
:param str library_name: The name of the library
:param bool allow_user_interaction: Whether the user may be asked to specify library location
:return: library_os_path within filesystem, library_path, library_name
:rtype: str, str, str
:raises rafcon.core.custom_exceptions.LibraryNotFoundException: if the cannot be found | entailment |
def _get_library_os_path_from_library_dict_tree(self, library_path, library_name):
"""Hand verified library os path from libraries dictionary tree."""
if library_path is None or library_name is None:
return None
path_list = library_path.split(os.sep)
target_lib_dict = self.libraries
# go down the path to the correct library
for path_element in path_list:
if path_element not in target_lib_dict: # Library cannot be found
target_lib_dict = None
break
target_lib_dict = target_lib_dict[path_element]
return None if target_lib_dict is None or library_name not in target_lib_dict else target_lib_dict[library_name] | Hand verified library os path from libraries dictionary tree. | entailment |
def _get_library_root_key_for_os_path(self, path):
"""Return library root key if path is within library root paths"""
path = os.path.realpath(path)
library_root_key = None
for library_root_key, library_root_path in self._library_root_paths.items():
rel_path = os.path.relpath(path, library_root_path)
if rel_path.startswith('..'):
library_root_key = None
continue
else:
break
return library_root_key | Return library root key if path is within library root paths | entailment |
def get_library_path_and_name_for_os_path(self, path):
"""Generate valid library_path and library_name
The method checks if the given os path is in the list of loaded library root paths and use respective
library root key/mounting point to concatenate the respective library_path and separate respective library_name.
:param str path: A library os path a library is situated in.
:return: library path library name
:rtype: str, str
"""
library_path = None
library_name = None
library_root_key = self._get_library_root_key_for_os_path(path)
if library_root_key is not None:
library_root_path = self._library_root_paths[library_root_key]
path_elements_without_library_root = path[len(library_root_path)+1:].split(os.sep)
library_name = path_elements_without_library_root[-1]
sub_library_path = ''
if len(path_elements_without_library_root[:-1]):
sub_library_path = os.sep + os.sep.join(path_elements_without_library_root[:-1])
library_path = library_root_key + sub_library_path
return library_path, library_name | Generate valid library_path and library_name
The method checks if the given os path is in the list of loaded library root paths and use respective
library root key/mounting point to concatenate the respective library_path and separate respective library_name.
:param str path: A library os path a library is situated in.
:return: library path library name
:rtype: str, str | entailment |
def get_library_instance(self, library_path, library_name):
"""Generate a Library instance from within libraries dictionary tree."""
if self.is_library_in_libraries(library_path, library_name):
from rafcon.core.states.library_state import LibraryState
return LibraryState(library_path, library_name, "0.1")
else:
logger.warning("Library manager will not create a library instance which is not in the mounted libraries.") | Generate a Library instance from within libraries dictionary tree. | entailment |
def get_library_state_copy_instance(self, lib_os_path):
""" A method to get a state copy of the library specified via the lib_os_path.
:param lib_os_path: the location of the library to get a copy for
:return:
"""
# originally libraries were called like this; DO NOT DELETE; interesting for performance tests
# state_machine = storage.load_state_machine_from_path(lib_os_path)
# return state_machine.version, state_machine.root_state
# TODO observe changes on file system and update data
if lib_os_path in self._loaded_libraries:
# this list can also be taken to open library state machines TODO -> implement it -> because faster
state_machine = self._loaded_libraries[lib_os_path]
# logger.info("Take copy of {0}".format(lib_os_path))
# as long as the a library state root state is never edited so the state first has to be copied here
state_copy = copy.deepcopy(state_machine.root_state)
return state_machine.version, state_copy
else:
state_machine = storage.load_state_machine_from_path(lib_os_path)
self._loaded_libraries[lib_os_path] = state_machine
if config.global_config.get_config_value("NO_PROGRAMMATIC_CHANGE_OF_LIBRARY_STATES_PERFORMED", False):
return state_machine.version, state_machine.root_state
else:
state_copy = copy.deepcopy(state_machine.root_state)
return state_machine.version, state_copy | A method to get a state copy of the library specified via the lib_os_path.
:param lib_os_path: the location of the library to get a copy for
:return: | entailment |
def remove_library_from_file_system(self, library_path, library_name):
"""Remove library from hard disk."""
library_file_system_path = self.get_os_path_to_library(library_path, library_name)[0]
shutil.rmtree(library_file_system_path)
self.refresh_libraries() | Remove library from hard disk. | entailment |
def buses_of_vlvl(network, voltage_level):
""" Get bus-ids of given voltage level(s).
Parameters
----------
network : :class:`pypsa.Network
Overall container of PyPSA
voltage_level: list
Returns
-------
list
List containing bus-ids.
"""
mask = network.buses.v_nom.isin(voltage_level)
df = network.buses[mask]
return df.index | Get bus-ids of given voltage level(s).
Parameters
----------
network : :class:`pypsa.Network
Overall container of PyPSA
voltage_level: list
Returns
-------
list
List containing bus-ids. | entailment |
def buses_grid_linked(network, voltage_level):
""" Get bus-ids of a given voltage level connected to the grid.
Parameters
----------
network : :class:`pypsa.Network
Overall container of PyPSA
voltage_level: list
Returns
-------
list
List containing bus-ids.
"""
mask = ((network.buses.index.isin(network.lines.bus0) |
(network.buses.index.isin(network.lines.bus1))) &
(network.buses.v_nom.isin(voltage_level)))
df = network.buses[mask]
return df.index | Get bus-ids of a given voltage level connected to the grid.
Parameters
----------
network : :class:`pypsa.Network
Overall container of PyPSA
voltage_level: list
Returns
-------
list
List containing bus-ids. | entailment |
def geolocation_buses(network, session):
"""
If geopandas is installed:
Use Geometries of buses x/y(lon/lat) and Polygons
of Countries from RenpassGisParameterRegion
in order to locate the buses
Else:
Use coordinats of buses to locate foreign buses, which is less accurate.
Parameters
----------
network_etrago: : class: `etrago.tools.io.NetworkScenario`
eTraGo network object compiled by: meth: `etrago.appl.etrago`
session: : sqlalchemy: `sqlalchemy.orm.session.Session < orm/session_basics.html >`
SQLAlchemy session to the OEDB
"""
if geopandas:
# Start db connetion
# get renpassG!S scenario data
RenpassGISRegion = RenpassGisParameterRegion
# Define regions
region_id = ['DE', 'DK', 'FR', 'BE', 'LU', 'AT',
'NO', 'PL', 'CH', 'CZ', 'SE', 'NL']
query = session.query(RenpassGISRegion.gid,
RenpassGISRegion.u_region_id,
RenpassGISRegion.stat_level,
RenpassGISRegion.geom,
RenpassGISRegion.geom_point)
# get regions by query and filter
Regions = [(gid, u_region_id, stat_level, geoalchemy2.shape.to_shape(
geom), geoalchemy2.shape.to_shape(geom_point))
for gid, u_region_id, stat_level,
geom, geom_point in query.filter(RenpassGISRegion.u_region_id.
in_(region_id)).all()]
crs = {'init': 'epsg:4326'}
# transform lon lat to shapely Points and create GeoDataFrame
points = [Point(xy) for xy in zip(network.buses.x, network.buses.y)]
bus = gpd.GeoDataFrame(network.buses, crs=crs, geometry=points)
# Transform Countries Polygons as Regions
region = pd.DataFrame(
Regions, columns=['id', 'country', 'stat_level', 'Polygon',
'Point'])
re = gpd.GeoDataFrame(region, crs=crs, geometry=region['Polygon'])
# join regions and buses by geometry which intersects
busC = gpd.sjoin(bus, re, how='inner', op='intersects')
# busC
# Drop non used columns
busC = busC.drop(['index_right', 'Point', 'id', 'Polygon',
'stat_level', 'geometry'], axis=1)
# add busC to eTraGo.buses
network.buses['country_code'] = busC['country']
network.buses.country_code[network.buses.country_code.isnull()] = 'DE'
# close session
session.close()
else:
buses_by_country(network)
transborder_lines_0 = network.lines[network.lines['bus0'].isin(
network.buses.index[network.buses['country_code'] != 'DE'])].index
transborder_lines_1 = network.lines[network.lines['bus1'].isin(
network.buses.index[network.buses['country_code']!= 'DE'])].index
#set country tag for lines
network.lines.loc[transborder_lines_0, 'country'] = \
network.buses.loc[network.lines.loc[transborder_lines_0, 'bus0'].\
values,'country_code'].values
network.lines.loc[transborder_lines_1, 'country'] = \
network.buses.loc[network.lines.loc[transborder_lines_1, 'bus1'].\
values,'country_code'].values
network.lines['country'].fillna('DE', inplace=True)
doubles = list(set(transborder_lines_0.intersection(transborder_lines_1)))
for line in doubles:
c_bus0 = network.buses.loc[network.lines.loc[line, 'bus0'],
'country_code']
c_bus1 = network.buses.loc[network.lines.loc[line, 'bus1'],
'country_code']
network.lines.loc[line, 'country'] = '{}{}'.format(c_bus0, c_bus1)
transborder_links_0 = network.links[network.links['bus0'].isin(
network.buses.index[network.buses['country_code']!= 'DE'])].index
transborder_links_1 = network.links[network.links['bus1'].isin(
network.buses.index[network.buses['country_code'] != 'DE'])].index
#set country tag for links
network.links.loc[transborder_links_0, 'country'] = \
network.buses.loc[network.links.loc[transborder_links_0, 'bus0'].\
values, 'country_code'].values
network.links.loc[transborder_links_1, 'country'] = \
network.buses.loc[network.links.loc[transborder_links_1, 'bus1'].\
values, 'country_code'].values
network.links['country'].fillna('DE', inplace=True)
doubles = list(set(transborder_links_0.intersection(transborder_links_1)))
for link in doubles:
c_bus0 = network.buses.loc[
network.links.loc[link, 'bus0'], 'country_code']
c_bus1 = network.buses.loc[
network.links.loc[link, 'bus1'], 'country_code']
network.links.loc[link, 'country'] = '{}{}'.format(c_bus0, c_bus1)
return network | If geopandas is installed:
Use Geometries of buses x/y(lon/lat) and Polygons
of Countries from RenpassGisParameterRegion
in order to locate the buses
Else:
Use coordinats of buses to locate foreign buses, which is less accurate.
Parameters
----------
network_etrago: : class: `etrago.tools.io.NetworkScenario`
eTraGo network object compiled by: meth: `etrago.appl.etrago`
session: : sqlalchemy: `sqlalchemy.orm.session.Session < orm/session_basics.html >`
SQLAlchemy session to the OEDB | entailment |
def buses_by_country(network):
"""
Find buses of foreign countries using coordinates
and return them as Pandas Series
Parameters
----------
network : :class:`pypsa.Network
Overall container of PyPSA
Returns
-------
foreign_buses: Series containing buses by country
"""
poland = pd.Series(index=network.
buses[(network.buses['x'] > 17)].index,
data="PL")
czech = pd.Series(index=network.
buses[(network.buses['x'] < 17) &
(network.buses['x'] > 15.1)].index,
data="CZ")
denmark = pd.Series(index=network.
buses[((network.buses['y'] < 60) &
(network.buses['y'] > 55.2)) |
((network.buses['x'] > 11.95) &
(network.buses['x'] < 11.97) &
(network.buses['y'] > 54.5))].
index,
data="DK")
sweden = pd.Series(index=network.buses[(network.buses['y'] > 60)].index,
data="SE")
austria = pd.Series(index=network.
buses[(network.buses['y'] < 47.33) &
(network.buses['x'] > 9) |
((network.buses['x'] > 9.65) &
(network.buses['x'] < 9.9) &
(network.buses['y'] < 47.5) &
(network.buses['y'] > 47.3)) |
((network.buses['x'] > 12.14) &
(network.buses['x'] < 12.15) &
(network.buses['y'] > 47.57) &
(network.buses['y'] < 47.58)) |
(network.buses['y'] < 47.6) &
(network.buses['x'] > 14.1)].index,
data="AT")
switzerland = pd.Series(index=network.
buses[((network.buses['x'] > 8.1) &
(network.buses['x'] < 8.3) &
(network.buses['y'] < 46.8)) |
((network.buses['x'] > 7.82) &
(network.buses['x'] < 7.88) &
(network.buses['y'] > 47.54) &
(network.buses['y'] < 47.57)) |
((network.buses['x'] > 10.91) &
(network.buses['x'] < 10.92) &
(network.buses['y'] > 49.91) &
(network.buses['y'] < 49.92))].index,
data="CH")
netherlands = pd.Series(index=network.
buses[((network.buses['x'] < 6.96) &
(network.buses['y'] < 53.15) &
(network.buses['y'] > 53.1)) |
((network.buses['x'] < 5.4) &
(network.buses['y'] > 52.1))].index,
data="NL")
luxembourg = pd.Series(index=network.
buses[((network.buses['x'] < 6.15) &
(network.buses['y'] < 49.91) &
(network.buses['y'] > 49.65))].index,
data="LU")
france = pd.Series(index=network.
buses[(network.buses['x'] < 4.5) |
((network.buses['x'] > 7.507) &
(network.buses['x'] < 7.508) &
(network.buses['y'] > 47.64) &
(network.buses['y'] < 47.65)) |
((network.buses['x'] > 6.2) &
(network.buses['x'] < 6.3) &
(network.buses['y'] > 49.1) &
(network.buses['y'] < 49.2)) |
((network.buses['x'] > 6.7) &
(network.buses['x'] < 6.76) &
(network.buses['y'] > 49.13) &
(network.buses['y'] < 49.16))].index,
data="FR")
foreign_buses = pd.Series()
foreign_buses = foreign_buses.append([poland, czech, denmark, sweden,
austria, switzerland,
netherlands, luxembourg, france])
network.buses['country_code'] = foreign_buses[network.buses.index]
network.buses['country_code'].fillna('DE', inplace=True)
return foreign_buses | Find buses of foreign countries using coordinates
and return them as Pandas Series
Parameters
----------
network : :class:`pypsa.Network
Overall container of PyPSA
Returns
-------
foreign_buses: Series containing buses by country | entailment |
def clip_foreign(network):
"""
Delete all components and timelines located outside of Germany.
Add transborder flows divided by country of origin as
network.foreign_trade.
Parameters
----------
network : :class:`pypsa.Network
Overall container of PyPSA
Returns
-------
network : :class:`pypsa.Network
Overall container of PyPSA
"""
# get foreign buses by country
foreign_buses = network.buses[network.buses.country_code != 'DE']
network.buses = network.buses.drop(
network.buses.loc[foreign_buses.index].index)
# identify transborder lines (one bus foreign, one bus not) and the country
# it is coming from
"""transborder_lines = pd.DataFrame(index=network.lines[
((network.lines['bus0'].isin(network.buses.index) == False) &
(network.lines['bus1'].isin(network.buses.index) == True)) |
((network.lines['bus0'].isin(network.buses.index) == True) &
(network.lines['bus1'].isin(network.buses.index) == False))].index)
transborder_lines['bus0'] = network.lines['bus0']
transborder_lines['bus1'] = network.lines['bus1']
transborder_lines['country'] = ""
for i in range(0, len(transborder_lines)):
if transborder_lines.iloc[i, 0] in foreign_buses.index:
transborder_lines['country'][i] = foreign_buses[str(
transborder_lines.iloc[i, 0])]
else:
transborder_lines['country'][i] = foreign_buses[str(
transborder_lines.iloc[i, 1])]
# identify amount of flows per line and group to get flow per country
transborder_flows = network.lines_t.p0[transborder_lines.index]
for i in transborder_flows.columns:
if network.lines.loc[str(i)]['bus1'] in foreign_buses.index:
transborder_flows.loc[:, str(
i)] = transborder_flows.loc[:, str(i)]*-1
network.foreign_trade = transborder_flows.\
groupby(transborder_lines['country'], axis=1).sum()"""
# drop foreign components
network.lines = network.lines.drop(network.lines[
(network.lines['bus0'].isin(network.buses.index) == False) |
(network.lines['bus1'].isin(network.buses.index) == False)].index)
network.links = network.links.drop(network.links[
(network.links['bus0'].isin(network.buses.index) == False) |
(network.links['bus1'].isin(network.buses.index) == False)].index)
network.transformers = network.transformers.drop(network.transformers[
(network.transformers['bus0'].isin(network.buses.index) == False) |
(network.transformers['bus1'].isin(network.
buses.index) == False)].index)
network.generators = network.generators.drop(network.generators[
(network.generators['bus'].isin(network.buses.index) == False)].index)
network.loads = network.loads.drop(network.loads[
(network.loads['bus'].isin(network.buses.index) == False)].index)
network.storage_units = network.storage_units.drop(network.storage_units[
(network.storage_units['bus'].isin(network.
buses.index) == False)].index)
components = ['loads', 'generators', 'lines', 'buses', 'transformers',
'links']
for g in components: # loads_t
h = g + '_t'
nw = getattr(network, h) # network.loads_t
for i in nw.keys(): # network.loads_t.p
cols = [j for j in getattr(
nw, i).columns if j not in getattr(network, g).index]
for k in cols:
del getattr(nw, i)[k]
return network | Delete all components and timelines located outside of Germany.
Add transborder flows divided by country of origin as
network.foreign_trade.
Parameters
----------
network : :class:`pypsa.Network
Overall container of PyPSA
Returns
-------
network : :class:`pypsa.Network
Overall container of PyPSA | entailment |
def foreign_links(network):
"""Change transmission technology of foreign lines from AC to DC (links).
Parameters
----------
network : :class:`pypsa.Network
Overall container of PyPSA
Returns
-------
network : :class:`pypsa.Network
Overall container of PyPSA
"""
foreign_buses = network.buses[network.buses.country_code != 'DE']
foreign_lines = network.lines[network.lines.bus0.astype(str).isin(
foreign_buses.index) | network.lines.bus1.astype(str).isin(
foreign_buses.index)]
foreign_links = network.links[network.links.bus0.astype(str).isin(
foreign_buses.index) | network.links.bus1.astype(str).isin(
foreign_buses.index)]
network.links = network.links.drop(
network.links.index[network.links.index.isin(foreign_links.index)
& network.links.bus0.isin(network.links.bus1) &
(network.links.bus0 > network.links.bus1)])
foreign_links = network.links[network.links.bus0.astype(str).isin(
foreign_buses.index) | network.links.bus1.astype(str).isin(
foreign_buses.index)]
network.links.loc[foreign_links.index, 'p_min_pu'] = -1
network.links.loc[foreign_links.index, 'efficiency'] = 1
network.import_components_from_dataframe(
foreign_lines.loc[:, ['bus0', 'bus1', 'capital_cost', 'length']]
.assign(p_nom=foreign_lines.s_nom).assign(p_min_pu=-1)
.set_index('N' + foreign_lines.index),
'Link')
network.lines = network.lines.drop(foreign_lines.index)
return network | Change transmission technology of foreign lines from AC to DC (links).
Parameters
----------
network : :class:`pypsa.Network
Overall container of PyPSA
Returns
-------
network : :class:`pypsa.Network
Overall container of PyPSA | entailment |
def set_q_foreign_loads(network, cos_phi=1):
"""Set reative power timeseries of loads in neighbouring countries
Parameters
----------
network : :class:`pypsa.Network
Overall container of PyPSA
cos_phi: float
Choose ration of active and reactive power of foreign loads
Returns
-------
network : :class:`pypsa.Network
Overall container of PyPSA
"""
foreign_buses = network.buses[network.buses.country_code != 'DE']
network.loads_t['q_set'][network.loads.index[
network.loads.bus.astype(str).isin(foreign_buses.index)]] = \
network.loads_t['p_set'][network.loads.index[
network.loads.bus.astype(str).isin(
foreign_buses.index)]] * math.tan(math.acos(cos_phi))
network.generators.control[network.generators.control == 'PQ'] = 'PV'
return network | Set reative power timeseries of loads in neighbouring countries
Parameters
----------
network : :class:`pypsa.Network
Overall container of PyPSA
cos_phi: float
Choose ration of active and reactive power of foreign loads
Returns
-------
network : :class:`pypsa.Network
Overall container of PyPSA | entailment |
def connected_grid_lines(network, busids):
""" Get grid lines connected to given buses.
Parameters
----------
network : :class:`pypsa.Network
Overall container of PyPSA
busids : list
List containing bus-ids.
Returns
-------
:class:`pandas.DataFrame
PyPSA lines.
"""
mask = network.lines.bus1.isin(busids) |\
network.lines.bus0.isin(busids)
return network.lines[mask] | Get grid lines connected to given buses.
Parameters
----------
network : :class:`pypsa.Network
Overall container of PyPSA
busids : list
List containing bus-ids.
Returns
-------
:class:`pandas.DataFrame
PyPSA lines. | entailment |
def connected_transformer(network, busids):
""" Get transformer connected to given buses.
Parameters
----------
network : :class:`pypsa.Network
Overall container of PyPSA
busids : list
List containing bus-ids.
Returns
-------
:class:`pandas.DataFrame
PyPSA transformer.
"""
mask = (network.transformers.bus0.isin(busids))
return network.transformers[mask] | Get transformer connected to given buses.
Parameters
----------
network : :class:`pypsa.Network
Overall container of PyPSA
busids : list
List containing bus-ids.
Returns
-------
:class:`pandas.DataFrame
PyPSA transformer. | entailment |
def load_shedding(network, **kwargs):
""" Implement load shedding in existing network to identify
feasibility problems
Parameters
----------
network : :class:`pypsa.Network
Overall container of PyPSA
marginal_cost : int
Marginal costs for load shedding
p_nom : int
Installed capacity of load shedding generator
Returns
-------
"""
marginal_cost_def = 10000 # network.generators.marginal_cost.max()*2
p_nom_def = network.loads_t.p_set.max().max()
marginal_cost = kwargs.get('marginal_cost', marginal_cost_def)
p_nom = kwargs.get('p_nom', p_nom_def)
network.add("Carrier", "load")
start = network.generators.index.to_series().str.rsplit(
' ').str[0].astype(int).sort_values().max() + 1
index = list(range(start, start + len(network.buses.index)))
network.import_components_from_dataframe(
pd.DataFrame(
dict(marginal_cost=marginal_cost,
p_nom=p_nom,
carrier='load shedding',
bus=network.buses.index),
index=index),
"Generator"
)
return | Implement load shedding in existing network to identify
feasibility problems
Parameters
----------
network : :class:`pypsa.Network
Overall container of PyPSA
marginal_cost : int
Marginal costs for load shedding
p_nom : int
Installed capacity of load shedding generator
Returns
------- | entailment |
def data_manipulation_sh(network):
""" Adds missing components to run calculations with SH scenarios.
Parameters
----------
network : :class:`pypsa.Network
Overall container of PyPSA
"""
from shapely.geometry import Point, LineString, MultiLineString
from geoalchemy2.shape import from_shape, to_shape
# add connection from Luebeck to Siems
new_bus = str(network.buses.index.astype(np.int64).max() + 1)
new_trafo = str(network.transformers.index.astype(np.int64).max() + 1)
new_line = str(network.lines.index.astype(np.int64).max() + 1)
network.add("Bus", new_bus, carrier='AC',
v_nom=220, x=10.760835, y=53.909745)
network.add("Transformer", new_trafo, bus0="25536",
bus1=new_bus, x=1.29960, tap_ratio=1, s_nom=1600)
network.add("Line", new_line, bus0="26387",
bus1=new_bus, x=0.0001, s_nom=1600)
network.lines.loc[new_line, 'cables'] = 3.0
# bus geom
point_bus1 = Point(10.760835, 53.909745)
network.buses.set_value(new_bus, 'geom', from_shape(point_bus1, 4326))
# line geom/topo
network.lines.set_value(new_line, 'geom', from_shape(MultiLineString(
[LineString([to_shape(network.
buses.geom['26387']), point_bus1])]), 4326))
network.lines.set_value(new_line, 'topo', from_shape(LineString(
[to_shape(network.buses.geom['26387']), point_bus1]), 4326))
# trafo geom/topo
network.transformers.set_value(new_trafo,
'geom', from_shape(MultiLineString(
[LineString(
[to_shape(network
.buses.geom['25536']),
point_bus1])]), 4326))
network.transformers.set_value(new_trafo, 'topo', from_shape(
LineString([to_shape(network.buses.geom['25536']), point_bus1]), 4326))
return | Adds missing components to run calculations with SH scenarios.
Parameters
----------
network : :class:`pypsa.Network
Overall container of PyPSA | entailment |
def results_to_csv(network, args, pf_solution=None):
""" Function the writes the calaculation results
in csv-files in the desired directory.
Parameters
----------
network : :class:`pypsa.Network
Overall container of PyPSA
args: dict
Contains calculation settings of appl.py
pf_solution: pandas.Dataframe or None
If pf was calculated, df containing information of convergence else None.
"""
path = args['csv_export']
if path == False:
return None
if not os.path.exists(path):
os.makedirs(path, exist_ok=True)
network.export_to_csv_folder(path)
data = pd.read_csv(os.path.join(path, 'network.csv'))
data['time'] = network.results['Solver'].Time
data = data.apply(_enumerate_row, axis=1)
data.to_csv(os.path.join(path, 'network.csv'), index=False)
with open(os.path.join(path, 'args.json'), 'w') as fp:
json.dump(args, fp)
if not isinstance(pf_solution, type(None)):
pf_solution.to_csv(os.path.join(path, 'pf_solution.csv'), index=True)
if hasattr(network, 'Z'):
file = [i for i in os.listdir(
path.strip('0123456789')) if i == 'Z.csv']
if file:
print('Z already calculated')
else:
network.Z.to_csv(path.strip('0123456789') + '/Z.csv', index=False)
return | Function the writes the calaculation results
in csv-files in the desired directory.
Parameters
----------
network : :class:`pypsa.Network
Overall container of PyPSA
args: dict
Contains calculation settings of appl.py
pf_solution: pandas.Dataframe or None
If pf was calculated, df containing information of convergence else None. | entailment |
def parallelisation(network, args, group_size, extra_functionality=None):
"""
Function that splits problem in selected number of
snapshot groups and runs optimization successive for each group.
Not useful for calculations with storage untis or extension.
Parameters
----------
network : :class:`pypsa.Network
Overall container of PyPSA
args: dict
Contains calculation settings of appl.py
Returns
-------
network : :class:`pypsa.Network
Overall container of PyPSA
"""
print("Performing linear OPF, {} snapshot(s) at a time:".
format(group_size))
t = time.time()
for i in range(int((args['end_snapshot'] - args['start_snapshot'] + 1)
/ group_size)):
if i > 0:
network.storage_units.state_of_charge_initial = network.\
storage_units_t.state_of_charge.loc[
network.snapshots[group_size * i - 1]]
network.lopf(network.snapshots[
group_size * i:group_size * i + group_size],
solver_name=args['solver_name'],
solver_options=args['solver_options'],
extra_functionality=extra_functionality)
network.lines.s_nom = network.lines.s_nom_opt
print(time.time() - t / 60)
return | Function that splits problem in selected number of
snapshot groups and runs optimization successive for each group.
Not useful for calculations with storage untis or extension.
Parameters
----------
network : :class:`pypsa.Network
Overall container of PyPSA
args: dict
Contains calculation settings of appl.py
Returns
-------
network : :class:`pypsa.Network
Overall container of PyPSA | entailment |
def set_slack(network):
""" Function that chosses the bus with the maximum installed power as slack
Parameters
----------
network : :class:`pypsa.Network
Overall container of PyPSA
Returns
-------
network : :class:`pypsa.Network
Overall container of PyPSA
"""
old_slack = network.generators.index[network.
generators.control == 'Slack'][0]
# check if old slack was PV or PQ control:
if network.generators.p_nom[old_slack] > 50 and network.generators.\
carrier[old_slack] in ('solar', 'wind'):
old_control = 'PQ'
elif network.generators.p_nom[old_slack] > 50 and network.generators.\
carrier[old_slack] not in ('solar', 'wind'):
old_control = 'PV'
elif network.generators.p_nom[old_slack] < 50:
old_control = 'PQ'
old_gens = network.generators
gens_summed = network.generators_t.p.sum()
old_gens['p_summed'] = gens_summed
max_gen_buses_index = old_gens.groupby(['bus']).agg(
{'p_summed': np.sum}).p_summed.sort_values().index
for bus_iter in range(1, len(max_gen_buses_index) - 1):
if old_gens[(network.
generators['bus'] == max_gen_buses_index[-bus_iter]) &
(network.generators['control'] == 'PV')].empty:
continue
else:
new_slack_bus = max_gen_buses_index[-bus_iter]
break
network.generators = network.generators.drop('p_summed', 1)
new_slack_gen = network.generators.\
p_nom[(network.generators['bus'] == new_slack_bus) & (
network.generators['control'] == 'PV')].sort_values().index[-1]
network.generators = network.generators.set_value(
old_slack, 'control', old_control)
network.generators = network.generators.set_value(
new_slack_gen, 'control', 'Slack')
return network | Function that chosses the bus with the maximum installed power as slack
Parameters
----------
network : :class:`pypsa.Network
Overall container of PyPSA
Returns
-------
network : :class:`pypsa.Network
Overall container of PyPSA | entailment |
def pf_post_lopf(network, args, extra_functionality, add_foreign_lopf):
""" Function that prepares and runs non-linar load flow using PyPSA pf.
If network has been extendable, a second lopf with reactances adapted to
new s_nom is needed.
If crossborder lines are DC-links, pf is only applied on german network.
Crossborder flows are still considerd due to the active behavior of links.
To return a network containing the whole grid, the optimised solution of the
foreign components can be added afterwards.
Parameters
----------
network : :class:`pypsa.Network
Overall container of PyPSA
args: dict
Contains calculation settings of appl.py
extra_fuctionality: function or NoneType
Adds constraint to optimization (e.g. when applied snapshot clustering)
add_foreign_lopf: boolean
Choose if foreign results of lopf should be added to the network when
foreign lines are DC
Returns
-------
pf_solve: pandas.Dataframe
Contains information about convergency and calculation time of pf
"""
network_pf = network
# Update x of extended lines and transformers
if network_pf.lines.s_nom_extendable.any() or \
network_pf.transformers.s_nom_extendable.any():
storages_extendable = network_pf.storage_units.p_nom_extendable.copy()
lines_extendable = network_pf.lines.s_nom_extendable.copy()
links_extendable = network_pf.links.p_nom_extendable.copy()
trafos_extendable = network_pf.transformers.s_nom_extendable.copy()
storages_p_nom = network_pf.storage_units.p_nom.copy()
lines_s_nom= network_pf.lines.s_nom.copy()
links_p_nom = network_pf.links.p_nom.copy()
trafos_s_nom = network_pf.transformers.s_nom.copy()
network_pf.lines.x[network.lines.s_nom_extendable] = \
network_pf.lines.x * network.lines.s_nom /\
network_pf.lines.s_nom_opt
network_pf.lines.r[network.lines.s_nom_extendable] = \
network_pf.lines.r * network.lines.s_nom /\
network_pf.lines.s_nom_opt
network_pf.lines.b[network.lines.s_nom_extendable] = \
network_pf.lines.b * network.lines.s_nom_opt /\
network_pf.lines.s_nom
network_pf.lines.g[network.lines.s_nom_extendable] = \
network_pf.lines.g * network.lines.s_nom_opt /\
network_pf.lines.s_nom
network_pf.transformers.x[network.transformers.s_nom_extendable] = \
network_pf.transformers.x * network.transformers.s_nom / \
network_pf.transformers.s_nom_opt
network_pf.lines.s_nom_extendable = False
network_pf.transformers.s_nom_extendable = False
network_pf.storage_units.p_nom_extendable = False
network_pf.links.p_nom_extendable = False
network_pf.lines.s_nom = network.lines.s_nom_opt
network_pf.transformers.s_nom = network.transformers.s_nom_opt
network_pf.storage_units.p_nom = network_pf.storage_units.p_nom_opt
network_pf.links.p_nom = network_pf.links.p_nom_opt
network_pf.lopf(network.snapshots,
solver_name=args['solver'],
solver_options=args['solver_options'],
extra_functionality=extra_functionality)
network_pf.storage_units.p_nom_extendable = storages_extendable
network_pf.lines.s_nom_extendable = lines_extendable
network_pf.links.p_nom_extendable = links_extendable
network_pf.transformers.s_nom_extendable = trafos_extendable
network_pf.storage_units.p_nom = storages_p_nom
network_pf.lines.s_nom = lines_s_nom
network_pf.links.p_nom = links_p_nom
network_pf.transformers.s_nom = trafos_s_nom
# For the PF, set the P to the optimised P
network_pf.generators_t.p_set = network_pf.generators_t.p_set.reindex(
columns=network_pf.generators.index)
network_pf.generators_t.p_set = network_pf.generators_t.p
network_pf.storage_units_t.p_set = network_pf.storage_units_t.p_set\
.reindex(columns=network_pf.storage_units.index)
network_pf.storage_units_t.p_set = network_pf.storage_units_t.p
network_pf.links_t.p_set = network_pf.links_t.p_set.reindex(
columns=network_pf.links.index)
network_pf.links_t.p_set = network_pf.links_t.p0
# if foreign lines are DC, execute pf only on sub_network in Germany
if (args['foreign_lines']['carrier'] == 'DC') or ((args['scn_extension']!=
None) and ('BE_NO_NEP 2035' in args['scn_extension'])):
n_bus = pd.Series(index=network.sub_networks.index)
for i in range(0, len(network.sub_networks.index)-1):
n_bus[i] = len(network.buses.index[
network.buses.sub_network.astype(int) == i])
sub_network_DE = n_bus.index[n_bus == n_bus.max()]
foreign_bus = network.buses[network.buses.sub_network !=
sub_network_DE.values[0]]
foreign_comp = {'Bus': network.buses[
network.buses.sub_network !=
sub_network_DE.values[0]],
'Generator': network.generators[
network.generators.bus.isin(
foreign_bus.index)],
'Load': network.loads[
network.loads.bus.isin(foreign_bus.index)],
'Transformer': network.transformers[
network.transformers.bus0.isin(
foreign_bus.index)],
'StorageUnit': network.storage_units[
network.storage_units.bus.isin(
foreign_bus.index)]}
foreign_series = {'Bus': network.buses_t.copy(),
'Generator': network.generators_t.copy(),
'Load': network.loads_t.copy(),
'Transformer': network.transformers_t.copy(),
'StorageUnit': network.storage_units_t.copy()}
for comp in sorted(foreign_series):
attr = sorted(foreign_series[comp])
for a in attr:
if not foreign_series[comp][a].empty:
if a != 'p_max_pu':
foreign_series[comp][a] = foreign_series[comp][a][
foreign_comp[comp].index]
else:
foreign_series[comp][a] = foreign_series[comp][a][
foreign_comp[comp][foreign_comp[
comp]['carrier'].isin(
['solar', 'wind_onshore',
'wind_offshore',
'run_of_river'])].index]
network.buses = network.buses.drop(foreign_bus.index)
network.generators = network.generators[
network.generators.bus.isin(network.buses.index)]
network.loads = network.loads[
network.loads.bus.isin(network.buses.index)]
network.transformers = network.transformers[
network.transformers.bus0.isin(network.buses.index)]
network.storage_units = network.storage_units[
network.storage_units.bus.isin(network.buses.index)]
# Set slack bus
network = set_slack(network)
# execute non-linear pf
pf_solution = network_pf.pf(network.snapshots, use_seed=True)
# if selected, copy lopf results of neighboring countries to network
if ((args['foreign_lines']['carrier'] == 'DC') or ((args['scn_extension']!=
None) and ('BE_NO_NEP 2035' in args['scn_extension']))) and add_foreign_lopf:
for comp in sorted(foreign_series):
network.import_components_from_dataframe(foreign_comp[comp], comp)
for attr in sorted(foreign_series[comp]):
network.import_series_from_dataframe(foreign_series
[comp][attr], comp, attr)
pf_solve = pd.DataFrame(index=pf_solution['converged'].index)
pf_solve['converged'] = pf_solution['converged'].values
pf_solve['error'] = pf_solution['error'].values
pf_solve['n_iter'] = pf_solution['n_iter'].values
if not pf_solve[~pf_solve.converged].count().max() == 0:
logger.warning("PF of %d snapshots not converged.",
pf_solve[~pf_solve.converged].count().max())
return pf_solve | Function that prepares and runs non-linar load flow using PyPSA pf.
If network has been extendable, a second lopf with reactances adapted to
new s_nom is needed.
If crossborder lines are DC-links, pf is only applied on german network.
Crossborder flows are still considerd due to the active behavior of links.
To return a network containing the whole grid, the optimised solution of the
foreign components can be added afterwards.
Parameters
----------
network : :class:`pypsa.Network
Overall container of PyPSA
args: dict
Contains calculation settings of appl.py
extra_fuctionality: function or NoneType
Adds constraint to optimization (e.g. when applied snapshot clustering)
add_foreign_lopf: boolean
Choose if foreign results of lopf should be added to the network when
foreign lines are DC
Returns
-------
pf_solve: pandas.Dataframe
Contains information about convergency and calculation time of pf | entailment |
def distribute_q(network, allocation='p_nom'):
""" Function that distributes reactive power at bus to all installed
generators and storages.
Parameters
----------
network : :class:`pypsa.Network
Overall container of PyPSA
allocation: str
Choose key to distribute reactive power:
'p_nom' to dirstribute via p_nom
'p' to distribute via p_set
Returns
-------
"""
network.allocation = allocation
if allocation == 'p':
p_sum = network.generators_t['p'].\
groupby(network.generators.bus, axis=1).sum().\
add(network.storage_units_t['p'].abs().groupby(
network.storage_units.bus, axis=1).sum(), fill_value=0)
q_sum = network.generators_t['q'].\
groupby(network.generators.bus, axis=1).sum()
q_distributed = network.generators_t.p / \
p_sum[network.generators.bus.sort_index()].values * \
q_sum[network.generators.bus.sort_index()].values
q_storages = network.storage_units_t.p / \
p_sum[network.storage_units.bus.sort_index()].values *\
q_sum[network.storage_units.bus.sort_index()].values
if allocation == 'p_nom':
q_bus = network.generators_t['q'].\
groupby(network.generators.bus, axis=1).sum().add(
network.storage_units_t.q.groupby(
network.storage_units.bus, axis = 1).sum(), fill_value=0)
p_nom_dist = network.generators.p_nom_opt.sort_index()
p_nom_dist[p_nom_dist.index.isin(network.generators.index
[network.generators.carrier ==
'load shedding'])] = 0
q_distributed = q_bus[
network.generators.bus].multiply(p_nom_dist.values) /\
(network.generators.p_nom_opt[network.generators.carrier !=
'load shedding'].groupby(
network.generators.bus).sum().add(
network.storage_units.p_nom_opt.groupby
(network.storage_units.bus).sum(), fill_value=0))[
network.generators.bus.sort_index()].values
q_distributed.columns = network.generators.index
q_storages = q_bus[network.storage_units.bus]\
.multiply(network.storage_units.p_nom_opt.values) / \
((network.generators.p_nom_opt[network.generators.carrier !=
'load shedding'].groupby(
network.generators.bus).sum().add(
network.storage_units.p_nom_opt.
groupby(network.storage_units.bus).sum(), fill_value=0))[
network.storage_units.bus].values)
q_storages.columns = network.storage_units.index
q_distributed[q_distributed.isnull()] = 0
q_distributed[q_distributed.abs() == np.inf] = 0
q_storages[q_storages.isnull()] = 0
q_storages[q_storages.abs() == np.inf] = 0
network.generators_t.q = q_distributed
network.storage_units_t.q = q_storages
return network | Function that distributes reactive power at bus to all installed
generators and storages.
Parameters
----------
network : :class:`pypsa.Network
Overall container of PyPSA
allocation: str
Choose key to distribute reactive power:
'p_nom' to dirstribute via p_nom
'p' to distribute via p_set
Returns
------- | entailment |
def calc_line_losses(network):
""" Calculate losses per line with PF result data
Parameters
----------
network : :class:`pypsa.Network
Overall container of PyPSA
s0 : series
apparent power of line
i0 : series
current of line
-------
"""
# Line losses
# calculate apparent power S = sqrt(p² + q²) [in MW]
s0_lines = ((network.lines_t.p0**2 + network.lines_t.q0**2).
apply(np.sqrt))
# calculate current I = S / U [in A]
i0_lines = np.multiply(s0_lines, 1000000) / \
np.multiply(network.lines.v_nom, 1000)
# calculate losses per line and timestep network.\
# lines_t.line_losses = I² * R [in MW]
network.lines_t.losses = np.divide(i0_lines**2 * network.lines.r, 1000000)
# calculate total losses per line [in MW]
network.lines = network.lines.assign(
losses=np.sum(network.lines_t.losses).values)
# Transformer losses
# https://books.google.de/books?id=0glcCgAAQBAJ&pg=PA151&lpg=PA151&dq=
# wirkungsgrad+transformator+1000+mva&source=bl&ots=a6TKhNfwrJ&sig=
# r2HCpHczRRqdgzX_JDdlJo4hj-k&hl=de&sa=X&ved=
# 0ahUKEwib5JTFs6fWAhVJY1AKHa1cAeAQ6AEIXjAI#v=onepage&q=
# wirkungsgrad%20transformator%201000%20mva&f=false
# Crastan, Elektrische Energieversorgung, p.151
# trafo 1000 MVA: 99.8 %
network.transformers = network.transformers.assign(
losses=np.multiply(network.transformers.s_nom, (1 - 0.998)).values)
# calculate total losses (possibly enhance with adding these values
# to network container)
losses_total = sum(network.lines.losses) + sum(network.transformers.losses)
print("Total lines losses for all snapshots [MW]:", round(losses_total, 2))
losses_costs = losses_total * np.average(network.buses_t.marginal_price)
print("Total costs for these losses [EUR]:", round(losses_costs, 2))
return | Calculate losses per line with PF result data
Parameters
----------
network : :class:`pypsa.Network
Overall container of PyPSA
s0 : series
apparent power of line
i0 : series
current of line
------- | entailment |
def set_line_costs(network, args,
cost110=230, cost220=290, cost380=85, costDC=375):
""" Set capital costs for extendable lines in respect to PyPSA [€/MVA]
Parameters
----------
network : :class:`pypsa.Network
Overall container of PyPSA
args: dict containing settings from appl.py
cost110 : capital costs per km for 110kV lines and cables
default: 230€/MVA/km, source: costs for extra circuit in
dena Verteilnetzstudie, p. 146)
cost220 : capital costs per km for 220kV lines and cables
default: 280€/MVA/km, source: costs for extra circuit in
NEP 2025, capactity from most used 220 kV lines in model
cost380 : capital costs per km for 380kV lines and cables
default: 85€/MVA/km, source: costs for extra circuit in
NEP 2025, capactity from most used 380 kV lines in NEP
costDC : capital costs per km for DC-lines
default: 375€/MVA/km, source: costs for DC transmission line
in NEP 2035
-------
"""
network.lines["v_nom"] = network.lines.bus0.map(network.buses.v_nom)
network.lines.loc[(network.lines.v_nom == 110),
'capital_cost'] = cost110 * network.lines.length /\
args['branch_capacity_factor']['HV']
network.lines.loc[(network.lines.v_nom == 220),
'capital_cost'] = cost220 * network.lines.length/\
args['branch_capacity_factor']['eHV']
network.lines.loc[(network.lines.v_nom == 380),
'capital_cost'] = cost380 * network.lines.length/\
args['branch_capacity_factor']['eHV']
network.links.loc[network.links.p_nom_extendable,
'capital_cost'] = costDC * network.links.length
return network | Set capital costs for extendable lines in respect to PyPSA [€/MVA]
Parameters
----------
network : :class:`pypsa.Network
Overall container of PyPSA
args: dict containing settings from appl.py
cost110 : capital costs per km for 110kV lines and cables
default: 230€/MVA/km, source: costs for extra circuit in
dena Verteilnetzstudie, p. 146)
cost220 : capital costs per km for 220kV lines and cables
default: 280€/MVA/km, source: costs for extra circuit in
NEP 2025, capactity from most used 220 kV lines in model
cost380 : capital costs per km for 380kV lines and cables
default: 85€/MVA/km, source: costs for extra circuit in
NEP 2025, capactity from most used 380 kV lines in NEP
costDC : capital costs per km for DC-lines
default: 375€/MVA/km, source: costs for DC transmission line
in NEP 2035
------- | entailment |
def set_trafo_costs(network, args, cost110_220=7500, cost110_380=17333,
cost220_380=14166):
""" Set capital costs for extendable transformers in respect
to PyPSA [€/MVA]
Parameters
----------
network : :class:`pypsa.Network
Overall container of PyPSA
cost110_220 : capital costs for 110/220kV transformer
default: 7500€/MVA, source: costs for extra trafo in
dena Verteilnetzstudie, p. 146; S of trafo used in osmTGmod
cost110_380 : capital costs for 110/380kV transformer
default: 17333€/MVA, source: NEP 2025
cost220_380 : capital costs for 220/380kV transformer
default: 14166€/MVA, source: NEP 2025
"""
network.transformers["v_nom0"] = network.transformers.bus0.map(
network.buses.v_nom)
network.transformers["v_nom1"] = network.transformers.bus1.map(
network.buses.v_nom)
network.transformers.loc[(network.transformers.v_nom0 == 110) & (
network.transformers.v_nom1 == 220), 'capital_cost'] = cost110_220/\
args['branch_capacity_factor']['HV']
network.transformers.loc[(network.transformers.v_nom0 == 110) & (
network.transformers.v_nom1 == 380), 'capital_cost'] = cost110_380/\
args['branch_capacity_factor']['HV']
network.transformers.loc[(network.transformers.v_nom0 == 220) & (
network.transformers.v_nom1 == 380), 'capital_cost'] = cost220_380/\
args['branch_capacity_factor']['eHV']
return network | Set capital costs for extendable transformers in respect
to PyPSA [€/MVA]
Parameters
----------
network : :class:`pypsa.Network
Overall container of PyPSA
cost110_220 : capital costs for 110/220kV transformer
default: 7500€/MVA, source: costs for extra trafo in
dena Verteilnetzstudie, p. 146; S of trafo used in osmTGmod
cost110_380 : capital costs for 110/380kV transformer
default: 17333€/MVA, source: NEP 2025
cost220_380 : capital costs for 220/380kV transformer
default: 14166€/MVA, source: NEP 2025 | entailment |
def add_missing_components(network):
# Munich
"""Add missing transformer at Heizkraftwerk Nord in Munich and missing
transformer in Stuttgart
Parameters
----------
network : :class:`pypsa.Network
Overall container of PyPSA
Returns
-------
network : :class:`pypsa.Network
Overall container of PyPSA
"""
"""https://www.swm.de/privatkunden/unternehmen/energieerzeugung/heizkraftwerke.html?utm_medium=301
to bus 25096:
25369 (86)
28232 (24)
25353 to 25356 (79)
to bus 23822: (110kV bus of 380/110-kV-transformer)
25355 (90)
28212 (98)
25357 to 665 (85)
25354 to 27414 (30)
27414 to 28212 (33)
25354 to 28294 (32/63)
28335 to 28294 (64)
28335 to 28139 (28)
Overhead lines:
16573 to 24182 (part of 4)
"""
"""
Installierte Leistung der Umspannungsebene Höchst- zu Hochspannung
(380 kV / 110 kV): 2.750.000 kVA
https://www.swm-infrastruktur.de/strom/netzstrukturdaten/strukturmerkmale.html
"""
new_trafo = str(network.transformers.index.astype(int).max() + 1)
network.add("Transformer", new_trafo, bus0="16573", bus1="23648",
x=0.135 / (2750 / 2),
r=0.0, tap_ratio=1, s_nom=2750 / 2)
def add_110kv_line(bus0, bus1, overhead=False):
new_line = str(network.lines.index.astype(int).max() + 1)
if not overhead:
network.add("Line", new_line, bus0=bus0, bus1=bus1, s_nom=280)
else:
network.add("Line", new_line, bus0=bus0, bus1=bus1, s_nom=260)
network.lines.loc[new_line, "scn_name"] = "Status Quo"
network.lines.loc[new_line, "v_nom"] = 110
network.lines.loc[new_line, "version"] = "added_manually"
network.lines.loc[new_line, "frequency"] = 50
network.lines.loc[new_line, "cables"] = 3.0
network.lines.loc[new_line, "country"] = 'DE'
network.lines.loc[new_line, "length"] = (
pypsa.geo.haversine(network.buses.loc[bus0, ["x", "y"]],
network.buses.loc[bus1, ["x", "y"]])
[0][0] * 1.2)
if not overhead:
network.lines.loc[new_line, "r"] = (network.lines.
loc[new_line, "length"] *
0.0177)
network.lines.loc[new_line, "g"] = 0
# or: (network.lines.loc[new_line, "length"]*78e-9)
network.lines.loc[new_line, "x"] = (network.lines.
loc[new_line, "length"] *
0.3e-3)
network.lines.loc[new_line, "b"] = (network.lines.
loc[new_line, "length"] *
250e-9)
elif overhead:
network.lines.loc[new_line, "r"] = (network.lines.
loc[new_line, "length"] *
0.05475)
network.lines.loc[new_line, "g"] = 0
# or: (network.lines.loc[new_line, "length"]*40e-9)
network.lines.loc[new_line, "x"] = (network.lines.
loc[new_line, "length"] *
1.2e-3)
network.lines.loc[new_line, "b"] = (network.lines.
loc[new_line, "length"] *
9.5e-9)
add_110kv_line("16573", "28353")
add_110kv_line("16573", "28092")
add_110kv_line("25096", "25369")
add_110kv_line("25096", "28232")
add_110kv_line("25353", "25356")
add_110kv_line("23822", "25355")
add_110kv_line("23822", "28212")
add_110kv_line("25357", "665")
add_110kv_line("25354", "27414")
add_110kv_line("27414", "28212")
add_110kv_line("25354", "28294")
add_110kv_line("28335", "28294")
add_110kv_line("28335", "28139")
add_110kv_line("16573", "24182", overhead=True)
# Stuttgart
"""
Stuttgart:
Missing transformer, because 110-kV-bus is situated outside
Heizkraftwerk Heilbronn:
"""
# new_trafo = str(network.transformers.index.astype(int).max()1)
network.add("Transformer", '99999', bus0="18967", bus1="25766",
x=0.135 / 300, r=0.0, tap_ratio=1, s_nom=300)
"""
According to:
https://assets.ctfassets.net/xytfb1vrn7of/NZO8x4rKesAcYGGcG4SQg/b780d6a3ca4c2600ab51a30b70950bb1/netzschemaplan-110-kv.pdf
the following lines are missing:
"""
add_110kv_line("18967", "22449", overhead=True) # visible in OSM & DSO map
add_110kv_line("21165", "24068", overhead=True) # visible in OSM & DSO map
add_110kv_line("23782", "24089", overhead=True)
# visible in DSO map & OSM till 1 km from bus1
"""
Umspannwerk Möhringen (bus 23697)
https://de.wikipedia.org/wiki/Umspannwerk_M%C3%B6hringen
there should be two connections:
to Sindelfingen (2*110kV)
to Wendingen (former 220kV, now 2*110kV)
the line to Sindelfingen is connected, but the connection of Sindelfingen
itself to 380kV is missing:
"""
add_110kv_line("19962", "27671", overhead=True) # visible in OSM & DSO map
add_110kv_line("19962", "27671", overhead=True)
"""
line to Wendingen is missing, probably because it ends shortly before the
way of the substation and is connected via cables:
"""
add_110kv_line("23697", "24090", overhead=True) # visible in OSM & DSO map
add_110kv_line("23697", "24090", overhead=True)
# Lehrte
"""
Lehrte: 220kV Bus located outsinde way of Betriebszentrtum Lehrte and
therefore not connected:
"""
def add_220kv_line(bus0, bus1, overhead=False):
new_line = str(network.lines.index.astype(int).max() + 1)
if not overhead:
network.add("Line", new_line, bus0=bus0, bus1=bus1, s_nom=550)
else:
network.add("Line", new_line, bus0=bus0, bus1=bus1, s_nom=520)
network.lines.loc[new_line, "scn_name"] = "Status Quo"
network.lines.loc[new_line, "v_nom"] = 220
network.lines.loc[new_line, "version"] = "added_manually"
network.lines.loc[new_line, "frequency"] = 50
network.lines.loc[new_line, "cables"] = 3.0
network.lines.loc[new_line, "country"] = 'DE'
network.lines.loc[new_line, "length"] = (
pypsa.geo.haversine(network.buses.loc[bus0, ["x", "y"]],
network.buses.loc[bus1, ["x", "y"]])[0][0] *
1.2)
if not overhead:
network.lines.loc[new_line, "r"] = (network.lines.
loc[new_line, "length"] *
0.0176)
network.lines.loc[new_line, "g"] = 0
# or: (network.lines.loc[new_line, "length"]*67e-9)
network.lines.loc[new_line, "x"] = (network.lines.
loc[new_line, "length"] *
0.3e-3)
network.lines.loc[new_line, "b"] = (network.lines.
loc[new_line, "length"] *
210e-9)
elif overhead:
network.lines.loc[new_line, "r"] = (network.lines.
loc[new_line, "length"] *
0.05475)
network.lines.loc[new_line, "g"] = 0
# or: (network.lines.loc[new_line, "length"]*30e-9)
network.lines.loc[new_line, "x"] = (network.lines.
loc[new_line, "length"] * 1e-3)
network.lines.loc[new_line, "b"] = (network.lines.
loc[new_line, "length"] * 11e-9
)
add_220kv_line("266", "24633", overhead=True)
# temporary turn buses of transformers
network.transformers["v_nom0"] = network.transformers.bus0.map(
network.buses.v_nom)
network.transformers["v_nom1"] = network.transformers.bus1.map(
network.buses.v_nom)
new_bus0 = network.transformers.bus1[network.transformers.v_nom0>network.transformers.v_nom1]
new_bus1 = network.transformers.bus0[network.transformers.v_nom0>network.transformers.v_nom1]
network.transformers.bus0[network.transformers.v_nom0>network.transformers.v_nom1] = new_bus0.values
network.transformers.bus1[network.transformers.v_nom0>network.transformers.v_nom1] = new_bus1.values
return network | Add missing transformer at Heizkraftwerk Nord in Munich and missing
transformer in Stuttgart
Parameters
----------
network : :class:`pypsa.Network
Overall container of PyPSA
Returns
-------
network : :class:`pypsa.Network
Overall container of PyPSA | entailment |
def convert_capital_costs(network, start_snapshot, end_snapshot, p=0.05, T=40):
""" Convert capital_costs to fit to pypsa and caluculated time
Parameters
----------
network : :class:`pypsa.Network
Overall container of PyPSA
p : interest rate, default 0.05
T : number of periods, default 40 years (source: StromNEV Anlage 1)
-------
"""
# Add costs for DC-converter
network.links.capital_cost = network.links.capital_cost + 400000
# Calculate present value of an annuity (PVA)
PVA = (1 / p) - (1 / (p * (1 + p) ** T))
# Apply function on lines, links, trafos and storages
# Storage costs are already annuized yearly
network.lines.loc[network.lines.s_nom_extendable == True,
'capital_cost'] = (network.lines.capital_cost /
(PVA * (8760 / (end_snapshot - start_snapshot + 1))))
network.links.loc[network.links.p_nom_extendable == True,
'capital_cost'] = network. links.capital_cost /\
(PVA * (8760 / (end_snapshot - start_snapshot + 1)))
network.transformers.loc[network.transformers.s_nom_extendable == True,
'capital_cost'] = network.transformers.capital_cost / \
(PVA * (8760 / (end_snapshot - start_snapshot + 1)))
network.storage_units.loc[network.storage_units.p_nom_extendable == True,
'capital_cost'] = network.storage_units.capital_cost / \
(8760 / (end_snapshot - start_snapshot + 1))
return network | Convert capital_costs to fit to pypsa and caluculated time
Parameters
----------
network : :class:`pypsa.Network
Overall container of PyPSA
p : interest rate, default 0.05
T : number of periods, default 40 years (source: StromNEV Anlage 1)
------- | entailment |
def find_snapshots(network, carrier, maximum = True, minimum = True, n = 3):
"""
Function that returns snapshots with maximum and/or minimum feed-in of
selected carrier.
Parameters
----------
network : :class:`pypsa.Network
Overall container of PyPSA
carrier: str
Selected carrier of generators
maximum: bool
Choose if timestep of maximal feed-in is returned.
minimum: bool
Choose if timestep of minimal feed-in is returned.
n: int
Number of maximal/minimal snapshots
Returns
-------
calc_snapshots : 'pandas.core.indexes.datetimes.DatetimeIndex'
List containing snapshots
"""
if carrier == 'residual load':
power_plants = network.generators[network.generators.carrier.
isin(['solar', 'wind', 'wind_onshore'])]
power_plants_t = network.generators.p_nom[power_plants.index] * \
network.generators_t.p_max_pu[power_plants.index]
load = network.loads_t.p_set.sum(axis=1)
all_renew = power_plants_t.sum(axis=1)
all_carrier = load - all_renew
if carrier in ('solar', 'wind', 'wind_onshore',
'wind_offshore', 'run_of_river'):
power_plants = network.generators[network.generators.carrier
== carrier]
power_plants_t = network.generators.p_nom[power_plants.index] * \
network.generators_t.p_max_pu[power_plants.index]
all_carrier = power_plants_t.sum(axis=1)
if maximum and not minimum:
times = all_carrier.sort_values().head(n=n)
if minimum and not maximum:
times = all_carrier.sort_values().tail(n=n)
if maximum and minimum:
times = all_carrier.sort_values().head(n=n)
times = times.append(all_carrier.sort_values().tail(n=n))
calc_snapshots = all_carrier.index[all_carrier.index.isin(times.index)]
return calc_snapshots | Function that returns snapshots with maximum and/or minimum feed-in of
selected carrier.
Parameters
----------
network : :class:`pypsa.Network
Overall container of PyPSA
carrier: str
Selected carrier of generators
maximum: bool
Choose if timestep of maximal feed-in is returned.
minimum: bool
Choose if timestep of minimal feed-in is returned.
n: int
Number of maximal/minimal snapshots
Returns
-------
calc_snapshots : 'pandas.core.indexes.datetimes.DatetimeIndex'
List containing snapshots | entailment |
def ramp_limits(network):
""" Add ramping constraints to thermal power plants.
Parameters
----------
network : :class:`pypsa.Network
Overall container of PyPSA
Returns
-------
"""
carrier = ['coal', 'biomass', 'gas', 'oil', 'waste', 'lignite',
'uranium', 'geothermal']
data = {'start_up_cost':[77, 57, 42, 57, 57, 77, 50, 57], #€/MW
'start_up_fuel':[4.3, 2.8, 1.45, 2.8, 2.8, 4.3, 16.7, 2.8], #MWh/MW
'min_up_time':[5, 2, 3, 2, 2, 5, 12, 2],
'min_down_time':[7, 2, 2, 2, 2, 7, 17, 2],
# =============================================================================
# 'ramp_limit_start_up':[0.4, 0.4, 0.4, 0.4, 0.4, 0.6, 0.5, 0.4],
# 'ramp_limit_shut_down':[0.4, 0.4, 0.4, 0.4, 0.4, 0.6, 0.5, 0.4]
# =============================================================================
'p_min_pu':[0.33, 0.38, 0.4, 0.38, 0.38, 0.5, 0.45, 0.38]
}
df = pd.DataFrame(data, index=carrier)
fuel_costs = network.generators.marginal_cost.groupby(
network.generators.carrier).mean()[carrier]
df['start_up_fuel'] = df['start_up_fuel'] * fuel_costs
df['start_up_cost'] = df['start_up_cost'] + df['start_up_fuel']
df.drop('start_up_fuel', axis=1, inplace=True)
for tech in df.index:
for limit in df.columns:
network.generators.loc[network.generators.carrier == tech,
limit] = df.loc[tech, limit]
network.generators.start_up_cost = network.generators.start_up_cost\
*network.generators.p_nom
network.generators.committable = True | Add ramping constraints to thermal power plants.
Parameters
----------
network : :class:`pypsa.Network
Overall container of PyPSA
Returns
------- | entailment |
def get_args_setting(args, jsonpath='scenario_setting.json'):
"""
Get and open json file with scenaio settings of eTraGo ``args``.
The settings incluedes all eTraGo specific settings of arguments and
parameters for a reproducible calculation.
Parameters
----------
json_file : str
Default: ``scenario_setting.json``
Name of scenario setting json file
Returns
-------
args : dict
Dictionary of json file
"""
if not jsonpath == None:
with open(jsonpath) as f:
args = json.load(f)
return args | Get and open json file with scenaio settings of eTraGo ``args``.
The settings incluedes all eTraGo specific settings of arguments and
parameters for a reproducible calculation.
Parameters
----------
json_file : str
Default: ``scenario_setting.json``
Name of scenario setting json file
Returns
-------
args : dict
Dictionary of json file | entailment |
def set_line_country_tags(network):
"""
Set country tag for AC- and DC-lines.
Parameters
----------
network : :class:`pypsa.Network
Overall container of PyPSA
"""
transborder_lines_0 = network.lines[network.lines['bus0'].isin(
network.buses.index[network.buses['country_code'] != 'DE'])].index
transborder_lines_1 = network.lines[network.lines['bus1'].isin(
network.buses.index[network.buses['country_code']!= 'DE'])].index
#set country tag for lines
network.lines.loc[transborder_lines_0, 'country'] = \
network.buses.loc[network.lines.loc[transborder_lines_0, 'bus0']\
.values, 'country_code'].values
network.lines.loc[transborder_lines_1, 'country'] = \
network.buses.loc[network.lines.loc[transborder_lines_1, 'bus1']\
.values, 'country_code'].values
network.lines['country'].fillna('DE', inplace=True)
doubles = list(set(transborder_lines_0.intersection(transborder_lines_1)))
for line in doubles:
c_bus0 = network.buses.loc[network.lines.loc[line, 'bus0'], 'country']
c_bus1 = network.buses.loc[network.lines.loc[line, 'bus1'], 'country']
network.lines.loc[line, 'country'] = '{}{}'.format(c_bus0, c_bus1)
transborder_links_0 = network.links[network.links['bus0'].isin(
network.buses.index[network.buses['country_code']!= 'DE'])].index
transborder_links_1 = network.links[network.links['bus1'].isin(
network.buses.index[network.buses['country_code'] != 'DE'])].index
#set country tag for links
network.links.loc[transborder_links_0, 'country'] = \
network.buses.loc[network.links.loc[transborder_links_0, 'bus0']\
.values, 'country_code'].values
network.links.loc[transborder_links_1, 'country'] = \
network.buses.loc[network.links.loc[transborder_links_1, 'bus1']\
.values, 'country_code'].values
network.links['country'].fillna('DE', inplace=True)
doubles = list(set(transborder_links_0.intersection(transborder_links_1)))
for link in doubles:
c_bus0 = network.buses.loc[network.links.loc[link, 'bus0'], 'country']
c_bus1 = network.buses.loc[network.links.loc[link, 'bus1'], 'country']
network.links.loc[link, 'country'] = '{}{}'.format(c_bus0, c_bus1) | Set country tag for AC- and DC-lines.
Parameters
----------
network : :class:`pypsa.Network
Overall container of PyPSA | entailment |
def crossborder_capacity(network, method, capacity_factor):
"""
Adjust interconnector capacties.
Parameters
----------
network : :class:`pypsa.Network
Overall container of PyPSA
method : string
Method of correction. Options are 'ntc_acer' and 'thermal_acer'.
'ntc_acer' corrects all capacities according to values published by
the ACER in 2016.
'thermal_acer' corrects certain capacities where our dataset most
likely overestimates the thermal capacity.
capacity_factor : float
branch capacity factor. Reduction by branch-capacity
factor is applied afterwards and shouln't effect ntc-values, which
already include (n-1)-security. To exclude the ntc-capacities from the
capacity factor, the crossborder-capacities are diveded by the factor
in this function. For thermal-acer this is excluded by setting branch
capacity factors to one.
"""
if method == 'ntc_acer':
cap_per_country = {'AT': 4900,
'CH': 2695,
'CZ': 1301,
'DK': 913,
'FR': 3593,
'LU': 2912,
'NL': 2811,
'PL': 280,
'SE': 217,
'CZAT': 574,
'ATCZ': 574,
'CZPL': 312,
'PLCZ': 312,
'ATCH': 979,
'CHAT': 979,
'CHFR': 2087,
'FRCH': 2087,
'FRLU': 364,
'LUFR': 364,
'SEDK': 1928,
'DKSE': 1928}
elif method == 'thermal_acer':
cap_per_country = {'CH': 12000,
'DK': 4000,
'SEDK': 3500,
'DKSE': 3500}
capacity_factor = {'HV': 1, 'eHV':1}
if not network.lines[network.lines.country != 'DE'].empty:
weighting = network.lines.loc[network.lines.country!='DE', 's_nom'].\
groupby(network.lines.country).transform(lambda x: x/x.sum())
weighting_links = network.links.loc[network.links.country!='DE', 'p_nom'].\
groupby(network.links.country).transform(lambda x: x/x.sum())
network.lines["v_nom"] = network.lines.bus0.map(network.buses.v_nom)
for country in cap_per_country:
index_HV = network.lines[(network.lines.country == country) &(
network.lines.v_nom == 110)].index
index_eHV = network.lines[(network.lines.country == country) &(
network.lines.v_nom > 110)].index
index_links = network.links[network.links.country == country].index
if not network.lines[network.lines.country == country].empty:
network.lines.loc[index_HV, 's_nom'] = weighting[index_HV] * \
cap_per_country[country] / capacity_factor['HV']
network.lines.loc[index_eHV, 's_nom'] = \
weighting[index_eHV] * cap_per_country[country] /\
capacity_factor['eHV']
if not network.links[network.links.country == country].empty:
network.links.loc[index_links, 'p_nom'] = \
weighting_links[index_links] * cap_per_country\
[country]
if country == 'SE':
network.links.loc[network.links.country == country, 'p_nom'] =\
cap_per_country[country]
if not network.lines[network.lines.country == (country+country)].empty:
i_HV = network.lines[(network.lines.v_nom == 110)&(
network.lines.country ==country+country)].index
i_eHV = network.lines[(network.lines.v_nom == 110)&(
network.lines.country ==country+country)].index
network.lines.loc[i_HV, 's_nom'] = \
weighting[i_HV] * cap_per_country[country]/\
capacity_factor['HV']
network.lines.loc[i_eHV, 's_nom'] = \
weighting[i_eHV] * cap_per_country[country]/\
capacity_factor['eHV']
if not network.links[network.links.country == (country+country)].empty:
i_links = network.links[network.links.country ==
(country+country)].index
network.links.loc[i_links, 'p_nom'] = \
weighting_links[i_links] * cap_per_country\
[country]*capacity_factor | Adjust interconnector capacties.
Parameters
----------
network : :class:`pypsa.Network
Overall container of PyPSA
method : string
Method of correction. Options are 'ntc_acer' and 'thermal_acer'.
'ntc_acer' corrects all capacities according to values published by
the ACER in 2016.
'thermal_acer' corrects certain capacities where our dataset most
likely overestimates the thermal capacity.
capacity_factor : float
branch capacity factor. Reduction by branch-capacity
factor is applied afterwards and shouln't effect ntc-values, which
already include (n-1)-security. To exclude the ntc-capacities from the
capacity factor, the crossborder-capacities are diveded by the factor
in this function. For thermal-acer this is excluded by setting branch
capacity factors to one. | entailment |
def set_branch_capacity(network, args):
"""
Set branch capacity factor of lines and transformers, different factors for
HV (110kV) and eHV (220kV, 380kV).
Parameters
----------
network : :class:`pypsa.Network
Overall container of PyPSA
args: dict
Settings in appl.py
"""
network.lines["s_nom_total"] = network.lines.s_nom.copy()
network.transformers["s_nom_total"] = network.transformers.s_nom.copy()
network.lines["v_nom"] = network.lines.bus0.map(
network.buses.v_nom)
network.transformers["v_nom0"] = network.transformers.bus0.map(
network.buses.v_nom)
network.lines.s_nom[network.lines.v_nom == 110] = \
network.lines.s_nom * args['branch_capacity_factor']['HV']
network.lines.s_nom[network.lines.v_nom > 110] = \
network.lines.s_nom * args['branch_capacity_factor']['eHV']
network.transformers.s_nom[network.transformers.v_nom0 == 110]\
= network.transformers.s_nom * args['branch_capacity_factor']['HV']
network.transformers.s_nom[network.transformers.v_nom0 > 110]\
= network.transformers.s_nom * args['branch_capacity_factor']['eHV'] | Set branch capacity factor of lines and transformers, different factors for
HV (110kV) and eHV (220kV, 380kV).
Parameters
----------
network : :class:`pypsa.Network
Overall container of PyPSA
args: dict
Settings in appl.py | entailment |
def max_line_ext(network, snapshots, share=1.01):
"""
Sets maximal share of overall network extension
as extra functionality in LOPF
Parameters
----------
share: float
Maximal share of network extension in p.u.
"""
lines_snom = network.lines.s_nom.sum()
links_pnom = network.links.p_nom.sum()
def _rule(m):
lines_opt = sum(m.passive_branch_s_nom[index]
for index
in m.passive_branch_s_nom_index)
links_opt = sum(m.link_p_nom[index]
for index
in m.link_p_nom_index)
return (lines_opt + links_opt) <= (lines_snom + links_pnom) * share
network.model.max_line_ext = Constraint(rule=_rule) | Sets maximal share of overall network extension
as extra functionality in LOPF
Parameters
----------
share: float
Maximal share of network extension in p.u. | entailment |
def min_renewable_share(network, snapshots, share=0.72):
"""
Sets minimal renewable share of generation as extra functionality in LOPF
Parameters
----------
share: float
Minimal share of renewable generation in p.u.
"""
renewables = ['wind_onshore', 'wind_offshore',
'biomass', 'solar', 'run_of_river']
res = list(network.generators.index[
network.generators.carrier.isin(renewables)])
total = list(network.generators.index)
snapshots = network.snapshots
def _rule(m):
"""
"""
renewable_production = sum(m.generator_p[gen, sn]
for gen
in res
for sn in snapshots)
total_production = sum(m.generator_p[gen, sn]
for gen in total
for sn in snapshots)
return (renewable_production >= total_production * share)
network.model.min_renewable_share = Constraint(rule=_rule) | Sets minimal renewable share of generation as extra functionality in LOPF
Parameters
----------
share: float
Minimal share of renewable generation in p.u. | entailment |
def max_curtailment(network, snapshots, curtail_max=0.03):
"""
each RE can only be curtailed (over all snapshots)
with respect to curtail_max
Parameters
----------
curtail_max: float
maximal curtailment per power plant in p.u.
"""
renewables = ['wind_onshore', 'wind_offshore',
'solar']
res = list(network.generators.index[
(network.generators.carrier.isin(renewables))
& (network.generators.bus.astype(str).isin(network.buses.index[network.buses.country_code == 'DE']))])
# network.import_series_from_dataframe(pd.DataFrame(
# index=network.generators_t.p_set.index,
# columns=network.generators.index[
# network.generators.carrier=='biomass'],
# data=1), "Generator", "p_max_pu")
res_potential = (network.generators.p_nom[res]*network.generators_t.p_max_pu[res]).sum()
snapshots = network.snapshots
for gen in res:
def _rule(m, gen):
"""
"""
#import pdb; pdb.set_trace()
re_n = sum(m.generator_p[gen, sn]
for sn in snapshots)
potential_n = res_potential[gen]
return (re_n >= (1-curtail_max) * potential_n)
setattr(network.model, "max_curtailment"+gen, Constraint(res, rule=_rule)) | each RE can only be curtailed (over all snapshots)
with respect to curtail_max
Parameters
----------
curtail_max: float
maximal curtailment per power plant in p.u. | entailment |
def get_node(guild_id: int, ignore_ready_status: bool = False) -> Node:
"""
Gets a node based on a guild ID, useful for noding separation. If the
guild ID does not already have a node association, the least used
node is returned. Skips over nodes that are not yet ready.
Parameters
----------
guild_id : int
ignore_ready_status : bool
Returns
-------
Node
"""
guild_count = 1e10
least_used = None
for node in _nodes:
guild_ids = node.player_manager.guild_ids
if ignore_ready_status is False and not node.ready.is_set():
continue
elif len(guild_ids) < guild_count:
guild_count = len(guild_ids)
least_used = node
if guild_id in guild_ids:
return node
if least_used is None:
raise IndexError("No nodes found.")
return least_used | Gets a node based on a guild ID, useful for noding separation. If the
guild ID does not already have a node association, the least used
node is returned. Skips over nodes that are not yet ready.
Parameters
----------
guild_id : int
ignore_ready_status : bool
Returns
-------
Node | entailment |
async def join_voice(guild_id: int, channel_id: int):
"""
Joins a voice channel by ID's.
Parameters
----------
guild_id : int
channel_id : int
"""
node = get_node(guild_id)
voice_ws = node.get_voice_ws(guild_id)
await voice_ws.voice_state(guild_id, channel_id) | Joins a voice channel by ID's.
Parameters
----------
guild_id : int
channel_id : int | entailment |
async def connect(self, timeout=None):
"""
Connects to the Lavalink player event websocket.
Parameters
----------
timeout : int
Time after which to timeout on attempting to connect to the Lavalink websocket,
``None`` is considered never, but the underlying code may stop trying past a
certain point.
Raises
------
asyncio.TimeoutError
If the websocket failed to connect after the given time.
"""
self._is_shutdown = False
combo_uri = "ws://{}:{}".format(self.host, self.rest)
uri = "ws://{}:{}".format(self.host, self.port)
log.debug(
"Lavalink WS connecting to %s or %s with headers %s", combo_uri, uri, self.headers
)
tasks = tuple({self._multi_try_connect(u) for u in (combo_uri, uri)})
for task in asyncio.as_completed(tasks, timeout=timeout):
with contextlib.suppress(Exception):
if await cast(Awaitable[Optional[websockets.WebSocketClientProtocol]], task):
break
else:
raise asyncio.TimeoutError
log.debug("Creating Lavalink WS listener.")
self._listener_task = self.loop.create_task(self.listener())
for data in self._queue:
await self.send(data)
self.ready.set()
self.update_state(NodeState.READY) | Connects to the Lavalink player event websocket.
Parameters
----------
timeout : int
Time after which to timeout on attempting to connect to the Lavalink websocket,
``None`` is considered never, but the underlying code may stop trying past a
certain point.
Raises
------
asyncio.TimeoutError
If the websocket failed to connect after the given time. | entailment |
async def listener(self):
"""
Listener task for receiving ops from Lavalink.
"""
while self._ws.open and self._is_shutdown is False:
try:
data = json.loads(await self._ws.recv())
except websockets.ConnectionClosed:
break
raw_op = data.get("op")
try:
op = LavalinkIncomingOp(raw_op)
except ValueError:
socket_log.debug("Received unknown op: %s", data)
else:
socket_log.debug("Received known op: %s", data)
self.loop.create_task(self._handle_op(op, data))
self.ready.clear()
log.debug("Listener exited: ws %s SHUTDOWN %s.", self._ws.open, self._is_shutdown)
self.loop.create_task(self._reconnect()) | Listener task for receiving ops from Lavalink. | entailment |
async def join_voice_channel(self, guild_id, channel_id):
"""
Alternative way to join a voice channel if node is known.
"""
voice_ws = self.get_voice_ws(guild_id)
await voice_ws.voice_state(guild_id, channel_id) | Alternative way to join a voice channel if node is known. | entailment |
async def disconnect(self):
"""
Shuts down and disconnects the websocket.
"""
self._is_shutdown = True
self.ready.clear()
self.update_state(NodeState.DISCONNECTING)
await self.player_manager.disconnect()
if self._ws is not None and self._ws.open:
await self._ws.close()
if self._listener_task is not None and not self.loop.is_closed():
self._listener_task.cancel()
self._state_handlers = []
_nodes.remove(self)
log.debug("Shutdown Lavalink WS.") | Shuts down and disconnects the websocket. | entailment |
def register_view(self, view):
"""Called when the View was registered
Can be used e.g. to connect signals. Here, the destroy signal is connected to close the application
:param rafcon.gui.views.state_editor.overview.StateOverviewView view: A state overview view instance
"""
# prepare State Type Change ComboBox
super(StateOverviewController, self).register_view(view)
self.allowed_state_classes = self.get_allowed_state_classes(self.model.state)
view['entry_name'].connect('focus-out-event', self.on_focus_out)
view['entry_name'].connect('key-press-event', self.check_for_enter)
if self.model.state.name:
view['entry_name'].set_text(self.model.state.name)
view['label_id_value'].set_text(self.model.state.state_id)
l_store = Gtk.ListStore(GObject.TYPE_STRING)
combo = Gtk.ComboBoxText()
combo.set_name("state_type_combo")
combo.set_focus_on_click(False)
combo.set_model(l_store)
combo.show_all()
view['type_viewport'].add(combo)
view['type_viewport'].show()
# Prepare label for state_name -> Library states cannot be changed
if isinstance(self.model, LibraryStateModel):
l_store.prepend(['LIBRARY'])
combo.set_sensitive(False)
self.view['library_path'].set_text(self.model.state.library_path + "/" + self.model.state.library_name)
self.view['library_path'].set_sensitive(True)
self.view['library_path'].set_editable(False)
view['show_content_checkbutton'].set_active(self.model.meta['gui']['show_content'] is True)
view['show_content_checkbutton'].connect('toggled', self.on_toggle_show_content)
# self.view['properties_widget'].remove(self.view['show_content_checkbutton'])
else:
self.view['properties_widget'].remove(self.view['label_library_path'])
self.view['properties_widget'].remove(self.view['library_path'])
self.view['properties_widget'].remove(self.view['label_show_content'])
self.view['properties_widget'].remove(self.view['show_content_checkbutton'])
self.view['properties_widget'].resize(2, 5)
for state_class in self.allowed_state_classes:
if isinstance(self.model.state, state_class):
l_store.prepend([state_class.__name__])
else:
l_store.append([state_class.__name__])
combo.set_active(0)
view['type_combobox'] = combo
view['type_combobox'].connect('changed', self.change_type)
# Prepare "is start state check button"
has_no_start_state_state_types = [BarrierConcurrencyState, PreemptiveConcurrencyState]
if not self.with_is_start_state_check_box or isinstance(self.model.state, DeciderState) or \
self.model.state.is_root_state or type(self.model.parent.state) in has_no_start_state_state_types:
view['is_start_state_checkbutton'].destroy()
else:
view['is_start_state_checkbutton'].set_active(bool(self.model.is_start))
view['is_start_state_checkbutton'].connect('toggled', self.on_toggle_is_start_state)
if isinstance(self.model.state, DeciderState):
combo.set_sensitive(False)
# in case the state is inside of a library
if self.model.state.get_next_upper_library_root_state():
view['entry_name'].set_editable(False)
combo.set_sensitive(False)
view['is_start_state_checkbutton'].set_sensitive(False)
if isinstance(self.model, LibraryStateModel):
self.view['show_content_checkbutton'].set_sensitive(False) | Called when the View was registered
Can be used e.g. to connect signals. Here, the destroy signal is connected to close the application
:param rafcon.gui.views.state_editor.overview.StateOverviewView view: A state overview view instance | entailment |
def run(self):
""" This defines the sequence of actions that are taken when the barrier concurrency state is executed
:return:
"""
logger.debug("Starting execution of {0}{1}".format(self, " (backwards)" if self.backward_execution else ""))
self.setup_run()
# data to be accessed by the decider state
child_errors = {}
final_outcomes_dict = {}
decider_state = self.states[UNIQUE_DECIDER_STATE_ID]
try:
concurrency_history_item = self.setup_forward_or_backward_execution()
self.start_child_states(concurrency_history_item, decider_state)
# print("bcs1")
#######################################################
# wait for all child threads to finish
#######################################################
for history_index, state in enumerate(self.states.values()):
# skip the decider state
if state is not decider_state:
self.join_state(state, history_index, concurrency_history_item)
self.add_state_execution_output_to_scoped_data(state.output_data, state)
self.update_scoped_variables_with_output_dictionary(state.output_data, state)
# save the errors of the child state executions for the decider state
if 'error' in state.output_data:
child_errors[state.state_id] = (state.name, state.output_data['error'])
final_outcomes_dict[state.state_id] = (state.name, state.final_outcome)
# print("bcs2")
#######################################################
# handle backward execution case
#######################################################
if self.backward_execution:
# print("bcs2.1.")
return self.finalize_backward_execution()
else:
# print("bcs2.2.")
self.backward_execution = False
# print("bcs3")
#######################################################
# execute decider state
#######################################################
decider_state_error = self.run_decider_state(decider_state, child_errors, final_outcomes_dict)
# print("bcs4")
#######################################################
# handle no transition
#######################################################
transition = self.get_transition_for_outcome(decider_state, decider_state.final_outcome)
if transition is None:
# final outcome is set here
transition = self.handle_no_transition(decider_state)
# if the transition is still None, then the child_state was preempted or aborted, in this case return
decider_state.state_execution_status = StateExecutionStatus.INACTIVE
# print("bcs5")
if transition is None:
self.output_data["error"] = RuntimeError("state aborted")
else:
if decider_state_error:
self.output_data["error"] = decider_state_error
self.final_outcome = self.outcomes[transition.to_outcome]
# print("bcs6")
return self.finalize_concurrency_state(self.final_outcome)
except Exception as e:
logger.error("{0} had an internal error: {1}\n{2}".format(self, str(e), str(traceback.format_exc())))
self.output_data["error"] = e
self.state_execution_status = StateExecutionStatus.WAIT_FOR_NEXT_STATE
return self.finalize(Outcome(-1, "aborted")) | This defines the sequence of actions that are taken when the barrier concurrency state is executed
:return: | entailment |
def run_decider_state(self, decider_state, child_errors, final_outcomes_dict):
""" Runs the decider state of the barrier concurrency state. The decider state decides on which outcome the
barrier concurrency is left.
:param decider_state: the decider state of the barrier concurrency state
:param child_errors: error of the concurrent branches
:param final_outcomes_dict: dictionary of all outcomes of the concurrent branches
:return:
"""
decider_state.state_execution_status = StateExecutionStatus.ACTIVE
# forward the decider specific data
decider_state.child_errors = child_errors
decider_state.final_outcomes_dict = final_outcomes_dict
# standard state execution
decider_state.input_data = self.get_inputs_for_state(decider_state)
decider_state.output_data = self.create_output_dictionary_for_state(decider_state)
decider_state.start(self.execution_history, backward_execution=False)
decider_state.join()
decider_state_error = None
if decider_state.final_outcome.outcome_id == -1:
if 'error' in decider_state.output_data:
decider_state_error = decider_state.output_data['error']
# standard output data processing
self.add_state_execution_output_to_scoped_data(decider_state.output_data, decider_state)
self.update_scoped_variables_with_output_dictionary(decider_state.output_data, decider_state)
return decider_state_error | Runs the decider state of the barrier concurrency state. The decider state decides on which outcome the
barrier concurrency is left.
:param decider_state: the decider state of the barrier concurrency state
:param child_errors: error of the concurrent branches
:param final_outcomes_dict: dictionary of all outcomes of the concurrent branches
:return: | entailment |
def _check_transition_validity(self, check_transition):
""" Transition of BarrierConcurrencyStates must least fulfill the condition of a ContainerState.
Start transitions are forbidden in the ConcurrencyState.
:param check_transition: the transition to check for validity
:return:
"""
valid, message = super(BarrierConcurrencyState, self)._check_transition_validity(check_transition)
if not valid:
return False, message
# Only the following transitions are allowed in barrier concurrency states:
# - Transitions from the decider state to the parent state\n"
# - Transitions from not-decider states to the decider state\n"
# - Transitions from not_decider states from aborted/preempted outcomes to the
# aborted/preempted outcome of the parent
from_state_id = check_transition.from_state
to_state_id = check_transition.to_state
from_outcome_id = check_transition.from_outcome
to_outcome_id = check_transition.to_outcome
if from_state_id == UNIQUE_DECIDER_STATE_ID:
if to_state_id != self.state_id:
return False, "Transition from the decider state must go to the parent state"
else:
if to_state_id != UNIQUE_DECIDER_STATE_ID:
if from_outcome_id not in [-2, -1] or to_outcome_id not in [-2, -1]:
return False, "Transition from this state must go to the decider state. The only exception are " \
"transition from aborted/preempted to the parent aborted/preempted outcomes"
return True, message | Transition of BarrierConcurrencyStates must least fulfill the condition of a ContainerState.
Start transitions are forbidden in the ConcurrencyState.
:param check_transition: the transition to check for validity
:return: | entailment |
def add_state(self, state, storage_load=False):
"""Overwrite the parent class add_state method
Add automatic transition generation for the decider_state.
:param state: The state to be added
:return:
"""
state_id = super(BarrierConcurrencyState, self).add_state(state)
if not storage_load and not self.__init_running and not state.state_id == UNIQUE_DECIDER_STATE_ID:
# the transitions must only be created for the initial add_state call and not during each load procedure
for o_id, o in list(state.outcomes.items()):
if not o_id == -1 and not o_id == -2:
self.add_transition(state.state_id, o_id, self.states[UNIQUE_DECIDER_STATE_ID].state_id, None)
return state_id | Overwrite the parent class add_state method
Add automatic transition generation for the decider_state.
:param state: The state to be added
:return: | entailment |
def states(self, states):
""" Overwrite the setter of the container state base class as special handling for the decider state is needed.
:param states: the dictionary of new states
:raises exceptions.TypeError: if the states parameter is not of type dict
"""
# First safely remove all existing states (recursively!), as they will be replaced
state_ids = list(self.states.keys())
for state_id in state_ids:
# Do not remove decider state, if teh new list of states doesn't contain an alternative one
if state_id == UNIQUE_DECIDER_STATE_ID and UNIQUE_DECIDER_STATE_ID not in states:
continue
self.remove_state(state_id)
if states is not None:
if not isinstance(states, dict):
raise TypeError("states must be of type dict")
# Ensure that the decider state is added first, as transition to this states will automatically be
# created when adding further states
decider_state = states.pop(UNIQUE_DECIDER_STATE_ID, None)
if decider_state is not None:
self.add_state(decider_state)
for state in states.values():
self.add_state(state) | Overwrite the setter of the container state base class as special handling for the decider state is needed.
:param states: the dictionary of new states
:raises exceptions.TypeError: if the states parameter is not of type dict | entailment |
def remove_state(self, state_id, recursive=True, force=False, destroy=True):
""" Overwrite the parent class remove state method by checking if the user tries to delete the decider state
:param state_id: the id of the state to remove
:param recursive: a flag to indicate a recursive disassembling of all substates
:param force: a flag to indicate forcefully deletion of all states (important of the decider state in the
barrier concurrency state)
:param destroy: a flag which indicates if the state should not only be disconnected from the state but also
destroyed, including all its state elements
:raises exceptions.AttributeError: if the state_id parameter is the decider state
"""
if state_id == UNIQUE_DECIDER_STATE_ID and force is False:
raise AttributeError("You are not allowed to delete the decider state.")
else:
return ContainerState.remove_state(self, state_id, recursive=recursive, force=force, destroy=destroy) | Overwrite the parent class remove state method by checking if the user tries to delete the decider state
:param state_id: the id of the state to remove
:param recursive: a flag to indicate a recursive disassembling of all substates
:param force: a flag to indicate forcefully deletion of all states (important of the decider state in the
barrier concurrency state)
:param destroy: a flag which indicates if the state should not only be disconnected from the state but also
destroyed, including all its state elements
:raises exceptions.AttributeError: if the state_id parameter is the decider state | entailment |
def get_outcome_for_state_name(self, name):
""" Returns the final outcome of the child state specified by name.
Note: This is utility function that is used by the programmer to make a decision based on the final outcome
of its child states. A state is not uniquely specified by the name, but as the programmer normally does not want
to use state-ids in his code this utility function was defined.
:param name: The name of the state to get the final outcome for.
:return:
"""
return_value = None
for state_id, name_outcome_tuple in self.final_outcomes_dict.items():
if name_outcome_tuple[0] == name:
return_value = name_outcome_tuple[1]
break
return return_value | Returns the final outcome of the child state specified by name.
Note: This is utility function that is used by the programmer to make a decision based on the final outcome
of its child states. A state is not uniquely specified by the name, but as the programmer normally does not want
to use state-ids in his code this utility function was defined.
:param name: The name of the state to get the final outcome for.
:return: | entailment |
def get_outcome_for_state_id(self, state_id):
""" Returns the final outcome of the child state specified by the state_id.
:param state_id: The id of the state to get the final outcome for.
:return:
"""
return_value = None
for s_id, name_outcome_tuple in self.final_outcomes_dict.items():
if s_id == state_id:
return_value = name_outcome_tuple[1]
break
return return_value | Returns the final outcome of the child state specified by the state_id.
:param state_id: The id of the state to get the final outcome for.
:return: | entailment |
def get_errors_for_state_name(self, name):
""" Returns the error message of the child state specified by name.
Note: This is utility function that is used by the programmer to make a decision based on the final outcome
of its child states. A state is not uniquely specified by the name, but as the programmer normally does not want
to use state-ids in his code this utility function was defined.
:param name: The name of the state to get the error message for
:return:
"""
return_value = None
for state_id, name_outcome_tuple in self.child_errors.items():
if name_outcome_tuple[0] == name:
return_value = name_outcome_tuple[1]
break
return return_value | Returns the error message of the child state specified by name.
Note: This is utility function that is used by the programmer to make a decision based on the final outcome
of its child states. A state is not uniquely specified by the name, but as the programmer normally does not want
to use state-ids in his code this utility function was defined.
:param name: The name of the state to get the error message for
:return: | entailment |
def add_controller(self, key, controller):
"""Add child controller
The passed controller is registered as child of self. The register_actions method of the child controller is
called, allowing the child controller to register shortcut callbacks.
:param key: Name of the controller (unique within self), to later access it again
:param ExtendedController controller: Controller to be added as child
"""
assert isinstance(controller, ExtendedController)
controller.parent = self
self.__child_controllers[key] = controller
if self.__shortcut_manager is not None and controller not in self.__action_registered_controllers:
controller.register_actions(self.__shortcut_manager)
self.__action_registered_controllers.append(controller) | Add child controller
The passed controller is registered as child of self. The register_actions method of the child controller is
called, allowing the child controller to register shortcut callbacks.
:param key: Name of the controller (unique within self), to later access it again
:param ExtendedController controller: Controller to be added as child | entailment |
def remove_controller(self, controller):
"""Remove child controller and destroy it
Removes all references to the child controller and calls destroy() on the controller.
:param str | ExtendedController controller: Either the child controller object itself or its registered name
:return: Whether the controller was existing
:rtype: bool
"""
# Get name of controller
if isinstance(controller, ExtendedController):
# print(self.__class__.__name__, " remove ", controller.__class__.__name__)
for key, child_controller in self.__child_controllers.items():
if controller is child_controller:
break
else:
return False
else:
key = controller
# print(self.__class__.__name__, " remove key ", key, self.__child_controllers.keys())
if key in self.__child_controllers:
if self.__shortcut_manager is not None:
self.__action_registered_controllers.remove(self.__child_controllers[key])
self.__child_controllers[key].unregister_actions(self.__shortcut_manager)
self.__child_controllers[key].destroy()
del self.__child_controllers[key]
# print("removed", controller.__class__.__name__ if not isinstance(controller, str) else controller)
return True
# print("do not remove", controller.__class__.__name__)
return False | Remove child controller and destroy it
Removes all references to the child controller and calls destroy() on the controller.
:param str | ExtendedController controller: Either the child controller object itself or its registered name
:return: Whether the controller was existing
:rtype: bool | entailment |
def register_actions(self, shortcut_manager):
"""Register callback methods for triggered actions in all child controllers.
:param rafcon.gui.shortcut_manager.ShortcutManager shortcut_manager: Shortcut Manager Object holding mappings
between shortcuts and actions.
"""
assert isinstance(shortcut_manager, ShortcutManager)
self.__shortcut_manager = shortcut_manager
for controller in list(self.__child_controllers.values()):
if controller not in self.__action_registered_controllers:
try:
controller.register_actions(shortcut_manager)
except Exception as e:
logger.error("Error while registering action for {0}: {1}".format(controller.__class__.__name__, e))
self.__action_registered_controllers.append(controller) | Register callback methods for triggered actions in all child controllers.
:param rafcon.gui.shortcut_manager.ShortcutManager shortcut_manager: Shortcut Manager Object holding mappings
between shortcuts and actions. | entailment |
def destroy(self):
"""Recursively destroy all Controllers
The method remove all controllers, which calls the destroy method of the child controllers. Then,
all registered models are relieved and and the widget hand by the initial view argument is destroyed.
"""
self.disconnect_all_signals()
controller_names = [key for key in self.__child_controllers]
for controller_name in controller_names:
self.remove_controller(controller_name)
self.relieve_all_models()
if self.parent:
self.__parent = None
if self._view_initialized:
# print(self.__class__.__name__, "destroy view", self.view, self)
self.view.get_top_widget().destroy()
self.view = None
self._Observer__PROP_TO_METHS.clear() # prop name --> set of observing methods
self._Observer__METH_TO_PROPS.clear() # method --> set of observed properties
self._Observer__PAT_TO_METHS.clear() # like __PROP_TO_METHS but only for pattern names (to optimize search)
self._Observer__METH_TO_PAT.clear() # method --> pattern
self._Observer__PAT_METH_TO_KWARGS.clear() # (pattern, method) --> info
self.observe = None
else:
logger.warning("The controller {0} seems to be destroyed before the view was fully initialized. {1} "
"Check if you maybe do not call {2} or there exist most likely threading problems."
"".format(self.__class__.__name__, self.model, ExtendedController.register_view)) | Recursively destroy all Controllers
The method remove all controllers, which calls the destroy method of the child controllers. Then,
all registered models are relieved and and the widget hand by the initial view argument is destroyed. | entailment |
def observe_model(self, model):
"""Make this model observable within the controller
The method also keeps track of all observed models, in order to be able to relieve them later on.
:param gtkmvc3.Model model: The model to be observed
"""
self.__registered_models.add(model)
return super(ExtendedController, self).observe_model(model) | Make this model observable within the controller
The method also keeps track of all observed models, in order to be able to relieve them later on.
:param gtkmvc3.Model model: The model to be observed | entailment |
def relieve_model(self, model):
"""Do no longer observe the model
The model is also removed from the internal set of tracked models.
:param gtkmvc3.Model model: The model to be relieved
"""
self.__registered_models.remove(model)
return super(ExtendedController, self).relieve_model(model) | Do no longer observe the model
The model is also removed from the internal set of tracked models.
:param gtkmvc3.Model model: The model to be relieved | entailment |
def relieve_all_models(self):
"""Relieve all registered models
The method uses the set of registered models to relieve them.
"""
map(self.relieve_model, list(self.__registered_models))
self.__registered_models.clear() | Relieve all registered models
The method uses the set of registered models to relieve them. | entailment |
def change_data_type(self, data_type, default_value=None):
"""This method changes both the data type and default value. If one of the parameters does not fit,
an exception is thrown and no property is changed. Using this method ensures a consistent data type
and default value and only notifies once.
:param data_type: The new data type
:param default_value: The new default value
:return:
"""
old_data_type = self.data_type
self.data_type = data_type
if default_value is None:
default_value = self.default_value
if type_helpers.type_inherits_of_type(type(default_value), self._data_type):
self._default_value = default_value
else:
if old_data_type.__name__ == "float" and data_type == "int":
if self.default_value:
self._default_value = int(default_value)
else:
self._default_value = 0
elif old_data_type.__name__ == "int" and data_type == "float":
if self.default_value:
self._default_value = float(default_value)
else:
self._default_value = 0.0
else:
self._default_value = None | This method changes both the data type and default value. If one of the parameters does not fit,
an exception is thrown and no property is changed. Using this method ensures a consistent data type
and default value and only notifies once.
:param data_type: The new data type
:param default_value: The new default value
:return: | entailment |
def check_default_value(self, default_value, data_type=None):
"""Check whether the passed default value suits to the passed data type. If no data type is passed, the
data type of the data port is used. If the default value does not fit, an exception is thrown. If the default
value is of type string, it is tried to convert that value to the data type.
:param default_value: The default value to check
:param data_type: The data type to use
:raises exceptions.AttributeError: if check fails
:return: The converted default value
"""
if data_type is None:
data_type = self.data_type
if default_value is not None:
# If the default value is passed as string, we have to convert it to the data type
if isinstance(default_value, string_types):
if len(default_value) > 1 and default_value[0] == '$':
return default_value
if default_value == "None":
return None
default_value = type_helpers.convert_string_value_to_type_value(default_value, data_type)
if default_value is None:
raise AttributeError("Could not convert default value '{0}' to data type '{1}'.".format(
default_value, data_type))
else:
if not isinstance(default_value, self.data_type):
if self._no_type_error_exceptions:
logger.warning("Handed default value '{0}' is of type '{1}' but data port data type is {2} {3}."
"".format(default_value, type(default_value), data_type, self))
else:
raise TypeError("Handed default value '{0}' is of type '{1}' but data port data type is {2}"
"{3} of {4}.".format(default_value, type(default_value), data_type,
self,
self.parent.get_path() if self.parent is not None else ""))
return default_value | Check whether the passed default value suits to the passed data type. If no data type is passed, the
data type of the data port is used. If the default value does not fit, an exception is thrown. If the default
value is of type string, it is tried to convert that value to the data type.
:param default_value: The default value to check
:param data_type: The data type to use
:raises exceptions.AttributeError: if check fails
:return: The converted default value | entailment |
def open_folder_cmd_line(query, default_path=None):
"""Queries the user for a path to open
:param str query: Query that asks the user for a specific folder path to be opened
:param str default_path: Path to use if the user doesn't specify a path
:return: Input path from the user or `default_path` if nothing is specified or None if path does not exist
:rtype: str
"""
user_input = input(query + ': ')
if len(user_input) == 0:
user_input = default_path
if not user_input or not os.path.isdir(user_input):
return None
return user_input | Queries the user for a path to open
:param str query: Query that asks the user for a specific folder path to be opened
:param str default_path: Path to use if the user doesn't specify a path
:return: Input path from the user or `default_path` if nothing is specified or None if path does not exist
:rtype: str | entailment |
def create_folder_cmd_line(query, default_name=None, default_path=None):
"""Queries the user for a path to be created
:param str query: Query that asks the user for a specific folder path to be created
:param str default_name: Default name of the folder to be created
:param str default_path: Path in which the folder is created if the user doesn't specify a path
:return: Input path from the user or `default_path` if nothing is specified or None if directory could ne be created
:rtype: str
"""
default = None
if default_name and default_path:
default = os.path.join(default_path, default_name)
user_input = input(query + ' [default {}]: '.format(default))
if len(user_input) == 0:
user_input = default
if not user_input:
return None
if not os.path.isdir(user_input):
try:
os.makedirs(user_input)
except OSError:
return None
return user_input | Queries the user for a path to be created
:param str query: Query that asks the user for a specific folder path to be created
:param str default_name: Default name of the folder to be created
:param str default_path: Path in which the folder is created if the user doesn't specify a path
:return: Input path from the user or `default_path` if nothing is specified or None if directory could ne be created
:rtype: str | entailment |
def save_folder_cmd_line(query, default_name=None, default_path=None):
"""Queries the user for a path or file to be saved into
The folder or file has not to be created already and will not be created by this function. The parent directory
of folder and file has to exist otherwise the function will return None.
:param str query: Query that asks the user for a specific folder/file path to be created
:param str default_name: Default name of the folder to be created
:param str default_path: Path in which the folder is created if the user doesn't specify a path
:return: Input path from the user or `default_path` if nothing is specified and None if directory does not exist
:rtype: str
"""
default = None
if default_name and default_path:
default = os.path.join(default_path, default_name)
user_input = input(query + ' [default {}]: '.format(default))
if len(user_input) == 0:
user_input = default
if not user_input or not os.path.isdir(os.path.dirname(user_input)):
return None
return user_input | Queries the user for a path or file to be saved into
The folder or file has not to be created already and will not be created by this function. The parent directory
of folder and file has to exist otherwise the function will return None.
:param str query: Query that asks the user for a specific folder/file path to be created
:param str default_name: Default name of the folder to be created
:param str default_path: Path in which the folder is created if the user doesn't specify a path
:return: Input path from the user or `default_path` if nothing is specified and None if directory does not exist
:rtype: str | entailment |
def register_view(self, view):
"""Called when the View was registered
"""
super(StateTransitionsListController, self).register_view(view)
def cell_text(column, cell_renderer, model, iter, data):
t_id = model.get_value(iter, self.ID_STORAGE_ID)
in_external = 'external' if model.get_value(iter, self.IS_EXTERNAL_STORAGE_ID) else 'internal'
# print(t_id, in_external, self.combo[in_external])
if column.get_title() == 'Source State':
cell_renderer.set_property("model", self.combo[in_external][t_id]['from_state'])
cell_renderer.set_property("text-column", 0)
cell_renderer.set_property("has-entry", False)
elif column.get_title() == 'Source Outcome':
cell_renderer.set_property("model", self.combo[in_external][t_id]['from_outcome'])
cell_renderer.set_property("text-column", 0)
cell_renderer.set_property("has-entry", False)
elif column.get_title() == 'Target State':
cell_renderer.set_property("model", self.combo[in_external][t_id]['to_state'])
cell_renderer.set_property("text-column", 0)
cell_renderer.set_property("has-entry", False)
elif column.get_title() == 'Target Outcome':
cell_renderer.set_property("model", self.combo[in_external][t_id]['to_outcome'])
cell_renderer.set_property("text-column", 0)
cell_renderer.set_property("has-entry", False)
else:
logger.warning("Column has no cell_data_func %s %s" % (column.get_name(), column.get_title()))
view['from_state_col'].set_cell_data_func(view['from_state_combo'], cell_text)
view['to_state_col'].set_cell_data_func(view['to_state_combo'], cell_text)
view['from_outcome_col'].set_cell_data_func(view['from_outcome_combo'], cell_text)
view['to_outcome_col'].set_cell_data_func(view['to_outcome_combo'], cell_text)
if self.model.state.get_next_upper_library_root_state():
view['from_state_combo'].set_property("editable", False)
view['from_outcome_combo'].set_property("editable", False)
view['to_state_combo'].set_property("editable", False)
view['to_outcome_combo'].set_property("editable", False)
else:
self.connect_signal(view['from_state_combo'], "edited", self.on_combo_changed_from_state)
self.connect_signal(view['from_outcome_combo'], "edited", self.on_combo_changed_from_outcome)
self.connect_signal(view['to_state_combo'], "edited", self.on_combo_changed_to_state)
self.connect_signal(view['to_outcome_combo'], "edited", self.on_combo_changed_to_outcome)
view.tree_view.connect("grab-focus", self.on_focus)
self.update(initiator='"register view"') | Called when the View was registered | entailment |
def remove_core_element(self, model):
"""Remove respective core element of handed transition model
:param TransitionModel model: Transition model which core element should be removed
:return:
"""
assert model.transition.parent is self.model.state or model.transition.parent is self.model.parent.state
gui_helper_state_machine.delete_core_element_of_model(model) | Remove respective core element of handed transition model
:param TransitionModel model: Transition model which core element should be removed
:return: | entailment |
def get_possible_combos_for_transition(trans, model, self_model, is_external=False):
""" The function provides combos for a transition and its respective
:param trans:
:param model:
:param self_model:
:param is_external:
:return:
"""
from_state_combo = Gtk.ListStore(GObject.TYPE_STRING, GObject.TYPE_STRING)
from_outcome_combo = Gtk.ListStore(GObject.TYPE_STRING)
to_state_combo = Gtk.ListStore(GObject.TYPE_STRING)
to_outcome_combo = Gtk.ListStore(GObject.TYPE_STRING)
trans_dict = model.state.transitions
# get from state
if trans is None:
from_state = None
elif trans.from_state is not None:
from_state = model.state.states[trans.from_state]
else:
from_state = model.state if is_external else self_model.state
# collect all free from-outcome-combo and from_state which are still valid -> filter all outcome already in use
free_from_outcomes_dict = {}
for state in model.state.states.values():
from_o_combo = state.outcomes.values()
# print([o.outcome_id for o in from_o_combo], state_model.state.state_id)
for transition in trans_dict.values():
# print(transition, [[o.outcome_id == transition.from_outcome, transition.from_state == state_model.state.state_id] for o in from_o_combo])
from_o_combo = [o for o in from_o_combo if not (o.outcome_id == transition.from_outcome and
transition.from_state == state.state_id)]
# print([o.outcome_id for o in from_o_combo])
if len(from_o_combo) > 0:
free_from_outcomes_dict[state.state_id] = from_o_combo
# check if parent has start_state
if model.state.start_state_id is None:
free_from_outcomes_dict[model.state.state_id] = [None]
# for from-state-combo use all states with free outcomes and from_state
combined_states = [model.state] if is_external else [self_model.state]
combined_states.extend(model.state.states.values())
free_from_states = [state for state in combined_states if state.state_id in free_from_outcomes_dict]
if trans is None:
return None, None, None, None, free_from_states, free_from_outcomes_dict
def append_from_state_combo(possible_state):
if possible_state.state_id == self_model.state.state_id:
from_state_combo.append(['self.' + possible_state.state_id, possible_state.state_id])
elif is_external and from_state.state_id == model.state.state_id:
from_state_combo.append(['parent.' + possible_state.state_id, possible_state.state_id])
else:
from_state_combo.append([possible_state.name + '.' + possible_state.state_id, possible_state.state_id])
append_from_state_combo(from_state)
for state in free_from_states:
if from_state is not state:
append_from_state_combo(state)
# for from-outcome-combo collect all combos for actual transition
# -> actual outcome + free outcomes of actual from_state.state_id
if trans is not None:
if trans.from_outcome is None:
from_outcome_combo.append(["None"])
else:
outcome = from_state.outcomes[trans.from_outcome]
from_outcome_combo.append([outcome.name + "." + str(outcome.outcome_id)])
for outcome in free_from_outcomes_dict.get(from_state.state_id, []):
if outcome is None:
from_outcome_combo.append(["None"])
else:
from_outcome_combo.append([outcome.name + "." + str(outcome.outcome_id)])
# get to state
if trans.to_state == model.state.state_id:
to_state = model.state if is_external else self_model.state
else:
to_state = model.state.states[trans.to_state]
# for to-state-combo filter from_state -> first actual to_state + other optional states
def generate_to_state_combo(possible_state):
if possible_state.state_id == self_model.state.state_id:
to_state_combo.append(["self." + possible_state.state_id])
elif is_external and possible_state.state_id == model.state.state_id:
to_state_combo.append(['parent.' + possible_state.state_id])
else:
to_state_combo.append([possible_state.name + '.' + possible_state.state_id])
to_states = [model.state] if is_external else [self_model.state]
to_states.extend(model.state.states.values())
generate_to_state_combo(to_state)
for state in to_states:
if not to_state.state_id == state.state_id:
generate_to_state_combo(state)
# for to-outcome-combo use parent combos -> first actual outcome + other outcome
def append_to_outcome_combo(possible_outcome):
if is_external:
to_outcome_combo.append(['parent.' + possible_outcome.name + "." + str(possible_outcome.outcome_id)])
else:
to_outcome_combo.append(['self.' + possible_outcome.name + "." + str(possible_outcome.outcome_id)])
if trans.to_outcome is not None:
append_to_outcome_combo(model.state.outcomes[trans.to_outcome])
for outcome in model.state.outcomes.values():
if not (trans.to_outcome == outcome.outcome_id and trans.to_state == model.state.state_id):
append_to_outcome_combo(outcome)
return from_state_combo, from_outcome_combo, to_state_combo, to_outcome_combo, free_from_states, free_from_outcomes_dict | The function provides combos for a transition and its respective
:param trans:
:param model:
:param self_model:
:param is_external:
:return: | entailment |
def _update_internal_data_base(self):
""" Updates Internal combo knowledge for any actual transition by calling get_possible_combos_for_transition-
function for those.
"""
model = self.model
# print("clean data base")
### FOR COMBOS
# internal transitions
# - take all internal states
# - take all not used internal outcomes of this states
# external transitions
# - take all external states
# - take all external outcomes
# - take all not used own outcomes
### LINKING
# internal -> transition_id -> from_state = outcome combos
# -> ...
# external -> state -> outcome combos
self.combo['internal'] = {}
self.combo['external'] = {}
self.combo['free_from_states'] = {}
self.combo['free_from_outcomes_dict'] = {}
self.combo['free_ext_from_outcomes_dict'] = {}
self.combo['free_ext_from_outcomes_dict'] = {}
if isinstance(model, ContainerStateModel):
# check for internal combos
for transition_id, transition in model.state.transitions.items():
self.combo['internal'][transition_id] = {}
[from_state_combo, from_outcome_combo,
to_state_combo, to_outcome_combo,
free_from_states, free_from_outcomes_dict] = \
self.get_possible_combos_for_transition(transition, self.model, self.model)
self.combo['internal'][transition_id]['from_state'] = from_state_combo
self.combo['internal'][transition_id]['from_outcome'] = from_outcome_combo
self.combo['internal'][transition_id]['to_state'] = to_state_combo
self.combo['internal'][transition_id]['to_outcome'] = to_outcome_combo
self.combo['free_from_states'] = free_from_states
self.combo['free_from_outcomes_dict'] = free_from_outcomes_dict
if not model.state.transitions:
[x, y, z, v, free_from_states, free_from_outcomes_dict] = \
self.get_possible_combos_for_transition(None, self.model, self.model)
self.combo['free_from_states'] = free_from_states
self.combo['free_from_outcomes_dict'] = free_from_outcomes_dict
# TODO check why the can happen should not be handed always the LibraryStateModel
if not (self.model.state.is_root_state or self.model.state.is_root_state_of_library):
# check for external combos
for transition_id, transition in model.parent.state.transitions.items():
if transition.from_state == model.state.state_id or transition.to_state == model.state.state_id:
self.combo['external'][transition_id] = {}
[from_state_combo, from_outcome_combo,
to_state_combo, to_outcome_combo,
free_from_states, free_from_outcomes_dict] = \
self.get_possible_combos_for_transition(transition, self.model.parent, self.model, True)
self.combo['external'][transition_id]['from_state'] = from_state_combo
self.combo['external'][transition_id]['from_outcome'] = from_outcome_combo
self.combo['external'][transition_id]['to_state'] = to_state_combo
self.combo['external'][transition_id]['to_outcome'] = to_outcome_combo
self.combo['free_ext_from_states'] = free_from_states
self.combo['free_ext_from_outcomes_dict'] = free_from_outcomes_dict
if not model.parent.state.transitions:
[x, y, z, v, free_from_states, free_from_outcomes_dict] = \
self.get_possible_combos_for_transition(None, self.model.parent, self.model, True)
self.combo['free_ext_from_states'] = free_from_states
self.combo['free_ext_from_outcomes_dict'] = free_from_outcomes_dict | Updates Internal combo knowledge for any actual transition by calling get_possible_combos_for_transition-
function for those. | entailment |
def _update_tree_store(self):
""" Updates TreeStore of the Gtk.ListView according internal combo knowledge gained by
_update_internal_data_base function call.
"""
self.list_store.clear()
if self.view_dict['transitions_internal'] and isinstance(self.model, ContainerStateModel) and \
len(self.model.state.transitions) > 0:
for transition_id in self.combo['internal'].keys():
# print("TRANSITION_ID: ", transition_id, self.model.state.transitions)
t = self.model.state.transitions[transition_id]
if t.from_state is not None:
from_state = self.model.state.states[t.from_state]
from_state_label = from_state.name
from_outcome_label = from_state.outcomes[t.from_outcome].name
else:
from_state_label = "self (" + self.model.state.name + ")"
from_outcome_label = ""
if t.to_state is None:
to_state_label = "self (" + self.model.state.name + ")"
to_outcome = None if t.to_outcome is None else self.model.state.outcomes[t.to_outcome]
to_outcome_label = "None" if to_outcome is None else to_outcome.name
else:
# print(t.to_state, self.model.states)
if t.to_state == self.model.state.state_id:
to_state_label = "self (" + self.model.state.name + ")"
to_outcome_label = self.model.state.outcomes[t.to_outcome].name
else:
to_state_label = self.model.state.states[t.to_state].name
to_outcome_label = None
self.list_store.append([transition_id, # id
from_state_label, # from-state
from_outcome_label, # from-outcome
to_state_label, # to-state
to_outcome_label, # to-outcome
False, # is_external
t,
self.model.state,
True,
self.model.get_transition_m(transition_id)])
if self.view_dict['transitions_external'] and self.model.parent and \
len(self.model.parent.state.transitions) > 0:
for transition_id in self.combo['external'].keys():
# print("TRANSITION_ID: ", transition_id, self.model.parent.state.transitions)
try:
t = self.model.parent.state.transitions[transition_id]
# logger.info(str(t))
from_state = None
if t.from_state is not None:
from_state = self.model.parent.states[t.from_state].state
if from_state is None:
from_state_label = "parent (" + self.model.state.parent.name + ")"
from_outcome_label = ""
elif from_state.state_id == self.model.state.state_id:
from_state_label = "self (" + from_state.name + ")"
from_outcome_label = from_state.outcomes[t.from_outcome].name
else:
from_state_label = from_state.name
from_outcome_label = from_state.outcomes[t.from_outcome].name
if t.to_state == self.model.parent.state.state_id:
to_state_label = 'parent (' + self.model.parent.state.name + ")"
to_outcome_label = self.model.parent.state.outcomes[t.to_outcome].name
else:
if t.to_state == self.model.state.state_id:
to_state_label = "self (" + self.model.state.name + ")"
else:
to_state_label = self.model.parent.state.states[t.to_state].name
to_outcome_label = None
self.list_store.append([transition_id, # id
from_state_label, # from-state
from_outcome_label, # from-outcome
to_state_label, # to-state
to_outcome_label, # to-outcome
True, # is_external
t,
self.model.state,
True,
self.model.parent.get_transition_m(transition_id)])
except Exception as e:
logger.warning("There was a problem while updating the data-flow widget TreeStore. {0}".format(e)) | Updates TreeStore of the Gtk.ListView according internal combo knowledge gained by
_update_internal_data_base function call. | entailment |
def register_actions(self, shortcut_manager):
"""Register callback methods for triggered actions
:param rafcon.gui.shortcut_manager.ShortcutManager shortcut_manager:
"""
shortcut_manager.add_callback_for_action("delete", self.trans_list_ctrl.remove_action_callback)
shortcut_manager.add_callback_for_action("add", self.trans_list_ctrl.add_action_callback) | Register callback methods for triggered actions
:param rafcon.gui.shortcut_manager.ShortcutManager shortcut_manager: | entailment |
def move_dirty_lock_file(dirty_lock_file, sm_path):
""" Move the dirt_lock file to the sm_path and thereby is not found by auto recovery of backup anymore """
if dirty_lock_file is not None \
and not dirty_lock_file == os.path.join(sm_path, dirty_lock_file.split(os.sep)[-1]):
logger.debug("Move dirty lock from root tmp folder {0} to state machine folder {1}"
"".format(dirty_lock_file, os.path.join(sm_path, dirty_lock_file.split(os.sep)[-1])))
os.rename(dirty_lock_file, os.path.join(sm_path, dirty_lock_file.split(os.sep)[-1])) | Move the dirt_lock file to the sm_path and thereby is not found by auto recovery of backup anymore | entailment |
def write_backup_meta_data(self):
"""Write the auto backup meta data into the current tmp-storage path"""
auto_backup_meta_file = os.path.join(self._tmp_storage_path, FILE_NAME_AUTO_BACKUP)
storage.storage_utils.write_dict_to_json(self.meta, auto_backup_meta_file) | Write the auto backup meta data into the current tmp-storage path | entailment |
def update_last_backup_meta_data(self):
"""Update the auto backup meta data with internal recovery information"""
self.meta['last_backup']['time'] = get_time_string_for_float(self.last_backup_time)
self.meta['last_backup']['file_system_path'] = self._tmp_storage_path
self.meta['last_backup']['marked_dirty'] = self.state_machine_model.state_machine.marked_dirty | Update the auto backup meta data with internal recovery information | entailment |
def update_last_sm_origin_meta_data(self):
"""Update the auto backup meta data with information of the state machine origin"""
# TODO finally maybe remove this when all backup features are integrated into one backup-structure
# data also used e.g. to backup tabs
self.meta['last_saved']['time'] = self.state_machine_model.state_machine.last_update
self.meta['last_saved']['file_system_path'] = self.state_machine_model.state_machine.file_system_path | Update the auto backup meta data with information of the state machine origin | entailment |
def _check_for_dyn_timed_auto_backup(self):
""" The method implements the timed storage feature.
The method re-initiating a new timed thread if the state-machine not already stored to backup
(what could be caused by the force_temp_storage_interval) or force the storing of the state-machine if there
is no new request for a timed backup. New timed backup request are intrinsically represented by
self._timer_request_time and initiated by the check_for_auto_backup-method.
The feature uses only one thread for each ModificationHistoryModel and lock to be thread save.
"""
current_time = time.time()
self.timer_request_lock.acquire()
# sm = self.state_machine_model.state_machine
# TODO check for self._timer_request_time is None to avoid and reset auto-backup in case and fix it better
if self._timer_request_time is None:
# logger.warning("timer_request is None")
return self.timer_request_lock.release()
if self.timed_temp_storage_interval < current_time - self._timer_request_time:
# logger.info("{0} Perform timed auto-backup of state-machine {1}.".format(time.time(),
# sm.state_machine_id))
self.check_for_auto_backup(force=True)
else:
duration_to_wait = self.timed_temp_storage_interval - (current_time - self._timer_request_time)
hard_limit_duration_to_wait = self.force_temp_storage_interval - (current_time - self.last_backup_time)
hard_limit_active = hard_limit_duration_to_wait < duration_to_wait
# logger.info('{2} restart_thread {0} time to go {1}, hard limit {3}'.format(sm.state_machine_id,
# duration_to_wait, time.time(),
# hard_limit_active))
if hard_limit_active:
self.set_timed_thread(hard_limit_duration_to_wait, self.check_for_auto_backup, True)
else:
self.set_timed_thread(duration_to_wait, self._check_for_dyn_timed_auto_backup)
self.timer_request_lock.release() | The method implements the timed storage feature.
The method re-initiating a new timed thread if the state-machine not already stored to backup
(what could be caused by the force_temp_storage_interval) or force the storing of the state-machine if there
is no new request for a timed backup. New timed backup request are intrinsically represented by
self._timer_request_time and initiated by the check_for_auto_backup-method.
The feature uses only one thread for each ModificationHistoryModel and lock to be thread save. | entailment |
def check_for_auto_backup(self, force=False):
""" The method implements the checks for possible auto backup of the state-machine according duration till
the last change together with the private method _check_for_dyn_timed_auto_backup.
If the only_fix_interval is True this function is called ones in the beginning and is called by a timed-
threads in a fix interval.
:param force: is a flag that force the temporary backup of the state-machine to the tmp-folder
:return:
"""
if not self.timed_temp_storage_enabled:
return
sm = self.state_machine_model.state_machine
current_time = time.time()
if not self.only_fix_interval and not self.marked_dirty:
# logger.info("adjust last_backup_time " + str(sm.state_machine_id))
self.last_backup_time = current_time # used as 'last-modification-not-backup-ed' time
is_not_timed_or_reached_time_to_force = \
current_time - self.last_backup_time > self.force_temp_storage_interval or self.only_fix_interval
if (sm.marked_dirty and is_not_timed_or_reached_time_to_force) or force:
if not self.only_fix_interval or self.marked_dirty:
thread = threading.Thread(target=self.perform_temp_storage)
thread.start()
# self.last_backup_time = current_time # used as 'last-backup' time
if self.only_fix_interval:
self.set_timed_thread(self.force_temp_storage_interval, self.check_for_auto_backup)
else:
if not self.only_fix_interval:
self.timer_request_lock.acquire()
if self._timer_request_time is None:
# logger.info('{0} start_thread {1}'.format(current_time, sm.state_machine_id))
self._timer_request_time = current_time
self.set_timed_thread(self.timed_temp_storage_interval, self._check_for_dyn_timed_auto_backup)
else:
# logger.info('{0} update_thread {1}'.format(current_time, sm.state_machine_id))
self._timer_request_time = current_time
self.timer_request_lock.release()
else:
self.set_timed_thread(self.force_temp_storage_interval, self.check_for_auto_backup) | The method implements the checks for possible auto backup of the state-machine according duration till
the last change together with the private method _check_for_dyn_timed_auto_backup.
If the only_fix_interval is True this function is called ones in the beginning and is called by a timed-
threads in a fix interval.
:param force: is a flag that force the temporary backup of the state-machine to the tmp-folder
:return: | entailment |
def on_drag_data_get(self, widget, context, data, info, time):
"""dragged state is inserted and its state_id sent to the receiver
:param widget:
:param context:
:param data: SelectionData: contains state_id
:param info:
:param time:
"""
import rafcon.gui.helpers.state_machine as gui_helper_state_machine
state = self._get_state()
gui_helper_state_machine.add_state_by_drag_and_drop(state, data) | dragged state is inserted and its state_id sent to the receiver
:param widget:
:param context:
:param data: SelectionData: contains state_id
:param info:
:param time: | entailment |
def on_mouse_click(self, widget, event):
"""state insertion on mouse click
:param widget:
:param Gdk.Event event: mouse click event
"""
import rafcon.gui.helpers.state_machine as gui_helper_state_machine
if self.view.get_path_at_pos(int(event.x), int(event.y)) is not None \
and len(self.view.get_selected_items()) > 0:
return gui_helper_state_machine.insert_state_into_selected_state(self._get_state(), False) | state insertion on mouse click
:param widget:
:param Gdk.Event event: mouse click event | entailment |
def on_mouse_motion(self, widget, event):
"""selection on mouse over
:param widget:
:param Gdk.Event event: mouse motion event
"""
path = self.view.get_path_at_pos(int(event.x), int(event.y))
if path is not None:
self.view.select_path(path)
else:
self.view.unselect_all() | selection on mouse over
:param widget:
:param Gdk.Event event: mouse motion event | entailment |
def _get_state(self):
"""get state instance which was clicked on
:return: State that represents the icon which was clicked on
:rtype: rafcon.core.states.State
"""
selected = self.view.get_selected_items()
if not selected:
return
shorthand, state_class = self.view.states[selected[0][0]]
return state_class() | get state instance which was clicked on
:return: State that represents the icon which was clicked on
:rtype: rafcon.core.states.State | entailment |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.