code
string
signature
string
docstring
string
loss_without_docstring
float64
loss_with_docstring
float64
factor
float64
def create_button_label(icon, font_size=constants.FONT_SIZE_NORMAL):
    """Create a label showing a single icon glyph, meant to be placed in a button.

    :param icon: The icon (hex code point string of the icon font glyph)
    :param font_size: The size of the icon
    :return: The created, already visible Gtk.Label
    """
    icon_label = Gtk.Label()
    set_label_markup(icon_label, '&#x' + icon + ';', constants.ICON_FONT, font_size)
    icon_label.show()
    return icon_label
4.737273
7.045896
0.672345
def get_widget_title(tab_label_text):
    """Transform a notebook tab label into a title.

    Underscores are replaced by white spaces and every word is upper-cased.

    :param str tab_label_text: The string of the tab label to be transformed
    :return: The transformed title as a string
    :rtype: str
    """
    title = ''
    for word in tab_label_text.split('_'):
        title += word.upper() + ' '
    # Bug fix: str.strip() returns a new string; the original discarded the
    # result, so the returned title always carried a trailing space.
    return title.strip()
3.09869
3.115989
0.994448
def get_notebook_tab_title(notebook, page_num):
    """Fetch the title of a notebook tab identified by its page number.

    :param notebook: The GTK notebook
    :param page_num: The page number of the tab, for which the title is required
    :return: The title of the tab
    """
    page = notebook.get_nth_page(page_num)
    label_eventbox = notebook.get_tab_label(page)
    return get_widget_title(label_eventbox.get_tooltip_text())
5.406518
5.309458
1.018281
def set_notebook_title(notebook, page_num, title_label):
    """Set the title of a GTK notebook to one of its tab's titles.

    :param notebook: The GTK notebook
    :param page_num: The page number of a specific tab
    :param title_label: The GTK label holding the notebook's title
    :return: The new title of the notebook
    """
    title = get_notebook_tab_title(notebook, page_num)
    set_label_markup(title_label, title, constants.INTERFACE_FONT,
                     constants.FONT_SIZE_BIG, constants.LETTER_SPACING_1PT)
    return title
7.509992
8.755969
0.8577
def create_menu_box_with_icon_and_label(label_text):
    """Create a MenuItem box replacing the former ImageMenuItem.

    The box contains one label for the icon and one (accelerator) label for
    the text.

    :param label_text: The text which is displayed for the text label
    :return: Tuple of (box, icon label, text label)
    """
    icon_label = Gtk.Label()
    text_label = Gtk.AccelLabel.new(label_text)
    text_label.set_xalign(0)
    box = Gtk.Box.new(Gtk.Orientation.HORIZONTAL, 10)
    box.set_border_width(0)
    box.pack_start(icon_label, False, False, 0)
    box.pack_start(text_label, True, True, 0)
    return box, icon_label, text_label
2.068156
2.192736
0.943185
def set_window_size_and_position(window, window_key):
    """Adjust GTK Window's size, position and maximized state according to the corresponding values in the
    runtime_config file. The maximize method is triggered last to restore also the last stored size and position of
    the window. If the runtime_config does not exist, or the corresponding values are missing in the file, default
    values for the window size are used, and the mouse position is used to adjust the window's position.

    :param window: The GTK Window to be adjusted
    :param window_key: The window's key stored in the runtime config file
    """
    size = global_runtime_config.get_config_value(window_key + '_WINDOW_SIZE')
    position = global_runtime_config.get_config_value(window_key + '_WINDOW_POS')
    maximized = global_runtime_config.get_config_value(window_key + '_WINDOW_MAXIMIZED')

    # un-maximize here on purpose otherwise resize and reposition fails
    if not maximized:
        window.unmaximize()

    # fall back to the default size when the runtime config has no stored size
    if not size:
        size = constants.WINDOW_SIZE[window_key + '_WINDOW']
    window.resize(*size)
    if position:
        # clamp negative coordinates to the visible area
        position = (max(0, position[0]), max(0, position[1]))
        screen_width = Gdk.Screen.width()
        screen_height = Gdk.Screen.height()
        # only restore the position if it is still on-screen
        if position[0] < screen_width and position[1] < screen_height:
            window.move(*position)
    else:
        # no stored position: place the window at the mouse pointer
        window.set_position(Gtk.WindowPosition.MOUSE)
    # maximize last so the stored size/position survive un-maximizing later
    if maximized:
        window.maximize()
    window.show()
3.037398
2.971393
1.022213
def react_to_event(view, widget, event):
    """Checks whether the widget is supposed to react to passed event

    The function is intended for callback methods registering to shortcut actions. As several widgets can register to
    the same shortcut, only the one having the focus should react to it.

    :param gtkmvc3.View view: The view in which the widget is registered
    :param Gtk.Widget widget: The widget that subscribed to the shortcut action, should be the top widget of the view
    :param event: The event that caused the callback
    :return: Whether the widget is supposed to react to the event or not
    :rtype: bool
    """
    # See
    # http://pyGtk.org/pygtk2reference/class-gtkwidget.html#method-gtkwidget--is-focus and
    # http://pyGtk.org/pygtk2reference/class-gtkwidget.html#method-gtkwidget--has-focus
    # for detailed information about the difference between is_focus() and has_focus()
    if not view:  # view needs to be initialized
        return False
    # widget parameter must be set and a Gtk.Widget
    if not isinstance(widget, Gtk.Widget):
        return False
    # Either the widget itself or one of its children must be the focus widget within their toplevel
    child_is_focus = False if not isinstance(widget, Gtk.Container) else bool(widget.get_focus_child())
    if not child_is_focus and not widget.is_focus():
        return False

    def has_focus(widget):
        # recursive check: does this widget or any descendant hold the global input focus?
        if widget.has_focus():
            return True
        if not isinstance(widget, Gtk.Container):
            return False
        return any(has_focus(child) for child in widget.get_children())
    # Either, for any of widget or its children, has_focus must be True, in this case the widget has the global focus.
    if has_focus(widget):
        return True
    # Or the callback was not triggered by a shortcut, but e.g. a mouse click or a call from a test.
    # If the callback was triggered by a shortcut action, the event has at least a length of two and the second
    # element is a Gdk.ModifierType
    if len(event) < 2 or (len(event) >= 2 and not isinstance(event[1], Gdk.ModifierType)):
        return True
    return False
4.662598
4.589172
1.016
def is_event_of_key_string(event, key_string):
    """Check whether the key string matches the key value of the handed event.

    The function checks event tuples generated by the
    rafcon.gui.shortcut_manager.ShortcutManager: the tuple must have the
    expected shape and its key value must equal the parsed key string.

    :param tuple event: Event tuple generated by the ShortcutManager
    :param str key_string: Key string parsed to a key value and for condition check
    """
    if len(event) < 2 or isinstance(event[1], Gdk.ModifierType):
        return False
    return event[0] == Gtk.accelerator_parse(key_string)[0]
5.098007
4.897092
1.041027
def register_view(self, view):
    """Called when the View was registered

    Connects the maximize button and synchronizes its state.
    """
    super(TopToolBarController, self).register_view(view)
    maximize_button = view['maximize_button']
    maximize_button.connect('clicked', self.on_maximize_button_clicked)
    self.update_maximize_button()
5.036294
4.784678
1.052588
def register_view(self, view):
    """Called when the View was registered

    Connects the re-dock button of the undocked window.
    """
    super(TopToolBarUndockedWindowController, self).register_view(view)
    redock_button = view['redock_button']
    redock_button.connect('clicked', self.on_redock_button_clicked)
9.102304
8.386217
1.085389
def register_view(self, view):
    """Called when the View was registered

    Can be used e.g. to connect signals. Here, the destroy signal is connected
    to close the application, and a shortcut manager is set up for the window.
    """
    super(SingleWidgetWindowController, self).register_view(view)
    main_window = self.view['main_window']
    self.shortcut_manager = ShortcutManager(main_window)
    self.register_actions(self.shortcut_manager)
    view['main_window'].connect('destroy', Gtk.main_quit)
5.378646
5.497203
0.978433
def avoid_parallel_execution(func):
    """A decorator to avoid the parallel execution of a function.

    If the function is currently being executed, the second call is just
    skipped (returning None).

    :param func: The function to decorate
    :return: The wrapped function
    """
    import functools
    import threading
    # A real lock closes the check-then-set race the previous attribute-based
    # flag had when two callers entered at (nearly) the same time.
    execution_lock = threading.Lock()

    @functools.wraps(func)  # preserve name/docstring of the decorated function
    def func_wrapper(*args, **kwargs):
        # non-blocking acquire: skip instead of waiting, as documented above
        if not execution_lock.acquire(False):
            logger.verbose("Avoid parallel execution of function {}".format(func))
            return None
        try:
            return func(*args, **kwargs)
        finally:
            execution_lock.release()
    return func_wrapper
2.446574
2.796668
0.874818
def run(self):
    """This defines the sequence of actions that are taken when the library state is executed

    It basically just calls the run method of the container state
    :return:
    """
    self.state_execution_status = StateExecutionStatus.ACTIVE
    logger.debug("Entering library state '{0}' with name '{1}'".format(self.library_name, self.name))
    # self.state_copy.parent = self.parent
    # hand the execution context of this library state over to its inner state copy,
    # so the copy runs as if it were this state
    self.state_copy._run_id = self._run_id
    self.state_copy.input_data = self.input_data
    self.state_copy.output_data = self.output_data
    self.state_copy.execution_history = self.execution_history
    self.state_copy.backward_execution = self.backward_execution
    # delegate the actual execution to the wrapped state
    self.state_copy.run()
    logger.debug("Exiting library state '{0}' with name '{1}'".format(self.library_name, self.name))
    self.state_execution_status = StateExecutionStatus.WAIT_FOR_NEXT_STATE
    # propagate the inner state's final outcome as this state's outcome
    self.finalize(self.state_copy.final_outcome)
3.028985
2.817141
1.075198
def remove_outcome(self, outcome_id, force=False, destroy=True):
    """Overwrites the remove_outcome method of the State class.

    Prevents the user from removing an outcome from the library state unless
    the removal is forced. For further documentation, look at the State class.

    :raises exceptions.NotImplementedError: if the removal is not forced
    """
    if not force:
        raise NotImplementedError("Remove outcome is not implemented for library state {}".format(self))
    return State.remove_outcome(self, outcome_id, force, destroy)
5.910231
5.440654
1.086309
def remove_output_data_port(self, data_port_id, force=False, destroy=True):
    """Overwrites the remove_output_data_port method of the State class.

    Prevents the user from removing an output data port from the library state
    unless the removal is forced. For further documentation, look at the State
    class.

    :param bool force: True if the removal should be forced
    :raises exceptions.NotImplementedError: if the removal is not forced
    """
    if not force:
        raise NotImplementedError("Remove output data port is not implemented for library state {}".format(self))
    return State.remove_output_data_port(self, data_port_id, force, destroy)
4.609519
3.290886
1.400692
def library_hierarchy_depth(self):
    """Calculates the library hierarchy depth

    Counting starts at the current library state, so if there is no upper
    library state the depth is one.

    :return: library hierarchy depth
    :rtype: int
    """
    depth = 1
    upper_root_state = self.get_next_upper_library_root_state()
    while upper_root_state is not None:
        depth += 1
        upper_root_state = upper_root_state.parent.get_next_upper_library_root_state()
    return depth
2.708348
2.436502
1.111572
def load(self, config_file=None, path=None):
    """Loads the configuration from a specific file

    :param config_file: the name of the config file
    :param path: the path to the config file
    """
    if config_file is None:
        # no explicit file name: use the default, deriving the path from the
        # packaged resource if no path was given either
        config_file = CONFIG_FILE
        if path is None:
            path, config_file = split(resource_filename(__name__, CONFIG_FILE))
    super(Config, self).load(config_file, path)
3.531987
4.012261
0.880298
def register_view(self, view):
    """Called when the View was registered

    Can be used e.g. to connect signals. Here the port buttons are wired up
    and the visible tabs are configured depending on the model type.
    """
    super(StateEditorController, self).register_view(view)
    view.prepare_the_labels()  # the preparation of the labels is done here to take into account plugin hook changes
    # wire up the add/remove buttons of the port lists
    view['add_input_port_button'].connect('clicked', self.inputs_ctrl.on_add)
    view['add_output_port_button'].connect('clicked', self.outputs_ctrl.on_add)
    if isinstance(self.model, ContainerStateModel):
        view['add_scoped_variable_button'].connect('clicked', self.scopes_ctrl.on_add)
    view['remove_input_port_button'].connect('clicked', self.inputs_ctrl.on_remove)
    view['remove_output_port_button'].connect('clicked', self.outputs_ctrl.on_remove)
    if isinstance(self.model, ContainerStateModel):
        view['remove_scoped_variable_button'].connect('clicked', self.scopes_ctrl.on_remove)
    # ports of library states (or states inside a library) are read-only
    if isinstance(self.model, LibraryStateModel) or self.model.state.get_next_upper_library_root_state():
        view['add_input_port_button'].set_sensitive(False)
        view['remove_input_port_button'].set_sensitive(False)
        view['add_output_port_button'].set_sensitive(False)
        view['remove_output_port_button'].set_sensitive(False)
        view['add_scoped_variable_button'].set_sensitive(False)
        view['remove_scoped_variable_button'].set_sensitive(False)
    view.inputs_view.show()
    view.outputs_view.show()
    view.scopes_view.show()
    view.outcomes_view.show()
    view.transitions_view.show()
    view.data_flows_view.show()
    # show scoped variables if show content is enabled -> if disabled the tab stays and indicates a container state
    if isinstance(self.model, LibraryStateModel) and not self.model.show_content():
        view.scopes_view.hide()
        view.linkage_overview.scope_view.hide()
    # Container states do not have a source editor and library states does not show there source code
    # Thus, for those states we do not have to add the source controller and can hide the source code tab
    # logger.info("init state: {0}".format(model))
    lib_with_and_ES_as_root = isinstance(self.model, LibraryStateModel) and \
        not isinstance(self.model.state_copy, ContainerStateModel)
    if not isinstance(self.model, ContainerStateModel) and not isinstance(self.model, LibraryStateModel) or \
            lib_with_and_ES_as_root:
        view.source_view.show()
        if isinstance(self.model, LibraryStateModel) and not self.model.show_content():
            view.remove_source_tab()
        view.remove_scoped_variables_tab()
    else:
        view.scopes_view.show()
        if isinstance(self.model, LibraryStateModel) and \
                (not self.model.show_content() or not isinstance(self.model.state_copy, ContainerStateModel)):
            view.remove_scoped_variables_tab()
            view.remove_source_tab()
    # choose which tab is raised initially
    if global_gui_config.get_config_value("SEMANTIC_DATA_MODE", False):
        view.bring_tab_to_the_top('Semantic Data')
    else:
        if isinstance(self.model.state, LibraryState):
            view.bring_tab_to_the_top('Description')
        else:
            view.bring_tab_to_the_top('Linkage Overview')
    if isinstance(self.model, ContainerStateModel):
        self.scopes_ctrl.reload_scoped_variables_list_store()
    # allow plugins to customize the editor after registration
    plugins.run_hook("post_state_editor_register_view", self)
3.348031
3.379374
0.990725
def state_type_changed(self, model, prop_name, info):
    """Reopen state editor when state type is changed

    When the type of the observed state changes, a new model is created. The
    look of this controller's view depends on the kind of model. Therefore,
    we have to destroy this editor and open a new one with the new model.
    """
    msg = info['arg']
    if msg.action not in ['change_state_type', 'change_root_state_type'] or not msg.after:
        return
    import rafcon.gui.singleton as gui_singletons
    new_state_m = msg.affected_models[-1]
    states_editor_ctrl = gui_singletons.main_window_controller.get_controller('states_editor_ctrl')
    states_editor_ctrl.recreate_state_editor(self.model, new_state_m)
6.038775
5.350435
1.128651
def state_destruction(self, model, prop_name, info):
    """Close state editor when the observed state is being destructed"""
    import rafcon.gui.singleton as gui_singletons
    editors_ctrl = gui_singletons.main_window_controller.get_controller('states_editor_ctrl')
    identifier = editors_ctrl.get_state_identifier(self.model)
    editors_ctrl.close_page(identifier, delete=True)
6.941753
5.956321
1.165443
def prepare_destruction(self):
    """Prepares the model for destruction

    Unregister itself as observer from the state machine and the root state.
    """
    if self.state_machine is None:
        # Bug fix: the original only logged here and then crashed with an
        # AttributeError on self.state_machine.modification_lock() below.
        # Destruction already took place, so there is nothing left to do.
        logger.verbose("Multiple calls of prepare destruction for {0}".format(self))
        return
    self.destruction_signal.emit()
    if self.history is not None:
        self.history.prepare_destruction()
    if self.auto_backup is not None:
        self.auto_backup.prepare_destruction()
    try:
        self.unregister_observer(self)
        self.root_state.unregister_observer(self)
    except KeyError:  # Might happen if the observer was already unregistered
        pass
    with self.state_machine.modification_lock():
        self.root_state.prepare_destruction()
        self.root_state = None
        self.state_machine = None
    super(StateMachineModel, self).prepare_destruction()
4.244893
3.459887
1.226888
def meta_changed(self, model, prop_name, info):
    """When the meta was changed, we have to set the dirty flag, as the changes are unsaved"""
    self.state_machine.marked_dirty = True
    msg = info.arg
    if model is self or not msg.change.startswith('sm_notification_'):
        return
    # Signal was caused by the root state:
    # Emit state_meta_signal to inform observing controllers about changes made to the meta data within the
    # state machine
    # -> removes mark of "sm_notification_"-prepend to mark root-state msg forwarded to state machine label
    forwarded_msg = msg._replace(change=msg.change.replace('sm_notification_', '', 1))
    self.state_meta_signal.emit(forwarded_msg)
17.264606
16.710831
1.033139
# print("ACTION_signal_triggered state machine: ", model, prop_name, info) self.state_machine.marked_dirty = True msg = info.arg if model is not self and msg.action.startswith('sm_notification_'): # Signal was caused by the root state # Emit state_action_signal to inform observing controllers about changes made to the state within the # state machine # print("DONE1 S", self.state_machine.state_machine_id, msg, model) # -> removes mark of "sm_notification_"-prepend to mark root-state msg forwarded to state machine label msg = msg._replace(action=msg.action.replace('sm_notification_', '', 1)) self.state_action_signal.emit(msg) # print("FINISH DONE1 S", self.state_machine.state_machine_id, msg) else: # print("DONE2 S", self.state_machine.state_machine_id, msg) pass
def action_signal_triggered(self, model, prop_name, info)
When the action was performed, we have to set the dirty flag, as the changes are unsaved
7.840683
7.596233
1.03218
def get_state_model_by_path(self, path):
    """Returns the `StateModel` for the given `path`

    Searches a `StateModel` in the state machine whose path is given by `path`.

    :param str path: Path of the searched state
    :return: The state with that path
    :rtype: StateModel
    :raises: ValueError, if path is invalid/not existing with this state machine
    """
    state_ids = path.split('/')[1:]  # first element is the root state itself
    model = self.root_state
    for state_id in state_ids:
        if isinstance(model, ContainerStateModel):
            if state_id not in model.states:
                raise ValueError("Invalid path: State with id '{}' not found in state with id {}".format(
                    state_id, model.state.state_id))
            model = model.states[state_id]
        elif isinstance(model, LibraryStateModel):
            if state_id != model.state_copy.state.state_id:
                raise ValueError("Invalid path: state id '{}' does not coincide with state id '{}' of state_copy "
                                 "of library state with id '{}'".format(
                                     state_id, model.state_copy.state.state_id, model.state.state_id))
            model = model.state_copy
        else:
            raise ValueError("Invalid path: State with id '{}' has no children".format(
                model.state.state_id))
    return model
2.020073
2.077799
0.972217
def load_meta_data(self, path=None, recursively=True):
    """Load meta data of state machine model from the file system

    The meta data of the state machine model is loaded from the file system
    and stored in the meta property of the model. Existing meta data is
    removed. Also the meta data of root state and children is loaded.

    :param str path: Optional path to the meta data file. If not given, the
        path will be derived from the state machine's path on the filesystem
    :param bool recursively: Also load the meta data of the root state
    """
    base_path = path if path is not None else self.state_machine.file_system_path
    tmp_meta = {}
    if base_path:
        meta_file = os.path.join(base_path, storage.FILE_NAME_META_DATA)
        try:
            tmp_meta = storage.load_data_file(meta_file)
        except ValueError:
            tmp_meta = {}
    # JSON returns a dict, which must be converted to a Vividict
    tmp_meta = Vividict(tmp_meta)
    if recursively:
        root_state_path = os.path.join(path, self.root_state.state.state_id) if path else None
        self.root_state.load_meta_data(root_state_path)
    if tmp_meta:
        # assign the meta data to the state
        self.meta = tmp_meta
        self.meta_signal.emit(MetaSignalMsg("load_meta_data", "all", True))
3.990304
3.837542
1.039807
def store_meta_data(self, copy_path=None):
    """Save meta data of the state machine model to the file system

    This method generates a dictionary of the meta data of the state machine
    and stores it on the filesystem.

    :param str copy_path: Optional, if the path is specified, it will be used
        instead of the file system path
    """
    target_dir = copy_path if copy_path else self.state_machine.file_system_path
    meta_file_json = os.path.join(target_dir, storage.FILE_NAME_META_DATA)
    storage_utils.write_dict_to_json(self.meta, meta_file_json)
    self.root_state.store_meta_data(copy_path)
3.101176
2.917665
1.062896
def convert_trees(self, ptb_trees, representation='basic', universal=True, include_punct=True,
                  include_erased=False, **kwargs):
    """Convert a list of Penn Treebank formatted strings (ptb_trees) into
    Stanford Dependencies.

    The dependencies are represented as a list of sentences (CoNLL.Corpus),
    where each sentence (CoNLL.Sentence) is itself a list of CoNLL.Token
    objects. Currently supported representations are 'basic', 'collapsed',
    'CCprocessed', and 'collapsedTree' which behave the same as they do in the
    CoreNLP command line tools. (note that in the online CoreNLP demo,
    'collapsed' is called 'enhanced')

    Additional arguments: universal (if True, use universal dependencies if
    they're available), include_punct (if False, punctuation tokens will not
    be included), and include_erased (if False and your representation might
    erase tokens, those tokens will be omitted from the output). See
    documentation on your backend to see if it supports further options.
    """
    kwargs.update(representation=representation, universal=universal,
                  include_punct=include_punct, include_erased=include_erased)
    converted_sentences = (self.convert_tree(tree, **kwargs) for tree in ptb_trees)
    return Corpus(converted_sentences)
2.343706
3.31104
0.707846
def setup_and_get_default_path(self, jar_base_filename):
    """Determine the user-specific install path for the Stanford Dependencies
    jar and ensure that it is writable (that is, make sure the directory
    exists). Returns the full path for where the jar file should be installed.
    """
    import errno
    import os
    install_dir = os.path.expanduser(INSTALL_DIR)
    try:
        os.makedirs(install_dir)
    except OSError as ose:
        # an already existing directory is fine; re-raise anything else
        if ose.errno != errno.EEXIST:
            raise
    return os.path.join(install_dir, jar_base_filename)
2.225731
2.093494
1.063166
def download_if_missing(self, version=None, verbose=True):
    """Download the jar for version into the jar_filename specified in the
    constructor. Will not overwrite jar_filename if it already exists.

    version defaults to DEFAULT_CORENLP_VERSION (ideally the latest but we
    can't guarantee that since PyStanfordDependencies is distributed
    separately).
    """
    if os.path.exists(self.jar_filename):
        return  # already downloaded, nothing to do
    jar_url = self.get_jar_url(version)
    if verbose:
        print("Downloading %r -> %r" % (jar_url, self.jar_filename))
    ErrorAwareURLOpener().retrieve(jar_url, filename=self.jar_filename)
2.976841
2.659747
1.11922
def _raise_on_bad_representation(representation):
    """Ensure that representation is a known Stanford Dependency
    representation (raises a ValueError if the representation is invalid)."""
    if representation in REPRESENTATIONS:
        return
    repr_desc = ', '.join(map(repr, REPRESENTATIONS))
    raise ValueError("Unknown representation: %r (should be one "
                     "of %s)" % (representation, repr_desc))
3.349579
3.134801
1.068514
if jar_filename is None: return if not isinstance(jar_filename, string_type): raise TypeError("jar_filename is not a string: %r" % jar_filename) if not os.path.exists(jar_filename): raise ValueError("jar_filename does not exist: %r" % jar_filename)
def _raise_on_bad_jar_filename(jar_filename)
Ensure that jar_filename is a valid path to a jar file.
2.100768
1.960079
1.071777
def get_jar_url(version=None):
    """Get the URL to a Stanford CoreNLP jar file with a specific version.

    These jars come from Maven since the Maven version is smaller than the
    full CoreNLP distributions. Defaults to DEFAULT_CORENLP_VERSION.
    """
    if version is None:
        version = DEFAULT_CORENLP_VERSION
    try:
        string_type = basestring  # Python 2
    except NameError:
        string_type = str  # Python 3
    if not isinstance(version, string_type):
        raise TypeError("Version must be a string or None (got %r)." % version)
    jar_filename = 'stanford-corenlp-%s.jar' % version
    return ('http://search.maven.org/remotecontent?filepath=' +
            'edu/stanford/nlp/stanford-corenlp/%s/%s' % (version, jar_filename))
2.985238
2.505481
1.191483
def get_instance(jar_filename=None, version=None, download_if_missing=True, backend='jpype', **extra_args):
    """Construct a StanfordDependencies instance.

    The backend parameter determines which backend to load (currently can be
    'subprocess' or 'jpype'). To determine which jar file is used, you must
    specify jar_filename, download_if_missing=True, and/or version:

    - If jar_filename is specified, that jar is used and the other two flags
      are ignored.
    - Otherwise, if download_if_missing, we will download a jar file from the
      Maven repository (the latest known CoreNLP version unless the version
      flag is specified, e.g. version='3.4.1'). Once downloaded, it will be
      stored in your home directory and not downloaded again.
    - If jar_filename and download_if_missing are not specified, version must
      be set to a version previously downloaded in the above step.

    All remaining keyword arguments are passed on to the backend constructor.
    If the above options are confusing, don't panic! You can leave them all
    blank -- get_instance() is designed to provide the best and latest
    available conversion settings by default.
    """
    StanfordDependencies._raise_on_bad_jar_filename(jar_filename)
    extra_args.update(jar_filename=jar_filename,
                      download_if_missing=download_if_missing,
                      version=version)
    if backend == 'jpype':
        try:
            from .JPypeBackend import JPypeBackend
            return JPypeBackend(**extra_args)
        except ImportError:
            warnings.warn('Error importing JPypeBackend, '
                          'falling back to SubprocessBackend.')
            backend = 'subprocess'
        except RuntimeError as r:
            # Bug fix: exceptions are not indexable in Python 3, so r[0]
            # itself raised a TypeError; str() works on both Python versions
            warnings.warn('RuntimeError with JPypeBackend (%s), '
                          'falling back to SubprocessBackend.' % str(r))
            backend = 'subprocess'
        except TypeError as t:
            warnings.warn('TypeError with JPypeBackend (%s), '
                          'falling back to SubprocessBackend.' % str(t))
            backend = 'subprocess'
    if backend == 'subprocess':
        from .SubprocessBackend import SubprocessBackend
        return SubprocessBackend(**extra_args)
    raise ValueError("Unknown backend: %r (known backends: "
                     "'subprocess' and 'jpype')" % backend)
1.030795
def convert_tree(self, ptb_tree, representation='basic', include_punct=True,
                 include_erased=False, add_lemmas=False, universal=True):
    """Convert a single Penn Treebank formatted tree to Stanford Dependencies.

    Arguments are as in StanfordDependencies.convert_trees but with the
    addition of add_lemmas. If add_lemmas=True, we will run the Stanford
    CoreNLP lemmatizer and fill in the lemma field.
    """
    self._raise_on_bad_input(ptb_tree)
    self._raise_on_bad_representation(representation)
    tree = self.treeReader(ptb_tree)
    if tree is None:
        raise ValueError("Invalid Penn Treebank tree: %r" % ptb_tree)
    deps = self._get_deps(tree, include_punct, representation, universal=universal)
    tagged_yield = self._listify(tree.taggedYield())
    # map 1-based token index -> tagged word object
    indices_to_words = dict(enumerate(tagged_yield, 1))
    sentence = Sentence()
    covered_indices = set()

    def add_token(index, form, head, deprel, extra):
        # helper: build a CoNLL Token for one dependency and append it
        tag = indices_to_words[index].tag()
        if add_lemmas:
            lemma = self.stem(form, tag)
        else:
            lemma = None
        token = Token(index=index, form=form, lemma=lemma, cpos=tag, pos=tag,
                      feats=None, head=head, deprel=deprel, phead=None,
                      pdeprel=None, extra=extra)
        sentence.append(token)
    # add token for each dependency
    for dep in deps:
        index = dep.dep().index()
        head = dep.gov().index()
        deprel = dep.reln().toString()
        form = indices_to_words[index].value()
        dep_is_copy = dep.dep().copyCount()
        gov_is_copy = dep.gov().copyCount()
        if dep_is_copy or gov_is_copy:
            extra = {}
            if dep_is_copy:
                extra['dep_is_copy'] = dep_is_copy
            if gov_is_copy:
                extra['gov_is_copy'] = gov_is_copy
        else:
            extra = None
        add_token(index, form, head, deprel, extra)
        covered_indices.add(index)
    if include_erased:
        # see if there are any tokens that were erased
        # and add them as well
        all_indices = set(indices_to_words.keys())
        for index in all_indices - covered_indices:
            form = indices_to_words[index].value()
            if not include_punct and not self.puncFilter(form):
                continue
            add_token(index, form, head=0, deprel='erased', extra=None)
    # erased generally disrupt the ordering of the sentence
    sentence.sort()
    if representation == 'basic':
        sentence.renumber()
    return sentence
2.712055
1.001824
def stem(self, form, tag):
    """Returns the stem of a word with a specific form and part-of-speech tag
    according to the Stanford lemmatizer. Lemmas are cached."""
    key = (form, tag)
    try:
        return self.lemma_cache[key]
    except KeyError:
        lemma = self.stemmer(*key).word()
        self.lemma_cache[key] = lemma
        return lemma
3.558271
3.145335
1.131285
def _get_deps(self, tree, include_punct, representation, universal):
    """Get a list of dependencies from a Stanford Tree for a specific
    Stanford Dependencies representation."""
    if universal:
        converter = self.universal_converter
        if self.universal_converter == self.converter:
            import warnings
            warnings.warn("This jar doesn't support universal "
                          "dependencies, falling back to Stanford "
                          "Dependencies. To suppress this message, "
                          "call with universal=False")
    else:
        converter = self.converter
    egs = converter(tree, self.acceptFilter) if include_punct else converter(tree)
    if representation == 'basic':
        deps = egs.typedDependencies()
    elif representation == 'collapsed':
        deps = egs.typedDependenciesCollapsed(True)
    elif representation == 'CCprocessed':
        deps = egs.typedDependenciesCCprocessed(True)
    else:
        # _raise_on_bad_representation should ensure that this
        # assertion doesn't fail
        assert representation == 'collapsedTree'
        deps = egs.typedDependenciesCollapsedTree()
    return self._listify(deps)
4.884078
4.555131
1.072215
new_list = [] for index in range(len(collection)): new_list.append(collection[index]) return new_list
def _listify(collection)
This is a workaround where Collections are no longer iterable when using JPype.
3.062541
2.556442
1.19797
def as_conll(self):
    """Represent this Token as a tab-separated line in CoNLL-X format."""
    values = []
    for field in FIELD_NAMES:
        value = getattr(self, field)
        if value is None:
            value = '_'  # CoNLL-X uses '_' for unset fields
        elif field == 'feats':
            value = '|'.join(value)
        values.append(str(value))
    return '\t'.join(values)
3.79736
3.306845
1.148333
def from_conll(this_class, text):
    """Construct a Token from a line in CoNLL-X format."""
    fields = text.split('\t')
    fields[0] = int(fields[0])  # index
    fields[6] = int(fields[6])  # head index
    if fields[5] != '_':  # feats
        fields[5] = tuple(fields[5].split('|'))
    # '_' marks an unset field in CoNLL-X
    values = [None if field == '_' else field for field in fields]
    values.append(None)  # placeholder for the 'extra' field
    return this_class(**dict(zip(FIELD_NAMES_PLUS, values)))
3.560025
3.377145
1.054152
def renumber(self):
    """Destructively renumber the indices based on the actual tokens (e.g.,
    if there are gaps between token indices, this will remove them).

    Old Token objects will still exist, so you'll need to update your
    references.
    """
    index_map = {0: 0}  # old index -> real index (0 stays the root)
    gap_found = False
    for position, token in enumerate(self, 1):
        index_map[token.index] = position
        gap_found = gap_found or token.index != position
    if gap_found:
        # update all indices (and heads) in one pass
        self[:] = [token._replace(index=index_map[token.index],
                                  head=index_map[token.head])
                   for token in self]
3.786036
3.319134
1.14067
def as_asciitree(self, str_func=None):
    """Represent this Sentence as an ASCII tree string.

    Requires the asciitree package. A default token stringifier is provided,
    but for custom formatting, specify a str_func which should take a single
    Token and return a string.
    """
    import asciitree
    from collections import defaultdict
    children = defaultdict(list)
    # since erased nodes may be missing, multiple tokens may have same
    # index (CCprocessed), etc., so map token objects to indices explicitly
    token_to_index = {}
    roots = []
    for token in self:
        children[token.head].append(token)
        token_to_index[token] = token.index
        if token.head == 0:
            roots.append(token)
    assert roots, "Couldn't find root Token(s)"
    if len(roots) == 1:
        root = roots[0]
    else:
        # multiple roots so we make a fake one to be their parent
        root = Token(0, 'ROOT', 'ROOT-LEMMA', 'ROOT-CPOS', 'ROOT-POS',
                     None, None, 'ROOT-DEPREL', None, None, None)
        token_to_index[root] = 0
        children[0] = roots

    def child_func(token):
        return children[token_to_index[token]]

    if not str_func:
        def str_func(token):
            return ' %s [%s]' % (token.form, token.deprel)
    return asciitree.draw_tree(root, child_func, str_func)
1.024437
def as_dotgraph(self, digraph_kwargs=None, id_prefix=None,
                node_formatter=None, edge_formatter=None):
    """Return this Sentence as a graphviz.Digraph (requires graphviz).

    digraph_kwargs are passed through to the Digraph constructor.
    Set a unique id_prefix when rendering several Sentences into the
    same graph. node_formatter/edge_formatter each take a Token (the
    child token for edges; None for the root in node_formatter) and
    return keyword arguments for graphviz's node/edge creation calls.
    """
    import graphviz
    graph = graphviz.Digraph(**(digraph_kwargs or {}))
    prefix = id_prefix or ''
    fmt_node = node_formatter or (lambda token: {})
    fmt_edge = edge_formatter or (lambda token: {})
    # the artificial root node
    graph.node(prefix + '0', 'root', **fmt_node(None))
    seen = set()
    for token in self:
        node_id = prefix + str(token.index)
        head_id = prefix + str(token.head)
        if node_id not in seen:
            graph.node(node_id, token.form, **fmt_node(token))
        graph.edge(head_id, node_id, label=token.deprel, **fmt_edge(token))
        seen.add(node_id)
    return graph
2.079849
1.893537
1.098394
def from_conll(this_class, stream):
    """Construct a Sentence from an iterable of CoNLL-X format lines.

    Only the first sentence in the stream is returned.
    """
    stream = iter(stream)
    sentence = this_class()
    for raw_line in stream:
        stripped = raw_line.strip()
        if stripped:
            sentence.append(Token.from_conll(stripped))
        elif sentence:
            # a blank line terminates a non-empty sentence
            break
    return sentence
2.936434
2.484718
1.181798
def from_stanford_dependencies(this_class, stream, tree,
                               include_erased=False, include_punct=True):
    """Construct a Sentence from Stanford Dependencies output lines.

    stream is an iterable of lines like ``deprel(gov-index,
    dep-depindex)``, as produced by the command line Stanford
    Dependency tool. The corresponding Penn Treebank formatted tree
    must be supplied as well.
    """
    stream = iter(stream)
    sentence = this_class()
    covered_indices = set()
    tags_and_words = ptb_tags_and_words_re.findall(tree)
    # basic cleanups: unescape slashes, drop empty elements
    tags_and_words = [(tag, word.replace(r'\/', '/'))
                      for (tag, word) in tags_and_words
                      if tag != '-NONE-']
    for raw_line in stream:
        if not raw_line.strip():
            if sentence:
                # blank line ends a non-empty sentence
                break
            continue
        raw_line = raw_line.replace(r'\/', '/')
        parsed = deps_re.findall(raw_line)
        assert len(parsed) == 1
        (deprel, gov_form, head, gov_is_copy,
         form, index, dep_is_copy) = parsed[0]
        index = int(index)
        tag, word = tags_and_words[index - 1]
        assert form == word
        covered_indices.add(index)
        if not include_punct and deprel == 'punct':
            continue
        if gov_is_copy or dep_is_copy:
            extra = {}
            if gov_is_copy:
                extra['gov_is_copy'] = len(gov_is_copy)
            if dep_is_copy:
                extra['dep_is_copy'] = len(dep_is_copy)
        else:
            extra = None
        sentence.append(Token(index, form, None, tag, tag, None,
                              int(head), deprel, None, None, extra))
    if include_erased:
        # words present in the tree but absent from the dependencies
        # were erased by the converter
        for index, (tag, word) in enumerate(tags_and_words, 1):
            if index in covered_indices:
                continue
            sentence.append(Token(index, word, None, tag, tag, None,
                                  0, 'erased', None, None, None))
    sentence.sort()
    return sentence
2.871253
2.900183
0.990025
def from_conll(this_class, stream):
    """Construct a Corpus from an iterable of CoNLL-X format lines."""
    stream = iter(stream)
    corpus = this_class()
    while True:
        sentence = Sentence.from_conll(stream)
        if not sentence:
            # an empty sentence means the stream is exhausted
            break
        corpus.append(sentence)
    return corpus
3.867441
3.389801
1.140905
def from_stanford_dependencies(this_class, stream, trees,
                               include_erased=False, include_punct=True):
    """Construct a Corpus from Stanford Dependencies output lines.

    stream holds lines like ``deprel(gov-index, dep-depindex)`` with
    sentences separated by blank lines; trees is the matching list of
    Penn Treebank formatted trees.
    """
    stream = iter(stream)
    corpus = this_class()
    for ptb_tree in trees:
        parsed = Sentence.from_stanford_dependencies(stream, ptb_tree,
                                                     include_erased,
                                                     include_punct)
        corpus.append(parsed)
    return corpus
2.873633
3.29577
0.871916
def convert_trees(self, ptb_trees, representation='basic',
                  include_punct=True, include_erased=False,
                  universal=True, debug=False):
    """Convert Penn Treebank formatted trees to Stanford Dependencies.

    Returns a list of sentences, each a list of Token objects.
    Supported representations: 'basic', 'collapsed', 'CCprocessed',
    and 'collapsedTree', matching the CoreNLP command line tools
    (the online CoreNLP demo calls 'collapsed' "enhanced").

    :param ptb_trees: list of PTB-formatted tree strings
    :param debug: print the java command and its output
    :raises ValueError: if a tree produces an empty sentence
    """
    self._raise_on_bad_representation(representation)
    input_file = tempfile.NamedTemporaryFile(delete=False)
    try:
        for ptb_tree in ptb_trees:
            self._raise_on_bad_input(ptb_tree)
            tree_with_line_break = ptb_tree + "\n"
            input_file.write(tree_with_line_break.encode("utf-8"))
        input_file.flush()
        input_file.close()
        command = [self.java_command, '-ea', '-cp', self.jar_filename,
                   JAVA_CLASS_NAME, '-' + representation,
                   '-treeFile', input_file.name]
        # if we're including erased, we want to include punctuation
        # since otherwise we won't know what SD considers punctuation
        if include_punct or include_erased:
            command.append('-keepPunct')
        if not universal:
            command.append('-originalDependencies')
        if debug:
            print('Command:', ' '.join(command))
        sd_process = subprocess.Popen(command, stdout=subprocess.PIPE,
                                      stderr=subprocess.PIPE,
                                      universal_newlines=True)
        return_code = sd_process.wait()
        stderr = sd_process.stderr.read()
        stdout = sd_process.stdout.read()
        if debug:
            print("stdout: {%s}" % stdout)
            print("stderr: {%s}" % stderr)
            print('Exit code:', return_code)
        self._raise_on_bad_exit_or_output(return_code, stderr)
    finally:
        os.remove(input_file.name)
    try:
        sentences = Corpus.from_stanford_dependencies(stdout.splitlines(),
                                                      ptb_trees,
                                                      include_erased,
                                                      include_punct)
        for sentence, ptb_tree in zip(sentences, ptb_trees):
            if len(sentence) == 0:
                raise ValueError("Invalid PTB tree: %r" % ptb_tree)
    # was a bare except; made explicit — the error is re-raised either way
    except BaseException:
        print("Error during conversion")
        if not debug:
            print("stdout: {%s}" % stdout)
            print("stderr: {%s}" % stderr)
        raise
    assert len(sentences) == len(ptb_trees), \
        "Only got %d sentences from Stanford Dependencies when " \
        "given %d trees." % (len(sentences), len(ptb_trees))
    return sentences
2.795694
2.777072
1.006706
def endpoint_from_name(endpoint_name):
    """Return the object for interacting with the named relation, or None."""
    if endpoint_name is None:
        return None
    factory = relation_factory(endpoint_name)
    return factory.from_name(endpoint_name) if factory else None
4.345373
3.134745
1.386196
def endpoint_from_flag(flag):
    """Return the object for interacting with relations tied to a flag, or None."""
    relation_name = None
    stored = _get_flag_value(flag)
    if isinstance(stored, dict) and 'relation' in stored:
        # old-style RelationBase
        relation_name = stored['relation']
    elif flag.startswith('endpoint.'):
        # new-style Endpoint
        relation_name = flag.split('.')[1]
    elif '.' in flag:
        # might be an unprefixed new-style Endpoint
        relation_name = flag.split('.')[0]
        if relation_name not in hookenv.relation_types():
            return None
    if relation_name:
        factory = relation_factory(relation_name)
        if factory:
            return factory.from_flag(flag)
    return None
3.566146
3.471278
1.027329
def relation_factory(relation_name):
    """Get the RelationFactory for the given relation name.

    Looks for a RelationFactory in the first file matching:
    ``$CHARM_DIR/hooks/relations/{interface}/{provides,requires,peer}.py``
    """
    role, interface = hookenv.relation_to_role_and_interface(relation_name)
    if role and interface:
        return _find_relation_factory(_relation_module(role, interface))
    hookenv.log('Unable to determine role and interface for relation '
                '{}'.format(relation_name), hookenv.ERROR)
    return None
3.619728
3.878629
0.933249
def _relation_module(role, interface):
    """Return the module implementing a relation, or None.

    Prefers the new location (reactive/relations) over the old
    (hooks/relations).
    """
    _append_path(hookenv.charm_dir())
    _append_path(os.path.join(hookenv.charm_dir(), 'hooks'))
    legacy_name = 'relations.{}.{}'.format(interface, role)
    for candidate in ('reactive.{}'.format(legacy_name), legacy_name):
        if candidate in sys.modules:
            break
        try:
            importlib.import_module(candidate)
            break
        except ImportError:
            continue
    else:
        # neither module could be imported
        hookenv.log('Unable to find implementation for relation: '
                    '{} of {}'.format(role, interface), hookenv.ERROR)
        return None
    return sys.modules[candidate]
2.97548
2.921687
1.018412
def _find_relation_factory(module):
    """Attempt to find a RelationFactory subclass in the module.

    RelationFactory and RelationBase themselves are ignored, so they
    may be imported to be used as base classes without fear.
    """
    if not module:
        return None
    # every RelationFactory subclass defined (or imported) in the module
    candidates = [obj
                  for obj in (getattr(module, attr) for attr in dir(module))
                  if (obj is not RelationFactory and obj is not RelationBase and
                      isclass(obj) and issubclass(obj, RelationFactory))]
    # drop factories that are superclasses of another candidate; this
    # usually makes the explicit RelationBase/RelationFactory check
    # above unnecessary
    candidates = [base for base in candidates
                  if not any(issubclass(other, base)
                             for other in candidates if base is not other)]
    if not candidates:
        hookenv.log('No RelationFactory found in {}'.format(module.__name__),
                    hookenv.WARNING)
        return None
    if len(candidates) > 1:
        raise RuntimeError('Too many RelationFactory found in {}'
                           ''.format(module.__name__))
    return candidates[0]
3.762451
3.510966
1.071629
def _migrate_conversations():  # noqa
    """Migrate stored conversations to the namespaced-per-relation-ID format.

    Due to issue #28
    (https://github.com/juju-solutions/charms.reactive/issues/28),
    conversations with SERVICE and UNIT scope must be namespaced per
    relation ID. This rewrites all stored conversations from the old
    format to the new, for backwards compatibility.

    TODO: Remove in 2.0.0
    """
    for key, data in unitdata.kv().getrange('reactive.conversations.').items():
        if 'local-data' in key:
            continue
        if 'namespace' in data:
            # already migrated
            continue
        relation_name = data.pop('relation_name')
        if data['scope'] == scopes.GLOBAL:
            data['namespace'] = relation_name
            unitdata.kv().set(key, data)
        else:
            # split the conv based on the relation ID
            new_keys = []
            for rel_id in hookenv.relation_ids(relation_name):
                new_key = Conversation._key(rel_id, data['scope'])
                new_units = set(hookenv.related_units(rel_id)) & set(data['units'])
                if new_units:
                    unitdata.kv().set(new_key, {
                        'namespace': rel_id,
                        'scope': data['scope'],
                        'units': sorted(new_units),
                    })
                    new_keys.append(new_key)
            unitdata.kv().unset(key)
            # update the states pointing to the old conv key to point
            # to the (potentially multiple) new key(s)
            for flag in get_flags():
                value = _get_flag_value(flag)
                if not value:
                    continue
                if key not in value['conversations']:
                    continue
                value['conversations'].remove(key)
                value['conversations'].extend(new_keys)
                set_flag(flag, value)
3.545489
3.228775
1.098091
def relation_call(method, relation_name=None, flag=None, state=None, *args):
    """Invoke a method on the class implementing a relation via the CLI."""
    if relation_name:
        relation = relation_from_name(relation_name)
        if relation is None:
            raise ValueError('Relation not found: %s' % relation_name)
    elif flag or state:
        relation = relation_from_flag(flag or state)
        if relation is None:
            raise ValueError('Relation not found: %s' % (flag or state))
    else:
        raise ValueError('Must specify either relation_name or flag')
    result = getattr(relation, method)(*args)
    if isinstance(relation, RelationBase) and method == 'conversations':
        # special case for conversations to make them work from CLI
        result = [conv.scope for conv in result]
    return result
2.919402
2.904553
1.005112
def from_flag(cls, flag):
    """Find the relation implementation for an active flag, or None.

    You should not use this method directly; use
    :func:`endpoint_from_flag` instead.
    """
    stored = _get_flag_value(flag)
    if stored is None:
        return None
    return cls.from_name(stored['relation'],
                         Conversation.load(stored['conversations']))
5.755527
5.652489
1.018229
def from_name(cls, relation_name, conversations=None):
    """Find the relation implementation by relation name.

    :return: A Relation instance, or None
    """
    if relation_name is None:
        return None
    cached = cls._cache.get(relation_name)
    if cached:
        return cached(relation_name, conversations)
    role, interface = hookenv.relation_to_role_and_interface(relation_name)
    if role and interface:
        impl = cls._find_impl(role, interface)
        if impl:
            cls._cache[relation_name] = impl
            return impl(relation_name, conversations)
    return None
2.465309
2.344235
1.051648
def _find_impl(cls, role, interface):
    """Find the relation implementation for a role and interface, or None."""
    module = _relation_module(role, interface)
    return cls._find_subclass(module) if module else None
7.927527
5.375884
1.474646
def _find_subclass(cls, module):
    """Find a strict subclass of RelationBase in the given module.

    RelationBase itself is skipped, so it can be imported to be used
    as a base class without being picked up.
    """
    members = (getattr(module, attr) for attr in dir(module))
    for obj in members:
        if isclass(obj) and issubclass(obj, cls) and obj is not RelationBase:
            return obj
    return None
3.760832
2.814907
1.336041
def conversation(self, scope=None):
    """Get this relation's single conversation for a scope.

    If scope is not given it is inferred from the current hook
    execution context; with no hook context, a single global
    conversation scope is assumed. Raises ValueError if the scope
    cannot be determined or no conversation matches.
    """
    if scope is None:
        if self.scope is scopes.UNIT:
            scope = hookenv.remote_unit()
        elif self.scope is scopes.SERVICE:
            scope = hookenv.remote_service_name()
        else:
            scope = self.scope
        if scope is None:
            raise ValueError('Unable to determine default scope: no current hook or global scope')
    for conv in self._conversations:
        if conv.scope == scope:
            return conv
    raise ValueError("Conversation with scope '%s' not found" % scope)
3.621497
2.969781
1.219449
def toggle_state(self, state, active=TOGGLE, scope=None):
    """Toggle the state on the :class:`Conversation` with the given scope.

    Equivalent to ``relation.conversation(scope).toggle_state(state, active)``.
    See :meth:`conversation` and :meth:`Conversation.toggle_state`.
    """
    conv = self.conversation(scope)
    conv.toggle_state(state, active)
13.532097
5.304676
2.550975
def set_remote(self, key=None, value=None, data=None, scope=None, **kwdata):
    """Set data for the remote end(s) of the :class:`Conversation` with the given scope.

    Equivalent to ``relation.conversation(scope).set_remote(key, value, data, **kwdata)``.
    See :meth:`conversation` and :meth:`Conversation.set_remote`.
    """
    conv = self.conversation(scope)
    conv.set_remote(key, value, data, **kwdata)
6.136528
3.759795
1.632144
def get_remote(self, key, default=None, scope=None):
    """Get data from the remote end(s) of the :class:`Conversation` with the given scope.

    Equivalent to ``relation.conversation(scope).get_remote(key, default)``.
    See :meth:`conversation` and :meth:`Conversation.get_remote`.
    """
    conv = self.conversation(scope)
    return conv.get_remote(key, default)
11.706836
5.720131
2.046603
def set_local(self, key=None, value=None, data=None, scope=None, **kwdata):
    """Locally store data, namespaced by the current or given :class:`Conversation` scope.

    Equivalent to ``relation.conversation(scope).set_local(key, value, data, **kwdata)``.
    See :meth:`conversation` and :meth:`Conversation.set_local`.
    """
    conv = self.conversation(scope)
    conv.set_local(key, value, data, **kwdata)
6.070638
4.061594
1.494644
def get_local(self, key, default=None, scope=None):
    """Retrieve data previously set via :meth:`set_local`.

    Equivalent to ``relation.conversation(scope).get_local(key, default)``.
    See :meth:`conversation` and :meth:`Conversation.get_local`.
    """
    conv = self.conversation(scope)
    return conv.get_local(key, default)
11.084932
5.219571
2.123725
def relation_ids(self):
    """The IDs of the relation instances this conversation speaks to."""
    if self.scope == scopes.GLOBAL:
        # a global conversation speaks for every connected instance of
        # the relation, whose name is the namespace
        return hookenv.relation_ids(self.namespace)
    # otherwise the namespace is a single relation ID
    return [self.namespace]
11.226216
9.503194
1.18131
def join(cls, scope):
    """Get or create a conversation for the given scope and active hook context.

    The current remote unit is added to the conversation.

    Note: This uses :mod:`charmhelpers.core.unitdata` and requires
    :meth:`~charmhelpers.core.unitdata.Storage.flush` to be called.
    """
    relation_name = hookenv.relation_type()
    relation_id = hookenv.relation_id()
    unit = hookenv.remote_unit()
    service = hookenv.remote_service_name()
    if scope is scopes.UNIT:
        scope = unit
        namespace = relation_id
    elif scope is scopes.SERVICE:
        scope = service
        namespace = relation_id
    else:
        namespace = relation_name
    key = cls._key(namespace, scope)
    stored = unitdata.kv().get(key, {'namespace': namespace,
                                     'scope': scope,
                                     'units': []})
    conversation = cls.deserialize(stored)
    conversation.units.add(unit)
    unitdata.kv().set(key, cls.serialize(conversation))
    return conversation
3.432578
3.166017
1.084194
def depart(self):
    """Remove the current remote unit from this conversation.

    This should be called from a `-departed` hook.
    """
    departing_unit = hookenv.remote_unit()
    self.units.remove(departing_unit)
    if self.units:
        unitdata.kv().set(self.key, self.serialize(self))
    else:
        # no units left; drop the stored conversation entirely
        unitdata.kv().unset(self.key)
5.853467
4.710832
1.242555
def serialize(cls, conversation):
    """Serialize a conversation instance for storage."""
    return {
        'namespace': conversation.namespace,
        'scope': conversation.scope,
        'units': sorted(conversation.units),
    }
7.271805
6.211757
1.170652
def load(cls, keys):
    """Load a set of conversations by their keys."""
    stored = (unitdata.kv().get(key) for key in keys)
    return [cls.deserialize(data) for data in stored if data]
4.639656
3.887758
1.193402
def set_state(self, state):
    """Activate the given state and add this conversation to it.

    '{relation_name}' is interpolated into the state name; include it
    to avoid conflicts with states from other relations, e.g.::

        conversation.set_state('{relation_name}.state')

    Note: This uses :mod:`charmhelpers.core.unitdata` and requires
    :meth:`~charmhelpers.core.unitdata.Storage.flush` to be called.
    """
    state = state.format(relation_name=self.relation_name)
    record = _get_flag_value(state, {
        'relation': self.relation_name,
        'conversations': [],
    })
    if self.key not in record['conversations']:
        record['conversations'].append(self.key)
    set_flag(state, record)
5.099207
3.859271
1.321288
def remove_state(self, state):
    """Remove this conversation from the given state.

    The state itself is deactivated once no conversations remain in
    it. '{relation_name}' is interpolated into the state name, and it
    is recommended to include it to avoid conflicts with states from
    other relations, e.g.::

        conversation.remove_state('{relation_name}.state')
    """
    state = state.format(relation_name=self.relation_name)
    record = _get_flag_value(state)
    if not record:
        return
    if self.key in record['conversations']:
        record['conversations'].remove(self.key)
    if record['conversations']:
        set_flag(state, record)
    else:
        clear_flag(state)
4.262163
3.427171
1.243639
def is_state(self, state):
    """Test if this conversation is in the given state."""
    record = _get_flag_value(state.format(relation_name=self.relation_name))
    if not record:
        return False
    return self.key in record['conversations']
8.567874
6.900054
1.241711
def toggle_state(self, state, active=TOGGLE):
    """Toggle the given state for this conversation.

    The state is set when ``active`` is True and removed when it is
    False; when ``active`` is omitted, the current state is inverted.
    For example::

        conv.toggle_state('{relation_name}.foo', value == 'foo')

    This sets the state if ``value`` is equal to ``foo``.
    """
    if active is TOGGLE:
        active = not self.is_state(state)
    (self.set_state if active else self.remove_state)(state)
2.583246
5.163998
0.500242
def set_remote(self, key=None, value=None, data=None, **kwdata):
    """Set data for the remote end(s) of this conversation.

    Data can be passed in either as a single dict, or as key-word
    args. Note that, in Juju, setting relation data is inherently
    service scoped: if this conversation's scope encompasses multiple
    services, the data is set for all of them.

    :param str key: The name of a field to set.
    :param value: A value to set. Must be json serializable.
    :param dict data: A mapping of keys to values (not mutated).
    :param **kwdata: A mapping of keys to values, as keyword arguments.
    """
    # copy so the caller's dict is never mutated
    merged = dict(data) if data else {}
    if key is not None:
        merged[key] = value
    merged.update(kwdata)
    if not merged:
        return
    for relation_id in self.relation_ids:
        hookenv.relation_set(relation_id, merged)
3.120999
3.298361
0.946227
def get_remote(self, key, default=None):
    """Get a value from the remote end(s) of this conversation.

    When the conversation spans multiple units, those units are
    expected to agree on their data (via a single leader or eventual
    convergence), so the first value found set by any unit is
    returned.
    """
    current_rid = hookenv.relation_id()
    in_departed = hookenv.hook_name().endswith('-relation-departed')
    for relation_id in self.relation_ids:
        members = hookenv.related_units(relation_id)
        if in_departed and current_rid == relation_id:
            # Work around the fact that Juju 2.0 doesn't include the
            # departing unit in relation-list during the -departed hook,
            # by adding it back in ourselves.
            members.append(hookenv.remote_unit())
        for unit in members:
            if unit not in self.units:
                continue
            found = hookenv.relation_get(key, unit, relation_id)
            if found:
                return found
    return default
4.226774
4.213329
1.003191
def set_local(self, key=None, value=None, data=None, **kwdata):
    """Locally store data associated with this conversation.

    Data can be passed in either as a single dict, or as key-word
    args. Useful e.g. for remembering the previous value of a remote
    field to detect changes::

        prev = conversation.get_local('field')
        curr = conversation.get_remote('field')
        if prev != curr:
            handle_change(prev, curr)
            conversation.set_local('field', curr)

    Note: This uses :mod:`charmhelpers.core.unitdata` and requires
    :meth:`~charmhelpers.core.unitdata.Storage.flush` to be called.

    :param str key: The name of a field to set.
    :param value: A value to set. Must be json serializable.
    :param dict data: A mapping of keys to values (not mutated).
    :param **kwdata: A mapping of keys to values, as keyword arguments.
    """
    # copy so the caller's dict is never mutated
    merged = dict(data) if data else {}
    if key is not None:
        merged[key] = value
    merged.update(kwdata)
    if not merged:
        return
    unitdata.kv().update(merged, prefix='%s.%s.' % (self.key, 'local-data'))
4.152603
4.566528
0.909357
def get_local(self, key, default=None):
    """Retrieve data previously stored via :meth:`set_local` for this conversation."""
    full_key = '%s.%s.%s' % (self.key, 'local-data', key)
    return unitdata.kv().get(full_key, default)
6.180571
5.982132
1.033172
def set_flag(flag, value=None):
    """Set the given flag as active.

    :param str flag: Name of flag to set.

    .. note:: Changes to flags take effect immediately but are only
       persisted at the end of a complete and successful run of the
       reactive framework; unpersisted changes are discarded when a
       hook crashes.
    """
    previously_set = get_flags()
    unitdata.kv().update({flag: value}, prefix='reactive.states.')
    if flag not in previously_set:
        tracer().set_flag(flag)
    FlagWatch.change(flag)
    # fire any triggers registered on this flag being set
    trigger = _get_trigger(flag, None)
    for name in trigger['set_flag']:
        set_flag(name)
    for name in trigger['clear_flag']:
        clear_flag(name)
6.728759
6.786707
0.991461
def clear_flag(flag):
    """Clear / deactivate a flag.

    :param str flag: Name of flag to clear.

    .. note:: Changes to flags take effect immediately but are only
       persisted at the end of a complete and successful run of the
       reactive framework; unpersisted changes are discarded when a
       hook crashes.
    """
    previously_set = get_flags()
    unitdata.kv().unset('reactive.states.%s' % flag)
    unitdata.kv().set('reactive.dispatch.removed_state', True)
    if flag in previously_set:
        tracer().clear_flag(flag)
    FlagWatch.change(flag)
    # fire any triggers registered on this flag being cleared
    trigger = _get_trigger(None, flag)
    for name in trigger['set_flag']:
        set_flag(name)
    for name in trigger['clear_flag']:
        clear_flag(name)
6.320064
6.580816
0.960377
def register_trigger(when=None, when_not=None, set_flag=None, clear_flag=None):
    """Register a trigger to set or clear a flag when another flag changes.

    Triggers are handled at the moment the watched flag is set or
    cleared. Exactly one of `when` or `when_not`, and at least one of
    `set_flag` or `clear_flag`, must be provided.

    :param str when: Flag to trigger on when it is set.
    :param str when_not: Flag to trigger on when it is cleared.
    :param str set_flag: If given, this flag will be set when triggered.
    :param str clear_flag: If given, this flag will be cleared when triggered.
    """
    if not (when or when_not):
        raise ValueError('Must provide one of when or when_not')
    if when and when_not:
        raise ValueError('Only one of when or when_not can be provided')
    if not (set_flag or clear_flag):
        raise ValueError('Must provide at least one of set_flag or clear_flag')
    trigger = _get_trigger(when, when_not)
    if set_flag and set_flag not in trigger['set_flag']:
        trigger['set_flag'].append(set_flag)
    if clear_flag and clear_flag not in trigger['clear_flag']:
        trigger['clear_flag'].append(clear_flag)
    _save_trigger(when, when_not, trigger)
1.837488
1.874506
0.980252
def get_flags():
    """Return a sorted list of all flags which are currently set."""
    stored = unitdata.kv().getrange('reactive.states.', strip=True) or {}
    return sorted(stored)
24.088924
18.59412
1.295513
def dispatch(restricted=False):
    """Dispatch registered handlers.

    In restricted mode, only matching hook handlers are executed.

    Handlers are repeatedly tested and invoked in iterations until the
    system settles: hook/action handlers run in the first phase, other
    handlers in subsequent iterations. Added flags only take effect on
    the next iteration, while removed flags cause the currently
    matched handlers to be re-tested so nothing runs after its flag is
    gone. Beyond that, invocation order is undefined, and flags
    persist between hook and action invocations.
    """
    FlagWatch.reset()

    def _matching(candidates):
        return list(filter(lambda h: h.test(), candidates))

    def _run(queue):
        while queue:
            unitdata.kv().set('reactive.dispatch.removed_state', False)
            for handler in list(queue):
                queue.remove(handler)
                hookenv.log('Invoking reactive handler: %s' % handler.id(),
                            level=hookenv.INFO)
                handler.invoke()
                if unitdata.kv().get('reactive.dispatch.removed_state'):
                    # re-test remaining handlers
                    queue = _matching(queue)
                    break
        FlagWatch.commit()

    tracer().start_dispatch()
    if restricted:
        # When in restricted context, only run hooks for that context.
        unitdata.kv().set('reactive.dispatch.phase', 'restricted')
        hook_handlers = _matching(Handler.get_handlers())
        tracer().start_dispatch_phase('restricted', hook_handlers)
        _run(hook_handlers)
        return

    unitdata.kv().set('reactive.dispatch.phase', 'hooks')
    hook_handlers = _matching(Handler.get_handlers())
    tracer().start_dispatch_phase('hooks', hook_handlers)
    _run(hook_handlers)

    unitdata.kv().set('reactive.dispatch.phase', 'other')
    for iteration in range(100):
        FlagWatch.iteration(iteration)
        other_handlers = _matching(Handler.get_handlers())
        if iteration == 0:
            tracer().start_dispatch_phase('other', other_handlers)
        tracer().start_dispatch_iteration(iteration, other_handlers)
        if not other_handlers:
            break
        _run(other_handlers)
    FlagWatch.reset()
3.339227
3.35185
0.996234
def discover():
    """Discover and register handlers based on convention.

    Handlers are loaded from ``$CHARM_DIR/reactive/``,
    ``$CHARM_DIR/hooks/reactive/`` and ``$CHARM_DIR/hooks/relations/``
    (and their subdirectories). Python files are imported and their
    decorated functions registered; executables must adhere to the
    :class:`ExternalHandler` protocol.
    """
    # Add $CHARM_DIR and $CHARM_DIR/hooks to sys.path so
    # 'import reactive.leadership', 'import relations.pgsql' works
    # as expected, as well as relative imports like 'import ..leadership'
    # or 'from . import leadership'. Without this, it becomes difficult
    # for layers to access APIs provided by other layers. This addition
    # needs to remain in effect, in case discovered modules are doing
    # late imports.
    charm_dir = hookenv.charm_dir()
    _append_path(charm_dir)
    _append_path(os.path.join(charm_dir, 'hooks'))
    for subdir in ('reactive', 'hooks/reactive', 'hooks/relations'):
        root = os.path.join(charm_dir, subdir)
        for dirpath, _dirnames, filenames in os.walk(root):
            for filename in filenames:
                _register_handlers_from_file(root,
                                             os.path.join(dirpath, filename))
5.69672
5.011592
1.136709
def get(cls, action, suffix=None):
    """Get or register a handler for the given action.

    :param func action: Callback that is called when invoking the Handler
    :param func suffix: Optional suffix for the handler's ID
    """
    action_id = _action_id(action, suffix)
    if action_id not in cls._HANDLERS:
        if LOG_OPTS['register']:
            hookenv.log('Registering reactive handler for %s' %
                        _short_action_id(action, suffix),
                        level=hookenv.DEBUG)
        cls._HANDLERS[action_id] = cls(action, suffix)
    return cls._HANDLERS[action_id]
3.962452
4.238814
0.934802
def add_predicate(self, predicate):
    """Add a new predicate callback to this handler."""
    # render partials readably in the debug log; plain callables repr fine
    desc = predicate
    if isinstance(predicate, partial):
        desc = 'partial(%s, %s, %s)' % (predicate.func, predicate.args, predicate.keywords)
    if LOG_OPTS['register']:
        hookenv.log(' Adding predicate for %s: %s' % (self.id(), desc),
                    level=hookenv.DEBUG)
    self._predicates.append(predicate)
Add a new predicate callback to this handler.
4.330152
4.22278
1.025427
if not hasattr(self, '_args_evaled'): # cache the args in case handler is re-invoked due to flags change self._args_evaled = list(chain.from_iterable(self._args)) return self._args_evaled
def _get_args(self)
Lazily evaluate the args.
6.328173
4.891643
1.29367
def invoke(self):
    """Invoke this handler, then run any registered post-callbacks."""
    self._action(*self._get_args())
    for post_cb in self._post_callbacks:
        post_cb()
Invoke this handler.
7.835725
6.681901
1.172679
def register_flags(self, flags):
    """Record the given flags as relevant to this handler.

    Relevant flags drive re-invocation decisions during dispatch and are
    also tracked globally (``_CONSUMED_FLAGS``) for linting/composition.
    """
    self._flags.update(flags)
    self._CONSUMED_FLAGS.update(flags)
Register flags as being relevant to this handler. Relevant flags will be used to determine if the handler should be re-invoked due to changes in the set of active flags. If this handler has already been invoked during this :func:`dispatch` run and none of its relevant flags have been set or removed since then, then the handler will be skipped. This is also used for linting and composition purposes, to determine if a layer has unhandled flags.
7.974359
11.570922
0.689172
def invoke(self):
    """Call the external handler to be invoked."""
    # flush to ensure external process can see flags as they currently
    # are, and write flags (flush releases lock)
    unitdata.kv().flush()
    subprocess.check_call(
        [self._filepath, '--invoke', self._test_output],
        env=os.environ,
    )
Call the external handler to be invoked.
25.341591
23.680647
1.070139
def hook(*hook_patterns):
    """Register the decorated function to run when the current hook matches
    any of the ``hook_patterns``.

    Generally deprecated; use flag-based decorators where possible.  The
    patterns support the ``{interface:...}`` and ``{A,B,...}`` syntax of
    :func:`~charms.reactive.bus.any_hook`.  Cannot be combined with
    :func:`when` or :func:`when_not` decorators.
    """
    def _decorator(action):
        def _relation_args():
            # generator defers the hookenv.relation_type() call, for tests
            rel = endpoint_from_name(hookenv.relation_type())
            if rel:
                yield rel
        handler = Handler.get(action)
        handler.add_predicate(partial(_hook, hook_patterns))
        handler.add_args(_relation_args())
        return action
    return _decorator
Register the decorated function to run when the current hook matches any of the ``hook_patterns``. This decorator is generally deprecated and should only be used when absolutely necessary. The hook patterns can use the ``{interface:...}`` and ``{A,B,...}`` syntax supported by :func:`~charms.reactive.bus.any_hook`. Note that hook decorators **cannot** be combined with :func:`when` or :func:`when_not` decorators.
9.449812
11.51105
0.820934
def when_file_changed(*filenames, **kwargs):
    """Register the decorated function to run when one or more files change.

    :param list filenames: Names of files to check for changes (callables
        returning a name are also accepted).
    :param str hash_type: Hash algorithm used to detect changes (kwarg only;
        defaults to 'md5').
    """
    def _decorator(action):
        Handler.get(action).add_predicate(
            partial(any_file_changed, filenames, **kwargs))
        return action
    return _decorator
Register the decorated function to run when one or more files have changed. :param list filenames: The names of one or more files to check for changes (a callable returning the name is also accepted). :param str hash_type: The type of hash to use for determining if a file has changed. Defaults to 'md5'. Must be given as a kwarg.
7.303371
9.025939
0.809154
def not_unless(*desired_flags):
    """Guard decorator: warn if the wrapped function is called while any of
    the ``desired_flags`` is not active.

    Unlike :func:`when`, this never triggers the function itself; it only
    logs a warning (and still calls through) when flags are missing.
    """
    def _decorator(func):
        action_id = _action_id(func)
        short_action_id = _short_action_id(func)

        @wraps(func)
        def _wrapped(*args, **kwargs):
            active = get_flags()
            missing = [flag for flag in desired_flags if flag not in active]
            if missing:
                hookenv.log('%s called before flag%s: %s' % (
                    short_action_id,
                    's' if len(missing) > 1 else '',
                    ', '.join(missing)), hookenv.WARNING)
            return func(*args, **kwargs)

        _wrapped._action_id = action_id
        _wrapped._short_action_id = short_action_id
        return _wrapped
    return _decorator
Assert that the decorated function can only be called if the desired_flags are active. Note that, unlike :func:`when`, this does **not** trigger the decorated function if the flags match. It **only** logs a warning (and still calls through) if the function is called when the flags do not match. This is primarily for informational purposes and as a guard clause.
2.193943
2.318197
0.946401
def only_once(action=None):
    """Register the decorated function to be run once, and only once.

    .. deprecated:: 0.5.0
       Use :func:`when_not` with :func:`set_state` instead; this handler
       may actually be invoked multiple times.

    No arguments are ever passed to the handler.
    """
    if action is None:
        # support both the bare @only_once and the @only_once() forms
        return only_once
    aid = _action_id(action)
    handler = Handler.get(action)
    handler.add_predicate(lambda: not was_invoked(aid))
    handler.add_post_callback(partial(mark_invoked, aid))
    return action
.. deprecated:: 0.5.0 Use :func:`when_not` in combination with :func:`set_state` instead. This handler is deprecated because it might actually be `called multiple times <https://github.com/juju-solutions/charms.reactive/issues/22>`_. Register the decorated function to be run once, and only once. This decorator will never cause arguments to be passed to the handler.
5.55838
5.970598
0.930959
def collect_metrics():
    """Register the decorated function to run for the collect_metrics hook."""
    def _decorator(action):
        Handler.get(action).add_predicate(
            partial(_restricted_hook, 'collect-metrics'))
        return action
    return _decorator
Register the decorated function to run for the collect_metrics hook.
16.791155
11.979013
1.401714
return tuple(flag.format(endpoint_name=endpoint_name) for flag in flags)
def _expand_endpoint_name(endpoint_name, flags)
Populate any ``{endpoint_name}`` tags in the flag names for the given handler, based on the handlers module / file name.
5.34386
4.93647
1.082527
def _is_endpoint_method(handler):
    """Heuristically detect whether *handler* is an Endpoint method.

    At decoration time the class does not exist yet, so we check for a
    lone ``self`` parameter plus an Endpoint subclass in the handler's
    globals.
    """
    if list(signature(handler).parameters) != ['self']:
        return False
    return any(isclass(g) and issubclass(g, Endpoint)
               for g in handler.__globals__.values())
Determine whether a handler is an Endpoint method, so that it can be invoked with the Endpoint instance from the context. Unfortunately, we can't directly detect whether a handler is an Endpoint method, because at the time of decoration, the class doesn't actually exist yet so it's impossible to get a reference to it. So, we use the heuristic of seeing if the handler takes only a single ``self`` param and there is an Endpoint class in the handler's globals.
2.948328
2.251568
1.309455
def any_hook(*hook_patterns):
    """Return True if the currently executing hook matches any pattern.

    Patterns may use ``{role:interface}`` (expanded via relation lookup)
    and ``{A,B,...}`` (comma alternation) syntax, possibly combined.
    """
    # expand {role:interface} patterns
    iface_pat = re.compile(r'{([^:}]+):([^}]+)}')
    patterns = _expand_replacements(
        iface_pat, hookenv.role_and_interface_to_relations, hook_patterns)
    # expand {A,B,C,...} patterns
    comma_pat = re.compile(r'{((?:[^:,}]+,?)+)}')
    patterns = _expand_replacements(comma_pat, lambda v: v.split(','), patterns)
    return hookenv.hook_name() in patterns
Assert that the currently executing hook matches one of the given patterns. Each pattern will match one or more hooks, and can use the following special syntax: * ``db-relation-{joined,changed}`` can be used to match multiple hooks (in this case, ``db-relation-joined`` and ``db-relation-changed``). * ``{provides:mysql}-relation-joined`` can be used to match a relation hook by the role and interface instead of the relation name. The role must be one of ``provides``, ``requires``, or ``peer``. * The previous two can be combined, of course: ``{provides:mysql}-relation-{joined,changed}``
4.443907
4.500521
0.987421
def any_file_changed(filenames, hash_type='md5'):
    """Check whether any of the given files changed since the last call.

    :param list filenames: File names (or callables returning a name).
    :param str hash_type: Hash algorithm used for comparison.
    :return: True if at least one file's hash differs from the stored one.
    """
    changed = False
    for entry in filenames:
        fname = str(entry() if callable(entry) else entry)
        key = 'reactive.files_changed.%s' % fname
        old_hash = unitdata.kv().get(key)
        new_hash = host.file_hash(fname, hash_type=hash_type)
        if old_hash != new_hash:
            unitdata.kv().set(key, new_hash)
            changed = True  # mark as changed, but keep updating hashes
    return changed
Check if any of the given files have changed since the last time this was called. :param list filenames: Names of files to check. Accepts callables returning the filename. :param str hash_type: Algorithm to use to check the files.
3.218358
3.434839
0.936975
def load_hook_files(pathname):
    """Load hook files (a glob pattern or a single path), sorted by filename,
    and register any ``dredd_hooks``-decorated callables they define on the
    global ``hooks`` registry.

    :param pathname: Glob pattern (``**`` recursion is honored on
        Python >= 3.5) or a single file path.
    """
    global hooks
    # Tuple comparison is the robust version gate: the previous
    # `version_info[0] > 2 and version_info[1] > 4` check would wrongly
    # disable recursion for any future major version with minor <= 4.
    if sys.version_info >= (3, 5):
        fsglob = sorted(glob.iglob(pathname, recursive=True))
    else:
        fsglob = sorted(glob.iglob(pathname))
    # Hook types whose callbacks are appended directly to a registry list...
    unnamed_registries = {
        BEFORE_ALL: hooks._before_all,
        AFTER_ALL: hooks._after_all,
        BEFORE_EACH: hooks._before_each,
        AFTER_EACH: hooks._after_each,
        BEFORE_EACH_VALIDATION: hooks._before_each_validation,
    }
    # ...versus those registered under a transaction name via add_named_hook().
    named_registries = {
        BEFORE_VALIDATION: hooks._before_validation,
        BEFORE: hooks._before,
        AFTER: hooks._after,
    }
    for path in fsglob:
        real_path = os.path.realpath(path)
        # Append hooks file directory to the sys.path so submodules can be
        # loaded too.
        module_dir = os.path.dirname(real_path)
        if module_dir not in sys.path:
            sys.path.append(module_dir)
        module = imp.load_source(os.path.basename(path), real_path)
        for attr_name in dir(module):
            obj = getattr(module, attr_name)
            if not (hasattr(obj, 'dredd_hooks') and callable(obj)):
                continue
            # renamed from (hook, name) to avoid shadowing the outer loop var
            for hook_type, hook_name in obj.dredd_hooks:
                if hook_type in unnamed_registries:
                    unnamed_registries[hook_type].append(obj)
                elif hook_type in named_registries:
                    add_named_hook(named_registries[hook_type], obj, hook_name)
Loads files either defined as a glob or a single file path sorted by filenames.
2.204897
2.196781
1.003694
def main(relation_name=None):
    """Main entry point for the reactive framework.

    Discovers handlers, dispatches them until the system settles, and
    flushes unit data.  Restricted hooks (``meter-status-changed``,
    ``collect-metrics``) skip atstart/atexit processing and only run
    matching hook handlers.

    :param str relation_name: Optional name of the relation being handled.
    """
    hook_name = hookenv.hook_name()
    restricted_mode = hook_name in ['meter-status-changed', 'collect-metrics']
    hookenv.log('Reactive main running for hook %s' % hook_name,
                level=hookenv.INFO)
    if restricted_mode:
        hookenv.log('Restricted mode.', level=hookenv.INFO)
    # work-around for https://bugs.launchpad.net/juju-core/+bug/1503039
    # ensure that external handlers can tell what hook they're running in
    os.environ.setdefault('JUJU_HOOK_NAME', hook_name)
    try:
        bus.discover()
        if not restricted_mode:
            # limit what gets run in restricted mode
            hookenv._run_atstart()
        bus.dispatch(restricted=restricted_mode)
    except Exception:
        hookenv.log('Hook error:\n{}'.format(traceback.format_exc()),
                    level=hookenv.ERROR)
        raise
    except SystemExit as x:
        # a clean sys.exit() / sys.exit(0) is not an error
        if x.code not in (None, 0):
            raise
    if not restricted_mode:
        # limit what gets run in restricted mode
        hookenv._run_atexit()
    unitdata._KV.flush()
This is the main entry point for the reactive framework. It calls :func:`~bus.discover` to find and load all reactive handlers (e.g., :func:`@when <decorators.when>` decorated blocks), and then :func:`~bus.dispatch` to trigger handlers until the queue settles out. Finally, :meth:`unitdata.kv().flush <charmhelpers.core.unitdata.Storage.flush>` is called to persist the flags and other data. :param str relation_name: Optional name of the relation which is being handled.
4.222849
3.81085
1.108112