code
string
signature
string
docstring
string
loss_without_docstring
float64
loss_with_docstring
float64
factor
float64
self._seqid += 1 d = self._reqs[self._seqid] = defer.Deferred() self.send_describe_version() return d
def describe_version(self, )
get the thrift api version
3.178563
2.69493
1.17946
self._seqid += 1 d = self._reqs[self._seqid] = defer.Deferred() self.send_describe_ring(keyspace) return d
def describe_ring(self, keyspace)
get the token ring: a map of ranges to host addresses, represented as a set of TokenRange instead of a map from range to list of endpoints, because you can't use Thrift structs as map keys: https://issues.apache.org/jira/browse/THRIFT-162 for the same reason, we can't return a set here, even though order is neither important nor predictable. Parameters: - keyspace
3.082847
3.274282
0.941534
self._seqid += 1 d = self._reqs[self._seqid] = defer.Deferred() self.send_describe_token_map() return d
def describe_token_map(self, )
get the mapping between token->node ip without taking replication into consideration https://issues.apache.org/jira/browse/CASSANDRA-4092
3.374472
4.851974
0.695484
self._seqid += 1 d = self._reqs[self._seqid] = defer.Deferred() self.send_describe_partitioner() return d
def describe_partitioner(self, )
returns the partitioner used by this cluster
3.23125
3.710906
0.870744
self._seqid += 1 d = self._reqs[self._seqid] = defer.Deferred() self.send_describe_snitch() return d
def describe_snitch(self, )
returns the snitch used by this cluster
3.003276
3.377055
0.889318
self._seqid += 1 d = self._reqs[self._seqid] = defer.Deferred() self.send_describe_keyspace(keyspace) return d
def describe_keyspace(self, keyspace)
describe specified keyspace Parameters: - keyspace
2.840525
3.078968
0.922558
self._seqid += 1 d = self._reqs[self._seqid] = defer.Deferred() self.send_describe_splits(cfName, start_token, end_token, keys_per_split) return d
def describe_splits(self, cfName, start_token, end_token, keys_per_split)
experimental API for hadoop/parallel query support. may change violently and without warning. returns list of token strings such that first subrange is (list[0], list[1]], next is (list[1], list[2]], etc. Parameters: - cfName - start_token - end_token - keys_per_split
2.069409
2.936166
0.7048
self._seqid += 1 d = self._reqs[self._seqid] = defer.Deferred() self.send_trace_next_query() return d
def trace_next_query(self, )
Enables tracing for the next query in this connection and returns the UUID for that trace session The next query will be traced idependently of trace probability and the returned UUID can be used to query the trace keyspace
2.667336
3.561334
0.748971
self._seqid += 1 d = self._reqs[self._seqid] = defer.Deferred() self.send_describe_splits_ex(cfName, start_token, end_token, keys_per_split) return d
def describe_splits_ex(self, cfName, start_token, end_token, keys_per_split)
Parameters: - cfName - start_token - end_token - keys_per_split
1.993756
2.044657
0.975105
self._seqid += 1 d = self._reqs[self._seqid] = defer.Deferred() self.send_system_add_column_family(cf_def) return d
def system_add_column_family(self, cf_def)
adds a column family. returns the new schema id. Parameters: - cf_def
2.411258
3.175674
0.75929
self._seqid += 1 d = self._reqs[self._seqid] = defer.Deferred() self.send_system_drop_column_family(column_family) return d
def system_drop_column_family(self, column_family)
drops a column family. returns the new schema id. Parameters: - column_family
2.417657
2.987378
0.80929
self._seqid += 1 d = self._reqs[self._seqid] = defer.Deferred() self.send_system_add_keyspace(ks_def) return d
def system_add_keyspace(self, ks_def)
adds a keyspace and any column families that are part of it. returns the new schema id. Parameters: - ks_def
2.64745
3.566642
0.742281
self._seqid += 1 d = self._reqs[self._seqid] = defer.Deferred() self.send_system_drop_keyspace(keyspace) return d
def system_drop_keyspace(self, keyspace)
drops a keyspace and any column families that are part of it. returns the new schema id. Parameters: - keyspace
2.569014
3.337705
0.769695
self._seqid += 1 d = self._reqs[self._seqid] = defer.Deferred() self.send_system_update_keyspace(ks_def) return d
def system_update_keyspace(self, ks_def)
updates properties of a keyspace. returns the new schema id. Parameters: - ks_def
2.653663
3.336617
0.795316
self._seqid += 1 d = self._reqs[self._seqid] = defer.Deferred() self.send_system_update_column_family(cf_def) return d
def system_update_column_family(self, cf_def)
updates properties of a column family. returns the new schema id. Parameters: - cf_def
2.414958
3.369572
0.716696
self._seqid += 1 d = self._reqs[self._seqid] = defer.Deferred() self.send_execute_cql_query(query, compression) return d
def execute_cql_query(self, query, compression)
Executes a CQL (Cassandra Query Language) statement and returns a CqlResult containing the results. Parameters: - query - compression
2.651906
3.175478
0.83512
self._seqid += 1 d = self._reqs[self._seqid] = defer.Deferred() self.send_execute_cql3_query(query, compression, consistency) return d
def execute_cql3_query(self, query, compression, consistency)
Parameters: - query - compression - consistency
2.487144
2.378847
1.045525
self._seqid += 1 d = self._reqs[self._seqid] = defer.Deferred() self.send_prepare_cql_query(query, compression) return d
def prepare_cql_query(self, query, compression)
Prepare a CQL (Cassandra Query Language) statement by compiling and returning - the type of CQL statement - an id token of the compiled CQL stored on the server side. - a count of the discovered bound markers in the statement Parameters: - query - compression
2.96708
3.033178
0.978208
self._seqid += 1 d = self._reqs[self._seqid] = defer.Deferred() self.send_prepare_cql3_query(query, compression) return d
def prepare_cql3_query(self, query, compression)
Parameters: - query - compression
2.912625
2.686311
1.084247
self._seqid += 1 d = self._reqs[self._seqid] = defer.Deferred() self.send_execute_prepared_cql_query(itemId, values) return d
def execute_prepared_cql_query(self, itemId, values)
Executes a prepared CQL (Cassandra Query Language) statement by passing an id token and a list of variables to bind and returns a CqlResult containing the results. Parameters: - itemId - values
2.524063
3.086625
0.817742
self._seqid += 1 d = self._reqs[self._seqid] = defer.Deferred() self.send_execute_prepared_cql3_query(itemId, values, consistency) return d
def execute_prepared_cql3_query(self, itemId, values, consistency)
Parameters: - itemId - values - consistency
2.363477
2.260819
1.045407
self._seqid += 1 d = self._reqs[self._seqid] = defer.Deferred() self.send_set_cql_version(version) return d
def set_cql_version(self, version)
@deprecated This is now a no-op. Please use the CQL3 specific methods instead. Parameters: - version
2.790328
3.041302
0.917478
if name == 'mumps': return LinSolverMUMPS(prop) elif name == 'superlu': return LinSolverSUPERLU(prop) elif name == 'umfpack': return LinSolverUMFPACK(prop) elif name == 'default': try: return new_linsolver('mumps',prop) except ImportError: return new_linsolver('superlu',prop) else: raise ValueError('invalid linear solver name')
def new_linsolver(name,prop)
Creates a linear solver. Parameters ---------- name : string prop : string Returns ------- solver : :class:`LinSolver <optalg.lin_solver.LinSolver>`
2.301871
2.424089
0.949582
# Optproblem if isinstance(problem,OptProblem): return problem # Other else: # Type Base if (not hasattr(problem,'G') or (problem.G.shape[0] == problem.G.shape[1] and problem.G.shape[0] == problem.G.nnz and np.all(problem.G.row == problem.G.col) and np.all(problem.G.data == 1.))): return create_problem_from_type_base(problem) # Type A else: return create_problem_from_type_A(problem)
def cast_problem(problem)
Casts problem object with known interface as OptProblem. Parameters ---------- problem : Object
3.489
3.690958
0.945283
p = OptProblem() # Init attributes p.phi = problem.phi p.gphi = problem.gphi p.Hphi = problem.Hphi p.A = problem.A p.b = problem.b p.f = problem.f p.J = problem.J p.H_combined = problem.H_combined p.u = problem.u p.l = problem.l p.x = problem.x p.P = None p.lam = None p.nu = None p.mu = None p.pi = None p.wrapped_problem = problem # Methods def eval(cls,x): cls.wrapped_problem.eval(x) cls.phi = cls.wrapped_problem.phi cls.gphi = cls.wrapped_problem.gphi cls.Hphi = cls.wrapped_problem.Hphi cls.f = cls.wrapped_problem.f cls.J = cls.wrapped_problem.J def combine_H(cls,coeff,ensure_psd=False): cls.wrapped_problem.combine_H(coeff,ensure_psd) cls.H_combined = cls.wrapped_problem.H_combined p.eval = MethodType(eval,p) p.combine_H = MethodType(combine_H,p) # Return return p
def create_problem_from_type_base(problem)
Creates OptProblem from type-base problem. Parameters ---------- problem : Object
2.643253
2.641232
1.000765
p = OptProblem() nx = problem.get_num_primal_variables() nz = problem.G.shape[0] p.phi = problem.phi p.gphi = np.hstack((problem.gphi,np.zeros(nz))) p.Hphi = coo_matrix((problem.Hphi.data,(problem.Hphi.row,problem.Hphi.col)),shape=(nx+nz,nx+nz)) p.A = bmat([[problem.A,None],[problem.G,-eye(nz)]],format='coo') p.b = np.hstack((problem.b,np.zeros(nz))) p.f = problem.f p.J = coo_matrix((problem.J.data,(problem.J.row,problem.J.col)),shape=(problem.J.shape[0],nx+nz)) p.H_combined = coo_matrix((problem.H_combined.data,(problem.H_combined.row,problem.H_combined.col)),shape=(nx+nz,nx+nz)) p.u = np.hstack((problem.get_upper_limits(),problem.u)) p.l = np.hstack((problem.get_lower_limits(),problem.l)) p.x = np.hstack((problem.x,np.zeros(nz))) p.P = None p.lam = None p.nu = None p.mu = None p.pi = None p.wrapped_problem = problem def eval(cls,xz): x = xz[:nx] z = xz[nx:] prob = cls.wrapped_problem prob.eval(x) cls.phi = prob.phi cls.gphi = np.hstack((prob.gphi,np.zeros(nz))) cls.Hphi = coo_matrix((prob.Hphi.data,(prob.Hphi.row,prob.Hphi.col)),shape=(nx+nz,nx+nz)) cls.f = prob.f cls.J = coo_matrix((prob.J.data,(prob.J.row,prob.J.col)),shape=(prob.J.shape[0],nx+nz)) def combine_H(cls,coeff,ensure_psd=False): prob = cls.wrapped_problem prob.combine_H(coeff,ensure_psd=ensure_psd) cls.H_combined = coo_matrix((prob.H_combined.data,(prob.H_combined.row,prob.H_combined.col)),shape=(nx+nz,nx+nz)) def recover_primal_variables(cls,x): return x[:nx] def recover_dual_variables(cls,lam,nu,mu,pi): prob = cls.wrapped_problem return lam[:prob.A.shape[0]],nu,mu[nx:],pi[nx:] p.eval = MethodType(eval,p) p.combine_H = MethodType(combine_H,p) p.recover_primal_variables = MethodType(recover_primal_variables,p) p.recover_dual_variables = MethodType(recover_dual_variables,p) # Return return p
def create_problem_from_type_A(problem)
Creates OptProblem from type-A problem. Parameters ---------- problem : Object
2.137381
2.150452
0.993922
return lam,nu,mu,pi
def recover_dual_variables(self,lam,nu,mu,pi)
Recovers dual variables for original problem. Parameters ---------- lam : ndarray nu : ndarray mu : ndarray pi : ndarray
6.26999
8.441854
0.742727
if self.x is not None: return self.x.size if self.gphi is not None: return self.gphi.size if self.Hphi is not None: return self.Hphi.shape[0] if self.A is not None: return self.A.shape[1] if self.J is not None: return self.J.shape[1] if self.u is not None: return self.u.size if self.l is not None: return self.l.size return 0
def get_num_primal_variables(self)
Gets number of primal variables. Returns ------- num : int
2.061034
2.079883
0.990937
if self.problem: return self.problem.recover_dual_variables(self.lam*self.obj_sca, self.nu*self.obj_sca, self.mu*self.obj_sca, self.pi*self.obj_sca) else: return None,None,None,None
def get_dual_variables(self)
Gets dual variables. Returns ------- lam : vector nu : vector mu : vector pi : vector
4.723594
3.858155
1.224314
return {'status': self.status, 'error_msg': self.error_msg, 'k': self.k, 'x': self.x, 'lam': self.lam*self.obj_sca, 'nu': self.nu*self.obj_sca, 'mu': self.mu*self.obj_sca, 'pi': self.pi*self.obj_sca}
def get_results(self)
Gets results. Returns ------- results : dictionary
3.315639
3.501868
0.94682
# Parameters of line search c1 = 1e-4 c2 = 5e-1 # Initialize lower bound, upper bound and step l = 0. if 1. < smax: s = 1. else: s = smax u = np.NaN phi = F dphi = np.dot(GradF,p) # Check that p is descent direction if dphi >= 0: raise OptSolverError_BadSearchDir(self) # Bisection for i in range(0,maxiter): xsp = x+s*p fdata = func(xsp) phis = fdata.F dphis = np.dot(fdata.GradF,p) if phis > phi + c1*s*dphi: u = s elif dphis > 0 and dphis > -c2*dphi: u = s elif dphis < 0 and -dphis > -c2*dphi: l = s if s >= smax: return s,fdata else: return s,fdata if np.isnan(u): s = np.min([2.*s,smax]) else: s = (l + u)/2. raise OptSolverError_LineSearch(self)
def line_search(self,x,p,F,GradF,func,smax=np.inf,maxiter=40)
Finds steplength along search direction p that satisfies the strong Wolfe conditions. Parameters ---------- x : current point (ndarray) p : search direction (ndarray) F : function value at `x` (float) GradF : gradient of function at `x` (ndarray) func : function of `x` that returns function object with attributes `F` and `GradF` (function) smax : maximum allowed steplength (float) Returns ------- s : stephlength that satisfies the Wolfe conditions (float).
3.890009
3.714647
1.047208
self.k = 0. self.x = np.zeros(0) self.lam = np.zeros(0) self.nu = np.zeros(0) self.mu = np.zeros(0) self.pi = np.zeros(0) self.status = self.STATUS_UNKNOWN self.error_msg = '' self.obj_sca = 1.
def reset(self)
Resets solver data.
4.006464
3.33406
1.201677
for key,value in list(parameters.items()): if key in self.parameters: self.parameters[key] = value
def set_parameters(self,parameters)
Sets solver parameters. Parameters ---------- parameters : dict
2.893797
3.84739
0.752146
self.factorize(A) return self.solve(b)
def factorize_and_solve(self, A, b)
Factorizes A and solves Ax=b. Returns ------- x : vector
6.170871
12.473903
0.494703
assert os.path.exists(src) assert os.path.exists(dest) # Only at the root. Could be made more complicated and recursive later for filename in os.listdir(src): if filename.endswith(EXCLUDED_EXTENSIONS): continue # Flat directory structure elif not os.path.isfile(os.path.join(src, filename)): continue with open(os.path.join(src, filename)) as f: output_contents = f.read().format(**fmt_vars) with open(os.path.join(dest, filename), 'w') as file_obj: file_obj.write(output_contents)
def format_files(src: str, dest: str, **fmt_vars: str) -> None
Copies all files inside src into dest while formatting the contents of the files into the output. For example, a file with the following contents: {foo} bar {baz} and the vars {'foo': 'herp', 'baz': 'derp'} will end up in the output as herp bar derp :param text src: Source directory. :param text dest: Destination directory. :param dict fmt_vars: Vars to format into the files.
3.244297
3.181197
1.019835
A = coo_matrix(A) self.mumps.set_shape(A.shape[0]) self.mumps.set_centralized_assembled_rows_cols(A.row+1,A.col+1) self.mumps.run(job=1) self.analyzed = True
def analyze(self,A)
Analyzes structure of A. Parameters ---------- A : matrix For symmetric systems, should contain only lower diagonal part.
6.2451
6.635857
0.941114
A = coo_matrix(A) self.mumps.set_centralized_assembled_values(A.data) self.mumps.run(job=2)
def factorize(self,A)
Factorizes A. Parameters ---------- A : matrix For symmetric systems, should contain only lower diagonal part.
10.148644
12.0445
0.842596
x = b.copy() self.mumps.set_rhs(x) self.mumps.run(job=3) return x
def solve(self,b)
Solves system Ax=b. Parameters ---------- b : ndarray Returns ------- x : ndarray
8.439265
11.677441
0.722698
A = coo_matrix(A) x = b.copy() self.mumps.set_centralized_assembled_values(A.data) self.mumps.set_rhs(x) self.mumps.run(job=5) return x
def factorize_and_solve(self,A,b)
Factorizes A and sovles Ax=b. Parameters ---------- A : matrix b : ndarray Returns ------- x : ndarray
6.432451
8.14156
0.790076
assert A.dtype == 'd' and b.dtype == 'd', "Only double precision supported." with DMumpsContext(par=1, sym=0, comm=comm) as ctx: if ctx.myid == 0: # Set the sparse matrix -- only necessary on ctx.set_centralized_sparse(A.tocoo()) x = b.copy() ctx.set_rhs(x) # Silence most messages ctx.set_silent() # Analysis + Factorization + Solve ctx.run(job=6) if ctx.myid == 0: return x
def spsolve(A, b, comm=None)
Sparse solve A\b.
8.603741
8.580486
1.00271
if self.myid != 0: return A = A.tocoo() n = A.shape[0] assert A.shape == (n, n), "Expected a square matrix." self.set_shape(n) self.set_centralized_assembled(A.row+1, A.col+1, A.data)
def set_centralized_sparse(self, A)
Set assembled matrix on processor 0. Parameters ---------- A : `scipy.sparse.coo_matrix` Sparse matrices of other formats will be converted to COOrdinate form.
3.790403
3.679656
1.030097
self.set_centralized_assembled_rows_cols(irn, jcn) self.set_centralized_assembled_values(a)
def set_centralized_assembled(self, irn, jcn, a)
Set assembled matrix on processor 0. The row and column indices (irn & jcn) should be one based.
3.018115
3.070561
0.98292
if self.myid != 0: return assert irn.size == jcn.size self._refs.update(irn=irn, jcn=jcn) self.id.nz = irn.size self.id.irn = self.cast_array(irn) self.id.jcn = self.cast_array(jcn)
def set_centralized_assembled_rows_cols(self, irn, jcn)
Set assembled matrix indices on processor 0. The row and column indices (irn & jcn) should be one based.
4.226171
4.054789
1.042267
if self.myid != 0: return assert a.size == self.id.nz self._refs.update(a=a) self.id.a = self.cast_array(a)
def set_centralized_assembled_values(self, a)
Set assembled matrix values on processor 0.
11.51138
10.240273
1.124128
self.set_distributed_assembled_rows_cols(irn_loc, jcn_loc) self.set_distributed_assembled_values(a_loc)
def set_distributed_assembled(self, irn_loc, jcn_loc, a_loc)
Set the distributed assembled matrix. Distributed assembled matrices require setting icntl(18) != 0.
2.822527
2.55345
1.105378
assert irn_loc.size == jcn_loc.size self._refs.update(irn_loc=irn_loc, jcn_loc=jcn_loc) self.id.nz_loc = irn_loc.size self.id.irn_loc = self.cast_array(irn_loc) self.id.jcn_loc = self.cast_array(jcn_loc)
def set_distributed_assembled_rows_cols(self, irn_loc, jcn_loc)
Set the distributed assembled matrix row & column numbers. Distributed assembled matrices require setting icntl(18) != 0.
3.227047
3.136993
1.028707
assert a_loc.size == self._refs['irn_loc'].size self._refs.update(a_loc=a_loc) self.id.a_loc = self.cast_array(a_loc)
def set_distributed_assembled_values(self, a_loc)
Set the distributed assembled matrix values. Distributed assembled matrices require setting icntl(18) != 0.
8.686303
8.619902
1.007703
assert rhs.size == self.id.n self._refs.update(rhs=rhs) self.id.rhs = self.cast_array(rhs)
def set_rhs(self, rhs)
Set the right hand side. This matrix will be modified in place.
11.691386
11.155181
1.048068
self.set_icntl(1, -1) # output stream for error msgs self.set_icntl(2, -1) # otuput stream for diagnostic msgs self.set_icntl(3, -1) # output stream for global info self.set_icntl(4, 0)
def set_silent(self)
Silence most messages.
4.893021
4.691124
1.043038
if self.id is not None and self._mumps_c is not None: self.id.job = -2 # JOB_END self._mumps_c(self.id) self.id = None self._refs = None
def destroy(self)
Delete the MUMPS context and release all array references.
8.961205
6.828707
1.312284
self._mumps_c(self.id) if self.id.infog[0] < 0: raise RuntimeError("MUMPS error: %d" % self.id.infog[0])
def mumps(self)
Call MUMPS, checking for errors in the return code. The desired job should have already been set using `ctx.set_job(...)`. As a convenience, you may wish to call `ctx.run(job=...)` which sets the job and calls MUMPS.
6.819203
7.265684
0.938549
if not isinstance(from_state, string_types): raise ValueError("Invalid data flow origin port: from_state must be a string") if not isinstance(from_key, int): raise ValueError("Invalid data flow origin port: from_key must be of type int") old_from_state = self.from_state old_from_key = self.from_key self._from_state = from_state self._from_key = from_key valid, message = self._check_validity() if not valid: self._from_state = old_from_state self._from_key = old_from_key raise ValueError("The data flow origin could not be changed: {0}".format(message))
def modify_origin(self, from_state, from_key)
Set both from_state and from_key at the same time to modify data flow origin :param str from_state: State id of the origin state :param int from_key: Data port id of the origin port :raises exceptions.ValueError: If parameters have wrong types or the new data flow is not valid
2.449852
2.162042
1.133119
if not isinstance(to_state, string_types): raise ValueError("Invalid data flow target port: from_state must be a string") if not isinstance(to_key, int): raise ValueError("Invalid data flow target port: from_outcome must be of type int") old_to_state = self.to_state old_to_key = self.to_key self._to_state = to_state self._to_key = to_key valid, message = self._check_validity() if not valid: self._to_state = old_to_state self._to_key = old_to_key raise ValueError("The data flow target could not be changed: {0}".format(message))
def modify_target(self, to_state, to_key)
Set both to_state and to_key at the same time to modify data flow target :param str to_state: State id of the target state :param int to_key: Data port id of the target port :raises exceptions.ValueError: If parameters have wrong types or the new data flow is not valid
2.767561
2.408442
1.149108
shortcut_manager.add_callback_for_action("copy", self._copy) shortcut_manager.add_callback_for_action("paste", self._paste) shortcut_manager.add_callback_for_action("cut", self._cut) shortcut_manager.add_callback_for_action("undo", self._undo) shortcut_manager.add_callback_for_action("redo", self._redo) shortcut_manager.add_callback_for_action("apply", self._apply) shortcut_manager.add_callback_for_action("open_external_editor", self._open_external_editor)
def register_actions(self, shortcut_manager)
Register callback methods for triggered actions :param rafcon.gui.shortcut_manager.ShortcutManager shortcut_manager: Shortcut Manager Object holding mappings between shortcuts and actions.
1.815892
1.732979
1.047844
# work around to avoid changes at all (e.g. by enter-key) if text view property editable is False # TODO if SourceView3 is used in future check if this can be skipped if not self.view.textview.get_editable() and not self.view.while_in_set_enabled: if hasattr(self.view.get_buffer(), 'begin_not_undoable_action'): self.view.get_buffer().begin_not_undoable_action() self.view.set_enabled(False, self.source_text) if hasattr(self.view.get_buffer(), 'end_not_undoable_action'): self.view.get_buffer().end_not_undoable_action() if self.view: self.view.apply_tag('default')
def code_changed(self, source)
Apply checks and adjustments of the TextBuffer and TextView after every change in buffer. The method re-apply the tag (style) for the buffer. It avoids changes while editable-property set to False which are caused by a bug in the GtkSourceView2. GtkSourceView2 is the default used TextView widget here. The text buffer is reset after every change to last stored source-text by a respective work around which suspends any generation of undo items and avoids a recursive call of the method set_enabled by observing its while_in_set_enabled flag. :param TextBuffer source: :return:
6.048097
3.962109
1.526484
if isinstance(self.model.state, LibraryState): return self.set_script_text(self.view.get_text())
def apply_clicked(self, button)
Triggered when the Apply-Shortcut in the editor is triggered.
13.057484
11.159739
1.170053
line_number, line_offset = self.get_cursor_position() self.get_buffer().set_text(text) self.set_cursor_position(line_number, line_offset)
def set_text(self, text)
The method insert text into the text buffer of the text view and preserves the cursor location. :param str text: which is insert into the text buffer. :return:
3.117048
3.221091
0.967699
self.while_in_set_enabled = True # Apply color scheme by set text 'workaround' (with current buffer source) self.set_text(self.get_text()) if text is None else self.set_text(text) if on: self.textview.set_editable(True) self.apply_tag('default') else: self.apply_tag('deactivated') self.textview.set_editable(on) self.while_in_set_enabled = False
def set_enabled(self, on, text=None)
Set the default input or deactivated (disabled) style scheme The method triggers the signal 'changed' by using set_text. Therefore, the method use the while_in_set_enabled flag to make activities of the method observable. If a method trigger this method and was triggered by a changed-signal this flag is supposed to avoid recursive calls. :param bool on: enable flag. :param str text: optional text to insert. :return:
6.650549
5.092666
1.305907
logger.info("Run pre-initiation hook of {} plugin.".format(__file__.split(os.path.sep)[-2])) # Example: Monkey-Path rafcon.core.script.Script class to print additional log-message while execution from rafcon.core.script import Script old_execute_method = Script.execute def new_execute_method(self, state, inputs=None, outputs=None, backward_execution=False): logger.debug("patched version of Script class is used.") result = old_execute_method(self, state, inputs, outputs, backward_execution) logger.debug("patched version of Script execute-method is finished with result: {}.".format(result)) return result Script.execute = new_execute_method
def pre_init()
The pre_init function of the plugin. Here rafcon-classes can be extended/monkey-patched or completely substituted. A example is given with the rafcon_execution_hooks_plugin. :return:
6.344505
5.439614
1.166352
logger.info("Run post-initiation hook of {} plugin.".format(__file__.split(os.path.sep)[-2])) from . import core_template_observer # Example 1: initiate observer some elements of the execution engine core_template_observer.ExecutionEngineObserver() # Example 2: initiate observer execution status core_template_observer.ExecutionStatusObserver() from . import gtkmvc_template_observer # Example 3: gtkmvc3 general modification observer # initiate observer of root_state model-object which already implements a power full recursive notification pattern gtkmvc_template_observer.RootStateModificationObserver() # Example 4: gtkmvc3 meta signal observer gtkmvc_template_observer.MetaSignalModificationObserver()
def post_init(setup_config)
The post_init function of the plugin. Here observer can be registered to the observables and other pre-init functionality of the plugin should be triggered. A simple example is given with the rafcon_execution_hooks_plugin. A complex example is given with the rafcon_monitoring_plugin. :param setup_config: :return:
12.615027
11.989067
1.052211
models = set(models) # Ensure that models is a set and that we do not operate on the parameter itself models_to_remove = set() # check all models for model in models: parent_m = model.parent # check if any (grand-)parent is already in the selection, if so, remove the child while parent_m is not None: if parent_m in models: models_to_remove.add(model) break parent_m = parent_m.parent for model in models_to_remove: models.remove(model) if models_to_remove: logger.debug("The selection has been reduced, as it may not contain elements whose children are also selected") return models
def reduce_to_parent_states(models)
Remove all models of states that have a state model with parent relation in the list The function filters the list of models, so that for no model in the list, one of it (grand-)parents is also in the list. E.g. if the input models consists of a hierarchy state with two of its child states, the resulting list only contains the hierarchy state. :param set models: The set of selected models :return: The reduced set of selected models :rtype: set
4.387811
4.117935
1.065537
def handle_update(selection, *args, **kwargs): old_selection = selection.get_all() update_selection(selection, *args, **kwargs) new_selection = selection.get_all() affected_models = old_selection ^ new_selection if len(affected_models) != 0: # The selection was updated deselected_models = old_selection - new_selection selected_models = new_selection - old_selection map(selection.relieve_model, deselected_models) map(selection.observe_model, selected_models) # Maintain internal lists for fast access selection.update_core_element_lists() # Clear focus if no longer in selection if selection.focus and selection.focus not in new_selection: del selection.focus # Send notifications about changes affected_classes = set(model.core_element.__class__ for model in affected_models) msg_namedtuple = SelectionChangedSignalMsg(update_selection.__name__, new_selection, old_selection, affected_classes) selection.selection_changed_signal.emit(msg_namedtuple) if selection.parent_signal is not None: selection.parent_signal.emit(msg_namedtuple) return handle_update
def updates_selection(update_selection)
Decorator indicating that the decorated method could change the selection
4.12814
4.055305
1.01796
from rafcon.gui.singleton import main_window_controller currently_pressed_keys = main_window_controller.currently_pressed_keys if main_window_controller else set() if any(key in currently_pressed_keys for key in [constants.EXTEND_SELECTION_KEY, constants.EXTEND_SELECTION_KEY_ALT]): return True return False
def extend_selection()
Checks is the selection is to be extended The selection is to be extended, if a special modifier key (typically <Ctrl>) is being pressed. :return: If to extend the selection :rtype: True
5.089209
4.599132
1.106559
if not hasattr(models, "__iter__"): models = {models} if not all([isinstance(model, (AbstractStateModel, StateElementModel)) for model in models]): raise TypeError("The selection supports only models with base class AbstractStateModel or " "StateElementModel, see handed elements {0}".format(models)) return models if isinstance(models, set) else set(models)
def _check_model_types(self, models)
Check types of passed models for correctness and in case raise exception :rtype: set :returns: set of models that are valid for the class
7.015238
7.047876
0.995369
if models is None: return models = self._check_model_types(models) self._selected.update(models) self._selected = reduce_to_parent_states(self._selected)
def add(self, models)
Adds the passed model(s) to the selection
6.964146
5.7154
1.218488
models = self._check_model_types(models) for model in models: if model in self._selected: self._selected.remove(model)
def remove(self, models)
Removed the passed model(s) from the selection
3.604304
3.11132
1.158449
# Do not add None values to selection if models is None: models = set() models = self._check_model_types(models) if len(models) > 1: models = reduce_to_parent_states(models) self._selected = set(models)
def set(self, models)
Sets the selection to the passed model(s)
6.702899
5.797615
1.156148
def handle_prepared_selection_of_core_class_elements(self, core_class, models):
    """Integrate a widget-local selection of `core_class` elements into the overall selection

    TreeStore-based widgets maintain their own selection of elements of one specific core class.
    Without the extend-selection modifier, the previous selection is discarded entirely; with the
    modifier, only the previously selected elements of `core_class` are replaced by the widget
    selection, keeping all other selected elements.

    :param State | StateElement core_class: The core class of the elements the widget handles
    :param models: The list of models that are currently being selected locally
    """
    if extend_selection():
        # Replace only the elements of the handled core class, keep the rest of the selection
        self._selected.difference_update(self.get_selected_elements_of_core_class(core_class))
    else:
        self._selected.clear()
    checked_models = self._check_model_types(models)
    if len(checked_models) > 1:
        checked_models = reduce_to_parent_states(checked_models)
    self._selected.update(checked_models)
5.000527
5.034295
0.993292
def handle_new_selection(self, models):
    """Handle the selection for generic widgets

    Helper for widgets that want to modify the selection with a list of newly selected (clicked)
    models:

    * Without the extend-selection modifier, the selection is replaced by the passed models
    * With the modifier, passed models toggle their selection state: unselected ones are added,
      already selected ones are removed

    :param models: The list of models that are newly selected/clicked on
    """
    checked_models = self._check_model_types(models)
    if extend_selection():
        # Toggle semantics: keep elements in exactly one of the two sets
        self._selected.symmetric_difference_update(checked_models)
    else:
        self._selected = checked_models
    self._selected = reduce_to_parent_states(self._selected)
4.117887
4.384147
0.939268
def focus(self, model):
    """Set the passed model as focused element

    The focused model is also added to the selection, and the focus signal is emitted carrying
    both the newly and the previously focused element. Passing None unsets the focus.

    :param ModelMT model: The element to be focused
    """
    if model is None:
        del self.focus
        return
    # Raises TypeError for unsupported model types; the return value is not needed here
    self._check_model_types(model)
    self.add(model)
    # The message carries the new focus and the focus it replaces
    focus_msg = FocusSignalMsg(model, self._focus)
    self._focus = model
    # NOTE(review): `add` above already inserted the model into the selection — this direct add
    # looks redundant; confirm before removing
    self._selected.add(model)
    self._selected = reduce_to_parent_states(self._selected)
    self.focus_signal.emit(focus_msg)
5.936629
6.928536
0.856837
def focus(self):
    """Unset the focused element and emit the focus signal"""
    previously_focused = self._focus
    self._focus = None
    self.focus_signal.emit(FocusSignalMsg(None, previously_focused))
7.899204
8.925581
0.885007
def update_core_element_lists(self):
    """Maintain the inner per-core-class sets of selected elements"""
    def filter_selection(core_class):
        # All selected models whose core element is an instance of the given class
        return {element for element in self._selected if isinstance(element.core_element, core_class)}
    self._states = filter_selection(State)
    self._transitions = filter_selection(Transition)
    self._data_flows = filter_selection(DataFlow)
    self._input_data_ports = filter_selection(InputDataPort)
    self._output_data_ports = filter_selection(OutputDataPort)
    self._scoped_variables = filter_selection(ScopedVariable)
    self._outcomes = filter_selection(Outcome)
2.173254
2.029446
1.070861
def get_selected_elements_of_core_class(self, core_element_type):
    """Return all selected elements having the specified `core_element_type` as state element class

    :return: Subset of the selection, only containing elements having `core_element_type` as state
      element class
    :rtype: set
    :raises RuntimeError: if `core_element_type` is not a supported core element class
    """
    if core_element_type is Outcome:
        return self.outcomes
    elif core_element_type is InputDataPort:
        return self.input_data_ports
    elif core_element_type is OutputDataPort:
        return self.output_data_ports
    elif core_element_type is ScopedVariable:
        return self.scoped_variables
    elif core_element_type is Transition:
        return self.transitions
    elif core_element_type is DataFlow:
        return self.data_flows
    elif core_element_type is State:
        return self.states
    # Bug fix: the original concatenated a class object to a str ("..." + core_element_type),
    # which raised TypeError instead of the intended RuntimeError with a readable message
    raise RuntimeError("Invalid core element type: {}".format(core_element_type))
2.225817
2.34825
0.947862
def is_selected(self, model):
    """Check whether the given model is selected

    :param model: The model to look up; None checks whether the selection is empty
    :return: True if the model is within the selection, False else
    :rtype: bool
    """
    return not self._selected if model is None else model in self._selected
4.720918
4.870331
0.969322
def execute_command_with_path_in_process(command, path, shell=False, cwd=None, logger=None):
    """Execute a specific command in a separate process with a path as argument

    :param command: the command to be executed
    :param path: the path as first argument to the shell command
    :param bool shell: Whether to use a shell
    :param str cwd: The working directory of the command
    :param logger: optional logger instance which can be handed from other module
    :return: True if the process could be spawned, False on an OS error
    :rtype: bool
    """
    log = _logger if logger is None else logger
    log.debug("Opening path with command: {0} {1}".format(command, path))
    # This splits the command in a matter so that the command gets called in a separate shell and thus
    # does not lock the window.
    arguments = shlex.split('{0} "{1}"'.format(command, path))
    try:
        subprocess.Popen(arguments, shell=shell, cwd=cwd)
        return True
    except OSError as e:
        log.error('The operating system raised an error: {}'.format(e))
        return False
4.686698
4.767081
0.983138
def execute_command_in_process(command, shell=False, cwd=None, logger=None):
    """Execute a specific command in a separate process

    :param command: the command to be executed
    :param bool shell: Whether to use a shell
    :param str cwd: The working directory of the command
    :param logger: optional logger instance which can be handed from other module
    :return: True if the process could be spawned, False on an OS error
    :rtype: bool
    """
    log = _logger if logger is None else logger
    log.debug("Run shell command: {0}".format(command))
    try:
        subprocess.Popen(command, shell=shell, cwd=cwd)
        return True
    except OSError as e:
        log.error('The operating system raised an error: {}'.format(e))
        return False
3.213768
3.463697
0.927843
def _load_child_state_models(self, load_meta_data):
    """Add models for each child state of the state

    :param bool load_meta_data: Whether to load the meta data of the child state
    """
    self.states = {}
    # Create a model for every child state; the model class depends on the concrete state type
    for child_state in self.state.states.values():
        model_class = get_state_model_class_for_state(child_state)
        if model_class is None:
            logger.error("Unknown state type '{type:s}'. Cannot create model.".format(type=type(child_state)))
        else:
            self._add_model(self.states, child_state, model_class, child_state.state_id, load_meta_data)
3.739501
3.674032
1.017819
def _load_scoped_variable_models(self):
    """Add a model for each scoped variable of the state"""
    self.scoped_variables = []
    for scoped_variable_core in self.state.scoped_variables.values():
        self._add_model(self.scoped_variables, scoped_variable_core, ScopedVariableModel)
4.115829
2.853987
1.442133
def _load_data_flow_models(self):
    """Add a model for each data flow of the state"""
    self.data_flows = []
    for data_flow_core in self.state.data_flows.values():
        self._add_model(self.data_flows, data_flow_core, DataFlowModel)
3.964762
2.736999
1.44858
def _load_transition_models(self):
    """Add a model for each transition of the state"""
    self.transitions = []
    for transition_core in self.state.transitions.values():
        self._add_model(self.transitions, transition_core, TransitionModel)
6.070611
4.718157
1.286649
def prepare_destruction(self, recursive=True):
    """Prepares the model for destruction

    Recursively un-registers all observers and removes references to child models. Extends the
    destroy method of the base class by child elements of a container state.

    :param bool recursive: Whether child models are prepared for destruction as well
    """
    # logger.verbose("Prepare destruction container state ...")
    if recursive:
        for scoped_variable in self.scoped_variables:
            scoped_variable.prepare_destruction()
        # Iterate over copies of the connection lists — prepare_destruction presumably mutates
        # them while we iterate (TODO confirm)
        for connection in self.transitions[:] + self.data_flows[:]:
            connection.prepare_destruction()
        for state in self.states.values():
            state.prepare_destruction(recursive)
    # Empty the child containers in place before dropping the references entirely
    del self.scoped_variables[:]
    del self.transitions[:]
    del self.data_flows[:]
    self.states.clear()
    self.scoped_variables = None
    self.transitions = None
    self.data_flows = None
    self.states = None
    super(ContainerStateModel, self).prepare_destruction(recursive)
3.425004
3.403899
1.0062
def model_changed(self, model, prop_name, info):
    """Notify the model lists and the parent state about changes

    Called each time the model changes — either the state itself or one of its children (states,
    transitions, data flows). Children notify their parent about their changes by calling this
    method. Depending on the type of the changed model, the corresponding model list is notified
    via "_notify_method_before" (when the changing function is entered) or "_notify_method_after"
    (when it returns).

    :param model: The model that was changed
    :param prop_name: The property that was changed
    :param info: Information about the change (e.g. the name of the changing function)
    """
    # if info.method_name == 'change_state_type':  # Handled in method 'change_state_type'
    #     return
    # If this model has been changed (and not one of its child states), then we have to update all child models
    # This must be done before notifying anybody else, because other may relay on the updated models
    if self.state == info['instance']:
        if 'after' in info:
            self.update_child_models(model, prop_name, info)
            # if there is and exception set is_about_to_be_destroyed_recursively flag to False again
            if info.method_name in ["remove_state"] and isinstance(info.result, Exception):
                state_id = info.kwargs['state_id'] if 'state_id' in info.kwargs else info.args[1]
                self.states[state_id].is_about_to_be_destroyed_recursively = False
        else:
            # while before notification mark all states which get destroyed recursively
            if info.method_name in ["remove_state"] and \
                    info.kwargs.get('destroy', True) and info.kwargs.get('recursive', True):
                state_id = info.kwargs['state_id'] if 'state_id' in info.kwargs else info.args[1]
                self.states[state_id].is_about_to_be_destroyed_recursively = True

    changed_list = None
    cause = None
    # If the change happened in a child state, notify the list of all child states
    if (isinstance(model, AbstractStateModel) and model is not self) or (  # The state was changed directly
            not isinstance(model, AbstractStateModel) and model.parent is not self):  # One of the member models was changed
        changed_list = self.states
        cause = 'state_change'
    # If the change happened in one of the transitions, notify the list of all transitions
    elif isinstance(model, TransitionModel) and model.parent is self:
        changed_list = self.transitions
        cause = 'transition_change'
    # If the change happened in one of the data flows, notify the list of all data flows
    elif isinstance(model, DataFlowModel) and model.parent is self:
        changed_list = self.data_flows
        cause = 'data_flow_change'
    # If the change happened in one of the scoped variables, notify the list of all scoped variables
    elif isinstance(model, ScopedVariableModel) and model.parent is self:
        changed_list = self.scoped_variables
        cause = 'scoped_variable_change'

    if not (cause is None or changed_list is None):
        if 'before' in info:
            changed_list._notify_method_before(self.state, cause, (self.state,), info)
        elif 'after' in info:
            changed_list._notify_method_after(self.state, cause, None, (self.state,), info)

    # Finally call the method of the base class, to forward changes in ports and outcomes
    super(ContainerStateModel, self).model_changed(model, prop_name, info)
3.712838
3.411078
1.088465
def update_child_models(self, _, name, info):
    """Keep child model lists consistent with the core state

    Always triggered when the state model changes. It keeps the following models/model-lists
    consistent: transition models, data-flow models, state models and scoped variable models.

    :param _: The changed model (unused)
    :param name: The changed property name
    :param info: Information about the change (method name, args, kwargs, result)
    """
    # Update is_start flag in child states if the start state has changed (eventually)
    if info.method_name in ['start_state_id', 'add_transition', 'remove_transition']:
        self.update_child_is_start()

    # Determine which model list / core data list the change refers to
    if info.method_name in ["add_transition", "remove_transition", "transitions"]:
        (model_list, data_list, model_name, model_class, model_key) = self._get_model_info("transition")
    elif info.method_name in ["add_data_flow", "remove_data_flow", "data_flows"]:
        (model_list, data_list, model_name, model_class, model_key) = self._get_model_info("data_flow")
    elif info.method_name in ["add_state", "remove_state", "states"]:
        (model_list, data_list, model_name, model_class, model_key) = self._get_model_info("state", info)
    elif info.method_name in ["add_scoped_variable", "remove_scoped_variable", "scoped_variables"]:
        (model_list, data_list, model_name, model_class, model_key) = self._get_model_info("scoped_variable")
    else:
        return

    if isinstance(info.result, Exception):
        # Do nothing if the observed function raised an exception
        pass
    elif "add" in info.method_name:
        self.add_missing_model(model_list, data_list, model_name, model_class, model_key)
    elif "remove" in info.method_name:
        destroy = info.kwargs.get('destroy', True)
        recursive = info.kwargs.get('recursive', True)
        self.remove_specific_model(model_list, info.result, model_key, recursive, destroy)
    elif info.method_name in ["transitions", "data_flows", "states", "scoped_variables"]:
        # The whole collection was replaced — rebuild the model list from scratch
        self.re_initiate_model_list(model_list, data_list, model_name, model_class, model_key)
2.541975
2.327146
1.092314
def get_scoped_variable_m(self, data_port_id):
    """Return the scoped variable model for the given data port id

    :param data_port_id: The data port id to search for
    :return: The model of the scoped variable with the given id or None if not found
    """
    matches = (m for m in self.scoped_variables if m.scoped_variable.data_port_id == data_port_id)
    return next(matches, None)
2.30497
2.616577
0.88091
def get_data_port_m(self, data_port_id):
    """Search and return the model of a data port of a given state

    The scoped variables are checked first; if no match is found, the lookup is delegated to the
    base state model, which searches the input and output data ports.

    :param data_port_id: The data port id to be searched
    :return: The model of the data port or None if it is not found
    """
    for candidate_m in self.scoped_variables:
        if candidate_m.scoped_variable.data_port_id == data_port_id:
            return candidate_m
    return StateModel.get_data_port_m(self, data_port_id)
3.341879
2.944374
1.135005
def get_transition_m(self, transition_id):
    """Search and return the transition model with the given id in this container state model

    :param transition_id: The transition id to be searched
    :return: The model of the transition or None if it is not found
    """
    matches = (t_m for t_m in self.transitions if t_m.transition.transition_id == transition_id)
    return next(matches, None)
3.007133
3.531174
0.851596
def get_data_flow_m(self, data_flow_id):
    """Search and return the data flow model with the given id in this container state model

    :param data_flow_id: The data flow id to be searched
    :return: The model of the data flow or None if it is not found
    """
    matches = (df_m for df_m in self.data_flows if df_m.data_flow.data_flow_id == data_flow_id)
    return next(matches, None)
2.076664
2.324658
0.89332
def store_meta_data(self, copy_path=None):
    """Store meta data of container states to the filesystem

    Recursively stores meta data of child states. For further insights read the description of
    the also called respective super class method.

    :param str copy_path: Optional copy path if meta data is not stored to the file system path
      of state machine
    """
    super(ContainerStateModel, self).store_meta_data(copy_path)
    for child_state_m in self.states.values():
        child_state_m.store_meta_data(copy_path)
3.82637
3.448628
1.109534
def copy_meta_data_from_state_m(self, source_state_m):
    """Dismiss current meta data and copy meta data from given state model

    In addition to the state model method, also the meta data of container state elements
    (scoped variables, transitions, data flows) is copied. Then, the meta data of child states
    are recursively copied.

    :param source_state_m: State model to load the meta data from
    """
    for sv_m in self.scoped_variables:
        source_sv_m = source_state_m.get_scoped_variable_m(sv_m.scoped_variable.data_port_id)
        sv_m.meta = deepcopy(source_sv_m.meta)
    for t_m in self.transitions:
        source_t_m = source_state_m.get_transition_m(t_m.transition.transition_id)
        t_m.meta = deepcopy(source_t_m.meta)
    for df_m in self.data_flows:
        source_df_m = source_state_m.get_data_flow_m(df_m.data_flow.data_flow_id)
        df_m.meta = deepcopy(source_df_m.meta)
    for child_id, child_state_m in self.states.items():
        child_state_m.copy_meta_data_from_state_m(source_state_m.states[child_id])
    super(ContainerStateModel, self).copy_meta_data_from_state_m(source_state_m)
1.886915
1.897621
0.994358
def _generate_element_meta_data(self, meta_data):
    """Generate meta data for state elements and add it to the given dictionary

    This method retrieves the meta data of the container state elements (transitions, data flows,
    scoped variables) and adds it to the given meta data dictionary.

    :param meta_data: Dictionary of meta data
    """
    super(ContainerStateModel, self)._generate_element_meta_data(meta_data)
    element_groups = [
        (self.transitions, "transition", lambda m: m.transition.transition_id),
        (self.data_flows, "data_flow", lambda m: m.data_flow.data_flow_id),
        (self.scoped_variables, "scoped_variable", lambda m: m.scoped_variable.data_port_id),
    ]
    for element_models, element_name, id_of in element_groups:
        for element_m in element_models:
            self._copy_element_meta_data_to_meta_file_data(meta_data, element_m, element_name,
                                                           id_of(element_m))
2.395169
2.161741
1.107982
def open_folder(query, default_path=None):
    """Show a user dialog for folder selection

    A dialog is opened with the prompt `query`. The current path is set to the last path that was
    opened/created. The roots of all libraries are added to the list of shortcut folders.

    :param str query: Prompt asking the user for a specific folder
    :param str default_path: Path to use if user does not specify one
    :return: Path selected by the user or `default_path` if no path was specified or None if
      none of the paths is valid
    :rtype: str
    """
    from gi.repository import Gtk
    from os.path import expanduser, pathsep, dirname, isdir
    from rafcon.gui.singleton import main_window_controller
    from rafcon.gui.runtime_config import global_runtime_config

    last_path = global_runtime_config.get_config_value('LAST_PATH_OPEN_SAVE', "")
    selected_filename = None
    if last_path and isdir(last_path):
        # NOTE(review): `pathsep` is the path-list separator (':' on POSIX), so this split usually
        # yields the full path, not the last component — `os.sep`/`basename` looks intended; confirm
        selected_filename = last_path.split(pathsep)[-1]
        last_path = dirname(last_path)
    else:
        last_path = expanduser('~')

    dialog = Gtk.FileChooserDialog(query, None, Gtk.FileChooserAction.SELECT_FOLDER,
                                   (Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL,
                                    Gtk.STOCK_OPEN, Gtk.ResponseType.OK))
    # Allows confirming with Enter and double-click
    dialog.set_default_response(Gtk.ResponseType.OK)
    if main_window_controller:
        dialog.set_transient_for(main_window_controller.view.get_top_widget())
    dialog.set_current_folder(last_path)
    if selected_filename is not None:
        dialog.select_filename(selected_filename)
    dialog.set_show_hidden(False)

    # Add library roots to list of shortcut folders
    add_library_root_path_to_shortcut_folders_of_dialog(dialog)

    response = dialog.run()
    if response != Gtk.ResponseType.OK:
        dialog.destroy()
        # Dialog was cancelled: fall back to the default path, if it is a valid directory
        if default_path and os.path.isdir(default_path):
            return default_path
        return None
    path = dialog.get_filename()
    dialog.destroy()
    if os.path.isdir(path):
        # Remember the chosen folder for the next dialog invocation
        global_runtime_config.set_config_value('LAST_PATH_OPEN_SAVE', path)
        return path
    return None
2.950729
2.805068
1.051928
def create_folder(query, default_name=None, default_path=None):
    """Show a user dialog for folder creation

    A dialog is opened with the prompt `query`. The current path is set to the last path that was
    opened/created. The roots of all libraries are added to the list of shortcut folders.

    :param str query: Prompt asking the user for a specific folder
    :param str default_name: Default name of the folder to be created
    :param str default_path: Path in which the folder is created if the user doesn't specify one
    :return: Path created by the user or `default_path`\\`default_name` if no path was specified
      or None if none of the paths is valid
    :rtype: str
    """
    from gi.repository import Gtk
    from os.path import expanduser, dirname, join, exists, isdir
    from rafcon.core.storage.storage import STATEMACHINE_FILE
    from rafcon.gui.singleton import main_window_controller
    from rafcon.gui.runtime_config import global_runtime_config

    last_path = global_runtime_config.get_config_value('LAST_PATH_OPEN_SAVE', "")
    # Keep the last path only if it is a directory that is not itself a state machine directory
    if last_path and isdir(last_path) and not exists(join(last_path, STATEMACHINE_FILE)):
        pass
    elif last_path:
        last_path = dirname(last_path)
    else:
        last_path = expanduser('~')

    dialog = Gtk.FileChooserDialog(query, None, Gtk.FileChooserAction.CREATE_FOLDER,
                                   (Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL,
                                    Gtk.STOCK_SAVE, Gtk.ResponseType.OK))
    # Allows confirming with Enter and double-click
    dialog.set_default_response(Gtk.ResponseType.OK)
    if main_window_controller:
        dialog.set_transient_for(main_window_controller.view.get_top_widget())
    dialog.set_current_folder(last_path)
    if default_name:
        dialog.set_current_name(default_name)
    dialog.set_show_hidden(False)

    # Add library roots to list of shortcut folders
    add_library_root_path_to_shortcut_folders_of_dialog(dialog)

    response = dialog.run()
    if response != Gtk.ResponseType.OK:
        dialog.destroy()
        # Dialog cancelled: fall back to default_path/default_name, if it exists as a directory
        if default_path and default_name:
            default = os.path.join(default_path, default_name)
            if os.path.isdir(default):
                return default
        return None
    path = dialog.get_filename()
    dialog.destroy()
    if os.path.isdir(path):
        # Remember the chosen folder for the next dialog invocation
        global_runtime_config.set_config_value('LAST_PATH_OPEN_SAVE', path)
        return path
    return None
3.006036
2.807685
1.070646
def add_state_machine(widget, event=None):
    """Create a new state-machine when the user clicks on the '+' next to the tabs

    :param widget: The widget that emitted the signal (unused)
    :param event: The event that triggered the signal (unused)
    """
    logger.debug("Creating new state-machine...")
    new_state_machine = StateMachine(HierarchyState("new root state"))
    rafcon.core.singleton.state_machine_manager.add_state_machine(new_state_machine)
6.786823
6.065489
1.118924
def register_view(self, view):
    """Called when the View was registered

    Connects the page-switch signal and creates an editor tab for every already open state machine.
    """
    super(StateMachinesEditorController, self).register_view(view)
    self.view['notebook'].connect('switch-page', self.on_switch_page)
    # Add all already open state machines
    for state_machine_m in self.model.state_machines.values():
        self.add_graphical_state_machine_editor(state_machine_m)
5.390501
5.434597
0.991886
def register_actions(self, shortcut_manager):
    """Register callback methods for triggered actions

    :param rafcon.gui.shortcut_manager.ShortcutManager shortcut_manager: Shortcut Manager Object
      holding mappings between shortcuts and actions.
    """
    shortcut_manager.add_callback_for_action('close', self.on_close_shortcut)
    # Call register_action of parent in order to register actions for child controllers
    super(StateMachinesEditorController, self).register_actions(shortcut_manager)
9.454352
8.433776
1.121011
def close_state_machine(self, widget, page_number, event=None):
    """Triggered when the close button in the tab is clicked

    Looks up the state machine belonging to the tab page and triggers the close handler for it.
    """
    page_to_close = widget.get_nth_page(page_number)
    for tab_info in self.tabs.values():
        if tab_info['page'] is not page_to_close:
            continue
        self.on_close_clicked(event, tab_info['state_machine_m'], None, force=False)
        return
4.354547
4.051946
1.07468
def on_close_shortcut(self, *args):
    """Close the currently selected state machine (triggered by shortcut)"""
    selected_sm_m = self.model.get_selected_state_machine_model()
    if selected_sm_m is not None:
        self.on_close_clicked(None, selected_sm_m, None, force=False)
5.831435
3.825398
1.5244
def add_graphical_state_machine_editor(self, state_machine_m):
    """Add a tab with a graphical editor for a new state machine

    If a new state machine was added, a new tab is created with a graphical editor for this state
    machine.

    :param StateMachineModel state_machine_m: The new state machine model
    """
    assert isinstance(state_machine_m, StateMachineModel)
    sm_id = state_machine_m.state_machine.state_machine_id
    logger.debug("Create new graphical editor for state machine with id %s" % str(sm_id))

    # Create view + controller pair for the graphical editor and register the controller
    graphical_editor_view = GraphicalEditorGaphasView(state_machine_m)
    graphical_editor_ctrl = GraphicalEditorGaphasController(state_machine_m, graphical_editor_view)
    self.add_controller(sm_id, graphical_editor_ctrl)

    # Build the tab header with close button and context-menu handler
    tab, tab_label = create_tab_header('', self.on_close_clicked, self.on_mouse_right_click,
                                       state_machine_m, 'refused')
    set_tab_label_texts(tab_label, state_machine_m, state_machine_m.state_machine.marked_dirty)
    page = graphical_editor_view['main_frame']
    self.view.notebook.append_page(page, tab)
    self.view.notebook.set_tab_reorderable(page, True)
    page.show_all()

    # Bookkeeping used e.g. to decide when the tab label must be refreshed
    self.tabs[sm_id] = {'page': page,
                        'state_machine_m': state_machine_m,
                        'file_system_path': state_machine_m.state_machine.file_system_path,
                        'marked_dirty': state_machine_m.state_machine.marked_dirty,
                        'root_state_name': state_machine_m.state_machine.root_state.name}
    self.observe_model(state_machine_m)
    graphical_editor_view.show()
    self.view.notebook.show()
    self.last_focused_state_machine_ids.append(sm_id)
3.534846
3.566535
0.991115
def notification_selected_sm_changed(self, model, prop_name, info):
    """If a new state machine is selected, make sure its tab is open

    :param model: The observed model
    :param prop_name: The changed property
    :param info: Notification info
    """
    selected_state_machine_id = self.model.selected_state_machine_id
    if selected_state_machine_id is None:
        return

    page_id = self.get_page_num(selected_state_machine_id)

    # to retrieve the current tab colors
    number_of_pages = self.view["notebook"].get_n_pages()
    old_label_colors = list(range(number_of_pages))
    for p in range(number_of_pages):
        page = self.view["notebook"].get_nth_page(p)
        label = self.view["notebook"].get_tab_label(page).get_child().get_children()[0]
        # old_label_colors[p] = label.get_style().fg[Gtk.StateType.NORMAL]
        old_label_colors[p] = label.get_style_context().get_color(Gtk.StateType.NORMAL)

    # Switch to the tab of the selected state machine, if it is not already shown
    if not self.view.notebook.get_current_page() == page_id:
        self.view.notebook.set_current_page(page_id)

    # set the old colors
    for p in range(number_of_pages):
        page = self.view["notebook"].get_nth_page(p)
        label = self.view["notebook"].get_tab_label(page).get_child().get_children()[0]
        # Gtk TODO
        # NOTE(review): restoring the colors is unfinished — `style` is obtained but
        # `old_label_colors[p]` is never applied to it; confirm whether this is still needed
        style = label.get_style_context()
2.683777
2.567708
1.045203
def update_state_machine_tab_label(self, state_machine_m):
    """Update the tab label if system path, root state name or marked_dirty flag changed

    :param StateMachineModel state_machine_m: State machine model that has changed
    :return:
    """
    sm_id = state_machine_m.state_machine.state_machine_id
    if sm_id not in self.tabs:
        logger.warning("State machine '{0}' tab label can not be updated there is no tab.".format(sm_id))
        return
    sm = state_machine_m.state_machine
    tab_info = self.tabs[sm_id]
    # Rebuild the tab label only if one of the displayed properties is out of date
    label_outdated = (tab_info['marked_dirty'] != sm.marked_dirty or
                      tab_info['file_system_path'] != sm.file_system_path or
                      tab_info['root_state_name'] != sm.root_state.name)
    if label_outdated:
        label = self.view["notebook"].get_tab_label(tab_info["page"]).get_child().get_children()[0]
        set_tab_label_texts(label, state_machine_m, unsaved_changes=sm.marked_dirty)
        tab_info['file_system_path'] = sm.file_system_path
        tab_info['marked_dirty'] = sm.marked_dirty
        tab_info['root_state_name'] = sm.root_state.name
3.183643
2.914675
1.092281