code
stringlengths
51
2.34k
docstring
stringlengths
11
171
def meta_changed(self, model, prop_name, info): self.state_machine.marked_dirty = True msg = info.arg if model is not self and msg.change.startswith('sm_notification_'): msg = msg._replace(change=msg.change.replace('sm_notification_', '', 1)) self.state_meta_signal.emit(m...
When the meta was changed, we have to set the dirty flag, as the changes are unsaved
def cli(ctx, *args, **kwargs): backend = kwargs.get('backend', None) model = kwargs.get('model', None) printer = kwargs.get('printer', None) debug = kwargs.get('debug') ctx.meta['MODEL'] = model ctx.meta['BACKEND'] = backend ctx.meta['PRINTER'] = printer logging.basicConfig(level='DEBUG'...
Command line interface for the brother_ql Python package.
def chainproperty(func):
    """Extend sure with a custom chain property.

    Wraps ``func`` with ``assertionproperty`` and attaches the resulting
    property to ``AssertionBuilder`` under the original function's name.
    Returns the wrapped property so it can also be used directly.
    """
    prop = assertionproperty(func)
    setattr(AssertionBuilder, prop.fget.__name__, prop)
    return prop
Extend sure with a custom chain property.
def list_jobs(tail):
    """Show info about the existing crawler jobs.

    Jobs are listed newest first; ``tail`` limits the number of rows
    (0 means no limit).
    """
    query = db.session.query(models.CrawlerJob).order_by(
        models.CrawlerJob.id.desc()
    )
    # A tail of exactly 0 means "show everything"; any other value limits.
    if tail != 0:
        query = query.limit(tail)
    _show_table(results=query.yield_per(10).all())
Show info about the existing crawler jobs.
def extract_file_args(subparsers): extract_parser = subparsers.add_parser('extract_file', help='Extract a single secret from' 'Vault to a local file') extract_parser.add_argument('vault_path', h...
Add the command line options for the extract_file operation
def _ppid(): ret = {} if __grains__['kernel'] == 'SunOS': cmd = 'ps -a -o pid,ppid | tail +2' else: cmd = 'ps -ax -o pid,ppid | tail -n+2' out = __salt__['cmd.run'](cmd, python_shell=True) for line in out.splitlines(): pid, ppid = line.split() ret[pid] = ppid retu...
Return a dict of pid to ppid mappings
def cli(env): manager = CapacityManager(env.client) items = manager.get_create_options() items.sort(key=lambda term: int(term['capacity'])) table = formatting.Table(["KeyName", "Description", "Term", "Default Hourly Price Per Instance"], title="Reserved Capacity Options") ...
List options for creating Reserved Capacity
def resolve_attribute(name, bases, default=None):
    """Find the first definition of an attribute according to MRO order.

    Scans ``bases`` in order and returns the first base's value for
    ``name``; falls back to ``default`` when no base defines it.
    """
    found = (getattr(base, name) for base in bases if hasattr(base, name))
    return next(found, default)
Find the first definition of an attribute according to MRO order.
def delete(self, key, sort_key): primary_key = key key = self.prefixed('{}:{}'.format(key, sort_key)) self.logger.debug('Storage - delete {}'.format(key)) if sort_key is not None: self.cache[self.prefixed(primary_key)].remove(sort_key) for index in self._secondary_ind...
Delete an element in dictionary
def add_xml_to_node(self, node): node.tag = self.xml_element_name() node.set('xblock-family', self.entry_point) for field_name, field in self.fields.items(): if field_name in ('children', 'parent', 'content'): continue if field.is_set_on(self) or field.for...
For exporting, set data on `node` from ourselves.
def _update_dPrxy(self): super(ExpCM_empirical_phi_divpressure, self)._update_dPrxy() if 'omega2' in self.freeparams: with scipy.errstate(divide='raise', under='raise', over='raise', invalid='ignore'): scipy.copyto(self.dPrxy['omega2'], -self.ln_pi...
Update `dPrxy`, accounting for dependence of `Prxy` on `omega2`.
def run(): parser = OptionParser(version='%prog {0}'.format(__version__)) parser.add_option('-a', '--addr', default='localhost', help=('The address or host to listen on. Specify -a ' '0.0.0.0 to listen on all addresses. ' 'Default: lo...
The entry point from the praw-multiprocess utility.
def updateRPYText(self):
    """Update the displayed Roll, Pitch and Yaw text labels."""
    labels = (
        (self.rollText, 'Roll: %.2f' % self.roll),
        (self.pitchText, 'Pitch: %.2f' % self.pitch),
        (self.yawText, 'Yaw: %.2f' % self.yaw),
    )
    for widget, text in labels:
        widget.set_text(text)
Updates the displayed Roll, Pitch, Yaw Text
def update(self, a, b, c, d):
    """Update the contingency table in place with new cell values.

    Writes the four cells (row-major) and refreshes the total count ``N``
    without allocating a new table.
    """
    cells = self.table.ravel()
    cells[0], cells[1], cells[2], cells[3] = a, b, c, d
    self.N = self.table.sum()
Update contingency table with new values without creating a new object.
def download_manylinux_wheels(self, abi, packages, directory): for package in packages: arguments = ['--only-binary=:all:', '--no-deps', '--platform', 'manylinux1_x86_64', '--implementation', 'cp', '--abi', abi, '--dest', directory, package] ...
Download wheel files for manylinux for all the given packages.
def baremetal(self): if self._baremetal is not None: return self._baremetal endpoint = self._instance.get_endpoint_for_service_type( "baremetal", region_name=self._instance._region_name, ) token = self._instance.auth.get_token(self._instance.session) ...
Returns a baremetal service client
def H13(self):
    """Information measure of correlation 2."""
    exponent = -2 * (self.hxy2 - self.H9())
    return np.sqrt(1 - np.exp(exponent))
Information measure of correlation 2.
def via_dom_id(self, dom_id, det_id):
    """Return the DOM matching ``dom_id`` and ``det_id``.

    Logs a critical message (and implicitly returns None) when no
    matching entry exists in the cached JSON data.
    """
    matches = [
        entry for entry in self._json
        if entry["DOMId"] == dom_id and entry["DetOID"] == det_id
    ]
    try:
        return DOM.from_json(matches[0])
    except IndexError:
        log.critical("No DOM found for DOM ID '{0}'".format(dom_id))
Return DOM for given dom_id
def find_write_contribs() -> None: map_file_auth = {} for filename in scantree('cltk'): filepath = filename.path authors_list = get_authors(filepath) if authors_list: map_file_auth[filepath] = authors_list map_auth_file = defaultdict(list) for file, authors_file in ma...
Look for files, find authors, sort, write file.
def keys(self):
    """Return a list of all app_names."""
    return [app_name for app_name, _unused in self.items()]
return a list of all app_names
def list_cands(candsfile, threshold=0.): loc, prop, d0 = pc.read_candidates(candsfile, snrmin=threshold, returnstate=True) if 'snr2' in d0['features']: snrcol = d0['features'].index('snr2') elif 'snr1' in d0['features']: snrcol = d0['features'].index('snr1') dmindcol = d0['featureind'].i...
Prints candidate info in time order above some threshold
def delayed_redraw(self):
    """Handle delayed redrawing of the canvas.

    Atomically consumes the deferred-redraw state under the lock, then
    performs the redraw outside the lock if one was pending.
    """
    with self._defer_lock:
        pending = self._defer_flag
        self._defer_flag = False
        whence = self._defer_whence
        self._defer_whence = self._defer_whence_reset
    if pending:
        self.redraw_now(whence=whence)
Handle delayed redrawing of the canvas.
def stop(self): if getattr(self, "_jsc", None): try: self._jsc.stop() except Py4JError: warnings.warn( 'Unable to cleanly shutdown Spark JVM process.' ' It is possible that the process has crashed,' ...
Shut down the SparkContext.
def manager(self): from flask_script import Manager, Command manager = Manager(usage="Migrate database.") manager.add_command('create', Command(self.cmd_create)) manager.add_command('migrate', Command(self.cmd_migrate)) manager.add_command('rollback', Command(self.cmd_rollback)) ...
Integrate a Flask-Script.
def _get_fba_problem(model, tfba, solver):
    """Return the appropriate FBA problem instance.

    Builds a ``FluxBalanceProblem`` and adds thermodynamic constraints
    when ``tfba`` is truthy.
    """
    problem = FluxBalanceProblem(model, solver)
    if tfba:
        problem.add_thermodynamic()
    return problem
Convenience function for returning the right FBA problem instance
def load_from_file(cls, file_path: str):
    """Read a JSON file and reconstruct the object via ``cls.decode``."""
    with open(file_path, "r") as handle:
        payload = json.load(handle)
    return cls.decode(data=payload)
Read and reconstruct the data from a JSON file.
def reset_everything(self, payload):
    """Kill all processes, delete the queue and clean everything up.

    Sends SIGKILL to every managed process, waits for them to finish and
    marks the daemon for reset. Returns a success answer dict.
    """
    self.process_handler.kill_all(signals['9'], True)
    self.process_handler.wait_for_finish()
    self.reset = True
    return {'message': 'Resetting current queue', 'status': 'success'}
Kill all processes, delete the queue and clean everything up.
def create_entity(project_id, entity_type_id, entity_value, synonyms): import dialogflow_v2 as dialogflow entity_types_client = dialogflow.EntityTypesClient() synonyms = synonyms or [entity_value] entity_type_path = entity_types_client.entity_type_path( project_id, entity_type_id) entity = d...
Create an entity of the given entity type.
def OnNodeSelected(self, event): try: node = self.sorted[event.GetIndex()] except IndexError, err: log.warn(_('Invalid index in node selected: %(index)s'), index=event.GetIndex()) else: if node is not self.selected_node: wx...
We have selected a node with the list control, tell the world
def f(field: str, kwargs: Dict[str, Any], default: Optional[Any] = None) -> str:
    """Alias for more readable command construction.

    Returns ``kwargs[field]`` as a string; when ``default`` is given,
    missing keys fall back to it instead of raising ``KeyError``.
    """
    if default is None:
        # No fallback: missing key raises KeyError, as callers expect.
        return str(kwargs[field])
    return str(kwargs.get(field, default))
Alias for more readable command construction
def noargs(self):
    "Returns True if the callable takes no arguments"
    # Compares self.argspec against an ArgSpec with no args, no *varargs,
    # no **keywords and no defaults.
    # NOTE(review): inspect.ArgSpec was deprecated in Python 3.0 and
    # removed in Python 3.11; self.argspec is presumably built elsewhere
    # with the matching legacy inspect.getargspec() -- confirm before
    # migrating to inspect.getfullargspec()/FullArgSpec.
    noargs = inspect.ArgSpec(args=[], varargs=None, keywords=None, defaults=None)
    return self.argspec == noargs
Returns True if the callable takes no arguments
def encode_batch(self, inputBatch):
    """Encode a whole batch of input arrays, without learning.

    Applies ``self.encode`` to every sample and stacks the results
    into a numpy array.
    """
    return np.array([self.encode(sample) for sample in inputBatch])
Encodes a whole batch of input arrays, without learning.
def list_scanners(zap_helper, scanners): scanner_list = zap_helper.zap.ascan.scanners() if scanners is not None and 'all' not in scanners: scanner_list = filter_by_ids(scanner_list, scanners) click.echo(tabulate([[s['id'], s['name'], s['policyId'], s['enabled'], s['attackStrength'], s['alertThreshol...
Get a list of scanners and whether or not they are enabled.
def create_templates(self, templates): count = 0 for template in templates: if not self.template_exists_db(template): name, location, description, language = template text = self.open_file(location) html_content = self.get_html_content(text) ...
Gets a list of templates to insert into the database
def to_task(self): from google.appengine.api.taskqueue import Task from google.appengine.api.taskqueue import TaskRetryOptions self._increment_recursion_level() self.check_recursion_depth() url = "%s/%s" % (ASYNC_ENDPOINT, self.function_path) kwargs = { 'url':...
Return a task object representing this async job.
def xmlrpc_task_done(self, result):
    """Record the results of a finished computation.

    Removes the task from the scheduled set, stores its results and
    increments the completed-results counter. Always returns True.
    """
    task_id, task_results = result
    self.scheduled_tasks.pop(task_id)
    self.task_store.update_results(task_id, task_results)
    self.results += 1
    return True
Take the results of a computation and put it into the results list.
def group_comments_by_round(comments, ranking=0): comment_rounds = {} ordered_comment_round_names = [] for comment in comments: comment_round_name = ranking and comment[11] or comment[7] if comment_round_name not in comment_rounds: comment_rounds[comment_round_name] = [] ...
Group comments by the round to which they belong
def subtract_weeks(self, weeks: int) -> datetime:
    """Subtract ``weeks`` weeks from the current value and return it."""
    self.value -= timedelta(weeks=weeks)
    return self.value
Subtracts number of weeks from the current value
def unindent(self, lines):
    """Remove any indentation that is common to all of the given lines.

    Also strips trailing whitespace from each line.
    """
    common = min(
        len(self.re.match(r'^ *', line).group()) for line in lines
    )
    return [line[common:].rstrip() for line in lines]
Removes any indentation that is common to all of the given lines.
def read_files(filenames):
    """Read one or more files into memory.

    Args:
        filenames: a single path, or a list of paths.

    Returns:
        The file contents as a single string; for a list of paths the
        contents are concatenated in order.
    """
    # Bug fix: the original returned inside the loop, so only the FIRST
    # file of a list was ever read and the loop was dead code.
    if isinstance(filenames, list):
        contents = []
        for filename in filenames:
            with open(filename, 'r') as infile:
                contents.append(infile.read())
        return ''.join(contents)
    with open(filenames, 'r') as infile:
        return infile.read()
Read a file into memory.
def cree_widgets(self): for t in self.FIELDS: if type(t) is str: attr, kwargs = t, {} else: attr, kwargs = t[0], t[1].copy() self.champs.append(attr) is_editable = kwargs.pop("is_editable", self.is_editable) args = [self...
Create widgets and store them in self.widgets
def clear(self, results=True, errors=True):
    """Clear the results and/or errors lists, per the given flags."""
    for flag, attr in ((results, 'results'), (errors, 'errors')):
        if flag:
            setattr(self, attr, [])
Clears results and errors lists.
def _get_bounds(mapper, values): array = np.array([mapper.get(x) for x in values]) return array[:, 0], array[:, 1]
Extract first and second value from tuples of mapped bins.
def image_create(cmptparms, cspace):
    """Wrapper for openjpeg library function opj_image_create."""
    # Declare the C signature before calling through ctypes.
    argtypes = [ctypes.c_int, ctypes.POINTER(ImageComptParmType), ctypes.c_int]
    OPENJPEG.opj_image_create.argtypes = argtypes
    OPENJPEG.opj_image_create.restype = ctypes.POINTER(ImageType)
    return OPENJPEG.opj_image_create(len(cmptparms), cmptparms, cspace)
Wrapper for openjpeg library function opj_image_create.
def _smallest_buffer(self): smallest = np.inf for buffer in self.buffers: if buffer is None: return 0 elif buffer.shape[0] < smallest: smallest = buffer.shape[0] return smallest
Get the size of the smallest buffer.
def retry(default=None): def decorator(func): @functools.wraps(func) def _wrapper(*args, **kw): for pos in range(1, MAX_RETRIES): try: return func(*args, **kw) except (RuntimeError, requests.ConnectionError) as error: ...
Retry functions after failures
def check(self, profile_data, training_metadata=[]): data = { "profile_json": _validate_dict(profile_data, "profile_data"), "training_metadata": _validate_training_metadata(training_metadata), } response = self.client.post("profile/json/check", data=data) return r...
Use the API to check whether the profile_data are valid.
def run(self, refresh_interval=0.05): try: from asciimatics.screen import Screen except ImportError: raise ExternalError("You must have asciimatics installed to use LinebufferUI", suggestion="pip install iotilecore[ui]") Screen.wrapper(self...
Set up the loop, check that the tool is installed
def subscriptions(self):
    """Return a list of the active COV subscriptions.

    Flattens the subscriptions of every COV detection currently tracked.
    """
    if _debug:
        ChangeOfValueServices._debug("subscriptions")
    return [
        cov
        for _obj, detection in self.cov_detections.items()
        for cov in detection.cov_subscriptions
    ]
Generator for the active subscriptions.
def run(m, w, trace=False, steps=1000, show_stack=3): is_pda = True stack = None if not m.oneway: is_pda = False for s in range(m.num_stores): if s == m.input: pass elif m.has_cell(s): pass elif m.has_stack(s): if stack is None: ...
Runs an automaton, automatically selecting a search method.
def users(accountable, query): users = accountable.users(query) headers = ['display_name', 'key'] if users: rows = [[v for k, v in sorted(u.items()) if k in headers] for u in users] rows.insert(0, headers) print_table(SingleTable(rows)) else: click.secho('...
Executes a user search for the given query.
def run(self):
    """Take tracker snapshots periodically until a stop signal is set."""
    self.stop = False
    while True:
        if self.stop:
            break
        self.tracker.create_snapshot()
        sleep(self.interval)
Loop until a stop signal is set.
def _generate_username(self):
    """Generate a unique username.

    Draws random UUID-based candidates (30 hex chars) until one is not
    already taken in the database.
    """
    while True:
        # uuid4().hex is the dashless hex form; drop the last two chars
        # to match the original 30-character usernames.
        candidate = uuid.uuid4().hex[:-2]
        try:
            User.objects.get(username=candidate)
        except User.DoesNotExist:
            return candidate
Generate a unique username
def atanh(x, context=None):
    """Return the inverse hyperbolic tangent of x."""
    operand = BigFloat._implicit_convert(x)
    return _apply_function_in_current_context(
        BigFloat,
        mpfr.mpfr_atanh,
        (operand,),
        context,
    )
Return the inverse hyperbolic tangent of x.
def _post_data(options=None, xml=None): params = {'token': options['token'].strip(), 'cmd': 'submitcheck', 'XMLDATA': xml} res = salt.utils.http.query( url=options['url'], method='POST', params=params, data='', decode=True, status=True, header_dict={}, ...
Post data to Nagios NRDP
def extend(self, ampal_container):
    """Extend this `AmpalContainer` with another `AmpalContainer`.

    Raises:
        TypeError: if ``ampal_container`` is not an AmpalContainer.
    """
    # Guard clause: reject anything that is not an AmpalContainer.
    if not isinstance(ampal_container, AmpalContainer):
        raise TypeError(
            'Only AmpalContainer objects may be merged with '
            'an AmpalContainer.')
    self._ampal_objects.extend(ampal_container)
    return
Extends an `AmpalContainer` with another `AmpalContainer`.
def getBody(cls, url, method='GET', headers=None, data=None, socket=None, timeout=120):
    """Make an HTTP request and return the body.

    Args:
        cls: the checker class; an instance is created to perform the request.
        url: target URL.
        method: HTTP method (default 'GET').
        headers: optional header dict; a default User-Agent is injected
            when absent.
        data: optional request body.
        socket: optional socket to use.
        timeout: request timeout in seconds.
    """
    # Bug fix: ``headers={}`` was a shared mutable default -- the injected
    # User-Agent leaked into (and was shared by) every subsequent call.
    if headers is None:
        headers = {}
    if 'User-Agent' not in headers:
        headers['User-Agent'] = ['Tensor HTTP checker']
    return cls().request(url, method, headers, data, socket, timeout)
Make an HTTP request and return the body
def pull_en_words() -> None:
    """Fetch a repository containing English words (if not already present)."""
    ENGLISH_WORDS_URL = "https://github.com/dwyl/english-words.git"
    words_file = Path(config.EN_WORDS_PATH)
    # Guard clause: nothing to do if the word list already exists.
    if words_file.is_file():
        return
    subprocess.run(
        ["git", "clone", ENGLISH_WORDS_URL, str(words_file.parent)])
Fetches a repository containing English words.
def save_state(state, output_dir, keep=False): params_file = os.path.join(output_dir, "model.pkl") with gfile.GFile(params_file, "wb") as f: pickle.dump((state.params, state.step, state.history), f) if keep: params_file = os.path.join(output_dir, "model_{}.pkl".format(state.step)) with gfile.GFile(par...
Save State and optionally gin config.
def rouge_2(hypotheses, references):
    """Calculate the corpus-mean ROUGE-2 F1 score.

    Scores each hypothesis/reference pair with ``rouge_n`` (n=2) and
    averages the F1 component across pairs.
    """
    pair_scores = [
        rouge_n([hyp], [ref], 2)
        for hyp, ref in zip(hypotheses, references)
    ]
    f1_mean, _precision_mean, _recall_mean = map(np.mean, zip(*pair_scores))
    return f1_mean
Calculate ROUGE-2 F1, precision, recall scores
def add_data(self, request, pk=None):
    """Add data to the Entity and to each of its collections."""
    response = super().add_data(request, pk)
    entity = self.get_object()
    for collection in entity.collections.all():
        collection.data.add(*request.data['ids'])
    return response
Add data to the Entity and its collections.
def start(self): def _heartbeat(): if not self._client.lifecycle.is_live: return self._heartbeat() self._heartbeat_timer = self._client.reactor.add_timer(self._heartbeat_interval, _heartbeat) self._heartbeat_timer = self._client.reactor.add_timer(self....
Starts sending periodic HeartBeat operations.
def api_key(self): if not self._api_key: error_msg = ( f"Email is enabled but API_KEY is not set. " f"See settings.{self.api_key_attr}" ) try: self._api_key = getattr(settings, self.api_key_attr) except AttributeErro...
Returns the api_key or None.
def next_frame_l1():
    """Basic conv model with L1 modality."""
    hp = next_frame_basic_deterministic()
    # Swap both the loss and the top for their L1 variants.
    hp.loss["targets"] = modalities.video_l1_loss
    hp.top["targets"] = modalities.video_l1_top
    hp.video_modality_loss_cutoff = 2.4
    return hp
Basic conv model with L1 modality.
def random_uniform(attrs, inputs, proto_obj): try: from onnx.mapping import TENSOR_TYPE_TO_NP_TYPE except ImportError: raise ImportError("Onnx and protobuf need to be installed. " "Instructions to install - https://github.com/onnx/onnx") new_attrs = translation_util...
Draw random samples from a uniform distribution.
def mksls(src, dst=None): with salt.utils.files.fopen(src, 'r') as fh_: ps_opts = xml.to_dict(ET.fromstring(fh_.read())) if dst is not None: with salt.utils.files.fopen(dst, 'w') as fh_: salt.utils.yaml.safe_dump(ps_opts, fh_, default_flow_style=False) else: return salt.u...
Convert an AutoYAST file to an SLS file
def gen_undef():
    """Return an UNDEF instruction with all operands empty."""
    empty = ReilEmptyOperand()
    return ReilBuilder.build(ReilMnemonic.UNDEF, empty, empty, empty)
Return an UNDEF instruction.
def counter_mean_and_median(counter): if not counter: return np.nan, np.nan total = sum(v for k, v in counter.items()) mid = total / 2 weighted_sum = 0 items_seen = 0 median_found = False for k, v in sorted(counter.items()): weighted_sum += k * v items_seen += v ...
Calculate the mean and median value of a counter
def get(self, id, depth=3, schema=None): uri = URIRef(id) if schema is None: for o in self.graph.objects(subject=uri, predicate=RDF.type): schema = self.parent.get_schema(str(o)) if schema is not None: break else: schema...
Construct a single object based on its ID.
def ret_pcre_minions(self):
    """Return minions whose IDs match ``self.tgt`` as a PCRE pattern."""
    pattern = re.compile(self.tgt)
    matcher = functools.partial(filter, pattern.match)
    return self._ret_minions(matcher)
Return minions that match via pcre
def register(model, admin=None, category=None):
    """Decorator for registering an Admin class for ``model``.

    Optionally also registers the model in a dashboard block when
    ``category`` is given. ``admin`` is accepted for interface
    compatibility but unused.
    """
    def decorator(admin_class):
        site.register(model, admin_class=admin_class)
        if category:
            site.register_block(model, category)
        return admin_class
    return decorator
Decorator for registering your Admin class.
def _trim_and_decode(ids, subtokenizer):
    """Trim EOS and everything after from ids, then decode to a string.

    Falls back to decoding the full sequence when no EOS is present
    (``list.index`` raises ValueError).
    """
    try:
        eos_index = list(ids).index(tokenizer.EOS_ID)
        return subtokenizer.decode(ids[:eos_index])
    except ValueError:
        # No EOS token found; decode everything.
        return subtokenizer.decode(ids)
Trim EOS and PAD tokens from ids, and decode to return a string.
def prepare(self):
    """Prepare this graphic item to be displayed by adding its caption."""
    caption = self.property('caption')
    # Guard clause: nothing to add when there is no caption text.
    if not caption:
        return
    width = int(self.property('caption_width', 0))
    self.addText(caption, width)
Prepares this graphic item to be displayed.
def parse_geometry(geometry, ratio=None):
    """Enhanced geometry parser with percentage support.

    A value like ``"50%"`` is returned as the fraction 0.5; anything
    else is delegated to ``xy_geometry_parser``.
    """
    if "%" in geometry:
        return float(geometry.strip("%")) / 100.0
    return xy_geometry_parser(geometry, ratio)
Enhanced parse_geometry parser with percentage support.
def string(v): if nodesetp(v): if not v: return u'' return string_value(v[0]) elif numberp(v): if v == float('inf'): return u'Infinity' elif v == float('-inf'): return u'-Infinity' elif str(v) == 'nan': return u'NaN' ...
Convert a value to a string.
def x10_command_type(command):
    """Return the X10 command type from an X10 command.

    All-units/all-lights commands are broadcasts; everything else is a
    direct command.
    """
    broadcast_commands = (
        X10_COMMAND_ALL_UNITS_OFF,
        X10_COMMAND_ALL_LIGHTS_ON,
        X10_COMMAND_ALL_LIGHTS_OFF,
    )
    if command in broadcast_commands:
        return X10CommandType.BROADCAST
    return X10CommandType.DIRECT
Return the X10 command type from an X10 command.
def _update_project_watch(config, task_presenter, results, long_description, tutorial): logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(message)s', datefmt='%Y-%m-%d %H:%M:%S') path = os.getcwd() event_handler = PbsHan...
Update a project in a loop.
def remove_dependent_cols(M, tol=1e-6, display=False): R = la.qr(M, mode='r')[0][:M.shape[1], :] I = (abs(R.diagonal())>tol) if sp.any(~I) and display: print(('cols ' + str(sp.where(~I)[0]) + ' have been removed because linearly dependent on the others')) R = M[:,I] else:...
Returns a matrix where dependent columns have been removed
def delete(self):
    """Remove the activation from the agenda.

    Raises:
        CLIPSError: if the underlying environment call does not succeed.
    """
    succeeded = lib.EnvDeleteActivation(self._env, self._act) == 1
    if not succeeded:
        raise CLIPSError(self._env)
    # Detach from the environment once the activation is gone.
    self._env = None
Remove the activation from the agenda.
def dump(self): id = self.get("id") if not id: id = "(none)" else: id = id[0] parent = self.get("parent") if not parent: parent = "(none)" else: parent = parent[0] print "'%s'" % id print "Parent project:%s",...
Prints the project attributes.
def training_loop(self): if not self.restarting: self._write_counters(self._local_step_at_start, self._global_step) tf.logging.info( "Training %s up to %d, %d to go", self.model_mode, self.target_local_step, self.steps_to_go ) yield self._write_counters(self.target_local_step, ...
Context manager wrapping the training loop, updates step counters.
def content_type(self, data):
    """Set the Content-Type header value for this request."""
    value = str(data)
    self._content_type = value
    self.add_header('Content-Type', value)
The Content-Type header value for this request.
def delete_model(self, meta: dict): bucket = self.connect() if bucket is None: raise BackendRequiredError blob_name = "models/%s/%s.asdf" % (meta["model"], meta["uuid"]) self._log.info(blob_name) try: self._log.info("Deleting model ...") bucket...
Delete the model from GCS.
def run_migration(connection, queries, engine):
    """Apply a migration (a batch of SQL statements) to the SQL server."""
    with connection.cursor() as cursor:
        for statement in parse_statements(queries, engine):
            cursor.execute(statement)
        connection.commit()
    return True
Apply a migration to the SQL server
def __add_stack(self, span, limit=None): span.stack = [] frame_count = 0 tb = traceback.extract_stack() tb.reverse() for frame in tb: if limit is not None and frame_count >= limit: break if "INSTANA_DEV" not in os.environ: i...
Adds a backtrace to this span
def listThirdPartyLibs(self, configuration = 'Development'):
    """List the supported Unreal-bundled third-party libraries."""
    interrogator = self._getUE4BuildInterrogator()
    return interrogator.list(
        self.getPlatformIdentifier(),
        configuration,
        self._getLibraryOverrides(),
    )
Lists the supported Unreal-bundled third-party libraries
def _write_images(self, iteration:int)->None: "Writes model generated, original and real images to Tensorboard" self.img_gen_vis.write(learn=self.learn, trn_batch=self.trn_batch, val_batch=self.val_batch, iteration=iteration, tbwriter=self.tbwriter)
Writes model generated, original and real images to Tensorboard
def merge_values(values1,values2): array1 = values_to_array(values1) array2 = values_to_array(values2) if array1.size == 0: return array2 if array2.size == 0: return array1 merged_array = [] for row_array1 in array1: for row_array2 in array2: merged_row = np.h...
Merges two numpy arrays by calculating all possible combinations of rows
def getRandomBinaryTreeLeafNode(binaryTree):
    """Walk randomly down the tree and return a leaf node."""
    node = binaryTree
    # Iterative form of the original recursive random descent.
    while node.internal == True:
        node = node.left if random.random() > 0.5 else node.right
    return node
Get random binary tree node.
def draw(self):
    """Draw the horizontal bar chart of feature scores; return the axes."""
    positions = np.arange(self.scores_.shape[0]) + 0.5
    self.ax.barh(positions, self.scores_)
    self.ax.set_yticks(positions)
    self.ax.set_yticklabels(self.features_)
    return self.ax
Draws the feature correlation to dependent variable, called from fit.
def getLastPoses(self, unRenderPoseArrayCount, unGamePoseArrayCount): fn = self.function_table.getLastPoses pRenderPoseArray = TrackedDevicePose_t() pGamePoseArray = TrackedDevicePose_t() result = fn(byref(pRenderPoseArray), unRenderPoseArrayCount, byref(pGamePoseArray), unGamePoseArrayC...
Get the last set of poses returned by WaitGetPoses.
def mode(self):
    """Compute the mode of a log-normal distribution built from the stats.

    Uses exp(mu - sigma^2); a NaN result is reported as infinity.
    """
    mu = self.mean()
    sigma = self.std()
    result = math.exp(mu - sigma ** 2)
    return float("inf") if math.isnan(result) else result
Computes the mode of a log-normal distribution built with the stats data.
def start(self):
    """Start the behaviour in the event loop and mark it as running."""
    coroutine = self._start()
    self.agent.submit(coroutine)
    self.is_running = True
starts behaviour in the event loop
def _cx_state_psutil(self, tags=None): metrics = defaultdict(int) tags = [] if tags is None else tags for conn in psutil.net_connections(): protocol = self._parse_protocol_psutil(conn) status = self.tcp_states['psutil'].get(conn.status) metric = self.cx_state_...
Collect metrics about connections state using psutil
def _file_size(file_path, uncompressed=False): _, ext = os.path.splitext(file_path) if uncompressed: if ext in {".gz", ".gzip"}: with gzip.GzipFile(file_path, mode="rb") as fp: try: fp.seek(0, os.SEEK_END) return fp.tell() ...
Return size of a single file, compressed or uncompressed
def avl_release_parent(node): parent = node.parent if parent is not None: if parent.right is node: parent.right = None elif parent.left is node: parent.left = None else: raise AssertionError('impossible state') node.parent = None parent...
removes the parent of a child
def pack(window, sizer, expand=1.1): "simple wxPython pack function" tsize = window.GetSize() msize = window.GetMinSize() window.SetSizer(sizer) sizer.Fit(window) nsize = (10*int(expand*(max(msize[0], tsize[0])/10)), 10*int(expand*(max(msize[1], tsize[1])/10.))) window.SetSize...
simple wxPython pack function
def gen_anytext(*args): bag = [] for term in args: if term is not None: if isinstance(term, list): for term2 in term: if term2 is not None: bag.append(term2) else: bag.append(term) return ' '.join(bag...
Convenience function to create bag of words for anytext property
def save(self): try: response = requests.post(self._upload_url, auth=self.jss.session.auth, verify=self.jss.session.verify, files=self.resource) except JSSPostError as error: ...
POST the object to the JSS.
def highwiredict2xmlstring(highwire_elements, ordering=HIGHWIRE_ORDER): highwire_elements.sort(key=lambda obj: ordering.index(obj.name)) root = Element('metadata') for element in highwire_elements: attribs = {'name': element.name, 'content': element.content} SubElement(root, 'meta', attribs)...
Create an XML string from the highwire data dictionary.