code
stringlengths
51
2.34k
docstring
stringlengths
11
171
def decompressBuffer(buffer):
    """Gunzip *buffer* and return the decompressed bytes.

    Complements the compressBuffer function in CacheClient.

    :param buffer: gzip-compressed payload (bytes).
    :returns: the decompressed payload.
    """
    # io.BytesIO replaces the Python-2-only cStringIO module; the context
    # managers guarantee both objects are closed even if read() raises.
    import io
    import gzip
    with io.BytesIO(buffer) as zbuf:
        with gzip.GzipFile(fileobj=zbuf) as zfile:
            return zfile.read()
complements the compressBuffer function in CacheClient
def _file_to_folder(gi, fname, sample_info, libitems, library, folder): full_name = os.path.join(folder["name"], os.path.basename(fname)) file_type = "vcf_bgzip" if full_name.endswith(".vcf.gz") else "auto" if full_name.endswith(".vcf.gz"): full_name = full_name.replace(".vcf.gz", ".vcf") for it...
Check if file exists on Galaxy, if not upload to specified folder.
# NOTE(review): `async` became a reserved keyword in Python 3.5+, so this method
# name only parses on Python 2 / old PyPy; renaming would break every caller, so
# it is left as-is.  Returns True for SystemExit / KeyboardInterrupt — the
# exceptions that should propagate rather than be swallowed by generic handlers.
def async(self, space): "Check if this is an exception that should better not be caught." return (self.match(space, space.w_SystemExit) or self.match(space, space.w_KeyboardInterrupt))
Check if this is an exception that should better not be caught.
def new_registry(attribute=None):
    """Return an empty dict plus a ``@register(key)`` decorator that fills it.

    When *attribute* is given, each registered function additionally gets its
    key stored on it under that attribute name.
    """
    mapping = {}

    def register(key):
        def decorator(func):
            mapping[key] = func
            if attribute:
                setattr(func, attribute, key)
            return func
        return decorator

    return mapping, register
Returns an empty dict and a @register decorator.
def _get_diff_text(old, new): diff = difflib.unified_diff(old.splitlines(1), new.splitlines(1)) return ''.join([x.replace('\r', '') for x in diff])
Returns the diff of two text blobs.
def select_port(default_port=20128): import socket while True: try: sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, socket.IPPROTO_TCP) sock.bind( ("127.0.0.1", default_port) ) except socke...
Find and return a non used port
def update(self, quiet=False, prefix='[polyglot_data] '):
    """Re-download every package whose cached status is STALE.

    The status cache is cleared first so each package is re-checked.
    """
    self.clear_status_cache()
    for package in self.packages():
        # Guard clause: skip anything that is already up to date.
        if self.status(package) != self.STALE:
            continue
        self.download(package, quiet=quiet, prefix=prefix)
Re-download any packages whose status is STALE.
# Show the currently available alembic migration heads.  Takes the engine-wide
# advisory lock before invoking `alembic heads` so concurrent migration
# commands cannot interleave; extra kwargs are passed through to alembic.
def heads(config, **kwargs): with alembic_lock( config.registry["sqlalchemy.engine"], config.alembic_config() ) as alembic_config: alembic.command.heads(alembic_config, **kwargs)
Show current available heads.
def do_run(self, line):
    """Execute every queued write operation (the ``run`` shell command)."""
    # `run` takes no arguments: min=0 and max=0 are both enforced here.
    self._split_args(line, 0, 0)
    queue = self._command_processor.get_operation_queue()
    queue.execute()
    self._print_info_if_verbose(
        "All operations in the write queue were successfully executed"
    )
run Perform each operation in the queue of write operations.
def update(self): for linenum in reversed(sorted(self.updates)): self.replace_baseline_repr(linenum, self.updates[linenum]) if not self.TEST_MODE: path = '{}.update{}'.format(*os.path.splitext(self.path)) with io.open(path, 'w', encoding='utf-8') as fh: ...
Replace baseline representations previously registered for update.
def remove(self, payload): succeeded = [] failed = [] for key in payload['keys']: running = self.process_handler.is_running(key) if not running: removed = self.queue.remove(key) if removed: succeeded.append(str(key)) ...
Remove specified entries from the queue.
def _vertical_x(axis, ticks=None, max_width=5): if ticks is None: ticks = axis.get_xticks() if (np.array(ticks) == np.rint(ticks)).all(): ticks = np.rint(ticks).astype(np.int) if max([len(str(tick)) for tick in ticks]) > max_width: axis.set_xticklabels(ticks, rotation='vertical')
Switch labels to vertical if they are long.
def sort(self): users = [] for _, group in itertools.groupby(sorted(self.commits), operator.attrgetter('author_mail')): if group: users.append(self.merge_user_commits(group)) self.sorted_commits = sorted(users, ...
Sort by commit size, per author.
def delete(self): ret = False q = self.query pk = self.pk if pk: pk_name = self.schema.pk.name self.query.is_field(pk_name, pk).delete() setattr(self, pk_name, None) self.reset_modified() for field_name in self.schema.fields: ...
delete the object from the db if pk is set
def solc_arguments(libraries=None, combined='bin,abi', optimize=True, extra_args=None): args = [ '--combined-json', combined, ] def str_of(address): try: return address.decode('utf8') except AttributeError: return address if optimize: ...
Build the arguments to call the solc binary.
def pgdump(self, *pgdumpargs): db, env = self.get_db_args_env() args = ['-d', db['name'], '-h', db['host'], '-U', db['user'], '-w' ] + list(pgdumpargs) stdout, stderr = External.run( 'pg_dump', args, capturestd=True, env=env) if stderr: log.warn('s...
Run a pg_dump command
def add_sqlvm_to_aglistener(instance, sqlvm_resource_id): if not is_valid_resource_id(sqlvm_resource_id): raise CLIError("Invalid SQL virtual machine resource id.") vm_list = instance.load_balancer_configurations[0].sql_virtual_machine_instances if sqlvm_resource_id not in vm_list: instance....
Add a SQL virtual machine to an availability group listener.
def build(self, region=None, profile=None): with self.lock: key = "{}-{}".format(profile, region) try: provider = self.providers[key] except KeyError: msg = "Missed memoized lookup ({}), creating new AWS Provider." logger.debug(...
Get or create the provider for the given region and profile.
def _get_credentials(username=None, password=None, dbhost=None): system_config = dbhost.objectmodels['systemconfig'].find_one({ 'active': True }) try: salt = system_config.salt.encode('ascii') except (KeyError, AttributeError): log('No systemconfig or it is without a salt! ' ...
Obtain user credentials by arguments or asking the user
def _join_sequence(seq, last_separator=''):
    """Join *seq* into one comma-separated string.

    Each element is rendered by ``_format_element``, which receives the total
    element count, the element index and *last_separator* so it can
    special-case the final element.
    """
    total = len(seq)
    rendered = (_format_element(item, total, index, last_separator)
                for index, item in enumerate(seq))
    return ', '.join(rendered)
Join a sequence into a string.
def xml_tag (self, name, content, attrs=None): self.write(self.indent*self.level) self.write(u"<%s" % xmlquote(name)) if attrs: for aname, avalue in attrs.items(): args = (xmlquote(aname), xmlquoteattr(avalue)) self.write(u' %s="%s"' % args) se...
Write XML tag with content.
def acceptNavigationRequest(self, url, kind, is_main_frame): ready_url = url.toEncoded().data().decode() is_clicked = kind == self.NavigationTypeLinkClicked if is_clicked and self.root_url not in ready_url: QtGui.QDesktopServices.openUrl(url) return False return s...
Open external links in browser and internal links in the webview
def _encode_buffer(string, f):
    """Write the bencoded form of *string* (str or bytes) to file object *f*.

    Layout: ``<byte-length><_TYPE_SEP><payload>``; str input is UTF-8
    encoded first.
    """
    payload = string.encode() if isinstance(string, str) else string
    f.write(str(len(payload)).encode())
    f.write(_TYPE_SEP)
    f.write(payload)
Writes the bencoded form of the input string or bytes
# Fold one proto histogram into the accumulator: convert it to a tuple form,
# wrap it with its wall time and step, and add it to both the raw and the
# compressed histogram reservoirs (the latter via _CompressHistogram).
def _ProcessHistogram(self, tag, wall_time, step, histo): histo = self._ConvertHistogramProtoToTuple(histo) histo_ev = HistogramEvent(wall_time, step, histo) self.histograms.AddItem(tag, histo_ev) self.compressed_histograms.AddItem(tag, histo_ev, self._CompressHistogram)
Processes a proto histogram by adding it to accumulated state.
def send_config_set( self, config_commands=None, exit_config_mode=False, delay_factor=1, max_loops=150, strip_prompt=False, strip_command=False, config_mode_command=None, ): return super(VyOSSSH, self).send_config_set( config_comman...
Remain in configuration mode.
# CLI command: print `key=value` for *key* from the dotenv file stored in the
# click context, or exit(1) when the lookup fails.
# NOTE(review): the truthiness test means a key stored with an EMPTY value also
# exits 1 — presumably intended, but verify against get_key's contract.
def get(ctx, key): file = ctx.obj['FILE'] stored_value = get_key(file, key) if stored_value: click.echo('%s=%s' % (key, stored_value)) else: exit(1)
Retrieve the value for the given key.
def pretty_print_graphql(query, use_four_spaces=True):
    """Parse a GraphQL *query*, pretty-print it, and return the result.

    With *use_four_spaces* the printer's output is re-indented to
    four-space depth; otherwise the visitor's output is returned as-is.
    """
    printed = visit(parse(query), CustomPrintingVisitor())
    if not use_four_spaces:
        return printed
    return fix_indentation_depth(printed)
Take a GraphQL query, pretty print it, and return it.
# Handle a new ConnectionStatus: on CONNECTED refresh the multizone member
# list; on DISCONNECTED or LOST clear it.  Note both `if`s are evaluated —
# the states are mutually exclusive, so at most one branch fires.
def new_connection_status(self, conn_status): if conn_status.status == CONNECTION_STATUS_CONNECTED: self._mz.update_members() if (conn_status.status == CONNECTION_STATUS_DISCONNECTED or conn_status.status == CONNECTION_STATUS_LOST): self._mz.reset_members()
Handle reception of a new ConnectionStatus.
def record_file_factory(self): try: get_distribution('invenio-records-files') from invenio_records_files.utils import record_file_factory default = record_file_factory except DistributionNotFound: def default(pid, record, filename): return ...
Load default record file factory.
def from_json(cls, json_data): new_instance = cls() for field_name, field_obj in cls._get_fields().items(): if isinstance(field_obj, NestedDocumentField): if field_name in json_data: nested_field = field_obj.__get__(new_instance, new_instance.__class__) ...
Converts json data to a new document instance
def _writable_sample_file(file, warn=True, wfun=None): if wfun is None: wfun = lambda x, y: '"{}" is not writable; use "{}" instead'.format(x, y) dir = os.path.dirname(file) dir = os.getcwd() if dir == '' else dir if os.access(dir, os.W_OK): return file else: dir2 = tempfile....
Check to see if file is writable, if not use temporary file
def filter_hidden_frames(self):
    """Drop hidden frames from every group, then rebuild ``self.frames``.

    The flat list is updated with slice assignment so external references
    to the same list object stay valid.
    """
    for grp in self.groups:
        grp.filter_hidden_frames()
    survivors = []
    for grp in self.groups:
        survivors.extend(grp.frames)
    self.frames[:] = survivors
Remove the frames according to the paste spec.
def _glob_pjoin(*parts): if parts[0] in ('.', ''): parts = parts[1:] return pjoin(*parts).replace(os.sep, '/')
Join paths for glob processing
def _get_rows_height(self): tab = self.grid.current_table no_rows = self.grid.code_array.shape[0] default_row_height = self.grid.code_array.cell_attributes.\ default_cell_attributes["row-height"] non_standard_row_heights = [] __row_heights = self.grid.code_array.row_h...
Returns the total height of all grid rows
# Django view: render the "form sent" page for the published form identified
# by *slug*, 404ing when the form is not published for the current user.
# NOTE(review): render_to_response + RequestContext is the pre-Django-1.10
# idiom; kept byte-identical since the surrounding project targets that API.
def form_sent(request, slug, template="forms/form_sent.html"): published = Form.objects.published(for_user=request.user) context = {"form": get_object_or_404(published, slug=slug)} return render_to_response(template, context, RequestContext(request))
Show the response message.
def _improve_answer_span(doc_tokens, input_start, input_end, tokenizer, orig_answer_text): tok_answer_text = " ".join(tokenizer.tokenize(orig_answer_text)) for new_start in range(input_start, input_end + 1): for new_end in range(input_end, new_start - 1, -1): text_sp...
Returns tokenized answer spans that better match the annotated answer.
def mdev(self, tau0, tau):
    """Return the predicted Modified Allan deviation at averaging time *tau*.

    MVAR = prefactor**2 * tau**c, with the prefactor derived from the noise
    coefficient (mdev_from_qd) and exponent c from the noise type (c_mvar);
    MDEV is its square root.
    """
    prefactor = self.mdev_from_qd(tau0=tau0, tau=tau)
    exponent = self.c_mvar()
    mvar = prefactor ** 2 * tau ** exponent
    return np.sqrt(mvar)
return predicted MDEV of noise-type at given tau
# Build an immediate Transition whose delegate reads *length* bytes via
# _read_data_handler; *whence* (the coroutine to return to) defaults to this
# reader's own whence, and *stream_event* is what to emit if data runs out.
def read_data_transition(self, length, whence=None, skip=False, stream_event=ION_STREAM_INCOMPLETE_EVENT): if whence is None: whence = self.whence return Transition( None, _read_data_handler(length, whence, self, skip, stream_event) )
Returns an immediate event_transition to read a specified number of bytes.
def skydir_to_pixel(self, skydir): if self.coordsys in ['CEL', 'EQU']: skydir = skydir.transform_to('icrs') lon = skydir.ra.deg lat = skydir.dec.deg else: skydir = skydir.transform_to('galactic') lon = skydir.l.deg lat = skydir.b.de...
Return the pixel index of a SkyCoord object.
def scene_add(sequence_number, scene_id, animation_id, name, color, velocity, config): (red, green, blue) = (int(color[0]*255), int(color[1]*255), int(color[2]*255)) return MessageWriter().string("scene.add").uint64(sequence_number).uint32(scene_id).uint32(animation_id).string(name) \ ...
Create a scene.add message
def dump_logs(job_queue='run_reach_queue', job_status='RUNNING'):
    """Write a log file for every job in *job_queue* that has *job_status*."""
    for job in get_jobs(job_queue, job_status):
        get_job_log(job, write_file=True)
Write logs for all jobs with given the status to files.
def prune_urls(url_set, start_url, allowed_list, ignored_list): result = set() for url in url_set: allowed = False for allow_url in allowed_list: if url.startswith(allow_url): allowed = True break if not allowed: continue ig...
Prunes URLs that should be ignored.
async def _on_status_change(self, update): info = update['payload'] new_number = info['new_status'] name = update['service'] if name not in self.services: return with self._state_lock: is_changed = self.services[name].state != new_number self.s...
Update a service that has its status updated.
def sets(self, values):
    """Store the list of OAI sets in the cache, if a cache is configured."""
    if not self.cache:
        return
    cache_key = self.app.config['OAISERVER_CACHE_KEY']
    self.cache.set(cache_key, values)
Set list of sets.
def _CheckForExistingMappings(mapping_type, message_type, python_name, json_name): if mapping_type == 'field': getter = GetCustomJsonFieldMapping elif mapping_type == 'enum': getter = GetCustomJsonEnumMapping remapping = getter(message_type, python_name=python_n...
Validate that no mappings exist for the given values.
def no_more_deployments_remain(self): no_more_deployments = True deployments = __salt__['boto_apigateway.describe_api_deployments'](restApiId=self.restApiId, **self._common_aws_args).get('deployments') if deployments: ...
Helper function to find whether there are deployments left with stages associated
# Redis SMEMBERS: return all members of the set at *key*.  The keyword-only
# *encoding* defaults to the module's _NOTSET sentinel so "no encoding given"
# is distinguishable from an explicit None; the call is delegated to execute().
def smembers(self, key, *, encoding=_NOTSET): return self.execute(b'SMEMBERS', key, encoding=encoding)
Get all the members in a set.
def to_agraph(self): A = nx.nx_agraph.to_agraph(self) A.graph_attr.update( {"dpi": 227, "fontsize": 20, "fontname": "Menlo", "rankdir": "TB"} ) A.node_attr.update({"fontname": "Menlo"}) def build_tree(cluster_name, root_graph): subgraph_nodes = [ ...
Export to a PyGraphviz AGraph object.
def to_dict(self):
    """Convert this parameter set into a ``{name: value}`` dict."""
    return {param.name: param.value for param in self.values()}
Converts the set of parameters into a dict
def updateTrackerItem( self, point = None ): item = self.trackerItem() if not item: return gridRect = self._buildData.get('grid_rect') if ( not (gridRect and gridRect.isValid()) ): item.setVisible(False) return if ( point is not None ):...
Updates the tracker item information.
# Encode *obj* to JSON with a lazily-created DjangoJSONEncoder cached on the
# function object itself (EAFP: first call raises AttributeError, builds the
# encoder, then recurses once).  default=six.text_type stringifies anything
# the encoder does not natively handle.
def json_encode(obj): try: return json_encode.encoder.encode(obj) except AttributeError: json_encode.encoder = DjangoJSONEncoder(default=six.text_type) return json_encode(obj)
Encode a python object to json
# Expand this link's URI template with the given arguments.  The 0 -> '0'
# rewrite is a deliberate workaround: uritemplate drops falsy values, so
# integer zero is passed as the string '0' to keep it in the expansion.
def expand_uri(self, **kwargs): kwargs = dict([(k, v if v != 0 else '0') for k, v in kwargs.items()]) return uritemplate.expand(self.link.uri, kwargs)
Returns the template uri expanded with the current arguments
# Trigger a wifi scan on this interface: log which iface is scanning, then
# delegate to the wifi controller with the raw platform object.
def scan(self): self._logger.info("iface '%s' scans", self.name()) self._wifi_ctrl.scan(self._raw_obj)
Trigger the wifi interface to scan.
# Cap type of the stroke, one of `butt`, `round`, `square`: read the enum from
# the descriptor data and map it via STROKE_STYLE_LINE_CAP_TYPES, falling back
# to str(key) for unknown enum values.
def line_cap_type(self): key = self._data.get(b'strokeStyleLineCapType').enum return self.STROKE_STYLE_LINE_CAP_TYPES.get(key, str(key))
Cap type, one of `butt`, `round`, `square`.
def to_global(s):
    """Format *s* as a camelCase global variable name.

    A 'GPSTime…' prefix is normalized to 'GpsTime…', snake_case segments are
    capitalized and concatenated, and the first character is lower-cased.
    """
    if s.startswith('GPSTime'):
        s = 'Gps' + s[3:]
    if '_' in s:
        s = ''.join(part.capitalize() for part in s.split('_'))
    return s[0].lower() + s[1:]
Format a global variable name.
def rc_notfound(cls, interface_type, resource_name=None):
    """Build the exception for a missing resource class.

    Used when no resource class was provided for *interface_type* and no
    default could be found; *resource_name* adds parsing context when known.
    """
    msg = ("Resource class for %s not provided and default not found."
           % interface_type)
    if resource_name:
        msg = "Could not parse '%s'. %s" % (resource_name, msg)
    return cls(msg)
Exception used when no resource class is provided and no default is found.
# Alternate constructor: build an object for the subdirectory named in
# info_obj['subdir'] inside *container*, tagged with the class's SUBDIR type.
def from_subdir(cls, container, info_obj): return cls(container, info_obj['subdir'], obj_type=cls.type_cls.SUBDIR)
Create from subdirectory info object.
def setup(cls, app): if cls.directive_name is None: raise NotImplementedError('directive_name must be set by ' 'subclasses of BaseDirective') if not app.registry.has_domain('http'): setup_httpdomain(app) app.add_config_value('{}_harne...
Called by Sphinx to setup an extension.
# Memoized parse of the CIGAR string into CIGARDatum(count, op) pairs; cached
# on self._cigar to speed up repeated access.
# NOTE(review): `[MIDNSHP=X]+` matches runs of op letters, not exactly one —
# presumably each op is a single char in practice; confirm against the data.
def cigar_array(self): if self._cigar: return self._cigar self._cigar = [CIGARDatum(int(m[0]),m[1]) for m in re.findall('([0-9]+)([MIDNSHP=X]+)',self.entries.cigar)] return self._cigar
cache this one to speed things up a bit
def gettext(ui_file_path): with open(ui_file_path, 'r') as fin: content = fin.read() content = re.sub(r'_translate\(".*",\s', '_(', content) content = content.replace( ' _translate = QtCore.QCoreApplication.translate', '') with open(ui_file_path, 'w') as fout: fout.write(c...
Let you use gettext instead of the Qt tools for l18n
def lint(input_file, debug=False): options = load_options() if isinstance(input_file, string_types): text = input_file else: text = input_file.read() checks = get_checks(options) errors = [] for check in checks: result = check(text) for error in result: ...
Run the linter on the input file.
def cache_info(cache_dir, cache_key):
    """Return the mtime of the cache file for *cache_key*, or 0 if absent.

    :param cache_dir: directory holding cache files.
    :param cache_key: file name within *cache_dir*.
    """
    filename = os.path.join(cache_dir, cache_key)
    # EAFP instead of exists()+getmtime(): a file deleted between the two
    # calls can no longer raise (TOCTOU race in the original).
    try:
        return os.path.getmtime(filename)
    except OSError:
        return 0
Returns the cache files mtime or 0 if it does not exists
# Invoke *callback* with the given arguments, returning its result; any
# exception is swallowed and logged (deliberate best-effort: a bad Future
# callback must not take down the caller).  Returns None on failure.
def safe_invoke_callback(callback, *args, **kwargs): try: return callback(*args, **kwargs) except Exception: _LOGGER.exception("Error while executing Future callback.")
Invoke a callback, swallowing and logging any exceptions.
# Convert the next NAME lex token into a NameNode: expect_token both checks
# the kind and advances the lexer; loc() records the source span.
def parse_name(lexer: Lexer) -> NameNode: token = expect_token(lexer, TokenKind.NAME) return NameNode(value=token.value, loc=loc(lexer, token))
Convert a name lex token into a name parse node.
# Raise DataNotFound unless the substring *anchor* occurs in the document
# (searched as bytes when byte=True); the no-op success path returns None.
def text_assert(self, anchor, byte=False): if not self.text_search(anchor, byte=byte): raise DataNotFound(u'Substring not found: %s' % anchor)
If `anchor` is not found then raise `DataNotFound` exception.
# Take a ShallowTransform-like object (offset, scale, rotation) and return the
# composed affine transform as a 6-tuple, via MathTransform().compose().
def mathTransformToMatrix(mathTransform): m = MathTransform().compose(mathTransform.offset, mathTransform.scale, mathTransform.rotation) return tuple(m)
Take a ShallowTransform object and return a 6-tuple.
# Qt timer hook: when the firing timer is our hide timer, stop it and hide
# the widget; events from any other timer are ignored.
def timerEvent(self, event): if event.timerId() == self._hide_timer.timerId(): self._hide_timer.stop() self.hide()
Reimplemented to hide the widget when the hide timer fires.
def _fetch_contribution_info(self): self.contributions = {} query = (Contribution.query .with_parent(self.event) .options(joinedload('legacy_mapping'), joinedload('timetable_entry').lazyload('*'))) for contribution in query: ...
Build the list of information entries for contributions of the event
def convert_weights_to_numpy(weights_dict):
    """Convert a weight dict to numpy arrays.

    Strips the 'arg:'/'aux:' name prefixes from each key and calls
    ``.asnumpy()`` on each value.
    """
    converted = {}
    for name, weight in weights_dict.items():
        clean_name = name.replace("arg:", "").replace("aux:", "")
        converted[clean_name] = weight.asnumpy()
    return converted
Convert weights to numpy
# CLI command: parse the free-form kwargs, load *dataset* with the options
# stored in the click context, and run its process() step.
def process(ctx, dataset, kwargs): "processes the data to a friendly format" kwargs = parse_kwargs(kwargs) data(dataset, **ctx.obj).process(**kwargs)
processes the data to a friendly format
# One step of the Smith-normal-form reduction for the lower-right 2x2 block of
# self._A.  After _second_one_loop: done (True) when A[2,1] is zero; when
# A[2,1] is a multiple of A[1,1] finalize, commit the pending row ops
# (self._L) to self._Ps, and report True; otherwise report False so the
# caller iterates again.  Exact statement order is load-bearing — untouched.
def _second(self): self._second_one_loop() A = self._A if A[2, 1] == 0: return True elif A[2, 1] % A[1, 1] == 0: self._second_finalize() self._Ps += self._L self._L = [] return True else: return False
Find Smith normal form for Right-low 2x2 matrix
def export_admin_action(self, request, queryset): export_format = request.POST.get('file_format') if not export_format: messages.warning(request, _('You must select an export format.')) else: formats = self.get_export_formats() file_format = formats[int(export...
Exports the selected rows using file_format.
def save_current_nb_as_html(info=False): assert in_ipynb() full_path = get_notebook_name() path, filename = os.path.split(full_path) wd_save = os.getcwd() os.chdir(path) cmd = 'jupyter nbconvert --to html "{}"'.format(filename) os.system(cmd) os.chdir(wd_save) if info: print(...
Save the current notebook as html file in the same directory
def insert_many(cls, documents): from mongoframes.queries import to_refs frames = cls._ensure_frames(documents) signal('insert').send(cls, frames=frames) documents = [to_refs(f._document) for f in frames] ids = cls.get_collection().insert_many(documents).inserted_ids for ...
Insert a list of documents
def show_xref(self, f_a): if f_a: bytecode._PrintSubBanner("XREF Read") xrefs_from = f_a.get_xref_read() for ref_class, ref_method in xrefs_from: bytecode._PrintDefault(ref_method.get_name()) bytecode._PrintDefault('\n') bytecode._P...
Display where this field is read or written
def _srcprob_app(self,xmlfile=None, overwrite=False, **kwargs): loglevel = kwargs.get('loglevel', self.loglevel) self.logger.log(loglevel, 'Computing src probability for component %s.', self.name) srcmdl_file = self.files['srcmdl'] if xmlfile is not None: ...
Run srcprob for an analysis component as an application
def getIndicesFromInstId(self, instId): if instId in self._idToIdxCache: return self._idToIdxCache[instId] indices = [] for impliedFlag, modName, symName in self._indexNames: mibObj, = mibBuilder.importSymbols(modName, symName) try: syntax, ins...
Return index values for instance identification
def include(self, filename): if filename != STDIN and filename in [x[0] for x in self.filestack]: self.warning(' Recursive inclusion') self.filestack.append([filename, 1, self.lex, self.input_data]) self.lex = lex.lex(object=self) result = self.put_current_line() try:...
Changes FILENAME and line count
def time_from_number(self, value):
    """Convert a day-fraction number to the corresponding datetime.time.

    Non-numeric input yields None.  Only the fractional day matters, since
    timedelta.seconds already discards whole days.
    """
    if not isinstance(value, numbers.Real):
        return None
    seconds_in_day = datetime.timedelta(days=value).seconds
    total_minutes, second = divmod(seconds_in_day, 60)
    hour, minute = divmod(total_minutes, 60)
    return datetime.time(hour, minute, second)
Converts a float value to corresponding time instance.
def _batched_write_command( namespace, operation, command, docs, check_keys, opts, ctx): buf = StringIO() buf.write(_ZERO_64) buf.write(b"\x00\x00\x00\x00\xd4\x07\x00\x00") to_send, length = _batched_write_command_impl( namespace, operation, command, docs, check_keys, opts, ctx, buf) ...
Create the next batched insert, update, or delete command.
def param_redirect(request, viewname, *args): url = reverse(viewname, PARAMS_URL_CONF, args) params = request.GET.urlencode().split('&') if hasattr(request, 'cparam'): for k, v in request.cparam.items(): params.append('{0}={1}'.format(k, v)) new_params = '&'.join(x for x in params if...
Redirect and keep URL parameters if any.
def remove_program_temp_directory(): if os.path.exists(program_temp_directory): max_retries = 3 curr_retries = 0 time_between_retries = 1 while True: try: shutil.rmtree(program_temp_directory) break except IOError: ...
Remove the global temp directory and all its contents.
# Open and return a fresh TCP socket connected to this server on localhost at
# self._port.  The caller owns (and must close) the returned socket.
def connect(self): sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sock.connect(("127.0.0.1", self._port)) return sock
Returns a new socket connection to this server.
def _action_allowed(self, action): if getattr(self.layer.participation_settings, '{0}_allowed'.format(action)) is False: return False else: return getattr(self.participation_settings, '{0}_allowed'.format(action))
participation actions can be disabled on layer level, or disabled on a per node basis
def _validate_file_format(self, file_format): if file_format not in self.valid_file_formats: raise InvalidFileFormatError( "{} is not a valid file format".format(file_format) ) return file_format
Validates file format, raising error if invalid.
def validate_config(raise_=True): ELIBConfig.check() known_paths = set() duplicate_values = set() missing_values = set() for config_value in ConfigValue.config_values: if config_value.path not in known_paths: known_paths.add(config_value.path) else: duplicate_...
Verifies that all configuration values have a valid setting
def configured_options(self):
    """Return the git repo's configured options as a ``{key: value}`` dict.

    Parses the output of ``git config --list`` (one ``key=value`` per line).
    """
    output = self._check_output(['config', '--list'])
    pairs = (line.split('=') for line in output.splitlines())
    return {key: value for key, value in pairs}
What are the configured options in the git repo.
# Start the daemonized main worker thread: clear the shutdown flag, build a
# named daemon thread around _MainThreadProc, and start it.  daemon=True means
# the thread will not block interpreter exit.
def Start(self): self._shutdown = False self._main_thread = threading.Thread(target=self._MainThreadProc) self._main_thread.name = 'Cloud Debugger main worker thread' self._main_thread.daemon = True self._main_thread.start()
Starts the worker thread.
# Classify the file at self.abspath as 'keyword' or 'fixed' tab type by
# scanning its lines for the literal 'COMPONENTS'.
# NOTE(review): the collapsed source makes the `else:` ambiguous — it reads as
# a for/else (return 'fixed' only after no line matched), but an if/else
# (first line decides) parses identically here; confirm against the original
# layout before restructuring.
def _tab_type(self): with open(self.abspath) as fobj: contents = fobj.readlines() for line in contents: if 'COMPONENTS' in line: return 'keyword' else: return 'fixed'
Private method to define the tab type
def from_master_secret(class_, master_secret):
    """Derive a Wallet from *master_secret* (BIP32 master key generation).

    HMAC-SHA512 keyed with b"Bitcoin seed" over the secret yields 64 bytes:
    the left 32 become the secret exponent, the right 32 the chain code.
    """
    digest = hmac.HMAC(key=b"Bitcoin seed", msg=master_secret,
                       digestmod=hashlib.sha512).digest()
    return class_(chain_code=digest[32:],
                  secret_exponent=from_bytes_32(digest[:32]))
Generate a Wallet from a master password.
def datapath4file(filename, ext:str='.tgz', archive=True): "Return data path to `filename`, checking locally first then in the config file." local_path = URLs.LOCAL_PATH/'data'/filename if local_path.exists() or local_path.with_suffix(ext).exists(): return local_path elif archive: return Config.data_arc...
Return data path to `filename`, checking locally first then in the config file.
# Event handler that mirrors externally-changed content into this widget.
# ignore_changes is toggled around SetValue so the programmatic update does
# not re-trigger change handling; None text is rendered as the empty string.
def OnContentChange(self, event): self.ignore_changes = True self.SetValue(u"" if event.text is None else event.text) self.ignore_changes = False event.Skip()
Event handler for updating the content
def load_config(filename): logger.debug("Event Definitions configuration file: %s", filename) with open(filename, 'r') as cf: config = cf.read() try: events_config = yaml.safe_load(config) except yaml.YAMLError as err: if hasattr(err, 'problem_mark'): mark = err.probl...
Load the event definitions from yaml config file.
# Narrow a SQLAlchemy *query* over Port rows to those whose status is ACTIVE,
# returning the filtered query (the input query object is not mutated).
def filter_inactive_ports(query): port_model = models_v2.Port query = (query .filter(port_model.status == n_const.PORT_STATUS_ACTIVE)) return query
Filter ports that aren't in active status
def _applyMultichan(samples, func): if len(samples.shape) == 1 or samples.shape[1] == 1: newsamples = func(samples) else: y = np.array([]) for i in range(samples.shape[1]): y = np.concatenate((y, func(samples[:,i]))) newsamples = y.reshape(samples.shape[1], -1).T ...
Apply func to each channel of audio data in samples
def _set_value(self, parameter, new_value): try: self.block_matcher.__setattr__(parameter, new_value) except BadBlockMatcherArgumentError: return self.update_disparity_map()
Try setting new parameter on ``block_matcher`` and update map.
def make_scratch_dirs(file_mapping, dry_run=True): scratch_dirs = {} for value in file_mapping.values(): scratch_dirname = os.path.dirname(value) scratch_dirs[scratch_dirname] = True for scratch_dirname in scratch_dirs: if dry_run: print("mkdir...
Make any directories need in the scratch area
def fetch(url, binary, outfile, noprint, rendered): with chrome_context.ChromeContext(binary=binary) as cr: resp = cr.blocking_navigate_and_get_source(url) if rendered: resp['content'] = cr.get_rendered_page_source() resp['binary'] = False resp['mimie'] = 'text/html' if not noprint: if resp['binary'] i...
Fetch a specified URL's content, and output it to the console.
def from_data( name, coors, ngroups, conns, mat_ids, descs, igs = None ): if igs is None: igs = range( len( conns ) ) mesh = Mesh(name) mesh._set_data(coors = coors, ngroups = ngroups, conns = [conns[ig] for ig in igs], ...
Create a mesh from mesh data.
def complete_extra(self, args):
    """Completions for the 'extra' command.

    With no arguments yet, list the current directory; otherwise complete
    the partial path given as the last argument.
    """
    if not args:
        return self._listdir('./')
    return self._complete_path(args[-1])
Completions for the 'extra' command.