code
stringlengths
51
2.34k
docstring
stringlengths
11
171
def stack_2_eqn(self, p):
    """Return the equation string for program ``p``'s stack.

    Each node of ``p.stack`` is evaluated onto a working stack; the top
    entry is the full equation. Returns ``[]`` when ``p`` is falsy.
    """
    if not p:
        return []
    working = []
    for node in p.stack:
        self.eval_eqn(node, working)
    return working[-1]
returns equation string for program stack
def socket_send(self): if len(self.send_buffer): try: sent = self.sock.send(self.send_buffer) except socket.error, err: print("!! SEND error '%d:%s' from %s" % (err[0], err[1], self.addrport())) self.active = False ...
Called by TelnetServer when send data is ready.
def ListGrrBinaries(context=None):
    """Lists all registered Grr binaries.

    Returns an iterator of ``GrrBinary`` wrappers over the raw items
    streamed back by the API.
    """
    def _wrap(data):
        return GrrBinary(data=data, context=context)

    raw_items = context.SendIteratorRequest("ListGrrBinaries", None)
    return utils.MapItemsIterator(_wrap, raw_items)
Lists all registered Grr binaries.
def validate_protocol(protocol):
    """Validate a protocol string and return it lower-cased.

    Raises:
        ValueError: if *protocol* does not match ``PROTOCOL_REGEX``.
    """
    if re.match(PROTOCOL_REGEX, protocol) is None:
        raise ValueError(f'invalid protocol: {protocol}')
    return protocol.lower()
Validate a protocol, a string, and return it.
def print_title(title: str) -> None:
    """Print *title* as a bright green banner padded with '*' to 80 columns."""
    msg = "**** {} ".format(title)
    padding = "*" * (80 - len(msg))
    print("{}{}{}{}".format(Style.BRIGHT, Fore.GREEN, msg, padding))
Helper function to print a title.
def destroy(name):
    """Remove a node from Vultr.

    Returns True when the API reports success (empty body and text);
    otherwise returns the raw query result for inspection.
    """
    node = show_instance(name, call='action')
    params = {'SUBID': node['SUBID']}
    result = _query(
        'server/destroy',
        method='POST',
        decode=False,
        data=_urlencode(params),
    )
    success = result.get('body') == '' and result.get('text') == ''
    return True if success else result
Remove a node from Vultr
def handler(self): printtime('Creating and populating objects', self.start) self.populate() printtime('Populating {} sequence profiles'.format(self.analysistype), self.start) self.profiler() self.annotatethreads() self.cdsthreads() self.cdssequencethreads() ...
Run the required analyses
def apply_limit_to_sql(cls, sql, limit, database): if cls.limit_method == LimitMethod.WRAP_SQL: sql = sql.strip('\t\n ;') qry = ( select('*') .select_from( TextAsFrom(text(sql), ['*']).alias('inner_qry'), ) ...
Alters the SQL statement to apply a LIMIT clause
def isroutine(object):
    """Return true if the object is any kind of function or method."""
    checks = (isbuiltin, isfunction, ismethod, ismethoddescriptor)
    return any(check(object) for check in checks)
Return true if the object is any kind of function or method.
def _display_stream(normalized_data, stream): try: stream.write(normalized_data['stream']) except UnicodeEncodeError: stream.write(normalized_data['stream'].encode("utf-8"))
print stream message from docker-py stream.
def mtf_bitransformer_tiny():
    """Small encoder-decoder hparams for testing."""
    hparams = mtf_bitransformer_base()
    hparams.batch_size = 2
    hparams.mesh_shape = ""
    hparams.d_model = 128
    # Two repeats of (self-attention, dense-relu-dense); the decoder adds
    # encoder attention between them.
    hparams.encoder_layers = ["self_att", "drd"] * 2
    hparams.decoder_layers = ["self_att", "enc_att", "drd"] * 2
    hparams.num_heads = 4
    hparams.d_ff = 512
    return hparams
Small encoder-decoder model for testing.
def orchestration(self): if self._orchestration is not None: return self._orchestration API_VERSIONS = { '1': 'heatclient.v1.client.Client', } heat_client = utils.get_client_class( API_NAME, self._instance._api_version[API_NAME], ...
Returns an orchestration service client
def assert_looks_like(first, second, msg=None):
    """Compare two strings after coalescing all contiguous whitespace.

    Both strings are stripped and internal whitespace runs collapsed to a
    single space before comparison.

    Raises:
        AssertionError: if the normalized strings differ; *msg* overrides
            the default error message when given.
    """
    # Raw strings: "\s" in a plain literal is an invalid escape sequence
    # (SyntaxWarning on modern Python).
    first = _re.sub(r"\s+", " ", first.strip())
    second = _re.sub(r"\s+", " ", second.strip())
    if first != second:
        raise AssertionError(msg or "%r does not look like %r" % (first, second))
Compare two strings after coalescing all contiguous whitespace.
def _get_cache_filename(name, filename): filename = os.path.abspath(filename)[1:] home_folder = os.path.expanduser('~') base_cache_dir = os.path.join(home_folder, '.git-lint', 'cache') return os.path.join(base_cache_dir, name, filename)
Returns the cache location for filename and linter name.
def _encode_json(obj): def _dump_obj(obj): if isinstance(obj, dict): return obj d = dict() for k in dir(obj): if not k.startswith('_'): d[k] = getattr(obj, k) return d return json.dumps(obj, default=_dump_obj)
Encode object as json str.
def product(sequence, initial=1):
    """Like the built-in ``sum``, but for multiplication.

    Args:
        sequence: an iterable of factors.
        initial: starting value, and the result for an empty iterable.

    Raises:
        TypeError: if *sequence* is not iterable.
    """
    # collections.Iterable was removed in Python 3.10; the ABC lives in
    # collections.abc.
    if not isinstance(sequence, collections.abc.Iterable):
        raise TypeError("'{}' object is not iterable".format(type(sequence).__name__))
    return reduce(operator.mul, sequence, initial)
like the built-in sum, but for multiplication.
def append_waiting_queue(self, transfer_coordinator):
    """Append *transfer_coordinator* to the waiting-coordinators queue.

    The append itself is guarded by ``self._lockw`` so concurrent
    producers cannot interleave.
    """
    # NOTE(review): the count is read outside the lock, so the logged
    # value may be slightly stale — presumably acceptable for debug logs.
    logger.debug("Add to waiting queue count=%d" % self.waiting_coordinator_count())
    with self._lockw:
        self._waiting_transfer_coordinators.append(transfer_coordinator)
append item to waiting queue
def list_hooks(self, key_name):
    """Return a list of all hooks attached to *key_name*.

    Returns an empty list when the key entry has no ``hooks`` mapping.
    """
    entry = self.dct[key_name]
    if 'hooks' not in entry:
        return []
    # dict.iterkeys() is Python 2 only; list(...) also honors the
    # documented "return a list" contract.
    return list(entry['hooks'])
Return list of all hooks attached to key_name.
def center_of_mass(self):
    """Center of mass of the molecule, weighted by species weight."""
    weighted_sum = np.zeros(3)
    total_weight = 0
    for site in self:
        weight = site.species.weight
        weighted_sum += site.coords * weight
        total_weight += weight
    return weighted_sum / total_weight
Center of mass of molecule.
def rst_add_mathjax(content):
    """Append the MathJax script to reStructuredText content that uses math."""
    _, extension = os.path.splitext(os.path.basename(content.source_path))
    if extension != '.rst':
        return
    if 'class="math"' in content._content:
        script = "<script type='text/javascript'>%s</script>" % rst_add_mathjax.mathjax_script
        content._content += script
Adds mathjax script for reStructuredText
def _delay(self): if not self.next_scheduled: self.next_scheduled = self.clock_func() + self.interval return while True: current = self.clock_func() if current >= self.next_scheduled: extratime = current - self.next_scheduled ...
Delay for between zero and self.interval time units
def initialize_dendrites(self):
    """Initialize all dendrites to random (non-repeating) synaptic connections."""
    self.dendrites = SM32()
    self.dendrites.reshape(self.dim, self.num_dendrites)
    for dendrite in range(self.num_dendrites):
        chosen = numpy.random.choice(self.dim, self.dendrite_length, replace=False)
        for synapse in chosen:
            self.dendrites[synapse, dendrite] = 1
Initialize all the dendrites of the neuron to a set of random connections
def missing_nodes(self):
    """The set of target addresses known as dependencies but not yet defined."""
    return {
        addr
        for addr, attrs in self.graph.node.items()
        if 'target_obj' not in attrs
    }
The set of targets known as dependencies but not yet defined.
def randomize(length=6, choices=None):
    """Return a random string of the given length.

    Args:
        length: number of characters to generate.
        choices: pool of characters (string or sequence); defaults to
            lowercase ASCII letters.
    """
    # isinstance (not type(...) == str) also accepts str subclasses.
    if isinstance(choices, str):
        choices = list(choices)
    choices = choices or ascii_lowercase
    return "".join(choice(choices) for _ in range(length))
Returns a random string of the given length.
def readSB(self, bits):
    """Read a signed integer of *bits* bits, sign-extended via a 32-bit shift."""
    shift = 32 - bits
    raw = self.readbits(bits)
    # Shift left then arithmetic-shift right to propagate the sign bit.
    return int32(raw << shift) >> shift
Read a signed int using the specified number of bits
def _run_progress_callbacks(self, bytes_transferred): if bytes_transferred: for callback in self._progress_callbacks: try: callback(bytes_transferred=bytes_transferred) except Exception as ex: logger.error("Exception: %s" % str(...
pass the number of bytes process to progress callbacks
def timer():
    """Return the current reading of the platform's elapsed-time clock.

    ``time.clock`` was removed in Python 3.8; ``time.perf_counter`` is the
    documented replacement and is available on all platforms.
    """
    if sys.platform == "win32":
        # Prefer the legacy clock when it still exists (Python < 3.8).
        default_timer = getattr(time, "clock", time.perf_counter)
    else:
        default_timer = time.time
    return default_timer()
Timer used to calculate elapsed time.
def exception_handle(method): def wrapper(*args, **kwargs): try: result = method(*args, **kwargs) return result except ProxyError: LOG.exception('ProxyError when try to get %s.', args) raise ProxyError('A proxy error occurred.') except Connecti...
Handle exception raised by requests library.
def _bp(editor, force=False):
    """Go to the previous buffer.

    Refuses when the active buffer has unsaved changes, unless *force*.
    """
    buf = editor.window_arrangement.active_editor_buffer
    if force or not buf.has_unsaved_changes:
        editor.window_arrangement.go_to_previous_buffer()
    else:
        editor.show_message(_NO_WRITE_SINCE_LAST_CHANGE_TEXT)
Go to previous buffer.
def _get_feed_data(self, file_paths): rv = {} for i in file_paths: _ = i.split('/') category = _[-2] name = _[-1].split('.')[0] page_config, md = self._get_config_and_content(i) parsed_md = tools.parse_markdown(md, self.site_config) ...
get data to display in feed file
def _get_name(self): if self.name is not None: return self.name if self.scoring_ is None: return 'score' if isinstance(self.scoring_, str): return self.scoring_ if isinstance(self.scoring_, partial): return self.scoring_.func.__name__ ...
Find name of scoring function.
def python(code, show=True): setup = "import os;" \ "os.environ[\'DJANGO_SETTINGS_MODULE\']=\'%s.settings\';" \ "import django;" \ "django.setup();" % env.proj_app full_code = 'python -c "%s%s"' % (setup, code.replace("`", "\\\`")) with project(): if show: ...
Runs Python code in the project's virtual environment, with Django loaded.
def _find_by_chain(self, browser, criteria, tag, constraints):
    """Find elements matching an iOS class-chain string, then filter them."""
    candidates = browser.find_elements_by_ios_class_chain(criteria)
    return self._filter_elements(candidates, tag, constraints)
Find element matches by iOSChainString.
def reboot(self, devices):
    """Reboot one or more devices, raising on any API failure."""
    for dev in devices:
        self.logger.info('Rebooting: %s', dev.id)
        try:
            dev.reboot()
        except packet.baseapi.Error:
            raise PacketManagerException('Unable to reboot instance "{}"'.format(dev.id))
Reboot one or more devices.
def ivorn_present(session, ivorn):
    """Predicate: is the given IVORN already stored in the database?"""
    matches = session.query(Voevent.id).filter(Voevent.ivorn == ivorn).count()
    return bool(matches)
Predicate, returns whether the IVORN is in the database.
def _required_attribute(element, name, default): if element.get(name) is None: element.set(name, default)
Add attribute with default value to element if it doesn't already exist.
def _main_loop(self): self.logger.debug("Running main loop") old_time = 0 while True: for plugin_key in self.plugins_dict: obj = self.plugins_dict[plugin_key] self._process_plugin(obj) if self.settings['STATS_DUMP'] != 0: ne...
The internal while true main loop for the redis monitor
def _flip_sign_row(self, i): L = np.eye(3, dtype='intc') L[i, i] = -1 self._L.append(L.copy()) self._A = np.dot(L, self._A)
Multiply -1 for all elements in row
def __add_shared(self, original_token):
    """Add a token, normalizing its SID and import reference to this table."""
    sid = self.__new_sid()
    shared = SymbolToken(original_token.text, sid, self.__import_location(sid))
    self.__add(shared)
    return shared
Adds a token, normalizing the SID and import reference to this table.
def __html_rep(self, game_key, rep_code):
    """Retrieve the NHL HTML report for the given game and report code."""
    seas, gt, num = game_key.to_tuple()
    url = "".join([
        self.__domain,
        "scores/htmlreports/",
        str(seas - 1), str(seas), "/",
        rep_code, "0", str(gt),
        ("%04i" % (num)),
        ".HTM",
    ])
    return self.__open(url)
Retrieves the nhl html reports for the specified game and report code
def local_not_complete(self):
    """Return True while the local subprocess exists and is still running."""
    proc = self.process
    if proc and proc.poll() is None:
        self._timeout_check()
        return True
    return False
Returns True if task is local and not completed
def interlink_static_files(generator): if generator.settings['STATIC_PATHS'] != []: return try: static_content = generator.context['static_content'] except KeyError: static_content = generator.context['filenames'] relpath = relpath_to_site(generator.settings['DEFAULT_LANG'], _MAI...
Add links to static files in the main site if necessary
def export(self, name, columns, points): logger.debug("Export {} stats to ZeroMQ".format(name)) data = dict(zip(columns, points)) if data == {}: return False message = [b(self.prefix), b(name), asbytes(json.dumps(data))] try: ...
Write the points to the ZeroMQ server.
def route(self, path): logging.getLogger(__name__).debug("Routing path '%s'.", path) cls = None for strategy in self._strategies: if strategy.can_route(path): cls = strategy.route(path) break if cls is None: raise RoutingError(path)...
Returns the task handling the given request path.
def _get_intra_event_std(self, C, mag, sa1180, vs30, vs30measured, rrup): phi_al = self._get_phi_al_regional(C, mag, vs30measured, rrup) derAmp = self._get_derivative(C, sa1180, vs30) phi_amp = 0.4 idx = phi_al < phi_amp if np.any(idx): ph...
Returns Phi as described at pages 1046 and 1047
def import_model(cls, ins_name):
    """Import a model class from the configured models package.

    Raises:
        ValueError: if the class defines no ``package_space``.
    """
    if not hasattr(cls, 'package_space'):
        raise ValueError('package_space not exist')
    return import_object(ins_name, cls.package_space)
Import model class in models package
def type(self):
    """Return the python type used to store data of this FeatureType."""
    special_types = {
        FeatureType.TIMESTAMP: list,
        FeatureType.BBOX: BBox,
    }
    return special_types.get(self, dict)
Returns type of the data for the given FeatureType.
def main(client_id, client_secret, scopes): client_config = ClientConfigBuilder( client_type=ClientConfigBuilder.CLIENT_TYPE_WEB, client_id=client_id, client_secret=client_secret) flow = InstalledAppFlow.from_client_config( client_config.Build(), scopes=scopes) flow.redirect_uri = _REDIRECT_URI ...
Retrieve and display the access and refresh token.
def calibrate_data(params, raw_data, calib_data): start = calib_data.before(datetime.max) if start is None: start = datetime.min start = raw_data.after(start + SECOND) if start is None: return start del calib_data[start:] calibrator = Calib(params, raw_data) def calibgen(inpu...
Calibrate raw data, using a user-supplied function.
def _get_error_message(self, model_class, method_name, action_method_name): if action_method_name: return "'{}' does not have '{}' or '{}' defined.".format(model_class, method_name, action_method_name) else: return "'{}' does not have '{}' defined.".format(model_class, method_nam...
Get assertion error message depending if there are actions permissions methods defined.
def OnMacroToolbarToggle(self, event):
    """Handle toggling of the macro toolbar pane."""
    self.main_window.macro_toolbar.SetGripperVisible(True)
    pane_info = self.main_window._mgr.GetPane("macro_toolbar")
    self._toggle_pane(pane_info)
    event.Skip()
Macro toolbar toggle event handler
def pseudo_partial_waves(self):
    """Dictionary with the pseudo partial waves, indexed by state."""
    waves = OrderedDict()
    for mesh, values, attrib in self._parse_all_radfuncs("pseudo_partial_wave"):
        waves[attrib["state"]] = RadialFunction(mesh, values)
    return waves
Dictionary with the pseudo partial waves indexed by state.
def compute(self, inputVector, learn, activeArray):
    """Run one SpatialPooler compute step, then update co-activity stats.

    Delegates the actual spatial pooling to the parent class and then
    records average pairwise activity of the resulting active columns.
    """
    super(SpatialPoolerWrapper, self).compute(inputVector, learn, activeArray)
    self._updateAvgActivityPairs(activeArray)
This method resembles the primary public method of the SpatialPooler class.
def html(self) -> str:
    """Whole-HTML representation of this node, wrapping any inner element."""
    inner = self._inner_element
    if not inner:
        return super().html
    return self.start_tag + inner.html + self.end_tag
Get whole html representation of this node.
def display_callback(self, cpu_cycles, op_address, address, value):
    """Memory write-byte middleware: mirror the write to the display.

    Returns *value* unchanged so the write proceeds normally.
    """
    self.display.write_byte(cpu_cycles, op_address, address, value)
    return value
called via memory write_byte_middleware
def _CreateZMQSocket(self): super(ZeroMQBufferedQueue, self)._CreateZMQSocket() if not self._zmq_thread: thread_name = '{0:s}_zmq_responder'.format(self.name) self._zmq_thread = threading.Thread( target=self._ZeroMQResponder, args=[self._queue], name=thread_name) self._zmq_thread.sta...
Creates a ZeroMQ socket as well as a regular queue and a thread.
def guess_width(self): if len(self.header) <= 4: nspace = 6 elif len(self.header) <= 6: nspace = 5 else: nspace = 4 ncol = len(self.header) self._width = [nspace] * ncol width = [0] * ncol for idx, item in enumerate(self.header)...
auto fit column width
def _string_to_substitute(self, mo, methods_dict): matched_text, f_name = mo.groups() if f_name not in methods_dict: return matched_text a_tree = ast.parse(matched_text) args_list = [self._get_value_from_ast(a) for a in a_tree.body[0].value.args] kwargs_list = { ...
Return the string to be substituted for the match.
def reporter(self): logging.info('Creating {} report'.format(self.analysistype)) make_path(self.reportpath) header = 'Strain,ReferenceGenus,ReferenceFile,ReferenceGenomeMashDistance,Pvalue,NumMatchingHashes\n' data = '' for sample in self.metadata: try: ...
Create the MASH report
def _add_meta_info(self, eopatch, request_params, service_type): for param, eoparam in zip(['time', 'time_difference', 'maxcc'], ['time_interval', 'time_difference', 'maxcc']): if eoparam not in eopatch.meta_info: eopatch.meta_info[eoparam] = request_params[param] if 'service...
Adds any missing metadata info to EOPatch
def human_size_to_bytes(human_size): size_exp_map = {'K': 1, 'M': 2, 'G': 3, 'T': 4, 'P': 5} human_size_str = six.text_type(human_size) match = re.match(r'^(\d+)([KMGTP])?$', human_size_str) if not match: raise ValueError( 'Size must be all digits, with an optional unit type ' ...
Convert human-readable units to bytes
def _request(self, base_url, client_id, client_secret, parameters, **kwargs): logging.debug('Getting an OAuth token for client "%s" with scope "%s"', client_id, parameters.get('scope')) headers = {'Content-Type': 'application/x-www-form-urlencoded', ...
Make an API request to get the token
def trace(self):
    """Print the error trace if any errors were recorded, then clear them."""
    if not self.errors:
        return
    numerrs = len(self.errors)
    print("========= Trace (" + str(numerrs) + ") =========")
    self._print_errs()
    self.errors = []
Print the errors trace if there are some errors
def value_at_coord(dset, coords):
    """Return the value at the specified coordinate in ``dset``."""
    cmd = ['3dmaskave', '-q', '-dbox'] + list(coords) + [dset]
    return nl.numberize(nl.run(cmd, stderr=None).output)
returns value at specified coordinate in ``dset``
def _glyph_for_monomer_pattern(self, pattern): pattern.matches_key = lambda: str(pattern) agent_id = self._make_agent_id(pattern) if pattern.monomer.name in ('__source', '__sink'): return None glyph = emaker.glyph(emaker.label(text=pattern.monomer.name), ...
Add glyph for a PySB MonomerPattern.
def startAll(self):
    """Start all registered Workers."""
    self.logger.info("Starting all workers...")
    for worker in self.getWorkers():
        proc = self.getWorker(worker)
        self.logger.debug("Starting {0}".format(proc.name))
        proc.start()
    self.logger.info("Started all workers")
Start all registered Workers.
def terminal_size():
    """Detect the current terminal size as a (columns, rows) tuple.

    Falls back to (80, 24) when the size cannot be determined (e.g. no
    controlling terminal). Uses ``shutil.get_terminal_size`` instead of
    shelling out to ``stty`` and hiding every failure behind a bare
    ``except:``.
    """
    size = shutil.get_terminal_size(fallback=(80, 24))
    return (size.columns, size.lines)
Detect the current size of the terminal window as a number of rows and columns.
def do_shell(self, args: argparse.Namespace) -> None: import subprocess tokens = [args.command] + args.command_args for index, _ in enumerate(tokens): if tokens[index]: first_char = tokens[index][0] if first_char in constants.QUOTES: ...
Execute a command as if at the OS prompt
def put(self, endpoint: str, **kwargs) -> dict:
    """HTTP PUT operation to an API endpoint.

    Args:
        endpoint: path of the API endpoint.
        **kwargs: forwarded unchanged to ``_request``.

    Returns:
        dict: the decoded response from ``_request``.
    """
    return self._request('PUT', endpoint, **kwargs)
HTTP PUT operation to API endpoint.
def _download_extract(self, resource):
    """Download then extract `Resource` (or url); returns Promise->path."""
    if isinstance(resource, six.string_types):
        resource = resource_lib.Resource(url=resource)

    def _on_downloaded(path):
        resource.path = path
        return self._extract(resource)

    return self._download(resource).then(_on_downloaded)
Download-extract `Resource` or url, returns Promise->path.
def OnPasteAs(self, event):
    """Handle the clipboard "paste as" event."""
    clipboard_data = self.main_window.clipboard.get_clipboard()
    cursor_key = self.main_window.grid.actions.cursor
    with undo.group(_("Paste As...")):
        self.main_window.actions.paste_as(cursor_key, clipboard_data)
    self.main_window.grid.ForceRefresh()
    event.Skip()
Clipboard paste as event handler
def pop(self, key=util_const.NoParam, default=util_const.NoParam): if key is not util_const.NoParam: if default is util_const.NoParam: return (key, self._dict.pop(key)) else: return (key, self._dict.pop(key, default)) try: _heap = self....
Pop the next item off the queue
def live_scores(self, live_scores): headers = ['League', 'Home Team Name', 'Home Team Goals', 'Away Team Goals', 'Away Team Name'] result = [headers] result.extend([game['league'], game['homeTeamName'], game['goalsHomeTeam'], game['goalsAwayTeam'], ...
Store output of live scores to a CSV file
def RunValidationOutputToFilename(feed, options, output_filename): try: output_file = open(output_filename, 'w') exit_code = RunValidationOutputToFile(feed, options, output_file) output_file.close() except IOError as e: print('Error while writing %s: %s' % (output_filename, e)) output_filename =...
Validate feed, save HTML at output_filename and return an exit code.
def morelikethis(self, index, doc_type, id, fields, **query_params): path = make_path(index, doc_type, id, '_mlt') query_params['mlt_fields'] = ','.join(fields) body = query_params["body"] if "body" in query_params else None return self._send_request('GET', path, body=body, params=query_...
Execute a "more like this" search query against one or more fields and get back search hits.
def textMD5(text):
    """Return the MD5 hex digest of *text* (str or bytes)."""
    digest = hash_md5()
    payload = text.encode() if isinstance(text, str) else text
    digest.update(payload)
    return digest.hexdigest()
Get md5 of a piece of text
def _find_titles(self, row_index, column_index): titles = [] for column_search in range(self.start[1], column_index): cell = self.table[row_index][column_search] if cell == None or (isinstance(cell, basestring) and not cell): continue elif isinstance(c...
Helper method to find all titles for a particular cell.
def plot_by_gene_and_domain(name, seqs, tax, id2name):
    """Plot insertions for each (gene, domain) combination found in *seqs*."""
    # Compute both sets once; the original re-scanned seqs.values() on
    # every iteration of both loops.
    values = list(seqs.values())
    genes = {seq[0] for seq in values}
    domains = {seq[1] for seq in values}
    for gene in genes:
        for domain in domains:
            plot_insertions(name, seqs, gene, domain, tax, id2name)
plot insertions for each gene and domain
def _quantize(x, params, randomize=True): if not params.quantize: return x if not randomize: return tf.bitcast( tf.cast(x / params.quantization_scale, tf.int16), tf.float16) abs_x = tf.abs(x) sign_x = tf.sign(x) y = abs_x / params.quantization_scale y = tf.floor(y + tf.random_uniform(common_...
Quantize x according to params, optionally randomizing the rounding.
def updateEditorGeometry(self, editor, option, index): cti = index.model().getItem(index) if cti.checkState is None: displayRect = option.rect else: checkBoxRect = widgetSubCheckBoxRect(editor, option) offset = checkBoxRect.x() + checkBoxRect.width() ...
Ensures that the editor is displayed correctly with respect to the item view.
def with_source(self, lease):
    """Initialize this Azure Blob lease from an existing *lease*.

    Copies the checkpoint offset and sequence number after delegating
    to the parent implementation.
    """
    super().with_source(lease)
    self.offset = lease.offset
    self.sequence_number = lease.sequence_number
Init Azure Blob Lease from existing.
def fetch_json(self, method, url, data=None, expected_status_code=None):
    """Fetch *url* and return the JSON-decoded response body."""
    response = self.fetch(method, url, data, expected_status_code)
    return response.json()
Return json decoded data from fetch
def list_customer_users(self, customer_id):
    """List all users from the specified customer id.

    Returns a concrete list of ``FastlyUser`` objects — on Python 3 the
    original ``map(...)`` was a lazy iterator that could only be
    consumed once, contradicting the "list" contract.
    """
    content = self._fetch("/customer/users/%s" % customer_id)
    return [FastlyUser(self, item) for item in content]
List all users from a specified customer id.
def _from_dict(cls, _dict):
    """Initialize a Batches object from a json dictionary."""
    kwargs = {}
    if 'batches' in _dict:
        kwargs['batches'] = [
            BatchStatus._from_dict(item) for item in _dict.get('batches')
        ]
    return cls(**kwargs)
Initialize a Batches object from a json dictionary.
def change_id(self, new_id):
    """Change the id of this content, renewing its filename and filepath."""
    self._load_raw_content()
    self._id = new_id
    self.get_filename(renew=True)
    self.get_filepath(renew=True)
Change the id of this content.
def getBlizzBotPlayers():
    """Identify all of Blizzard's built-in (computer) bot players.

    Returns a dict of name -> player for every known computer player.
    """
    # dict comprehension over .items() replaces the Python-2-era
    # iteritems() helper; same mapping, idiomatic Python 3.
    return {
        name: player
        for name, player in getKnownPlayers().items()
        if player.isComputer
    }
identify all of Blizzard's built-in bots
def invcdf(x):
    """Inverse of the normal cumulative density function, elementwise."""
    flat = np.ravel(x)
    transformed = np.array([flib.ppnd16(v, 1) for v in flat])
    return np.reshape(transformed, np.shape(x))
Inverse of normal cumulative density function.
def save_config(self, cmd="write", confirm=False, confirm_response=""):
    """Save the configuration using the ``write`` command.

    Thin wrapper forwarding to the base-class implementation with
    OcNOS-appropriate defaults.
    """
    return super(IpInfusionOcNOSBase, self).save_config(
        cmd=cmd, confirm=confirm, confirm_response=confirm_response
    )
Saves Config Using write command
def png_img_html_from_pyplot_figure(fig: "Figure", dpi: int = 100, extra_html_class: str = None) -> str: if fig is None: return "" memfile = io.BytesIO() fig.savefig(memfile, format="png", dpi=dpi) memfile.seek(0) pngblo...
Converts a ``pyplot`` figure to an HTML IMG tag with encapsulated PNG.
def cluster_autocomplete(self, text, line, start_index, end_index): "autocomplete for the use command, obtain list of clusters first" if not self.CACHED_CLUSTERS: clusters = [cluster.name for cluster in api.get_all_clusters()] self.CACHED_CLUSTERS = clusters if text: ...
autocomplete for the use command, obtain list of clusters first
def reset(self):
    """Reset the calibration to its initial state.

    Restores the weight holder's array from the saved initial weights.
    """
    simulation = self.survey_scenario.simulation
    holder = simulation.get_holder(self.weight_name)
    # dtype is taken from the holder's variable so the restored array
    # matches what the simulation expects.
    holder.array = numpy.array(self.initial_weight, dtype = holder.variable.dtype)
Reset the calibration to its initial state.
def upload(self, local_path, remote_url):
    """Copy a local file to an S3 location via a PutObject call."""
    bucket, key = _parse_url(remote_url)
    with open(local_path, 'rb') as body:
        return self.call("PutObject", bucket=bucket, key=key, body=body)
Copy a local file to an S3 location.
def Escape(self, string="", **_):
    """Append the unescaped form of *string* to the top of the stack.

    Unknown escapes pass through unchanged.
    """
    unescaped = self.STRING_ESCAPES.get(string, string)
    self.stack[-1] += unescaped
Support standard string escaping.
def launch(self, tunnelPorts=None): if self.isLocal(): c = self._getWorkerCommandList() self.subprocesses.append(subprocess.Popen(c)) else: BASE_SSH[0] = self.ssh_executable sshCmd = BASE_SSH if not self.rsh else BASE_RSH if tunnelPorts is not ...
Launch every worker assigned on this host.
def run_task(self, task, source_patterns=None): self.log('Running Task: %s...' % task.get('name', ''), 1) self.default_encoding = task.get('default_encoding', '') options = self.setup_spellchecker(task) personal_dict = self.setup_dictionary(task) glob_flags = self._to_flags(task....
Walk source and initiate spell check.
def check_config(config): essential_keys = ['number_earthquakes'] for key in essential_keys: if key not in config: raise ValueError('For Kijko Nonparametric Gaussian the key %s ' 'needs to be set in the configuation' % key) if config.get('tolerance', 0.0) <= ...
Check config file inputs and overwrite bad values with the defaults
def delete_by_user_name(user_name):
    """Delete the member row matching *user_name*.

    Returns:
        True on success, False on any database error.
    """
    try:
        query = TabMember.delete().where(TabMember.user_name == user_name)
        query.execute()
        return True
    except Exception:
        # Narrowed from a bare ``except:`` so SystemExit and
        # KeyboardInterrupt are no longer swallowed; the best-effort
        # True/False contract is preserved.
        return False
Delete user in the database by `user_name`.
def unpacktar(tarfile, destdir): nullfd = open(os.devnull, "w") tarfile = cygpath(os.path.abspath(tarfile)) log.debug("unpack tar %s into %s", tarfile, destdir) try: check_call([TAR, '-xzf', tarfile], cwd=destdir, stdout=nullfd, preexec_fn=_noumask) except Exception: ...
Unpack given tarball into the specified dir
def parseSLURM(string): import subprocess, os hostsstr = subprocess.check_output(["scontrol", "show", "hostnames", string]) if sys.version_info.major > 2: hostsstr = hostsstr.decode() hosts = hostsstr.split(os.linesep) hosts = filter(None, hosts) hosts = [(host, 1) for host in hosts] ...
Return a host list from a SLURM string
def omnigraffle(self): temp = self.rdf_source("dot") try: from os.path import expanduser home = expanduser("~") filename = home + "/tmp/turtle_sketch.dot" f = open(filename, "w") except: filename = "turtle_sketch.dot" f = open(filename, "w") f.write(temp) f.close() try: os.system("open ...
tries to open an export directly in omnigraffle