code
stringlengths
51
2.34k
docstring
stringlengths
11
171
def write(path, doc, mode=MODE_TSV, **kwargs):
    """Write *doc* to *path* in TTL-TXT (default) or JSON format.

    MODE_TSV routes through TxtWriter; MODE_JSON delegates to write_json,
    forwarding any extra keyword arguments. Any other mode is silently
    ignored (there is no else branch).
    """
    if mode == MODE_TSV:
        with TxtWriter.from_path(path) as writer:
            writer.write_doc(doc)
    elif mode == MODE_JSON:
        write_json(path, doc, **kwargs)
Helper function to write doc to TTL-TXT format
def _dump_header(self):
    """Initially dump the file head and tail.

    Writes the header then the trailer, recording the in-between seek
    position in self._sptr so content can later be inserted there.
    """
    with open(self._file, 'w') as _file:
        _file.write(self._hsrt)
        # Remember where the head ends before appending the tail.
        self._sptr = _file.tell()
        _file.write(self._hend)
Initially dump file heads and tails.
def tasks(self, name):
    """Return every task registered under *name*.

    A Shovel (task group) match expands to all of its member tasks; a
    single task comes back as a one-element list.
    """
    match = self[name]
    if not isinstance(match, Shovel):
        return [match]
    return [task for _, task in match.items()]
Get all the tasks that match a name
def sample_double_norm(mean, std_upper, std_lower, size): from scipy.special import erfinv samples = np.empty(size) percentiles = np.random.uniform(0., 1., size) cutoff = std_lower / (std_lower + std_upper) w = (percentiles < cutoff) percentiles[w] *= 0.5 / cutoff samples[w] = mean + np.sqrt...
Note that this function requires Scipy.
def refresh(self, key):
    """Recreate and re-persist the source stored under unique ID *key*.

    Re-uses the persist kwargs recorded in the original source's metadata.
    """
    s0 = self[key]
    s = self.backtrack(key)
    s.persist(**s0.metadata['persist_kwargs'])
Recreate and re-persist the source for the given unique ID
def register_jobs(self, job_dict): njobs = len(job_dict) sys.stdout.write("Registering %i total jobs: " % njobs) for i, job_details in enumerate(job_dict.values()): if i % 10 == 0: sys.stdout.write('.') sys.stdout.flush() self.register_job(...
Register a bunch of jobs in this archive
def node_copy(node, nodefactory=Node):
    """Make a deep copy of *node* (tag, attributes, text, children),
    constructing each copy with *nodefactory*.
    """
    children = [node_copy(child, nodefactory) for child in node]
    return nodefactory(node.tag, node.attrib.copy(), node.text, children)
Make a deep copy of the node
def expand(self, id_user):
    """Expand (un-collapse) this comment for the given user by deleting
    the matching CmtCOLLAPSED row.
    """
    CmtCOLLAPSED.query.filter(db.and_(
        CmtCOLLAPSED.id_bibrec == self.id_bibrec,
        CmtCOLLAPSED.id_cmtRECORDCOMMENT == self.id,
        CmtCOLLAPSED.id_user == id_user)).delete(synchronize_session=False)
Expand comment belonging to the user.
def export_theta(ckout, data): cns_file = chromhacks.bed_to_standardonly(ckout["cns"], data, headers="chromosome") cnr_file = chromhacks.bed_to_standardonly(ckout["cnr"], data, headers="chromosome") out_file = "%s-theta.input" % utils.splitext_plus(cns_file)[0] if not utils.file_exists(out_file): ...
Provide updated set of data with export information for TheTA2 input.
def math_funcdef_handle(tokens):
    """Process an assignment function definition, joining the header and
    body with a space unless the body already starts on a new line.
    """
    internal_assert(len(tokens) == 2, "invalid assignment function definition tokens", tokens)
    head, body = tokens
    separator = "" if body.startswith("\n") else " "
    return head + separator + body
Process assignment function definition.
def _forward_iterator(self): "Returns a forward iterator over the trie" path = [(self, 0, Bits())] while path: node, idx, prefix = path.pop() if idx==0 and node.value is not None and not node.prune_value: yield (self._unpickle_key(prefix), self._unpickle_v...
Returns a forward iterator over the trie
def save(self, *args, **kwargs): adding_new = False if not self.pk or (not self.cpu and not self.ram): if self.model.cpu: self.cpu = self.model.cpu if self.model.ram: self.ram = self.model.ram adding_new = True super(DeviceToMod...
when creating a new record fill CPU and RAM info if available
def media_kind(kind): if kind in [1]: return const.MEDIA_TYPE_UNKNOWN if kind in [3, 7, 11, 12, 13, 18, 32]: return const.MEDIA_TYPE_VIDEO if kind in [2, 4, 10, 14, 17, 21, 36]: return const.MEDIA_TYPE_MUSIC if kind in [8, 64]: return const.MEDIA_TYPE_TV raise excepti...
Convert iTunes media kind to API representation.
def file_exists_on_unit(self, sentry_unit, file_name): try: sentry_unit.file_stat(file_name) return True except IOError: return False except Exception as e: msg = 'Error checking file {}: {}'.format(file_name, e) amulet.raise_status(amu...
Check if a file exists on a unit.
def timestamp(self, timestamp):
    """Return a deep copy of this record carrying a custom timestamp."""
    duplicate = copy.deepcopy(self)
    duplicate._timestamp = timestamp
    return duplicate
Allows for custom timestamps to be saved with the record.
def should_skip_logging(func):
    """Should we skip logging for this handler?

    True when the caller sent the "x-request-nolog" header or the handler
    carries the SKIP_LOGGING attribute.
    """
    header_opt_out = strtobool(request.headers.get("x-request-nolog", "false"))
    return header_opt_out or getattr(func, SKIP_LOGGING, False)
Should we skip logging for this handler?
def send_confirm_email_email(self, user, user_email): if not self.user_manager.USER_ENABLE_EMAIL: return if not self.user_manager.USER_ENABLE_CONFIRM_EMAIL: return email = user_email.email if user_email else user.email object_id = user_email.id if user_email else user.id token = ...
Send the 'email confirmation' email.
def lrun(command, *args, **kwargs):
    """Run a local command from the project root (prefixes a cd to ROOT)."""
    rooted = 'cd {0} && {1}'.format(ROOT, command)
    return run(rooted, *args, **kwargs)
Run a local command from project root
def determine_file_extension_based_on_format(format_specifier): if format_specifier == FMT_INI: return 'ini' if format_specifier == FMT_DELIMITED: return '' if format_specifier == FMT_XML: return 'xml' if format_specifier == FMT_JSON: return 'json' if format_specifier == FMT_YAML: return 'yml' raise Val...
returns file extension string
def preprocess_record(self, pid, record, links_factory=None, **kwargs): links_factory = links_factory or (lambda x, record=None, **k: dict()) metadata = copy.deepcopy(record.replace_refs()) if self.replace_refs \ else record.dumps() return dict( pid=pid, metad...
Prepare a record and persistent identifier for serialization.
def cycle(self, *args):
    """Cycle among *args* using the loop's current index (index0).

    Raises TypeError when called with no items.
    """
    if args:
        return args[self.index0 % len(args)]
    raise TypeError('no items for cycling given')
Cycles among the arguments with the current loop index.
def hideEvent(self, event):
    """Reimplemented Qt hide handler: disconnect the cursor-moved signal
    and remove this widget's event filter from the text edit.
    """
    super(CallTipWidget, self).hideEvent(event)
    self._text_edit.cursorPositionChanged.disconnect(
        self._cursor_position_changed)
    self._text_edit.removeEventFilter(self)
Reimplemented to disconnect signal handlers and event filter.
def addOutParameter(self, name, type, namespace=None, element_type=0):
    """Add an output-parameter description to this call info and return it."""
    info = ParameterInfo(name, type, namespace, element_type)
    self.outparams.append(info)
    return info
Add an output parameter description to the call info.
def team_integrationLogs(self, **kwargs) -> SlackResponse:
    """Get the integration logs for the current team.

    Requires an xoxp user token; extra kwargs become query parameters.
    """
    self._validate_xoxp_token()
    return self.api_call("team.integrationLogs", http_verb="GET", params=kwargs)
Gets the integration logs for the current team.
def xml(self, text=TEXT): def convert(line): xml = " <item>\n" for f in line.index: xml += " <field name=\"%s\">%s</field>\n" % (f, line[f]) xml += " </item>\n" return xml return "<items>\n" + '\n'.join(self._data.apply(convert, axis=1...
Generate an XML output from the report data.
def _log_send(self, logrecord):
    """Stamp the configured extension fields onto *logrecord*, then
    forward it to the frontend on the "log" band.
    """
    for field_name, field_value in self.__log_extensions:
        setattr(logrecord, field_name, field_value)
    self.__send_to_frontend({"band": "log", "payload": logrecord})
Forward log records to the frontend.
def list_domains(self):
    """Return all domains as a mapping of domain name to its subdomains."""
    self.connect()
    records = self.server.list_domains(self.session_id)
    domains = {}
    for record in records:
        domains[record['domain']] = record['subdomains']
    return domains
Return all domains. Each domain name is a key, with its subdomains grouped under it.
def run_forever(self): res = self.slack.rtm.start() self.log.info("current channels: %s", ','.join(c['name'] for c in res.body['channels'] if c['is_member'])) self.id = res.body['self']['id'] self.name = res.body['self']['name'] ...
Run the bot, blocking forever.
def spreadsheet(service, id):
    """Fetch and return spreadsheet metadata via the Google Sheets API.

    Raises KeyError(id) when the spreadsheet does not exist (HTTP 404);
    any other HttpError is re-raised.
    """
    request = service.spreadsheets().get(spreadsheetId=id)
    try:
        response = request.execute()
    except apiclient.errors.HttpError as e:
        if e.resp.status == 404:
            raise KeyError(id)
        else:
            raise
    return response
Fetch and return spreadsheet meta data with Google sheets API.
def _init_template(self, cls, base_init_template): if self.__class__ is not cls: raise TypeError("Inheritance from classes with @GtkTemplate decorators " "is not allowed at this time") connected_signals = set() self.__connected_template_signals__ = connected_signals base_...
This would be better as an override for Gtk.Widget
def _calc_d(aod700, p): p0 = 101325. dp = 1/(18 + 152*aod700) d = -0.337*aod700**2 + 0.63*aod700 + 0.116 + dp*np.log(p/p0) return d
Calculate the d coefficient.
def contexts(self):
    """Return known contexts by name, building and caching the mapping on
    first access (values are deep copies of the raw context data).
    """
    if not hasattr(self, "_contexts"):
        self._contexts = {
            entry["name"]: copy.deepcopy(entry["context"])
            for entry in self.doc["contexts"]
        }
    return self._contexts
Returns known contexts by exposing as a read-only property.
def _loopreport(self): while 1: eventlet.sleep(0.2) ac2popenlist = {} for action in self.session._actions: for popen in action._popenlist: if popen.poll() is None: lst = ac2popenlist.setdefault(action.activity, []) ...
Loop over the report progress
def insert_sequences_into_tree(aln, moltype, params={}): new_aln=get_align_for_phylip(StringIO(aln)) aln2 = Alignment(new_aln) seqs = aln2.toFasta() parsinsert_app = ParsInsert(params=params) result = parsinsert_app(seqs) tree = DndParser(result['Tree'].read(), constructor=PhyloNode) result....
Returns a tree from placement of sequences
def cookie_eater(ctx):
    """Eat cookies as they're baked: mark this handler ready, then react
    to every change of the shared "cookies" state entry.
    """
    state = ctx.create_state()
    state["ready"] = True
    # when_change blocks/iterates until "cookies" changes; loop body runs
    # once per change.
    for _ in state.when_change("cookies"):
        eat_cookie(state)
Eat cookies as they're baked.
def graft_neuron(root_section):
    """Return a single-neurite neuron rooted at *root_section*, using the
    section's first point as the soma.
    """
    assert isinstance(root_section, Section)
    soma = Soma(root_section.points[:1])
    neurites = [Neurite(root_section)]
    return Neuron(soma=soma, neurites=neurites)
Returns a neuron starting at root_section
def version(verbose):
    """Print the Superset version banner (colorized); with *verbose*,
    also print the SQLAlchemy database engine in use.
    """
    print(Fore.BLUE + '-=' * 15)
    print(Fore.YELLOW + 'Superset ' + Fore.CYAN + '{version}'.format(
        version=config.get('VERSION_STRING')))
    print(Fore.BLUE + '-=' * 15)
    if verbose:
        print('[DB] : ' + '{}'.format(db.engine))
    print(Style.RESET_ALL)
Prints the current version number
def space_search(args): r = fapi.list_workspaces() fapi._check_response_code(r, 200) workspaces = r.json() extra_terms = [] if args.bucket: workspaces = [w for w in workspaces if re.search(args.bucket, w['workspace']['bucketName'])] extra_terms.append('bucket') ...
Search for workspaces matching certain criteria
def build(self): self._molecules = [] if self.handedness == 'l': handedness = -1 else: handedness = 1 rot_ang = self.rot_ang * handedness for i in range(self.num_of_repeats): dup_unit = copy.deepcopy(self.repeat_unit) z = (self.rise...
Builds a Solenoid using the defined attributes.
def use_comparative_assessment_offered_view(self):
    """Pass through to each provider AssessmentOfferedLookupSession.

    Sessions that do not implement the method are skipped silently.
    """
    self._object_views['assessment_offered'] = COMPARATIVE
    for session in self._get_provider_sessions():
        try:
            session.use_comparative_assessment_offered_view()
        except AttributeError:
            pass
Pass through to provider AssessmentOfferedLookupSession.use_comparative_assessment_offered_view
def string_to_char(l):
    """Convert a 1-D list of strings to a 2-D list of chars, right-padding
    each row with spaces to the longest string's length.

    An empty list maps to []; [''] is treated as a single space.
    """
    if not l:
        return []
    if l == ['']:
        l = [' ']
    width = max(len(s) for s in l)
    return [list(s.ljust(width)) for s in l]
Convert 1-D list of strings to 2-D list of chars.
def setup_debug_logging():
    """Configure the "xbahn" logger to emit DEBUG records to a stream
    handler using a "name: message" format.
    """
    handler = logging.StreamHandler()
    handler.setLevel(logging.DEBUG)
    handler.setFormatter(logging.Formatter("%(name)s: %(message)s"))
    logger = logging.getLogger("xbahn")
    logger.setLevel(logging.DEBUG)
    logger.addHandler(handler)
set up debug logging
def dump(self, **kwargs):
    """Print version info and the parse tree, then render one example."""
    import sys
    # Build the sequence lazily if it has not been parsed yet.
    if not self.seq:
        self.seq = self.getSequence()
    print("StringGenerator version: %s" % (__version__))
    print("Python version: %s" % sys.version)
    self.seq.dump()
    return self.render(**kwargs)
Print the parse tree and then call render for an example.
def full_research_organism(soup):
    "research-organism list including inline tags, such as italic"
    if not raw_parser.research_organism_keywords(soup):
        return []
    return [node_contents_str(tag)
            for tag in raw_parser.research_organism_keywords(soup)]
research-organism list including inline tags, such as italic
def lambda_handler(event, context): print("Client token: " + event['authorizationToken']) print("Method ARN: " + event['methodArn']) principalId = "user|a1b2c3d4" tmp = event['methodArn'].split(':') apiGatewayArnTmp = tmp[5].split('/') awsAccountId = tmp[4] policy = AuthPolicy(principalId, a...
validate the incoming token
def _log_control(self, s): if self.encoding is not None: s = s.decode(self.encoding, 'replace') self._log(s, 'send')
Write control characters to the appropriate log files
def _definition(self):
    """|HeaderPart| object containing the content of this header, resolved
    through the sectPr headerReference for this header/footer index.
    """
    headerReference = self._sectPr.get_headerReference(self._hdrftr_index)
    return self._document_part.header_part(headerReference.rId)
|HeaderPart| object containing content of this header.
def check_text(self, text):
    """Disable the OK button while the layout name is empty."""
    self.button_ok.setEnabled(to_text_string(text) != u'')
Disable empty layout name possibility
def _jar_classfiles(self, jar_file):
    """Yield the names of the '.class' entries inside *jar_file*."""
    for entry in ClasspathUtil.classpath_entries_contents([jar_file]):
        if entry.endswith('.class'):
            yield entry
Returns an iterator over the classfiles inside jar_file.
def _maybeCleanSessions(self): sinceLast = self._clock.seconds() - self._lastClean if sinceLast > self.sessionCleanFrequency: self._cleanSessions()
Clean expired sessions if it's been long enough since the last clean.
def pass_outputs_v1(self):
    """Updates |Branched| based on |Outputs|.

    Adds each branch's flux output onto the corresponding outlet
    sequence entry.
    """
    der = self.parameters.derived.fastaccess
    flu = self.sequences.fluxes.fastaccess
    out = self.sequences.outlets.fastaccess
    for bdx in range(der.nmbbranches):
        out.branched[bdx][0] += flu.outputs[bdx]
Updates |Branched| based on |Outputs|.
def reputation(self, query, include_reasons=False, **kwargs):
    """Look up the reputation score for a domain name.

    include_reasons additionally requests the scoring rationale; results
    are wrapped in the Reputation class.
    """
    return self._results('reputation', '/v1/reputation', domain=query,
                         include_reasons=include_reasons, cls=Reputation,
                         **kwargs)
Pass in a domain name to see its reputation score
def dbus_readBytesFD(self, fd, byte_count):
    """Read up to *byte_count* bytes from file descriptor *fd* and return
    them as a bytearray.

    The descriptor is wrapped in a binary file object which takes
    ownership of *fd*; the with-block guarantees it is closed even if the
    read raises (the original leaked the handle on error).
    """
    with os.fdopen(fd, 'rb') as f:
        return bytearray(f.read(byte_count))
Reads byte_count bytes from fd and returns them.
def eval_now(self, code): result = eval(self.reformat(code)) if result is None or isinstance(result, (bool, int, float, complex)): return repr(result) elif isinstance(result, bytes): return "b" + self.wrap_str_of(result) elif isinstance(result, str): r...
Reformat and evaluate a code snippet and return code for the result.
def _setattr_default(obj, attr, value, default): if value is None: setattr(obj, attr, default) else: setattr(obj, attr, value)
Set an attribute of an object to a value or default value.
def simulate(self, n): self.tree._tree.seed_node.states = self.ancestral_states(n) categories = np.random.randint(self.ncat, size=n).astype(np.intc) for node in self.tree.preorder(skip_seed=True): node.states = self.evolve_states(node.parent_node.states, categories, node.pmats) ...
Evolve multiple sites during one tree traversal
def lookup(ctx, path):
    """Determine which tests intersect the source interval *path* and
    report them using the options stored on the click context object.
    """
    regions = parse_intervals(path, as_context=ctx.obj['semantic'])
    _report_from_regions(regions, ctx.obj)
Determine which tests intersect a source interval.
def dispatch(self):
    """Wrap webapp2's dispatch so sessions are always saved, even when the
    handler raises.
    """
    try:
        webapp2.RequestHandler.dispatch(self)
    finally:
        self.session_store.save_sessions(self.response)
Wraps the dispatch method to add session support.
def thread_pool():
    """Lazily create and return the shared LocalImage rendition threadpool.

    NOTE(review): not thread-safe — two concurrent first calls could each
    create an executor; confirm callers serialize first use.
    """
    if not LocalImage._thread_pool:
        logger.info("Starting LocalImage threadpool")
        LocalImage._thread_pool = concurrent.futures.ThreadPoolExecutor(
            thread_name_prefix="Renderer")
    return LocalImage._thread_pool
Get the rendition threadpool
def show_flavor(self, flavor, **_params):
    """Fetch information for a certain Neutron service flavor."""
    path = self.flavor_path % flavor
    return self.get(path, params=_params)
Fetches information for a certain Neutron service flavor.
def parse(cls, s, schema_only=False):
    """Parse an ARFF document already loaded into string *s*.

    Feeds the parser line by line; with schema_only, stops as soon as the
    data section is reached.
    """
    arff = cls()
    arff.state = 'comment'
    arff.lineno = 1
    for line in s.splitlines():
        arff.parseline(line)
        arff.lineno += 1
        if schema_only and arff.state == 'data':
            break
    return arff
Parse an ARFF File already loaded into a string.
def runCommandSplits(splits, silent=False, shell=False): try: if silent: with open(os.devnull, 'w') as devnull: subprocess.check_call( splits, stdout=devnull, stderr=devnull, shell=shell) else: subprocess.check_call(splits, shell=shell) ...
Run a shell command given the command's parsed command line
def _unix_word_rubout(text, pos): words = text[:pos].rsplit(None, 1) if len(words) < 2: return text[pos:], 0 else: index = text.rfind(words[1], 0, pos) return text[:index] + text[pos:], index
Kill the word behind pos, using white space as a word boundary.
def all_subclasses(cls):
    """Recursively collect every direct and indirect subclass of *cls*."""
    result = set()
    for subclass in cls.__subclasses__():
        result.add(subclass)
        result |= all_subclasses(subclass)
    return result
Recursively returns all the subclasses of the provided class.
def execute(self, elem_list):
    """If the condition holds, return the action's result for *elem_list*;
    otherwise pass the list through unchanged.
    """
    if not self.condition.is_true(elem_list):
        return elem_list
    return self.action.act(elem_list)
If condition, return a new elem_list provided by executing action.
def augment_reading_list(self, primary_query, augment_query=None, reverse_negate=False): primary_query = self.validate_query(primary_query) augment_query = self.get_validated_augment_query(augment_query=augment_query) try: if reverse_negate: primary_query = primary_qu...
Apply injected logic for slicing reading lists with additional content.
def seconds_to_time(x):
    """Convert a (possibly fractional) number of seconds into a
    datetime.time value (hours, minutes, seconds, microseconds).
    """
    total_us = int(x * 10**6)
    total_s, microseconds = divmod(total_us, 10**6)
    total_m, seconds = divmod(total_s, 60)
    hours, minutes = divmod(total_m, 60)
    return time(hours, minutes, seconds, microseconds)
Convert a number of second into a time
def update_portfolio(self): if not self._dirty_portfolio: return portfolio = self._portfolio pt = self.position_tracker portfolio.positions = pt.get_positions() position_stats = pt.stats portfolio.positions_value = position_value = ( position_stats...
Force a computation of the current portfolio state.
def _connect_to_rabbitmq(self):
    """Start a new RabbitMQ connection attempt if none is established.

    NOTE(review): only the *pending* global is assigned here; the
    established connection is presumably stored elsewhere when the
    attempt completes — confirm against the connection callback.
    """
    global pending_rabbitmq_connection, rabbitmq_connection
    if not rabbitmq_connection:
        LOGGER.info('Creating a new RabbitMQ connection')
        pending_rabbitmq_connection = self._new_rabbitmq_connection()
Connect to RabbitMQ and assign a local attribute
def _graph_reduction(adj, x, g, f): as_list = set() as_nodes = {v for v in adj if len(adj[v]) <= f and is_almost_simplicial(adj, v)} while as_nodes: as_list.union(as_nodes) for n in as_nodes: dv = len(adj[n]) if dv > g: g = dv if g > f: ...
we can go ahead and remove any simplicial or almost-simplicial vertices from adj.
def add_membership(self, user, role): targetGroup = AuthGroup.objects(role=role, creator=self.client).first() if not targetGroup: return False target = AuthMembership.objects(user=user, creator=self.client).first() if not target: target = AuthMembership(user=user,...
make user a member of a group
def remove_xml_element_string(name, content):
    """Remove all elements named *name* from the XML string *content* and
    return the re-serialized document.
    """
    ET.register_namespace("", "http://soap.sforce.com/2006/04/metadata")
    root = ET.fromstring(content)
    root = remove_xml_element(name, root)
    return ET.tostring(root, encoding=UTF8)
Remove XML elements from a string
async def send_command(self, command): _LOGGER.debug("Sending command to projector %s", command) if self.__checkLock(): return False self.__setLock(command) response = await self.send_request( timeout=self.__get_timeout(command), params=EPSON_KEY_COMMA...
Send command to Epson.
def add_edge(self, u, v, key=None, attr_dict=None, **attr): if attr_dict is None: attr_dict = attr else: try: attr_dict.update(attr) except AttributeError: raise NetworkXError( "The attr_dict argument must be a dicti...
Version of add_edge that only writes to the database once.
def assert_called_once(_mock_self):
    """Raise AssertionError unless the mock was called exactly once."""
    self = _mock_self
    if self.call_count == 1:
        return
    msg = ("Expected '%s' to have been called once. Called %s times." %
           (self._mock_name or 'mock', self.call_count))
    raise AssertionError(msg)
assert that the mock was called only once.
def _getshapes_2d(center, max_radius, shape): index_mean = shape * center index_radius = max_radius / 2.0 * np.array(shape) min_idx = np.maximum(np.floor(index_mean - index_radius), 0).astype(int) max_idx = np.ceil(index_mean + index_radius).astype(int) idx = [slice(minx, maxx) for minx, maxx in zip...
Calculate indices and slices for the bounding box of a disk.
def fill_in_by_selector(self, selector, value):
    """Clear the form element matching the CSS/jQuery *selector* and type
    *value* into it.
    """
    elem = find_element_by_jquery(world.browser, selector)
    elem.clear()
    elem.send_keys(value)
Fill in the form element matching the CSS selector.
def _case_format(self, occur): if self.occur == 1: self.attr["nma:implicit"] = "true" ccnt = len(self.rng_children()) if ccnt == 0: return "<empty/>%s" if ccnt == 1 or not self.interleave: return self.start_tag("group") + "%s" + self.end_tag("group") retur...
Return the serialization format for a case node.
def session(self):
    """Return the lazily-created TcExSession configured for the
    ThreatConnect API.
    """
    if self._session is not None:
        return self._session
    from .tcex_session import TcExSession
    self._session = TcExSession(self)
    return self._session
Return an instance of Requests Session configured for the ThreatConnect API.
def single_case(self, i, case): if self.single_stack: self.single_stack.pop() self.single_stack.append(case) try: t = next(i) if self.use_format and t in _CURLY_BRACKETS: self.handle_format(t, i) elif t == '\\': try:...
Uppercase or lowercase the next character.
def new(self, node: Node):
    """Create a Property like this one (same name and setter) but bound to
    *node*.
    """
    return Property(self.name, self._setter, node)
Creates property for node
def create_event_object(self, event_type, code, value, timeval=None): if not timeval: timeval = self.__get_timeval() try: event_code = self.manager.codes['type_codes'][...
Create an evdev style object.
def cross_correlate(self, templates, **kwargs): if isinstance(templates, (Spectrum1D, )): template_dispersion = templates.disp template_fluxes = templates.flux else: template_dispersion = templates[0] template_fluxes = templates[1] return _cross_co...
Cross correlate the spectrum against a set of templates.
def retrieve_dcnm_subnet_info(self, tenant_id, direc):
    """Retrieve the DCNM subnet dictionary for *tenant_id* in direction
    *direc*.
    """
    return self.get_service_obj(tenant_id).get_dcnm_subnet_dict(direc)
Retrieves the DCNM subnet info for a tenant.
def teardown(self):
    """Stop the server and delete the temporary diff folder."""
    self.log.debug('teardown: in')
    self.running = False
    self.shutdown_server()
    # Best-effort cleanup; missing folder is not an error.
    shutil.rmtree(self.tmp_diff_folder, ignore_errors=True)
Tear down the server or keep it alive.
def length_limits(max_length_limit, length_limit_step):
    """Generate zero-padded length-limit labels.

    Produces one label per step up to max_length_limit, each zero-filled
    to the width of the largest limit, e.g. (10, 5) -> ['05', '10'].
    """
    width = len(str(max_length_limit))
    # range() replaces the Python-2-only xrange() (NameError on Python 3);
    # the upper bound ensures the final step lands on max_length_limit.
    return [
        str(i).zfill(width)
        for i in range(length_limit_step,
                       max_length_limit + length_limit_step - 1,
                       length_limit_step)
    ]
Generates the length limits
def cpp_prog_builder(build_context, target):
    """Build a C++ binary executable for *target* in its own workspace."""
    yprint(build_context.conf, 'Build CppProg', target)
    workspace_dir = build_context.get_workspace('CppProg', target.name)
    build_cpp(build_context, target, target.compiler_config, workspace_dir)
Build a C++ binary executable
def main(): args = parse_args() config_logger(args) logger = structlog.get_logger(__name__) if args.show_version: print_version() sys.exit(0) version = pkg_resources.get_distribution('lander').version logger.info('Lander version {0}'.format(version)) config = Configuration(ar...
Entrypoint for ``lander`` executable.
def read_footer(filename): with open(filename, 'rb') as file_obj: if not _check_header_magic_bytes(file_obj) or \ not _check_footer_magic_bytes(file_obj): raise ParquetFormatException("{0} is not a valid parquet file " "(missing magic bytes)" ...
Read the footer and return the FileMetaData for the specified filename.
def write(self):
    """Write all needed state info to the filesystem.

    Serializes the current state through the fax codec. The previous
    implementation bound the codec's return value to an unused name and
    never closed the file handle; the with-block fixes the leak.
    """
    with open(self.state_file, 'w') as state_fh:
        self._fax.codec.dump(self.__state, state_fh)
write all needed state info to filesystem
def ratio_and_percentage_with_time_remaining(current, total, time_remaining):
    """Format progress as "current / total (P% completed) (~T remaining)"."""
    percent = int(current / total * 100)
    return "{} / {} ({}% completed) (~{} remaining)".format(
        current, total, percent, time_remaining)
Returns the progress ratio, percentage and time remaining.
def _round_half_hour(record): k = record.datetime + timedelta(minutes=-(record.datetime.minute % 30)) return datetime(k.year, k.month, k.day, k.hour, k.minute, 0)
Round a time DOWN to the nearest half-hour.
def list_loadbalancers(self, retrieve_all=True, **_params):
    """Fetch a list of all load balancers for a project."""
    path = self.lbaas_loadbalancers_path
    return self.list('loadbalancers', path, retrieve_all, **_params)
Fetches a list of all loadbalancers for a project.
def flush(self):
    """Write cached data to storage if any modifications are pending."""
    if self._cache_modified_count <= 0:
        return
    self.storage.write(self.cache)
    self._cache_modified_count = 0
Flush all unwritten data to disk.
def check_keyname(self, rule): keynames = rule.get('keynames') if not keynames: self.logdebug('no keynames requirement.\n') return True if not isinstance(keynames, list): keynames = [keynames] if self.keyname in keynames: self.logdebug('key...
If a key name is specified, verify it is permitted.
async def _on_rpc_command(self, event): payload = event['payload'] rpc_id = payload['rpc_id'] tag = payload['response_uuid'] args = payload['payload'] result = 'success' response = b'' if self._rpc_dispatcher is None or not self._rpc_dispatcher.has_rpc(rpc_id): ...
Received an RPC command that we should execute.
def _update_names(self): d = dict( table=self.table_name, time=self.time, space=self.space, grain=self.grain, variant=self.variant, segment=self.segment ) assert self.dataset name = PartialPartitionName(**d).promote(...
Update the derived names
def detect_converters(pattern: str, converter_dict: Dict[str, Callable], default: Callable = str): converters = {} for matched in VARS_PT.finditer(pattern): matchdict = matched.groupdict() varname = matchdict['varname'] converter = matchdict['c...
detect pairs of varname and converter from pattern
def first_setup(self):
    """Best-guess first-setup moment as a naive UTC datetime, or None when
    the raw data lacks the field (the meaning of this value is a guess).
    """
    try:
        stamp = self.raw[ATTR_FIRST_SETUP]
    except KeyError:
        return None
    return datetime.utcfromtimestamp(stamp)
This is a guess of the meaning of this value.
def expr2dimacscnf(ex):
    """Convert expression *ex* into an equivalent DIMACS CNF.

    Returns (litmap, DimacsCNF) built from the expression's CNF encoding.
    """
    litmap, nvars, clauses = ex.encode_cnf()
    return litmap, DimacsCNF(nvars, clauses)
Convert an expression into an equivalent DIMACS CNF.