content stringlengths 35 762k | sha1 stringlengths 40 40 | id int64 0 3.66M |
|---|---|---|
def dilate( data, iterations=1, structure=None ):
"""Dilate a binary ND array by a number of iterations."""
# Convert to binary, just in case.
mask = binarise(data).astype(int)
if not structure:
structure = ndimage.generate_binary_structure(3,1)
# Check we have positive iterat... | 724b10f0c1d0d417f4ca693a5322349f390da17c | 12,100 |
from PyPDF4 import PdfFileReader
def text_from_pdf(file_name : str) -> str:
"""
Extract text from PDF file
==========================
Parameters
----------
file_name : str
Name of the file to extract text from.
Returns
-------
str
The extracted text.
"""
... | a1b0077b143b4fee211dd38b6beabf58c7692177 | 12,101 |
import configparser
def read_section(section, fname):
"""Read the specified section of an .ini file."""
conf = configparser.ConfigParser()
conf.read(fname)
val = {}
try:
val = dict((v, k) for v, k in conf.items(section))
return val
except configparser.NoSectionError:
re... | 65d6b81b45fc7b75505dd6ee4dda19d13ebf7095 | 12,102 |
import torch
def freqz(
b, a=1, worN=512, whole=False, fs=2 * np.pi, log=False, include_nyquist=False
):
"""Compute the frequency response of a digital filter."""
h = None
lastpoint = 2 * np.pi if whole else np.pi
if log:
w = np.logspace(0, lastpoint, worN, endpoint=include_nyquist and ... | d6950acc8535791968d34edf8c4ebd557000b72e | 12,103 |
def _helper_fit_partition(self, pnum, endog, exog, fit_kwds,
init_kwds_e={}):
"""handles the model fitting for each machine. NOTE: this
is primarily handled outside of DistributedModel because
joblib cannot handle class methods.
Parameters
----------
self : Distributed... | 30b7e6d48c2f0fa3eb2d2486fee9a87dad609886 | 12,104 |
def generate_2d_scatter(data, variables, class_data=None, class_names=None,
nrows=None, ncols=None, sharex=False, sharey=False,
show_legend=True, xy_line=False, trendline=False,
cmap_class=None, shorten_variables=False,
**kw... | 9d2a843f07cbfed921831a64ad39b4c13a947500 | 12,105 |
def getOptions(options):
"""translate command line options to PAML options."""
codeml_options = {}
if options.analysis == "branch-specific-kaks":
codeml_options["seqtype"] = "1"
codeml_options["model"] = "1"
elif options.analysis == "branch-fixed-kaks":
codeml_options["seqtype... | 86b0fa157e9a9c48d7bf683e57f24f49e32f15e7 | 12,106 |
import json
def deserialize_block_to_json(block: Block) -> str:
"""Deserialize Block object to JSON string
Parameters
----------
block : Block
Block object
Returns
-------
str
JSON string
"""
try:
if block:
return json.dumps(
{
... | b78f31ef076bcb36011df45c6c5c68563a47e71e | 12,107 |
def get_invocations(benchmark: Benchmark):
"""
Returns a list of invocations that invoke the tool for the given benchmark.
It can be assumed that the current directory is the directory from which execute_invocations.py is executed.
For QCOMP 2020, this should return a list of invocations for all tracks ... | 4afeaa62cecd2ae21f6e112fe5c87dff54310a95 | 12,108 |
def see(node: "Position", move: Move = None) -> float:
"""Static-Exchange-Evaluation
Args:
node: The current position to see
move (Move, optional): The capture move to play. Defaults to None.
Returns:
float: The score associated with this capture. Positive is good.
"""
c = ... | e6062b7cd09e9b2dca514e5be23b7fa870ff923f | 12,109 |
import sys
def new(option):
"""
Create a new message queue object; options must contain the type of
queue (which is the name of the child class), see above.
"""
options = option.copy()
qtype = options.pop("type", "DQS")
try:
__import__("messaging.queue.%s" % (qtype.lower()))
ex... | 9e285f4bee5442a41c10b32158595da5e03707de | 12,110 |
def rk4(f, t0, y0, h, N):
""""Solve IVP given by y' = f(t, y), y(t_0) = y_0 with step size h > 0, for N steps,
using the Runge-Kutta 4 method.
Also works if y is an n-vector and f is a vector-valued function."""
t = t0 + np.array([i * h for i in range(N+1)])
m = len(y0)
y = np.zeros((N+1, m))
... | e6b7c3d1ac0ea765a3ac9ebac69159dd2c2eab78 | 12,111 |
import os
def main():
"""
Example entry point; please see Enumeration example for more in-depth
comments on preparing and cleaning up the system.
:return: True if successful, False otherwise.
:rtype: bool
"""
# Since this application saves images in the current folder
# we must ensur... | 62b846f3dd26106ea6dcc01bed841895f3691728 | 12,112 |
import torch
def parse_predictions(est_data, gt_data, config_dict):
""" Parse predictions to OBB parameters and suppress overlapping boxes
Args:
est_data, gt_data: dict
{point_clouds, center, heading_scores, heading_residuals,
size_scores, size_residuals, sem_cls_scores}
... | 9d31b44e37e7af458084b29927eaa29d2e1889af | 12,113 |
def benchmark_op(op, burn_iters: int = 2, min_iters: int = 10):
"""Final endpoint for all kb.benchmarks functions."""
assert not tf.executing_eagerly()
with tf.compat.v1.Session() as sess:
sess.run(tf.compat.v1.global_variables_initializer())
bm = tf.test.Benchmark()
result = bm.run_... | 4b46cbb4f487332b43e1c06daef294e9e01f13a5 | 12,114 |
def run_truncated_sprt(list_alpha, list_beta, logits_concat, labels_concat, verbose=False):
""" Calculate confusion matrix, mean hitting time, and truncate rate of a batch.
Args:
list_alpha: A list of floats.
list_beta: A list of floats with the same length as list_alpha's.
logits_concat... | 7934b1de29c60a59df056cbb1e4dce42e76ca540 | 12,115 |
def get_users(metadata):
"""
Pull users, handles hidden user errors
Parameters:
metadata: sheet of metadata from mwclient
Returns:
the list of users
"""
users = []
for rev in metadata:
try:
users.append(rev["user"])
except (KeyError):
u... | 48dbae6a63019b0e4c2236a97e147102fe4d8758 | 12,116 |
from re import S
def solve(FLT_MIN, FLT_MAX):
"""Solving cos(x) <= -0.99, dx/dt=1, x(0) = 0
# Basic steps:
# 1. First compute the n terms for each ode
# 2. Next replace the guard with ode(t), so that it is only in t
# 3. Then compute the number of terms needed for g(t)
# 4. Finally, compute g(... | 90288d73717d02b966beb66396e0be16f68f55f5 | 12,117 |
from tvm.contrib import graph_executor
def run_tvm_graph(
coreml_model, target, device, input_data, input_name, output_shape, output_dtype="float32"
):
"""Generic function to compile on relay and execute on tvm"""
if isinstance(input_data, list):
shape_dict = {}
dtype_dict = {}
for... | 391b3e0dcca3c1893da69a6ce1ac219f7c56dfa0 | 12,118 |
import math
def detect_peaks(data, srate):
"""
obrain maximum and minimum values from blood pressure or pleth waveform
the minlist is always one less than the maxlist
"""
ret = []
if not isinstance(data, np.ndarray):
data = np.array(data)
raw_data = np.copy(data)
raw_srate = ... | ad327b10dd6bcecb3036ecfb5cdcf07defecf2ff | 12,119 |
from typing import Tuple
from typing import Dict
def add_entity_to_watchlist(client: Client, args) -> Tuple[str, Dict, Dict]:
"""Adds an entity to a watchlist.
Args:
client: Client object with request.
args: Usually demisto.args()
Returns:
Outputs.
"""
watchlist_name = arg... | 28270c3fa0985458a1fc18f5fd4d2c8661eae1dc | 12,120 |
def is_base255(channels):
    """Return True if *channels* passes every ``_test_base255`` check.

    A plain string is never considered a base-255 color specification.
    """
    if isinstance(channels, str):
        return False
    checks = _test_base255(channels)
    return all(checks.values())
def make_unrestricted_prediction(solution: SolverState) -> tuple[Role, ...]:
"""
Uses a list of true/false statements and possible role sets
to return a rushed list of predictions for all roles.
Does not restrict guesses to the possible sets.
"""
all_role_guesses, curr_role_counts = get_basic_gu... | 2662979b0fdca524dcea368daf7b11283906ecbb | 12,122 |
import argparse
def get_args():
"""Parse command-line arguments."""
parser = argparse.ArgumentParser(description="Expression aggregator")
parser.add_argument(
"-e", "--expressions", nargs="+", help="Expressions", required=True
)
parser.add_argument(
"-d", "--descriptors", nargs="+"... | a33401b0407ca8538f09918c8ec9074ca21e2438 | 12,123 |
def computeFourteenMeVPoint(xs, E14='14.2 MeV', useCovariance=True, covariance=None):
"""
Compute the value of the cross section at 14.2 MeV.
If the covariance is provided, the uncertainty on the 14.2 MeV point will be computed.
:param xs: reference to the cross section
:param E14: the 14 MeV point... | 4d3165518e227f0c1027d45507c6d67e1e27bf0b | 12,124 |
def conn_reshape_directed(da, net=False, sep='-', order=None, rm_missing=False,
fill_value=np.nan, to_dataframe=False,
inplace=False):
"""Reshape a raveled directed array of connectivity.
This function takes a DataArray of shape (n_pairs, n_directions) or
... | bb6747cc47b263545fce219f8357d8773fb428bc | 12,125 |
import shlex
import subprocess
def cmd_appetite(manifest, extra_params, num_threads=1, delete_logs=False):
"""Run appetite with defined params
:param manifest: manifest to reference
:param extra_params: extra params if needed
:param num_threads: Number of threads to use
:param delete_logs: Delete ... | f22fdd1d436f616cac31517c0594d94416ef6366 | 12,126 |
def create_manager(user):
"""
Return a ManageDNS object associated with user (for history)
"""
if 'REVERSE_ZONE' in app.config:
revzone = app.config['REVERSE_ZONE']
else:
revzone = None
return ManageDNS(nameserver=app.config['SERVER'], forward_zone=app.config['FORWARD_ZONE'],
... | 0832ce4353775a19cc015490f4febf6df6bd8f04 | 12,127 |
from datetime import datetime
import requests
import json
import sys
import time
import operator
def rank_urls(urls, year=None, filename=None):
"""
Takes a list of URLs and searches for them in
Hacker News submissions. Prints or saves each
URL and its total points to a given filename
in descending order of point... | 80de668d98cfbcca8ca6aeae269d6d2a683ae2d3 | 12,128 |
def get_psi_part(v, q):
    """Return the harmonic oscillator wavefunction for level v on grid q."""
    # Hermite polynomials up to (and including) order v.
    hermite_polys = make_Hr(v + 1)
    gaussian = np.exp(-q * q / 2.0)
    return N(v) * Hr_eval(hermite_polys, v, q) * gaussian if False else N(v) * hermite_polys[v](q) * gaussian
import os
def get_tempdir() -> str:
    """Get the directory where temporary files are stored.

    Checks the conventional environment variables in priority order and
    falls back to ``/tmp`` when none of them is set.
    """
    for var in ('XDG_RUNTIME_DIR', 'TMPDIR', 'TMP', 'TEMP'):
        if var in os.environ:
            return os.environ[var]
    return '/tmp'
def get_pij(d, scale, i, optim = "fast"):
"""
Compute probabilities conditioned on point i from a row of distances
d and a Gaussian scale (scale = 2*sigma^2). Vectorized and unvectorized
versions available.
"""
if optim == "none":
#
# TO BE DONE
#
re... | 4a1ee1d91ba949789cc96d2ed1873197afbf4b67 | 12,131 |
import os
import hashlib
def hash_file(path):
"""
Returns the hash of a file.
Based on https://stackoverflow.com/questions/22058048/hashing-a-file-in-python
"""
# Return error as hash, if file does not exist
if not os.path.exists(path):
return f"error hashing file, file does not exist... | 722afbe2b6dafa35fbd91b28b4b33d407057b562 | 12,132 |
import os
import glob
import itertools
import re
def get_section_names(target_dir, dir_name, ext="wav"):
"""
Get section name (almost equivalent to machine ID).
target_dir : str
base directory path
dir_name : str
sub directory name
ext : str (default="wav)
file extension o... | 1fea77ad452b936019201ff35fe8479a8ad7efd5 | 12,133 |
def get_user_input(prompt: str, current_setting: str):
"""
Get user input
:param prompt: prompt to display
:param current_setting: current value
:return:
"""
if current_setting != '':
print(f'-- Current setting: {current_setting}')
use_current = '/return to use current'
e... | 358bd937db4ae111eb515385f0f61391a7ae665c | 12,134 |
def class_is_u16_len(cls):
"""
Return True if cls_name is an object which uses initial uint16 length
"""
ofclass = loxi_globals.unified.class_by_name(cls)
if not ofclass:
return False
if len(ofclass.members) < 1:
return False
m = ofclass.members[0]
if not isinstance(m... | f45a35e98ff0a2cf6d10ac31e5e31b501f7edcfd | 12,135 |
import zlib
def uploadfile(ticket_id):
"""
Anexa um arquivo ao ticket.
"""
if "file" not in request.files:
return "arquivo inválido"
filename = request.files.get("file").filename
maxfilesize = int(cfg("attachments", "max-size"))
blob = b""
filesize = 0
while True:
c... | d6dae213ff1b6f48b1cd2efb739e52eba6531692 | 12,136 |
def split(data, train_ids, test_ids, valid_ids=None):
"""Split data into train, test (and validation) subsets."""
datasets = {
"train": (
tuple(map(lambda x: x[train_ids], data[0])),
data[1][train_ids],
),
"test": (tuple(map(lambda x: x[test_ids], data[0])), data[... | 0156d39a5920c5ba7e3ab05a85358b1a960cf239 | 12,137 |
def parse_value_file(path):
"""return param: [(value type, value)]"""
data = {}
samples = [x.strip("\n").split("\t") for x in open(path)]
for row in samples:
parameter = row[0]
values = [x for x in row[1:] if x != SKIP_VAL]
if values != []:
if parameter not in data:
... | 7dd51f21877ec8ce4a8a64c288a5e862ea0e2a52 | 12,138 |
import os
def get_stop_words(stop_text, filename='ChineseStopWords.txt'):
"""读取指定停用词文件"""
_fp = os.path.join(stopwords_path, filename)
with open(_fp, 'r', encoding='utf-8') as f:
lines = f.readlines()
stop_words = [word.strip() for word in lines]
if stop_text:
input_stop_words = st... | 8c5355519e58ddcf82c87c9f357af614b9021188 | 12,139 |
def _is_comments_box(shape):
""" Checks if this shape represents a Comments question; RECTANGLE with a green outline """
if shape.get('shapeType') != 'RECTANGLE':
return False
color = get_dict_nested_value(shape, 'shapeProperties', 'outline', 'outlineFill', 'solidFill', 'color', 'rgbColor')
re... | 5fa4abba6cc0db3552e90bd73ea9aa7659665ffe | 12,140 |
import os
def read_tree(path):
"""Returns a dict with {filepath: content}."""
if not os.path.isdir(path):
return None
out = {}
for root, _, filenames in os.walk(path):
for filename in filenames:
p = os.path.join(root, filename)
with open(p, 'rb') as f:
out[os.path.relpath(p, path)]... | 1c3c7be9723ac9f60ab570a7287b067dd2c5536e | 12,141 |
def get_config(cfg, name):
"""Given the argument name, read the value from the config file.
The name can be multi-level, like 'optimizer.lr'
"""
name = name.split('.')
suffix = ''
for item in name:
assert item in cfg, f'attribute {item} not cfg{suffix}'
cfg = cfg[item]
... | 4b0a8eedb057a26d67cd5c9f7698c33754b29249 | 12,142 |
def get_current_frame_content_entire_size(driver):
# type: (AnyWebDriver) -> ViewPort
"""
:return: The size of the entire content.
"""
try:
width, height = driver.execute_script(_JS_GET_CONTENT_ENTIRE_SIZE)
except WebDriverException:
raise EyesError('Failed to extract entire size... | 6df557ae5f628897a0d178695a13349787532168 | 12,143 |
def conv_slim_capsule(input_tensor,
input_dim,
output_dim,
layer_name,
input_atoms=8,
output_atoms=8,
stride=2,
kernel_size=5,
padding='SAME',
... | 626719fa607c7e02e2315d5082e9536b995ab080 | 12,144 |
def p_op_mean0_update(prev_p_op_mean0: float, p_op_var0: float, op_choice: int):
"""0-ToM updates mean choice probability estimate"""
# Input variable transforms
p_op_var0 = np.exp(p_op_var0)
# Update
new_p_op_mean0 = prev_p_op_mean0 + p_op_var0 * (
op_choice - inv_logit(prev_p_op_mean0)
... | 2a66e0e6089813c8605e658bb68c103d2b07515d | 12,145 |
from aiida.orm import QueryBuilder, Code
from aiida.common.exceptions import NotExistent
def get_last_code(entry_point_name):
"""Return a `Code` node of the latest code executable of the given entry_point_name in the database.
The database will be queried for the existence of a inpgen node.
If this is no... | 801ef01075dccaf2d6d91c8bfcd9f038dc749ba7 | 12,146 |
def digamma(x):
    """Evaluate the digamma function.

    Parameters
    ----------
    x : array-like
        Points on the real line.

    Returns
    -------
    ndarray
        Values of ``digamma`` at ``x``.
    """
    # Thin wrapper delegating to the underlying implementation.
    result = _digamma(x)
    return result
def get_time_index_dataset_from_file(the_file: h5py.File) -> h5py.Dataset:
    """Return the dataset for time indices from the H5 file object."""
    dataset = the_file[TIME_INDICES]
    return dataset
import os
def get_database_connection():
"""Возвращает соединение с базой данных Redis, либо создаёт новый, если он ещё не создан."""
global _database
if _database is None:
database_password = os.getenv("DB_PASSWORD", default=None)
database_host = os.getenv("DB_HOST", default='localhost')
... | 3417bc17a357a7bcd54131301f3963a10d76e027 | 12,149 |
def is_feature_enabled(feature_name):
"""A short-form method for server-side usage. This method evaluates and
returns the values of the feature flag, using context from the server only.
Args:
feature_name: str. The name of the feature flag that needs to
be evaluated.
Returns:
... | 65c3b74988d7eb145352d2835b90a60abedfeba1 | 12,150 |
def str_to_size(size_str):
"""
Receives a human size (i.e. 10GB) and converts to an integer size in
mebibytes.
Args:
size_str (str): human size to be converted to integer
Returns:
int: formatted size in mebibytes
Raises:
ValueError: in case size provided in invalid
... | 0051b7cf55d295a4fffcc41ed5b0d900243ef2da | 12,151 |
def point_line_distance(point, line):
"""Distance between a point and great circle arc on a sphere."""
start, end = line
if start == end:
dist = great_circle_distance(point, start, r=1)/np.pi*180
else:
dist = cross_track_distance(point, line, r=1)
dist = abs(dist/np.pi*180)
r... | 472b9134034fb0f0e03ad4964f97c2be7337db56 | 12,152 |
def uniq_by(array, iteratee=None):
"""This method is like :func:`uniq` except that it accepts iteratee which
is invoked for each element in array to generate the criterion by which
uniqueness is computed. The order of result values is determined by the
order they occur in the array. The iteratee is invo... | c4398add1597a447f400bd6c100cc10eda4e63a4 | 12,153 |
def process_lvq_pak(dataset_name='lvq-pak', kind='all', numeric_labels=True, metadata=None):
"""
kind: {'test', 'train', 'all'}, default 'all'
numeric_labels: boolean (default: True)
if set, target is a vector of integers, and label_map is created in the metadata
to reflect the mapping to th... | 7d9aaed88fb20dc151c61d23760e05e77965838c | 12,154 |
def StrToPtkns(path_string):
    """ The inverse of PtknsToStr(), this function splits a string like
    '/usr/local/../bin/awk' into ['','usr','local','..','bin','awk'].
    For illustrative purposes only. Use text.split('/') directly instead."""
    # Bug fix: the original returned orig_text.split('/'), but no name
    # `orig_text` exists in scope -- every call raised NameError.
    return path_string.split('/')
def load_key(file, callback=util.passphrase_callback):
# type: (AnyStr, Callable) -> EC
"""
Factory function that instantiates a EC object.
:param file: Names the filename that contains the PEM representation
of the EC key pair.
:param callback: Python callback object that will be... | 72d47a88d80141b385f8212134fb682507ce47d4 | 12,156 |
import fnmatch
def glob_path_match(path: str, pattern_list: list) -> bool:
"""
Checks if path is in a list of glob style wildcard paths
:param path: path of file / directory
:param pattern_list: list of wildcard patterns to check for
:return: Boolean
"""
return any(fnmatch(path, pattern) f... | 7c21e8f1c441641990826cf1d6d29d4add40e9ca | 12,157 |
def sample_df(df, col_name='family', n_sample_per_class=120, replace = False):
"""
samples the dataframe based on a column, duplicates only if the
number of initial rows < required sample size
"""
samples = df.groupby(col_name)
list_cls = df[col_name].unique()
df_lst = []
for cls in li... | cc229e9cbd4a094b9a42893f15d99303f1f14c2d | 12,158 |
import base64
def inventory_user_policies_header(encode):
"""generate output header"""
if encode == 'on':
return misc.format_line((
base64.b64encode(str("Account")),
base64.b64encode(str("UserName")),
base64.b64encode(str("PolicyName")),
base64.b64encode(str... | 80d170505c0f05e48c2854c9f5370d161de953fb | 12,159 |
def fields_dict(cls):
"""
Return an ordered dictionary of ``attrs`` attributes for a class, whose
keys are the attribute names.
:param type cls: Class to introspect.
:raise TypeError: If *cls* is not a class.
:raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
cla... | 863106132a8ce27c8cb12c8ace8f4204b43484c3 | 12,160 |
def cdlxsidegap3methods(
client,
symbol,
timeframe="6m",
opencol="open",
highcol="high",
lowcol="low",
closecol="close",
):
"""This will return a dataframe of upside/downside gap three methods for the given symbol across
the given timeframe
Args:
client (pyEX.Client): Cl... | f77f8e7404b2be942919a652facbfea412e962d3 | 12,161 |
def finditer(pattern, string, flags=0):
    """Return an iterator over all non-overlapping matches in the
    string. For each match, the iterator returns a match object.
    Empty matches are included in the result."""
    engine = _pyre()
    return engine.finditer(pattern, string, flags)
def nufft_j(x, y, freq = None, period_max=1., period_min=.5/24, window=False, oversamp=10.):
"""
nufft_j(x, y, period_max=1.,
period_min=.5/24, window=False, oversamp=10.):
Basic STFT algorithm
for evenly sampled data
"""
srt = np.argsort(x)
x = x[srt] # get sorted x, y arrays
y =... | dc690bd294c28d8b70befc1463eaeed018bf98bf | 12,163 |
import logging
def logger(verbosity=levels['error'], log_file=None):
"""Create a logger which streams to the console, and optionally a file."""
# create/get logger for this instance
logger = logging.getLogger(__name__)
logger.setLevel(levels['debug'])
fmt = logging.Formatter('%(asctime)s - %(leve... | da17765b4d52b388920873df85fadeeed262c2d7 | 12,164 |
import sys
def head(input_file, in_format, nrows):
"""
Convert tables between formats, optionally modifying column names in the tables
"""
# Guess format if not specified:
if in_format.upper() == "AUTO":
in_format = utils.guess_format(input_file)
# Read PARQUET:
if in_format.uppe... | 9ed8b83ea91c48672f3f896cf1c8d1ec12100029 | 12,165 |
def getNamespacePermissions(paths):
"""Get L{Namespace}s and L{NamespacePermission}s for the specified paths.
@param paths: A sequence of L{Namespace.path}s to get L{Namespace}s and
L{NamespacePermission}s for.
@return: A C{ResultSet} yielding C{(Namespace, NamespacePermission)}
2-tuples fo... | cf6c9a898bdc08130702d4aeb6570790a9dc8edc | 12,166 |
def plot(x, y, ey=[], ex=[], frame=[], kind="scatter", marker_option=".",
ls="-", lw=1, label="", color="royalblue", zorder=1, alpha=1.,
output_folder="", filename=""):
"""
Erstellt einen Plot (plot, scatter oder errorbar).
Parameters
----------
x : array-like
x-Wert... | 038a12ab841a617bf1ca3106d5664f8942c9e259 | 12,167 |
def find_nominal_hv(filename, nominal_gain):
"""
Finds nominal HV of a measured PMT dataset
Parameters
----------
filename: string
nominal gain: float
gain for which the nominal HV should be found
Returns
-------
nominal_hv: int
nominal HV
"""
f = h5py.File... | 122c5c14314e1ad3521a67a7b9287969a471818d | 12,168 |
def parse_match(field, tokens):
"""Parses a match or match_phrase node
:arg field: the field we're querying on
:arg tokens: list of tokens to consume
:returns: list of match clauses
"""
clauses = []
while tokens and tokens[-1] not in (u'OR', u'AND'):
token = tokens.pop()
... | ac970f319b74317637c31265981ecebab6ca9611 | 12,169 |
def get_filesystem(namespace):
"""
Returns a patched pyfilesystem for static module storage based on
`DJFS_SETTINGS`. See `patch_fs` documentation for additional details.
The file system will have two additional properties:
1) get_url: A way to get a URL for a static file download
2) expire... | ae00307c0c38a554bebe1bbd940ace0f2d154b47 | 12,170 |
def sym_normalize_adj(adj):
"""symmetrically normalize adjacency matrix"""
adj = sp.coo_matrix(adj)
degree = np.array(adj.sum(1)).flatten()
d_inv_sqrt = np.power(np.maximum(degree, np.finfo(float).eps), -0.5)
d_mat_inv_sqrt = sp.diags(d_inv_sqrt)
return adj.dot(d_mat_inv_sqrt).transpose().dot(d_... | a172ec18cd88ac8a50356453eb159c001b21d9b1 | 12,171 |
def prepare_label(input_batch, new_size):
"""Resize masks and perform one-hot encoding.
Args:
input_batch: input tensor of shape [batch_size H W 1].
new_size: a tensor with new height and width.
Returns:
Outputs a tensor of shape [batch_size h w 21]
with last dimension comprised of... | 3cd049b0d610ed2cec79e17464a0b3d18baa0ab2 | 12,172 |
def find_index_halfmax(data1d):
"""
Find the two indices at half maximum for a bell-type curve (non-parametric). Uses center of mass calculation.
:param data1d:
:return: xmin, xmax
"""
# normalize data between 0 and 1
data1d = data1d / float(np.max(data1d))
# loop across elements and sto... | 2bc506075218b34d971beebd1ed6d08b0841aec9 | 12,173 |
from typing import Optional
def format_autoupdate_jira_msg(
message_body: str, header_body: Optional[str] = None
) -> str:
"""
Format a JIRA message with useful headers.
An "Automated JIRA Update" title will be added,
as well as either a URL link if a ``BUILD_URL`` env variable is present,
or... | 8470987c886c4c696ebd7537369b9baee9883e20 | 12,174 |
def _unescape_token(escaped_token):
"""Inverse of _escape_token().
Args:
escaped_token: a unicode string
Returns:
token: a unicode string
"""
def match(m):
if m.group(1) is None:
return "_" if m.group(0) == "\\u" else "\\"
try:
return chr(int(m... | 1e596373f64d2163e467dddf4851e60dba6faa00 | 12,175 |
def create_option_learner(action_space: Box) -> _OptionLearnerBase:
"""Create an option learner given its name."""
if CFG.option_learner == "no_learning":
return KnownOptionsOptionLearner()
if CFG.option_learner == "oracle":
return _OracleOptionLearner()
if CFG.option_learner == "direct_... | 5642b5c6713dcf3204a0bf98e4435cfb2874e1c6 | 12,176 |
def parse_foochow_romanized_phrase(phrase, allow_omit_ingbing = True):
"""Parse a dash-separated phrase / word in Foochow Romanized."""
syllables = phrase.strip().split('-')
result = []
for syllable in syllables:
try:
parsed = FoochowRomanizedSyllable.from_string(syllable, allow_omi... | d9b5fa15ab11a596e14c7eecff2ce4fc7ef520ae | 12,177 |
def _update(dict_merged: _DepDict, dict_new: _DepDict) -> _DepDict:
"""
Merge a dictionary `dict_new` into `dict_merged` asserting if there are
conflicting (key, value) pair.
"""
for k, v in dict_new.items():
v = dict_new[k]
if k in dict_merged:
if v != dict_merged[k]:
... | 8c96256dd96f0a75d8e8cde039c7193699bf763f | 12,178 |
from datetime import datetime
def date_convert(value):
    """Convert a 'YYYY/MM/DD' date string to a ``datetime.date``.

    :param value: date string in ``%Y/%m/%d`` format
    :return: the parsed date; today's date if *value* cannot be parsed
    """
    try:
        return datetime.strptime(value, '%Y/%m/%d').date()
    # Narrowed from a bare `except Exception as e` (the variable was
    # unused): strptime raises ValueError on a bad format and TypeError
    # on a non-string input; anything else should propagate.
    except (ValueError, TypeError):
        return datetime.now().date()
def discriminator_txt2img_resnet(input_images, t_txt, is_train=True, reuse=False):
""" 64x64 + (txt) --> real/fake """
# https://github.com/hanzhanggit/StackGAN/blob/master/stageI/model.py
# Discriminator with ResNet : line 197 https://github.com/reedscot/icml2016/blob/master/main_cls.lua
w_init = tf.ra... | 200d23ccffe631ea9bea2de5afa82a1794192a7b | 12,180 |
import imghdr
def get_img_content(session,
file_url,
extension=None,
max_retry=3,
req_timeout=5):
"""
Returns:
(data, actual_ext)
"""
retry = max_retry
while retry > 0:
try:
response = sessi... | 156005420ebc1503d5cf7a194051b93d9fccb8ed | 12,181 |
import urllib
def nextbus(a, r, c="vehicleLocations", e=0):
"""Returns the most recent latitude and
longitude of the selected bus line using
the NextBus API (nbapi)"""
nbapi = "http://webservices.nextbus.com"
nbapi += "/service/publicXMLFeed?"
nbapi += "command=%s&a=%s&r=%s&t=%s" % (c,a,r,e)
xml = mini... | 13d1d26fcda1ad01e145dc1d1d0d4e5377efa576 | 12,182 |
def xml_translate(callback, value):
""" Translate an XML value (string), using `callback` for translating text
appearing in `value`.
"""
if not value:
return value
try:
root = parse_xml(value)
result = translate_xml_node(root, callback, parse_xml, serialize_xml)
... | b95f61fd1f78d567f69bdae3f5d0a1599d7b5cdc | 12,183 |
def check_sc_sa_pairs(tb, pr_sc, pr_sa, ):
"""
Check whether pr_sc, pr_sa are allowed pairs or not.
agg_ops = ['', 'MAX', 'MIN', 'COUNT', 'SUM', 'AVG']
"""
bS = len(pr_sc)
check = [False] * bS
for b, pr_sc1 in enumerate(pr_sc):
pr_sa1 = pr_sa[b]
hd_types1 = tb[b]['types']
... | ded05192f26516e54e469bb1fe44ff6170ecea13 | 12,184 |
def translate_resource_args(func):
"""
Decorator that converts Issue and Project resources to their keys when used as arguments.
"""
@wraps(func)
def wrapper(*args, **kwargs):
arg_list = []
for arg in args:
if isinstance(arg, (Issue, Project)):
arg_list.ap... | 091c2bbc2875f32c643afe0a88ddb5980ff9f90c | 12,185 |
def nan_jumps_dlc(files, max_jump=200):
"""Nan stretches in between large jumps, assuming most of the trace is correct"""
# copy the data
corrected_trace = files.copy()
# get the column names
column_names = corrected_trace.columns
# run through the columns
for column in column_names:
... | 5c06e6d020c4b85f2749d46f693dc29fd4f8326c | 12,186 |
def index():
    """View: render the site index page."""
    template_path = "pages/index.html"
    return render_template(template_path)
from typing import Optional
import aiohttp
import urllib
import hashlib
async def get_latest_digest_from_registry(
repository: str,
tag: str,
credentials: Optional[meadowrun.credentials.RawCredentials],
) -> str:
"""
Queries the Docker Registry HTTP API to get the current digest of the specified
... | 95b0044d5c6e744f548891c1f78115eb40ddaf4c | 12,188 |
def histogram(x, bins, bandwidth, epsilon=1e-10):
"""
Function that estimates the histogram of the input tensor.
The calculation uses kernel density estimation which requires a bandwidth (smoothing) parameter.
"""
pdf, _ = marginal_pdf(x.unsqueeze(2), bins, bandwidth, epsilon)
return p... | bee459eb69daf41ccb0f2810d5e88139f57cad87 | 12,189 |
import re
import os
def file_deal(paths, set_list: list, list_search: list, list_enter: list, file_path: dict, clear_list: bool = False,
pattern=r'^[.\n]*$', is_file=True, replace_str: str = '', names: dict = None):
"""
:param clear_list: is need clear the list
:param paths: DirPicker path ... | a34dbfe7c968626aca5c68e6aa2c0287ebd7146f | 12,190 |
def generate_kam(
kam_path: str
) -> nx.DiGraph:
"""
Generates the knowledge assembly model as a NetworkX graph.
:param kam_path: Path to the file containing the source, relationship and the target nodes of a knowledge
assembly model (KAM).
:return: KAM graph as a NetworkX DiGraph.
"""
... | 54f6e6cc0440a9b81cb48078030943013e599847 | 12,191 |
def toDrive(collection, folder, namePattern='{id}', scale=30,
dataType="float", region=None, datePattern=None,
extra=None, verbose=False, **kwargs):
""" Upload all images from one collection to Google Drive. You can use
the same arguments as the original function
ee.batch.export.imag... | 2bf55de8894a063d2e74a462a3959753a1396c0a | 12,192 |
def area(box):
"""Computes area of boxes.
B: batch_size
N: number of boxes
Args:
box: a float Tensor with [N, 4], or [B, N, 4].
Returns:
a float Tensor with [N], or [B, N]
"""
with tf.name_scope('Area'):
y_min, x_min, y_max, x_max = tf.split(
value=box, num_or_size_splits=4, axis=-1... | 64e7a8e530d28a6c88f8a1c7fd2bb1b6d880617c | 12,193 |
def read(fn):
"""
return a list of the operating systems and a list of the groups in
the given fingerbank config file
"""
cfg = parse_config_with_heredocs(fn)
return create_systems_and_groups(cfg) | 900e70093e469527f20f7be6ed091edf58ff3ace | 12,194 |
def decay_value(base_value, decay_rate, decay_steps, step):
    """Exponentially decay ``base_value`` by ``decay_rate`` every ``decay_steps``.

    :param base_value: starting value before any decay
    :param decay_rate: multiplicative factor applied once per ``decay_steps``
    :param decay_steps: number of steps per full application of the rate
    :param step: current step count
    :return: the decayed value
    """
    exponent = step / decay_steps
    return base_value * decay_rate ** exponent
def setup_flask_app(manager_ip='localhost',
driver='',
hash_salt=None,
secret_key=None):
"""Setup a functioning flask app, when working outside the rest-service
:param manager_ip: The IP of the manager
:param driver: SQLA driver for postgres (e.g.... | ca71e94dcfcc4949abc4782a74cd67ce1d089d06 | 12,196 |
import shutil
def download_file(file_id, unique_id, credentials):
"""Downloads a file from google drive if user has been authenticated using oauth2
Args:
file_id (str): [The google drive id of the file]
unique_id (str): [The name of the video that is to be used for stored file]
Returns:
... | ae1f0e9648602737a295ac313f3984d31c51fc7e | 12,197 |
def join_lines(new_lines, txt):
    """Joins lines, adding a trailing return if the original text had one."""
    joined = '\n'.join(new_lines)
    return add_ending(joined, txt)
def get_components_with_metrics(config):
"""
:type: config mycroft_holmes.config.Config
"""
storage = MetricsStorage(config=config)
components = []
for feature_name, feature_spec in config.get_features().items():
feature_id = config.get_feature_id(feature_name)
metrics = config... | 478743f29620530d7c4d7ca916ec595fa7a1ab3b | 12,199 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.