content stringlengths 35 762k | sha1 stringlengths 40 40 | id int64 0 3.66M |
|---|---|---|
def trajnet_batch_multi_eval(preds, gt, seq_start_end):
"""Calculate Top-k ADE, Top-k FDE for batch of samples.
pred = Num_modes x Num_ped x Num_timesteps x 2
gt = Num_ped x Num_timesteps x 2
seq_start_end (batch delimiter) = Num_batches x 2
"""
s_topk_ade = 0
s_topk_fde = 0
for (start,... | ff93309e61d871a2d337810cc1836950f883c184 | 10,200 |
def disemvowel(sentence):
"""Disemvowel:
Given a sentence, return the sentence with all vowels removed.
>>> disemvowel('the quick brown fox jumps over the lazy dog')
'th qck brwn fx jmps vr th lzy dg'
"""
vowels = ('a','e','i','o','u')
for x in sentence:
if x in vowels:
... | d9b6d873c29e82cb65e43f71e2b6298af18b25fd | 10,201 |
def runPolyReg(xValueList, yValueList, degrees):
"""
Preforms *Polynomial Regression* based on the arguments provided.
Note that we split the data by the *First* 80 percent of the data and then the *Last* 20 percent of the data, rather than randomly splitting the data by 80/20 for the Train/Test split.
... | 25d4699f720d943dc49264edc12f2246df51f053 | 10,202 |
def unfold_phi_vulpiani(phidp, kdp):
"""Alternative phase unfolding which completely relies on :math:`K_{DP}`.
This unfolding should be used in oder to iteratively reconstruct
:math:`Phi_{DP}` and :math:`K_{DP}` (see :cite:`Vulpiani2012`).
Parameters
----------
phidp : :class:`numpy:numpy.ndar... | 72386a05500c4ba11385e3b57288655e0a207352 | 10,203 |
def get_result_df(session):
"""
query the match table and put results into pandas dataframe,
to train the team-level model.
"""
df_past = pd.DataFrame(
np.array(
[
[s.fixture.date, s.fixture.home_team, s.fixture.away_team, s.home_score, s.away_score]
... | 364d9e7f9ef1a97018402fa964f246954f51f945 | 10,204 |
def permute1d(preserve_symmetry = True):
"""Choose order to rearrange rows or columns of puzzle."""
bp = block_permutation(preserve_symmetry)
ip = [block_permutation(False),block_permutation(preserve_symmetry)]
if preserve_symmetry:
ip.append([2-ip[0][2],2-ip[0][1],2-ip[0][0]])
else:
... | a9ccd2cb486e0ee3d50840c6ab41871396f3ca93 | 10,205 |
import os
import subprocess
def find_diff(sha, files=None):
"""Find the diff since the given sha."""
if files:
for file_or_dir in files:
msg = f"{file_or_dir} doesn't exist. Please provide a valid path."
assert os.path.exists(file_or_dir), msg
else:
files = ['*.py']... | a9cdd27318180c1e9f6572dbb1c49b4a17384236 | 10,206 |
import os
def split_name_with_nii(filename):
"""
Returns the clean basename and extension of a file.
Means that this correctly manages the ".nii.gz" extensions.
:param filename: The filename to clean
:return: A tuple of the clean basename and the full extension
"""
base, ext = os.path.spli... | d897804e4a0b773a1c23bff8ad8d7e7e678a9799 | 10,207 |
from typing import Iterable
from typing import List
def take(n: int, iterable: Iterable[T_]) -> List[T_]:
    """Collect the first n items of *iterable* into a list."""
    # islice stops early when the iterable is shorter than n.
    first_n = islice(iterable, n)
    return list(first_n)
def abs(rv):
    """
    Absolute value of a random variable.

    NOTE: intentionally shadows the builtin ``abs`` within this module;
    delegates to the random variable's own ``abs`` method.
    """
    result = rv.abs()
    return result
def compute_ssm(X, metric="cosine"):
"""Computes the self-similarity matrix of X."""
D = distance.pdist(X, metric=metric)
D = distance.squareform(D)
for i in range(D.shape[0]):
for j in range(D.shape[1]):
if np.isnan(D[i, j]):
D[i, j] = 0
D /= D.max()
return 1... | 646d9af2134db13b69391817ddfeace0fef1217d | 10,210 |
def escape(instruction):
"""
Escape used dot graph characters in given instruction so they will be
displayed correctly.
"""
instruction = instruction.replace('<', r'\<')
instruction = instruction.replace('>', r'\>')
instruction = instruction.replace('|', r'\|')
instruction = instruction.... | 936ed1d6c55650bf5f9ce52af8f113a9d466a534 | 10,211 |
def _json_object_hook(d):
"""
JSON to object helper
:param d: data
:return: namedtuple
"""
keys = []
for k in d.keys():
if k[0].isdigit():
k = 'd_{}'.format(k)
keys.append(k)
return namedtuple('X', keys)(*d.values()) | a4a534a975d6faff440f66065d4954e2a5a91ff2 | 10,212 |
def _fourier_interpolate(x, y):
""" Simple linear interpolation for FFTs"""
xs = np.linspace(x[0], x[-1], len(x))
intp = interp1d(x, y, kind="linear", fill_value="extrapolate")
ys = intp(xs)
return xs, ys | cfe663b9e261bbaea2ab6fe58366f4ec3726468c | 10,213 |
import hashlib
def compute_hash_json_digest(*args, **kwargs):
    """Serialize args/kwargs via compute_hash_json and return the MD5 hex digest."""
    serialized = compute_hash_json(*args, **kwargs)
    digest = hashlib.md5(serialized)
    return digest.hexdigest()
def hello_world():
    """Return a static JSON string pointing at internal docs as the response body."""
    body = '{"info": "Refer to internal http://metadata-db for more information"}'
    return body
def get_role(server: discord.Server, role_arg: str) -> discord.Role:
"""
Get a role from a passed command parameter (name, mention or ID).
:return:
"""
try:
role_id = extract_role_id(role_arg)
except discord.InvalidArgument: # no ID, treat as a role name
try:
role = ... | 2f7b2ae3ec3ed950c70eec6338e2f06f771e4bde | 10,216 |
import google
from datetime import datetime
def build_timestamp(timestamp=None) -> google.protobuf.timestamp_pb2.Timestamp:
"""Convert Python datetime to Protobuf Timestamp"""
# https://github.com/protocolbuffers/protobuf/issues/3986
proto_timestamp = google.protobuf.timestamp_pb2.Timestamp()
return p... | ae2278b66c200f007240ca5f683a60ebc1ebddf2 | 10,217 |
from typing import Dict
from typing import Any
import os
def set_workspace(data: Dict[str, Any]) -> Dict[str, Any]:
"""Set workspace."""
workspace_path = data.get("path", None)
if not workspace_path:
raise ClientErrorException("Parameter 'path' is missing in request.")
os.makedirs(workspace_... | 5feb25f748e2fc57eb0ab7bca9f1d1d8c964156c | 10,218 |
def read_blosum():
"""Read blosum dict and delete some keys and values."""
with open('./psiblast/blosum62.pkl', 'rb') as f:
blosum_dict = cPickle.load(f)
temp = blosum_dict.pop('*')
temp = blosum_dict.pop('B')
temp = blosum_dict.pop('Z')
temp = blosum_dict.pop('X')
temp = blosum_dic... | ddbf71c03e05bd156ad688a9fe9692da1d0a3dc4 | 10,219 |
from typing import List
from typing import Tuple
def parse_spans_bio_with_errors(seq: List[str]) -> Tuple[List[Span], List[Error]]:
"""Parse a sequence of BIO labels into a list of spans but return any violations of the encoding scheme.
Note:
In the case where labels violate the span encoded scheme, ... | 6cea777cfb8bf96325f2695af2c48cc22c4884cf | 10,220 |
from typing import Sequence
from typing import Tuple
def find_best_similar_match(i1: int, i2: int, j1: int, j2: int, a: Sequence, b: Sequence, sm: SequenceMatcher = None) \
-> Tuple[int, int, float]:
"""
Finds most similar pair of elements in sequences bounded by indexes a[i1:i2], b[j1: j2].
:par... | ca6e73c2315e2d2419b631cb505131f3daabea4b | 10,221 |
def ConvUpscaleBlock(inputs, n_filters, kernel_size=[3, 3], scale=2):
"""
Basic conv transpose block for Encoder-Decoder upsampling
Apply successivly Transposed Convolution, BatchNormalization, ReLU nonlinearity
"""
net = slim.conv2d_transpose(inputs, n_filters, kernel_size=[3, 3], stride=[2, 2], ac... | 787104a3015bd901105383b203551573f9f07fcb | 10,222 |
import logging
def create_ticket(
client, chat_id, user_id, group_id, recipient_email, subject,
slack_message_url
):
"""Create a new zendesk ticket in response to a new user question.
:param client: The Zendesk web client to use.
:param chat_id: The conversation ID on slack.
:param user_id:... | ea71cd7055b64997660f7da4ea25dec0b41465ba | 10,223 |
def make_random_password(self, length = 10, allowed_chars = 'abcdefghjkmnpqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ23456789'):
"""
Generate a random password with the given length and given
allowed_chars. The default value of allowed_chars does not have "I" or
"O" or letters and digits that look similar -- just to avoid c... | be155b2537b062a396ed1d5aed6367857b21d49e | 10,224 |
def autocov_vector(x, nlags=None):
"""
This method computes the following function
.. math::
R_{xx}(k) = E{ x(t)x^{*}(t-k) } = E{ x(t+k)x^{*}(k) }
k \in {0, 1, ..., nlags-1}
(* := conjugate transpose)
Note: this is related to
the other commonly used definition for vector autocovarian... | 8725b2695b51c014e8234605bc5e64ad1ca0c26b | 10,225 |
def sequence_masking(x, mask, mode=0, axis=None, heads=1):
"""为序列条件mask的函数
mask: 形如(batch_size, sequence)的0-1矩阵;
mode: 如果是0,则直接乘以mask;
如果是1,则在padding部分减去一个大正数。
axis: 序列所在轴,默认为1;
heads: 相当于batch这一维要被重复的次数。
"""
if mask is None or mode not in [0, 1]:
return x
else:
... | ac7e0da24eca87ab3510c1c274f0caeb2d527816 | 10,226 |
def declare_encoding(log, labelling, encoding, additional_columns, cols=None): #TODO JONAS
"""creates and returns the DataFrame encoded using the declare encoding
:param log:
:param labelling:
:param encoding:
:param additional_columns:
:param cols:
:return:
"""
filter_t = True
... | 956bc6e37d2909abaa96abe862187179bc7b50df | 10,227 |
def __long_description() -> str:
    """Assemble the project long description from the README and the changelog."""
    readme_text = __readme()
    changelog_text = __changelog()
    return f"{readme_text}\n\n{changelog_text}"
def captains_draft(path=None, config=None):
    """Build a game set to Captains Draft (27-hero pool, 3 bans per team)."""
    game = _default_game(path, config=config)
    # Override the default mode with the Captains Draft mode id.
    cd_mode = int(DOTA_GameMode.DOTA_GAMEMODE_CD)
    game.options.game_mode = cd_mode
    return game
def create(width, height, pattern=None):
"""Create an image optionally filled with the given pattern.
:note: You can make no assumptions about the return type; usually it will
be ImageData or CompressedImageData, but patterns are free to return
any subclass of AbstractImage.
:Parameters:
... | dcd287353c84924afcdd0a56e9b51f00cde7bb85 | 10,230 |
import logging
def compute_conformer(smile: str, max_iter: int = -1) -> np.ndarray:
"""Computes conformer.
Args:
smile: Smile string.
max_iter: Maximum number of iterations to perform when optimising MMFF force
field. If set to <= 0, energy optimisation is not performed.
Returns:
A tuple con... | 0b923d7616741312d8ac129d7c7c99081a2c3f97 | 10,231 |
def get_api_key():
    """Load the Mailgun API key from ``mailgun_api_key.txt``.

    :return: the key with surrounding whitespace stripped
    :raises OSError: if the key file is missing or unreadable
    """
    # Context manager guarantees the handle is closed even if read() raises,
    # unlike the original open/read/close sequence.
    with open('mailgun_api_key.txt', 'r') as api_key_file:
        return api_key_file.read().strip()
def bitserial_conv2d_strategy_hls(attrs, inputs, out_type, target):
"""bitserial_conv2d hls strategy"""
strategy = _op.OpStrategy()
layout = attrs.data_layout
if layout == "NCHW":
strategy.add_implementation(
wrap_compute_bitserial_conv2d(topi.nn.bitserial_conv2d_nchw),
w... | f009b1f7ac073573877b1ddab616868cdf1d42c7 | 10,233 |
import yaml
import os
def visit(planfile,tracefile=None) :
""" Reduce an APOGEE visit
Driver to do 3 chips in parallel
Makes median flux plots
"""
# reduce channels in parallel
chan=['a','b','c' ]
procs=[]
for channel in [1] :
kw={'planfile' : planfile, 'channel'... | c8a17a07a355ad2eb493e247769ea441458735e0 | 10,234 |
def get_fpga_bypass_mode(serverid):
""" Read back FPGA bypass mode setting
"""
try:
interface = get_ipmi_interface(serverid, ["ocsoem", "fpgaread", "mode"])
return parse_get_fpga_bypass_mode(interface, "mode")
except Exception, e:
return set_failure_dict("get_fpga_bypass_mo... | b572a372c6c73bb0f65686b3235e3362d31e8655 | 10,235 |
def lookup_complement(binding):
"""
Extracts a complement link from the scope of the given binding.
Returns an instance of :class:`htsql.core.tr.binding.Recipe`
or ``None`` if a complement link is not found.
`binding` (:class:`htsql.core.tr.binding.Binding`)
A binding node.
"""
pro... | 104f7b0139a8ca6390cb90dc10529d3be9a723ea | 10,236 |
import itertools
def flatten(colours):
    """Flatten the cubular (three-level nested) array into one long list."""
    # Two nested flattening steps written as a single comprehension.
    return [item for plane in colours for row in plane for item in row]
def efficientnet_b6(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
"""EfficientNet-B6"""
# NOTE for train, drop_rate should be 0.5
# kwargs['drop_connect_rate'] = 0.2 # set when training, TODO add as cmd arg
model_name = "tf_efficientnet_b6"
default_cfg = default_cfgs[model_name]
mo... | 0f47b42a000e0d58dd01e7254dc5187e298ad8e5 | 10,238 |
import numpy
def create_word_search_board(number: int):
"""
This function creates a numpy array of zeros, with dimensions of
number x number, which is set by the user. The array is then
iterated through, and zeros are replaced with -1's to avoid
confusion with the alphabet (A) beginning at 0.
... | 31f22d56c947f61840ba87d028eb7de275d33cc9 | 10,239 |
def get_parent_choices(menu, menu_item=None):
"""
Returns flat list of tuples (possible_parent.pk, possible_parent.caption_with_spacer).
If 'menu_item' is not given or None, returns every item of the menu. If given, intentionally omit it and its descendant in the list.
"""
def get_flat_tuples(menu_i... | c88ca93f7e8a7907425a51323ba53bb75bdf29c2 | 10,240 |
def _update_jacobian(state, jac):
"""
we update the jacobian using J(t_{n+1}, y^0_{n+1})
following the scipy bdf implementation rather than J(t_n, y_n) as per [1]
"""
J = jac(state.y0, state.t + state.h)
n_jacobian_evals = state.n_jacobian_evals + 1
LU = jax.scipy.linalg.lu_factor(state.M - ... | 31570ad29dca3ee01281819865e6efe1aec4050d | 10,241 |
from typing import Tuple
from typing import List
def reduce_pad(sess: tf.Session, op_tensor_tuple: Tuple[Op, List[tf.Tensor]], _) -> (str, tf.Operation, tf.Operation):
"""
Pad module reducer
:param sess: current tf session
:param op_tensor_tuple: tuple containing the op to reduce, and a list of input ... | 29d7e8daf85a9fe8fee118fd5ec5dc00018120a9 | 10,242 |
def parse_fastq(fh):
""" Parse reads from a FASTQ filehandle. For each read, we
return a name, nucleotide-string, quality-string triple. """
reads = []
while True:
first_line = fh.readline()
if len(first_line) == 0:
break # end of file
name = first_line[1:].rstr... | d33d3efebdd1c5f61e25397328c6b0412f1911dd | 10,243 |
def minhash_256(features):
    # type: (List[int]) -> bytes
    """
    Create a 256-bit minimum hash digest.

    :param List[int] features: integer features to hash
    :return: 256 bits built from the least significant bits of the minhash values
    :rtype: bytes
    """
    hashes = minhash(features)
    return compress(hashes, 4)
def coalesce(*values):
    """Return the first argument that is not None, or None if all are."""
    for value in values:
        if value is not None:
            return value
    return None
from mitsuba.core.xml import load_string
def test06_load_various_features(variant_scalar_rgb, mesh_format, features, face_normals):
"""Tests the OBJ & PLY loaders with combinations of vertex / face normals,
presence and absence of UVs, etc.
"""
def test():
shape = load_string("""
... | a0117fe48b53e448181014e006ce13368c777d90 | 10,246 |
import torch
def euc_reflection(x, a):
"""
Euclidean reflection (also hyperbolic) of x
Along the geodesic that goes through a and the origin
(straight line)
"""
xTa = torch.sum(x * a, dim=-1, keepdim=True)
norm_a_sq = torch.sum(a ** 2, dim=-1, keepdim=True).clamp_min(MIN_NORM)
proj = x... | 83b5a8559e783b24d36a18fb30059dce82bf9cf7 | 10,247 |
def is_online():
    """Check if host is online (HEAD request to google.com, 1 second timeout)."""
    conn = httplib.HTTPSConnection("www.google.com", timeout=1)
    try:
        conn.request("HEAD", "/")
    except Exception:
        # Any network failure (DNS, timeout, refused) counts as offline.
        return False
    else:
        return True
    finally:
        conn.close()
def text_value(s):
"""Convert a raw Text property value to the string it represents.
Returns an 8-bit string, in the encoding of the original SGF string.
This interprets escape characters, and does whitespace mapping:
- linebreak (LF, CR, LFCR, or CRLF) is converted to \n
- any other whitespace c... | 24d40367dbefcfbdd0420eb466cf6d09657b2768 | 10,249 |
def modifica_immobile_pw():
"""La funzione riceve l' ID immobile da modificare e ne modifica un attibuto scelto dall'utente """
s = input("Vuoi la lista degli immobili per scegliere il ID Immobile da modificare? (S/N)")
if s == "S" or s =="s":
stampa_immobili_pw()
s= input("Dammi ID... | 99905d61d91178092dba8860265b2034b3f8430b | 10,250 |
def hpat_pandas_series_len(self):
"""
Pandas Series operator :func:`len` implementation
.. only:: developer
Test: python -m hpat.runtests hpat.tests.test_series.TestSeries.test_series_len
Parameters
----------
series: :class:`pandas.Series`
Returns
-------
... | 57bdd2a7f7ae54861943fb44f3bc51f1f6544911 | 10,251 |
from typing import List
def arrays_not_same_size(inputs: List[np.ndarray]) -> bool:
"""Validates that all input arrays are the same size.
Args:
inputs (List[np.ndarray]): Input arrays to validate
Returns:
true if the arrays are the same size and false if they are not
"""
shapes = ... | 8b9988f49d766bc7a27b79cf6495182e98a8fe18 | 10,252 |
def GetReaderForFile(filename):
"""
Given a filename return a VTK reader that can read it
"""
r = vtkPNGReader()
if not r.CanReadFile(filename):
r = vtkPNMReader()
if not r.CanReadFile(filename):
r = vtkJPEGReader()
if not r.CanReadFile(filename):
... | f574417df44f8a43277e62967ec6fd4c986fa85a | 10,253 |
def build_figure_nn(df, non_private, semantic):
"""
Dataframe with one semantic and one model
"""
l = df.query("epsilon > 0").sort_values(["train_size", "epsilon"])
naive, low, high = get_plot_bounds(df)
fig = px.line(
l,
x="train_size",
y="accuracy",
range_y=[lo... | 5eab366e20eaec721d7155d82e42d9222cacd3b5 | 10,254 |
def get_incomplete_sample_nrs(df):
""" Returns sample nrs + topologies if at least 1 algorithm result is missing """
topology_incomplete_sample_nr_map = dict()
n_samples = df.loc[df['sample_idx'].idxmax()]['sample_idx'] + 1
for ilp_method in np.unique(df['algorithm_complete']):
dfx = df[df['algo... | 2d816d80bb2f0c2686780ca49d0c01e89c69e7b5 | 10,255 |
from typing import Optional
def _read_pos_at_ref_pos(rec: AlignedSegment,
ref_pos: int,
previous: Optional[bool] = None) -> Optional[int]:
"""
Returns the read or query position at the reference position.
If the reference position is not within the span o... | 51270a1c1a5f69b179e3623824632443775ec9c7 | 10,256 |
from astropy.io import fits as pf
import numpy as np
import logging
def load_gtis(fits_file, gtistring=None):
"""Load GTI from HDU EVENTS of file fits_file."""
gtistring = _assign_value_if_none(gtistring, 'GTI')
logging.info("Loading GTIS from file %s" % fits_file)
lchdulist = pf.open(fits_file, chec... | c1a8019d052ce437680e6505e65134a5ed66a1a3 | 10,257 |
import requests
def macro_australia_unemployment_rate():
"""
东方财富-经济数据-澳大利亚-失业率
http://data.eastmoney.com/cjsj/foreign_5_2.html
:return: 失业率
:rtype: pandas.DataFrame
"""
url = "http://datainterface.eastmoney.com/EM_DataCenter/JS.aspx"
params = {
"type": "GJZB",
"sty": "... | 260debcfaf342d08acacfe034da51b3d3162393e | 10,258 |
from typing import List
import math
def _convert_flattened_paths(
paths: List,
quantization: float,
scale_x: float,
scale_y: float,
offset_x: float,
offset_y: float,
simplify: bool,
) -> "LineCollection":
"""Convert a list of FlattenedPaths to a :class:`LineCollection`.
Args:
... | 876421cd7f89dc5f3d64357e76f302c633e41ba7 | 10,259 |
def _CustomSetAttr(self, sAttr, oValue):
    """ Our setattr replacement for DispatchBaseClass. """
    try:
        # First attempt with the COM-style (comified) attribute name.
        sComName = ComifyName(sAttr)
        return _g_dCOMForward['setattr'](self, sComName, oValue)
    except AttributeError:
        # Fall back to the attribute name exactly as given.
        return _g_dCOMForward['setattr'](self, sAttr, oValue)
def reduce_to_contemporaneous(ts):
"""
Simplify the ts to only the contemporaneous samples, and return the new ts + node map
"""
samples = ts.samples()
contmpr_samples = samples[ts.tables.nodes.time[samples] == 0]
return ts.simplify(
contmpr_samples,
map_nodes=True,
keep_... | 7661a58b6f4b95d5cb4b711db39bb28852151304 | 10,261 |
from datetime import datetime
import subprocess
def mk_inv_part_txt_file(filename):
"""This function downloads the inventory pdf file given by 'part' and
saves it in the 'data' directory.
It also saves the retrieval time of the file.
It produces a txt file for the pdf file with pdftotext.
"""
... | 192f5e42a5c876461767262c50fd2525315aaf62 | 10,262 |
def scenario_map_fn(
example,
*,
snr_range: tuple = (20, 30),
sync_speech_source=True,
add_speech_reverberation_early=True,
add_speech_reverberation_tail=True,
early_rir_samples: int = int(8000 * 0.05), # 50 milli seconds
details=False,
):
"""
... | a3e6e5bf368bdfbb29b9d1c6684daa4077de9061 | 10,263 |
def get_name(tree, from_='name'):
"""
Get the name (token) of the AST node.
:param tree ast:
:rtype: str|None
"""
# return tree['name']['name']
if 'name' in tree and isinstance(tree['name'], str):
return tree['name']
if 'parts' in tree:
return djoin(tree['parts'])
i... | b5f1e97eb570859b01bf9489c6b9d4874511fdcc | 10,264 |
def pcaTable(labels,vec_mean,vec_std,val_mean,val_std):
"""Make table with PCA formation mean and std"""
header="\\begin{center}\n\\begin{tabular}{| l |"+" c |"*6+"}\\cline{2-7}\n"
header+="\\multicolumn{1}{c|}{} & \\multicolumn{2}{c|}{PC1} & \multicolumn{2}{c|}{PC2} & \multicolumn{2}{c|}{PC3} \\... | 646fc1b5344a716b8f30714f112a477063bf91ce | 10,265 |
def render_reference_page(conn: Connection, reference: str) -> str:
"""Create HTML section that lists all notes that cite the reference."""
sql = """
SELECT note, Bibliography.html,Notes.html FROM Citations
JOIN Notes ON Citations.note = Notes.id
JOIN Bibliography ON Bibliogr... | 6ab73d0d85da28676e7bb3cf42b3304cd0d6ad47 | 10,266 |
import logging
def normalize_bridge_id(bridge_id: str):
"""Normalize a bridge identifier."""
bridge_id = bridge_id.lower()
# zeroconf: properties['id'], field contains semicolons after each 2 char
if len(bridge_id) == 17 and sum(True for c in "aa:bb:cc:dd:ee:ff"
if... | 5370cc49e4c0272da2a471006bbbf3fd5e5521bf | 10,267 |
import imp
def pyc_file_from_path(path):
"""Given a python source path, locate the .pyc.
See http://www.python.org/dev/peps/pep-3147/
#detecting-pep-3147-availability
http://www.python.org/dev/peps/pep-3147/#file-extension-checks
"""
has3147 = hasattr(imp, 'get_tag')
... | 459011ca1f07a023b139695cd2368767d46ca396 | 10,268 |
def get_bytes_per_data_block(header):
"""Calculates the number of bytes in each 128-sample datablock."""
N = 128 # n of amplifier samples
# Each data block contains N amplifier samples.
bytes_per_block = N * 4 # timestamp data
bytes_per_block += N * 2 * header['num_amplifier_channels']
# ... | 524e9015dacaf99042dd1493b24a418fff8c6b04 | 10,269 |
def recovered():
    """
    Real Name: b'Recovered'
    Original Eqn: b'INTEG ( RR, 0)'
    Units: b'Person'
    Limits: (None, None)
    Type: component

    Stock of recovered people: integrates the recovery rate RR from an
    initial value of 0, delegating to the generated integ component.
    """
    return integ_recovered()
def Mux(sel, val1, val0):
"""Choose between two values.
Parameters
----------
sel : Value, in
Selector.
val1 : Value, in
val0 : Value, in
Input values.
Returns
-------
Value, out
Output ``Value``. If ``sel`` is asserted, the Mux returns ``val1``, else ``val0... | 62fa5abf293a1321af5e4a209427b896756e5617 | 10,271 |
def get_identity_list(user, provider=None):
    """
    Return every identity shared with the (request) user across all active
    providers, optionally narrowed to a single provider.
    """
    identities = CoreIdentity.shared_with_user(user)
    if provider:
        # Truthiness check preserved: falsy providers mean "no filtering".
        identities = identities.filter(provider=provider)
    return identities
def get_user_pool_domain(prefix: str, region: str) -> str:
"""Return a user pool domain name based on the prefix received and region.
Args:
prefix: The domain prefix for the domain.
region: The region in which the pool resides.
"""
return "%s.auth.%s.amazoncognito.com" % (prefix, regio... | dc1eec674379d04bd8b23318207ac5b2e6a905f3 | 10,273 |
import re
import os
from functools import reduce
def corr_activity(ppath, recordings, states, nskip=10, pzscore=True, bands=[]):
"""
correlate DF/F during states with delta power, theta power, sigma power and EMG amplitude
:param ppath: base filder
:param recordings: list of recordings
:param stat... | c16cc2b347f7ac1cd2f53d9092e3c0b678f1799c | 10,274 |
def add_dep_info(tgt_tokens, lang, spacy_nlp, include_detail_tag=True):
"""
:param tgt_tokens: a list of CoNLLUP_Token_Template() Objects from CoNLL_Annotations.py file
:param spacy_nlp: Spacy language model of the target sentence to get the proper Dependency Tree
:return:
"""
doc = spacy_nlp.to... | 0083d16f4344a6afaeb5fba9a6b2e9282d617ef3 | 10,275 |
async def apod(request: Request) -> dict:
"""Get the astronomy picture of the day."""
http_client = request.app.state.http_client
async with http_client.session.get(
f"https://api.nasa.gov/planetary/apod?api_key={NASA_API}"
) as resp:
data = await resp.json()
return {
"titl... | edc526732904c5f0a29c144023df7fefb6d7743c | 10,276 |
def main_view(request, url, preview=False):
"""
@param request: HTTP request
@param url: string
@param preview: boolean
"""
url_result = parse_url(url)
current_site = get_site()
# sets tuple (template_name, posts_on_page)
current_template = get_template()
language = get_language... | 6febddd1e98f94865a364b8cf9a339574a303809 | 10,277 |
def parse_configs(code_config, field_config, time_config):
"""
Wrapper to validate and parse each of the config files. Returns a
a dictionary with config types as keys and parsed config files as values.
"""
# performing basic validation of config paths, obtaining dictionary of
# config types a... | 1d625e0b56ea4d197280b91a7993a16c82a2461d | 10,278 |
def butter_highpass_filter_eda(data):
    """High-pass filter 1-D EDA data using the coefficients from eda_hpf()."""
    numer, denom = eda_hpf()
    filtered = lfilter(numer, denom, data)
    return filtered
def normalize_null_vals(reported_val):
    """
    Normalize null-like reported values (null, nan, empty, etc.) to np.NaN;
    any other value is returned unchanged.
    """
    return np.NaN if is_empty_value(reported_val) else reported_val
import sys
def in_notebook() -> bool:
    """Evaluate whether the module is currently running in a jupyter notebook."""
    # The ipykernel module is only loaded when running under Jupyter.
    loaded_modules = sys.modules
    return "ipykernel" in loaded_modules
def test_insert(type):
    """
    Exercise typedlist.insert at the front, middle, end and past the end.

    NOTE(review): the parameter name `type` shadows the builtin; kept as-is
    for interface stability.

    >>> test_insert(int_)
    [0, 1, 2, 3, 4, 5]
    """
    # Start from [1, 3] and weave the remaining values in with insert().
    tlist = nb.typedlist(type, [1,3])
    tlist.insert(0,0)
    tlist.insert(2,2)
    tlist.insert(4,4)
    # Index 8 is past the end; presumably clamps to an append (list-like) -- confirm.
    tlist.insert(8,5)
    return tlist
def get_next_month_range(unbounded=False):
    """Return the start and end of next month.

    :param unbounded: if True, treat the end of the range as an open interval
    """
    return get_month_range(months=1, unbounded=unbounded)
import functools
def probit_regression(
dataset_fn,
name='probit_regression',
):
"""Bayesian probit regression with a Gaussian prior.
Args:
dataset_fn: A function to create a classification data set. The dataset must
have binary labels.
name: Name to prepend to ops created in this function,... | b4cca9054d0ebd8c349cdb148443246616ed6120 | 10,284 |
def block_inception_c(inputs, scope=None, reuse=None):
"""Builds Inception-C block for Inception v4 network."""
# By default use stride=1 and SAME padding
with slim.arg_scope([slim.conv2d, slim.avg_pool2d, slim.max_pool2d],
stride=1, padding='SAME'):
with tf.variable_scope(scope, 'BlockI... | b6cfe5d6eeaaef2d4a02420577884393f4bfcd4b | 10,285 |
import six
from datetime import datetime
def from_json(js):
"""
Helper to parse json values from server into python types
"""
if js is None or js is True or js is False or isinstance(js, six.text_type):
# JsNull, JsBoolean, JsString
return js
if not isinstance(js, dict) or 'type' ... | 0e4f94e8fdfb634ea3a1f1f84d3ff3d5bb125175 | 10,286 |
def exposexml(func):
"""
Convenience decorator function to expose XML
"""
def wrapper(self, data, expires, contentType="application/xml"):
data = func(self, data)
_setCherryPyHeaders(data, contentType, expires)
return self.templatepage('XML', data=data,
... | 57c62490e51693551801aa4722de2e08d3fd3817 | 10,287 |
import os
import uuid
def _SetSource(build_config,
messages,
is_specified_source,
no_source,
source,
gcs_source_staging_dir,
ignore_file,
hide_logs=False):
"""Set the source for the build config."""
default_gc... | 41abbce50eeabee9780b5822ecb31921611bcae4 | 10,288 |
def transform_categorical_by_percentage(TRAIN, TEST=None,
handle_unknown="error", verbose=0):
"""
Transform categorical features to numerical. The categories are encoded
by their relative frequency (in the TRAIN dataset).
To be consistent with scikit-learn transfo... | ce2c568b40109e11d1920a211314aebdc076be7f | 10,289 |
def buildDescription(flinfoDescription='', flickrreview=False, reviewer='',
override='', addCategory='', removeCategories=False):
"""Build the final description for the image.
The description is based on the info from flickrinfo and improved.
"""
description = '== {{int:filedesc}}... | 94a529e5a26a4390536359e0233f26d32465e3ed | 10,290 |
def allowed_file(filename, extensions):
    """
    Check whether the file's extension is in the allowed set.

    :param filename: string
    :param extensions: list of lowercase extensions (without dots)
    :return bool:
    """
    if '.' not in filename:
        return False
    # Compare only the text after the last dot, case-insensitively.
    extension = filename.rsplit('.', 1)[1]
    return extension.lower() in extensions
import os
def read(infile):
    """Read result from disk, dispatching on the file extension."""
    extension = os.path.splitext(infile)[1].strip('.')
    reader = read_funcs[extension]
    return reader(infile)
import hashlib
import os
def _url_in_cache(url):
"""
Determine if a URL's response exists in the cache.
Parameters
----------
url : string
the url to look for in the cache
Returns
-------
filepath : string
path to cached response for url if it exists in the cache,
... | 4c3d43f6b6ce4efb4b3ee9b627e1c4e1ab17687b | 10,293 |
def get_number_of_recovery_codes(userid):
"""
Get and return the number of remaining recovery codes for `userid`.
Parameters:
userid: The userid for which to check the count of recovery codes.
Returns:
An integer representing the number of remaining recovery codes.
"""
return d... | f292fbbc2e2ed55f53136988ef7d6f13ab2881e6 | 10,294 |
import platform
import os
def creation_time(path_to_file):
"""The file creation time.
Try to get the date that a file was created, falling back to when
it was last modified if that isn't possible.
See http://stackoverflow.com/a/39501288/1709587 for explanation.
"""
if platform.system() == 'Wi... | 5d342f0f1b8e006e94ee5e8192b9fc3f4d9b53eb | 10,295 |
async def mailbox_search(search_term: str, Authorize: AuthJWT = Depends(),Token = Depends(auth_schema)):
"""Search email with a search term"""
Authorize.jwt_required()
try:
return JSONResponse(dumps({"success": True, "email": database.search(search_term)}))
except Exception as err:
retur... | 28d82605b78e124eb029125e9b2c9625bc207a64 | 10,296 |
def otel_service(docker_ip, docker_services):
"""Ensure that port is listening."""
# `port_for` takes a container port and returns the corresponding host port
port = docker_services.port_for("otel-collector", 4317)
docker_services.wait_until_responsive(
timeout=30.0, pause=5, check=lambda: is_p... | 19bd5f21ce30fa3cf5202369bf8e0a43456fdf9e | 10,297 |
from re import T
import numpy
def broadcast(vec: T.Tensor, matrix: T.Tensor) -> T.Tensor:
"""
Broadcasts vec into the shape of matrix following numpy rules:
vec ~ (N, 1) broadcasts to matrix ~ (N, M)
vec ~ (1, N) and (N,) broadcast to matrix ~ (M, N)
Args:
vec: A vector (either flat, row... | 3d471489ecef50a70a668db0262d0d21a8b76c86 | 10,298 |
def neighbour_list_n_out(nlist_i: NeighbourList,
nlist_j: NeighbourList) -> np.ndarray:
"""
Compute n^out between two NeighbourList object.
Args:
nlist_i (NeighbourList): A NeighbourList object for neighbour lists at time 0.
nlist_j (NeighbourList): A NeighbourL... | 15069fbb3995f7f65f61b9145353df7116d45e23 | 10,299 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.