content stringlengths 35 762k | sha1 stringlengths 40 40 | id int64 0 3.66M |
|---|---|---|
def check_media(url):
"""Check if something is available or has a new hash
Checks if url is available, if yes, download and hash it, then see if it has changed
Args:
url: A complete url to something
Returns:
0 if available and no change.
1 if not available.
2 if it... | f7237207a7ff6e555533cebe4dc83fa77538886c | 17,000 |
import random
def breed(tree1, tree2):
"""My breeding function.
Basically makes a copy of tree1, and swaps sub-trees with tree2 at
a random depth. Pretty much relies on my simplistic tree structure.
I honestly have no idea if this will work. I can't even debug it since
I have no way of printing m... | 56888f068375c6e35caa80cc40752a0fae71047d | 17,001 |
from typing import Iterable
from typing import Any
from typing import Tuple
def tuple_from_iterable(val: Iterable[Any]) -> Tuple[Any, ...]:
    """Materialize any iterable into a tuple.

    Workaround for https://github.com/python-attrs/attrs/issues/519
    """
    return (*val,)
from typing import Counter
def merge_vocabs(vocabs, vocab_size=None):
"""
Merge individual vocabularies (assumed to be generated from disjoint
documents) into a larger vocabulary.
Args:
vocabs: `torchtext.vocab.Vocab` vocabularies to be merged
vocab_size: `int` the final vocabulary siz... | db83e858c1a8910b382bcd485923ef6ba9a1466e | 17,003 |
def read_g_char(in_name, pop="ESP", debug=False):
"""
Read charges and energy from a Gaussian log file.
Parameters
----------
in_name : str
Name of the file to read
pop : str, optional
Kind of charge to read, mulliken or esp
debug : bool, optional
Return extra energy... | 97e0e3433a8b6966335adc8e4cc66154a8470138 | 17,004 |
def count_nonzero(X, axis=None, sample_weight=None):
"""A variant of X.getnnz() with extension to weighting on axis 0
Useful in efficiently calculating multilabel metrics.
Parameters
----------
X : sparse matrix of shape (n_samples, n_labels)
Input data. It should be of CSR format.
ax... | e6754cca480d626dd7ba2c96426e5eebf17a1fcb | 17,005 |
def isolate_shape_axis(base, target, axis_list = ['X','Y','Z']):
"""
Given a base mesh, only take axis movement on the target that is specified in axis_list.
Args:
base (str): The base mesh that has no targets applied.
target (str): The target mesh vertices moved to a different position... | a097442c2c379338890e5571d0e3516553fe70f3 | 17,006 |
from typing import List
def _ge(t1: 'Tensor', t2: 'Tensor', isnew: bool) -> 'Tensor':
"""
Also see
--------
:param t1:
:param t2:
:param isnew:
:return:
"""
data = t1.data >= t2.data
requires_grad = t1.requires_grad or t2.requires_grad
depends_on: List[Dependency] = []
... | 02b0407c3b2bc3ed6bf65555ab62257e3f041d0e | 17,007 |
from typing import Dict
from typing import Any
import logging
import numpy
def convert_homogeneous_graph(graph: Dict[str, Any],
num_graphs: int,
output_dir: str):
"""Process a homogeneous graph."""
# NOTE(blais): We could in theory stash the data in the... | 4aa0751437861af58159228c018a3f6d94b8613a | 17,008 |
def second_deriv_log_pdf(phi, alpha, beta, eps=1e-4):
    """Second derivative of `log_pdf` with respect to latitude.

    Approximated with a central finite difference of step `eps`.
    """
    above = log_pdf(phi + eps, alpha, beta)
    centre = log_pdf(phi, alpha, beta)
    below = log_pdf(phi - eps, alpha, beta)
    return (above - 2 * centre + below) / eps ** 2
from typing import Optional
from typing import Callable
from typing import Iterator
import uuid
import shutil
def create_map(
tag: Optional[str],
func: Callable,
args_and_kwargs: Iterator[ARGS_AND_KWARGS],
map_options: Optional[options.MapOptions] = None,
) -> maps.Map:
"""
All map calls lead ... | 6135d7dd0b6366d941ccbc8463644fb71a97f4b0 | 17,010 |
from typing import OrderedDict
def normalize_data(data, zp=25., zpsys='ab'):
"""Return a copy of the data with all flux and fluxerr values normalized
to the given zeropoint. Assumes data has already been standardized.
Parameters
----------
data : `~numpy.ndarray`
Structured array.
zp ... | 9aa3c4faf6f9a9f98afd9e11d2bafaf1b026519c | 17,011 |
import base64
def CreateMessage(sender, to, subject, message_text):
"""
Creates an object containing a base64url encoded email object.
"""
message = MIMEText(message_text)
message['to'] = to
message['from'] = sender
message['subject'] = subject
raw_message = base64.urlsafe_b64encode(message.as_bytes())
raw_m... | 8d55b64ebf4801781126f244441f619201d51190 | 17,012 |
def bg_lookup(bg_name: str) -> str:
"""Look up ANSI escape codes based on background color name.
:param bg_name: background color name to look up ANSI escape code(s) for
:return: ANSI escape code(s) associated with this color
:raises ValueError if the color cannot be found
"""
try:
ansi... | 8c520f599bc41ce847e5c602ddf8500fe366f24d | 17,013 |
def readData(f):
"""
Parse taxon count table (from count-taxon.py)
Parameters:
-----------
f : str
file name of taxon count table
Returns:
--------
tuple
a list of taxons and a list of their counts
"""
taxa_lis = []
num_lis = []
for n, line in enumerate... | fcd10e1d7dc1db0b871c7a4802f012eec43c08a9 | 17,014 |
import click
import sys
import requests
def workspace_check(func):
"""
Decorator for confirming <workspace> is defined in the CONFIG_PATH (i.e. kaos workspace set has been run).
"""
def wrapper(*args, **kwargs):
config = ConfigParser(interpolation=ExtendedInterpolation())
config.read(... | 089236e5f2f19973dab8ca419efbb588ec37e3a7 | 17,015 |
def detect_onsets_offsets(data, threshold, min_distance):
"""
detects when a when a signal jumps above zero, and when it goes back to zero
"""
on = (data > threshold) # when the data is greater than zero
left_on = np.concatenate(([0], on), axis=0)[0:-1]
onset = np.squeeze(np.where(on & (left_on... | faa81445828b72bc7d7433a4c2c8740bb36050bb | 17,016 |
def STEPConstruct_PointHasher_IsEqual(*args):
"""
* Returns True when the two keys are the same. Two same keys must have the same hashcode, the contrary is not necessary.
:param Point1:
:type Point1: gp_Pnt
:param Point2:
:type Point2: gp_Pnt
:rtype: bool
"""
return _STEPConstruct.STEP... | b3aa095d723203b05ea29ec4f5b34a70bc4c5276 | 17,017 |
def step_smooth(x):
    """Smooth cubic (smoothstep) rise from 0 at x<=0 to 1 at x>=1."""
    rising = np.square(x) * 3 - np.power(x, 3) * 2
    return np.where(x > 1, 1.0, np.where(x > 0, rising, 0.0))
def getBitSizeOfVarInt64(value):
    """Return the bit size of a variable-length 64-bit signed integer.

    :param value: Value to use for bit size calculation.
    :returns: Bit size of the value.
    """
    bit_size = _getBitSizeOfVarIntImpl(value, VARINT64_MAX_VALUES, signed=True)
    return bit_size
import functools
def nan_if_exception(func):
"""Wrap func such that np.nan is returned if func raises an exception.
KeyboardInterrupt and SystemExit are still raised.
Examples:
>>> @nan_if_exception
... def f(x, y):
... assert x + y >= 5
>>> f(1, 2)
nan
>>> def f(x, y):
... | f03c314741c47805d767fc62fbce49cda9be35fe | 17,020 |
def get_client_public_key(patient_id, study_id):
    """Fetch a user's public RSA key file from S3 and import it."""
    paths = construct_s3_key_paths(study_id, patient_id)
    raw_key = s3_retrieve(paths['public'], study_id, raw_path=True)
    return encryption.import_RSA_key(raw_key)
from typing import List
from typing import Tuple
def plot_offset_direction(
dsaimage: Image, coords: SkyCoord, ra_offsets: List[float], dec_offsets: List[float]
) -> Tuple["matplotlib.fig", "matplotlib.axes.Axes"]:
"""Plot measured offsets on an image."""
fig, ax = dsaimage.show()
dsaimage.add_arr... | 24416863b795538ee8c86fa1949f6d701b66f28d | 17,022 |
import os
def get_file_path(mdir=None) -> str:
"""
makes user select a file using a TUI. `mdir` is the main starting directory which defaults to current
working directory.
.. note::
This clears screen a lot of times and might make your app ugly but
provides user with a easy way to cho... | 09c6d49e116b732972a47359eee399e492cd9c8f | 17,023 |
def sensitivity_metric(event_id_1, event_id_2):
"""Determine similarity between two epochs, given their event ids."""
if event_id_1 == 1 and event_id_2 == 1:
return 0 # Completely similar
if event_id_1 == 2 and event_id_2 == 2:
return 0.5 # Somewhat similar
elif event_id_1 == 1 and eve... | b04c5fa27ef655dd3f371c3ce6ef0410c55dd05b | 17,024 |
def duracion_promedio_peliculas(p1: dict, p2: dict, p3: dict, p4: dict, p5: dict) -> str:
"""Calcula la duracion promedio de las peliculas que entran por parametro.
Esto es, la duración total de todas las peliculas dividida sobre el numero de peliculas.
Retorna la duracion promedio en una cadena de ... | a8cfcc96a43480ee6830cc212343a33148036c5d | 17,025 |
def _to_test_data(text):
"""
Lines should be of this format: <word> <normal_form> <tag>.
Lines that starts with "#" and blank lines are skipped.
"""
return [l.split(None, 2) for l in text.splitlines()
if l.strip() and not l.startswith("#")] | 8f0bae9f81d2d14b5654622f1493b23abd88424d | 17,026 |
import copy
def append(motion1, motion2):
"""
Combines two motion sequences into one. motion2 is appended to motion1.
The operation is not done in place.
Note that the operation places the sequences next to each other without
attempting to blend between the poses. To interpolate between the end o... | dc51812f450a072ad283173a15fb2c07ae978e5b | 17,027 |
from datetime import datetime
from typing import List
def service(
fmt: SupportedFormats,
begints: datetime = Query(
..., description="Inclusive UTC timestamp window start for issuance."
),
endts: datetime = Query(
..., description="Exclusive UTC timestamp window end for issuance."
... | eeb0a8b1187ff2386401440b6ddd812b81cd0fdd | 17,028 |
def cols_shuffled(expr_df, dist_df=None, algo="agno", seed=0):
""" Return a copy of the expr_df DataFrame with columns shuffled randomly.
:param pandas.DataFrame expr_df: the DataFrame to copy and shuffle
:param pandas.DataFrame dist_df: the distance DataFrame to inform us about distances between columns
... | 37773c5219ecc92925c155e9d911c42ddbebc8ea | 17,029 |
import os
def verify(origin_dir, real_width, real_height, image_suffix):
"""
Verifique o tamanho da imagem
:return:
"""
if not os.path.exists(origin_dir):
print("[Aviso] O diretório {} não pode ser encontrado, ele será criado em breve".format(origin_dir))
os.makedirs(origin_dir)
... | c51707aa9be9bf187c6ed19a8d534bb1085fe8e6 | 17,030 |
from typing import Dict
from typing import Any
from re import L
def build_model(task_description: Dict[str, Any]) -> Dict[str, Any]:
"""Build the predinet model."""
# ---------------------------
# Setup and process inputs
processors = {"image": process_image, "task_id": process_task_id}
mlp_inputs... | 03c7951d3fb0fddbfb1e4bad3b4e5ce54253f994 | 17,031 |
def create_unet_model(N_classes, input_shape=(None, None, 1), dropout_rate=0.24, learning_rate=1e-5):
"""
Implementation of Unet mode for multiclass semantic segmentation
:param N_classes: Number of classes of segmentation map
:param input_shape: input image shape
:param dropout_rate: dropout rate
... | 93c15376eed0c5cf1abe689ef1daca6c8877e61a | 17,032 |
import os
def extension(name: str, compile_args=(), link_args=(), include_dirs=(),
libraries=(), language='c++', **kwargs):
"""Build standard Cython extension."""
path = os.path.sep.join(['src', *name.split('.')]) + '.pyx'
include_dirs = ['include', *include_dirs]
return Extension(name,... | 09548bb188b1b858a52bb69a7a8b8b8e66d6df15 | 17,033 |
from uutils.torch_uu.models.learner_from_opt_as_few_shot_paper import get_default_learner
from typing import Optional
def get_5cnn_model(image_size: int = 84,
bn_eps: float = 1e-3,
bn_momentum: float = 0.95,
n_classes: int = 5,
filter_size: i... | 6b3e21e33433102b16b70c88dd5d033e1f069b86 | 17,034 |
def valid_extract_input_specification(instance_of_property, depth, language_code, named_entity_label):
""" Checks if the input for the extraction is valid. Both to help
the user get correct input and to sanitize it to avoid
attacks as the values are used to generate filenames.
"""
patte... | c71f744fef82e54ca2fad0ea64c8637692256299 | 17,035 |
def get_company_data(mid):
"""Looks up stock ticker information for a company via its Freebase ID."""
query = MID_TO_TICKER_QUERY % mid
bindings = make_wikidata_request(query)
if not bindings:
if mid:
print("%s No company data found for MID: %s" % (WARNING, mid))
return Non... | 464b9ef795938b2d83fd6a629b9af09ff165a922 | 17,036 |
def embed_data_into_square_lattice(data):
"""Insert MR image into square 2D array."""
dims = np.array(data.shape)
offset_x = int((dims.max() - dims[0]) / 2.)
offset_y = int((dims.max() - dims[1]) / 2.)
temp = np.zeros((dims.max(), dims.max()))
temp[offset_x:offset_x+dims[0], offset_y:offset_y+... | e701e871b4df9f4085b2548ad1e10f93ce33bf38 | 17,037 |
def is_partial_link_text_selector(selector):
"""
A basic method to determine if a selector is a partial link text selector.
"""
if (
selector.startswith("partial_link=")
or selector.startswith("partial_link_text=")
or selector.startswith("partial_text=")
or selector.start... | 4f21143173e46ed273ca719ea1aac8489afa2395 | 17,038 |
def scell(obj, dims, method=1, **kwds):
"""Build supercell based on `dims`.
Uses coords_frac and cell.
Parameters
----------
obj : Structure or Trajectory
dims : tuple (nx, ny, nz) for a N = nx * ny * nz supercell
method : int, optional
Switch between numpy-ish (1) or loop (2) impl... | e0cf7e03323c5994d0c56ba171d168aed105cfda | 17,039 |
def create_config(device: str = 'CPU', *,
per_process_gpu_memory_fraction: float = 0.0,
log_device_placement: bool = False) -> tf.ConfigProto:
    """Creates tf.ConfigProto for the specified device"""
config = tf.ConfigProto(log_device_placement=log_device_placement)
if is_gpu(devi... | 0b6f351bcad2d816d6c03896ed60223cf2bb90c9 | 17,040 |
from typing import Union
from typing import Dict
def format_childproc(cp: Union[Event, Dict]):
"""Format childproc event into single line."""
return f" @{as_configured_timezone(cp.get('event_timestamp'))}: {cp.get('childproc_cmdline')} - {cp.get('childproc_process_guid')}" | f233a0fca52fdbef8d7ed0177772c9a8d196ec0d | 17,041 |
import os
def create_not_mnist_doubleset() -> (list, list):
"""
A function which iterates through notMNIST images and sorts into two lists of images and arrays.
:return x: images as ndarrays
:return y: labels of images
"""
try:
with np.load("./notMNIST_all/all_data.npz") as f:
... | bf1766f41f7ee659151a7185bed84b45faf6b7b0 | 17,042 |
def format_maven_jar_dep_name(group_id, artifact_id, repository = DEFAULT_REPOSITORY_NAME):
    """Build the Bazel label for a fetched maven jar.

    group_id: str
    artifact_id: str
    repository: str = "maven"
    """
    jar_name = format_maven_jar_name(group_id, artifact_id)
    return "@%s//:%s" % (repository, jar_name)
import ctypes
def PCO_GetCameraName(handle):
"""
This function retrieves the name of the camera.
"""
f = pixelfly_dll.PCO_GetCameraName
f.argtypes = (ctypes.wintypes.HANDLE, ctypes.c_char_p, ctypes.wintypes.WORD)
f.restype = ctypes.c_int
cameraName = ctypes.create_string_buffer(41)
re... | f704f2a875f29f0876553c631de032d25b5166f4 | 17,044 |
import sys
def import_object(absolute_name):
"""
根据名字 import 对象
:param absolute_name: 按照 module:name 的格式
:return: 返回对应对象
"""
try:
module_name, obj_name = absolute_name.split(':')
module = sys.modules.get(module_name, None)
if not module:
module = import_modu... | 02de503c436fdd64ce2cd1530b36c663e7ccab9b | 17,045 |
def issingleton(var):
    """Return True for a unit set; otherwise defer to isscalar(var).

    A "singleton" here means either a one-element (unit) set or a scalar.
    """
    # NOTE(review): isunitset/isscalar are presumably defined elsewhere
    # in this module — confirm against the full file.
    return True if isunitset(var) else isscalar(var)
def satisfiesF(L):
"""
Assumes L is a list of strings
Assume function f is already defined for you and it maps a string to a Boolean
Mutates L such that it contains all of the strings, s, originally in L such
that f(s) returns True, and no other elements. Remaining elements in L
... | 429c385f51ba254fff7170f4e69725cc98c8b337 | 17,047 |
def calc_circle_radius(area: float) -> float:
    """
    Calculate radius from area.
    >>> calc_circle_radius(10.0)
    1.7841241161527712
    """
    assert not area < 0  # negative areas are invalid
    squared_radius = area / np.pi
    radius = numpy_to_python_type(np.sqrt(squared_radius))
    assert isinstance(radius, float)
    return radius
import tqdm
def load_imgs(paths, target_size):
"""Load images from `paths`."""
pairs = np.empty((len(paths), 2, *target_size), dtype=np.float32)
for i, row in tqdm(paths.iterrows(), total=len(pairs)):
img1 = img_to_array(load_img(row.p1, target_size=target_size)) / 255
img2 = img_to_array(... | b91b86bcae29a6bf2d1227a25a2b8297c6be1734 | 17,049 |
def load_dict_from_hdf5(h5_filepath):
"""
Load h5 file as a dict
"""
def recursively_load_dict_contents_from_group(h5_obj, path):
"""
Recursively load a dict from h5 file
"""
ans = {}
for key, item in h5_obj[path].items():
if isinstance(item, h5py._hl.... | 2339cc6edb83ed59fb43ec49503d86758d37d83e | 17,050 |
def interactive_grid_shape(grid, max_n=200, plotfxn=None, **kwargs):
""" Interactive ipywidgets for select the shape of a grid
Parameters
----------
grid : pygridgen.Gridgen
The base grid from which the grids of new shapes (resolutions) will be
generated.
max_n : int (default = 200)... | ef126f39f8433a65deb22e09ea825f342a38bea1 | 17,051 |
from lgsvl.utils import transform_to_forward
from typing import Optional
def generate_initial_state(initial_pos: Transform, initial_speed: Optional[float] = None) -> AgentState:
"""
:param initial_speed: Initial speed in km/h
"""
movement = AgentState()
movement.transform = initial_pos
if init... | 30906410d3fe92b84f3d2c93a49db24f90a8ec8b | 17,052 |
def resolve_ami(ami=None, arch="x86_64", tags=frozenset(), tag_keys=frozenset()):
"""
Find an AMI by ID, name, or tags.
- If an ID is given, it is returned with no validation; otherwise, selects the most recent AMI from:
- All available AMIs in this account with the Owner tag equal to this user's IAM us... | 32495fb78a611f57b0e025b0ff68b51a190c7297 | 17,053 |
def _filter_colors(hcl, ihue, nhues, minsat):
"""
Filter colors into categories.
Parameters
----------
hcl : tuple
The data.
ihue : int
The hue column.
nhues : int
The total number of hues.
minsat : float
The minimum saturation used for the "grays" column... | f7ca00bdd17766c859b5262c1b9ae12187c23222 | 17,054 |
def SWO(
directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
load_edge_weights = True, auto_enable_tradeoffs = True,
sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
cache_sys_var = "GRAPH_CACHE_DIR", version = "swo.owl", **kwargs
) -> Graph:
"""Return... | 39936ffcd272e0d6c6c7e5510699bac68a465eb9 | 17,055 |
def upload_needed_files (handle, bucket, prefix, dir_path, kind, iter):
"""
upload the needed local files of a particular kind
"""
extension = f".{kind}"
count = 0
for uuid in iter:
file_name = uuid + extension
local_path = dir_path / file_name
grid_path = prefix + "/pub... | 9357e991eb14eaf9de54beda3ec86defc3e1ecaf | 17,056 |
def detect_tag(filename):
"""Return type and position of ID3v2 tag in filename.
Returns (tag_class, offset, length), where tag_class
is either Tag22, Tag23, or Tag24, and (offset, length)
is the position of the tag in the file.
"""
with fileutil.opened(filename, "rb") as file:
file.seek(... | 5b32c122d804aa5def21c59e73e4369c64b7cbbe | 17,057 |
def merge_peaks(peaks, start_merge_at, end_merge_at,
max_buffer=int(1e5)):
"""Merge specified peaks with their neighbors, return merged peaks
:param peaks: Record array of strax peak dtype.
:param start_merge_at: Indices to start merge at
:param end_merge_at: EXCLUSIVE indices to end me... | 75f86b0c27cb2cac145234cfd9254105048be9a8 | 17,058 |
def batchedpatternsgenerator(generatorfunction):
"""Decorator that assumes patterns (X,y) and stacks them in batches
This can be thought of a specialized version of the batchedgenerator
that assumes the base generator returns instances of data patterns,
as tuples of numpy arrays (X,y). When groupin... | 19a8e8d5c2872c38d469c41e163a947f208fc806 | 17,059 |
def reduce_min(raw_tensor, axis, keepdims=False):
"""
calculate reduce_min of raw_tensor, only support float16
Args:
raw_tensor (tvm.tensor.Tensor): input tensor
axis (Union[int, list]): reduce axis (range : [-len(raw_tensor.shape), len(raw_tensor.shape) - 1])
keepdims (bool): if tr... | b4473ca577a939f3c149758fd73a59c79b1f0db0 | 17,060 |
def align2local(seq):
    """
    Map alignment-space positions to local (ungapped) positions.

    Returns a list such that
    'ATG---CTG-CG' ==> [0, 1, 2, 2, 2, 2, 3, 4, 5, 5, 6, 7]
    (gap characters repeat the index of the last non-gap character;
    leading gaps map to -1). Used to go from align -> local space.
    """
    i = -1  # local index of the most recent non-gap character
    lookup = []
    for c in seq:
        if c != "-":
            i += 1
        lookup.append(i)
    return lookup
def load_nodegraph(filename):
    """Load a nodegraph object from the given filename and return it.

    Keyword argument:
    filename -- the name of the nodegraph file
    """
    graph = _Nodegraph(1, [1])
    graph.load(filename)
    return graph
def get_bprop_sqrt(self):
"""Grad definition for `Sqrt` operation."""
mul_func = P.Mul()
fill_func = P.Fill()
div_op = P.RealDiv()
sqrt = P.Sqrt()
dtype = P.DType()
def bprop(x, out, dout):
temp = div_op(fill_func(dtype(x), shape_op(x), 0.5), sqrt(x))
dx = mul_func(dout, tem... | b297695effd9d063384b3343337d1647050b5f1a | 17,063 |
def classify_top1_batch(image):
"""Define method `classify_top1` for servable `resnet50`.
The input is `image` and the output is `lable`."""
x = register.add_stage(preprocess_batch, image, outputs_count=1, batch_size=1024)
x = register.add_stage(resnet_model, x, outputs_count=1)
x = register.add_st... | ff4ae67619f29e0e22e275845709ab73daabe2f0 | 17,064 |
def ngram_word(max_features=2_000):
    """Word count vectorizer over uni-, bi- and tri-grams.

    Args:
        max_features: number of features to consider.
    """
    vectorizer = CountVectorizer(
        analyzer='word',
        ngram_range=(1, 3),
        max_features=max_features,
    )
    return vectorizer
def func_dispatcher(intent):
    """
    Simple effect dispatcher for intents that are callables taking a box:
    the returned performer just invokes the intent with the given box.
    """
    def _performer(dispatcher, intent, box):
        intent(box)
    return _performer
import os
def get_system_path():
    """Get the system PATH as a list of entries.

    Returns:
        List of names in the system path; empty list when PATH is unset.
    """
    raw = os.getenv('PATH')
    return raw.split(os.pathsep) if raw else []
import multiprocessing
import tqdm
def encode(x, bps_arrangement='random', n_bps_points=512, radius=1.5, bps_cell_type='dists',
verbose=1, random_seed=13, x_features=None, custom_basis=None, n_jobs=-1):
"""Converts point clouds to basis point set (BPS) representation, multi-processing version
Para... | 66edc2dd5d42fe53e55f2e5b95e2069123510006 | 17,068 |
def parsec_params_list_to_dict(var):
"""
convert parsec parameter array to dictionary
:param var:
:return:
"""
parsec_params = dict()
parsec_params["rle"] = var[0]
parsec_params["x_pre"] = var[1]
parsec_params["y_pre"] = var[2]
parsec_params["d2ydx2_pre"] = var[3]
parsec_para... | 4ea4b4d2c0cbcb8fb49619e103b09f354c80de6a | 17,069 |
def parse_msiinfo_suminfo_output(output_string):
"""
Return a dictionary containing information from the output of `msiinfo suminfo`
"""
# Split lines by newline and place lines into a list
output_list = output_string.splitlines()
results = {}
# Partition lines by the leftmost ":", use the s... | 6883e8fba9a37b9f877bdf879ebd14d1120eb88a | 17,070 |
from typing import Dict
from typing import Any
import json
from datetime import datetime
def create_indicators_fields(tag_details: Dict[str, Any]) -> Dict[str, Any]:
"""
Returns the indicator fields
Args:
tag_details: a dictionary containing the tag details.
Returns:
A dictionary repre... | 349ab542d2c25cb24fe40aeb98c16a9bfccc871f | 17,071 |
def spatial_difference(gdf1: GeoDataFrame, gdf2: GeoDataFrame) -> GeoDataFrame:
"""Removes polygons from the first GeoDataFrame that intersect with polygons from the second GeoDataFrame
:param gdf1: First input data frame
:param gdf2: Second input data frame
:return: Resulting data frame
"""
gd... | 2713376f45ed574399f9f406a06a60a47f002579 | 17,072 |
def frustumShellIxx(rb, rt, t, h, diamFlag=False):
"""This function returns a frustum's mass-moment of inertia (divided by density) about the
transverse x/y-axis passing through the center of mass with radii or diameter inputs.
NOTE: This is for a frustum SHELL, not a solid
INPUTS:
Parameters
... | 3d23805d4f7ed952b50752ac4ab8158c2826137f | 17,073 |
def default_shaders():
    """Return a list with all the default shaders of the current DCC.

    :return: str
    """
    return shader_utils.get_default_shaders()
def get_domains_and_slugs():
"""
returns all the domain names and slugs as dictionary
{domain_name: slug}
"""
return_data = {}
domain_slugs = Domain.objects.filter(active=1).order_by('name')
if domain_slugs:
for domain in domain_slugs:
return_data[domain.name] = domain.sl... | d19af879fe96895808f1c1815d3cc563499d358d | 17,075 |
def has_video_ads() -> bool:
    """has_video_ads() -> bool

    (internal) Stub that always reports no video ads.
    `return False` replaces the original `return bool()`, which
    constructs the identical value less clearly.
    """
    return False
def calculate_purchasing_plan(total_days, sellers, starting_bread=10, best_before_date=30, debug = False):
"""
total_days : positive int
sellers : list of tuple (day, price)
starting_bread : int, optional
best_before_date : positive int, (how long the bread lasts)
debug : boolean, (prints cost m... | 474354d1316839691fe3e0bffbb1352167e8095c | 17,077 |
from typing import Iterable
def compile_sites(inp: NetInput,
y_true: Iterable[np.ndarray],
y_pred: Iterable[np.ndarray],
masks: Iterable[np.ndarray]):
"""
Prepares sites to be dumped in tsv file
:param inp: NetInput
:param y_true: True known classe... | 14f655e18b5651c22373d4c23b51f55704cd63c8 | 17,078 |
def femda_estimator(X, labels, eps = 1e-5, max_iter = 20):
""" Estimates the matrix of means and the tensor of scatter matrix of the dataset using MLE estimator.
To tackle singular matrix issues, we use regularization.
Parameters
----------
X : 2-d array of size n*m
... | 639532f9307e023561d6193730473533b240fb28 | 17,079 |
def get_collections():
"""read .db file, return raw collection"""
col = {}
f = open(collection_db, "rb")
version = nextint(f)
ncol = nextint(f)
for i in range(ncol):
colname = nextstr(f)
col[colname] = []
for j in range(nextint(f)):
f.read(2)
col[c... | b134e7e970fa7f5486226d2c2cab3c63ab9f67c3 | 17,080 |
def ot_has_small_bandgap(cp2k_input, cp2k_output, bandgap_thr_ev):
""" Returns True if the calculation used OT and had a smaller bandgap then the guess needed for the OT.
(NOTE: It has been observed also negative bandgap with OT in CP2K!)
cp2k_input: dict
cp2k_output: dict
bandgap_thr_ev: float [eV]... | fbc63c373d052111932ea0fd2cd458d59b486d10 | 17,081 |
def profile():
"""Checking if user is already logged_in"""
if 'logged_in' in session:
'''getting all the account info for the user for displaying it on the profile page'''
cursor = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
cursor.execute('SELECT * FROM accounts WHERE username =... | 4796058d3bbc911cc0610b7a5458be80fa330d67 | 17,082 |
import logging
def gather_metrics(config, worker_output, endpoint_output, container_names):
"""Process the raw output to lists of dicts
Args:
config (dict): Parsed configuration
worker_output (list(list(str))): Output of each container ran on the edge
endpoint_output (list(list(str)))... | 17263ba1e1c717f52b3d4cb05f373a54630f8e06 | 17,083 |
import time
import json
async def ping(ws):
"""Send a ping request on an established websocket connection.
:param ws: an established websocket connection
:return: the ping response
"""
ping_request = {
'emit': "ping",
'payload': {
'timestamp': int(time.time())
... | 587d2a72cbc5f50f0ffb0bda63668a0ddaf4c9c3 | 17,084 |
def split_files_each_proc(file_arr,nprocs):
""" Returns array that distributes samples across all processors. """
ntot = len(file_arr)
post_proc_file_arr = []
for i in range(0,nprocs):
each_proc_arr = []
ib,ie = split_array_old(ntot,nprocs,i)
if i... | 0c5c481d1b9a9e0d5c6efdfb7abf6669f5a05ecf | 17,085 |
def get_data_generators_for_output(output):
""" Get the data generators involved in an output
Args:
output (:obj:`Output`): report or plot
Returns:
:obj:`set` of :obj:`DataGenerator`: data generators involved in the output
"""
data_generators = set()
if isinstance(output, Repo... | d05fde5b5ce25504b53d8ca4491235d3ab3b8680 | 17,086 |
def MarkovChainFunction(data, bins):
""" Data should be numpy array; bins is an integer """
#Normalize data
datMin = min(data)
datMax = max(data)
datNorm = (data - datMin)/(datMax - datMin)
# Create Markov Transition Table:
mesh = np.linspace(0, 1, bins)
meshReal = (mesh*(datMa... | 09a73103e3dc6c3412803b58c8823ac1459921aa | 17,087 |
from re import M
def clustering_report(y_true, y_pred) -> pd.DataFrame:
"""
Generate cluster evaluation metrics.
Args:
y_true: Array of actual labels
y_pred: Array of predicted clusters
Returns:
Pandas DataFrame with metrics.
"""
return pd.DataFrame(
{
... | dc124dc4f248a2acedfd6201a205f285adc6ec1c | 17,088 |
from typing import List
from typing import Sequence
from typing import Set
from typing import Tuple
def _create_sampler_data(
datastores: List[Datastore], variables: Sequence[Variable],
preconditions: Set[LiftedAtom], add_effects: Set[LiftedAtom],
delete_effects: Set[LiftedAtom], param_option: Parameteriz... | ecf7ed06183264722df5d6e2d645bb899cb8358b | 17,089 |
def call_assign_job(job_id, mex_id):
""" Function to send an update to the MEx Sentinel to assign a Job to an MEx. """
try:
rospy.wait_for_service('/mex_sentinel/assign_job_to_mex', rospy.Duration(1))
try:
assign_job = rospy.ServiceProxy('mex_sentinel/assign_job_to_mex', AssignJobToM... | 9c5b2aa27e8d04949fbb4c5a2c9eb2ac86ccd9a7 | 17,090 |
def full(
coords, nodata=np.nan, dtype=np.float32, name=None, attrs={}, crs=None, lazy=False
):
"""Return a full DataArray based on a geospatial coords dictionary.
Arguments
---------
coords: sequence or dict of array_like, optional
Coordinates (tick labels) to use for indexing along each d... | 41bb4fce22a8dd280dee0d4891ff81bd88d263b5 | 17,091 |
import os
def calculate_alignment(
sequences, mode, matrix, gapopen, gapextend, hash=uuid4().hex):
"""
1 - remove modifications
2 - convert sequence
3 - muscle - msa
4 - revert sequences
5 - add original modifications
"""
new_file_lines = []
for i, element in enumerate(sequ... | 93851e4e2afd9fd88665fc418d0ef680dc41847f | 17,092 |
from typing import Union
from typing import List
from typing import Tuple
from typing import Dict
def add_weight_decay(
model: nn.Module, weight_decay: float = 1e-5, skip_list: Union[List, Tuple] = ()
) -> List[Dict]:
"""Helper function to not decay weights in BatchNorm layers
Source: https://discuss.pyto... | 27efae02eaaf0bdc94f3763c1069165c47e08acb | 17,093 |
def find_bands_hdu(hdu_list, hdu):
"""Discover the extension name of the BANDS HDU.
Parameters
----------
hdu_list : `~astropy.io.fits.HDUList`
hdu : `~astropy.io.fits.BinTableHDU` or `~astropy.io.fits.ImageHDU`
Returns
-------
hduname : str
Extension name of the BANDS HDU. N... | 3b170109d199482c651861764b0ec21a44aa7933 | 17,094 |
def read_raw_binary_file(file_path):
    """Return the entire contents of a file as raw bytes.

    Works on any file, not only binary ones.
    """
    with open(file_path, 'rb') as handle:
        return handle.read()
def encode_cl_value(entity: CLValue) -> dict:
"""Encodes a CL value.
"""
def _encode_parsed(type_info: CLType) -> str:
if type_info.typeof in TYPES_NUMERIC:
return str(int(entity.parsed))
elif type_info.typeof == CLTypeKey.BYTE_ARRAY:
return entity.parsed.hex()
... | 09d75f9552347e4fd121dcd1a57f26ac46756870 | 17,096 |
def escape(string):
""" Escape a passed string so that we can send it to the
regular expressions engine.
"""
ret = None
def replfunc(m):
if ( m[0] == "\\" ):
return("\\\\\\\\")
else:
return("\\\\" + m[0])
# @note - I had an issue getting replfunc to be ca... | c2682757fec2ddaefb32bb792fee44dd63c539fd | 17,097 |
def batch_apply(fn, inputs):
"""Folds time into the batch dimension, runs fn() and unfolds the result.
Args:
fn: Function that takes as input the n tensors of the tf.nest structure,
with shape [time*batch, <remaining shape>], and returns a tf.nest
structure of batched tensors.
inputs: tf.nest s... | 4cc220a7891f236dc6741e9c203862c5ee33e978 | 17,098 |
def grab_haul_list(creep: Creep, roomName, totalStructures, add_storage=False):
"""
위에 허울러가 에너지를 채울 목록 확인.
:param creep:
:param roomName: 방이름.
:param totalStructures: 본문 all_structures 와 동일
:param add_storage: 스토리지를 포함할 것인가? priority == 0 인 상황 아니면 포함할일이 없음.
:return: 허울러의 에너지 채울 대상목록
"""... | d1d944c221089363a7e546bdc03dd51cd178fc35 | 17,099 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.