content stringlengths 35 762k | sha1 stringlengths 40 40 | id int64 0 3.66M |
|---|---|---|
def get_duplicated_members(first_name, last_name):
"""同じ名前の持つメンバーが存在するかどうか
:param first_name:
:param last_name:
:return:
"""
first_name = first_name.strip() if first_name else None
last_name = last_name.strip() if last_name else None
queryset = models.Member.objects.filter(
firs... | f7160d1a710b123e62a5e0ebf0d4e973303f4c2b | 9,200 |
import logging
def get_oauth_id():
    """Return the user's email ID if an OAuth token is present, else None.

    Returns:
        str or None: the email address of the current OAuth user, or None
        when no valid token is present (the failure is logged).
    """
    try:
        user_email = oauth.get_current_user(SCOPE).email()
    except oauth.Error as e:
        user_email = None
        # Lazy %-style args: the message is only formatted if it is emitted.
        logging.error('OAuth failure: %s', e)
    return user_email
import re
def check_for_publication(form, formsets, user_data):
"""
Run additional validation across forms fields for status LILACS-Express and LILACS
"""
valid = valid_descriptor = valid_url = True
# regex match starts with S (Serial) and ends with (as) analytic
regex_sas = r"^S.*as$"
Sas... | c36364c75f97c7eb299611471df1ff29e9837bfd | 9,202 |
def _generate_var_name(prefix, field_name):
"""
Generate the environment variable name, given a prefix
and the configuration field name.
Examples:
>>> _generate_var_name("", "some_var")
"SOME_VAR"
>>> _generate_var_name("my_app", "some_var")
"MY_APP_SOME_VAR"
:param prefix: the pr... | 9065d1deb76789582e68df779ec2c961a7d4aedc | 9,203 |
def VelocityPostProcessingChooser(transport):
"""
pick acceptable velocity postprocessing based on input
"""
tryNew = True
velocityPostProcessor = None
if transport.conservativeFlux is not None:
if (transport.mesh.parallelPartitioningType == 0 and transport.mesh.nLayersOfOverlap==0): #el... | 44484d2b1f35ac865d9b5a1d53a62f4234bea4ee | 9,204 |
def get_node_hierarchical_structure(graph: nx.Graph, node: str, hop: int):
"""
explore hierarchical neighborhoods of node
"""
layers = [[node]]
curLayer = {node}
visited = {node}
for _ in range(hop):
if len(curLayer) == 0:
break
nextLayer = set()
for neigh... | 132db2de60459ea41a142ae17e5ad08fb325692c | 9,205 |
def svn_utf_cstring_from_utf8_string(*args):
    """svn_utf_cstring_from_utf8_string(svn_string_t src, apr_pool_t pool) -> svn_error_t"""
    # Auto-generated SWIG wrapper: forwards all arguments unchanged to the
    # native binding in _core. Do not edit by hand.
    return _core.svn_utf_cstring_from_utf8_string(*args)
def fitness_sum(element):
    """Toy fitness function: the total of all entries in *element*."""
    total = np.sum(element)
    return total
def Stepk(k, basetree=[]): # XXX. make sure basetree is passed as expected.
"""Try to solve the puzzle using assumptions.
k --> The step number. (1st step is solving exactly,
2nd step is solving using 1 assumption,
3rd step is solving using 2 assumptions and so on.)
Note: The assumpt... | cbf28b995deee1ff3432c46d3e48cf9b0c8fd31a | 9,208 |
def load_handler(path, *args, **kwargs):
"""
Given a path to a handler, return an instance of that handler.
E.g.::
>>> load_handler('anthill.framework.core.files.uploadhandler.TemporaryFileUploadHandler', request)
<TemporaryFileUploadHandler object at 0x...>
"""
return import_string... | 26a9b3ebaa0ab2362a9a1ab977281c25334e0d9c | 9,209 |
def validate_twilio_request():
"""Ensure a request is coming from Twilio by checking the signature."""
validator = RequestValidator(current_app.config['TWILIO_AUTH_TOKEN'])
if 'X-Twilio-Signature' not in request.headers:
return False
signature = request.headers['X-Twilio-Signature']
if 'SmsS... | bb35e83223ac8530a6da8fed581ba5cbc8afe47e | 9,210 |
def paper_selection(text=[], keywords=[]):
"""
This function calculates the similarity between keywords or phrases relating a text. So it is possible to compare
several texts and keywords in once to see which text is the best relating special keywords. Also a plot is
generated, where it is possible to s... | ac6d16ac183f081ef193bf43e782019c38c04106 | 9,211 |
import os
def list_subpackages(package_trail,verbose=False):
""" package_trails = list_subpackages(package_trail)
returns a list of package trails
Inputs:
package_trail : a list of dependant package names, as strings
example: os.path -> ['os','path']
Outputs:
... | b1aebf9a87041da92ac5e2eee9d4e668ab88839d | 9,212 |
def _get_out_of_bounds_window(radius, padding_value):
"""Return a window full of padding_value."""
return padding_value * np.ones((2 * radius + 1, 2 * radius + 1), dtype=int) | 0dacf7d63f5e0be21deb92f02fe3b76bd201b5ec | 9,213 |
import sys
import io
def open_fw(file_name, encoding=ENCODING, encode=True):
"""Open file for writing respecting Python version and OS differences.
Sets newline to Linux line endings on Python 3
When encode=False does not set encoding on nix and Python 3 to keep as bytes
"""
if sys.version_info >... | 296bde81f1af70d861be641ec698deba58958915 | 9,214 |
import logging
def covid_API_request(
location: str = "Exeter",
location_type: str = "ltla") -> dict[str]:
"""Requests current COVID data from the Cov19API for a given area.
Uses the Cov19API to request the most recent COVID data for
a given area. Returns data as a list of comma separated... | 5b931e3d30f51ff64fc206cf5d30f7fd925d2b78 | 9,215 |
def resize(img, height, width, is_flow, mask=None):
"""Resize an image or flow field to a new resolution.
In case a mask (per pixel {0,1} flag) is passed a weighted resizing is
performed to account for missing flow entries in the sparse flow field. The
weighting is based on the resized mask, which determines t... | 9d0543a88382028522ae469fc773dcebc006b5c3 | 9,216 |
def num_decodings2(enc_mes):
"""
:type s: str
:rtype: int
"""
if not enc_mes or enc_mes.startswith('0'):
return 0
stack = [1, 1]
for i in range(1, len(enc_mes)):
if enc_mes[i] == '0':
if enc_mes[i-1] == '0' or enc_mes[i-1] > '2':
# only '10', '20' ... | ae4ff7181e34003dcc7ec264ed2727bc716708a5 | 9,217 |
def spot2Cmyk(spot, default=None):
"""Answers the CMYK value of spot color. If the value does not exist,
answer default of black. Note that this is a double conversion:
spot-->rgb-->cmyk
>>> '%0.2f, %0.2f, %0.2f, %0.2f' % spot2Cmyk(300)
'0.78, 0.33, 0.00, 0.22'
>>> # Nonexistent spot colors ma... | 307c8e934cdac2f5fb857e8f8f122c9862adab6d | 9,218 |
import re
def clean(text):
"""
Removes irrelevant parts from :param: text.
"""
# Collect spans
spans = []
# Drop HTML comments
for m in comment.finditer(text):
spans.append((m.start(), m.end()))
# Drop self-closing tags
for pattern in selfClosing_tag_patterns:
for... | 0e942e36035d2129ca0be814268e7c6e4552435e | 9,219 |
from dateutil import tz
from datetime import datetime
import numpy
def get_offset(t0,t1,zone,station,gps):
"""
Determine UTC to local Local offset to be applied.
Parameters
----------
t0 : datetime
Starting timestamp
t1 : datetime
End timestamp
zone : str
Define timi... | f6ed5f50528a67735097abf17d3039008a61b547 | 9,220 |
def my_render_template(html, **arguments):
"""Call render_template with comparison_types as one of the arguments.
:param string html: name of the template
:param **arguments: other arguments to be passed while rendering template
"""
arguments.setdefault(
'comparison_types', ComparisonType.g... | 0a639a9dd8cef8c0cc659444d32138acf9a43e41 | 9,221 |
def find_or_create_qualification(qualification_name, description,
must_be_owned=True):
"""Query amazon to find the existing qualification name, return the Id. If
it exists and must_be_owned is true but we don't own it, this prints an
error and returns none. If it doesn't exi... | 92855fbaee2c1f5d190b2c4cd67078b07c6f4e51 | 9,222 |
import torch
def hard_to_soft(Y_h, k):
"""Converts a 1D tensor of hard labels into a 2D tensor of soft labels
Source: MeTaL from HazyResearch, https://github.com/HazyResearch/metal/blob/master/metal/utils.py
Args:
Y_h: an [n], or [n,1] tensor of hard (int) labels in {1,...,k}
k: the larges... | d31c6749569e19cbbdd91c59e66982497190163d | 9,223 |
def find_negations(doc, neg_comma=True, neg_modals=True, debug=False):
"""
Takes as input a list of words and returns the positions (indices) of the words
that are in the context of a negation.
:param list doc: a list of words (strings)
:param bool neg_comma: if True, the negation context ends on a... | 7b609949c3f2ea22887147e6bb13ad41de71bba3 | 9,224 |
from typing import List
from sys import getrecursionlimit
from sys import setrecursionlimit
def int_pow(base: int, power: int, modulus: int=None, safe: bool=True):
"""
Calculate `base` raised to `power`, optionally mod `modulus`
The python standard library offers the same functionality,
and this funct... | d578449c45a0b10d19a521b95866b8cc0025df43 | 9,225 |
import decimal
def truncate_decimal_places(value: decimal.Decimal, places: int = 1) -> float:
"""
Truncate a float (i.e round towards zero) to a given number of decimal places.
NB: Takes a decimal but returns a float!
>>> truncate_decimal_places(12.364, 1)
12.3
>>> round_decimal_places(-12.... | 11b924a5e4f6560674b1f7378f6a4001a3265a97 | 9,226 |
def site_url(self, url):
"""
Return the fully qualified URL for the given URL fragment.
"""
try:
# In Django < 1.9, `live_server_url` is decorated as a `property`, but
# we need to access it on the class.
base_url = self.testclass.live_server_url.__get__(self.testclass)
exce... | 4f82cc766d0144fb11e897e7c9ceba57a6881f23 | 9,227 |
def myisinteger(
        num: float) -> bool:
    """Return True when *num* has no fractional part.

    Examples: 4 -> True, 4.0 -> True, 4.2 -> False.

    Note: the original returned the ints 1/0 despite declaring ``-> bool``,
    and annotated the parameter as ``int`` (which made the check trivially
    true); it now accepts any real number and returns an actual bool.
    """
    return num == floor(num)
def _get_timeunit(min_time: pd.Timestamp, max_time: pd.Timestamp, dflt: int) -> str:
"""Auxillary function to find an appropriate time unit. Will find the
time unit such that the number of time units are closest to dflt."""
dt_secs = {
"year": 60 * 60 * 24 * 365,
"quarter": 60 * 60 * 24 * 9... | 96b1a036bdb64b9c684ed8ac9123cf788ddc189d | 9,229 |
import pathlib
import sys
def get_resource_path(relative_path):
"""
relative_path = "data/beach.jpg"
relative_path = pathlib.Path("data") / "beach.jpg"
relative_path = os.path.join("data", "beach.jpg")
"""
rel_path = pathlib.Path(relative_path)
dev_base_path = pathlib.Path(__file__).resolv... | becad13eb95d988b49ea7ef141e9c3436379af6e | 9,230 |
def data_sample(df, x, y, group_number, quantile):
"""
分组选点法
x: 分组变量
y: 取值变量
"""
group_width = (np.max(df[x]) - np.min(df[x])) / group_number # 分组宽度
x_group = np.arange(np.min(df[x]), np.max(df[x]), group_width) # 分组的X
# 选取每组中设定的分位数的点, 对点数大于零的组选点
if len(quantile) == 3:
data... | 9be1ec948f9d427f7b6136b0c4f6bf5622be5843 | 9,231 |
def index(request):
""" Shows all challenges related to the current user """
profile = request.user.get_profile()
chall_user = profile.get_extension(ChallengeUser)
challs = ChallengeGame.get_active(chall_user)
played = ChallengeGame.get_played(chall_user)[:10]
if not chall_user.is_eligible():
... | b649f74777eedd1093e884f71949cd43c7a215ad | 9,232 |
def hansen(threshold, geojson, begin, end, logger):
"""For a given threshold and geometry return a dictionary of ha area.
The threshold is used to identify which band of loss and tree to select.
asset_id should be 'projects/wri-datalab/HansenComposite_14-15'
Methods used to identify data:
Gain band ... | f7d43c8a0d5c8869232d53b2f625c2568de3a1b0 | 9,233 |
def rectangluarMask(image):
"""
this function will take an image as an input and created a rectangluar mask(image sized) and in the center of canvas
"""
mask = np.zeros(image.shape[:2], dtype = 'uint8')
(cX, cY) = (image.shape[1]//2, image.shape[0]//2)
cv2.rectangle(mask, (cX-75, cY-75), (cX+75, cY+75), 255, -1)
... | df5ae1e31eb259bc02ff75282d2dea2b4a7f547b | 9,234 |
from datetime import datetime
def get_artist_listen_for_change_streams(artist: Artist=None):
"""
Computation steps:
1. Define start and end dates
2. Create stream filters for the current artist
3. aggregate the streams from the Model
4. Return just the number (maybe a dict idk)
"""
# ... | c0620809e7ebf10138e3c6c93520787c30efa4f9 | 9,235 |
def flip_dict(d):
    """Return a new dict with the keys and values of *d* swapped.

    Args:
        d: The dict to invert. Values must be hashable; duplicate values
           collapse to the last key encountered, as with any dict build.

    Returns:
        A dict mapping each value of *d* back to its corresponding key.
    """
    return dict((value, key) for key, value in d.items())
import subprocess
def branch_exists(branch: str) -> bool:
""" Check if the branch exists in the current Git repo. """
try:
subprocess.check_call(
["git", "rev-parse", "--quiet", "--verify", branch],
stdout=subprocess.DEVNULL,
)
return True
except subprocess.... | 352adba56d824fff29bf5c91788a1154bea64f1b | 9,237 |
def has_sample(args):
    """Report whether any kind of sample id was supplied in *args*.

    Mirrors a chained ``or``: yields the first truthy candidate, otherwise
    the (falsy) value of ``args.sample_tag``.
    """
    for candidate in (args.sample, args.samples):
        if candidate:
            return candidate
    return args.sample_tag
import sys
def get_search_selection(config: models.Config) -> models.Config:
"""Gets search criteria for search mode"""
search_selection: models.SearchSelection = models.SearchSelection()
print('\nPlease select what system you want to search')
print('Press Enter to do a general site wide search')
... | 9e84a8aad45b53df9312d2e0ae03fa94b9496ff9 | 9,239 |
from typing import Callable
from typing import Any
from typing import Type
import inspect
def make_key_type(func: Callable[..., Any]) -> Type[CallKey]:
"""Construct a type representing a functions signature."""
sig = inspect.signature(func)
# make a format string that unpacks and names the parameters nic... | 9f6ab0a5ac20fcc69518f24669035a6b7c6246b6 | 9,240 |
def gen_string(prop=None):
"""
Generate String value
:param prop: dict
Examples: {'minLength': 10, 'maxLength': 154}
{'pattern': '^\\d+\\w*$'}
"""
if not prop:
prop = {}
min_length = prop.get("minLength", 1)
max_length = prop.get("maxLength", 1024)
pattern = pro... | 6a7a51712b2f8a47711e76901d5f425226f9e2ef | 9,241 |
def standardize_data(data, eps=None):
"""
Standardize each image data to have zero mean and unit standard-deviation (z-score)
Inputs:
data: [np.ndarray] unnormalized data
Outputs:
data: [np.ndarray] normalized data
"""
if eps is None:
eps = 1.0 / np.sqrt(data[0,...].size)
data, orig_shape = re... | 3267ed737bfb35f08daa91040d87698e157d6ccf | 9,242 |
def _get_transmission(self,d,E='config'):
    """Transmission after passing through thickness d (m) of the material at energy E (eV)."""
    # d is in metres while absorption_length appears to work in micrometres,
    # hence the 1e6 factor — TODO confirm against absorption_length's units.
    attenuation = d * 1e6 / self.absorption_length(E)
    return np.exp(-attenuation)
def calculate_density(
input_layer,
field=None,
cell_size=None,
cell_size_units="Meters",
radius=None,
radius_units=None,
bounding_polygon_layer=None,
area_units=None,
classification_type="EqualInterval",
num_classes=10,
output_name... | 271d1d50cd362f8e660de4ac93cef8a6cb43d967 | 9,244 |
def reverse(operation):
    """Decorator: negate the SQLAlchemy clause produced by *operation* via sqlalchemy.not_."""
    def decorated(*args, **kwargs):
        clause = operation(*args, **kwargs)
        return sqlalchemy.not_(clause)
    return decorated
def disable_static_generator(view_func):
"""Decorator which prevents caching the response from a view on disk
Flags the view with a ``disable_static_generator`` attribute so
staticgenerator won't ever save its response on the filesystem.
Example::
@disable_static_generator
def myview(... | 5ad9dff33b1340d909467dcada90a43c1cc7618d | 9,246 |
import tqdm
def create_lengths(text):
    """Create a data frame of the sentence lengths from a text.

    Args:
        text: iterable of sized items (e.g. tokenised sentences).

    Returns:
        pd.DataFrame with a single ``counts`` column holding each length.
    """
    # Bug fix: the file does `import tqdm`, which binds the *module*;
    # calling it raised TypeError. The progress wrapper is tqdm.tqdm.
    lengths = [len(sentence) for sentence in tqdm.tqdm(text)]
    return pd.DataFrame(lengths, columns=['counts'])
def basic_auth_string(key, value):
    """Build an HTTP Basic authentication header value from *key* and *value*."""
    credentials = _to_bytes(key) + b":" + _to_bytes(value)
    token = b64encode(credentials).decode()
    return f"Basic {token}"
from .reader import get_file_hashes
import traceback
import traceback
import os
import argparse
import sys
import yaml
def main(argv = None):
"""
Main function for the ``amplimap`` executable. This function:
- parses command line arguments
- reads, merges and checks each of these config files, if the... | ce59067d09976d9e16014acf395151e1ac42aa00 | 9,249 |
import functools
def measureit(_func=None, *, output: Output = None, number: int = 1):
"""
Measure the energy consumption of monitored devices during the execution of the decorated function (if multiple runs it will measure the mean energy)
:param output: output instance that will receive the power consu... | bb28c7423f5d2a479de052554f68d6c99494180d | 9,250 |
def csv_template(n_types, n_type_covariates, initialize_coeffs=True):
"""Creates a template for the parameter specification.
Parameters
----------
n_types : int, optional
Number of types in the model. Default is one.
n_type_covariates : int, optional
Number of covariates to predict ... | d211373b1939242600b0c5c15a30b16f58eab229 | 9,251 |
import time
import os
import scipy
def get_surround(ppath, recordings, istate, win, signal_type, recalc_highres=False,
tstart=0, tend=-1, ma_thr=20, ma_state=3, flatten_tnrem=False,
nsr_seg=2, perc_overlap=0.95, null=False, null_win=[0.5,0.5],
p_iso=0, pcluster=0, ... | adee4efc854002570f6a6151e3f86967b94a1bf5 | 9,252 |
import os
def load_quantized_bert_base(batch_size=1, seq_len=384):
"""
Load the quantized bert-base model from TLCBench, possibly downloading it from github
and caching the converted int8 QNN module to disk.
In addition to returing the relay module and its parameters, it also returns input name
a... | 3f7a533f7424bd4727c963c6c8dc8c2c46b2d9b0 | 9,253 |
from . import setup as jssetup
def setup(app):
"""A temporary setup function so that we can use it for
backwards compatability.
This should be removed after a deprecation cycle.
"""
# To avoid circular imports we'll lazily import
js.logger.warning(
(
"`jupyter-sphinx` was ... | 16a64701d3b77a1d58126df458d4a3016be1e366 | 9,254 |
import torch
def hinge_loss(logit, target, margin, reduce='sum'):
"""
Args:
logit (torch.Tensor): (N, C, d_1, d_2, ..., d_K)
target (torch.Tensor): (N, d_1, d_2, ..., d_K)
margin (float):
"""
target = target.unsqueeze(1)
tgt_logit = torch.gather(logit, dim=1, index=target)
... | 0eb499d4164b37dee657ad0e0a5c1480324434bc | 9,255 |
import subprocess
def setTimeSync(state):
"""
Set the state of host/guest time synchronization using vmware-toolbox-cmd.
Returns None on success and an error message on failure.
"""
# Translate the boolean to a string for vmware-toolbox-cmd
if state:
setStr = 'enable'
else:
set... | 74369a46b357175615b28c4b9bcb57f3e2d501fe | 9,256 |
import threading
def do_cleanup(ips, args):
"""
:param ips:
:param args:
:return: None
"""
def _cleanup_single_node(ip):
def _rpc(cmd):
return rpc(ip, cmd, args.user, args.password, args.key, suppress_output=not args.verbose)
# TODO: (Make this more targeted)
... | 7623608a3a73a2196c9b2636a066a2aa85c6b659 | 9,257 |
import argparse
def getargopts():
"""Parse command line arguments."""
opts = argparse.ArgumentParser()
opts.add_argument('--port', type=int,
help="Port to listen to (default 8888)",
default=8888)
opts.add_argument('srcbase', help="Base source directory.")
... | ef565edbde535c1078e63b50ff5ba18a17cf4efb | 9,258 |
def roc(model, image, mask, ignore=None, sky=None, n_mask=1, seed=1, thresholds=np.linspace(0.001, 0.999, 500),
dilate=False, rad=1):
""" evaluate model on test set with the ROC curve
:param model: deepCR object
:param image: np.ndarray((N, W, H)) image array
:param mask: np.ndarray((N, W, H)) ... | 741381b707e4c732202c0cfdac512b13483f533f | 9,259 |
def parse_testconfig(conffile):
"""Parses the config file for the whole testsuite."""
repo_path, drop_caches, tests_dir, testlog_dir = '', '', '', ''
basebranch, baserev, repo_prof_path, repo_gprof_path = '', '', None, None
fileopen = open(conffile, 'r')
for line in fileopen:
line = line.spl... | a01e30a0355eac229018c7736e7d9903f59402ed | 9,260 |
def get_filtered_df(df, vocab_file):
""" Return a data frame with only the words present in the vocab file. """
if vocab_file:
vocab = open(vocab_file).readlines()
vocab = [v.strip() for v in vocab]
# Get the set of words.
words = pd.Series(df.word.values.ravel()).unique()
... | 7fbfcfd92adc2b55ad3024b6e31ced743fa9ac50 | 9,261 |
def pkcs5_pad(data):
    """Pad *data* (bytes) up to a multiple of the AES block size, PKCS5 style.

    Each padding byte equals the number of bytes added; a full block of
    padding is appended when the input is already block-aligned.
    """
    pad_len = KEYCZAR_AES_BLOCK_SIZE - len(data) % KEYCZAR_AES_BLOCK_SIZE
    padding = chr(pad_len).encode("utf-8") * pad_len
    return data + padding
import pickle
def gatherData(data, neat, gen, hyp, fileName, savePop=False):
"""Collects run data, saves it to disk, and exports pickled population
Args:
data - (DataGatherer) - collected run data
neat - (Neat) - neat algorithm container
.pop - [Ind] - ... | 56c8a01b2e07280dc17a5fa3d76331e39e112d8d | 9,263 |
def impute_bad_concentration_fits(c_lgtc, c_lgtc_min=0.1):
    """Overwrite bad concentration parameter fit values.

    Any entry below ``c_lgtc_min`` is replaced by ``c_lgtc_min`` (an
    element-wise floor); values at or above the floor pass through.
    """
    return np.maximum(c_lgtc, c_lgtc_min)
from re import DEBUG
def _build_sub_nics(all_nics):
"""
Aggregate all sub nics into their sub groups.
I.E. All nic\.X.\.*\.Y nics go into a list where all Y's are the same.
:param all_nics: All nics to consider.
:type all_nics: list
"""
sub_nics = {}
for nic in all_nics['nics']:
... | 4285205790cb9b2e7ae2b646ea34feb4e22a395a | 9,265 |
import numpy
import math
def pearsonr(a0, a1):
"""Pearson r, product-moment correlation coefficient, of two samples.
Covariance divided by product of standard deviations.
https://en.wikipedia.org/wiki/Pearson_product-moment_correlation_coefficient#For_a_sample
"""
n = len(a0)
assert n == len... | 64135ebc840bb1673ece1aec24f22c960f89af20 | 9,266 |
def __column(matrix, i):
    """Extract column *i* from a two-dimensional Python list (list of lists)."""
    column = []
    for current_row in matrix:
        column.append(current_row[i])
    return column
import math
def datamask(fmri_data, mask_data):
"""
filter the data by a ROI mask
Parameters:
fmri_data : array
The fMRI data.
The shape of fmri_data is [nx, ny, nz]. nx, ny, nz represent the size of the fMRI data.
mask_data : array
The mask data.
The shape of mas... | 235c676636b5cff42fba4da539ad83ed7c4f999a | 9,268 |
def make_global_batch_norm_tests(options):
"""Make a set of tests to do batch_norm_with_global_normalization."""
test_parameters = [{
"dtype": [tf.float32],
"input_shape": [[1, 1, 6, 2], [3, 4, 5, 4]],
"epsilon": [0.1, 0.0001],
"scale_after": [True, False],
}]
def build_graph(parameter... | eb68bd9cdd09c98471939ea88b8ea60b9772ab90 | 9,269 |
import copy
import os
def write_stats_file(run_output_dict):
"""Writes a dummy PolyChord format .stats file for tests functions for
processing stats files. This is written to:
base_dir/file_root.stats
Also returns the data in the file as a dict for comparison.
Parameters
----------
run_... | 5a3d7b81d8315fd39d5348f9140a001b020c7584 | 9,270 |
import json
def dict_serialize(seqlen_dist_dict):
"""
dict->str
Turns {1:'a',2:'b'}->"[[1,'a'],[2,'b']]"
Why? Because this format plays nice with shell script that runs xlmr_bench.
Avoids curly braces and spaces that makes shell script str input unhappy.
"""
seqlen_dist_lst = list(seqlen... | a61c51debff922d128fbb26bbe2121063511d4c4 | 9,271 |
def pi_cdecimal():
    """Compute pi with the decimal (cdecimal) module via an iterative series.

    Iterates until the running total stops changing at the current
    context precision.
    """
    Decimal = C.Decimal
    previous = Decimal(0)
    term = Decimal(3)
    total = Decimal(3)
    n, na = Decimal(1), Decimal(0)
    d, da = Decimal(0), Decimal(24)
    while total != previous:
        previous = total
        # Update the numerator/denominator accumulators before the term.
        n += na
        na += 8
        d += da
        da += 32
        term = term * n / d
        total += term
    return total
def e_list(a_list: AList) -> set[E]:
    """Collect the unique edge elements appearing anywhere in an adjacency list."""
    elements = set()
    for node in a_list:
        for neighbour in a_list[node]:
            elements.update(a_list[node][neighbour])
    return elements
import os
import logging
def load_complete_state(options, cwd, subdir, skip_update):
"""Loads a CompleteState.
This includes data from .isolate and .isolated.state files. Never reads the
.isolated file.
Arguments:
options: Options instance generated with process_isolate_options. For either
... | 4f33fefd254519e2a7c3e8c98859e70b553be015 | 9,274 |
def GDAL_QUERY(filename, sql, data={}):
"""
GDAL_QUERY
"""
res = []
sql = sformat(sql, data)
ds = ogr.OpenShared(filename)
if ds:
try:
layer = ds.ExecuteSQL(sql)
definition = layer.GetLayerDefn()
n = definition.GetFieldCount()
for featu... | 33e455ef64bf0d168f9d2c03c9ba630a2d9729c3 | 9,275 |
def get_event_bpt_hea():
    """
    Get hardware address for BREAKPOINT event

    @return: hardware address
    @raise RuntimeError: if no debug event could be retrieved
    """
    ev = ida_dbg.get_debug_event()
    if not ev:
        # `assert` is stripped when Python runs with -O, silently skipping
        # this validation; raise an explicit exception instead.
        raise RuntimeError("Could not retrieve debug event")
    return ida_idd.get_event_bpt_hea(ev)
import torch
def solve_maxent_ce(payoffs, steps=1000000, lams=None, lr=None):
"""Calculates the maximum-entropy correlated equilibrium as defined in
Ortiz et al. (2007).
payoffs (torch.Tensor):
Joint payoff tensor.
steps (int, optional):
Number of SGD steps to use in calculations (def... | 0004b6bbdd5347987c069a68d5baf9a707c85b0c | 9,277 |
def d_latlon(p1, p2):
    """
    Compute the distance between two points. The original file used a more
    complex, more expensive algorithm; a comparatively simple one is used
    here instead, without losing much precision.
    """
    # Assumes p1 and p2 are (lon, lat) pairs in degrees supporting
    # element-wise subtraction (e.g. numpy arrays) — TODO confirm callers.
    lon_diff, lat_diff = p1 - p2
    # 0.00872664625997165 ~= pi/360, so the argument is the mean latitude
    # ((lat1+lat2)/2) converted to radians; scaling lon by its cosine is
    # the standard equirectangular approximation.
    lon_diff *= cos((p1[1] + p2[1]) * 0.00872664625997165)
    # Planar distance in degree units, converted by the module-level
    # earth_radians factor (defined elsewhere in the file).
    return sqrt(lat_diff * lat_diff + lon_diff * lon_diff) * earth_radians
def _path_list_creator(path, file_prefix_name, number_of_digits_zfill, file_suffix_name):
"""Creates a list of paths where the files have a predefined prefix,
an incremental number and a predefined suffix on their name,
respectively. Eg.: img01.zdf
Args:
path: a path that leads to the f... | 4850edbbf544284b0736ee52188bd53119c50fdf | 9,279 |
import sys
import os
from warnings import warn
def determine_paths(env):
"""
Fill the 'CUDA_TOOLKIT_PATH' into environment if it is not there.
@return: the paths.
@rtype: tuple
"""
home = os.environ.get('HOME', '')
programfiles = os.environ.get('PROGRAMFILES', '')
homedrive = os.envir... | 6ff582d08ce449a57f79da2eba390459b9e2b799 | 9,280 |
def crosscorrelation(array1, array2, std1, std2, **kwargs):
""" Compute crosscorrelation. """
_ = std1, std2, kwargs
xp = cp.get_array_module(array1) if CUPY_AVAILABLE else np
window = array1.shape[-1]
pad_width = [(0, 0)] * (array2.ndim - 1) + [(window//2, window - window//2)]
padded = xp.pad(a... | b24e3577d2a8d28444a4eefd1ef1d80924f08aaf | 9,281 |
def score_max_depths(graph, max_depths):
"""
In order to assess the quality of the approximate partitioning method
we've developed, we will run it with different values for max_depth
and see how it affects the norm_cut score of the resulting partitions.
Recall that smaller norm_cut scores correspond... | 35abf18b9bb3299262dd25923c3dc7cc832a90fc | 9,282 |
def build_multi(mapping, inserts, key_residues, pdbfnames, chains):
"""Superimpose multiple structures onto a reference, showing equivalent
selected residues in each.
To reduce clutter, only show residues deviating from the reference side
chain by at least `threshold` Angstroms RMS.
"""
# TODO ... | 7ab41587b08d30960ca28e651c71e7e03d323df9 | 9,283 |
import re
def tamper(payload, **kwargs):
"""
Replaces instances of UNION with -.1UNION
Requirement:
* MySQL
Notes:
* Reference: https://raw.githubusercontent.com/y0unge/Notes/master/SQL%20Injection%20WAF%20Bypassing%20shortcut.pdf
>>> tamper('1 UNION ALL SELECT')
'1-.1UNION ... | cbf4fc5b81bc7760aafe6cf65fa498945285e5bb | 9,284 |
def svn_wc_transmit_prop_deltas(*args):
    """
    svn_wc_transmit_prop_deltas(char path, svn_wc_adm_access_t adm_access, svn_wc_entry_t entry,
                                svn_delta_editor_t editor, void baton,
                                apr_pool_t pool) -> svn_error_t
    """
    # Auto-generated SWIG wrapper: forwards all arguments unchanged to the
    # native binding in _wc. Do not edit by hand.
    return _wc.svn_wc_transmit_prop_deltas(*args)
from typing import Mapping
from typing import Container
from typing import Set
from typing import Sequence
def _make_immutable(obj):
"""Recursively convert a container and objects inside of it into immutable data types."""
if isinstance(obj, (text_type, binary_type)):
return obj
elif isinstance(o... | 1f7b51c7b0c5d16dfd9fb0eb10e1ca9410287f85 | 9,286 |
def get_source_tokens_tensor(src_tokens):
"""
To enable integration with PyText, src_tokens should be able to support
more features than just token embeddings. Hence when dictionary features are
passed from PyText it will be passed as a tuple
(token_embeddings, dict_feat, ..). Thus, in this case whe... | cf20ceeba82c595dc62b267794ca758360e0386b | 9,287 |
def merge_config_and_args(config, args):
"""
Creates a configuration dictionary based upon command line arguments.
Parameters
----------
config : dict
configurations loaded from the config file
args : object
arguments and there values which could be \
passed in the c... | 3935cfc525fb99b9513a608ef0e5e8fd7de708f3 | 9,288 |
def contemp2pottemp(salt, tcon, tpot0=None, **rootkwargs):
"""Calculate conservative temp -> potential temp.
Calculate the potential temperature from the absolute salinity and
conservative temperature. Applies either Newton's method or Halley's
method. See `aux.rootfinder` for details on implementation... | fd627f1561e21daaa18f9d84c0fc12d5ab87e7e5 | 9,289 |
import random
import string
def get_random_string(length: int) -> str:
    """Return a random string of *length* ASCII letters (mixed upper and lower case).

    NOTE(review): uses the non-cryptographic `random` module — fine for test
    data, not for tokens or secrets.
    """
    alphabet = string.ascii_letters
    return ''.join(random.choice(alphabet) for _ in range(length))
import errno
def _write_callback(connection_id, data_buffer, data_length_pointer):
"""
Callback called by Secure Transport to actually write to the socket
:param connection_id:
An integer identifing the connection
:param data_buffer:
A char pointer FFI type containing the data to wri... | 4daa1130c18b28abe92b5a550d1aac734f74d3dc | 9,291 |
import functools
import pickle
def cache(**kwargs):
"""
Cache decorator.
Should be called with `@cache(ttl_sec=123, transform=transform_response)`
Arguments:
ttl_sec: optional,number The time in seconds to cache the response if
status code < 400
transform: optional,func Th... | 10be4de3f0c6125fb502e2b3598bce18eff52375 | 9,292 |
def RegisterTensorTransformer(name):
    """Registers a dataset under *name* in the tensor-transformer registry."""
    def decorator(cls):
        # Tag the class with its registry name and record it.
        cls.name = name
        TENSOR_TRANSFORMER_REGISTRY[name] = cls
        return cls
    return decorator
import os
import configparser
def read_token():
"""Reads and returns the authentication token.
It tries to read the token from a already existing config file first.
If there is no token it will get one from the putio api and store it
in a config file.
Location of the config file::
~/.pu... | 9358ddbc530804e4efaa976881f33bd8848b21e0 | 9,294 |
from pathlib import Path
import copy
import threading
import shutil
def handler_factory(
jinja_template_rendered: BytesIO,
base_dir: Path,
events: list = None,
username: str = "thqm",
password: str = None,
oneshot: bool = False,
allow_custom_events: bool = False,
):
"""Create a HTTPHan... | 119f1ecd6ba6b3172087f85091360f5e5c0c909d | 9,295 |
def add():
    """Temporary view letting users add tracks easily, mainly for testing."""
    memo_form = SQLFORM(db.memo)
    accepted = memo_form.process().accepted
    if accepted:
        redirect(URL('default', 'index'))
    return dict(form=memo_form)
from inmanta_plugins.terraform.helpers.const import (
TERRAFORM_RESOURCE_STATE_PARAMETER,
)
from typing import Callable
from typing import Optional
from typing import Dict
from typing import List
from pathlib import Path
async def test_update_failed(
project: Project,
server: Server,
client: Clien... | 969fb6136ecf2fd1adc3651aaba6d5b44e795e70 | 9,297 |
def is_valid_mac(mac):
    """
    Validate mac address

    :param mac: candidate MAC address
    :return: boolean — True only for a non-empty string matching the
        module-level ``mac_address`` pattern (matched lower-cased)
    """
    if not isinstance(mac, str) or not mac:
        return False
    return mac_address.match(mac.lower()) is not None
def get_model(tokenizer, lstm_units):
"""
Constructs the model,
Embedding vectors => LSTM => 2 output Fully-Connected neurons with softmax activation
"""
# get the GloVe embedding vectors
embedding_matrix = get_embedding_vectors(tokenizer)
model = Sequential()
model.add(Embedding(len(tok... | fd8352081898b4fcffe122a7058d0069caa7ab21 | 9,299 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.