code stringlengths 51 2.34k | docstring stringlengths 11 171 |
|---|---|
def form_valid(self, form):
slotIds = form.cleaned_data['slotIds']
deleteSlot = form.cleaned_data.get('deleteSlot', False)
these_slots = InstructorAvailabilitySlot.objects.filter(id__in=slotIds)
if deleteSlot:
these_slots.delete()
else:
for this_slot in th... | Modify or delete the availability slot as requested and return success message. |
def form_valid(self, form):
    """Delegate to the parent's form_valid, then flash a success message."""
    result = super(HostCreate, self).form_valid(form)
    # self.object is populated by the parent call above.
    messages.success(self.request, 'Host {} Successfully Created'.format(self.object))
    return result
def percent_of_the_time(p):
    """Decorator factory: the wrapped function runs only ``p`` percent of the time.

    Fixes: the wrapper now propagates the wrapped function's return value
    (previously it always returned ``None``) and preserves the function's
    metadata via ``functools.wraps``.
    """
    import functools

    def decorator(fn):
        @functools.wraps(fn)
        def wrapped(*args, **kwargs):
            # in_percentage(p) decides (randomly) whether this call happens.
            if in_percentage(p):
                return fn(*args, **kwargs)
            return None
        return wrapped
    return decorator
def _jstype(self, stype, sval):
if stype == self.IS_LIST:
return "array"
if stype == self.IS_DICT:
return "object"
if isinstance(sval, Scalar):
return sval.jstype
v = sval._schema
return self._jstype(self._whatis(v), v) | Get JavaScript name for given data type, called by `_build_schema`. |
def error_handler(task):
@wraps(task)
def wrapper(self, *args, **kwargs):
try:
return task(self, *args, **kwargs)
except Exception as e:
self.connected = False
if not self.testing:
exc_type, exc_obj, exc_tb = sys.exc_info()
fnam... | Handle and log RPC errors. |
def _check(self):
    """Verify extraction succeeded; the final CRC is deliberately not checked.

    Raises via rarfile helpers when the unrar process exited nonzero or
    fewer bytes than expected remain to be read.
    """
    if self._returncode:
        rarfile.check_returncode(self, '')
    if self._remain != 0:
        # Fix: grammar in the error message ("Failed the read" -> "Failed to read").
        raise rarfile.BadRarFile("Failed to read enough data")
def _add_products(self, tile, show_all=False):
products = tile.products
unique_id = tile.unique_id
base_path = tile.output_folder
for prod_path, prod_type in products.items():
if prod_path == 'tilebus_definitions' or prod_path == 'include_directories':
continu... | Add all products from a tile into our product map. |
def escape_keywords(arr):
    """Yield each name, suffixing Python keywords with '_' and mapping '-' to '_'."""
    for name in arr:
        if name in kwlist:
            name += '_'
        if '-' in name:
            name = name.replace('-', '_')
        yield name
def _check_list_minions(self, expr, greedy, ignore_missing=False):
if isinstance(expr, six.string_types):
expr = [m for m in expr.split(',') if m]
minions = self._pki_minions()
return {'minions': [x for x in expr if x in minions],
'missing': [] if ignore_missing else ... | Return the minions found by looking via a list |
def _remove(self, timer):
    """Remove *timer* from this heap.

    The caller is assumed to hold the heap lock, and the timer is assumed
    to be registered here (both conditions asserted).
    """
    # The timer must point back at this heap object.
    assert timer.timer_heap == self
    del self.timers[timer]
    assert timer in self.heap
    self.heap.remove(timer)
    # list.remove() broke the heap ordering; restore the invariant.
    heapq.heapify(self.heap)
def build(site, tagdata):
tagdata.sort()
tagmax = 0
for tagname, tagcount in tagdata:
if tagcount > tagmax:
tagmax = tagcount
steps = getsteps(site.tagcloud_levels, tagmax)
tags = []
for tagname, tagcount in tagdata:
weight = [twt[0] \
for twt in steps if twt[1] >= tagcount and twt[1] > 0][0]+1
tags.a... | Returns the tag cloud for a list of tags. |
def clear(self) -> None:
if self._cache_directory is not None:
assert os.path.exists(self._cache_directory_index), "Attempt to clear a non-existent cache"
self._load()
for e in self._cache.values():
if os.path.exists(e.loc):
os.remove(e.loc... | Clear all cache entries for directory and, if it is a 'pure' directory, remove the directory itself |
def prep_recal(data):
if dd.get_recalibrate(data) in [True, "gatk"]:
logger.info("Prepare BQSR tables with GATK: %s " % str(dd.get_sample_name(data)))
dbsnp_file = tz.get_in(("genome_resources", "variation", "dbsnp"), data)
if not dbsnp_file:
logger.info("Skipping GATK BaseRecali... | Do pre-BQSR recalibration, calculation of recalibration tables. |
def glr_path_static():
    """Return the absolute path to the packaged ``_static`` directory."""
    here = os.path.dirname(__file__)
    return os.path.abspath(os.path.join(here, '_static'))
def add_listener(self, listener, message_type, data=None, one_shot=False):
    """Register *listener* for incoming messages of *message_type*.

    One-shot listeners go into a separate registry (presumably discarded
    after first delivery — confirm against the dispatch code).
    """
    registry = self._one_shots if one_shot else self._listeners
    registry.setdefault(message_type, []).append(Listener(listener, data))
def dinfFlowDirection(self,
flow_dir_grid,
slope_grid,
pit_filled_elevation_grid=None):
log("PROCESS: DinfFlowDirection")
if pit_filled_elevation_grid:
self.pit_filled_elevation_grid = pit_filled_elevation_grid
... | Calculates flow direction with Dinf method |
def add_delta_step(self, delta: float):
    """Accumulate environment-step time into the current collection delta."""
    previous = self.delta_last_experience_collection
    # A falsy value (None or 0.0) starts a fresh accumulation.
    self.delta_last_experience_collection = previous + delta if previous else delta
def freqpoly_plot(data):
rel_data = OrderedDict()
for key, val in data.items():
tot = sum(val.values(), 0)
rel_data[key] = {k: v / tot for k, v in val.items()}
fplotconfig = {
'data_labels': [
{'name': 'Absolute', 'ylab': 'Frequency', 'xlab': '... | make freqpoly plot of merged read lengths |
def delete(self, resource, resource_id):
    """Perform a default DELETE request for *resource_id* under *resource*'s path."""
    _service_def, _resource_def, path = self._get_service_information(resource)
    # Trailing slash matches the service's URL convention.
    return self.call(path="{0}{1}/".format(path, resource_id), method="delete")
def interface(self, context):
    """Store the adapter context and cache its 'callback' entry."""
    self.context = context
    self.callback = context.get("callback")
def kernel():
    """Upgrade the Raspberry Pi kernel via rpi-update; a reboot is required afterwards."""
    banner = '================================'
    print(banner)
    print(' WARNING: upgrading the kernel')
    print(banner)
    # Give the operator a moment to abort before the upgrade starts.
    time.sleep(5)
    print('-[kernel]----------')
    cmd('rpi-update', True)
    print(' >> You MUST reboot to load the new kernel <<')
def _get_token_create_url(config):
role_name = config.get('role_name', None)
auth_path = '/v1/auth/token/create'
base_url = config['url']
return '/'.join(x.strip('/') for x in (base_url, auth_path, role_name) if x) | Create Vault url for token creation |
def counter_style(self, val, style):
if style == 'decimal-leading-zero':
if val < 10:
valstr = "0{}".format(val)
else:
valstr = str(val)
elif style == 'lower-roman':
valstr = _to_roman(val).lower()
elif style == 'upper-roman':
... | Return counter value in given style. |
def sendall(self, *args, **kwargs):
    """Send the whole message to the socket.

    Delegates to the parent class's ``sendall`` through ``_safe_call``;
    the leading ``False`` presumably marks this as a write-side (non-read)
    operation for that wrapper — confirm against ``_safe_call``.
    """
    return self._safe_call(
        False,
        super(SSLFileobjectMixin, self).sendall,
        *args, **kwargs
    )
def class_type_changed(self):
    """Force a reset when the class type flips between instruments and scripts."""
    path = self.source_path.text()
    if path:
        self.reset_avaliable(path)
def register(self, password):
if len(password) < 8:
raise ValueError("Password must be at least 8 characters.")
params = {"name": self.nick, "password": password}
resp = self.conn.make_api_call("register", params)
if "error" in resp:
raise RuntimeError(f"{resp['er... | Registers the current user with the given password. |
def atlas_node_add_callback(atlas_state, callback_name, callback):
    """Attach *callback* to the initialized atlas state under *callback_name*."""
    # Guard clause: only 'store_zonefile' is a recognized callback slot.
    if callback_name != 'store_zonefile':
        raise ValueError("Unrecognized callback {}".format(callback_name))
    atlas_state['zonefile_crawler'].set_store_zonefile_callback(callback)
def on_reset_button(self, _):
for graph in self.visible_graphs.values():
graph.reset()
for graph in self.graphs.values():
try:
graph.source.reset()
except NotImplementedError:
pass
self.clock_view.set_text(ZERO_TIME)
sel... | Reset graph data and display empty graph |
def index(self, values=None, only_index=None):
assert self.indexable, "Field not indexable"
assert not only_index or self.has_index(only_index), "Invalid index"
if only_index:
only_index = only_index if isclass(only_index) else only_index.__class__
if values is None:
... | Index all values stored in the field, or only given ones if any. |
def visit_binop(self, node):
    """Render an astroid BinOp node as a string."""
    left = self._precedence_parens(node, node.left)
    right = self._precedence_parens(node, node.right, is_left=False)
    # Power binds tightly and is conventionally written without spaces.
    template = "%s%s%s" if node.op == "**" else "%s %s %s"
    return template % (left, node.op, right)
def os_details():
bits, linkage = platform.architecture()
results = {
"platform.arch.bits": bits,
"platform.arch.linkage": linkage,
"platform.machine": platform.machine(),
"platform.process": platform.processor(),
"sys.byteorder": sys.byteorder... | Returns a dictionary containing details about the operating system |
def request(self, method, url, params=None, **kwargs):
params_key = tuple(params.items()) if params else ()
if method.upper() == "GET":
if (url, params_key) in self.get_cache:
print("Returning cached response for:", method, url, params)
return self.get_cache[(... | Perform a request, or return a cached response if available. |
def chdir(self, directory_path, make=False):
if os.sep in directory_path:
for directory in directory_path.split(os.sep):
if make and not self.directory_exists(directory):
try:
self.session.mkd(directory)
except ftplib.er... | Change directories and optionally make the directory if it doesn't exist. |
def build_requirements(docs_path, package_name="yacms"):
mezz_string = "yacms=="
project_path = os.path.join(docs_path, "..")
requirements_file = os.path.join(project_path, package_name,
"project_template", "requirements.txt")
with open(requirements_file, "r") as f:
... | Updates the requirements file with yacms's version number. |
def _create_gitlab_prometheus_instance(self, instance, init_config):
allowed_metrics = init_config.get('allowed_metrics')
if allowed_metrics is None:
raise CheckException("At least one metric must be whitelisted in `allowed_metrics`.")
gitlab_instance = deepcopy(instance)
git... | Set up the gitlab instance so it can be used in OpenMetricsBaseCheck |
def display_terminal_carbon(mol):
    """Mark terminal atoms (exactly one neighbor) as visible.

    NOTE(review): only connectivity is checked, not the element symbol —
    confirm callers guarantee these atoms are carbons.
    """
    for idx, atom in mol.atoms_iter():
        if mol.neighbor_count(idx) == 1:
            atom.visible = True
def getApplicationLaunchArguments(self, unHandle, pchArgs, unArgs):
    """Fetch the launch-argument list for an app whose process was already running.

    Call this on receiving a VREvent_ApplicationMimeTypeLoad.
    """
    return self.function_table.getApplicationLaunchArguments(unHandle, pchArgs, unArgs)
def _get_title(self,directory,filename):
fullfile=os.path.join(directory,filename+'.title')
try:
logger.debug('trying to open [%s]'%(fullfile))
_title=(open(fullfile).readline().strip())
logger.debug("_updatemeta: %s - title is '%s'",filename,_title)
except:
... | Loads image title if any |
def _scale_back_response(bqm, response, scalar, ignored_interactions,
ignored_variables, ignore_offset):
if len(ignored_interactions) + len(
ignored_variables) + ignore_offset == 0:
response.record.energy = np.divide(response.record.energy, scalar)
else:
resp... | Helper function to scale back the response of sample method |
def init_not_msvc(self):
paths = os.environ.get('LD_LIBRARY_PATH', '').split(':')
for gomp in ('libgomp.so', 'libgomp.dylib'):
if cxx is None:
continue
cmd = [cxx, '-print-file-name=' + gomp]
try:
path = os.path.dirname(check_output(cmd... | Find OpenMP library and try to load if using ctype interface. |
def removePeer(self, url):
    """Delete all peer rows whose URL matches *url*."""
    models.Peer.delete().where(models.Peer.url == url).execute()
def extract(self, pbf, output):
logging.info("Extracting POI nodes from {0} to {1}".format(pbf, output))
with open(output, 'w') as f:
def nodes_callback(nodes):
for node in nodes:
node_id, tags, coordinates = node
if any([t in tags for ... | extract POI nodes from osm pbf extract |
def _handle_template_param(self):
if self._context & contexts.TEMPLATE_NAME:
if not self._context & (contexts.HAS_TEXT | contexts.HAS_TEMPLATE):
self._fail_route()
self._context ^= contexts.TEMPLATE_NAME
elif self._context & contexts.TEMPLATE_PARAM_VALUE:
... | Handle a template parameter at the head of the string. |
def run(self):
try:
for index, port in enumerate(self.possible_ports):
try:
self.httpd = self._create_server(port)
except socket.error as e:
if (index + 1 < len(self.possible_ports) and
e.error == err... | Sets up live server, and then loops over handling http requests. |
def module_del(self, key):
    """Deregister *key* from python module change events; unknown keys are a no-op."""
    self._module_event_map.pop(key, None)
    if key in self._watch_modules.names:
        self._watch_modules.remove(key)
def nacl_bindings_pick_scrypt_params(opslimit, memlimit):
if opslimit < 32768:
opslimit = 32768
r = 8
if opslimit < (memlimit // 32):
p = 1
maxn = opslimit // (4 * r)
for n_log2 in range(1, 63):
if (2 ** n_log2) > (maxn // 2):
break
else:
... | Python implementation of libsodium's pickparams |
def record(self, tags, measurement_map, timestamp, attachments=None):
assert all(vv >= 0 for vv in measurement_map.values())
for measure, value in measurement_map.items():
if measure != self._registered_measures.get(measure.name):
return
view_datas = []
... | records stats with a set of tags |
def as_bel(self) -> str:
return '{}(fus({}:{}, "{}", {}:{}, "{}"))'.format(
self._func,
self.partner_5p.namespace,
self.partner_5p._priority_id,
self.range_5p.as_bel(),
self.partner_3p.namespace,
self.partner_3p._priority_id,
se... | Return this fusion as a BEL string. |
def register(self, prefix, viewset, base_name=None, router_class=None):
if base_name is None:
base_name = self.get_default_base_name(viewset)
if router_class is not None:
kwargs = {'trailing_slash': bool(self.trailing_slash)}
single_object_router_classes = (
... | Append the given viewset to the proper registry. |
def change_response(x, prob, index):
    """Resample, in place, every entry of ``x`` equal to ``index``.

    NOTE(review): samples are drawn from the module-level ``dist``, not
    from the ``prob`` parameter, which is unused here — confirm whether
    ``dist`` is expected to be built from ``prob`` by the caller.
    """
    # Count matching entries, then overwrite them with fresh samples.
    N = (x==index).sum()
    x[x==index] = dist.sample(N)
def write_slide_list(self, logname, slides):
with open('%s/%s' % (self.cache, logname), 'w') as logfile:
for slide in slides:
heading = slide['heading']['text']
filename = self.get_image_name(heading)
print('%s,%d' % (filename, slide.get('time', 0)),
... | Write list of slides to logfile |
def update(self):
from ambry.orm.exc import NotFoundError
from requests.exceptions import ConnectionError, HTTPError
from boto.exception import S3ResponseError
d = {}
try:
for k, v in self.list(full=True):
if not v:
continue
... | Cache the list into the data section of the record |
def merge_sims(oldsims, newsims, clip=None):
    """Merge two precomputed (item, score) similarity lists, most similar first.

    Fix: the ``clip`` truncation now applies in every branch — previously it
    was only applied when both lists were present, contradicting the
    documented contract.
    """
    if oldsims is None:
        result = newsims or []
    elif newsims is None:
        result = oldsims
    else:
        # Both present: combine and re-sort by descending score.
        result = sorted(oldsims + newsims, key=lambda item: -item[1])
    if clip is not None:
        result = result[:clip]
    return result
def _check_file_field(self, field):
is_field = field in self.field_names
is_file = self.__meta_metadata(field, 'field_type') == 'file'
if not (is_field and is_file):
msg = "'%s' is not a field or not a 'file' field" % field
raise ValueError(msg)
else:
... | Check that field exists and is a file field |
def virtual_interface_list(self, name):
    """Return the virtual interfaces on the named slice as plain attribute dicts."""
    server_uuid = self._server_uuid_from_name(name)
    interfaces = self.compute_conn.virtual_interfaces.list(server_uuid)
    return [iface.__dict__ for iface in interfaces]
def on_train_begin(self, **kwargs: Any) -> None:
"Prepare file with metric names."
self.path.parent.mkdir(parents=True, exist_ok=True)
self.file = self.path.open('a') if self.append else self.path.open('w')
self.file.write(','.join(self.learn.recorder.names[:(None if self.add_time ... | Prepare file with metric names. |
def remove_cv(type_):
nake_type = remove_alias(type_)
if not is_const(nake_type) and not is_volatile(nake_type):
return type_
result = nake_type
if is_const(result):
result = remove_const(result)
if is_volatile(result):
result = remove_volatile(result)
if is_const(result)... | removes const and volatile from the type definition |
def item_enclosure_url(self, item):
try:
url = item.image.url
except (AttributeError, ValueError):
img = BeautifulSoup(item.html_content, 'html.parser').find('img')
url = img.get('src') if img else None
self.cached_enclosure_url = url
if url:
... | Return an image for enclosure. |
def flush (self, overlap=0):
self.buf += self.empty.join(self.tmpbuf)
self.tmpbuf = []
if overlap and overlap < self.pos:
data = self.buf[:-overlap]
self.buf = self.buf[-overlap:]
else:
data = self.buf
self.buf = self.empty
return d... | Flush buffered data and return it. |
def identify_names(filename):
node, _ = parse_source_file(filename)
if node is None:
return {}
finder = NameFinder()
finder.visit(node)
names = list(finder.get_mapping())
names += extract_object_names_from_docs(filename)
example_code_obj = collections.OrderedDict()
for name, full... | Builds a codeobj summary by identifying and resolving used names. |
def _mean_square_error(y, y_pred, w):
return np.average(((y_pred - y) ** 2), weights=w) | Calculate the mean square error. |
def subtract(self, route):
    """Remove *route* entirely, dropping each of its hardware nodes from self.

    NOTE(review): uses ``dict.iterkeys()``, so this is Python 2 code; the
    ``NullHardwareMap``/``NullHardwareNode`` defaults presumably make a
    missing route or address a silent no-op — confirm their semantics.
    """
    for address in self.raw_maps.pop(route, NullHardwareMap()).iterkeys():
        self.pop(address, NullHardwareNode())
def make_replacement_visitor(find_expression, replace_expression):
    """Return a visitor that swaps every *find_expression* for *replace_expression*."""
    def visitor_fn(expression):
        # Leave everything except the target expression untouched.
        return replace_expression if expression == find_expression else expression
    return visitor_fn
def remake_display(self, *args):
    """Rebuild the kv layout widget after a change in my ``kv`` source."""
    Builder.load_string(self.kv)
    # Tear down the previous layout, if one was ever built.
    if hasattr(self, '_kv_layout'):
        self.remove_widget(self._kv_layout)
        del self._kv_layout
    layout = KvLayout()
    self._kv_layout = layout
    self.add_widget(layout)
async def join(self, ctx):
perms = discord.Permissions.none()
perms.read_messages = True
perms.send_messages = True
perms.manage_messages = True
perms.embed_links = True
perms.read_message_history = True
perms.attach_files = True
perms.add_reactions = True... | Sends you the bot invite link. |
def _parse_myinfo(client, command, actor, args):
_, server, version, usermodes, channelmodes = args.split(None, 5)[:5]
s = client.server
s.host = server
s.version = version
s.user_modes = set(usermodes)
s.channel_modes = set(channelmodes) | Parse MYINFO and update the Host object. |
def _render_context(self, template, block, **context):
return u''.join(block(template.new_context(context))) | Render a block to a string with its context |
def _ha_gen_method(func):
def wrapped(self, *args, **kw):
self._reset_retries()
while(True):
try:
results = func(self, *args, **kw)
while(True):
yield results.next()
except RequestError as e:
... | Method decorator for 'generator type' methods |
def parse_limits(self):
    """Yield (reaction ID, lower, upper) flux limit tuples from the model."""
    if 'limits' not in self._model:
        return
    limits = self._model['limits']
    if not isinstance(limits, list):
        raise ParseError('Expected limits to be a list')
    yield from parse_limits_list(self._context, limits)
def all_server_links(server_id, rel_to=None):
    """Return every link to include with a Server, flagging *rel_to* as the self relation."""
    links = []
    for rel in ('delete-server', 'get-server-info', 'server-command'):
        links.append(server_link(rel, server_id, self_rel=(rel == rel_to)))
    return links
def execute_sql(self, sql):
    """Execute *sql* on a fresh cursor and return that cursor.

    NOTE(review): *sql* runs verbatim — callers must not pass untrusted,
    unparameterized input.
    """
    cur = self.get_cursor()
    cur.execute(sql)
    return cur
def _cnn_filter(in_file, vrn_files, data):
    """Run CNN-based filtering on the input VCF using pre-trained read-tensor models."""
    tensor_kind = "read_tensor"
    scored = _cnn_score_variants(in_file, tensor_kind, data)
    return _cnn_tranch_filtering(scored, vrn_files, tensor_kind, data)
def keys(self):
    """Return the keys of all the elements.

    For one-dimensional data the single-element key tuples are unwrapped.
    """
    if self.ndims == 1:
        return [key[0] for key in self.data.keys()]
    return list(self.data.keys())
def in6_getifaddr():
ifaddrs = []
ip6s = get_ips(v6=True)
for iface in ip6s:
ips = ip6s[iface]
for ip in ips:
scope = in6_getscope(ip)
ifaddrs.append((ip, scope, iface))
if conf.use_npcap and scapy.consts.LOOPBACK_INTERFACE:
ifaddrs.append(("::1", 0, scapy... | Returns all IPv6 addresses found on the computer |
def GetClientVersion(client_id, token=None):
if data_store.RelationalDBEnabled():
sinfo = data_store.REL_DB.ReadClientStartupInfo(client_id=client_id)
if sinfo is not None:
return sinfo.client_info.client_version
else:
return config.CONFIG["Source.version_numeric"]
else:
with aff4.FACTOR... | Returns last known GRR version that the client used. |
def to_ufo_blue_values(self, ufo, master):
alignment_zones = master.alignmentZones
blue_values = []
other_blues = []
for zone in sorted(alignment_zones):
pos = zone.position
size = zone.size
val_list = blue_values if pos == 0 or size >= 0 else other_blues
val_list.extend(... | Set postscript blue values from Glyphs alignment zones. |
def end_policy_update(self):
if self.time_policy_update_start:
self.delta_policy_update = time() - self.time_policy_update_start
else:
self.delta_policy_update = 0
delta_train_start = time() - self.time_training_start
LOGGER.debug(" Policy Update Training Metrics ... | Inform Metrics class that policy update has started. |
def allowed(self, method, _dict, allow):
    """Raise LunrError if *_dict* contains any key outside *allow*."""
    for key in _dict:
        if key not in allow:
            raise LunrError("'%s' is not an argument for method '%s'"
                            % (key, method))
def readerWalker(self):
    """Create an xmlTextReader for a preparsed document."""
    handle = libxml2mod.xmlReaderWalker(self._o)
    if handle is None:
        raise treeError('xmlReaderWalker() failed')
    return xmlTextReader(_obj=handle)
def _get_space(self):
title = '%s._space_id' % self.__class__.__name__
list_kwargs = {
'q': "'%s' in parents" % self.drive_space,
'spaces': self.drive_space,
'fields': 'files(name, parents)',
'pageSize': 1
}
try:
response = self... | a helper method to retrieve id of drive space |
def crop_to_extents(img1, img2, padding):
beg_coords1, end_coords1 = crop_coords(img1, padding)
beg_coords2, end_coords2 = crop_coords(img2, padding)
beg_coords = np.fmin(beg_coords1, beg_coords2)
end_coords = np.fmax(end_coords1, end_coords2)
img1 = crop_3dimage(img1, beg_coords, end_coords)
im... | Crop the images to ensure both fit within the bounding box |
def _bsecurate_cli_print_component_file(args):
    """Handle the print-component-file subcommand: load the JSON basis and format it."""
    basis_data = fileio.read_json_basis(args.file)
    return printing.component_basis_str(basis_data, elements=args.elements)
def decompress(self, data):
decompressed = ""
pos = 0
while pos < len(data):
currentChar = data[pos]
if currentChar != self.referencePrefix:
decompressed += currentChar
pos += 1
else:
nextChar = data[pos + 1]
if nextChar != self.referencePrefix:
distance = self.__decodeReferenceInt(d... | Decompresses LZ77 compressed text data |
def token_list_to_text(tokenlist):
    """Concatenate the text of all (token, text) pairs, skipping zero-width escapes."""
    skip = Token.ZeroWidthEscape
    return ''.join(text for token, text in tokenlist if token != skip)
def which(program):
" Check program is exists. "
head, _ = op.split(program)
if head:
if is_exe(program):
return program
else:
for path in environ["PATH"].split(pathsep):
exe_file = op.join(path, program)
if is_exe(exe_file):
return exe... | Check program is exists. |
def add_style(self, name, fg=None, bg=None, options=None):
style = Style(name)
if fg is not None:
style.fg(fg)
if bg is not None:
style.bg(bg)
if options is not None:
if "bold" in options:
style.bold()
if "underline" in opti... | Adds a new style |
def clone_from(self, other):
    """Clone every editor from *other* into this EditorStack, then match its stack index."""
    for finfo in other.data:
        self.clone_editor_from(finfo, set_current=True)
    self.set_stack_index(other.get_stack_index())
def items(self):
    """Return the payment items as a list, wrapping a lone item in a list.

    Fix: use isinstance() instead of comparing type() — also accepts
    list subclasses, which the original rejected.
    """
    item = self.transaction['items']['item']
    if isinstance(item, list):
        return item
    return [item]
def _get_field_error_dict(self, field):
return {
'name': field.html_name,
'id': 'id_{}'.format(field.html_name),
'errors': field.errors,
} | Returns the dict containing the field errors information |
def remove(self, row_or_row_indices):
if not row_or_row_indices:
return
if isinstance(row_or_row_indices, int):
rows_remove = [row_or_row_indices]
else:
rows_remove = row_or_row_indices
for col in self._columns:
self._columns[col] = [elem f... | Removes a row or multiple rows of a table in place. |
def getTimeOfClassesRemaining(self,numClasses=0):
occurrences = EventOccurrence.objects.filter(
cancelled=False,
event__in=[x.event for x in self.temporaryeventregistration_set.filter(event__series__isnull=False)],
).order_by('-endTime')
if occurrences.count() > numClasse... | For checking things like prerequisites, it's useful to check if a requirement is 'almost' met |
def assertpathsandfiles(self):
assert os.path.isdir(self.miseqpath), u'MiSeqPath is not a valid directory {0!r:s}'.format(self.miseqpath)
if not self.miseqfolder:
miseqfolders = glob('{}*/'.format(self.miseqpath))
self.miseqfolder = sorted(miseqfolders)[-1]
self.miseq... | Assertions to make sure that arguments are at least mostly valid |
def callback_checkbox(attr, old, new):
    """Sync line/scatter visibility with the parameters selected in the CheckboxSelect.

    Fix: removed an unused local ``import numpy``.
    """
    for i in range(len(lines)):
        active = i in param_checkbox.active
        lines[i].visible = active
        scats[i].visible = active
    return None
def value(self):
    """Read the probed attribute from the instrument, buffer it, and return it."""
    reading = getattr(self.instrument, self.probe_name)
    self.buffer.append(reading)
    return reading
def local_regon_checksum(digits):
    """Return the control digit for a 13-digit local REGON number."""
    weights = (2, 4, 8, 5, 0, 9, 7, 3, 6, 1, 2, 4, 8)
    check_digit = sum(weights[i] * digits[i] for i in range(13)) % 11
    # Per the REGON standard, a computed value of 10 maps to 0.
    return 0 if check_digit == 10 else check_digit
def protoFast():
    """Run the protocol but omit proof generation and verification.

    NOTE(review): relies on module-level m, w, t, msk and s; the deblinded
    value ``z`` is computed but never returned — confirm whether this is a
    benchmark-only driver or a missing ``return z``.
    """
    r, x = blind(m)
    y,kw,tTilde = eval(w,t,x,msk,s)
    z = deblind(r, y)
def _rule2path(cls, rule):
typeless = re.sub(r'<\w+?:', '<', rule)
return typeless.replace('<', '{').replace('>', '}') | Convert relative Flask rule to absolute OpenAPI path. |
def umi_below_threshold(umi_quals, quality_encoding, quality_filter_threshold):
    """Return True if any UMI base quality falls below the filter threshold."""
    flags = get_below_threshold(
        umi_quals, quality_encoding, quality_filter_threshold)
    return any(flags)
def __create_safari_driver(self):
if not os.getenv(self.__SELENIUM_SERVER_JAR_ENV):
try:
selenium_server_path = self._config_reader.get(
self.SELENIUM_SERVER_LOCATION)
self._env_vars[
self.__SELENIUM_SERVER_JAR_ENV] = selenium_s... | Creates an instance of Safari webdriver. |
def read_aldb(self, mem_addr=0x0000, num_recs=0):
if self._aldb.version == ALDBVersion.Null:
_LOGGER.info('Device %s does not contain an All-Link Database',
self._address.human)
else:
_LOGGER.info('Reading All-Link Database for device %s',
... | Read the device All-Link Database. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.