code stringlengths 51 2.34k | docstring stringlengths 11 171 |
|---|---|
def next_video(self, _):
    """Advance the video counter and log the new index.

    NOTE(review): per the original note, this also fires when the file is
    loaded, so the effective index starts at 2 — confirm against the caller.
    """
    self.cnt_video += 1
    # lazy %-style args avoid building the string when INFO is disabled
    lg.info('Update video to %s', self.cnt_video)
def value(self, raw_value):
    """Decode *raw_value* from Base64 and return the UTF-8 text.

    Raises:
        ValueError: when *raw_value* is not valid Base64.
    """
    try:
        return base64.b64decode(bytes(raw_value, 'utf-8')).decode('utf-8')
    except binascii.Error as err:
        # chain the cause so the underlying Base64 error detail is preserved
        raise ValueError(str(err)) from err
def remove_variants(self, variants):
chroms = set([i.chrom for i in variants])
for chrom in chroms:
if self.append_chromosome:
chrom = 'chr%s' % chrom
to_delete = [pos for pos in self.positions[chrom] if pos in variants]
for pos in to_delete:
... | Remove a list of variants from the positions we are scanning |
def main():
log.setup_main_logger(console=True, file_logging=False)
log.log_sockeye_version(logger)
params = argparse.ArgumentParser(description="Rerank nbest lists of translations."
" Reranking sorts a list of hypotheses according"
... | Commandline interface to rerank nbest lists. |
def open(cls, filename):
file_info = cls.parse_remote(filename)
blob_service = cls.connect(filename)
return BlobHandle(blob_service=blob_service,
container=file_info.container,
blob=file_info.blob,
chunk_size=cls._BLOB... | Provide a handle-like object for streaming. |
def build(content_directory=None, out_directory=None):
content_directory = content_directory or '.'
out_directory = os.path.abspath(out_directory or default_out_directory)
repo = require_repo(content_directory)
if out_directory == '.':
raise ValueError('Output directory must be different than th... | Builds the site from its content and presentation repository. |
def extend(self, *keys, **kwargs):
this = copy.deepcopy(self)
value_type = kwargs.get('value_type', EnumValue)
if not keys:
raise EnumEmptyError()
keys = tuple(keys)
values = [None] * len(keys)
for i, key in enumerate(keys):
value = value_type(this... | Return a new enumeration object extended with the specified items. |
def was_run_code(self, get_all=True):
    """Return the code that was run.

    With get_all=True the stored pieces are collapsed into one joined
    string (self.stored is mutated to hold just that string); otherwise
    only the most recent piece is returned.  Returns "" when nothing
    has been stored yet.
    """
    if self.stored is None:
        return ""
    if get_all:
        self.stored = ["\n".join(self.stored)]
    return self.stored[-1]
def _newConsole(cls, console):
    """Build a Console instance around an existing console ctype handle."""
    instance = cls.__new__(cls)
    _BaseConsole.__init__(instance)
    instance.console_c = console
    instance.console = instance
    # cache the dimensions reported by the underlying C console
    instance.width = _lib.TCOD_console_get_width(console)
    instance.height = _lib.TCOD_console_get_height(console)
    return instance
def showpath(path):
    """Format *path* for display: absolute when verbose, short relative otherwise."""
    if logger.verbose:
        return os.path.abspath(path)
    rel = os.path.relpath(path)
    prefix = os.curdir + os.sep
    # strip a leading "./" for a cleaner display
    if rel.startswith(prefix):
        rel = rel[len(prefix):]
    return rel
def save(self, path: Union[str, bytes, int]) -> None:
    """Write QASM output to the file at *path*."""
    with open(path, 'w') as qasm_file:
        # the file's bound write method serves as the str -> None callback
        self._write_qasm(qasm_file.write)
def _did_retrieve(self, connection):
response = connection.response
try:
self.from_dict(response.data[0])
except:
pass
return self._did_perform_standard_operation(connection) | Callback called after fetching the object |
def adjacent(geohash, direction):
assert direction in 'nsew', "Invalid direction: %s"%direction
assert geohash, "Invalid geohash: %s"%geohash
neighbor = {
'n': [ 'p0r21436x8zb9dcf5h7kjnmqesgutwvy', 'bc01fg45238967deuvhjyznpkmstqrwx' ],
's': [ '14365h7k9dcfesgujnmqp0r2twvyx8zb', '238967debc01fg45kmstqrwxuv... | Return the adjacent geohash for a given direction. |
def readline(self, size=None):
    """Read a single line of text (the *size* argument is accepted but unused)."""
    text, newline = self.__buffer.read_until_nl(self.__retrieve_data)
    # track which newline styles were seen when universal newlines are enabled
    if self.__sf.access_type_has_universal_nl and newline is not None:
        self.__newlines[newline] = True
    return text
def fetch(self, remote='origin'):
    """Fetch from the given *remote* (default 'origin') in this repo's gitdir."""
    git(self.gitdir, "fetch", remote, _env=self.env())
def leader_for_partition(self, partition):
    """Return the leader node_id for *partition*, or None when unknown."""
    by_topic = self._partitions
    if partition.topic not in by_topic:
        return None
    by_partition = by_topic[partition.topic]
    if partition.partition not in by_partition:
        return None
    return by_partition[partition.partition].leader
def load_path(self, path):
containing_module, _, last_item = path.rpartition('.')
if last_item[0].isupper():
path = containing_module
imported_obj = importlib.import_module(path)
if last_item[0].isupper():
try:
imported_obj = getattr(imported_obj, ... | Load and return a given import path to a module or class |
def _get_var_name(self, register_name, mode):
var_name = {
"pre": self._translator.get_name_init(register_name),
"post": self._translator.get_name_curr(register_name),
}
return var_name[mode] | Get variable name for a register considering pre and post mode. |
def _add_internal_event(self, name, send_event=False, internal_event_factory=None):
if not internal_event_factory:
internal_event_factory = self.internal_event_factory
return self.add_event(names, send_event=send_event, event_factory=internal_event_factory) | This is only here to ensure my constant hatred for Python 2's horrid variable argument support. |
def val(self, strictkey):
    """Return a chunk referencing the mapping value stored under *strictkey*."""
    key_in_ruamel = self.ruamelindex(strictkey)
    pointer_val = self._pointer.val(key_in_ruamel, strictkey)
    return self._select(pointer_val)
async def send(self, sender, **kwargs):
if not self.receivers:
return []
responses = []
futures = []
for receiver in self._get_receivers(sender):
method = receiver()
if callable(method):
futures.append(method(sender=sender, **kwargs))
... | send a signal from the sender to all connected receivers |
def shell_exec(command):
    """Execute *command* in a shell and return its combined stdout/stderr as text.

    NOTE(security): *command* is handed to the shell verbatim (shell=True);
    never call this with untrusted input.
    """
    completed = subprocess.run(command, shell=True,
                               stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    return completed.stdout.decode('utf-8')
def process_message(self, message, *args, **kwargs):
if not message.level in PERSISTENT_MESSAGE_LEVELS:
return message
user = kwargs.get("user") or self.get_user()
try:
anonymous = user.is_anonymous()
except TypeError:
anonymous = user.is_anonymous
... | If its level is into persist levels, convert the message to models and save it |
def shell(
state,
fancy=False,
shell_args=None,
anyway=False,
):
from ..core import load_dot_env, do_shell
if "PIPENV_ACTIVE" in os.environ:
venv_name = os.environ.get("VIRTUAL_ENV", "UNKNOWN_VIRTUAL_ENVIRONMENT")
if not anyway:
echo(
"{0} {1} {2}\nNo ... | Spawns a shell within the virtualenv. |
def requests_for_variant(self, request, variant_id=None):
    """Return all product requests belonging to a single variant."""
    variant_requests = ProductRequest.objects.filter(variant__id=variant_id)
    payload = self.serializer_class(variant_requests, many=True).data
    return Response(data=payload, status=status.HTTP_200_OK)
def fw_update(self, data, fw_name=None):
    """Top-level firmware update entry point; delegates to self._fw_update."""
    LOG.debug("FW Update %s", data)
    self._fw_update(fw_name, data)
def delete(self, object_type, object_id):
tag_names = request.get_json(force=True)
if not tag_names:
return Response(status=403)
db.session.query(TaggedObject).filter(and_(
TaggedObject.object_type == object_type,
TaggedObject.object_id == object_id),
... | Remove tags from an object. |
def encode_location(location: BioCLocation):
    """Encode a single location as a 'location' XML element."""
    attributes = {'offset': str(location.offset),
                  'length': str(location.length)}
    return etree.Element('location', attributes)
def remove_dns_challenge_txt(self, zone_id, domain, txt_challenge):
print("Deleting DNS challenge..")
resp = self.route53.change_resource_record_sets(
HostedZoneId=zone_id,
ChangeBatch=self.get_dns_challenge_change_batch('DELETE', domain, txt_challenge)
)
return r... | Remove DNS challenge TXT. |
def pil2tensor(image:Union[NPImage,NPArray],dtype:np.dtype)->TensorImage:
"Convert PIL style `image` array to torch style image tensor."
a = np.asarray(image)
if a.ndim==2 : a = np.expand_dims(a,2)
a = np.transpose(a, (1, 0, 2))
a = np.transpose(a, (2, 1, 0))
return torch.from_numpy(a.astype(dty... | Convert PIL style `image` array to torch style image tensor. |
def richardson(vals, k, c=None):
    """Richardson extrapolation; estimates the parameter *c* when not given."""
    if c is None:
        c = richardson_parameter(vals, k)
    correction = (vals[k] - vals[k - 1]) / c
    return vals[k] - correction
def _initialize_lists(model_class, name, bases, attrs):
    """Store the ListField descriptors of a model on model_class._lists.

    Also back-fills each field's name from its attribute name when unset.
    """
    model_class._lists = {}
    # .items() (not the Python-2-only .iteritems()) works on both 2 and 3
    for attr_name, field in attrs.items():
        if isinstance(field, ListField):
            model_class._lists[attr_name] = field
            field.name = field.name or attr_name
def do_dir(self, args, unknown):
if unknown:
self.perror("dir does not take any positional arguments:", traceback_war=False)
self.do_help('dir')
self._last_result = cmd2.CommandResult('', 'Bad arguments')
return
contents = os.listdir(self.cwd)
fmt ... | List contents of current directory. |
def run_mash(self):
    """Run MASH to determine the closest RefSeq genomes."""
    self.pipeline = True
    mash.Mash(inputobject=self, analysistype='mash')
def skeletonize_labels(labels):
    """Skeletonize a labels matrix, keeping the original label values on the skeleton."""
    colors = color_labels(labels)
    n_colors = np.max(colors)
    if n_colors == 0:
        # nothing labeled: return the input unchanged
        return labels
    result = np.zeros(labels.shape, labels.dtype)
    for color in range(1, n_colors + 1):
        skeleton_mask = skeletonize(colors == color)
        result[skeleton_mask] = labels[skeleton_mask]
    return result
def index():
stats = dict((k, {"count": 0}) for k, tt in conf.InputTables)
countminmax = "SUM(count) AS count, MIN(day) AS first, MAX(day) AS last"
for input, table in [(x, t) for x, tt in conf.InputTables for t in tt]:
row = db.fetchone("counts", countminmax, type=table)
if not row["co... | Handler for showing the GUI index page. |
def vtt_talk_sources(ttFont) -> List[str]:
    """Return the tags of VTT source tables present in *ttFont*."""
    vtt_source_tables = {'TSI0', 'TSI1', 'TSI2', 'TSI3', 'TSI5'}
    return [tag for tag in ttFont.keys() if tag in vtt_source_tables]
def remove_step_method(self, step_method):
try:
for s in step_method.stochastics:
self.step_method_dict[s].remove(step_method)
if hasattr(self, "step_methods"):
self.step_methods.discard(step_method)
self._sm_assigned = False
except Att... | Removes a step method. |
def sample_trajectories(self, rollout_length, batch_info) -> Trajectories:
indexes = self.backend.sample_batch_trajectories(rollout_length)
transition_tensors = self.backend.get_trajectories(indexes, rollout_length)
return Trajectories(
num_steps=rollout_length,
num_envs=... | Sample batch of trajectories and return them |
def _create_drawables(self, tokensource):
lineno = charno = maxcharno = 0
for ttype, value in tokensource:
while ttype not in self.styles:
ttype = ttype.parent
style = self.styles[ttype]
value = value.expandtabs(4)
lines = value.splitlines(... | Create drawables for the token content. |
def generate_unique_name(self, basename):
counts = self.__counts
try:
count = counts[basename]
counts[basename] += 1
except KeyError:
count = 0
counts[basename] = 1
prefix = self.Naming_prefix
if count == 0:
name = prefi... | Generates a unique name for a child given a base name. |
def _series_mdgel(self):
self.pages.useframes = False
self.pages.keyframe = 0
md = self.mdgel_metadata
if md['FileTag'] in (2, 128):
dtype = numpy.dtype('float32')
scale = md['ScalePixel']
scale = scale[0] / scale[1]
if md['FileTag'] == 2:
... | Return image series in MD Gel file. |
def read_utf(self, offset, len):
    """Read a UTF-8 string of *len* bytes starting at *offset* from the packet.

    Returns '' when the bytes are not valid UTF-8.  NOTE: the *len*
    parameter shadows the builtin; the name is kept so keyword callers
    keep working.
    """
    try:
        return self.data[offset:offset + len].decode('utf-8')
    except UnicodeDecodeError:
        # plain '' instead of the redundant str('')
        return ''
def check_available(self):
success = True
start_time = datetime.datetime.utcnow()
message = ''
LOGGER.debug('Checking layer id %s' % self.id)
signals.post_save.disconnect(layer_post_save, sender=Layer)
try:
self.update_thumbnail()
except ValueError, er... | Check for availability of a layer and provide run metrics. |
def _name_with_flags(self, include_restricted, title=None):
name = "Special: " if self.special else ""
name += self.name
if title:
name += " - {}".format(title)
if include_restricted and self.restricted:
name += " (R)"
name += " (BB)" if self.both_blocks e... | Generate the name with flags. |
def addFailure(self, test, err, capt=None, tbinfo=None):
    """Record test-case run information after a test failure."""
    self.__insert_test_result(constants.State.FAILURE, test, err)
def stop_NoteContainer(self, nc, channel=1):
    """Stop playing the notes in NoteContainer *nc*.

    Listeners are notified even when *nc* is None.  Returns False as soon
    as stopping any note fails, True otherwise.
    """
    self.notify_listeners(self.MSG_PLAY_NC, {'notes': nc, 'channel': channel})
    if nc is None:
        return True
    return all(self.stop_Note(note, channel) for note in nc)
def _repr_html_():
from bonobo.commands.version import get_versions
return (
'<div style="padding: 8px;">'
' <div style="float: left; width: 20px; height: 20px;">{}</div>'
' <pre style="white-space: nowrap; padding-left: 8px">{}</pre>'
"</div>"
).format(__logo__, "<br/>".jo... | This allows to easily display a version snippet in Jupyter. |
def use_plenary_book_view(self):
    """Pass through to each provider session's use_plenary_book_view."""
    self._book_view = PLENARY
    for provider_session in self._get_provider_sessions():
        try:
            provider_session.use_plenary_book_view()
        except AttributeError:
            # this provider session doesn't support book views; skip it
            pass
def watering_time(self):
index = self.id - 1
auto_watering_time =\
self._attributes['rain_delay_mode'][index]['auto_watering_time']
manual_watering_time =\
self._attributes['rain_delay_mode'][index]['manual_watering_time']
if auto_watering_time > manual_watering_t... | Return watering_time from zone. |
def delete(self, docids):
    """Delete the given document ids from both managed indexes, then flush."""
    # lazy %-args: the message is only built when INFO is actually emitted
    logger.info("asked to drop %i documents", len(docids))
    for index in (self.opt_index, self.fresh_index):
        if index is not None:
            index.delete(docids)
    self.flush(save_index=True)
def returner(ret):
if __salt__['config.option']('returner.kafka.topic'):
topic = __salt__['config.option']('returner.kafka.topic')
conn = _get_conn()
producer = Producer({'bootstrap.servers': conn})
producer.poll(0)
producer.produce(topic, salt.utils.json.dumps(ret), str(ret)... | Return information to a Kafka server |
def distance(latitude_1, longitude_1, latitude_2, longitude_2):
    """Approximate distance between two lat/lon points (equirectangular)."""
    # scale longitude differences by cos(latitude) of the first point
    lon_scale = mod_math.cos(latitude_1 / 180. * mod_math.pi)
    d_lat = latitude_1 - latitude_2
    d_lon = (longitude_1 - longitude_2) * lon_scale
    return mod_math.sqrt(d_lat * d_lat + d_lon * d_lon) * ONE_DEGREE
def median_date(dt_list):
    """Calculate the median datetime from a datetime list.

    For an even-length list, averages the two middle values via mean_date.
    """
    # '//' not '/': true division would produce a float and make the
    # subsequent indexing raise TypeError on Python 3
    idx = len(dt_list) // 2
    if len(dt_list) % 2 == 0:
        return mean_date([dt_list[idx - 1], dt_list[idx]])
    return dt_list[idx]
def verify_signature(self, addr):
    """Return whether this object's signature was produced by *addr*."""
    return verify(virtualchain.address_reencode(addr), self.get_plaintext_to_sign(), self.sig)
def log_percent(self):
done = self.total - self.todo
percent = int(float(done) / self.total * 100)
if not hasattr(self, 'prev_percent'):
self.prev_percent = 0
self.progress('Sent %s of data in %d %s task(s)',
humansize(self.sent.sum()), self.tota... | Log the progress of the computation in percentage |
def run_file(self, filename):
import __main__
__main__.__dict__.clear()
__main__.__dict__.update({
"__name__": "__main__",
"__file__": filename,
"__builtins__": __builtins__,
})
with open(filename, "rb") as fp:
statement = compile(f... | Run the file `filename` with trace |
def drawQuad(self, img=None, quad=None, thickness=30):
if img is None:
img = self.img
if quad is None:
quad = self.quad
q = np.int32(quad)
c = int(img.max())
cv2.line(img, tuple(q[0]), tuple(q[1]), c, thickness)
cv2.line(img, tuple(q[1]), t... | Draw the quad into given img |
def kill(self, sig):
    """Ask the event loop to stop; *sig* is accepted for interface parity but unused."""
    if not self.is_alive():
        return
    loop = self._loop
    if loop:
        loop.call_soon_threadsafe(loop.stop)
def close(self):
if not (yield from super().close()):
return False
adapters = self._ethernet_adapters + self._serial_adapters
for adapter in adapters:
if adapter is not None:
for nio in adapter.ports.values():
if nio and isinstance(nio,... | Closes this IOU VM. |
def json_encode(obj, serialize):
if hasattr(obj, 'to_dict'):
return obj.to_dict(serialize=serialize)
elif isinstance(obj, datetime):
return obj.date().isoformat()
elif isinstance(obj, date):
return obj.isoformat()
elif isinstance(obj, ProxyDict):
return dict(obj)
elif... | Handle encoding complex types. |
def recv(self, mac_addr=broadcast_addr, timeout=0):
    """Read one packet off the recv queue for *mac_addr*.

    Blocks for up to *timeout* waiting for a packet; returns "" when the
    queue stays empty.  When the link is not listening it only logs and
    implicitly returns None.
    """
    if self.keep_listening:
        try:
            return self.inq[str(mac_addr)].get(timeout=timeout)
        except Empty:
            return ""
    else:
        self.log("is down.")
def _checker_mixer(slice1,
slice2,
checker_size=None):
checkers = _get_checkers(slice1.shape, checker_size)
if slice1.shape != slice2.shape or slice2.shape != checkers.shape:
raise ValueError('size mismatch between cropped slices and checkers!!!')
mixed = slice1... | Mixes the two slices in alternating areas specified by checkers |
def collapse_fastq(args):
try:
umi_fn = args.fastq
if _is_umi(args.fastq):
umis = collapse(args.fastq)
umi_fn = os.path.join(args.out, splitext_plus(os.path.basename(args.fastq))[0] + "_umi_trimmed.fastq")
write_output(umi_fn, umis, args.minimum)
seqs = co... | collapse fasq files after adapter trimming |
def common_api_auth_options(f):
@click.option(
"-k",
"--api-key",
hide_input=True,
envvar="CLOUDSMITH_API_KEY",
help="The API key for authenticating with the API.",
)
@click.pass_context
@functools.wraps(f)
def wrapper(ctx, *args, **kwargs):
opts = con... | Add common API authentication options to commands. |
def cli(ctx):
    """Register the user with the server, resetting any saved credentials first."""
    credentials_path = os.path.join(os.path.expanduser('~'), '.keep', '.credentials')
    if os.path.exists(credentials_path):
        # abort=True makes click raise Abort when the user declines
        if click.confirm('[CRITICAL] Reset credentials saved in ~/.keep/.credentials ?', abort=True):
            os.remove(credentials_path)
    utils.register()
def check_updates(self, startup=False):
from spyder.workers.updates import WorkerUpdates
self.check_updates_action.setDisabled(True)
if self.thread_updates is not None:
self.thread_updates.terminate()
self.thread_updates = QThread(self)
self.worker_updates = Wor... | Check for spyder updates on github releases using a QThread. |
def options(self):
    """HTTP entry point exposing the engine's discovered options."""
    self.engine.configure({})
    conf = self.engine.as_dict()
    conf["returns"] = list(six.iterkeys(self._outputs))
    conf["args"] = list(six.iterkeys(self._inputs))
    return jsonify(conf)
def ending_long_process(self, message=""):
    """Clear the main window's status bar (briefly showing *message*) and restore the cursor."""
    QApplication.restoreOverrideCursor()
    self.show_message(message, timeout=2000)
    QApplication.processEvents()
def int_to_alpha(n, upper=True):
"Generates alphanumeric labels of form A-Z, AA-ZZ etc."
casenum = 65 if upper else 97
label = ''
count= 0
if n == 0: return str(chr(n + casenum))
while n >= 0:
mod, div = n % 26, n
for _ in range(count):
div //= 26
div %= 26
... | Generates alphanumeric labels of form A-Z, AA-ZZ etc. |
def bail(self, msg):
    """Print *msg* with client/check context and terminate gracefully (exit 0)."""
    client = self.event['client'].get('name', 'error:no-client-name')
    check = self.event['check'].get('name', 'error:no-check-name')
    print('{}: {}/{}'.format(msg, client, check))
    sys.exit(0)
def process_rules(self, path: Path, system: System):
self.context.update({
'system': system,
})
document = FileSystem.load_yaml(path, required=True)
for module, rules in document.items():
click.secho('process: {0}'.format(module), fg='green')
self._pro... | writes the templates read from the rules document |
def run(self, node, expr=None, lineno=None, with_raise=True):
if time.time() - self.start_time > self.max_time:
raise RuntimeError(ERR_MAX_TIME.format(self.max_time))
out = None
if len(self.error) > 0:
return out
if node is None:
return out
if ... | Execute parsed Ast representation for an expression. |
def _compute_nonlinear_magnitude_term(self, C, mag):
return self._compute_linear_magnitude_term(C, mag) +\
C["b3"] * ((mag - 7.0) ** 2.) | Computes the non-linear magnitude term |
def server_static(filepath):
    """Serve a static file, forcing the SVG MIME type for .svg files."""
    is_svg = filepath.endswith(".svg")
    return bottle.static_file(filepath, root=conf.StaticPath,
                              mimetype="image/svg+xml" if is_svg else "auto")
def ping(self):
    """Ping the remote server; return True when a PING response comes back."""
    msg_code = riak.pb.messages.MSG_CODE_PING_REQ
    codec = self._get_codec(msg_code)
    request = codec.encode_ping()
    resp_code, _ = self._request(request, codec)
    # boolean expression replaces the original if/else returning True/False
    return resp_code == riak.pb.messages.MSG_CODE_PING_RESP
def noise_gaussian(self, mean, std):
    """Create and register a gaussian noise variable with the given mean and std."""
    assert std > 0
    noise_vars = self.scope['ng']
    # name the symbol by its position in the noise scope
    ng = self.sym.sym('ng_{:d}'.format(len(noise_vars)))
    noise_vars.append(ng)
    return mean + std * ng
def ingest(topic, text, **kwargs):
    """Ingest the given *text* for *topic*; extra kwargs are stored alongside."""
    if not text:
        raise ValueError('No text given to ingest for topic: ' + topic)
    record = {'topic': topic, 'text': text.strip()}
    record.update(kwargs)
    db.markovify.insert(record)
def escapeForIRI(xri):
    """Escape characters that must be escaped when transforming an XRI to an IRI."""
    # escape '%' first so the cross-reference escapes added next survive
    escaped = xri.replace('%', '%25')
    return _xref_re.sub(_escape_xref, escaped)
def getProductUIDs(self):
    """Return the UIDs of the products referenced by this order's items."""
    uids = []
    for orderitem in self.objectValues('XupplyOrderItem'):
        product = orderitem.getProduct()
        if product is not None:
            # reuse the product already fetched instead of calling
            # getProduct() a second time as the original did
            uids.append(product.UID())
    return uids
def _expand_users(device_users, common_users):
expected_users = deepcopy(common_users)
expected_users.update(device_users)
return expected_users | Creates a longer list of accepted users on the device. |
def parent_directory(self):
    """Change the working directory to the parent of the current one."""
    self.chdir(os.path.join(getcwd_or_home(), os.path.pardir))
def bedInterval(self, who):
"return a BED6 entry, thus DOES coordinate conversion for minus strands"
if who == 't':
st, en = self.tStart, self.tEnd
if self.tStrand == '-':
st, en = self.tSize-en, self.tSize-st
return (self.tName, st, en, self.id, self.... | return a BED6 entry, thus DOES coordinate conversion for minus strands |
def cli(ctx, timeout, proxy, output, quiet, lyric, again):
    """Command-line tool to download NetEase-Music songs; stores the client on the context."""
    ctx.obj = NetEase(timeout, proxy, output, quiet, lyric, again)
def discover(self):
if self.transport:
if self.discovery_countdown <= 0:
self.discovery_countdown = self.discovery_interval
msg = GetService(BROADCAST_MAC, self.source_id, seq_num=0, payload={}, ack_requested=False, response_requested=True)
self.transp... | Method to send a discovery message |
def _process_oauth_response(self, response):
"Extracts the fields from an oauth response"
if response.status_code == 200:
credentials = parse_qs(response.text)
self._init_oauth(
credentials.get('oauth_token')[0],
credentials.get('oauth_token_secret... | Extracts the fields from an oauth response |
def make_url(domain, location):
    """Build a full URL from *domain* and a possibly-relative *location*."""
    parsed = urlparse(location)
    if not parsed.scheme and not parsed.netloc:
        # bare path: anchor it on the given domain
        return domain + parsed.path
    if not parsed.scheme:
        # protocol-relative (//host/path): assume http
        return 'http://' + parsed.netloc + parsed.path
    return parsed.geturl()
def P(Document, *fields, **kw):
__always__ = kw.pop('__always__', set())
projected = set()
omitted = set()
for field in fields:
if field[0] in ('-', '!'):
omitted.add(field[1:])
elif field[0] == '+':
projected.add(field[1:])
else:
projected.add(field)
if not projected:
names = set(getattr(Document... | Generate a MongoDB projection dictionary using the Django ORM style. |
def _format_ret(self, full_ret):
ret = {}
out = ''
retcode = 0
for key, data in six.iteritems(full_ret):
ret[key] = data['ret']
if 'out' in data:
out = data['out']
ret_retcode = self._get_retcode(data)
if ret_retcode > retco... | Take the full return data and format it to simple output |
def update_bgp_speaker(self, bgp_speaker_id, body=None):
    """Update the BGP speaker identified by *bgp_speaker_id* with *body*."""
    return self.put(self.bgp_speaker_path % bgp_speaker_id, body=body)
def _find_last_good_run(build):
run_name = request.form.get('run_name', type=str)
utils.jsonify_assert(run_name, 'run_name required')
last_good_release = (
models.Release.query
.filter_by(
build_id=build.id,
status=models.Release.GOOD)
.order_by(models.Release... | Finds the last good release and run for a build. |
def stop(self):
if self._process is None:
return
if self._shared:
BackendManager.SHARE_COUNT -= 1
if BackendManager.SHARE_COUNT:
return
comm('stopping backend process')
for s in self._sockets:
s._callback = None
... | Stops the backend process. |
def _add_task(self, task):
if hasattr(task, '_task_group'):
raise RuntimeError('task is already part of a group')
if self._closed:
raise RuntimeError('task group is closed')
task._task_group = self
if task.done():
self._done.append(task)
else:
... | Add an already existing task to the task group. |
def _print_links(self, model, links):
for link in links:
if link['o2o'] is True:
link_type = self._one_to_one
elif link['m2m'] is True:
link_type = self._many_to_many
else:
link_type = self._one_to_many
linked_model ... | Print links that start from model. |
def getfield(self, pkt, buf):
    """Retrieve the wrapped field after setting endianness from *pkt*."""
    self.set_endianess(pkt)
    return self.fld.getfield(pkt, buf)
def unblock_worker(self, worker_id, reason):
    """Unblock *worker_id* from working on my tasks, recording *reason*."""
    return self._process_request('UnblockWorker',
                                 {'WorkerId': worker_id, 'Reason': reason})
def simple_tokenize(name):
last_names, first_names = name.split(',')
last_names = _RE_NAME_TOKEN_SEPARATOR.split(last_names)
first_names = _RE_NAME_TOKEN_SEPARATOR.split(first_names)
first_names = [NameToken(n) if len(n) > 1 else NameInitial(n)
for n in first_names if n]
last_name... | Simple tokenizer function to be used with the normalizers. |
def make_op_return_outputs(data, inputs, change_address, fee=OP_RETURN_FEE,
send_amount=0, format='bin'):
return [
{ "script_hex": make_op_return_script(data, format=format), "value": send_amount },
{ "script_hex": make_pay_to_address_script(change_address),
"val... | Builds the outputs for an OP_RETURN transaction. |
def _get_drive_url(self, url, session):
response = session.get(url, stream=True)
if response.status_code != 200:
raise DownloadError(
'Failed to get url %s. HTTP code: %d.' % (url, response.status_code))
for k, v in response.cookies.items():
if k.startswith('download_warning'):
... | Returns url, possibly with confirmation token. |
def task_config(self, task: Task) -> Any:
    """Return the task-specific configuration, keyed by the task's class name."""
    return self.get(task.__class__.__name__)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.