code stringlengths 51 2.34k | sequence stringlengths 1.16k 13.1k | docstring stringlengths 11 171 |
|---|---|---|
def _build_message(self, to, text, subject=None, mtype=None, unsubscribe_url=None):
if subject is None:
subject = u'%s' % _('No Subject')
if mtype == 'html':
msg = self.mime_multipart()
text_part = self.mime_multipart('alternative')
text_part.attach(self.m... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '16']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_build_message'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '10', '13']}; {'id': '4', 'type': 'identifier', ... | Constructs a MIME message from message and dispatch models. |
def compress(func):
@wraps(func)
def wrapper(*args, **kwargs):
result = func(*args, **kwargs)
if ('gzip' in bottle.request.headers.get('Accept-Encoding', '') and
isinstance(result, string_type) and
len(result) > 1024):
if isinstance(result, unicode):
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'compress'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'func'}; ... | Compress route return data with gzip compression |
def cleanup_virtualenv(bare=True):
if not bare:
click.echo(crayons.red("Environment creation aborted."))
try:
vistir.path.rmtree(project.virtualenv_location)
except OSError as e:
click.echo(
"{0} An error occurred while removing {1}!".format(
crayons.red("... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'cleanup_virtualenv'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'default_parameter', 'children': ['5',... | Removes the virtualenv directory from the system. |
def last_commit():
try:
root = subprocess.check_output(
['hg', 'parent', '--template={node}'],
stderr=subprocess.STDOUT).strip()
return root.decode('utf-8')
except subprocess.CalledProcessError:
return None | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '4']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'last_commit'}; {'id': '3', 'type': 'parameters', 'children': []}; {'id': '4', 'type': 'block', 'children': ['5']}; {'id': '5', 'type':... | Returns the SHA1 of the last commit. |
def _serializer(obj):
import datetime
if isinstance(obj, datetime.datetime):
if obj.utcoffset() is not None:
obj = obj - obj.utcoffset()
return obj.__str__()
return obj | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_serializer'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'obj'}... | helper function to serialize some objects for prettier return |
def list_to_json(source_list):
result = []
for item in source_list:
result.append(item.to_json())
return result | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'list_to_json'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'sour... | Serialise all the items in source_list to json |
def register(self, recipe):
if not isinstance(recipe, (list, tuple)):
recipe = [recipe, ]
for item in recipe:
recipe = self.get_recipe_instance_from_class(item)
self._registry[recipe.slug] = recipe | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'register'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'sel... | Registers a new recipe class. |
def _api_get(path, server=None):
server = _get_server(server)
response = requests.get(
url=_get_url(server['ssl'], server['url'], server['port'], path),
auth=_get_auth(server['user'], server['password']),
headers=_get_headers(),
verify=False
)
return _api_... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_api_get'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'pat... | Do a GET request to the API |
def register_actionhandler(self, action_handler: type) -> None:
for k in action_handler.__dict__:
if k.startswith('_'):
continue
app = action_handler_adapter(action_handler, k)
self.register_app(k, app) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9', '11']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'register_actionhandler'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children... | register class as action handler |
def OnDeleteCols(self, event):
bbox = self.grid.selection.get_bbox()
if bbox is None or bbox[1][1] is None:
del_point = self.grid.actions.cursor[1]
no_cols = 1
else:
del_point = bbox[0][1]
no_cols = self._get_no_rowscols(bbox)[1]
with undo.... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'OnDeleteCols'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ... | Deletes columns from all tables of the grid |
def place_notes_at(self, notes, at):
for x in self.bar:
if x[0] == at:
x[0][2] += notes | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'place_notes_at'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'v... | Place notes at the given index. |
def cancelOperation(self):
if self.isLongTouchingPoint:
self.toggleLongTouchPoint()
elif self.isTouchingPoint:
self.toggleTouchPoint()
elif self.isGeneratingTestCondition:
self.toggleGenerateTestCondition() | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'cancelOperation'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 's... | Cancels the ongoing operation if any. |
def release(self):
lock = vars(self).pop('lock', missing)
lock is not missing and self._release(lock) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'release'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {... | Release the lock and cleanup |
def pending_tasks(self, res):
"Synchronized access to tasks"
jobs, lock = self._jobs
with lock:
return jobs[res].copy() | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'pending_tasks'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':... | Synchronized access to tasks |
def enable_reporting(self):
if self.mode is not INPUT:
raise IOError("{0} is not an input and can therefore not report".format(self))
if self.type == ANALOG:
self.reporting = True
msg = bytearray([REPORT_ANALOG + self.pin_number, 1])
self.board.sp.write(ms... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'enable_reporting'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | Set an input pin to report values. |
def compile_pillar(self):
load = {'id': self.minion_id,
'grains': self.grains,
'saltenv': self.opts['saltenv'],
'pillarenv': self.opts['pillarenv'],
'pillar_override': self.pillar_override,
'extra_minion_data': self.extra_minion_dat... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'compile_pillar'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'se... | Return the pillar data from the master |
def load_merge_candidate(self, filename=None, config=None):
self.config_replace = False
self._load_candidate(filename, config, False) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '11']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'load_merge_candidate'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '8']}; {'id': '4', 'type': 'identifier', 'children':... | Open the candidate config and replace. |
def updatePassword(self,
user,
currentPassword,
newPassword):
return self.__post('/api/updatePassword',
data={
'user': user,
'currentPassword': currentPas... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'updatePassword'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'children': [... | Change the password of a user. |
def generate_default_schema(output):
original_path = os.path.join(os.path.dirname(__file__),
'data',
'randomnames-schema.json')
shutil.copyfile(original_path, output) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'generate_default_schema'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'va... | Get default schema for fake PII |
def delete_job_prefix(self, name, persist=True):
for job in list(self.opts['schedule'].keys()):
if job.startswith(name):
del self.opts['schedule'][job]
for job in self._get_schedule(include_opts=False):
if job.startswith(name):
log.warning("Cannot ... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'delete_job_prefix'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [],... | Deletes a job from the scheduler. Ignores jobs from pillar |
def release():
if not is_working_tree_clean():
print('Your working tree is not clean. Refusing to create a release.')
return
print('Rebuilding the AUTHORS file to check for modifications...')
authors()
if not is_working_tree_clean():
print('Your working tree is not clean after th... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '4']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'release'}; {'id': '3', 'type': 'parameters', 'children': []}; {'id': '4', 'type': 'block', 'children': ['5', '17', '22', '26', '45', '... | Create a new release and upload it to PyPI. |
def __get_job_status(self):
job = self.__get_job()
if "succeeded" in job.obj["status"] and job.obj["status"]["succeeded"] > 0:
job.scale(replicas=0)
if self.print_pod_logs_on_exit:
self.__print_pod_logs()
if self.delete_on_success:
self... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '__get_job_status'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | Return the Kubernetes job status |
def add(self, element):
key = self._transform(element)
if key not in self._elements:
self._elements[key] = element | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'add'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; ... | Add an element to this set. |
def retry_unpaid_invoices(self):
self._sync_invoices()
for invoice in self.invoices.filter(paid=False, closed=False):
try:
invoice.retry()
except InvalidRequestError as exc:
if str(exc) != "Invoice is already paid":
raise | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'retry_unpaid_invoices'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'valu... | Attempt to retry collecting payment on the customer's unpaid invoices. |
def handle(self, msg):
debug_msg = ': {!r}'.format(msg) if self.debug else ''
log.debug('request: %d bytes%s', len(msg), debug_msg)
buf = io.BytesIO(msg)
code, = util.recv(buf, '>B')
if code not in self.methods:
log.warning('Unsupported command: %s (%d)', msg_name(cod... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'handle'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'... | Handle SSH message from the SSH client and return the response. |
def _allowAnotherAt(cls, parent):
site = parent.get_site()
if site is None:
return False
return not cls.peers().descendant_of(site.root_page).exists() | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_allowAnotherAt'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value... | You can only create one of these pages per site. |
def generate_changelog(context):
changelog_content = [
'\n
% (
context.new_version,
context.repo_url,
context.current_version,
context.new_version,
)
]
git_log_content = None
git_log = 'log --oneline --no-merges --no-color'.split(' ... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'generate_changelog'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':... | Generates an automatic changelog from your commit messages. |
def run_coroutine_threadsafe(self, coro, loop=None, callback=None):
if not asyncio.iscoroutine(coro):
raise TypeError("A await in coroutines. object is required")
loop = loop or self.loop
future = NewFuture(callback=callback)
def callback_func():
try:
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '12']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'run_coroutine_threadsafe'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '9']}; {'id': '4', 'type': 'identifier', 'c... | Be used when loop running in a single non-main thread. |
def load(self):
glTexImage3D(GL_TEXTURE_3D, 0, GL_LUMINANCE16_ALPHA16,
self.width, self.width, self.width, 0, GL_LUMINANCE_ALPHA,
GL_UNSIGNED_SHORT, ctypes.byref(self.data)) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'load'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'id... | Load the noise texture data into the current texture unit |
def setStation(self, number):
if number < 0:
number = len(self.stations) - 1
elif number >= len(self.stations):
number = 0
self.selection = number
maxDisplayedItems = self.bodyMaxY - 2
if self.selection - self.startPos >= maxDisplayedItems:
sel... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'setStation'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 's... | Select the given station number |
def url_ok(match_tuple: MatchTuple) -> bool:
try:
result = requests.get(match_tuple.link, timeout=5)
return result.ok
except (requests.ConnectionError, requests.Timeout):
return False | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8', '10']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'url_ok'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'typed_parameter', 'children': ['5', '6']}; ... | Check if a URL is reachable. |
def _get_fname_nio(store):
try:
f = store.ds.file
except AttributeError:
return None
try:
return f.path
except AttributeError:
return None | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_get_fname_nio'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'st... | Try to get the file name from the NioDataStore store |
def _get(self, name, interval, config, timestamp, **kws):
i_bucket, r_bucket, i_key, r_key = self._calc_keys(config, name, timestamp)
fetch = kws.get('fetch') or self._type_get
process_row = kws.get('process_row') or self._process_row
rval = OrderedDict()
if config['coarse']:
data = process_ro... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '11']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_get'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '8', '9']}; {'id': '4', 'type': 'identifier', 'children': ... | Fetch a single interval from redis. |
def _process_pending_variables(self):
self._pending_variables, pending = {}, self._pending_variables
for name, data in pending.items():
self[name] = data | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_process_pending_variables'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], ... | Try to apply the variables that were set but not known yet. |
async def get(self):
shepherd = self.request.app.vmshepherd
data = {'presets': {}, 'config': shepherd.config}
presets = await shepherd.preset_manager.list_presets()
runtime = shepherd.runtime_manager
for name in presets:
preset = shepherd.preset_manager.get_preset(nam... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'get'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'id'... | Inject all preset data to Panel and Render a Home Page |
def calc_query(self):
if self.query_dist is None:
self.query_dist = self.exp4p_.next(-1, None, None)
else:
self.query_dist = self.exp4p_.next(
self.calc_reward_fn(),
self.queried_hist_[-1],
self.dataset.data[self.queried_hist_[-1]][... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'calc_query'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}... | Calculate the sampling query distribution |
def visit_Compound(self, node):
self.memory.append_scope()
for child in node.children:
return_value = self.visit(child)
if isinstance(child, ReturnStatement):
return return_value
if isinstance(child, (IfStatement, WhileStatement)):
if r... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'visit_Compound'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'... | Visitor for `Compound` AST node. |
def buildFITSName(geisname):
_indx = geisname.rfind('.')
_fitsname = geisname[:_indx] + '_' + geisname[_indx + 1:-1] + 'h.fits'
return _fitsname | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'buildFITSName'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'gei... | Build a new FITS filename for a GEIS input image. |
def check_has_docstring(self, api):
if not api.__doc__:
msg = 'The Api class "{}" lacks a docstring.'
return [msg.format(api.__name__)] | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'check_has_docstring'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'v... | An API class must have a docstring. |
def get(self, name):
if not self.loaded:
raise RegistryNotLoaded(self)
if not self._registry.get(name):
raise NotificationNotRegistered(
f"Notification not registered. Got '{name}'."
)
return self._registry.get(name) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'get'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; ... | Returns a Notification by name. |
def all(self):
class_list = list(self.get_class_list())
if not class_list:
self.cache = []
return []
if self.cache is not None:
return self.cache
results = []
for cls_path in class_list:
module_name, class_name = cls_path.rsplit('.'... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'all'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'id'... | Returns a list of cached instances. |
def init(cls, site):
bash_header = ""
for k,v in site.items():
bash_header += "%s=%s" % (k.upper(), v)
bash_header += '\n'
site['bash_header'] = bash_header
if cls.git_template:
print "Cloning template files..."
repo_local_copy = utils.clon... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'init'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'cls'}; ... | put site settings in the header of the script |
def getArguments(names, local_dict=None, global_dict=None):
call_frame = sys._getframe(2)
clear_local_dict = False
if local_dict is None:
local_dict = call_frame.f_locals
clear_local_dict = True
try:
frame_globals = call_frame.f_globals
if global_dict is None:
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '11']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'getArguments'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '8']}; {'id': '4', 'type': 'identifier', 'children': [], 'va... | Get the arguments based on the names. |
def Title(self):
titlefield = self.Schema().getField('title')
if titlefield.widget.visible:
return safe_unicode(self.title).encode('utf-8')
else:
return safe_unicode(self.id).encode('utf-8') | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'Title'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'i... | Return the Batch ID if title is not defined |
def team_scores(self, team_scores, time):
headers = ['Date', 'Home Team Name', 'Home Team Goals',
'Away Team Goals', 'Away Team Name']
result = [headers]
result.extend([score["utcDate"].split('T')[0],
score['homeTeam']['name'],
sco... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'team_scores'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'valu... | Store output of team scores to a CSV file |
def read_series_matrix(path, encoding):
assert isinstance(path, str)
accessions = None
titles = None
celfile_urls = None
with misc.smart_open_read(path, mode='rb', try_gzip=True) as fh:
reader = csv.reader(fh, dialect='excel-tab', encoding=encoding)
for l in reader:
if no... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'read_series_matrix'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'va... | Read the series matrix. |
def _get_fashion_mnist(directory):
for filename in [
_MNIST_TRAIN_DATA_FILENAME, _MNIST_TRAIN_LABELS_FILENAME,
_MNIST_TEST_DATA_FILENAME, _MNIST_TEST_LABELS_FILENAME
]:
generator_utils.maybe_download(directory,
_FASHION_MNIST_LOCAL_FILE_PREFIX + filename,
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_get_fashion_mnist'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':... | Download all FashionMNIST files to directory unless they are there. |
def kron(a, b):
if hasattr(a, '__kron__'):
return a.__kron__(b)
if a is None:
return b
else:
raise ValueError(
'Kron is waiting for two TT-vectors or two TT-matrices') | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'kron'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'a'}; {'... | Kronecker product of two TT-matrices or two TT-vectors |
def find_existing_page(self, titles_hierarchy):
titles_filters = {'publishing_is_draft': True}
for parent_count, ancestor_title \
in enumerate(titles_hierarchy[::-1]):
parent_path = '__'.join(['parent'] * parent_count)
filter_name = '%s%stranslations__title' % (
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'find_existing_page'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'va... | Find and return existing page matching the given titles hierarchy |
def output_file(self, _container):
p = local.path(_container)
if p.exists():
if not ui.ask("Path '{0}' already exists."
" Overwrite?".format(p)):
sys.exit(0)
CFG["container"]["output"] = str(p) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'output_file'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | Find and writes the output path of a chroot container. |
def get(self, key, recursive=False, sorted=False, quorum=False,
timeout=None):
return self.adapter.get(key, recursive=recursive, sorted=sorted,
quorum=quorum, timeout=timeout) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '18']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'get'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '9', '12', '15']}; {'id': '4', 'type': 'identifier', 'children':... | Gets a value of key. |
def findrootname(filename):
puncloc = [filename.find(char) for char in string.punctuation]
if sys.version_info[0] >= 3:
val = sys.maxsize
else:
val = sys.maxint
for num in puncloc:
if num !=-1 and num < val:
val = num
return filename[0:val] | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'findrootname'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'file... | Return the rootname of the given file. |
def finish(self):
self.update(self.maxval)
if self.signal_set:
signal.signal(signal.SIGWINCH, signal.SIG_DFL) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'finish'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'... | Used to tell the progress is finished. |
def size(cls, crawler):
key = make_key('queue_pending', crawler)
return unpack_int(conn.get(key)) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'size'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'cls'}; ... | Total operations pending for this crawler |
def setup_endpoints(provider):
app_routing = {}
endpoints = [
AuthorizationEndpoint(
pyoidcMiddleware(provider.authorization_endpoint)),
TokenEndpoint(
pyoidcMiddleware(provider.token_endpoint)),
UserinfoEndpoint(
pyoidcMiddleware(provider.userinfo_end... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'setup_endpoints'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'p... | Setup the OpenID Connect Provider endpoints. |
def isometric_view_interactive(self):
    """Set the current interactive render window's renderer to an isometric view."""
    style = self.iren.GetInteractorStyle()
    style.GetCurrentRenderer().view_isometric()
def _clear_empty_values(args):
result = {}
for param in args:
if args[param] is not None:
result[param] = args[param]
return result | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_clear_empty_values'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'... | Scrap junk data from a dict. |
def _translate(self, embedding):
"Translates an embedding back to linear coordinates if necessary."
if embedding is None:
return None
if not self._linear:
return embedding
return [_bulk_to_linear(self.M, self.N, self.L, chain) for chain in embedding] | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_translate'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 's... | Translates an embedding back to linear coordinates if necessary. |
def printWelcomeMessage(msg, place=10):
    """Log a welcome message framed by asterisk banners, indented *place* spaces."""
    banner = '*' * 30
    logging.debug(banner)
    logging.debug(' ' * place + msg)
    logging.debug(banner + '\n')
def _logstash(url, data):
    """Issue an HTTP POST with *data* (JSON-encoded) to the logstash server.

    Returns the raw query result, including decoded body and status.
    """
    payload = salt.utils.json.dumps(data)
    return salt.utils.http.query(
        url,
        'POST',
        header_dict=_HEADERS,
        data=payload,
        decode=True,
        status=True,
        opts=__opts__,
    )
def varchar(self, field=None):
assert field is not None, "The field parameter must be passed to the 'varchar' method."
max_length = field.max_length
def source():
length = random.choice(range(1, max_length + 1))
return "".join(random.choice(general_chars) for i in xrange(... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'varchar'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self... | Returns a chunk of text, of maximum length 'max_length' |
def citedby_url(self):
    """URL to the Scopus page listing citing papers, or None if absent."""
    cite_link = self.coredata.find('link[@rel="scopus-citedby"]', ns)
    # find() yields None when the element is missing, in which case the
    # original code's .get() would raise AttributeError.
    if cite_link is None:
        return None
    return cite_link.get('href')
def _process_rules(self, rules: dict, system: System):
self._source = None
if not self._shall_proceed(rules):
return
self.context.update(rules.get('context', {}))
self.path = rules.get('path', '')
self.source = rules.get('source', None)
self._process_rule(rule... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '13']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_process_rules'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '9']}; {'id': '4', 'type': 'identifier', 'children': [], '... | process a set of rules for a target |
def prepare_page(self, *args, **kwargs):
    """Hook called after the page has loaded; a good time for extra polishing.

    This override simply delegates to the parent implementation —
    presumably kept as an explicit extension point for subclasses.
    """
    super(BaseBackend, self).prepare_page(*args, **kwargs)
def __extract_modules(self, loader, name, is_pkg):
mod = loader.find_module(name).load_module(name)
if hasattr(mod, '__method__'):
module_router = ModuleRouter(mod,
ignore_names=self.__serialize_module_paths()
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '__extract_modules'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'children'... | if module found load module and save all attributes in the module found |
def run_version(args: dict) -> int:
    """Print the current package version and return a zero exit code.

    Falls back to 'unknown' when no version is recorded in the settings.
    """
    settings = environ.package_settings
    print('VERSION: {}'.format(settings.get('version', 'unknown')))
    return 0
def handle_unlock(
mediator_state: MediatorTransferState,
state_change: ReceiveUnlock,
channelidentifiers_to_channels: ChannelMap,
) -> TransitionResult[MediatorTransferState]:
events = list()
balance_proof_sender = state_change.balance_proof.sender
channel_identifier = state_change.... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '16', '22']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'handle_unlock'}; {'id': '3', 'type': 'parameters', 'children': ['4', '8', '12']}; {'id': '4', 'type': 'typed_parameter', 'child... | Handle a ReceiveUnlock state change. |
def check_by_selector(self, selector):
    """Check (tick) the checkbox matching the CSS *selector*.

    Clicks only when the element is not already selected, so the call
    is idempotent.
    """
    checkbox = find_element_by_jquery(world.browser, selector)
    if checkbox.is_selected():
        return
    checkbox.click()
def toggle_show_source(self, checked):
    """Toggle between source-code view and the rendered docstring view."""
    show_rich = not checked
    if checked:
        self.switch_to_plain_text()
    self.docstring = show_rich
    self.force_refresh()
    self.set_option('rich_mode', show_rich)
def iscm_md_append_array(self, arraypath, member):
array_path = string.split(arraypath, ".")
array_key = array_path.pop()
current = self.metadata
for k in array_path:
if not current.has_key(k):
current[k] = {}
current = current[k]
if not cu... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'iscm_md_append_array'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': ... | Append a member to a metadata array entry |
def deliver_dashboard(schedule):
dashboard = schedule.dashboard
dashboard_url = _get_url_path(
'Superset.dashboard',
dashboard_id=dashboard.id,
)
driver = create_webdriver()
window = config.get('WEBDRIVER_WINDOW')['dashboard']
driver.set_window_size(*window)
driver.get(dashbo... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'deliver_dashboard'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ... | Given a schedule, delivery the dashboard as an email report |
def _get_connection_info():
info = 'Connection: %s,' % CONN.url
if CONN.creds is not None:
info += ' userid=%s,' % CONN.creds[0]
else:
info += ' no creds,'
info += ' cacerts=%s,' % ('sys-default' if CONN.ca_certs is None
else CONN.ca_certs)
info += ' ver... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '4']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_get_connection_info'}; {'id': '3', 'type': 'parameters', 'children': []}; {'id': '4', 'type': 'block', 'children': ['5', '13', '36', ... | Return a string with the connection info. |
def _resize(self):
lines = self.text.split('\n')
xsize, ysize = 0, 0
for line in lines:
size = self.textctrl.GetTextExtent(line)
xsize = max(xsize, size[0])
ysize = ysize + size[1]
xsize = int(xsize*1.2)
self.textctrl.SetSize((xsize, ysize))
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_resize'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {... | calculate and set text size, handling multi-line |
def loop_pengembalian_akhiran(self):
self.restore_prefix()
removals = self.removals
reversed_removals = reversed(removals)
current_word = self.current_word
for removal in reversed_removals:
if not self.is_suffix_removal(removal):
continue
i... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'loop_pengembalian_akhiran'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], '... | ECS Loop Pengembalian Akhiran |
def reduce_max(attrs, inputs, proto_obj):
    """Reduce the array along a given axis by maximum value.

    Renames the ONNX 'axes' attribute to MXNet's 'axis' and maps the op
    to 'max'.
    """
    fixed_attrs = translation_utils._fix_attribute_names(attrs, {'axes': 'axis'})
    return 'max', fixed_attrs, inputs
def relation_call(method, relation_name=None, flag=None, state=None, *args):
if relation_name:
relation = relation_from_name(relation_name)
if relation is None:
raise ValueError('Relation not found: %s' % relation_name)
elif flag or state:
relation = relation_from_flag(flag o... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '16']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'relation_call'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '8', '11', '14']}; {'id': '4', 'type': 'identifier', 'child... | Invoke a method on the class implementing a relation via the CLI |
def catalogFactory(name, **kwargs):
fn = lambda member: inspect.isclass(member) and member.__module__==__name__
catalogs = odict(inspect.getmembers(sys.modules[__name__], fn))
if name not in list(catalogs.keys()):
msg = "%s not found in catalogs:\n %s"%(name,list(kernels.keys()))
logger.erro... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'catalogFactory'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'... | Factory for various catalogs. |
def parse_data_line(self, line):
it = self._generate(line)
reader = csv.DictReader(it, fieldnames=self.headers)
values = reader.next()
values['DefaultResult'] = 'ResidualError'
values['LineName'] = re.sub(r'\W', '', values['LineName'].strip())
valu... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'parse_data_line'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value... | Parses the data line into a dictionary for the importer |
def commit_events(self):
    """Apply all buffered events, in sorted order, to the store and snapshot."""
    ordered = sorted(self._event_buf)
    for event in ordered:
        self.store.record_event(event)
        self._snapshot.process_event(event)
    # Reset the buffer only after every event was applied.
    self._event_buf = []
def execute(tokens):
if not validate_rc():
print('Your .vacationrc file has errors!')
echo_vacation_rc()
return
for action, value in tokens:
if action == 'show':
show()
elif action == 'log':
log_vacation_days()
elif action == 'echo':
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'execute'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'tokens'};... | Perform the actions described by the input tokens. |
async def create_tunnel_connection(self, req):
tunnel_address = req.tunnel_address
connection = await self.create_connection(tunnel_address)
response = connection.current_consumer()
for event in response.events().values():
event.clear()
response.start(HttpTunnel(self,... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'create_tunnel_connection'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [... | Create a tunnel connection |
def int_check(*args, func=None):
    """Verify that every positional argument is an integral number.

    *func* names the function being validated; when omitted (or falsy)
    it is looked up from the call stack. Raises ComplexError on the
    first non-integral argument.
    """
    func = func or inspect.stack()[2][3]
    for var in args:
        if isinstance(var, numbers.Integral):
            continue
        name = type(var).__name__
        raise ComplexError(
            f'Function {func} expected integral number, {name} got instead.')
def remove_cable_distributor(self, cable_dist):
if cable_dist in self.cable_distributors() and isinstance(cable_dist,
MVCableDistributorDing0):
self._cable_distributors.remove(cable_dist)
if self._graph.has_node(cable_dist... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'remove_cable_distributor'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [... | Removes a cable distributor from _cable_distributors if existing |
def format_value(value):
if isinstance(value, (bool, np.bool_)):
return str(value)
elif isinstance(value, (int, np.integer)):
return '{:n}'.format(value)
elif isinstance(value, (float, np.floating)):
return '{:g}'.format(value)
else:
return... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'format_value'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'valu... | Pretty-print an arbitrary value. |
def tokenize(string):
    """Match and yield every token of the input string.

    Each Token carries the matched group name, the stripped text, and
    the match span.
    """
    for m in TOKENS_REGEX.finditer(string):
        yield Token(m.lastgroup, m.group().strip(), m.span())
def wind_bft(ms):
    """Convert wind from metres per second to the Beaufort scale.

    Returns None for missing input; speeds above every threshold map to
    the top of the scale.
    """
    if ms is None:
        return None
    for bft, threshold in enumerate(_bft_threshold):
        if ms < threshold:
            return bft
    return len(_bft_threshold)
def api_run_delete(run_id):
    """Delete the given run (and its corresponding entities) via the RunFacade."""
    facade = RunFacade(current_app.config["data"])
    facade.delete_run(run_id)
    return "DELETED run %s" % run_id
def _get_csv_fieldnames(csv_reader):
fieldnames = []
for row in csv_reader:
for col in row:
field = (
col.strip()
.replace('"', "")
.replace(" ", "")
.replace("(", "")
.replace(")", "")
.lower()
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_get_csv_fieldnames'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'... | Finds fieldnames in Polarion exported csv file. |
def wrap(self, width):
res = []
prev_state = set()
part = []
cwidth = 0
for char, _width, state in zip(self._string, self._width, self._state):
if cwidth + _width > width:
if prev_state:
part.append(self.ANSI_RESET)
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'wrap'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'};... | Returns a partition of the string based on `width` |
def _iget(key, lookup_dict):
for k, v in lookup_dict.items():
if k.lower() == key.lower():
return v
return None | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_iget'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'key'};... | Case-insensitive search for `key` within keys of `lookup_dict`. |
def connected_socket(address, timeout=3):
    """Yield a socket connected to *address* (a (host, port) pair).

    Generator for use as a context manager. The socket is now closed in
    a ``finally`` block, so it no longer leaks when the ``with`` body
    raises (the original only closed on the success path).
    """
    sock = socket.create_connection(address, timeout)
    try:
        yield sock
    finally:
        sock.close()
def gwcalctyp(self):
    """Return the value of the gwcalctyp input variable.

    Built as the SC-mode digit followed by the sigma-type digit, both
    whitespace-stripped.
    """
    sigma_digit = str(self._SIGMA_TYPES[self.type]).strip()
    mode_digit = str(self._SC_MODES[self.sc_mode]).strip()
    return mode_digit + sigma_digit
def superdict(arg=()):
    """Recursive defaultdict seeded from *arg*.

    Missing keys transparently produce nested superdicts, so arbitrary
    depth can be assigned without KeyError.
    """
    result = defaultdict(superdict)
    result.update(arg)
    return result
def fetch_and_execute_function_to_run(self, key):
(driver_id, serialized_function,
run_on_other_drivers) = self.redis_client.hmget(
key, ["driver_id", "function", "run_on_other_drivers"])
if (utils.decode(run_on_other_drivers) == "False"
and self.worker.mode == ray.... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'fetch_and_execute_function_to_run'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'chi... | Run on arbitrary function on the worker. |
def macro(parser, token):
    """Work just like block, but do not render."""
    parser.build_method(token.strip(), endnodes=['endmacro'])
    # Yield an empty string so the macro contributes nothing to the output.
    return ast.Yield(value=ast.Str(s=''))
def _validate_param(rtype, fields):
try:
model = rtype_to_model(rtype)
model_fields = model.all_fields
except ValueError:
raise InvalidQueryParams(**{
'detail': 'The fields query param provided with a '
'field type of "%s" is unknown.' % rtype,
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_validate_param'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value... | Ensure the sparse fields exists on the models |
def load_json(data=None, path=None, name='NT'):
    """Map namedtuples with json data.

    Exactly one source must be supplied: *data* (a JSON string) or
    *path* (a readable file object). *name* names the generated
    namedtuple type.

    Raises:
        ValueError: if both sources are given, or (new) if neither is —
            the original silently returned None in that case.
    """
    if data and path:
        raise ValueError('expected one source and received two')
    if data:
        return mapper(json.loads(data), _nt_name=name)
    if path:
        return mapper(json.load(path), _nt_name=name)
    raise ValueError('expected one source and received none')
def _ewp_files_set(self, ewp_dic, project_dic):
try:
ewp_dic['project']['file'] = []
except KeyError:
pass
ewp_dic['project']['group'] = []
i = 0
for group_name, files in project_dic['groups'].items():
ewp_dic['project']['group'].append({'name'... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_ewp_files_set'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'v... | Fills files in the ewp dictionary |
def unbind(self, exchange, source, routing_key='', nowait=True,
arguments={}, ticket=None, cb=None):
nowait = nowait and self.allow_nowait() and not cb
args = Writer()
args.write_short(ticket or self.default_ticket).\
write_shortstr(exchange).\
write_shorts... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '22']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'unbind'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '10', '13', '16', '19']}; {'id': '4', 'type': 'identifie... | Unbind an exchange from another. |
def init(req, model):
    """Return the array of (lowercased) relationship names requested for inclusion.

    Each entry from the ``include`` query param is validated against the
    model's relationships and rejected if nested.
    """
    rels = model.relationships
    includes = [p.lower() for p in (req.get_param_as_list('include') or [])]
    for include in includes:
        _validate_no_nesting(include)
        _validate_rels(include, rels)
    return includes
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.