code stringlengths 51 2.34k | sequence stringlengths 1.16k 13.1k | docstring stringlengths 11 171 |
|---|---|---|
def _reduce(self, op, name, axis=0, skipna=True, numeric_only=None,
filter_type=None, **kwds):
return op(self.get_values(), skipna=skipna, **kwds) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '21']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_reduce'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '10', '13', '16', '19']}; {'id': '4', 'type': 'identifi... | perform a reduction operation |
def update_base_image(path: str):
with open(path, 'r') as file_handle:
contents = file_handle.read()
regex = re.compile('from\s+(?P<source>[^\s]+)', re.IGNORECASE)
matches = regex.findall(contents)
if not matches:
return None
match = matches[0]
os.system('docker pull {}'.format(m... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'update_base_image'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'typed_parameter', 'children': ['5', '6... | Pulls the latest version of the base image |
def clean(self, *args, **kwargs):
if not self.pk:
node = self.node
layer = Layer.objects.get(pk=node.layer_id)
if layer.participation_settings.rating_allowed is not True:
raise ValidationError("Rating not allowed for this layer")
if node.participat... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'clean'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '7']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 's... | Check if rating can be inserted for parent node or parent layer |
def add_pane(self, pane, vsplit=False):
assert isinstance(pane, Pane)
assert isinstance(vsplit, bool)
split_cls = VSplit if vsplit else HSplit
if self.active_pane is None:
self.root.append(pane)
else:
parent = self._get_parent(self.active_pane)
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'add_pane'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':... | Add another pane to this Window. |
def apply_patch(self):
patch = self.patches.get(self.storage.__class__.__name__)
if patch:
patch.apply(self) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'apply_patch'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'... | apply adjustment patch for storage |
def _get_route_info(self, request):
resolve_match = resolve(request.path)
app_name = resolve_match.app_name
namespace = resolve_match.namespace
url_name = resolve_match.url_name
view_name = resolve_match.view_name
return {
"app_name": app_name or None,
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_get_route_info'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value... | Return information about the current URL. |
def unregister(self, collector):
with self._lock:
for name in self._collector_to_names[collector]:
del self._names_to_collectors[name]
del self._collector_to_names[collector] | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'unregister'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 's... | Remove a collector from the registry. |
def angSepVincenty(ra1, dec1, ra2, dec2):
ra1_rad = np.radians(ra1)
dec1_rad = np.radians(dec1)
ra2_rad = np.radians(ra2)
dec2_rad = np.radians(dec2)
sin_dec1, cos_dec1 = np.sin(dec1_rad), np.cos(dec1_rad)
sin_dec2, cos_dec2 = np.sin(dec2_rad), np.cos(dec2_rad)
delta_ra = ra2_rad - ra1_rad
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'angSepVincenty'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'children': [... | Vincenty formula for distances on a sphere |
def commit_account_vesting(self, block_height):
log.debug("Commit all database state before vesting")
self.db.commit()
if block_height in self.vesting:
traceback.print_stack()
log.fatal("Tried to vest tokens twice at {}".format(block_height))
os.abort()
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'commit_account_vesting'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [],... | vest any tokens at this block height |
def width_aware_slice(self, index):
if wcswidth(self.s) == -1:
raise ValueError('bad values for width aware slicing')
index = normalize_slice(self.width, index)
counter = 0
parts = []
for chunk in self.chunks:
if index.start < counter + chunk.width and ind... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'width_aware_slice'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'val... | Slice based on the number of columns it would take to display the substring. |
def repr_node(self, dist, level=1):
output = [self._repr_dist(dist)]
for other, label in self.adjacency_list[dist]:
dist = self._repr_dist(other)
if label is not None:
dist = '%s [%s]' % (dist, label)
output.append(' ' * level + str(dist))
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'repr_node'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'... | Prints only a subgraph |
def check_geo(geo):
geo = copy.copy(geo)
def fix_item(item):
if isinstance(item, six.binary_type):
return item.decode()
return item
def fix_list(lst):
return [fix_item(i) for i in lst]
if isinstance(geo.reduce, six.binary_type):
geo.reduce = geo.reduce.decode(... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'check_geo'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'geo'}; ... | Checks a geo and makes sure the text fields are not binary |
def format_jid_instance_ext(jid, job):
ret = format_job_instance(job)
ret.update({
'JID': jid,
'StartTime': jid_to_time(jid)})
return ret | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'format_jid_instance_ext'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': []... | Format the jid correctly with jid included |
def print_subprocess_output(subp):
if subp:
if subp.errorcode != 0:
print('<error errorcode="%s">' % str(subp.errorcode))
print(subp.stderr)
print("</error>")
print_tag('stdout', '\n%s\n' % subp.stdout)
else:
print_tag('success', '\n%s\n' %... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'print_subprocess_output'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'va... | Prints the stdout and stderr output. |
def read_tree_from_json(srcpath):
with open(srcpath) as infile:
json_tree = json.load(infile)
if json_tree is None:
raise ValueError('Could not find ricecooker json tree')
return json_tree | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'read_tree_from_json'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'... | Load ricecooker json tree data from json file at `srcpath`. |
def send_json_message(address, message, **kwargs):
data = {
'message': message,
}
if not kwargs.get('subject_id'):
data['subject_id'] = address
data.update(kwargs)
hxdispatcher.send(address, data) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'send_json_message'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [],... | a shortcut for message sending |
def __buttonEvent(event):
global boxRoot, __widgetTexts, __replyButtonText
__replyButtonText = __widgetTexts[event.widget]
boxRoot.quit() | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '__buttonEvent'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'eve... | Handle an event that is generated by a person clicking a button. |
def from_quad_tree(cls, quad_tree):
assert bool(re.match('^[0-3]*$', quad_tree)), 'QuadTree value can only consists of the digits 0, 1, 2 and 3.'
zoom = len(str(quad_tree))
offset = int(math.pow(2, zoom)) - 1
google_x, google_y = [reduce(lambda result, bit: (result << 1) | bit, bits, 0)
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'from_quad_tree'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'... | Creates a tile from a Microsoft QuadTree |
def any_has_focus(self):
f = (self.hasFocus() or self.parent.hasFocus() or
self.tips.hasFocus() or self.canvas.hasFocus())
return f | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'any_has_focus'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'sel... | Returns if tour or any of its components has focus. |
def create_roteiro(self):
return Roteiro(
self.networkapi_url,
self.user,
self.password,
self.user_ldap) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'create_roteiro'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'se... | Get an instance of roteiro services facade. |
def save_swagger_spec(self, filepath=None):
if filepath is True or filepath is None:
filepath = self.file_spec.format(server=self.server)
json.dump(self.origin_spec, open(filepath, 'w+'), indent=3) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'save_swagger_spec'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'val... | Saves a copy of the origin_spec to a local file in JSON format |
def _settings_changed(self, *args, **kwargs):
setting, value = kwargs['setting'], kwargs['value']
if setting == self.name:
self._reload(value) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_settings_changed'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '7']}; {'id': '4', 'type': 'identifier', 'children': [],... | Handle changes to core settings. |
def _set_slots_to_null(self, cls):
if hasattr(cls, "__slots__"):
for s in cls.__slots__:
self.__setattr__(s, Null)
for b in cls.__bases__:
self._set_slots_to_null(b) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_set_slots_to_null'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'va... | WHY ARE SLOTS NOT ACCESIBLE UNTIL WE ASSIGN TO THEM? |
def _set_state(self, state):
if state != self._association_state:
self.__log_debug('- %s -> %s', self._association_state, state)
self._association_state = state
if state == self.State.ESTABLISHED:
self.__state = 'connected'
for channel in list(self._data_c... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_set_state'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 's... | Transition the SCTP association to a new state. |
def cli(env, ipv6, test):
mgr = SoftLayer.NetworkManager(env.client)
version = 4
if ipv6:
version = 6
if not (test or env.skip_confirmations):
if not formatting.confirm("This action will incur charges on your "
"account. Continue?"):
raise ex... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'cli'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'env... | Creates a global IP. |
def timeout(self, value):
if value == TIMEOUT_SESSION:
self._config.timeout = None
self._backend_client.expires = None
else:
self._config.timeout = value
self._calculate_expires() | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'timeout'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self... | Sets a custom timeout value for this session |
def free(self):
if self._ptr is None:
return
Gauged.array_free(self.ptr)
FloatArray.ALLOCATIONS -= 1
self._ptr = None | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'free'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'id... | Free the underlying C array |
def refreshTitles(self):
for index in range(self.count()):
widget = self.widget(index)
self.setTabText(index, widget.windowTitle()) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'refreshTitles'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'sel... | Refreshes the titles for each view within this tab panel. |
def _determine_nTrackIterations(self,nTrackIterations):
if not nTrackIterations is None:
self.nTrackIterations= nTrackIterations
return None
if numpy.fabs(self.misalignment(quantity=False)) < 1./180.*numpy.pi:
self.nTrackIterations= 0
elif numpy.fabs(self.misa... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_determine_nTrackIterations'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children'... | Determine a good value for nTrackIterations based on the misalignment between stream and orbit; just based on some rough experience for now |
def read_hdf5_dict(h5f, names=None, group=None, **kwargs):
if group:
h5g = h5f[group]
else:
h5g = h5f
if names is None:
names = [key for key in h5g if _is_timeseries_dataset(h5g[key])]
out = kwargs.pop('dict_type', TimeSeriesDict)()
kwargs.setdefault('array_type', out.EntryCl... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '13']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'read_hdf5_dict'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '8', '11']}; {'id': '4', 'type': 'identifier', 'children':... | Read a `TimeSeriesDict` from HDF5 |
def _fit_m(D, a0, logp, tol=1e-7, maxiter=1000):
N,K = D.shape
s = a0.sum()
for i in xrange(maxiter):
m = a0 / s
a1 = _ipsi(logp + (m*(psi(a0) - logp)).sum())
a1 = a1/a1.sum() * s
if norm(a1 - a0) < tol:
return a1
a0 = a1
raise Exception('Failed to con... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '13']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_fit_m'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '10']}; {'id': '4', 'type': 'identifier', 'children': []... | With fixed precision s, maximize mean m |
def _sha256_sign(self, method, url, headers, body):
d = ''
sign_headers = method.upper() + '|' + url + '|'
for key, value in sorted(headers.items()):
if key.startswith('X-Mcash-'):
sign_headers += d + key.upper() + '=' + value
d = '&'
rsa_signa... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_sha256_sign'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '8']}; {'id': '4', 'type': 'identifier', 'children'... | Sign the request with SHA256. |
def errReceived(self, data):
lines = data.splitlines()
for line in lines:
log_error("*** {name} stderr *** {line}",
name=self.name,
line=self.errFilter(line)) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'errReceived'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | Connected process wrote to stderr |
def add_user(self, recipient_email):
self.import_key(emailid=recipient_email)
emailid_list = self.list_user_emails()
self.y = self.decrypt()
emailid_list.append(recipient_email)
self.encrypt(emailid_list=emailid_list) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'add_user'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'sel... | Add user to encryption |
def parse_at_element(
self,
element,
state
):
if self._attribute:
parsed_value = self._parse_attribute(element, self._attribute, state)
else:
parsed_value = self._parser_func(element.text, state)
return _hooks_apply_after_parse(self... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'parse_at_element'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], ... | Parse the primitive value at the XML element. |
def cache_data(self):
if not self.slug_name:
self.slug_name = slugify(self.name).strip()
if len(self.slug_name) > 255:
self.slug_name = self.slug_name[0:254] | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'cache_data'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}... | Cache some basic data such as financial statement metrics |
def _get_rsa_key(self):
url = 'https://steamcommunity.com/mobilelogin/getrsakey/'
values = {
'username': self._username,
'donotcache' : self._get_donotcachetime(),
}
req = self.post(url, data=values)
data = req.json()
if not data['success']... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_get_rsa_key'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self... | get steam RSA key, build and return cipher |
def _start_index(self, start=None):
if start is None:
return 0
start_stage = translate_stage_name(start)
internal_names = [translate_stage_name(s.name) for s in self._stages]
try:
return internal_names.index(start_stage)
except ValueError:
rais... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_start_index'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ... | Seek to the first stage to run. |
def send(self, data):
if self.readyState != 'open':
raise InvalidStateError
if not isinstance(data, (str, bytes)):
raise ValueError('Cannot send unsupported data type: %s' % type(data))
self.transport._data_channel_send(self, data) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'send'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'};... | Send `data` across the data channel to the remote peer. |
def _require_bucket(self, bucket_name):
if not self.exists(bucket_name) and not self.claim_bucket(bucket_name):
raise OFSException("Invalid bucket: %s" % bucket_name)
return self._get_bucket(bucket_name) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_require_bucket'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value... | Also try to create the bucket. |
def _linux_stp(br, state):
brctl = _tool_path('brctl')
return __salt__['cmd.run']('{0} stp {1} {2}'.format(brctl, br, state),
python_shell=False) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_linux_stp'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'b... | Internal, sets STP state |
def _get_filename(self):
if self._fname is None:
timestamp = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
fname = "%s-%s.log" % (timestamp, abs(id(self)))
self._fname = os.path.join(self.file_path, fname)
return self._fname | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_get_filename'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'sel... | Return a unique file name. |
def raise_error(self, message, *params, **key_params):
s = 'Parser error in '
self.xml_node_stack.reverse()
if len(self.xml_node_stack) > 1:
node = self.xml_node_stack[0]
s += '<{0}'.format(node.tag)
if 'name' in node.lattrib:
s += ' name=\"{0}... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '10']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'raise_error'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '8']}; {'id': '4', 'type': 'identifier', 'children': [],... | Raise a parse error. |
def from_record(cls, record, crs):
if 'type' not in record:
raise TypeError("The data isn't a valid record.")
return cls(to_shape(record), crs) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'from_record'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'valu... | Load vector from record. |
def parent_org_sdo_ids(self):
return [sdo.get_owner()._narrow(SDOPackage.SDO).get_sdo_id() \
for sdo in self._obj.get_organizations() if sdo] | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'parent_org_sdo_ids'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':... | The SDO IDs of the compositions this RTC belongs to. |
def add_child(self, child):
if not isinstance(child, DependencyNode):
raise TypeError('"child" must be a DependencyNode')
self._children.append(child) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'add_child'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'se... | Add a child node |
def sink_pubsub(client, to_delete):
topic = _sink_pubsub_setup(client)
to_delete.append(topic)
SINK_NAME = "robots-pubsub-%d" % (_millis(),)
FILTER = "logName:apache-access AND textPayload:robot"
UPDATED_FILTER = "textPayload:robot"
DESTINATION = "pubsub.googleapis.com/%s" % (topic.full_name,)
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'sink_pubsub'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | Sink log entries to pubsub. |
def _get_internal_field_by_name(self, name):
field = self._all_fields.get(name, self._all_fields.get('%s.%s' % (self._full_name, name)))
if field is not None:
return field
for field_name in self._all_fields:
if field_name.endswith('.%s' % name):
return sel... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_get_internal_field_by_name'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children'... | Gets the field by name, or None if not found. |
def parse_assessor_content(experiment_config):
if experiment_config.get('assessor'):
if experiment_config['assessor'].get('builtinAssessorName'):
experiment_config['assessor']['className'] = experiment_config['assessor']['builtinAssessorName']
else:
validate_customized_file(e... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'parse_assessor_content'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'val... | Validate whether assessor in experiment_config is valid |
def read_kwfile(fname):
d={}
f=open(fname)
for line in f:
try:
kvpair=re.findall("(.*):: (.*)=(.*)$",line)[0]
d['name']=os.path.basename(kvpair[0])
key,val=kvpair[1:]
d[key.lower()]=val
except (ValueError,IndexError):
break
f.cl... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'read_kwfile'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'fname... | Syntax used as of r452 in commissioning tests |
def id(self, opts_id):
old_id = self._id
self._id = opts_id
if old_id is not None:
cleanup_custom_options(old_id)
if opts_id is not None and opts_id != old_id:
if opts_id not in Store._weakrefs:
Store._weakrefs[opts_id] = []
ref = weakr... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'id'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {... | Handles tracking and cleanup of custom ids. |
def reformat_cmd(self, text):
text = text.replace('az', '')
if text and SELECT_SYMBOL['scope'] == text[0:2]:
text = text.replace(SELECT_SYMBOL['scope'], "")
if self.shell_ctx.default_command:
text = self.shell_ctx.default_command + ' ' + text
return text | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'reformat_cmd'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ... | reformat the text to be stripped of noise |
def _find_sock():
if socket.has_ipv6:
try:
return socket.socket(socket.AF_INET6, socket.SOCK_DGRAM)
except socket.gaierror:
pass
return socket.socket(socket.AF_INET, socket.SOCK_DGRAM) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '4']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_find_sock'}; {'id': '3', 'type': 'parameters', 'children': []}; {'id': '4', 'type': 'block', 'children': ['5', '30']}; {'id': '5', 't... | Create a UDP socket |
def list_organizations(self):
try:
res = self._send_request('GET', self._org_url, '', 'organizations')
if res and res.status_code in self._resp_ok:
return res.json()
except dexc.DfaClientRequestFailed:
LOG.error("Failed to send request to DCNM.") | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'list_organizations'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':... | Return list of organizations from DCNM. |
def create_table(self, names=None):
scan_shape = (1,)
for src in self._srcs:
scan_shape = max(scan_shape, src['dloglike_scan'].shape)
tab = create_source_table(scan_shape)
for s in self._srcs:
if names is not None and s.name not in names:
continue
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'create_table'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ... | Create an astropy Table object with the contents of the ROI model. |
def __isValidZIP(self, suffix):
    """Return True when *suffix* is a non-empty string ending in ``.zip``."""
    is_usable_string = bool(suffix) and isinstance(suffix, string_types)
    return is_usable_string and suffix.endswith(".zip")
def height(cls, path):
    """Return the locally-stored block height read from the headers file.

    The file at *path* is treated as a flat sequence of fixed-size block
    headers, so the latest height is (file size / header size) - 1.

    Args:
        path: filesystem path to the block-headers file.

    Returns:
        The integer block height, or None when the file does not exist.
    """
    if not os.path.exists(path):
        return None
    sb = os.stat(path)
    # Floor division: under Python 3 the original "/" produced a float
    # height, which callers would then have to truncate themselves.
    return (sb.st_size // BLOCK_HEADER_SIZE) - 1
def cli_resp_formatter(cls, resp):
if not resp.value:
return ''
if resp.status == STATUS_OK:
if type(resp.value) in (str, bool, int, float, six.text_type):
return str(resp.value)
ret = ''
val = resp.value
if not isinstance(val, ... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'cli_resp_formatter'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'va... | Override this method to provide custom formatting of cli response. |
def _send_commit_request(self, retry_delay=None, attempt=None):
if self._commit_call and not self._commit_call.active():
self._commit_call = None
if self._commit_req is not None:
raise OperationInProgress(self._commit_req)
if retry_delay is None:
retry_delay =... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '11']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_send_commit_request'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '8']}; {'id': '4', 'type': 'identifier', 'children':... | Send a commit request with our last_processed_offset |
def compiler_preprocessor_verbose(compiler, extraflags):
lines = []
with open(os.devnull, 'r') as devnull:
cmd = [compiler, '-E']
cmd += extraflags
cmd += ['-', '-v']
p = Popen(cmd, stdin=devnull, stdout=PIPE, stderr=PIPE)
p.wait()
p.stdout.close()
lines ... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'compiler_preprocessor_verbose'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'childre... | Capture the compiler preprocessor stage in verbose mode |
def _cross_validation_for_one_voxel(clf, vid, num_folds, subject_data, labels):
skf = model_selection.StratifiedKFold(n_splits=num_folds,
shuffle=False)
scores = model_selection.cross_val_score(clf, subject_data,
y=labels,
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_cross_validation_for_one_voxel'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '8']}; {'id': '4', 'type': 'iden... | Score classifier on data using cross validation. |
def _key(self, username, frozen=False):
if frozen:
return self.frozen + username
return self.prefix + username | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_key'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'se... | Translate a username into a key for Redis. |
def enable_all_cpu(self):
    """Bring every currently-offline CPU back online.

    Iterates the "offline" CPU ranges and writes ``1`` into each CPU's
    sysfs ``online`` file.
    """
    offline_cpus = self.__get_ranges("offline")
    for cpu_id in offline_cpus:
        online_file = path.join("cpu%i" % cpu_id, "online")
        self.__write_cpu_file(online_file, b"1")
def remove_product_version_from_build_configuration(id=None, name=None, product_version_id=None):
    """Remove a ProductVersion from association with a BuildConfiguration.

    Delegates to the raw endpoint call; the JSON payload is pretty-printed
    when one is returned, otherwise None is returned.
    """
    response = remove_product_version_from_build_configuration_raw(
        id, name, product_version_id)
    if not response:
        return None
    return utils.format_json_list(response)
def merge_dictionaries(a, b):
    """Return a new dict combining the keys of *a* and *b*.

    Keys present in both dictionaries take their value from *b*.
    Neither input is mutated.
    """
    # Dict unpacking replaces the two manual copy loops; later sources win
    # on duplicate keys, matching the original loop order (a then b).
    return {**a, **b}
def clean_whitespace(statement):
    """Collapse consecutive whitespace in the statement text.

    Newlines, carriage returns and tabs become spaces, the text is
    stripped, and runs of spaces are reduced to a single space. The
    statement is mutated in place and also returned.
    """
    import re
    text = statement.text
    for control_char in ('\n', '\r', '\t'):
        text = text.replace(control_char, ' ')
    statement.text = re.sub(' +', ' ', text.strip())
    return statement
def cmd_loadfile(args):
    """Menu callback: load a log file named by *args*.

    Multiple arguments are joined into a single path. A missing file is
    reported and skipped; on Windows backslashes are normalised to
    forward slashes before loading.
    """
    fileargs = args[0] if len(args) == 1 else " ".join(args)
    if not os.path.exists(fileargs):
        print("Error loading file ", fileargs)
        return
    if os.name == 'nt':
        fileargs = fileargs.replace("\\", "/")
    loadfile(fileargs.strip('"'))
def comment_sync(self, comment):
    """Persist *comment* on the host and broadcast it to subscribers."""
    host = self.host
    host.update(key="comment", value=comment)
    host.emit("commented", comment=comment)
def branches(config, **kwargs):
    """Show current branch points in the migration history.

    Runs alembic's ``branches`` command while holding the migration lock
    so concurrent migration commands cannot interleave.
    """
    engine = config.registry["sqlalchemy.engine"]
    with alembic_lock(engine, config.alembic_config()) as alembic_config:
        alembic.command.branches(alembic_config, **kwargs)
def center_of_mass(self):
    """Calculate the mass-weighted average fractional position of the slab.

    Each site contributes its species' atomic weight; the weighted mean
    is taken over the fractional coordinates along axis 0.
    """
    site_weights = [site.species.weight for site in self]
    return np.average(self.frac_coords, weights=site_weights, axis=0)
def _build_query_url(self, page = None, verbose = False):
query = []
if len(self.filters) > 0:
query.append(urlencode(self.filters))
if self.sort:
query_str = u"%s=%s" % (u"sort", self.sort)
query.append(query_str)
if self.sort_by:
query_st... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '11']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_build_query_url'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '8']}; {'id': '4', 'type': 'identifier', 'children': [],... | builds the url to call |
def delete_local_operator(self, onnx_name):
    """Remove the operator registered under *onnx_name*.

    Raises:
        RuntimeError: if the name is missing from either registry.
    """
    known = (onnx_name in self.onnx_operator_names
             and onnx_name in self.operators)
    if not known:
        raise RuntimeError('The operator to be removed not found')
    self.onnx_operator_names.discard(onnx_name)
    del self.operators[onnx_name]
def _iter_walk(
self,
fs,
path,
namespaces=None,
):
if self.search == "breadth":
return self._walk_breadth(fs, path, namespaces=namespaces)
else:
return self._walk_depth(fs, path, namespaces=namespaces) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '10']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_iter_walk'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'children': [], ... | Get the walk generator. |
def agent_error(e: requests.HTTPError, fatal=True):
try:
data = e.response.json()
details = data['detail']
except JSONDecodeError:
details = e.response.text or str(e.response)
lines = ('[AGENT] {}'.format(line) for line in details.splitlines())
msg = '\n' + '\n'.join(lines)
i... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '13']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'agent_error'}; {'id': '3', 'type': 'parameters', 'children': ['4', '10']}; {'id': '4', 'type': 'typed_parameter', 'children': ['5', '... | Prints an agent error and exits |
def provide_data(self):
    """Name/shape pairs for each data array provided by this iterator.

    The leading (batch) dimension of every array's shape is replaced by
    this iterator's ``batch_size``.
    """
    shapes = []
    for name, array in self.data:
        batched_shape = tuple([self.batch_size] + list(array.shape[1:]))
        shapes.append((name, batched_shape))
    return shapes
def reduce_json(data):
    """Sum the values of a JSON object, coercing each value to int.

    Values may be ints or numeric strings. Unlike the previous
    ``reduce``-based form, every value is coerced (a single-entry dict no
    longer returns its raw, possibly-string value) and an empty object
    yields 0 instead of raising TypeError.
    """
    return sum(int(v) for v in data.values())
def clear_content(self, content):
    """Strip the injected wrapper markup from *content* and return it."""
    normalized = _unicode(content)
    # sub() with an empty replacement deletes every wrapper match.
    return self.wrapper_match.sub("", normalized)
def load_cash_balances(self):
from gnucash_portfolio.accounts import AccountsAggregate, AccountAggregate
cfg = self.__get_config()
cash_root_name = cfg.get(ConfigKeys.cash_root)
gc_db = self.config.get(ConfigKeys.gnucash_book_path)
with open_book(gc_db, open_if_lock=True) as book... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'load_cash_balances'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':... | Loads cash balances from GnuCash book and recalculates into the default currency |
def validate_available_choice(enum, to_value):
if to_value is None:
return
if type(to_value) is not int:
try:
to_value = int(to_value)
except ValueError:
message_str = "'{value}' cannot be converted to int"
message = _(six.text_type(message_str))
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'validate_available_choice'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': ... | Validate that to_value is defined as a value in enum. |
def _get_user_info(self, access_token):
info_response = self._call('GET', self.info_url, params={'access_token': access_token})
user_info = info_response.get('info')
return user_info | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_get_user_info'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'... | Return Clef user info. |
def _conv_general_shape_tuple(self, lhs_shape, rhs_shape, window_strides,
padding, dimension_numbers):
lhs_perm, rhs_perm, out_perm = self._conv_general_permutations(
dimension_numbers)
lhs_trans = onp.take(lhs_shape, lhs_perm)
rhs_trans = onp.take(rhs_shape, rhs_perm... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '10']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_conv_general_shape_tuple'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '8', '9']}; {'id': '4', 'type': 'iden... | Generalized computation of conv shape. |
def status_message(self):
msg = None
if self.last_ddns_response in response_messages.keys():
return response_messages.get(self.last_ddns_response)
if 'good' in self.last_ddns_response:
ip = re.search(r'(\d{1,3}\.?){4}', self.last_ddns_response).group()
msg = "... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'status_message'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'se... | Return friendly response from API based on response code. |
def format(self, value):
    """Convert a dict of processed values into a dict of raw values.

    ``Arguments`` instances already iterate as (key, value) pairs; any
    other mapping is converted via ``iteritems``.
    """
    pairs = value if isinstance(value, Arguments) else value.iteritems()
    return {key: self.fields[key].format(raw) for key, raw in pairs}
def close(self):
    """Close the connection with the q service, if one is open."""
    if not self._connection:
        return
    # Shut down the file wrapper before the socket it wraps.
    self._connection_file.close()
    self._connection_file = None
    self._connection.close()
    self._connection = None
def schema_from_context(context):
    """Determine the (schema, many) pair appropriate for *context*.

    Falls back to ``BaseSchema`` when no item class is supplied.
    """
    item_class = context.get('class')
    schema = serializer_mapping[item_class] if item_class else BaseSchema
    return schema, context.get('many', False)
def pretty_dict_string(d, indent=0):
    """Render a (possibly nested) dict as sorted ``key=value`` lines.

    Nested dicts are emitted recursively with one extra space of
    indentation per level.
    """
    parts = []
    for key in sorted(d):
        value = d[key]
        prefix = ' ' * indent + str(key)
        if isinstance(value, dict):
            parts.append(prefix + '\n' + pretty_dict_string(value, indent + 1))
        else:
            parts.append(prefix + '=' + str(value) + '\n')
    return ''.join(parts)
def ping(self):
randomToken = ''.join(random.choice(string.ascii_uppercase + string.ascii_lowercase + string.digits) for x in range(32))
r = self.doQuery('ping?data=' + randomToken)
if r.status_code == 200:
if r.json()['data'] == randomToken:
return True
retur... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'ping'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'id... | Return true if the server successfully pinged |
def _disbatch_runner(self, chunk):
full_return = chunk.pop('full_return', False)
pub_data = self.saltclients['runner'](chunk)
tag = pub_data['tag'] + '/ret'
try:
event = yield self.application.event_listener.get_event(self, tag=tag)
ret = event if full_return else... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_disbatch_runner'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'valu... | Disbatch runner client commands |
def update_db():
logger = get_logger(PROCESS_SCHEDULER)
managed_process_dao = ManagedProcessDao(logger)
managed_process_dao.clear()
for process_name, process_entry in context.process_context.items():
if not isinstance(process_entry, ManagedProcessEntry):
continue
managed_proc... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '4']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'update_db'}; {'id': '3', 'type': 'parameters', 'children': []}; {'id': '4', 'type': 'block', 'children': ['5', '12', '19', '25']}; {'i... | writes to managed_process table records from the context.process_context |
def add_item(self, item):
    """Add a new script or phrase to the folder, linking it back here."""
    self.items.append(item)
    item.parent = self
def delete_downloads():
    """Delete all downloaded examples to free space or refresh the files.

    The examples directory is removed wholesale and then recreated empty.
    """
    examples_dir = vtki.EXAMPLES_PATH
    shutil.rmtree(examples_dir)
    os.makedirs(examples_dir)
    return True
def increment(cls, v):
    """Return *v* with its revision number bumped by one.

    Accepts either an ObjectNumber or a string parseable into one.
    """
    number = v if isinstance(v, ObjectNumber) else ObjectNumber.parse(v)
    return number.rev(number.revision + 1)
def parse_route(cls, template):
regex = ''
last_pos = 0
for match in cls.ROUTES_RE.finditer(template):
regex += re.escape(template[last_pos:match.start()])
var_name = match.group(1)
expr = match.group(2) or '[^/]+'
expr = '(?P<%s>%s)' % (var_name, ... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'parse_route'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | Parse a route definition, and return the compiled regex that matches it. |
def write_config(ip, mac, single, double, long, touch):
click.echo("Write configuration to device %s" % ip)
data = {
'single': single,
'double': double,
'long': long,
'touch': touch,
}
request = requests.post(
'http://{}/{}/{}/'.format(ip, URI, mac), data=data, ti... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '10']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'write_config'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '8', '9']}; {'id': '4', 'type': 'identifier', 'chi... | Write the current configuration of a myStrom button. |
def _save_fastq_space(items):
to_cleanup = {}
for data in (utils.to_single_data(x) for x in items):
for fname in data.get("files", []):
if os.path.realpath(fname).startswith(dd.get_work_dir(data)):
to_cleanup[fname] = data["config"]
for fname, config in to_cleanup.items()... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_save_fastq_space'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ... | Potentially save fastq space prior to merging, since alignments done. |
def _construct_regex(cls, fmt):
return re.compile(fmt.format(**vars(cls)), flags=re.U) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_construct_regex'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'valu... | Given a format string, construct the regex with class attributes. |
def rollsingle(self, func, window=20, name=None, fallback=False,
align='right', **kwargs):
rname = 'roll_{0}'.format(func)
if fallback:
rfunc = getattr(lib.fallback, rname)
else:
rfunc = getattr(lib, rname, None)
if not rfunc:
rfunc = getattr(lib.fallback, ... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '20']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'rollsingle'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '9', '12', '15', '18']}; {'id': '4', 'type': 'identifier'... | Efficient rolling window calculation for min, max type functions |
def device_initialize(self):
    """Assign host-identity properties to the device context object.

    Runs the stock initializer first, then fills in platform-derived
    fields: node name, OS version and the default locale.
    """
    existing_device_initialize(self)
    node_name = platform.node()
    default_locale = locale.getdefaultlocale()[0]
    self.type = 'Other'
    self.id = node_name
    self.os_version = platform.version()
    self.locale = default_locale
def _exec_requested_job(self):
self._timer.stop()
self._job(*self._args, **self._kwargs) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_exec_requested_job'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'... | Execute the requested job after the timer has timeout. |
def minimum(attrs, inputs, proto_obj):
    """Elementwise minimum across all *inputs* (MXNet symbol op).

    With a single input the result is min(x, x) == x, so a one-operand
    Min degenerates to the identity.
    """
    if len(inputs) > 1:
        # Fold the pairwise minimum left-to-right over every operand.
        result = inputs[0]
        for operand in inputs[1:]:
            result = symbol.minimum(result, operand)
    else:
        result = symbol.minimum(inputs[0], inputs[0])
    return result, attrs, inputs
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.