code stringlengths 51 2.34k | sequence stringlengths 1.16k 13.1k | docstring stringlengths 11 171 |
|---|---|---|
def pages(self, limit=0):
if limit > 0:
self.iterator.limit = limit
return self.iterator | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'pages'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}... | Return iterator for pages |
def query_all_issues(after):
page = count(1)
data = []
while True:
page_data = query_issues(next(page), after)
if not page_data:
break
data.extend(page_data)
return data | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'query_all_issues'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | Hits the github API for all closed issues after the given date, returns the data. |
def create_cell_renderer_combo(self, tree_view, title="title", assign=0, editable=False, model=None, function=None):
renderer_combo = Gtk.CellRendererCombo()
renderer_combo.set_property('editable', editable)
if model:
renderer_combo.set_property('model', model)
if function:
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '21']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'create_cell_renderer_combo'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '9', '12', '15', '18']}; {'id': '4', 'typ... | Function creates a CellRendererCombo with title, model |
def field_specific_errors(self):
return {
key: value for key, value in self.error_dict.items()
if key != NON_FIELD_ERRORS
} | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'field_specific_errors'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'valu... | Returns a dictionary of field-specific validation errors for this row. |
def update(self):
obj = self.__model__.objects.get_for_update(id=self.id)
for name, value in self.__dict__.items():
if name in self._properties:
setattr(obj, name, value)
obj.update()
return obj | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'update'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'... | Finds record and update it based in serializer values |
def load(fname):
content = _open(fname).read()
if PY2:
state = pickle.loads(content)
else:
state = pickle.loads(content, encoding='latin1')
voc, vec = state
if len(voc) == 2:
words, counts = voc
word_count = dict(zip(words, counts))
vocab = CountedVocabulary(word_count=... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'load'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'fname'}; {'i... | Load an embedding dump generated by `save` |
def nextSunrise(jd, lat, lon):
return swe.sweNextTransit(const.SUN, jd, lat, lon, 'RISE') | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'nextSunrise'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'valu... | Returns the JD of the next sunrise. |
def _ostaunicode(src):
if have_py_3:
bytename = src
else:
bytename = src.decode('utf-8')
try:
enc = bytename.encode('latin-1')
encbyte = b'\x08'
except (UnicodeEncodeError, UnicodeDecodeError):
enc = bytename.encode('utf-16_be')
encbyte = b'\x10'
retur... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_ostaunicode'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'src'... | Internal function to create an OSTA byte string from a source string. |
def unwrap(self):
red = [self.red[i] for i in range(self.size)]
green = [self.green[i] for i in range(self.size)]
blue = [self.blue[i] for i in range(self.size)]
if NORMALIZE_GAMMA_RAMPS:
red = [value / 65535.0 for value in red]
green = [value / 65535.0 for value ... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'unwrap'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'... | Returns a GLFWgammaramp object. |
def _slugify_internal_collection_name(self, json_repr):
collection = self._coerce_json_to_collection(json_repr)
if collection is None:
return None
internal_name = collection['name']
return slugify(internal_name) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_slugify_internal_collection_name'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'chi... | Parse the JSON, find its name, return a slug of its name |
def as_dict(self):
"json friendly dict representation"
d = {}
d["@module"] = self.__class__.__module__
d["@class"] = self.__class__.__name__
d["spin_mode"] = self.spin_mode.as_dict()
d["smearing"] = self.smearing.as_dict()
d["algorithm"] = self.algorithm.as_dict()... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'as_dict'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {... | json friendly dict representation |
def config(self, name, suffix):
"Return config variable value, defaulting to environment"
var = '%s_%s' % (name, suffix)
var = var.upper().replace('-', '_')
if var in self._config:
return self._config[var]
return os.environ[var] | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'config'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | Return config variable value, defaulting to environment |
def submit_sample_url(self, url, params={}, _extra_params={}):
self._check_user_parameters(params)
params = copy.copy(params)
params['sample-url'] = url
return self._submit(params, _extra_params=_extra_params) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '12']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'submit_sample_url'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '9']}; {'id': '4', 'type': 'identifier', 'children... | Submit a sample at a given URL for analysis. |
def change_number_matches(self, current_match=0, total_matches=0):
if current_match and total_matches:
matches_string = u"{} {} {}".format(current_match, _(u"of"),
total_matches)
self.number_matches_text.setText(matches_string)
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '11']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'change_number_matches'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '8']}; {'id': '4', 'type': 'identifier', 'children'... | Change number of match and total matches. |
def _febrl_links(df):
index = df.index.to_series()
keys = index.str.extract(r'rec-(\d+)', expand=True)[0]
index_int = numpy.arange(len(df))
df_helper = pandas.DataFrame({
'key': keys,
'index': index_int
})
pairs_df = df_helper.merge(
df_helper, on='key'
)[['index_x', ... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_febrl_links'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'df'}... | Get the links of a FEBRL dataset. |
def stop_containers(self):
while len(self._containers):
container = self._containers.pop()
try:
container.kill(signal.SIGKILL)
except docker.errors.APIError:
pass | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'stop_containers'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 's... | Stops all containers used by this instance of the backend. |
def _simple_command(self, command, arg=None, **kwargs):
self._protocol.send_command(command, arg)
return self._protocol.handle_simple_responses(**kwargs) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '11']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_simple_command'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '9']}; {'id': '4', 'type': 'identifier', 'children':... | Send a simple command. |
def list_security_group_rules(self, retrieve_all=True, **_params):
return self.list('security_group_rules',
self.security_group_rules_path,
retrieve_all, **_params) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '10']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'list_security_group_rules'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '8']}; {'id': '4', 'type': 'identifier', 'child... | Fetches a list of all security group rules for a project. |
def intinlist(lst):
for item in lst:
try:
item = int(item)
return True
except ValueError:
pass
return False | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'intinlist'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'lst'}; ... | test if int in list |
def _create_config(self):
configinfo = {'creation_date': [ datetime.datetime.now().date().isoformat()],
'author': [self.site.site_config['default_author']],
'status': [u'draft'],
'lang': [u''],
'tags': [u''],
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_create_config'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'se... | Create the default configuration dictionary for this page. |
def ping():
try:
curl_couchdb('/cozy/')
ping = True
except requests.exceptions.ConnectionError, error:
print error
ping = False
return ping | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '4']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'ping'}; {'id': '3', 'type': 'parameters', 'children': []}; {'id': '4', 'type': 'block', 'children': ['5', '30']}; {'id': '5', 'type': ... | Ping CozyDB with existing credentials |
def _delete(collection_name, spec, opts, flags):
encoded = _dict_to_bson(spec, False, opts)
return b"".join([
_ZERO_32,
_make_c_string(collection_name),
_pack_int(flags),
encoded]), len(encoded) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_delete'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'children': [], 'val... | Get an OP_DELETE message. |
def logp_partial_gradient(self, variable, calculation_set=None):
if self.verbose > 0:
print_('\t' + self.__name__ + ': logp_partial_gradient accessed.')
if not (datatypes.is_continuous(variable)
and datatypes.is_continuous(self)):
return zeros(shape(variable.value... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'logp_partial_gradient'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children':... | gets the logp gradient of this deterministic with respect to variable |
def argmax(self):
if "argmax" not in self.attrs.keys():
def f(dataset, s):
arr = dataset[s]
try:
amin = np.nanargmax(arr)
except ValueError:
amin = 0
idx = np.unravel_index(amin, arr.shape)
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'argmax'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'... | Index of the maximum, ignorning nans. |
def sample(self, histogram_logits):
histogram_probs = histogram_logits.exp()
atoms = self.support_atoms.view(1, 1, self.atoms)
return (histogram_probs * atoms).sum(dim=-1).argmax(dim=1) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'sample'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'... | Sample from a greedy strategy with given q-value histogram |
def _contains_cftime_datetimes(array) -> bool:
try:
from cftime import datetime as cftime_datetime
except ImportError:
return False
else:
if array.dtype == np.dtype('O') and array.size > 0:
sample = array.ravel()[0]
if isinstance(sample, dask_array_type):
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_contains_cftime_datetimes'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children':... | Check if an array contains cftime.datetime objects |
def as_dict(self):
return {self.FLD_OP: self._op.name,
self.FLD_MARK: self._pos,
self.FLD_FLD: self._fld} | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'as_dict'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {... | Representation as a dict for JSON serialization. |
def to_cls(self):
try:
if isinstance(self._to_cls, str):
self._to_cls = fetch_entity_cls_from_registry(self._to_cls)
except AssertionError:
pass
return self._to_cls | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'to_cls'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'... | Property to retrieve to_cls as an entity when possible |
def fw_create(self, data, fw_name=None, cache=False):
LOG.debug("FW create %s", data)
try:
self._fw_create(fw_name, data, cache)
except Exception as exc:
LOG.error("Exception in fw_create %s", str(exc)) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '12']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'fw_create'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '9']}; {'id': '4', 'type': 'identifier', 'children': [], '... | Top level FW create function. |
def remove_file(self):
if not self.fullpath or not self.archived:
raise RuntimeError()
try:
os.remove(self.fullpath)
except:
print("Error removing %s: %s" % (self.fullpath, sys.exc_info()[1])) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'remove_file'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'... | Removes archived file associated with this DP |
def format_item(item, template, name='item'):
ctx = {name: item}
return render_template_to_string(template, **ctx) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'format_item'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'valu... | Render a template to a string with the provided item in context. |
def message_info(message):
method = message.get('method')
msgid = message.get('id')
error = message.get('error')
if method and msgid is not None:
return 'method call "{}", id = "{}"'.format(method, msgid)
elif method:
return 'notification "{}"'.format(method)
elif error is not No... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'message_info'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'mess... | Return a string describing a message, for debugging purposes. |
def _sanitize_numbers(uncleaned_numbers):
cleaned_numbers = []
for x in uncleaned_numbers:
try:
cleaned_numbers.append(int(x))
except ValueError:
cleaned_numbers.append(x)
return cleaned_numbers | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_sanitize_numbers'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ... | Convert strings to integers if possible |
def requestSubsystem(self, subsystem):
data = common.NS(subsystem)
return self.sendRequest('subsystem', data, wantReply=True) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'requestSubsystem'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'valu... | Request a subsystem and return a deferred reply. |
def setup(self, redis_conn=None, host='localhost', port=6379):
if redis_conn is None:
if host is not None and port is not None:
self.redis_conn = redis.Redis(host=host, port=port)
else:
raise Exception("Please specify some form of connection "
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '14']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'setup'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '8', '11']}; {'id': '4', 'type': 'identifier', 'children': [], 'val... | Set up the redis connection |
def dict(self):
return_obj = {}
for attr in dir(self):
if not attr.startswith('__') and attr not in self.__reserved:
if isinstance(getattr(self, attr), list):
return_val = []
for item in getattr(self, attr):
if i... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'dict'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'id... | converts the class to a dictionary object |
def load_buildfile(self, target):
log.info('Loading: %s', target)
filepath = os.path.join(target.path, app.get_options().buildfile_name)
try:
repo = self.repo_state.GetRepo(target.repo)
return repo.get_file(filepath)
except gitrepo.GitError as err:
log... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'load_buildfile'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'... | Pull a build file from git. |
def vhost_remove(cls, name):
oper = cls.call('hosting.rproxy.vhost.delete', name)
cls.echo('Deleting your virtual host %s' % name)
cls.display_progress(oper)
cls.echo('Your virtual host have been removed')
return oper | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'vhost_remove'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ... | Delete a vhost in a webaccelerator |
def _from_dict(cls, _dict):
args = {}
if 'start_time' in _dict:
args['start_time'] = _dict.get('start_time')
else:
raise ValueError(
'Required property \'start_time\' not present in WordAlternativeResults JSON'
)
if 'end_time' in _dict:... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_from_dict'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'c... | Initialize a WordAlternativeResults object from a json dictionary. |
def export_data( self ):
klass = self.__class__
output = bytearray( b'\x00'*self.get_size() )
queue = []
for name in klass._fields:
self.scrub_field( name )
self.validate_field( name )
self.update_deps()
for name in klass._fields:
klass... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'export_data'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'... | Export data to a byte array. |
def formatBodyNode(root,path):
body = root
body.name = "body"
body.weight = calcFnWeight(body)
body.path = path
body.pclass = None
return body | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'formatBodyNode'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'... | Format the root node for use as the body node. |
def publish_json(self, channel, obj):
return self.publish(channel, json.dumps(obj)) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'publish_json'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'val... | Post a JSON-encoded message to channel. |
def delete(*args):
from syn.base_utils import this_module
mod = this_module(npop=3)
yield
for arg in args:
name = arg
if not isinstance(name, STR):
name = arg.__name__
delattr(mod, name) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'delete'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'list_splat_pattern', 'children': ['5']}; {'id': '... | For using then deleting objects. |
def provision(self, tool: Tool) -> docker.models.containers.Container:
if not self.is_installed(tool):
raise Exception("tool is not installed: {}".format(tool.name))
client = self.__installation.docker
return client.containers.create(tool.image) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9', '17']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'provision'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value... | Provisions a mountable Docker container for a given tool. |
def init_datamembers(self, rec):
if 'synonym' in self.optional_attrs: rec.synonym = []
if 'xref' in self.optional_attrs: rec.xref = set()
if 'subset' in self.optional_attrs: rec.subset = set()
if 'comment' in self.optional_attrs: rec.comment = ""
if 'relat... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'init_datamembers'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'valu... | Initialize current GOTerm with data members for storing optional attributes. |
async def json(self, *, encoding: str=None,
loads: JSONDecoder=DEFAULT_JSON_DECODER,
content_type: Optional[str]='application/json') -> Any:
if self._body is None:
await self.read()
if content_type:
ctype = self.headers.get(hdrs.CONTENT_TYPE,... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '25', '27']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'json'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '11', '16']}; {'id': '4', 'type': 'identifier', 'children... | Read and decodes JSON response. |
def real_ip(self):
if self._real_ip is None:
response = get(ICANHAZIP)
self._real_ip = self._get_response_text(response)
return self._real_ip | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'real_ip'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {... | The actual public IP of this host. |
def hide_navbar_items(portal):
logger.info("*** Hide Navigation Items ***")
object_ids = portal.objectIds()
object_ids = filter(lambda id: id in object_ids, NAV_BAR_ITEMS_TO_HIDE)
for object_id in object_ids:
item = portal[object_id]
item.setExcludeFromNav(True)
item.reindexObjec... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'hide_navbar_items'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ... | Hide root items in navigation |
def main():
if sys.version_info[0] < 3:
sys.stdout = codecs.getwriter("utf-8")(sys.stdout)
options = docopt.docopt(__doc__,
help=True,
version='template_remover v%s' % __VERSION__)
print(template_remover.clean(io.open(options['FILENAME']).read(... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '4']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'main'}; {'id': '3', 'type': 'parameters', 'children': []}; {'id': '4', 'type': 'block', 'children': ['5', '30', '47', '68']}; {'id': '... | Entry point for remove_template. |
def _tail_profile(self, db, interval):
latest_doc = None
while latest_doc is None:
time.sleep(interval)
latest_doc = db['system.profile'].find_one()
current_time = latest_doc['ts']
while True:
time.sleep(interval)
cursor = db['system.profil... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_tail_profile'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'va... | Tails the system.profile collection |
def ffill(arr, dim=None, limit=None):
import bottleneck as bn
axis = arr.get_axis_num(dim)
_limit = limit if limit is not None else arr.shape[axis]
return apply_ufunc(bn.push, arr,
dask='parallelized',
keep_attrs=True,
output_dtypes=[a... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '11']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'ffill'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '8']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | forward fill missing values |
def walk_json(d, func):
if isinstance(d, Mapping):
return OrderedDict((k, walk_json(v, func)) for k, v in d.items())
elif isinstance(d, list):
return [walk_json(v, func) for v in d]
else:
return func(d) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'walk_json'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'd'... | Walk over a parsed JSON nested structure `d`, apply `func` to each leaf element and replace it with result |
def aead_filename(aead_dir, key_handle, public_id):
parts = [aead_dir, key_handle] + pyhsm.util.group(public_id, 2) + [public_id]
return os.path.join(*parts) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'aead_filename'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'va... | Return the filename of the AEAD for this public_id. |
def init_limit(self, key, lower=None, upper=None, limit=False):
above = agtb(self.__dict__[key], upper)
for idx, item in enumerate(above):
if item == 0.:
continue
maxval = upper[idx]
self.log(
'{0} <{1}.{2}> above its maximum of {3}.'.f... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '15']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'init_limit'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '9', '12']}; {'id': '4', 'type': 'identifier', 'children'... | check if data is within limits. reset if violates |
def generate_specifications(self, count=1):
out = {}
copy_from = self.get_copy_from()
for arnum in range(count):
source = copy_from.get(arnum)
if source is None:
out[arnum] = {}
continue
results_range = source.getResultsRange()
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'generate_specifications'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': []... | Returns a mapping of count -> specification |
def squeeze(attrs, inputs, proto_obj):
new_attrs = translation_utils._fix_attribute_names(attrs,
{'axes' : 'axis'})
return 'squeeze', new_attrs, inputs | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'squeeze'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ... | Remove single-dimensional entries from the shape of a tensor. |
def permute(self, ba):
c = ba.copy()
for i in xrange(len(self.mapping)):
ba[i] = c[self.mapping[i]]
return ba | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'permute'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self... | Permute the bitarray ba inplace. |
def field_factory(base_class):
from .fields import TranslationField
class TranslationFieldField(TranslationField, base_class):
pass
TranslationFieldField.__name__ = "Translation%s" % base_class.__name__
return TranslationFieldField | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'field_factory'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'bas... | Takes a field base class and wrap it with ``TranslationField`` class. |
def nmltostring(nml):
if not isinstance(nml,dict):
raise ValueError("nml should be a dict !")
curstr = ""
for key,group in nml.items():
namelist = ["&" + key]
for k, v in group.items():
if isinstance(v, list) or isinstance(v, tuple):
namelist.append(k + " = " + ",".jo... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'nmltostring'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'nml'}... | Convert a dictionary representing a Fortran namelist into a string. |
def elapsed(self):
if not self.started or self._start_time is None:
return 0.0
return self._timing_data[-1][0] - self._start_time | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'elapsed'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {... | Returns the number of seconds it has been since the start until the latest entry. |
def task_stop(self, **kw):
id, task = self.get_task(**kw)
self._execute(id, 'stop')
return self.get_task(uuid=task['uuid'])[1] | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'task_stop'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'se... | Marks a task as stopped. |
def cmd_xcode(self, *args):
app_name = self.buildozer.namify(self.buildozer.config.get('app',
'package.name'))
app_name = app_name.lower()
ios_dir = ios_dir = join(self.buildozer.platform_dir, 'kivy-ios')
self.buildozer.cmd('open {}.xcodeproj'.format(
app_name), c... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'cmd_xcode'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'se... | Open the xcode project. |
def cleanup_lib(self):
if not self.using_openmp:
logging.debug('unloading shared library')
_ctypes.dlclose(self.lib._handle) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'cleanup_lib'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'... | unload the previously loaded shared library |
def saccadic_momentum_effect(durations, forward_angle,
summary_stat=nanmean):
durations_per_da = np.nan * np.ones((len(e_angle) - 1,))
for i, (bo, b1) in enumerate(zip(e_angle[:-1], e_angle[1:])):
idx = (
bo <= forward_angle) & (
forward_angle < b1) &... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'saccadic_momentum_effect'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'childre... | Computes the mean fixation duration at forward angles. |
def _sample(probability_vec):
return map(int,
numpy.random.random(probability_vec.size) <= probability_vec) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_sample'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'probabili... | Return random binary string, with given probabilities. |
def _remove_references(self, removed_part):
removed_part.parent = None
referrers_to_remove = set()
for referrer in removed_part.referrers:
if removed_part not in referrer.ancestors():
for label, referred_part in list(referrer.labels.items()):
if re... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_remove_references'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'va... | Remove labels pointing to this part and vice versa. |
def _load_version(cls, state, version):
from ._audio_feature_extractor import _get_feature_extractor
from .._mxnet import _mxnet_utils
state['_feature_extractor'] = _get_feature_extractor(state['feature_extractor_name'])
num_classes = state['num_classes']
num_inputs = state['_fea... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_load_version'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'va... | A function to load a previously saved SoundClassifier instance. |
def session(self):
from sqlalchemy.orm import sessionmaker
from sqlalchemy.event import listen
if not self.Session:
self.Session = sessionmaker(bind=self.engine)
if not self._session:
self._session = self.Session()
if self._schema:
def ... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'session'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {... | Return a SqlAlchemy session. |
def analyses_info(self):
f_config = os.path.join(self.analyses_path, 'index.yaml')
tornado.autoreload.watch(f_config)
with io.open(f_config, 'r', encoding='utf8') as f:
config = yaml.safe_load(f)
self.info.update(config)
if self.debug:
self.info['versi... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'analyses_info'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'sel... | Add analyses from the analyses folder. |
def _ask(question, default=None, data_type='str', show_hint=False):
data = default
if data_type == 'bool':
data = None
default_string = "Y" if default else "N"
while data not in ('Y', 'J', 'N', '1', '0'):
data = input("%s? [%s]: " % (question, default_string)).upper()
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '14']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_ask'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '8', '11']}; {'id': '4', 'type': 'identifier', 'children': [], 'valu... | Interactively ask the user for data |
def slicenet_params1_tiny():
hparams = slicenet_params1()
hparams.attention_type = "simple"
hparams.separability = 0
hparams.hidden_size = 128
hparams.num_hidden_layers = 2
hparams.batch_size = 512
hparams.learning_rate_warmup_steps = 200
return hparams | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '4']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'slicenet_params1_tiny'}; {'id': '3', 'type': 'parameters', 'children': []}; {'id': '4', 'type': 'block', 'children': ['5', '11', '17',... | Version for fast local runs. |
def skew_normal_expval(mu, tau, alpha):
delta = alpha / np.sqrt(1. + alpha ** 2)
return mu + np.sqrt(2 / pi / tau) * delta | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'skew_normal_expval'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': []... | Expectation of skew-normal random variables. |
def raw(self, from_, to, body):
if isinstance(to, string_types):
raise TypeError('"to" parameter must be enumerable')
return self._session.post('{}/raw'.format(self._url), json={
'from': from_,
'to': to,
'body': body,
}).json() | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'raw'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':... | Send a raw MIME message. |
def parse_sm_config():
sagemaker_config = "/opt/ml/input/config/hyperparameters.json"
if os.path.exists(sagemaker_config):
conf = {}
conf["sagemaker_training_job_name"] = os.getenv('TRAINING_JOB_NAME')
for k, v in six.iteritems(json.load(open(sagemaker_config))):
cast = v.str... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '4']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'parse_sm_config'}; {'id': '3', 'type': 'parameters', 'children': []}; {'id': '4', 'type': 'block', 'children': ['5', '9']}; {'id': '5'... | Attempts to parse SageMaker configuration returning False if it can't find it |
def iter_links_element_text(cls, element):
if element.text:
link_type = identify_link_type(element.text)
yield LinkInfo(
element=element, tag=element.tag, attrib=None,
link=element.text,
inline=False, linked=True,
base_link=... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'iter_links_element_text'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': []... | Get the element text as a link. |
def find_focusable(node):
if not node.children:
return node
if node.focus:
return find_focusable(node.children_dict[node.focus[0]]) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'find_focusable'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'no... | Search for the first focusable window within the node tree |
def file_containing_import(import_path, import_root):
if not _import_paths:
load_stdlib()
if os.path.isfile(import_root):
import_root = os.path.dirname(import_root)
search_paths = [import_root] + _import_paths
module_parts = import_path.split('.')
for i in range(len(module_parts), 0,... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'file_containing_import'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [],... | Finds the file that might contain the import_path. |
def process_role(ctx, param, value):
role = Role.query.filter(Role.name == value).first()
if not role:
raise click.BadParameter('Role with name \'%s\' not found.', value)
return role | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'process_role'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'val... | Return a role if it exists. |
def _glyph_for_complex_pattern(self, pattern):
monomer_glyphs = []
for monomer_pattern in pattern.monomer_patterns:
glyph = self._glyph_for_monomer_pattern(monomer_pattern)
monomer_glyphs.append(glyph)
if len(monomer_glyphs) > 1:
pattern.matches_key = lambda: ... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_glyph_for_complex_pattern'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children':... | Add glyph and member glyphs for a PySB ComplexPattern. |
def _imm_repr(self):
return (type(self).__name__
+ ('(' if _imm_is_persist(self) else '*(')
+ ', '.join([k + '=' + str(v) for (k,v) in six.iteritems(imm_params(self))])
+ ')') | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_imm_repr'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'};... | The default representation function for an immutable object. |
def _set_scatter_signature(self):
self._scatter_signature = (self.thet0, self.thet, self.phi0, self.phi,
self.alpha, self.beta, self.orient) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_set_scatter_signature'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'val... | Mark the amplitude and scattering matrices as up to date. |
def match(self, *args, **kwargs):
return self._any_args or \
self._arguments_rule.validate(*args, **kwargs) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'match'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '7']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 's... | Check the if these args match this expectation. |
def _query(self, filename):
log.Info('Querying size of %s' % filename)
from jottalib.JFS import JFSNotFoundError, JFSIncompleteFile
remote_path = posixpath.join(self.folder.path, filename)
try:
remote_file = self.client.getObject(remote_path)
except JFSNotFoundError:
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_query'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'... | Get size of filename |
def _build_matrix(p, q, deriv):
A = [([1 for _ in range(-p, q+1)])]
for i in range(1, p + q + 1):
A.append([j**i for j in range(-p, q+1)])
return np.array(A) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_build_matrix'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'va... | Constructs the equation system matrix for the finite difference coefficients |
def _setup_log_prefix(self, plugin_id=''):
self._logger_console_fmtter.prefix = '%s: ' % plugin_id
self._logger_console_fmtter.plugin_id = plugin_id
self._logger_file_fmtter.prefix = '*'
self._logger_file_fmtter.plugin_id = '%s: ' % plugin_id | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_setup_log_prefix'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'val... | Setup custom warning notification. |
def encode_metadata(self):
encoded_list = []
for key, value in iteritems(self.metadata):
key_str = str(key)
if re.search(r'^$|[\s,]+', key_str):
msg = 'Upload-metadata key "{}" cannot be empty nor contain spaces or commas.'
raise ValueError(msg.for... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'encode_metadata'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 's... | Return list of encoded metadata as defined by the Tus protocol. |
def make(parser):
parser.add_argument(
'mon',
metavar='HOST',
nargs='+',
help='monitor host to pull keys from',
)
parser.set_defaults(
func=gatherkeys,
) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'make'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'parser'}; {'... | Gather authentication keys for provisioning new nodes. |
def decrypt(secret, modN, d, blockSize):
numBlocks = [modExp(blocks, d, modN) for blocks in secret]
numList = blocks2numList(numBlocks, blockSize)
return numList2string(numList) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'decrypt'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'children': [], 'val... | reverse function of encrypt |
def stop_all(self):
for alias, service in self._service_objects.items():
if service.is_alive:
with expects.expect_no_raises(
'Failed to stop service "%s".' % alias):
service.stop() | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'stop_all'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; ... | Stops all active service instances. |
def unmarshal_event(self, data: str, response_type):
js = json.loads(data)
js['raw_object'] = js['object']
if js['type'].lower() == 'error':
return js
if response_type is not None:
js['object'] = self._api_client.deserialize(
response=SimpleNamespa... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '10']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'unmarshal_event'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '9']}; {'id': '4', 'type': 'identifier', 'children': [], ... | Return the K8s response `data` in JSON format. |
def config_from_prefix(prefix):
settings = {}
if prefix.lower() in ('default', 'auto', ''):
settings['zmq_prefix'] = ''
settings['libzmq_extension'] = False
settings['no_libzmq_extension'] = False
elif prefix.lower() in ('bundled', 'extension'):
settings['zmq_prefix'] = ''
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'config_from_prefix'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':... | Get config from zmq prefix |
def _filter_queryset(self, perms, queryset):
user = self.request.user if self.request else AnonymousUser()
return get_objects_for_user(user, perms, queryset) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_filter_queryset'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], ... | Filter object objects by permissions of user in request. |
def next_id(self):
msgid = self._id_template.format(self._next_id)
self._next_id += 1
return msgid | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'next_id'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {... | Return a unique message ID. |
def on_response(self, msg: Dict[str, str]) -> None:
response = msg.get('data', False)
if response:
task = self.__tasks.pop(msg.get('reqid'), False)
if task and not task.cancelled() and not task.done():
task.set_result(msg.get('data')) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '15', '17']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'on_response'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'va... | Run when get response from browser. |
def isargument(self, node):
try:
node_id, _ = self.node_to_id(node)
return (node_id in self.name_to_nodes and
any([isinstance(n, ast.Name) and
isinstance(n.ctx, ast.Param)
for n in self.name_to_nodes[node_id]]))
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'isargument'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 's... | checks whether node aliases to a parameter. |
def _process_out_of_bounds(self, value, start, end):
"Clips out of bounds values"
if isinstance(value, np.datetime64):
v = dt64_to_dt(value)
if isinstance(start, (int, float)):
start = convert_timestamp(start)
if isinstance(end, (int, float)):
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_process_out_of_bounds'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'chil... | Clips out of bounds values |
def disconnect(self):
_LOGGING.debug('Disconnecting from stream: %s', self.name)
self.kill_thrd.set()
self.thrd.join()
_LOGGING.debug('Event stream thread for %s is stopped', self.name)
self.kill_thrd.clear() | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'disconnect'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}... | Disconnect from event stream. |
def update_entitlement(owner, repo, identifier, name, token, show_tokens):
client = get_entitlements_api()
data = {}
if name is not None:
data["name"] = name
if token is not None:
data["token"] = token
with catch_raise_api_exception():
data, _, headers = client.entitlements_p... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '10']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'update_entitlement'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '8', '9']}; {'id': '4', 'type': 'identifier'... | Update an entitlement in a repository. |
def _parse_resource_declarations(cls, declarations, resource_map):
resources = {}
for decl in declarations:
name = decl.pop('name')
typename = decl.pop('type')
desc = decl.pop('description', None)
autocreate = decl.pop('autocreate', False)
args... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_parse_resource_declarations'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'chi... | Parse out what resources are declared as shared for this recipe. |
def find_genome_length(self):
for sample in self.metadata:
sample[self.analysistype].genome_length = sum(sample[self.analysistype].contig_lengths) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'find_genome_length'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':... | Determine the total length of all the contigs for each strain |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.