code stringlengths 51 2.34k | sequence stringlengths 1.16k 13.1k | docstring stringlengths 11 171 |
|---|---|---|
def run(self):
self.utils = salt.loader.utils(self.opts, proxy=self.proxy)
if salt.utils.platform.is_windows():
if self.opts['__role'] == 'master':
self.runners = salt.loader.runner(self.opts, utils=self.utils)
else:
self.runners = []
s... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'run'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'id'... | Run the master service! |
def downvote(self):
data = self.get_selected_item()
if 'likes' not in data:
self.term.flash()
elif getattr(data['object'], 'archived'):
self.term.show_notification("Voting disabled for archived post", style='Error')
elif data['likes'] or data['likes'] is None:
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'downvote'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; ... | Downvote the currently selected item. |
def highlight_block(self, text):
text = to_text_string(text)
if text.startswith(("c", "C")):
self.setFormat(0, len(text), self.formats["comment"])
self.highlight_spaces(text)
else:
FortranSH.highlight_block(self, text)
self.setFormat(0, 5, s... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'highlight_block'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value... | Implement highlight specific for Fortran77. |
def font_size_to_pixels(size):
if size is None or not isinstance(size, basestring):
return
conversions = {'em': 16, 'pt': 16/12.}
val = re.findall('\d+', size)
unit = re.findall('[a-z]+', size)
if (val and not unit) or (val and unit[0] == 'px'):
return int(val[0])
elif val and un... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'font_size_to_pixels'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'... | Convert a fontsize to a pixel value |
def __init(self):
res = self._get(url=self._url,
param_dict={"f": "json"},
securityHandler=self._securityHandler,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port)
self._json_dict = res
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '__init'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'... | loads the json values |
def diag(A, k=0):
if isinstance(A, Poly):
core, core_new = A.A, {}
for key in A.keys:
core_new[key] = numpy.diag(core[key], k)
return Poly(core_new, A.dim, None, A.dtype)
return numpy.diag(A, k) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'diag'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'A'}; {'... | Extract or construct a diagonal polynomial array. |
def _get_controller_agent(self, arg):
controller_agent = None
controller = arg.get('arg')
if controller is not None:
controller_agent, coords = self._get_agent_from_entity(controller)
elif arg['argument-type'] == 'complex':
controllers = list(arg.get('args').value... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_get_controller_agent'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], ... | Return a single or a complex controller agent. |
def require_session(handler):
@functools.wraps(handler)
async def decorated(request: web.Request) -> web.Response:
request_session_token = request.match_info['session']
session = session_from_request(request)
if not session or request_session_token != session.token:
LOG.warni... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'require_session'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'h... | Decorator to ensure a session is properly in the request |
def download_file(image_name, output_path, width=DEFAULT_WIDTH):
image_name = clean_up_filename(image_name)
logging.info("Downloading %s with width %s", image_name, width)
try:
contents, output_file_name = get_thumbnail_of_file(image_name, width)
except RequestedWidthBiggerThanSourceException:
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'download_file'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'va... | Download a given Wikimedia Commons file. |
def ask_yes_no(*question: Token, default: bool = False) -> bool:
while True:
tokens = [green, "::", reset] + list(question) + [reset]
if default:
tokens.append("(Y/n)")
else:
tokens.append("(y/N)")
info(*tokens)
answer = read_input()
if answer.... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '14', '16']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'ask_yes_no'}; {'id': '3', 'type': 'parameters', 'children': ['4', '9']}; {'id': '4', 'type': 'typed_parameter', 'children': ['5... | Ask the user to answer by yes or no |
def validate_on_submit(self):
valid = FlaskWtf.validate_on_submit(self)
if not self._schema or not self.is_submitted():
return valid
data = dict()
for field in self._fields:
data[field] = self._fields[field].data
result = self.schema.process(data, context=... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'validate_on_submit'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':... | Extend validate on submit to allow validation with schema |
def convert_field(self, value, conversion):
func = self.CONV_FUNCS.get(conversion)
if func is not None:
value = getattr(value, func)()
elif conversion not in ['R']:
return super(StringFormatter, self).convert_field(value, conversion)
if conversion in ['h', 'H', 'R... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'convert_field'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'va... | Apply conversions mentioned above. |
def to_datetime(dt, tzinfo=None, format=None):
if not dt:
return dt
tz = pick_timezone(tzinfo, __timezone__)
if isinstance(dt, (str, unicode)):
if not format:
formats = DEFAULT_DATETIME_INPUT_FORMATS
else:
formats = list(format)
d = None
for fm... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '11']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'to_datetime'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '8']}; {'id': '4', 'type': 'identifier', 'children': [], 'val... | Convert a date or time to datetime with tzinfo |
def attempt_reauthorization(blink):
_LOGGER.info("Auth token expired, attempting reauthorization.")
headers = blink.get_auth_token(is_retry=True)
return headers | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'attempt_reauthorization'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'va... | Attempt to refresh auth token and links. |
def load_global_catalog():
cat_dir = global_data_dir()
if not os.path.isdir(cat_dir):
return Catalog()
else:
return YAMLFilesCatalog(cat_dir) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '4']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'load_global_catalog'}; {'id': '3', 'type': 'parameters', 'children': []}; {'id': '4', 'type': 'block', 'children': ['5', '11']}; {'id'... | Return a catalog for the environment-specific Intake directory |
def use_plenary_hierarchy_view(self):
self._hierarchy_view = PLENARY
for session in self._get_provider_sessions():
try:
session.use_plenary_hierarchy_view()
except AttributeError:
pass | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'use_plenary_hierarchy_view'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], ... | Pass through to provider HierarchyLookupSession.use_plenary_hierarchy_view |
def _decode_received(self, msg):
if not isinstance(msg, six.binary_type):
return msg
type = six.byte2int(msg[0:1])
if type >= 48:
return msg.decode('utf-8')
return msg | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_decode_received'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'valu... | Returns either bytes or str, depending on message type. |
def css(self, css):
return [self.get_node_factory().create(node_id)
for node_id in self._get_css_ids(css).split(",")
if node_id] | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'css'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; ... | Finds another node by a CSS selector relative to the current node. |
def _varname_inj(self):
if not self.n:
return
m = self.system.dae.m
xy_idx = range(m, self.n + m)
self.system.varname.append(
listname='unamey',
xy_idx=xy_idx,
var_name='P',
element_name=self.name)
self.system.varname.ap... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_varname_inj'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self... | Customize varname for bus injections |
def put(path, obj):
try:
import cPickle as pickle
except:
import pickle
with open(path, 'wb') as file:
return pickle.dump(obj, file) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'put'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'path'}; ... | Write an object to file |
def build_kal_scan_channel_string(kal_bin, channel, args):
option_mapping = {"gain": "-g",
"device": "-d",
"error": "-e"}
base_string = "%s -v -c %s" % (kal_bin, channel)
base_string += options_string_builder(option_mapping, args)
return(base_string) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'build_kal_scan_channel_string'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'ch... | Return string for CLI invocation of kal, for channel scan. |
def unapostrophe(text):
text = re.sub(r'[%s]s?$' % ''.join(APOSTROPHES), '', text)
return text | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'unapostrophe'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'text... | Strip apostrophe and 's' from the end of a string. |
def nonpresent_module_filename():
while True:
module_name = get_random_name()
loader = pkgutil.find_loader(module_name)
if loader is not None:
continue
importlib.invalidate_caches()
return "{}.py".format(module_name) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '4']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'nonpresent_module_filename'}; {'id': '3', 'type': 'parameters', 'children': []}; {'id': '4', 'type': 'block', 'children': ['5']}; {'id... | Return module name that doesn't already exist |
def append (self, cmd, delay=0.000, attrs=None):
self.lines.append( SeqCmd(cmd, delay, attrs) ) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '12']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'append'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '9']}; {'id': '4', 'type': 'identifier', 'children': [], 'val... | Adds a new command with a relative time delay to this sequence. |
def fw_retry_failures_create(self):
for tenant_id in self.fwid_attr:
try:
with self.fwid_attr[tenant_id].mutex_lock:
if self.fwid_attr[tenant_id].is_fw_drvr_create_needed():
fw_dict = self.fwid_attr[tenant_id].get_fw_dict()
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'fw_retry_failures_create'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'v... | This module is called for retrying the create cases. |
def add(self, source_id, profile_data, training_metadata=[], profile_reference=None, timestamp_reception=None):
data = {
"source_id": _validate_source_id(source_id),
"profile_json": _validate_dict(profile_data, "profile_data"),
"training_metadata": _validate_training_metadata... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '16']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'add'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '10', '13']}; {'id': '4', 'type': 'identifier', 'children':... | Use the api to add a new profile using profile_data. |
def opened(filename, mode):
"Open filename, or do nothing if filename is already an open file object"
if isinstance(filename, str):
file = open(filename, mode)
try:
yield file
finally:
if not file.closed:
file.close()
else:
yield file... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'opened'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'filen... | Open filename, or do nothing if filename is already an open file object |
def to_dict(self):
"returns self as a dictionary with _underscore subdicts corrected."
ndict = {}
for key, val in self.__dict__.items():
if key[0] == "_":
ndict[key[1:]] = val
else:
ndict[key] = val
return ndict | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'to_dict'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {... | returns self as a dictionary with _underscore subdicts corrected. |
def expand_includes(text, path='.'):
def read_and_expand(match):
filename = match.group('filename')
filename = join(path, filename)
text = read(filename)
return expand_includes(
text, path=join(path, dirname(filename)))
return re.sub(r'^\.\. include:: (?P<filename>.*)... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'expand_includes'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value... | Recursively expands includes in given text. |
def insert_after(self, key, new_item, instance=0):
self._insert_item(key, new_item, instance, True) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '10']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'insert_after'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'children': []... | Insert an item after a key |
def add(self, scene):
if not isinstance(scene, Scene):
raise TypeError()
for i, j in enumerate(self.__scenes):
if j.scene_id == scene.scene_id:
self.__scenes[i] = scene
return
self.__scenes.append(scene) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'add'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; ... | Add scene, replace existing scene if scene with scene_id is present. |
def sync_one(self, aws_syncr, amazon, function):
function_info = amazon.lambdas.function_info(function.name, function.location)
if not function_info:
amazon.lambdas.create_function(function.name, function.description, function.location, function.runtime, function.role, function.handler, func... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'sync_one'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'children': [], 'va... | Make sure this function exists and has only attributes we want it to have |
def _int64_feature(value):
if not isinstance(value, list):
value = [value]
return tf.train.Feature(int64_list=tf.train.Int64List(value=value)) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_int64_feature'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'va... | Wrapper for inserting int64 features into Example proto. |
def show_quota(self, project_id, **_params):
return self.get(self.quota_path % (project_id), params=_params) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'show_quota'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'value... | Fetch information of a certain project's quotas. |
def _get_property(name):
ret = property(
lambda self: getattr(self.loop, name))
if six.PY3:
try:
ret.__doc__ = getattr(TrainLoop, name).__doc__
except AttributeError:
pass
return ret | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_get_property'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'nam... | Delegate property to self.loop |
def _BinsToQuery(self, bins, column_name):
result = []
for prev_b, next_b in zip([0] + bins[:-1], bins[:-1] + [None]):
query = "COUNT(CASE WHEN %s >= %f" % (column_name, prev_b)
if next_b is not None:
query += " AND %s < %f" % (column_name, next_b)
query += " THEN 1 END)"
result.... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_BinsToQuery'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'val... | Builds an SQL query part to fetch counts corresponding to given bins. |
def get(self, value, default=None):
path = value if isinstance(value, Path) else Path(str(value))
subtree = self
for part in path.parts:
try:
subtree = subtree[part]
except KeyError:
return default
return subtree | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'get'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'sel... | Return a subtree if exists. |
def emit(self):
self.count += 1
event_name = self.context.subcategory
if hasattr(self.handler, event_name):
getattr(self.handler, event_name)(self.context)
elif hasattr(self.handler, 'default'):
self.handler.default(self.context) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'emit'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'id... | We are finished processing one element. Emit it |
def read(self, num_bytes=None):
res = self.get_next(num_bytes)
self.skip(len(res))
return res | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'read'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'};... | Read and return the specified bytes from the buffer. |
def _build_implicit_prefetches(
self,
model,
prefetches,
requirements
):
for source, remainder in six.iteritems(requirements):
if not remainder or isinstance(remainder, six.string_types):
continue
related_field = get_model_field(model, ... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_build_implicit_prefetches'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', '... | Build a prefetch dictionary based on internal requirements. |
def bytes_to_file(input_data, output_file):
pathlib.Path(output_file.parent).mkdir(parents=True, exist_ok=True)
with open(output_file, "wb") as file:
file.write(input_data) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'bytes_to_file'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':... | Save bytes to a file. |
def _handle_progress(self, total, progress_callback):
current = 0
while True:
current += yield
try:
progress_callback(current, total)
except Exception:
_LOG.exception('Progress callback raised an exception. %s',
progress_callback)
continue | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_handle_progress'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], ... | Calls the callback with the current progress and total . |
def _join_chars(chars, length):
mult = int(length / len(chars)) + 1
mult_chars = chars * mult
return "".join(random.sample(mult_chars, length)) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_join_chars'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | Used by the random character functions. |
def update(self, data):
if data is None:
for device in self.devices:
device.clear_info()
else:
for device, device_info in zip(self.devices, data):
device.device_info = device_info
self.connection.log("Device information updated -> [... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'update'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'... | Update the chain object with the predefined data. |
def _push_textbuffer(self):
if self._textbuffer:
self._stack.append(tokens.Text(text="".join(self._textbuffer)))
self._textbuffer = [] | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_push_textbuffer'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | Push the textbuffer onto the stack as a Text node and clear it. |
def logBranch(self, indent=0, level=logging.DEBUG):
if 0:
print(indent * " " + str(self))
else:
logger.log(level, indent * " " + str(self))
for childItems in self.childItems:
childItems.logBranch(indent + 1, level=level) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '13']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'logBranch'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '8']}; {'id': '4', 'type': 'identifier', 'children': [], 'value... | Logs the item and all descendants, one line per child |
def make_library(**kwargs):
library_yaml = kwargs.pop('library', 'models/library.yaml')
comp_yaml = kwargs.pop('comp', 'config/binning.yaml')
basedir = kwargs.pop('basedir', os.path.abspath('.'))
model_man = kwargs.get('ModelManager', ModelManager(basedir=basedir))
model_comp_dict = model_man.make_l... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'make_library'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'dictionary_splat_pattern', 'children': ['5'... | Build and return a ModelManager object and fill the associated model library |
def reset(self):
self._attempts = 0
self._cur_delay = self.delay
self._cur_stoptime = None | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'reset'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'i... | Reset the attempt counter |
def delete(self, wg_uuid, uuid):
url = "%(base)s/%(wg_uuid)s/members/%(uuid)s" % {
'base': self.local_base_url,
'wg_uuid': wg_uuid,
'uuid': uuid
}
return self.core.delete(url) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'delete'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | Delete one thread member. |
def __wrap_accepted_val(self, value):
if isinstance(value, tuple):
value = list(value)
elif not isinstance(value, list):
value = [value]
return value | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '__wrap_accepted_val'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'v... | Wrap accepted value in the list if yet not wrapped. |
def _reconnect(self):
log.debug("Reconnecting to JLigier...")
self._disconnect()
self._connect()
self._update_subscriptions() | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_reconnect'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}... | Reconnect to JLigier and subscribe to the tags. |
async def init():
global redis_conn
conn = await aioredis.create_connection(
'redis://{}:{}'.format(
SETTINGS.get('FLOW_EXECUTOR', {}).get('REDIS_CONNECTION', {}).get('host', 'localhost'),
SETTINGS.get('FLOW_EXECUTOR', {}).get('REDIS_CONNECTION', {}).get('port', 56379)
),... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '4']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'init'}; {'id': '3', 'type': 'parameters', 'children': []}; {'id': '4', 'type': 'block', 'children': ['5', '7', '83']}; {'id': '5', 'ty... | Create a connection to the Redis server. |
def write_json(data, filename, gzip_mode=False):
open_file = open
if gzip_mode:
open_file = gzip.open
try:
with open_file(filename, 'wt') as fh:
json.dump(obj=data, fp=fh, sort_keys=True)
except AttributeError:
fh = open_file(filename, 'wt')
json.dump(obj=data... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'write_json'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'value... | Write the python data structure as a json-Object to filename. |
def cub200_iterator(data_path, batch_k, batch_size, data_shape):
return (CUB200Iter(data_path, batch_k, batch_size, data_shape, is_train=True),
CUB200Iter(data_path, batch_k, batch_size, data_shape, is_train=False)) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'cub200_iterator'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'children': ... | Return training and testing iterator for the CUB200-2011 dataset. |
def _search(mapping, filename):
result = mapping.get(filename)
if result is not None:
return result
name, ext = os.path.splitext(filename)
result = mapping.get(ext)
if result is not None:
for pattern, result2 in result:
if fnmatch(filename, pattern):
retur... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_search'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'mapp... | Search a Loader data structure for a filename. |
def jacobian_singular(self):
    """Return True if the Jacobian is singular, else False.

    A matrix is singular iff its nullspace is non-trivial, so the
    truthiness of ``nullspace()`` (presumably a list of basis vectors,
    as in SymPy — confirm against the backend) decides directly.
    """
    # cse() returns (common subexpressions, [rewritten expressions]);
    # only the rewritten Jacobian is needed here.
    cses, (jac_in_cses,) = self.be.cse(self.get_jac())
    # Replaced `if x: return True else: return False` with bool(x).
    return bool(jac_in_cses.nullspace())
def time_restarts(data_path):
path = os.path.join(data_path, 'last_restarted')
if not os.path.isfile(path):
with open(path, 'a'):
os.utime(path, None)
last_modified = os.stat(path).st_mtime
with open(path, 'a'):
os.utime(path, None)
now = os.stat(path).st_mtime
dif = ... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'time_restarts'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'dat... | When called will create a file and measure its mtime on restarts |
def read(self, size=None):
    """Read up to *size* bytes, draining the internal buffer first.

    With ``size=None``, everything remaining in both the buffer and the
    underlying file is returned.
    """
    if size is None:
        return self.buf.read() + self.open_file.read()
    data = self.buf.read(size)
    shortfall = size - len(data)
    if shortfall > 0:
        # Buffer ran dry; top up from the underlying file.
        data += self.open_file.read(shortfall)
    return data
def isdicom(fn):
    """True if *fn* points to a DICOM image.

    A ``.dcm`` extension is trusted outright; otherwise the file is
    probed for the ``DICM`` magic bytes at offset 0x80.
    """
    name = str(fn)
    if name.endswith('.dcm'):
        return True
    with open(name, 'rb') as stream:
        stream.seek(0x80)
        return stream.read(4) == b'DICM'
def _cover2exprs(inputs, noutputs, cover):
fs = list()
for i in range(noutputs):
terms = list()
for invec, outvec in cover:
if outvec[i]:
term = list()
for j, v in enumerate(inputs):
if invec[j] == 1:
term.ap... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_cover2exprs'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'val... | Convert a cover to a tuple of Expression instances. |
def open(self):
self.hwman = HardwareManager(port=self._port)
self.opened = True
if self._connection_string is not None:
try:
self.hwman.connect_direct(self._connection_string)
except HardwareError:
self.hwman.close()
raise
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'open'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'id... | Open and potentially connect to a device. |
def search_associations_go(
subject_category=None,
object_category=None,
relation=None,
subject=None,
**kwargs):
go_golr_url = "http://golr.geneontology.org/solr/"
go_solr = pysolr.Solr(go_golr_url, timeout=5)
go_solr.get_session().headers['User-Agent'] = get_user_age... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '18']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'search_associations_go'}; {'id': '3', 'type': 'parameters', 'children': ['4', '7', '10', '13', '16']}; {'id': '4', 'type': 'default_p... | Perform association search using Monarch golr |
def _assign_zones(self):
for zone_id in range(1, 5):
zone = \
RainCloudyFaucetZone(
parent=self._parent,
controller=self._controller,
faucet=self,
zone_id=zone_id)
if zone not in self.zones:
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_assign_zones'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'sel... | Assign all RainCloudyFaucetZone managed by faucet. |
def check_no_overlapping_paths(paths):
for path in paths:
list_copy_without_path = list(paths)
list_copy_without_path.remove(path)
if path in list_copy_without_path:
raise ValueError('{} appeared more than once. All paths must be unique.'.format(path))
for p in list_copy_without_path:
if p... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'check_no_overlapping_paths'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], ... | Given a list of paths, ensure that all are unique and do not have the same prefix. |
def _context_names():
    """Yield names of ``provenance`` attributes that are JSONLDMixin classes."""
    import inspect
    from renku.models import provenance
    from renku.models._jsonld import JSONLDMixin
    for attr in dir(provenance):
        candidate = getattr(provenance, attr)
        if inspect.isclass(candidate) and issubclass(candidate, JSONLDMixin):
            yield attr
def replace_placeholders(path: Path, properties: Dict[str, str]):
    """Replace ``$name`` placeholders in *path* with values from *properties*.

    Uses :meth:`string.Template.safe_substitute`, so placeholders without
    a matching key are left untouched. The file is rewritten in place.
    """
    with open(path, encoding='utf8') as src:
        template = Template(src.read())
    rendered = template.safe_substitute(properties)
    with open(path, 'w', encoding='utf8') as dst:
        dst.write(rendered)
async def _make_url(self, url: Text, request: 'Request') -> Text:
if self.sign_webview:
return await request.sign_url(url)
return url | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '13', '15']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_make_url'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '9']}; {'id': '4', 'type': 'identifier', 'children': [], ... | Signs the URL if needed |
def append_args(self, arg):
debug.log("Adding Arguments: %s"%(arg))
if isinstance(arg, (int,float)): self.args.append(str(arg))
if isinstance(arg, str): self.args.append(arg)
if isinstance(arg, list):
if sys.version_info < (3, 0):
self.args.extend([str(x) if not isinstance(x... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'append_args'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | This function appends the provided arguments to the program object. |
def parse_author(self, value):
tokens = tuple([t.upper().strip() for t in value.split(',')])
if len(tokens) == 1:
tokens = value.split(' ')
if len(tokens) > 0:
if len(tokens) > 1:
aulast, auinit = tokens[0:2]
else:
aulast = toke... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'parse_author'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ... | Attempts to split an author name into last and first parts. |
def delete_suspect(self, suspect_id):
    """De-link a suspect from a case and persist the change."""
    target = self.suspect(suspect_id)
    logger.debug("Deleting suspect {0}".format(target.name))
    self.session.delete(target)
    self.save()
def distinct(self):
    """Return sets of indices for each distinct haplotype.

    Columns whose byte content hashes identically are grouped together;
    the groups are ordered largest first.
    """
    groups = {}
    for col in range(self.shape[1]):
        key = hash(self.values[:, col].tobytes())
        groups.setdefault(key, set()).add(col)
    return sorted(groups.values(), key=len, reverse=True)
def salt_run():
    """Execute a salt convenience routine."""
    import salt.cli.run
    # An empty sys.path entry means "current directory"; drop it before
    # the runner loads anything else.
    if '' in sys.path:
        sys.path.remove('')
    runner = salt.cli.run.SaltRun()
    _install_signal_handlers(runner)
    runner.run()
def match_regexp(self, value, q, strict=False):
    """Check that *value* matches the regexp *q*, shouting otherwise."""
    value = stringify(value)
    pattern = re.compile(q)
    # Shout whenever value is missing or fails to match.
    if value is not None and pattern.match(value):
        return
    self.shout('%r not matching the regexp %r', strict, value, q)
def find_sink_variables(self):
is_sink = {name: True for name in self.variables.keys()}
for operator in self.operators.values():
for variable in operator.inputs:
is_sink[variable.onnx_name] = False
return [variable for name, variable in self.variables.items() if is_si... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'find_sink_variables'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'... | Find sink variables in this scope |
def connectionLost(self, reason):
    """If a login has happened, perform a logout; then drop references."""
    AMP.connectionLost(self, reason)
    logout = self.logout
    if logout is not None:
        logout()
    # Break reference cycles regardless of whether a logout ran.
    self.boxReceiver = self.logout = None
def all_columns(self):
    """Return a sorted list of all column names used across all parts."""
    return sorted({value.column_name
                   for group in self._parts
                   for value in group._parts})
def aws_to_unix_id(aws_key_id):
    """Convert an AWS Key ID into a deterministic UID in the 2000+ range.

    The last two bytes of the key's SHA-256 digest give an offset of at
    most 2**16 - 1, halved before being added to the 2000 base.
    """
    tail = hashlib.sha256(aws_key_id.encode()).digest()[-2:]
    if USING_PYTHON2:
        offset = int(from_bytes(tail) // 2)
    else:
        # NOTE(review): byteorder follows the host platform here, so the
        # UID is platform-dependent — presumably intentional; confirm.
        offset = int.from_bytes(tail, byteorder=sys.byteorder) // 2
    return 2000 + offset
def _breakRemNewlines(tag):
    """Non-recursively collapse runs of spaces and remove newlines in the
    direct NavigableString children of *tag*.
    """
    # Iterate over a snapshot: replace_with() mutates tag.contents, and
    # mutating the list while iterating it can skip sibling nodes.
    # The unused enumerate index is also dropped.
    for child in list(tag.contents):
        if type(child) != bs4.element.NavigableString:
            # Keep the exact-type check: NavigableString subclasses such
            # as Comment must not be rewritten.
            continue
        child.replace_with(re.sub(r' {2,}', ' ', child).replace('\n', ''))
def start_group(self, scol, typ):
    """Start a new group with this object as its parent.

    :param scol: starting column; passed to the group as its ``level``
    :param typ: type tag for the new group
    :return: a new ``Group`` instance
    """
    return Group(parent=self, level=scol, typ=typ)
def atlasdb_cache_zonefile_info( con=None, path=None ):
global ZONEFILE_INV, NUM_ZONEFILES, ZONEFILE_INV_LOCK
inv = None
with ZONEFILE_INV_LOCK:
inv_len = atlasdb_zonefile_inv_length( con=con, path=path )
inv = atlas_make_zonefile_inventory( 0, inv_len, con=con, path=path )
ZONEFILE_... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '10']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'atlasdb_cache_zonefile_info'}; {'id': '3', 'type': 'parameters', 'children': ['4', '7']}; {'id': '4', 'type': 'default_parameter', 'c... | Load up and cache our zonefile inventory from the database |
def new_driver(browser_name, *args, **kwargs):
if browser_name == FIREFOX:
return webdriver.Firefox(*args, **kwargs)
elif browser_name == PHANTOMJS:
executable_path = os.path.join(os.path.dirname(__file__), 'phantomjs/executable/phantomjs_64bit')
driver = webdriver.Ph... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'new_driver'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '7']}; {'id': '4', 'type': 'identifier', 'children': [], 'value... | Instantiates a new WebDriver instance, determining class by environment variables |
def combine_data(self, command2):
    """Combine another command's data with this one; no-op for ``None``."""
    if command2 is not None:
        self._data = self._merge(command2._data, self._data)
def _create_and_add_parameters(params):
global _current_parameter
if _is_simple_type(params):
_current_parameter = SimpleParameter(params)
_current_option.add_parameter(_current_parameter)
else:
for i in params:
if _is_simple_type(i):
_current_parameter = ... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_create_and_add_parameters'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], ... | Parses the configuration and creates Parameter instances. |
def infos_on_basis_set(self):
o = []
o.append("=========================================")
o.append("Reading basis set:")
o.append("")
o.append(" Basis set for {} atom ".format(str(self.filename)))
o.append(" Maximum angular momentum = {}".format(self.data['lmax']))
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'infos_on_basis_set'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':... | infos on the basis set as in Fiesta log |
def authenticate(self, provider):
    """Start the OAuth authorization flow (redirects to the 3rd-party site)."""
    # Build the callback from the raw provider name before resolving it
    # to the actual client object.
    callback_url = url_for(".callback", provider=provider, _external=True)
    oauth_client = self.get_provider(provider)
    session['next'] = request.args.get('next') or ''
    return oauth_client.authorize(callback_url)
def setup_groups(portal):
logger.info("*** Setup Roles and Groups ***")
portal_groups = api.get_tool("portal_groups")
for gdata in GROUPS:
group_id = gdata["id"]
if group_id not in portal_groups.listGroupIds():
logger.info("+++ Adding group {title} ({id})".format(**gdata))
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'setup_groups'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'port... | Setup roles and groups for BECHEM |
def delete_custom_field(self, custom_field_key):
    """Delete a custom field associated with this list.

    :param custom_field_key: field key; URL-quoted (including ``/``)
        before being interpolated into the endpoint path.
    """
    custom_field_key = quote(custom_field_key, '')
    # The response was previously bound to an unused local; the call is
    # made purely for its server-side effect.
    self._delete("/lists/%s/customfields/%s.json" %
                 (self.list_id, custom_field_key))
def descriptions(self):
    """The description (third slot) of each keyword in ``defaultParams``.

    Entries with fewer than three slots carry no description and are
    skipped.
    """
    result = {}
    for key, val in six.iteritems(self.defaultParams):
        if len(val) >= 3:
            result[key] = val[2]
    return result
def _refresh_authentication_token(self):
if self.retry == self._MAX_RETRIES:
raise GeocoderAuthenticationFailure(
'Too many retries for auth: %s' % self.retry
)
token_request_arguments = {
'username': self.username,
'password': self.passwor... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_refresh_authentication_token'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [... | POST to ArcGIS requesting a new token. |
def _change_sel_color(self, event):
(r, g, b), (h, s, v), color = self.square.get()
self.red.set(r)
self.green.set(g)
self.blue.set(b)
self.saturation.set(s)
self.value.set(v)
self.hexa.delete(0, "end")
self.hexa.insert(0, color.upper())
if self.al... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_change_sel_color'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'val... | Respond to motion of the color selection cross. |
def _tc_below(self):
tr_below = self._tr_below
if tr_below is None:
return None
return tr_below.tc_at_grid_col(self._grid_col) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_tc_below'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'};... | The tc element immediately below this one in its grid column. |
def _get_vs30star(self, vs30, imt):
if imt.name == "SA":
t = imt.period
if t <= 0.50:
v1 = 1500.0
elif t < 3.0:
v1 = np.exp(-0.35 * np.log(t / 0.5) + np.log(1500.))
else:
v1 = 800.0
elif imt.name == "PGA":
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_get_vs30star'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'va... | This computes equations 8 and 9 at page 1034 |
def on_trial_remove(self, trial_runner, trial):
    """Marks trial as completed if it is paused and has previously ran.

    ``trial_runner`` is unused here; the hook signature is presumably
    fixed by the scheduler interface — confirm against the base class.
    """
    # NOTE(review): `is` (identity) comparison assumes Trial.PAUSED is a
    # shared singleton constant — confirm.
    if trial.status is Trial.PAUSED and trial in self._results:
        self._completed_trials.add(trial)
def reload(self):
    """Reloads this VirtualBox VM.

    Generator-based coroutine (old ``yield from`` style): issues a
    "reset" to the VM control channel and logs the outcome.
    """
    result = yield from self._control_vm("reset")
    log.info("VirtualBox VM '{name}' [{id}] reloaded".format(name=self.name, id=self.id))
    log.debug("Reload result: {}".format(result))
def check_order(self, order):
    """Validate that *order* is a subset of ``self.order`` and return it.

    :raises ValueError: on the first item not present in ``self.order``.
    """
    allowed = self.order
    for item in order:
        if item not in allowed:
            raise ValueError(f'Order item {item} not found.')
    return order
def isInRoom(self, _id):
    """Check whether this handler has joined the given room.

    :param _id: room identifier, namespaced by ``self._gcls()``
    :return: True if this handler is registered in the room, else False
    """
    # dict.has_key() is Python-2-only (removed in Python 3); `in` is the
    # equivalent, version-portable membership test.
    key = self._gcls() + _id
    room = SockJSRoomHandler._room
    if key in room and self in room[key]:
        return True
    return False
def _get_raw_data(self, is_valid_key, data_key):
result = None
if self._read_imu():
data = self._imu.getIMUData()
if data[is_valid_key]:
raw = data[data_key]
result = {
'x': raw[0],
'y': raw[1],
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_get_raw_data'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'va... | Internal. Returns the specified raw data from the IMU when valid |
def n_faces(self):
    """The number of faces in the mesh (None when no face data is set)."""
    faces = self._faces
    if faces is not None:
        return faces.shape[0]
    indexed = self._vertices_indexed_by_faces
    if indexed is not None:
        return indexed.shape[0]
    return None
def install(self, to, chmod=0o644):
    """Copy data to *to* and set its mode to *chmod*.

    The default was written as the Python 2 octal literal ``0644``,
    which is a syntax error on Python 3; ``0o644`` (same value, 420)
    is valid on Python 2.6+ and 3.
    """
    self.copy(to)
    path(to).chmod(chmod)
def add_command(self, command):
    """Move *command* to the front of the history and reset the index."""
    history = self._history
    # De-duplicate: an existing occurrence is pulled to the front.
    if command in history:
        history.remove(command)
    history.insert(0, command)
    self._index = -1
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.