code stringlengths 51 2.34k | sequence stringlengths 1.16k 13.1k | docstring stringlengths 11 171 |
|---|---|---|
def resort_client_actions(portal):
sorted_actions = [
"edit",
"contacts",
"view",
"analysisrequests",
"batches",
"samplepoints",
"profiles",
"templates",
"specs",
"orders",
"reports_listing"
]
type_info = portal.portal_t... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'resort_client_actions'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'valu... | Resorts client action views |
def time_emd(emd_type, data):
emd = {
'cause': _CAUSE_EMD,
'effect': pyphi.subsystem.effect_emd,
'hamming': pyphi.utils.hamming_emd
}[emd_type]
def statement():
for (d1, d2) in data:
emd(d1, d2)
results = timeit.repeat(statement, number=NUMBER, repeat=REPEAT)
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'time_emd'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'emd... | Time an EMD command with the given data as arguments |
def read(self, frame, data):
"Returns a list of values, eats all of data."
seq = []
while data:
elem, data = self.spec.read(frame, data)
seq.append(elem)
return seq, data | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'read'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'se... | Returns a list of values, eats all of data. |
def fillNullValues(col, rows):
'Fill null cells in col with the previous non-null value'
lastval = None
nullfunc = isNullFunc()
n = 0
rowsToFill = list(rows)
for r in Progress(col.sheet.rows, 'filling'):
try:
val = col.getValue(r)
except Exception as e:
va... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'fillNullValues'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'... | Fill null cells in col with the previous non-null value |
def get(self, user_name: str) -> User:
user = current_user()
if user.is_admin or user.name == user_name:
return self._get_or_abort(user_name)
else:
abort(403) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9', '11']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'get'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'se... | Gets the User Resource. |
def calc_all_routes_info(self, npaths=3, real_time=True, stop_at_bounds=False, time_delta=0):
routes = self.get_route(npaths, time_delta)
results = {route['routeName']: self._add_up_route(route['results'], real_time=real_time, stop_at_bounds=stop_at_bounds) for route in routes}
route_time = [rou... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '17']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'calc_all_routes_info'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '8', '11', '14']}; {'id': '4', 'type': 'identifier',... | Calculate all route infos. |
def _run_validators(self, value):
errors = []
for v in self.validators:
try:
v(value)
except ValidationError, e:
errors.extend(e.messages)
if errors:
raise ValidationError(errors) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_run_validators'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value... | Execute all associated validators. |
def XanyKXany(self):
result = np.empty((self.P,self.F_any.shape[1],self.F_any.shape[1]), order='C')
for p in range(self.P):
X1D = self.Fstar_any * self.D[:,p:p+1]
X1X2 = X1D.T.dot(self.Fstar_any)
result[p] = X1X2
return result | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'XanyKXany'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'};... | compute self covariance for any |
def _skip(self, cnt):
while cnt > 0:
if self._cur_avail == 0:
if not self._open_next():
break
if cnt > self._cur_avail:
cnt -= self._cur_avail
self._remain -= self._cur_avail
self._cur_avail = 0
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_skip'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}... | RAR Seek, skipping through rar files to get to correct position |
def listify(p:OptListOrItem=None, q:OptListOrItem=None):
"Make `p` listy and the same length as `q`."
if p is None: p=[]
elif isinstance(p, str): p = [p]
elif not isinstance(p, Iterable): p = [p]
else:
try: a = len(p)
except: p = [p]
n = q if type(q)==int else len(p) if ... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '14']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'listify'}; {'id': '3', 'type': 'parameters', 'children': ['4', '9']}; {'id': '4', 'type': 'typed_default_parameter', 'children': ['5'... | Make `p` listy and the same length as `q`. |
def _parse_canonical_int32(doc):
i_str = doc['$numberInt']
if len(doc) != 1:
raise TypeError('Bad $numberInt, extra field(s): %s' % (doc,))
if not isinstance(i_str, string_type):
raise TypeError('$numberInt must be string: %s' % (doc,))
return int(i_str) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_parse_canonical_int32'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'val... | Decode a JSON int32 to python int. |
def tag_info(self):
return self.__class__.__name__ + (
'(%r)' % self.name if self.name
else "") + ": " + self.valuestr() | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'tag_info'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; ... | Return Unicode string with class, name and unnested value. |
def process_response(self, response: requests.models.Response) -> dict:
try:
output = response.json()
except json.JSONDecodeError:
raise self.BadResponseError(
'Json not returned with status code [' + str(response.status_code) + ']')
if response.status_cod... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '13', '15']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'process_response'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': []... | Checks for correct data response and status codes |
def readline(self, timeout = 0.1):
try:
return self._q.get(block = timeout is not None,
timeout = timeout)
except Empty:
return None | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'readline'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'sel... | Try to read a line from the stream queue. |
def config_status():
s = boto3.Session()
client = s.client('config')
channels = client.describe_delivery_channel_status()[
'DeliveryChannelsStatus']
for c in channels:
print(yaml.safe_dump({
c['name']: dict(
snapshot=str(
c['configSnapshotD... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '4']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'config_status'}; {'id': '3', 'type': 'parameters', 'children': []}; {'id': '4', 'type': 'block', 'children': ['5', '13', '22', '32']};... | Check config status in an account. |
def url(self, request):
if self.pk:
if self.has_description():
return request.build_absolute_uri(reverse('assignment_description_file', args=[self.pk]))
else:
return self.download
else:
return None | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'url'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; ... | Return absolute URL for assignment description. |
def post(self, request, ext):
try:
plugin = plugins.get(ext)
data, meta = self.get_post_data(request)
data = plugin.load(data)
except UnknownPlugin:
raise Http404
else:
content = plugin.render(data)
return self.render_to_res... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'post'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'se... | Render data for plugin and return text response. |
def validate_parameters(self):
for p in self.params:
if p not in self.known_params:
raise errors.UnknownParameter(p, self.known_params) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'validate_parameters'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'... | Validate that the parameters are correctly specified. |
def _agent_from_distribution(distribution, value=-1, agent_id=None):
if value < 0:
value = random.random()
for d in sorted(distribution, key=lambda x: x['threshold']):
threshold = d['threshold']
if not ((agent_id is not None and threshold == STATIC_THRESHOLD and agent_id in d['ids']) or ... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '12']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_agent_from_distribution'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '9']}; {'id': '4', 'type': 'identifier', 'childr... | Used in the initialization of agents given an agent distribution. |
def _parse_block(self):
if self._iter_rows is not None:
return
rows = _avro_rows(self._block, self._avro_schema)
self._num_items = self._block.avro_rows.row_count
self._remaining = self._block.avro_rows.row_count
self._iter_rows = iter(rows) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_parse_block'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self... | Parse metadata and rows from the block only once. |
def download(self, request, **kwargs):
self.method_check(request, allowed=['get'])
basic_bundle = self.build_bundle(request=request)
tileset = self.cached_obj_get(
bundle=basic_bundle,
**self.remove_api_resource_names(kwargs))
filename = helpers.get_tileset_filena... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'download'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':... | proxy for the helpers.tileset_download method |
def packet_in_handler(self, req_igmp, msg):
ofproto = msg.datapath.ofproto
if ofproto.OFP_VERSION == ofproto_v1_0.OFP_VERSION:
in_port = msg.in_port
else:
in_port = msg.match['in_port']
if (igmp.IGMP_TYPE_REPORT_V1 == req_igmp.msgtype or
igmp.IGMP_... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'packet_in_handler'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [],... | the process when the querier received IGMP. |
def WalkChildren(elem):
for child in elem.childNodes:
yield child
for elem in WalkChildren(child):
yield elem | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'WalkChildren'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'elem... | Walk the XML tree of children below elem, returning each in order. |
def _on_error(self, websock, e):
if isinstance(e, (
websocket.WebSocketConnectionClosedException,
ConnectionResetError)):
self.logger.error('websocket closed, did chrome die?')
else:
self.logger.error(
'exception from websocket receiver... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_on_error'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'... | Raises BrowsingException in the thread that created this instance. |
def _on_text_changed(self):
if not self._cleaning:
ln = TextHelper(self).cursor_position()[0]
self._modified_lines.add(ln) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_on_text_changed'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | Adjust dirty flag depending on editor's content |
def _is_bval_type_a(grouped_dicoms):
bval_tag = Tag(0x2001, 0x1003)
bvec_x_tag = Tag(0x2005, 0x10b0)
bvec_y_tag = Tag(0x2005, 0x10b1)
bvec_z_tag = Tag(0x2005, 0x10b2)
for group in grouped_dicoms:
if bvec_x_tag in group[0] and _is_float(common.get_fl_value(group[0][bvec_x_tag])) and \
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_is_bval_type_a'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'g... | Check if the bvals are stored in the first of 2 currently known ways for single frame dti |
def strip_html_comments(text):
lines = text.splitlines(True)
new_lines = filter(lambda line: not line.startswith("<!--"), lines)
return "".join(new_lines) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'strip_html_comments'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'... | Strip HTML comments from a unicode string. |
def create_deployment(deployment_name,
token_manager=None,
app_url=defaults.APP_URL):
headers = token_manager.get_access_token_headers()
payload = {
'name': deployment_name,
'isAdmin': True
}
deployment_url = environment.get_deployment_url(app_... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '13']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'create_deployment'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '8']}; {'id': '4', 'type': 'identifier', 'children': []... | create a deployment with the specified name |
def dumps(self):
with closing(StringIO()) as fileobj:
self.dump(fileobj)
return fileobj.getvalue() | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'dumps'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'i... | Dump this instance as YAML. |
def dispatch_job_hook(self, link, key, job_config, logfile, stream=sys.stdout):
raise NotImplementedError("SysInterface.dispatch_job_hook") | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '14']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'dispatch_job_hook'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '8', '9']}; {'id': '4', 'type': 'identifier',... | Hook to dispatch a single job |
def _raise_on_bad_jar_filename(jar_filename):
if jar_filename is None:
return
if not isinstance(jar_filename, string_type):
raise TypeError("jar_filename is not a string: %r" % jar_filename)
if not os.path.exists(jar_filename):
raise ValueError("jar_filename d... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_raise_on_bad_jar_filename'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], ... | Ensure that jar_filename is a valid path to a jar file. |
def ping_directories_handler(sender, **kwargs):
entry = kwargs['instance']
if entry.is_visible and settings.SAVE_PING_DIRECTORIES:
for directory in settings.PING_DIRECTORIES:
DirectoryPinger(directory, [entry]) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'ping_directories_handler'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [... | Ping directories when an entry is saved. |
def concat(x, y, axis=0):
if all([isinstance(df, (pd.DataFrame, pd.Series)) for df in [x, y]]):
return pd.concat([x, y], axis=axis)
else:
if axis == 0:
return np.concatenate([x, y])
else:
return np.column_stack([x, y]) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'concat'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | Concatenate a sequence of pandas or numpy objects into one entity. |
def from_xml(self, doc):
import xml.sax
handler = DomainDumpParser(self)
xml.sax.parse(doc, handler)
return handler | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'from_xml'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'sel... | Load this domain based on an XML document |
def step(self):
self.director.window.switch_to()
self.director.window.dispatch_events()
self.director.window.dispatch_event('on_draw')
self.director.window.flip()
pyglet.clock.tick() | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'step'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'id... | Step the engine one tick |
def score_evidence_list(self, evidences):
def _score(evidences):
if not evidences:
return 0
sources = [ev.source_api for ev in evidences]
uniq_sources = numpy.unique(sources)
syst_factors = {s: self.prior_probs['syst'][s]
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'score_evidence_list'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'v... | Return belief score given a list of supporting evidences. |
def _get_dependencies(sql):
dependencies = []
for (_, placeholder, dollar, _) in SqlStatement._get_tokens(sql):
if placeholder:
variable = placeholder[1:]
if variable not in dependencies:
dependencies.append(variable)
elif dollar:
raise Exception('Invalid sql; $ wit... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_get_dependencies'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ... | Return the list of variables referenced in this SQL. |
def flush(self):
if self.flag[0] != 'r':
with self.write_mutex:
if hasattr(self.db, 'sync'):
self.db.sync()
else:
flag = self.flag
if flag[0] == 'n':
flag = 'c' + flag[1:]
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'flush'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'i... | Synchronizes data to the underlying database file. |
def in_same_box(self, a, b):
assert a in self.micro_indices
assert b in self.micro_indices
for part in self.partition:
if a in part and b in part:
return True
return False | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'in_same_box'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'valu... | Return ``True`` if nodes ``a`` and ``b``` are in the same box. |
def handle_noargs(self, **options):
r = get_r()
since = datetime.utcnow() - timedelta(days=1)
metrics = {}
categories = r.metric_slugs_by_category()
for category_name, slug_list in categories.items():
metrics[category_name] = []
for slug in slug_list:
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'handle_noargs'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':... | Send Report E-mails. |
def decode_arr(data):
data = data.encode('utf-8')
return frombuffer(base64.b64decode(data), float64) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'decode_arr'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'data'}... | Extract a numpy array from a base64 buffer |
def _extract_zip(archive, dest=None, members=None):
dest = dest or os.getcwd()
members = members or archive.infolist()
for member in members:
if isinstance(member, basestring):
member = archive.getinfo(member)
_extract_zip_member(archive, member, dest) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '11']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_extract_zip'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '8']}; {'id': '4', 'type': 'identifier', 'children': [], 'va... | Extract the ZipInfo object to a real file on the path targetpath. |
def same_syllabic_feature(ch1, ch2):
if ch1 == '.' or ch2 == '.':
return False
ch1 = 'V' if ch1 in VOWELS else 'C' if ch1 in CONSONANTS else None
ch2 = 'V' if ch2 in VOWELS else 'C' if ch2 in CONSONANTS else None
return ch1 == ch2 | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'same_syllabic_feature'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], ... | Return True if ch1 and ch2 are both vowels or both consonants. |
def combining_goal(state):
((corner, edge), (L, U, F, D, R, B)) = state
if "U" not in corner or "U" not in edge: return False
if set(edge).issubset(set(corner)): return True
elif set(edge.facings.keys()).issubset(set(corner.facings.keys())): return False
opposite = {"L":"R", "R":... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'combining_goal'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'st... | Check if two Cubies are combined on the U face. |
def stage_all(self):
LOGGER.info('Staging all files')
self.repo.git.add(A=True) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'stage_all'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'};... | Stages all changed and untracked files |
def _parse_total_magnetization(line, lines):
toks = line.split()
res = {"number of electrons": float(toks[3])}
if len(toks) > 5:
res["total magnetization"] = float(toks[5])
return res | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_parse_total_magnetization'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children':... | Parse the total magnetization, which is somewhat hidden |
def chunks(self):
if not hasattr(self, '_it'):
class ChunkIterator(object):
def __iter__(iter):
return iter
def __next__(iter):
try:
chunk = self._next_chunk()
except StopIteration:
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'chunks'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'... | Returns a chunk iterator over the sound. |
def args_update(self):
for key, value in self._config_data.items():
setattr(self._default_args, key, value) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'args_update'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'... | Update the argparser namespace with any data from configuration file. |
def getVersionNumber(self):
print '%s call getVersionNumber' % self.port
versionStr = self.__sendCommand(WPANCTL_CMD + 'getprop -v NCP:Version')[0]
return self.__stripValue(versionStr) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'getVersionNumber'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | get OpenThreadWpan stack firmware version number |
def com_google_fonts_check_currency_chars(ttFont):
def font_has_char(ttFont, codepoint):
for subtable in ttFont['cmap'].tables:
if codepoint in subtable.cmap:
return True
return False
failed = False
OPTIONAL = {
}
MANDATORY = {
0x20AC: "EURO SIGN"
}
for codepoint, charname in OPT... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'com_google_fonts_check_currency_chars'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'chil... | Font has all expected currency sign characters? |
def isAllowed(self, assoc_type, session_type):
assoc_good = (assoc_type, session_type) in self.allowed_types
matches = session_type in getSessionTypes(assoc_type)
return assoc_good and matches | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'isAllowed'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'... | Is this combination of association type and session type allowed? |
def docker_py_dict(self):
return {
'image': self.image,
'command': self.cmd,
'hostname': self.hostname,
'user': self.user,
'detach': self.detach,
'stdin_open': self.open_stdin,
'tty': self.tty,
'ports': self.exposed_... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'docker_py_dict'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'se... | Convert object to match valid docker-py properties. |
def _run_germline(bam_file, data, ref_file, region, out_file):
work_dir = utils.safe_makedir("%s-work" % utils.splitext_plus(out_file)[0])
region_bed = strelka2.get_region_bed(region, [data], out_file, want_gzip=False)
example_dir = _make_examples(bam_file, data, ref_file, region_bed, out_file, work_dir)
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_run_germline'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '8']}; {'id': '4', 'type': 'identifier', 'children... | Single sample germline variant calling. |
def _make_symbol_function(handle, name, func_name):
code, doc_str = _generate_symbol_function_code(handle, name, func_name)
local = {}
exec(code, None, local)
symbol_function = local[func_name]
symbol_function.__name__ = func_name
symbol_function.__doc__ = doc_str
symbol_function.__module__ ... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_make_symbol_function'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children':... | Create a symbol function by handle and function name. |
def _jamo_to_hangul_char(lead, vowel, tail=0):
lead = ord(lead) - _JAMO_LEAD_OFFSET
vowel = ord(vowel) - _JAMO_VOWEL_OFFSET
tail = ord(tail) - _JAMO_TAIL_OFFSET if tail else 0
return chr(tail + (vowel - 1) * 28 + (lead - 1) * 588 + _JAMO_OFFSET) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_jamo_to_hangul_char'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': ... | Return the Hangul character for the given jamo characters. |
def writetofile(self, filename):
    """Writes the in-memory zip to a file.

    :param filename: path of the destination file (overwritten if present)
    """
    # Context manager guarantees the handle is closed even when
    # self.read() or the write raises; the original leaked it on error.
    with open(filename, "w") as f:
        f.write(self.read())
def map_parameters(cls, params):
    """Maps parameters to form field names.

    Keys are matched case-insensitively against ``cls.FIELD_MAP``; keys
    with no mapping are kept unchanged.

    :param params: mapping of parameter name -> value
    :returns: new dict keyed by form field names
    """
    # The surrounding file is Python 3 (annotated signatures elsewhere),
    # so the six.iteritems() compat shim is unnecessary; a dict
    # comprehension replaces the manual accumulation loop.
    return {cls.FIELD_MAP.get(key.lower(), key): value
            for key, value in params.items()}
def from_percent(position_percent):
if not isinstance(position_percent, int):
raise PyVLXException("Position::position_percent_has_to_be_int")
if position_percent < 0:
raise PyVLXException("Position::position_percent_has_to_be_positive")
if position_percent > 100:
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'from_percent'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'posi... | Create raw value out of percent position. |
def parse(filename):
    """Parse ASDL from the given file and return a Module node describing it."""
    with open(filename) as f:
        source = f.read()
    return ASDLParser().parse(source)
def sample_stats_prior_to_xarray(self):
    """Convert sample_stats_prior samples to xarray.

    Raises TypeError when ``sample_stats_prior`` is not a dict.
    """
    samples = self.sample_stats_prior
    if not isinstance(samples, dict):
        raise TypeError("DictConverter.sample_stats_prior is not a dictionary")
    return dict_to_dataset(samples, library=None, coords=self.coords, dims=self.dims)
def key(
    seq: Sequence,
    tooth: Callable[[Sequence], str] = (
        lambda seq: str(random.SystemRandom().choice(seq)).strip()
    ),
    nteeth: int = 6,
    delimiter: str = ' ',
) -> str:
    """Concatenate strings generated by the tooth function.

    Calls ``tooth(seq)`` ``nteeth`` times and joins the results with
    ``delimiter``. The default tooth picks a random element of ``seq``
    using a cryptographically strong RNG.
    """
    teeth = [tooth(seq) for _ in range(nteeth)]
    return delimiter.join(teeth)
def abort(self):
    """Close the transport immediately.

    No-op when the handle is already closed; raises TransportError when
    the transport was never started.
    """
    handle = self._handle
    if handle.closed:
        return
    if self._protocol is None:
        raise TransportError('transport not started')
    handle.close(self._on_close_complete)
    # close() is expected to mark the handle closed synchronously.
    assert handle.closed
def getAttribute(self, attribute):
if type(attribute) in (list, tuple):
if len(attribute) != 2:
raise LookupError, 'To access attributes must use name or (namespace,name)'
ns_dict = self.attributes.get(attribute[0])
if ns_dict is None:
return N... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'getAttribute'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ... | return requested attribute value or None |
def process_fastq_plain(fastq, **kwargs):
logging.info("Nanoget: Starting to collect statistics from plain fastq file.")
inputfastq = handle_compressed_input(fastq)
return ut.reduce_memory_usage(pd.DataFrame(
data=[res for res in extract_from_fastq(inputfastq) if res],
columns=["quals", "len... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'process_fastq_plain'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'v... | Combine metrics extracted from a fastq file. |
def success(self, cmd, desc=''):
    """Style for a success message."""
    color = self.success_color
    return self._label_desc(cmd, desc, color)
def hsts_header(self):
    """Returns the proper HSTS policy."""
    parts = ['max-age={0}'.format(self.hsts_age)]
    if self.hsts_include_subdomains:
        parts.append('includeSubDomains')
    return '; '.join(parts)
def from_tfp(
posterior=None,
*,
var_names=None,
model_fn=None,
feed_dict=None,
posterior_predictive_samples=100,
posterior_predictive_size=1,
observed=None,
coords=None,
dims=None
):
return TfpConverter(
posterior=posterior,
var_names=var_names,... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '32']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'from_tfp'}; {'id': '3', 'type': 'parameters', 'children': ['4', '7', '8', '11', '14', '17', '20', '23', '26', '29']}; {'id': '4', 'ty... | Convert tfp data into an InferenceData object. |
def init_build_dir(self):
    """Clear out the build directory and create a new one."""
    build_dir = self.build_dir
    logger.debug("Initializing %s" % build_dir)
    if self.verbosity > 1:
        self.stdout.write("Initializing build directory")
    # Wipe any previous build output before recreating the directory.
    if self.fs.exists(build_dir):
        self.fs.removetree(build_dir)
    self.fs.makedirs(build_dir)
def writeXMLFile(filename, content):
    """Used only for debugging to write out intermediate files.

    :param filename: destination path
    :param content: etree element to serialize
    """
    serialized = etree.tostring(content, pretty_print=True)
    # NOTE(review): lxml's tostring returns bytes, which would fail in a
    # text-mode file - confirm which etree binding is in use; mode kept
    # as-is.
    # Context manager closes the handle even if the write raises; the
    # original leaked it on error.
    with open(filename, 'w') as xmlfile:
        xmlfile.write(serialized)
def index(self, prefix):
    """Return the model index for a prefix.

    Returns None when no URL type matches.
    """
    # External URLs are all filed under the generic 'http' type.
    if self.is_external_url_prefix(prefix):
        prefix = 'http'
    for position, urltype in enumerate(self._url_types):
        if urltype.prefix == prefix:
            return position
    return None
def getOverlayTransformTrackedDeviceComponent(self, ulOverlayHandle, pchComponentName, unComponentNameSize):
fn = self.function_table.getOverlayTransformTrackedDeviceComponent
punDeviceIndex = TrackedDeviceIndex_t()
result = fn(ulOverlayHandle, byref(punDeviceIndex), pchComponentName, unComponen... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'getOverlayTransformTrackedDeviceComponent'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': ... | Gets the transform information when the overlay is rendering on a component. |
def fix_crinfo(crinfo, to="axis"):
    """Function recognize order of crinfo and convert it to proper format.

    A leading dimension of 2 is taken to mean the transposed layout and
    is flipped; anything else is returned as an array unchanged.
    """
    arr = np.asarray(crinfo)
    if arr.shape[0] == 2:
        arr = arr.T
    return arr
def _get_cpu_virtualization(self):
try:
cpu_vt = self._get_bios_setting('ProcVirtualization')
except exception.IloCommandNotSupportedError:
return False
if cpu_vt == 'Enabled':
vt_status = True
else:
vt_status = False
return vt_stat... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_get_cpu_virtualization'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'va... | get cpu virtualization status. |
def delete(context, resource, id, **kwargs):
etag = kwargs.pop('etag', None)
id = id
subresource = kwargs.pop('subresource', None)
subresource_id = kwargs.pop('subresource_id', None)
uri = '%s/%s/%s' % (context.dci_cs_api, resource, id)
if subresource:
uri = '%s/%s/%s' % (uri, subresourc... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'delete'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'children': [], 'valu... | Delete a specific resource |
def identify(self, req, resp, resource, uri_kwargs):
    """Return the X-Api-Key header value, or None when it is absent."""
    try:
        api_key = req.get_header('X-Api-Key', True)
    except (KeyError, HTTPMissingHeader):
        return None
    return api_key
def subnode(self, node):
    """Make `node` receiver's child."""
    node.parent = self
    self.children.append(node)
    # Re-align the child's interleave now that it has a parent.
    node.adjust_interleave(node.interleave)
def list_build_configurations_for_set(id=None, name=None, page_size=200, page_index=0, sort="", q=""):
    """List all build configurations in a given BuildConfigurationSet.

    Returns the formatted JSON list, or None when the raw call yields
    nothing.
    """
    raw = list_build_configurations_for_set_raw(id, name, page_size, page_index, sort, q)
    if not raw:
        return None
    return utils.format_json_list(raw)
def write(filename, mesh, fmt_version, write_binary=True):
try:
writer = _writers[fmt_version]
except KeyError:
try:
writer = _writers[fmt_version.split(".")[0]]
except KeyError:
raise ValueError(
"Need mesh format in {} (got {})".format(
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '10']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'write'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'children': [], 'valu... | Writes a Gmsh msh file. |
def validate_unicode_decode_error_handler(dummy, value):
if value not in _UNICODE_DECODE_ERROR_HANDLERS:
raise ValueError("%s is an invalid Unicode decode error handler. "
"Must be one of "
"%s" % (value, tuple(_UNICODE_DECODE_ERROR_HANDLERS)))
return va... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'validate_unicode_decode_error_handler'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', ... | Validate the Unicode decode error handler option of CodecOptions. |
def _dist_obs_oracle(oracle, query, trn_list):
a = np.subtract(query, [oracle.f_array[t] for t in trn_list])
return (a * a).sum(axis=1) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_dist_obs_oracle'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], ... | A helper function calculating distances between a feature and frames in oracle. |
def convert_file(filename, renderer):
    """Parse a Markdown file and dump the output to stdout."""
    try:
        with open(filename, 'r') as fin:
            print(mistletoe.markdown(fin, renderer), end='')
    except OSError:
        # Unreadable/missing file: exit with a user-facing message.
        sys.exit('Cannot open file "{}".'.format(filename))
def status(name, init_system, verbose):
    """Print the JSON-formatted status of service `name`."""
    try:
        result = Serv(init_system, verbose=verbose).status(name)
    except ServError as ex:
        sys.exit(ex)
    # Pretty-print with stable key ordering.
    click.echo(json.dumps(result, indent=4, sort_keys=True))
def refresh(self):
    """Refreshes the internal lookup table if necessary."""
    try:
        mapping = self.cloud_discovery.discover_nodes()
    except Exception as ex:
        # Best-effort: keep the previous table and log the failure.
        self.logger.warning("Failed to load addresses from Hazelcast.cloud: {}".format(ex.args[0]),
                            extra=self._logger_extras)
    else:
        self._private_to_public = mapping
def build_mutation_pruner_plugin() -> LaserPlugin:
    """Creates an instance of the mutation pruner plugin"""
    # Imported lazily inside the factory, as in the original, so the
    # plugin module is only loaded when actually requested.
    from mythril.laser.ethereum.plugins.implementations.mutation_pruner import (
        MutationPruner,
    )
    plugin = MutationPruner()
    return plugin
def _fit_and_score(est, x, y, scorer, train_index, test_index, parameters, fit_params, predict_params):
X_train, y_train = _safe_split(est, x, y, train_index)
train_params = fit_params.copy()
est.set_params(**parameters)
est.fit(X_train, y_train, **train_params)
test_predict_params = predict_params.... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '13']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_fit_and_score'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '8', '9', '10', '11', '12']}; {'id': '4', 'type'... | Train survival model on given data and return its score on test data |
def health_check(self):
logger.debug('Health Check on Table: {namespace}'.format(
namespace=self.namespace
))
try:
self.get_all()
return True
except ClientError as e:
logger.exception(e)
logger.error('Error encountered with Data... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'health_check'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self... | Gets a single item to determine if Dynamo is functioning. |
def serialize(self):
    """Returns a serializable dictionary that represents the result object"""
    # Children serialize themselves recursively.
    serialized_children = [child.serialize() for child in self.children]
    return {
        'name': self.name,
        'weight': self.weight,
        'value': self.value,
        'msgs': self.msgs,
        'children': serialized_children,
    }
def query_base_timer(self):
    """gets the value from the device's base timer"""
    response = self.con.send_xid_command("e3", 6)
    # Layout: two single-byte fields followed by a little-endian uint32.
    _, _, timer_value = unpack('<ccI', response)
    return timer_value
def getContainerName(job):
    """Create a random string including the job name, and return it."""
    token = base64.b64encode(os.urandom(9), b'-_').decode('utf-8')
    name = '--'.join([str(job), token])
    # Strip characters that are unsafe in container names (note '_' is a
    # base64 altchar here, so it may be removed from the random part).
    for ch in ("'", '"', '_'):
        name = name.replace(ch, '')
    return name
def copy(self, src, dst, suppress_layouts=False):
url = '/'.join([src.drive,
'api/copy',
str(src.relative_to(src.drive)).rstrip('/')])
params = {'to': str(dst.relative_to(dst.drive)).rstrip('/'),
'suppressLayouts': int(suppress_layouts)}
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '10']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'copy'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'children': [], 'value... | Copy artifact from src to dst |
def _get_warped_variance(self, mean, std, pred_init=None, deg_gauss_hermite=20):
gh_samples, gh_weights = np.polynomial.hermite.hermgauss(deg_gauss_hermite)
gh_samples = gh_samples[:, None]
gh_weights = gh_weights[None, :]
arg1 = gh_weights.dot(self._get_warped_term(mean, std, gh_samples... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '13']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_get_warped_variance'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '10']}; {'id': '4', 'type': 'identifier', ... | Calculate the warped variance by using Gauss-Hermite quadrature. |
def _get_versions_manifest(manifest_dir):
all_pkgs = _manifest_progs + [p.get("name", p["cmd"]) for p in _cl_progs] + [p["name"] for p in _alt_progs]
if os.path.exists(manifest_dir):
out = []
for plist in ["toolplus", "python", "r", "debian", "custom"]:
pkg_file = os.path.join(manife... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_get_versions_manifest'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'val... | Retrieve versions from a pre-existing manifest of installed software. |
def regions_coverage(bed_file, target_name, data):
    """Generate coverage over regions of interest using mosdepth."""
    # Reuse a pre-computed regions BED recorded in the sample data, if any.
    cached = tz.get_in(["depth", target_name, "regions"], data)
    if cached:
        return cached
    return run_mosdepth(data, target_name, bed_file).regions
def _augment_node(node: BaseEntity) -> BaseEntity:
rv = node.copy()
rv['id'] = node.as_sha512()
rv['bel'] = node.as_bel()
for m in chain(node.get(MEMBERS, []), node.get(REACTANTS, []), node.get(PRODUCTS, [])):
m.update(_augment_node(m))
return rv | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8', '10']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_augment_node'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'typed_parameter', 'children': ['5', ... | Add the SHA-512 identifier to a node's dictionary. |
def islive(self, state):
reachable = [state]
i = 0
while i < len(reachable):
current = reachable[i]
if current in self.finals:
return True
if current in self.map:
for symbol in self.map[current]:
next = self.map[current][symbol]
if next not in reachable:
reachable.append(next)
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'islive'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'... | A state is "live" if a final state can be reached from it. |
def _startReapingProcesses(self):
    """Start a LoopingCall that calls reapAllProcesses."""
    reaper = LoopingCall(self._reapAllProcesses)
    reaper.clock = self._reactor
    # 0.1s interval; the False flag is LoopingCall.start's `now`
    # argument (see Twisted docs) - first call is deferred.
    reaper.start(0.1, False)
def persist(self, storageLevel):
if not isinstance(storageLevel, StorageLevel):
raise TypeError("`storageLevel` should be a StorageLevel, got %s" % type(storageLevel))
javaStorageLevel = self._java_matrix_wrapper._sc._getJavaStorageLevel(storageLevel)
self._java_matrix_wrapper.call("... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'persist'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self... | Persists the underlying RDD with the specified storage level. |
def getPorts(self):
if self.ports:
return self.ports
if not self._gotPorts:
self.ports = [
portpicker.pick_unused_port(),
portpicker.pick_unused_port(),
portpicker.pick_unused_port(),
]
self._gotPorts = True
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'getPorts'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; ... | acquire ports to be used by the SC2 client launched by this process |
def bounds(self):
google_x, google_y = self.google
pixel_x_west, pixel_y_north = google_x * TILE_SIZE, google_y * TILE_SIZE
pixel_x_east, pixel_y_south = (google_x + 1) * TILE_SIZE, (google_y + 1) * TILE_SIZE
point_min = Point.from_pixel(pixel_x=pixel_x_west, pixel_y=pixel_y_south, zoom=... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'bounds'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'... | Gets the bounds of a tile represented as the most west and south point and the most east and north point |
def mate_bottom(self):
    """Bottom of the stator."""
    # Origin at the stator's lower face, normal pointing downward.
    origin = (0, 0, -self.length / 2)
    coords = CoordSystem(origin=origin, xDir=(1, 0, 0), normal=(0, 0, -1))
    return Mate(self, coords)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.