code stringlengths 51 2.34k | sequence stringlengths 1.16k 13.1k | docstring stringlengths 11 171 |
|---|---|---|
def add_request_ids_from_environment(logger, name, event_dict):
if ENV_APIG_REQUEST_ID in os.environ:
event_dict['api_request_id'] = os.environ[ENV_APIG_REQUEST_ID]
if ENV_LAMBDA_REQUEST_ID in os.environ:
event_dict['lambda_request_id'] = os.environ[ENV_LAMBDA_REQUEST_ID]
return event_dict | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'add_request_ids_from_environment'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', ... | Custom processor adding request IDs to the log event, if available. |
def tree_node_to_xml(self, parent, item):
tree = self.treeview
data = self.treedata[item]
node = data.to_xml_node()
children = tree.get_children(item)
for child in children:
cnode = ET.Element('child')
cwidget = self.tree_node_to_xml(item, child)
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'tree_node_to_xml'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], ... | Converts a treeview item and children to xml nodes |
def getContactUIDForUser(self):
membership_tool = api.get_tool("portal_membership")
member = membership_tool.getAuthenticatedMember()
username = member.getUserName()
r = self.portal_catalog(
portal_type="Contact",
getUsername=username
)
if len(r) =... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'getContactUIDForUser'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value... | Get the UID of the user associated with the authenticated user |
def setup_console_logger(log_level='error', log_format=None, date_format=None):
if is_console_configured():
logging.getLogger(__name__).warning('Console logging already configured')
return
__remove_temp_logging_handler()
if log_level is None:
log_level = 'warning'
level = LOG_LEV... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '13']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'setup_console_logger'}; {'id': '3', 'type': 'parameters', 'children': ['4', '7', '10']}; {'id': '4', 'type': 'default_parameter', 'ch... | Setup the console logger |
def reads(text, fmt, as_version=4, **kwargs):
fmt = copy(fmt)
fmt = long_form_one_format(fmt)
ext = fmt['extension']
if ext == '.ipynb':
return nbformat.reads(text, as_version, **kwargs)
format_name = read_format_from_metadata(text, ext) or fmt.get('format_name')
if format_name:
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '11']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'reads'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '9']}; {'id': '4', 'type': 'identifier', 'children': [], 'valu... | Read a notebook from a string |
def to_iso_date(self, dt):
if dt is None:
return ""
if isinstance(dt, DateTime):
return dt.ISO8601()
if isinstance(dt, datetime):
return dt.isoformat()
raise TypeError("{} is neiter an instance of DateTime nor datetime"
.format(... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'to_iso_date'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | Return the ISO representation of a date object |
def taf(trans: TafLineTrans) -> str:
summary = []
if trans.wind:
summary.append('Winds ' + trans.wind)
if trans.visibility:
summary.append('Vis ' + trans.visibility[:trans.visibility.find(' (')].lower())
if trans.altimeter:
summary.append('Alt ' + trans.altimeter[:trans.altimeter... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8', '10']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'taf'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'typed_parameter', 'children': ['5', '6']}; {'i... | Condense the translation strings into a single forecast summary string |
def filter_nodes(graph: BELGraph, node_predicates: NodePredicates) -> Iterable[BaseEntity]:
concatenated_predicate = concatenate_node_predicates(node_predicates=node_predicates)
for node in graph:
if concatenated_predicate(graph, node):
yield node | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '12', '18']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'filter_nodes'}; {'id': '3', 'type': 'parameters', 'children': ['4', '8']}; {'id': '4', 'type': 'typed_parameter', 'children': [... | Apply a set of predicates to the nodes iterator of a BEL graph. |
def public_dsn(dsn):
m = RE_DSN.match(dsn)
if not m:
log.error('Unable to parse Sentry DSN')
public = '{scheme}://{client_id}@{domain}/{site_id}'.format(
**m.groupdict())
return public | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'public_dsn'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'dsn'};... | Transform a standard Sentry DSN into a public one |
def list_metafeatures(cls, group="all"):
if group == "all":
return copy.deepcopy(cls.IDS)
elif group == "landmarking":
return list(filter(
lambda mf_id: "ErrRate" in mf_id or "Kappa" in mf_id, cls.IDS
))
elif group == "target_dependent":
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'list_metafeatures'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'val... | Returns a list of metafeatures computable by the Metafeatures class. |
def _send_command_list(self, commands):
output = ""
for command in commands:
output += self.device.send_command(
command, strip_prompt=False, strip_command=False
)
return output | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_send_command_list'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'va... | Wrapper for Netmiko's send_command method (for list of commands. |
def _supply_data(data_sink, context):
try:
data_sink.sink(context)
except Exception as e:
ex = ValueError("An exception occurred while "
"supplying data to data sink '{ds}'\n\n"
"{e}\n\n"
"{help}".format(ds=context.name,
e=str(e), help=context.... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_supply_data'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ... | Supply data to the data sink |
def _process_scrape_info(self, scraper: BaseScraper,
scrape_result: ScrapeResult,
item_session: ItemSession):
if not scrape_result:
return 0, 0
num_inline = 0
num_linked = 0
for link_context in scrape_result.link_conte... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '17']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_process_scrape_info'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '9', '13']}; {'id': '4', 'type': 'identifier', 'chil... | Collect the URLs from the scrape info dict. |
def expr(obj, simplify=True):
if isinstance(obj, Expression):
return obj
elif isinstance(obj, int) and obj in {0, 1}:
return _CONSTS[obj]
elif isinstance(obj, str):
ast = pyeda.parsing.boolexpr.parse(obj)
ex = ast2expr(ast)
if simplify:
ex = ex.simplify()
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'expr'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'obj'}; ... | Convert an arbitrary object into an Expression. |
def _update_heap(self, peer):
rank = self.rank_calculator.get_rank(peer)
if rank == peer.rank:
return
peer.rank = rank
self.peer_heap.update_peer(peer) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_update_heap'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ... | Recalculate the peer's rank and update itself in the peer heap. |
def use_federated_objective_bank_view(self):
self._objective_bank_view = FEDERATED
for session in self._get_provider_sessions():
try:
session.use_federated_objective_bank_view()
except AttributeError:
pass | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'use_federated_objective_bank_view'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children... | Pass through to provider ObjectiveLookupSession.use_federated_objective_bank_view |
def drop_empty_tables(self):
counts = self.count_rows_all()
drops = []
for table, count in counts.items():
if count < 1:
self.drop(table)
drops.append(table)
return drops | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'drop_empty_tables'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ... | Drop all empty tables in a database. |
def _code_line(self, line):
assert self._containers
container = self._containers[-1]
text = line
while text:
if text.startswith(' '):
r = re.match(r'(^ +)', text)
n = len(r.group(1))
container.addElement(S(c=n))
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_code_line'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 's... | Add a code line. |
def LookupWsdlType(self, ns, name, allowManagedObjectReference=False):
try:
return GetWsdlType(ns, name)
except KeyError:
if allowManagedObjectReference:
if name.endswith('ManagedObjectReference') and ns == XMLNS_VMODL_BASE:
return GetWsdlType(ns, name[:-len('Ref... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '10']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'LookupWsdlType'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'children': ... | Lookup wsdl type. Handle special case for some vmodl version |
def bytes_to_string(raw_data: bytes, prefix: bool = False) -> str:
prefix_string = ''
if prefix:
prefix_string = '0x'
suffix = ''.join([format(c, "02x") for c in raw_data])
return prefix_string + suffix.upper() | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '13', '15']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'bytes_to_string'}; {'id': '3', 'type': 'parameters', 'children': ['4', '8']}; {'id': '4', 'type': 'typed_parameter', 'children'... | Convert a byte array to a hex string. |
def hyperbola(axes, **kwargs):
opens_up = kwargs.pop('opens_up', True)
center = kwargs.pop('center', defaults['center'])
th = N.linspace(0,2*N.pi,kwargs.pop('n', 500))
vals = [N.tan(th),1/N.cos(th)]
if not opens_up:
vals = vals[::-1]
x = axes[0]*vals[0]+center[0]
y = axes[1]*vals[1]+... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'hyperbola'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'ax... | Plots a hyperbola that opens along y axis |
def number_of_permutations(self):
if self.permutations_safe_override:
return factorial(self.coordination)
elif self.permutations is None:
return factorial(self.coordination)
return len(self.permutations) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'number_of_permutations'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'val... | Returns the number of permutations of this coordination geometry. |
def _print_summary_map(strm, result_map, ftype):
if len(result_map) == 0:
return 0
npass = len([x for k, x in result_map.iteritems() if len(x) == 0])
strm.write('=====%d/%d %s files passed check=====\n' % (npass, len(result_map), ftype))
for fname, emap in result_map.iteritem... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_print_summary_map'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': []... | Print summary of certain result map. |
def add_file(self, path):
libdoc = LibraryDocumentation(path)
if len(libdoc.keywords) > 0:
if libdoc.doc.startswith("Documentation for resource file"):
libdoc.doc = ""
collection_id = self.add_collection(path, libdoc.name, libdoc.type,
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'add_file'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'sel... | Add a resource file or library file to the database |
def inHouseJoy(self):
house = self.house()
return props.object.houseJoy[self.obj.id] == house.id | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'inHouseJoy'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}... | Returns if the object is in its house of joy. |
def _prep_fields_param(fields):
store_samples = False
if fields is None:
return True, None
if isinstance(fields, str):
fields = [fields]
else:
fields = list(fields)
if 'samples' in fields:
fields.remove('samples')
store_samples = True
elif '*' in fields:
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_prep_fields_param'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':... | Prepare the `fields` parameter, and determine whether or not to store samples. |
def print_network(self):
edges = set()
def gen_edges(node):
nonlocal edges
name = str(id(node))
yield '{name} [label="{cls_name}"];'.format(
name=name,
cls_name=str(node))
for child in node.children:
if (node... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'print_network'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'sel... | Generate a graphviz compatible graph. |
async def get_default(cls):
data = await cls._handler.read(id=cls._default_space_id)
return cls(data) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'get_default'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'cls'}... | Get the 'default' Space for the MAAS. |
def visit_comprehension(self, node, parent):
newnode = nodes.Comprehension(parent)
newnode.postinit(
self.visit(node.target, newnode),
self.visit(node.iter, newnode),
[self.visit(child, newnode) for child in node.ifs],
getattr(node, "is_async", None),
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'visit_comprehension'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [... | visit a Comprehension node by returning a fresh instance of it |
def summary(self, campaign_id=None):
resource_cls = CampaignSummary
single_resource = False
if not campaign_id:
resource_cls = CampaignSummaries
single_resource = True
return super(API, self).get(
resource_id=campaign_id,
resource_action='s... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'summary'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self... | Returns the campaign summary |
def _link_error_cb(self, errmsg):
logger.warning('Got link error callback [%s] in state [%s]',
errmsg, self.state)
if (self.link is not None):
self.link.close()
self.link = None
if (self.state == State.INITIALIZED):
self.connection_failed.ca... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_link_error_cb'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'... | Called from the link driver when there's an error |
def _get_policies(self):
username = self._get_username_for_key()
policies = self.client.list_user_policies(
UserName=username
)
return policies | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_get_policies'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'sel... | Returns all the policy names for a given user |
def progress_bar(self, c, broker):
v = broker.get(c)
if v and isinstance(v, dict) and len(v) > 0 and 'type' in v:
if v["type"] in self.responses:
print(self.responses[v["type"]].color + self.responses[v["type"]].intl + Style.RESET_ALL, end="", file=self.stream)
el... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'progress_bar'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'val... | Print the formated progress information for the processed return types |
def add(from_user, from_id, to_user, to_id, type):
"adds a relation to the graph"
if options.users and to_user:
G.add_node(from_user, screen_name=from_user)
G.add_node(to_user, screen_name=to_user)
if G.has_edge(from_user, to_user):
weight = G[from_user][to_user]['weight'] + ... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'add'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '8']}; {'id': '4', 'type': 'identifier', 'children': [], 'va... | adds a relation to the graph |
def infer_format(filename:str) -> str:
_, ext = os.path.splitext(filename)
return ext | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8', '10']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'infer_format'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'typed_parameter', 'children': ['5', '... | Return extension identifying format of given filename |
def syntax_error(lineno, msg):
if global_.has_errors > OPTIONS.max_syntax_errors.value:
msg = 'Too many errors. Giving up!'
msg = "%s:%i: %s" % (global_.FILENAME, lineno, msg)
msg_output(msg)
if global_.has_errors > OPTIONS.max_syntax_errors.value:
sys.exit(1)
global_.has_errors += 1 | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'syntax_error'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ... | Generic syntax error routine |
def _to_unit_base(self, base_unit, values, unit, from_unit):
self._is_numeric(values)
namespace = {'self': self, 'values': values}
if not from_unit == base_unit:
self.is_unit_acceptable(from_unit, True)
statement = '[self._{}_to_{}(val) for val in values]'.format(
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_to_unit_base'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '8']}; {'id': '4', 'type': 'identifier', 'children... | Return values in a given unit given the input from_unit. |
def forward_reference(self, slot_range: str, owning_class: str) -> bool:
for cname in self.schema.classes:
if cname == owning_class:
return True
elif cname == slot_range:
return False
return True | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '13', '15']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'forward_reference'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '9']}; {'id': '4', 'type': 'identifier', 'childre... | Determine whether slot_range is a forward reference |
def update_dict(d, u):
for key, val in u.items():
if isinstance(val, collections.Mapping):
d[key] = update_dict(d.get(key, {}), val)
else:
d[key] = u[key]
return d | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'update_dict'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | Recursively updates nested dict d from nested dict u |
def getAllContinuousSets(self):
for dataset in self.getAllDatasets():
iterator = self._client.search_continuous_sets(
dataset_id=dataset.id)
for continuousSet in iterator:
yield continuousSet | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'getAllContinuousSets'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value... | Returns all continuous sets on the server. |
def router_added_to_hosting_device(self, context, router):
self._notification(context, 'router_added_to_hosting_device',
[router], operation=None, shuffle_agents=False) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'router_added_to_hosting_device'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'c... | Notify cfg agent about router added to hosting device. |
def build_from_yamlfile(yamlfile):
d = yaml.load(open(yamlfile))
return MktimeFilterDict(d['aliases'], d['selections']) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'build_from_yamlfile'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'... | Build a list of components from a yaml file |
async def pair_with_device(loop):
my_zeroconf = Zeroconf()
details = conf.AppleTV('127.0.0.1', 'Apple TV')
details.add_service(conf.DmapService('login_id'))
atv = pyatv.connect_to_apple_tv(details, loop)
atv.pairing.pin(PIN_CODE)
await atv.pairing.start(zeroconf=my_zeroconf, name=REMOTE_NAME)
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'pair_with_device'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | Make it possible to pair with device. |
def tensor_index(self, datapoint_index):
if datapoint_index >= self._num_datapoints:
raise ValueError('Datapoint index %d is greater than the number of datapoints (%d)' %(datapoint_index, self._num_datapoints))
return self._index_to_file_num[datapoint_index] | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'tensor_index'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ... | Returns the index of the tensor containing the referenced datapoint. |
def in_date(objet, pattern):
if objet:
pattern = re.sub(" ", '', pattern)
objet_str = abstractRender.date(objet)
return bool(re.search(pattern, objet_str))
return False | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'in_date'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'obje... | abstractSearch dans une date datetime.date |
def update_status(self):
try:
self.update_connection_status()
self.max_stream_rate.set(self.get_stream_rate_str())
self.ip.set(self.status.external_ip)
self.uptime.set(self.status.str_uptime)
upstream, downstream = self.status.transmission_rate
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'update_status'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'sel... | Update status informations in tkinter window. |
def _enable_rpcs(self, conn, services, timeout=1.0):
success, result = self._set_notification(conn, services[TileBusService]['characteristics'][TileBusReceiveHeaderCharacteristic], True, timeout)
if not success:
return success, result
return self._set_notification(conn, services[Tile... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '10']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_enable_rpcs'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'children': []... | Prepare this device to receive RPCs |
def del_object_from_parent(self):
if self.parent:
self.parent.objects.pop(self.ref) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'del_object_from_parent'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'val... | Delete object from parent object. |
def _GetFilePaths(self, path, pathtype, kb):
environ_vars = artifact_utils.GetWindowsEnvironmentVariablesMap(kb)
path_guesses = path_detection_windows.DetectExecutablePaths([path],
environ_vars)
if not path_guesses:
return []
return [... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_GetFilePaths'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'children': []... | Guess windows filenames from a commandline string. |
def extract_data_from_response(self, response, data_key=None):
response_json_data = response.json()
if type(response_json_data) == list:
return response_json_data
elif type(response_json_data) == dict:
if data_key is None:
return response_json_data
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'extract_data_from_response'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'child... | Given a response and an optional data_key should return a dictionary of data returned as part of the response. |
def history(self, user=None):
for event in self.changelog:
when, who, what, old, new, ignore = event
if (when >= self.options.since.date and
when <= self.options.until.date):
if user is None or who.startswith(user.login):
yield who,... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'history'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self... | Return relevant who-did-what logs from the ticket history |
def exception_handler(self, ex):
if isinstance(ex, CLIError):
logger.error(ex)
else:
logger.exception(ex)
return 1 | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'exception_handler'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'val... | The default exception handler |
def add_machine(self, machine):
self._logger.debug('Adding machine {} to driver'.format(machine.id))
machine._driver = self
machine._reset()
if machine.id is not None:
Driver._stms_by_id[machine.id] = machine
self._add_event(event_id=None, args=[], kwargs={}, stm=... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'add_machine'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | Add the state machine to this driver. |
def replace_in_files(dirname, replace):
filepath = os.path.abspath(dirname / "requirements.in")
if os.path.isfile(filepath) and header_footer_exists(filepath):
replaced = re.sub(Utils.exp, replace, get_file_string(filepath))
with open(filepath, "w") as f:
f.write(replaced)
pr... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'replace_in_files'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'valu... | Replace current version with new version in requirements files. |
def encodeMsg(self, mesg):
fmt = self.locs.get('log:fmt')
if fmt == 'jsonl':
s = json.dumps(mesg, sort_keys=True) + '\n'
buf = s.encode()
return buf
elif fmt == 'mpk':
buf = s_msgpack.en(mesg)
return buf
mesg = f'Unknown encodin... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'encodeMsg'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'se... | Get byts for a message |
def getmessage(self) -> str:
image = {}
for key, default in vars(self.__class__).items():
if not key.startswith('_') and key !='' and (not key in vars(QueueMessage).items()):
if isinstance(default, datetime.date):
imag... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'getmessage'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 's... | parse self into unicode string as message content |
def explained_variance(returns, values):
    """Fraction of the variance in `returns` accounted for by `values`.

    Computed as 1 - Var(returns - values) / Var(returns); returned as a
    plain Python float.
    """
    residual_variance = torch.var(returns - values)
    total_variance = torch.var(returns)
    return (1 - residual_variance / total_variance).item()
def _make_container_root(name):
path = _root(name)
if os.path.exists(path):
__context__['retcode'] = salt.defaults.exitcodes.SALT_BUILD_FAIL
raise CommandExecutionError(
'Container {0} already exists'.format(name)
)
else:
try:
os.makedirs(path)
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_make_container_root'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value... | Make the container root directory |
def prev(self):
    """Return the segment immediately before this one.

    Raises:
        exceptions.NoMoreSegments: if this is the first segment.
    """
    # idaapi.get_prev_seg yields the segment preceding self.ea; when no
    # earlier segment exists it appears to return a segment at or after
    # self.ea, which is the condition checked below -- confirm against
    # the IDA SDK docs.
    seg = Segment(segment_t=idaapi.get_prev_seg(self.ea))
    if seg.ea >= self.ea:
        raise exceptions.NoMoreSegments("This is the first segment. no segments exist before it.")
    return seg
def to_python(self):
    """Decode this Table into a list of plain-Python row dicts.

    Each row becomes a dict mapping column names to the row's values,
    with every cell converted through _format_python_value.
    """
    columns = self.columns
    return [
        dict(zip(columns, (_format_python_value(cell) for cell in row)))
        for row in self.rows
    ]
def set(self, key, value):
    """Set a key's value regardless of whether a change is seen.

    Delegates to __setitem__ with force=True -- presumably that flag
    bypasses change detection; confirm against __setitem__.
    """
    return self.__setitem__(key, value, force=True)
def _get_current_deployment_label(self):
deploymentId = self._get_current_deployment_id()
deployment = __salt__['boto_apigateway.describe_api_deployment'](restApiId=self.restApiId,
deploymentId=deploymentId,
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_get_current_deployment_label'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [... | Helper method to find the deployment label that the stage_name is currently associated with. |
def getURIWithRedirect(self, url):
tries = 0
while tries < 5:
conn = httplib.HTTPConnection(self.server)
conn.request("GET", url)
r1 = conn.getresponse()
if r1.status in [301, 302, 303, 307]:
location = r1.getheader('Loc... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'getURIWithRedirect'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'va... | fetch a URL with redirect handling |
def _get_data(self, kwargs):
if "ds" not in kwargs:
raise ValueError("Keyword argument 'ds' missing.")
ds = kwargs["ds"]
if self.req_feature:
if "feature" not in kwargs:
raise ValueError("Keyword argument 'feature' missing.")
return self.get_fe... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_get_data'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'se... | Convenience wrapper to get statistics data |
def load_config(self):
    """Load configuration settings from <conf_dir>/dql.json.

    Returns:
        dict: the parsed configuration, or {} when the file does not
        exist.

    Raises:
        json.JSONDecodeError: if the file exists but holds invalid JSON.
    """
    conf_file = os.path.join(self._conf_dir, "dql.json")
    # EAFP: open directly instead of exists()+open(), which avoids the
    # check-then-use race and one redundant stat call.
    try:
        with open(conf_file, "r") as ifile:
            return json.load(ifile)
    except FileNotFoundError:
        return {}
def growthfromrange(rangegrowth, startdate, enddate):
    """Annual growth implied by total growth from startdate to enddate.

    The elapsed span is measured in Julian years (365.25 days) and the
    annualization itself is delegated to yrlygrowth.
    """
    span_seconds = (pd.Timestamp(enddate) - pd.Timestamp(startdate)).total_seconds()
    years = span_seconds / dt.timedelta(365.25).total_seconds()
    return yrlygrowth(rangegrowth, years)
def validatePrivate(self, field, value):
    """Validate a value destined for one of the collection's private fields.

    Raises ValueError when `field` is not a known private field.  When a
    validator is registered for the field in self._fields, it is run
    (and may raise on a bad value).  Returns True on success.
    """
    if field not in self.arangoPrivates:
        raise ValueError("%s is not a private field of collection %s" % (field, self))
    if field in self._fields:
        self._fields[field].validate(value)
    return True
def _make_map(self, limit):
ny = 2000
y = numpy.random.uniform(0., 1., (ny,1))
limit = numpy.arctan(limit)
m = AdaptiveMap([[-limit, limit]], ninc=100)
theta = numpy.empty(y.shape, float)
jac = numpy.empty(y.shape[0], float)
for itn in range(10):
m.map... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_make_map'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'se... | Make vegas grid that is adapted to the pdf. |
def clean(self, value):
if (
self.base_type is not None and
value is not None and
not isinstance(value, self.base_type)
):
if isinstance(self.base_type, tuple):
allowed_types = [typ.__name__ for typ in self.base_type]
allowe... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'clean'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}... | Take a dirty value and clean it. |
def bibtex(self):
    """Return a BibTeX entry for the current article.

    NOTE: each call issues its own ExportQuery round trip, so calling
    this per-article will exhaust API rate limits quickly -- hence the
    UserWarning emitted below.
    """
    warnings.warn("bibtex should be queried with ads.ExportQuery(); You will "
                  "hit API ratelimits very quickly otherwise.", UserWarning)
    return ExportQuery(bibcodes=self.bibcode, format="bibtex").execute()
def change_nick(self, nick):
    """Switch this user's nick and propagate it to every joined channel."""
    previous = self.nick
    new_nick = IRCstr(nick)
    self.nick = new_nick
    for channel in self.channels:
        channel.users.remove(previous)
        channel.users.add(new_nick)
def OnPreferences(self, event):
preferences = self.interfaces.get_preferences_from_user()
if preferences:
for key in preferences:
if type(config[key]) in (type(u""), type("")):
config[key] = preferences[key]
else:
config... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'OnPreferences'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':... | Preferences event handler that launches preferences dialog |
def cmd_long(self, args):
if len(args) < 1:
print("Usage: long <command> [arg1] [arg2]...")
return
command = None
if args[0].isdigit():
command = int(args[0])
else:
try:
command = eval("mavutil.mavlink." + args[0])
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'cmd_long'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'sel... | execute supplied command long |
def init(self):
if not self.export_enable:
return None
logger.info(
"Stats will be exported to StatsD server: {}:{}".format(self.host,
self.port))
return StatsClient(self.host,
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'init'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'id... | Init the connection to the Statsd server. |
def doc_metadata(doc):
    """Build a flat metadata dict from a MetatabDoc for Document conversion.

    Merges the 'Root' and 'Contacts' sections ('Contacts' wins on key
    collisions) and fills 'author' from author / creator / wrangler, in
    that order of preference.
    """
    meta = doc['Root'].as_dict()
    meta.update(doc['Contacts'].as_dict())
    fallback = meta.get('creator', meta.get('wrangler'))
    meta['author'] = meta.get('author', fallback)
    return meta
def value_change(self, vcdId, value):
    """Append a value change from the VCD stream to the signal's series.

    Records a (time, value) sample for the signal identified by vcdId;
    self.now is presumably the current VCD timestamp -- confirm against
    the parser's time handling.
    """
    self.idcode2series[vcdId].append((self.now, value))
def FibreCouple(pupils, modeDiameter):
    """Complex amplitudes coupled from a set of pupils into a fibre mode.

    Each pupil is flattened to a vector and projected onto the
    (flattened) fibre mode via an inner product.
    """
    grid_size = pupils.shape[-1]
    flat_pupils = np.reshape(pupils, (-1, grid_size ** 2))
    flat_mode = np.reshape(FibreMode(grid_size, modeDiameter), (grid_size ** 2,))
    return np.inner(flat_pupils, flat_mode)
def default_diff(latest_config, current_config):
    """Diff two config revisions to decide whether anything changed.

    Strips fields that never participate in diffs, then compares the
    remainder with DeepDiff, ignoring ordering.
    """
    pop_no_diff_fields(latest_config, current_config)
    return DeepDiff(latest_config, current_config, ignore_order=True)
def modify_logging(input_fn, output_fn, min_level_value, max_level_value, restore, force):
logging.info('reading in %s' % input_fn)
fh = open(input_fn, 'r')
lines = fh.readlines()
fh.close()
original_contents = ''.join(lines)
if restore:
forwards = restore_logging
backwards = dis... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '10']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'modify_logging'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '8', '9']}; {'id': '4', 'type': 'identifier', 'c... | Modifies logging statements in the specified file. |
def cli(env, **args):
manager = PlacementManager(env.client)
backend_router_id = helpers.resolve_id(manager.get_backend_router_id_from_hostname,
args.get('backend_router'),
'backendRouter')
rule_id = helpers.resolve_id(man... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'cli'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'env'}; {... | Create a placement group. |
def render_to_json(response, request=None, **kwargs):
if hasattr(response, 'status_code'):
status_code = response.status_code
elif issubclass(type(response), Http404):
status_code = 404
elif issubclass(type(response), Exception):
status_code = 500
logger.exception(str(respons... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '10']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'render_to_json'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '8']}; {'id': '4', 'type': 'identifier', 'children': [], '... | Creates the main structure and returns the JSON response. |
def ctc_symbol_loss(top_out, targets, model_hparams, vocab_size, weight_fn):
del model_hparams, vocab_size
logits = top_out
with tf.name_scope("ctc_loss", values=[logits, targets]):
targets_shape = targets.get_shape().as_list()
assert len(targets_shape) == 4
assert targets_shape[2] == 1
assert tar... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'ctc_symbol_loss'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '8']}; {'id': '4', 'type': 'identifier', 'childr... | Compute the CTC loss. |
def recycle_view(self, position):
    """Bind the item at `position` to this view's declaration.

    An out-of-range position resets the declaration to an empty state
    (index -1, item None).
    """
    decl = self.declaration
    items = decl.parent.items
    in_range = position < len(items)
    decl.index = position if in_range else -1
    decl.item = items[position] if in_range else None
def _draw_satellite_tile(self, tile, x, y):
image, colours = tile.rendered_text
for (i, line) in enumerate(image):
self._screen.paint(line, x, y + i, colour_map=colours[i])
return 1 | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_draw_satellite_tile'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'childr... | Draw a satellite image tile to screen. |
def _get_mark_if_any(self):
line = self.next_line()
if line.startswith(b'mark :'):
return line[len(b'mark :'):]
else:
self.push_line(line)
return None | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_get_mark_if_any'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | Parse a mark section. |
def _get_cached_style_urls(self, asset_url_path):
try:
cached_styles = os.listdir(self.cache_path)
except IOError as ex:
if ex.errno != errno.ENOENT and ex.errno != errno.ESRCH:
raise
return []
except OSError:
return []
retu... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_get_cached_style_urls'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [],... | Gets the URLs of the cached styles. |
def common_wire_version(self):
    """Minimum of all known servers' max wire versions, or None.

    Returns:
        The lowest max_wire_version across known servers, or None when
        no servers are known.
    """
    servers = self.known_servers
    if not servers:
        return None
    # Use the snapshot taken above; the original re-read
    # self.known_servers inside min(), doing the (possibly non-trivial)
    # attribute/property work a second time.
    return min(s.max_wire_version for s in servers)
def find_substring(substring, suffix_tree, edge_repo):
assert isinstance(substring, str)
assert isinstance(suffix_tree, SuffixTree)
assert isinstance(edge_repo, EventSourcedRepository)
if not substring:
return -1
if suffix_tree.case_insensitive:
substring = substring.lower()
curr... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'find_substring'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'v... | Returns the index if substring in tree, otherwise -1. |
def step(self, viewer=None):
"This method evolves one step in time"
if not self.is_completed(self.state):
for agent in self.agents:
action = agent.program(self.percept(agent, self.state))
next_state = self.do_action(self.state, action, agent)
i... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'step'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'};... | This method evolves one step in time |
def do_where(virtualenv=False, bare=True):
if not virtualenv:
if not project.pipfile_exists:
click.echo(
"No Pipfile present at project home. Consider running "
"{0} first to automatically generate a Pipfile for you."
"".format(crayons.green("`pipe... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '10']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'do_where'}; {'id': '3', 'type': 'parameters', 'children': ['4', '7']}; {'id': '4', 'type': 'default_parameter', 'children': ['5', '6'... | Executes the where functionality. |
def change_name(self, username):
self.release_name()
try:
self.server.register_name(username)
except UsernameInUseException:
logging.log(', '.join(self.server.registered_names))
self.server.register_name(self.name)
raise
self.name =... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'change_name'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | changes the username to given username, throws exception if username used |
def _merge_dicts(self, dict1, dict2, path=[]):
"merges dict2 into dict1"
for key in dict2:
if key in dict1:
if isinstance(dict1[key], dict) and isinstance(dict2[key], dict):
self._merge_dicts(dict1[key], dict2[key], path + [str(key)])
elif ... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '10']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_merge_dicts'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'children': []... | merges dict2 into dict1 |
def save_notebook(work_notebook, write_file):
    """Serialize the Jupyter work_notebook dict to write_file as indented JSON."""
    serialized = json.dumps(work_notebook, indent=2)
    with open(write_file, 'w') as out_nb:
        out_nb.write(serialized)
def dispatch(self):
if self.request.request.type == 'IntentRequest':
name = self.request.request.intent.name
else:
name = self.request.request.type
if name in self.logic:
self.logic[name]()
else:
error = 'Unable to find a registered logic f... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'dispatch'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; ... | Calls the matching logic function by request type or intent name. |
def getIndexedValue(self, index):
    """Return the 'value' entry of the indexed Tropo action.

    When self._actions is a list, `index` selects an action dict from
    it; otherwise self._actions is treated as the single action dict
    itself.  Returns 'NoValue' when the action has no 'value' key.
    """
    actions = self._actions
    # isinstance instead of `type(...) is list`; also stop shadowing the
    # builtin `dict` as the original did.
    action = actions[index] if isinstance(actions, list) else actions
    return action.get('value', 'NoValue')
def process(self, versions):
    """Log each tool's version, sorted ascending by tool name."""
    # Tool names are dict keys, hence unique, so sorting the items
    # orders purely by name.
    for tool_name, version in sorted(versions.items()):
        self._log("Using tool '%s', %s" % (tool_name, version))
def from_data(data):
    """Construct a PrettyTable from a list of rows.

    Column headers come from the first row's keys; returns None for
    empty input.
    """
    if not data:  # idiomatic truthiness instead of len(data) == 0
        return None
    ptable = PrettyTable()
    ptable.field_names = data[0].keys()
    for row in data:
        ptable.add_row(row)
    return ptable
def rex_assert(self, rex, byte=False):
    """Assert that regex `rex` matches the document.

    Delegates to rex_search, which raises DataNotFound when the
    expression is not found; the match object is deliberately discarded.
    """
    self.rex_search(rex, byte=byte)
def sendRequest(self, name, args):
    """Send a request to the peer.

    Allocates a response-event/id pair, ships the request message, and
    returns the event the caller can wait on for the reply.
    """
    response_event, request_id = self.newResponseEvent()
    self.sendMessage({"id": request_id, "method": name, "params": args})
    return response_event
def inside(self, other):
    """Return True if this rectangle lies entirely within `other`.

    Containment is judged edge-by-edge below; the top/bottom comparison
    direction follows this project's rectangle coordinate convention.
    """
    return (other.left <= self.left and
            self.right <= other.right and
            self.top <= other.top and
            other.bottom <= self.bottom)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.