code stringlengths 51 2.34k | sequence stringlengths 1.16k 13.1k | docstring stringlengths 11 171 |
|---|---|---|
def dataset_splits(self):
return [{
"split": problem.DatasetSplit.TRAIN,
"shards": _TRAIN_SHARDS,
}, {
"split": problem.DatasetSplit.EVAL,
"shards": _DEV_SHARDS,
}] | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'dataset_splits'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'se... | Splits of data to produce and number of output shards for each. |
def do_allowrep(self, line):
self._split_args(line, 0, 0)
self._command_processor.get_session().get_replication_policy().set_replication_allowed(
True
)
self._print_info_if_verbose("Set replication policy to allow replication") | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'do_allowrep'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | allowrep Allow new objects to be replicated. |
def epoch(ctx, datetime):
return conversions.to_decimal(str(conversions.to_datetime(datetime, ctx).timestamp()), ctx) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'epoch'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'ctx'};... | Converts the given date to the number of seconds since January 1st, 1970 UTC |
def main():
_parse_args()
LOG.info("Starting: %s", __service_id__)
LOG.info('Subscribing to state change events (subscriber = %s)',
__service_name__)
sdp_state = SDPState()
_ = sdp_state.subscribe(subscriber=__service_name__)
_ = _init(sdp_state)
LOG.info('Finished initialising!... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '4']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'main'}; {'id': '3', 'type': 'parameters', 'children': []}; {'id': '4', 'type': 'block', 'children': ['5', '9', '17', '25', '31', '42',... | Merge temp_main and main. |
def getFullPathToSnapshot(self, n):
return os.path.join(self.snapDir, str(n)) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'getFullPathToSnapshot'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], ... | Get the full path to snapshot n. |
def restore_type(self, type):
mapping = {
'BOOLEAN': 'boolean',
'DATE': 'date',
'DATETIME': 'datetime',
'INTEGER': 'integer',
'FLOAT': 'number',
'STRING': 'string',
'TIME': 'time',
}
if type not in mapping:
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'restore_type'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ... | Restore type from BigQuery |
def parse_yaml(self, y):
self.name = y['name']
if RTS_EXT_NS_YAML + 'comment' in y:
self.comment = y[RTS_EXT_NS_YAML + 'comment']
if RTS_EXT_NS_YAML + 'visible' in y:
visible = y.get(RTS_EXT_NS_YAML + 'visible')
if visible == True or visible == 'true' or visib... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'parse_yaml'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 's... | Parse a YAML specification of a data port into this object. |
def __merge_json_values(current, previous):
for value in current:
name = value['name']
previous_value = __find_and_remove_value(previous, value)
if previous_value is not None:
flags = value['flags']
previous_flags = previous_value['flags']
if flags != prev... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '__merge_json_values'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'v... | Merges the values between the current and previous run of the script. |
def check_no_proxy_errors(self, **kwargs):
data = self._es.search(body={
"size": max_query_results,
"query": {
"filtered": {
"query": {
"match_all": {}
},
"filter": {
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'check_no_proxy_errors'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], ... | Helper method to determine if the proxies logged any major errors related to the functioning of the proxy itself |
def create_blobstore(self, **kwargs):
blobstore = predix.admin.blobstore.BlobStore(**kwargs)
blobstore.create()
blobstore.add_to_manifest(self)
return blobstore | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'create_blobstore'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'valu... | Creates an instance of the BlobStore Service. |
def temporarySibling(self):
sib = self.parent().child(_secureEnoughString() + self.basename())
sib.requireCreate()
return sib | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'temporarySibling'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | Create a path naming a temporary sibling of this path in a secure fashion. |
def api_representation(self):
return dict(EmailAddress=dict(Name=self.name, Address=self.email)) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'api_representation'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':... | Returns the JSON formatting required by Outlook's API for contacts |
def contains_array(store, path=None):
path = normalize_storage_path(path)
prefix = _path_to_prefix(path)
key = prefix + array_meta_key
return key in store | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'contains_array'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'... | Return True if the store contains an array at the given logical path. |
def __meta_metadata(self, field, key):
mf = ''
try:
mf = str([f[key] for f in self.metadata
if f['field_name'] == field][0])
except IndexError:
print("%s not in metadata field:%s" % (key, field))
return mf
else:
return ... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '__meta_metadata'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], '... | Return the value for key for the field in the metadata |
def filter( names, pat ):
import os, posixpath
result = [ ]
pat = os.path.normcase( pat )
if not pat in _cache:
res = translate( pat )
if len( _cache ) >= _MAXCACHE:
_cache.clear( )
_cache[ pat ] = re.compile( res )
match = _cache[ pat ].match
if os.path is po... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'filter'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'names... | Return the subset of the list NAMES that match PAT |
def fcor(self):
if self.XCBV is None:
return None
else:
return self.flux - self._mission.FitCBVs(self) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'fcor'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'id... | The CBV-corrected de-trended flux. |
def function_is_noop(function_node: ast.FunctionDef) -> bool:
return all(node_is_noop(n) for n in function_node.body) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '10', '12']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'function_is_noop'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'typed_parameter', 'children': ['... | Function does nothing - is just ``pass`` or docstring. |
def bind(cls, app, *paths, methods=None, name=None, router=None, view=None):
cls.app = app
if cls.app is not None:
for _, m in inspect.getmembers(cls, predicate=inspect.isfunction):
if not hasattr(m, ROUTE_PARAMS_ATTR):
continue
paths_, met... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '20']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'bind'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '8', '11', '14', '17']}; {'id': '4', 'type': 'identifier', 'chi... | Bind to the given application. |
def create_from_ll(cls, lls:LabelLists, bs:int=64, val_bs:int=None, ds_tfms:Optional[TfmList]=None,
num_workers:int=defaults.cpus, dl_tfms:Optional[Collection[Callable]]=None, device:torch.device=None,
test:Optional[PathOrStr]=None, collate_fn:Callable=data_collate, size:int=None, no_che... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '104', '106']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'create_from_ll'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '9', '14', '19', '28', '35', '48', '55', '64', '69... | Create an `ImageDataBunch` from `LabelLists` `lls` with potential `ds_tfms`. |
def form_echo(cls, request,
foo: (Ptypes.form, String('A form parameter'))) -> [
(200, 'Ok', String)]:
log.info('Echoing form param, value is: {}'.format(foo))
for i in range(randint(0, MAX_LOOP_DURATION)):
yield
msg = 'The value sent was: {}'.format(foo... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '17', '23']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'form_echo'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], ... | Echo the form parameter. |
def _iter_coords(nsls):
ranges = list()
for nsl in nsls:
if isinstance(nsl, int):
ranges.append(range(nsl, nsl+1))
else:
ranges.append(range(nsl.start, nsl.stop))
yield from itertools.product(*ranges) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_iter_coords'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'nsls... | Iterate through all matching coordinates in a sequence of slices. |
def request(self, app_id=None, body=None, stamp=None, url=None, sig=None):
if self.app_id:
if not self.application_id(app_id):
return False
if (url or sig):
if not (body and stamp and url and sig):
raise ValueError('Unable to validate sender, check... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '20']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'request'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '8', '11', '14', '17']}; {'id': '4', 'type': 'identifier', 'child... | Validate application ID and request is from Alexa. |
def resizeToMinimum(self):
offset = self.padding()
min_size = self.minimumPixmapSize()
if self.position() in (XDockToolbar.Position.East,
XDockToolbar.Position.West):
self.resize(min_size.width() + offset, self.height())
elif self.position... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'resizeToMinimum'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 's... | Resizes the dock toolbar to the minimum sizes. |
def to_python(self, value):
value = super(BoundingBoxField, self).to_python(value)
try:
bbox = gdal.OGRGeometry.from_bbox(value).geos
except (ValueError, AttributeError):
return []
bbox.srid = self.srid
return bbox | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'to_python'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'se... | Returns a GEOS Polygon from bounding box values. |
def visit_keyword(self, node, parent):
newnode = nodes.Keyword(node.arg, parent=parent)
newnode.postinit(self.visit(node.value, newnode))
return newnode | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'visit_keyword'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'va... | visit a Keyword node by returning a fresh instance of it |
def __dict_to_deployment_spec(spec):
spec_obj = AppsV1beta1DeploymentSpec(template=spec.get('template', ''))
for key, value in iteritems(spec):
if hasattr(spec_obj, key):
setattr(spec_obj, key, value)
return spec_obj | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '__dict_to_deployment_spec'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], '... | Converts a dictionary into kubernetes AppsV1beta1DeploymentSpec instance. |
def align(self, out_path=None):
if out_path is None: out_path = self.prefix_path + '.aln'
sh.muscle38("-in", self.path, "-out", out_path)
return AlignedFASTA(out_path) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'align'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}... | We align the sequences in the fasta file with muscle. |
def split_hostmask(hostmask):
nick, _, host = hostmask.partition('@')
nick, _, user = nick.partition('!')
return nick, user or None, host or None | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'split_hostmask'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'ho... | Splits a nick@host string into nick and host. |
def _clean_post_content(blog_url, content):
content = re.sub(
"<img.src=\"%s(.*)\"" % blog_url,
lambda s: "<img src=\"%s\"" % _get_relative_upload(s.groups(1)[0]),
content)
return content | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_clean_post_content'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'v... | Replace import path with something relative to blog. |
def go_down(self):
if self.current_option < len(self.items) - 1:
self.current_option += 1
else:
self.current_option = 0
self.draw() | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'go_down'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {... | Go down one, wrap to beginning if necessary |
def index_bounds(x):
if isinstance(x, (pd.DataFrame, pd.Series)):
return x.iloc[0], x.iloc[-1]
else:
return x[0], x[-1] | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'index_bounds'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'x'};... | returns tuple with first and last item |
def trim(self, prefixes):
"Prunes any keys beginning with the specified the specified prefixes."
_prefixes, prefixes = set(map(lambda k:self._prepare_key(k), prefixes)), list()
for t in lookahead(sorted(_prefixes)):
if t[1] is not None:
if t[0] == commonprefix(t):
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'trim'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'};... | Prunes any keys beginning with the specified the specified prefixes. |
def config_acl(args):
r = fapi.get_repository_config_acl(args.namespace, args.config,
args.snapshot_id)
fapi._check_response_code(r, 200)
acls = sorted(r.json(), key=lambda k: k['user'])
return map(lambda acl: '{0}\t{1}'.format(acl['user'], acl['role']... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'config_acl'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'args'}... | Retrieve access control list for a method configuration |
def map_exception_codes():
werkex = inspect.getmembers(exceptions, lambda x: getattr(x, 'code', None))
return {e.code: e for _, e in werkex} | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '4']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'map_exception_codes'}; {'id': '3', 'type': 'parameters', 'children': []}; {'id': '4', 'type': 'block', 'children': ['5', '23']}; {'id'... | Helper function to intialise CODES_TO_EXCEPTIONS. |
def default_config_filename(root_dir=None):
root_dir = Path(root_dir) if root_dir else Path('.').abspath()
locale_dir = root_dir / 'locale'
if not os.path.exists(locale_dir):
locale_dir = root_dir / 'conf' / 'locale'
return locale_dir / BASE_CONFIG_FILENAME | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'default_config_filename'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'default_parameter', 'children': ... | Returns the default name of the configuration file. |
def show_loading_page(self):
loading_template = Template(LOADING)
loading_img = get_image_path('loading_sprites.png')
if os.name == 'nt':
loading_img = loading_img.replace('\\', '/')
message = _("Connecting to kernel...")
page = loading_template.substitute(css_path=CS... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'show_loading_page'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ... | Show a loading animation while the kernel is starting. |
def add_plugin(self, plugin):
new_name = self.plugin_name(plugin)
self._plugins[:] = [p for p in self._plugins
if self.plugin_name(p) != new_name]
self._plugins.append(plugin) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'add_plugin'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 's... | Add the given plugin. |
def validate(cls, data):
try:
jsonschema.validate(
data, cls.SCHEMA,
types={'array': (list, tuple)})
except jsonschema.ValidationError as e:
raise InvalidFormat("Failure data not of the"
" expected format: %s" % (e.m... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'validate'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'cls... | Validate input data matches expected failure ``dict`` format. |
def _length_scalar_handler(scalar_factory, ion_type, length, ctx):
_, self = yield
if length == 0:
data = b''
else:
yield ctx.read_data_transition(length, self)
data = ctx.queue.read(length)
scalar = scalar_factory(data)
event_cls = IonEvent
if callable(scalar):
e... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_length_scalar_handler'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'chil... | Handles scalars, ``scalar_factory`` is a function that returns a value or thunk. |
def with_condition(self, condition: Callable[[MonitorContext], bool]) -> 'MonitorTask':
self._condition = condition
return self | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '16', '18']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'with_condition'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], ... | Sets the task running condition that will be evaluated during the optimisation cycle. |
def _warn_silly_options(cls, args):
if 'page-requisites' in args.span_hosts_allow \
and not args.page_requisites:
_logger.warning(
_('Spanning hosts is allowed for page requisites, '
'but the page requisites option is not on.')
)
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_warn_silly_options'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'v... | Print warnings about any options that may be silly. |
def _notify(p, **data):
message = data.get("message")
if not message and not sys.stdin.isatty():
message = click.get_text_stream("stdin").read()
data["message"] = message
data = clean_data(data)
ctx = click.get_current_context()
if ctx.obj.get("env_prefix"):
data["env_prefix"] = ... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_notify'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'p'};... | The callback func that will be hooked to the ``notify`` command |
def init_git_pillar(opts):
ret = []
for opts_dict in [x for x in opts.get('ext_pillar', [])]:
if 'git' in opts_dict:
try:
pillar = salt.utils.gitfs.GitPillar(
opts,
opts_dict['git'],
per_remote_overrides=git_pillar.P... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'init_git_pillar'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'o... | Clear out the ext pillar caches, used when the master starts |
def filter_cookies(self, request_url: URL=URL()) -> 'BaseCookie[str]':
self._do_expiration()
request_url = URL(request_url)
filtered = SimpleCookie()
hostname = request_url.raw_host or ""
is_not_secure = request_url.scheme not in ("https", "wss")
for cookie in self:
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '12', '14']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'filter_cookies'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], ... | Returns this jar's cookies filtered by their attributes. |
def getThirdPartyLibFiles(self, libs):
platformDefaults = True
if libs[0] == '--nodefaults':
platformDefaults = False
libs = libs[1:]
details = self.getThirdpartyLibs(libs, includePlatformDefaults=platformDefaults)
return details.getLibraryFiles(self.getEngineRoot(), delimiter='\n') | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'getThirdPartyLibFiles'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], ... | Retrieves the list of library files for building against the Unreal-bundled versions of the specified third-party libraries |
def _handle_dist_server(ds_type, repos_array):
if ds_type not in ("JDS", "CDP"):
raise ValueError("Must be JDS or CDP")
prompt = "Does your JSS use a %s? (Y|N): " % ds_type
result = loop_until_valid_response(prompt)
if result:
repo_dict = ElementTree.SubElement(repos_array, "dict")
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_handle_dist_server'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'v... | Ask user for whether to use a type of dist server. |
def warning(lineno, msg):
msg = "%s:%i: warning: %s" % (global_.FILENAME, lineno, msg)
msg_output(msg)
global_.has_warnings += 1 | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'warning'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'line... | Generic warning error routine |
def users(self):
if not hasattr(self, "_users"):
us = {}
if "users" in self.doc:
for ur in self.doc["users"]:
us[ur["name"]] = u = copy.deepcopy(ur["user"])
BytesOrFile.maybe_set(u, "client-certificate")
BytesOrF... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'users'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'i... | Returns known users by exposing as a read-only property. |
def schedule(self):
response = requests.get(
"https://tccna.honeywell.com/WebAPI/emea/api/v1"
"/%s/%s/schedule" % (self.zone_type, self.zoneId),
headers=self.client._headers()
)
response.raise_for_status()
mapping = [
('dailySchedules', 'Da... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'schedule'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; ... | Gets the schedule for the given zone |
def process(self):
subscription = None
result = None
try:
subscription = self.socket.recv()
except AuthenticateError as exception:
logging.error(
'Subscriber error while authenticating request: {}'
.format(exception), exc_info=1)
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'process'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {... | Receive a subscription from the socket and process it |
def walk_tree_and_extract(self, data, target):
'Walk tree of properties and extract identifiers and associated values'
if isinstance(data, dict):
for key in ['children', 'props',]:
self.walk_tree_and_extract(data.get(key, None), target)
ident = data.get('id', None... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'walk_tree_and_extract'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children':... | Walk tree of properties and extract identifiers and associated values |
def _set_range(self, init):
if init and (self._scale_factor is not None):
return
w, h = self._viewbox.size
w, h = float(w), float(h)
x1, y1, z1 = self._xlim[0], self._ylim[0], self._zlim[0]
x2, y2, z2 = self._xlim[1], self._ylim[1], self._zlim[1]
rx, ry, rz = ... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_set_range'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 's... | Reset the camera view using the known limits. |
def load_results(result_files, options, run_set_id=None, columns=None,
columns_relevant_for_diff=set()):
return parallel.map(
load_result,
result_files,
itertools.repeat(options),
itertools.repeat(run_set_id),
itertools.repeat(columns),
itertools.repe... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '17']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'load_results'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '9', '12']}; {'id': '4', 'type': 'identifier', 'childre... | Version of load_result for multiple input files that will be loaded concurrently. |
def _setbitpos(self, pos):
if pos < 0:
raise ValueError("Bit position cannot be negative.")
if pos > self.len:
raise ValueError("Cannot seek past the end of the data.")
self._pos = pos | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_setbitpos'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 's... | Move to absolute postion bit in bitstream. |
def dropEvent( self, event ):
url = event.mimeData().urls()[0]
url_path = nativestring(url.toString())
if ( not url_path.startswith('file:') ):
filename = os.path.basename(url_path)
temp_path = os.path.join(nativestring(QDir.tempPath()), filename)
try:
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'dropEvent'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'se... | Handles a drop event. |
def intent_path(cls, project, intent):
    """Return the fully-qualified intent resource name for *project*/*intent*."""
    template = 'projects/{project}/agent/intents/{intent}'
    return google.api_core.path_template.expand(
        template, project=project, intent=intent)
def _delete(self, **kwargs):
    """Delete this resource from the remote Transifex server."""
    item_path = self._construct_path_to_item()
    return self._http.delete(item_path)
def load(name, base_path=None):
    """Load *name* as a URL/path location (when it contains '/') or as a symbol."""
    if '/' not in name:
        return importer.import_symbol(name, base_path)
    return load_location(name, base_path, module=True)
def load(self):
private = self.is_private()
with open_tls_file(self.file_path, 'r', private=private) as fh:
if private:
self.x509 = crypto.load_privatekey(self.encoding, fh.read())
else:
self.x509 = crypto.load_certificate(self.encoding, fh.read())... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'load'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'id... | Load from a file and return an x509 object |
def _handle_calls(self, service_obj, calls):
for call in calls:
method = call.get('method')
args = call.get('args', [])
kwargs = call.get('kwargs', {})
_check_type('args', args, list)
_check_type('kwargs', kwargs, dict)
if method is None:
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_handle_calls'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'va... | Performs method calls on service object |
def templates(self):
    """Yield each Template defined in this environment, in definition order."""
    current = lib.EnvGetNextDeftemplate(self._env, ffi.NULL)
    while current != ffi.NULL:
        yield Template(self._env, current)
        current = lib.EnvGetNextDeftemplate(self._env, current)
def select_random(ports=None, exclude_ports=None):
if ports is None:
ports = available_good_ports()
if exclude_ports is None:
exclude_ports = set()
ports.difference_update(set(exclude_ports))
for port in random.sample(ports, min(len(ports), 100)):
if not port_is_used(port):
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '10']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'select_random'}; {'id': '3', 'type': 'parameters', 'children': ['4', '7']}; {'id': '4', 'type': 'default_parameter', 'children': ['5'... | Returns random unused port number. |
def _relay_data(self):
    """Forward any buffered data to the sender and clear the buffer."""
    if not self._data:
        return
    # Swap the buffer out before delivery so re-entrant calls see it empty.
    buffered, self._data = self._data, b''
    self._sender.dataReceived(buffered)
def close(self):
    """Close the internal AMQP connection if one is open (idempotent)."""
    if self.connection:
        # Lazy %-style args: the message is only formatted if INFO is enabled.
        logging.info("Closing connection to %s.", self.host)
        self.connection.close()
        self.connection = None
def post_request(self, request, response):
    """Attach CORS headers to the response for every method except OPTIONS."""
    if request.method == api.Method.OPTIONS:
        return response
    response.headers.update(self.request_headers(request))
    return response
def predict_distance(self, X, batch_size=1, show_progressbar=False):
X = self._check_input(X)
X_shape = reduce(np.multiply, X.shape[:-1], 1)
batched = self._create_batches(X, batch_size, shuffle_data=False)
activations = []
activation = self._init_prev(batched)
for x in t... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '12']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'predict_distance'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '9']}; {'id': '4', 'type': 'identifier', 'children'... | Predict distances to some input data. |
def promote(self, lane, svcs=None, meta=None):
svcs, meta, lane = self._prep_for_release(lane, svcs=svcs, meta=meta)
for svc in svcs:
self.changes.append("Promoting: {}.release.future={}".format(svc, self.name))
self.rcs.patch('service', svc, {
"release": {"future... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '12']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'promote'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '9']}; {'id': '4', 'type': 'identifier', 'children': [], 'va... | promote a build so it is ready for an upper lane |
def iter_fasta_qual(fastafile, qualfile, defaultqual=OKQUAL, modify=False):
from Bio.SeqIO.QualityIO import PairedFastaQualIterator
if not qualfile:
qualfile = make_qual(fastafile, score=defaultqual)
rec_iter = PairedFastaQualIterator(open(fastafile), open(qualfile))
for rec in rec_iter:
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '12']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'iter_fasta_qual'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '9']}; {'id': '4', 'type': 'identifier', 'children':... | used by trim, emits one SeqRecord with quality values in it |
def create_nic(client, target, nic):
for network in target.network:
if network.name == nic["network_name"]:
net = network
break
else:
return None
backing = client.create("VirtualEthernetCardNetworkBackingInfo")
backing.deviceName = nic["network_name"]
backing.... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'create_nic'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'value... | Return a NIC spec |
def OnUndo(self, event):
statustext = undo.stack().undotext()
undo.stack().undo()
try:
post_command_event(self.grid.main_window,
self.grid.ContentChangedMsg)
except TypeError:
pass
self.grid.code_array.result_cache.clear()
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'OnUndo'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'... | Calls the grid undo method |
def enqueue_or_delay(self, queue_name=None, priority=None,
delayed_until=None, prepend=False, queue_model=None):
queue_name = self._get_queue_name(queue_name)
fields = {'queued': '1'}
if priority is not None:
fields['priority'] = priority
else:
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '20']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'enqueue_or_delay'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '8', '11', '14', '17']}; {'id': '4', 'type': 'identifier... | Will enqueue or delay the job depending of the delayed_until. |
def _draw_chars(self, data, to_draw):
    """Draw *to_draw* with the format in *data*, overwriting existing chars in place."""
    # Only overwrite (delete-ahead) when drawing more than a single character.
    if len(to_draw) > 1:
        deleted = 0
        while deleted < len(to_draw) and not self._cursor.atBlockEnd():
            self._cursor.deleteChar()
            deleted += 1
    self._cursor.insertText(to_draw, data.fmt)
def show_current_metadata(self):
    """Render the selected layer's keywords as HTML in the metadata preview."""
    LOGGER.debug('Showing layer: ' + self.layer.name())
    keywords = KeywordIO(self.layer)
    body_html = keywords.to_message().to_html()
    self.metadata_preview_web_view.setHtml(
        html_header() + body_html + html_footer())
def _long_image_slice(in_filepath, out_filepath, slice_size):
print 'slicing image: {0}'.format(in_filepath)
img = Image.open(in_filepath)
width, height = img.size
upper = 0
left = 0
slices = int(math.ceil(height / slice_size))
count = 1
for slice in range(slices):
if count == sl... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_long_image_slice'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [],... | Slice an image into parts slice_size tall. |
def divide(self, data_source_factory):
data_length = data_source_factory.length()
data_interval_length = data_length / self.workers_number() + 1
current_index = 0
self.responses = []
while current_index < data_length:
self.responses.append(0)
offset = curr... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'divide'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'... | Divides the task according to the number of workers. |
def _strip_odict(wrapped):
    """Round-trip *wrapped*'s result through JSON, replacing OrderedDicts with plain dicts."""
    @functools.wraps(wrapped)
    def _plain(*args):
        raw = wrapped(*args)
        return salt.utils.json.loads(salt.utils.json.dumps(raw))
    return _plain
def negated(input_words, include_nt=True):
input_words = [str(w).lower() for w in input_words]
neg_words = []
neg_words.extend(NEGATE)
for word in neg_words:
if word in input_words:
return True
if include_nt:
for word in input_words:
if "n't" in word:
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'negated'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'inpu... | Determine if input contains negation words |
def getkey(self, path, filename=None):
scheme, keys = self.getkeys(path, filename=filename)
try:
key = next(keys)
except StopIteration:
raise FileNotFoundError("Could not find object for: '%s'" % path)
nextKey = None
try:
nextKey = next(keys)
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'getkey'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | Get single matching key for a path |
def generic_var(self, key, value=None):
    """Get or set a generic session variable, namespaced with _GENERIC_VAR_KEY_PREFIX."""
    namespaced_key = '{0}{1}'.format(self._GENERIC_VAR_KEY_PREFIX, key)
    return self._get_or_set(namespaced_key, value)
def __get_path_to_mecab_config(self):
if six.PY2:
path_mecab_config_dir = subprocess.check_output(['which', 'mecab-config'])
path_mecab_config_dir = path_mecab_config_dir.strip().replace('/mecab-config', '')
else:
path_mecab_config_dir = subprocess.check_output(['whic... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '__get_path_to_mecab_config'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], ... | You get path into mecab-config |
def retry_on_bad_auth(func):
@wraps(func)
def retry_version(self, *args, **kwargs):
while True:
try:
return func(self, *args, **kwargs)
except trolly.ResourceUnavailable:
sys.stderr.write('bad request (refresh board id)\n')
self._bo... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'retry_on_bad_auth'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ... | If bad token or board, try again after clearing relevant cache entries |
def update_checkpoint(self, checkpoint):
if checkpoint is not None and checkpoint != self.checkpoint:
self.checkpoint = checkpoint
with self.oplog_progress as oplog_prog:
oplog_dict = oplog_prog.get_dict()
oplog_dict.pop(str(self.oplog), None)
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'update_checkpoint'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'val... | Store the current checkpoint in the oplog progress dictionary. |
def getPyCmd(cls):
    """Return the OS-appropriate python3 launcher command name."""
    platform = sys.platform
    if "win32" in platform:
        return 'py'
    if "linux" in platform or 'darwin' in platform:
        return 'python3'
    cit.err("No python3 command for " + platform)
def calculate_rates(base_currency, counter_currency, forward_rate=None, fwd_points=None, spot_reference=None):
if base_currency not in DIVISOR_TABLE:
divisor = DIVISOR_TABLE.get(counter_currency, DEFAULT_DIVISOR)
if forward_rate is None and fwd_points is not None and spot_reference is not None:
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '15']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'calculate_rates'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '9', '12']}; {'id': '4', 'type': 'identifier', 'chil... | Calculate rates for Fx Forward based on others. |
def import_global(node: Node, key: str, path: Any):
    """Import the module/class/function named by *path* and store it in *node*'s globals under *key*."""
    node.node_globals[key] = import_path(path)
def change_customer_nc_users_quota(sender, structure, user, role, signal, **kwargs):
assert signal in (signals.structure_role_granted, signals.structure_role_revoked), \
'Handler "change_customer_nc_users_quota" has to be used only with structure_role signals'
assert sender in (Customer, Project), \
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '11']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'change_customer_nc_users_quota'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '8', '9']}; {'id': '4', 'type': ... | Modify nc_user_count quota usage on structure role grant or revoke |
def extend_src_text(self, content, context, text_list, category):
prefix = self.prefix + '-' if self.prefix else ''
for comment, line, encoding in text_list:
content.append(
filters.SourceText(
textwrap.dedent(comment),
"%s (%d)" % (con... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'extend_src_text'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '8']}; {'id': '4', 'type': 'identifier', 'childr... | Extend the source text list with the gathered text data. |
def _frames(traceback):
    """Yield frame objects from *traceback*, skipping the first tb entry.

    Walks the tb_next chain and yields each subsequent entry's tb_frame.
    """
    tb = traceback
    while tb.tb_next:
        tb = tb.tb_next
        yield tb.tb_frame
def start(host, port=5959, tag='salt/engine/logstash', proto='udp'):
if proto == 'tcp':
logstashHandler = logstash.TCPLogstashHandler
elif proto == 'udp':
logstashHandler = logstash.UDPLogstashHandler
logstash_logger = logging.getLogger('python-logstash-logger')
logstash_logger.setLevel(... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '14']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'start'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '8', '11']}; {'id': '4', 'type': 'identifier', 'children': [], 'val... | Listen to salt events and forward them to logstash |
def ensure_shape(core, shape, shape_):
core = core.copy()
if shape is None:
shape = shape_
elif isinstance(shape, int):
shape = (shape,)
if tuple(shape) == tuple(shape_):
return core, shape
ones = np.ones(shape, dtype=int)
for key, val in core.items():
core[key] =... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'ensure_shape'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'val... | Ensure shape is correct. |
def read_long(self):
    """Read a big-endian unsigned 32-bit integer, resetting the bit buffer first."""
    self.bitcount = self.bits = 0
    (value,) = unpack('>I', self.input.read(4))
    return value
def FileTransfer(*args, **kwargs):
if len(args) >= 1:
device_type = args[0].device_type
else:
device_type = kwargs["ssh_conn"].device_type
if device_type not in scp_platforms:
raise ValueError(
"Unsupported SCP device_type: "
"currently supported platforms are... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'FileTransfer'}; {'id': '3', 'type': 'parameters', 'children': ['4', '6']}; {'id': '4', 'type': 'list_splat_pattern', 'children': ['5']... | Factory function selects the proper SCP class and creates object based on device_type. |
def _validate_edata(self, edata):
if edata is None:
return True
if not (isinstance(edata, dict) or _isiterable(edata)):
return False
edata = [edata] if isinstance(edata, dict) else edata
for edict in edata:
if (not isinstance(edict, dict)) or (
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_validate_edata'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value... | Validate edata argument of raise_exception_if method. |
def crud_for_app(app_label, urlprefix=None):
    """Return a list of ``url`` items to CRUD every model of an app.

    When *urlprefix* is None it defaults to ``app_label + '/'``.
    """
    prefix = app_label + '/' if urlprefix is None else urlprefix
    app_config = apps.get_app_config(app_label)
    urls = []
    for model in app_config.get_models():
        urls.extend(crud_for_model(model, prefix))
    return urls
def request_set_status(self, text: str) -> dict:
    """Set the user's status text via the 'status.set' API method and return the raw response."""
    params = {'text': text}
    response = self.session.send_method_request('status.set', params)
    self.check_for_errors('status.set', params, response)
    return response
def parse_file_path(cls, file_path):
    """Extract the address from *file_path* via cls.file_regex; None when it does not match."""
    match = cls.file_regex.match(file_path)
    return match.group(1) if match else None
def passwordLogin(self, username):
    """Record *username* and issue a fresh 16-byte login challenge."""
    challenge = secureRandom(16)
    self.challenge = challenge
    self.username = username
    return {'challenge': challenge}
def field_exists(self, well_x, well_y, field_x, field_y):
    """Return True if the given field exists in the ScanFieldArray."""
    # PEP 8: identity check against None instead of ``!=``.
    return self.field(well_x, well_y, field_x, field_y) is not None
def list_database(db):
credentials = db.credentials()
if credentials:
table = Table(
db.config['headers'],
table_format=db.config['table_format'],
colors=db.config['colors'],
hidden=db.config['hidden'],
hidden_string=db.config['hidden_string'],... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'list_database'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'db'... | Print credential as a table |
def load(self, id, *args, **kwargs):
    """Load a remote resource by *id*, running the pre/post hooks around the fetch."""
    self._pre_load(id, *args, **kwargs)
    raw_response = self._load(id, *args, **kwargs)
    return self._post_load(raw_response, *args, **kwargs)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.