id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
35,121
def resolve_field(model_field, lookup_expr): query = model_field.model._default_manager.all().query lhs = Expression(model_field) lookups = lookup_expr.split(LOOKUP_SEP) assert (len(lookups) > 0) try: while lookups: name = lookups[0] if (len(lookups) == 1): final_lookup = lhs.get_lookup(name) if (not final_lookup): lhs = query.try_transform(lhs, name, lookups) final_lookup = lhs.get_lookup('exact') return (lhs.output_field, final_lookup.lookup_name) lhs = query.try_transform(lhs, name, lookups) lookups = lookups[1:] except FieldError as e: six.raise_from(FieldLookupError(model_field, lookup_expr), e)
[ "def", "resolve_field", "(", "model_field", ",", "lookup_expr", ")", ":", "query", "=", "model_field", ".", "model", ".", "_default_manager", ".", "all", "(", ")", ".", "query", "lhs", "=", "Expression", "(", "model_field", ")", "lookups", "=", "lookup_expr"...
resolves a lookup_expr into its final output field .
train
false
35,122
@bdd.when(bdd.parsers.parse('I open {path}')) def open_path(quteproc, path): new_tab = False new_bg_tab = False new_window = False as_url = False wait = True new_tab_suffix = ' in a new tab' new_bg_tab_suffix = ' in a new background tab' new_window_suffix = ' in a new window' do_not_wait_suffix = ' without waiting' as_url_suffix = ' as a URL' while True: if path.endswith(new_tab_suffix): path = path[:(- len(new_tab_suffix))] new_tab = True elif path.endswith(new_bg_tab_suffix): path = path[:(- len(new_bg_tab_suffix))] new_bg_tab = True elif path.endswith(new_window_suffix): path = path[:(- len(new_window_suffix))] new_window = True elif path.endswith(as_url_suffix): path = path[:(- len(as_url_suffix))] as_url = True elif path.endswith(do_not_wait_suffix): path = path[:(- len(do_not_wait_suffix))] wait = False else: break quteproc.open_path(path, new_tab=new_tab, new_bg_tab=new_bg_tab, new_window=new_window, as_url=as_url, wait=wait)
[ "@", "bdd", ".", "when", "(", "bdd", ".", "parsers", ".", "parse", "(", "'I open {path}'", ")", ")", "def", "open_path", "(", "quteproc", ",", "path", ")", ":", "new_tab", "=", "False", "new_bg_tab", "=", "False", "new_window", "=", "False", "as_url", ...
open a url .
train
false
35,123
def register_mapping_type(cls): _search_mapping_types[cls.get_mapping_type_name()] = cls return cls
[ "def", "register_mapping_type", "(", "cls", ")", ":", "_search_mapping_types", "[", "cls", ".", "get_mapping_type_name", "(", ")", "]", "=", "cls", "return", "cls" ]
class decorator for registering mappingtypes for search .
train
false
35,124
def aggregate_get_all(context): return IMPL.aggregate_get_all(context)
[ "def", "aggregate_get_all", "(", "context", ")", ":", "return", "IMPL", ".", "aggregate_get_all", "(", "context", ")" ]
get all aggregates .
train
false
35,125
def _percent_encode(encode_str): encoding = (sys.stdin.encoding or 'cp936') decoded = str(encode_str) if PY3: if isinstance(encode_str, bytes): decoded = encode_str.decode(encoding) else: decoded = str(encode_str).decode(encoding) res = urlquote(decoded.encode('utf8'), '') res = res.replace('+', '%20') res = res.replace('*', '%2A') res = res.replace('%7E', '~') return res
[ "def", "_percent_encode", "(", "encode_str", ")", ":", "encoding", "=", "(", "sys", ".", "stdin", ".", "encoding", "or", "'cp936'", ")", "decoded", "=", "str", "(", "encode_str", ")", "if", "PY3", ":", "if", "isinstance", "(", "encode_str", ",", "bytes",...
encode string to utf8 .
train
false
35,126
def revoke_certs_by_project(project_id): admin = context.get_admin_context() for cert in db.certificate_get_all_by_project(admin, project_id): revoke_cert(cert['project_id'], cert['file_name'])
[ "def", "revoke_certs_by_project", "(", "project_id", ")", ":", "admin", "=", "context", ".", "get_admin_context", "(", ")", "for", "cert", "in", "db", ".", "certificate_get_all_by_project", "(", "admin", ",", "project_id", ")", ":", "revoke_cert", "(", "cert", ...
revoke all project certs .
train
false
35,127
def _getAvatars(): path = os.path.expanduser((('~/Library/Application Support/Skype/' + getUserName()) + '/main.db')) with contextlib.closing(sqlite3.connect(path).cursor()) as db: for av in db.execute('SELECT skypename,avatar_image FROM Contacts WHERE type=1 AND is_permanent=1'): if (av[1] is not None): with open((('avatars/' + av[0]) + '.jpeg'), 'wr') as f: f.write(str(av[1])[1:])
[ "def", "_getAvatars", "(", ")", ":", "path", "=", "os", ".", "path", ".", "expanduser", "(", "(", "(", "'~/Library/Application Support/Skype/'", "+", "getUserName", "(", ")", ")", "+", "'/main.db'", ")", ")", "with", "contextlib", ".", "closing", "(", "sql...
extracts the avatars from the database and stores them in the alfred extension directory .
train
false
35,129
@contextmanager def virtualenv(): with cd(env.venv_path): with prefix((u'source %s/bin/activate' % env.venv_path)): (yield)
[ "@", "contextmanager", "def", "virtualenv", "(", ")", ":", "with", "cd", "(", "env", ".", "venv_path", ")", ":", "with", "prefix", "(", "(", "u'source %s/bin/activate'", "%", "env", ".", "venv_path", ")", ")", ":", "(", "yield", ")" ]
return a virtual environment which is unique to each test function invocation created inside of a sub directory of the test functions temporary directory .
train
false
35,130
@pytest.mark.nondestructive def test_most_popular_extensions_are_sorted_by_users(base_url, selenium): page = Home(selenium, base_url).open() extensions = page.most_popular.extensions sorted_by_users = sorted(extensions, key=(lambda e: e.users), reverse=True) assert (sorted_by_users == extensions)
[ "@", "pytest", ".", "mark", ".", "nondestructive", "def", "test_most_popular_extensions_are_sorted_by_users", "(", "base_url", ",", "selenium", ")", ":", "page", "=", "Home", "(", "selenium", ",", "base_url", ")", ".", "open", "(", ")", "extensions", "=", "pag...
most popular add-ons are sorted by popularity .
train
false
35,132
@pytest.mark.django_db def test_data_tp(english): tp = TranslationProjectFactory(project=ProjectDBFactory(source_language=english), language=LanguageDBFactory()) assert (repr(tp.data) == ('<TPData: %s>' % tp.pootle_path))
[ "@", "pytest", ".", "mark", ".", "django_db", "def", "test_data_tp", "(", "english", ")", ":", "tp", "=", "TranslationProjectFactory", "(", "project", "=", "ProjectDBFactory", "(", "source_language", "=", "english", ")", ",", "language", "=", "LanguageDBFactory"...
test that you cant add a duplicate file extension .
train
false
35,134
def issue_role(name, rawtext, text, lineno, inliner, options=None, content=None): options = (options or {}) content = (content or []) issue_nos = [each.strip() for each in utils.unescape(text).split(',')] config = inliner.document.settings.env.app.config ret = [] for (i, issue_no) in enumerate(issue_nos): node = _make_issue_node(issue_no, config, options=options) ret.append(node) if (i != (len(issue_nos) - 1)): sep = nodes.raw(text=', ', format='html') ret.append(sep) return (ret, [])
[ "def", "issue_role", "(", "name", ",", "rawtext", ",", "text", ",", "lineno", ",", "inliner", ",", "options", "=", "None", ",", "content", "=", "None", ")", ":", "options", "=", "(", "options", "or", "{", "}", ")", "content", "=", "(", "content", "...
sphinx role for linking to an issue .
train
true
35,135
def add_params_to_qs(query, params): if isinstance(params, dict): params = params.items() queryparams = urlparse.parse_qsl(query, keep_blank_values=True) queryparams.extend(params) return urlencode(queryparams)
[ "def", "add_params_to_qs", "(", "query", ",", "params", ")", ":", "if", "isinstance", "(", "params", ",", "dict", ")", ":", "params", "=", "params", ".", "items", "(", ")", "queryparams", "=", "urlparse", ".", "parse_qsl", "(", "query", ",", "keep_blank_...
extend a query with a list of two-tuples .
train
true
35,137
def stripnull(string): i = string.find('\x00') return (string if (i < 0) else string[:i])
[ "def", "stripnull", "(", "string", ")", ":", "i", "=", "string", ".", "find", "(", "'\\x00'", ")", "return", "(", "string", "if", "(", "i", "<", "0", ")", "else", "string", "[", ":", "i", "]", ")" ]
return string truncated at first null character .
train
true
35,138
def evaluate_entrance_exam(course, block, user_id): if milestones_helpers.is_entrance_exams_enabled(): entrance_exam_enabled = getattr(course, 'entrance_exam_enabled', False) in_entrance_exam = getattr(block, 'in_entrance_exam', False) if (entrance_exam_enabled and in_entrance_exam): request = RequestFactory().request() request.user = User.objects.get(id=user_id) exam_pct = get_entrance_exam_score(request, course) if (exam_pct >= course.entrance_exam_minimum_score_pct): exam_key = UsageKey.from_string(course.entrance_exam_id) relationship_types = milestones_helpers.get_milestone_relationship_types() content_milestones = milestones_helpers.get_course_content_milestones(course.id, exam_key, relationship=relationship_types['FULFILLS']) user = {'id': request.user.id} for milestone in content_milestones: milestones_helpers.add_user_milestone(user, milestone)
[ "def", "evaluate_entrance_exam", "(", "course", ",", "block", ",", "user_id", ")", ":", "if", "milestones_helpers", ".", "is_entrance_exams_enabled", "(", ")", ":", "entrance_exam_enabled", "=", "getattr", "(", "course", ",", "'entrance_exam_enabled'", ",", "False",...
update milestone fulfillments for the specified content module .
train
false
35,140
@lru_cache(maxsize=32) def check_pow(block_number, header_hash, mixhash, nonce, difficulty): log.debug('checking pow', block_number=block_number) if ((len(mixhash) != 32) or (len(header_hash) != 32) or (len(nonce) != 8)): return False cache = get_cache(block_number) mining_output = hashimoto_light(block_number, cache, header_hash, nonce) if (mining_output['mix digest'] != mixhash): return False return (utils.big_endian_to_int(mining_output['result']) <= ((2 ** 256) // (difficulty or 1)))
[ "@", "lru_cache", "(", "maxsize", "=", "32", ")", "def", "check_pow", "(", "block_number", ",", "header_hash", ",", "mixhash", ",", "nonce", ",", "difficulty", ")", ":", "log", ".", "debug", "(", "'checking pow'", ",", "block_number", "=", "block_number", ...
check if the proof-of-work of the block is valid .
train
true
35,142
def parse_domain(str): m = _domain_re.match(str) if (not m): raise ValueError(('Domain: ' + str)) dom = Domain() dom.sid = m.group(1) dom.sccs = m.group(2) dom.residues = Residues.Residues(m.group(3)) if (not dom.residues.pdbid): dom.residues.pdbid = dom.sid[1:5] dom.description = m.group(4).strip() return dom
[ "def", "parse_domain", "(", "str", ")", ":", "m", "=", "_domain_re", ".", "match", "(", "str", ")", "if", "(", "not", "m", ")", ":", "raise", "ValueError", "(", "(", "'Domain: '", "+", "str", ")", ")", "dom", "=", "Domain", "(", ")", "dom", ".", ...
convert an astral header string into a scop domain .
train
false
35,144
def makeCompFunctionFiles(out_path, generateFor=generateFor, specialBuilders=specialBuilders): skips = [] for (cmd_name, module_name, class_name) in generateFor: if (module_name is None): f = _openCmdFile(out_path, cmd_name) f.close() continue try: m = __import__(('%s' % (module_name,)), None, None, class_name) f = _openCmdFile(out_path, cmd_name) o = getattr(m, class_name)() if (cmd_name in specialBuilders): b = specialBuilders[cmd_name](cmd_name, o, f) b.write() else: b = Builder(cmd_name, o, f) b.write() except Exception as e: skips.append((cmd_name, e)) continue return skips
[ "def", "makeCompFunctionFiles", "(", "out_path", ",", "generateFor", "=", "generateFor", ",", "specialBuilders", "=", "specialBuilders", ")", ":", "skips", "=", "[", "]", "for", "(", "cmd_name", ",", "module_name", ",", "class_name", ")", "in", "generateFor", ...
generate completion function files in the given directory for all twisted commands @type out_path: c{str} .
train
false
35,145
def estimate_bandwidth(X, quantile=0.3, n_samples=None, random_state=0, n_jobs=1): random_state = check_random_state(random_state) if (n_samples is not None): idx = random_state.permutation(X.shape[0])[:n_samples] X = X[idx] nbrs = NearestNeighbors(n_neighbors=int((X.shape[0] * quantile)), n_jobs=n_jobs) nbrs.fit(X) bandwidth = 0.0 for batch in gen_batches(len(X), 500): (d, _) = nbrs.kneighbors(X[batch, :], return_distance=True) bandwidth += np.max(d, axis=1).sum() return (bandwidth / X.shape[0])
[ "def", "estimate_bandwidth", "(", "X", ",", "quantile", "=", "0.3", ",", "n_samples", "=", "None", ",", "random_state", "=", "0", ",", "n_jobs", "=", "1", ")", ":", "random_state", "=", "check_random_state", "(", "random_state", ")", "if", "(", "n_samples"...
estimate the bandwidth to use with the mean-shift algorithm .
train
false
35,147
@common_exceptions_400 def view_user_doesnotexist(request): raise User.DoesNotExist()
[ "@", "common_exceptions_400", "def", "view_user_doesnotexist", "(", "request", ")", ":", "raise", "User", ".", "DoesNotExist", "(", ")" ]
a dummy view that raises a user .
train
false
35,149
def ListInstances(region, instances=None, node_types=[], states=[], names=[]): ec2 = _Connect(region) filters = {} if node_types: for i in node_types: assert (i in kValidNodeTypes), ('"%s" not in the list of valid node types: %s' % (i, ', '.join(kValidNodeTypes))) filters['tag:NodeType'] = node_types if names: filters['tag:Name'] = names if states: for i in states: assert (i in kValidStateNames), ('"%s" not in the list of valid state names: %s' % (i, ', '.join(kValidStateNames))) filters['instance-state-name'] = states matches = [] for r in ec2.get_all_instances(instance_ids=instances, filters=filters): matches.extend(r.instances) return matches
[ "def", "ListInstances", "(", "region", ",", "instances", "=", "None", ",", "node_types", "=", "[", "]", ",", "states", "=", "[", "]", ",", "names", "=", "[", "]", ")", ":", "ec2", "=", "_Connect", "(", "region", ")", "filters", "=", "{", "}", "if...
list instance dns names in a given region .
train
false
35,150
def _filter_optouts_from_recipients(to_list, course_id): optouts = Optout.objects.filter(course_id=course_id, user__in=[i['pk'] for i in to_list]).values_list('user__email', flat=True) optouts = set(optouts) num_optout = len(optouts) to_list = [recipient for recipient in to_list if (recipient['email'] not in optouts)] return (to_list, num_optout)
[ "def", "_filter_optouts_from_recipients", "(", "to_list", ",", "course_id", ")", ":", "optouts", "=", "Optout", ".", "objects", ".", "filter", "(", "course_id", "=", "course_id", ",", "user__in", "=", "[", "i", "[", "'pk'", "]", "for", "i", "in", "to_list"...
filters a recipient list based on student opt-outs for a given course .
train
false
35,151
def tail_log_file(filename, offset, nlines, callback=None): def seek_file(filehandle, offset, nlines, callback): 'step backwards in chunks and stop only when we have enough lines' lines_found = [] buffer_size = 4098 block_count = (-1) while (len(lines_found) < (offset + nlines)): try: filehandle.seek((block_count * buffer_size), os.SEEK_END) except IOError: filehandle.seek(0) lines_found = filehandle.readlines() break lines_found = filehandle.readlines() block_count -= 1 lines_found = lines_found[((- nlines) - offset):((- offset) if offset else None)] if callback: callback(lines_found) else: return lines_found def errback(failure): 'Catching errors to normal log' log_trace() filehandle = _open_log_file(filename) if filehandle: if callback: return deferToThread(seek_file, filehandle, offset, nlines, callback).addErrback(errback) else: return seek_file(filehandle, offset, nlines, callback)
[ "def", "tail_log_file", "(", "filename", ",", "offset", ",", "nlines", ",", "callback", "=", "None", ")", ":", "def", "seek_file", "(", "filehandle", ",", "offset", ",", "nlines", ",", "callback", ")", ":", "lines_found", "=", "[", "]", "buffer_size", "=...
return the tail of the log file .
train
false
35,152
def is_excluded(root, excludes): for exclude in excludes: if fnmatch(root, exclude): return True return False
[ "def", "is_excluded", "(", "root", ",", "excludes", ")", ":", "for", "exclude", "in", "excludes", ":", "if", "fnmatch", "(", "root", ",", "exclude", ")", ":", "return", "True", "return", "False" ]
check if the directory is in the exclude list .
train
true
35,153
def yn_validator(optdict, name, value): return optik_ext.check_yn(None, name, value)
[ "def", "yn_validator", "(", "optdict", ",", "name", ",", "value", ")", ":", "return", "optik_ext", ".", "check_yn", "(", "None", ",", "name", ",", "value", ")" ]
validate and return a converted value for option of type yn .
train
false
35,154
def buildTLSProtocol(server=False, transport=None, fakeConnection=None): clientProtocol = AccumulatingProtocol(999999999999) clientFactory = ClientFactory() clientFactory.protocol = (lambda : clientProtocol) if fakeConnection: @implementer(IOpenSSLServerConnectionCreator, IOpenSSLClientConnectionCreator) class HardCodedConnection(object, ): def clientConnectionForTLS(self, tlsProtocol): return fakeConnection serverConnectionForTLS = clientConnectionForTLS contextFactory = HardCodedConnection() elif server: contextFactory = ServerTLSContext() else: contextFactory = ClientTLSContext() wrapperFactory = TLSMemoryBIOFactory(contextFactory, (not server), clientFactory) sslProtocol = wrapperFactory.buildProtocol(None) if (transport is None): transport = StringTransport() sslProtocol.makeConnection(transport) return (clientProtocol, sslProtocol)
[ "def", "buildTLSProtocol", "(", "server", "=", "False", ",", "transport", "=", "None", ",", "fakeConnection", "=", "None", ")", ":", "clientProtocol", "=", "AccumulatingProtocol", "(", "999999999999", ")", "clientFactory", "=", "ClientFactory", "(", ")", "client...
create a protocol hooked up to a tls transport hooked up to a stringtransport .
train
false
35,155
def apath(path=''): from gluon.fileutils import up opath = up(request.folder) while (path[:3] == '../'): (opath, path) = (up(opath), path[3:]) return os.path.join(opath, path).replace('\\', '/')
[ "def", "apath", "(", "path", "=", "''", ")", ":", "from", "gluon", ".", "fileutils", "import", "up", "opath", "=", "up", "(", "request", ".", "folder", ")", "while", "(", "path", "[", ":", "3", "]", "==", "'../'", ")", ":", "(", "opath", ",", "...
application path .
train
false
35,158
def _fixed_np_round(arr, decimals=0, out=None): if ((out is not None) and (arr.dtype.kind == 'c')): _fixed_np_round(arr.real, decimals, out.real) _fixed_np_round(arr.imag, decimals, out.imag) return out else: res = np.round(arr, decimals, out) if (out is None): def fixup_signed_zero(arg, res): if ((res == 0.0) and (arg < 0)): return (- np.abs(res)) else: return res if isinstance(arr, (complex, np.complexfloating)): res = complex(fixup_signed_zero(arr.real, res.real), fixup_signed_zero(arr.imag, res.imag)) else: res = fixup_signed_zero(arr, res) return res
[ "def", "_fixed_np_round", "(", "arr", ",", "decimals", "=", "0", ",", "out", "=", "None", ")", ":", "if", "(", "(", "out", "is", "not", "None", ")", "and", "(", "arr", ".", "dtype", ".", "kind", "==", "'c'", ")", ")", ":", "_fixed_np_round", "(",...
a slightly bugfixed version of np .
train
false
35,159
def gethostbyname_ex(hostname): return get_hub().resolver.gethostbyname_ex(hostname)
[ "def", "gethostbyname_ex", "(", "hostname", ")", ":", "return", "get_hub", "(", ")", ".", "resolver", ".", "gethostbyname_ex", "(", "hostname", ")" ]
gethostbyname_ex -> return the true host name .
train
false
35,161
def set_locale_info(domain, localedir): global _DOMAIN, _LOCALEDIR _DOMAIN = domain _LOCALEDIR = localedir
[ "def", "set_locale_info", "(", "domain", ",", "localedir", ")", ":", "global", "_DOMAIN", ",", "_LOCALEDIR", "_DOMAIN", "=", "domain", "_LOCALEDIR", "=", "localedir" ]
setup the domain and localedir for translations .
train
false
35,163
def _countFollowingZeros(l): if (len(l) == 0): return 0 elif (l[0] != 0): return 0 else: return (1 + _countFollowingZeros(l[1:]))
[ "def", "_countFollowingZeros", "(", "l", ")", ":", "if", "(", "len", "(", "l", ")", "==", "0", ")", ":", "return", "0", "elif", "(", "l", "[", "0", "]", "!=", "0", ")", ":", "return", "0", "else", ":", "return", "(", "1", "+", "_countFollowingZ...
return nr .
train
false
35,166
@register.simple_tag def bootstrap_javascript_url(): return javascript_url()
[ "@", "register", ".", "simple_tag", "def", "bootstrap_javascript_url", "(", ")", ":", "return", "javascript_url", "(", ")" ]
return the full url to the bootstrap javascript library default value: none this value is configurable .
train
false
35,167
def rst_ify(text): try: t = _ITALIC.sub((('*' + '\\1') + '*'), text) t = _BOLD.sub((('**' + '\\1') + '**'), t) t = _MODULE.sub(((':ref:`' + '\\1 <\\1>') + '`'), t) t = _URL.sub('\\1', t) t = _CONST.sub((('``' + '\\1') + '``'), t) except Exception as e: raise AnsibleError(('Could not process (%s) : %s' % (str(text), str(e)))) return t
[ "def", "rst_ify", "(", "text", ")", ":", "try", ":", "t", "=", "_ITALIC", ".", "sub", "(", "(", "(", "'*'", "+", "'\\\\1'", ")", "+", "'*'", ")", ",", "text", ")", "t", "=", "_BOLD", ".", "sub", "(", "(", "(", "'**'", "+", "'\\\\1'", ")", "...
convert symbols like i to valid restructured text .
train
false
35,168
def dotted_netmask(mask): bits = (4294967295 ^ ((1 << (32 - mask)) - 1)) return socket.inet_ntoa(struct.pack('>I', bits))
[ "def", "dotted_netmask", "(", "mask", ")", ":", "bits", "=", "(", "4294967295", "^", "(", "(", "1", "<<", "(", "32", "-", "mask", ")", ")", "-", "1", ")", ")", "return", "socket", ".", "inet_ntoa", "(", "struct", ".", "pack", "(", "'>I'", ",", ...
converts mask from /xx format to xxx .
train
false
35,169
def CDLTASUKIGAP(barDs, count): return call_talib_with_ohlc(barDs, count, talib.CDLTASUKIGAP)
[ "def", "CDLTASUKIGAP", "(", "barDs", ",", "count", ")", ":", "return", "call_talib_with_ohlc", "(", "barDs", ",", "count", ",", "talib", ".", "CDLTASUKIGAP", ")" ]
tasuki gap .
train
false
35,170
def _ensure_datetime_tzinfo(datetime, tzinfo=None): if (datetime.tzinfo is None): datetime = datetime.replace(tzinfo=UTC) if (tzinfo is not None): datetime = datetime.astimezone(get_timezone(tzinfo)) if hasattr(tzinfo, 'normalize'): datetime = tzinfo.normalize(datetime) return datetime
[ "def", "_ensure_datetime_tzinfo", "(", "datetime", ",", "tzinfo", "=", "None", ")", ":", "if", "(", "datetime", ".", "tzinfo", "is", "None", ")", ":", "datetime", "=", "datetime", ".", "replace", "(", "tzinfo", "=", "UTC", ")", "if", "(", "tzinfo", "is...
ensure the datetime passed has an attached tzinfo .
train
false
35,171
def disable_inheritance(path, objectType, copy=True): dc = daclConstants() objectType = dc.getObjectTypeBit(objectType) path = dc.processPath(path, objectType) return _set_dacl_inheritance(path, objectType, False, copy, None)
[ "def", "disable_inheritance", "(", "path", ",", "objectType", ",", "copy", "=", "True", ")", ":", "dc", "=", "daclConstants", "(", ")", "objectType", "=", "dc", ".", "getObjectTypeBit", "(", "objectType", ")", "path", "=", "dc", ".", "processPath", "(", ...
disable inheritance on an object args: path: the path to the object objecttype: the type of object copy: true will copy the inherited aces to the dacl before disabling inheritance returns : a dictionary containing the results cli example: .
train
true
35,174
@register.function @jinja2.contextfunction def reviewers_breadcrumbs(context, queue=None, items=None): crumbs = [(reverse('reviewers.home'), _('Reviewer Tools'))] if queue: queues = {'pending': _('Apps'), 'rereview': _('Re-reviews'), 'updates': _('Updates'), 'escalated': _('Escalations'), 'device': _('Device'), 'moderated': _('Moderated Reviews'), 'abuse': _('Abuse Reports'), 'abusewebsites': _('Website Abuse Reports'), 'reviewing': _('Reviewing'), 'homescreen': _('Homescreens'), 'region': _('Regional Queues')} if items: url = reverse(('reviewers.apps.queue_%s' % queue)) else: url = None crumbs.append((url, queues[queue])) if items: crumbs.extend(items) return mkt_breadcrumbs(context, items=crumbs, add_default=True)
[ "@", "register", ".", "function", "@", "jinja2", ".", "contextfunction", "def", "reviewers_breadcrumbs", "(", "context", ",", "queue", "=", "None", ",", "items", "=", "None", ")", ":", "crumbs", "=", "[", "(", "reverse", "(", "'reviewers.home'", ")", ",", ...
wrapper function for breadcrumbs .
train
false
35,176
def test_time_zone(): schema = vol.Schema(cv.time_zone) with pytest.raises(vol.MultipleInvalid): schema('America/Do_Not_Exist') schema('America/Los_Angeles') schema('UTC')
[ "def", "test_time_zone", "(", ")", ":", "schema", "=", "vol", ".", "Schema", "(", "cv", ".", "time_zone", ")", "with", "pytest", ".", "raises", "(", "vol", ".", "MultipleInvalid", ")", ":", "schema", "(", "'America/Do_Not_Exist'", ")", "schema", "(", "'A...
test time zone validation .
train
false
35,179
def dotnode(expr, styles=default_styles, labelfunc=str, pos=(), repeat=True): style = styleof(expr, styles) if (isinstance(expr, Basic) and (not expr.is_Atom)): label = str(expr.__class__.__name__) else: label = labelfunc(expr) style['label'] = label expr_str = purestr(expr) if repeat: expr_str += ('_%s' % str(pos)) return ('"%s" [%s];' % (expr_str, attrprint(style)))
[ "def", "dotnode", "(", "expr", ",", "styles", "=", "default_styles", ",", "labelfunc", "=", "str", ",", "pos", "=", "(", ")", ",", "repeat", "=", "True", ")", ":", "style", "=", "styleof", "(", "expr", ",", "styles", ")", "if", "(", "isinstance", "...
string defining a node .
train
false
35,180
def vector_mean(vectors): n = len(vectors) return scalar_multiply((1 / n), vector_sum(vectors))
[ "def", "vector_mean", "(", "vectors", ")", ":", "n", "=", "len", "(", "vectors", ")", "return", "scalar_multiply", "(", "(", "1", "/", "n", ")", ",", "vector_sum", "(", "vectors", ")", ")" ]
compute the vector whose i-th element is the mean of the i-th elements of the input vectors .
train
false
35,181
def to_delayed(df): from ..delayed import Delayed return [Delayed(k, [df.dask]) for k in df._keys()]
[ "def", "to_delayed", "(", "df", ")", ":", "from", ".", ".", "delayed", "import", "Delayed", "return", "[", "Delayed", "(", "k", ",", "[", "df", ".", "dask", "]", ")", "for", "k", "in", "df", ".", "_keys", "(", ")", "]" ]
create dask delayed objects from a dask dataframe returns a list of delayed values .
train
false
35,182
def getInvokeReflectivePEInjectionWithDLLEmbedded(payload_conf): SPLIT_SIZE = 100000 (x86InitCode, x86ConcatCode) = ('', '') code = '\n $PEBytes = ""\n {0}\n $PEBytesTotal = [System.Convert]::FromBase64String({1})\n Invoke-ReflectivePEInjection -PEBytes $PEBytesTotal -ForceASLR\n ' binaryX86 = b64encode(get_edit_pupyx86_dll(payload_conf)) binaryX86parts = [binaryX86[i:(i + SPLIT_SIZE)] for i in range(0, len(binaryX86), SPLIT_SIZE)] for (i, aPart) in enumerate(binaryX86parts): x86InitCode += '$PEBytes{0}="{1}"\n'.format(i, aPart) x86ConcatCode += '$PEBytes{0}+'.format(i) print (colorize('[+] ', 'green') + 'X86 pupy dll loaded and {0} variables generated'.format((i + 1))) script = obfuscatePowershellScript(open(os.path.join(ROOT, 'external', 'PowerSploit', 'CodeExecution', 'Invoke-ReflectivePEInjection.ps1'), 'r').read()) return obfs_ps_script('{0}\n{1}'.format(script, code.format(x86InitCode, x86ConcatCode[:(-1)])))
[ "def", "getInvokeReflectivePEInjectionWithDLLEmbedded", "(", "payload_conf", ")", ":", "SPLIT_SIZE", "=", "100000", "(", "x86InitCode", ",", "x86ConcatCode", ")", "=", "(", "''", ",", "''", ")", "code", "=", "'\\n $PEBytes = \"\"\\n {0}\\n $PEBytesTotal = [System...
return source code of invokereflectivepeinjection .
train
false
35,183
@Profiler.profile def test_core_query_caching(n): cache = {} ins = Customer.__table__.insert() for i in range(n): with engine.begin() as conn: conn.execution_options(compiled_cache=cache).execute(ins, dict(name=('customer name %d' % i), description=('customer description %d' % i)))
[ "@", "Profiler", ".", "profile", "def", "test_core_query_caching", "(", "n", ")", ":", "cache", "=", "{", "}", "ins", "=", "Customer", ".", "__table__", ".", "insert", "(", ")", "for", "i", "in", "range", "(", "n", ")", ":", "with", "engine", ".", ...
individual insert/commit pairs using core with query caching .
train
false
35,184
def make_type_consistent(s1, s2): if (isinstance(s1, str) and isinstance(s2, str)): return (s1, s2) elif (isinstance(s1, unicode) and isinstance(s2, unicode)): return (s1, s2) else: return (unicode(s1), unicode(s2))
[ "def", "make_type_consistent", "(", "s1", ",", "s2", ")", ":", "if", "(", "isinstance", "(", "s1", ",", "str", ")", "and", "isinstance", "(", "s2", ",", "str", ")", ")", ":", "return", "(", "s1", ",", "s2", ")", "elif", "(", "isinstance", "(", "s...
if both objects arent either both string or unicode instances force them to unicode .
train
true
35,186
def to_snakecase(string): string = re.sub('(\\s)', (lambda match: '_'), string) string = re.sub('^(_*)([^_])', (lambda match: (match.group(1) + match.group(2).lower())), string) string = re.sub('(\\w*)([.]+)([A-Z])', (lambda match: ((match.group(1) + match.group(2)) + match.group(3).lower())), string) string = re.sub('(?<=[^_])_+([^_])', (lambda match: ('_' + match.group(1).lower())), string) return re.sub('[A-Z]', (lambda match: ('_' + match.group(0).lower())), string)
[ "def", "to_snakecase", "(", "string", ")", ":", "string", "=", "re", ".", "sub", "(", "'(\\\\s)'", ",", "(", "lambda", "match", ":", "'_'", ")", ",", "string", ")", "string", "=", "re", ".", "sub", "(", "'^(_*)([^_])'", ",", "(", "lambda", "match", ...
converts the given string to snake-case .
train
false
35,188
def _send_email(name, email): config = __salt__['config.option']('splunk') email_object = config.get('email') if email_object: cc = email_object.get('cc') subject = email_object.get('subject') message = email_object.get('message').format(name, name, _generate_password(email), name) try: mail_process = subprocess.Popen(['mail', '-s', subject, '-c', cc, email], stdin=subprocess.PIPE) except Exception as e: log.error('unable to send email to {0}: {1}'.format(email, str(e))) mail_process.communicate(message) log.info('sent account creation email to {0}'.format(email))
[ "def", "_send_email", "(", "name", ",", "email", ")", ":", "config", "=", "__salt__", "[", "'config.option'", "]", "(", "'splunk'", ")", "email_object", "=", "config", ".", "get", "(", "'email'", ")", "if", "email_object", ":", "cc", "=", "email_object", ...
sends an email using nflgame .
train
true
35,189
@task(rate_limit='120/m') def tidy_revision_content(pk, refresh=True): try: revision = Revision.objects.get(pk=pk) except Revision.DoesNotExist as exc: log.error('Tidy was unable to get revision id: %d. Retrying.', pk) tidy_revision_content.retry(countdown=(60 * 2), max_retries=5, exc=exc) else: if (revision.tidied_content and (not refresh)): return (tidied_content, errors) = tidy_content(revision.content) if (tidied_content != revision.tidied_content): Revision.objects.filter(pk=pk).update(tidied_content=tidied_content) return errors
[ "@", "task", "(", "rate_limit", "=", "'120/m'", ")", "def", "tidy_revision_content", "(", "pk", ",", "refresh", "=", "True", ")", ":", "try", ":", "revision", "=", "Revision", ".", "objects", ".", "get", "(", "pk", "=", "pk", ")", "except", "Revision",...
run tidy over the given revisions content and save it to the tidy_content field if the content is not equal to the current value .
train
false
35,190
def average_score(scores): score_values = scores.values() sum_scores = sum(score_values) average = (sum_scores / len(score_values)) return average
[ "def", "average_score", "(", "scores", ")", ":", "score_values", "=", "scores", ".", "values", "(", ")", "sum_scores", "=", "sum", "(", "score_values", ")", "average", "=", "(", "sum_scores", "/", "len", "(", "score_values", ")", ")", "return", "average" ]
count average score .
train
false
35,191
def add_friend(self, userName, status=2, verifyContent='', autoUpdate=True): url = ('%s/webwxverifyuser?r=%s&pass_ticket=%s' % (self.loginInfo['url'], int(time.time()), self.loginInfo['pass_ticket'])) data = {'BaseRequest': self.loginInfo['BaseRequest'], 'Opcode': status, 'VerifyUserListSize': 1, 'VerifyUserList': [{'Value': userName, 'VerifyUserTicket': ''}], 'VerifyContent': verifyContent, 'SceneListCount': 1, 'SceneList': 33, 'skey': self.loginInfo['skey']} headers = {'ContentType': 'application/json; charset=UTF-8', 'User-Agent': config.USER_AGENT} r = self.s.post(url, headers=headers, data=json.dumps(data, ensure_ascii=False).encode('utf8', 'replace')) if autoUpdate: self.update_friend(userName) return ReturnValue(rawResponse=r)
[ "def", "add_friend", "(", "self", ",", "userName", ",", "status", "=", "2", ",", "verifyContent", "=", "''", ",", "autoUpdate", "=", "True", ")", ":", "url", "=", "(", "'%s/webwxverifyuser?r=%s&pass_ticket=%s'", "%", "(", "self", ".", "loginInfo", "[", "'u...
add a friend or accept a friend * for adding status should be 2 * for accepting status should be 3 .
train
false
35,193
def read_raw_cnt(input_fname, montage, eog=(), misc=(), ecg=(), emg=(), data_format='auto', date_format='mm/dd/yy', preload=False, verbose=None): return RawCNT(input_fname, montage=montage, eog=eog, misc=misc, ecg=ecg, emg=emg, data_format=data_format, date_format=date_format, preload=preload, verbose=verbose)
[ "def", "read_raw_cnt", "(", "input_fname", ",", "montage", ",", "eog", "=", "(", ")", ",", "misc", "=", "(", ")", ",", "ecg", "=", "(", ")", ",", "emg", "=", "(", ")", ",", "data_format", "=", "'auto'", ",", "date_format", "=", "'mm/dd/yy'", ",", ...
read cnt data as raw object .
train
false
35,194
def _make_tris_fan(n_vert): tris = np.zeros(((n_vert - 2), 3), int) tris[:, 2] = np.arange(2, n_vert) tris[:, 1] = (tris[:, 2] - 1) return tris
[ "def", "_make_tris_fan", "(", "n_vert", ")", ":", "tris", "=", "np", ".", "zeros", "(", "(", "(", "n_vert", "-", "2", ")", ",", "3", ")", ",", "int", ")", "tris", "[", ":", ",", "2", "]", "=", "np", ".", "arange", "(", "2", ",", "n_vert", "...
make tris given a number of vertices of a circle-like obj .
train
false
35,196
def skipif(skip_condition, msg=None): def skip_decorator(f): import nose if callable(skip_condition): skip_val = skip_condition else: skip_val = (lambda : skip_condition) def get_msg(func, msg=None): 'Skip message with information about function being skipped.' if (msg is None): out = 'Test skipped due to test condition.' else: out = msg return ('Skipping test: %s. %s' % (func.__name__, out)) def skipper_func(*args, **kwargs): 'Skipper for normal test functions.' if skip_val(): raise nose.SkipTest(get_msg(f, msg)) else: return f(*args, **kwargs) def skipper_gen(*args, **kwargs): 'Skipper for test generators.' if skip_val(): raise nose.SkipTest(get_msg(f, msg)) else: for x in f(*args, **kwargs): (yield x) if nose.util.isgenerator(f): skipper = skipper_gen else: skipper = skipper_func return nose.tools.make_decorator(f)(skipper) return skip_decorator
[ "def", "skipif", "(", "skip_condition", ",", "msg", "=", "None", ")", ":", "def", "skip_decorator", "(", "f", ")", ":", "import", "nose", "if", "callable", "(", "skip_condition", ")", ":", "skip_val", "=", "skip_condition", "else", ":", "skip_val", "=", ...
make function raise skiptest exception if skip_condition is true parameters skip_condition : bool or callable flag to determine whether to skip test .
train
true
35,197
def accept_singleton(expected_type, position=1): @decorator def wrapper(function, *args, **kw): if isinstance(args[position], expected_type): args = list(args) args[position] = [args[position]] args = tuple(args) return function(*args, **kw) return wrapper
[ "def", "accept_singleton", "(", "expected_type", ",", "position", "=", "1", ")", ":", "@", "decorator", "def", "wrapper", "(", "function", ",", "*", "args", ",", "**", "kw", ")", ":", "if", "isinstance", "(", "args", "[", "position", "]", ",", "expecte...
allows a function expecting a list to accept a single item as well .
train
false
35,198
def UpdateIndex(index): _Call('UpdateIndex', index, api_base_pb.VoidProto())
[ "def", "UpdateIndex", "(", "index", ")", ":", "_Call", "(", "'UpdateIndex'", ",", "index", ",", "api_base_pb", ".", "VoidProto", "(", ")", ")" ]
updates an indexs status .
train
false
35,199
@pytest.mark.parametrize('api_version', API_VERSIONS) def test_message_label_updates(db, api_client, default_account, api_version, custom_label): headers = dict() headers['Api-Version'] = api_version gmail_thread = add_fake_thread(db.session, default_account.namespace.id) gmail_message = add_fake_message(db.session, default_account.namespace.id, gmail_thread) resp_data = api_client.get_data('/messages/{}'.format(gmail_message.public_id), headers=headers) assert (resp_data['labels'] == []) category = custom_label.category update = dict(labels=[category.public_id]) resp = api_client.put_data('/messages/{}'.format(gmail_message.public_id), update, headers=headers) resp_data = json.loads(resp.data) if (api_version == API_VERSIONS[0]): assert (len(resp_data['labels']) == 1) assert (resp_data['labels'][0]['id'] == category.public_id) else: assert (resp_data['labels'] == [])
[ "@", "pytest", ".", "mark", ".", "parametrize", "(", "'api_version'", ",", "API_VERSIONS", ")", "def", "test_message_label_updates", "(", "db", ",", "api_client", ",", "default_account", ",", "api_version", ",", "custom_label", ")", ":", "headers", "=", "dict", ...
check that you can update a message .
train
false
35,201
def fitbinned(distfn, freq, binedges, start, fixed=None): if (not (fixed is None)): raise NotImplementedError nobs = np.sum(freq) lnnobsfact = special.gammaln((nobs + 1)) def nloglike(params): 'negative loglikelihood function of binned data\n\n corresponds to multinomial\n ' prob = np.diff(distfn.cdf(binedges, *params)) return (- (lnnobsfact + np.sum(((freq * np.log(prob)) - special.gammaln((freq + 1)))))) return optimize.fmin(nloglike, start)
[ "def", "fitbinned", "(", "distfn", ",", "freq", ",", "binedges", ",", "start", ",", "fixed", "=", "None", ")", ":", "if", "(", "not", "(", "fixed", "is", "None", ")", ")", ":", "raise", "NotImplementedError", "nobs", "=", "np", ".", "sum", "(", "fr...
estimate parameters of distribution function for binned data using mle parameters distfn : distribution instance needs to have cdf method .
train
false
35,202
def CollectAff4Objects(paths, client_id, token): object_urns = {} for path in paths: aff4_object = client_id.Add(path) object_urns[str(aff4_object)] = aff4_object objects = {fd.urn: fd for fd in aff4.FACTORY.MultiOpen(object_urns, token=token)} return objects
[ "def", "CollectAff4Objects", "(", "paths", ",", "client_id", ",", "token", ")", ":", "object_urns", "=", "{", "}", "for", "path", "in", "paths", ":", "aff4_object", "=", "client_id", ".", "Add", "(", "path", ")", "object_urns", "[", "str", "(", "aff4_obj...
mimics the logic in aff4_grr .
train
false
35,203
def find_deps(task, upstream_task_family): return set([t for t in dfs_paths(task, upstream_task_family)])
[ "def", "find_deps", "(", "task", ",", "upstream_task_family", ")", ":", "return", "set", "(", "[", "t", "for", "t", "in", "dfs_paths", "(", "task", ",", "upstream_task_family", ")", "]", ")" ]
finds all dependencies that start with the given task and have a path to upstream_task_family returns all deps on all paths between task and upstream .
train
false
35,204
def add_dependency(name, first=False): dep_paths = generate_dependency_paths(name) for path in dep_paths.values(): if os.path.exists(encode(path)): add(path, first=first)
[ "def", "add_dependency", "(", "name", ",", "first", "=", "False", ")", ":", "dep_paths", "=", "generate_dependency_paths", "(", "name", ")", "for", "path", "in", "dep_paths", ".", "values", "(", ")", ":", "if", "os", ".", "path", ".", "exists", "(", "e...
accepts a dependency name and automatically adds the appropriate path to sys .
train
false
35,205
def string_from_cuts(cuts): strings = [compat.to_unicode(cut) for cut in cuts] string = CUT_STRING_SEPARATOR_CHAR.join(strings) return string
[ "def", "string_from_cuts", "(", "cuts", ")", ":", "strings", "=", "[", "compat", ".", "to_unicode", "(", "cut", ")", "for", "cut", "in", "cuts", "]", "string", "=", "CUT_STRING_SEPARATOR_CHAR", ".", "join", "(", "strings", ")", "return", "string" ]
returns a string represeting cuts .
train
false
35,206
def isfuture(obj): return (hasattr(obj.__class__, '_asyncio_future_blocking') and (obj._asyncio_future_blocking is not None))
[ "def", "isfuture", "(", "obj", ")", ":", "return", "(", "hasattr", "(", "obj", ".", "__class__", ",", "'_asyncio_future_blocking'", ")", "and", "(", "obj", ".", "_asyncio_future_blocking", "is", "not", "None", ")", ")" ]
check for a future .
train
false
35,207
def console_get_by_pool_instance(context, pool_id, instance_uuid): return IMPL.console_get_by_pool_instance(context, pool_id, instance_uuid)
[ "def", "console_get_by_pool_instance", "(", "context", ",", "pool_id", ",", "instance_uuid", ")", ":", "return", "IMPL", ".", "console_get_by_pool_instance", "(", "context", ",", "pool_id", ",", "instance_uuid", ")" ]
get console entry for a given instance and pool .
train
false
35,208
def read_tfrs(fname, condition=None): check_fname(fname, 'tfr', ('-tfr.h5',)) logger.info(('Reading %s ...' % fname)) tfr_data = read_hdf5(fname, title='mnepython') for (k, tfr) in tfr_data: tfr['info'] = Info(tfr['info']) if (condition is not None): tfr_dict = dict(tfr_data) if (condition not in tfr_dict): keys = [('%s' % k) for k in tfr_dict] raise ValueError('Cannot find condition ("{0}") in this file. The file contains "{1}""'.format(condition, ' or '.join(keys))) out = AverageTFR(**tfr_dict[condition]) else: out = [AverageTFR(**d) for d in list(zip(*tfr_data))[1]] return out
[ "def", "read_tfrs", "(", "fname", ",", "condition", "=", "None", ")", ":", "check_fname", "(", "fname", ",", "'tfr'", ",", "(", "'-tfr.h5'", ",", ")", ")", "logger", ".", "info", "(", "(", "'Reading %s ...'", "%", "fname", ")", ")", "tfr_data", "=", ...
read tfr datasets from hdf5 file .
train
false
35,209
def runOne(test): test = BasicTestCase(test) suite = unittest.TestSuite([test]) unittest.TextTestRunner(verbosity=2).run(suite)
[ "def", "runOne", "(", "test", ")", ":", "test", "=", "BasicTestCase", "(", "test", ")", "suite", "=", "unittest", ".", "TestSuite", "(", "[", "test", "]", ")", "unittest", ".", "TextTestRunner", "(", "verbosity", "=", "2", ")", ".", "run", "(", "suit...
unittest runner .
train
false
35,210
def create_channel(key, aliases=None, desc=None, locks=None, keep_log=True, typeclass=None): typeclass = (typeclass if typeclass else settings.BASE_CHANNEL_TYPECLASS) if isinstance(typeclass, basestring): typeclass = class_from_module(typeclass, settings.TYPECLASS_PATHS) new_channel = typeclass(db_key=key) new_channel._createdict = {'key': key, 'aliases': aliases, 'desc': desc, 'locks': locks, 'keep_log': keep_log} new_channel.save() return new_channel
[ "def", "create_channel", "(", "key", ",", "aliases", "=", "None", ",", "desc", "=", "None", ",", "locks", "=", "None", ",", "keep_log", "=", "True", ",", "typeclass", "=", "None", ")", ":", "typeclass", "=", "(", "typeclass", "if", "typeclass", "else",...
create a communication channel .
train
false
35,211
def add_openid_attribute_exchange(request, response, data): try: ax_request = ax.FetchRequest.fromOpenIDRequest(request) except ax.AXError: pass else: ax_response = ax.FetchResponse() if (ax_request and ax_request.requested_attributes): for type_uri in ax_request.requested_attributes.iterkeys(): email_schema = 'http://axschema.org/contact/email' name_schema = 'http://axschema.org/namePerson' if ((type_uri == email_schema) and ('email' in data)): ax_response.addValue(email_schema, data['email']) elif ((type_uri == name_schema) and ('fullname' in data)): ax_response.addValue(name_schema, data['fullname']) ax_response.toMessage(response.fields)
[ "def", "add_openid_attribute_exchange", "(", "request", ",", "response", ",", "data", ")", ":", "try", ":", "ax_request", "=", "ax", ".", "FetchRequest", ".", "fromOpenIDRequest", "(", "request", ")", "except", "ax", ".", "AXError", ":", "pass", "else", ":",...
add attribute exchange fields to the response if requested .
train
false
35,212
def OR(*params): return or_(*params)
[ "def", "OR", "(", "*", "params", ")", ":", "return", "or_", "(", "*", "params", ")" ]
emulate sqlobjects or .
train
false
35,213
def volume_type_access_add(context, type_id, project_id): return IMPL.volume_type_access_add(context, type_id, project_id)
[ "def", "volume_type_access_add", "(", "context", ",", "type_id", ",", "project_id", ")", ":", "return", "IMPL", ".", "volume_type_access_add", "(", "context", ",", "type_id", ",", "project_id", ")" ]
add given tenant to the volume type access list .
train
false
35,214
def _GetTextInside(text, start_pattern): matching_punctuation = {'(': ')', '{': '}', '[': ']'} closing_punctuation = set(itervalues(matching_punctuation)) match = re.search(start_pattern, text, re.M) if (not match): return None start_position = match.end(0) assert (start_position > 0), 'start_pattern must ends with an opening punctuation.' assert (text[(start_position - 1)] in matching_punctuation), 'start_pattern must ends with an opening punctuation.' punctuation_stack = [matching_punctuation[text[(start_position - 1)]]] position = start_position while (punctuation_stack and (position < len(text))): if (text[position] == punctuation_stack[(-1)]): punctuation_stack.pop() elif (text[position] in closing_punctuation): return None elif (text[position] in matching_punctuation): punctuation_stack.append(matching_punctuation[text[position]]) position += 1 if punctuation_stack: return None return text[start_position:(position - 1)]
[ "def", "_GetTextInside", "(", "text", ",", "start_pattern", ")", ":", "matching_punctuation", "=", "{", "'('", ":", "')'", ",", "'{'", ":", "'}'", ",", "'['", ":", "']'", "}", "closing_punctuation", "=", "set", "(", "itervalues", "(", "matching_punctuation",...
retrieves all the text between matching open and close parentheses .
train
true
35,218
def fake_sentence(start=1, stop=100): return faker.sentence(random.randrange(start, stop))
[ "def", "fake_sentence", "(", "start", "=", "1", ",", "stop", "=", "100", ")", ":", "return", "faker", ".", "sentence", "(", "random", ".", "randrange", "(", "start", ",", "stop", ")", ")" ]
create a sentence of random length .
train
false
35,219
def absolute_location(url, location): if ('://' in location): return location elif location.startswith('/'): parts = six.moves.urllib.parse.urlsplit(url) base = url.replace(parts[2], '') return ('%s%s' % (base, location)) elif (not location.startswith('/')): base = os.path.dirname(url) return ('%s/%s' % (base, location)) else: return location
[ "def", "absolute_location", "(", "url", ",", "location", ")", ":", "if", "(", "'://'", "in", "location", ")", ":", "return", "location", "elif", "location", ".", "startswith", "(", "'/'", ")", ":", "parts", "=", "six", ".", "moves", ".", "urllib", ".",...
attempts to create an absolute url based on initial url .
train
false
35,220
@contextfunction def attachments_count(context, object=None): request = context['request'] response_format = 'html' if ('response_format' in context): response_format = context['response_format'] update = isinstance(object, UpdateRecord) if (not update): count = Attachment.objects.filter(attached_object=object).count() else: count = Attachment.objects.filter(attached_record=object).count() if count: return Markup(render_to_string('core/tags/attachments_count', {'count': count}, context_instance=RequestContext(request), response_format=response_format)) else: return ''
[ "@", "contextfunction", "def", "attachments_count", "(", "context", ",", "object", "=", "None", ")", ":", "request", "=", "context", "[", "'request'", "]", "response_format", "=", "'html'", "if", "(", "'response_format'", "in", "context", ")", ":", "response_f...
number of attachments associated with an object .
train
false
35,221
def permute(a): a.sort() (yield list(a)) if (len(a) <= 1): return first = 0 last = len(a) while 1: i = (last - 1) while 1: i = (i - 1) if (a[i] < a[(i + 1)]): j = (last - 1) while (not (a[i] < a[j])): j = (j - 1) (a[i], a[j]) = (a[j], a[i]) r = a[(i + 1):last] r.reverse() a[(i + 1):last] = r (yield list(a)) break if (i == first): a.reverse() return
[ "def", "permute", "(", "a", ")", ":", "a", ".", "sort", "(", ")", "(", "yield", "list", "(", "a", ")", ")", "if", "(", "len", "(", "a", ")", "<=", "1", ")", ":", "return", "first", "=", "0", "last", "=", "len", "(", "a", ")", "while", "1"...
returns generator of all permutations of a the following code is an in-place permutation of a given list .
train
false
35,222
@public def PolynomialRing(dom, *gens, **opts): order = opts.get('order', GeneralizedPolynomialRing.default_order) if iterable(order): order = build_product_order(order, gens) order = monomial_key(order) opts['order'] = order if order.is_global: return GlobalPolynomialRing(dom, *gens, **opts) else: return GeneralizedPolynomialRing(dom, *gens, **opts)
[ "@", "public", "def", "PolynomialRing", "(", "dom", ",", "*", "gens", ",", "**", "opts", ")", ":", "order", "=", "opts", ".", "get", "(", "'order'", ",", "GeneralizedPolynomialRing", ".", "default_order", ")", "if", "iterable", "(", "order", ")", ":", ...
create a generalized multivariate polynomial ring .
train
false
35,223
def _get_shebang(interpreter, task_vars, args=tuple()): interpreter_config = (u'ansible_%s_interpreter' % os.path.basename(interpreter).strip()) if (interpreter_config not in task_vars): return (None, interpreter) interpreter = task_vars[interpreter_config].strip() shebang = (u'#!' + interpreter) if args: shebang = ((shebang + u' ') + u' '.join(args)) return (shebang, interpreter)
[ "def", "_get_shebang", "(", "interpreter", ",", "task_vars", ",", "args", "=", "tuple", "(", ")", ")", ":", "interpreter_config", "=", "(", "u'ansible_%s_interpreter'", "%", "os", ".", "path", ".", "basename", "(", "interpreter", ")", ".", "strip", "(", ")...
note not stellar api: returns none instead of always returning a shebang line .
train
false
35,224
def pretty_all(container): for (name, mt) in container.mime_map.iteritems(): prettied = False if (mt in OEB_DOCS): pretty_html_tree(container, container.parsed(name)) prettied = True elif (mt in OEB_STYLES): container.parsed(name) prettied = True elif (name == container.opf_name): root = container.parsed(name) pretty_opf(root) pretty_xml_tree(root) prettied = True elif (mt in {guess_type(u'a.ncx'), guess_type(u'a.xml')}): pretty_xml_tree(container.parsed(name)) prettied = True if prettied: container.dirty(name)
[ "def", "pretty_all", "(", "container", ")", ":", "for", "(", "name", ",", "mt", ")", "in", "container", ".", "mime_map", ".", "iteritems", "(", ")", ":", "prettied", "=", "False", "if", "(", "mt", "in", "OEB_DOCS", ")", ":", "pretty_html_tree", "(", ...
pretty print all html/css/xml files in the container .
train
false
35,225
@route(bp, '/<store_id>/products/<product_id>', methods=['DELETE']) def remove_product(store_id, product_id): _stores.remove_product(_stores.get_or_404(store_id), _products.get_or_404(product_id)) return (None, 204)
[ "@", "route", "(", "bp", ",", "'/<store_id>/products/<product_id>'", ",", "methods", "=", "[", "'DELETE'", "]", ")", "def", "remove_product", "(", "store_id", ",", "product_id", ")", ":", "_stores", ".", "remove_product", "(", "_stores", ".", "get_or_404", "("...
removes a product form a store .
train
false
35,226
def get_thirdparty_root(*append): return __get_root('ext', 'thirdparty', *append)
[ "def", "get_thirdparty_root", "(", "*", "append", ")", ":", "return", "__get_root", "(", "'ext'", ",", "'thirdparty'", ",", "*", "append", ")" ]
returns the ext/thirdparty directory .
train
false
35,228
def rollaxis(a, axis, start=0): return core.rollaxis(a, axis, start)
[ "def", "rollaxis", "(", "a", ",", "axis", ",", "start", "=", "0", ")", ":", "return", "core", ".", "rollaxis", "(", "a", ",", "axis", ",", "start", ")" ]
moves the specified axis backwards to the given place .
train
false
35,229
def staff_for_site(): try: site_id = request.args[0] except: result = current.xml.json_message(False, 400, 'No Site provided!') else: table = s3db.hrm_human_resource ptable = db.pr_person query = (((((table.site_id == site_id) & (table.deleted == False)) & (table.status == 1)) & ((table.end_date == None) | (table.end_date > request.utcnow))) & (ptable.id == table.person_id)) rows = db(query).select(ptable.id, ptable.first_name, ptable.middle_name, ptable.last_name, orderby=ptable.first_name) result = [] append = result.append for row in rows: append({'id': row.id, 'name': s3_fullname(row)}) result = json.dumps(result) response.headers['Content-Type'] = 'application/json' return result
[ "def", "staff_for_site", "(", ")", ":", "try", ":", "site_id", "=", "request", ".", "args", "[", "0", "]", "except", ":", "result", "=", "current", ".", "xml", ".", "json_message", "(", "False", ",", "400", ",", "'No Site provided!'", ")", "else", ":",...
used by the req/req/create page - note that this returns person ids .
train
false
35,230
def _mergetree(src, dst): for item in os.listdir(src): s = os.path.join(src, item) d = os.path.join(dst, item) if os.path.isdir(s): log.info('Copying folder {0} to {1}'.format(s, d)) if os.path.exists(d): _mergetree(s, d) else: shutil.copytree(s, d) else: log.info('Copying file {0} to {1}'.format(s, d)) shutil.copy2(s, d)
[ "def", "_mergetree", "(", "src", ",", "dst", ")", ":", "for", "item", "in", "os", ".", "listdir", "(", "src", ")", ":", "s", "=", "os", ".", "path", ".", "join", "(", "src", ",", "item", ")", "d", "=", "os", ".", "path", ".", "join", "(", "...
akin to shutils .
train
true
35,231
def get_struggling_exercises(user): struggles = ExerciseLog.objects.filter(user=user, struggling=True).order_by('-latest_activity_timestamp').values_list('exercise_id', flat=True) return struggles
[ "def", "get_struggling_exercises", "(", "user", ")", ":", "struggles", "=", "ExerciseLog", ".", "objects", ".", "filter", "(", "user", "=", "user", ",", "struggling", "=", "True", ")", ".", "order_by", "(", "'-latest_activity_timestamp'", ")", ".", "values_lis...
return a list of all exercises that the user is currently struggling on .
train
false
35,232
def test_bazaar_simple_urls(): http_bzr_repo = Bazaar(url='bzr+http://bzr.myproject.org/MyProject/trunk/#egg=MyProject') https_bzr_repo = Bazaar(url='bzr+https://bzr.myproject.org/MyProject/trunk/#egg=MyProject') ssh_bzr_repo = Bazaar(url='bzr+ssh://bzr.myproject.org/MyProject/trunk/#egg=MyProject') ftp_bzr_repo = Bazaar(url='bzr+ftp://bzr.myproject.org/MyProject/trunk/#egg=MyProject') sftp_bzr_repo = Bazaar(url='bzr+sftp://bzr.myproject.org/MyProject/trunk/#egg=MyProject') launchpad_bzr_repo = Bazaar(url='bzr+lp:MyLaunchpadProject#egg=MyLaunchpadProject') assert (http_bzr_repo.get_url_rev() == ('http://bzr.myproject.org/MyProject/trunk/', None)) assert (https_bzr_repo.get_url_rev() == ('https://bzr.myproject.org/MyProject/trunk/', None)) assert (ssh_bzr_repo.get_url_rev() == ('bzr+ssh://bzr.myproject.org/MyProject/trunk/', None)) assert (ftp_bzr_repo.get_url_rev() == ('ftp://bzr.myproject.org/MyProject/trunk/', None)) assert (sftp_bzr_repo.get_url_rev() == ('sftp://bzr.myproject.org/MyProject/trunk/', None)) assert (launchpad_bzr_repo.get_url_rev() == ('lp:MyLaunchpadProject', None))
[ "def", "test_bazaar_simple_urls", "(", ")", ":", "http_bzr_repo", "=", "Bazaar", "(", "url", "=", "'bzr+http://bzr.myproject.org/MyProject/trunk/#egg=MyProject'", ")", "https_bzr_repo", "=", "Bazaar", "(", "url", "=", "'bzr+https://bzr.myproject.org/MyProject/trunk/#egg=MyProje...
test bzr url support .
train
false
35,233
def getPolygonCenter(polygon): pointSum = complex() areaSum = 0.0 for pointIndex in xrange(len(polygon)): pointBegin = polygon[pointIndex] pointEnd = polygon[((pointIndex + 1) % len(polygon))] area = ((pointBegin.real * pointEnd.imag) - (pointBegin.imag * pointEnd.real)) areaSum += area pointSum += (complex((pointBegin.real + pointEnd.real), (pointBegin.imag + pointEnd.imag)) * area) return ((pointSum / 3.0) / areaSum)
[ "def", "getPolygonCenter", "(", "polygon", ")", ":", "pointSum", "=", "complex", "(", ")", "areaSum", "=", "0.0", "for", "pointIndex", "in", "xrange", "(", "len", "(", "polygon", ")", ")", ":", "pointBegin", "=", "polygon", "[", "pointIndex", "]", "point...
get the centroid of a polygon .
train
false
35,234
def get_element_padding(page, selector): js_script = ('\n var $element = $(\'%(selector)s\');\n\n element_padding = {\n \'padding-top\': $element.css(\'padding-top\').replace("px", ""),\n \'padding-right\': $element.css(\'padding-right\').replace("px", ""),\n \'padding-bottom\': $element.css(\'padding-bottom\').replace("px", ""),\n \'padding-left\': $element.css(\'padding-left\').replace("px", "")\n };\n\n return element_padding;\n ' % {'selector': selector}) return page.browser.execute_script(js_script)
[ "def", "get_element_padding", "(", "page", ",", "selector", ")", ":", "js_script", "=", "(", "'\\n var $element = $(\\'%(selector)s\\');\\n\\n element_padding = {\\n \\'padding-top\\': $element.css(\\'padding-top\\').replace(\"px\", \"\"),\\n \\'padding-righ...
get padding of the element with given selector .
train
false
35,236
def _createTps(numCols): minThreshold = 4 activationThreshold = 5 newSynapseCount = 7 initialPerm = 0.3 connectedPerm = 0.5 permanenceInc = 0.1 permanenceDec = 0.05 globalDecay = 0 cellsPerColumn = 1 cppTp = TP10X2(numberOfCols=numCols, cellsPerColumn=cellsPerColumn, initialPerm=initialPerm, connectedPerm=connectedPerm, minThreshold=minThreshold, newSynapseCount=newSynapseCount, permanenceInc=permanenceInc, permanenceDec=permanenceDec, activationThreshold=activationThreshold, globalDecay=globalDecay, burnIn=1, seed=_SEED, verbosity=VERBOSITY, checkSynapseConsistency=True, pamLength=1000) cppTp.retrieveLearningStates = True pyTp = TP(numberOfCols=numCols, cellsPerColumn=cellsPerColumn, initialPerm=initialPerm, connectedPerm=connectedPerm, minThreshold=minThreshold, newSynapseCount=newSynapseCount, permanenceInc=permanenceInc, permanenceDec=permanenceDec, activationThreshold=activationThreshold, globalDecay=globalDecay, burnIn=1, seed=_SEED, verbosity=VERBOSITY, pamLength=1000) return (cppTp, pyTp)
[ "def", "_createTps", "(", "numCols", ")", ":", "minThreshold", "=", "4", "activationThreshold", "=", "5", "newSynapseCount", "=", "7", "initialPerm", "=", "0.3", "connectedPerm", "=", "0.5", "permanenceInc", "=", "0.1", "permanenceDec", "=", "0.05", "globalDecay...
create two instances of temporal poolers with identical parameter settings .
train
false
35,237
def askfloat(title, prompt, **kw): d = _QueryFloat(title, prompt, **kw) return d.result
[ "def", "askfloat", "(", "title", ",", "prompt", ",", "**", "kw", ")", ":", "d", "=", "_QueryFloat", "(", "title", ",", "prompt", ",", "**", "kw", ")", "return", "d", ".", "result" ]
get a float from the user arguments: title -- the dialog title prompt -- the label text **kw -- see simpledialog class return value is a float .
train
false
35,238
def _getLogger(cls, logLevel=None): logger = logging.getLogger('.'.join(['com.numenta', _MODULE_NAME, cls.__name__])) if (logLevel is not None): logger.setLevel(logLevel) return logger
[ "def", "_getLogger", "(", "cls", ",", "logLevel", "=", "None", ")", ":", "logger", "=", "logging", ".", "getLogger", "(", "'.'", ".", "join", "(", "[", "'com.numenta'", ",", "_MODULE_NAME", ",", "cls", ".", "__name__", "]", ")", ")", "if", "(", "logL...
gets a logger for the given class in this module .
train
true
35,239
def execute_manual_step(message, default_error=''): if (not _validate_user_input(PassFailDialog(message))): msg = get_value_from_user('Give error message:', default_error) raise AssertionError(msg)
[ "def", "execute_manual_step", "(", "message", ",", "default_error", "=", "''", ")", ":", "if", "(", "not", "_validate_user_input", "(", "PassFailDialog", "(", "message", ")", ")", ")", ":", "msg", "=", "get_value_from_user", "(", "'Give error message:'", ",", ...
pauses test execution until user sets the keyword status .
train
false
35,240
@image_comparison(baseline_images=[u'fancyarrow_dpi_cor_200dpi'], remove_text=True, extensions=[u'png'], savefig_kwarg=dict(dpi=200)) def test_fancyarrow_dpi_cor_200dpi(): __prepare_fancyarrow_dpi_cor_test()
[ "@", "image_comparison", "(", "baseline_images", "=", "[", "u'fancyarrow_dpi_cor_200dpi'", "]", ",", "remove_text", "=", "True", ",", "extensions", "=", "[", "u'png'", "]", ",", "savefig_kwarg", "=", "dict", "(", "dpi", "=", "200", ")", ")", "def", "test_fan...
as test_fancyarrow_dpi_cor_100dpi .
train
false
35,241
def copy_media_files(from_dir, to_dir, exclude=None, dirty=False): for (source_dir, dirnames, filenames) in os.walk(from_dir, followlinks=True): relative_path = os.path.relpath(source_dir, from_dir) output_dir = os.path.normpath(os.path.join(to_dir, relative_path)) exclude_patterns = [u'.*'] exclude_patterns.extend((exclude or [])) for pattern in exclude_patterns: filenames = [f for f in filenames if (not fnmatch.fnmatch(f, pattern))] dirnames[:] = [d for d in dirnames if (not d.startswith(u'.'))] for filename in filenames: if (not is_markdown_file(filename)): source_path = os.path.join(source_dir, filename) output_path = os.path.join(output_dir, filename) if (dirty and (modified_time(source_path) < modified_time(output_path))): continue copy_file(source_path, output_path)
[ "def", "copy_media_files", "(", "from_dir", ",", "to_dir", ",", "exclude", "=", "None", ",", "dirty", "=", "False", ")", ":", "for", "(", "source_dir", ",", "dirnames", ",", "filenames", ")", "in", "os", ".", "walk", "(", "from_dir", ",", "followlinks", ...
recursively copy all files except markdown and exclude[ed] files into another directory .
train
false
35,242
def get_file_watcher(directories, use_mtime_file_watcher): assert (not isinstance(directories, types.StringTypes)), 'expected list got str' if (len(directories) != 1): return _MultipleFileWatcher(directories, use_mtime_file_watcher) directory = directories[0] if use_mtime_file_watcher: return mtime_file_watcher.MtimeFileWatcher(directory) elif sys.platform.startswith('linux'): return inotify_file_watcher.InotifyFileWatcher(directory) elif sys.platform.startswith('win'): return win32_file_watcher.Win32FileWatcher(directory) return mtime_file_watcher.MtimeFileWatcher(directory)
[ "def", "get_file_watcher", "(", "directories", ",", "use_mtime_file_watcher", ")", ":", "assert", "(", "not", "isinstance", "(", "directories", ",", "types", ".", "StringTypes", ")", ")", ",", "'expected list got str'", "if", "(", "len", "(", "directories", ")",...
returns an instance that monitors a hierarchy of directories .
train
false
35,244
def _maximalPercentEncode(text, safe): quoted = urlquote(normalize('NFC', text).encode('utf-8'), (safe + u'%').encode('ascii')) if (not isinstance(quoted, unicode)): quoted = quoted.decode('ascii') return quoted
[ "def", "_maximalPercentEncode", "(", "text", ",", "safe", ")", ":", "quoted", "=", "urlquote", "(", "normalize", "(", "'NFC'", ",", "text", ")", ".", "encode", "(", "'utf-8'", ")", ",", "(", "safe", "+", "u'%'", ")", ".", "encode", "(", "'ascii'", ")...
percent-encode everything required to convert a portion of an iri to a portion of a uri .
train
false
35,245
def addListsToRepository(fileNameHelp, getProfileDirectory, repository):
    """Initialize the standard settings lists and attributes on *repository*."""
    repository.displayEntities = []
    repository.executeTitle = None
    repository.fileNameHelp = fileNameHelp
    repository.fileNameInput = None
    # Derive the tool name from the help file name, e.g. 'pkg.tool.html' -> 'tool'.
    repository.lowerName = fileNameHelp.split('.')[-2]
    repository.baseName = repository.lowerName + '.csv'
    repository.baseNameSynonym = None
    repository.capitalizedName = getEachWordCapitalized(repository.lowerName)
    repository.getProfileDirectory = getProfileDirectory
    repository.openLocalHelpPage = HelpPage().getOpenFromDocumentationSubName(repository.fileNameHelp)
    repository.openWikiManualHelpPage = None
    repository.preferences = []
    repository.repositoryDialog = None
    repository.saveListenerTable = {}
    repository.title = repository.capitalizedName + ' Settings'
    repository.menuEntities = []
    repository.saveCloseTitle = 'Save and Close'
    repository.windowPosition = WindowPosition().getFromValue(repository, '0+0')
    # NOTE(review): preferences was reset to [] above, so this loop only does
    # work if getFromValue appended settings to it — verify before removing.
    for setting in repository.preferences:
        setting.repository = repository
[ "def", "addListsToRepository", "(", "fileNameHelp", ",", "getProfileDirectory", ",", "repository", ")", ":", "repository", ".", "displayEntities", "=", "[", "]", "repository", ".", "executeTitle", "=", "None", "repository", ".", "fileNameHelp", "=", "fileNameHelp", ...
add the standard settings lists and default attributes to the repository .
train
false
35,246
def env_is_created(env_name):
    """Return True if a conda environment named *env_name* exists, else False."""
    from conda import misc
    from os.path import basename

    def _display_name(prefix):
        # The root prefix is shown under a reserved name, not its basename.
        if prefix == context.root_dir:
            return ROOT_ENV_NAME
        return basename(prefix)

    return any(_display_name(prefix) == env_name for prefix in misc.list_prefixes())
[ "def", "env_is_created", "(", "env_name", ")", ":", "from", "conda", "import", "misc", "from", "os", ".", "path", "import", "basename", "for", "prefix", "in", "misc", ".", "list_prefixes", "(", ")", ":", "name", "=", "(", "ROOT_ENV_NAME", "if", "(", "pre...
check whether an environment has been created . args: env_name: the environment name . returns: true if created , false otherwise .
train
false
35,247
def invalidate_local_cache(cache_path):
    """Invalidate the local cache by removing the cache folder at *cache_path*.

    A no-op when the path does not exist.
    """
    if os.path.exists(cache_path):
        # Parenthesized single-argument print is identical on Python 2 and
        # also valid Python 3 (the bare statement form is Python-2-only).
        print(u"Invalidate local cache '{0}'.".format(repr(cache_path)))
        shutil.rmtree(cache_path)
[ "def", "invalidate_local_cache", "(", "cache_path", ")", ":", "if", "os", ".", "path", ".", "exists", "(", "cache_path", ")", ":", "print", "u\"Invalidate local cache '{0}'.\"", ".", "format", "(", "repr", "(", "cache_path", ")", ")", "shutil", ".", "rmtree", ...
invalidates the local cache by removing the cache folders .
train
false
35,248
def CreateCampaignWithBiddingStrategy(client, bidding_strategy_id, budget_id):
    """Create a Search campaign attached to a shared bidding strategy and budget.

    Args:
      client: an initialized API client exposing GetService.
      bidding_strategy_id: ID of the shared bidding strategy to attach.
      budget_id: ID of the shared budget to attach.

    Returns:
      The newly created campaign object returned by the mutate call.
    """
    campaign_service = client.GetService('CampaignService', version='v201605')
    operand = {
        'name': 'Interplanetary Cruise #%s' % uuid.uuid4(),
        'budget': {'budgetId': budget_id},
        'biddingStrategyConfiguration': {'biddingStrategyId': bidding_strategy_id},
        'advertisingChannelType': 'SEARCH',
        'networkSetting': {
            'targetGoogleSearch': 'true',
            'targetSearchNetwork': 'true',
            'targetContentNetwork': 'true',
        },
    }
    response = campaign_service.mutate([{'operator': 'ADD', 'operand': operand}])
    new_campaign = response['value'][0]
    print ("Campaign with name '%s', ID '%s' and bidding scheme ID '%s' was created."
           % (new_campaign['name'], new_campaign['id'],
              new_campaign['biddingStrategyConfiguration']['biddingStrategyId']))
    return new_campaign
[ "def", "CreateCampaignWithBiddingStrategy", "(", "client", ",", "bidding_strategy_id", ",", "budget_id", ")", ":", "campaign_service", "=", "client", ".", "GetService", "(", "'CampaignService'", ",", "version", "=", "'v201605'", ")", "campaign", "=", "{", "'name'", ...
create a campaign with a shared bidding strategy .
train
true
35,251
def getScreenSizePix(screen=0):
    """Return [height, width] in pixels of the given screen.

    Returns False when ctypes could not be imported.
    """
    if importCtypesFailed:
        return False
    display_id = getScreen(screen)
    return [cocoa.CGDisplayPixelsHigh(display_id),
            cocoa.CGDisplayPixelsWide(display_id)]
[ "def", "getScreenSizePix", "(", "screen", "=", "0", ")", ":", "if", "importCtypesFailed", ":", "return", "False", "scrID", "=", "getScreen", "(", "screen", ")", "h", "=", "cocoa", ".", "CGDisplayPixelsHigh", "(", "scrID", ")", "w", "=", "cocoa", ".", "CG...
return the height and width of the given screen .
train
false
35,252
def iqr(a):
    """Compute the interquartile range (Q3 - Q1) of the data in *a*."""
    values = np.asarray(a)
    lower, upper = (stats.scoreatpercentile(values, p) for p in (25, 75))
    return upper - lower
[ "def", "iqr", "(", "a", ")", ":", "a", "=", "np", ".", "asarray", "(", "a", ")", "q1", "=", "stats", ".", "scoreatpercentile", "(", "a", ",", "25", ")", "q3", "=", "stats", ".", "scoreatpercentile", "(", "a", ",", "75", ")", "return", "(", "q3"...
compute the interquartile range of the data .
train
true
35,253
def avail_locations(call=None):
    """Return the available datacenter locations reported by the provider
    connection (queried via getCreateObjectOptions)."""
    if call == 'action':
        raise SaltCloudSystemExit('The avail_locations function must be called with -f or --function, or with the --list-locations option')
    conn = get_conn()
    options = conn.getCreateObjectOptions()
    locations = {}
    for entry in options['datacenters']:
        dc_name = entry['template']['datacenter']['name']
        locations[dc_name] = {'name': dc_name}
    return locations
[ "def", "avail_locations", "(", "call", "=", "None", ")", ":", "if", "(", "call", "==", "'action'", ")", ":", "raise", "SaltCloudSystemExit", "(", "'The avail_locations function must be called with -f or --function, or with the --list-locations option'", ")", "ret", "=", "...
return available softlayer datacenter locations .
train
true