id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
32,162
def correlation_row_generator(bt, pmf, category):
    """Yield (observation_row, category_vector) pairs for gradient tests.

    Raises ValueError when the mapping-file category values cannot be
    converted to float.
    """
    data = array(list(bt.iter_data(axis='observation')))
    try:
        cat_vect = array([pmf[sample][category] for sample in bt.ids()], dtype=float)
    except ValueError:
        raise ValueError("Mapping file category contained data that couldn't be converted to float. Can't continue.")
    # Generator creation itself cannot raise ValueError, so it is safe here.
    return ((row, cat_vect) for row in data)
[ "def", "correlation_row_generator", "(", "bt", ",", "pmf", ",", "category", ")", ":", "data", "=", "array", "(", "[", "i", "for", "i", "in", "bt", ".", "iter_data", "(", "axis", "=", "'observation'", ")", "]", ")", "try", ":", "cat_vect", "=", "array...
produce a generator that feeds lists of arrays to any gradient test .
train
false
32,163
def round_power_2(value):
    """Return *value* rounded up to the nearest power of 2 (0 maps to 0).

    Returns a float for non-zero input because math.pow returns float.
    """
    if value == 0:
        return 0
    exponent = math.ceil(math.log(value, 2))
    return math.pow(2, exponent)
[ "def", "round_power_2", "(", "value", ")", ":", "if", "(", "value", "==", "0", ")", ":", "return", "0", "return", "math", ".", "pow", "(", "2", ",", "math", ".", "ceil", "(", "math", ".", "log", "(", "value", ",", "2", ")", ")", ")" ]
return value rounded up to the nearest power of 2 .
train
false
32,164
def select_class(cache, selector):
    """Yield elements matching the parsed selector that also carry the class.

    Looks up the (case-folded) class name in the cache's class map, then
    filters the parsed-selector iteration down to that candidate set.
    """
    candidates = cache.class_map[ascii_lower(selector.class_name)]
    if not candidates:
        return
    for elem in cache.iterparsedselector(selector.selector):
        if elem in candidates:
            yield elem
[ "def", "select_class", "(", "cache", ",", "selector", ")", ":", "items", "=", "cache", ".", "class_map", "[", "ascii_lower", "(", "selector", ".", "class_name", ")", "]", "if", "items", ":", "for", "elem", "in", "cache", ".", "iterparsedselector", "(", "...
a class selector .
train
false
32,165
def build_properties(cls, new_attrs):
    """Install properties on *cls* from functions named like ``base__get``.

    For each entry in *new_attrs* whose name ends with ``__get``, ``__set``
    or ``__del``, merge it into any existing property named ``base`` (raising
    ValueError if ``base`` exists but is not a property) and set the result.
    """
    suffixes = ('__get', '__set', '__del')
    for name, value in new_attrs.items():
        if not name.endswith(suffixes):
            continue
        base = name[:-5]
        if hasattr(cls, base):
            old_prop = getattr(cls, base)
            if not isinstance(old_prop, property):
                raise ValueError('Attribute %s is a %s, not a property; function %s is named like a property' % (base, type(old_prop), name))
            # Start from the existing property's accessors so we only
            # override the one slot named by this suffix.
            attrs = {'fget': old_prop.fget, 'fset': old_prop.fset, 'fdel': old_prop.fdel, 'doc': old_prop.__doc__}
        else:
            attrs = {}
        attrs['f' + name[-3:]] = value
        if name.endswith('__get') and value.__doc__:
            attrs['doc'] = value.__doc__
        setattr(cls, base, property(**attrs))
[ "def", "build_properties", "(", "cls", ",", "new_attrs", ")", ":", "for", "(", "name", ",", "value", ")", "in", "new_attrs", ".", "items", "(", ")", ":", "if", "(", "name", ".", "endswith", "(", "'__get'", ")", "or", "name", ".", "endswith", "(", "...
given a class and a new set of attributes .
train
false
32,166
def assert_has_element_with_path(output, path):
    """Assert that *output* contains at least one XML element matching *path*.

    Raises AssertionError when ``xml_find`` finds no match.
    """
    if xml_find(output, path) is None:
        # Fixed grammar in the failure message ("not such" -> "no such").
        errmsg = ('Expected to find XML element matching expression %s, no such match was found.' % path)
        raise AssertionError(errmsg)
[ "def", "assert_has_element_with_path", "(", "output", ",", "path", ")", ":", "if", "(", "xml_find", "(", "output", ",", "path", ")", "is", "None", ")", ":", "errmsg", "=", "(", "'Expected to find XML element matching expression %s, not such match was found.'", "%", ...
asserts the specified output has at least one xml element with a path matching the specified path argument .
train
false
32,167
def collapse_unary(tree, collapsePOS=False, collapseRoot=False, joinChar='+'):
    """Collapse subtrees with a single Tree child into one node whose label
    joins the parent and child labels with *joinChar* (in-place mutation).

    POS-level nodes (whose grandchild is a leaf) are only collapsed when
    *collapsePOS* is true; the root is skipped unless *collapseRoot* is true.
    """
    if collapseRoot == False and isinstance(tree, Tree) and len(tree) == 1:
        worklist = [tree[0]]
    else:
        worklist = [tree]
    while worklist != []:
        node = worklist.pop()
        if not isinstance(node, Tree):
            continue
        collapsible = (len(node) == 1 and isinstance(node[0], Tree)
                       and (collapsePOS == True or isinstance(node[(0, 0)], Tree)))
        if collapsible:
            node.set_label(node.label() + joinChar + node[0].label())
            # Splice the grandchildren in place of the single child, then
            # revisit this node in case it is still unary.
            node[0:] = [child for child in node[0]]
            worklist.append(node)
        else:
            for child in node:
                worklist.append(child)
[ "def", "collapse_unary", "(", "tree", ",", "collapsePOS", "=", "False", ",", "collapseRoot", "=", "False", ",", "joinChar", "=", "'+'", ")", ":", "if", "(", "(", "collapseRoot", "==", "False", ")", "and", "isinstance", "(", "tree", ",", "Tree", ")", "a...
collapse subtrees with a single child into a new non-terminal joined by joinchar .
train
false
32,168
def _options_file_exists(name):
    """Return True when the 'options' file exists in the named port's options dir."""
    options_path = os.path.join(_options_dir(name), 'options')
    return os.path.isfile(options_path)
[ "def", "_options_file_exists", "(", "name", ")", ":", "return", "os", ".", "path", ".", "isfile", "(", "os", ".", "path", ".", "join", "(", "_options_dir", "(", "name", ")", ",", "'options'", ")", ")" ]
returns true/false based on whether or not the options file for the specified port exists .
train
true
32,169
def ipv4_address(addr):
    """Validate an IPv4 host address.

    Returns the stripped address (empty string passes through unchanged);
    raises CX for non-string input, malformed addresses, or netmask values.
    """
    if not isinstance(addr, basestring):
        raise CX('Invalid input, addr must be a string')
    addr = addr.strip()
    if addr == '':
        return addr
    if not netaddr.valid_ipv4(addr):
        raise CX('Invalid IPv4 address format (%s)' % addr)
    if netaddr.IPAddress(addr).is_netmask():
        raise CX('Invalid IPv4 host address (%s)' % addr)
    return addr
[ "def", "ipv4_address", "(", "addr", ")", ":", "if", "(", "not", "isinstance", "(", "addr", ",", "basestring", ")", ")", ":", "raise", "CX", "(", "'Invalid input, addr must be a string'", ")", "else", ":", "addr", "=", "addr", ".", "strip", "(", ")", "if"...
validate an ipv4 address .
train
false
32,170
def extract_unigram_feats(document, unigrams, handle_negation=False):
    """Populate a dictionary of unigram features.

    :param document: sequence of words/tokens to test membership against
    :param unigrams: iterable of unigram feature words
    :param handle_negation: when True, run ``mark_negation`` on the document
        first (mark_negation is defined elsewhere in this module)
    :return: dict mapping ``'contains(word)'`` to True/False
    """
    features = {}
    if handle_negation:
        document = mark_negation(document)
    # Build the membership set once. The original rebuilt set(document)
    # inside the loop, making this O(len(document) * len(unigrams)).
    document_words = set(document)
    for word in unigrams:
        features['contains({0})'.format(word)] = (word in document_words)
    return features
[ "def", "extract_unigram_feats", "(", "document", ",", "unigrams", ",", "handle_negation", "=", "False", ")", ":", "features", "=", "{", "}", "if", "handle_negation", ":", "document", "=", "mark_negation", "(", "document", ")", "for", "word", "in", "unigrams", ...
populate a dictionary of unigram features .
train
false
32,171
def detect_lines(diffstr):
    """Map each file in a unified-diff string to the list of changed target lines."""
    resultant_lines = {}
    patch = unidiff.PatchSet(StringIO(diffstr), 'utf-8')
    for patched_file in patch:
        touched = []
        for hunk in patched_file:
            touched.extend(range(hunk.target_start, hunk.target_start + hunk.target_length))
        resultant_lines[patched_file.path] = touched
    return resultant_lines
[ "def", "detect_lines", "(", "diffstr", ")", ":", "resultant_lines", "=", "{", "}", "io", "=", "StringIO", "(", "diffstr", ")", "encoding", "=", "'utf-8'", "udiff", "=", "unidiff", ".", "PatchSet", "(", "io", ",", "encoding", ")", "for", "file", "in", "...
take a diff string and return a dict of files with line numbers changed .
train
false
32,172
def _get_short_language_description(full_language_description): if (' (' not in full_language_description): return full_language_description else: ind = full_language_description.find(' (') return full_language_description[:ind]
[ "def", "_get_short_language_description", "(", "full_language_description", ")", ":", "if", "(", "' ('", "not", "in", "full_language_description", ")", ":", "return", "full_language_description", "else", ":", "ind", "=", "full_language_description", ".", "find", "(", ...
given one of the descriptions in feconf .
train
false
32,173
def gone():
    """Return a '410 Gone' HTML error response."""
    ctx.status = '410 Gone'
    header('Content-Type', 'text/html')
    return output('gone')
[ "def", "gone", "(", ")", ":", "ctx", ".", "status", "=", "'410 Gone'", "header", "(", "'Content-Type'", ",", "'text/html'", ")", "return", "output", "(", "'gone'", ")" ]
returns a 410 gone error .
train
false
32,174
def default_buffer_changed(default_buffer):
    """Mirror the default buffer's text, reversed, into the RESULT buffer."""
    reversed_text = buffers[DEFAULT_BUFFER].text[::-1]
    buffers[u'RESULT'].text = reversed_text
[ "def", "default_buffer_changed", "(", "default_buffer", ")", ":", "buffers", "[", "u'RESULT'", "]", ".", "text", "=", "buffers", "[", "DEFAULT_BUFFER", "]", ".", "text", "[", ":", ":", "(", "-", "1", ")", "]" ]
when the buffer on the left changes .
train
false
32,175
def _get_widget(request, module, widget_name):
    """Look up *widget_name* in the module's ``widgets`` submodule.

    Returns a preprocessed widget dict with module_name/module_title filled
    in, or an empty dict when the name is not found.
    """
    import_name = module.name + '.widgets'
    module_widget_lib = __import__(import_name, fromlist=[str(module.name)])
    module_widgets = module_widget_lib.get_widgets(request)
    widget = {}
    for name in module_widgets:
        if name != widget_name:
            continue
        widget = module_widgets[name]
        if 'module_name' not in widget:
            widget['module_name'] = module.name
        if 'module_title' not in widget:
            widget['module_title'] = module.title
        widget = _preprocess_widget(widget, widget_name)
        break
    return widget
[ "def", "_get_widget", "(", "request", ",", "module", ",", "widget_name", ")", ":", "import_name", "=", "(", "module", ".", "name", "+", "'.widgets'", ")", "module_widget_lib", "=", "__import__", "(", "import_name", ",", "fromlist", "=", "[", "str", "(", "m...
gets a widget by name .
train
false
32,178
def _get_action_profile(x, indptr): N = (len(indptr) - 1) action_profile = tuple((x[indptr[i]:indptr[(i + 1)]] for i in range(N))) return action_profile
[ "def", "_get_action_profile", "(", "x", ",", "indptr", ")", ":", "N", "=", "(", "len", "(", "indptr", ")", "-", "1", ")", "action_profile", "=", "tuple", "(", "(", "x", "[", "indptr", "[", "i", "]", ":", "indptr", "[", "(", "i", "+", "1", ")", ...
obtain a tuple of mixed actions from a flattened action profile .
train
true
32,180
def convert_keys_to_string(dictionary):
    """Recursively convert dict keys to strings; non-dict inputs pass through."""
    if not isinstance(dictionary, dict):
        return dictionary
    return {str(key): convert_keys_to_string(value) for key, value in dictionary.items()}
[ "def", "convert_keys_to_string", "(", "dictionary", ")", ":", "if", "(", "not", "isinstance", "(", "dictionary", ",", "dict", ")", ")", ":", "return", "dictionary", "return", "dict", "(", "(", "(", "str", "(", "k", ")", ",", "convert_keys_to_string", "(", ...
recursively converts dictionary keys to strings .
train
true
32,181
def get_dist_url(dist):
    """Return the distribution's homepage URL (its 'Home-page' metadata entry)."""
    return get_meta_entry(dist, 'Home-page')
[ "def", "get_dist_url", "(", "dist", ")", ":", "return", "get_meta_entry", "(", "dist", ",", "'Home-page'", ")" ]
return the url of the distribution .
train
false
32,182
# Context processor: adds account/tools navigation links, settings, and
# reviewer/admin flags to the template context for authenticated users;
# anonymous users only get an AnonymousUser in context['user'].
# NOTE(review): this source was flattened to one line, so the original
# indentation is lost; the nesting of the `links = [...]` block relative to
# the `is_developer` check cannot be recovered from this text alone. Left
# byte-identical rather than guessing the structure — confirm against the
# upstream context-processors module before reformatting.
def global_settings(request): account_links = [] tools_links = [] context = {} tools_title = _('Tools') is_reviewer = False if request.user.is_authenticated(): user = request.user profile = request.user is_reviewer = (acl.check_addons_reviewer(request) or acl.check_personas_reviewer(request)) account_links.append({'text': _('My Profile'), 'href': profile.get_url_path()}) if user.is_artist: account_links.append({'text': _('My Themes'), 'href': profile.get_user_url('themes')}) account_links.append({'text': _('Account Settings'), 'href': reverse('users.edit')}) account_links.append({'text': _('My Collections'), 'href': reverse('collections.user', args=[user.username])}) if user.favorite_addons: account_links.append({'text': _('My Favorites'), 'href': reverse('collections.detail', args=[user.username, 'favorites'])}) account_links.append({'text': _('Log out'), 'href': ((reverse('users.logout') + '?to=') + urlquote(request.path))}) if request.user.is_developer: tools_links.append({'text': _('Manage My Submissions'), 'href': reverse('devhub.addons')}) links = [{'text': _('Submit a New Add-on'), 'href': reverse('devhub.submit.agreement')}, {'text': _('Submit a New Theme'), 'href': reverse('devhub.themes.submit')}, {'text': _('Developer Hub'), 'href': reverse('devhub.index')}] links.append({'text': _('Manage API Keys'), 'href': reverse('devhub.api_key')}) tools_links += links if is_reviewer: tools_links.append({'text': _('Reviewer Tools'), 'href': reverse('editors.home')}) if (acl.action_allowed(request, 'Admin', '%') or acl.action_allowed(request, 'AdminTools', 'View')): tools_links.append({'text': _('Admin Tools'), 'href': reverse('zadmin.home')}) context['user'] = request.user else: context['user'] = AnonymousUser() context.update({'account_links': account_links, 'settings': settings, 'amo': amo, 'tools_links': tools_links, 'tools_title': tools_title, 'ADMIN_MESSAGE': get_config('site_notice'), 'is_reviewer': is_reviewer}) return context
[ "def", "global_settings", "(", "request", ")", ":", "account_links", "=", "[", "]", "tools_links", "=", "[", "]", "context", "=", "{", "}", "tools_title", "=", "_", "(", "'Tools'", ")", "is_reviewer", "=", "False", "if", "request", ".", "user", ".", "i...
adds settings to the context .
train
false
32,183
def set_weight(name, backend, weight=0, socket='/var/run/haproxy.sock'):
    """Set the weight of server *name* in *backend* via the HAProxy stats socket.

    :param name: server name
    :param backend: haproxy backend
    :param weight: weight to assign
    :param socket: haproxy stats socket path
    :return: the weight reported by ``get_weight`` after the change
    """
    ha_conn = _get_conn(socket)
    # BUG FIX: the original built haproxy.cmds.getWeight, which only queries
    # the current weight and never changes it; setWeight issues the change.
    ha_cmd = haproxy.cmds.setWeight(server=name, backend=backend, weight=weight)
    ha_conn.sendCmd(ha_cmd)
    return get_weight(name, backend, socket=socket)
[ "def", "set_weight", "(", "name", ",", "backend", ",", "weight", "=", "0", ",", "socket", "=", "'/var/run/haproxy.sock'", ")", ":", "ha_conn", "=", "_get_conn", "(", "socket", ")", "ha_cmd", "=", "haproxy", ".", "cmds", ".", "getWeight", "(", "server", "...
set server weight name server name backend haproxy backend weight server weight socket haproxy stats socket cli example: .
train
false
32,184
# Django-style login view: validates the authentication form on POST, sets a
# session cookie test, and redirects to a safe URL (falling back to
# settings.LOGIN_REDIRECT_URL) on success.
# NOTE(review): this source was flattened to one line; the nesting of
# `request.session.set_test_cookie()` and the trailing context/render code
# relative to the POST/else branches cannot be recovered from this text
# alone. Left byte-identical — confirm against the upstream view before
# reformatting.
@sensitive_post_parameters() @csrf_protect @never_cache def login(request, template_name='registration/login.html', redirect_field_name=REDIRECT_FIELD_NAME, authentication_form=AuthenticationForm, current_app=None, extra_context=None): redirect_to = request.REQUEST.get(redirect_field_name, '') if (request.method == 'POST'): form = authentication_form(data=request.POST) if form.is_valid(): if (not is_safe_url(url=redirect_to, host=request.get_host())): redirect_to = resolve_url(settings.LOGIN_REDIRECT_URL) auth_login(request, form.get_user()) if request.session.test_cookie_worked(): request.session.delete_test_cookie() return HttpResponseRedirect(redirect_to) else: form = authentication_form(request) request.session.set_test_cookie() current_site = get_current_site(request) context = {'form': form, redirect_field_name: redirect_to, 'site': current_site, 'site_name': current_site.name} if (extra_context is not None): context.update(extra_context) return TemplateResponse(request, template_name, context, current_app=current_app)
[ "@", "sensitive_post_parameters", "(", ")", "@", "csrf_protect", "@", "never_cache", "def", "login", "(", "request", ",", "template_name", "=", "'registration/login.html'", ",", "redirect_field_name", "=", "REDIRECT_FIELD_NAME", ",", "authentication_form", "=", "Authent...
persist a user id and a backend in the request .
train
false
32,186
def _hc_cut(n_clusters, children, n_leaves):
    """Cut a ward tree into *n_clusters* clusters and label the leaves.

    Maintains a max-heap (via negated node ids) of the tree nodes that form
    the current cut, splitting the highest node until enough clusters exist.
    """
    if n_clusters > n_leaves:
        raise ValueError('Cannot extract more clusters than samples: %s clusters where given for a tree with %s leaves.' % (n_clusters, n_leaves))
    # Seed with the root node (negated so heapq behaves as a max-heap).
    nodes = [-(max(children[-1]) + 1)]
    for _ in xrange(n_clusters - 1):
        these_children = children[-nodes[0] - n_leaves]
        # Replace the split node by its two children.
        heappush(nodes, -these_children[0])
        heappushpop(nodes, -these_children[1])
    label = np.zeros(n_leaves, dtype=np.intp)
    for i, node in enumerate(nodes):
        label[_hierarchical._hc_get_descendent(-node, children, n_leaves)] = i
    return label
[ "def", "_hc_cut", "(", "n_clusters", ",", "children", ",", "n_leaves", ")", ":", "if", "(", "n_clusters", ">", "n_leaves", ")", ":", "raise", "ValueError", "(", "(", "'Cannot extract more clusters than samples: %s clusters where given for a tree with %s leaves.'", "%", ...
function cutting the ward tree for a given number of clusters .
train
false
32,187
@decorators.memoize
def _get_version():
    """Return the xbps version as a list of numeric components, or False
    when the version string is missing or unparseable."""
    xpath = _check_xbps()
    version_string = __salt__['cmd.run']('{0} --version'.format(xpath), output_loglevel='trace')
    if version_string is None:
        return False
    version_match = re.compile('(?:XBPS:[\\s]+)([\\d.]+)(?:[\\s]+.*)').search(version_string)
    if not version_match:
        return False
    return version_match.group(1).split('.')
[ "@", "decorators", ".", "memoize", "def", "_get_version", "(", ")", ":", "xpath", "=", "_check_xbps", "(", ")", "version_string", "=", "__salt__", "[", "'cmd.run'", "]", "(", "'{0} --version'", ".", "format", "(", "xpath", ")", ",", "output_loglevel", "=", ...
get the xbps version .
train
false
32,188
def _send_inventory(response, resource_provider, inventory, status=200):
    """Write one inventory as JSON onto *response* and return the response."""
    serialized = _serialize_inventory(inventory, generation=resource_provider.generation)
    response.status = status
    response.body = encodeutils.to_utf8(jsonutils.dumps(serialized))
    response.content_type = 'application/json'
    return response
[ "def", "_send_inventory", "(", "response", ",", "resource_provider", ",", "inventory", ",", "status", "=", "200", ")", ":", "response", ".", "status", "=", "status", "response", ".", "body", "=", "encodeutils", ".", "to_utf8", "(", "jsonutils", ".", "dumps",...
send a json representation of one single inventory .
train
false
32,189
@not_implemented_for('directed')
@not_implemented_for('multigraph')
def cn_soundarajan_hopcroft(G, ebunch=None, community='community'):
    """Score node pairs by common-neighbor count, adding a bonus for common
    neighbors that share the pair's community (Soundarajan-Hopcroft)."""
    def predict(u, v):
        Cu = _community(G, u, community)
        Cv = _community(G, v, community)
        cnbors = list(nx.common_neighbors(G, u, v))
        if Cu == Cv:
            same_community = sum(_community(G, w, community) == Cu for w in cnbors)
        else:
            same_community = 0
        return len(cnbors) + same_community
    return _apply_prediction(G, predict, ebunch)
[ "@", "not_implemented_for", "(", "'directed'", ")", "@", "not_implemented_for", "(", "'multigraph'", ")", "def", "cn_soundarajan_hopcroft", "(", "G", ",", "ebunch", "=", "None", ",", "community", "=", "'community'", ")", ":", "def", "predict", "(", "u", ",", ...
count the number of common neighbors of all node pairs in ebunch using community information .
train
false
32,190
def get_link_path(target, base):
    """Return a relative URL to *target* from *base*; absolute paths get a
    'file:' prefix."""
    path = _get_link_path(target, base)
    url = path_to_url(path)
    return 'file:' + url if os.path.isabs(path) else url
[ "def", "get_link_path", "(", "target", ",", "base", ")", ":", "path", "=", "_get_link_path", "(", "target", ",", "base", ")", "url", "=", "path_to_url", "(", "path", ")", "if", "os", ".", "path", ".", "isabs", "(", "path", ")", ":", "url", "=", "("...
returns a relative path to target from base .
train
false
32,191
def conflicts(fstruct1, fstruct2, trace=0):
    """Return the feature paths assigned incompatible values by the two
    structures, collected via unify's failure callback."""
    conflict_list = []
    def record_conflict(fval1, fval2, path):
        # Record the conflicting path and keep the first value so that
        # unification can proceed and find further conflicts.
        conflict_list.append(path)
        return fval1
    unify(fstruct1, fstruct2, fail=record_conflict, trace=trace)
    return conflict_list
[ "def", "conflicts", "(", "fstruct1", ",", "fstruct2", ",", "trace", "=", "0", ")", ":", "conflict_list", "=", "[", "]", "def", "add_conflict", "(", "fval1", ",", "fval2", ",", "path", ")", ":", "conflict_list", ".", "append", "(", "path", ")", "return"...
return a list of the feature paths of all features which are assigned incompatible values by fstruct1 and fstruct2 .
train
false
32,195
def connect_to_cloud_cdn(region=None):
    """Create a client for the cloud CDN endpoint in *region*."""
    return _create_client(ep_name='cdn', region=region)
[ "def", "connect_to_cloud_cdn", "(", "region", "=", "None", ")", ":", "return", "_create_client", "(", "ep_name", "=", "'cdn'", ",", "region", "=", "region", ")" ]
creates a client for working with cloud cdn .
train
false
32,196
def _dup_decompose(f, K):
    """Helper for dup_decompose: try each divisor degree s of deg(f) and
    return a functional decomposition (g, h) with f = g(h), or None."""
    degree = len(f) - 1
    for s in range(2, degree):
        if degree % s:
            continue
        h = _dup_right_decompose(f, s, K)
        if h is None:
            continue
        g = _dup_left_decompose(f, h, K)
        if g is not None:
            return (g, h)
    return None
[ "def", "_dup_decompose", "(", "f", ",", "K", ")", ":", "df", "=", "(", "len", "(", "f", ")", "-", "1", ")", "for", "s", "in", "range", "(", "2", ",", "df", ")", ":", "if", "(", "(", "df", "%", "s", ")", "!=", "0", ")", ":", "continue", ...
helper function for :func:dup_decompose .
train
false
32,197
def check_rfc_1918(cidr):
    """Return True when *cidr* falls inside an RFC 1918 private range."""
    network = ipaddr.IPNetwork(cidr)
    for private_block in ('10.0.0.0/8', '172.16.0.0/12', '192.168.0.0/16'):
        if network in ipaddr.IPNetwork(private_block):
            return True
    return False
[ "def", "check_rfc_1918", "(", "cidr", ")", ":", "if", "(", "ipaddr", ".", "IPNetwork", "(", "cidr", ")", "in", "ipaddr", ".", "IPNetwork", "(", "'10.0.0.0/8'", ")", ")", ":", "return", "True", "if", "(", "ipaddr", ".", "IPNetwork", "(", "cidr", ")", ...
ec2-classic sgs should never use rfc-1918 cidrs .
train
false
32,198
# Sign a data file with an ECDSA private key (deterministic SHA-256
# signature, self-verified before writing), then write content + a zero
# version word + the signature either appended to the input file or into a
# fresh output file.
# NOTE(review): this source was flattened to one line; whether
# `outfile.write(binary_content)` belongs only to the new-output-file branch
# cannot be recovered from this text. Left byte-identical — confirm against
# the upstream tool before reformatting.
def sign_data(args): sk = _load_key(args) binary_content = args.datafile.read() signature = sk.sign_deterministic(binary_content, hashlib.sha256) vk = sk.get_verifying_key() vk.verify(signature, binary_content, hashlib.sha256) if ((args.output is None) or (os.path.abspath(args.output) == os.path.abspath(args.datafile.name))): args.datafile.close() outfile = open(args.datafile.name, 'ab') else: outfile = open(args.output, 'wb') outfile.write(binary_content) outfile.write(struct.pack('I', 0)) outfile.write(signature) outfile.close() print ('Signed %d bytes of data from %s with key %s' % (len(binary_content), args.datafile.name, args.keyfile.name))
[ "def", "sign_data", "(", "args", ")", ":", "sk", "=", "_load_key", "(", "args", ")", "binary_content", "=", "args", ".", "datafile", ".", "read", "(", ")", "signature", "=", "sk", ".", "sign_deterministic", "(", "binary_content", ",", "hashlib", ".", "sh...
sign a data file with a ecdsa private key .
train
true
32,199
# Run the updater object's Install() in a retry loop, accumulating a comment
# log; returns (comment, success_flag, remaining_retries).
# NOTE(review): the literal ' DCTB ' sequences in the failure message look
# like a dataset extraction artifact for tab characters — confirm against
# the upstream source before touching the string. Source was flattened to
# one line, so indentation of the retry/else branches is not recoverable
# here; left byte-identical.
def _install(quidditch, retries=5): passed = False clean = True comment = '' while (not passed): log.debug('download_collection is this long: {0}'.format(str(quidditch.install_collection.Count))) log.debug('Installing. tries left: {0}'.format(str(retries))) passed = quidditch.Install() log.info('Done installing: {0}'.format(str(passed))) if isinstance(passed, Exception): clean = False comment += 'Failed while trying to install the updates.\n DCTB DCTB {0}\n'.format(str(passed)) retries -= 1 if retries: comment += '{0} tries to go. retrying\n'.format(str(retries)) passed = False else: comment += 'out of retries. this update round failed.\n' return (comment, False, retries) if clean: comment += 'Install was done without error.\n' return (comment, True, retries)
[ "def", "_install", "(", "quidditch", ",", "retries", "=", "5", ")", ":", "passed", "=", "False", "clean", "=", "True", "comment", "=", "''", "while", "(", "not", "passed", ")", ":", "log", ".", "debug", "(", "'download_collection is this long: {0}'", ".", ...
install the updates in the given collection, retrying on failure .
train
false
32,202
def align_to_mmap(num, round_up):
    """Align *num* to the closest mmap allocation-granularity boundary.

    :param num: integer offset to align
    :param round_up: when True and *num* is not already aligned, align to the
        next higher boundary; otherwise align downwards
    :return: aligned integer offset
    """
    # Use floor division: the original used '/', which under Python 3
    # division semantics yields a float (and a wrong, fractional result).
    res = (num // ALLOCATIONGRANULARITY) * ALLOCATIONGRANULARITY
    if round_up and res != num:
        res += ALLOCATIONGRANULARITY
    return res
[ "def", "align_to_mmap", "(", "num", ",", "round_up", ")", ":", "res", "=", "(", "(", "num", "/", "ALLOCATIONGRANULARITY", ")", "*", "ALLOCATIONGRANULARITY", ")", "if", "(", "round_up", "and", "(", "res", "!=", "num", ")", ")", ":", "res", "+=", "ALLOCA...
align the given integer number to the closest page offset .
train
true
32,203
def get_hiveserver2_kerberos_principal(hostname_or_ip):
    """Return the Kerberos principal for HiveServer2, or None when the
    principal is not configured."""
    fqdn = security_util.get_fqdn(hostname_or_ip)
    principal = get_conf().get(_CNF_HIVESERVER2_KERBEROS_PRINCIPAL, None)
    if not principal:
        return None
    return security_util.get_kerberos_principal(principal, fqdn)
[ "def", "get_hiveserver2_kerberos_principal", "(", "hostname_or_ip", ")", ":", "fqdn", "=", "security_util", ".", "get_fqdn", "(", "hostname_or_ip", ")", "principal", "=", "get_conf", "(", ")", ".", "get", "(", "_CNF_HIVESERVER2_KERBEROS_PRINCIPAL", ",", "None", ")",...
retrieves principal for hiveserver 2 .
train
false
32,206
def check_command(commandline):
    """Return warnings suggesting Ansible modules (or become directives) in
    place of the raw shell command in *commandline*."""
    # Commands better expressed as the `file` module with the given argument.
    file_module_args = {
        'ln': 'state=link', 'mkdir': 'state=directory',
        'rmdir': 'state=absent', 'rm': 'state=absent', 'touch': 'state=touch',
        'chown': 'owner', 'chmod': 'mode', 'chgrp': 'group',
    }
    # Commands with a dedicated module equivalent.
    module_equivalents = {
        'curl': 'get_url or uri', 'wget': 'get_url or uri',
        'git': 'git', 'hg': 'hg', 'svn': 'subversion',
        'service': 'service', 'mount': 'mount',
        'rpm': 'yum, dnf or zypper', 'yum': 'yum', 'dnf': 'dnf',
        'zypper': 'zypper', 'apt-get': 'apt',
        'tar': 'unarchive', 'unzip': 'unarchive',
        'sed': 'template or lineinfile', 'rsync': 'synchronize',
    }
    become_methods = ['sudo', 'su', 'pbrun', 'pfexec', 'runas']
    warnings = list()
    command = os.path.basename(commandline.split()[0])
    if command in file_module_args:
        warnings.append('Consider using file module with {0} rather than running {1}'.format(file_module_args[command], command))
    if command in module_equivalents:
        warnings.append('Consider using {0} module rather than running {1}'.format(module_equivalents[command], command))
    if command in become_methods:
        warnings.append("Consider using 'become', 'become_method', and 'become_user' rather than running {0}".format(command))
    return warnings
[ "def", "check_command", "(", "commandline", ")", ":", "arguments", "=", "{", "'chown'", ":", "'owner'", ",", "'chmod'", ":", "'mode'", ",", "'chgrp'", ":", "'group'", ",", "'ln'", ":", "'state=link'", ",", "'mkdir'", ":", "'state=directory'", ",", "'rmdir'",...
warn when a shell command could be replaced by an ansible module or a become directive .
train
false
32,207
def parse_and_save_options(option_parser, args):
    """Map each option dest to the raw values parsed from *args*, without
    disturbing the parser's current values (they are saved and restored)."""
    arg_map = defaultdict(list)
    saved_values = option_parser.values
    try:
        option_parser.values = option_parser.get_default_values()
        for dest, value in _args_for_opt_dest_subset(option_parser, args, None):
            arg_map[dest].append(value)
    finally:
        option_parser.values = saved_values
    return arg_map
[ "def", "parse_and_save_options", "(", "option_parser", ",", "args", ")", ":", "arg_map", "=", "defaultdict", "(", "list", ")", "real_values", "=", "option_parser", ".", "values", "try", ":", "option_parser", ".", "values", "=", "option_parser", ".", "get_default...
duplicate behavior of :py:class:optionparser .
train
false
32,209
def _from_hass_temperature(temperature): return ((temperature - 154) / 346)
[ "def", "_from_hass_temperature", "(", "temperature", ")", ":", "return", "(", "(", "temperature", "-", "154", ")", "/", "346", ")" ]
convert home assistant color temperature units to percentage .
train
false
32,211
def init_update_task(parent, runtask, model):
    """Kick off a background task that refreshes the model's status
    (including the index) so startup stays fast."""
    def refresh():
        model.update_status(update_index=True)
    runtask.start(qtutils.SimpleTask(parent, refresh))
[ "def", "init_update_task", "(", "parent", ",", "runtask", ",", "model", ")", ":", "def", "update_status", "(", ")", ":", "model", ".", "update_status", "(", "update_index", "=", "True", ")", "task", "=", "qtutils", ".", "SimpleTask", "(", "parent", ",", ...
update the model in the background ; git-cola should start up as quickly as possible .
train
false
32,212
@when(u'we refresh completions')
def step_refresh_completions(context):
    """Send the refresh command to the CLI under test."""
    context.cli.sendline(u'\\refresh')
[ "@", "when", "(", "u'we refresh completions'", ")", "def", "step_refresh_completions", "(", "context", ")", ":", "context", ".", "cli", ".", "sendline", "(", "u'\\\\refresh'", ")" ]
send refresh command .
train
false
32,213
# Decorator factory: caches a function's result under keyfunc(*args,
# **kwargs), records hit/miss metrics via statsd, and stores misses with
# cache_set. Cached values are stored/read as a 1-tuple-like container
# (val[0] is returned on a hit).
# NOTE(review): flattened single-line source; left byte-identical because
# the triple-nested closure and the exact ordering of cache-get, metric
# emission, and cache-set are behavior-sensitive and the original
# indentation is not recoverable from this text.
def cache_with_key(keyfunc, cache_name=None, timeout=None, with_statsd_key=None): def decorator(func): @wraps(func) def func_with_caching(*args, **kwargs): key = keyfunc(*args, **kwargs) val = cache_get(key, cache_name=cache_name) extra = '' if (cache_name == 'database'): extra = '.dbcache' if (with_statsd_key is not None): metric_key = with_statsd_key else: metric_key = statsd_key(key) status = ('hit' if (val is not None) else 'miss') statsd.incr(('cache%s.%s.%s' % (extra, metric_key, status))) if (val is not None): return val[0] val = func(*args, **kwargs) cache_set(key, val, cache_name=cache_name, timeout=timeout) return val return func_with_caching return decorator
[ "def", "cache_with_key", "(", "keyfunc", ",", "cache_name", "=", "None", ",", "timeout", "=", "None", ",", "with_statsd_key", "=", "None", ")", ":", "def", "decorator", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "func_with_caching", "(...
decorator which applies django caching to a function .
train
false
32,214
def remove_capability(capability, image=None, restart=False):
    """Uninstall a Windows capability via DISM (Windows 10+ only).

    :param capability: capability name to remove
    :param image: path to an offline Windows image root, or None for /Online
    :param restart: allow DISM to restart when needed
    :return: result dict from ``cmd.run_all``
    """
    if salt.utils.version_cmp(__grains__['osversion'], '10') == -1:
        raise NotImplementedError('`uninstall_capability` is not available on this version of Windows: {0}'.format(__grains__['osversion']))
    target = '/Image:{0}'.format(image) if image else '/Online'
    cmd = ['DISM', '/Quiet', target, '/Remove-Capability', '/CapabilityName:{0}'.format(capability)]
    if not restart:
        cmd.append('/NoRestart')
    return __salt__['cmd.run_all'](cmd)
[ "def", "remove_capability", "(", "capability", ",", "image", "=", "None", ",", "restart", "=", "False", ")", ":", "if", "(", "salt", ".", "utils", ".", "version_cmp", "(", "__grains__", "[", "'osversion'", "]", ",", "'10'", ")", "==", "(", "-", "1", ...
uninstall a capability args: capability: the capability to be removed image : the path to the root directory of an offline windows image .
train
true
32,215
def referer(pattern, accept=True, accept_missing=False, error=403, message='Forbidden Referer header.', debug=False):
    """Raise HTTPError unless the Referer header's match against *pattern*
    equals *accept*; a missing header passes only when *accept_missing*."""
    try:
        ref = cherrypy.serving.request.headers['Referer']
    except KeyError:
        if debug:
            cherrypy.log('No Referer header', 'TOOLS.REFERER')
        if accept_missing:
            return
    else:
        match = bool(re.match(pattern, ref))
        if debug:
            cherrypy.log('Referer %r matches %r' % (ref, pattern), 'TOOLS.REFERER')
        if accept == match:
            return
    raise cherrypy.HTTPError(error, message)
[ "def", "referer", "(", "pattern", ",", "accept", "=", "True", ",", "accept_missing", "=", "False", ",", "error", "=", "403", ",", "message", "=", "'Forbidden Referer header.'", ",", "debug", "=", "False", ")", ":", "try", ":", "ref", "=", "cherrypy", "."...
raise httperror if referer header does/does not match the given pattern .
train
false
32,216
def get_perpendicular_vector(v):
    """Return an arbitrary vector perpendicular to *v* (dot product zero).

    A zero vector is logged as an error (and then falls through to the
    axis shortcuts below, as in the original).
    """
    if v[0] == 0 and v[1] == 0 and v[2] == 0:
        logger.error('zero-vector')
    # Any zero component lets us answer with the matching unit vector.
    for axis, unit in ((0, (1, 0, 0)), (1, (0, 1, 0)), (2, (0, 0, 1))):
        if v[axis] == 0:
            return np.array(unit)
    # General case: choose (1, 1, z) with z solving the dot product to zero.
    return np.array([1, 1, (-1.0 * (v[0] + v[1])) / v[2]])
[ "def", "get_perpendicular_vector", "(", "v", ")", ":", "if", "(", "v", "[", "0", "]", "==", "v", "[", "1", "]", "==", "v", "[", "2", "]", "==", "0", ")", ":", "logger", ".", "error", "(", "'zero-vector'", ")", "if", "(", "v", "[", "0", "]", ...
finds an arbitrary perpendicular vector to *v* .
train
false
32,218
def proxy_from_url(url):
    """Create a QNetworkProxy from a QUrl (or return the PAC fetcher class
    for pac+http/pac+https schemes)."""
    if not url.isValid():
        raise InvalidUrlError(url)
    scheme = url.scheme()
    if scheme in ['pac+http', 'pac+https']:
        return pac.PACFetcher
    types = {
        'http': QNetworkProxy.HttpProxy,
        'socks': QNetworkProxy.Socks5Proxy,
        'socks5': QNetworkProxy.Socks5Proxy,
        'direct': QNetworkProxy.NoProxy,
    }
    proxy_type = types.get(scheme)
    if proxy_type is None:
        raise InvalidProxyTypeError(scheme)
    proxy = QNetworkProxy(proxy_type, url.host())
    if url.port() != -1:
        proxy.setPort(url.port())
    if url.userName():
        proxy.setUser(url.userName())
    if url.password():
        proxy.setPassword(url.password())
    return proxy
[ "def", "proxy_from_url", "(", "url", ")", ":", "if", "(", "not", "url", ".", "isValid", "(", ")", ")", ":", "raise", "InvalidUrlError", "(", "url", ")", "scheme", "=", "url", ".", "scheme", "(", ")", "if", "(", "scheme", "in", "[", "'pac+http'", ",...
create a qnetworkproxy from qurl and a proxy type .
train
false
32,219
def is_edit_mode(request):
    """True when the requester may edit and the session carries the edit flag."""
    return bool(could_edit(request) and request.session.get(EDIT_FLAG_NAME))
[ "def", "is_edit_mode", "(", "request", ")", ":", "return", "bool", "(", "(", "could_edit", "(", "request", ")", "and", "request", ".", "session", ".", "get", "(", "EDIT_FLAG_NAME", ")", ")", ")" ]
return true if the given request has xtheme editing enabled .
train
false
32,220
def check_sudo():
    """Check for root (POSIX) or Administrator (Windows) privileges.

    Returns True/False, or None when the check cannot be performed.
    """
    check = None
    if not subprocess.mswindows:
        # POSIX: effective UID 0 means root.
        if getattr(os, 'geteuid'):
            check = (os.geteuid() == 0)
    else:
        # Windows: ask the shell whether the user is an administrator.
        # BUG FIX: in the flattened original this branch was nested under the
        # non-Windows path, where ctypes.windll can never exist and the
        # Windows case always returned None; the `else` belongs to the
        # platform check.
        import ctypes
        check = ctypes.windll.shell32.IsUserAnAdmin()
    return check
[ "def", "check_sudo", "(", ")", ":", "check", "=", "None", "if", "(", "not", "subprocess", ".", "mswindows", ")", ":", "if", "getattr", "(", "os", ",", "'geteuid'", ")", ":", "check", "=", "(", "os", ".", "geteuid", "(", ")", "==", "0", ")", "else...
checks for sudo/administrator privileges .
train
false
32,222
def sparse_block_dot(W, h, inputIdx, b, outputIdx): assert (inputIdx.ndim == (h.ndim - 1)) assert (outputIdx.ndim == inputIdx.ndim) if (h.ndim == 2): h = h.dimshuffle('x', 0, 1) inputIdx = inputIdx.dimshuffle('x', 0) outputIdx = outputIdx.dimshuffle('x', 0) return SparseBlockGemv()(b.take(outputIdx, axis=0), W, h, inputIdx, outputIdx)
[ "def", "sparse_block_dot", "(", "W", ",", "h", ",", "inputIdx", ",", "b", ",", "outputIdx", ")", ":", "assert", "(", "inputIdx", ".", "ndim", "==", "(", "h", ".", "ndim", "-", "1", ")", ")", "assert", "(", "outputIdx", ".", "ndim", "==", "inputIdx"...
compute the dot product of the specified pieces of vectors and matrices .
train
false
32,223
@ssl_login_shortcut @ensure_csrf_cookie @xframe_options_deny def login_page(request): csrf_token = csrf(request)['csrf_token'] if (settings.FEATURES['AUTH_USE_CERTIFICATES'] and ssl_get_cert_from_request(request)): next_url = request.GET.get('next') if next_url: return redirect(next_url) else: return redirect('/course/') if settings.FEATURES.get('AUTH_USE_CAS'): return redirect(reverse('cas-login')) return render_to_response('login.html', {'csrf': csrf_token, 'forgot_password_link': '//{base}/login#forgot-password-modal'.format(base=settings.LMS_BASE), 'platform_name': configuration_helpers.get_value('platform_name', settings.PLATFORM_NAME)})
[ "@", "ssl_login_shortcut", "@", "ensure_csrf_cookie", "@", "xframe_options_deny", "def", "login_page", "(", "request", ")", ":", "csrf_token", "=", "csrf", "(", "request", ")", "[", "'csrf_token'", "]", "if", "(", "settings", ".", "FEATURES", "[", "'AUTH_USE_CER...
display the login form .
train
false
32,224
def p_command_gosub_bad(p): p[0] = 'INVALID LINE NUMBER IN GOSUB'
[ "def", "p_command_gosub_bad", "(", "p", ")", ":", "p", "[", "0", "]", "=", "'INVALID LINE NUMBER IN GOSUB'" ]
command : gosub error .
train
false
32,229
def datetime_f(dttm): if dttm: dttm = dttm.isoformat() now_iso = datetime.now().isoformat() if (now_iso[:10] == dttm[:10]): dttm = dttm[11:] elif (now_iso[:4] == dttm[:4]): dttm = dttm[5:] return u'<nobr>{}</nobr>'.format(dttm)
[ "def", "datetime_f", "(", "dttm", ")", ":", "if", "dttm", ":", "dttm", "=", "dttm", ".", "isoformat", "(", ")", "now_iso", "=", "datetime", ".", "now", "(", ")", ".", "isoformat", "(", ")", "if", "(", "now_iso", "[", ":", "10", "]", "==", "dttm",...
formats datetime to take less room when it is recent .
train
true
32,231
@cli.command() @click.argument('url') def locate(url): click.launch(url, locate=True)
[ "@", "cli", ".", "command", "(", ")", "@", "click", ".", "argument", "(", "'url'", ")", "def", "locate", "(", "url", ")", ":", "click", ".", "launch", "(", "url", ",", "locate", "=", "True", ")" ]
locate an object by name or dotted path .
train
false
32,233
def _list_from_statespec(stuple): nval = [] for val in stuple: typename = getattr(val, 'typename', None) if (typename is None): nval.append(val) else: val = str(val) if (typename == 'StateSpec'): val = val.split() nval.append(val) it = iter(nval) return [_flatten(spec) for spec in zip(it, it)]
[ "def", "_list_from_statespec", "(", "stuple", ")", ":", "nval", "=", "[", "]", "for", "val", "in", "stuple", ":", "typename", "=", "getattr", "(", "val", ",", "'typename'", ",", "None", ")", "if", "(", "typename", "is", "None", ")", ":", "nval", ".",...
construct a list from the given statespec tuple according to the accepted statespec accepted by _format_mapdict .
train
false
32,234
def _set_prctl_name(process_name): libc = ctypes.CDLL(ctypes.util.find_library('c')) name_buffer = ctypes.create_string_buffer((len(process_name) + 1)) name_buffer.value = stem.util.str_tools._to_bytes(process_name) libc.prctl(PR_SET_NAME, ctypes.byref(name_buffer), 0, 0, 0)
[ "def", "_set_prctl_name", "(", "process_name", ")", ":", "libc", "=", "ctypes", ".", "CDLL", "(", "ctypes", ".", "util", ".", "find_library", "(", "'c'", ")", ")", "name_buffer", "=", "ctypes", ".", "create_string_buffer", "(", "(", "len", "(", "process_na...
sets the prctl name .
train
false
32,236
def GetRegistryDefaultValue(subkey, rootkey=None): if (rootkey is None): rootkey = GetRootKey() return win32api.RegQueryValue(rootkey, subkey)
[ "def", "GetRegistryDefaultValue", "(", "subkey", ",", "rootkey", "=", "None", ")", ":", "if", "(", "rootkey", "is", "None", ")", ":", "rootkey", "=", "GetRootKey", "(", ")", "return", "win32api", ".", "RegQueryValue", "(", "rootkey", ",", "subkey", ")" ]
a helper to return the default value for a key in the registry .
train
false
32,237
def any_reviewer_required(f): @functools.wraps(f) def wrapper(request, *args, **kw): try: return addons_reviewer_required(f)(request, *args, **kw) except PermissionDenied: return personas_reviewer_required(f)(request, *args, **kw) return wrapper
[ "def", "any_reviewer_required", "(", "f", ")", ":", "@", "functools", ".", "wraps", "(", "f", ")", "def", "wrapper", "(", "request", ",", "*", "args", ",", "**", "kw", ")", ":", "try", ":", "return", "addons_reviewer_required", "(", "f", ")", "(", "r...
require an addons or personas reviewer .
train
false
32,238
def _split_multiline_prompt(get_prompt_tokens): def has_before_tokens(cli): for (token, char) in get_prompt_tokens(cli): if (u'\n' in char): return True return False def before(cli): result = [] found_nl = False for (token, char) in reversed(explode_tokens(get_prompt_tokens(cli))): if found_nl: result.insert(0, (token, char)) elif (char == u'\n'): found_nl = True return result def first_input_line(cli): result = [] for (token, char) in reversed(explode_tokens(get_prompt_tokens(cli))): if (char == u'\n'): break else: result.insert(0, (token, char)) return result return (has_before_tokens, before, first_input_line)
[ "def", "_split_multiline_prompt", "(", "get_prompt_tokens", ")", ":", "def", "has_before_tokens", "(", "cli", ")", ":", "for", "(", "token", ",", "char", ")", "in", "get_prompt_tokens", "(", "cli", ")", ":", "if", "(", "u'\\n'", "in", "char", ")", ":", "...
take a get_prompt_tokens function and return three new functions instead .
train
true
32,239
def min_party(name, zk_hosts, min_nodes, blocking=False): ret = {'name': name, 'changes': {}, 'result': False, 'comment': ''} if __opts__['test']: ret['result'] = None ret['comment'] = 'Attempt to ensure min_party' return ret nodes = __salt__['zk_concurrency.party_members'](name, zk_hosts, min_nodes, blocking=blocking) if (not isinstance(nodes, list)): raise Exception('Error from zk_concurrency.party_members, return was not a list: {0}'.format(nodes)) num_nodes = len(nodes) if ((num_nodes >= min_nodes) or blocking): ret['result'] = (None if __opts__['test'] else True) if (not blocking): ret['comment'] = 'Currently {0} nodes, which is >= {1}'.format(num_nodes, min_nodes) else: ret['comment'] = 'Blocked until {0} nodes were available. Unblocked after {1} nodes became available'.format(min_nodes, num_nodes) else: ret['result'] = False ret['comment'] = 'Currently {0} nodes, which is < {1}'.format(num_nodes, min_nodes) return ret
[ "def", "min_party", "(", "name", ",", "zk_hosts", ",", "min_nodes", ",", "blocking", "=", "False", ")", ":", "ret", "=", "{", "'name'", ":", "name", ",", "'changes'", ":", "{", "}", ",", "'result'", ":", "False", ",", "'comment'", ":", "''", "}", "...
ensure that there are min_nodes in the party at name .
train
false
32,240
def registerAsIOThread(): global ioThread ioThread = getThreadID()
[ "def", "registerAsIOThread", "(", ")", ":", "global", "ioThread", "ioThread", "=", "getThreadID", "(", ")" ]
mark the current thread as responsable for i/o requests .
train
false
32,242
def calcFisherInformation(sigma, invSigma=None, factorSigma=None): if (invSigma == None): invSigma = inv(sigma) if (factorSigma == None): factorSigma = cholesky(sigma) dim = sigma.shape[0] fim = [invSigma] for k in range(dim): D = invSigma[k:, k:].copy() D[(0, 0)] += (factorSigma[(k, k)] ** (-2)) fim.append(D) return fim
[ "def", "calcFisherInformation", "(", "sigma", ",", "invSigma", "=", "None", ",", "factorSigma", "=", "None", ")", ":", "if", "(", "invSigma", "==", "None", ")", ":", "invSigma", "=", "inv", "(", "sigma", ")", "if", "(", "factorSigma", "==", "None", ")"...
compute the exact fisher information matrix of a gaussian distribution .
train
false
32,243
def get_verifier(context, img_signature_certificate_uuid, img_signature_hash_method, img_signature, img_signature_key_type): image_meta_props = {'img_signature_uuid': img_signature_certificate_uuid, 'img_signature_hash_method': img_signature_hash_method, 'img_signature': img_signature, 'img_signature_key_type': img_signature_key_type} for key in image_meta_props.keys(): if (image_meta_props[key] is None): raise exception.SignatureVerificationError(reason=(_('Required image properties for signature verification do not exist. Cannot verify signature. Missing property: %s') % key)) signature = get_signature(img_signature) hash_method = get_hash_method(img_signature_hash_method) signature_key_type = SignatureKeyType.lookup(img_signature_key_type) public_key = get_public_key(context, img_signature_certificate_uuid, signature_key_type) verifier = signature_key_type.create_verifier(signature, hash_method, public_key) if verifier: return verifier else: raise exception.SignatureVerificationError(reason=_('Error occurred while creating the verifier'))
[ "def", "get_verifier", "(", "context", ",", "img_signature_certificate_uuid", ",", "img_signature_hash_method", ",", "img_signature", ",", "img_signature_key_type", ")", ":", "image_meta_props", "=", "{", "'img_signature_uuid'", ":", "img_signature_certificate_uuid", ",", "...
instantiate signature properties and use them to create a verifier .
train
false
32,244
@pytest.mark.django_db def test_merge_user(en_tutorial_po, member, member2): unit = _create_submission_and_suggestion(en_tutorial_po, member) accounts.utils.UserMerger(member, member2).merge() _test_user_merged(unit, member, member2)
[ "@", "pytest", ".", "mark", ".", "django_db", "def", "test_merge_user", "(", "en_tutorial_po", ",", "member", ",", "member2", ")", ":", "unit", "=", "_create_submission_and_suggestion", "(", "en_tutorial_po", ",", "member", ")", "accounts", ".", "utils", ".", ...
test merging user to another user .
train
false
32,245
def get_sample(): sample_output = check_output([NODE_TOOL, 'rangekeysample']) keys = [{'key': key.strip().decode('hex'), 'size': 0} for key in sample_output.splitlines()[1:]] sorted(keys, key=(lambda key: key['key'])) return keys
[ "def", "get_sample", "(", ")", ":", "sample_output", "=", "check_output", "(", "[", "NODE_TOOL", ",", "'rangekeysample'", "]", ")", "keys", "=", "[", "{", "'key'", ":", "key", ".", "strip", "(", ")", ".", "decode", "(", "'hex'", ")", ",", "'size'", "...
helper method to access created sample data for primitive types .
train
false
32,246
def render_markdown_from_file(f): return djblets_markdown.render_markdown_from_file(f, **MARKDOWN_KWARGS)
[ "def", "render_markdown_from_file", "(", "f", ")", ":", "return", "djblets_markdown", ".", "render_markdown_from_file", "(", "f", ",", "**", "MARKDOWN_KWARGS", ")" ]
renders markdown text to html .
train
false
32,247
def test_parameter_operators(): m = MockModel() par = m.alpha num = 42.0 val = 3 assert ((par - val) == (num - val)) assert ((val - par) == (val - num)) assert ((par / val) == (num / val)) assert ((val / par) == (val / num)) assert ((par ** val) == (num ** val)) assert ((val ** par) == (val ** num)) assert (par < 45) assert (par > 41) assert (par <= par) assert (par >= par) assert (par == par) assert ((- par) == (- num)) assert (abs(par) == abs(num))
[ "def", "test_parameter_operators", "(", ")", ":", "m", "=", "MockModel", "(", ")", "par", "=", "m", ".", "alpha", "num", "=", "42.0", "val", "=", "3", "assert", "(", "(", "par", "-", "val", ")", "==", "(", "num", "-", "val", ")", ")", "assert", ...
test if the parameter arithmetic operators work .
train
false
32,248
def fsync_dir(dirpath): dirfd = None try: dirfd = os.open(dirpath, (os.O_DIRECTORY | os.O_RDONLY)) fsync(dirfd) except OSError as err: if (err.errno == errno.ENOTDIR): raise logging.warning(_('Unable to perform fsync() on directory %(dir)s: %(err)s'), {'dir': dirpath, 'err': os.strerror(err.errno)}) finally: if dirfd: os.close(dirfd)
[ "def", "fsync_dir", "(", "dirpath", ")", ":", "dirfd", "=", "None", "try", ":", "dirfd", "=", "os", ".", "open", "(", "dirpath", ",", "(", "os", ".", "O_DIRECTORY", "|", "os", ".", "O_RDONLY", ")", ")", "fsync", "(", "dirfd", ")", "except", "OSErro...
sync directory entries to disk .
train
false
32,249
def get_interface_mode(interface, module): command = ('show interface ' + interface) intf_type = get_interface_type(interface) body = execute_show_command(command, module) mode = 'unknown' interface_table = {} try: interface_table = body[0]['TABLE_interface']['ROW_interface'] except (KeyError, AttributeError, IndexError): return mode if interface_table: if (intf_type in ['ethernet', 'portchannel']): mode = str(interface_table.get('eth_mode', 'layer3')) if (mode in ['access', 'trunk']): mode = 'layer2' if (mode == 'routed'): mode = 'layer3' elif ((intf_type == 'loopback') or (intf_type == 'svi')): mode = 'layer3' return mode
[ "def", "get_interface_mode", "(", "interface", ",", "module", ")", ":", "command", "=", "(", "'show interface '", "+", "interface", ")", "intf_type", "=", "get_interface_type", "(", "interface", ")", "body", "=", "execute_show_command", "(", "command", ",", "mod...
gets current mode of interface: layer2 or layer3 args: device : this is the device object of an nx-api enabled device using the device class within device .
train
false
32,252
def p_toktype(p): p[0] = p[1]
[ "def", "p_toktype", "(", "p", ")", ":", "p", "[", "0", "]", "=", "p", "[", "1", "]" ]
toktype : token | left | right | nonassoc .
train
false
32,253
def get_profilers(**filter_data): return rpc_utils.prepare_for_serialization(models.Profiler.list_objects(filter_data))
[ "def", "get_profilers", "(", "**", "filter_data", ")", ":", "return", "rpc_utils", ".", "prepare_for_serialization", "(", "models", ".", "Profiler", ".", "list_objects", "(", "filter_data", ")", ")" ]
get all profilers .
train
false
32,255
@test def jpeg(h): if (h[6:10] in ('JFIF', 'Exif')): return u'jpeg' if (h[:2] == '\xff\xd8'): q = h[:32].tobytes() if (('JFIF' in q) or ('8BIM' in q)): return u'jpeg'
[ "@", "test", "def", "jpeg", "(", "h", ")", ":", "if", "(", "h", "[", "6", ":", "10", "]", "in", "(", "'JFIF'", ",", "'Exif'", ")", ")", ":", "return", "u'jpeg'", "if", "(", "h", "[", ":", "2", "]", "==", "'\\xff\\xd8'", ")", ":", "q", "=", ...
jpeg data in jfif format .
train
false
32,258
def factoring_visitor(state, primes): (f, lpart, pstack) = state factoring = [] for i in range((lpart + 1)): factor = 1 for ps in pstack[f[i]:f[(i + 1)]]: if (ps.v > 0): factor *= (primes[ps.c] ** ps.v) factoring.append(factor) return factoring
[ "def", "factoring_visitor", "(", "state", ",", "primes", ")", ":", "(", "f", ",", "lpart", ",", "pstack", ")", "=", "state", "factoring", "=", "[", "]", "for", "i", "in", "range", "(", "(", "lpart", "+", "1", ")", ")", ":", "factor", "=", "1", ...
use with multiset_partitions_taocp to enumerate the ways a number can be expressed as a product of factors .
train
false
32,260
def handle_default_options(options): if options.settings: os.environ['DJANGO_SETTINGS_MODULE'] = options.settings if options.pythonpath: sys.path.insert(0, options.pythonpath)
[ "def", "handle_default_options", "(", "options", ")", ":", "if", "options", ".", "settings", ":", "os", ".", "environ", "[", "'DJANGO_SETTINGS_MODULE'", "]", "=", "options", ".", "settings", "if", "options", ".", "pythonpath", ":", "sys", ".", "path", ".", ...
include any default options that all commands should accept here so that managementutility can handle them before searching for user commands .
train
false
32,261
def require_login(handler): def require_login_wrapper_fn(request, *args, **kwargs): if getattr(request, 'is_logged_in', False): return handler(request, *args, **kwargs) raise PermissionDenied(_('You must be logged in to access this page.')) return require_login_wrapper_fn
[ "def", "require_login", "(", "handler", ")", ":", "def", "require_login_wrapper_fn", "(", "request", ",", "*", "args", ",", "**", "kwargs", ")", ":", "if", "getattr", "(", "request", ",", "'is_logged_in'", ",", "False", ")", ":", "return", "handler", "(", ...
make sure that a user is logged in to the distributed server .
train
false
32,263
def interact_in_memory(client_conn, server_conn): wrote = True while wrote: wrote = False for (read, write) in [(client_conn, server_conn), (server_conn, client_conn)]: try: data = read.recv((2 ** 16)) except WantReadError: pass else: return (read, data) while True: try: dirty = read.bio_read(4096) except WantReadError: break else: wrote = True write.bio_write(dirty)
[ "def", "interact_in_memory", "(", "client_conn", ",", "server_conn", ")", ":", "wrote", "=", "True", "while", "wrote", ":", "wrote", "=", "False", "for", "(", "read", ",", "write", ")", "in", "[", "(", "client_conn", ",", "server_conn", ")", ",", "(", ...
try to read application bytes from each of the two connection objects .
train
false
32,264
def invalidate_zone_urls_cache(document, async=False): job = DocumentZoneURLRemapsJob() if async: invalidator = job.invalidate else: invalidator = job.refresh try: if document.zone: invalidator(document.locale) except ObjectDoesNotExist: pass
[ "def", "invalidate_zone_urls_cache", "(", "document", ",", "async", "=", "False", ")", ":", "job", "=", "DocumentZoneURLRemapsJob", "(", ")", "if", "async", ":", "invalidator", "=", "job", ".", "invalidate", "else", ":", "invalidator", "=", "job", ".", "refr...
reset the url remap list cache for the given document .
train
false
32,266
def debug_callback(event, *args, **kwds): l = [('event %s' % (event.type,))] if args: l.extend(map(str, args)) if kwds: l.extend(sorted((('%s=%s' % t) for t in kwds.items()))) print ('Debug callback (%s)' % ', '.join(l))
[ "def", "debug_callback", "(", "event", ",", "*", "args", ",", "**", "kwds", ")", ":", "l", "=", "[", "(", "'event %s'", "%", "(", "event", ".", "type", ",", ")", ")", "]", "if", "args", ":", "l", ".", "extend", "(", "map", "(", "str", ",", "a...
example callback .
train
true
32,267
@verbose def _compute_forwards(rr, bem, coils_list, ccoils_list, infos, coil_types, n_jobs, verbose=None): fwd_data = dict(coils_list=coils_list, ccoils_list=ccoils_list, infos=infos, coil_types=coil_types) _prep_field_computation(rr, bem, fwd_data, n_jobs) Bs = _compute_forwards_meeg(rr, fwd_data, n_jobs) return Bs
[ "@", "verbose", "def", "_compute_forwards", "(", "rr", ",", "bem", ",", "coils_list", ",", "ccoils_list", ",", "infos", ",", "coil_types", ",", "n_jobs", ",", "verbose", "=", "None", ")", ":", "fwd_data", "=", "dict", "(", "coils_list", "=", "coils_list", ...
compute the meg and eeg forward solutions .
train
false
32,268
@register.function def loc(s): return trim_whitespace(s)
[ "@", "register", ".", "function", "def", "loc", "(", "s", ")", ":", "return", "trim_whitespace", "(", "s", ")" ]
a noop function for strings that are not ready to be localized .
train
false
32,269
def _create_veth_pair(dev1_name, dev2_name): for dev in [dev1_name, dev2_name]: if device_exists(dev): try: utils.execute('ip', 'link', 'delete', dev1_name, run_as_root=True, check_exit_code=[0, 2, 254]) except exception.ProcessExecutionError: LOG.exception((_('Error clearing stale veth %s') % dev)) utils.execute('ip', 'link', 'add', dev1_name, 'type', 'veth', 'peer', 'name', dev2_name, run_as_root=True) for dev in [dev1_name, dev2_name]: utils.execute('ip', 'link', 'set', dev, 'up', run_as_root=True) utils.execute('ip', 'link', 'set', dev, 'promisc', 'on', run_as_root=True)
[ "def", "_create_veth_pair", "(", "dev1_name", ",", "dev2_name", ")", ":", "for", "dev", "in", "[", "dev1_name", ",", "dev2_name", "]", ":", "if", "device_exists", "(", "dev", ")", ":", "try", ":", "utils", ".", "execute", "(", "'ip'", ",", "'link'", ",...
create a pair of veth devices with the specified names .
train
false
32,272
def housing(): s3.prep = (lambda r: ((r.method == 'options') and (r.representation == 's3json'))) return s3_rest_controller()
[ "def", "housing", "(", ")", ":", "s3", ".", "prep", "=", "(", "lambda", "r", ":", "(", "(", "r", ".", "method", "==", "'options'", ")", "and", "(", "r", ".", "representation", "==", "'s3json'", ")", ")", ")", "return", "s3_rest_controller", "(", ")...
housing: restful crud controller for option lookups .
train
false
32,273
def Input(shape=None, batch_shape=None, name=None, dtype=K.floatx(), sparse=False, tensor=None): if ((not batch_shape) and (tensor is None)): assert shape, 'Please provide to Input either a `shape` or a `batch_shape` argument. Note that `shape` does not include the batch dimension.' if (shape and (not batch_shape)): batch_shape = ((None,) + tuple(shape)) input_layer = InputLayer(batch_input_shape=batch_shape, name=name, input_dtype=dtype, sparse=sparse, input_tensor=tensor) outputs = input_layer.inbound_nodes[0].output_tensors if (len(outputs) == 1): return outputs[0] else: return outputs
[ "def", "Input", "(", "shape", "=", "None", ",", "batch_shape", "=", "None", ",", "name", "=", "None", ",", "dtype", "=", "K", ".", "floatx", "(", ")", ",", "sparse", "=", "False", ",", "tensor", "=", "None", ")", ":", "if", "(", "(", "not", "ba...
input() is used to instantiate a keras tensor .
train
false
32,274
def assertReading(testCase, reactor, transport): if IReactorFDSet.providedBy(reactor): testCase.assertIn(transport, reactor.getReaders()) else: testCase.assertIn(transport, reactor.handles) testCase.assertTrue(transport.reading)
[ "def", "assertReading", "(", "testCase", ",", "reactor", ",", "transport", ")", ":", "if", "IReactorFDSet", ".", "providedBy", "(", "reactor", ")", ":", "testCase", ".", "assertIn", "(", "transport", ",", "reactor", ".", "getReaders", "(", ")", ")", "else"...
use the given test to assert that the given transport is actively reading in the given reactor .
train
false
32,275
def api_handler(kwargs): mode = kwargs.get('mode', '') output = kwargs.get('output') name = kwargs.get('name', '') callback = kwargs.get('callback', '') cherrypy.response.timeout = (60 * 10) if isinstance(mode, list): mode = mode[0] if isinstance(output, list): output = output[0] response = _api_table.get(mode, (_api_undefined, 2))[0](name, output, kwargs) if ((output == 'json') and callback): response = ('%s(%s)' % (callback, response)) return response
[ "def", "api_handler", "(", "kwargs", ")", ":", "mode", "=", "kwargs", ".", "get", "(", "'mode'", ",", "''", ")", "output", "=", "kwargs", ".", "get", "(", "'output'", ")", "name", "=", "kwargs", ".", "get", "(", "'name'", ",", "''", ")", "callback"...
api dispatcher .
train
false
32,276
@pytest.mark.parametrize('addon_status,file_status,is_unreviewed', [(amo.STATUS_NOMINATED, amo.STATUS_AWAITING_REVIEW, True), (amo.STATUS_NOMINATED, amo.STATUS_NOMINATED, True), (amo.STATUS_NOMINATED, amo.STATUS_PUBLIC, False), (amo.STATUS_NOMINATED, amo.STATUS_DISABLED, False), (amo.STATUS_NOMINATED, amo.STATUS_BETA, False), (amo.STATUS_PUBLIC, amo.STATUS_AWAITING_REVIEW, True), (amo.STATUS_PUBLIC, amo.STATUS_NOMINATED, True), (amo.STATUS_PUBLIC, amo.STATUS_PUBLIC, False), (amo.STATUS_PUBLIC, amo.STATUS_DISABLED, False), (amo.STATUS_PUBLIC, amo.STATUS_BETA, False)]) def test_unreviewed_files(db, addon_status, file_status, is_unreviewed): addon = amo.tests.addon_factory(status=addon_status, guid='foo') version = addon.current_version file_ = version.files.get() file_.update(status=file_status) addon.update(status=addon_status) assert (addon.reload().status == addon_status) assert (file_.reload().status == file_status)
[ "@", "pytest", ".", "mark", ".", "parametrize", "(", "'addon_status,file_status,is_unreviewed'", ",", "[", "(", "amo", ".", "STATUS_NOMINATED", ",", "amo", ".", "STATUS_AWAITING_REVIEW", ",", "True", ")", ",", "(", "amo", ".", "STATUS_NOMINATED", ",", "amo", "...
files that need to be reviewed are returned by version .
train
false
32,277
def _get_xblocks(course_key, category): xblocks = get_course_blocks(course_key, category) return xblocks
[ "def", "_get_xblocks", "(", "course_key", ",", "category", ")", ":", "xblocks", "=", "get_course_blocks", "(", "course_key", ",", "category", ")", "return", "xblocks" ]
retrieve all xblocks in the course for a particular category .
train
false
32,279
def postorder_traversal(node, keys=None): if isinstance(node, Basic): args = node.args if keys: if (keys != True): args = ordered(args, keys, default=False) else: args = ordered(args) for arg in args: for subtree in postorder_traversal(arg, keys): (yield subtree) elif iterable(node): for item in node: for subtree in postorder_traversal(item, keys): (yield subtree) (yield node)
[ "def", "postorder_traversal", "(", "node", ",", "keys", "=", "None", ")", ":", "if", "isinstance", "(", "node", ",", "Basic", ")", ":", "args", "=", "node", ".", "args", "if", "keys", ":", "if", "(", "keys", "!=", "True", ")", ":", "args", "=", "...
do a postorder traversal of a tree .
train
false
32,283
def extract_sentiment(emotions): joy = [0, 0, 0] sorrow = [0, 0, 0] anger = [0, 0, 0] surprise = [0, 0, 0] odds = ['VERY_LIKELY', 'LIKELY', 'POSSIBLE'] for i in range(len(odds)): joy[i] = sum(((f['joyLikelihood'] == odds[i]) for f in emotions)) anger[i] = sum(((f['angerLikelihood'] == odds[i]) for f in emotions)) sorrow[i] = sum(((f['sorrowLikelihood'] == odds[i]) for f in emotions)) surprise[i] = sum(((f['surpriseLikelihood'] == odds[i]) for f in emotions)) return (joy, anger, sorrow, surprise)
[ "def", "extract_sentiment", "(", "emotions", ")", ":", "joy", "=", "[", "0", ",", "0", ",", "0", "]", "sorrow", "=", "[", "0", ",", "0", ",", "0", "]", "anger", "=", "[", "0", ",", "0", ",", "0", "]", "surprise", "=", "[", "0", ",", "0", ...
extract the sentiment from the facial annotations .
train
false
32,285
def filter_by(lookup_dict, pillar, merge=None, default='default', base=None): return salt.utils.filter_by(lookup_dict=lookup_dict, lookup=pillar, traverse=__pillar__, merge=merge, default=default, base=base)
[ "def", "filter_by", "(", "lookup_dict", ",", "pillar", ",", "merge", "=", "None", ",", "default", "=", "'default'", ",", "base", "=", "None", ")", ":", "return", "salt", ".", "utils", ".", "filter_by", "(", "lookup_dict", "=", "lookup_dict", ",", "lookup...
return the first match in a dictionary of target patterns .
train
true
32,286
def bundle_to_json(fh): hg_unbundle10_obj = readbundle(get_configured_ui(), fh, None) groups = [group for group in unpack_groups(hg_unbundle10_obj)] return json.dumps(groups, indent=4)
[ "def", "bundle_to_json", "(", "fh", ")", ":", "hg_unbundle10_obj", "=", "readbundle", "(", "get_configured_ui", "(", ")", ",", "fh", ",", "None", ")", "groups", "=", "[", "group", "for", "group", "in", "unpack_groups", "(", "hg_unbundle10_obj", ")", "]", "...
convert the received hg10xx data stream to a json object .
train
false
32,287
def bus_get_object(bus, object_path, service_name=None): name = (service_name or BUS_NAME) try: return bus.get_object(name, object_path, introspect=False) except dbus.exceptions.DBusException as e: if (e.get_dbus_name() in (DBUS_SERVICE_UNKNOWN, DBUS_EXEC_FAILED, DBUS_NO_REPLY)): raise SecretServiceNotAvailableException(e.get_dbus_message()) raise
[ "def", "bus_get_object", "(", "bus", ",", "object_path", ",", "service_name", "=", "None", ")", ":", "name", "=", "(", "service_name", "or", "BUS_NAME", ")", "try", ":", "return", "bus", ".", "get_object", "(", "name", ",", "object_path", ",", "introspect"...
a wrapper around :meth:sessionbus .
train
false
32,289
def _arp(ip_address): cmd = ['arp', '-n', ip_address] arp = subprocess.Popen(cmd, stdout=subprocess.PIPE) (out, _) = arp.communicate() match = re.search('(([0-9A-Fa-f]{1,2}\\:){5}[0-9A-Fa-f]{1,2})', str(out)) if match: return match.group(0) _LOGGER.info('No MAC address found for %s', ip_address) return None
[ "def", "_arp", "(", "ip_address", ")", ":", "cmd", "=", "[", "'arp'", ",", "'-n'", ",", "ip_address", "]", "arp", "=", "subprocess", ".", "Popen", "(", "cmd", ",", "stdout", "=", "subprocess", ".", "PIPE", ")", "(", "out", ",", "_", ")", "=", "ar...
get the mac address for a given ip .
train
false
32,293
def check_nsp(dist, attr, value): assert_string_list(dist, attr, value) for nsp in value: if (not dist.has_contents_for(nsp)): raise DistutilsSetupError(('Distribution contains no modules or packages for ' + ('namespace package %r' % nsp))) if ('.' in nsp): parent = '.'.join(nsp.split('.')[:(-1)]) if (parent not in value): distutils.log.warn('WARNING: %r is declared as a package namespace, but %r is not: please correct this in setup.py', nsp, parent)
[ "def", "check_nsp", "(", "dist", ",", "attr", ",", "value", ")", ":", "assert_string_list", "(", "dist", ",", "attr", ",", "value", ")", "for", "nsp", "in", "value", ":", "if", "(", "not", "dist", ".", "has_contents_for", "(", "nsp", ")", ")", ":", ...
verify that namespace packages are valid .
train
true
32,294
def intfrombits(bits): ret = 0 lsb_first = [b for b in bits] lsb_first.reverse() for bit_index in range(len(lsb_first)): if lsb_first[bit_index]: ret = (ret | (1 << bit_index)) return ret
[ "def", "intfrombits", "(", "bits", ")", ":", "ret", "=", "0", "lsb_first", "=", "[", "b", "for", "b", "in", "bits", "]", "lsb_first", ".", "reverse", "(", ")", "for", "bit_index", "in", "range", "(", "len", "(", "lsb_first", ")", ")", ":", "if", ...
given a list of boolean values .
train
false
32,295
@pytest.fixture() def depends_on_current_app(celery_app): celery_app.set_current()
[ "@", "pytest", ".", "fixture", "(", ")", "def", "depends_on_current_app", "(", "celery_app", ")", ":", "celery_app", ".", "set_current", "(", ")" ]
fixture that sets app as current .
train
false
32,296
def register_create_keys_and_cert_arguments(session, argument_table, **kwargs): after_event = 'after-call.iot.CreateKeysAndCertificate' argument_table['certificate-pem-outfile'] = QueryOutFileArgument(session=session, name='certificate-pem-outfile', query='certificatePem', after_call_event=after_event, perm=384) argument_table['public-key-outfile'] = QueryOutFileArgument(session=session, name='public-key-outfile', query='keyPair.PublicKey', after_call_event=after_event, perm=384) argument_table['private-key-outfile'] = QueryOutFileArgument(session=session, name='private-key-outfile', query='keyPair.PrivateKey', after_call_event=after_event, perm=384)
[ "def", "register_create_keys_and_cert_arguments", "(", "session", ",", "argument_table", ",", "**", "kwargs", ")", ":", "after_event", "=", "'after-call.iot.CreateKeysAndCertificate'", "argument_table", "[", "'certificate-pem-outfile'", "]", "=", "QueryOutFileArgument", "(", ...
add outfile save arguments to create-keys-and-certificate - --certificate-pem-outfile - --public-key-outfile - --private-key-outfile .
train
false
32,297
def regex_last_key(regex): def k(obj): if regex.search(obj): return (1, obj) return (0, obj) return k
[ "def", "regex_last_key", "(", "regex", ")", ":", "def", "k", "(", "obj", ")", ":", "if", "regex", ".", "search", "(", "obj", ")", ":", "return", "(", "1", ",", "obj", ")", "return", "(", "0", ",", "obj", ")", "return", "k" ]
sort key function factory that puts items that match a regular expression last .
train
true
32,298
def iff(a, b, c): if a: return b else: return c
[ "def", "iff", "(", "a", ",", "b", ",", "c", ")", ":", "if", "a", ":", "return", "b", "else", ":", "return", "c" ]
ternary shortcut .
train
false
32,299
@hook.command('pig', 'piglatin') def piglatin(text):
    """pig <text> -- converts <text> to pig latin."""
    # pronunciations is a module-level cache populated elsewhere
    # (presumably by NLTK setup code — not visible here).
    global pronunciations
    if (not pronunciations):
        return 'Please wait, getting NLTK ready!'
    words = []
    for word in text.split():
        # Peel off one trailing punctuation mark so it can be re-attached
        # after the word itself is translated.
        if (word[(-1)] in string.punctuation):
            end = word[(-1)]
            word = word[:(-1)]
        else:
            end = ''
        # translate() is defined elsewhere in this module; presumably it
        # performs the per-word pig-latin conversion — not visible here.
        out_word = translate(word)
        # Preserve the original word's casing: all-caps stays all-caps
        # (except the pronoun 'I'), Title Case stays title case,
        # everything else is lowered.
        if (word.isupper() and (not (word == 'I'))):
            out_word = out_word.upper()
        elif word[0].isupper():
            # NOTE(review): if the input token was a lone punctuation
            # character, word is '' here and word[0] raises IndexError —
            # confirm inputs can't contain bare punctuation tokens.
            out_word = out_word.title()
        else:
            out_word = out_word.lower()
        words.append((out_word + end))
    # An all-caps input sentence is upper-cased as a whole on the way out.
    if text.isupper():
        return ' '.join(words).upper()
    else:
        return ' '.join(words)
[ "@", "hook", ".", "command", "(", "'pig'", ",", "'piglatin'", ")", "def", "piglatin", "(", "text", ")", ":", "global", "pronunciations", "if", "(", "not", "pronunciations", ")", ":", "return", "'Please wait, getting NLTK ready!'", "words", "=", "[", "]", "fo...
pig <text> -- converts <text> to pig latin .
train
false
32,300
def stop_server(port=7000):
    """Ask the server listening on localhost:*port* to shut down.

    Sends an HTTP ``QUIT`` request and waits for the response.

    :param port: local TCP port the server listens on (default 7000).
    """
    conn = httplib.HTTPConnection(('localhost:%d' % port))
    try:
        conn.request('QUIT', '/')
        conn.getresponse()
    finally:
        # Fix: the original leaked the connection; always release the
        # socket, even if the request or response fails.
        conn.close()
[ "def", "stop_server", "(", "port", "=", "7000", ")", ":", "conn", "=", "httplib", ".", "HTTPConnection", "(", "(", "'localhost:%d'", "%", "port", ")", ")", "conn", ".", "request", "(", "'QUIT'", ",", "'/'", ")", "conn", ".", "getresponse", "(", ")" ]
stop the server listening on the given local port by sending it an http quit request .
train
false
32,301
def get_repo_teams(repo_name, profile='github'):
    """Return the teams attached to a repository.

    :param repo_name: repository name inside the configured organization.
    :param profile: configuration profile holding the org name and client
        credentials.
    :returns: list of ``{'id', 'name', 'permission'}`` dicts, one per team.
    :raises CommandExecutionError: when the repository cannot be found or
        its teams cannot be retrieved.
    """
    org_name = _get_config_value(profile, 'org_name')
    client = _get_client(profile)
    full_name = '/'.join([org_name, repo_name])
    try:
        repo = client.get_repo(full_name)
    except github.UnknownObjectException:
        raise CommandExecutionError("The '{0}' repository under the '{1}' organization could not be found.".format(repo_name, org_name))
    try:
        # Flatten each team object into a plain dict for the caller.
        return [{'id': team.id, 'name': team.name, 'permission': team.permission}
                for team in repo.get_teams()]
    except github.UnknownObjectException:
        raise CommandExecutionError("Unable to retrieve teams for repository '{0}' under the '{1}' organization.".format(repo_name, org_name))
[ "def", "get_repo_teams", "(", "repo_name", ",", "profile", "=", "'github'", ")", ":", "ret", "=", "[", "]", "org_name", "=", "_get_config_value", "(", "profile", ",", "'org_name'", ")", "client", "=", "_get_client", "(", "profile", ")", "try", ":", "repo",...
return teams belonging to a repository .
train
true
32,302
def list_services(kwargs=None, conn=None, call=None):
    """List hosted services belonging to this account.

    Must be invoked as a function (``-f`` / ``--function``).

    :param kwargs: unused; accepted for interface compatibility.
    :param conn: optional existing connection; one is created when absent.
    :param call: must be ``'function'``.
    :returns: dict keyed by service name; each value is the service's
        attribute dict plus a ``'name'`` key.
    :raises SaltCloudSystemExit: when not called with -f/--function.
    """
    if call != 'function':
        raise SaltCloudSystemExit('The list_services function must be called with -f or --function.')
    if not conn:
        conn = get_conn()
    services = {}
    for svc in conn.list_hosted_services().hosted_services:
        entry = object_to_dict(svc)
        entry['name'] = svc.service_name
        services[svc.service_name] = entry
    return services
[ "def", "list_services", "(", "kwargs", "=", "None", ",", "conn", "=", "None", ",", "call", "=", "None", ")", ":", "if", "(", "call", "!=", "'function'", ")", ":", "raise", "SaltCloudSystemExit", "(", "'The list_services function must be called with -f or --functio...
list hosted services belonging to this azure account .
train
true
32,303
def configure_control_node(cluster, provider, logging_config=None):
    """Configure the Flocker control service on the cluster's control node.

    Installs the control certificates, enables the control service, and
    opens the control firewall (when a firewall is available), all over a
    remote root session.

    :param cluster: cluster whose ``control_node`` and ``certificates``
        are used.
    :param provider: provider name; ``'managed'`` restarts the service
        instead of starting it (it may already be running there).
    :param logging_config: unused; accepted for interface compatibility.
    :returns: the deferred/effect produced by ``run_remotely``.
    """
    action = 'restart' if provider == 'managed' else 'start'
    control = cluster.control_node
    distribution = control.distribution
    certs = cluster.certificates
    steps = sequence([
        task_install_control_certificates(
            certs.cluster.certificate,
            certs.control.certificate,
            certs.control.key),
        task_enable_flocker_control(distribution, action),
        if_firewall_available(
            distribution,
            task_open_control_firewall(distribution)),
    ])
    return run_remotely(username='root', address=control.address, commands=steps)
[ "def", "configure_control_node", "(", "cluster", ",", "provider", ",", "logging_config", "=", "None", ")", ":", "setup_action", "=", "'start'", "if", "(", "provider", "==", "'managed'", ")", ":", "setup_action", "=", "'restart'", "return", "run_remotely", "(", ...
configure flocker control service on the given node .
train
false