id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
4,705
def get_subordinate_groups(user, site):
    """Return the groups that *user* is allowed to manage on *site*.

    Users without any permission level only see groups they created that
    carry no page-bound permission; root-level users see every group.
    """
    from cms.utils.page_permissions import get_change_permissions_id_list
    created_unbound = (Q(pageusergroup__created_by=user)
                       & Q(pagepermission__page__isnull=True))
    try:
        level = get_user_permission_level(user, site)
    except NoPermissionsException:
        # No permission level at all: fall back to self-created,
        # page-unbound groups only.
        return Group.objects.filter(created_unbound).distinct()
    if level == ROOT_USER_LEVEL:
        return Group.objects.all()
    allowed_page_ids = get_change_permissions_id_list(user, site, check_global=False)
    in_scope = (Q(pagepermission__page__id__in=allowed_page_ids)
                & Q(pagepermission__page__depth__gte=level))
    return Group.objects.distinct().filter(in_scope | created_unbound)
[ "def", "get_subordinate_groups", "(", "user", ",", "site", ")", ":", "from", "cms", ".", "utils", ".", "page_permissions", "import", "get_change_permissions_id_list", "try", ":", "user_level", "=", "get_user_permission_level", "(", "user", ",", "site", ")", "except", "NoPermissionsException", ":", "groups", "=", "Group", ".", "objects", ".", "filter", "(", "(", "Q", "(", "pageusergroup__created_by", "=", "user", ")", "&", "Q", "(", "pagepermission__page__isnull", "=", "True", ")", ")", ")", ".", "distinct", "(", ")", "return", "groups", "if", "(", "user_level", "==", "ROOT_USER_LEVEL", ")", ":", "return", "Group", ".", "objects", ".", "all", "(", ")", "page_id_allow_list", "=", "get_change_permissions_id_list", "(", "user", ",", "site", ",", "check_global", "=", "False", ")", "return", "Group", ".", "objects", ".", "distinct", "(", ")", ".", "filter", "(", "(", "(", "Q", "(", "pagepermission__page__id__in", "=", "page_id_allow_list", ")", "&", "Q", "(", "pagepermission__page__depth__gte", "=", "user_level", ")", ")", "|", "(", "Q", "(", "pageusergroup__created_by", "=", "user", ")", "&", "Q", "(", "pagepermission__page__isnull", "=", "True", ")", ")", ")", ")" ]
similar to get_subordinate_users .
train
false
4,706
def sift_port_white_list(port_white_list, registered_io_ports):
    """Filter the configured port white list down to registered ports.

    :param port_white_list: candidate ports configured by the user.
    :param registered_io_ports: ports actually registered as IO ports.
    :returns: list of ports from *port_white_list* that are registered.
    """
    valid_port_list = []
    # Fixed a typo in the debug message: the original read '[%(white)s}]'
    # with a stray closing brace after the format placeholder.
    LOG.debug('Filter ports in [%(white)s] but not in [%(reg_ports)s].',
              {'white': ','.join([port.display_name
                                  for port in port_white_list]),
               'reg_ports': ','.join([port.display_name
                                      for port in registered_io_ports])})
    for io_port in port_white_list:
        if io_port not in registered_io_ports:
            LOG.debug('Skipped SP port %(port)s due to it is not registered. '
                      'The registered IO ports: %(reg_ports)s.',
                      {'port': io_port, 'reg_ports': registered_io_ports})
        else:
            valid_port_list.append(io_port)
    return valid_port_list
[ "def", "sift_port_white_list", "(", "port_white_list", ",", "registered_io_ports", ")", ":", "valid_port_list", "=", "[", "]", "LOG", ".", "debug", "(", "'Filter ports in [%(white)s}] but not in [%(reg_ports)s].'", ",", "{", "'white'", ":", "','", ".", "join", "(", "[", "port", ".", "display_name", "for", "port", "in", "port_white_list", "]", ")", ",", "'reg_ports'", ":", "','", ".", "join", "(", "[", "port", ".", "display_name", "for", "port", "in", "registered_io_ports", "]", ")", "}", ")", "for", "io_port", "in", "port_white_list", ":", "if", "(", "io_port", "not", "in", "registered_io_ports", ")", ":", "LOG", ".", "debug", "(", "'Skipped SP port %(port)s due to it is not registered. The registered IO ports: %(reg_ports)s.'", ",", "{", "'port'", ":", "io_port", ",", "'reg_ports'", ":", "registered_io_ports", "}", ")", "else", ":", "valid_port_list", ".", "append", "(", "io_port", ")", "return", "valid_port_list" ]
filters out the unregistered ports .
train
false
4,708
def current_device():
    """Return the index of the currently selected CUDA device."""
    _lazy_init()
    device_index = torch._C._cuda_getDevice()
    return device_index
[ "def", "current_device", "(", ")", ":", "_lazy_init", "(", ")", "return", "torch", ".", "_C", ".", "_cuda_getDevice", "(", ")" ]
returns an adbdevice instance for the currently-selected device .
train
false
4,710
def find_platform_and_atomic_group(host):
    """Return ``(platform, atomic_group_name)`` for the given host object.

    The platform is the name of the host's single platform label, or
    ``None`` when it has none; more than one platform label raises
    ValueError.  The atomic group name comes from the first label that
    carries an atomic group, or ``None`` when no label does.
    """
    platform_names = [label.name for label in host.label_list if label.platform]
    if len(platform_names) > 1:
        raise ValueError('Host %s has more than one platform: %s'
                         % (host.hostname, ', '.join(platform_names)))
    platform = platform_names[0] if platform_names else None
    atomic_group_name = None
    for label in host.label_list:
        if label.atomic_group:
            atomic_group_name = label.atomic_group.name
            break
    return (platform, atomic_group_name)
[ "def", "find_platform_and_atomic_group", "(", "host", ")", ":", "platforms", "=", "[", "label", ".", "name", "for", "label", "in", "host", ".", "label_list", "if", "label", ".", "platform", "]", "if", "(", "not", "platforms", ")", ":", "platform", "=", "None", "else", ":", "platform", "=", "platforms", "[", "0", "]", "if", "(", "len", "(", "platforms", ")", ">", "1", ")", ":", "raise", "ValueError", "(", "(", "'Host %s has more than one platform: %s'", "%", "(", "host", ".", "hostname", ",", "', '", ".", "join", "(", "platforms", ")", ")", ")", ")", "for", "label", "in", "host", ".", "label_list", ":", "if", "label", ".", "atomic_group", ":", "atomic_group_name", "=", "label", ".", "atomic_group", ".", "name", "break", "else", ":", "atomic_group_name", "=", "None", "return", "(", "platform", ",", "atomic_group_name", ")" ]
figure out the platform name and atomic group name for the given host object .
train
false
4,711
def split_code_and_text_blocks(source_file):
    """Split *source_file* into an ordered list of ('code'|'text', content).

    The module docstring becomes the first text block; subsequent text
    blocks are comment "cells" introduced by a line of 20+ '#' chars.
    """
    (docstring, rest_of_content) = get_docstring_and_rest(source_file)
    blocks = [('text', docstring)]
    cell_pattern = re.compile(
        '(?P<header_line>^#{20,}.*)\\s(?P<text_content>(?:^#.*\\s)*)',
        flags=re.M)
    hash_prefix = re.compile('^#', flags=re.M)
    cursor = 0
    for match in cell_pattern.finditer(rest_of_content):
        (start, end) = match.span()
        code_chunk = rest_of_content[cursor:start]
        # Strip the leading '#' from each comment line, then dedent.
        text_chunk = dedent(
            hash_prefix.sub('', match.group('text_content'))).lstrip()
        if code_chunk.strip():
            blocks.append(('code', code_chunk))
        if text_chunk.strip():
            blocks.append(('text', text_chunk))
        cursor = end
    tail = rest_of_content[cursor:]
    if tail.strip():
        blocks.append(('code', tail))
    return blocks
[ "def", "split_code_and_text_blocks", "(", "source_file", ")", ":", "(", "docstring", ",", "rest_of_content", ")", "=", "get_docstring_and_rest", "(", "source_file", ")", "blocks", "=", "[", "(", "'text'", ",", "docstring", ")", "]", "pattern", "=", "re", ".", "compile", "(", "'(?P<header_line>^#{20,}.*)\\\\s(?P<text_content>(?:^#.*\\\\s)*)'", ",", "flags", "=", "re", ".", "M", ")", "pos_so_far", "=", "0", "for", "match", "in", "re", ".", "finditer", "(", "pattern", ",", "rest_of_content", ")", ":", "(", "match_start_pos", ",", "match_end_pos", ")", "=", "match", ".", "span", "(", ")", "code_block_content", "=", "rest_of_content", "[", "pos_so_far", ":", "match_start_pos", "]", "text_content", "=", "match", ".", "group", "(", "'text_content'", ")", "sub_pat", "=", "re", ".", "compile", "(", "'^#'", ",", "flags", "=", "re", ".", "M", ")", "text_block_content", "=", "dedent", "(", "re", ".", "sub", "(", "sub_pat", ",", "''", ",", "text_content", ")", ")", ".", "lstrip", "(", ")", "if", "code_block_content", ".", "strip", "(", ")", ":", "blocks", ".", "append", "(", "(", "'code'", ",", "code_block_content", ")", ")", "if", "text_block_content", ".", "strip", "(", ")", ":", "blocks", ".", "append", "(", "(", "'text'", ",", "text_block_content", ")", ")", "pos_so_far", "=", "match_end_pos", "remaining_content", "=", "rest_of_content", "[", "pos_so_far", ":", "]", "if", "remaining_content", ".", "strip", "(", ")", ":", "blocks", ".", "append", "(", "(", "'code'", ",", "remaining_content", ")", ")", "return", "blocks" ]
return list with source file separated into code and text blocks .
train
true
4,712
@login_required
def project_resync_webhook(request, project_slug):
    """Re-attach the project's webhook on POST; otherwise show the form."""
    project = get_object_or_404(
        Project.objects.for_admin_user(request.user), slug=project_slug)
    if request.method != 'POST':
        return render_to_response(
            'projects/project_resync_webhook.html',
            {'project': project},
            context_instance=RequestContext(request))
    attach_webhook(project=project, request=request)
    return HttpResponseRedirect(reverse('projects_detail', args=[project.slug]))
[ "@", "login_required", "def", "project_resync_webhook", "(", "request", ",", "project_slug", ")", ":", "project", "=", "get_object_or_404", "(", "Project", ".", "objects", ".", "for_admin_user", "(", "request", ".", "user", ")", ",", "slug", "=", "project_slug", ")", "if", "(", "request", ".", "method", "==", "'POST'", ")", ":", "attach_webhook", "(", "project", "=", "project", ",", "request", "=", "request", ")", "return", "HttpResponseRedirect", "(", "reverse", "(", "'projects_detail'", ",", "args", "=", "[", "project", ".", "slug", "]", ")", ")", "return", "render_to_response", "(", "'projects/project_resync_webhook.html'", ",", "{", "'project'", ":", "project", "}", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ")" ]
resync a project webhook .
train
false
4,716
def xmlDoc_from_html(response):
    """Parse the response body into a libxml2 HTML document.

    Falls back to a NUL-stripped body when libxml2 rejects the raw one.
    """
    body = body_as_utf8(response) or ' '
    try:
        return libxml2.htmlReadDoc(body, response.url, 'utf-8',
                                   html_parser_options)
    except TypeError:
        # NUL bytes make htmlReadDoc raise TypeError; retry without them.
        return libxml2.htmlReadDoc(body.replace('\x00', ''), response.url,
                                   'utf-8', html_parser_options)
[ "def", "xmlDoc_from_html", "(", "response", ")", ":", "utf8body", "=", "(", "body_as_utf8", "(", "response", ")", "or", "' '", ")", "try", ":", "lxdoc", "=", "libxml2", ".", "htmlReadDoc", "(", "utf8body", ",", "response", ".", "url", ",", "'utf-8'", ",", "html_parser_options", ")", "except", "TypeError", ":", "lxdoc", "=", "libxml2", ".", "htmlReadDoc", "(", "utf8body", ".", "replace", "(", "'\\x00'", ",", "''", ")", ",", "response", ".", "url", ",", "'utf-8'", ",", "html_parser_options", ")", "return", "lxdoc" ]
return libxml2 doc for htmls .
train
false
4,717
def blend_on_canvas(img, width, height, bgcolor=u'#ffffff'):
    """Centre *img* on a width x height canvas filled with *bgcolor*."""
    img_w = img.width()
    img_h = img.height()
    (needs_scale, fit_w, fit_h) = fit_image(img_w, img_h, width, height)
    if needs_scale:
        img = img.scaled(fit_w, fit_h, Qt.IgnoreAspectRatio,
                         Qt.SmoothTransformation)
        img_w = fit_w
        img_h = fit_h
    canvas = QImage(width, height, QImage.Format_RGB32)
    canvas.fill(QColor(bgcolor))
    # Offsets centre the (possibly scaled) image on the canvas.
    overlay_image(img, canvas, (width - img_w) // 2, (height - img_h) // 2)
    return canvas
[ "def", "blend_on_canvas", "(", "img", ",", "width", ",", "height", ",", "bgcolor", "=", "u'#ffffff'", ")", ":", "(", "w", ",", "h", ")", "=", "(", "img", ".", "width", "(", ")", ",", "img", ".", "height", "(", ")", ")", "(", "scaled", ",", "nw", ",", "nh", ")", "=", "fit_image", "(", "w", ",", "h", ",", "width", ",", "height", ")", "if", "scaled", ":", "img", "=", "img", ".", "scaled", "(", "nw", ",", "nh", ",", "Qt", ".", "IgnoreAspectRatio", ",", "Qt", ".", "SmoothTransformation", ")", "(", "w", ",", "h", ")", "=", "(", "nw", ",", "nh", ")", "canvas", "=", "QImage", "(", "width", ",", "height", ",", "QImage", ".", "Format_RGB32", ")", "canvas", ".", "fill", "(", "QColor", "(", "bgcolor", ")", ")", "overlay_image", "(", "img", ",", "canvas", ",", "(", "(", "width", "-", "w", ")", "//", "2", ")", ",", "(", "(", "height", "-", "h", ")", "//", "2", ")", ")", "return", "canvas" ]
blend the img onto a canvas with the specified background color and size .
train
false
4,718
def traverse_translatable_index(doctree):
    """Yield ``(node, entries)`` for each block-level index node in *doctree*."""
    def _is_block_index(node):
        return isinstance(node, addnodes.index) and node.get('inline') is False

    for node in doctree.traverse(_is_block_index):
        # Prefer the raw (untranslated) entries when the node carries them.
        entries = node['raw_entries'] if 'raw_entries' in node else node['entries']
        yield (node, entries)
[ "def", "traverse_translatable_index", "(", "doctree", ")", ":", "def", "is_block_index", "(", "node", ")", ":", "return", "(", "isinstance", "(", "node", ",", "addnodes", ".", "index", ")", "and", "(", "node", ".", "get", "(", "'inline'", ")", "is", "False", ")", ")", "for", "node", "in", "doctree", ".", "traverse", "(", "is_block_index", ")", ":", "if", "(", "'raw_entries'", "in", "node", ")", ":", "entries", "=", "node", "[", "'raw_entries'", "]", "else", ":", "entries", "=", "node", "[", "'entries'", "]", "(", "yield", "(", "node", ",", "entries", ")", ")" ]
traverse translatable index node from a document tree .
train
false
4,719
def add_url_params(url, params):
    """Return *url* with *params* merged into its query string.

    Existing query arguments are preserved (and overridden on key
    collision); bool and dict values are JSON-encoded so they survive
    URL serialization.
    """
    url = unquote(url)
    parts = urlparse(url)
    merged = dict(parse_qsl(parts.query))
    merged.update(params)
    # JSON-encode values urlencode would otherwise stringify poorly.
    merged.update({key: dumps(value)
                   for (key, value) in merged.items()
                   if isinstance(value, (bool, dict))})
    new_query = urlencode(merged, doseq=True)
    return ParseResult(parts.scheme, parts.netloc, parts.path,
                       parts.params, new_query, parts.fragment).geturl()
[ "def", "add_url_params", "(", "url", ",", "params", ")", ":", "url", "=", "unquote", "(", "url", ")", "parsed_url", "=", "urlparse", "(", "url", ")", "get_args", "=", "parsed_url", ".", "query", "parsed_get_args", "=", "dict", "(", "parse_qsl", "(", "get_args", ")", ")", "parsed_get_args", ".", "update", "(", "params", ")", "parsed_get_args", ".", "update", "(", "{", "k", ":", "dumps", "(", "v", ")", "for", "(", "k", ",", "v", ")", "in", "parsed_get_args", ".", "items", "(", ")", "if", "isinstance", "(", "v", ",", "(", "bool", ",", "dict", ")", ")", "}", ")", "encoded_get_args", "=", "urlencode", "(", "parsed_get_args", ",", "doseq", "=", "True", ")", "new_url", "=", "ParseResult", "(", "parsed_url", ".", "scheme", ",", "parsed_url", ".", "netloc", ",", "parsed_url", ".", "path", ",", "parsed_url", ".", "params", ",", "encoded_get_args", ",", "parsed_url", ".", "fragment", ")", ".", "geturl", "(", ")", "return", "new_url" ]
add get params to provided url being aware of existing .
train
false
4,720
def getEndsWithList(word, wordEndings):
    """Return True if *word* ends with any of the endings in *wordEndings*.

    ``str.endswith`` accepts a tuple of suffixes, which replaces the
    manual loop with a single C-level call; an empty *wordEndings*
    yields False, matching the original loop's behavior.
    """
    return word.endswith(tuple(wordEndings))
[ "def", "getEndsWithList", "(", "word", ",", "wordEndings", ")", ":", "for", "wordEnding", "in", "wordEndings", ":", "if", "word", ".", "endswith", "(", "wordEnding", ")", ":", "return", "True", "return", "False" ]
determine if the word ends with a list .
train
false
4,721
def runReducedExperiment(path, reduced=True):
    """Run the experiment at *path*, in test mode when *reduced* is true."""
    initExperimentPrng()
    args = [path, '--testMode'] if reduced else [path]
    runExperiment(args)
[ "def", "runReducedExperiment", "(", "path", ",", "reduced", "=", "True", ")", ":", "initExperimentPrng", "(", ")", "if", "reduced", ":", "args", "=", "[", "path", ",", "'--testMode'", "]", "else", ":", "args", "=", "[", "path", "]", "runExperiment", "(", "args", ")" ]
run the experiment in the <path> with a reduced iteration count .
train
false
4,722
def get_settings_from_file(path, default_settings=DEFAULT_CONFIG):
    """Load a settings module from *path* and merge it over the defaults."""
    module_name = os.path.splitext(os.path.basename(path))[0]
    module = load_source(module_name, path)
    return get_settings_from_module(module, default_settings=default_settings)
[ "def", "get_settings_from_file", "(", "path", ",", "default_settings", "=", "DEFAULT_CONFIG", ")", ":", "(", "name", ",", "ext", ")", "=", "os", ".", "path", ".", "splitext", "(", "os", ".", "path", ".", "basename", "(", "path", ")", ")", "module", "=", "load_source", "(", "name", ",", "path", ")", "return", "get_settings_from_module", "(", "module", ",", "default_settings", "=", "default_settings", ")" ]
loads settings from a file path .
train
false
4,724
def _child_vhds(session, sr_ref, vdi_uuid):
    """Return the UUIDs of VDIs whose VHD parent is *vdi_uuid*."""
    # Skip the VDI itself before the (more expensive) parent lookup,
    # relying on 'and' short-circuiting.
    return {rec['uuid']
            for (ref, rec) in _get_all_vdis_in_sr(session, sr_ref)
            if rec['uuid'] != vdi_uuid
            and _get_vhd_parent_uuid(session, ref) == vdi_uuid}
[ "def", "_child_vhds", "(", "session", ",", "sr_ref", ",", "vdi_uuid", ")", ":", "children", "=", "set", "(", ")", "for", "(", "ref", ",", "rec", ")", "in", "_get_all_vdis_in_sr", "(", "session", ",", "sr_ref", ")", ":", "rec_uuid", "=", "rec", "[", "'uuid'", "]", "if", "(", "rec_uuid", "==", "vdi_uuid", ")", ":", "continue", "parent_uuid", "=", "_get_vhd_parent_uuid", "(", "session", ",", "ref", ")", "if", "(", "parent_uuid", "!=", "vdi_uuid", ")", ":", "continue", "children", ".", "add", "(", "rec_uuid", ")", "return", "children" ]
return the immediate children of a given vhd .
train
false
4,725
def osquery_info(attrs=None, where=None):
    """Return rows from osquery's ``osquery_info`` table.

    *attrs* selects columns and *where* filters rows, both passed
    straight through to the generic osquery runner.
    """
    return _osquery_cmd(table='osquery_info', attrs=attrs, where=where)
[ "def", "osquery_info", "(", "attrs", "=", "None", ",", "where", "=", "None", ")", ":", "return", "_osquery_cmd", "(", "table", "=", "'osquery_info'", ",", "attrs", "=", "attrs", ",", "where", "=", "where", ")" ]
return osquery_info information from osquery cli example: .
train
false
4,726
def PacificTime(now):
    """Convert a UTC timestamp (seconds) to US/Pacific, honouring DST."""
    pacific = now - 8 * 3600  # PST is UTC-8
    if IsPacificDST(pacific):
        pacific += 3600  # spring forward one hour for PDT
    return pacific
[ "def", "PacificTime", "(", "now", ")", ":", "now", "-=", "(", "8", "*", "3600", ")", "if", "IsPacificDST", "(", "now", ")", ":", "now", "+=", "3600", "return", "now" ]
helper to return the number of seconds between utc and pacific time .
train
false
4,727
def publish_exploration(committer_id, exploration_id):
    """Publish the given exploration on behalf of *committer_id*."""
    _publish_activity(
        committer_id, exploration_id, feconf.ACTIVITY_TYPE_EXPLORATION)
[ "def", "publish_exploration", "(", "committer_id", ",", "exploration_id", ")", ":", "_publish_activity", "(", "committer_id", ",", "exploration_id", ",", "feconf", ".", "ACTIVITY_TYPE_EXPLORATION", ")" ]
this is called by the publish_exploration_and_update_user_profiles function in exp_services .
train
false
4,729
def delete_user_policy(user_name, policy_name, region=None, key=None,
                       keyid=None, profile=None):
    """Delete an IAM user policy.

    Returns True on success or when the policy does not exist, False
    when the connection fails or AWS rejects the deletion.
    """
    conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
    if not conn:
        return False
    existing = get_user_policy(user_name, policy_name, region, key, keyid,
                               profile)
    if not existing:
        # Nothing to delete: treat as success.
        return True
    try:
        conn.delete_user_policy(user_name, policy_name)
    except boto.exception.BotoServerError as exc:
        log.debug(exc)
        msg = 'Failed to delete {0} policy for user {1}.'
        log.error(msg.format(policy_name, user_name))
        return False
    msg = 'Successfully deleted {0} policy for user {1}.'
    log.info(msg.format(policy_name, user_name))
    return True
[ "def", "delete_user_policy", "(", "user_name", ",", "policy_name", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "if", "(", "not", "conn", ")", ":", "return", "False", "_policy", "=", "get_user_policy", "(", "user_name", ",", "policy_name", ",", "region", ",", "key", ",", "keyid", ",", "profile", ")", "if", "(", "not", "_policy", ")", ":", "return", "True", "try", ":", "conn", ".", "delete_user_policy", "(", "user_name", ",", "policy_name", ")", "msg", "=", "'Successfully deleted {0} policy for user {1}.'", "log", ".", "info", "(", "msg", ".", "format", "(", "policy_name", ",", "user_name", ")", ")", "return", "True", "except", "boto", ".", "exception", ".", "BotoServerError", "as", "e", ":", "log", ".", "debug", "(", "e", ")", "msg", "=", "'Failed to delete {0} policy for user {1}.'", "log", ".", "error", "(", "msg", ".", "format", "(", "policy_name", ",", "user_name", ")", ")", "return", "False" ]
delete a user policy .
train
true
4,731
def dmp_lcm(f, g, u, K):
    """Compute the polynomial LCM of ``f`` and ``g`` in ``K[X]``.

    Dispatches to the univariate routine when there are no extra
    variables, and otherwise to the field / ring implementation
    according to the ground domain.
    """
    if not u:
        return dup_lcm(f, g, K)
    if K.has_Field:
        return dmp_ff_lcm(f, g, u, K)
    return dmp_rr_lcm(f, g, u, K)
[ "def", "dmp_lcm", "(", "f", ",", "g", ",", "u", ",", "K", ")", ":", "if", "(", "not", "u", ")", ":", "return", "dup_lcm", "(", "f", ",", "g", ",", "K", ")", "if", "K", ".", "has_Field", ":", "return", "dmp_ff_lcm", "(", "f", ",", "g", ",", "u", ",", "K", ")", "else", ":", "return", "dmp_rr_lcm", "(", "f", ",", "g", ",", "u", ",", "K", ")" ]
computes polynomial lcm of f and g in k[x] .
train
false
4,733
def get_request():
    """Return the request currently being processed (via crum)."""
    return crum.get_current_request()
[ "def", "get_request", "(", ")", ":", "return", "crum", ".", "get_current_request", "(", ")" ]
returns the active request instance .
train
false
4,734
def test_hstack_vstack():
    """Check that sparse hstack/vstack upcast block dtypes as expected."""
    def make_block(dtype):
        return theano.sparse.csr_matrix(name=('%s block' % dtype),
                                        dtype=dtype)

    def get_expected_dtype(blocks, to_dtype):
        # An explicit target dtype wins; otherwise upcast the blocks'.
        if to_dtype is not None:
            return to_dtype
        return theano.scalar.upcast(*tuple(b.dtype for b in blocks))

    dtypes = ('complex128', theano.config.floatX)
    blocks = [make_block(dtype) for dtype in dtypes]
    stackers = (theano.sparse.vstack, theano.sparse.hstack)
    for (stack_dimension, stack_function) in enumerate(stackers):
        for to_dtype in (None,) + dtypes:
            stacked = stack_function(blocks, dtype=to_dtype)
            assert stacked.dtype == get_expected_dtype(blocks, to_dtype)
[ "def", "test_hstack_vstack", "(", ")", ":", "def", "make_block", "(", "dtype", ")", ":", "return", "theano", ".", "sparse", ".", "csr_matrix", "(", "name", "=", "(", "'%s block'", "%", "dtype", ")", ",", "dtype", "=", "dtype", ")", "def", "get_expected_dtype", "(", "blocks", ",", "to_dtype", ")", ":", "if", "(", "to_dtype", "is", "None", ")", ":", "block_dtypes", "=", "tuple", "(", "(", "b", ".", "dtype", "for", "b", "in", "blocks", ")", ")", "return", "theano", ".", "scalar", ".", "upcast", "(", "*", "block_dtypes", ")", "else", ":", "return", "to_dtype", "dtypes", "=", "(", "'complex128'", ",", "theano", ".", "config", ".", "floatX", ")", "blocks", "=", "[", "make_block", "(", "dtype", ")", "for", "dtype", "in", "dtypes", "]", "for", "(", "stack_dimension", ",", "stack_function", ")", "in", "enumerate", "(", "(", "theano", ".", "sparse", ".", "vstack", ",", "theano", ".", "sparse", ".", "hstack", ")", ")", ":", "for", "to_dtype", "in", "(", "(", "None", ",", ")", "+", "dtypes", ")", ":", "stacked_blocks", "=", "stack_function", "(", "blocks", ",", "dtype", "=", "to_dtype", ")", "expected_dtype", "=", "get_expected_dtype", "(", "blocks", ",", "to_dtype", ")", "assert", "(", "stacked_blocks", ".", "dtype", "==", "expected_dtype", ")" ]
tests sparse .
train
false
4,735
def invalid_kwargs(invalid_kwargs, raise_exc=True):
    """Build (and by default raise) an error message for bad kwargs.

    Returns the message instead of raising when *raise_exc* is false;
    returns None when *invalid_kwargs* is empty.
    """
    if not invalid_kwargs:
        return
    if isinstance(invalid_kwargs, dict):
        # Normalize a dict into 'key=value' strings.
        invalid_kwargs = ['{0}={1}'.format(x, y)
                          for (x, y) in six.iteritems(invalid_kwargs)]
    msg = 'The following keyword arguments are not valid: {0}'.format(
        ', '.join(invalid_kwargs))
    if raise_exc:
        raise SaltInvocationError(msg)
    return msg
[ "def", "invalid_kwargs", "(", "invalid_kwargs", ",", "raise_exc", "=", "True", ")", ":", "if", "invalid_kwargs", ":", "if", "isinstance", "(", "invalid_kwargs", ",", "dict", ")", ":", "new_invalid", "=", "[", "'{0}={1}'", ".", "format", "(", "x", ",", "y", ")", "for", "(", "x", ",", "y", ")", "in", "six", ".", "iteritems", "(", "invalid_kwargs", ")", "]", "invalid_kwargs", "=", "new_invalid", "msg", "=", "'The following keyword arguments are not valid: {0}'", ".", "format", "(", "', '", ".", "join", "(", "invalid_kwargs", ")", ")", "if", "raise_exc", ":", "raise", "SaltInvocationError", "(", "msg", ")", "else", ":", "return", "msg" ]
raise a saltinvocationerror if invalid_kwargs is non-empty .
train
true
4,736
def FileOutputStream(filename, real_filename=None):
    """Open an OutputStream writing to *real_filename* (default *filename*)."""
    assert isinstance(filename, unicode)
    output = open(real_filename or filename, 'wb')
    return OutputStream(output, filename=filename)
[ "def", "FileOutputStream", "(", "filename", ",", "real_filename", "=", "None", ")", ":", "assert", "isinstance", "(", "filename", ",", "unicode", ")", "if", "(", "not", "real_filename", ")", ":", "real_filename", "=", "filename", "output", "=", "open", "(", "real_filename", ",", "'wb'", ")", "return", "OutputStream", "(", "output", ",", "filename", "=", "filename", ")" ]
create an output stream into file with given name .
train
false
4,737
def dmp_pquo(f, g, u, K):
    """Return the polynomial pseudo-quotient of ``f`` by ``g`` in ``K[X]``."""
    (quotient, _remainder) = dmp_pdiv(f, g, u, K)
    return quotient
[ "def", "dmp_pquo", "(", "f", ",", "g", ",", "u", ",", "K", ")", ":", "return", "dmp_pdiv", "(", "f", ",", "g", ",", "u", ",", "K", ")", "[", "0", "]" ]
polynomial exact pseudo-quotient in k[x] .
train
false
4,739
def _get_http(url, temp_file_name, initial_size, file_size, verbose_bool):
    """Download *url* into *temp_file_name*, resuming at *initial_size*.

    Verifies the final size against *file_size* and adapts the read
    chunk size to the observed transfer speed.
    """
    req = urllib.request.Request(url)
    if initial_size > 0:
        req.headers['Range'] = 'bytes=%s-' % (initial_size,)
    try:
        response = urllib.request.urlopen(req)
    except Exception:
        # Some servers reject Range requests outright; retry from scratch.
        logger.info('Resuming download failed (server rejected the request). Attempting to restart downloading the entire file.')
        del req.headers['Range']
        response = urllib.request.urlopen(req)
    total_size = int(response.headers.get('Content-Length', '1').strip())
    if initial_size > 0 and file_size == total_size:
        # Server ignored the Range header; start over from byte zero.
        logger.info('Resuming download failed (resume file size mismatch). Attempting to restart downloading the entire file.')
        initial_size = 0
    total_size += initial_size
    if total_size != file_size:
        raise RuntimeError('URL could not be parsed properly')
    mode = 'ab' if initial_size > 0 else 'wb'
    progress = ProgressBar(total_size, initial_value=initial_size,
                           max_chars=40, spinner=True, mesg='file_sizes',
                           verbose_bool=verbose_bool)
    chunk_size = 8192
    with open(temp_file_name, mode) as local_file:
        while True:
            t0 = time.time()
            chunk = response.read(chunk_size)
            dt = time.time() - t0
            # Keep each read between roughly 5 ms and 100 ms of work.
            if dt < 0.005:
                chunk_size *= 2
            elif dt > 0.1 and chunk_size > 8192:
                chunk_size = chunk_size // 2
            if not chunk:
                if verbose_bool:
                    sys.stdout.write('\n')
                    sys.stdout.flush()
                break
            local_file.write(chunk)
            progress.update_with_increment_value(len(chunk),
                                                 mesg='file_sizes')
[ "def", "_get_http", "(", "url", ",", "temp_file_name", ",", "initial_size", ",", "file_size", ",", "verbose_bool", ")", ":", "req", "=", "urllib", ".", "request", ".", "Request", "(", "url", ")", "if", "(", "initial_size", ">", "0", ")", ":", "req", ".", "headers", "[", "'Range'", "]", "=", "(", "'bytes=%s-'", "%", "(", "initial_size", ",", ")", ")", "try", ":", "response", "=", "urllib", ".", "request", ".", "urlopen", "(", "req", ")", "except", "Exception", ":", "logger", ".", "info", "(", "'Resuming download failed (server rejected the request). Attempting to restart downloading the entire file.'", ")", "del", "req", ".", "headers", "[", "'Range'", "]", "response", "=", "urllib", ".", "request", ".", "urlopen", "(", "req", ")", "total_size", "=", "int", "(", "response", ".", "headers", ".", "get", "(", "'Content-Length'", ",", "'1'", ")", ".", "strip", "(", ")", ")", "if", "(", "(", "initial_size", ">", "0", ")", "and", "(", "file_size", "==", "total_size", ")", ")", ":", "logger", ".", "info", "(", "'Resuming download failed (resume file size mismatch). 
Attempting to restart downloading the entire file.'", ")", "initial_size", "=", "0", "total_size", "+=", "initial_size", "if", "(", "total_size", "!=", "file_size", ")", ":", "raise", "RuntimeError", "(", "'URL could not be parsed properly'", ")", "mode", "=", "(", "'ab'", "if", "(", "initial_size", ">", "0", ")", "else", "'wb'", ")", "progress", "=", "ProgressBar", "(", "total_size", ",", "initial_value", "=", "initial_size", ",", "max_chars", "=", "40", ",", "spinner", "=", "True", ",", "mesg", "=", "'file_sizes'", ",", "verbose_bool", "=", "verbose_bool", ")", "chunk_size", "=", "8192", "with", "open", "(", "temp_file_name", ",", "mode", ")", "as", "local_file", ":", "while", "True", ":", "t0", "=", "time", ".", "time", "(", ")", "chunk", "=", "response", ".", "read", "(", "chunk_size", ")", "dt", "=", "(", "time", ".", "time", "(", ")", "-", "t0", ")", "if", "(", "dt", "<", "0.005", ")", ":", "chunk_size", "*=", "2", "elif", "(", "(", "dt", ">", "0.1", ")", "and", "(", "chunk_size", ">", "8192", ")", ")", ":", "chunk_size", "=", "(", "chunk_size", "//", "2", ")", "if", "(", "not", "chunk", ")", ":", "if", "verbose_bool", ":", "sys", ".", "stdout", ".", "write", "(", "'\\n'", ")", "sys", ".", "stdout", ".", "flush", "(", ")", "break", "local_file", ".", "write", "(", "chunk", ")", "progress", ".", "update_with_increment_value", "(", "len", "(", "chunk", ")", ",", "mesg", "=", "'file_sizes'", ")" ]
provides an authed http object .
train
false
4,740
def normalize_time(timestamp):
    """Convert a timezone-aware datetime to naive UTC; pass naive through."""
    offset = timestamp.utcoffset()
    if offset is None:
        # Already naive: nothing to normalize.
        return timestamp
    return timestamp.replace(tzinfo=None) - offset
[ "def", "normalize_time", "(", "timestamp", ")", ":", "offset", "=", "timestamp", ".", "utcoffset", "(", ")", "if", "(", "offset", "is", "None", ")", ":", "return", "timestamp", "return", "(", "timestamp", ".", "replace", "(", "tzinfo", "=", "None", ")", "-", "offset", ")" ]
normalize time in arbitrary timezone to utc naive object .
train
true
4,741
def matchPreviousLiteral(expr):
    """Return an expression matching a literal repeat of what *expr* matched.

    The returned Forward is re-targeted on every successful parse of
    *expr* to literally match the tokens just seen.
    """
    rep = Forward()

    def _retarget_repeater(s, l, t):
        if not t:
            rep << Empty()
        elif len(t) == 1:
            rep << t[0]
        else:
            tflat = _flatten(t.asList())
            rep << And([Literal(tt) for tt in tflat])

    expr.addParseAction(_retarget_repeater, callDuringTry=True)
    return rep
[ "def", "matchPreviousLiteral", "(", "expr", ")", ":", "rep", "=", "Forward", "(", ")", "def", "copyTokenToRepeater", "(", "s", ",", "l", ",", "t", ")", ":", "if", "t", ":", "if", "(", "len", "(", "t", ")", "==", "1", ")", ":", "(", "rep", "<<", "t", "[", "0", "]", ")", "else", ":", "tflat", "=", "_flatten", "(", "t", ".", "asList", "(", ")", ")", "(", "rep", "<<", "And", "(", "[", "Literal", "(", "tt", ")", "for", "tt", "in", "tflat", "]", ")", ")", "else", ":", "(", "rep", "<<", "Empty", "(", ")", ")", "expr", ".", "addParseAction", "(", "copyTokenToRepeater", ",", "callDuringTry", "=", "True", ")", "return", "rep" ]
helper to define an expression that is indirectly defined from the tokens matched in a previous expression .
train
true
4,742
def _service_is_upstart(name):
    """Return True when Upstart is present and *name* has a job file."""
    conf_path = '/etc/init/{0}.conf'.format(name)
    return HAS_UPSTART and os.path.exists(conf_path)
[ "def", "_service_is_upstart", "(", "name", ")", ":", "return", "(", "HAS_UPSTART", "and", "os", ".", "path", ".", "exists", "(", "'/etc/init/{0}.conf'", ".", "format", "(", "name", ")", ")", ")" ]
from "writing jobs" at URL jobs are defined in files placed in /etc/init .
train
false
4,746
def scourCoordinates(data, options, forceCommaWsp=False): if (data != None): newData = [] c = 0 previousCoord = '' for coord in data: scouredCoord = scourUnitlessLength(coord, needsRendererWorkaround=options.renderer_workaround) if ((c > 0) and (forceCommaWsp or scouredCoord[0].isdigit() or ((scouredCoord[0] == '.') and (not (('.' in previousCoord) or ('e' in previousCoord)))))): newData.append(' ') newData.append(scouredCoord) previousCoord = scouredCoord c += 1 if options.renderer_workaround: if (len(newData) > 0): for i in xrange(1, len(newData)): if ((newData[i][0] == '-') and ('e' in newData[(i - 1)])): newData[(i - 1)] += ' ' return ''.join(newData) else: return ''.join(newData) return ''
[ "def", "scourCoordinates", "(", "data", ",", "options", ",", "forceCommaWsp", "=", "False", ")", ":", "if", "(", "data", "!=", "None", ")", ":", "newData", "=", "[", "]", "c", "=", "0", "previousCoord", "=", "''", "for", "coord", "in", "data", ":", "scouredCoord", "=", "scourUnitlessLength", "(", "coord", ",", "needsRendererWorkaround", "=", "options", ".", "renderer_workaround", ")", "if", "(", "(", "c", ">", "0", ")", "and", "(", "forceCommaWsp", "or", "scouredCoord", "[", "0", "]", ".", "isdigit", "(", ")", "or", "(", "(", "scouredCoord", "[", "0", "]", "==", "'.'", ")", "and", "(", "not", "(", "(", "'.'", "in", "previousCoord", ")", "or", "(", "'e'", "in", "previousCoord", ")", ")", ")", ")", ")", ")", ":", "newData", ".", "append", "(", "' '", ")", "newData", ".", "append", "(", "scouredCoord", ")", "previousCoord", "=", "scouredCoord", "c", "+=", "1", "if", "options", ".", "renderer_workaround", ":", "if", "(", "len", "(", "newData", ")", ">", "0", ")", ":", "for", "i", "in", "xrange", "(", "1", ",", "len", "(", "newData", ")", ")", ":", "if", "(", "(", "newData", "[", "i", "]", "[", "0", "]", "==", "'-'", ")", "and", "(", "'e'", "in", "newData", "[", "(", "i", "-", "1", ")", "]", ")", ")", ":", "newData", "[", "(", "i", "-", "1", ")", "]", "+=", "' '", "return", "''", ".", "join", "(", "newData", ")", "else", ":", "return", "''", ".", "join", "(", "newData", ")", "return", "''" ]
serializes coordinate data with some cleanups: - removes all trailing zeros after the decimal - integerize coordinates if possible - removes extraneous whitespace - adds spaces between values in a subcommand if required .
train
false
4,747
@pytest.mark.django_db def test_verify_user_without_existing_email(trans_member): member = trans_member with pytest.raises(EmailAddress.DoesNotExist): EmailAddress.objects.get(user=member) member.email = 'member@this.test' accounts.utils.verify_user(member) EmailAddress.objects.get(user=member, email='member@this.test', primary=True, verified=True) assert (get_user_model().objects.get(pk=member.pk).email == '')
[ "@", "pytest", ".", "mark", ".", "django_db", "def", "test_verify_user_without_existing_email", "(", "trans_member", ")", ":", "member", "=", "trans_member", "with", "pytest", ".", "raises", "(", "EmailAddress", ".", "DoesNotExist", ")", ":", "EmailAddress", ".", "objects", ".", "get", "(", "user", "=", "member", ")", "member", ".", "email", "=", "'member@this.test'", "accounts", ".", "utils", ".", "verify_user", "(", "member", ")", "EmailAddress", ".", "objects", ".", "get", "(", "user", "=", "member", ",", "email", "=", "'member@this.test'", ",", "primary", "=", "True", ",", "verified", "=", "True", ")", "assert", "(", "get_user_model", "(", ")", ".", "objects", ".", "get", "(", "pk", "=", "member", ".", "pk", ")", ".", "email", "==", "''", ")" ]
test verifying user using verify_user function .
train
false
4,748
def vserver_servicegroup_exists(v_name, sg_name, **connection_args): return (_vserver_servicegroup_get(v_name, sg_name, **connection_args) is not None)
[ "def", "vserver_servicegroup_exists", "(", "v_name", ",", "sg_name", ",", "**", "connection_args", ")", ":", "return", "(", "_vserver_servicegroup_get", "(", "v_name", ",", "sg_name", ",", "**", "connection_args", ")", "is", "not", "None", ")" ]
checks if a servicegroup is tied to a vserver cli example: .
train
false
4,749
def download_file_insecure(url, target): src = urlopen(url) try: data = src.read() finally: src.close() with open(target, 'wb') as dst: dst.write(data)
[ "def", "download_file_insecure", "(", "url", ",", "target", ")", ":", "src", "=", "urlopen", "(", "url", ")", "try", ":", "data", "=", "src", ".", "read", "(", ")", "finally", ":", "src", ".", "close", "(", ")", "with", "open", "(", "target", ",", "'wb'", ")", "as", "dst", ":", "dst", ".", "write", "(", "data", ")" ]
use python to download the file .
train
true
4,750
def delete_route_table(route_table_id=None, route_table_name=None, region=None, key=None, keyid=None, profile=None): return _delete_resource(resource='route_table', name=route_table_name, resource_id=route_table_id, region=region, key=key, keyid=keyid, profile=profile)
[ "def", "delete_route_table", "(", "route_table_id", "=", "None", ",", "route_table_name", "=", "None", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "return", "_delete_resource", "(", "resource", "=", "'route_table'", ",", "name", "=", "route_table_name", ",", "resource_id", "=", "route_table_id", ",", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")" ]
deletes a route table .
train
true
4,751
def _coo_to_sparse_series(A, dense_index=False): s = Series(A.data, MultiIndex.from_arrays((A.row, A.col))) s = s.sort_index() s = s.to_sparse() if dense_index: i = range(A.shape[0]) j = range(A.shape[1]) ind = MultiIndex.from_product([i, j]) s = s.reindex_axis(ind) return s
[ "def", "_coo_to_sparse_series", "(", "A", ",", "dense_index", "=", "False", ")", ":", "s", "=", "Series", "(", "A", ".", "data", ",", "MultiIndex", ".", "from_arrays", "(", "(", "A", ".", "row", ",", "A", ".", "col", ")", ")", ")", "s", "=", "s", ".", "sort_index", "(", ")", "s", "=", "s", ".", "to_sparse", "(", ")", "if", "dense_index", ":", "i", "=", "range", "(", "A", ".", "shape", "[", "0", "]", ")", "j", "=", "range", "(", "A", ".", "shape", "[", "1", "]", ")", "ind", "=", "MultiIndex", ".", "from_product", "(", "[", "i", ",", "j", "]", ")", "s", "=", "s", ".", "reindex_axis", "(", "ind", ")", "return", "s" ]
convert a scipy .
train
true
4,752
def _formatinfo(format): size = struct.calcsize(format) return (size, len(struct.unpack(format, ('\x00' * size))))
[ "def", "_formatinfo", "(", "format", ")", ":", "size", "=", "struct", ".", "calcsize", "(", "format", ")", "return", "(", "size", ",", "len", "(", "struct", ".", "unpack", "(", "format", ",", "(", "'\\x00'", "*", "size", ")", ")", ")", ")" ]
calculate the size and number of items in a struct format .
train
true
4,754
def resource_media_fields(document, resource): media_fields = app.config['DOMAIN'][resource]['_media'] return [field for field in media_fields if (field in document)]
[ "def", "resource_media_fields", "(", "document", ",", "resource", ")", ":", "media_fields", "=", "app", ".", "config", "[", "'DOMAIN'", "]", "[", "resource", "]", "[", "'_media'", "]", "return", "[", "field", "for", "field", "in", "media_fields", "if", "(", "field", "in", "document", ")", "]" ]
returns a list of media fields defined in the resource schema .
train
false
4,756
def set_instances(name, instances, test=False, region=None, key=None, keyid=None, profile=None): ret = True current = set([i['instance_id'] for i in get_instance_health(name, region, key, keyid, profile)]) desired = set(instances) add = (desired - current) remove = (current - desired) if test: return bool((add or remove)) if len(remove): if (deregister_instances(name, list(remove), region, key, keyid, profile) is False): ret = False if len(add): if (register_instances(name, list(add), region, key, keyid, profile) is False): ret = False return ret
[ "def", "set_instances", "(", "name", ",", "instances", ",", "test", "=", "False", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "ret", "=", "True", "current", "=", "set", "(", "[", "i", "[", "'instance_id'", "]", "for", "i", "in", "get_instance_health", "(", "name", ",", "region", ",", "key", ",", "keyid", ",", "profile", ")", "]", ")", "desired", "=", "set", "(", "instances", ")", "add", "=", "(", "desired", "-", "current", ")", "remove", "=", "(", "current", "-", "desired", ")", "if", "test", ":", "return", "bool", "(", "(", "add", "or", "remove", ")", ")", "if", "len", "(", "remove", ")", ":", "if", "(", "deregister_instances", "(", "name", ",", "list", "(", "remove", ")", ",", "region", ",", "key", ",", "keyid", ",", "profile", ")", "is", "False", ")", ":", "ret", "=", "False", "if", "len", "(", "add", ")", ":", "if", "(", "register_instances", "(", "name", ",", "list", "(", "add", ")", ",", "region", ",", "key", ",", "keyid", ",", "profile", ")", "is", "False", ")", ":", "ret", "=", "False", "return", "ret" ]
set the instances assigned to an elb to exactly the list given cli example: .
train
true
4,757
def _check_logger_class(): import logging if hasattr(logging, 'multiprocessing'): return logging._acquireLock() try: OldLoggerClass = logging.getLoggerClass() if (not getattr(OldLoggerClass, '_process_aware', False)): class ProcessAwareLogger(OldLoggerClass, ): _process_aware = True def makeRecord(self, *args, **kwds): record = OldLoggerClass.makeRecord(self, *args, **kwds) record.processName = current_process()._name return record logging.setLoggerClass(ProcessAwareLogger) finally: logging._releaseLock()
[ "def", "_check_logger_class", "(", ")", ":", "import", "logging", "if", "hasattr", "(", "logging", ",", "'multiprocessing'", ")", ":", "return", "logging", ".", "_acquireLock", "(", ")", "try", ":", "OldLoggerClass", "=", "logging", ".", "getLoggerClass", "(", ")", "if", "(", "not", "getattr", "(", "OldLoggerClass", ",", "'_process_aware'", ",", "False", ")", ")", ":", "class", "ProcessAwareLogger", "(", "OldLoggerClass", ",", ")", ":", "_process_aware", "=", "True", "def", "makeRecord", "(", "self", ",", "*", "args", ",", "**", "kwds", ")", ":", "record", "=", "OldLoggerClass", ".", "makeRecord", "(", "self", ",", "*", "args", ",", "**", "kwds", ")", "record", ".", "processName", "=", "current_process", "(", ")", ".", "_name", "return", "record", "logging", ".", "setLoggerClass", "(", "ProcessAwareLogger", ")", "finally", ":", "logging", ".", "_releaseLock", "(", ")" ]
make sure process name is recorded when loggers are used .
train
false
4,758
def format_user_subscriptions(user): user_subs_available = list(constants.USER_SUBSCRIPTIONS_AVAILABLE.keys()) subscriptions = [serialize_event(user, subscription, event_description=user_subs_available.pop(user_subs_available.index(getattr(subscription, 'event_name')))) for subscription in get_all_user_subscriptions(user) if ((subscription is not None) and (getattr(subscription, 'event_name') in user_subs_available))] subscriptions.extend([serialize_event(user, event_description=sub) for sub in user_subs_available]) return subscriptions
[ "def", "format_user_subscriptions", "(", "user", ")", ":", "user_subs_available", "=", "list", "(", "constants", ".", "USER_SUBSCRIPTIONS_AVAILABLE", ".", "keys", "(", ")", ")", "subscriptions", "=", "[", "serialize_event", "(", "user", ",", "subscription", ",", "event_description", "=", "user_subs_available", ".", "pop", "(", "user_subs_available", ".", "index", "(", "getattr", "(", "subscription", ",", "'event_name'", ")", ")", ")", ")", "for", "subscription", "in", "get_all_user_subscriptions", "(", "user", ")", "if", "(", "(", "subscription", "is", "not", "None", ")", "and", "(", "getattr", "(", "subscription", ",", "'event_name'", ")", "in", "user_subs_available", ")", ")", "]", "subscriptions", ".", "extend", "(", "[", "serialize_event", "(", "user", ",", "event_description", "=", "sub", ")", "for", "sub", "in", "user_subs_available", "]", ")", "return", "subscriptions" ]
format user-level subscriptions for user settings page .
train
false
4,759
@receiver(post_delete, sender=CourseTeam, dispatch_uid='teams.signals.course_team_post_delete_callback') def course_team_post_delete_callback(**kwargs): try: CourseTeamIndexer.remove(kwargs['instance']) except ElasticSearchConnectionError: pass
[ "@", "receiver", "(", "post_delete", ",", "sender", "=", "CourseTeam", ",", "dispatch_uid", "=", "'teams.signals.course_team_post_delete_callback'", ")", "def", "course_team_post_delete_callback", "(", "**", "kwargs", ")", ":", "try", ":", "CourseTeamIndexer", ".", "remove", "(", "kwargs", "[", "'instance'", "]", ")", "except", "ElasticSearchConnectionError", ":", "pass" ]
reindex object after delete .
train
false
4,760
def vararg_callback(option, opt_str, value, parser): value = [value] def floatable(str): try: float(str) return True except ValueError: return False for arg in parser.rargs: if ((arg[:2] == '--') and (len(arg) > 2)): break if ((arg[:1] == '-') and (len(arg) > 1) and (not floatable(arg))): break value.append(arg) del parser.rargs[:(len(value) - 1)] setattr(parser.values, option.dest, value)
[ "def", "vararg_callback", "(", "option", ",", "opt_str", ",", "value", ",", "parser", ")", ":", "value", "=", "[", "value", "]", "def", "floatable", "(", "str", ")", ":", "try", ":", "float", "(", "str", ")", "return", "True", "except", "ValueError", ":", "return", "False", "for", "arg", "in", "parser", ".", "rargs", ":", "if", "(", "(", "arg", "[", ":", "2", "]", "==", "'--'", ")", "and", "(", "len", "(", "arg", ")", ">", "2", ")", ")", ":", "break", "if", "(", "(", "arg", "[", ":", "1", "]", "==", "'-'", ")", "and", "(", "len", "(", "arg", ")", ">", "1", ")", "and", "(", "not", "floatable", "(", "arg", ")", ")", ")", ":", "break", "value", ".", "append", "(", "arg", ")", "del", "parser", ".", "rargs", "[", ":", "(", "len", "(", "value", ")", "-", "1", ")", "]", "setattr", "(", "parser", ".", "values", ",", "option", ".", "dest", ",", "value", ")" ]
callback for an option with variable arguments .
train
false
4,761
def _af_rmul(a, b): return [a[i] for i in b]
[ "def", "_af_rmul", "(", "a", ",", "b", ")", ":", "return", "[", "a", "[", "i", "]", "for", "i", "in", "b", "]" ]
return the product b*a; input and output are array forms .
train
false
4,762
def rgb2caffe(im, out_size=(128, 171)): im = np.copy(im) if (len(im.shape) == 2): im = color.gray2rgb(im) (h, w, _) = im.shape im = skimage.transform.resize(im, out_size, preserve_range=True) im = np.swapaxes(np.swapaxes(im, 1, 2), 0, 1) im = im[::(-1), :, :] return np.array(im, theano.config.floatX)
[ "def", "rgb2caffe", "(", "im", ",", "out_size", "=", "(", "128", ",", "171", ")", ")", ":", "im", "=", "np", ".", "copy", "(", "im", ")", "if", "(", "len", "(", "im", ".", "shape", ")", "==", "2", ")", ":", "im", "=", "color", ".", "gray2rgb", "(", "im", ")", "(", "h", ",", "w", ",", "_", ")", "=", "im", ".", "shape", "im", "=", "skimage", ".", "transform", ".", "resize", "(", "im", ",", "out_size", ",", "preserve_range", "=", "True", ")", "im", "=", "np", ".", "swapaxes", "(", "np", ".", "swapaxes", "(", "im", ",", "1", ",", "2", ")", ",", "0", ",", "1", ")", "im", "=", "im", "[", ":", ":", "(", "-", "1", ")", ",", ":", ",", ":", "]", "return", "np", ".", "array", "(", "im", ",", "theano", ".", "config", ".", "floatX", ")" ]
converts an rgb image to caffe format and downscales it as needed by c3d parameters im numpy array an rgb image downscale returns a caffe image in bgr format .
train
false
4,763
def add_team_member(name, team_name, profile='github'): team = get_team(team_name, profile=profile) if (not team): log.error('Team {0} does not exist'.format(team_name)) return False try: client = _get_client(profile) organization = client.get_organization(_get_config_value(profile, 'org_name')) team = organization.get_team(team['id']) member = client.get_user(name) except UnknownObjectException as e: log.exception('Resource not found: {0}'.format(team['id'])) return False try: (headers, data) = team._requester.requestJsonAndCheck('PUT', ((team.url + '/memberships/') + member._identity), input={'role': 'member'}, parameters={'role': 'member'}) except github.GithubException as e: log.exception('Error in adding a member to a team: {0}'.format(str(e))) return False return True
[ "def", "add_team_member", "(", "name", ",", "team_name", ",", "profile", "=", "'github'", ")", ":", "team", "=", "get_team", "(", "team_name", ",", "profile", "=", "profile", ")", "if", "(", "not", "team", ")", ":", "log", ".", "error", "(", "'Team {0} does not exist'", ".", "format", "(", "team_name", ")", ")", "return", "False", "try", ":", "client", "=", "_get_client", "(", "profile", ")", "organization", "=", "client", ".", "get_organization", "(", "_get_config_value", "(", "profile", ",", "'org_name'", ")", ")", "team", "=", "organization", ".", "get_team", "(", "team", "[", "'id'", "]", ")", "member", "=", "client", ".", "get_user", "(", "name", ")", "except", "UnknownObjectException", "as", "e", ":", "log", ".", "exception", "(", "'Resource not found: {0}'", ".", "format", "(", "team", "[", "'id'", "]", ")", ")", "return", "False", "try", ":", "(", "headers", ",", "data", ")", "=", "team", ".", "_requester", ".", "requestJsonAndCheck", "(", "'PUT'", ",", "(", "(", "team", ".", "url", "+", "'/memberships/'", ")", "+", "member", ".", "_identity", ")", ",", "input", "=", "{", "'role'", ":", "'member'", "}", ",", "parameters", "=", "{", "'role'", ":", "'member'", "}", ")", "except", "github", ".", "GithubException", "as", "e", ":", "log", ".", "exception", "(", "'Error in adding a member to a team: {0}'", ".", "format", "(", "str", "(", "e", ")", ")", ")", "return", "False", "return", "True" ]
adds a team member to a team with team_name .
train
true
4,764
def getIntermediateLocation(alongWay, begin, end): return ((begin * (1.0 - alongWay)) + (end * alongWay))
[ "def", "getIntermediateLocation", "(", "alongWay", ",", "begin", ",", "end", ")", ":", "return", "(", "(", "begin", "*", "(", "1.0", "-", "alongWay", ")", ")", "+", "(", "end", "*", "alongWay", ")", ")" ]
get the intermediate location between begin and end .
train
false
4,765
@auth.route('/activate', methods=['GET', 'POST']) def request_activation_token(token=None): if (current_user.is_active or (not flaskbb_config['ACTIVATE_ACCOUNT'])): flash(_('This account is already activated.'), 'info') return redirect(url_for('forum.index')) form = RequestActivationForm() if form.validate_on_submit(): user = User.query.filter_by(email=form.email.data).first() send_activation_token.delay(user) flash(_('A new account activation token has been sent to your email address.'), 'success') return redirect(url_for('auth.activate_account')) return render_template('auth/request_account_activation.html', form=form)
[ "@", "auth", ".", "route", "(", "'/activate'", ",", "methods", "=", "[", "'GET'", ",", "'POST'", "]", ")", "def", "request_activation_token", "(", "token", "=", "None", ")", ":", "if", "(", "current_user", ".", "is_active", "or", "(", "not", "flaskbb_config", "[", "'ACTIVATE_ACCOUNT'", "]", ")", ")", ":", "flash", "(", "_", "(", "'This account is already activated.'", ")", ",", "'info'", ")", "return", "redirect", "(", "url_for", "(", "'forum.index'", ")", ")", "form", "=", "RequestActivationForm", "(", ")", "if", "form", ".", "validate_on_submit", "(", ")", ":", "user", "=", "User", ".", "query", ".", "filter_by", "(", "email", "=", "form", ".", "email", ".", "data", ")", ".", "first", "(", ")", "send_activation_token", ".", "delay", "(", "user", ")", "flash", "(", "_", "(", "'A new account activation token has been sent to your email address.'", ")", ",", "'success'", ")", "return", "redirect", "(", "url_for", "(", "'auth.activate_account'", ")", ")", "return", "render_template", "(", "'auth/request_account_activation.html'", ",", "form", "=", "form", ")" ]
requests a new account activation token .
train
false
4,766
def first_false_index(iterable, pred=None, default=None): if (pred is None): func = operator.not_ else: func = (lambda x: (not pred(x))) return first_true_index(iterable, func, default)
[ "def", "first_false_index", "(", "iterable", ",", "pred", "=", "None", ",", "default", "=", "None", ")", ":", "if", "(", "pred", "is", "None", ")", ":", "func", "=", "operator", ".", "not_", "else", ":", "func", "=", "(", "lambda", "x", ":", "(", "not", "pred", "(", "x", ")", ")", ")", "return", "first_true_index", "(", "iterable", ",", "func", ",", "default", ")" ]
find the first index position for the which the callable pred returns false .
train
false
4,767
def _encode_basestring(s): if (isinstance(s, str) and (HAS_UTF8.search(s) is not None)): s = s.decode('utf-8') def replace(match): return ESCAPE_DCT[match.group(0)] return ((u'"' + ESCAPE.sub(replace, s)) + u'"')
[ "def", "_encode_basestring", "(", "s", ")", ":", "if", "(", "isinstance", "(", "s", ",", "str", ")", "and", "(", "HAS_UTF8", ".", "search", "(", "s", ")", "is", "not", "None", ")", ")", ":", "s", "=", "s", ".", "decode", "(", "'utf-8'", ")", "def", "replace", "(", "match", ")", ":", "return", "ESCAPE_DCT", "[", "match", ".", "group", "(", "0", ")", "]", "return", "(", "(", "u'\"'", "+", "ESCAPE", ".", "sub", "(", "replace", ",", "s", ")", ")", "+", "u'\"'", ")" ]
return a json representation of a python string .
train
true
4,772
def split_words(text): if ((not text) or (not text.strip())): return [] words = split_words_re.findall(text) return words
[ "def", "split_words", "(", "text", ")", ":", "if", "(", "(", "not", "text", ")", "or", "(", "not", "text", ".", "strip", "(", ")", ")", ")", ":", "return", "[", "]", "words", "=", "split_words_re", ".", "findall", "(", "text", ")", "return", "words" ]
split a string into array of words .
train
true
4,773
def _detect_filename(ext): import inspect from os.path import isfile, dirname, basename, splitext, join frame = inspect.currentframe() while (frame.f_back and (frame.f_globals.get('name') != '__main__')): frame = frame.f_back filename = frame.f_globals.get('__file__') if (filename and isfile(filename)): (name, _) = splitext(basename(filename)) return join(dirname(filename), ((name + '.') + ext))
[ "def", "_detect_filename", "(", "ext", ")", ":", "import", "inspect", "from", "os", ".", "path", "import", "isfile", ",", "dirname", ",", "basename", ",", "splitext", ",", "join", "frame", "=", "inspect", ".", "currentframe", "(", ")", "while", "(", "frame", ".", "f_back", "and", "(", "frame", ".", "f_globals", ".", "get", "(", "'name'", ")", "!=", "'__main__'", ")", ")", ":", "frame", "=", "frame", ".", "f_back", "filename", "=", "frame", ".", "f_globals", ".", "get", "(", "'__file__'", ")", "if", "(", "filename", "and", "isfile", "(", "filename", ")", ")", ":", "(", "name", ",", "_", ")", "=", "splitext", "(", "basename", "(", "filename", ")", ")", "return", "join", "(", "dirname", "(", "filename", ")", ",", "(", "(", "name", "+", "'.'", ")", "+", "ext", ")", ")" ]
detect filename from the name of the script being run .
train
false
4,774
def get_cohorted_commentables(course_key): course_cohort_settings = get_course_cohort_settings(course_key) if (not course_cohort_settings.is_cohorted): ans = set() else: ans = set(course_cohort_settings.cohorted_discussions) return ans
[ "def", "get_cohorted_commentables", "(", "course_key", ")", ":", "course_cohort_settings", "=", "get_course_cohort_settings", "(", "course_key", ")", "if", "(", "not", "course_cohort_settings", ".", "is_cohorted", ")", ":", "ans", "=", "set", "(", ")", "else", ":", "ans", "=", "set", "(", "course_cohort_settings", ".", "cohorted_discussions", ")", "return", "ans" ]
given a course_key return a set of strings representing cohorted commentables .
train
false
4,775
def is_attrib_modified(kev): return (kev.fflags & select.KQ_NOTE_ATTRIB)
[ "def", "is_attrib_modified", "(", "kev", ")", ":", "return", "(", "kev", ".", "fflags", "&", "select", ".", "KQ_NOTE_ATTRIB", ")" ]
determines whether the given kevent represents attribute modification .
train
false
4,777
def filter_path_matches(match_text, file_list, case_sensitive): files = set(file_list) files_and_dirs = utils.add_parents(files) dirs = files_and_dirs.difference(files) paths = filter_matches(match_text, files_and_dirs, case_sensitive) return (paths, dirs)
[ "def", "filter_path_matches", "(", "match_text", ",", "file_list", ",", "case_sensitive", ")", ":", "files", "=", "set", "(", "file_list", ")", "files_and_dirs", "=", "utils", ".", "add_parents", "(", "files", ")", "dirs", "=", "files_and_dirs", ".", "difference", "(", "files", ")", "paths", "=", "filter_matches", "(", "match_text", ",", "files_and_dirs", ",", "case_sensitive", ")", "return", "(", "paths", ",", "dirs", ")" ]
return matching completions from a list of candidate files .
train
false
4,779
def wait_for_volume_state(volume_manager, expected_volume, desired_state, transient_states=(), time_limit=CINDER_TIMEOUT): waiter = VolumeStateMonitor(volume_manager, expected_volume, desired_state, transient_states, time_limit) return poll_until(waiter.reached_desired_state, repeat(1))
[ "def", "wait_for_volume_state", "(", "volume_manager", ",", "expected_volume", ",", "desired_state", ",", "transient_states", "=", "(", ")", ",", "time_limit", "=", "CINDER_TIMEOUT", ")", ":", "waiter", "=", "VolumeStateMonitor", "(", "volume_manager", ",", "expected_volume", ",", "desired_state", ",", "transient_states", ",", "time_limit", ")", "return", "poll_until", "(", "waiter", ".", "reached_desired_state", ",", "repeat", "(", "1", ")", ")" ]
wait for a volume with the same id as expected_volume to be listed and to have a status value of desired_state .
train
false
4,780
def no_log_warn(logical_line): msg = 'K302: LOG.warn is deprecated, please use LOG.warning!' if ('LOG.warn(' in logical_line): (yield (0, msg))
[ "def", "no_log_warn", "(", "logical_line", ")", ":", "msg", "=", "'K302: LOG.warn is deprecated, please use LOG.warning!'", "if", "(", "'LOG.warn('", "in", "logical_line", ")", ":", "(", "yield", "(", "0", ",", "msg", ")", ")" ]
disallow log .
train
false
4,781
def validate_tzinfo(dummy, value): if ((value is not None) and (not isinstance(value, datetime.tzinfo))): raise TypeError(('%s must be an instance of datetime.tzinfo' % value)) return value
[ "def", "validate_tzinfo", "(", "dummy", ",", "value", ")", ":", "if", "(", "(", "value", "is", "not", "None", ")", "and", "(", "not", "isinstance", "(", "value", ",", "datetime", ".", "tzinfo", ")", ")", ")", ":", "raise", "TypeError", "(", "(", "'%s must be an instance of datetime.tzinfo'", "%", "value", ")", ")", "return", "value" ]
validate the tzinfo option .
train
true
4,782
def arccos(x): return Arccos()(x)
[ "def", "arccos", "(", "x", ")", ":", "return", "Arccos", "(", ")", "(", "x", ")" ]
elementwise arccosine function .
train
false
4,783
def result_headers(cl): ordering_field_columns = cl.get_ordering_field_columns() for (i, field_name) in enumerate(cl.list_display): (text, attr) = label_for_field(field_name, cl.model, model_admin=cl.model_admin, return_attr=True) if attr: if (field_name == u'action_checkbox'): (yield {u'text': text, u'class_attrib': mark_safe(u' class="action-checkbox-column"'), u'sortable': False}) continue admin_order_field = getattr(attr, u'admin_order_field', None) if (not admin_order_field): (yield {u'text': text, u'class_attrib': format_html(u' class="column-{0}"', field_name), u'sortable': False}) continue th_classes = [u'sortable', u'column-{0}'.format(field_name)] order_type = u'' new_order_type = u'asc' sort_priority = 0 sorted = False if (i in ordering_field_columns): sorted = True order_type = ordering_field_columns.get(i).lower() sort_priority = (list(ordering_field_columns).index(i) + 1) th_classes.append((u'sorted %sending' % order_type)) new_order_type = {u'asc': u'desc', u'desc': u'asc'}[order_type] o_list_primary = [] o_list_remove = [] o_list_toggle = [] make_qs_param = (lambda t, n: ((u'-' if (t == u'desc') else u'') + str(n))) for (j, ot) in ordering_field_columns.items(): if (j == i): param = make_qs_param(new_order_type, j) o_list_primary.insert(0, param) o_list_toggle.append(param) else: param = make_qs_param(ot, j) o_list_primary.append(param) o_list_toggle.append(param) o_list_remove.append(param) if (i not in ordering_field_columns): o_list_primary.insert(0, make_qs_param(new_order_type, i)) (yield {u'text': text, u'sortable': True, u'sorted': sorted, u'ascending': (order_type == u'asc'), u'sort_priority': sort_priority, u'url_primary': cl.get_query_string({ORDER_VAR: u'.'.join(o_list_primary)}), u'url_remove': cl.get_query_string({ORDER_VAR: u'.'.join(o_list_remove)}), u'url_toggle': cl.get_query_string({ORDER_VAR: u'.'.join(o_list_toggle)}), u'class_attrib': (format_html(u' class="{0}"', u' '.join(th_classes)) if th_classes else u'')})
[ "def", "result_headers", "(", "cl", ")", ":", "ordering_field_columns", "=", "cl", ".", "get_ordering_field_columns", "(", ")", "for", "(", "i", ",", "field_name", ")", "in", "enumerate", "(", "cl", ".", "list_display", ")", ":", "(", "text", ",", "attr", ")", "=", "label_for_field", "(", "field_name", ",", "cl", ".", "model", ",", "model_admin", "=", "cl", ".", "model_admin", ",", "return_attr", "=", "True", ")", "if", "attr", ":", "if", "(", "field_name", "==", "u'action_checkbox'", ")", ":", "(", "yield", "{", "u'text'", ":", "text", ",", "u'class_attrib'", ":", "mark_safe", "(", "u' class=\"action-checkbox-column\"'", ")", ",", "u'sortable'", ":", "False", "}", ")", "continue", "admin_order_field", "=", "getattr", "(", "attr", ",", "u'admin_order_field'", ",", "None", ")", "if", "(", "not", "admin_order_field", ")", ":", "(", "yield", "{", "u'text'", ":", "text", ",", "u'class_attrib'", ":", "format_html", "(", "u' class=\"column-{0}\"'", ",", "field_name", ")", ",", "u'sortable'", ":", "False", "}", ")", "continue", "th_classes", "=", "[", "u'sortable'", ",", "u'column-{0}'", ".", "format", "(", "field_name", ")", "]", "order_type", "=", "u''", "new_order_type", "=", "u'asc'", "sort_priority", "=", "0", "sorted", "=", "False", "if", "(", "i", "in", "ordering_field_columns", ")", ":", "sorted", "=", "True", "order_type", "=", "ordering_field_columns", ".", "get", "(", "i", ")", ".", "lower", "(", ")", "sort_priority", "=", "(", "list", "(", "ordering_field_columns", ")", ".", "index", "(", "i", ")", "+", "1", ")", "th_classes", ".", "append", "(", "(", "u'sorted %sending'", "%", "order_type", ")", ")", "new_order_type", "=", "{", "u'asc'", ":", "u'desc'", ",", "u'desc'", ":", "u'asc'", "}", "[", "order_type", "]", "o_list_primary", "=", "[", "]", "o_list_remove", "=", "[", "]", "o_list_toggle", "=", "[", "]", "make_qs_param", "=", "(", "lambda", "t", ",", "n", ":", "(", "(", "u'-'", "if", "(", "t", "==", "u'desc'", ")", "else", "u''", ")", "+", "str", "(", 
"n", ")", ")", ")", "for", "(", "j", ",", "ot", ")", "in", "ordering_field_columns", ".", "items", "(", ")", ":", "if", "(", "j", "==", "i", ")", ":", "param", "=", "make_qs_param", "(", "new_order_type", ",", "j", ")", "o_list_primary", ".", "insert", "(", "0", ",", "param", ")", "o_list_toggle", ".", "append", "(", "param", ")", "else", ":", "param", "=", "make_qs_param", "(", "ot", ",", "j", ")", "o_list_primary", ".", "append", "(", "param", ")", "o_list_toggle", ".", "append", "(", "param", ")", "o_list_remove", ".", "append", "(", "param", ")", "if", "(", "i", "not", "in", "ordering_field_columns", ")", ":", "o_list_primary", ".", "insert", "(", "0", ",", "make_qs_param", "(", "new_order_type", ",", "i", ")", ")", "(", "yield", "{", "u'text'", ":", "text", ",", "u'sortable'", ":", "True", ",", "u'sorted'", ":", "sorted", ",", "u'ascending'", ":", "(", "order_type", "==", "u'asc'", ")", ",", "u'sort_priority'", ":", "sort_priority", ",", "u'url_primary'", ":", "cl", ".", "get_query_string", "(", "{", "ORDER_VAR", ":", "u'.'", ".", "join", "(", "o_list_primary", ")", "}", ")", ",", "u'url_remove'", ":", "cl", ".", "get_query_string", "(", "{", "ORDER_VAR", ":", "u'.'", ".", "join", "(", "o_list_remove", ")", "}", ")", ",", "u'url_toggle'", ":", "cl", ".", "get_query_string", "(", "{", "ORDER_VAR", ":", "u'.'", ".", "join", "(", "o_list_toggle", ")", "}", ")", ",", "u'class_attrib'", ":", "(", "format_html", "(", "u' class=\"{0}\"'", ",", "u' '", ".", "join", "(", "th_classes", ")", ")", "if", "th_classes", "else", "u''", ")", "}", ")" ]
generates the list column headers .
train
false
4,784
def rmdir(path): path = os.path.expanduser(path) if (not os.path.isabs(path)): raise SaltInvocationError('File path must be absolute.') if (not os.path.isdir(path)): raise SaltInvocationError('A valid directory was not specified.') try: os.rmdir(path) return True except OSError as exc: return exc.strerror
[ "def", "rmdir", "(", "path", ")", ":", "path", "=", "os", ".", "path", ".", "expanduser", "(", "path", ")", "if", "(", "not", "os", ".", "path", ".", "isabs", "(", "path", ")", ")", ":", "raise", "SaltInvocationError", "(", "'File path must be absolute.'", ")", "if", "(", "not", "os", ".", "path", ".", "isdir", "(", "path", ")", ")", ":", "raise", "SaltInvocationError", "(", "'A valid directory was not specified.'", ")", "try", ":", "os", ".", "rmdir", "(", "path", ")", "return", "True", "except", "OSError", "as", "exc", ":", "return", "exc", ".", "strerror" ]
remove the given dir .
train
true
4,786
def deleteAllSystems(server): try: (client, key) = _get_session(server) except Exception as exc: err_msg = 'Exception raised when connecting to spacewalk server ({0}): {1}'.format(server, exc) log.error(err_msg) return {'Error': err_msg} systems = client.system.listSystems(key) ids = [] names = [] for system in systems: ids.append(system['id']) names.append(system['name']) if (client.system.deleteSystems(key, ids) == 1): return {'deleted': names} else: return {'Error': 'Failed to delete all systems'}
[ "def", "deleteAllSystems", "(", "server", ")", ":", "try", ":", "(", "client", ",", "key", ")", "=", "_get_session", "(", "server", ")", "except", "Exception", "as", "exc", ":", "err_msg", "=", "'Exception raised when connecting to spacewalk server ({0}): {1}'", ".", "format", "(", "server", ",", "exc", ")", "log", ".", "error", "(", "err_msg", ")", "return", "{", "'Error'", ":", "err_msg", "}", "systems", "=", "client", ".", "system", ".", "listSystems", "(", "key", ")", "ids", "=", "[", "]", "names", "=", "[", "]", "for", "system", "in", "systems", ":", "ids", ".", "append", "(", "system", "[", "'id'", "]", ")", "names", ".", "append", "(", "system", "[", "'name'", "]", ")", "if", "(", "client", ".", "system", ".", "deleteSystems", "(", "key", ",", "ids", ")", "==", "1", ")", ":", "return", "{", "'deleted'", ":", "names", "}", "else", ":", "return", "{", "'Error'", ":", "'Failed to delete all systems'", "}" ]
delete all systems from spacewalk cli example: .
train
true
4,787
@utils.arg('network', metavar='<network>', help=_('UUID of network.')) @deprecated_network def do_network_associate_project(cs, args): cs.networks.associate_project(args.network)
[ "@", "utils", ".", "arg", "(", "'network'", ",", "metavar", "=", "'<network>'", ",", "help", "=", "_", "(", "'UUID of network.'", ")", ")", "@", "deprecated_network", "def", "do_network_associate_project", "(", "cs", ",", "args", ")", ":", "cs", ".", "networks", ".", "associate_project", "(", "args", ".", "network", ")" ]
associate project with network .
train
false
4,788
def process_memory_map(attrs=None, where=None): if (__grains__['os_family'] in ['RedHat', 'Debian']): return _osquery_cmd(table='process_memory_map', attrs=attrs, where=where) return {'result': False, 'comment': 'Only available on Red Hat or Debian based systems.'}
[ "def", "process_memory_map", "(", "attrs", "=", "None", ",", "where", "=", "None", ")", ":", "if", "(", "__grains__", "[", "'os_family'", "]", "in", "[", "'RedHat'", ",", "'Debian'", "]", ")", ":", "return", "_osquery_cmd", "(", "table", "=", "'process_memory_map'", ",", "attrs", "=", "attrs", ",", "where", "=", "where", ")", "return", "{", "'result'", ":", "False", ",", "'comment'", ":", "'Only available on Red Hat or Debian based systems.'", "}" ]
return process_memory_map information from osquery cli example: .
train
true
4,789
def registerField(fieldCls): assert (fieldCls.typeCode is not None), "Type code isn't defined" _fieldsRegistry[fieldCls.typeCode] = fieldCls
[ "def", "registerField", "(", "fieldCls", ")", ":", "assert", "(", "fieldCls", ".", "typeCode", "is", "not", "None", ")", ",", "\"Type code isn't defined\"", "_fieldsRegistry", "[", "fieldCls", ".", "typeCode", "]", "=", "fieldCls" ]
register field definition class .
train
false
4,790
def get_default_version(server=None): req = servers_service_pb.GetDefaultVersionRequest() if server: req.set_server(server) resp = servers_service_pb.GetDefaultVersionResponse() try: apiproxy_stub_map.MakeSyncCall('servers', 'GetDefaultVersion', req, resp) except apiproxy_errors.ApplicationError as e: if (e.application_error == servers_service_pb.ServersServiceError.INVALID_SERVER): raise InvalidServerError() if (e.application_error == servers_service_pb.ServersServiceError.INVALID_VERSION): raise InvalidVersionError() else: raise Error() return resp.version()
[ "def", "get_default_version", "(", "server", "=", "None", ")", ":", "req", "=", "servers_service_pb", ".", "GetDefaultVersionRequest", "(", ")", "if", "server", ":", "req", ".", "set_server", "(", "server", ")", "resp", "=", "servers_service_pb", ".", "GetDefaultVersionResponse", "(", ")", "try", ":", "apiproxy_stub_map", ".", "MakeSyncCall", "(", "'servers'", ",", "'GetDefaultVersion'", ",", "req", ",", "resp", ")", "except", "apiproxy_errors", ".", "ApplicationError", "as", "e", ":", "if", "(", "e", ".", "application_error", "==", "servers_service_pb", ".", "ServersServiceError", ".", "INVALID_SERVER", ")", ":", "raise", "InvalidServerError", "(", ")", "if", "(", "e", ".", "application_error", "==", "servers_service_pb", ".", "ServersServiceError", ".", "INVALID_VERSION", ")", ":", "raise", "InvalidVersionError", "(", ")", "else", ":", "raise", "Error", "(", ")", "return", "resp", ".", "version", "(", ")" ]
returns the name of the default version for the module .
train
false
4,791
def cinder_import_alphabetical(physical_line, line_number, lines): split_line = import_normalize(physical_line.strip()).lower().split() split_previous = import_normalize(lines[(line_number - 2)]).strip().lower().split() length = [2, 4] if ((len(split_line) in length) and (len(split_previous) in length) and (split_line[0] == 'import') and (split_previous[0] == 'import')): if (split_line[1] < split_previous[1]): return (0, ('CINDER N306: imports not in alphabetical order (%s, %s)' % (split_previous[1], split_line[1])))
[ "def", "cinder_import_alphabetical", "(", "physical_line", ",", "line_number", ",", "lines", ")", ":", "split_line", "=", "import_normalize", "(", "physical_line", ".", "strip", "(", ")", ")", ".", "lower", "(", ")", ".", "split", "(", ")", "split_previous", "=", "import_normalize", "(", "lines", "[", "(", "line_number", "-", "2", ")", "]", ")", ".", "strip", "(", ")", ".", "lower", "(", ")", ".", "split", "(", ")", "length", "=", "[", "2", ",", "4", "]", "if", "(", "(", "len", "(", "split_line", ")", "in", "length", ")", "and", "(", "len", "(", "split_previous", ")", "in", "length", ")", "and", "(", "split_line", "[", "0", "]", "==", "'import'", ")", "and", "(", "split_previous", "[", "0", "]", "==", "'import'", ")", ")", ":", "if", "(", "split_line", "[", "1", "]", "<", "split_previous", "[", "1", "]", ")", ":", "return", "(", "0", ",", "(", "'CINDER N306: imports not in alphabetical order (%s, %s)'", "%", "(", "split_previous", "[", "1", "]", ",", "split_line", "[", "1", "]", ")", ")", ")" ]
check for imports in alphabetical order .
train
false
4,793
def send_bulk_message(registration_ids, data, cloud_type, **kwargs): if (cloud_type == 'GCM'): max_recipients = SETTINGS.get('GCM_MAX_RECIPIENTS') elif (cloud_type == 'FCM'): max_recipients = SETTINGS.get('FCM_MAX_RECIPIENTS') else: raise ImproperlyConfigured(('cloud_type must be GCM or FCM not %s' % str(cloud_type))) if ((registration_ids is None) and ('/topics/' not in kwargs.get('to', ''))): return if registration_ids: if (len(registration_ids) > max_recipients): ret = [] for chunk in _chunks(registration_ids, max_recipients): ret.append(_cm_send_json(chunk, data, cloud_type=cloud_type, **kwargs)) return ret return _cm_send_json(registration_ids, data, cloud_type=cloud_type, **kwargs)
[ "def", "send_bulk_message", "(", "registration_ids", ",", "data", ",", "cloud_type", ",", "**", "kwargs", ")", ":", "if", "(", "cloud_type", "==", "'GCM'", ")", ":", "max_recipients", "=", "SETTINGS", ".", "get", "(", "'GCM_MAX_RECIPIENTS'", ")", "elif", "(", "cloud_type", "==", "'FCM'", ")", ":", "max_recipients", "=", "SETTINGS", ".", "get", "(", "'FCM_MAX_RECIPIENTS'", ")", "else", ":", "raise", "ImproperlyConfigured", "(", "(", "'cloud_type must be GCM or FCM not %s'", "%", "str", "(", "cloud_type", ")", ")", ")", "if", "(", "(", "registration_ids", "is", "None", ")", "and", "(", "'/topics/'", "not", "in", "kwargs", ".", "get", "(", "'to'", ",", "''", ")", ")", ")", ":", "return", "if", "registration_ids", ":", "if", "(", "len", "(", "registration_ids", ")", ">", "max_recipients", ")", ":", "ret", "=", "[", "]", "for", "chunk", "in", "_chunks", "(", "registration_ids", ",", "max_recipients", ")", ":", "ret", ".", "append", "(", "_cm_send_json", "(", "chunk", ",", "data", ",", "cloud_type", "=", "cloud_type", ",", "**", "kwargs", ")", ")", "return", "ret", "return", "_cm_send_json", "(", "registration_ids", ",", "data", ",", "cloud_type", "=", "cloud_type", ",", "**", "kwargs", ")" ]
sends a gcm or fcm notification to one or more registration_ids .
train
false
4,794
def remove_server(): global _BONJOUR_OBJECT if _BONJOUR_OBJECT: _BONJOUR_OBJECT.close() _BONJOUR_OBJECT = None
[ "def", "remove_server", "(", ")", ":", "global", "_BONJOUR_OBJECT", "if", "_BONJOUR_OBJECT", ":", "_BONJOUR_OBJECT", ".", "close", "(", ")", "_BONJOUR_OBJECT", "=", "None" ]
remove bonjour registration .
train
false
4,796
def getCarvingFromParser(xmlParser): booleanGeometryElement = xmlParser.getDocumentElement() booleanGeometryElement.xmlObject = boolean_geometry.BooleanGeometry() booleanGeometryElement.xmlProcessor = XMLBooleanGeometryProcessor() booleanGeometryElement.xmlProcessor.processChildNodes(booleanGeometryElement) return booleanGeometryElement.xmlObject
[ "def", "getCarvingFromParser", "(", "xmlParser", ")", ":", "booleanGeometryElement", "=", "xmlParser", ".", "getDocumentElement", "(", ")", "booleanGeometryElement", ".", "xmlObject", "=", "boolean_geometry", ".", "BooleanGeometry", "(", ")", "booleanGeometryElement", ".", "xmlProcessor", "=", "XMLBooleanGeometryProcessor", "(", ")", "booleanGeometryElement", ".", "xmlProcessor", ".", "processChildNodes", "(", "booleanGeometryElement", ")", "return", "booleanGeometryElement", ".", "xmlObject" ]
get the carving for the parser .
train
false
4,798
def get_cert_and_update_domain(zappa_instance, lambda_name, api_stage, domain=None, clean_up=True): try: create_domain_key() create_domain_csr(domain) get_cert(zappa_instance) create_chained_certificate() with open('/tmp/signed.crt') as f: certificate_body = f.read() with open('/tmp/domain.key') as f: certificate_private_key = f.read() with open('/tmp/intermediate.pem') as f: certificate_chain = f.read() if domain: if (not zappa_instance.get_domain_name(domain)): zappa_instance.create_domain_name(domain, (domain + '-Zappa-LE-Cert'), certificate_body, certificate_private_key, certificate_chain, lambda_name, api_stage) print 'Created a new domain name. Please note that it can take up to 40 minutes for this domain to be created and propagated through AWS, but it requires no further work on your part.' else: zappa_instance.update_domain_name(domain, (domain + '-Zappa-LE-Cert'), certificate_body, certificate_private_key, certificate_chain) except Exception as e: print e return False if clean_up: cleanup() return True
[ "def", "get_cert_and_update_domain", "(", "zappa_instance", ",", "lambda_name", ",", "api_stage", ",", "domain", "=", "None", ",", "clean_up", "=", "True", ")", ":", "try", ":", "create_domain_key", "(", ")", "create_domain_csr", "(", "domain", ")", "get_cert", "(", "zappa_instance", ")", "create_chained_certificate", "(", ")", "with", "open", "(", "'/tmp/signed.crt'", ")", "as", "f", ":", "certificate_body", "=", "f", ".", "read", "(", ")", "with", "open", "(", "'/tmp/domain.key'", ")", "as", "f", ":", "certificate_private_key", "=", "f", ".", "read", "(", ")", "with", "open", "(", "'/tmp/intermediate.pem'", ")", "as", "f", ":", "certificate_chain", "=", "f", ".", "read", "(", ")", "if", "domain", ":", "if", "(", "not", "zappa_instance", ".", "get_domain_name", "(", "domain", ")", ")", ":", "zappa_instance", ".", "create_domain_name", "(", "domain", ",", "(", "domain", "+", "'-Zappa-LE-Cert'", ")", ",", "certificate_body", ",", "certificate_private_key", ",", "certificate_chain", ",", "lambda_name", ",", "api_stage", ")", "print", "'Created a new domain name. Please note that it can take up to 40 minutes for this domain to be created and propagated through AWS, but it requires no further work on your part.'", "else", ":", "zappa_instance", ".", "update_domain_name", "(", "domain", ",", "(", "domain", "+", "'-Zappa-LE-Cert'", ")", ",", "certificate_body", ",", "certificate_private_key", ",", "certificate_chain", ")", "except", "Exception", "as", "e", ":", "print", "e", "return", "False", "if", "clean_up", ":", "cleanup", "(", ")", "return", "True" ]
main cert installer path .
train
false
4,799
def pop_stream(f): f.flush() f.seek(0) output = f.read() f.seek(0) f.truncate() return output
[ "def", "pop_stream", "(", "f", ")", ":", "f", ".", "flush", "(", ")", "f", ".", "seek", "(", "0", ")", "output", "=", "f", ".", "read", "(", ")", "f", ".", "seek", "(", "0", ")", "f", ".", "truncate", "(", ")", "return", "output" ]
read everything out of file from the top and clear it out .
train
false
4,801
@requires_application() def test_run(): for _ in range(2): with Canvas(size=(100, 100), show=True, title='run') as c: @c.events.draw.connect def draw(event): print event c.app.quit() c.update() c.app.run() c.app.quit()
[ "@", "requires_application", "(", ")", "def", "test_run", "(", ")", ":", "for", "_", "in", "range", "(", "2", ")", ":", "with", "Canvas", "(", "size", "=", "(", "100", ",", "100", ")", ",", "show", "=", "True", ",", "title", "=", "'run'", ")", "as", "c", ":", "@", "c", ".", "events", ".", "draw", ".", "connect", "def", "draw", "(", "event", ")", ":", "print", "event", "c", ".", "app", ".", "quit", "(", ")", "c", ".", "update", "(", ")", "c", ".", "app", ".", "run", "(", ")", "c", ".", "app", ".", "quit", "(", ")" ]
test running subprocesses .
train
false
4,804
@shared_task def print_unicode(log_message=u'h\xe5\xe5\xae\u0192 valmuefr\xf8', print_message=u'hi\xf6\xe4\xfc\xdf'): logger.warning(log_message) print print_message
[ "@", "shared_task", "def", "print_unicode", "(", "log_message", "=", "u'h\\xe5\\xe5\\xae\\u0192 valmuefr\\xf8'", ",", "print_message", "=", "u'hi\\xf6\\xe4\\xfc\\xdf'", ")", ":", "logger", ".", "warning", "(", "log_message", ")", "print", "print_message" ]
task that both logs and print strings containing funny characters .
train
false
4,805
def trunc_password(password): try: if (len(password) > MAX_PASSWORD_LENGTH): return password[:MAX_PASSWORD_LENGTH] else: return password except TypeError: raise exception.ValidationError(attribute='string', target='password')
[ "def", "trunc_password", "(", "password", ")", ":", "try", ":", "if", "(", "len", "(", "password", ")", ">", "MAX_PASSWORD_LENGTH", ")", ":", "return", "password", "[", ":", "MAX_PASSWORD_LENGTH", "]", "else", ":", "return", "password", "except", "TypeError", ":", "raise", "exception", ".", "ValidationError", "(", "attribute", "=", "'string'", ",", "target", "=", "'password'", ")" ]
truncate passwords to the max_password_length .
train
false
4,806
def get_langids(dev): from usb.control import get_descriptor buf = get_descriptor(dev, 254, DESC_TYPE_STRING, 0) if ((len(buf) < 4) or (buf[0] < 4) or ((buf[0] & 1) != 0)): return () return tuple(map((lambda x, y: (x + (y << 8))), buf[2:buf[0]:2], buf[3:buf[0]:2]))
[ "def", "get_langids", "(", "dev", ")", ":", "from", "usb", ".", "control", "import", "get_descriptor", "buf", "=", "get_descriptor", "(", "dev", ",", "254", ",", "DESC_TYPE_STRING", ",", "0", ")", "if", "(", "(", "len", "(", "buf", ")", "<", "4", ")", "or", "(", "buf", "[", "0", "]", "<", "4", ")", "or", "(", "(", "buf", "[", "0", "]", "&", "1", ")", "!=", "0", ")", ")", ":", "return", "(", ")", "return", "tuple", "(", "map", "(", "(", "lambda", "x", ",", "y", ":", "(", "x", "+", "(", "y", "<<", "8", ")", ")", ")", ",", "buf", "[", "2", ":", "buf", "[", "0", "]", ":", "2", "]", ",", "buf", "[", "3", ":", "buf", "[", "0", "]", ":", "2", "]", ")", ")" ]
retrieve the list of supported language ids from the device .
train
true
4,807
def decode_addr(v): if (len(v) not in [0, 20]): raise Exception('Serialized addresses must be empty or 20 bytes long!') return encode_hex(v)
[ "def", "decode_addr", "(", "v", ")", ":", "if", "(", "len", "(", "v", ")", "not", "in", "[", "0", ",", "20", "]", ")", ":", "raise", "Exception", "(", "'Serialized addresses must be empty or 20 bytes long!'", ")", "return", "encode_hex", "(", "v", ")" ]
decodes an address from serialization .
train
false
4,808
def build_xoauth_string(url, consumer, token=None): request = Request.from_consumer_and_token(consumer, token, 'GET', url) signing_method = SignatureMethod_HMAC_SHA1() request.sign_request(signing_method, consumer, token) params = [] for (k, v) in sorted(request.iteritems()): if (v is not None): params.append(('%s="%s"' % (k, escape(v)))) return ('%s %s %s' % ('GET', url, ','.join(params)))
[ "def", "build_xoauth_string", "(", "url", ",", "consumer", ",", "token", "=", "None", ")", ":", "request", "=", "Request", ".", "from_consumer_and_token", "(", "consumer", ",", "token", ",", "'GET'", ",", "url", ")", "signing_method", "=", "SignatureMethod_HMAC_SHA1", "(", ")", "request", ".", "sign_request", "(", "signing_method", ",", "consumer", ",", "token", ")", "params", "=", "[", "]", "for", "(", "k", ",", "v", ")", "in", "sorted", "(", "request", ".", "iteritems", "(", ")", ")", ":", "if", "(", "v", "is", "not", "None", ")", ":", "params", ".", "append", "(", "(", "'%s=\"%s\"'", "%", "(", "k", ",", "escape", "(", "v", ")", ")", ")", ")", "return", "(", "'%s %s %s'", "%", "(", "'GET'", ",", "url", ",", "','", ".", "join", "(", "params", ")", ")", ")" ]
build an xoauth string for use in smtp/impa authentication .
train
true
4,809
def _msg_err(receiver, stringtuple): string = '{traceback}\n{errmsg}\n(Traceback was logged {timestamp}).' timestamp = logger.timeformat() tracestring = format_exc() logger.log_trace() if _IN_GAME_ERRORS: receiver.msg(string.format(traceback=tracestring, errmsg=stringtuple[0].strip(), timestamp=timestamp).strip()) else: receiver.msg(string.format(traceback=tracestring.splitlines()[(-1)], errmsg=stringtuple[1].strip(), timestamp=timestamp).strip())
[ "def", "_msg_err", "(", "receiver", ",", "stringtuple", ")", ":", "string", "=", "'{traceback}\\n{errmsg}\\n(Traceback was logged {timestamp}).'", "timestamp", "=", "logger", ".", "timeformat", "(", ")", "tracestring", "=", "format_exc", "(", ")", "logger", ".", "log_trace", "(", ")", "if", "_IN_GAME_ERRORS", ":", "receiver", ".", "msg", "(", "string", ".", "format", "(", "traceback", "=", "tracestring", ",", "errmsg", "=", "stringtuple", "[", "0", "]", ".", "strip", "(", ")", ",", "timestamp", "=", "timestamp", ")", ".", "strip", "(", ")", ")", "else", ":", "receiver", ".", "msg", "(", "string", ".", "format", "(", "traceback", "=", "tracestring", ".", "splitlines", "(", ")", "[", "(", "-", "1", ")", "]", ",", "errmsg", "=", "stringtuple", "[", "1", "]", ".", "strip", "(", ")", ",", "timestamp", "=", "timestamp", ")", ".", "strip", "(", ")", ")" ]
helper function for returning an error to the caller .
train
false
4,810
def find_functions(code): regex = (('^\\s*' + re_func_decl) + '\\s*{') funcs = [] while True: m = re.search(regex, code, re.M) if (m is None): return funcs (rtype, name, args) = m.groups()[:3] if ((args == 'void') or (args.strip() == '')): args = [] else: args = [tuple(arg.strip().split(' ')) for arg in args.split(',')] funcs.append((name, args, rtype)) code = code[m.end():]
[ "def", "find_functions", "(", "code", ")", ":", "regex", "=", "(", "(", "'^\\\\s*'", "+", "re_func_decl", ")", "+", "'\\\\s*{'", ")", "funcs", "=", "[", "]", "while", "True", ":", "m", "=", "re", ".", "search", "(", "regex", ",", "code", ",", "re", ".", "M", ")", "if", "(", "m", "is", "None", ")", ":", "return", "funcs", "(", "rtype", ",", "name", ",", "args", ")", "=", "m", ".", "groups", "(", ")", "[", ":", "3", "]", "if", "(", "(", "args", "==", "'void'", ")", "or", "(", "args", ".", "strip", "(", ")", "==", "''", ")", ")", ":", "args", "=", "[", "]", "else", ":", "args", "=", "[", "tuple", "(", "arg", ".", "strip", "(", ")", ".", "split", "(", "' '", ")", ")", "for", "arg", "in", "args", ".", "split", "(", "','", ")", "]", "funcs", ".", "append", "(", "(", "name", ",", "args", ",", "rtype", ")", ")", "code", "=", "code", "[", "m", ".", "end", "(", ")", ":", "]" ]
return a list of for all function definition2 found in *code* .
train
true
4,812
def summarize_otu_sizes_from_otu_map(otu_map_f): result = {} for (otu_id, seq_ids) in fields_to_dict(otu_map_f).items(): otu_size = len(seq_ids) try: result[otu_size] += 1 except KeyError: result[otu_size] = 1 result = sorted(result.items()) return result
[ "def", "summarize_otu_sizes_from_otu_map", "(", "otu_map_f", ")", ":", "result", "=", "{", "}", "for", "(", "otu_id", ",", "seq_ids", ")", "in", "fields_to_dict", "(", "otu_map_f", ")", ".", "items", "(", ")", ":", "otu_size", "=", "len", "(", "seq_ids", ")", "try", ":", "result", "[", "otu_size", "]", "+=", "1", "except", "KeyError", ":", "result", "[", "otu_size", "]", "=", "1", "result", "=", "sorted", "(", "result", ".", "items", "(", ")", ")", "return", "result" ]
given an otu map file handle .
train
false
4,814
@pytest.mark.network def test_wheel_package_with_latin1_setup(script, data): script.pip('install', 'wheel') pkg_to_wheel = data.packages.join('SetupPyLatin1') result = script.pip('wheel', pkg_to_wheel) assert ('Successfully built SetupPyUTF8' in result.stdout)
[ "@", "pytest", ".", "mark", ".", "network", "def", "test_wheel_package_with_latin1_setup", "(", "script", ",", "data", ")", ":", "script", ".", "pip", "(", "'install'", ",", "'wheel'", ")", "pkg_to_wheel", "=", "data", ".", "packages", ".", "join", "(", "'SetupPyLatin1'", ")", "result", "=", "script", ".", "pip", "(", "'wheel'", ",", "pkg_to_wheel", ")", "assert", "(", "'Successfully built SetupPyUTF8'", "in", "result", ".", "stdout", ")" ]
create a wheel from a package with latin-1 encoded setup .
train
false
4,815
def computer_desc(name): name = str(name) ret = {'name': name, 'changes': {}, 'result': True, 'comment': "Computer description already set to '{0}'".format(name)} before_desc = __salt__['system.get_computer_desc']() if (before_desc == name): return ret if __opts__['test']: ret['result'] = None ret['comment'] = "Computer description will be changed to '{0}'".format(name) return ret result = __salt__['system.set_computer_desc'](name) if (result['Computer Description'] == name): ret['comment'] = "Computer description successfully changed to '{0}'".format(name) ret['changes'] = {'old': before_desc, 'new': name} else: ret['result'] = False ret['comment'] = "Unable to set computer description to '{0}'".format(name) return ret
[ "def", "computer_desc", "(", "name", ")", ":", "name", "=", "str", "(", "name", ")", "ret", "=", "{", "'name'", ":", "name", ",", "'changes'", ":", "{", "}", ",", "'result'", ":", "True", ",", "'comment'", ":", "\"Computer description already set to '{0}'\"", ".", "format", "(", "name", ")", "}", "before_desc", "=", "__salt__", "[", "'system.get_computer_desc'", "]", "(", ")", "if", "(", "before_desc", "==", "name", ")", ":", "return", "ret", "if", "__opts__", "[", "'test'", "]", ":", "ret", "[", "'result'", "]", "=", "None", "ret", "[", "'comment'", "]", "=", "\"Computer description will be changed to '{0}'\"", ".", "format", "(", "name", ")", "return", "ret", "result", "=", "__salt__", "[", "'system.set_computer_desc'", "]", "(", "name", ")", "if", "(", "result", "[", "'Computer Description'", "]", "==", "name", ")", ":", "ret", "[", "'comment'", "]", "=", "\"Computer description successfully changed to '{0}'\"", ".", "format", "(", "name", ")", "ret", "[", "'changes'", "]", "=", "{", "'old'", ":", "before_desc", ",", "'new'", ":", "name", "}", "else", ":", "ret", "[", "'result'", "]", "=", "False", "ret", "[", "'comment'", "]", "=", "\"Unable to set computer description to '{0}'\"", ".", "format", "(", "name", ")", "return", "ret" ]
manage the computers description field name the desired computer description .
train
false
4,817
def find_distributions(path_item, only=False): importer = get_importer(path_item) finder = _find_adapter(_distribution_finders, importer) return finder(importer, path_item, only)
[ "def", "find_distributions", "(", "path_item", ",", "only", "=", "False", ")", ":", "importer", "=", "get_importer", "(", "path_item", ")", "finder", "=", "_find_adapter", "(", "_distribution_finders", ",", "importer", ")", "return", "finder", "(", "importer", ",", "path_item", ",", "only", ")" ]
yield distributions accessible via path_item .
train
true
4,818
def clean0(matrix): colsum = np.add.reduce((matrix ** 2), 0) val = [matrix[:, i] for i in np.flatnonzero(colsum)] return np.array(np.transpose(val))
[ "def", "clean0", "(", "matrix", ")", ":", "colsum", "=", "np", ".", "add", ".", "reduce", "(", "(", "matrix", "**", "2", ")", ",", "0", ")", "val", "=", "[", "matrix", "[", ":", ",", "i", "]", "for", "i", "in", "np", ".", "flatnonzero", "(", "colsum", ")", "]", "return", "np", ".", "array", "(", "np", ".", "transpose", "(", "val", ")", ")" ]
erase columns of zeros: can save some time in pseudoinverse .
train
false
4,819
def log_warn(message, **kwargs): if kwargs: message = message.format(**kwargs) print(message, file=sys.stderr)
[ "def", "log_warn", "(", "message", ",", "**", "kwargs", ")", ":", "if", "kwargs", ":", "message", "=", "message", ".", "format", "(", "**", "kwargs", ")", "print", "(", "message", ",", "file", "=", "sys", ".", "stderr", ")" ]
prints/logs any warnings that arent critical but should be noted .
train
false
4,820
@decorator def pb(f, *args, **kwargs): msg = f(*args, **kwargs) return msg.SerializeToString()
[ "@", "decorator", "def", "pb", "(", "f", ",", "*", "args", ",", "**", "kwargs", ")", ":", "msg", "=", "f", "(", "*", "args", ",", "**", "kwargs", ")", "return", "msg", ".", "SerializeToString", "(", ")" ]
decorator to serialize a protobuf message .
train
false
4,821
def record_exchange(db, route, amount, fee, participant, status, error=None): assert (route.participant.id == participant.id) with db.get_cursor() as cursor: exchange_id = cursor.one(u'\n INSERT INTO exchanges\n (amount, fee, participant, status, route, note)\n VALUES (%s, %s, %s, %s, %s, %s)\n RETURNING id\n ', (amount, fee, participant.username, status, route.id, error)) if (status == u'failed'): propagate_exchange(cursor, participant, route, error, 0) elif (amount < 0): amount -= fee propagate_exchange(cursor, participant, route, u'', amount) return exchange_id
[ "def", "record_exchange", "(", "db", ",", "route", ",", "amount", ",", "fee", ",", "participant", ",", "status", ",", "error", "=", "None", ")", ":", "assert", "(", "route", ".", "participant", ".", "id", "==", "participant", ".", "id", ")", "with", "db", ".", "get_cursor", "(", ")", "as", "cursor", ":", "exchange_id", "=", "cursor", ".", "one", "(", "u'\\n INSERT INTO exchanges\\n (amount, fee, participant, status, route, note)\\n VALUES (%s, %s, %s, %s, %s, %s)\\n RETURNING id\\n '", ",", "(", "amount", ",", "fee", ",", "participant", ".", "username", ",", "status", ",", "route", ".", "id", ",", "error", ")", ")", "if", "(", "status", "==", "u'failed'", ")", ":", "propagate_exchange", "(", "cursor", ",", "participant", ",", "route", ",", "error", ",", "0", ")", "elif", "(", "amount", "<", "0", ")", ":", "amount", "-=", "fee", "propagate_exchange", "(", "cursor", ",", "participant", ",", "route", ",", "u''", ",", "amount", ")", "return", "exchange_id" ]
given a bunch of stuff .
train
false
4,824
def reload_(name): term(name)
[ "def", "reload_", "(", "name", ")", ":", "term", "(", "name", ")" ]
reloads syslog-ng .
train
false
4,825
def generate_skip_decorator(bear): result = bear.check_prerequisites() return (skip(result) if isinstance(result, str) else skipIf((not result), '(No reason given.)'))
[ "def", "generate_skip_decorator", "(", "bear", ")", ":", "result", "=", "bear", ".", "check_prerequisites", "(", ")", "return", "(", "skip", "(", "result", ")", "if", "isinstance", "(", "result", ",", "str", ")", "else", "skipIf", "(", "(", "not", "result", ")", ",", "'(No reason given.)'", ")", ")" ]
creates a skip decorator for a unittest module test from a bear .
train
false
4,826
@np.deprecate(message='stats.f_value_multivariate deprecated in scipy 0.17.0') def f_value_multivariate(ER, EF, dfnum, dfden): if isinstance(ER, (int, float)): ER = array([[ER]]) if isinstance(EF, (int, float)): EF = array([[EF]]) n_um = ((linalg.det(ER) - linalg.det(EF)) / float(dfnum)) d_en = (linalg.det(EF) / float(dfden)) return (n_um / d_en)
[ "@", "np", ".", "deprecate", "(", "message", "=", "'stats.f_value_multivariate deprecated in scipy 0.17.0'", ")", "def", "f_value_multivariate", "(", "ER", ",", "EF", ",", "dfnum", ",", "dfden", ")", ":", "if", "isinstance", "(", "ER", ",", "(", "int", ",", "float", ")", ")", ":", "ER", "=", "array", "(", "[", "[", "ER", "]", "]", ")", "if", "isinstance", "(", "EF", ",", "(", "int", ",", "float", ")", ")", ":", "EF", "=", "array", "(", "[", "[", "EF", "]", "]", ")", "n_um", "=", "(", "(", "linalg", ".", "det", "(", "ER", ")", "-", "linalg", ".", "det", "(", "EF", ")", ")", "/", "float", "(", "dfnum", ")", ")", "d_en", "=", "(", "linalg", ".", "det", "(", "EF", ")", "/", "float", "(", "dfden", ")", ")", "return", "(", "n_um", "/", "d_en", ")" ]
returns a multivariate f-statistic .
train
false
4,827
def merge_policies_dict(non_report_only_policies_dict, report_only_policies_dict): if ((non_report_only_policies_dict is None) or (len(non_report_only_policies_dict) == 0)): return report_only_policies_dict if ((report_only_policies_dict is None) or (len(report_only_policies_dict) == 0)): return non_report_only_policies_dict merged_policies = {} directives_names = list(set((non_report_only_policies_dict.keys() + report_only_policies_dict.keys()))) for k in directives_names: values = [] if (k in non_report_only_policies_dict): values.extend(non_report_only_policies_dict[k]) if (k in report_only_policies_dict): values.extend(report_only_policies_dict[k]) merged_policies[k] = list(set(values)) return merged_policies
[ "def", "merge_policies_dict", "(", "non_report_only_policies_dict", ",", "report_only_policies_dict", ")", ":", "if", "(", "(", "non_report_only_policies_dict", "is", "None", ")", "or", "(", "len", "(", "non_report_only_policies_dict", ")", "==", "0", ")", ")", ":", "return", "report_only_policies_dict", "if", "(", "(", "report_only_policies_dict", "is", "None", ")", "or", "(", "len", "(", "report_only_policies_dict", ")", "==", "0", ")", ")", ":", "return", "non_report_only_policies_dict", "merged_policies", "=", "{", "}", "directives_names", "=", "list", "(", "set", "(", "(", "non_report_only_policies_dict", ".", "keys", "(", ")", "+", "report_only_policies_dict", ".", "keys", "(", ")", ")", ")", ")", "for", "k", "in", "directives_names", ":", "values", "=", "[", "]", "if", "(", "k", "in", "non_report_only_policies_dict", ")", ":", "values", ".", "extend", "(", "non_report_only_policies_dict", "[", "k", "]", ")", "if", "(", "k", "in", "report_only_policies_dict", ")", ":", "values", ".", "extend", "(", "report_only_policies_dict", "[", "k", "]", ")", "merged_policies", "[", "k", "]", "=", "list", "(", "set", "(", "values", ")", ")", "return", "merged_policies" ]
method to merge 2 policies dictionaries to a single .
train
false
4,830
def get_change(name): policies = _get_account_policy(name) if ('expirationDateGMT' in policies): return policies['expirationDateGMT'] return 'Value not set'
[ "def", "get_change", "(", "name", ")", ":", "policies", "=", "_get_account_policy", "(", "name", ")", "if", "(", "'expirationDateGMT'", "in", "policies", ")", ":", "return", "policies", "[", "'expirationDateGMT'", "]", "return", "'Value not set'" ]
gets the date on which the password expires .
train
false
4,831
@pytest.mark.parametrize('text, deleted, rest', [('test |delete this', 'delete this', 'test |'), fixme(('<test >delete this', 'test delete this', 'test |')), ('<test >delete this', 'test delete this', '|')]) def test_rl_kill_line(lineedit, bridge, text, deleted, rest): _validate_deletion(lineedit, bridge, bridge.rl_kill_line, text, deleted, rest)
[ "@", "pytest", ".", "mark", ".", "parametrize", "(", "'text, deleted, rest'", ",", "[", "(", "'test |delete this'", ",", "'delete this'", ",", "'test |'", ")", ",", "fixme", "(", "(", "'<test >delete this'", ",", "'test delete this'", ",", "'test |'", ")", ")", ",", "(", "'<test >delete this'", ",", "'test delete this'", ",", "'|'", ")", "]", ")", "def", "test_rl_kill_line", "(", "lineedit", ",", "bridge", ",", "text", ",", "deleted", ",", "rest", ")", ":", "_validate_deletion", "(", "lineedit", ",", "bridge", ",", "bridge", ".", "rl_kill_line", ",", "text", ",", "deleted", ",", "rest", ")" ]
delete from the cursor to the end of line and yank back .
train
false
4,832
@contextfunction def currency_print(context, currency=None): if (not currency): currency = Currency.objects.get(is_default=True) return unicode(currency.code)
[ "@", "contextfunction", "def", "currency_print", "(", "context", ",", "currency", "=", "None", ")", ":", "if", "(", "not", "currency", ")", ":", "currency", "=", "Currency", ".", "objects", ".", "get", "(", "is_default", "=", "True", ")", "return", "unicode", "(", "currency", ".", "code", ")" ]
just returns the currency symbol as set in sales module settings to a given string .
train
false
4,833
def pinv(a, cond=None, rcond=None, return_rank=False, check_finite=True): a = _asarray_validated(a, check_finite=check_finite) b = np.identity(a.shape[0], dtype=a.dtype) if (rcond is not None): cond = rcond (x, resids, rank, s) = lstsq(a, b, cond=cond, check_finite=False) if return_rank: return (x, rank) else: return x
[ "def", "pinv", "(", "a", ",", "cond", "=", "None", ",", "rcond", "=", "None", ",", "return_rank", "=", "False", ",", "check_finite", "=", "True", ")", ":", "a", "=", "_asarray_validated", "(", "a", ",", "check_finite", "=", "check_finite", ")", "b", "=", "np", ".", "identity", "(", "a", ".", "shape", "[", "0", "]", ",", "dtype", "=", "a", ".", "dtype", ")", "if", "(", "rcond", "is", "not", "None", ")", ":", "cond", "=", "rcond", "(", "x", ",", "resids", ",", "rank", ",", "s", ")", "=", "lstsq", "(", "a", ",", "b", ",", "cond", "=", "cond", ",", "check_finite", "=", "False", ")", "if", "return_rank", ":", "return", "(", "x", ",", "rank", ")", "else", ":", "return", "x" ]
compute the pseudo-inverse of a matrix .
train
false
4,834
def describe_topic_rule(ruleName, region=None, key=None, keyid=None, profile=None): try: conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) rule = conn.get_topic_rule(ruleName=ruleName) if (rule and ('rule' in rule)): rule = rule['rule'] keys = ('ruleName', 'sql', 'description', 'actions', 'ruleDisabled') return {'rule': dict([(k, rule.get(k)) for k in keys])} else: return {'rule': None} except ClientError as e: return {'error': salt.utils.boto3.get_error(e)}
[ "def", "describe_topic_rule", "(", "ruleName", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "try", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "rule", "=", "conn", ".", "get_topic_rule", "(", "ruleName", "=", "ruleName", ")", "if", "(", "rule", "and", "(", "'rule'", "in", "rule", ")", ")", ":", "rule", "=", "rule", "[", "'rule'", "]", "keys", "=", "(", "'ruleName'", ",", "'sql'", ",", "'description'", ",", "'actions'", ",", "'ruleDisabled'", ")", "return", "{", "'rule'", ":", "dict", "(", "[", "(", "k", ",", "rule", ".", "get", "(", "k", ")", ")", "for", "k", "in", "keys", "]", ")", "}", "else", ":", "return", "{", "'rule'", ":", "None", "}", "except", "ClientError", "as", "e", ":", "return", "{", "'error'", ":", "salt", ".", "utils", ".", "boto3", ".", "get_error", "(", "e", ")", "}" ]
given a topic rule name describe its properties .
train
false
4,835
def rsa_public_key(p, q, e): n = (p * q) if (isprime(p) and isprime(q)): phi = totient(n) if (gcd(e, phi) == 1): return (n, e) return False
[ "def", "rsa_public_key", "(", "p", ",", "q", ",", "e", ")", ":", "n", "=", "(", "p", "*", "q", ")", "if", "(", "isprime", "(", "p", ")", "and", "isprime", "(", "q", ")", ")", ":", "phi", "=", "totient", "(", "n", ")", "if", "(", "gcd", "(", "e", ",", "phi", ")", "==", "1", ")", ":", "return", "(", "n", ",", "e", ")", "return", "False" ]
return the rsa *public key* pair .
train
false
4,836
def _parseClientUNIX(*args, **kwargs): if (len(args) == 1): kwargs['path'] = args[0] try: kwargs['checkPID'] = bool(int(kwargs.pop('lockfile'))) except KeyError: pass try: kwargs['timeout'] = int(kwargs['timeout']) except KeyError: pass return kwargs
[ "def", "_parseClientUNIX", "(", "*", "args", ",", "**", "kwargs", ")", ":", "if", "(", "len", "(", "args", ")", "==", "1", ")", ":", "kwargs", "[", "'path'", "]", "=", "args", "[", "0", "]", "try", ":", "kwargs", "[", "'checkPID'", "]", "=", "bool", "(", "int", "(", "kwargs", ".", "pop", "(", "'lockfile'", ")", ")", ")", "except", "KeyError", ":", "pass", "try", ":", "kwargs", "[", "'timeout'", "]", "=", "int", "(", "kwargs", "[", "'timeout'", "]", ")", "except", "KeyError", ":", "pass", "return", "kwargs" ]
perform any argument value coercion necessary for unix client parameters .
train
false
4,842
def get_transport_and_path(location, **kwargs): try: return get_transport_and_path_from_url(location, **kwargs) except ValueError: pass if ((sys.platform == 'win32') and location[0].isalpha() and (location[1:3] == ':\\')): return (default_local_git_client_cls(**kwargs), location) if ((':' in location) and (not ('@' in location))): (hostname, path) = location.split(':', 1) return (SSHGitClient(hostname, **kwargs), path) elif (':' in location): (user_host, path) = location.split(':', 1) if ('@' in user_host): (user, host) = user_host.rsplit('@', 1) else: user = None host = user_host return (SSHGitClient(host, username=user, **kwargs), path) return (default_local_git_client_cls(**kwargs), location)
[ "def", "get_transport_and_path", "(", "location", ",", "**", "kwargs", ")", ":", "try", ":", "return", "get_transport_and_path_from_url", "(", "location", ",", "**", "kwargs", ")", "except", "ValueError", ":", "pass", "if", "(", "(", "sys", ".", "platform", "==", "'win32'", ")", "and", "location", "[", "0", "]", ".", "isalpha", "(", ")", "and", "(", "location", "[", "1", ":", "3", "]", "==", "':\\\\'", ")", ")", ":", "return", "(", "default_local_git_client_cls", "(", "**", "kwargs", ")", ",", "location", ")", "if", "(", "(", "':'", "in", "location", ")", "and", "(", "not", "(", "'@'", "in", "location", ")", ")", ")", ":", "(", "hostname", ",", "path", ")", "=", "location", ".", "split", "(", "':'", ",", "1", ")", "return", "(", "SSHGitClient", "(", "hostname", ",", "**", "kwargs", ")", ",", "path", ")", "elif", "(", "':'", "in", "location", ")", ":", "(", "user_host", ",", "path", ")", "=", "location", ".", "split", "(", "':'", ",", "1", ")", "if", "(", "'@'", "in", "user_host", ")", ":", "(", "user", ",", "host", ")", "=", "user_host", ".", "rsplit", "(", "'@'", ",", "1", ")", "else", ":", "user", "=", "None", "host", "=", "user_host", "return", "(", "SSHGitClient", "(", "host", ",", "username", "=", "user", ",", "**", "kwargs", ")", ",", "path", ")", "return", "(", "default_local_git_client_cls", "(", "**", "kwargs", ")", ",", "location", ")" ]
obtain a git client from a url .
train
false
4,844
def sendOutputTo(outputTo, text): if outputTo.endswith('stderr'): sys.stderr.write(text) sys.stderr.write('\n') sys.stderr.flush() return if outputTo.endswith('stdout'): sys.stdout.write(text) sys.stdout.write('\n') sys.stdout.flush() return archive.writeFileText(outputTo, text)
[ "def", "sendOutputTo", "(", "outputTo", ",", "text", ")", ":", "if", "outputTo", ".", "endswith", "(", "'stderr'", ")", ":", "sys", ".", "stderr", ".", "write", "(", "text", ")", "sys", ".", "stderr", ".", "write", "(", "'\\n'", ")", "sys", ".", "stderr", ".", "flush", "(", ")", "return", "if", "outputTo", ".", "endswith", "(", "'stdout'", ")", ":", "sys", ".", "stdout", ".", "write", "(", "text", ")", "sys", ".", "stdout", ".", "write", "(", "'\\n'", ")", "sys", ".", "stdout", ".", "flush", "(", ")", "return", "archive", ".", "writeFileText", "(", "outputTo", ",", "text", ")" ]
send output to a file or a standard output .
train
false
4,845
def to_string_from_fields(item_dict, fields, interface_fields=None): buf = '' keys = [] for elem in fields: keys.append((elem[0], elem[3], elem[4])) keys.sort() buf += ('%-30s : %s\n' % ('Name', item_dict['name'])) for (k, nicename, editable) in keys: if (not editable): continue if (k != 'name'): buf += ('%-30s : %s\n' % (nicename, item_dict[k])) if (('interfaces' in item_dict) and (interface_fields is not None)): keys = [] for elem in interface_fields: keys.append((elem[0], elem[3], elem[4])) keys.sort() for iname in item_dict['interfaces'].keys(): buf += ('%-30s : %s\n' % ('Interface ===== ', iname)) for (k, nicename, editable) in keys: if editable: buf += ('%-30s : %s\n' % (nicename, item_dict['interfaces'][iname].get(k, ''))) return buf
[ "def", "to_string_from_fields", "(", "item_dict", ",", "fields", ",", "interface_fields", "=", "None", ")", ":", "buf", "=", "''", "keys", "=", "[", "]", "for", "elem", "in", "fields", ":", "keys", ".", "append", "(", "(", "elem", "[", "0", "]", ",", "elem", "[", "3", "]", ",", "elem", "[", "4", "]", ")", ")", "keys", ".", "sort", "(", ")", "buf", "+=", "(", "'%-30s : %s\\n'", "%", "(", "'Name'", ",", "item_dict", "[", "'name'", "]", ")", ")", "for", "(", "k", ",", "nicename", ",", "editable", ")", "in", "keys", ":", "if", "(", "not", "editable", ")", ":", "continue", "if", "(", "k", "!=", "'name'", ")", ":", "buf", "+=", "(", "'%-30s : %s\\n'", "%", "(", "nicename", ",", "item_dict", "[", "k", "]", ")", ")", "if", "(", "(", "'interfaces'", "in", "item_dict", ")", "and", "(", "interface_fields", "is", "not", "None", ")", ")", ":", "keys", "=", "[", "]", "for", "elem", "in", "interface_fields", ":", "keys", ".", "append", "(", "(", "elem", "[", "0", "]", ",", "elem", "[", "3", "]", ",", "elem", "[", "4", "]", ")", ")", "keys", ".", "sort", "(", ")", "for", "iname", "in", "item_dict", "[", "'interfaces'", "]", ".", "keys", "(", ")", ":", "buf", "+=", "(", "'%-30s : %s\\n'", "%", "(", "'Interface ===== '", ",", "iname", ")", ")", "for", "(", "k", ",", "nicename", ",", "editable", ")", "in", "keys", ":", "if", "editable", ":", "buf", "+=", "(", "'%-30s : %s\\n'", "%", "(", "nicename", ",", "item_dict", "[", "'interfaces'", "]", "[", "iname", "]", ".", "get", "(", "k", ",", "''", ")", ")", ")", "return", "buf" ]
item_dict is a dictionary .
train
false