Dataset columns:

Column                 Type            Range
id_within_dataset      int64           1 to 55.5k
snippet                stringlengths   19 to 14.2k characters
tokens                 listlengths     6 to 1.63k items
nl                     stringlengths   6 to 352 characters
split_within_dataset   stringclasses   1 value
is_duplicated          bool            2 classes
51,165
def set_git_remote_url(git_url, bench_path='.'):
    app = git_url.rsplit('/', 1)[1].rsplit('.', 1)[0]
    if (app not in bench.app.get_apps(bench_path)):
        print 'No app named {0}'.format(app)
        sys.exit(1)
    app_dir = bench.app.get_repo_dir(app, bench_path=bench_path)
    if os.path.exists(os.path.join(app_dir, '.git')):
        exec_cmd('git remote set-url upstream {}'.format(git_url), cwd=app_dir)
[ "def", "set_git_remote_url", "(", "git_url", ",", "bench_path", "=", "'.'", ")", ":", "app", "=", "git_url", ".", "rsplit", "(", "'/'", ",", "1", ")", "[", "1", "]", ".", "rsplit", "(", "'.'", ",", "1", ")", "[", "0", "]", "if", "(", "app", "not", "in", "bench", ".", "app", ".", "get_apps", "(", "bench_path", ")", ")", ":", "print", "'No app named {0}'", ".", "format", "(", "app", ")", "sys", ".", "exit", "(", "1", ")", "app_dir", "=", "bench", ".", "app", ".", "get_repo_dir", "(", "app", ",", "bench_path", "=", "bench_path", ")", "if", "os", ".", "path", ".", "exists", "(", "os", ".", "path", ".", "join", "(", "app_dir", ",", "'.git'", ")", ")", ":", "exec_cmd", "(", "'git remote set-url upstream {}'", ".", "format", "(", "git_url", ")", ",", "cwd", "=", "app_dir", ")" ]
set app remote git url .
train
false
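The app name in set_git_remote_url above is derived purely from the URL string; a quick standalone check of that rsplit logic (the URL is a made-up example):

# Extract the repo name from a git URL the same way the snippet does.
git_url = 'https://github.com/frappe/erpnext.git'  # hypothetical URL
app = git_url.rsplit('/', 1)[1].rsplit('.', 1)[0]
print(app)  # -> 'erpnext'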
51,166
def test_TimeFormat_scale():
    t = Time('1900-01-01', scale='ut1')
    t.delta_ut1_utc = 0.0
    t.unix
    assert (t.unix == t.utc.unix)
[ "def", "test_TimeFormat_scale", "(", ")", ":", "t", "=", "Time", "(", "'1900-01-01'", ",", "scale", "=", "'ut1'", ")", "t", ".", "delta_ut1_utc", "=", "0.0", "t", ".", "unix", "assert", "(", "t", ".", "unix", "==", "t", ".", "utc", ".", "unix", ")" ]
guard against recurrence of #1122 .
train
false
51,167
def _convert_expression(expr):
    s = pprint_thing(expr)
    _check_expression(s)
    return s
[ "def", "_convert_expression", "(", "expr", ")", ":", "s", "=", "pprint_thing", "(", "expr", ")", "_check_expression", "(", "s", ")", "return", "s" ]
convert an object to an expression .
train
false
51,168
def test_extract_messages_without_rawsource():
    p = nodes.paragraph()
    p.append(nodes.Text('test'))
    p.append(nodes.Text('sentence'))
    assert (not p.rawsource)
    document = create_new_document()
    document.append(p)
    _transform(document)
    assert_node_count(extract_messages(document), nodes.TextElement, 1)
    assert [m for (n, m) in extract_messages(document)][0], 'text sentence'
[ "def", "test_extract_messages_without_rawsource", "(", ")", ":", "p", "=", "nodes", ".", "paragraph", "(", ")", "p", ".", "append", "(", "nodes", ".", "Text", "(", "'test'", ")", ")", "p", ".", "append", "(", "nodes", ".", "Text", "(", "'sentence'", ")", ")", "assert", "(", "not", "p", ".", "rawsource", ")", "document", "=", "create_new_document", "(", ")", "document", ".", "append", "(", "p", ")", "_transform", "(", "document", ")", "assert_node_count", "(", "extract_messages", "(", "document", ")", ",", "nodes", ".", "TextElement", ",", "1", ")", "assert", "[", "m", "for", "(", "n", ",", "m", ")", "in", "extract_messages", "(", "document", ")", "]", "[", "0", "]", ",", "'text sentence'" ]
check node .
train
false
51,169
@intercept_errors(UserAPIInternalError, ignore_errors=[UserAPIRequestError])
def get_account_settings(request, usernames=None, configuration=None, view=None):
    requesting_user = request.user
    usernames = (usernames or [requesting_user.username])
    requested_users = User.objects.select_related('profile').filter(username__in=usernames)
    if (not requested_users):
        raise UserNotFound()
    serialized_users = []
    for user in requested_users:
        has_full_access = (requesting_user.is_staff or (requesting_user.username == user.username))
        if (has_full_access and (view != 'shared')):
            admin_fields = settings.ACCOUNT_VISIBILITY_CONFIGURATION.get('admin_fields')
        else:
            admin_fields = None
        serialized_users.append(UserReadOnlySerializer(user, configuration=configuration, custom_fields=admin_fields, context={'request': request}).data)
    return serialized_users
[ "@", "intercept_errors", "(", "UserAPIInternalError", ",", "ignore_errors", "=", "[", "UserAPIRequestError", "]", ")", "def", "get_account_settings", "(", "request", ",", "usernames", "=", "None", ",", "configuration", "=", "None", ",", "view", "=", "None", ")", ":", "requesting_user", "=", "request", ".", "user", "usernames", "=", "(", "usernames", "or", "[", "requesting_user", ".", "username", "]", ")", "requested_users", "=", "User", ".", "objects", ".", "select_related", "(", "'profile'", ")", ".", "filter", "(", "username__in", "=", "usernames", ")", "if", "(", "not", "requested_users", ")", ":", "raise", "UserNotFound", "(", ")", "serialized_users", "=", "[", "]", "for", "user", "in", "requested_users", ":", "has_full_access", "=", "(", "requesting_user", ".", "is_staff", "or", "(", "requesting_user", ".", "username", "==", "user", ".", "username", ")", ")", "if", "(", "has_full_access", "and", "(", "view", "!=", "'shared'", ")", ")", ":", "admin_fields", "=", "settings", ".", "ACCOUNT_VISIBILITY_CONFIGURATION", ".", "get", "(", "'admin_fields'", ")", "else", ":", "admin_fields", "=", "None", "serialized_users", ".", "append", "(", "UserReadOnlySerializer", "(", "user", ",", "configuration", "=", "configuration", ",", "custom_fields", "=", "admin_fields", ",", "context", "=", "{", "'request'", ":", "request", "}", ")", ".", "data", ")", "return", "serialized_users" ]
returns account information for a user serialized as json .
train
false
51,170
def delete_index(sender, instance, **kwargs):
    if (isinstance(instance, Object) and instance.is_searchable()):
        ix = index.open_dir(settings.WHOOSH_INDEX)
        try:
            writer = ix.writer()
            try:
                writer.delete_by_term(u'id', unicode(instance.id))
                writer.commit()
            except:
                writer.cancel()
        except:
            pass
[ "def", "delete_index", "(", "sender", ",", "instance", ",", "**", "kwargs", ")", ":", "if", "(", "isinstance", "(", "instance", ",", "Object", ")", "and", "instance", ".", "is_searchable", "(", ")", ")", ":", "ix", "=", "index", ".", "open_dir", "(", "settings", ".", "WHOOSH_INDEX", ")", "try", ":", "writer", "=", "ix", ".", "writer", "(", ")", "try", ":", "writer", ".", "delete_by_term", "(", "u'id'", ",", "unicode", "(", "instance", ".", "id", ")", ")", "writer", ".", "commit", "(", ")", "except", ":", "writer", ".", "cancel", "(", ")", "except", ":", "pass" ]
delete object from search index .
train
false
51,171
def relabel_gexf_graph(G):
    try:
        mapping = [(u, G.node[u]['label']) for u in G]
    except KeyError:
        raise nx.NetworkXError('Failed to relabel nodes: missing node labels found. Use relabel=False.')
    (x, y) = zip(*mapping)
    if (len(set(y)) != len(G)):
        raise nx.NetworkXError('Failed to relabel nodes: duplicate node labels found. Use relabel=False.')
    mapping = dict(mapping)
    H = nx.relabel_nodes(G, mapping)
    for n in G:
        m = mapping[n]
        H.node[m]['id'] = n
        H.node[m].pop('label')
        if ('pid' in H.node[m]):
            H.node[m]['pid'] = mapping[G.node[n]['pid']]
        if ('parents' in H.node[m]):
            H.node[m]['parents'] = [mapping[p] for p in G.node[n]['parents']]
    return H
[ "def", "relabel_gexf_graph", "(", "G", ")", ":", "try", ":", "mapping", "=", "[", "(", "u", ",", "G", ".", "node", "[", "u", "]", "[", "'label'", "]", ")", "for", "u", "in", "G", "]", "except", "KeyError", ":", "raise", "nx", ".", "NetworkXError", "(", "'Failed to relabel nodes: missing node labels found. Use relabel=False.'", ")", "(", "x", ",", "y", ")", "=", "zip", "(", "*", "mapping", ")", "if", "(", "len", "(", "set", "(", "y", ")", ")", "!=", "len", "(", "G", ")", ")", ":", "raise", "nx", ".", "NetworkXError", "(", "'Failed to relabel nodes: duplicate node labels found. Use relabel=False.'", ")", "mapping", "=", "dict", "(", "mapping", ")", "H", "=", "nx", ".", "relabel_nodes", "(", "G", ",", "mapping", ")", "for", "n", "in", "G", ":", "m", "=", "mapping", "[", "n", "]", "H", ".", "node", "[", "m", "]", "[", "'id'", "]", "=", "n", "H", ".", "node", "[", "m", "]", ".", "pop", "(", "'label'", ")", "if", "(", "'pid'", "in", "H", ".", "node", "[", "m", "]", ")", ":", "H", ".", "node", "[", "m", "]", "[", "'pid'", "]", "=", "mapping", "[", "G", ".", "node", "[", "n", "]", "[", "'pid'", "]", "]", "if", "(", "'parents'", "in", "H", ".", "node", "[", "m", "]", ")", ":", "H", ".", "node", "[", "m", "]", "[", "'parents'", "]", "=", "[", "mapping", "[", "p", "]", "for", "p", "in", "G", ".", "node", "[", "n", "]", "[", "'parents'", "]", "]", "return", "H" ]
relabel graph using "label" node keyword for node label .
train
false
51,172
def dals(string):
    return dedent(string).lstrip()
[ "def", "dals", "(", "string", ")", ":", "return", "dedent", "(", "string", ")", ".", "lstrip", "(", ")" ]
dedent and left-strip .
train
false
51,173
def nova_todo_format(physical_line, tokens):
    pos = physical_line.find('TODO')
    pos1 = physical_line.find('TODO(')
    pos2 = physical_line.find('#')
    if ((pos != pos1) and (pos2 >= 0) and (pos2 < pos) and (len(tokens) == 0)):
        return (pos, 'N101: Use TODO(NAME)')
[ "def", "nova_todo_format", "(", "physical_line", ",", "tokens", ")", ":", "pos", "=", "physical_line", ".", "find", "(", "'TODO'", ")", "pos1", "=", "physical_line", ".", "find", "(", "'TODO('", ")", "pos2", "=", "physical_line", ".", "find", "(", "'#'", ")", "if", "(", "(", "pos", "!=", "pos1", ")", "and", "(", "pos2", ">=", "0", ")", "and", "(", "pos2", "<", "pos", ")", "and", "(", "len", "(", "tokens", ")", "==", "0", ")", ")", ":", "return", "(", "pos", ",", "'N101: Use TODO(NAME)'", ")" ]
check for todo() .
train
false
51,174
def _format_coredump_stdout(cmd_ret):
    ret_dict = {}
    for line in cmd_ret['stdout'].splitlines():
        line = line.strip().lower()
        if line.startswith('enabled:'):
            enabled = line.split(':')
            if ('true' in enabled[1]):
                ret_dict['enabled'] = True
            else:
                ret_dict['enabled'] = False
                break
        if line.startswith('host vnic:'):
            host_vnic = line.split(':')
            ret_dict['host_vnic'] = host_vnic[1].strip()
        if line.startswith('network server ip:'):
            ip = line.split(':')
            ret_dict['ip'] = ip[1].strip()
        if line.startswith('network server port:'):
            ip_port = line.split(':')
            ret_dict['port'] = ip_port[1].strip()
    return ret_dict
[ "def", "_format_coredump_stdout", "(", "cmd_ret", ")", ":", "ret_dict", "=", "{", "}", "for", "line", "in", "cmd_ret", "[", "'stdout'", "]", ".", "splitlines", "(", ")", ":", "line", "=", "line", ".", "strip", "(", ")", ".", "lower", "(", ")", "if", "line", ".", "startswith", "(", "'enabled:'", ")", ":", "enabled", "=", "line", ".", "split", "(", "':'", ")", "if", "(", "'true'", "in", "enabled", "[", "1", "]", ")", ":", "ret_dict", "[", "'enabled'", "]", "=", "True", "else", ":", "ret_dict", "[", "'enabled'", "]", "=", "False", "break", "if", "line", ".", "startswith", "(", "'host vnic:'", ")", ":", "host_vnic", "=", "line", ".", "split", "(", "':'", ")", "ret_dict", "[", "'host_vnic'", "]", "=", "host_vnic", "[", "1", "]", ".", "strip", "(", ")", "if", "line", ".", "startswith", "(", "'network server ip:'", ")", ":", "ip", "=", "line", ".", "split", "(", "':'", ")", "ret_dict", "[", "'ip'", "]", "=", "ip", "[", "1", "]", ".", "strip", "(", ")", "if", "line", ".", "startswith", "(", "'network server port:'", ")", ":", "ip_port", "=", "line", ".", "split", "(", "':'", ")", "ret_dict", "[", "'port'", "]", "=", "ip_port", "[", "1", "]", ".", "strip", "(", ")", "return", "ret_dict" ]
helper function to format the stdout from the get_coredump_network_config function .
train
true
51,177
def _get_mock_request(student):
    request = RequestFactory().get('/')
    request.user = student
    return request
[ "def", "_get_mock_request", "(", "student", ")", ":", "request", "=", "RequestFactory", "(", ")", ".", "get", "(", "'/'", ")", "request", ".", "user", "=", "student", "return", "request" ]
make a fake request because grading code expects to be able to look at the request .
train
false
51,179
def console_decode(string, encoding=CONSOLE_ENCODING, force=False):
    if (is_unicode(string) and (not (IRONPYTHON and force))):
        return string
    encoding = {'CONSOLE': CONSOLE_ENCODING, 'SYSTEM': SYSTEM_ENCODING}.get(encoding.upper(), encoding)
    try:
        return string.decode(encoding)
    except UnicodeError:
        return unic(string)
[ "def", "console_decode", "(", "string", ",", "encoding", "=", "CONSOLE_ENCODING", ",", "force", "=", "False", ")", ":", "if", "(", "is_unicode", "(", "string", ")", "and", "(", "not", "(", "IRONPYTHON", "and", "force", ")", ")", ")", ":", "return", "string", "encoding", "=", "{", "'CONSOLE'", ":", "CONSOLE_ENCODING", ",", "'SYSTEM'", ":", "SYSTEM_ENCODING", "}", ".", "get", "(", "encoding", ".", "upper", "(", ")", ",", "encoding", ")", "try", ":", "return", "string", ".", "decode", "(", "encoding", ")", "except", "UnicodeError", ":", "return", "unic", "(", "string", ")" ]
decodes bytes from console encoding to unicode .
train
false
51,181
def make_eye_loader(dates, sids):
    return EyeLoader(TestingDataSet.columns, dates, sids)
[ "def", "make_eye_loader", "(", "dates", ",", "sids", ")", ":", "return", "EyeLoader", "(", "TestingDataSet", ".", "columns", ",", "dates", ",", "sids", ")" ]
make a pipelineloader that emits np .
train
false
51,182
def linux_interfaces():
    ifaces = dict()
    ip_path = salt.utils.which('ip')
    ifconfig_path = (None if ip_path else salt.utils.which('ifconfig'))
    if ip_path:
        cmd1 = subprocess.Popen('{0} link show'.format(ip_path), shell=True, close_fds=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT).communicate()[0]
        cmd2 = subprocess.Popen('{0} addr show'.format(ip_path), shell=True, close_fds=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT).communicate()[0]
        ifaces = _interfaces_ip('{0}\n{1}'.format(salt.utils.to_str(cmd1), salt.utils.to_str(cmd2)))
    elif ifconfig_path:
        cmd = subprocess.Popen('{0} -a'.format(ifconfig_path), shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT).communicate()[0]
        ifaces = _interfaces_ifconfig(salt.utils.to_str(cmd))
    return ifaces
[ "def", "linux_interfaces", "(", ")", ":", "ifaces", "=", "dict", "(", ")", "ip_path", "=", "salt", ".", "utils", ".", "which", "(", "'ip'", ")", "ifconfig_path", "=", "(", "None", "if", "ip_path", "else", "salt", ".", "utils", ".", "which", "(", "'ifconfig'", ")", ")", "if", "ip_path", ":", "cmd1", "=", "subprocess", ".", "Popen", "(", "'{0} link show'", ".", "format", "(", "ip_path", ")", ",", "shell", "=", "True", ",", "close_fds", "=", "True", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "STDOUT", ")", ".", "communicate", "(", ")", "[", "0", "]", "cmd2", "=", "subprocess", ".", "Popen", "(", "'{0} addr show'", ".", "format", "(", "ip_path", ")", ",", "shell", "=", "True", ",", "close_fds", "=", "True", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "STDOUT", ")", ".", "communicate", "(", ")", "[", "0", "]", "ifaces", "=", "_interfaces_ip", "(", "'{0}\\n{1}'", ".", "format", "(", "salt", ".", "utils", ".", "to_str", "(", "cmd1", ")", ",", "salt", ".", "utils", ".", "to_str", "(", "cmd2", ")", ")", ")", "elif", "ifconfig_path", ":", "cmd", "=", "subprocess", ".", "Popen", "(", "'{0} -a'", ".", "format", "(", "ifconfig_path", ")", ",", "shell", "=", "True", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "STDOUT", ")", ".", "communicate", "(", ")", "[", "0", "]", "ifaces", "=", "_interfaces_ifconfig", "(", "salt", ".", "utils", ".", "to_str", "(", "cmd", ")", ")", "return", "ifaces" ]
obtain interface information for *nix/bsd variants .
train
true
51,184
def get_windist_exec(pyver, scipy_verstr):
    if BUILD_MSI:
        ext = '.msi'
    else:
        ext = '.exe'
    name = ('scipy-%s.win32-py%s%s' % (scipy_verstr, pyver, ext))
    return name
[ "def", "get_windist_exec", "(", "pyver", ",", "scipy_verstr", ")", ":", "if", "BUILD_MSI", ":", "ext", "=", "'.msi'", "else", ":", "ext", "=", "'.exe'", "name", "=", "(", "'scipy-%s.win32-py%s%s'", "%", "(", "scipy_verstr", ",", "pyver", ",", "ext", ")", ")", "return", "name" ]
return the name of the installer built by wininst command .
train
false
51,185
@cronjobs.register
def update_search_ctr_metric():
    if settings.STAGE:
        return
    latest_metric = _get_latest_metric(SEARCH_CLICKS_METRIC_CODE)
    if (latest_metric is not None):
        latest_metric_date = latest_metric.start
    else:
        latest_metric_date = date(2011, 1, 1)
    start = (latest_metric_date + timedelta(days=1))
    end = (date.today() - timedelta(days=1))
    ctr_data = googleanalytics.search_ctr(start, end)
    clicks_kind = MetricKind.objects.get(code=SEARCH_CLICKS_METRIC_CODE)
    searches_kind = MetricKind.objects.get(code=SEARCH_SEARCHES_METRIC_CODE)
    for (date_str, ctr) in ctr_data.items():
        day = datetime.strptime(date_str, '%Y-%m-%d').date()
        Metric.objects.create(kind=searches_kind, start=day, end=(day + timedelta(days=1)), value=1000)
        Metric.objects.create(kind=clicks_kind, start=day, end=(day + timedelta(days=1)), value=(round(ctr, 1) * 10))
[ "@", "cronjobs", ".", "register", "def", "update_search_ctr_metric", "(", ")", ":", "if", "settings", ".", "STAGE", ":", "return", "latest_metric", "=", "_get_latest_metric", "(", "SEARCH_CLICKS_METRIC_CODE", ")", "if", "(", "latest_metric", "is", "not", "None", ")", ":", "latest_metric_date", "=", "latest_metric", ".", "start", "else", ":", "latest_metric_date", "=", "date", "(", "2011", ",", "1", ",", "1", ")", "start", "=", "(", "latest_metric_date", "+", "timedelta", "(", "days", "=", "1", ")", ")", "end", "=", "(", "date", ".", "today", "(", ")", "-", "timedelta", "(", "days", "=", "1", ")", ")", "ctr_data", "=", "googleanalytics", ".", "search_ctr", "(", "start", ",", "end", ")", "clicks_kind", "=", "MetricKind", ".", "objects", ".", "get", "(", "code", "=", "SEARCH_CLICKS_METRIC_CODE", ")", "searches_kind", "=", "MetricKind", ".", "objects", ".", "get", "(", "code", "=", "SEARCH_SEARCHES_METRIC_CODE", ")", "for", "(", "date_str", ",", "ctr", ")", "in", "ctr_data", ".", "items", "(", ")", ":", "day", "=", "datetime", ".", "strptime", "(", "date_str", ",", "'%Y-%m-%d'", ")", ".", "date", "(", ")", "Metric", ".", "objects", ".", "create", "(", "kind", "=", "searches_kind", ",", "start", "=", "day", ",", "end", "=", "(", "day", "+", "timedelta", "(", "days", "=", "1", ")", ")", ",", "value", "=", "1000", ")", "Metric", ".", "objects", ".", "create", "(", "kind", "=", "clicks_kind", ",", "start", "=", "day", ",", "end", "=", "(", "day", "+", "timedelta", "(", "days", "=", "1", ")", ")", ",", "value", "=", "(", "round", "(", "ctr", ",", "1", ")", "*", "10", ")", ")" ]
get new search ctr data from google analytics and save .
train
false
51,186
def groups_norm2(A, n_orient):
    n_positions = (A.shape[0] // n_orient)
    return np.sum(np.power(A, 2, A).reshape(n_positions, (-1)), axis=1)
[ "def", "groups_norm2", "(", "A", ",", "n_orient", ")", ":", "n_positions", "=", "(", "A", ".", "shape", "[", "0", "]", "//", "n_orient", ")", "return", "np", ".", "sum", "(", "np", ".", "power", "(", "A", ",", "2", ",", "A", ")", ".", "reshape", "(", "n_positions", ",", "(", "-", "1", ")", ")", ",", "axis", "=", "1", ")" ]
compute squared l2 norms of groups inplace .
train
false
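Note that np.power(A, 2, A) in groups_norm2 above writes the squares back into A (the third argument is the out buffer), so the input is modified in place. A small check of the same group-norm computation on made-up data:

import numpy as np

A = np.arange(6.0).reshape(6, 1)  # 6 rows, e.g. 3 positions x 2 orientations
n_orient = 2
n_positions = A.shape[0] // n_orient
# Square in place, then sum each group of n_orient consecutive rows.
norms = np.sum(np.power(A, 2, A).reshape(n_positions, -1), axis=1)
print(norms)  # [ 1. 13. 41.] -- 0^2+1^2, 2^2+3^2, 4^2+5^2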
51,187
def sprot_search_de(text, swissprot=1, trembl=None, cgi='http://www.expasy.ch/cgi-bin/sprot-search-de'):
    variables = {'SEARCH': text}
    if swissprot:
        variables['S'] = 'on'
    if trembl:
        variables['T'] = 'on'
    options = _urlencode(variables)
    fullcgi = ('%s?%s' % (cgi, options))
    handle = _urlopen(fullcgi)
    return handle
[ "def", "sprot_search_de", "(", "text", ",", "swissprot", "=", "1", ",", "trembl", "=", "None", ",", "cgi", "=", "'http://www.expasy.ch/cgi-bin/sprot-search-de'", ")", ":", "variables", "=", "{", "'SEARCH'", ":", "text", "}", "if", "swissprot", ":", "variables", "[", "'S'", "]", "=", "'on'", "if", "trembl", ":", "variables", "[", "'T'", "]", "=", "'on'", "options", "=", "_urlencode", "(", "variables", ")", "fullcgi", "=", "(", "'%s?%s'", "%", "(", "cgi", ",", "options", ")", ")", "handle", "=", "_urlopen", "(", "fullcgi", ")", "return", "handle" ]
sprot_search_de -> handle search swissprot by name .
train
false
51,188
def register_web_routes(application, register):
    def get_index_file():
        return template('index.html')
    def get_static_file(filename):
        return static_file(filename, root='./media')
    register.route('/', method='GET', name='get_index_file')(get_index_file)
    register.route('/media/<filename:path>', method='GET', name='get_static_file')(get_static_file)
[ "def", "register_web_routes", "(", "application", ",", "register", ")", ":", "def", "get_index_file", "(", ")", ":", "return", "template", "(", "'index.html'", ")", "def", "get_static_file", "(", "filename", ")", ":", "return", "static_file", "(", "filename", ",", "root", "=", "'./media'", ")", "register", ".", "route", "(", "'/'", ",", "method", "=", "'GET'", ",", "name", "=", "'get_index_file'", ")", "(", "get_index_file", ")", "register", ".", "route", "(", "'/media/<filename:path>'", ",", "method", "=", "'GET'", ",", "name", "=", "'get_static_file'", ")", "(", "get_static_file", ")" ]
a helper method to register the default web routes of a single page application .
train
false
51,190
def calculate_debounced_passing(recent_results, debounce=0):
    if (not recent_results):
        return True
    debounce_window = recent_results[:(debounce + 1)]
    for r in debounce_window:
        if r.succeeded:
            return True
    return False
[ "def", "calculate_debounced_passing", "(", "recent_results", ",", "debounce", "=", "0", ")", ":", "if", "(", "not", "recent_results", ")", ":", "return", "True", "debounce_window", "=", "recent_results", "[", ":", "(", "debounce", "+", "1", ")", "]", "for", "r", "in", "debounce_window", ":", "if", "r", ".", "succeeded", ":", "return", "True", "return", "False" ]
debounce is the number of previous failures we need to mark a search as passing or failing returns: true if passing given debounce factor false if failing .
train
false
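The debounce logic above only inspects the debounce+1 most recent results. A self-contained walk-through, restating the function with a stand-in result type (Result is illustrative, not the project's real model):

from collections import namedtuple

Result = namedtuple('Result', 'succeeded')  # stand-in for the real result model

def calculate_debounced_passing(recent_results, debounce=0):
    if not recent_results:
        return True
    for r in recent_results[:debounce + 1]:
        if r.succeeded:
            return True
    return False

# Newest result first: one failure, then two successes.
history = [Result(False), Result(True), Result(True)]
print(calculate_debounced_passing(history, debounce=0))  # False: only newest checked
print(calculate_debounced_passing(history, debounce=1))  # True: window includes a pass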
51,191
def getXMLElementsByKey(key, xmlElement):
    if (key not in xmlElement.attributeDictionary):
        return []
    word = str(xmlElement.attributeDictionary[key]).strip()
    evaluatedLinkValue = getEvaluatedLinkValue(word, xmlElement)
    if (evaluatedLinkValue.__class__.__name__ == 'XMLElement'):
        return [evaluatedLinkValue]
    if (evaluatedLinkValue.__class__ == list):
        return evaluatedLinkValue
    print 'Warning, could not get XMLElements in getXMLElementsByKey in evaluate for:'
    print key
    print evaluatedLinkValue
    print xmlElement
    return None
[ "def", "getXMLElementsByKey", "(", "key", ",", "xmlElement", ")", ":", "if", "(", "key", "not", "in", "xmlElement", ".", "attributeDictionary", ")", ":", "return", "[", "]", "word", "=", "str", "(", "xmlElement", ".", "attributeDictionary", "[", "key", "]", ")", ".", "strip", "(", ")", "evaluatedLinkValue", "=", "getEvaluatedLinkValue", "(", "word", ",", "xmlElement", ")", "if", "(", "evaluatedLinkValue", ".", "__class__", ".", "__name__", "==", "'XMLElement'", ")", ":", "return", "[", "evaluatedLinkValue", "]", "if", "(", "evaluatedLinkValue", ".", "__class__", "==", "list", ")", ":", "return", "evaluatedLinkValue", "print", "'Warning, could not get XMLElements in getXMLElementsByKey in evaluate for:'", "print", "key", "print", "evaluatedLinkValue", "print", "xmlElement", "return", "None" ]
get the xml elements by key .
train
false
51,192
def onCreateAccountCallbackFromDB(accountName, errorno, datas):
    INFO_MSG(('onCreateAccountCallbackFromDB() accountName=%s, errorno=%s' % (accountName, errorno)))
[ "def", "onCreateAccountCallbackFromDB", "(", "accountName", ",", "errorno", ",", "datas", ")", ":", "INFO_MSG", "(", "(", "'onCreateAccountCallbackFromDB() accountName=%s, errorno=%s'", "%", "(", "accountName", ",", "errorno", ")", ")", ")" ]
kbengine method .
train
false
51,193
def formatUnformattableEvent(event, error):
    try:
        return u'Unable to format event {event!r}: {error}'.format(event=event, error=error)
    except BaseException:
        failure = Failure()
        text = u', '.join((u' = '.join((safe_repr(key), safe_repr(value))) for (key, value) in event.items()))
        return u'MESSAGE LOST: unformattable object logged: {error}\nRecoverable data: {text}\nException during formatting:\n{failure}'.format(error=safe_repr(error), failure=failure, text=text)
[ "def", "formatUnformattableEvent", "(", "event", ",", "error", ")", ":", "try", ":", "return", "u'Unable to format event {event!r}: {error}'", ".", "format", "(", "event", "=", "event", ",", "error", "=", "error", ")", "except", "BaseException", ":", "failure", "=", "Failure", "(", ")", "text", "=", "u', '", ".", "join", "(", "(", "u' = '", ".", "join", "(", "(", "safe_repr", "(", "key", ")", ",", "safe_repr", "(", "value", ")", ")", ")", "for", "(", "key", ",", "value", ")", "in", "event", ".", "items", "(", ")", ")", ")", "return", "u'MESSAGE LOST: unformattable object logged: {error}\\nRecoverable data: {text}\\nException during formatting:\\n{failure}'", ".", "format", "(", "error", "=", "safe_repr", "(", "error", ")", ",", "failure", "=", "failure", ",", "text", "=", "text", ")" ]
formats an event as a l{unicode} that describes the event generically and a formatting error .
train
false
51,197
def shuffle_data(X, L, seed=1234):
    prng = RandomState(seed)
    inds = np.arange(len(X))
    prng.shuffle(inds)
    X = [X[i] for i in inds]
    L = L[inds]
    return (X, L)
[ "def", "shuffle_data", "(", "X", ",", "L", ",", "seed", "=", "1234", ")", ":", "prng", "=", "RandomState", "(", "seed", ")", "inds", "=", "np", ".", "arange", "(", "len", "(", "X", ")", ")", "prng", ".", "shuffle", "(", "inds", ")", "X", "=", "[", "X", "[", "i", "]", "for", "i", "in", "inds", "]", "L", "=", "L", "[", "inds", "]", "return", "(", "X", ",", "L", ")" ]
shuffle the data .
train
false
51,198
def public_host(app):
    if app.subdomain_host:
        return app.subdomain_host
    else:
        return app.proxy.public_server.host
[ "def", "public_host", "(", "app", ")", ":", "if", "app", ".", "subdomain_host", ":", "return", "app", ".", "subdomain_host", "else", ":", "return", "app", ".", "proxy", ".", "public_server", ".", "host" ]
return the public *host* of the given jupyterhub instance .
train
false
51,199
@gen.coroutine
def ValidateUserPassword(client, user, password):
    assert (user is not None), 'user should exist in login case'
    if (user.pwd_hash is None):
        raise PermissionError(_NO_PASSWORD_SET)
    assert user.salt, user
    user_salt = user.salt.Decrypt()
    user_pwd_hash = user.pwd_hash.Decrypt()
    (yield ValidatePassword(client, user.user_id, password, user_salt, user_pwd_hash))
[ "@", "gen", ".", "coroutine", "def", "ValidateUserPassword", "(", "client", ",", "user", ",", "password", ")", ":", "assert", "(", "user", "is", "not", "None", ")", ",", "'user should exist in login case'", "if", "(", "user", ".", "pwd_hash", "is", "None", ")", ":", "raise", "PermissionError", "(", "_NO_PASSWORD_SET", ")", "assert", "user", ".", "salt", ",", "user", "user_salt", "=", "user", ".", "salt", ".", "Decrypt", "(", ")", "user_pwd_hash", "=", "user", ".", "pwd_hash", ".", "Decrypt", "(", ")", "(", "yield", "ValidatePassword", "(", "client", ",", "user", ".", "user_id", ",", "password", ",", "user_salt", ",", "user_pwd_hash", ")", ")" ]
validates that the users password matches the given password and that the maximum incorrect guess count has not been reached .
train
false
51,200
@requires_segment_info
def last_status(pl, segment_info):
    if (not segment_info[u'args'].last_exit_code):
        return None
    return [{u'contents': str(segment_info[u'args'].last_exit_code), u'highlight_groups': [u'exit_fail']}]
[ "@", "requires_segment_info", "def", "last_status", "(", "pl", ",", "segment_info", ")", ":", "if", "(", "not", "segment_info", "[", "u'args'", "]", ".", "last_exit_code", ")", ":", "return", "None", "return", "[", "{", "u'contents'", ":", "str", "(", "segment_info", "[", "u'args'", "]", ".", "last_exit_code", ")", ",", "u'highlight_groups'", ":", "[", "u'exit_fail'", "]", "}", "]" ]
return last exit code .
train
false
51,201
def update_sequencer_id_in_request_type(migrate_engine, request_type_id, sequencer_id):
    cmd = ('UPDATE request_type SET sequencer_id=%i WHERE id=%i' % (sequencer_id, request_type_id))
    migrate_engine.execute(cmd)
[ "def", "update_sequencer_id_in_request_type", "(", "migrate_engine", ",", "request_type_id", ",", "sequencer_id", ")", ":", "cmd", "=", "(", "'UPDATE request_type SET sequencer_id=%i WHERE id=%i'", "%", "(", "sequencer_id", ",", "request_type_id", ")", ")", "migrate_engine", ".", "execute", "(", "cmd", ")" ]
update the foreign key to the sequencer table in the request_type table .
train
false
51,202
def _remove_at_charset(tokens):
    tokens = iter(tokens)
    header = list(islice(tokens, 4))
    if ([t.type for t in header] == [u'ATKEYWORD', u'S', u'STRING', u';']):
        (atkw, space, string, semicolon) = header
        if (((atkw.value, space.value) == (u'@charset', u' ')) and (string.as_css()[0] == u'"')):
            return tokens
    return chain(header, tokens)
[ "def", "_remove_at_charset", "(", "tokens", ")", ":", "tokens", "=", "iter", "(", "tokens", ")", "header", "=", "list", "(", "islice", "(", "tokens", ",", "4", ")", ")", "if", "(", "[", "t", ".", "type", "for", "t", "in", "header", "]", "==", "[", "u'ATKEYWORD'", ",", "u'S'", ",", "u'STRING'", ",", "u';'", "]", ")", ":", "(", "atkw", ",", "space", ",", "string", ",", "semicolon", ")", "=", "header", "if", "(", "(", "(", "atkw", ".", "value", ",", "space", ".", "value", ")", "==", "(", "u'@charset'", ",", "u' '", ")", ")", "and", "(", "string", ".", "as_css", "(", ")", "[", "0", "]", "==", "u'\"'", ")", ")", ":", "return", "tokens", "return", "chain", "(", "header", ",", "tokens", ")" ]
remove any valid @charset at the beggining of a token stream .
train
false
51,204
def update_router_lport(cluster, lrouter_uuid, lrouter_port_uuid, tenant_id, quantum_port_id, display_name, admin_status_enabled, ip_addresses):
    lport_obj = dict(admin_status_enabled=admin_status_enabled, display_name=display_name, tags=[dict(scope='os_tid', tag=tenant_id), dict(scope='q_port_id', tag=quantum_port_id)], ip_addresses=ip_addresses, type='LogicalRouterPortConfig')
    for key in lport_obj.keys():
        if (lport_obj[key] is None):
            del lport_obj[key]
    path = _build_uri_path(LROUTERPORT_RESOURCE, lrouter_port_uuid, parent_resource_id=lrouter_uuid)
    try:
        resp_obj = do_single_request(HTTP_PUT, path, json.dumps(lport_obj), cluster=cluster)
    except NvpApiClient.ResourceNotFound as e:
        LOG.error(_('Logical router or router port not found, Error: %s'), str(e))
        raise
    result = json.loads(resp_obj)
    LOG.debug(_('Updated logical port %(lport_uuid)s on logical router %(lrouter_uuid)s'), {'lport_uuid': lrouter_port_uuid, 'lrouter_uuid': lrouter_uuid})
    return result
[ "def", "update_router_lport", "(", "cluster", ",", "lrouter_uuid", ",", "lrouter_port_uuid", ",", "tenant_id", ",", "quantum_port_id", ",", "display_name", ",", "admin_status_enabled", ",", "ip_addresses", ")", ":", "lport_obj", "=", "dict", "(", "admin_status_enabled", "=", "admin_status_enabled", ",", "display_name", "=", "display_name", ",", "tags", "=", "[", "dict", "(", "scope", "=", "'os_tid'", ",", "tag", "=", "tenant_id", ")", ",", "dict", "(", "scope", "=", "'q_port_id'", ",", "tag", "=", "quantum_port_id", ")", "]", ",", "ip_addresses", "=", "ip_addresses", ",", "type", "=", "'LogicalRouterPortConfig'", ")", "for", "key", "in", "lport_obj", ".", "keys", "(", ")", ":", "if", "(", "lport_obj", "[", "key", "]", "is", "None", ")", ":", "del", "lport_obj", "[", "key", "]", "path", "=", "_build_uri_path", "(", "LROUTERPORT_RESOURCE", ",", "lrouter_port_uuid", ",", "parent_resource_id", "=", "lrouter_uuid", ")", "try", ":", "resp_obj", "=", "do_single_request", "(", "HTTP_PUT", ",", "path", ",", "json", ".", "dumps", "(", "lport_obj", ")", ",", "cluster", "=", "cluster", ")", "except", "NvpApiClient", ".", "ResourceNotFound", "as", "e", ":", "LOG", ".", "error", "(", "_", "(", "'Logical router or router port not found, Error: %s'", ")", ",", "str", "(", "e", ")", ")", "raise", "result", "=", "json", ".", "loads", "(", "resp_obj", ")", "LOG", ".", "debug", "(", "_", "(", "'Updated logical port %(lport_uuid)s on logical router %(lrouter_uuid)s'", ")", ",", "{", "'lport_uuid'", ":", "lrouter_port_uuid", ",", "'lrouter_uuid'", ":", "lrouter_uuid", "}", ")", "return", "result" ]
updates a logical port on the assigned logical router .
train
false
51,205
def validate_fonttype(s):
    fonttypes = {'type3': 3, 'truetype': 42}
    try:
        fonttype = validate_int(s)
    except ValueError:
        if (s.lower() in fonttypes.keys()):
            return fonttypes[s.lower()]
        raise ValueError(('Supported Postscript/PDF font types are %s' % fonttypes.keys()))
    else:
        if (fonttype not in fonttypes.values()):
            raise ValueError(('Supported Postscript/PDF font types are %s' % fonttypes.values()))
        return fonttype
[ "def", "validate_fonttype", "(", "s", ")", ":", "fonttypes", "=", "{", "'type3'", ":", "3", ",", "'truetype'", ":", "42", "}", "try", ":", "fonttype", "=", "validate_int", "(", "s", ")", "except", "ValueError", ":", "if", "(", "s", ".", "lower", "(", ")", "in", "fonttypes", ".", "keys", "(", ")", ")", ":", "return", "fonttypes", "[", "s", ".", "lower", "(", ")", "]", "raise", "ValueError", "(", "(", "'Supported Postscript/PDF font types are %s'", "%", "fonttypes", ".", "keys", "(", ")", ")", ")", "else", ":", "if", "(", "fonttype", "not", "in", "fonttypes", ".", "values", "(", ")", ")", ":", "raise", "ValueError", "(", "(", "'Supported Postscript/PDF font types are %s'", "%", "fonttypes", ".", "values", "(", ")", ")", ")", "return", "fonttype" ]
confirm that this is a postscript of pdf font type that we know how to convert to .
train
false
51,209
@check_job_access_permission()
@check_job_edition_permission()
def create_coordinator_dataset(request, coordinator):
    dataset = Dataset(coordinator=coordinator)
    response = {'status': (-1), 'data': 'None'}
    if (request.method == 'POST'):
        dataset_form = DatasetForm(request.POST, instance=dataset, prefix='create')
        if dataset_form.is_valid():
            dataset_form.save()
            response['status'] = 0
            response['data'] = (reverse('oozie:edit_coordinator', kwargs={'coordinator': coordinator.id}) + '#listDataset')
            request.info(_('Dataset created'))
    else:
        response['data'] = _('A POST request is required.')
    if (response['status'] != 0):
        response['data'] = render('editor/create_coordinator_dataset.mako', request, {'coordinator': coordinator, 'dataset_form': dataset_form, 'dataset': dataset}, force_template=True).content
    return JsonResponse(response, safe=False)
[ "@", "check_job_access_permission", "(", ")", "@", "check_job_edition_permission", "(", ")", "def", "create_coordinator_dataset", "(", "request", ",", "coordinator", ")", ":", "dataset", "=", "Dataset", "(", "coordinator", "=", "coordinator", ")", "response", "=", "{", "'status'", ":", "(", "-", "1", ")", ",", "'data'", ":", "'None'", "}", "if", "(", "request", ".", "method", "==", "'POST'", ")", ":", "dataset_form", "=", "DatasetForm", "(", "request", ".", "POST", ",", "instance", "=", "dataset", ",", "prefix", "=", "'create'", ")", "if", "dataset_form", ".", "is_valid", "(", ")", ":", "dataset_form", ".", "save", "(", ")", "response", "[", "'status'", "]", "=", "0", "response", "[", "'data'", "]", "=", "(", "reverse", "(", "'oozie:edit_coordinator'", ",", "kwargs", "=", "{", "'coordinator'", ":", "coordinator", ".", "id", "}", ")", "+", "'#listDataset'", ")", "request", ".", "info", "(", "_", "(", "'Dataset created'", ")", ")", "else", ":", "response", "[", "'data'", "]", "=", "_", "(", "'A POST request is required.'", ")", "if", "(", "response", "[", "'status'", "]", "!=", "0", ")", ":", "response", "[", "'data'", "]", "=", "render", "(", "'editor/create_coordinator_dataset.mako'", ",", "request", ",", "{", "'coordinator'", ":", "coordinator", ",", "'dataset_form'", ":", "dataset_form", ",", "'dataset'", ":", "dataset", "}", ",", "force_template", "=", "True", ")", ".", "content", "return", "JsonResponse", "(", "response", ",", "safe", "=", "False", ")" ]
returns {status 0/1 .
train
false
51,210
def safe_mkdtemp(cleaner=_mkdtemp_atexit_cleaner, **kw):
    with _MKDTEMP_LOCK:
        return register_rmtree(tempfile.mkdtemp(**kw), cleaner=cleaner)
[ "def", "safe_mkdtemp", "(", "cleaner", "=", "_mkdtemp_atexit_cleaner", ",", "**", "kw", ")", ":", "with", "_MKDTEMP_LOCK", ":", "return", "register_rmtree", "(", "tempfile", ".", "mkdtemp", "(", "**", "kw", ")", ",", "cleaner", "=", "cleaner", ")" ]
create a temporary directory that is cleaned up on process exit .
train
true
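safe_mkdtemp above relies on helpers (_MKDTEMP_LOCK, register_rmtree) not shown in this record; a minimal stdlib-only sketch of the same idea, assuming cleanup at interpreter exit is sufficient:

import atexit
import shutil
import tempfile

def safe_mkdtemp_sketch(**kw):
    # Create a temp dir and schedule its removal when the process exits.
    path = tempfile.mkdtemp(**kw)
    atexit.register(shutil.rmtree, path, True)  # positional True = ignore_errors
    return path

work_dir = safe_mkdtemp_sketch(prefix='demo-')
print(work_dir)  # removed automatically at exit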
51,211
def activate_api_deployment(restApiId, stageName, deploymentId, region=None, key=None, keyid=None, profile=None):
    try:
        conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
        response = conn.update_stage(restApiId=restApiId, stageName=stageName, patchOperations=[{'op': 'replace', 'path': '/deploymentId', 'value': deploymentId}])
        return {'set': True, 'response': _convert_datetime_str(response)}
    except ClientError as e:
        return {'set': False, 'error': salt.utils.boto3.get_error(e)}
[ "def", "activate_api_deployment", "(", "restApiId", ",", "stageName", ",", "deploymentId", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "try", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "response", "=", "conn", ".", "update_stage", "(", "restApiId", "=", "restApiId", ",", "stageName", "=", "stageName", ",", "patchOperations", "=", "[", "{", "'op'", ":", "'replace'", ",", "'path'", ":", "'/deploymentId'", ",", "'value'", ":", "deploymentId", "}", "]", ")", "return", "{", "'set'", ":", "True", ",", "'response'", ":", "_convert_datetime_str", "(", "response", ")", "}", "except", "ClientError", "as", "e", ":", "return", "{", "'set'", ":", "False", ",", "'error'", ":", "salt", ".", "utils", ".", "boto3", ".", "get_error", "(", "e", ")", "}" ]
activates previously deployed deployment for a given stage cli example: .
train
false
51,212
def authorize_project_context(context, project_id):
    if is_user_context(context):
        if (not context.project_id):
            raise exception.Forbidden()
        elif (context.project_id != project_id):
            raise exception.Forbidden()
[ "def", "authorize_project_context", "(", "context", ",", "project_id", ")", ":", "if", "is_user_context", "(", "context", ")", ":", "if", "(", "not", "context", ".", "project_id", ")", ":", "raise", "exception", ".", "Forbidden", "(", ")", "elif", "(", "context", ".", "project_id", "!=", "project_id", ")", ":", "raise", "exception", ".", "Forbidden", "(", ")" ]
ensures a request has permission to access the given project .
train
false
51,215
def _LineHasContinuationMarkers(uwline):
    return any((tok.is_continuation for tok in uwline.tokens))
[ "def", "_LineHasContinuationMarkers", "(", "uwline", ")", ":", "return", "any", "(", "(", "tok", ".", "is_continuation", "for", "tok", "in", "uwline", ".", "tokens", ")", ")" ]
return true if the line has continuation markers in it .
train
false
51,216
def ellip(N, rp, rs, Wn, btype='low', analog=False, output='ba'):
    return iirfilter(N, Wn, rs=rs, rp=rp, btype=btype, analog=analog, output=output, ftype='elliptic')
[ "def", "ellip", "(", "N", ",", "rp", ",", "rs", ",", "Wn", ",", "btype", "=", "'low'", ",", "analog", "=", "False", ",", "output", "=", "'ba'", ")", ":", "return", "iirfilter", "(", "N", ",", "Wn", ",", "rs", "=", "rs", ",", "rp", "=", "rp", ",", "btype", "=", "btype", ",", "analog", "=", "analog", ",", "output", "=", "output", ",", "ftype", "=", "'elliptic'", ")" ]
elliptic digital and analog filter design .
train
false
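The ellip wrapper above is SciPy's elliptic-filter front end to iirfilter; typical usage, with the cutoff given as a normalized frequency (1 being Nyquist):

from scipy import signal

# 4th-order lowpass: 1 dB passband ripple, 40 dB stopband attenuation,
# cutoff at 0.3 of the Nyquist frequency.
b, a = signal.ellip(4, 1, 40, 0.3, btype='low')
print(len(b), len(a))  # 5 5 -- numerator/denominator coefficients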
51,217
def test_check_type_picks():
    picks = np.arange(12)
    assert_array_equal(picks, _check_type_picks(picks))
    picks = list(range(12))
    assert_array_equal(np.array(picks), _check_type_picks(picks))
    picks = None
    assert_array_equal(None, _check_type_picks(picks))
    picks = ['a', 'b']
    assert_raises(ValueError, _check_type_picks, picks)
    picks = 'b'
    assert_raises(ValueError, _check_type_picks, picks)
[ "def", "test_check_type_picks", "(", ")", ":", "picks", "=", "np", ".", "arange", "(", "12", ")", "assert_array_equal", "(", "picks", ",", "_check_type_picks", "(", "picks", ")", ")", "picks", "=", "list", "(", "range", "(", "12", ")", ")", "assert_array_equal", "(", "np", ".", "array", "(", "picks", ")", ",", "_check_type_picks", "(", "picks", ")", ")", "picks", "=", "None", "assert_array_equal", "(", "None", ",", "_check_type_picks", "(", "picks", ")", ")", "picks", "=", "[", "'a'", ",", "'b'", "]", "assert_raises", "(", "ValueError", ",", "_check_type_picks", ",", "picks", ")", "picks", "=", "'b'", "assert_raises", "(", "ValueError", ",", "_check_type_picks", ",", "picks", ")" ]
test checking type integrity checks of picks .
train
false
51,219
def use_or_create_async_context(context, name=None, loop=None, **kwargs):
    if (name is None):
        name = AsyncContext.default_name
    async_context = getattr(context, name, None)
    if (async_context is None):
        async_context = AsyncContext(loop=loop, name=name, **kwargs)
        setattr(context, async_context.name, async_context)
    assert isinstance(async_context, AsyncContext)
    assert (getattr(context, async_context.name) is async_context)
    return async_context
[ "def", "use_or_create_async_context", "(", "context", ",", "name", "=", "None", ",", "loop", "=", "None", ",", "**", "kwargs", ")", ":", "if", "(", "name", "is", "None", ")", ":", "name", "=", "AsyncContext", ".", "default_name", "async_context", "=", "getattr", "(", "context", ",", "name", ",", "None", ")", "if", "(", "async_context", "is", "None", ")", ":", "async_context", "=", "AsyncContext", "(", "loop", "=", "loop", ",", "name", "=", "name", ",", "**", "kwargs", ")", "setattr", "(", "context", ",", "async_context", ".", "name", ",", "async_context", ")", "assert", "isinstance", "(", "async_context", ",", "AsyncContext", ")", "assert", "(", "getattr", "(", "context", ",", "async_context", ".", "name", ")", "is", "async_context", ")", "return", "async_context" ]
utility function to be used in step implementations to ensure that an :class:asynccontext object is stored in the .
train
false
51,220
def rate_limited_generator(rate_limit_per_second, iterable):
    throttler = rate_limiter(rate_limit_per_second)
    for i in iterable:
        throttler()
        (yield i)
[ "def", "rate_limited_generator", "(", "rate_limit_per_second", ",", "iterable", ")", ":", "throttler", "=", "rate_limiter", "(", "rate_limit_per_second", ")", "for", "i", "in", "iterable", ":", "throttler", "(", ")", "(", "yield", "i", ")" ]
yield from iterable without going over rate limit .
train
false
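rate_limiter above is not shown in this record; a minimal sketch, assuming it returns a callable that sleeps just enough to keep calls under the given per-second rate:

import time

def rate_limiter(rate_limit_per_second):
    min_interval = 1.0 / rate_limit_per_second
    last_call = [0.0]  # mutable cell so the closure can update it

    def throttler():
        wait = min_interval - (time.time() - last_call[0])
        if wait > 0:
            time.sleep(wait)
        last_call[0] = time.time()
    return throttler

def rate_limited_generator(rate_limit_per_second, iterable):
    throttler = rate_limiter(rate_limit_per_second)
    for i in iterable:
        throttler()
        yield i

for item in rate_limited_generator(2, 'abcd'):  # at most ~2 items per second
    print(item)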
51,221
def _tricode(G, v, u, w):
    combos = ((v, u, 1), (u, v, 2), (v, w, 4), (w, v, 8), (u, w, 16), (w, u, 32))
    return sum((x for (u, v, x) in combos if (v in G[u])))
[ "def", "_tricode", "(", "G", ",", "v", ",", "u", ",", "w", ")", ":", "combos", "=", "(", "(", "v", ",", "u", ",", "1", ")", ",", "(", "u", ",", "v", ",", "2", ")", ",", "(", "v", ",", "w", ",", "4", ")", ",", "(", "w", ",", "v", ",", "8", ")", ",", "(", "u", ",", "w", ",", "16", ")", ",", "(", "w", ",", "u", ",", "32", ")", ")", "return", "sum", "(", "(", "x", "for", "(", "u", ",", "v", ",", "x", ")", "in", "combos", "if", "(", "v", "in", "G", "[", "u", "]", ")", ")", ")" ]
returns the integer code of the given triad .
train
false
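Each directed edge among the three nodes contributes one bit to the triad code. A quick check, restating the function with non-shadowing loop variables on a small networkx digraph (node labels are arbitrary):

import networkx as nx

def tricode(G, v, u, w):
    # Same bit assignment as the snippet above.
    combos = ((v, u, 1), (u, v, 2), (v, w, 4), (w, v, 8), (u, w, 16), (w, u, 32))
    return sum(x for a, b, x in combos if b in G[a])

G = nx.DiGraph([(1, 2), (2, 3)])  # edges v->u and u->w
print(tricode(G, 1, 2, 3))  # 17 = 1 (v->u) + 16 (u->w)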
51,222
def _AddCustomBuildToolForMSVS(p, spec, primary_input, inputs, outputs, description, cmd):
    inputs = _FixPaths(inputs)
    outputs = _FixPaths(outputs)
    tool = MSVSProject.Tool('VCCustomBuildTool', {'Description': description, 'AdditionalDependencies': ';'.join(inputs), 'Outputs': ';'.join(outputs), 'CommandLine': cmd})
    for (config_name, c_data) in spec['configurations'].iteritems():
        p.AddFileConfig(_FixPath(primary_input), _ConfigFullName(config_name, c_data), tools=[tool])
[ "def", "_AddCustomBuildToolForMSVS", "(", "p", ",", "spec", ",", "primary_input", ",", "inputs", ",", "outputs", ",", "description", ",", "cmd", ")", ":", "inputs", "=", "_FixPaths", "(", "inputs", ")", "outputs", "=", "_FixPaths", "(", "outputs", ")", "tool", "=", "MSVSProject", ".", "Tool", "(", "'VCCustomBuildTool'", ",", "{", "'Description'", ":", "description", ",", "'AdditionalDependencies'", ":", "';'", ".", "join", "(", "inputs", ")", ",", "'Outputs'", ":", "';'", ".", "join", "(", "outputs", ")", ",", "'CommandLine'", ":", "cmd", "}", ")", "for", "(", "config_name", ",", "c_data", ")", "in", "spec", "[", "'configurations'", "]", ".", "iteritems", "(", ")", ":", "p", ".", "AddFileConfig", "(", "_FixPath", "(", "primary_input", ")", ",", "_ConfigFullName", "(", "config_name", ",", "c_data", ")", ",", "tools", "=", "[", "tool", "]", ")" ]
add a custom build tool to execute something .
train
false
51,223
def find_template(template_name):
    template_paths = config['pylons.app_globals'].template_paths
    for path in template_paths:
        if os.path.exists(os.path.join(path, template_name.encode('utf-8'))):
            return os.path.join(path, template_name)
[ "def", "find_template", "(", "template_name", ")", ":", "template_paths", "=", "config", "[", "'pylons.app_globals'", "]", ".", "template_paths", "for", "path", "in", "template_paths", ":", "if", "os", ".", "path", ".", "exists", "(", "os", ".", "path", ".", "join", "(", "path", ",", "template_name", ".", "encode", "(", "'utf-8'", ")", ")", ")", ":", "return", "os", ".", "path", ".", "join", "(", "path", ",", "template_name", ")" ]
looks through the possible template paths to find a template returns the full path is it exists .
train
false
51,224
@local_optimizer([GpuSparseBlockGemv], inplace=True)
def local_inplace_gpu_sparse_block_gemv(node):
    if (isinstance(node.op, GpuSparseBlockGemv) and (not node.op.inplace)):
        new_node = gpu_sparse_block_gemv_inplace(*node.inputs)
        return [new_node]
    return False
[ "@", "local_optimizer", "(", "[", "GpuSparseBlockGemv", "]", ",", "inplace", "=", "True", ")", "def", "local_inplace_gpu_sparse_block_gemv", "(", "node", ")", ":", "if", "(", "isinstance", "(", "node", ".", "op", ",", "GpuSparseBlockGemv", ")", "and", "(", "not", "node", ".", "op", ".", "inplace", ")", ")", ":", "new_node", "=", "gpu_sparse_block_gemv_inplace", "(", "*", "node", ".", "inputs", ")", "return", "[", "new_node", "]", "return", "False" ]
gpusparseblockgemv -> gpusparseblockgemv .
train
false
51,225
def gravatar_hash(email):
    return make_safe_digest(email.lower(), hashlib.md5)
[ "def", "gravatar_hash", "(", "email", ")", ":", "return", "make_safe_digest", "(", "email", ".", "lower", "(", ")", ",", "hashlib", ".", "md5", ")" ]
compute the gravatar hash for an email address .
train
false
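Gravatar's convention is an MD5 hex digest of the trimmed, lowercased email address; the equivalent with plain hashlib:

import hashlib

email = 'User@Example.com'
digest = hashlib.md5(email.strip().lower().encode('utf-8')).hexdigest()
print('https://www.gravatar.com/avatar/' + digest)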
51,226
def dirichlet_entropy_vec(alpha):
    if (len(alpha.shape) == 1):
        return stats.dirichlet.entropy(alpha)
    else:
        n_minibatch = alpha.shape[0]
        return np.array([stats.dirichlet.entropy(alpha[i, :]) for i in range(n_minibatch)])
[ "def", "dirichlet_entropy_vec", "(", "alpha", ")", ":", "if", "(", "len", "(", "alpha", ".", "shape", ")", "==", "1", ")", ":", "return", "stats", ".", "dirichlet", ".", "entropy", "(", "alpha", ")", "else", ":", "n_minibatch", "=", "alpha", ".", "shape", "[", "0", "]", "return", "np", ".", "array", "(", "[", "stats", ".", "dirichlet", ".", "entropy", "(", "alpha", "[", "i", ",", ":", "]", ")", "for", "i", "in", "range", "(", "n_minibatch", ")", "]", ")" ]
vectorized version of stats .
train
false
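scipy.stats.dirichlet.entropy accepts one concentration vector at a time, which is why the snippet above loops over minibatch rows; a short usage sketch:

import numpy as np
from scipy import stats

alpha = np.array([1.0, 2.0, 3.0])      # one concentration vector
print(stats.dirichlet.entropy(alpha))  # differential entropy, a scalar

batch = np.array([[1.0, 2.0, 3.0],
                  [0.5, 0.5, 0.5]])    # minibatch: handled row by row
print([stats.dirichlet.entropy(a) for a in batch])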
51,227
def get_vm_of_host(check_esx_path, vcenter, host, user, password):
    print 'Listing host', host
    list_vm_cmd = [check_esx_path, '-D', vcenter, '-H', host, '-u', user, '-p', password, '-l', 'runtime', '-s', 'list']
    print ' '.join(list_vm_cmd)
    p = Popen(list_vm_cmd, stdout=PIPE)
    output = p.communicate()
    print 'Exit status', p.returncode
    if (p.returncode == 2):
        print 'Error: check_esx3.pl returnes an error:', output
        raise SystemExit(2)
    parts = output[0].split(':')
    if (len(parts) < 2):
        return None
    vms_raw = parts[1].split('|')[0]
    vms_raw_lst = vms_raw.split(',')
    lst = []
    for vm_raw in vms_raw_lst:
        vm_raw = vm_raw.strip()
        elts = vm_raw.split('(')
        vm = elts[0]
        lst.append(vm)
    return lst
[ "def", "get_vm_of_host", "(", "check_esx_path", ",", "vcenter", ",", "host", ",", "user", ",", "password", ")", ":", "print", "'Listing host'", ",", "host", "list_vm_cmd", "=", "[", "check_esx_path", ",", "'-D'", ",", "vcenter", ",", "'-H'", ",", "host", ",", "'-u'", ",", "user", ",", "'-p'", ",", "password", ",", "'-l'", ",", "'runtime'", ",", "'-s'", ",", "'list'", "]", "print", "' '", ".", "join", "(", "list_vm_cmd", ")", "p", "=", "Popen", "(", "list_vm_cmd", ",", "stdout", "=", "PIPE", ")", "output", "=", "p", ".", "communicate", "(", ")", "print", "'Exit status'", ",", "p", ".", "returncode", "if", "(", "p", ".", "returncode", "==", "2", ")", ":", "print", "'Error: check_esx3.pl returnes an error:'", ",", "output", "raise", "SystemExit", "(", "2", ")", "parts", "=", "output", "[", "0", "]", ".", "split", "(", "':'", ")", "if", "(", "len", "(", "parts", ")", "<", "2", ")", ":", "return", "None", "vms_raw", "=", "parts", "[", "1", "]", ".", "split", "(", "'|'", ")", "[", "0", "]", "vms_raw_lst", "=", "vms_raw", ".", "split", "(", "','", ")", "lst", "=", "[", "]", "for", "vm_raw", "in", "vms_raw_lst", ":", "vm_raw", "=", "vm_raw", ".", "strip", "(", ")", "elts", "=", "vm_raw", ".", "split", "(", "'('", ")", "vm", "=", "elts", "[", "0", "]", "lst", ".", "append", "(", "vm", ")", "return", "lst" ]
get a list of all virtual machines on a specific host .
train
false
51,229
def _VarintDecoder(mask):
    local_ord = ord
    def DecodeVarint(buffer, pos):
        result = 0
        shift = 0
        while 1:
            b = local_ord(buffer[pos])
            result |= ((b & 127) << shift)
            pos += 1
            if (not (b & 128)):
                result &= mask
                return (result, pos)
            shift += 7
            if (shift >= 64):
                raise _DecodeError('Too many bytes when decoding varint.')
    return DecodeVarint
[ "def", "_VarintDecoder", "(", "mask", ")", ":", "local_ord", "=", "ord", "def", "DecodeVarint", "(", "buffer", ",", "pos", ")", ":", "result", "=", "0", "shift", "=", "0", "while", "1", ":", "b", "=", "local_ord", "(", "buffer", "[", "pos", "]", ")", "result", "|=", "(", "(", "b", "&", "127", ")", "<<", "shift", ")", "pos", "+=", "1", "if", "(", "not", "(", "b", "&", "128", ")", ")", ":", "result", "&=", "mask", "return", "(", "result", ",", "pos", ")", "shift", "+=", "7", "if", "(", "shift", ">=", "64", ")", ":", "raise", "_DecodeError", "(", "'Too many bytes when decoding varint.'", ")", "return", "DecodeVarint" ]
return an encoder for a basic varint value .
train
false
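Varints store 7 payload bits per byte, least-significant group first, with the high bit as a continuation flag. A Python 3 re-rendering of the loop (the snippet's ord() call is a Python 2 detail), checked on the classic two-byte example:

def decode_varint(buf, pos=0):
    # 7 payload bits per byte, low group first; high bit set means
    # another byte follows.
    result = shift = 0
    while True:
        b = buf[pos]
        result |= (b & 0x7F) << shift
        pos += 1
        if not (b & 0x80):
            return result, pos
        shift += 7

print(decode_varint(bytes([0xAC, 0x02])))  # (300, 2): 44 + (2 << 7)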
51,231
@pytest.mark.django_db
def test_submission_ordering(en_tutorial_po, member, no_submissions):
    at_time = timezone.now()
    unit = en_tutorial_po.units[0]
    _create_comment_submission(unit, member, at_time, 'Comment 3')
    _create_comment_submission(unit, member, at_time, 'Comment 2')
    _create_comment_submission(unit, member, at_time, 'Comment 1')
    unit = en_tutorial_po.units[0]
    assert (Submission.objects.count() == 3)
    assert (Submission.objects.first().creation_time == Submission.objects.last().creation_time)
    assert (Submission.objects.latest().pk > Submission.objects.earliest().pk)
    assert (unit.submission_set.latest().pk > unit.submission_set.earliest().pk)
    assert (unit.submission_set.earliest('new_value').new_value == 'Comment 1')
    assert (unit.submission_set.latest('new_value').new_value == 'Comment 3')
    assert (unit.submission_set.earliest('pk').new_value == 'Comment 3')
    assert (unit.submission_set.latest('pk').new_value == 'Comment 1')
[ "@", "pytest", ".", "mark", ".", "django_db", "def", "test_submission_ordering", "(", "en_tutorial_po", ",", "member", ",", "no_submissions", ")", ":", "at_time", "=", "timezone", ".", "now", "(", ")", "unit", "=", "en_tutorial_po", ".", "units", "[", "0", "]", "_create_comment_submission", "(", "unit", ",", "member", ",", "at_time", ",", "'Comment 3'", ")", "_create_comment_submission", "(", "unit", ",", "member", ",", "at_time", ",", "'Comment 2'", ")", "_create_comment_submission", "(", "unit", ",", "member", ",", "at_time", ",", "'Comment 1'", ")", "unit", "=", "en_tutorial_po", ".", "units", "[", "0", "]", "assert", "(", "Submission", ".", "objects", ".", "count", "(", ")", "==", "3", ")", "assert", "(", "Submission", ".", "objects", ".", "first", "(", ")", ".", "creation_time", "==", "Submission", ".", "objects", ".", "last", "(", ")", ".", "creation_time", ")", "assert", "(", "Submission", ".", "objects", ".", "latest", "(", ")", ".", "pk", ">", "Submission", ".", "objects", ".", "earliest", "(", ")", ".", "pk", ")", "assert", "(", "unit", ".", "submission_set", ".", "latest", "(", ")", ".", "pk", ">", "unit", ".", "submission_set", ".", "earliest", "(", ")", ".", "pk", ")", "assert", "(", "unit", ".", "submission_set", ".", "earliest", "(", "'new_value'", ")", ".", "new_value", "==", "'Comment 1'", ")", "assert", "(", "unit", ".", "submission_set", ".", "latest", "(", "'new_value'", ")", ".", "new_value", "==", "'Comment 3'", ")", "assert", "(", "unit", ".", "submission_set", ".", "earliest", "(", "'pk'", ")", ".", "new_value", "==", "'Comment 3'", ")", "assert", "(", "unit", ".", "submission_set", ".", "latest", "(", "'pk'", ")", ".", "new_value", "==", "'Comment 1'", ")" ]
submissions with same creation_time should order by pk .
train
false
51,232
def service_destroy(context, service_id):
    return IMPL.service_destroy(context, service_id)
[ "def", "service_destroy", "(", "context", ",", "service_id", ")", ":", "return", "IMPL", ".", "service_destroy", "(", "context", ",", "service_id", ")" ]
destroy the service or raise if it does not exist .
train
false
51,234
def close_tab(data, socket):
    if (socket.tab is not None):
        socket.tab.close()
        socket.factory[socket].tab = None
[ "def", "close_tab", "(", "data", ",", "socket", ")", ":", "if", "(", "socket", ".", "tab", "is", "not", "None", ")", ":", "socket", ".", "tab", ".", "close", "(", ")", "socket", ".", "factory", "[", "socket", "]", ".", "tab", "=", "None" ]
close virtual tab if it is open .
train
false
51,236
def _populate_defaults():
    global _populated
    if (not _populated):
        _populated = True
        from reviewboard.reviews.default_actions import get_default_actions
        for default_action in reversed(get_default_actions()):
            default_action.register()
[ "def", "_populate_defaults", "(", ")", ":", "global", "_populated", "if", "(", "not", "_populated", ")", ":", "_populated", "=", "True", "from", "reviewboard", ".", "reviews", ".", "default_actions", "import", "get_default_actions", "for", "default_action", "in", "reversed", "(", "get_default_actions", "(", ")", ")", ":", "default_action", ".", "register", "(", ")" ]
populate the default action instances .
train
false
51,237
def publish_request(liveaction, execution):
    LiveAction.publish_create(liveaction)
    LiveAction.publish_status(liveaction)
    ActionExecution.publish_create(execution)
    extra = {'liveaction_db': liveaction, 'execution_db': execution}
    LOG.audit(('Action execution requested. LiveAction.id=%s, ActionExecution.id=%s' % (liveaction.id, execution.id)), extra=extra)
    return (liveaction, execution)
[ "def", "publish_request", "(", "liveaction", ",", "execution", ")", ":", "LiveAction", ".", "publish_create", "(", "liveaction", ")", "LiveAction", ".", "publish_status", "(", "liveaction", ")", "ActionExecution", ".", "publish_create", "(", "execution", ")", "extra", "=", "{", "'liveaction_db'", ":", "liveaction", ",", "'execution_db'", ":", "execution", "}", "LOG", ".", "audit", "(", "(", "'Action execution requested. LiveAction.id=%s, ActionExecution.id=%s'", "%", "(", "liveaction", ".", "id", ",", "execution", ".", "id", ")", ")", ",", "extra", "=", "extra", ")", "return", "(", "liveaction", ",", "execution", ")" ]
publish an action execution .
train
false
51,240
def format_url(url, data):
    try:
        result = (url.replace('$(', '%(') % data)
    except AttributeError:
        return None
    except KeyError as e:
        LOG.error((_('Malformed endpoint %(url)s - unknown key %(keyerror)s') % {'url': url, 'keyerror': str(e)}))
        raise exception.MalformedEndpoint(endpoint=url)
    except TypeError as e:
        LOG.error((_('Malformed endpoint %(url)s - unknown key %(keyerror)s(are you missing brackets ?)') % {'url': url, 'keyerror': str(e)}))
        raise exception.MalformedEndpoint(endpoint=url)
    except ValueError as e:
        LOG.error((_('Malformed endpoint %s - incomplete format (are you missing a type notifier ?)') % url))
        raise exception.MalformedEndpoint(endpoint=url)
    return result
[ "def", "format_url", "(", "url", ",", "data", ")", ":", "try", ":", "result", "=", "(", "url", ".", "replace", "(", "'$('", ",", "'%('", ")", "%", "data", ")", "except", "AttributeError", ":", "return", "None", "except", "KeyError", "as", "e", ":", "LOG", ".", "error", "(", "(", "_", "(", "'Malformed endpoint %(url)s - unknown key %(keyerror)s'", ")", "%", "{", "'url'", ":", "url", ",", "'keyerror'", ":", "str", "(", "e", ")", "}", ")", ")", "raise", "exception", ".", "MalformedEndpoint", "(", "endpoint", "=", "url", ")", "except", "TypeError", "as", "e", ":", "LOG", ".", "error", "(", "(", "_", "(", "'Malformed endpoint %(url)s - unknown key %(keyerror)s(are you missing brackets ?)'", ")", "%", "{", "'url'", ":", "url", ",", "'keyerror'", ":", "str", "(", "e", ")", "}", ")", ")", "raise", "exception", ".", "MalformedEndpoint", "(", "endpoint", "=", "url", ")", "except", "ValueError", "as", "e", ":", "LOG", ".", "error", "(", "(", "_", "(", "'Malformed endpoint %s - incomplete format (are you missing a type notifier ?)'", ")", "%", "url", ")", ")", "raise", "exception", ".", "MalformedEndpoint", "(", "endpoint", "=", "url", ")", "return", "result" ]
helper method for all backend catalogs to deal with urls .
train
false
51,241
def _login_user(context, username, password, facility=None): data = {'username': username, 'password': password} if facility: data['facility'] = facility context.facility = facility data = json.dumps(data) url = (reverse('api_dispatch_list', kwargs={'resource_name': 'user'}) + 'login/') resp = post(context, url, data) context.user = username assert resp, ('Login failed. url: %s\ndata: %s' % (url, data))
[ "def", "_login_user", "(", "context", ",", "username", ",", "password", ",", "facility", "=", "None", ")", ":", "data", "=", "{", "'username'", ":", "username", ",", "'password'", ":", "password", "}", "if", "facility", ":", "data", "[", "'facility'", "]", "=", "facility", "context", ".", "facility", "=", "facility", "data", "=", "json", ".", "dumps", "(", "data", ")", "url", "=", "(", "reverse", "(", "'api_dispatch_list'", ",", "kwargs", "=", "{", "'resource_name'", ":", "'user'", "}", ")", "+", "'login/'", ")", "resp", "=", "post", "(", "context", ",", "url", ",", "data", ")", "context", ".", "user", "=", "username", "assert", "resp", ",", "(", "'Login failed. url: %s\\ndata: %s'", "%", "(", "url", ",", "data", ")", ")" ]
logs a user in with an api endpoint .
train
false
51,242
def require_persistent_graph(request, *args, **kwargs): kwargs['raise_'] = True graph = get_persistent_graph(request, *args, **kwargs) if (not graph): raise OpenFacebookException('please authenticate') return graph
[ "def", "require_persistent_graph", "(", "request", ",", "*", "args", ",", "**", "kwargs", ")", ":", "kwargs", "[", "'raise_'", "]", "=", "True", "graph", "=", "get_persistent_graph", "(", "request", ",", "*", "args", ",", "**", "kwargs", ")", "if", "(", "not", "graph", ")", ":", "raise", "OpenFacebookException", "(", "'please authenticate'", ")", "return", "graph" ]
just like get_persistent_graph , but raises an openfacebookexception instead of returning none when no graph is available .
train
false
51,244
def legendre_symbol(a, p): (a, p) = (as_int(a), as_int(p)) if ((not isprime(p)) or (p == 2)): raise ValueError('p should be an odd prime') a = (a % p) if (not a): return 0 if is_quad_residue(a, p): return 1 return (-1)
[ "def", "legendre_symbol", "(", "a", ",", "p", ")", ":", "(", "a", ",", "p", ")", "=", "(", "as_int", "(", "a", ")", ",", "as_int", "(", "p", ")", ")", "if", "(", "(", "not", "isprime", "(", "p", ")", ")", "or", "(", "p", "==", "2", ")", ")", ":", "raise", "ValueError", "(", "'p should be an odd prime'", ")", "a", "=", "(", "a", "%", "p", ")", "if", "(", "not", "a", ")", ":", "return", "0", "if", "is_quad_residue", "(", "a", ",", "p", ")", ":", "return", "1", "return", "(", "-", "1", ")" ]
returns the legendre symbol .
train
false
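A quick usage check of the legendre_symbol snippet above; sympy ships the same function publicly as sympy.ntheory.legendre_symbol, so the sketch below exercises that API rather than redefining the helpers:

from sympy.ntheory import legendre_symbol

# the quadratic residues mod 7 are {1, 2, 4}
print(legendre_symbol(4, 7))   # 1  -> 4 is a residue (2**2 == 4 mod 7)
print(legendre_symbol(3, 7))   # -1 -> 3 is a non-residue mod 7
print(legendre_symbol(14, 7))  # 0  -> 14 % 7 == 0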
51,245
def RegisterCoreDLL(coredllName=None): if (coredllName is None): coredllName = win32api.GetModuleFileName(sys.dllhandle) else: try: os.stat(coredllName) except os.error: print ('Warning: Registering non-existant core DLL %s' % coredllName) hKey = win32api.RegCreateKey(GetRootKey(), BuildDefaultPythonKey()) try: win32api.RegSetValue(hKey, 'Dll', win32con.REG_SZ, coredllName) finally: win32api.RegCloseKey(hKey) win32api.RegSetValue(GetRootKey(), 'Software\\Python\\PythonCore\\CurrentVersion', win32con.REG_SZ, sys.winver)
[ "def", "RegisterCoreDLL", "(", "coredllName", "=", "None", ")", ":", "if", "(", "coredllName", "is", "None", ")", ":", "coredllName", "=", "win32api", ".", "GetModuleFileName", "(", "sys", ".", "dllhandle", ")", "else", ":", "try", ":", "os", ".", "stat", "(", "coredllName", ")", "except", "os", ".", "error", ":", "print", "(", "'Warning: Registering non-existant core DLL %s'", "%", "coredllName", ")", "hKey", "=", "win32api", ".", "RegCreateKey", "(", "GetRootKey", "(", ")", ",", "BuildDefaultPythonKey", "(", ")", ")", "try", ":", "win32api", ".", "RegSetValue", "(", "hKey", ",", "'Dll'", ",", "win32con", ".", "REG_SZ", ",", "coredllName", ")", "finally", ":", "win32api", ".", "RegCloseKey", "(", "hKey", ")", "win32api", ".", "RegSetValue", "(", "GetRootKey", "(", ")", ",", "'Software\\\\Python\\\\PythonCore\\\\CurrentVersion'", ",", "win32con", ".", "REG_SZ", ",", "sys", ".", "winver", ")" ]
registers the core dll in the registry .
train
false
51,246
def _try_passwordless_openssh(server, keyfile): if (pexpect is None): raise ImportError('pexpect unavailable, use paramiko') cmd = ('ssh -f ' + server) if keyfile: cmd += (' -i ' + keyfile) cmd += ' exit' env = os.environ.copy() env.pop('SSH_ASKPASS', None) ssh_newkey = 'Are you sure you want to continue connecting' p = pexpect.spawn(cmd, env=env) while True: try: i = p.expect([ssh_newkey, _password_pat], timeout=0.1) if (i == 0): raise SSHException("The authenticity of the host can't be established.") except pexpect.TIMEOUT: continue except pexpect.EOF: return True else: return False
[ "def", "_try_passwordless_openssh", "(", "server", ",", "keyfile", ")", ":", "if", "(", "pexpect", "is", "None", ")", ":", "raise", "ImportError", "(", "'pexpect unavailable, use paramiko'", ")", "cmd", "=", "(", "'ssh -f '", "+", "server", ")", "if", "keyfile", ":", "cmd", "+=", "(", "' -i '", "+", "keyfile", ")", "cmd", "+=", "' exit'", "env", "=", "os", ".", "environ", ".", "copy", "(", ")", "env", ".", "pop", "(", "'SSH_ASKPASS'", ",", "None", ")", "ssh_newkey", "=", "'Are you sure you want to continue connecting'", "p", "=", "pexpect", ".", "spawn", "(", "cmd", ",", "env", "=", "env", ")", "while", "True", ":", "try", ":", "i", "=", "p", ".", "expect", "(", "[", "ssh_newkey", ",", "_password_pat", "]", ",", "timeout", "=", "0.1", ")", "if", "(", "i", "==", "0", ")", ":", "raise", "SSHException", "(", "\"The authenticity of the host can't be established.\"", ")", "except", "pexpect", ".", "TIMEOUT", ":", "continue", "except", "pexpect", ".", "EOF", ":", "return", "True", "else", ":", "return", "False" ]
try passwordless login with shell ssh command .
train
false
51,248
def boot_device(name='default', **kwargs): ret = {'name': name, 'result': False, 'comment': '', 'changes': {}} org = __salt__['ipmi.get_bootdev'](**kwargs) if ('bootdev' in org): org = org['bootdev'] if (org == name): ret['result'] = True ret['comment'] = 'system already in this state' return ret if __opts__['test']: ret['comment'] = 'would change boot device' ret['result'] = None ret['changes'] = {'old': org, 'new': name} return ret outdddd = __salt__['ipmi.set_bootdev'](bootdev=name, **kwargs) ret['comment'] = 'changed boot device' ret['result'] = True ret['changes'] = {'old': org, 'new': name} return ret
[ "def", "boot_device", "(", "name", "=", "'default'", ",", "**", "kwargs", ")", ":", "ret", "=", "{", "'name'", ":", "name", ",", "'result'", ":", "False", ",", "'comment'", ":", "''", ",", "'changes'", ":", "{", "}", "}", "org", "=", "__salt__", "[", "'ipmi.get_bootdev'", "]", "(", "**", "kwargs", ")", "if", "(", "'bootdev'", "in", "org", ")", ":", "org", "=", "org", "[", "'bootdev'", "]", "if", "(", "org", "==", "name", ")", ":", "ret", "[", "'result'", "]", "=", "True", "ret", "[", "'comment'", "]", "=", "'system already in this state'", "return", "ret", "if", "__opts__", "[", "'test'", "]", ":", "ret", "[", "'comment'", "]", "=", "'would change boot device'", "ret", "[", "'result'", "]", "=", "None", "ret", "[", "'changes'", "]", "=", "{", "'old'", ":", "org", ",", "'new'", ":", "name", "}", "return", "ret", "outdddd", "=", "__salt__", "[", "'ipmi.set_bootdev'", "]", "(", "bootdev", "=", "name", ",", "**", "kwargs", ")", "ret", "[", "'comment'", "]", "=", "'changed boot device'", "ret", "[", "'result'", "]", "=", "True", "ret", "[", "'changes'", "]", "=", "{", "'old'", ":", "org", ",", "'new'", ":", "name", "}", "return", "ret" ]
request a boot device change . name may be : default ; network -- request network boot ; hd -- boot from hard drive ; safe -- boot from hard drive , requesting safe mode .
train
true
51,250
@register.simple_tag def dictionary_count(lang, project): return Dictionary.objects.filter(project=project, language=lang).count()
[ "@", "register", ".", "simple_tag", "def", "dictionary_count", "(", "lang", ",", "project", ")", ":", "return", "Dictionary", ".", "objects", ".", "filter", "(", "project", "=", "project", ",", "language", "=", "lang", ")", ".", "count", "(", ")" ]
returns number of words in dictionary .
train
false
51,251
@frappe.whitelist() def get_fee_components(fee_structure): if fee_structure: fs = frappe.get_list(u'Fee Component', fields=[u'fees_category', u'amount'], filters={u'parent': fee_structure}, order_by=u'idx') return fs
[ "@", "frappe", ".", "whitelist", "(", ")", "def", "get_fee_components", "(", "fee_structure", ")", ":", "if", "fee_structure", ":", "fs", "=", "frappe", ".", "get_list", "(", "u'Fee Component'", ",", "fields", "=", "[", "u'fees_category'", ",", "u'amount'", "]", ",", "filters", "=", "{", "u'parent'", ":", "fee_structure", "}", ",", "order_by", "=", "u'idx'", ")", "return", "fs" ]
returns fee components .
train
false
51,252
@receiver(SignalHandler.course_published) def listen_for_course_publish(sender, course_key, **kwargs): from .tasks import update_course_structure try: structure = CourseStructure.objects.get(course_id=course_key) structure.discussion_id_map_json = None structure.save() except CourseStructure.DoesNotExist: pass update_course_structure.apply_async([unicode(course_key)], countdown=0)
[ "@", "receiver", "(", "SignalHandler", ".", "course_published", ")", "def", "listen_for_course_publish", "(", "sender", ",", "course_key", ",", "**", "kwargs", ")", ":", "from", ".", "tasks", "import", "update_course_structure", "try", ":", "structure", "=", "CourseStructure", ".", "objects", ".", "get", "(", "course_id", "=", "course_key", ")", "structure", ".", "discussion_id_map_json", "=", "None", "structure", ".", "save", "(", ")", "except", "CourseStructure", ".", "DoesNotExist", ":", "pass", "update_course_structure", ".", "apply_async", "(", "[", "unicode", "(", "course_key", ")", "]", ",", "countdown", "=", "0", ")" ]
receives the course publishing signal and performs publishing-related workflows .
train
false
51,253
@login_required @require_http_methods(['GET', 'POST']) def edit_watch_list(request): watches = Watch.objects.filter(user=request.user).order_by('content_type') watch_list = [] for w in watches: if (w.content_object is not None): if (w.content_type.name == 'question'): if (not w.content_object.is_archived): watch_list.append(w) else: watch_list.append(w) if (request.method == 'POST'): for w in watch_list: w.is_active = (('watch_%s' % w.id) in request.POST) w.save() return render(request, 'users/edit_watches.html', {'watch_list': watch_list})
[ "@", "login_required", "@", "require_http_methods", "(", "[", "'GET'", ",", "'POST'", "]", ")", "def", "edit_watch_list", "(", "request", ")", ":", "watches", "=", "Watch", ".", "objects", ".", "filter", "(", "user", "=", "request", ".", "user", ")", ".", "order_by", "(", "'content_type'", ")", "watch_list", "=", "[", "]", "for", "w", "in", "watches", ":", "if", "(", "w", ".", "content_object", "is", "not", "None", ")", ":", "if", "(", "w", ".", "content_type", ".", "name", "==", "'question'", ")", ":", "if", "(", "not", "w", ".", "content_object", ".", "is_archived", ")", ":", "watch_list", ".", "append", "(", "w", ")", "else", ":", "watch_list", ".", "append", "(", "w", ")", "if", "(", "request", ".", "method", "==", "'POST'", ")", ":", "for", "w", "in", "watch_list", ":", "w", ".", "is_active", "=", "(", "(", "'watch_%s'", "%", "w", ".", "id", ")", "in", "request", ".", "POST", ")", "w", ".", "save", "(", ")", "return", "render", "(", "request", ",", "'users/edit_watches.html'", ",", "{", "'watch_list'", ":", "watch_list", "}", ")" ]
edit watch list .
train
false
51,254
def set_tenant_id_tag(tenant_id, taglist=None): new_taglist = [] if taglist: new_taglist = [x for x in taglist if (x['scope'] != TENANT_ID_SCOPE)] new_taglist.append(dict(scope=TENANT_ID_SCOPE, tag=tenant_id)) return new_taglist
[ "def", "set_tenant_id_tag", "(", "tenant_id", ",", "taglist", "=", "None", ")", ":", "new_taglist", "=", "[", "]", "if", "taglist", ":", "new_taglist", "=", "[", "x", "for", "x", "in", "taglist", "if", "(", "x", "[", "'scope'", "]", "!=", "TENANT_ID_SCOPE", ")", "]", "new_taglist", ".", "append", "(", "dict", "(", "scope", "=", "TENANT_ID_SCOPE", ",", "tag", "=", "tenant_id", ")", ")", "return", "new_taglist" ]
convenience function to add tenant_id tag to taglist .
train
false
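A minimal sketch of the replace-then-append behaviour of set_tenant_id_tag; TENANT_ID_SCOPE is a module constant not shown in the snippet, so the 'os_tid' value used here is an assumption:

TENANT_ID_SCOPE = 'os_tid'  # assumed value of the module-level constant

tags = [{'scope': 'os_tid', 'tag': 'stale-tenant'},
        {'scope': 'quantum', 'tag': 'net-1'}]
# any existing tenant-id tag is dropped, then the new one is appended
print(set_tenant_id_tag('tenant-42', tags))
# [{'scope': 'quantum', 'tag': 'net-1'}, {'scope': 'os_tid', 'tag': 'tenant-42'}]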
51,255
def test_file_index_url_quoting(script, data): index_url = data.index_url(urllib_parse.quote('in dex')) result = script.pip('install', '-vvv', '--index-url', index_url, 'simple', expect_error=False) assert ((script.site_packages / 'simple') in result.files_created), str(result.stdout) assert (((script.site_packages / 'simple-1.0-py%s.egg-info') % pyversion) in result.files_created), str(result)
[ "def", "test_file_index_url_quoting", "(", "script", ",", "data", ")", ":", "index_url", "=", "data", ".", "index_url", "(", "urllib_parse", ".", "quote", "(", "'in dex'", ")", ")", "result", "=", "script", ".", "pip", "(", "'install'", ",", "'-vvv'", ",", "'--index-url'", ",", "index_url", ",", "'simple'", ",", "expect_error", "=", "False", ")", "assert", "(", "(", "script", ".", "site_packages", "/", "'simple'", ")", "in", "result", ".", "files_created", ")", ",", "str", "(", "result", ".", "stdout", ")", "assert", "(", "(", "(", "script", ".", "site_packages", "/", "'simple-1.0-py%s.egg-info'", ")", "%", "pyversion", ")", "in", "result", ".", "files_created", ")", ",", "str", "(", "result", ")" ]
test url quoting of file index url with a space .
train
false
51,256
def format_histograms_two_bins(pre_hist, post_hist, bin_edges): lines = [] lines.append('Length DCTB Before DCTB After') for (edge, pre, post) in zip(bin_edges, pre_hist, post_hist): lines.append(' DCTB '.join(map(str, [edge, pre, post]))) return '\n'.join(lines)
[ "def", "format_histograms_two_bins", "(", "pre_hist", ",", "post_hist", ",", "bin_edges", ")", ":", "lines", "=", "[", "]", "lines", ".", "append", "(", "'Length DCTB Before DCTB After'", ")", "for", "(", "edge", ",", "pre", ",", "post", ")", "in", "zip", "(", "bin_edges", ",", "pre_hist", ",", "post_hist", ")", ":", "lines", ".", "append", "(", "' DCTB '", ".", "join", "(", "map", "(", "str", ",", "[", "edge", ",", "pre", ",", "post", "]", ")", ")", ")", "return", "'\\n'", ".", "join", "(", "lines", ")" ]
returns a text-formatted comparison of the before and after histograms .
train
false
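The DCTB markers in the format_histograms_two_bins snippet appear to be this corpus's placeholder for tab characters; with real tabs restored, a small run looks like this:

print(format_histograms_two_bins([5, 3], [4, 4], [100, 200]))
# Length  Before  After
# 100     5       4
# 200     3       4      (columns are joined with tabs)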
51,257
def markdown_render_conditional(text, rich_text): if rich_text: return render_markdown(text) else: return escape(text)
[ "def", "markdown_render_conditional", "(", "text", ",", "rich_text", ")", ":", "if", "rich_text", ":", "return", "render_markdown", "(", "text", ")", "else", ":", "return", "escape", "(", "text", ")" ]
return rendered markdown when rich_text is set , otherwise the escaped text .
train
false
51,259
def vpn_disable(name): run(settings.service, 'openvpn', 'stop', name)
[ "def", "vpn_disable", "(", "name", ")", ":", "run", "(", "settings", ".", "service", ",", "'openvpn'", ",", "'stop'", ",", "name", ")" ]
stop a running vpn .
train
false
51,260
def getComplexByPrefix(elementNode, prefix, valueComplex): value = evaluate.getEvaluatedValue(None, elementNode, prefix) if (value != None): valueComplex = getComplexByDictionaryListValue(value, valueComplex) x = evaluate.getEvaluatedFloat(None, elementNode, (prefix + '.x')) if (x != None): valueComplex = complex(x, getComplexIfNone(valueComplex).imag) y = evaluate.getEvaluatedFloat(None, elementNode, (prefix + '.y')) if (y != None): valueComplex = complex(getComplexIfNone(valueComplex).real, y) return valueComplex
[ "def", "getComplexByPrefix", "(", "elementNode", ",", "prefix", ",", "valueComplex", ")", ":", "value", "=", "evaluate", ".", "getEvaluatedValue", "(", "None", ",", "elementNode", ",", "prefix", ")", "if", "(", "value", "!=", "None", ")", ":", "valueComplex", "=", "getComplexByDictionaryListValue", "(", "value", ",", "valueComplex", ")", "x", "=", "evaluate", ".", "getEvaluatedFloat", "(", "None", ",", "elementNode", ",", "(", "prefix", "+", "'.x'", ")", ")", "if", "(", "x", "!=", "None", ")", ":", "valueComplex", "=", "complex", "(", "x", ",", "getComplexIfNone", "(", "valueComplex", ")", ".", "imag", ")", "y", "=", "evaluate", ".", "getEvaluatedFloat", "(", "None", ",", "elementNode", ",", "(", "prefix", "+", "'.y'", ")", ")", "if", "(", "y", "!=", "None", ")", ":", "valueComplex", "=", "complex", "(", "getComplexIfNone", "(", "valueComplex", ")", ".", "real", ",", "y", ")", "return", "valueComplex" ]
get complex from prefix and xml element .
train
false
51,261
def _prof_path(profiler_output, expr): dir_ = os.path.join(profiler_output, expr_md5(expr)) ensure_dir(dir_) return os.path.join(dir_, str(int(datetime.utcnow().timestamp())))
[ "def", "_prof_path", "(", "profiler_output", ",", "expr", ")", ":", "dir_", "=", "os", ".", "path", ".", "join", "(", "profiler_output", ",", "expr_md5", "(", "expr", ")", ")", "ensure_dir", "(", "dir_", ")", "return", "os", ".", "path", ".", "join", "(", "dir_", ",", "str", "(", "int", "(", "datetime", ".", "utcnow", "(", ")", ".", "timestamp", "(", ")", ")", ")", ")" ]
get the path to write the data for a profile run of expr .
train
false
51,262
def save_icon(obj, icon_content, sizes=mkt.CONTENT_ICON_SIZES): tmp_dst = os.path.join(settings.TMP_PATH, 'icon', uuid.uuid4().hex) with public_storage.open(tmp_dst, 'wb') as fd: fd.write(icon_content) dirname = obj.get_icon_dir() destination = os.path.join(dirname, ('%s' % obj.pk)) remove_icons(destination) icon_hash = resize_icon(tmp_dst, destination, sizes, set_modified_on=[obj], src_storage=public_storage, dst_storage=public_storage) obj.icon_hash = icon_hash['icon_hash'] try: obj.icon_type = 'image/png' except AttributeError: pass obj.save()
[ "def", "save_icon", "(", "obj", ",", "icon_content", ",", "sizes", "=", "mkt", ".", "CONTENT_ICON_SIZES", ")", ":", "tmp_dst", "=", "os", ".", "path", ".", "join", "(", "settings", ".", "TMP_PATH", ",", "'icon'", ",", "uuid", ".", "uuid4", "(", ")", ".", "hex", ")", "with", "public_storage", ".", "open", "(", "tmp_dst", ",", "'wb'", ")", "as", "fd", ":", "fd", ".", "write", "(", "icon_content", ")", "dirname", "=", "obj", ".", "get_icon_dir", "(", ")", "destination", "=", "os", ".", "path", ".", "join", "(", "dirname", ",", "(", "'%s'", "%", "obj", ".", "pk", ")", ")", "remove_icons", "(", "destination", ")", "icon_hash", "=", "resize_icon", "(", "tmp_dst", ",", "destination", ",", "sizes", ",", "set_modified_on", "=", "[", "obj", "]", ",", "src_storage", "=", "public_storage", ",", "dst_storage", "=", "public_storage", ")", "obj", ".", "icon_hash", "=", "icon_hash", "[", "'icon_hash'", "]", "try", ":", "obj", ".", "icon_type", "=", "'image/png'", "except", "AttributeError", ":", "pass", "obj", ".", "save", "(", ")" ]
saves the icon for obj to its final destination .
train
false
51,263
def list_difference(left, right): blocked = set(right) difference = [] for item in left: if (item not in blocked): blocked.add(item) difference.append(item) return difference
[ "def", "list_difference", "(", "left", ",", "right", ")", ":", "blocked", "=", "set", "(", "right", ")", "difference", "=", "[", "]", "for", "item", "in", "left", ":", "if", "(", "item", "not", "in", "blocked", ")", ":", "blocked", ".", "add", "(", "item", ")", "difference", ".", "append", "(", "item", ")", "return", "difference" ]
take the not-in-place difference of two lists .
train
true
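A short check of list_difference's order-preserving set subtraction; note that duplicates in the left list are also collapsed, because every emitted item is added to the blocked set:

print(list_difference([1, 2, 2, 3, 4, 1], [2, 4]))  # [1, 3]
print(list_difference(['b', 'a', 'b'], []))         # ['b', 'a']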
51,264
def acl_info(consul_url=None, **kwargs): ret = {} data = {} if (not consul_url): consul_url = _get_config() if (not consul_url): log.error('No Consul URL found.') ret['message'] = 'No Consul URL found.' ret['res'] = False return ret if ('id' not in kwargs): ret['message'] = 'Required parameter "id" is missing.' ret['res'] = False return ret function = 'acl/info/{0}'.format(kwargs['id']) ret = _query(consul_url=consul_url, data=data, method='PUT', function=function) return ret
[ "def", "acl_info", "(", "consul_url", "=", "None", ",", "**", "kwargs", ")", ":", "ret", "=", "{", "}", "data", "=", "{", "}", "if", "(", "not", "consul_url", ")", ":", "consul_url", "=", "_get_config", "(", ")", "if", "(", "not", "consul_url", ")", ":", "log", ".", "error", "(", "'No Consul URL found.'", ")", "ret", "[", "'message'", "]", "=", "'No Consul URL found.'", "ret", "[", "'res'", "]", "=", "False", "return", "ret", "if", "(", "'id'", "not", "in", "kwargs", ")", ":", "ret", "[", "'message'", "]", "=", "'Required parameter \"id\" is missing.'", "ret", "[", "'res'", "]", "=", "False", "return", "ret", "function", "=", "'acl/info/{0}'", ".", "format", "(", "kwargs", "[", "'id'", "]", ")", "ret", "=", "_query", "(", "consul_url", "=", "consul_url", ",", "data", "=", "data", ",", "method", "=", "'PUT'", ",", "function", "=", "function", ")", "return", "ret" ]
information about an acl token .
train
true
51,265
def selected_index(view): indices = view.selectedIndexes() assert (len(indices) < 2), 'View must be in single selection mode' if indices: return indices[0].row() else: return (-1)
[ "def", "selected_index", "(", "view", ")", ":", "indices", "=", "view", ".", "selectedIndexes", "(", ")", "assert", "(", "len", "(", "indices", ")", "<", "2", ")", ",", "'View must be in single selection mode'", "if", "indices", ":", "return", "indices", "[", "0", "]", ".", "row", "(", ")", "else", ":", "return", "(", "-", "1", ")" ]
return the selected integer index in the view .
train
false
51,266
def get_settings_from_module(module=None, default_settings=DEFAULT_CONFIG): context = copy.deepcopy(default_settings) if (module is not None): context.update(((k, v) for (k, v) in inspect.getmembers(module) if k.isupper())) return context
[ "def", "get_settings_from_module", "(", "module", "=", "None", ",", "default_settings", "=", "DEFAULT_CONFIG", ")", ":", "context", "=", "copy", ".", "deepcopy", "(", "default_settings", ")", "if", "(", "module", "is", "not", "None", ")", ":", "context", ".", "update", "(", "(", "(", "k", ",", "v", ")", "for", "(", "k", ",", "v", ")", "in", "inspect", ".", "getmembers", "(", "module", ")", "if", "k", ".", "isupper", "(", ")", ")", ")", "return", "context" ]
loads settings from a module .
train
false
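A self-contained sketch of get_settings_from_module using a throwaway module object (the module name and settings keys here are hypothetical):

import types

mod = types.ModuleType('fake_settings')
mod.SITENAME = 'Demo Site'   # upper-case members are picked up
mod.theme = 'ignored'        # lower-case members are skipped
conf = get_settings_from_module(mod, default_settings={'SITENAME': 'Default', 'PATH': '.'})
print(conf['SITENAME'], conf['PATH'])   # Demo Site .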
51,267
def get_point_of_reference(unit, count, epoch=None): if (unit == 'seconds'): multiplier = 1 elif (unit == 'minutes'): multiplier = 60 elif (unit == 'hours'): multiplier = 3600 elif (unit == 'days'): multiplier = (3600 * 24) elif (unit == 'weeks'): multiplier = ((3600 * 24) * 7) elif (unit == 'months'): multiplier = ((3600 * 24) * 30) elif (unit == 'years'): multiplier = ((3600 * 24) * 365) else: raise ValueError('Invalid unit: {0}.'.format(unit)) if (not epoch): epoch = time.time() epoch = fix_epoch(epoch) return (epoch - (multiplier * count))
[ "def", "get_point_of_reference", "(", "unit", ",", "count", ",", "epoch", "=", "None", ")", ":", "if", "(", "unit", "==", "'seconds'", ")", ":", "multiplier", "=", "1", "elif", "(", "unit", "==", "'minutes'", ")", ":", "multiplier", "=", "60", "elif", "(", "unit", "==", "'hours'", ")", ":", "multiplier", "=", "3600", "elif", "(", "unit", "==", "'days'", ")", ":", "multiplier", "=", "(", "3600", "*", "24", ")", "elif", "(", "unit", "==", "'weeks'", ")", ":", "multiplier", "=", "(", "(", "3600", "*", "24", ")", "*", "7", ")", "elif", "(", "unit", "==", "'months'", ")", ":", "multiplier", "=", "(", "(", "3600", "*", "24", ")", "*", "30", ")", "elif", "(", "unit", "==", "'years'", ")", ":", "multiplier", "=", "(", "(", "3600", "*", "24", ")", "*", "365", ")", "else", ":", "raise", "ValueError", "(", "'Invalid unit: {0}.'", ".", "format", "(", "unit", ")", ")", "if", "(", "not", "epoch", ")", ":", "epoch", "=", "time", ".", "time", "(", ")", "epoch", "=", "fix_epoch", "(", "epoch", ")", "return", "(", "epoch", "-", "(", "multiplier", "*", "count", ")", ")" ]
get a point-of-reference timestamp in epoch + milliseconds by deriving from a unit and a count .
train
false
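A quick arithmetic check of get_point_of_reference with a fixed epoch (assuming fix_epoch leaves a seconds-resolution timestamp unchanged, as curator's helper does):

# 7 days == 7 * 24 * 3600 == 604800 seconds
print(get_point_of_reference('days', 7, epoch=1600000000))    # 1599395200
print(get_point_of_reference('hours', 2, epoch=1600000000))   # 1599992800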
51,268
def instance_type_extra_specs_delete(context, flavor_id, key): IMPL.instance_type_extra_specs_delete(context, flavor_id, key)
[ "def", "instance_type_extra_specs_delete", "(", "context", ",", "flavor_id", ",", "key", ")", ":", "IMPL", ".", "instance_type_extra_specs_delete", "(", "context", ",", "flavor_id", ",", "key", ")" ]
delete the given extra specs item .
train
false
51,269
@builtin(u'Title-case text', titlecase, apply_func_to_match_groups) def replace_titlecase(match, number, file_name, metadata, dictionaries, data, functions, *args, **kwargs): return apply_func_to_match_groups(match, titlecase)
[ "@", "builtin", "(", "u'Title-case text'", ",", "titlecase", ",", "apply_func_to_match_groups", ")", "def", "replace_titlecase", "(", "match", ",", "number", ",", "file_name", ",", "metadata", ",", "dictionaries", ",", "data", ",", "functions", ",", "*", "args", ",", "**", "kwargs", ")", ":", "return", "apply_func_to_match_groups", "(", "match", ",", "titlecase", ")" ]
title-case matched text .
train
false
51,272
def parse_qiime_config_files(qiime_config_files): def return_none(): return None results = defaultdict(return_none) for qiime_config_file in qiime_config_files: try: results.update(parse_qiime_config_file(qiime_config_file)) except IOError: pass return results
[ "def", "parse_qiime_config_files", "(", "qiime_config_files", ")", ":", "def", "return_none", "(", ")", ":", "return", "None", "results", "=", "defaultdict", "(", "return_none", ")", "for", "qiime_config_file", "in", "qiime_config_files", ":", "try", ":", "results", ".", "update", "(", "parse_qiime_config_file", "(", "qiime_config_file", ")", ")", "except", "IOError", ":", "pass", "return", "results" ]
parse files in list of qiime_config_files ; the order of files must be least important to most important .
train
false
51,273
def load_shared_variable(val): return tensor_constructor(val)
[ "def", "load_shared_variable", "(", "val", ")", ":", "return", "tensor_constructor", "(", "val", ")" ]
this function is only here to keep some pickles loading after a failed fix done in august 2011 .
train
false
51,274
def parse_path_info(path_info, av=False): if av: vars = None if ('?' in path_info): (path_info, query) = path_info.split('?', 2) vars = Storage() for var in query.split('&'): (var, val) = (var.split('=', 2) if ('=' in var) else (var, None)) vars[var] = val items = List(path_info.split('/')) args = (List(items[3:]) if (len(items) > 3) else None) return (items(0), items(1), items(2), args, vars) mo = re.match('^/?(?P<a>\\w+)(/(?P<c>\\w+)(/(?P<f>\\w+))?)?$', path_info) if mo: return (mo.group('a'), mo.group('c'), mo.group('f')) else: return (None, None, None)
[ "def", "parse_path_info", "(", "path_info", ",", "av", "=", "False", ")", ":", "if", "av", ":", "vars", "=", "None", "if", "(", "'?'", "in", "path_info", ")", ":", "(", "path_info", ",", "query", ")", "=", "path_info", ".", "split", "(", "'?'", ",", "2", ")", "vars", "=", "Storage", "(", ")", "for", "var", "in", "query", ".", "split", "(", "'&'", ")", ":", "(", "var", ",", "val", ")", "=", "(", "var", ".", "split", "(", "'='", ",", "2", ")", "if", "(", "'='", "in", "var", ")", "else", "(", "var", ",", "None", ")", ")", "vars", "[", "var", "]", "=", "val", "items", "=", "List", "(", "path_info", ".", "split", "(", "'/'", ")", ")", "args", "=", "(", "List", "(", "items", "[", "3", ":", "]", ")", "if", "(", "len", "(", "items", ")", ">", "3", ")", "else", "None", ")", "return", "(", "items", "(", "0", ")", ",", "items", "(", "1", ")", ",", "items", "(", "2", ")", ",", "args", ",", "vars", ")", "mo", "=", "re", ".", "match", "(", "'^/?(?P<a>\\\\w+)(/(?P<c>\\\\w+)(/(?P<f>\\\\w+))?)?$'", ",", "path_info", ")", "if", "mo", ":", "return", "(", "mo", ".", "group", "(", "'a'", ")", ",", "mo", ".", "group", "(", "'c'", ")", ",", "mo", ".", "group", "(", "'f'", ")", ")", "else", ":", "return", "(", "None", ",", "None", ",", "None", ")" ]
parses path info formatted like a/c/f where c and f are optional and a leading / is accepted .
train
false
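Two hedged calls showing both return shapes of parse_path_info; the av=True branch relies on web2py's List and Storage containers being in scope, so the values below are approximate:

print(parse_path_info('/a/c/f'))   # ('a', 'c', 'f')
print(parse_path_info('/x'))       # ('x', None, None)
a, c, f, args, vars = parse_path_info('a/c/f/arg1/arg2?k=v', av=True)
print(a, c, f, list(args), vars['k'])   # a c f ['arg1', 'arg2'] v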
51,276
def matching(u, v): return hamming(u, v)
[ "def", "matching", "(", "u", ",", "v", ")", ":", "return", "hamming", "(", "u", ",", "v", ")" ]
computes the hamming distance between two boolean 1-d arrays .
train
false
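matching here is just an alias for scipy's hamming (modern scipy deprecates the alias in favour of calling hamming directly):

from scipy.spatial.distance import hamming

u = [True, False, True]
v = [False, False, True]
print(hamming(u, v))   # 0.333... -> one of three positions differs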
51,277
def summarize_address_range(first, last): if (not (isinstance(first, _BaseAddress) and isinstance(last, _BaseAddress))): raise TypeError(u'first and last must be IP addresses, not networks') if (first.version != last.version): raise TypeError((u'%s and %s are not of the same version' % (first, last))) if (first > last): raise ValueError(u'last IP address must be greater than first') if (first.version == 4): ip = IPv4Network elif (first.version == 6): ip = IPv6Network else: raise ValueError(u'unknown IP version') ip_bits = first._max_prefixlen first_int = first._ip last_int = last._ip while (first_int <= last_int): nbits = min(_count_righthand_zero_bits(first_int, ip_bits), (_compat_bit_length(((last_int - first_int) + 1)) - 1)) net = ip((first_int, (ip_bits - nbits))) (yield net) first_int += (1 << nbits) if ((first_int - 1) == ip._ALL_ONES): break
[ "def", "summarize_address_range", "(", "first", ",", "last", ")", ":", "if", "(", "not", "(", "isinstance", "(", "first", ",", "_BaseAddress", ")", "and", "isinstance", "(", "last", ",", "_BaseAddress", ")", ")", ")", ":", "raise", "TypeError", "(", "u'first and last must be IP addresses, not networks'", ")", "if", "(", "first", ".", "version", "!=", "last", ".", "version", ")", ":", "raise", "TypeError", "(", "(", "u'%s and %s are not of the same version'", "%", "(", "first", ",", "last", ")", ")", ")", "if", "(", "first", ">", "last", ")", ":", "raise", "ValueError", "(", "u'last IP address must be greater than first'", ")", "if", "(", "first", ".", "version", "==", "4", ")", ":", "ip", "=", "IPv4Network", "elif", "(", "first", ".", "version", "==", "6", ")", ":", "ip", "=", "IPv6Network", "else", ":", "raise", "ValueError", "(", "u'unknown IP version'", ")", "ip_bits", "=", "first", ".", "_max_prefixlen", "first_int", "=", "first", ".", "_ip", "last_int", "=", "last", ".", "_ip", "while", "(", "first_int", "<=", "last_int", ")", ":", "nbits", "=", "min", "(", "_count_righthand_zero_bits", "(", "first_int", ",", "ip_bits", ")", ",", "(", "_compat_bit_length", "(", "(", "(", "last_int", "-", "first_int", ")", "+", "1", ")", ")", "-", "1", ")", ")", "net", "=", "ip", "(", "(", "first_int", ",", "(", "ip_bits", "-", "nbits", ")", ")", ")", "(", "yield", "net", ")", "first_int", "+=", "(", "1", "<<", "nbits", ")", "if", "(", "(", "first_int", "-", "1", ")", "==", "ip", ".", "_ALL_ONES", ")", ":", "break" ]
summarize a network range given the first and last ip addresses .
train
true
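This summarize_address_range is a backport of the stdlib generator; the standard ipaddress module shows the same behaviour:

import ipaddress

first = ipaddress.IPv4Address('192.0.2.0')
last = ipaddress.IPv4Address('192.0.2.130')
print(list(ipaddress.summarize_address_range(first, last)))
# [IPv4Network('192.0.2.0/25'), IPv4Network('192.0.2.128/31'), IPv4Network('192.0.2.130/32')]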
51,278
@pytest.mark.skipif("sys.platform == 'win32' and sys.version_info[:2] >= (3,5)") def test_commandline_abbrev_interp(tmpdir): if (sys.platform == 'win32'): fmt = '%s.%s' else: fmt = 'python%s.%s' abbrev = (fmt % (sys.version_info[0], sys.version_info[1])) subprocess.check_call([sys.executable, VIRTUALENV_SCRIPT, '-p', abbrev, str(tmpdir.join('venv'))])
[ "@", "pytest", ".", "mark", ".", "skipif", "(", "\"sys.platform == 'win32' and sys.version_info[:2] >= (3,5)\"", ")", "def", "test_commandline_abbrev_interp", "(", "tmpdir", ")", ":", "if", "(", "sys", ".", "platform", "==", "'win32'", ")", ":", "fmt", "=", "'%s.%s'", "else", ":", "fmt", "=", "'python%s.%s'", "abbrev", "=", "(", "fmt", "%", "(", "sys", ".", "version_info", "[", "0", "]", ",", "sys", ".", "version_info", "[", "1", "]", ")", ")", "subprocess", ".", "check_call", "(", "[", "sys", ".", "executable", ",", "VIRTUALENV_SCRIPT", ",", "'-p'", ",", "abbrev", ",", "str", "(", "tmpdir", ".", "join", "(", "'venv'", ")", ")", "]", ")" ]
specifying abbreviated forms of the python interpreter should work .
train
false
51,279
def _strip_module_name(mod): if (mod.strip() == ''): return False return mod.split()[0]
[ "def", "_strip_module_name", "(", "mod", ")", ":", "if", "(", "mod", ".", "strip", "(", ")", "==", "''", ")", ":", "return", "False", "return", "mod", ".", "split", "(", ")", "[", "0", "]" ]
return the module name with any configuration arguments stripped .
train
false
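Two quick calls to _strip_module_name showing the split-off of configuration arguments and the falsy return for blank input (the module name below is illustrative):

print(_strip_module_name('nf_conntrack hashsize=16384'))   # 'nf_conntrack'
print(_strip_module_name('   '))                           # False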
51,280
def isNaN(value): return ((_exponent(value) == 2047) and (not _zero_mantissa(value)))
[ "def", "isNaN", "(", "value", ")", ":", "return", "(", "(", "_exponent", "(", "value", ")", "==", "2047", ")", "and", "(", "not", "_zero_mantissa", "(", "value", ")", ")", ")" ]
determine if the argument is an ieee 754 nan value .
train
false
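The _exponent and _zero_mantissa helpers are not shown; below is a minimal sketch of how such bit-level helpers are typically written, enough to make the isNaN predicate runnable:

import struct

def _double_bits(value):
    # reinterpret the IEEE 754 double as a 64-bit unsigned integer
    return struct.unpack('>Q', struct.pack('>d', value))[0]

def _exponent(value):
    return (_double_bits(value) >> 52) & 0x7FF        # 11-bit biased exponent

def _zero_mantissa(value):
    return (_double_bits(value) & ((1 << 52) - 1)) == 0

print(isNaN(float('nan')))   # True  (exponent 2047, non-zero mantissa)
print(isNaN(float('inf')))   # False (exponent 2047 but zero mantissa -> infinity)
print(isNaN(1.0))            # False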
51,282
def monitor_docstring_start_space(physical_line): pos = max([physical_line.find(i) for i in DOCSTRING_TRIPLE]) if ((pos != (-1)) and (len(physical_line) > (pos + 1))): if (physical_line[(pos + 3)] == ' '): return (pos, 'ENERGY N401: one line docstring should not start with a space')
[ "def", "monitor_docstring_start_space", "(", "physical_line", ")", ":", "pos", "=", "max", "(", "[", "physical_line", ".", "find", "(", "i", ")", "for", "i", "in", "DOCSTRING_TRIPLE", "]", ")", "if", "(", "(", "pos", "!=", "(", "-", "1", ")", ")", "and", "(", "len", "(", "physical_line", ")", ">", "(", "pos", "+", "1", ")", ")", ")", ":", "if", "(", "physical_line", "[", "(", "pos", "+", "3", ")", "]", "==", "' '", ")", ":", "return", "(", "pos", ",", "'ENERGY N401: one line docstring should not start with a space'", ")" ]
check that a one-line docstring does not start with a space .
train
false
51,284
def convert_password(context, password): password = (password or '') meta = {} for i in xrange(CHUNKS): meta[('password_%d' % i)] = password[:CHUNK_LENGTH] password = password[CHUNK_LENGTH:] return meta
[ "def", "convert_password", "(", "context", ",", "password", ")", ":", "password", "=", "(", "password", "or", "''", ")", "meta", "=", "{", "}", "for", "i", "in", "xrange", "(", "CHUNKS", ")", ":", "meta", "[", "(", "'password_%d'", "%", "i", ")", "]", "=", "password", "[", ":", "CHUNK_LENGTH", "]", "password", "=", "password", "[", "CHUNK_LENGTH", ":", "]", "return", "meta" ]
stores password as system_metadata items .
train
false
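A sketch of convert_password's chunking (the snippet targets Python 2 via xrange); CHUNKS and CHUNK_LENGTH are module constants not shown here, and the values below are the ones nova uses, taken as an assumption:

CHUNKS, CHUNK_LENGTH = 4, 255   # assumed module constants

meta = convert_password(None, 'secret')   # context is unused by the chunking itself
print(meta['password_0'])   # 'secret' -- a short password fits in the first chunk
print(meta['password_1'])   # ''       -- remaining chunks stay empty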
51,285
def pink(): rc('image', cmap='pink') im = gci() if (im is not None): im.set_cmap(cm.pink) draw_if_interactive()
[ "def", "pink", "(", ")", ":", "rc", "(", "'image'", ",", "cmap", "=", "'pink'", ")", "im", "=", "gci", "(", ")", "if", "(", "im", "is", "not", "None", ")", ":", "im", ".", "set_cmap", "(", "cm", ".", "pink", ")", "draw_if_interactive", "(", ")" ]
set the default colormap to pink and apply to current image if any .
train
false
51,286
def getEndpointsFromYIntersections(x, yIntersections): endpoints = [] for yIntersectionIndex in xrange(0, len(yIntersections), 2): firstY = yIntersections[yIntersectionIndex] secondY = yIntersections[(yIntersectionIndex + 1)] if (firstY != secondY): firstComplex = complex(x, firstY) secondComplex = complex(x, secondY) endpointFirst = euclidean.Endpoint() endpointSecond = euclidean.Endpoint().getFromOtherPoint(endpointFirst, secondComplex) endpointFirst.getFromOtherPoint(endpointSecond, firstComplex) endpoints.append(endpointFirst) endpoints.append(endpointSecond) return endpoints
[ "def", "getEndpointsFromYIntersections", "(", "x", ",", "yIntersections", ")", ":", "endpoints", "=", "[", "]", "for", "yIntersectionIndex", "in", "xrange", "(", "0", ",", "len", "(", "yIntersections", ")", ",", "2", ")", ":", "firstY", "=", "yIntersections", "[", "yIntersectionIndex", "]", "secondY", "=", "yIntersections", "[", "(", "yIntersectionIndex", "+", "1", ")", "]", "if", "(", "firstY", "!=", "secondY", ")", ":", "firstComplex", "=", "complex", "(", "x", ",", "firstY", ")", "secondComplex", "=", "complex", "(", "x", ",", "secondY", ")", "endpointFirst", "=", "euclidean", ".", "Endpoint", "(", ")", "endpointSecond", "=", "euclidean", ".", "Endpoint", "(", ")", ".", "getFromOtherPoint", "(", "endpointFirst", ",", "secondComplex", ")", "endpointFirst", ".", "getFromOtherPoint", "(", "endpointSecond", ",", "firstComplex", ")", "endpoints", ".", "append", "(", "endpointFirst", ")", "endpoints", ".", "append", "(", "endpointSecond", ")", "return", "endpoints" ]
get endpoints from the y intersections .
train
false
51,288
def rs_cot(p, x, prec): if rs_is_puiseux(p, x): r = rs_puiseux(rs_cot, p, x, prec) return r (i, m) = _check_series_var(p, x, 'cot') prec1 = (prec + (2 * m)) (c, s) = rs_cos_sin(p, x, prec1) s = mul_xin(s, i, (- m)) s = rs_series_inversion(s, x, prec1) res = rs_mul(c, s, x, prec1) res = mul_xin(res, i, (- m)) res = rs_trunc(res, x, prec) return res
[ "def", "rs_cot", "(", "p", ",", "x", ",", "prec", ")", ":", "if", "rs_is_puiseux", "(", "p", ",", "x", ")", ":", "r", "=", "rs_puiseux", "(", "rs_cot", ",", "p", ",", "x", ",", "prec", ")", "return", "r", "(", "i", ",", "m", ")", "=", "_check_series_var", "(", "p", ",", "x", ",", "'cot'", ")", "prec1", "=", "(", "prec", "+", "(", "2", "*", "m", ")", ")", "(", "c", ",", "s", ")", "=", "rs_cos_sin", "(", "p", ",", "x", ",", "prec1", ")", "s", "=", "mul_xin", "(", "s", ",", "i", ",", "(", "-", "m", ")", ")", "s", "=", "rs_series_inversion", "(", "s", ",", "x", ",", "prec1", ")", "res", "=", "rs_mul", "(", "c", ",", "s", ",", "x", ",", "prec1", ")", "res", "=", "mul_xin", "(", "res", ",", "i", ",", "(", "-", "m", ")", ")", "res", "=", "rs_trunc", "(", "res", ",", "x", ",", "prec", ")", "return", "res" ]
cotangent of a series ; return the series expansion of the cot of p .
train
false
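A hedged doctest-style run of rs_cot with sympy's sparse polynomial rings (the function lives in sympy.polys.ring_series):

from sympy.polys.domains import QQ
from sympy.polys.rings import ring
from sympy.polys.ring_series import rs_cot

R, x = ring('x', QQ)
# classical Laurent series: cot(x) = x**(-1) - x/3 - x**3/45 - 2*x**5/945 - ...
print(rs_cot(x, x, 6))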
51,289
def chain_from_iterable(iterables): for it in iterables: for element in it: (yield element)
[ "def", "chain_from_iterable", "(", "iterables", ")", ":", "for", "it", "in", "iterables", ":", "for", "element", "in", "it", ":", "(", "yield", "element", ")" ]
alternate constructor for chain() .
train
false
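chain_from_iterable is equivalent to itertools.chain.from_iterable; a one-line check:

print(list(chain_from_iterable([[1, 2], (3,), 'ab'])))   # [1, 2, 3, 'a', 'b']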
51,292
def setup_logger(debug, color): if debug: log_level = logging.DEBUG else: log_level = logging.INFO logger = logging.getLogger('exifread') stream = Handler(log_level, debug, color) logger.addHandler(stream) logger.setLevel(log_level)
[ "def", "setup_logger", "(", "debug", ",", "color", ")", ":", "if", "debug", ":", "log_level", "=", "logging", ".", "DEBUG", "else", ":", "log_level", "=", "logging", ".", "INFO", "logger", "=", "logging", ".", "getLogger", "(", "'exifread'", ")", "stream", "=", "Handler", "(", "log_level", ",", "debug", ",", "color", ")", "logger", ".", "addHandler", "(", "stream", ")", "logger", ".", "setLevel", "(", "log_level", ")" ]
configure the logger .
train
true
51,294
def _check_value(value): return (value.get() if isinstance(value, SpecialValue) else value)
[ "def", "_check_value", "(", "value", ")", ":", "return", "(", "value", ".", "get", "(", ")", "if", "isinstance", "(", "value", ",", "SpecialValue", ")", "else", "value", ")" ]
return the unwrapped value , calling .get() on specialvalue instances .
train
false
51,295
@verbose def apply_forward(fwd, stc, info, start=None, stop=None, verbose=None): for ch_name in fwd['sol']['row_names']: if (ch_name not in info['ch_names']): raise ValueError(('Channel %s of forward operator not present in evoked_template.' % ch_name)) (data, times) = _apply_forward(fwd, stc, start, stop) sfreq = float((1.0 / stc.tstep)) info_out = _fill_measurement_info(info, fwd, sfreq) evoked = EvokedArray(data, info_out, times[0], nave=1) evoked.times = times evoked.first = int(np.round((evoked.times[0] * sfreq))) evoked.last = ((evoked.first + evoked.data.shape[1]) - 1) return evoked
[ "@", "verbose", "def", "apply_forward", "(", "fwd", ",", "stc", ",", "info", ",", "start", "=", "None", ",", "stop", "=", "None", ",", "verbose", "=", "None", ")", ":", "for", "ch_name", "in", "fwd", "[", "'sol'", "]", "[", "'row_names'", "]", ":", "if", "(", "ch_name", "not", "in", "info", "[", "'ch_names'", "]", ")", ":", "raise", "ValueError", "(", "(", "'Channel %s of forward operator not present in evoked_template.'", "%", "ch_name", ")", ")", "(", "data", ",", "times", ")", "=", "_apply_forward", "(", "fwd", ",", "stc", ",", "start", ",", "stop", ")", "sfreq", "=", "float", "(", "(", "1.0", "/", "stc", ".", "tstep", ")", ")", "info_out", "=", "_fill_measurement_info", "(", "info", ",", "fwd", ",", "sfreq", ")", "evoked", "=", "EvokedArray", "(", "data", ",", "info_out", ",", "times", "[", "0", "]", ",", "nave", "=", "1", ")", "evoked", ".", "times", "=", "times", "evoked", ".", "first", "=", "int", "(", "np", ".", "round", "(", "(", "evoked", ".", "times", "[", "0", "]", "*", "sfreq", ")", ")", ")", "evoked", ".", "last", "=", "(", "(", "evoked", ".", "first", "+", "evoked", ".", "data", ".", "shape", "[", "1", "]", ")", "-", "1", ")", "return", "evoked" ]
project source space currents to sensor space using a forward operator .
train
false
51,296
def make_library_cache(prefix): assert (prefix not in _lib_cache_prefixes) _lib_cache_prefixes.add(prefix) class CustomCodeLibraryCacheImpl(CodeLibraryCacheImpl, ): _filename_prefix = prefix class LibraryCache(Cache, ): '\n Implements Cache that saves and loads CodeLibrary objects for additional\n feature for the specified python function.\n ' _impl_class = CustomCodeLibraryCacheImpl return LibraryCache
[ "def", "make_library_cache", "(", "prefix", ")", ":", "assert", "(", "prefix", "not", "in", "_lib_cache_prefixes", ")", "_lib_cache_prefixes", ".", "add", "(", "prefix", ")", "class", "CustomCodeLibraryCacheImpl", "(", "CodeLibraryCacheImpl", ",", ")", ":", "_filename_prefix", "=", "prefix", "class", "LibraryCache", "(", "Cache", ",", ")", ":", "_impl_class", "=", "CustomCodeLibraryCacheImpl", "return", "LibraryCache" ]
create a cache class for additional compilation features to cache their result for reuse .
train
false
51,297
def test_vi_block_editing(): feed = partial(_feed_cli_with_input, editing_mode=EditingMode.VI, multiline=True) operations = u'-line1\n-line2\n-line3\n-line4\n-line5\n-line6\x1bkkkkkkkj0l\x16jjlllinsert***\x1b\n' (result, cli) = feed(operations.replace(u'insert', u'I')) assert (result.text == u'-line1\n-***line2\n-***line3\n-***line4\n-line5\n-line6') (result, cli) = feed(operations.replace(u'insert', u'A')) assert (result.text == u'-line1\n-line***2\n-line***3\n-line***4\n-line5\n-line6')
[ "def", "test_vi_block_editing", "(", ")", ":", "feed", "=", "partial", "(", "_feed_cli_with_input", ",", "editing_mode", "=", "EditingMode", ".", "VI", ",", "multiline", "=", "True", ")", "operations", "=", "u'-line1\\n-line2\\n-line3\\n-line4\\n-line5\\n-line6\\x1bkkkkkkkj0l\\x16jjlllinsert***\\x1b\\n'", "(", "result", ",", "cli", ")", "=", "feed", "(", "operations", ".", "replace", "(", "u'insert'", ",", "u'I'", ")", ")", "assert", "(", "result", ".", "text", "==", "u'-line1\\n-***line2\\n-***line3\\n-***line4\\n-line5\\n-line6'", ")", "(", "result", ",", "cli", ")", "=", "feed", "(", "operations", ".", "replace", "(", "u'insert'", ",", "u'A'", ")", ")", "assert", "(", "result", ".", "text", "==", "u'-line1\\n-line***2\\n-line***3\\n-line***4\\n-line5\\n-line6'", ")" ]
test vi control-v style block insertion .
train
false
51,298
@register.inclusion_tag('inclusion.html') def inclusion_one_default(one, two='hi'): return {'result': ('inclusion_one_default - Expected result: %s, %s' % (one, two))}
[ "@", "register", ".", "inclusion_tag", "(", "'inclusion.html'", ")", "def", "inclusion_one_default", "(", "one", ",", "two", "=", "'hi'", ")", ":", "return", "{", "'result'", ":", "(", "'inclusion_one_default - Expected result: %s, %s'", "%", "(", "one", ",", "two", ")", ")", "}" ]
expected inclusion_one_default __doc__ .
train
false
51,299
def import_by_name(name, prefixes=[None]): tried = [] for prefix in prefixes: try: if prefix: prefixed_name = '.'.join([prefix, name]) else: prefixed_name = name (obj, parent) = _import_by_name(prefixed_name) return (prefixed_name, obj, parent) except ImportError: tried.append(prefixed_name) raise ImportError(('no module named %s' % ' or '.join(tried)))
[ "def", "import_by_name", "(", "name", ",", "prefixes", "=", "[", "None", "]", ")", ":", "tried", "=", "[", "]", "for", "prefix", "in", "prefixes", ":", "try", ":", "if", "prefix", ":", "prefixed_name", "=", "'.'", ".", "join", "(", "[", "prefix", ",", "name", "]", ")", "else", ":", "prefixed_name", "=", "name", "(", "obj", ",", "parent", ")", "=", "_import_by_name", "(", "prefixed_name", ")", "return", "(", "prefixed_name", ",", "obj", ",", "parent", ")", "except", "ImportError", ":", "tried", ".", "append", "(", "prefixed_name", ")", "raise", "ImportError", "(", "(", "'no module named %s'", "%", "' or '", ".", "join", "(", "tried", ")", ")", ")" ]
import a python object that has the given *name* .
train
true
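A hedged usage sketch of import_by_name, assuming the companion _import_by_name helper from sphinx.ext.autosummary that the snippet calls is in scope:

import os

name, obj, parent = import_by_name('path.join', prefixes=['os'])
print(name)                  # 'os.path.join'
print(obj is os.path.join)   # True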