Columns:
id_within_dataset: int64 (range 1 to 55.5k)
snippet: string (length 19 to 14.2k)
tokens: list of strings (length 6 to 1.63k)
nl: string (length 6 to 352)
split_within_dataset: string (1 value)
is_duplicated: bool (2 classes)
Each record below repeats these fields in this order.
51,436
def fixed_ip_create(context, values):
    return IMPL.fixed_ip_create(context, values)
[ "def", "fixed_ip_create", "(", "context", ",", "values", ")", ":", "return", "IMPL", ".", "fixed_ip_create", "(", "context", ",", "values", ")" ]
create a fixed ip from the values dictionary .
train
false
51,437
def CreatePriceTableRow(header, description, final_url, price_in_micros,
                        currency_code, price_unit):
    return {'header': header,
            'description': description,
            'finalUrls': {'urls': [final_url]},
            'price': {'money': {'microAmount': price_in_micros},
                      'currencyCode': currency_code},
            'priceUnit': price_unit,
            'xsi_type': 'PriceTableRow'}
[ "def", "CreatePriceTableRow", "(", "header", ",", "description", ",", "final_url", ",", "price_in_micros", ",", "currency_code", ",", "price_unit", ")", ":", "return", "{", "'header'", ":", "header", ",", "'description'", ":", "description", ",", "'finalUrls'", ":", "{", "'urls'", ":", "[", "final_url", "]", "}", ",", "'price'", ":", "{", "'money'", ":", "{", "'microAmount'", ":", "price_in_micros", "}", ",", "'currencyCode'", ":", "currency_code", "}", ",", "'priceUnit'", ":", "price_unit", ",", "'xsi_type'", ":", "'PriceTableRow'", "}" ]
helper function to generate a single row of a price table .
train
false
51,439
def escape_uri_path(path):
    return quote(force_bytes(path), safe="/:@&+$,-_.!~*'()")
[ "def", "escape_uri_path", "(", "path", ")", ":", "return", "quote", "(", "force_bytes", "(", "path", ")", ",", "safe", "=", "\"/:@&+$,-_.!~*'()\"", ")" ]
escape the unsafe characters from the path portion of a uniform resource identifier .
train
false
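A quick stdlib sketch of what the escape_uri_path row above does: Django's force_bytes/quote pair can be approximated with urllib.parse.quote, which accepts str directly and takes the same safe set.

from urllib.parse import quote

# Spaces and '?' are escaped; '/' and the listed punctuation pass through.
print(quote('/foo bar/baz?', safe="/:@&+$,-_.!~*'()"))  # -> /foo%20bar/baz%3F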
51,441
def data_dir_base():
    if (sys.platform == 'darwin'):
        base = os.path.expanduser('~/Library/Application Support')
    elif (sys.platform == 'win32'):
        base = os.getenv('APPDATA', os.path.expanduser('~/AppData/Local'))
    elif (os.name == 'posix'):
        base = os.getenv('XDG_DATA_HOME', os.path.expanduser('~/.local/share'))
    else:
        base = os.path.expanduser('~/.local/share')
    return base
[ "def", "data_dir_base", "(", ")", ":", "if", "(", "sys", ".", "platform", "==", "'darwin'", ")", ":", "base", "=", "os", ".", "path", ".", "expanduser", "(", "'~/Library/Application Support'", ")", "elif", "(", "sys", ".", "platform", "==", "'win32'", ")", ":", "base", "=", "os", ".", "getenv", "(", "'APPDATA'", ",", "os", ".", "path", ".", "expanduser", "(", "'~/AppData/Local'", ")", ")", "elif", "(", "os", ".", "name", "==", "'posix'", ")", ":", "base", "=", "os", ".", "getenv", "(", "'XDG_DATA_HOME'", ",", "os", ".", "path", ".", "expanduser", "(", "'~/.local/share'", ")", ")", "else", ":", "base", "=", "os", ".", "path", ".", "expanduser", "(", "'~/.local/share'", ")", "return", "base" ]
return the platform dependent application directory .
train
false
51,442
def bellman_ford_path(G, source, target, weight='weight'):
    (lengths, paths) = single_source_bellman_ford(G, source, target=target, weight=weight)
    try:
        return paths[target]
    except KeyError:
        raise nx.NetworkXNoPath(('Node %s not reachable from %s' % (source, target)))
[ "def", "bellman_ford_path", "(", "G", ",", "source", ",", "target", ",", "weight", "=", "'weight'", ")", ":", "(", "lengths", ",", "paths", ")", "=", "single_source_bellman_ford", "(", "G", ",", "source", ",", "target", "=", "target", ",", "weight", "=", "weight", ")", "try", ":", "return", "paths", "[", "target", "]", "except", "KeyError", ":", "raise", "nx", ".", "NetworkXNoPath", "(", "(", "'Node %s not reachable from %s'", "%", "(", "source", ",", "target", ")", ")", ")" ]
returns the shortest path from source to target in a weighted graph g .
train
false
51,443
@register.inclusion_tag('zinnia/tags/dummy.html')
def get_random_entries(number=5, template='zinnia/tags/entries_random.html'):
    return {'template': template, 'entries': Entry.published.order_by('?')[:number]}
[ "@", "register", ".", "inclusion_tag", "(", "'zinnia/tags/dummy.html'", ")", "def", "get_random_entries", "(", "number", "=", "5", ",", "template", "=", "'zinnia/tags/entries_random.html'", ")", ":", "return", "{", "'template'", ":", "template", ",", "'entries'", ":", "Entry", ".", "published", ".", "order_by", "(", "'?'", ")", "[", ":", "number", "]", "}" ]
return random entries .
train
false
51,444
def enumerate_serial_ports():
    path = 'HARDWARE\\DEVICEMAP\\SERIALCOMM'
    try:
        key = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, path)
    except WindowsError:
        raise StopIteration
    for i in itertools.count():
        try:
            val = winreg.EnumValue(key, i)
            (yield str(val[1]))
        except EnvironmentError:
            break
[ "def", "enumerate_serial_ports", "(", ")", ":", "path", "=", "'HARDWARE\\\\DEVICEMAP\\\\SERIALCOMM'", "try", ":", "key", "=", "winreg", ".", "OpenKey", "(", "winreg", ".", "HKEY_LOCAL_MACHINE", ",", "path", ")", "except", "WindowsError", ":", "raise", "StopIteration", "for", "i", "in", "itertools", ".", "count", "(", ")", ":", "try", ":", "val", "=", "winreg", ".", "EnumValue", "(", "key", ",", "i", ")", "(", "yield", "str", "(", "val", "[", "1", "]", ")", ")", "except", "EnvironmentError", ":", "break" ]
uses the win32 registry to return an iterator of serial ports existing on this computer .
train
false
51,445
def extended_euclid_gcd(a, b):
    if (b == 0):
        return (a, 1, 0)
    q = abs((a % b))
    r = long((a / b))
    (d, k, l) = extended_euclid_gcd(b, q)
    return (d, l, (k - (l * r)))
[ "def", "extended_euclid_gcd", "(", "a", ",", "b", ")", ":", "if", "(", "b", "==", "0", ")", ":", "return", "(", "a", ",", "1", ",", "0", ")", "q", "=", "abs", "(", "(", "a", "%", "b", ")", ")", "r", "=", "long", "(", "(", "a", "/", "b", ")", ")", "(", "d", ",", "k", ",", "l", ")", "=", "extended_euclid_gcd", "(", "b", ",", "q", ")", "return", "(", "d", ",", "l", ",", "(", "k", "-", "(", "l", "*", "r", ")", ")", ")" ]
returns a tuple such that d = gcd = ia + jb .
train
false
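A worked example for the extended_euclid_gcd row above. The snippet is Python 2 (long, integer /); this minimal Python 3 port (long -> int, / -> //) shows the returned triple (d, i, j) satisfying d = gcd(a, b) = i*a + j*b.

def egcd(a, b):
    # Python 3 port of the snippet above, for illustration only.
    if b == 0:
        return (a, 1, 0)
    q = abs(a % b)
    r = a // b
    (d, k, l) = egcd(b, q)
    return (d, l, k - l * r)

(d, i, j) = egcd(120, 23)
print(d, i, j)                     # 1 -9 47
assert d == i * 120 + j * 23 == 1  # Bezout identity holds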
51,446
def test_local_flag(script, data):
    script.pip('install', '-f', data.find_links, '--no-index', 'simple==1.0')
    result = script.pip('list', '--local', '--format=legacy')
    assert ('simple (1.0)' in result.stdout)
[ "def", "test_local_flag", "(", "script", ",", "data", ")", ":", "script", ".", "pip", "(", "'install'", ",", "'-f'", ",", "data", ".", "find_links", ",", "'--no-index'", ",", "'simple==1.0'", ")", "result", "=", "script", ".", "pip", "(", "'list'", ",", "'--local'", ",", "'--format=legacy'", ")", "assert", "(", "'simple (1.0)'", "in", "result", ".", "stdout", ")" ]
test the behavior of --local flag in the list command .
train
false
51,447
def ndget(ind, data):
    if (isinstance(ind, tuple) and (len(ind) == 1)):
        ind = ind[0]
    if (not isinstance(ind, tuple)):
        return get(ind, data)
    result = get(ind[0], data)
    if isinstance(ind[0], (list, slice)):
        return type(result)((ndget(ind[1:], row) for row in result))
    else:
        return ndget(ind[1:], result)
[ "def", "ndget", "(", "ind", ",", "data", ")", ":", "if", "(", "isinstance", "(", "ind", ",", "tuple", ")", "and", "(", "len", "(", "ind", ")", "==", "1", ")", ")", ":", "ind", "=", "ind", "[", "0", "]", "if", "(", "not", "isinstance", "(", "ind", ",", "tuple", ")", ")", ":", "return", "get", "(", "ind", ",", "data", ")", "result", "=", "get", "(", "ind", "[", "0", "]", ",", "data", ")", "if", "isinstance", "(", "ind", "[", "0", "]", ",", "(", "list", ",", "slice", ")", ")", ":", "return", "type", "(", "result", ")", "(", "(", "ndget", "(", "ind", "[", "1", ":", "]", ",", "row", ")", "for", "row", "in", "result", ")", ")", "else", ":", "return", "ndget", "(", "ind", "[", "1", ":", "]", ",", "result", ")" ]
get from n-dimensional getable ; can index with elements .
train
false
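For the ndget row above, a sketch of the intended indexing semantics. The snippet's get comes from its own module; this toy stand-in (index a sequence with an int or a list of ints) is an assumption for illustration only.

def get(ind, data):
    # Toy stand-in for the snippet's get() helper.
    if isinstance(ind, list):
        return type(data)(data[i] for i in ind)
    return data[ind]

data = [[0, 1, 2], [3, 4, 5], [6, 7, 8]]
print(get(2, get(1, data)))  # 5, i.e. what ndget((1, 2), data) resolves to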
51,450
def getDivisionFailure(*args, **kwargs):
    try:
        (1 / 0)
    except:
        f = failure.Failure(*args, **kwargs)
    return f
[ "def", "getDivisionFailure", "(", "*", "args", ",", "**", "kwargs", ")", ":", "try", ":", "(", "1", "/", "0", ")", "except", ":", "f", "=", "failure", ".", "Failure", "(", "*", "args", ",", "**", "kwargs", ")", "return", "f" ]
make a c{failure} of a divide-by-zero error .
train
false
51,452
def norm_f(x, y):
    d = ((x - y) ** 2).sum()
    return np.sqrt(d)
[ "def", "norm_f", "(", "x", ",", "y", ")", ":", "d", "=", "(", "(", "x", "-", "y", ")", "**", "2", ")", ".", "sum", "(", ")", "return", "np", ".", "sqrt", "(", "d", ")" ]
frobenius norm of difference between two arrays .
train
false
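The norm_f row above computes the Frobenius norm of x - y; for 2-D arrays it matches NumPy's default matrix norm, as this self-contained check shows.

import numpy as np

x = np.array([[1.0, 2.0], [3.0, 4.0]])
y = np.array([[1.0, 2.0], [3.0, 6.0]])
d = np.sqrt(((x - y) ** 2).sum())
print(d, np.linalg.norm(x - y))  # 2.0 2.0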
51,453
def _make_flow(request, scopes, return_url=None):
    csrf_token = hashlib.sha256(os.urandom(1024)).hexdigest()
    request.session[_CSRF_KEY] = csrf_token
    state = json.dumps({'csrf_token': csrf_token, 'return_url': return_url})
    flow = client.OAuth2WebServerFlow(
        client_id=django_util.oauth2_settings.client_id,
        client_secret=django_util.oauth2_settings.client_secret,
        scope=scopes, state=state,
        redirect_uri=request.build_absolute_uri(
            urlresolvers.reverse('google_oauth:callback')))
    flow_key = _FLOW_KEY.format(csrf_token)
    request.session[flow_key] = jsonpickle.encode(flow)
    return flow
[ "def", "_make_flow", "(", "request", ",", "scopes", ",", "return_url", "=", "None", ")", ":", "csrf_token", "=", "hashlib", ".", "sha256", "(", "os", ".", "urandom", "(", "1024", ")", ")", ".", "hexdigest", "(", ")", "request", ".", "session", "[", "_CSRF_KEY", "]", "=", "csrf_token", "state", "=", "json", ".", "dumps", "(", "{", "'csrf_token'", ":", "csrf_token", ",", "'return_url'", ":", "return_url", "}", ")", "flow", "=", "client", ".", "OAuth2WebServerFlow", "(", "client_id", "=", "django_util", ".", "oauth2_settings", ".", "client_id", ",", "client_secret", "=", "django_util", ".", "oauth2_settings", ".", "client_secret", ",", "scope", "=", "scopes", ",", "state", "=", "state", ",", "redirect_uri", "=", "request", ".", "build_absolute_uri", "(", "urlresolvers", ".", "reverse", "(", "'google_oauth:callback'", ")", ")", ")", "flow_key", "=", "_FLOW_KEY", ".", "format", "(", "csrf_token", ")", "request", ".", "session", "[", "flow_key", "]", "=", "jsonpickle", ".", "encode", "(", "flow", ")", "return", "flow" ]
creates a web server flow args: request: a django request object .
train
true
51,454
def _remote_logging(original):
    @wraps(original)
    def logger(self, request, **routeArguments):
        serialized_remote_task = request.requestHeaders.getRawHeaders('X-Eliot-Task-Id', [None])[0]
        if (serialized_remote_task is None):
            return original(self, request, **routeArguments)
        try:
            action = Action.continue_task(task_id=serialized_remote_task)
        except ValueError:
            return original(self, request, **routeArguments)
        with action.context():
            d = DeferredContext(original(self, request, **routeArguments))
            d.addActionFinish()
            return d.result
    return logger
[ "def", "_remote_logging", "(", "original", ")", ":", "@", "wraps", "(", "original", ")", "def", "logger", "(", "self", ",", "request", ",", "**", "routeArguments", ")", ":", "serialized_remote_task", "=", "request", ".", "requestHeaders", ".", "getRawHeaders", "(", "'X-Eliot-Task-Id'", ",", "[", "None", "]", ")", "[", "0", "]", "if", "(", "serialized_remote_task", "is", "None", ")", ":", "return", "original", "(", "self", ",", "request", ",", "**", "routeArguments", ")", "try", ":", "action", "=", "Action", ".", "continue_task", "(", "task_id", "=", "serialized_remote_task", ")", "except", "ValueError", ":", "return", "original", "(", "self", ",", "request", ",", "**", "routeArguments", ")", "with", "action", ".", "context", "(", ")", ":", "d", "=", "DeferredContext", "(", "original", "(", "self", ",", "request", ",", "**", "routeArguments", ")", ")", "d", ".", "addActionFinish", "(", ")", "return", "d", ".", "result", "return", "logger" ]
decorate a method which implements an api endpoint to do eliot-based log tracing; that is .
train
false
51,456
def CodeRange(code1, code2):
    if (code1 <= nl_code < code2):
        return Alt(RawCodeRange(code1, nl_code), RawNewline, RawCodeRange((nl_code + 1), code2))
    else:
        return RawCodeRange(code1, code2)
[ "def", "CodeRange", "(", "code1", ",", "code2", ")", ":", "if", "(", "code1", "<=", "nl_code", "<", "code2", ")", ":", "return", "Alt", "(", "RawCodeRange", "(", "code1", ",", "nl_code", ")", ",", "RawNewline", ",", "RawCodeRange", "(", "(", "nl_code", "+", "1", ")", ",", "code2", ")", ")", "else", ":", "return", "RawCodeRange", "(", "code1", ",", "code2", ")" ]
coderange is an re which matches any character with a code |c| in the range |code1| <= |c| < |code2| .
train
false
51,457
def _getReplacementString(replacement):
    if callable(replacement):
        replacement = _fullyQualifiedName(replacement)
    return ('please use %s instead' % (replacement,))
[ "def", "_getReplacementString", "(", "replacement", ")", ":", "if", "callable", "(", "replacement", ")", ":", "replacement", "=", "_fullyQualifiedName", "(", "replacement", ")", "return", "(", "'please use %s instead'", "%", "(", "replacement", ",", ")", ")" ]
surround a replacement for a deprecated api with some polite text exhorting the user to consider it as an alternative .
train
false
51,459
def tablespace_alter(name, user=None, host=None, port=None, maintenance_db=None,
                     password=None, new_name=None, new_owner=None,
                     set_option=None, reset_option=None, runas=None):
    if (not any([new_name, new_owner, set_option, reset_option])):
        return True
    queries = []
    if new_name:
        queries.append('ALTER TABLESPACE "{0}" RENAME TO "{1}"'.format(name, new_name))
    if new_owner:
        queries.append('ALTER TABLESPACE "{0}" OWNER TO "{1}"'.format(name, new_owner))
    if set_option:
        queries.append('ALTER TABLESPACE "{0}" SET ({1} = {2})'.format(name, set_option.keys()[0], set_option.values()[0]))
    if reset_option:
        queries.append('ALTER TABLESPACE "{0}" RESET ({1})'.format(name, reset_option))
    for query in queries:
        ret = _psql_prepare_and_run(['-c', query], user=user, host=host, port=port,
                                    maintenance_db=maintenance_db, password=password,
                                    runas=runas)
        if (ret['retcode'] != 0):
            return False
    return True
[ "def", "tablespace_alter", "(", "name", ",", "user", "=", "None", ",", "host", "=", "None", ",", "port", "=", "None", ",", "maintenance_db", "=", "None", ",", "password", "=", "None", ",", "new_name", "=", "None", ",", "new_owner", "=", "None", ",", "set_option", "=", "None", ",", "reset_option", "=", "None", ",", "runas", "=", "None", ")", ":", "if", "(", "not", "any", "(", "[", "new_name", ",", "new_owner", ",", "set_option", ",", "reset_option", "]", ")", ")", ":", "return", "True", "queries", "=", "[", "]", "if", "new_name", ":", "queries", ".", "append", "(", "'ALTER TABLESPACE \"{0}\" RENAME TO \"{1}\"'", ".", "format", "(", "name", ",", "new_name", ")", ")", "if", "new_owner", ":", "queries", ".", "append", "(", "'ALTER TABLESPACE \"{0}\" OWNER TO \"{1}\"'", ".", "format", "(", "name", ",", "new_owner", ")", ")", "if", "set_option", ":", "queries", ".", "append", "(", "'ALTER TABLESPACE \"{0}\" SET ({1} = {2})'", ".", "format", "(", "name", ",", "set_option", ".", "keys", "(", ")", "[", "0", "]", ",", "set_option", ".", "values", "(", ")", "[", "0", "]", ")", ")", "if", "reset_option", ":", "queries", ".", "append", "(", "'ALTER TABLESPACE \"{0}\" RESET ({1})'", ".", "format", "(", "name", ",", "reset_option", ")", ")", "for", "query", "in", "queries", ":", "ret", "=", "_psql_prepare_and_run", "(", "[", "'-c'", ",", "query", "]", ",", "user", "=", "user", ",", "host", "=", "host", ",", "port", "=", "port", ",", "maintenance_db", "=", "maintenance_db", ",", "password", "=", "password", ",", "runas", "=", "runas", ")", "if", "(", "ret", "[", "'retcode'", "]", "!=", "0", ")", ":", "return", "False", "return", "True" ]
change tablespace name .
train
true
51,462
def modifyPdpContextAccept():
    a = TpPd(pd=8)
    b = MessageType(mesType=69)
    packet = (a / b)
    return packet
[ "def", "modifyPdpContextAccept", "(", ")", ":", "a", "=", "TpPd", "(", "pd", "=", "8", ")", "b", "=", "MessageType", "(", "mesType", "=", "69", ")", "packet", "=", "(", "a", "/", "b", ")", "return", "packet" ]
modify pdp context accept section 9 .
train
true
51,463
@contextmanager
def use_proxy_buffer(snippets_stack, vstate):
    buffer_proxy = VimBufferProxy(snippets_stack, vstate)
    old_buffer = _vim.buf
    try:
        _vim.buf = buffer_proxy
        (yield)
    finally:
        _vim.buf = old_buffer
    buffer_proxy.validate_buffer()
[ "@", "contextmanager", "def", "use_proxy_buffer", "(", "snippets_stack", ",", "vstate", ")", ":", "buffer_proxy", "=", "VimBufferProxy", "(", "snippets_stack", ",", "vstate", ")", "old_buffer", "=", "_vim", ".", "buf", "try", ":", "_vim", ".", "buf", "=", "buffer_proxy", "(", "yield", ")", "finally", ":", "_vim", ".", "buf", "=", "old_buffer", "buffer_proxy", ".", "validate_buffer", "(", ")" ]
forward all changes made in the buffer to the current snippet stack for the duration of the function call .
train
false
51,464
def create_form_params_helper(form_data):
    new_form_params = FormParameters()
    for elem_data in form_data:
        new_form_params.add_field_by_attrs(elem_data)
    return new_form_params
[ "def", "create_form_params_helper", "(", "form_data", ")", ":", "new_form_params", "=", "FormParameters", "(", ")", "for", "elem_data", "in", "form_data", ":", "new_form_params", ".", "add_field_by_attrs", "(", "elem_data", ")", "return", "new_form_params" ]
creates a dc .
train
false
51,465
@verbose
def _write_source_spaces_to_fid(fid, src, verbose=None):
    for s in src:
        logger.info(' Write a source space...')
        start_block(fid, FIFF.FIFFB_MNE_SOURCE_SPACE)
        _write_one_source_space(fid, s, verbose)
        end_block(fid, FIFF.FIFFB_MNE_SOURCE_SPACE)
        logger.info(' [done]')
    logger.info((' %d source spaces written' % len(src)))
[ "@", "verbose", "def", "_write_source_spaces_to_fid", "(", "fid", ",", "src", ",", "verbose", "=", "None", ")", ":", "for", "s", "in", "src", ":", "logger", ".", "info", "(", "' Write a source space...'", ")", "start_block", "(", "fid", ",", "FIFF", ".", "FIFFB_MNE_SOURCE_SPACE", ")", "_write_one_source_space", "(", "fid", ",", "s", ",", "verbose", ")", "end_block", "(", "fid", ",", "FIFF", ".", "FIFFB_MNE_SOURCE_SPACE", ")", "logger", ".", "info", "(", "' [done]'", ")", "logger", ".", "info", "(", "(", "' %d source spaces written'", "%", "len", "(", "src", ")", ")", ")" ]
write the source spaces to a fif file .
train
false
51,466
def get_perf(filename, folder):
    _conlleval = os.path.join(folder, 'conlleval.pl')
    if (not os.path.isfile(_conlleval)):
        url = 'http://www-etud.iro.umontreal.ca/~mesnilgr/atis/conlleval.pl'
        download(url, _conlleval)
        os.chmod(_conlleval, stat.S_IRWXU)
    proc = subprocess.Popen(['perl', _conlleval], stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    (stdout, _) = proc.communicate(''.join(open(filename).readlines()).encode('utf-8'))
    stdout = stdout.decode('utf-8')
    out = None
    for line in stdout.split('\n'):
        if ('accuracy' in line):
            out = line.split()
            break
    if (out is None):
        print(stdout.split('\n'))
    precision = float(out[6][:(-2)])
    recall = float(out[8][:(-2)])
    f1score = float(out[10])
    return {'p': precision, 'r': recall, 'f1': f1score}
[ "def", "get_perf", "(", "filename", ",", "folder", ")", ":", "_conlleval", "=", "os", ".", "path", ".", "join", "(", "folder", ",", "'conlleval.pl'", ")", "if", "(", "not", "os", ".", "path", ".", "isfile", "(", "_conlleval", ")", ")", ":", "url", "=", "'http://www-etud.iro.umontreal.ca/~mesnilgr/atis/conlleval.pl'", "download", "(", "url", ",", "_conlleval", ")", "os", ".", "chmod", "(", "_conlleval", ",", "stat", ".", "S_IRWXU", ")", "proc", "=", "subprocess", ".", "Popen", "(", "[", "'perl'", ",", "_conlleval", "]", ",", "stdin", "=", "subprocess", ".", "PIPE", ",", "stdout", "=", "subprocess", ".", "PIPE", ")", "(", "stdout", ",", "_", ")", "=", "proc", ".", "communicate", "(", "''", ".", "join", "(", "open", "(", "filename", ")", ".", "readlines", "(", ")", ")", ".", "encode", "(", "'utf-8'", ")", ")", "stdout", "=", "stdout", ".", "decode", "(", "'utf-8'", ")", "out", "=", "None", "for", "line", "in", "stdout", ".", "split", "(", "'\\n'", ")", ":", "if", "(", "'accuracy'", "in", "line", ")", ":", "out", "=", "line", ".", "split", "(", ")", "break", "if", "(", "out", "is", "None", ")", ":", "print", "(", "stdout", ".", "split", "(", "'\\n'", ")", ")", "precision", "=", "float", "(", "out", "[", "6", "]", "[", ":", "(", "-", "2", ")", "]", ")", "recall", "=", "float", "(", "out", "[", "8", "]", "[", ":", "(", "-", "2", ")", "]", ")", "f1score", "=", "float", "(", "out", "[", "10", "]", ")", "return", "{", "'p'", ":", "precision", ",", "'r'", ":", "recall", ",", "'f1'", ":", "f1score", "}" ]
run conlleval .
train
false
51,467
def _order_surfaces(surfs):
    if (len(surfs) != 3):
        return surfs
    surf_order = [FIFF.FIFFV_BEM_SURF_ID_HEAD, FIFF.FIFFV_BEM_SURF_ID_SKULL,
                  FIFF.FIFFV_BEM_SURF_ID_BRAIN]
    ids = np.array([surf['id'] for surf in surfs])
    if (set(ids) != set(surf_order)):
        raise RuntimeError(('bad surface ids: %s' % ids))
    order = [np.where((ids == id_))[0][0] for id_ in surf_order]
    surfs = [surfs[idx] for idx in order]
    return surfs
[ "def", "_order_surfaces", "(", "surfs", ")", ":", "if", "(", "len", "(", "surfs", ")", "!=", "3", ")", ":", "return", "surfs", "surf_order", "=", "[", "FIFF", ".", "FIFFV_BEM_SURF_ID_HEAD", ",", "FIFF", ".", "FIFFV_BEM_SURF_ID_SKULL", ",", "FIFF", ".", "FIFFV_BEM_SURF_ID_BRAIN", "]", "ids", "=", "np", ".", "array", "(", "[", "surf", "[", "'id'", "]", "for", "surf", "in", "surfs", "]", ")", "if", "(", "set", "(", "ids", ")", "!=", "set", "(", "surf_order", ")", ")", ":", "raise", "RuntimeError", "(", "(", "'bad surface ids: %s'", "%", "ids", ")", ")", "order", "=", "[", "np", ".", "where", "(", "(", "ids", "==", "id_", ")", ")", "[", "0", "]", "[", "0", "]", "for", "id_", "in", "surf_order", "]", "surfs", "=", "[", "surfs", "[", "idx", "]", "for", "idx", "in", "order", "]", "return", "surfs" ]
reorder the surfaces .
train
false
51,468
def get_block_allocation(module, cp_driver, lb_driver, network_domain, block):
    nat_rules = list_nat_rules(module, cp_driver, network_domain)
    balancers = list_balancers(module, lb_driver)
    pub_ip_block = get_public_ip_block(module, cp_driver, network_domain, block.id, False)
    pub_ips = expand_ip_block(pub_ip_block)
    block_detailed = {'id': block.id, 'addresses': []}
    for ip in pub_ips:
        allocated = False
        nat_match = [nat_rule for nat_rule in nat_rules if (nat_rule.external_ip == ip)]
        lb_match = [balancer for balancer in balancers if (balancer.ip == ip)]
        if ((len(nat_match) > 0) or (len(lb_match) > 0)):
            allocated = True
        else:
            allocated = False
        block_detailed['addresses'].append({'address': ip, 'allocated': allocated})
    return block_detailed
[ "def", "get_block_allocation", "(", "module", ",", "cp_driver", ",", "lb_driver", ",", "network_domain", ",", "block", ")", ":", "nat_rules", "=", "list_nat_rules", "(", "module", ",", "cp_driver", ",", "network_domain", ")", "balancers", "=", "list_balancers", "(", "module", ",", "lb_driver", ")", "pub_ip_block", "=", "get_public_ip_block", "(", "module", ",", "cp_driver", ",", "network_domain", ",", "block", ".", "id", ",", "False", ")", "pub_ips", "=", "expand_ip_block", "(", "pub_ip_block", ")", "block_detailed", "=", "{", "'id'", ":", "block", ".", "id", ",", "'addresses'", ":", "[", "]", "}", "for", "ip", "in", "pub_ips", ":", "allocated", "=", "False", "nat_match", "=", "[", "nat_rule", "for", "nat_rule", "in", "nat_rules", "if", "(", "nat_rule", ".", "external_ip", "==", "ip", ")", "]", "lb_match", "=", "[", "balancer", "for", "balancer", "in", "balancers", "if", "(", "balancer", ".", "ip", "==", "ip", ")", "]", "if", "(", "(", "len", "(", "nat_match", ")", ">", "0", ")", "or", "(", "len", "(", "lb_match", ")", ">", "0", ")", ")", ":", "allocated", "=", "True", "else", ":", "allocated", "=", "False", "block_detailed", "[", "'addresses'", "]", ".", "append", "(", "{", "'address'", ":", "ip", ",", "'allocated'", ":", "allocated", "}", ")", "return", "block_detailed" ]
get public ip block allocation details .
train
false
51,469
def test_extract_array_wrong_mode():
    with pytest.raises(ValueError) as e:
        extract_array(np.arange(4), (2,), (0,), mode=u'full')
    assert (u"Valid modes are 'partial', 'trim', and 'strict'." == str(e.value))
[ "def", "test_extract_array_wrong_mode", "(", ")", ":", "with", "pytest", ".", "raises", "(", "ValueError", ")", "as", "e", ":", "extract_array", "(", "np", ".", "arange", "(", "4", ")", ",", "(", "2", ",", ")", ",", "(", "0", ",", ")", ",", "mode", "=", "u'full'", ")", "assert", "(", "u\"Valid modes are 'partial', 'trim', and 'strict'.\"", "==", "str", "(", "e", ".", "value", ")", ")" ]
call extract_array with non-existing mode .
train
false
51,470
def port_create_vlan(br, port, id, internal=False):
    interfaces = __salt__['network.interfaces']()
    if (not (0 <= id <= 4095)):
        return False
    elif (not bridge_exists(br)):
        return False
    elif ((not internal) and (port not in interfaces)):
        return False
    elif (port in port_list(br)):
        cmd = 'ovs-vsctl set port {0} tag={1}'.format(port, id)
        if internal:
            cmd += ' -- set interface {0} type=internal'.format(port)
        result = __salt__['cmd.run_all'](cmd)
        return _retcode_to_bool(result['retcode'])
    else:
        cmd = 'ovs-vsctl add-port {0} {1} tag={2}'.format(br, port, id)
        if internal:
            cmd += ' -- set interface {0} type=internal'.format(port)
        result = __salt__['cmd.run_all'](cmd)
        return _retcode_to_bool(result['retcode'])
[ "def", "port_create_vlan", "(", "br", ",", "port", ",", "id", ",", "internal", "=", "False", ")", ":", "interfaces", "=", "__salt__", "[", "'network.interfaces'", "]", "(", ")", "if", "(", "not", "(", "0", "<=", "id", "<=", "4095", ")", ")", ":", "return", "False", "elif", "(", "not", "bridge_exists", "(", "br", ")", ")", ":", "return", "False", "elif", "(", "(", "not", "internal", ")", "and", "(", "port", "not", "in", "interfaces", ")", ")", ":", "return", "False", "elif", "(", "port", "in", "port_list", "(", "br", ")", ")", ":", "cmd", "=", "'ovs-vsctl set port {0} tag={1}'", ".", "format", "(", "port", ",", "id", ")", "if", "internal", ":", "cmd", "+=", "' -- set interface {0} type=internal'", ".", "format", "(", "port", ")", "result", "=", "__salt__", "[", "'cmd.run_all'", "]", "(", "cmd", ")", "return", "_retcode_to_bool", "(", "result", "[", "'retcode'", "]", ")", "else", ":", "cmd", "=", "'ovs-vsctl add-port {0} {1} tag={2}'", ".", "format", "(", "br", ",", "port", ",", "id", ")", "if", "internal", ":", "cmd", "+=", "' -- set interface {0} type=internal'", ".", "format", "(", "port", ")", "result", "=", "__salt__", "[", "'cmd.run_all'", "]", "(", "cmd", ")", "return", "_retcode_to_bool", "(", "result", "[", "'retcode'", "]", ")" ]
isolate vm traffic using vlans .
train
true
51,471
def describe_field(field_definition):
    field_descriptor = FieldDescriptor()
    field_descriptor.name = field_definition.name
    field_descriptor.number = field_definition.number
    field_descriptor.variant = field_definition.variant
    if isinstance(field_definition, messages.EnumField):
        field_descriptor.type_name = field_definition.type.definition_name()
    if isinstance(field_definition, messages.MessageField):
        field_descriptor.type_name = field_definition.message_type.definition_name()
    if (field_definition.default is not None):
        field_descriptor.default_value = _DEFAULT_TO_STRING_MAP[type(field_definition)](field_definition.default)
    if field_definition.repeated:
        field_descriptor.label = FieldDescriptor.Label.REPEATED
    elif field_definition.required:
        field_descriptor.label = FieldDescriptor.Label.REQUIRED
    else:
        field_descriptor.label = FieldDescriptor.Label.OPTIONAL
    return field_descriptor
[ "def", "describe_field", "(", "field_definition", ")", ":", "field_descriptor", "=", "FieldDescriptor", "(", ")", "field_descriptor", ".", "name", "=", "field_definition", ".", "name", "field_descriptor", ".", "number", "=", "field_definition", ".", "number", "field_descriptor", ".", "variant", "=", "field_definition", ".", "variant", "if", "isinstance", "(", "field_definition", ",", "messages", ".", "EnumField", ")", ":", "field_descriptor", ".", "type_name", "=", "field_definition", ".", "type", ".", "definition_name", "(", ")", "if", "isinstance", "(", "field_definition", ",", "messages", ".", "MessageField", ")", ":", "field_descriptor", ".", "type_name", "=", "field_definition", ".", "message_type", ".", "definition_name", "(", ")", "if", "(", "field_definition", ".", "default", "is", "not", "None", ")", ":", "field_descriptor", ".", "default_value", "=", "_DEFAULT_TO_STRING_MAP", "[", "type", "(", "field_definition", ")", "]", "(", "field_definition", ".", "default", ")", "if", "field_definition", ".", "repeated", ":", "field_descriptor", ".", "label", "=", "FieldDescriptor", ".", "Label", ".", "REPEATED", "elif", "field_definition", ".", "required", ":", "field_descriptor", ".", "label", "=", "FieldDescriptor", ".", "Label", ".", "REQUIRED", "else", ":", "field_descriptor", ".", "label", "=", "FieldDescriptor", ".", "Label", ".", "OPTIONAL", "return", "field_descriptor" ]
build descriptor for field instance .
train
true
51,472
def decorate_methods(obj, decorator): return _DecoratedInstance(obj, decorator)
[ "def", "decorate_methods", "(", "obj", ",", "decorator", ")", ":", "return", "_DecoratedInstance", "(", "obj", ",", "decorator", ")" ]
return a wrapper around obj with decorator applied to all of its method calls .
train
false
51,473
def iso_time_string(val, show_tzinfo=False):
    if (not val):
        return ''
    if isinstance(val, six.string_types):
        dt = _parse_datetime_string(val)
    else:
        dt = val
    if (not isinstance(dt, datetime.datetime)):
        dt = datetime.datetime.fromordinal(dt.toordinal())
    has_tz = (dt.tzinfo is not None)
    if (show_tzinfo and has_tz):
        ret = ''.join(dt.isoformat().rsplit(':', 1))
    elif (show_tzinfo and (not has_tz)):
        ret = ('%s+0000' % dt.isoformat().split('.')[0])
    elif ((not show_tzinfo) and has_tz):
        ret = dt.isoformat()[:(-6)]
    elif ((not show_tzinfo) and (not has_tz)):
        ret = dt.isoformat().split('.')[0]
    return ret
[ "def", "iso_time_string", "(", "val", ",", "show_tzinfo", "=", "False", ")", ":", "if", "(", "not", "val", ")", ":", "return", "''", "if", "isinstance", "(", "val", ",", "six", ".", "string_types", ")", ":", "dt", "=", "_parse_datetime_string", "(", "val", ")", "else", ":", "dt", "=", "val", "if", "(", "not", "isinstance", "(", "dt", ",", "datetime", ".", "datetime", ")", ")", ":", "dt", "=", "datetime", ".", "datetime", ".", "fromordinal", "(", "dt", ".", "toordinal", "(", ")", ")", "has_tz", "=", "(", "dt", ".", "tzinfo", "is", "not", "None", ")", "if", "(", "show_tzinfo", "and", "has_tz", ")", ":", "ret", "=", "''", ".", "join", "(", "dt", ".", "isoformat", "(", ")", ".", "rsplit", "(", "':'", ",", "1", ")", ")", "elif", "(", "show_tzinfo", "and", "(", "not", "has_tz", ")", ")", ":", "ret", "=", "(", "'%s+0000'", "%", "dt", ".", "isoformat", "(", ")", ".", "split", "(", "'.'", ")", "[", "0", "]", ")", "elif", "(", "(", "not", "show_tzinfo", ")", "and", "has_tz", ")", ":", "ret", "=", "dt", ".", "isoformat", "(", ")", "[", ":", "(", "-", "6", ")", "]", "elif", "(", "(", "not", "show_tzinfo", ")", "and", "(", "not", "has_tz", ")", ")", ":", "ret", "=", "dt", ".", "isoformat", "(", ")", ".", "split", "(", "'.'", ")", "[", "0", "]", "return", "ret" ]
takes either a date .
train
true
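The four branches of the iso_time_string row above, traced on concrete datetimes. The expected outputs are worked out from the code itself, not from running the original library.

import datetime

aware = datetime.datetime(2016, 5, 1, 12, 0, 30, 123456,
                          tzinfo=datetime.timezone.utc)
naive = aware.replace(tzinfo=None)
# show_tzinfo=True,  tz-aware: drop the offset's colon -> '2016-05-01T12:00:30.123456+0000'
# show_tzinfo=True,  naive:    append '+0000'          -> '2016-05-01T12:00:30+0000'
# show_tzinfo=False, tz-aware: strip the offset        -> '2016-05-01T12:00:30.123456'
# show_tzinfo=False, naive:    drop microseconds       -> '2016-05-01T12:00:30'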
51,474
def get_username_for_svn_txn(repo_path, txn_id):
    return subproc_check_output(['svnlook', 'author', repo_path, '-t', txn_id]).strip()
[ "def", "get_username_for_svn_txn", "(", "repo_path", ",", "txn_id", ")", ":", "return", "subproc_check_output", "(", "[", "'svnlook'", ",", "'author'", ",", "repo_path", ",", "'-t'", ",", "txn_id", "]", ")", ".", "strip", "(", ")" ]
returns username for an svn transaction .
train
false
51,477
def parse_mappings(mapping_list, unique_values=True):
    mappings = {}
    for mapping in mapping_list:
        mapping = mapping.strip()
        if (not mapping):
            continue
        split_result = mapping.split(':')
        if (len(split_result) != 2):
            raise ValueError((_("Invalid mapping: '%s'") % mapping))
        key = split_result[0].strip()
        if (not key):
            raise ValueError((_("Missing key in mapping: '%s'") % mapping))
        value = split_result[1].strip()
        if (not value):
            raise ValueError((_("Missing value in mapping: '%s'") % mapping))
        if (key in mappings):
            raise ValueError((_("Key %(key)s in mapping: '%(mapping)s' not unique") % locals()))
        if (unique_values and (value in mappings.itervalues())):
            raise ValueError((_("Value %(value)s in mapping: '%(mapping)s' not unique") % locals()))
        mappings[key] = value
    return mappings
[ "def", "parse_mappings", "(", "mapping_list", ",", "unique_values", "=", "True", ")", ":", "mappings", "=", "{", "}", "for", "mapping", "in", "mapping_list", ":", "mapping", "=", "mapping", ".", "strip", "(", ")", "if", "(", "not", "mapping", ")", ":", "continue", "split_result", "=", "mapping", ".", "split", "(", "':'", ")", "if", "(", "len", "(", "split_result", ")", "!=", "2", ")", ":", "raise", "ValueError", "(", "(", "_", "(", "\"Invalid mapping: '%s'\"", ")", "%", "mapping", ")", ")", "key", "=", "split_result", "[", "0", "]", ".", "strip", "(", ")", "if", "(", "not", "key", ")", ":", "raise", "ValueError", "(", "(", "_", "(", "\"Missing key in mapping: '%s'\"", ")", "%", "mapping", ")", ")", "value", "=", "split_result", "[", "1", "]", ".", "strip", "(", ")", "if", "(", "not", "value", ")", ":", "raise", "ValueError", "(", "(", "_", "(", "\"Missing value in mapping: '%s'\"", ")", "%", "mapping", ")", ")", "if", "(", "key", "in", "mappings", ")", ":", "raise", "ValueError", "(", "(", "_", "(", "\"Key %(key)s in mapping: '%(mapping)s' not unique\"", ")", "%", "locals", "(", ")", ")", ")", "if", "(", "unique_values", "and", "(", "value", "in", "mappings", ".", "itervalues", "(", ")", ")", ")", ":", "raise", "ValueError", "(", "(", "_", "(", "\"Value %(value)s in mapping: '%(mapping)s' not unique\"", ")", "%", "locals", "(", ")", ")", ")", "mappings", "[", "key", "]", "=", "value", "return", "mappings" ]
parse a list of of mapping strings into a dictionary .
train
false
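Traced behavior of the parse_mappings row above on hypothetical inputs (the mapping strings here are made up for illustration; note mappings.itervalues() makes the snippet Python 2 only):

# parse_mappings(['physnet1:br-eth1', 'physnet2:br-eth2'])
#   -> {'physnet1': 'br-eth1', 'physnet2': 'br-eth2'}
# parse_mappings(['a:1', 'b:1'])        raises ValueError (value '1' not unique)
# parse_mappings(['a:1', 'b:1'], False) -> {'a': '1', 'b': '1'}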
51,478
def jsonable2thrift(jsonable, thrift_class):
    return _jsonable2thrift_helper(jsonable, TType.STRUCT, (thrift_class, thrift_class.thrift_spec), default=None, recursion_depth=0)
[ "def", "jsonable2thrift", "(", "jsonable", ",", "thrift_class", ")", ":", "return", "_jsonable2thrift_helper", "(", "jsonable", ",", "TType", ".", "STRUCT", ",", "(", "thrift_class", ",", "thrift_class", ".", "thrift_spec", ")", ",", "default", "=", "None", ",", "recursion_depth", "=", "0", ")" ]
converts a json-able x that represents a thrift struct into the struct .
train
false
51,479
def interpret_conf_limits(conf, name_prefix, info=None):
    conf_limits = []
    for conf_key in conf:
        if conf_key.startswith(name_prefix):
            cont_size = int(conf_key[len(name_prefix):])
            rate = float(conf[conf_key])
            conf_limits.append((cont_size, rate))
    conf_limits.sort()
    ratelimits = []
    conf_limits_info = list(conf_limits)
    while conf_limits:
        (cur_size, cur_rate) = conf_limits.pop(0)
        if conf_limits:
            (next_size, next_rate) = conf_limits[0]
            slope = ((float(next_rate) - float(cur_rate)) / (next_size - cur_size))

            def new_scope(cur_size, slope, cur_rate):
                return (lambda x: (((x - cur_size) * slope) + cur_rate))
            line_func = new_scope(cur_size, slope, cur_rate)
        else:
            line_func = (lambda x: cur_rate)
        ratelimits.append((cur_size, cur_rate, line_func))
    if (info is None):
        return ratelimits
    else:
        return (ratelimits, conf_limits_info)
[ "def", "interpret_conf_limits", "(", "conf", ",", "name_prefix", ",", "info", "=", "None", ")", ":", "conf_limits", "=", "[", "]", "for", "conf_key", "in", "conf", ":", "if", "conf_key", ".", "startswith", "(", "name_prefix", ")", ":", "cont_size", "=", "int", "(", "conf_key", "[", "len", "(", "name_prefix", ")", ":", "]", ")", "rate", "=", "float", "(", "conf", "[", "conf_key", "]", ")", "conf_limits", ".", "append", "(", "(", "cont_size", ",", "rate", ")", ")", "conf_limits", ".", "sort", "(", ")", "ratelimits", "=", "[", "]", "conf_limits_info", "=", "list", "(", "conf_limits", ")", "while", "conf_limits", ":", "(", "cur_size", ",", "cur_rate", ")", "=", "conf_limits", ".", "pop", "(", "0", ")", "if", "conf_limits", ":", "(", "next_size", ",", "next_rate", ")", "=", "conf_limits", "[", "0", "]", "slope", "=", "(", "(", "float", "(", "next_rate", ")", "-", "float", "(", "cur_rate", ")", ")", "/", "(", "next_size", "-", "cur_size", ")", ")", "def", "new_scope", "(", "cur_size", ",", "slope", ",", "cur_rate", ")", ":", "return", "(", "lambda", "x", ":", "(", "(", "(", "x", "-", "cur_size", ")", "*", "slope", ")", "+", "cur_rate", ")", ")", "line_func", "=", "new_scope", "(", "cur_size", ",", "slope", ",", "cur_rate", ")", "else", ":", "line_func", "=", "(", "lambda", "x", ":", "cur_rate", ")", "ratelimits", ".", "append", "(", "(", "cur_size", ",", "cur_rate", ",", "line_func", ")", ")", "if", "(", "info", "is", "None", ")", ":", "return", "ratelimits", "else", ":", "return", "(", "ratelimits", ",", "conf_limits_info", ")" ]
parses general parms for rate limits looking for things that start with the provided name_prefix within the provided conf and returns lists for both internal use and for /info .
train
false
51,480
def getLoopsInOrderOfArea(compareAreaFunction, loops):
    loopAreas = []
    for loop in loops:
        loopArea = LoopArea(loop)
        loopAreas.append(loopArea)
    loopAreas.sort(compareAreaFunction)
    loopsInDescendingOrderOfArea = []
    for loopArea in loopAreas:
        loopsInDescendingOrderOfArea.append(loopArea.loop)
    return loopsInDescendingOrderOfArea
[ "def", "getLoopsInOrderOfArea", "(", "compareAreaFunction", ",", "loops", ")", ":", "loopAreas", "=", "[", "]", "for", "loop", "in", "loops", ":", "loopArea", "=", "LoopArea", "(", "loop", ")", "loopAreas", ".", "append", "(", "loopArea", ")", "loopAreas", ".", "sort", "(", "compareAreaFunction", ")", "loopsInDescendingOrderOfArea", "=", "[", "]", "for", "loopArea", "in", "loopAreas", ":", "loopsInDescendingOrderOfArea", ".", "append", "(", "loopArea", ".", "loop", ")", "return", "loopsInDescendingOrderOfArea" ]
get the loops in the order of area according to the compare function .
train
false
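The loopAreas.sort(compareAreaFunction) call in the row above is Python 2 style; under Python 3 the same comparator would be wrapped with functools.cmp_to_key, as in this self-contained sketch:

from functools import cmp_to_key

def compare_descending(a, b):
    # Comparator returning <0 / 0 / >0, like the snippet expects.
    return (b > a) - (b < a)

areas = [3.0, 1.0, 2.0]
areas.sort(key=cmp_to_key(compare_descending))
print(areas)  # [3.0, 2.0, 1.0]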
51,481
def image_mime_type(data):
    kind = _imghdr_what_wrapper(data)
    if (kind in ['gif', 'jpeg', 'png', 'tiff', 'bmp']):
        return 'image/{0}'.format(kind)
    elif (kind == 'pgm'):
        return 'image/x-portable-graymap'
    elif (kind == 'pbm'):
        return 'image/x-portable-bitmap'
    elif (kind == 'ppm'):
        return 'image/x-portable-pixmap'
    elif (kind == 'xbm'):
        return 'image/x-xbitmap'
    else:
        return 'image/x-{0}'.format(kind)
[ "def", "image_mime_type", "(", "data", ")", ":", "kind", "=", "_imghdr_what_wrapper", "(", "data", ")", "if", "(", "kind", "in", "[", "'gif'", ",", "'jpeg'", ",", "'png'", ",", "'tiff'", ",", "'bmp'", "]", ")", ":", "return", "'image/{0}'", ".", "format", "(", "kind", ")", "elif", "(", "kind", "==", "'pgm'", ")", ":", "return", "'image/x-portable-graymap'", "elif", "(", "kind", "==", "'pbm'", ")", ":", "return", "'image/x-portable-bitmap'", "elif", "(", "kind", "==", "'ppm'", ")", ":", "return", "'image/x-portable-pixmap'", "elif", "(", "kind", "==", "'xbm'", ")", ":", "return", "'image/x-xbitmap'", "else", ":", "return", "'image/x-{0}'", ".", "format", "(", "kind", ")" ]
return the mime type of the image data .
train
true
51,482
def _classSupportingImplementer(*interfaces):
    def check(obj):
        '''
        Do nothing at all.
        '''
    return check
[ "def", "_classSupportingImplementer", "(", "*", "interfaces", ")", ":", "def", "check", "(", "obj", ")", ":", "return", "check" ]
a fake implementation of l{zope .
train
false
51,483
def remove_event_handlers(name):
    _events.pop(name, None)
[ "def", "remove_event_handlers", "(", "name", ")", ":", "_events", ".", "pop", "(", "name", ",", "None", ")" ]
removes all handlers for given event name .
train
false
51,485
def user_details(strategy, details, user=None, *args, **kwargs):
    if user:
        changed = False
        protected = (('username', 'id', 'pk', 'email') +
                     tuple(strategy.setting('PROTECTED_USER_FIELDS', [])))
        for (name, value) in details.items():
            if (not hasattr(user, name)):
                continue
            current_value = getattr(user, name, None)
            if ((not current_value) or (name not in protected)):
                changed |= (current_value != value)
                setattr(user, name, value)
        if changed:
            strategy.storage.user.changed(user)
[ "def", "user_details", "(", "strategy", ",", "details", ",", "user", "=", "None", ",", "*", "args", ",", "**", "kwargs", ")", ":", "if", "user", ":", "changed", "=", "False", "protected", "=", "(", "(", "'username'", ",", "'id'", ",", "'pk'", ",", "'email'", ")", "+", "tuple", "(", "strategy", ".", "setting", "(", "'PROTECTED_USER_FIELDS'", ",", "[", "]", ")", ")", ")", "for", "(", "name", ",", "value", ")", "in", "details", ".", "items", "(", ")", ":", "if", "(", "not", "hasattr", "(", "user", ",", "name", ")", ")", ":", "continue", "current_value", "=", "getattr", "(", "user", ",", "name", ",", "None", ")", "if", "(", "(", "not", "current_value", ")", "or", "(", "name", "not", "in", "protected", ")", ")", ":", "changed", "|=", "(", "current_value", "!=", "value", ")", "setattr", "(", "user", ",", "name", ",", "value", ")", "if", "changed", ":", "strategy", ".", "storage", ".", "user", ".", "changed", "(", "user", ")" ]
update user details using data from provider .
train
false
51,486
def sort_torrent_fulltext(data_set):
    norm_num_seeders = normalize_data_dict(data_set, 'num_seeders', 'infohash')
    norm_neg_votes = normalize_data_dict(data_set, 'neg_votes', 'infohash')
    norm_subscriptions = normalize_data_dict(data_set, 'subscriptions', 'infohash')
    for data in data_set:
        score = (((0.8 * norm_num_seeders[data.get('infohash')]) -
                  (0.1 * norm_neg_votes[data.get('infohash')])) +
                 (0.1 * norm_subscriptions[data.get('infohash')]))
        data.get('relevance_score')[(-1)] = score
    data_set.sort(key=(lambda d: d.get('relevance_score')), reverse=True)
[ "def", "sort_torrent_fulltext", "(", "data_set", ")", ":", "norm_num_seeders", "=", "normalize_data_dict", "(", "data_set", ",", "'num_seeders'", ",", "'infohash'", ")", "norm_neg_votes", "=", "normalize_data_dict", "(", "data_set", ",", "'neg_votes'", ",", "'infohash'", ")", "norm_subscriptions", "=", "normalize_data_dict", "(", "data_set", ",", "'subscriptions'", ",", "'infohash'", ")", "for", "data", "in", "data_set", ":", "score", "=", "(", "(", "(", "0.8", "*", "norm_num_seeders", "[", "data", ".", "get", "(", "'infohash'", ")", "]", ")", "-", "(", "0.1", "*", "norm_neg_votes", "[", "data", ".", "get", "(", "'infohash'", ")", "]", ")", ")", "+", "(", "0.1", "*", "norm_subscriptions", "[", "data", ".", "get", "(", "'infohash'", ")", "]", ")", ")", "data", ".", "get", "(", "'relevance_score'", ")", "[", "(", "-", "1", ")", "]", "=", "score", "data_set", ".", "sort", "(", "key", "=", "(", "lambda", "d", ":", "d", ".", "get", "(", "'relevance_score'", ")", ")", ",", "reverse", "=", "True", ")" ]
sorts a given list of torrents using fulltext sorting .
train
false
51,487
def run_parallel_map_providers_query(data, queue=None):
    reinit_crypto()
    cloud = Cloud(data['opts'])
    try:
        with context.func_globals_inject(cloud.clouds[data['fun']],
                                         __active_provider_name__=':'.join([data['alias'], data['driver']])):
            return (data['alias'], data['driver'],
                    salt.utils.simple_types_filter(cloud.clouds[data['fun']]()))
    except Exception as err:
        log.debug("Failed to execute '{0}()' while querying for running nodes: {1}".format(data['fun'], err),
                  exc_info_on_loglevel=logging.DEBUG)
        return (data['alias'], data['driver'], ())
[ "def", "run_parallel_map_providers_query", "(", "data", ",", "queue", "=", "None", ")", ":", "reinit_crypto", "(", ")", "cloud", "=", "Cloud", "(", "data", "[", "'opts'", "]", ")", "try", ":", "with", "context", ".", "func_globals_inject", "(", "cloud", ".", "clouds", "[", "data", "[", "'fun'", "]", "]", ",", "__active_provider_name__", "=", "':'", ".", "join", "(", "[", "data", "[", "'alias'", "]", ",", "data", "[", "'driver'", "]", "]", ")", ")", ":", "return", "(", "data", "[", "'alias'", "]", ",", "data", "[", "'driver'", "]", ",", "salt", ".", "utils", ".", "simple_types_filter", "(", "cloud", ".", "clouds", "[", "data", "[", "'fun'", "]", "]", "(", ")", ")", ")", "except", "Exception", "as", "err", ":", "log", ".", "debug", "(", "\"Failed to execute '{0}()' while querying for running nodes: {1}\"", ".", "format", "(", "data", "[", "'fun'", "]", ",", "err", ")", ",", "exc_info_on_loglevel", "=", "logging", ".", "DEBUG", ")", "return", "(", "data", "[", "'alias'", "]", ",", "data", "[", "'driver'", "]", ",", "(", ")", ")" ]
this function will be called from another process when building the providers map .
train
true
51,488
def _xfs_estimate_output(out):
    spc = re.compile('\\s+')
    data = {}
    for line in [l for l in out.split('\n') if l.strip()][1:]:
        (directory, bsize, blocks, megabytes, logsize) = spc.sub(' ', line).split(' ')
        data[directory] = {'block _size': bsize, 'blocks': blocks,
                           'megabytes': megabytes, 'logsize': logsize}
    return data
[ "def", "_xfs_estimate_output", "(", "out", ")", ":", "spc", "=", "re", ".", "compile", "(", "'\\\\s+'", ")", "data", "=", "{", "}", "for", "line", "in", "[", "l", "for", "l", "in", "out", ".", "split", "(", "'\\n'", ")", "if", "l", ".", "strip", "(", ")", "]", "[", "1", ":", "]", ":", "(", "directory", ",", "bsize", ",", "blocks", ",", "megabytes", ",", "logsize", ")", "=", "spc", ".", "sub", "(", "' '", ",", "line", ")", ".", "split", "(", "' '", ")", "data", "[", "directory", "]", "=", "{", "'block _size'", ":", "bsize", ",", "'blocks'", ":", "blocks", ",", "'megabytes'", ":", "megabytes", ",", "'logsize'", ":", "logsize", "}", "return", "data" ]
parse xfs_estimate output .
train
true
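A trace of the _xfs_estimate_output row above on a hypothetical two-line xfs_estimate output (the real tool's format is assumed here; the first line is skipped as a header, and each data line must split into exactly five fields):

out = 'directory   bsize  blocks  megabytes  logsize\n/mnt/data 4096 262144 1024MB 10MB'
# _xfs_estimate_output(out) ->
# {'/mnt/data': {'block _size': '4096', 'blocks': '262144',
#                'megabytes': '1024MB', 'logsize': '10MB'}}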
51,489
def SCORE(fragment):
    return None
[ "def", "SCORE", "(", "fragment", ")", ":", "return", "None" ]
sorts higher scored passages first .
train
false
51,490
def grain_pcre(tgt, delimiter=DEFAULT_TARGET_DELIM):
    matcher = salt.minion.Matcher({'grains': __grains__}, __salt__)
    try:
        return matcher.grain_pcre_match(tgt, delimiter=delimiter)
    except Exception as exc:
        log.exception(exc)
        return False
[ "def", "grain_pcre", "(", "tgt", ",", "delimiter", "=", "DEFAULT_TARGET_DELIM", ")", ":", "matcher", "=", "salt", ".", "minion", ".", "Matcher", "(", "{", "'grains'", ":", "__grains__", "}", ",", "__salt__", ")", "try", ":", "return", "matcher", ".", "grain_pcre_match", "(", "tgt", ",", "delimiter", "=", "delimiter", ")", "except", "Exception", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "return", "False" ]
return true if the minion matches the given grain_pcre target .
train
false
51,491
@register.simple_tag(takes_context=True)
def escape_explicit(context):
    return escape('Hello {0}!'.format(context['name']))
[ "@", "register", ".", "simple_tag", "(", "takes_context", "=", "True", ")", "def", "escape_explicit", "(", "context", ")", ":", "return", "escape", "(", "'Hello {0}!'", ".", "format", "(", "context", "[", "'name'", "]", ")", ")" ]
a tag that uses escape explicitly .
train
false
51,492
def scrape_options_and_index_by_dest(*parsers_and_groups):
    all_options = {}
    job_option_lists = [g.option_list for g in parsers_and_groups]
    for option in itertools.chain(*job_option_lists):
        other_options = all_options.get(option.dest, [])
        other_options.append(option)
        all_options[option.dest] = other_options
    return all_options
[ "def", "scrape_options_and_index_by_dest", "(", "*", "parsers_and_groups", ")", ":", "all_options", "=", "{", "}", "job_option_lists", "=", "[", "g", ".", "option_list", "for", "g", "in", "parsers_and_groups", "]", "for", "option", "in", "itertools", ".", "chain", "(", "*", "job_option_lists", ")", ":", "other_options", "=", "all_options", ".", "get", "(", "option", ".", "dest", ",", "[", "]", ")", "other_options", ".", "append", "(", "option", ")", "all_options", "[", "option", ".", "dest", "]", "=", "other_options", "return", "all_options" ]
scrapes optparse options from :py:class:optionparser and :py:class:optiongroup objects and builds a dictionary of dest_var: [option1 .
train
false
51,494
def test_issue590(en_vocab):
    doc = get_doc(en_vocab, [u'n', u'=', u'1', u';', u'a', u':', u'5', u'%'])
    matcher = Matcher(en_vocab)
    matcher.add_entity(u'ab', acceptor=None, on_match=None)
    matcher.add_pattern(u'ab', [{IS_ALPHA: True}, {ORTH: u':'}, {LIKE_NUM: True}, {ORTH: u'%'}], label=u'a')
    matcher.add_pattern(u'ab', [{IS_ALPHA: True}, {ORTH: u'='}, {LIKE_NUM: True}], label=u'b')
    matches = matcher(doc)
    assert (len(matches) == 2)
[ "def", "test_issue590", "(", "en_vocab", ")", ":", "doc", "=", "get_doc", "(", "en_vocab", ",", "[", "u'n'", ",", "u'='", ",", "u'1'", ",", "u';'", ",", "u'a'", ",", "u':'", ",", "u'5'", ",", "u'%'", "]", ")", "matcher", "=", "Matcher", "(", "en_vocab", ")", "matcher", ".", "add_entity", "(", "u'ab'", ",", "acceptor", "=", "None", ",", "on_match", "=", "None", ")", "matcher", ".", "add_pattern", "(", "u'ab'", ",", "[", "{", "IS_ALPHA", ":", "True", "}", ",", "{", "ORTH", ":", "u':'", "}", ",", "{", "LIKE_NUM", ":", "True", "}", ",", "{", "ORTH", ":", "u'%'", "}", "]", ",", "label", "=", "u'a'", ")", "matcher", ".", "add_pattern", "(", "u'ab'", ",", "[", "{", "IS_ALPHA", ":", "True", "}", ",", "{", "ORTH", ":", "u'='", "}", ",", "{", "LIKE_NUM", ":", "True", "}", "]", ",", "label", "=", "u'b'", ")", "matches", "=", "matcher", "(", "doc", ")", "assert", "(", "len", "(", "matches", ")", "==", "2", ")" ]
test overlapping matches .
train
false
51,495
def get_tests_modules(basepath=this_dir_path, gui=True, packages=None):
    py_ext = '.py'
    for (dirpath, dirnames, filenames) in os.walk(basepath):
        for dirname in list(dirnames):
            if (dirname[0] == '.'):
                dirnames.remove(dirname)
        if (is_package(dirpath) and filenames):
            pkg_name = dirpath[(len(basepath) + len(os.sep)):].replace('/', '.')
            if (packages and (pkg_name not in packages)):
                continue
            filenames = filter((lambda x: (x.startswith('test_') and x.endswith(py_ext))), filenames)
            for name in filenames:
                try:
                    (yield importlib.import_module(('.%s' % name[:(- len(py_ext))]), pkg_name))
                except test.test_support.ResourceDenied:
                    if gui:
                        raise
[ "def", "get_tests_modules", "(", "basepath", "=", "this_dir_path", ",", "gui", "=", "True", ",", "packages", "=", "None", ")", ":", "py_ext", "=", "'.py'", "for", "(", "dirpath", ",", "dirnames", ",", "filenames", ")", "in", "os", ".", "walk", "(", "basepath", ")", ":", "for", "dirname", "in", "list", "(", "dirnames", ")", ":", "if", "(", "dirname", "[", "0", "]", "==", "'.'", ")", ":", "dirnames", ".", "remove", "(", "dirname", ")", "if", "(", "is_package", "(", "dirpath", ")", "and", "filenames", ")", ":", "pkg_name", "=", "dirpath", "[", "(", "len", "(", "basepath", ")", "+", "len", "(", "os", ".", "sep", ")", ")", ":", "]", ".", "replace", "(", "'/'", ",", "'.'", ")", "if", "(", "packages", "and", "(", "pkg_name", "not", "in", "packages", ")", ")", ":", "continue", "filenames", "=", "filter", "(", "(", "lambda", "x", ":", "(", "x", ".", "startswith", "(", "'test_'", ")", "and", "x", ".", "endswith", "(", "py_ext", ")", ")", ")", ",", "filenames", ")", "for", "name", "in", "filenames", ":", "try", ":", "(", "yield", "importlib", ".", "import_module", "(", "(", "'.%s'", "%", "name", "[", ":", "(", "-", "len", "(", "py_ext", ")", ")", "]", ")", ",", "pkg_name", ")", ")", "except", "test", ".", "test_support", ".", "ResourceDenied", ":", "if", "gui", ":", "raise" ]
this will import and yield modules whose names start with test_ and are inside packages found in the path starting at basepath .
train
false
51,498
def write_tfrs(fname, tfr, overwrite=False):
    out = []
    if (not isinstance(tfr, (list, tuple))):
        tfr = [tfr]
    for (ii, tfr_) in enumerate(tfr):
        comment = (ii if (tfr_.comment is None) else tfr_.comment)
        out.append(_prepare_write_tfr(tfr_, condition=comment))
    write_hdf5(fname, out, overwrite=overwrite, title='mnepython')
[ "def", "write_tfrs", "(", "fname", ",", "tfr", ",", "overwrite", "=", "False", ")", ":", "out", "=", "[", "]", "if", "(", "not", "isinstance", "(", "tfr", ",", "(", "list", ",", "tuple", ")", ")", ")", ":", "tfr", "=", "[", "tfr", "]", "for", "(", "ii", ",", "tfr_", ")", "in", "enumerate", "(", "tfr", ")", ":", "comment", "=", "(", "ii", "if", "(", "tfr_", ".", "comment", "is", "None", ")", "else", "tfr_", ".", "comment", ")", "out", ".", "append", "(", "_prepare_write_tfr", "(", "tfr_", ",", "condition", "=", "comment", ")", ")", "write_hdf5", "(", "fname", ",", "out", ",", "overwrite", "=", "overwrite", ",", "title", "=", "'mnepython'", ")" ]
write a tfr dataset to hdf5 .
train
false
51,500
def leslie(f, s):
    f = np.atleast_1d(f)
    s = np.atleast_1d(s)
    if (f.ndim != 1):
        raise ValueError('Incorrect shape for f. f must be one-dimensional')
    if (s.ndim != 1):
        raise ValueError('Incorrect shape for s. s must be one-dimensional')
    if (f.size != (s.size + 1)):
        raise ValueError('Incorrect lengths for f and s. The length of s must be one less than the length of f.')
    if (s.size == 0):
        raise ValueError('The length of s must be at least 1.')
    tmp = (f[0] + s[0])
    n = f.size
    a = np.zeros((n, n), dtype=tmp.dtype)
    a[0] = f
    a[(list(range(1, n)), list(range(0, (n - 1))))] = s
    return a
[ "def", "leslie", "(", "f", ",", "s", ")", ":", "f", "=", "np", ".", "atleast_1d", "(", "f", ")", "s", "=", "np", ".", "atleast_1d", "(", "s", ")", "if", "(", "f", ".", "ndim", "!=", "1", ")", ":", "raise", "ValueError", "(", "'Incorrect shape for f. f must be one-dimensional'", ")", "if", "(", "s", ".", "ndim", "!=", "1", ")", ":", "raise", "ValueError", "(", "'Incorrect shape for s. s must be one-dimensional'", ")", "if", "(", "f", ".", "size", "!=", "(", "s", ".", "size", "+", "1", ")", ")", ":", "raise", "ValueError", "(", "'Incorrect lengths for f and s. The length of s must be one less than the length of f.'", ")", "if", "(", "s", ".", "size", "==", "0", ")", ":", "raise", "ValueError", "(", "'The length of s must be at least 1.'", ")", "tmp", "=", "(", "f", "[", "0", "]", "+", "s", "[", "0", "]", ")", "n", "=", "f", ".", "size", "a", "=", "np", ".", "zeros", "(", "(", "n", ",", "n", ")", ",", "dtype", "=", "tmp", ".", "dtype", ")", "a", "[", "0", "]", "=", "f", "a", "[", "(", "list", "(", "range", "(", "1", ",", "n", ")", ")", ",", "list", "(", "range", "(", "0", ",", "(", "n", "-", "1", ")", ")", ")", ")", "]", "=", "s", "return", "a" ]
create a leslie matrix .
train
false
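The leslie row above is the same construction as scipy.linalg.leslie: fecundities f fill the first row and survival coefficients s the first subdiagonal.

from scipy.linalg import leslie

print(leslie([0.1, 2.0, 1.0, 0.1], [0.2, 0.8, 0.7]))
# [[ 0.1  2.   1.   0.1]
#  [ 0.2  0.   0.   0. ]
#  [ 0.   0.8  0.   0. ]
#  [ 0.   0.   0.7  0. ]]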
51,501
def sources_add(source_uri, ruby=None, runas=None, gem_bin=None):
    return _gem(['sources', '--add', source_uri], ruby, gem_bin=gem_bin, runas=runas)
[ "def", "sources_add", "(", "source_uri", ",", "ruby", "=", "None", ",", "runas", "=", "None", ",", "gem_bin", "=", "None", ")", ":", "return", "_gem", "(", "[", "'sources'", ",", "'--add'", ",", "source_uri", "]", ",", "ruby", ",", "gem_bin", "=", "gem_bin", ",", "runas", "=", "runas", ")" ]
add a gem source .
train
true
51,503
def ofctl_arg_supported(cmd, **kwargs):
    br_name = common_utils.get_rand_device_name(prefix='br-test-')
    with ovs_lib.OVSBridge(br_name) as test_br:
        full_args = ['ovs-ofctl', cmd, test_br.br_name,
                     ovs_lib._build_flow_expr_str(kwargs, cmd.split('-')[0])]
        try:
            agent_utils.execute(full_args, run_as_root=True)
        except RuntimeError as e:
            LOG.debug('Exception while checking supported feature via command %s. Exception: %s', full_args, e)
            return False
        except Exception:
            LOG.exception(_LE('Unexpected exception while checking supported feature via command: %s'), full_args)
            return False
        else:
            return True
[ "def", "ofctl_arg_supported", "(", "cmd", ",", "**", "kwargs", ")", ":", "br_name", "=", "common_utils", ".", "get_rand_device_name", "(", "prefix", "=", "'br-test-'", ")", "with", "ovs_lib", ".", "OVSBridge", "(", "br_name", ")", "as", "test_br", ":", "full_args", "=", "[", "'ovs-ofctl'", ",", "cmd", ",", "test_br", ".", "br_name", ",", "ovs_lib", ".", "_build_flow_expr_str", "(", "kwargs", ",", "cmd", ".", "split", "(", "'-'", ")", "[", "0", "]", ")", "]", "try", ":", "agent_utils", ".", "execute", "(", "full_args", ",", "run_as_root", "=", "True", ")", "except", "RuntimeError", "as", "e", ":", "LOG", ".", "debug", "(", "'Exception while checking supported feature via command %s. Exception: %s'", ",", "full_args", ",", "e", ")", "return", "False", "except", "Exception", ":", "LOG", ".", "exception", "(", "_LE", "(", "'Unexpected exception while checking supported feature via command: %s'", ")", ",", "full_args", ")", "return", "False", "else", ":", "return", "True" ]
verify if ovs-ofctl binary supports cmd with **kwargs .
train
false
51,504
def test_vb_scenarios():
    x = ClassWithOverloadDefaultIndexer()
    for i in range(3):
        x.MyProperty[i] = (3 * i)
        AreEqual(x.MyProperty[i], (3 * i))
    for i in range(2, 4):
        for j in range(6, 9):
            a = (i + j)
    x = StructWithDefaultIndexer()
    x.Init()
    x = ClassWithNotExistingMember()
    AreEqual(x.MyProperty[1], 0)
    x.MyProperty[1] = 10
    AreEqual(x.MyProperty[1], 10)
    for t in [StructImplementsIDefaultIndexer, ClassImplementsIDefaultIndexer]:
        x = t()
    x = DerivedClass()
    x.MyProperty[2] = 4
    AreEqual(x.MyProperty[2], 4)
[ "def", "test_vb_scenarios", "(", ")", ":", "x", "=", "ClassWithOverloadDefaultIndexer", "(", ")", "for", "i", "in", "range", "(", "3", ")", ":", "x", ".", "MyProperty", "[", "i", "]", "=", "(", "3", "*", "i", ")", "AreEqual", "(", "x", ".", "MyProperty", "[", "i", "]", ",", "(", "3", "*", "i", ")", ")", "for", "i", "in", "range", "(", "2", ",", "4", ")", ":", "for", "j", "in", "range", "(", "6", ",", "9", ")", ":", "a", "=", "(", "i", "+", "j", ")", "x", "=", "StructWithDefaultIndexer", "(", ")", "x", ".", "Init", "(", ")", "x", "=", "ClassWithNotExistingMember", "(", ")", "AreEqual", "(", "x", ".", "MyProperty", "[", "1", "]", ",", "0", ")", "x", ".", "MyProperty", "[", "1", "]", "=", "10", "AreEqual", "(", "x", ".", "MyProperty", "[", "1", "]", ",", "10", ")", "for", "t", "in", "[", "StructImplementsIDefaultIndexer", ",", "ClassImplementsIDefaultIndexer", "]", ":", "x", "=", "t", "(", ")", "x", "=", "DerivedClass", "(", ")", "x", ".", "MyProperty", "[", "2", "]", "=", "4", "AreEqual", "(", "x", ".", "MyProperty", "[", "2", "]", ",", "4", ")" ]
vb supported scenarios .
train
false
51,505
def _text_match(vobject_item, filter_, child_name, attrib_name=None): match = next(filter_.itertext()).lower() children = getattr(vobject_item, ('%s_list' % child_name), []) if attrib_name: condition = any(((match in attrib.lower()) for child in children for attrib in child.params.get(attrib_name, []))) else: condition = any(((match in child.value.lower()) for child in children)) if (filter_.get('negate-condition') == 'yes'): return (not condition) else: return condition
[ "def", "_text_match", "(", "vobject_item", ",", "filter_", ",", "child_name", ",", "attrib_name", "=", "None", ")", ":", "match", "=", "next", "(", "filter_", ".", "itertext", "(", ")", ")", ".", "lower", "(", ")", "children", "=", "getattr", "(", "vobject_item", ",", "(", "'%s_list'", "%", "child_name", ")", ",", "[", "]", ")", "if", "attrib_name", ":", "condition", "=", "any", "(", "(", "(", "match", "in", "attrib", ".", "lower", "(", ")", ")", "for", "child", "in", "children", "for", "attrib", "in", "child", ".", "params", ".", "get", "(", "attrib_name", ",", "[", "]", ")", ")", ")", "else", ":", "condition", "=", "any", "(", "(", "(", "match", "in", "child", ".", "value", ".", "lower", "(", ")", ")", "for", "child", "in", "children", ")", ")", "if", "(", "filter_", ".", "get", "(", "'negate-condition'", ")", "==", "'yes'", ")", ":", "return", "(", "not", "condition", ")", "else", ":", "return", "condition" ]
check whether the item matches the text-match filter_ .
train
false
51,506
def write_degapped_fasta_to_file(seqs, tmp_dir=get_qiime_temp_dir()): (fd, tmp_filename) = tempfile.mkstemp(dir=tmp_dir, prefix='degapped_', suffix='.fasta') close(fd) with open(tmp_filename, 'w') as fh: for seq in degap_fasta_aln(seqs): fh.write(seq.to_fasta()) return tmp_filename
[ "def", "write_degapped_fasta_to_file", "(", "seqs", ",", "tmp_dir", "=", "get_qiime_temp_dir", "(", ")", ")", ":", "(", "fd", ",", "tmp_filename", ")", "=", "tempfile", ".", "mkstemp", "(", "dir", "=", "tmp_dir", ",", "prefix", "=", "'degapped_'", ",", "suffix", "=", "'.fasta'", ")", "close", "(", "fd", ")", "with", "open", "(", "tmp_filename", ",", "'w'", ")", "as", "fh", ":", "for", "seq", "in", "degap_fasta_aln", "(", "seqs", ")", ":", "fh", ".", "write", "(", "seq", ".", "to_fasta", "(", ")", ")", "return", "tmp_filename" ]
write degapped seqs to temp fasta file .
train
false
51,507
def test_addition(): assert ((1 + 1) == 2)
[ "def", "test_addition", "(", ")", ":", "assert", "(", "(", "1", "+", "1", ")", "==", "2", ")" ]
test some advanced maths .
train
false
51,508
def test_bernoulli_vector_default_output_layer(): mlp = MLP(layers=[Linear(layer_name='h', dim=5, irange=0.01, max_col_norm=0.01)]) conditional = BernoulliVector(mlp=mlp, name='conditional') vae = DummyVAE() conditional.set_vae(vae) input_space = VectorSpace(dim=5) conditional.initialize_parameters(input_space=input_space, ndim=5)
[ "def", "test_bernoulli_vector_default_output_layer", "(", ")", ":", "mlp", "=", "MLP", "(", "layers", "=", "[", "Linear", "(", "layer_name", "=", "'h'", ",", "dim", "=", "5", ",", "irange", "=", "0.01", ",", "max_col_norm", "=", "0.01", ")", "]", ")", "conditional", "=", "BernoulliVector", "(", "mlp", "=", "mlp", ",", "name", "=", "'conditional'", ")", "vae", "=", "DummyVAE", "(", ")", "conditional", ".", "set_vae", "(", "vae", ")", "input_space", "=", "VectorSpace", "(", "dim", "=", "5", ")", "conditional", ".", "initialize_parameters", "(", "input_space", "=", "input_space", ",", "ndim", "=", "5", ")" ]
bernoullivectors default output layer is compatible with its required output space .
train
false
51,509
def group_albums(session): def group(item): return ((item.albumartist or item.artist), item.album) task = None while True: task = (yield task) if task.skip: continue tasks = [] for (_, items) in itertools.groupby(task.items, group): tasks.append(ImportTask(items=list(items))) tasks.append(SentinelImportTask(task.toppath, task.paths)) task = pipeline.multiple(tasks)
[ "def", "group_albums", "(", "session", ")", ":", "def", "group", "(", "item", ")", ":", "return", "(", "(", "item", ".", "albumartist", "or", "item", ".", "artist", ")", ",", "item", ".", "album", ")", "task", "=", "None", "while", "True", ":", "task", "=", "(", "yield", "task", ")", "if", "task", ".", "skip", ":", "continue", "tasks", "=", "[", "]", "for", "(", "_", ",", "items", ")", "in", "itertools", ".", "groupby", "(", "task", ".", "items", ",", "group", ")", ":", "tasks", ".", "append", "(", "ImportTask", "(", "items", "=", "list", "(", "items", ")", ")", ")", "tasks", ".", "append", "(", "SentinelImportTask", "(", "task", ".", "toppath", ",", "task", ".", "paths", ")", ")", "task", "=", "pipeline", ".", "multiple", "(", "tasks", ")" ]
group the items of a task by albumartist and album name and create a new task for each album .
train
false
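The snippet above is a beets pipeline stage written as a coroutine: it yields its previous output and receives the next task through send(). A minimal self-contained sketch of that pattern, with invented names:
def doubler_stage():
    task = None
    while True:
        task = (yield task)  # emit previous result, receive next input
        task = task * 2

stage = doubler_stage()
next(stage)           # prime the coroutine up to the first yield
print(stage.send(3))  # 6
print(stage.send(5))  # 10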
51,511
def simple_middleware(get_response): def middleware(request): return get_response(request) return middleware
[ "def", "simple_middleware", "(", "get_response", ")", ":", "def", "middleware", "(", "request", ")", ":", "return", "get_response", "(", "request", ")", "return", "middleware" ]
used to test django 1 .
train
false
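Because the factory only wraps get_response, it can be exercised without Django at all; the strings below are stand-ins for request/response objects:
mw = simple_middleware(lambda request: 'response for ' + request)
print(mw('some-request'))  # response for some-request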
51,514
def mean_lon_of_perigee(jd): T = ((jd - jd1950) / 36525.0) p = (0.012, 1.65, 6190.67, 1015489.951) return (np.polyval(p, T) / 3600.0)
[ "def", "mean_lon_of_perigee", "(", "jd", ")", ":", "T", "=", "(", "(", "jd", "-", "jd1950", ")", "/", "36525.0", ")", "p", "=", "(", "0.012", ",", "1.65", ",", "6190.67", ",", "1015489.951", ")", "return", "(", "np", ".", "polyval", "(", "p", ",", "T", ")", "/", "3600.0", ")" ]
computes the mean longitude of perigee of the earths orbit at the requested julian date .
train
false
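A sketch of evaluating the polynomial at J2000; jd1950 is not defined in the snippet, so the B1950 Julian date below is an assumption:
import numpy as np
jd1950 = 2433282.4235  # assumed: Julian date of epoch B1950
T = (2451545.0 - jd1950) / 36525.0  # Julian centuries since B1950, at J2000
p = (0.012, 1.65, 6190.67, 1015489.951)
print(np.polyval(p, T) / 3600.0)  # arcseconds -> degrees, roughly 282.9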
51,516
def import_(source, image, api_response=False): (repo_name, repo_tag) = _get_repo_tag(image) path = __salt__['container_resource.cache_file'](source) time_started = time.time() response = _image_wrapper('import_image', path, repository=repo_name, tag=repo_tag) ret = {'Time_Elapsed': (time.time() - time_started)} _clear_context() if (not response): raise CommandExecutionError('Import failed for {0}, no response returned from Docker API'.format(source)) elif api_response: ret['API_Response'] = response errors = [] for item in response: try: item_type = next(iter(item)) except StopIteration: continue if (item_type == 'status'): _import_status(ret, item, repo_name, repo_tag) elif (item_type == 'errorDetail'): _error_detail(errors, item) if ('Id' not in ret): msg = 'Import failed for {0}'.format(source) if errors: msg += '. Error(s) follow:\n\n{0}'.format('\n\n'.join(errors)) raise CommandExecutionError(msg) return ret
[ "def", "import_", "(", "source", ",", "image", ",", "api_response", "=", "False", ")", ":", "(", "repo_name", ",", "repo_tag", ")", "=", "_get_repo_tag", "(", "image", ")", "path", "=", "__salt__", "[", "'container_resource.cache_file'", "]", "(", "source", ")", "time_started", "=", "time", ".", "time", "(", ")", "response", "=", "_image_wrapper", "(", "'import_image'", ",", "path", ",", "repository", "=", "repo_name", ",", "tag", "=", "repo_tag", ")", "ret", "=", "{", "'Time_Elapsed'", ":", "(", "time", ".", "time", "(", ")", "-", "time_started", ")", "}", "_clear_context", "(", ")", "if", "(", "not", "response", ")", ":", "raise", "CommandExecutionError", "(", "'Import failed for {0}, no response returned from Docker API'", ".", "format", "(", "source", ")", ")", "elif", "api_response", ":", "ret", "[", "'API_Response'", "]", "=", "response", "errors", "=", "[", "]", "for", "item", "in", "response", ":", "try", ":", "item_type", "=", "next", "(", "iter", "(", "item", ")", ")", "except", "StopIteration", ":", "continue", "if", "(", "item_type", "==", "'status'", ")", ":", "_import_status", "(", "ret", ",", "item", ",", "repo_name", ",", "repo_tag", ")", "elif", "(", "item_type", "==", "'errorDetail'", ")", ":", "_error_detail", "(", "errors", ",", "item", ")", "if", "(", "'Id'", "not", "in", "ret", ")", ":", "msg", "=", "'Import failed for {0}'", ".", "format", "(", "source", ")", "if", "errors", ":", "msg", "+=", "'. Error(s) follow:\\n\\n{0}'", ".", "format", "(", "'\\n\\n'", ".", "join", "(", "errors", ")", ")", "raise", "CommandExecutionError", "(", "msg", ")", "return", "ret" ]
imports content from a local tarball or a url as a new docker image . source : the content to import .
train
false
51,517
@ensure_csrf_cookie @cache_if_anonymous() def render_press_release(request, slug): template = (slug.lower().replace('-', '_') + '.html') try: resp = render_to_response(('static_templates/press_releases/' + template), {}) except TopLevelLookupException: raise Http404 else: return resp
[ "@", "ensure_csrf_cookie", "@", "cache_if_anonymous", "(", ")", "def", "render_press_release", "(", "request", ",", "slug", ")", ":", "template", "=", "(", "slug", ".", "lower", "(", ")", ".", "replace", "(", "'-'", ",", "'_'", ")", "+", "'.html'", ")", "try", ":", "resp", "=", "render_to_response", "(", "(", "'static_templates/press_releases/'", "+", "template", ")", ",", "{", "}", ")", "except", "TopLevelLookupException", ":", "raise", "Http404", "else", ":", "return", "resp" ]
render a press release given a slug .
train
false
51,518
def pwEncrypt(pw, masterPW=None): if (masterPW is None): if (MasterPassword is None): __getMasterPassword() if (MasterPassword is None): return (u'', False) masterPW = pwDecode(MasterPassword) from .py3PBKDF2 import hashPasswordTuple (digestname, iterations, salt, hash) = hashPasswordTuple(masterPW) key = hash[:32] from .py3AES import encryptData try: cipher = encryptData(key, pw.encode(u'utf-8')) except ValueError: return (u'', False) return ((CryptoMarker + Delimiter.join([digestname, str(iterations), base64.b64encode(salt).decode(u'ascii'), base64.b64encode(cipher).decode(u'ascii')])), True)
[ "def", "pwEncrypt", "(", "pw", ",", "masterPW", "=", "None", ")", ":", "if", "(", "masterPW", "is", "None", ")", ":", "if", "(", "MasterPassword", "is", "None", ")", ":", "__getMasterPassword", "(", ")", "if", "(", "MasterPassword", "is", "None", ")", ":", "return", "(", "u''", ",", "False", ")", "masterPW", "=", "pwDecode", "(", "MasterPassword", ")", "from", ".", "py3PBKDF2", "import", "hashPasswordTuple", "(", "digestname", ",", "iterations", ",", "salt", ",", "hash", ")", "=", "hashPasswordTuple", "(", "masterPW", ")", "key", "=", "hash", "[", ":", "32", "]", "from", ".", "py3AES", "import", "encryptData", "try", ":", "cipher", "=", "encryptData", "(", "key", ",", "pw", ".", "encode", "(", "u'utf-8'", ")", ")", "except", "ValueError", ":", "return", "(", "u''", ",", "False", ")", "return", "(", "(", "CryptoMarker", "+", "Delimiter", ".", "join", "(", "[", "digestname", ",", "str", "(", "iterations", ")", ",", "base64", ".", "b64encode", "(", "salt", ")", ".", "decode", "(", "u'ascii'", ")", ",", "base64", ".", "b64encode", "(", "cipher", ")", ".", "decode", "(", "u'ascii'", ")", "]", ")", ")", ",", "True", ")" ]
module function to encrypt a password .
train
false
51,522
def rubygems(ruby, version, runas=None): return _rvm_do(ruby, ['rubygems', version], runas=runas)
[ "def", "rubygems", "(", "ruby", ",", "version", ",", "runas", "=", "None", ")", ":", "return", "_rvm_do", "(", "ruby", ",", "[", "'rubygems'", ",", "version", "]", ",", "runas", "=", "runas", ")" ]
installs a specific rubygems version in the given ruby . ruby : the ruby for which to install rubygems . version : the version of rubygems to install .
train
false
51,524
@requires_pandas def test_to_data_frame(): raw = read_raw_fif(test_fif_fname, preload=True) (_, times) = raw[0, :10] df = raw.to_data_frame() assert_true((df.columns == raw.ch_names).all()) assert_array_equal(np.round((times * 1000.0)), df.index.values[:10]) df = raw.to_data_frame(index=None) assert_true(('time' in df.index.names)) assert_array_equal(df.values[:, 0], (raw._data[0] * 10000000000000.0)) assert_array_equal(df.values[:, 2], (raw._data[2] * 1000000000000000.0))
[ "@", "requires_pandas", "def", "test_to_data_frame", "(", ")", ":", "raw", "=", "read_raw_fif", "(", "test_fif_fname", ",", "preload", "=", "True", ")", "(", "_", ",", "times", ")", "=", "raw", "[", "0", ",", ":", "10", "]", "df", "=", "raw", ".", "to_data_frame", "(", ")", "assert_true", "(", "(", "df", ".", "columns", "==", "raw", ".", "ch_names", ")", ".", "all", "(", ")", ")", "assert_array_equal", "(", "np", ".", "round", "(", "(", "times", "*", "1000.0", ")", ")", ",", "df", ".", "index", ".", "values", "[", ":", "10", "]", ")", "df", "=", "raw", ".", "to_data_frame", "(", "index", "=", "None", ")", "assert_true", "(", "(", "'time'", "in", "df", ".", "index", ".", "names", ")", ")", "assert_array_equal", "(", "df", ".", "values", "[", ":", ",", "0", "]", ",", "(", "raw", ".", "_data", "[", "0", "]", "*", "10000000000000.0", ")", ")", "assert_array_equal", "(", "df", ".", "values", "[", ":", ",", "2", "]", ",", "(", "raw", ".", "_data", "[", "2", "]", "*", "1000000000000000.0", ")", ")" ]
test raw pandas exporter .
train
false
51,525
@sync_performer def perform_upload_s3_key(dispatcher, intent): s3 = boto.connect_s3() bucket = s3.get_bucket(intent.target_bucket) headers = {} if (intent.content_type is not None): headers['Content-Type'] = intent.content_type with intent.file.open() as source_file: key = bucket.new_key(intent.target_key) key.set_contents_from_file(source_file, headers=headers) key.make_public()
[ "@", "sync_performer", "def", "perform_upload_s3_key", "(", "dispatcher", ",", "intent", ")", ":", "s3", "=", "boto", ".", "connect_s3", "(", ")", "bucket", "=", "s3", ".", "get_bucket", "(", "intent", ".", "target_bucket", ")", "headers", "=", "{", "}", "if", "(", "intent", ".", "content_type", "is", "not", "None", ")", ":", "headers", "[", "'Content-Type'", "]", "=", "intent", ".", "content_type", "with", "intent", ".", "file", ".", "open", "(", ")", "as", "source_file", ":", "key", "=", "bucket", ".", "new_key", "(", "intent", ".", "target_key", ")", "key", ".", "set_contents_from_file", "(", "source_file", ",", "headers", "=", "headers", ")", "key", ".", "make_public", "(", ")" ]
see :class:uploadtos3 .
train
false
51,526
def dbshell(): from django.db import runshell runshell()
[ "def", "dbshell", "(", ")", ":", "from", "django", ".", "db", "import", "runshell", "runshell", "(", ")" ]
runs the command-line client for the current database_engine .
train
false
51,527
def _CheckLanguage(language): if (language is None): return None if (not isinstance(language, basestring)): raise TypeError(('language must be a basestring, got %s' % language.__class__.__name__)) if (not re.match(_LANGUAGE_RE, language)): raise ValueError(('invalid language %s. Languages should be two letters.' % language)) return language
[ "def", "_CheckLanguage", "(", "language", ")", ":", "if", "(", "language", "is", "None", ")", ":", "return", "None", "if", "(", "not", "isinstance", "(", "language", ",", "basestring", ")", ")", ":", "raise", "TypeError", "(", "(", "'language must be a basestring, got %s'", "%", "language", ".", "__class__", ".", "__name__", ")", ")", "if", "(", "not", "re", ".", "match", "(", "_LANGUAGE_RE", ",", "language", ")", ")", ":", "raise", "ValueError", "(", "(", "'invalid language %s. Languages should be two letters.'", "%", "language", ")", ")", "return", "language" ]
checks language is none or a string that matches _language_re .
train
false
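_LANGUAGE_RE is not shown in the record; the pattern below is only a guess consistent with the 'two letters' error message, to make the check concrete:
import re
_LANGUAGE_RE = r'[a-z]{2}$'  # assumed pattern, not the actual one
print(bool(re.match(_LANGUAGE_RE, 'en')))   # True
print(bool(re.match(_LANGUAGE_RE, 'eng')))  # False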
51,528
def getNewRepository(): return ExportRepository()
[ "def", "getNewRepository", "(", ")", ":", "return", "ExportRepository", "(", ")" ]
get the repository constructor .
train
false
51,531
def generate_api_key(): seed = os.urandom(256) hashed_seed = hashlib.sha256(seed).hexdigest() return base64.b64encode(hashed_seed, random.choice(['rA', 'aZ', 'gQ', 'hH', 'hG', 'aR', 'DD'])).rstrip('==')
[ "def", "generate_api_key", "(", ")", ":", "seed", "=", "os", ".", "urandom", "(", "256", ")", "hashed_seed", "=", "hashlib", ".", "sha256", "(", "seed", ")", ".", "hexdigest", "(", ")", "return", "base64", ".", "b64encode", "(", "hashed_seed", ",", "random", ".", "choice", "(", "[", "'rA'", ",", "'aZ'", ",", "'gQ'", ",", "'hH'", ",", "'hG'", ",", "'aR'", ",", "'DD'", "]", ")", ")", ".", "rstrip", "(", "'=='", ")" ]
generates a sufficiently large and random key .
train
false
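The record is Python 2 (altchars passed as str); below is a rough Python 3 equivalent, with the same caveat that rstrip only removes the trailing padding:
import base64, hashlib, os, random
seed = os.urandom(256)
hashed = hashlib.sha256(seed).hexdigest().encode('ascii')  # 64 hex chars
alt = random.choice([b'rA', b'aZ', b'gQ', b'hH', b'hG', b'aR', b'DD'])
key = base64.b64encode(hashed, alt).rstrip(b'=').decode('ascii')
print(len(key))  # 86 once the two '=' padding chars are stripped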
51,532
def get_related_topics(section_div): related_topics = [] try: seealso_div = section_div.find('div', attrs={'class': 'admonition seealso'}) seealso_p = seealso_div.find('p', attrs={'class': 'last'}) related_topics_a = seealso_p.find_all('a') for topic in related_topics_a: related_topics.append((('[[' + topic['title']) + ']]')) except AttributeError: pass return '\\\\n'.join(related_topics)
[ "def", "get_related_topics", "(", "section_div", ")", ":", "related_topics", "=", "[", "]", "try", ":", "seealso_div", "=", "section_div", ".", "find", "(", "'div'", ",", "attrs", "=", "{", "'class'", ":", "'admonition seealso'", "}", ")", "seealso_p", "=", "seealso_div", ".", "find", "(", "'p'", ",", "attrs", "=", "{", "'class'", ":", "'last'", "}", ")", "related_topics_a", "=", "seealso_p", ".", "find_all", "(", "'a'", ")", "for", "topic", "in", "related_topics_a", ":", "related_topics", ".", "append", "(", "(", "(", "'[['", "+", "topic", "[", "'title'", "]", ")", "+", "']]'", ")", ")", "except", "AttributeError", ":", "pass", "return", "'\\\\\\\\n'", ".", "join", "(", "related_topics", ")" ]
get topics related to the current topic .
train
false
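A minimal check with an invented HTML fragment shaped like the markup the parser expects; bs4 supports the find/find_all calls used above:
from bs4 import BeautifulSoup
html = ('<div><div class="admonition seealso">'
        '<p class="last"><a title="regex">see also</a></p></div></div>')
section_div = BeautifulSoup(html, 'html.parser').div
print(get_related_topics(section_div))  # [[regex]]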
51,533
def get_api_servers(context): api_servers = [] api_servers_info = [] if (CONF.glance_api_servers is None): info = CONF.glance_catalog_info try: (service_type, service_name, endpoint_type) = info.split(':') except ValueError: raise exception.InvalidConfigurationValue(_("Failed to parse the configuration option 'glance_catalog_info', must be in the form <service_type>:<service_name>:<endpoint_type>")) for entry in context.service_catalog: if (entry.get('type') == service_type): api_servers.append(entry.get('endpoints')[0].get(endpoint_type)) else: for api_server in CONF.glance_api_servers: api_servers.append(api_server) for api_server in api_servers: if ('//' not in api_server): api_server = ('http://' + api_server) url = urllib.parse.urlparse(api_server) netloc = (url.netloc + url.path) use_ssl = (url.scheme == 'https') api_servers_info.append((netloc, use_ssl)) random.shuffle(api_servers_info) return itertools.cycle(api_servers_info)
[ "def", "get_api_servers", "(", "context", ")", ":", "api_servers", "=", "[", "]", "api_servers_info", "=", "[", "]", "if", "(", "CONF", ".", "glance_api_servers", "is", "None", ")", ":", "info", "=", "CONF", ".", "glance_catalog_info", "try", ":", "(", "service_type", ",", "service_name", ",", "endpoint_type", ")", "=", "info", ".", "split", "(", "':'", ")", "except", "ValueError", ":", "raise", "exception", ".", "InvalidConfigurationValue", "(", "_", "(", "\"Failed to parse the configuration option 'glance_catalog_info', must be in the form <service_type>:<service_name>:<endpoint_type>\"", ")", ")", "for", "entry", "in", "context", ".", "service_catalog", ":", "if", "(", "entry", ".", "get", "(", "'type'", ")", "==", "service_type", ")", ":", "api_servers", ".", "append", "(", "entry", ".", "get", "(", "'endpoints'", ")", "[", "0", "]", ".", "get", "(", "endpoint_type", ")", ")", "else", ":", "for", "api_server", "in", "CONF", ".", "glance_api_servers", ":", "api_servers", ".", "append", "(", "api_server", ")", "for", "api_server", "in", "api_servers", ":", "if", "(", "'//'", "not", "in", "api_server", ")", ":", "api_server", "=", "(", "'http://'", "+", "api_server", ")", "url", "=", "urllib", ".", "parse", ".", "urlparse", "(", "api_server", ")", "netloc", "=", "(", "url", ".", "netloc", "+", "url", ".", "path", ")", "use_ssl", "=", "(", "url", ".", "scheme", "==", "'https'", ")", "api_servers_info", ".", "append", "(", "(", "netloc", ",", "use_ssl", ")", ")", "random", ".", "shuffle", "(", "api_servers_info", ")", "return", "itertools", ".", "cycle", "(", "api_servers_info", ")" ]
return iterable over shuffled api servers .
train
false
51,534
def submit_ratings(**kwargs): query = mbxml.make_rating_request(**kwargs) return _do_mb_post('rating', query)
[ "def", "submit_ratings", "(", "**", "kwargs", ")", ":", "query", "=", "mbxml", ".", "make_rating_request", "(", "**", "kwargs", ")", "return", "_do_mb_post", "(", "'rating'", ",", "query", ")" ]
submit user ratings .
train
false
51,536
def video(): _custom_view('video') response.title = T('Video Tutorials') return dict()
[ "def", "video", "(", ")", ":", "_custom_view", "(", "'video'", ")", "response", ".", "title", "=", "T", "(", "'Video Tutorials'", ")", "return", "dict", "(", ")" ]
custom view .
train
false
51,538
def pants_setup_py(name, description, additional_classifiers=None, **kwargs): if (not name.startswith(u'pantsbuild.pants')): raise ValueError(u"Pants distribution package names must start with 'pantsbuild.pants', given {}".format(name)) standard_classifiers = [u'Intended Audience :: Developers', u'License :: OSI Approved :: Apache Software License', u'Operating System :: MacOS :: MacOS X', u'Operating System :: POSIX :: Linux', u'Programming Language :: Python', u'Topic :: Software Development :: Build Tools'] classifiers = OrderedSet((standard_classifiers + (additional_classifiers or []))) notes = PantsReleases.global_instance().notes_for_version(PANTS_SEMVER) return PythonArtifact(name=name, version=VERSION, description=description, long_description=(_read_contents(u'src/python/pants/ABOUT.rst') + notes), url=u'https://github.com/pantsbuild/pants', license=u'Apache License, Version 2.0', zip_safe=True, classifiers=list(classifiers), **kwargs)
[ "def", "pants_setup_py", "(", "name", ",", "description", ",", "additional_classifiers", "=", "None", ",", "**", "kwargs", ")", ":", "if", "(", "not", "name", ".", "startswith", "(", "u'pantsbuild.pants'", ")", ")", ":", "raise", "ValueError", "(", "u\"Pants distribution package names must start with 'pantsbuild.pants', given {}\"", ".", "format", "(", "name", ")", ")", "standard_classifiers", "=", "[", "u'Intended Audience :: Developers'", ",", "u'License :: OSI Approved :: Apache Software License'", ",", "u'Operating System :: MacOS :: MacOS X'", ",", "u'Operating System :: POSIX :: Linux'", ",", "u'Programming Language :: Python'", ",", "u'Topic :: Software Development :: Build Tools'", "]", "classifiers", "=", "OrderedSet", "(", "(", "standard_classifiers", "+", "(", "additional_classifiers", "or", "[", "]", ")", ")", ")", "notes", "=", "PantsReleases", ".", "global_instance", "(", ")", ".", "notes_for_version", "(", "PANTS_SEMVER", ")", "return", "PythonArtifact", "(", "name", "=", "name", ",", "version", "=", "VERSION", ",", "description", "=", "description", ",", "long_description", "=", "(", "_read_contents", "(", "u'src/python/pants/ABOUT.rst'", ")", "+", "notes", ")", ",", "url", "=", "u'https://github.com/pantsbuild/pants'", ",", "license", "=", "u'Apache License, Version 2.0'", ",", "zip_safe", "=", "True", ",", "classifiers", "=", "list", "(", "classifiers", ")", ",", "**", "kwargs", ")" ]
creates the setup_py for a pants artifact .
train
false
51,540
def mysql_old_passwd(password, uppercase=True): (a, b, c) = (1345345333, 7, 305419889) for d in password: if ((d == ' ') or (d == '\t')): continue e = ord(d) a ^= ((((a & 63) + b) * e) + (a << 8)) c += ((c << 8) ^ a) b += e retVal = ('%08lx%08lx' % ((a & ((1 << 31) - 1)), (c & ((1 << 31) - 1)))) return (retVal.upper() if uppercase else retVal.lower())
[ "def", "mysql_old_passwd", "(", "password", ",", "uppercase", "=", "True", ")", ":", "(", "a", ",", "b", ",", "c", ")", "=", "(", "1345345333", ",", "7", ",", "305419889", ")", "for", "d", "in", "password", ":", "if", "(", "(", "d", "==", "' '", ")", "or", "(", "d", "==", "'\\t'", ")", ")", ":", "continue", "e", "=", "ord", "(", "d", ")", "a", "^=", "(", "(", "(", "(", "a", "&", "63", ")", "+", "b", ")", "*", "e", ")", "+", "(", "a", "<<", "8", ")", ")", "c", "+=", "(", "(", "c", "<<", "8", ")", "^", "a", ")", "b", "+=", "e", "retVal", "=", "(", "'%08lx%08lx'", "%", "(", "(", "a", "&", "(", "(", "1", "<<", "31", ")", "-", "1", ")", ")", ",", "(", "c", "&", "(", "(", "1", "<<", "31", ")", "-", "1", ")", ")", ")", ")", "return", "(", "retVal", ".", "upper", "(", ")", "if", "uppercase", "else", "retVal", ".", "lower", "(", ")", ")" ]
reference(s): URL URL .
train
false
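A quick check against the doctest vector that ships with sqlmap; treat the expected digest as an assumption if you are matching a different MySQL build:
print(mysql_old_passwd('testpass', uppercase=True))
# expected: 7DCDA0D57290B453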
51,541
def _html(url, rheaders=None): headers = {'User-Agent': HEADER_USER_AGENT} if rheaders: headers.update(rheaders) request = urllib2.Request(url, headers=headers) return BeautifulSoup(_get(request), convertEntities=BeautifulSoup.HTML_ENTITIES)
[ "def", "_html", "(", "url", ",", "rheaders", "=", "None", ")", ":", "headers", "=", "{", "'User-Agent'", ":", "HEADER_USER_AGENT", "}", "if", "rheaders", ":", "headers", ".", "update", "(", "rheaders", ")", "request", "=", "urllib2", ".", "Request", "(", "url", ",", "headers", "=", "headers", ")", "return", "BeautifulSoup", "(", "_get", "(", "request", ")", ",", "convertEntities", "=", "BeautifulSoup", ".", "HTML_ENTITIES", ")" ]
downloads the resource at the given url and parses via beautifulsoup .
train
false
51,542
def getIncrementFromRank(rank): rankZone = int(math.floor((rank / 3))) rankModulo = (rank % 3) powerOfTen = pow(10, rankZone) moduloMultipliers = (1, 2, 5) return float((powerOfTen * moduloMultipliers[rankModulo]))
[ "def", "getIncrementFromRank", "(", "rank", ")", ":", "rankZone", "=", "int", "(", "math", ".", "floor", "(", "(", "rank", "/", "3", ")", ")", ")", "rankModulo", "=", "(", "rank", "%", "3", ")", "powerOfTen", "=", "pow", "(", "10", ",", "rankZone", ")", "moduloMultipliers", "=", "(", "1", ",", "2", ",", "5", ")", "return", "float", "(", "(", "powerOfTen", "*", "moduloMultipliers", "[", "rankModulo", "]", ")", ")" ]
get the increment from the rank which is 0 at 1 and increases by three every power of ten .
train
false
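Assuming the function above is in scope (it needs only math), the first few ranks walk the 1-2-5 decade sequence:
import math  # required by getIncrementFromRank
for rank in range(7):
    print(rank, getIncrementFromRank(rank))
# 0 1.0, 1 2.0, 2 5.0, 3 10.0, 4 20.0, 5 50.0, 6 100.0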
51,544
def translate_flow(source): global TO_REGISTER TO_REGISTER = [] return (do_block(('{%s}' % source), 0)[0], TO_REGISTER)
[ "def", "translate_flow", "(", "source", ")", ":", "global", "TO_REGISTER", "TO_REGISTER", "=", "[", "]", "return", "(", "do_block", "(", "(", "'{%s}'", "%", "source", ")", ",", "0", ")", "[", "0", "]", ",", "TO_REGISTER", ")" ]
source cant have arrays .
train
false
51,546
def _get_request_ip(request, default=''): if ((request is not None) and hasattr(request, 'META')): return get_ip(request) else: return default
[ "def", "_get_request_ip", "(", "request", ",", "default", "=", "''", ")", ":", "if", "(", "(", "request", "is", "not", "None", ")", "and", "hasattr", "(", "request", ",", "'META'", ")", ")", ":", "return", "get_ip", "(", "request", ")", "else", ":", "return", "default" ]
helper method to get ip from a requests meta dict .
train
false
51,547
def dinitz(G, s, t, capacity='capacity', residual=None, value_only=False, cutoff=None): R = dinitz_impl(G, s, t, capacity, residual, cutoff) R.graph['algorithm'] = 'dinitz' return R
[ "def", "dinitz", "(", "G", ",", "s", ",", "t", ",", "capacity", "=", "'capacity'", ",", "residual", "=", "None", ",", "value_only", "=", "False", ",", "cutoff", "=", "None", ")", ":", "R", "=", "dinitz_impl", "(", "G", ",", "s", ",", "t", ",", "capacity", ",", "residual", ",", "cutoff", ")", "R", ".", "graph", "[", "'algorithm'", "]", "=", "'dinitz'", "return", "R" ]
find a maximum single-commodity flow using dinitz algorithm .
train
false
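networkx exposes this under networkx.algorithms.flow; a tiny max-flow check on a two-edge path:
import networkx as nx
from networkx.algorithms.flow import dinitz
G = nx.DiGraph()
G.add_edge('s', 'a', capacity=3.0)
G.add_edge('a', 't', capacity=2.0)
R = dinitz(G, 's', 't')
print(R.graph['flow_value'])  # 2.0, limited by the a->t edge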
51,548
def set_ca_path(cacert_path): if cacert_path: __context__['ca.contextual_cert_base_path'] = cacert_path return cert_base_path()
[ "def", "set_ca_path", "(", "cacert_path", ")", ":", "if", "cacert_path", ":", "__context__", "[", "'ca.contextual_cert_base_path'", "]", "=", "cacert_path", "return", "cert_base_path", "(", ")" ]
if wanted , store the given cacert_path in context and return the cert base path .
train
false
51,549
def FindPerformanceAttributesByName(instanceName, object=None, counter=None, format=win32pdh.PDH_FMT_LONG, machine=None, bRefresh=0): if (object is None): object = find_pdh_counter_localized_name('Process', machine) if (counter is None): counter = find_pdh_counter_localized_name('ID Process', machine) if bRefresh: win32pdh.EnumObjects(None, machine, 0, 1) instanceName = instanceName.lower() (items, instances) = win32pdh.EnumObjectItems(None, None, object, (-1)) instance_dict = {} for instance in instances: try: instance_dict[instance] = (instance_dict[instance] + 1) except KeyError: instance_dict[instance] = 0 ret = [] for (instance, max_instances) in instance_dict.iteritems(): for inum in xrange((max_instances + 1)): if (instance.lower() == instanceName): ret.append(GetPerformanceAttributes(object, counter, instance, inum, format, machine)) return ret
[ "def", "FindPerformanceAttributesByName", "(", "instanceName", ",", "object", "=", "None", ",", "counter", "=", "None", ",", "format", "=", "win32pdh", ".", "PDH_FMT_LONG", ",", "machine", "=", "None", ",", "bRefresh", "=", "0", ")", ":", "if", "(", "object", "is", "None", ")", ":", "object", "=", "find_pdh_counter_localized_name", "(", "'Process'", ",", "machine", ")", "if", "(", "counter", "is", "None", ")", ":", "counter", "=", "find_pdh_counter_localized_name", "(", "'ID Process'", ",", "machine", ")", "if", "bRefresh", ":", "win32pdh", ".", "EnumObjects", "(", "None", ",", "machine", ",", "0", ",", "1", ")", "instanceName", "=", "instanceName", ".", "lower", "(", ")", "(", "items", ",", "instances", ")", "=", "win32pdh", ".", "EnumObjectItems", "(", "None", ",", "None", ",", "object", ",", "(", "-", "1", ")", ")", "instance_dict", "=", "{", "}", "for", "instance", "in", "instances", ":", "try", ":", "instance_dict", "[", "instance", "]", "=", "(", "instance_dict", "[", "instance", "]", "+", "1", ")", "except", "KeyError", ":", "instance_dict", "[", "instance", "]", "=", "0", "ret", "=", "[", "]", "for", "(", "instance", ",", "max_instances", ")", "in", "instance_dict", ".", "iteritems", "(", ")", ":", "for", "inum", "in", "xrange", "(", "(", "max_instances", "+", "1", ")", ")", ":", "if", "(", "instance", ".", "lower", "(", ")", "==", "instanceName", ")", ":", "ret", ".", "append", "(", "GetPerformanceAttributes", "(", "object", ",", "counter", ",", "instance", ",", "inum", ",", "format", ",", "machine", ")", ")", "return", "ret" ]
find performance attributes by instance name .
train
false
51,552
def _determineWindowsError(): return getattr(exceptions, 'WindowsError', FakeWindowsError)
[ "def", "_determineWindowsError", "(", ")", ":", "return", "getattr", "(", "exceptions", ",", "'WindowsError'", ",", "FakeWindowsError", ")" ]
determine which windowserror name to export .
train
false
51,553
@register.assignment_tag() def get_newest_pep_pages(limit=5): latest_peps = Page.objects.filter(path__startswith='dev/peps/', is_published=True).order_by('-created')[:limit] return latest_peps
[ "@", "register", ".", "assignment_tag", "(", ")", "def", "get_newest_pep_pages", "(", "limit", "=", "5", ")", ":", "latest_peps", "=", "Page", ".", "objects", ".", "filter", "(", "path__startswith", "=", "'dev/peps/'", ",", "is_published", "=", "True", ")", ".", "order_by", "(", "'-created'", ")", "[", ":", "limit", "]", "return", "latest_peps" ]
retrieve the most recently added peps .
train
false
51,556
def _scryptBlockMix(blocks, len_blocks): x = blocks[(-1)] core = _raw_salsa20_lib.Salsa20_8_core result = [create_string_buffer(64) for _ in range(len(blocks))] for i in xrange(len(blocks)): core(x, blocks[i], result[i]) x = result[i] return [result[(i + j)] for j in xrange(2) for i in xrange(0, len_blocks, 2)]
[ "def", "_scryptBlockMix", "(", "blocks", ",", "len_blocks", ")", ":", "x", "=", "blocks", "[", "(", "-", "1", ")", "]", "core", "=", "_raw_salsa20_lib", ".", "Salsa20_8_core", "result", "=", "[", "create_string_buffer", "(", "64", ")", "for", "_", "in", "range", "(", "len", "(", "blocks", ")", ")", "]", "for", "i", "in", "xrange", "(", "len", "(", "blocks", ")", ")", ":", "core", "(", "x", ",", "blocks", "[", "i", "]", ",", "result", "[", "i", "]", ")", "x", "=", "result", "[", "i", "]", "return", "[", "result", "[", "(", "i", "+", "j", ")", "]", "for", "j", "in", "xrange", "(", "2", ")", "for", "i", "in", "xrange", "(", "0", ",", "len_blocks", ",", "2", ")", "]" ]
hash function for romix .
train
false
51,558
def get_territory_from_address(address): if (not address): return if isinstance(address, basestring): address = frappe.get_doc(u'Address', address) territory = None for fieldname in (u'city', u'state', u'country'): territory = frappe.db.get_value(u'Territory', address.get(fieldname)) if territory: break return territory
[ "def", "get_territory_from_address", "(", "address", ")", ":", "if", "(", "not", "address", ")", ":", "return", "if", "isinstance", "(", "address", ",", "basestring", ")", ":", "address", "=", "frappe", ".", "get_doc", "(", "u'Address'", ",", "address", ")", "territory", "=", "None", "for", "fieldname", "in", "(", "u'city'", ",", "u'state'", ",", "u'country'", ")", ":", "territory", "=", "frappe", ".", "db", ".", "get_value", "(", "u'Territory'", ",", "address", ".", "get", "(", "fieldname", ")", ")", "if", "territory", ":", "break", "return", "territory" ]
tries to match city , state or country of the address to an existing territory .
train
false
51,559
def _make_ptr_array(xs): return PointerArray([x.data.ptr for x in xs], xs)
[ "def", "_make_ptr_array", "(", "xs", ")", ":", "return", "PointerArray", "(", "[", "x", ".", "data", ".", "ptr", "for", "x", "in", "xs", "]", ",", "xs", ")" ]
make an array of pointers to the data of the given ndarrays .
train
false
51,560
def _check_dsmc_output(output, check_attrs, exact_match=True): parsed_attrs = {} for line in output.split('\n'): (key, sep, val) = line.partition(':') if ((sep is not None) and (key is not None) and (len(val.strip()) > 0)): parsed_attrs[key] = val.strip() for (ckey, cval) in check_attrs.items(): if (ckey not in parsed_attrs): return False elif (exact_match and (parsed_attrs[ckey] != cval)): return False elif ((not exact_match) and (int(parsed_attrs[ckey]) < int(cval))): return False return True
[ "def", "_check_dsmc_output", "(", "output", ",", "check_attrs", ",", "exact_match", "=", "True", ")", ":", "parsed_attrs", "=", "{", "}", "for", "line", "in", "output", ".", "split", "(", "'\\n'", ")", ":", "(", "key", ",", "sep", ",", "val", ")", "=", "line", ".", "partition", "(", "':'", ")", "if", "(", "(", "sep", "is", "not", "None", ")", "and", "(", "key", "is", "not", "None", ")", "and", "(", "len", "(", "val", ".", "strip", "(", ")", ")", ">", "0", ")", ")", ":", "parsed_attrs", "[", "key", "]", "=", "val", ".", "strip", "(", ")", "for", "(", "ckey", ",", "cval", ")", "in", "check_attrs", ".", "items", "(", ")", ":", "if", "(", "ckey", "not", "in", "parsed_attrs", ")", ":", "return", "False", "elif", "(", "exact_match", "and", "(", "parsed_attrs", "[", "ckey", "]", "!=", "cval", ")", ")", ":", "return", "False", "elif", "(", "(", "not", "exact_match", ")", "and", "(", "int", "(", "parsed_attrs", "[", "ckey", "]", ")", "<", "int", "(", "cval", ")", ")", ")", ":", "return", "False", "return", "True" ]
check dsmc command line utility output .
train
false
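Exercising the parser directly with an invented two-line sample of dsmc output; no dsmc binary is needed:
output = ('Total number of objects inspected:   2\n'
          'Total number of objects backed up:   2\n')
print(_check_dsmc_output(output, {'Total number of objects backed up': '2'}))  # True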
51,561
def check_variable(name): if (len(name) > 31): return 'Variable names >31 characters may not function on some systems.' return check_identifier(name)
[ "def", "check_variable", "(", "name", ")", ":", "if", "(", "len", "(", "name", ")", ">", "31", ")", ":", "return", "'Variable names >31 characters may not function on some systems.'", "return", "check_identifier", "(", "name", ")" ]
return none if *name* is expected to be a valid variable name in any glsl version .
train
false
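check_identifier is defined elsewhere in vispy, but the length branch can be exercised on its own:
print(check_variable('x' * 32))
# Variable names >31 characters may not function on some systems.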
51,563
def JumpToPreviewWindow(): vim.command(u'silent! wincmd P') return vim.current.window.options[u'previewwindow']
[ "def", "JumpToPreviewWindow", "(", ")", ":", "vim", ".", "command", "(", "u'silent! wincmd P'", ")", "return", "vim", ".", "current", ".", "window", ".", "options", "[", "u'previewwindow'", "]" ]
jump the vim cursor to the preview window .
train
false
51,565
def branch_create(repo, name, objectish=None, force=False): with open_repo_closing(repo) as r: if isinstance(name, bytes): names = [name] elif isinstance(name, list): names = name else: raise TypeError(('Unexpected branch name type %r' % name)) if (objectish is None): objectish = 'HEAD' object = parse_object(r, objectish) refname = ('refs/heads/' + name) if ((refname in r.refs) and (not force)): raise KeyError(('Branch with name %s already exists.' % name)) r.refs[refname] = object.id
[ "def", "branch_create", "(", "repo", ",", "name", ",", "objectish", "=", "None", ",", "force", "=", "False", ")", ":", "with", "open_repo_closing", "(", "repo", ")", "as", "r", ":", "if", "isinstance", "(", "name", ",", "bytes", ")", ":", "names", "=", "[", "name", "]", "elif", "isinstance", "(", "name", ",", "list", ")", ":", "names", "=", "name", "else", ":", "raise", "TypeError", "(", "(", "'Unexpected branch name type %r'", "%", "name", ")", ")", "if", "(", "objectish", "is", "None", ")", ":", "objectish", "=", "'HEAD'", "object", "=", "parse_object", "(", "r", ",", "objectish", ")", "refname", "=", "(", "'refs/heads/'", "+", "name", ")", "if", "(", "(", "refname", "in", "r", ".", "refs", ")", "and", "(", "not", "force", ")", ")", ":", "raise", "KeyError", "(", "(", "'Branch with name %s already exists.'", "%", "name", ")", ")", "r", ".", "refs", "[", "refname", "]", "=", "object", ".", "id" ]
create a branch .
train
false
51,567
def _clear_watcher(conn, expiring_weakref): try: conn.control_conn_disposed() except ReferenceError: pass
[ "def", "_clear_watcher", "(", "conn", ",", "expiring_weakref", ")", ":", "try", ":", "conn", ".", "control_conn_disposed", "(", ")", "except", "ReferenceError", ":", "pass" ]
called when the controlconnection object is about to be finalized .
train
false
51,568
def rods_connect(): (status, env) = irods.getRodsEnv() assert (status == 0), ('connect(): getRodsEnv() failed (%s): %s' % (status, irods.strerror(status))) (conn, err) = irods.rcConnect(env.rodsHost, env.rodsPort, env.rodsUserName, env.rodsZone) assert (err.status == 0), ('connect(): rcConnect() failed (%s): %s' % (err.status, err.msg)) (status, pw) = irods.obfGetPw() assert (status == 0), ('connect(): getting password with obfGetPw() failed (%s): %s' % (status, irods.strerror(status))) status = irods.clientLoginWithObfPassword(conn, pw) assert (status == 0), ('connect(): logging in with clientLoginWithObfPassword() failed (%s): %s' % (status, irods.strerror(status))) return (env, conn)
[ "def", "rods_connect", "(", ")", ":", "(", "status", ",", "env", ")", "=", "irods", ".", "getRodsEnv", "(", ")", "assert", "(", "status", "==", "0", ")", ",", "(", "'connect(): getRodsEnv() failed (%s): %s'", "%", "(", "status", ",", "irods", ".", "strerror", "(", "status", ")", ")", ")", "(", "conn", ",", "err", ")", "=", "irods", ".", "rcConnect", "(", "env", ".", "rodsHost", ",", "env", ".", "rodsPort", ",", "env", ".", "rodsUserName", ",", "env", ".", "rodsZone", ")", "assert", "(", "err", ".", "status", "==", "0", ")", ",", "(", "'connect(): rcConnect() failed (%s): %s'", "%", "(", "err", ".", "status", ",", "err", ".", "msg", ")", ")", "(", "status", ",", "pw", ")", "=", "irods", ".", "obfGetPw", "(", ")", "assert", "(", "status", "==", "0", ")", ",", "(", "'connect(): getting password with obfGetPw() failed (%s): %s'", "%", "(", "status", ",", "irods", ".", "strerror", "(", "status", ")", ")", ")", "status", "=", "irods", ".", "clientLoginWithObfPassword", "(", "conn", ",", "pw", ")", "assert", "(", "status", "==", "0", ")", ",", "(", "'connect(): logging in with clientLoginWithObfPassword() failed (%s): %s'", "%", "(", "status", ",", "irods", ".", "strerror", "(", "status", ")", ")", ")", "return", "(", "env", ",", "conn", ")" ]
a basic irods connection mechanism that connects using the current irods environment .
train
false
51,570
def _report_failure(self, out, test, example, got): s = self._checker.output_difference(example, got, self.optionflags) s = s.encode('raw_unicode_escape').decode('utf8', 'ignore') out((self._failure_header(test, example) + s))
[ "def", "_report_failure", "(", "self", ",", "out", ",", "test", ",", "example", ",", "got", ")", ":", "s", "=", "self", ".", "_checker", ".", "output_difference", "(", "example", ",", "got", ",", "self", ".", "optionflags", ")", "s", "=", "s", ".", "encode", "(", "'raw_unicode_escape'", ")", ".", "decode", "(", "'utf8'", ",", "'ignore'", ")", "out", "(", "(", "self", ".", "_failure_header", "(", "test", ",", "example", ")", "+", "s", ")", ")" ]
report that the given example failed .
train
false
51,572
def searchFileForAll(hostsFile, name): results = [] try: lines = hostsFile.getContent().splitlines() except: return results name = name.lower() for line in lines: idx = line.find('#') if (idx != (-1)): line = line[:idx] if (not line): continue parts = line.split() if (name.lower() in [s.lower() for s in parts[1:]]): results.append(nativeString(parts[0])) return results
[ "def", "searchFileForAll", "(", "hostsFile", ",", "name", ")", ":", "results", "=", "[", "]", "try", ":", "lines", "=", "hostsFile", ".", "getContent", "(", ")", ".", "splitlines", "(", ")", "except", ":", "return", "results", "name", "=", "name", ".", "lower", "(", ")", "for", "line", "in", "lines", ":", "idx", "=", "line", ".", "find", "(", "'#'", ")", "if", "(", "idx", "!=", "(", "-", "1", ")", ")", ":", "line", "=", "line", "[", ":", "idx", "]", "if", "(", "not", "line", ")", ":", "continue", "parts", "=", "line", ".", "split", "(", ")", "if", "(", "name", ".", "lower", "(", ")", "in", "[", "s", ".", "lower", "(", ")", "for", "s", "in", "parts", "[", "1", ":", "]", "]", ")", ":", "results", ".", "append", "(", "nativeString", "(", "parts", "[", "0", "]", ")", ")", "return", "results" ]
search the given hosts file for all addresses associated with the given name .
train
false
51,573
def acceptance_yaml_for_test(test_case): _ENV_VAR = 'ACCEPTANCE_YAML' filename = environ.get(_ENV_VAR) if (not filename): test_case.skip('Must set {} to an acceptance.yaml file (http://doc-dev.clusterhq.com/gettinginvolved/appendix.html#acceptance-testing-configuration) plus additional keys in order to run this test.'.format(_ENV_VAR)) with open(filename) as f: config = yaml.safe_load(f) return config
[ "def", "acceptance_yaml_for_test", "(", "test_case", ")", ":", "_ENV_VAR", "=", "'ACCEPTANCE_YAML'", "filename", "=", "environ", ".", "get", "(", "_ENV_VAR", ")", "if", "(", "not", "filename", ")", ":", "test_case", ".", "skip", "(", "'Must set {} to an acceptance.yaml file (http://doc-dev.clusterhq.com/gettinginvolved/appendix.html#acceptance-testing-configuration) plus additional keys in order to run this test.'", ".", "format", "(", "_ENV_VAR", ")", ")", "with", "open", "(", "filename", ")", "as", "f", ":", "config", "=", "yaml", ".", "safe_load", "(", "f", ")", "return", "config" ]
load configuration from a yaml file specified in an environment variable .
train
false
51,574
def checkValidFilePath(filepath, makeValid=True): folder = os.path.split(os.path.abspath(filepath))[0] if (not os.path.isdir(folder)): os.makedirs(folder) return True
[ "def", "checkValidFilePath", "(", "filepath", ",", "makeValid", "=", "True", ")", ":", "folder", "=", "os", ".", "path", ".", "split", "(", "os", ".", "path", ".", "abspath", "(", "filepath", ")", ")", "[", "0", "]", "if", "(", "not", "os", ".", "path", ".", "isdir", "(", "folder", ")", ")", ":", "os", ".", "makedirs", "(", "folder", ")", "return", "True" ]
checks whether the file path location is valid , creating the folder if needed . this should also check whether we have write-permissions to the folder but doesnt currently do that ! added in: 1 .
train
false
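A safe usage sketch inside a temp directory; note that makeValid is currently ignored and the folder is always created:
import os, tempfile
target = os.path.join(tempfile.mkdtemp(), 'sub', 'dir', 'log.txt')
print(checkValidFilePath(target))               # True
print(os.path.isdir(os.path.dirname(target)))   # True: folders were created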
51,575
def rocket(): return load('rocket.jpg')
[ "def", "rocket", "(", ")", ":", "return", "load", "(", "'rocket.jpg'", ")" ]
launch photo of dscovr on falcon 9 by spacex .
train
false
51,577
def activateAaPdpContextRequest(AccessPointName_presence=0, ProtocolConfigurationOptions_presence=0, GprsTimer_presence=0): a = TpPd(pd=8) b = MessageType(mesType=80) c = NetworkServiceAccessPointIdentifier() d = LlcServiceAccessPointIdentifier() e = QualityOfService() f = PacketDataProtocolAddress() packet = (((((a / b) / c) / d) / e) / f) if (AccessPointName_presence is 1): g = AccessPointName(ieiAPN=40) packet = (packet / g) if (ProtocolConfigurationOptions_presence is 1): h = ProtocolConfigurationOptions(ieiPCO=39) packet = (packet / h) if (GprsTimer_presence is 1): i = GprsTimer(ieiGT=41) packet = (packet / i) return packet
[ "def", "activateAaPdpContextRequest", "(", "AccessPointName_presence", "=", "0", ",", "ProtocolConfigurationOptions_presence", "=", "0", ",", "GprsTimer_presence", "=", "0", ")", ":", "a", "=", "TpPd", "(", "pd", "=", "8", ")", "b", "=", "MessageType", "(", "mesType", "=", "80", ")", "c", "=", "NetworkServiceAccessPointIdentifier", "(", ")", "d", "=", "LlcServiceAccessPointIdentifier", "(", ")", "e", "=", "QualityOfService", "(", ")", "f", "=", "PacketDataProtocolAddress", "(", ")", "packet", "=", "(", "(", "(", "(", "(", "a", "/", "b", ")", "/", "c", ")", "/", "d", ")", "/", "e", ")", "/", "f", ")", "if", "(", "AccessPointName_presence", "is", "1", ")", ":", "g", "=", "AccessPointName", "(", "ieiAPN", "=", "40", ")", "packet", "=", "(", "packet", "/", "g", ")", "if", "(", "ProtocolConfigurationOptions_presence", "is", "1", ")", ":", "h", "=", "ProtocolConfigurationOptions", "(", "ieiPCO", "=", "39", ")", "packet", "=", "(", "packet", "/", "h", ")", "if", "(", "GprsTimer_presence", "is", "1", ")", ":", "i", "=", "GprsTimer", "(", "ieiGT", "=", "41", ")", "packet", "=", "(", "packet", "/", "i", ")", "return", "packet" ]
activate aa pdp context request section 9 .
train
true
51,582
def locale_or_default(locale): if (locale not in LOCALES): locale = settings.LANGUAGE_CODE return locale
[ "def", "locale_or_default", "(", "locale", ")", ":", "if", "(", "locale", "not", "in", "LOCALES", ")", ":", "locale", "=", "settings", ".", "LANGUAGE_CODE", "return", "locale" ]
return locale if it is supported , else the default language code .
train
false