id_within_dataset
int64 1
55.5k
| snippet
stringlengths 19
14.2k
| tokens
listlengths 6
1.63k
| nl
stringlengths 6
352
| split_within_dataset
stringclasses 1
value | is_duplicated
bool 2
classes |
|---|---|---|---|---|---|
5,941
|
def endsInNewline(s):
return (s[(- len('\n')):] == '\n')
|
[
"def",
"endsInNewline",
"(",
"s",
")",
":",
"return",
"(",
"s",
"[",
"(",
"-",
"len",
"(",
"'\\n'",
")",
")",
":",
"]",
"==",
"'\\n'",
")"
] |
returns c{true} if this string ends in a newline .
|
train
| false
|
5,942
|
def remove(name=None, pkgs=None, **kwargs):
pkg2rm = ''
if pkgs:
for pkg in pkgs:
pkg2rm += '{0} '.format(pkg)
log.debug('Installing these packages instead of {0}: {1}'.format(name, pkg2rm))
else:
pkg2rm = '{0}'.format(name)
old = list_pkgs()
cmd = '/bin/pkg uninstall -v {0}'.format(pkg2rm)
out = __salt__['cmd.run_all'](cmd, output_loglevel='trace')
__context__.pop('pkg.list_pkgs', None)
new = list_pkgs()
ret = salt.utils.compare_dicts(old, new)
if (out['retcode'] != 0):
raise CommandExecutionError('Error occurred removing package(s)', info={'changes': ret, 'retcode': ips_pkg_return_values[out['retcode']], 'errors': [out['stderr']]})
return ret
|
[
"def",
"remove",
"(",
"name",
"=",
"None",
",",
"pkgs",
"=",
"None",
",",
"**",
"kwargs",
")",
":",
"pkg2rm",
"=",
"''",
"if",
"pkgs",
":",
"for",
"pkg",
"in",
"pkgs",
":",
"pkg2rm",
"+=",
"'{0} '",
".",
"format",
"(",
"pkg",
")",
"log",
".",
"debug",
"(",
"'Installing these packages instead of {0}: {1}'",
".",
"format",
"(",
"name",
",",
"pkg2rm",
")",
")",
"else",
":",
"pkg2rm",
"=",
"'{0}'",
".",
"format",
"(",
"name",
")",
"old",
"=",
"list_pkgs",
"(",
")",
"cmd",
"=",
"'/bin/pkg uninstall -v {0}'",
".",
"format",
"(",
"pkg2rm",
")",
"out",
"=",
"__salt__",
"[",
"'cmd.run_all'",
"]",
"(",
"cmd",
",",
"output_loglevel",
"=",
"'trace'",
")",
"__context__",
".",
"pop",
"(",
"'pkg.list_pkgs'",
",",
"None",
")",
"new",
"=",
"list_pkgs",
"(",
")",
"ret",
"=",
"salt",
".",
"utils",
".",
"compare_dicts",
"(",
"old",
",",
"new",
")",
"if",
"(",
"out",
"[",
"'retcode'",
"]",
"!=",
"0",
")",
":",
"raise",
"CommandExecutionError",
"(",
"'Error occurred removing package(s)'",
",",
"info",
"=",
"{",
"'changes'",
":",
"ret",
",",
"'retcode'",
":",
"ips_pkg_return_values",
"[",
"out",
"[",
"'retcode'",
"]",
"]",
",",
"'errors'",
":",
"[",
"out",
"[",
"'stderr'",
"]",
"]",
"}",
")",
"return",
"ret"
] |
remove files and directories from the subversion repository cwd the path to the subversion repository targets : none files .
|
train
| false
|
5,943
|
def _valid_device(value, device_type):
config = OrderedDict()
for (key, device) in value.items():
if ('packetid' in device.keys()):
msg = (('You are using an outdated configuration of the rfxtrx ' + 'device, {}.'.format(key)) + ' Your new config should be:\n {}: \n name: {}'.format(device.get('packetid'), device.get(ATTR_NAME, 'deivce_name')))
_LOGGER.warning(msg)
key = device.get('packetid')
device.pop('packetid')
key = str(key)
if (not ((len(key) % 2) == 0)):
key = ('0' + key)
if (get_rfx_object(key) is None):
raise vol.Invalid('Rfxtrx device {} is invalid: Invalid device id for {}'.format(key, value))
if (device_type == 'sensor'):
config[key] = DEVICE_SCHEMA_SENSOR(device)
elif (device_type == 'light_switch'):
config[key] = DEVICE_SCHEMA(device)
else:
raise vol.Invalid('Rfxtrx device is invalid')
if (not config[key][ATTR_NAME]):
config[key][ATTR_NAME] = key
return config
|
[
"def",
"_valid_device",
"(",
"value",
",",
"device_type",
")",
":",
"config",
"=",
"OrderedDict",
"(",
")",
"for",
"(",
"key",
",",
"device",
")",
"in",
"value",
".",
"items",
"(",
")",
":",
"if",
"(",
"'packetid'",
"in",
"device",
".",
"keys",
"(",
")",
")",
":",
"msg",
"=",
"(",
"(",
"'You are using an outdated configuration of the rfxtrx '",
"+",
"'device, {}.'",
".",
"format",
"(",
"key",
")",
")",
"+",
"' Your new config should be:\\n {}: \\n name: {}'",
".",
"format",
"(",
"device",
".",
"get",
"(",
"'packetid'",
")",
",",
"device",
".",
"get",
"(",
"ATTR_NAME",
",",
"'deivce_name'",
")",
")",
")",
"_LOGGER",
".",
"warning",
"(",
"msg",
")",
"key",
"=",
"device",
".",
"get",
"(",
"'packetid'",
")",
"device",
".",
"pop",
"(",
"'packetid'",
")",
"key",
"=",
"str",
"(",
"key",
")",
"if",
"(",
"not",
"(",
"(",
"len",
"(",
"key",
")",
"%",
"2",
")",
"==",
"0",
")",
")",
":",
"key",
"=",
"(",
"'0'",
"+",
"key",
")",
"if",
"(",
"get_rfx_object",
"(",
"key",
")",
"is",
"None",
")",
":",
"raise",
"vol",
".",
"Invalid",
"(",
"'Rfxtrx device {} is invalid: Invalid device id for {}'",
".",
"format",
"(",
"key",
",",
"value",
")",
")",
"if",
"(",
"device_type",
"==",
"'sensor'",
")",
":",
"config",
"[",
"key",
"]",
"=",
"DEVICE_SCHEMA_SENSOR",
"(",
"device",
")",
"elif",
"(",
"device_type",
"==",
"'light_switch'",
")",
":",
"config",
"[",
"key",
"]",
"=",
"DEVICE_SCHEMA",
"(",
"device",
")",
"else",
":",
"raise",
"vol",
".",
"Invalid",
"(",
"'Rfxtrx device is invalid'",
")",
"if",
"(",
"not",
"config",
"[",
"key",
"]",
"[",
"ATTR_NAME",
"]",
")",
":",
"config",
"[",
"key",
"]",
"[",
"ATTR_NAME",
"]",
"=",
"key",
"return",
"config"
] |
validate a dictionary of devices definitions .
|
train
| false
|
5,945
|
def compress_merge_back(tokens, tok):
last = tokens[(-1)]
if ((type(last) is not token) or (type(tok) is not token)):
tokens.append(tok)
else:
text = _unicode(last)
if last.trailing_whitespace:
text += last.trailing_whitespace
text += tok
merged = token(text, pre_tags=last.pre_tags, post_tags=tok.post_tags, trailing_whitespace=tok.trailing_whitespace)
merged.annotation = last.annotation
tokens[(-1)] = merged
|
[
"def",
"compress_merge_back",
"(",
"tokens",
",",
"tok",
")",
":",
"last",
"=",
"tokens",
"[",
"(",
"-",
"1",
")",
"]",
"if",
"(",
"(",
"type",
"(",
"last",
")",
"is",
"not",
"token",
")",
"or",
"(",
"type",
"(",
"tok",
")",
"is",
"not",
"token",
")",
")",
":",
"tokens",
".",
"append",
"(",
"tok",
")",
"else",
":",
"text",
"=",
"_unicode",
"(",
"last",
")",
"if",
"last",
".",
"trailing_whitespace",
":",
"text",
"+=",
"last",
".",
"trailing_whitespace",
"text",
"+=",
"tok",
"merged",
"=",
"token",
"(",
"text",
",",
"pre_tags",
"=",
"last",
".",
"pre_tags",
",",
"post_tags",
"=",
"tok",
".",
"post_tags",
",",
"trailing_whitespace",
"=",
"tok",
".",
"trailing_whitespace",
")",
"merged",
".",
"annotation",
"=",
"last",
".",
"annotation",
"tokens",
"[",
"(",
"-",
"1",
")",
"]",
"=",
"merged"
] |
merge tok into the last element of tokens .
|
train
| true
|
5,946
|
def type_change(old, new):
if all((isinstance(x, base) for x in (old + new))):
return False
if (len(old) != len(new)):
return True
new_types = list(map(type, new))
old_types = list(map(type, old))
return (not all(map(issubtype, new_types, old_types)))
|
[
"def",
"type_change",
"(",
"old",
",",
"new",
")",
":",
"if",
"all",
"(",
"(",
"isinstance",
"(",
"x",
",",
"base",
")",
"for",
"x",
"in",
"(",
"old",
"+",
"new",
")",
")",
")",
":",
"return",
"False",
"if",
"(",
"len",
"(",
"old",
")",
"!=",
"len",
"(",
"new",
")",
")",
":",
"return",
"True",
"new_types",
"=",
"list",
"(",
"map",
"(",
"type",
",",
"new",
")",
")",
"old_types",
"=",
"list",
"(",
"map",
"(",
"type",
",",
"old",
")",
")",
"return",
"(",
"not",
"all",
"(",
"map",
"(",
"issubtype",
",",
"new_types",
",",
"old_types",
")",
")",
")"
] |
was there a significant type change between old and new data? .
|
train
| false
|
5,947
|
@loader_option()
def lazyload(loadopt, attr):
return loadopt.set_relationship_strategy(attr, {'lazy': 'select'})
|
[
"@",
"loader_option",
"(",
")",
"def",
"lazyload",
"(",
"loadopt",
",",
"attr",
")",
":",
"return",
"loadopt",
".",
"set_relationship_strategy",
"(",
"attr",
",",
"{",
"'lazy'",
":",
"'select'",
"}",
")"
] |
indicate that the given attribute should be loaded using "lazy" loading .
|
train
| false
|
5,948
|
def embed_kernel(module=None, local_ns=None, **kwargs):
(caller_module, caller_locals) = extract_module_locals(1)
if (module is None):
module = caller_module
if (local_ns is None):
local_ns = caller_locals
from ipykernel.embed import embed_kernel as real_embed_kernel
real_embed_kernel(module=module, local_ns=local_ns, **kwargs)
|
[
"def",
"embed_kernel",
"(",
"module",
"=",
"None",
",",
"local_ns",
"=",
"None",
",",
"**",
"kwargs",
")",
":",
"(",
"caller_module",
",",
"caller_locals",
")",
"=",
"extract_module_locals",
"(",
"1",
")",
"if",
"(",
"module",
"is",
"None",
")",
":",
"module",
"=",
"caller_module",
"if",
"(",
"local_ns",
"is",
"None",
")",
":",
"local_ns",
"=",
"caller_locals",
"from",
"ipykernel",
".",
"embed",
"import",
"embed_kernel",
"as",
"real_embed_kernel",
"real_embed_kernel",
"(",
"module",
"=",
"module",
",",
"local_ns",
"=",
"local_ns",
",",
"**",
"kwargs",
")"
] |
embed and start an ipython kernel in a given scope .
|
train
| false
|
5,950
|
def safe_walk(top, topdown=True, onerror=None, followlinks=True, _seen=None):
(islink, join, isdir) = (os.path.islink, os.path.join, os.path.isdir)
if (_seen is None):
_seen = set()
try:
names = os.listdir(top)
except os.error as err:
if (onerror is not None):
onerror(err)
return
if followlinks:
status = os.stat(top)
if (status.st_ino != 0):
node = (status.st_dev, status.st_ino)
if (node in _seen):
return
_seen.add(node)
(dirs, nondirs) = ([], [])
for name in names:
full_path = join(top, name)
if isdir(full_path):
dirs.append(name)
else:
nondirs.append(name)
if topdown:
(yield (top, dirs, nondirs))
for name in dirs:
new_path = join(top, name)
if (followlinks or (not islink(new_path))):
for x in safe_walk(new_path, topdown, onerror, followlinks, _seen):
(yield x)
if (not topdown):
(yield (top, dirs, nondirs))
|
[
"def",
"safe_walk",
"(",
"top",
",",
"topdown",
"=",
"True",
",",
"onerror",
"=",
"None",
",",
"followlinks",
"=",
"True",
",",
"_seen",
"=",
"None",
")",
":",
"(",
"islink",
",",
"join",
",",
"isdir",
")",
"=",
"(",
"os",
".",
"path",
".",
"islink",
",",
"os",
".",
"path",
".",
"join",
",",
"os",
".",
"path",
".",
"isdir",
")",
"if",
"(",
"_seen",
"is",
"None",
")",
":",
"_seen",
"=",
"set",
"(",
")",
"try",
":",
"names",
"=",
"os",
".",
"listdir",
"(",
"top",
")",
"except",
"os",
".",
"error",
"as",
"err",
":",
"if",
"(",
"onerror",
"is",
"not",
"None",
")",
":",
"onerror",
"(",
"err",
")",
"return",
"if",
"followlinks",
":",
"status",
"=",
"os",
".",
"stat",
"(",
"top",
")",
"if",
"(",
"status",
".",
"st_ino",
"!=",
"0",
")",
":",
"node",
"=",
"(",
"status",
".",
"st_dev",
",",
"status",
".",
"st_ino",
")",
"if",
"(",
"node",
"in",
"_seen",
")",
":",
"return",
"_seen",
".",
"add",
"(",
"node",
")",
"(",
"dirs",
",",
"nondirs",
")",
"=",
"(",
"[",
"]",
",",
"[",
"]",
")",
"for",
"name",
"in",
"names",
":",
"full_path",
"=",
"join",
"(",
"top",
",",
"name",
")",
"if",
"isdir",
"(",
"full_path",
")",
":",
"dirs",
".",
"append",
"(",
"name",
")",
"else",
":",
"nondirs",
".",
"append",
"(",
"name",
")",
"if",
"topdown",
":",
"(",
"yield",
"(",
"top",
",",
"dirs",
",",
"nondirs",
")",
")",
"for",
"name",
"in",
"dirs",
":",
"new_path",
"=",
"join",
"(",
"top",
",",
"name",
")",
"if",
"(",
"followlinks",
"or",
"(",
"not",
"islink",
"(",
"new_path",
")",
")",
")",
":",
"for",
"x",
"in",
"safe_walk",
"(",
"new_path",
",",
"topdown",
",",
"onerror",
",",
"followlinks",
",",
"_seen",
")",
":",
"(",
"yield",
"x",
")",
"if",
"(",
"not",
"topdown",
")",
":",
"(",
"yield",
"(",
"top",
",",
"dirs",
",",
"nondirs",
")",
")"
] |
just like os .
|
train
| true
|
5,951
|
def _service(service, action):
if (distrib_family() != 'gentoo'):
status = run_as_root(('service %(service)s %(action)s' % locals()), pty=False)
else:
status = run_as_root(('/etc/init.d/%(service)s %(action)s' % locals()), pty=False)
return status
|
[
"def",
"_service",
"(",
"service",
",",
"action",
")",
":",
"if",
"(",
"distrib_family",
"(",
")",
"!=",
"'gentoo'",
")",
":",
"status",
"=",
"run_as_root",
"(",
"(",
"'service %(service)s %(action)s'",
"%",
"locals",
"(",
")",
")",
",",
"pty",
"=",
"False",
")",
"else",
":",
"status",
"=",
"run_as_root",
"(",
"(",
"'/etc/init.d/%(service)s %(action)s'",
"%",
"locals",
"(",
")",
")",
",",
"pty",
"=",
"False",
")",
"return",
"status"
] |
compatibility layer for distros that use service and those that dont .
|
train
| false
|
5,952
|
def providers():
ret = {}
for funcname in __salt__:
modname = funcname.split('.')[0]
if (modname not in ret):
ret[provider(modname)] = modname
return ret
|
[
"def",
"providers",
"(",
")",
":",
"ret",
"=",
"{",
"}",
"for",
"funcname",
"in",
"__salt__",
":",
"modname",
"=",
"funcname",
".",
"split",
"(",
"'.'",
")",
"[",
"0",
"]",
"if",
"(",
"modname",
"not",
"in",
"ret",
")",
":",
"ret",
"[",
"provider",
"(",
"modname",
")",
"]",
"=",
"modname",
"return",
"ret"
] |
return a dict of the provider names and the files that provided them cli example: .
|
train
| false
|
5,953
|
def spec_hausman(params_e, params_i, cov_params_e, cov_params_i, dof=None):
params_diff = (params_i - params_e)
cov_diff = (cov_params_i - cov_params_e)
if (not dof):
dof = np_matrix_rank(cov_diff)
cov_diffpinv = np.linalg.pinv(cov_diff)
H = np.dot(params_diff, np.dot(cov_diffpinv, params_diff))
pval = stats.chi2.sf(H, dof)
evals = np.linalg.eigvalsh(cov_diff)
return (H, pval, dof, evals)
|
[
"def",
"spec_hausman",
"(",
"params_e",
",",
"params_i",
",",
"cov_params_e",
",",
"cov_params_i",
",",
"dof",
"=",
"None",
")",
":",
"params_diff",
"=",
"(",
"params_i",
"-",
"params_e",
")",
"cov_diff",
"=",
"(",
"cov_params_i",
"-",
"cov_params_e",
")",
"if",
"(",
"not",
"dof",
")",
":",
"dof",
"=",
"np_matrix_rank",
"(",
"cov_diff",
")",
"cov_diffpinv",
"=",
"np",
".",
"linalg",
".",
"pinv",
"(",
"cov_diff",
")",
"H",
"=",
"np",
".",
"dot",
"(",
"params_diff",
",",
"np",
".",
"dot",
"(",
"cov_diffpinv",
",",
"params_diff",
")",
")",
"pval",
"=",
"stats",
".",
"chi2",
".",
"sf",
"(",
"H",
",",
"dof",
")",
"evals",
"=",
"np",
".",
"linalg",
".",
"eigvalsh",
"(",
"cov_diff",
")",
"return",
"(",
"H",
",",
"pval",
",",
"dof",
",",
"evals",
")"
] |
hausmans specification test parameters params_e : array efficient and consistent under null hypothesis .
|
train
| false
|
5,955
|
def get_user_stylesheet():
filename = config.get('ui', 'user-stylesheet')
if (filename is None):
css = ''
else:
with open(filename, 'r', encoding='utf-8') as f:
css = f.read()
if config.get('ui', 'hide-scrollbar'):
css += '\nhtml > ::-webkit-scrollbar { width: 0px; height: 0px; }'
return css
|
[
"def",
"get_user_stylesheet",
"(",
")",
":",
"filename",
"=",
"config",
".",
"get",
"(",
"'ui'",
",",
"'user-stylesheet'",
")",
"if",
"(",
"filename",
"is",
"None",
")",
":",
"css",
"=",
"''",
"else",
":",
"with",
"open",
"(",
"filename",
",",
"'r'",
",",
"encoding",
"=",
"'utf-8'",
")",
"as",
"f",
":",
"css",
"=",
"f",
".",
"read",
"(",
")",
"if",
"config",
".",
"get",
"(",
"'ui'",
",",
"'hide-scrollbar'",
")",
":",
"css",
"+=",
"'\\nhtml > ::-webkit-scrollbar { width: 0px; height: 0px; }'",
"return",
"css"
] |
get the combined user-stylesheet .
|
train
| false
|
5,956
|
def recv_monitor_message(socket, flags=0):
_check_version((4, 0), 'libzmq event API')
msg = socket.recv_multipart(flags)
return parse_monitor_message(msg)
|
[
"def",
"recv_monitor_message",
"(",
"socket",
",",
"flags",
"=",
"0",
")",
":",
"_check_version",
"(",
"(",
"4",
",",
"0",
")",
",",
"'libzmq event API'",
")",
"msg",
"=",
"socket",
".",
"recv_multipart",
"(",
"flags",
")",
"return",
"parse_monitor_message",
"(",
"msg",
")"
] |
receive and decode the given raw message from the monitoring socket and return a dict .
|
train
| false
|
5,957
|
def MockVimModule():
VIM_MOCK.buffers = {}
VIM_MOCK.eval = MagicMock(side_effect=_MockVimEval)
sys.modules[u'vim'] = VIM_MOCK
return VIM_MOCK
|
[
"def",
"MockVimModule",
"(",
")",
":",
"VIM_MOCK",
".",
"buffers",
"=",
"{",
"}",
"VIM_MOCK",
".",
"eval",
"=",
"MagicMock",
"(",
"side_effect",
"=",
"_MockVimEval",
")",
"sys",
".",
"modules",
"[",
"u'vim'",
"]",
"=",
"VIM_MOCK",
"return",
"VIM_MOCK"
] |
the vim module is something that is only present when running inside the vim python interpreter .
|
train
| false
|
5,958
|
def create_unicode_buffer(init, size=None):
if isinstance(init, str):
if (size is None):
size = (len(init) + 1)
buftype = (c_wchar * size)
buf = buftype()
buf.value = init
return buf
elif isinstance(init, int):
buftype = (c_wchar * init)
buf = buftype()
return buf
raise TypeError(init)
|
[
"def",
"create_unicode_buffer",
"(",
"init",
",",
"size",
"=",
"None",
")",
":",
"if",
"isinstance",
"(",
"init",
",",
"str",
")",
":",
"if",
"(",
"size",
"is",
"None",
")",
":",
"size",
"=",
"(",
"len",
"(",
"init",
")",
"+",
"1",
")",
"buftype",
"=",
"(",
"c_wchar",
"*",
"size",
")",
"buf",
"=",
"buftype",
"(",
")",
"buf",
".",
"value",
"=",
"init",
"return",
"buf",
"elif",
"isinstance",
"(",
"init",
",",
"int",
")",
":",
"buftype",
"=",
"(",
"c_wchar",
"*",
"init",
")",
"buf",
"=",
"buftype",
"(",
")",
"return",
"buf",
"raise",
"TypeError",
"(",
"init",
")"
] |
create_unicode_buffer -> character array create_unicode_buffer -> character array create_unicode_buffer -> character array .
|
train
| false
|
5,959
|
def list_user_permissions(name, runas=None):
if ((runas is None) and (not salt.utils.is_windows())):
runas = salt.utils.get_user()
res = __salt__['cmd.run_all']([__context__['rabbitmqctl'], 'list_user_permissions', name, '-q'], runas=runas, python_shell=False)
return _output_to_dict(res)
|
[
"def",
"list_user_permissions",
"(",
"name",
",",
"runas",
"=",
"None",
")",
":",
"if",
"(",
"(",
"runas",
"is",
"None",
")",
"and",
"(",
"not",
"salt",
".",
"utils",
".",
"is_windows",
"(",
")",
")",
")",
":",
"runas",
"=",
"salt",
".",
"utils",
".",
"get_user",
"(",
")",
"res",
"=",
"__salt__",
"[",
"'cmd.run_all'",
"]",
"(",
"[",
"__context__",
"[",
"'rabbitmqctl'",
"]",
",",
"'list_user_permissions'",
",",
"name",
",",
"'-q'",
"]",
",",
"runas",
"=",
"runas",
",",
"python_shell",
"=",
"False",
")",
"return",
"_output_to_dict",
"(",
"res",
")"
] |
list permissions for a user via rabbitmqctl list_user_permissions cli example: .
|
train
| false
|
5,963
|
def __call__(self, func):
return FunctionMaker.create(func, 'with _self_: return _func_(%(shortsignature)s)', dict(_self_=self, _func_=func), __wrapped__=func)
|
[
"def",
"__call__",
"(",
"self",
",",
"func",
")",
":",
"return",
"FunctionMaker",
".",
"create",
"(",
"func",
",",
"'with _self_: return _func_(%(shortsignature)s)'",
",",
"dict",
"(",
"_self_",
"=",
"self",
",",
"_func_",
"=",
"func",
")",
",",
"__wrapped__",
"=",
"func",
")"
] |
context manager decorator .
|
train
| true
|
5,964
|
def dottedQuadToNum(ip):
import socket, struct
try:
return struct.unpack('!L', socket.inet_aton(ip.strip()))[0]
except socket.error:
if (ip.strip() == '255.255.255.255'):
return 4294967295L
else:
raise ValueError(('Not a good dotted-quad IP: %s' % ip))
return
|
[
"def",
"dottedQuadToNum",
"(",
"ip",
")",
":",
"import",
"socket",
",",
"struct",
"try",
":",
"return",
"struct",
".",
"unpack",
"(",
"'!L'",
",",
"socket",
".",
"inet_aton",
"(",
"ip",
".",
"strip",
"(",
")",
")",
")",
"[",
"0",
"]",
"except",
"socket",
".",
"error",
":",
"if",
"(",
"ip",
".",
"strip",
"(",
")",
"==",
"'255.255.255.255'",
")",
":",
"return",
"4294967295",
"L",
"else",
":",
"raise",
"ValueError",
"(",
"(",
"'Not a good dotted-quad IP: %s'",
"%",
"ip",
")",
")",
"return"
] |
convert decimal dotted quad string to long integer .
|
train
| true
|
5,965
|
def require_post_params(required_params):
def _decorator(func):
@wraps(func)
def _wrapped(*args, **_kwargs):
request = args[0]
missing_params = (set(required_params) - set(request.POST.keys()))
if (len(missing_params) > 0):
msg = u'Missing POST parameters: {missing}'.format(missing=', '.join(missing_params))
return HttpResponseBadRequest(msg)
else:
return func(request)
return _wrapped
return _decorator
|
[
"def",
"require_post_params",
"(",
"required_params",
")",
":",
"def",
"_decorator",
"(",
"func",
")",
":",
"@",
"wraps",
"(",
"func",
")",
"def",
"_wrapped",
"(",
"*",
"args",
",",
"**",
"_kwargs",
")",
":",
"request",
"=",
"args",
"[",
"0",
"]",
"missing_params",
"=",
"(",
"set",
"(",
"required_params",
")",
"-",
"set",
"(",
"request",
".",
"POST",
".",
"keys",
"(",
")",
")",
")",
"if",
"(",
"len",
"(",
"missing_params",
")",
">",
"0",
")",
":",
"msg",
"=",
"u'Missing POST parameters: {missing}'",
".",
"format",
"(",
"missing",
"=",
"', '",
".",
"join",
"(",
"missing_params",
")",
")",
"return",
"HttpResponseBadRequest",
"(",
"msg",
")",
"else",
":",
"return",
"func",
"(",
"request",
")",
"return",
"_wrapped",
"return",
"_decorator"
] |
checks for required parameters or renders a 400 error .
|
train
| false
|
5,972
|
def retrieveNameFromCache(name):
name = sickbeard.helpers.full_sanitizeSceneName(name)
if (name in nameCache):
return int(nameCache[name])
|
[
"def",
"retrieveNameFromCache",
"(",
"name",
")",
":",
"name",
"=",
"sickbeard",
".",
"helpers",
".",
"full_sanitizeSceneName",
"(",
"name",
")",
"if",
"(",
"name",
"in",
"nameCache",
")",
":",
"return",
"int",
"(",
"nameCache",
"[",
"name",
"]",
")"
] |
looks up the given name in the scene_names table in cache .
|
train
| false
|
5,973
|
def sql_all(app, style, connection):
return ((sql_create(app, style, connection) + sql_custom(app, style, connection)) + sql_indexes(app, style, connection))
|
[
"def",
"sql_all",
"(",
"app",
",",
"style",
",",
"connection",
")",
":",
"return",
"(",
"(",
"sql_create",
"(",
"app",
",",
"style",
",",
"connection",
")",
"+",
"sql_custom",
"(",
"app",
",",
"style",
",",
"connection",
")",
")",
"+",
"sql_indexes",
"(",
"app",
",",
"style",
",",
"connection",
")",
")"
] |
returns a list of create table sql .
|
train
| false
|
5,974
|
def CheckForNewlineAtEOF(filename, lines, error):
if ((len(lines) < 3) or lines[(-2)]):
error(filename, (len(lines) - 2), 'whitespace/ending_newline', 5, 'Could not find a newline character at the end of the file.')
|
[
"def",
"CheckForNewlineAtEOF",
"(",
"filename",
",",
"lines",
",",
"error",
")",
":",
"if",
"(",
"(",
"len",
"(",
"lines",
")",
"<",
"3",
")",
"or",
"lines",
"[",
"(",
"-",
"2",
")",
"]",
")",
":",
"error",
"(",
"filename",
",",
"(",
"len",
"(",
"lines",
")",
"-",
"2",
")",
",",
"'whitespace/ending_newline'",
",",
"5",
",",
"'Could not find a newline character at the end of the file.'",
")"
] |
logs an error if there is no newline char at the end of the file .
|
train
| true
|
5,975
|
def static_with_version(path):
path_re = re.compile('(.*)/([^/]*$)')
return re.sub(path_re, ('\\1/%s/\\2' % cms.__version__), path)
|
[
"def",
"static_with_version",
"(",
"path",
")",
":",
"path_re",
"=",
"re",
".",
"compile",
"(",
"'(.*)/([^/]*$)'",
")",
"return",
"re",
".",
"sub",
"(",
"path_re",
",",
"(",
"'\\\\1/%s/\\\\2'",
"%",
"cms",
".",
"__version__",
")",
",",
"path",
")"
] |
changes provided path from path/to/filename .
|
train
| false
|
5,976
|
@collect_auth
def auth_login(auth):
campaign = request.args.get('campaign')
next_url = request.args.get('next')
data = login_and_register_handler(auth, login=True, campaign=campaign, next_url=next_url)
if (data['status_code'] == http.FOUND):
return redirect(data['next_url'])
|
[
"@",
"collect_auth",
"def",
"auth_login",
"(",
"auth",
")",
":",
"campaign",
"=",
"request",
".",
"args",
".",
"get",
"(",
"'campaign'",
")",
"next_url",
"=",
"request",
".",
"args",
".",
"get",
"(",
"'next'",
")",
"data",
"=",
"login_and_register_handler",
"(",
"auth",
",",
"login",
"=",
"True",
",",
"campaign",
"=",
"campaign",
",",
"next_url",
"=",
"next_url",
")",
"if",
"(",
"data",
"[",
"'status_code'",
"]",
"==",
"http",
".",
"FOUND",
")",
":",
"return",
"redirect",
"(",
"data",
"[",
"'next_url'",
"]",
")"
] |
view for osf login .
|
train
| false
|
5,977
|
def _resp_app_iter_property():
def getter(self):
return self._app_iter
def setter(self, value):
if isinstance(value, (list, tuple)):
self.content_length = sum(map(len, value))
elif (value is not None):
self.content_length = None
self._body = None
self._app_iter = value
return property(getter, setter, doc='Retrieve and set the response app_iter')
|
[
"def",
"_resp_app_iter_property",
"(",
")",
":",
"def",
"getter",
"(",
"self",
")",
":",
"return",
"self",
".",
"_app_iter",
"def",
"setter",
"(",
"self",
",",
"value",
")",
":",
"if",
"isinstance",
"(",
"value",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"self",
".",
"content_length",
"=",
"sum",
"(",
"map",
"(",
"len",
",",
"value",
")",
")",
"elif",
"(",
"value",
"is",
"not",
"None",
")",
":",
"self",
".",
"content_length",
"=",
"None",
"self",
".",
"_body",
"=",
"None",
"self",
".",
"_app_iter",
"=",
"value",
"return",
"property",
"(",
"getter",
",",
"setter",
",",
"doc",
"=",
"'Retrieve and set the response app_iter'",
")"
] |
set and retrieve response .
|
train
| false
|
5,979
|
def gegenbauer_poly(n, a, x=None, **args):
if (n < 0):
raise ValueError(("can't generate Gegenbauer polynomial of degree %s" % n))
(K, a) = construct_domain(a, field=True)
poly = DMP(dup_gegenbauer(int(n), a, K), K)
if (x is not None):
poly = Poly.new(poly, x)
else:
poly = PurePoly.new(poly, Dummy('x'))
if (not args.get('polys', False)):
return poly.as_expr()
else:
return poly
|
[
"def",
"gegenbauer_poly",
"(",
"n",
",",
"a",
",",
"x",
"=",
"None",
",",
"**",
"args",
")",
":",
"if",
"(",
"n",
"<",
"0",
")",
":",
"raise",
"ValueError",
"(",
"(",
"\"can't generate Gegenbauer polynomial of degree %s\"",
"%",
"n",
")",
")",
"(",
"K",
",",
"a",
")",
"=",
"construct_domain",
"(",
"a",
",",
"field",
"=",
"True",
")",
"poly",
"=",
"DMP",
"(",
"dup_gegenbauer",
"(",
"int",
"(",
"n",
")",
",",
"a",
",",
"K",
")",
",",
"K",
")",
"if",
"(",
"x",
"is",
"not",
"None",
")",
":",
"poly",
"=",
"Poly",
".",
"new",
"(",
"poly",
",",
"x",
")",
"else",
":",
"poly",
"=",
"PurePoly",
".",
"new",
"(",
"poly",
",",
"Dummy",
"(",
"'x'",
")",
")",
"if",
"(",
"not",
"args",
".",
"get",
"(",
"'polys'",
",",
"False",
")",
")",
":",
"return",
"poly",
".",
"as_expr",
"(",
")",
"else",
":",
"return",
"poly"
] |
generates gegenbauer polynomial of degree n in x .
|
train
| false
|
5,980
|
def debugwrite(x):
try:
out = ctx.environ['wsgi.errors']
except:
out = sys.stderr
out.write(x)
|
[
"def",
"debugwrite",
"(",
"x",
")",
":",
"try",
":",
"out",
"=",
"ctx",
".",
"environ",
"[",
"'wsgi.errors'",
"]",
"except",
":",
"out",
"=",
"sys",
".",
"stderr",
"out",
".",
"write",
"(",
"x",
")"
] |
writes debug data to error stream .
|
train
| false
|
5,981
|
def create_real_path(name, loc, path, umask=False, writable=True):
if path:
my_dir = real_path(loc, path)
if (not os.path.exists(my_dir)):
logging.info('%s directory: %s does not exist, try to create it', name, my_dir)
if (not create_all_dirs(my_dir, umask)):
logging.error(T('Cannot create directory %s'), clip_path(my_dir))
return (False, my_dir)
checks = ((os.W_OK + os.R_OK) if writable else os.R_OK)
if os.access(my_dir, checks):
return (True, my_dir)
else:
logging.error(T('%s directory: %s error accessing'), name, clip_path(my_dir))
return (False, my_dir)
else:
return (False, '')
|
[
"def",
"create_real_path",
"(",
"name",
",",
"loc",
",",
"path",
",",
"umask",
"=",
"False",
",",
"writable",
"=",
"True",
")",
":",
"if",
"path",
":",
"my_dir",
"=",
"real_path",
"(",
"loc",
",",
"path",
")",
"if",
"(",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"my_dir",
")",
")",
":",
"logging",
".",
"info",
"(",
"'%s directory: %s does not exist, try to create it'",
",",
"name",
",",
"my_dir",
")",
"if",
"(",
"not",
"create_all_dirs",
"(",
"my_dir",
",",
"umask",
")",
")",
":",
"logging",
".",
"error",
"(",
"T",
"(",
"'Cannot create directory %s'",
")",
",",
"clip_path",
"(",
"my_dir",
")",
")",
"return",
"(",
"False",
",",
"my_dir",
")",
"checks",
"=",
"(",
"(",
"os",
".",
"W_OK",
"+",
"os",
".",
"R_OK",
")",
"if",
"writable",
"else",
"os",
".",
"R_OK",
")",
"if",
"os",
".",
"access",
"(",
"my_dir",
",",
"checks",
")",
":",
"return",
"(",
"True",
",",
"my_dir",
")",
"else",
":",
"logging",
".",
"error",
"(",
"T",
"(",
"'%s directory: %s error accessing'",
")",
",",
"name",
",",
"clip_path",
"(",
"my_dir",
")",
")",
"return",
"(",
"False",
",",
"my_dir",
")",
"else",
":",
"return",
"(",
"False",
",",
"''",
")"
] |
when path is relative .
|
train
| false
|
5,982
|
def empty_queue(queue):
chan = connection_manager.get_channel()
chan.queue_purge(queue)
|
[
"def",
"empty_queue",
"(",
"queue",
")",
":",
"chan",
"=",
"connection_manager",
".",
"get_channel",
"(",
")",
"chan",
".",
"queue_purge",
"(",
"queue",
")"
] |
debug function to completely erase the contents of a queue .
|
train
| false
|
5,985
|
def mdadm():
devices = set()
try:
with salt.utils.fopen('/proc/mdstat', 'r') as mdstat:
for line in mdstat:
if line.startswith('Personalities : '):
continue
if line.startswith('unused devices:'):
continue
if (' : ' in line):
devices.add(line.split(' : ')[0])
except IOError:
return {}
devices = sorted(devices)
if devices:
log.trace('mdadm devices detected: {0}'.format(', '.join(devices)))
return {'mdadm': devices}
|
[
"def",
"mdadm",
"(",
")",
":",
"devices",
"=",
"set",
"(",
")",
"try",
":",
"with",
"salt",
".",
"utils",
".",
"fopen",
"(",
"'/proc/mdstat'",
",",
"'r'",
")",
"as",
"mdstat",
":",
"for",
"line",
"in",
"mdstat",
":",
"if",
"line",
".",
"startswith",
"(",
"'Personalities : '",
")",
":",
"continue",
"if",
"line",
".",
"startswith",
"(",
"'unused devices:'",
")",
":",
"continue",
"if",
"(",
"' : '",
"in",
"line",
")",
":",
"devices",
".",
"add",
"(",
"line",
".",
"split",
"(",
"' : '",
")",
"[",
"0",
"]",
")",
"except",
"IOError",
":",
"return",
"{",
"}",
"devices",
"=",
"sorted",
"(",
"devices",
")",
"if",
"devices",
":",
"log",
".",
"trace",
"(",
"'mdadm devices detected: {0}'",
".",
"format",
"(",
"', '",
".",
"join",
"(",
"devices",
")",
")",
")",
"return",
"{",
"'mdadm'",
":",
"devices",
"}"
] |
return list of mdadm devices .
|
train
| true
|
5,987
|
def glob_to_regex(pattern):
out = [u'^']
components = pattern.strip(u'/').replace(u'.', u'[.]').split(u'/')
doublestar = False
for component in components:
if (len(out) == 1):
if pattern.startswith(u'/'):
out.append(u'/')
elif (not doublestar):
out.append(u'/')
if (u'**' in component):
if (component != u'**'):
raise ValueError(u'Invalid usage of "**", use "*" instead.')
if (not doublestar):
out.append(u'(([^/]+/)*)')
doublestar = True
else:
out.append(component.replace(u'*', u'[^/]*'))
doublestar = False
if doublestar:
out.append(u'[^/]*')
out.append(u'$')
return u''.join(out)
|
[
"def",
"glob_to_regex",
"(",
"pattern",
")",
":",
"out",
"=",
"[",
"u'^'",
"]",
"components",
"=",
"pattern",
".",
"strip",
"(",
"u'/'",
")",
".",
"replace",
"(",
"u'.'",
",",
"u'[.]'",
")",
".",
"split",
"(",
"u'/'",
")",
"doublestar",
"=",
"False",
"for",
"component",
"in",
"components",
":",
"if",
"(",
"len",
"(",
"out",
")",
"==",
"1",
")",
":",
"if",
"pattern",
".",
"startswith",
"(",
"u'/'",
")",
":",
"out",
".",
"append",
"(",
"u'/'",
")",
"elif",
"(",
"not",
"doublestar",
")",
":",
"out",
".",
"append",
"(",
"u'/'",
")",
"if",
"(",
"u'**'",
"in",
"component",
")",
":",
"if",
"(",
"component",
"!=",
"u'**'",
")",
":",
"raise",
"ValueError",
"(",
"u'Invalid usage of \"**\", use \"*\" instead.'",
")",
"if",
"(",
"not",
"doublestar",
")",
":",
"out",
".",
"append",
"(",
"u'(([^/]+/)*)'",
")",
"doublestar",
"=",
"True",
"else",
":",
"out",
".",
"append",
"(",
"component",
".",
"replace",
"(",
"u'*'",
",",
"u'[^/]*'",
")",
")",
"doublestar",
"=",
"False",
"if",
"doublestar",
":",
"out",
".",
"append",
"(",
"u'[^/]*'",
")",
"out",
".",
"append",
"(",
"u'$'",
")",
"return",
"u''",
".",
"join",
"(",
"out",
")"
] |
given a glob pattern .
|
train
| false
|
5,988
|
def test_git_require_remote_url_and_path():
from fabtools.require.git import working_copy
try:
working_copy(REMOTE_URL, path='wc')
assert is_dir('wc')
assert is_dir('wc/.git')
with cd('wc'):
remotes = run('git remote -v')
assert (remotes == 'origin DCTB https://github.com/disko/fabtools.git (fetch)\r\norigin DCTB https://github.com/disko/fabtools.git (push)')
assert (_current_branch() == 'master')
finally:
run('rm -rf wc')
|
[
"def",
"test_git_require_remote_url_and_path",
"(",
")",
":",
"from",
"fabtools",
".",
"require",
".",
"git",
"import",
"working_copy",
"try",
":",
"working_copy",
"(",
"REMOTE_URL",
",",
"path",
"=",
"'wc'",
")",
"assert",
"is_dir",
"(",
"'wc'",
")",
"assert",
"is_dir",
"(",
"'wc/.git'",
")",
"with",
"cd",
"(",
"'wc'",
")",
":",
"remotes",
"=",
"run",
"(",
"'git remote -v'",
")",
"assert",
"(",
"remotes",
"==",
"'origin DCTB https://github.com/disko/fabtools.git (fetch)\\r\\norigin DCTB https://github.com/disko/fabtools.git (push)'",
")",
"assert",
"(",
"_current_branch",
"(",
")",
"==",
"'master'",
")",
"finally",
":",
"run",
"(",
"'rm -rf wc'",
")"
] |
test working_copy() with remote url and path .
|
train
| false
|
5,989
|
def TagBytes(field_number, wire_type):
return _VarintBytes(wire_format.PackTag(field_number, wire_type))
|
[
"def",
"TagBytes",
"(",
"field_number",
",",
"wire_type",
")",
":",
"return",
"_VarintBytes",
"(",
"wire_format",
".",
"PackTag",
"(",
"field_number",
",",
"wire_type",
")",
")"
] |
encode the given tag and return the bytes .
|
train
| false
|
5,990
|
def peek_path_info(environ):
segments = environ.get('PATH_INFO', '').lstrip('/').split('/', 1)
if segments:
return segments[0]
|
[
"def",
"peek_path_info",
"(",
"environ",
")",
":",
"segments",
"=",
"environ",
".",
"get",
"(",
"'PATH_INFO'",
",",
"''",
")",
".",
"lstrip",
"(",
"'/'",
")",
".",
"split",
"(",
"'/'",
",",
"1",
")",
"if",
"segments",
":",
"return",
"segments",
"[",
"0",
"]"
] |
returns the next segment on the path_info or none if there is none .
|
train
| false
|
5,991
|
def _copy_arrays_if_base_present(T):
l = [_copy_array_if_base_present(a) for a in T]
return l
|
[
"def",
"_copy_arrays_if_base_present",
"(",
"T",
")",
":",
"l",
"=",
"[",
"_copy_array_if_base_present",
"(",
"a",
")",
"for",
"a",
"in",
"T",
"]",
"return",
"l"
] |
accepts a tuple of arrays t .
|
train
| false
|
5,993
|
def isshape(x):
try:
(M, N) = x
except:
return False
else:
if (isintlike(M) and isintlike(N)):
if ((np.ndim(M) == 0) and (np.ndim(N) == 0)):
return True
return False
|
[
"def",
"isshape",
"(",
"x",
")",
":",
"try",
":",
"(",
"M",
",",
"N",
")",
"=",
"x",
"except",
":",
"return",
"False",
"else",
":",
"if",
"(",
"isintlike",
"(",
"M",
")",
"and",
"isintlike",
"(",
"N",
")",
")",
":",
"if",
"(",
"(",
"np",
".",
"ndim",
"(",
"M",
")",
"==",
"0",
")",
"and",
"(",
"np",
".",
"ndim",
"(",
"N",
")",
"==",
"0",
")",
")",
":",
"return",
"True",
"return",
"False"
] |
is x a valid 2-tuple of dimensions? .
|
train
| false
|
5,994
|
def ellipse_perimeter(r, c, r_radius, c_radius, orientation=0, shape=None):
return _ellipse_perimeter(r, c, r_radius, c_radius, orientation, shape)
|
[
"def",
"ellipse_perimeter",
"(",
"r",
",",
"c",
",",
"r_radius",
",",
"c_radius",
",",
"orientation",
"=",
"0",
",",
"shape",
"=",
"None",
")",
":",
"return",
"_ellipse_perimeter",
"(",
"r",
",",
"c",
",",
"r_radius",
",",
"c_radius",
",",
"orientation",
",",
"shape",
")"
] |
generate ellipse perimeter coordinates .
|
train
| false
|
5,995
|
def movie_list_list(options):
with Session() as session:
try:
movie_list = get_list_by_exact_name(options.list_name)
except NoResultFound:
console(u'Could not find movie list with name {}'.format(options.list_name))
return
header = [u'#', u'Movie Name', u'Movie year']
header += MovieListBase().supported_ids
table_data = [header]
movies = get_movies_by_list_id(movie_list.id, order_by=u'added', descending=True, session=session)
for movie in movies:
movie_row = [movie.id, movie.title, (movie.year or u'')]
for identifier in MovieListBase().supported_ids:
movie_row.append(movie.identifiers.get(identifier, u''))
table_data.append(movie_row)
title = u'{} Movies in movie list: `{}`'.format(len(movies), options.list_name)
try:
table = TerminalTable(options.table_type, table_data, title, drop_columns=[5, 2, 4])
except TerminalTableError as e:
console(u'ERROR: {}'.format(e))
else:
console(table.output)
|
[
"def",
"movie_list_list",
"(",
"options",
")",
":",
"with",
"Session",
"(",
")",
"as",
"session",
":",
"try",
":",
"movie_list",
"=",
"get_list_by_exact_name",
"(",
"options",
".",
"list_name",
")",
"except",
"NoResultFound",
":",
"console",
"(",
"u'Could not find movie list with name {}'",
".",
"format",
"(",
"options",
".",
"list_name",
")",
")",
"return",
"header",
"=",
"[",
"u'#'",
",",
"u'Movie Name'",
",",
"u'Movie year'",
"]",
"header",
"+=",
"MovieListBase",
"(",
")",
".",
"supported_ids",
"table_data",
"=",
"[",
"header",
"]",
"movies",
"=",
"get_movies_by_list_id",
"(",
"movie_list",
".",
"id",
",",
"order_by",
"=",
"u'added'",
",",
"descending",
"=",
"True",
",",
"session",
"=",
"session",
")",
"for",
"movie",
"in",
"movies",
":",
"movie_row",
"=",
"[",
"movie",
".",
"id",
",",
"movie",
".",
"title",
",",
"(",
"movie",
".",
"year",
"or",
"u''",
")",
"]",
"for",
"identifier",
"in",
"MovieListBase",
"(",
")",
".",
"supported_ids",
":",
"movie_row",
".",
"append",
"(",
"movie",
".",
"identifiers",
".",
"get",
"(",
"identifier",
",",
"u''",
")",
")",
"table_data",
".",
"append",
"(",
"movie_row",
")",
"title",
"=",
"u'{} Movies in movie list: `{}`'",
".",
"format",
"(",
"len",
"(",
"movies",
")",
",",
"options",
".",
"list_name",
")",
"try",
":",
"table",
"=",
"TerminalTable",
"(",
"options",
".",
"table_type",
",",
"table_data",
",",
"title",
",",
"drop_columns",
"=",
"[",
"5",
",",
"2",
",",
"4",
"]",
")",
"except",
"TerminalTableError",
"as",
"e",
":",
"console",
"(",
"u'ERROR: {}'",
".",
"format",
"(",
"e",
")",
")",
"else",
":",
"console",
"(",
"table",
".",
"output",
")"
] |
list movie list .
|
train
| false
|
5,996
|
def build_inlinepatterns(md_instance, **kwargs):
inlinePatterns = odict.OrderedDict()
inlinePatterns[u'backtick'] = BacktickPattern(BACKTICK_RE)
inlinePatterns[u'escape'] = EscapePattern(ESCAPE_RE, md_instance)
inlinePatterns[u'reference'] = ReferencePattern(REFERENCE_RE, md_instance)
inlinePatterns[u'link'] = LinkPattern(LINK_RE, md_instance)
inlinePatterns[u'image_link'] = ImagePattern(IMAGE_LINK_RE, md_instance)
inlinePatterns[u'image_reference'] = ImageReferencePattern(IMAGE_REFERENCE_RE, md_instance)
inlinePatterns[u'short_reference'] = ReferencePattern(SHORT_REF_RE, md_instance)
inlinePatterns[u'autolink'] = AutolinkPattern(AUTOLINK_RE, md_instance)
inlinePatterns[u'automail'] = AutomailPattern(AUTOMAIL_RE, md_instance)
inlinePatterns[u'linebreak'] = SubstituteTagPattern(LINE_BREAK_RE, u'br')
if (md_instance.safeMode != u'escape'):
inlinePatterns[u'html'] = HtmlPattern(HTML_RE, md_instance)
inlinePatterns[u'entity'] = HtmlPattern(ENTITY_RE, md_instance)
inlinePatterns[u'not_strong'] = SimpleTextPattern(NOT_STRONG_RE)
inlinePatterns[u'em_strong'] = DoubleTagPattern(EM_STRONG_RE, u'strong,em')
inlinePatterns[u'strong_em'] = DoubleTagPattern(STRONG_EM_RE, u'em,strong')
inlinePatterns[u'strong'] = SimpleTagPattern(STRONG_RE, u'strong')
inlinePatterns[u'emphasis'] = SimpleTagPattern(EMPHASIS_RE, u'em')
if md_instance.smart_emphasis:
inlinePatterns[u'emphasis2'] = SimpleTagPattern(SMART_EMPHASIS_RE, u'em')
else:
inlinePatterns[u'emphasis2'] = SimpleTagPattern(EMPHASIS_2_RE, u'em')
return inlinePatterns
|
[
"def",
"build_inlinepatterns",
"(",
"md_instance",
",",
"**",
"kwargs",
")",
":",
"inlinePatterns",
"=",
"odict",
".",
"OrderedDict",
"(",
")",
"inlinePatterns",
"[",
"u'backtick'",
"]",
"=",
"BacktickPattern",
"(",
"BACKTICK_RE",
")",
"inlinePatterns",
"[",
"u'escape'",
"]",
"=",
"EscapePattern",
"(",
"ESCAPE_RE",
",",
"md_instance",
")",
"inlinePatterns",
"[",
"u'reference'",
"]",
"=",
"ReferencePattern",
"(",
"REFERENCE_RE",
",",
"md_instance",
")",
"inlinePatterns",
"[",
"u'link'",
"]",
"=",
"LinkPattern",
"(",
"LINK_RE",
",",
"md_instance",
")",
"inlinePatterns",
"[",
"u'image_link'",
"]",
"=",
"ImagePattern",
"(",
"IMAGE_LINK_RE",
",",
"md_instance",
")",
"inlinePatterns",
"[",
"u'image_reference'",
"]",
"=",
"ImageReferencePattern",
"(",
"IMAGE_REFERENCE_RE",
",",
"md_instance",
")",
"inlinePatterns",
"[",
"u'short_reference'",
"]",
"=",
"ReferencePattern",
"(",
"SHORT_REF_RE",
",",
"md_instance",
")",
"inlinePatterns",
"[",
"u'autolink'",
"]",
"=",
"AutolinkPattern",
"(",
"AUTOLINK_RE",
",",
"md_instance",
")",
"inlinePatterns",
"[",
"u'automail'",
"]",
"=",
"AutomailPattern",
"(",
"AUTOMAIL_RE",
",",
"md_instance",
")",
"inlinePatterns",
"[",
"u'linebreak'",
"]",
"=",
"SubstituteTagPattern",
"(",
"LINE_BREAK_RE",
",",
"u'br'",
")",
"if",
"(",
"md_instance",
".",
"safeMode",
"!=",
"u'escape'",
")",
":",
"inlinePatterns",
"[",
"u'html'",
"]",
"=",
"HtmlPattern",
"(",
"HTML_RE",
",",
"md_instance",
")",
"inlinePatterns",
"[",
"u'entity'",
"]",
"=",
"HtmlPattern",
"(",
"ENTITY_RE",
",",
"md_instance",
")",
"inlinePatterns",
"[",
"u'not_strong'",
"]",
"=",
"SimpleTextPattern",
"(",
"NOT_STRONG_RE",
")",
"inlinePatterns",
"[",
"u'em_strong'",
"]",
"=",
"DoubleTagPattern",
"(",
"EM_STRONG_RE",
",",
"u'strong,em'",
")",
"inlinePatterns",
"[",
"u'strong_em'",
"]",
"=",
"DoubleTagPattern",
"(",
"STRONG_EM_RE",
",",
"u'em,strong'",
")",
"inlinePatterns",
"[",
"u'strong'",
"]",
"=",
"SimpleTagPattern",
"(",
"STRONG_RE",
",",
"u'strong'",
")",
"inlinePatterns",
"[",
"u'emphasis'",
"]",
"=",
"SimpleTagPattern",
"(",
"EMPHASIS_RE",
",",
"u'em'",
")",
"if",
"md_instance",
".",
"smart_emphasis",
":",
"inlinePatterns",
"[",
"u'emphasis2'",
"]",
"=",
"SimpleTagPattern",
"(",
"SMART_EMPHASIS_RE",
",",
"u'em'",
")",
"else",
":",
"inlinePatterns",
"[",
"u'emphasis2'",
"]",
"=",
"SimpleTagPattern",
"(",
"EMPHASIS_2_RE",
",",
"u'em'",
")",
"return",
"inlinePatterns"
] |
build the default set of inline patterns for markdown .
|
train
| false
|
5,997
|
@not_implemented_for('directed')
def generate_edgelist(G, delimiter=' ', data=True):
try:
part0 = [n for (n, d) in G.node.items() if (d['bipartite'] == 0)]
except:
raise AttributeError('Missing node attribute `bipartite`')
if ((data is True) or (data is False)):
for n in part0:
for e in G.edges(n, data=data):
(yield delimiter.join(map(make_str, e)))
else:
for n in part0:
for (u, v, d) in G.edges(n, data=True):
e = [u, v]
try:
e.extend((d[k] for k in data))
except KeyError:
pass
(yield delimiter.join(map(make_str, e)))
|
[
"@",
"not_implemented_for",
"(",
"'directed'",
")",
"def",
"generate_edgelist",
"(",
"G",
",",
"delimiter",
"=",
"' '",
",",
"data",
"=",
"True",
")",
":",
"try",
":",
"part0",
"=",
"[",
"n",
"for",
"(",
"n",
",",
"d",
")",
"in",
"G",
".",
"node",
".",
"items",
"(",
")",
"if",
"(",
"d",
"[",
"'bipartite'",
"]",
"==",
"0",
")",
"]",
"except",
":",
"raise",
"AttributeError",
"(",
"'Missing node attribute `bipartite`'",
")",
"if",
"(",
"(",
"data",
"is",
"True",
")",
"or",
"(",
"data",
"is",
"False",
")",
")",
":",
"for",
"n",
"in",
"part0",
":",
"for",
"e",
"in",
"G",
".",
"edges",
"(",
"n",
",",
"data",
"=",
"data",
")",
":",
"(",
"yield",
"delimiter",
".",
"join",
"(",
"map",
"(",
"make_str",
",",
"e",
")",
")",
")",
"else",
":",
"for",
"n",
"in",
"part0",
":",
"for",
"(",
"u",
",",
"v",
",",
"d",
")",
"in",
"G",
".",
"edges",
"(",
"n",
",",
"data",
"=",
"True",
")",
":",
"e",
"=",
"[",
"u",
",",
"v",
"]",
"try",
":",
"e",
".",
"extend",
"(",
"(",
"d",
"[",
"k",
"]",
"for",
"k",
"in",
"data",
")",
")",
"except",
"KeyError",
":",
"pass",
"(",
"yield",
"delimiter",
".",
"join",
"(",
"map",
"(",
"make_str",
",",
"e",
")",
")",
")"
] |
generate a single line of the graph g in edge list format .
|
train
| false
|
5,999
|
def _fitstart_poisson(self, x, fixed=None):
a = x.min()
eps = 0
if (fixed is None):
loc = (a - eps)
elif np.isnan(fixed[(-1)]):
loc = (a - eps)
else:
loc = fixed[(-1)]
xtrans = (x - loc)
lambd = xtrans.mean()
return (lambd, loc)
|
[
"def",
"_fitstart_poisson",
"(",
"self",
",",
"x",
",",
"fixed",
"=",
"None",
")",
":",
"a",
"=",
"x",
".",
"min",
"(",
")",
"eps",
"=",
"0",
"if",
"(",
"fixed",
"is",
"None",
")",
":",
"loc",
"=",
"(",
"a",
"-",
"eps",
")",
"elif",
"np",
".",
"isnan",
"(",
"fixed",
"[",
"(",
"-",
"1",
")",
"]",
")",
":",
"loc",
"=",
"(",
"a",
"-",
"eps",
")",
"else",
":",
"loc",
"=",
"fixed",
"[",
"(",
"-",
"1",
")",
"]",
"xtrans",
"=",
"(",
"x",
"-",
"loc",
")",
"lambd",
"=",
"xtrans",
".",
"mean",
"(",
")",
"return",
"(",
"lambd",
",",
"loc",
")"
] |
maximum likelihood estimator as starting values for poisson distribution parameters x : array data for which the parameters are estimated fixed : none or array_like sequence of numbers and np .
|
train
| false
|
6,000
|
def quotes_historical_yahoo_ohlc(ticker, date1, date2, asobject=False, adjusted=True, cachename=None):
return _quotes_historical_yahoo(ticker, date1, date2, asobject=asobject, adjusted=adjusted, cachename=cachename, ochl=False)
|
[
"def",
"quotes_historical_yahoo_ohlc",
"(",
"ticker",
",",
"date1",
",",
"date2",
",",
"asobject",
"=",
"False",
",",
"adjusted",
"=",
"True",
",",
"cachename",
"=",
"None",
")",
":",
"return",
"_quotes_historical_yahoo",
"(",
"ticker",
",",
"date1",
",",
"date2",
",",
"asobject",
"=",
"asobject",
",",
"adjusted",
"=",
"adjusted",
",",
"cachename",
"=",
"cachename",
",",
"ochl",
"=",
"False",
")"
] |
get historical data for ticker between date1 and date2 .
|
train
| false
|
6,001
|
def get_hosting_services():
return list(_hosting_service_registry)
|
[
"def",
"get_hosting_services",
"(",
")",
":",
"return",
"list",
"(",
"_hosting_service_registry",
")"
] |
return the list of hosting services .
|
train
| false
|
6,002
|
def get_static_page_by_path(path):
if (path == 'index_2.html'):
return get_static_index_page(False)
elif (path == 'index.html'):
return get_static_index_page(True)
elif (path == 'NLTK Wordnet Browser Database Info.html'):
return 'Display of Wordnet Database Statistics is not supported'
elif (path == 'upper_2.html'):
return get_static_upper_page(False)
elif (path == 'upper.html'):
return get_static_upper_page(True)
elif (path == 'web_help.html'):
return get_static_web_help_page()
elif (path == 'wx_help.html'):
return get_static_wx_help_page()
else:
return ("Internal error: Path for static page '%s' is unknown" % path)
|
[
"def",
"get_static_page_by_path",
"(",
"path",
")",
":",
"if",
"(",
"path",
"==",
"'index_2.html'",
")",
":",
"return",
"get_static_index_page",
"(",
"False",
")",
"elif",
"(",
"path",
"==",
"'index.html'",
")",
":",
"return",
"get_static_index_page",
"(",
"True",
")",
"elif",
"(",
"path",
"==",
"'NLTK Wordnet Browser Database Info.html'",
")",
":",
"return",
"'Display of Wordnet Database Statistics is not supported'",
"elif",
"(",
"path",
"==",
"'upper_2.html'",
")",
":",
"return",
"get_static_upper_page",
"(",
"False",
")",
"elif",
"(",
"path",
"==",
"'upper.html'",
")",
":",
"return",
"get_static_upper_page",
"(",
"True",
")",
"elif",
"(",
"path",
"==",
"'web_help.html'",
")",
":",
"return",
"get_static_web_help_page",
"(",
")",
"elif",
"(",
"path",
"==",
"'wx_help.html'",
")",
":",
"return",
"get_static_wx_help_page",
"(",
")",
"else",
":",
"return",
"(",
"\"Internal error: Path for static page '%s' is unknown\"",
"%",
"path",
")"
] |
return a static html page from the path given .
|
train
| false
|
6,006
|
def confirm_updated(value, check_fun, normalize_ret=False, wait=5):
for i in range(wait):
state = (validate_enabled(check_fun()) if normalize_ret else check_fun())
if (value in state):
return True
time.sleep(1)
return False
|
[
"def",
"confirm_updated",
"(",
"value",
",",
"check_fun",
",",
"normalize_ret",
"=",
"False",
",",
"wait",
"=",
"5",
")",
":",
"for",
"i",
"in",
"range",
"(",
"wait",
")",
":",
"state",
"=",
"(",
"validate_enabled",
"(",
"check_fun",
"(",
")",
")",
"if",
"normalize_ret",
"else",
"check_fun",
"(",
")",
")",
"if",
"(",
"value",
"in",
"state",
")",
":",
"return",
"True",
"time",
".",
"sleep",
"(",
"1",
")",
"return",
"False"
] |
wait up to wait seconds for a system parameter to be changed before deciding it hasnt changed .
|
train
| true
|
6,007
|
def inverse_mellin_transform(F, s, x, strip, **hints):
return InverseMellinTransform(F, s, x, strip[0], strip[1]).doit(**hints)
|
[
"def",
"inverse_mellin_transform",
"(",
"F",
",",
"s",
",",
"x",
",",
"strip",
",",
"**",
"hints",
")",
":",
"return",
"InverseMellinTransform",
"(",
"F",
",",
"s",
",",
"x",
",",
"strip",
"[",
"0",
"]",
",",
"strip",
"[",
"1",
"]",
")",
".",
"doit",
"(",
"**",
"hints",
")"
] |
compute the inverse mellin transform of f(s) over the fundamental strip given by strip= .
|
train
| false
|
6,009
|
def break_into_chunks(bigiterator, chunksize=500):
biglist = list(bigiterator)
return [biglist[i:(i + chunksize)] for i in range(0, len(biglist), chunksize)]
|
[
"def",
"break_into_chunks",
"(",
"bigiterator",
",",
"chunksize",
"=",
"500",
")",
":",
"biglist",
"=",
"list",
"(",
"bigiterator",
")",
"return",
"[",
"biglist",
"[",
"i",
":",
"(",
"i",
"+",
"chunksize",
")",
"]",
"for",
"i",
"in",
"range",
"(",
"0",
",",
"len",
"(",
"biglist",
")",
",",
"chunksize",
")",
"]"
] |
given an iterator .
|
train
| false
|
6,010
|
def _proc_name_from_pid(pid):
try:
ps_output = check_output(['/bin/ps', '-p', str(pid), '-o', 'comm'])
return ps_output.split('\n')[1]
except CalledProcessError:
return 'unknown'
|
[
"def",
"_proc_name_from_pid",
"(",
"pid",
")",
":",
"try",
":",
"ps_output",
"=",
"check_output",
"(",
"[",
"'/bin/ps'",
",",
"'-p'",
",",
"str",
"(",
"pid",
")",
",",
"'-o'",
",",
"'comm'",
"]",
")",
"return",
"ps_output",
".",
"split",
"(",
"'\\n'",
")",
"[",
"1",
"]",
"except",
"CalledProcessError",
":",
"return",
"'unknown'"
] |
parses ps -o comm output for the given pid .
|
train
| false
|
6,011
|
def _FilterActionsFromExcluded(excluded_sources, actions_to_add):
must_keep = OrderedSet(_FixPaths(actions_to_add.keys()))
return [s for s in excluded_sources if (s not in must_keep)]
|
[
"def",
"_FilterActionsFromExcluded",
"(",
"excluded_sources",
",",
"actions_to_add",
")",
":",
"must_keep",
"=",
"OrderedSet",
"(",
"_FixPaths",
"(",
"actions_to_add",
".",
"keys",
"(",
")",
")",
")",
"return",
"[",
"s",
"for",
"s",
"in",
"excluded_sources",
"if",
"(",
"s",
"not",
"in",
"must_keep",
")",
"]"
] |
take inputs with actions attached out of the list of exclusions .
|
train
| false
|
6,012
|
def map_keys(dikt, func):
return dict(((func(key), value) for (key, value) in six.iteritems(dikt)))
|
[
"def",
"map_keys",
"(",
"dikt",
",",
"func",
")",
":",
"return",
"dict",
"(",
"(",
"(",
"func",
"(",
"key",
")",
",",
"value",
")",
"for",
"(",
"key",
",",
"value",
")",
"in",
"six",
".",
"iteritems",
"(",
"dikt",
")",
")",
")"
] |
map dictionary keys .
|
train
| false
|
6,013
|
def gen_challenge_path(challbs, preferences, combinations):
if combinations:
return _find_smart_path(challbs, preferences, combinations)
else:
return _find_dumb_path(challbs, preferences)
|
[
"def",
"gen_challenge_path",
"(",
"challbs",
",",
"preferences",
",",
"combinations",
")",
":",
"if",
"combinations",
":",
"return",
"_find_smart_path",
"(",
"challbs",
",",
"preferences",
",",
"combinations",
")",
"else",
":",
"return",
"_find_dumb_path",
"(",
"challbs",
",",
"preferences",
")"
] |
generate a plan to get authority over the identity .
|
train
| false
|
6,014
|
def fix_help_options(options):
new_options = []
for help_tuple in options:
new_options.append(help_tuple[0:3])
return new_options
|
[
"def",
"fix_help_options",
"(",
"options",
")",
":",
"new_options",
"=",
"[",
"]",
"for",
"help_tuple",
"in",
"options",
":",
"new_options",
".",
"append",
"(",
"help_tuple",
"[",
"0",
":",
"3",
"]",
")",
"return",
"new_options"
] |
convert a 4-tuple help_options list as found in various command classes to the 3-tuple form required by fancygetopt .
|
train
| false
|
6,015
|
def hostgroup_create(name, **connection_args):
conn_args = _login(**connection_args)
try:
if conn_args:
method = 'hostgroup.create'
params = {'name': name}
params = _params_extend(params, **connection_args)
ret = _query(method, params, conn_args['url'], conn_args['auth'])
return ret['result']['groupids']
else:
raise KeyError
except KeyError:
return ret
|
[
"def",
"hostgroup_create",
"(",
"name",
",",
"**",
"connection_args",
")",
":",
"conn_args",
"=",
"_login",
"(",
"**",
"connection_args",
")",
"try",
":",
"if",
"conn_args",
":",
"method",
"=",
"'hostgroup.create'",
"params",
"=",
"{",
"'name'",
":",
"name",
"}",
"params",
"=",
"_params_extend",
"(",
"params",
",",
"**",
"connection_args",
")",
"ret",
"=",
"_query",
"(",
"method",
",",
"params",
",",
"conn_args",
"[",
"'url'",
"]",
",",
"conn_args",
"[",
"'auth'",
"]",
")",
"return",
"ret",
"[",
"'result'",
"]",
"[",
"'groupids'",
"]",
"else",
":",
"raise",
"KeyError",
"except",
"KeyError",
":",
"return",
"ret"
] |
create a host group .
|
train
| true
|
6,017
|
@library.filter
def ifeq(a, b, text):
return Markup((text if (a == b) else ''))
|
[
"@",
"library",
".",
"filter",
"def",
"ifeq",
"(",
"a",
",",
"b",
",",
"text",
")",
":",
"return",
"Markup",
"(",
"(",
"text",
"if",
"(",
"a",
"==",
"b",
")",
"else",
"''",
")",
")"
] |
return text if a == b .
|
train
| false
|
6,018
|
def mac_change_back():
global RUN_CONFIG
iface = RUN_CONFIG.ORIGINAL_IFACE_MAC[0]
old_mac = RUN_CONFIG.ORIGINAL_IFACE_MAC[1]
if ((iface == '') or (old_mac == '')):
return
print (((GR + ' [+]') + W) + (" changing %s's mac back to %s..." % (((G + iface) + W), ((G + old_mac) + W)))),
stdout.flush()
call(['ifconfig', iface, 'down'], stdout=DN, stderr=DN)
proc = Popen(['ifconfig', iface, 'hw', 'ether', old_mac], stdout=PIPE, stderr=DN)
proc.wait()
call(['ifconfig', iface, 'up'], stdout=DN, stderr=DN)
print 'done'
|
[
"def",
"mac_change_back",
"(",
")",
":",
"global",
"RUN_CONFIG",
"iface",
"=",
"RUN_CONFIG",
".",
"ORIGINAL_IFACE_MAC",
"[",
"0",
"]",
"old_mac",
"=",
"RUN_CONFIG",
".",
"ORIGINAL_IFACE_MAC",
"[",
"1",
"]",
"if",
"(",
"(",
"iface",
"==",
"''",
")",
"or",
"(",
"old_mac",
"==",
"''",
")",
")",
":",
"return",
"print",
"(",
"(",
"(",
"GR",
"+",
"' [+]'",
")",
"+",
"W",
")",
"+",
"(",
"\" changing %s's mac back to %s...\"",
"%",
"(",
"(",
"(",
"G",
"+",
"iface",
")",
"+",
"W",
")",
",",
"(",
"(",
"G",
"+",
"old_mac",
")",
"+",
"W",
")",
")",
")",
")",
",",
"stdout",
".",
"flush",
"(",
")",
"call",
"(",
"[",
"'ifconfig'",
",",
"iface",
",",
"'down'",
"]",
",",
"stdout",
"=",
"DN",
",",
"stderr",
"=",
"DN",
")",
"proc",
"=",
"Popen",
"(",
"[",
"'ifconfig'",
",",
"iface",
",",
"'hw'",
",",
"'ether'",
",",
"old_mac",
"]",
",",
"stdout",
"=",
"PIPE",
",",
"stderr",
"=",
"DN",
")",
"proc",
".",
"wait",
"(",
")",
"call",
"(",
"[",
"'ifconfig'",
",",
"iface",
",",
"'up'",
"]",
",",
"stdout",
"=",
"DN",
",",
"stderr",
"=",
"DN",
")",
"print",
"'done'"
] |
changes mac address back to what it was before attacks began .
|
train
| false
|
6,020
|
def _butterfly_on_button_press(event, params):
if params['need_draw']:
event.canvas.draw()
else:
idx = np.where([(event.inaxes is ax) for ax in params['axes']])[0]
if (len(idx) == 1):
text = params['texts'][idx[0]]
text.set_alpha(0.0)
text.set_path_effects([])
event.canvas.draw()
params['need_draw'] = False
|
[
"def",
"_butterfly_on_button_press",
"(",
"event",
",",
"params",
")",
":",
"if",
"params",
"[",
"'need_draw'",
"]",
":",
"event",
".",
"canvas",
".",
"draw",
"(",
")",
"else",
":",
"idx",
"=",
"np",
".",
"where",
"(",
"[",
"(",
"event",
".",
"inaxes",
"is",
"ax",
")",
"for",
"ax",
"in",
"params",
"[",
"'axes'",
"]",
"]",
")",
"[",
"0",
"]",
"if",
"(",
"len",
"(",
"idx",
")",
"==",
"1",
")",
":",
"text",
"=",
"params",
"[",
"'texts'",
"]",
"[",
"idx",
"[",
"0",
"]",
"]",
"text",
".",
"set_alpha",
"(",
"0.0",
")",
"text",
".",
"set_path_effects",
"(",
"[",
"]",
")",
"event",
".",
"canvas",
".",
"draw",
"(",
")",
"params",
"[",
"'need_draw'",
"]",
"=",
"False"
] |
only draw once for picking .
|
train
| false
|
6,021
|
def extract_bugs(changelog):
bug_regexp = re.compile('\\bbug (\\d+)\\b', re.I)
bugs = set()
for line in changelog:
for bug in bug_regexp.findall(line):
bugs.add(bug)
return sorted(list(bugs))
|
[
"def",
"extract_bugs",
"(",
"changelog",
")",
":",
"bug_regexp",
"=",
"re",
".",
"compile",
"(",
"'\\\\bbug (\\\\d+)\\\\b'",
",",
"re",
".",
"I",
")",
"bugs",
"=",
"set",
"(",
")",
"for",
"line",
"in",
"changelog",
":",
"for",
"bug",
"in",
"bug_regexp",
".",
"findall",
"(",
"line",
")",
":",
"bugs",
".",
"add",
"(",
"bug",
")",
"return",
"sorted",
"(",
"list",
"(",
"bugs",
")",
")"
] |
takes output from git log --oneline and extracts bug numbers .
|
train
| true
|
6,022
|
@public
def legendre_poly(n, x=None, **args):
if (n < 0):
raise ValueError(("can't generate Legendre polynomial of degree %s" % n))
poly = DMP(dup_legendre(int(n), QQ), QQ)
if (x is not None):
poly = Poly.new(poly, x)
else:
poly = PurePoly.new(poly, Dummy('x'))
if (not args.get('polys', False)):
return poly.as_expr()
else:
return poly
|
[
"@",
"public",
"def",
"legendre_poly",
"(",
"n",
",",
"x",
"=",
"None",
",",
"**",
"args",
")",
":",
"if",
"(",
"n",
"<",
"0",
")",
":",
"raise",
"ValueError",
"(",
"(",
"\"can't generate Legendre polynomial of degree %s\"",
"%",
"n",
")",
")",
"poly",
"=",
"DMP",
"(",
"dup_legendre",
"(",
"int",
"(",
"n",
")",
",",
"QQ",
")",
",",
"QQ",
")",
"if",
"(",
"x",
"is",
"not",
"None",
")",
":",
"poly",
"=",
"Poly",
".",
"new",
"(",
"poly",
",",
"x",
")",
"else",
":",
"poly",
"=",
"PurePoly",
".",
"new",
"(",
"poly",
",",
"Dummy",
"(",
"'x'",
")",
")",
"if",
"(",
"not",
"args",
".",
"get",
"(",
"'polys'",
",",
"False",
")",
")",
":",
"return",
"poly",
".",
"as_expr",
"(",
")",
"else",
":",
"return",
"poly"
] |
generates legendre polynomial of degree n in x .
|
train
| false
|
6,023
|
def list_blobs_with_prefix(bucket_name, prefix, delimiter=None):
storage_client = storage.Client()
bucket = storage_client.get_bucket(bucket_name)
blobs = bucket.list_blobs(prefix=prefix, delimiter=delimiter)
print 'Blobs:'
for blob in blobs:
print blob.name
if delimiter:
print 'Prefixes:'
for prefix in blobs.prefixes:
print prefix
|
[
"def",
"list_blobs_with_prefix",
"(",
"bucket_name",
",",
"prefix",
",",
"delimiter",
"=",
"None",
")",
":",
"storage_client",
"=",
"storage",
".",
"Client",
"(",
")",
"bucket",
"=",
"storage_client",
".",
"get_bucket",
"(",
"bucket_name",
")",
"blobs",
"=",
"bucket",
".",
"list_blobs",
"(",
"prefix",
"=",
"prefix",
",",
"delimiter",
"=",
"delimiter",
")",
"print",
"'Blobs:'",
"for",
"blob",
"in",
"blobs",
":",
"print",
"blob",
".",
"name",
"if",
"delimiter",
":",
"print",
"'Prefixes:'",
"for",
"prefix",
"in",
"blobs",
".",
"prefixes",
":",
"print",
"prefix"
] |
lists all the blobs in the bucket that begin with the prefix .
|
train
| false
|
6,024
|
@treeio_login_required
@handle_response_format
def item_add(request, response_format='html'):
context = _get_default_context(request)
return render_to_response('infrastructure/item_add', context, context_instance=RequestContext(request), response_format=response_format)
|
[
"@",
"treeio_login_required",
"@",
"handle_response_format",
"def",
"item_add",
"(",
"request",
",",
"response_format",
"=",
"'html'",
")",
":",
"context",
"=",
"_get_default_context",
"(",
"request",
")",
"return",
"render_to_response",
"(",
"'infrastructure/item_add'",
",",
"context",
",",
"context_instance",
"=",
"RequestContext",
"(",
"request",
")",
",",
"response_format",
"=",
"response_format",
")"
] |
add new knowledge item .
|
train
| false
|
6,025
|
def _UpdateCost(cost, entity_writes, index_writes):
cost.set_entity_writes((cost.entity_writes() + entity_writes))
cost.set_index_writes((cost.index_writes() + index_writes))
|
[
"def",
"_UpdateCost",
"(",
"cost",
",",
"entity_writes",
",",
"index_writes",
")",
":",
"cost",
".",
"set_entity_writes",
"(",
"(",
"cost",
".",
"entity_writes",
"(",
")",
"+",
"entity_writes",
")",
")",
"cost",
".",
"set_index_writes",
"(",
"(",
"cost",
".",
"index_writes",
"(",
")",
"+",
"index_writes",
")",
")"
] |
updates the provided cost .
|
train
| false
|
6,028
|
def strptime_musicbrainz(date_str):
acceptable_formats = ('%Y-%m-%d', '%Y-%m', '%Y')
for date_format in acceptable_formats:
try:
return datetime.datetime.strptime(date_str, date_format)
except:
pass
return None
|
[
"def",
"strptime_musicbrainz",
"(",
"date_str",
")",
":",
"acceptable_formats",
"=",
"(",
"'%Y-%m-%d'",
",",
"'%Y-%m'",
",",
"'%Y'",
")",
"for",
"date_format",
"in",
"acceptable_formats",
":",
"try",
":",
"return",
"datetime",
".",
"datetime",
".",
"strptime",
"(",
"date_str",
",",
"date_format",
")",
"except",
":",
"pass",
"return",
"None"
] |
release date as returned by musicbrainz may contain the full date but it may as well be just year-month or even just the year .
|
train
| false
|
6,029
|
@require_GET
def topic_lookup(request):
topiclist = []
if request.is_ajax():
topic = request.GET.get('topic', '')
if topic:
matches = Document.objects.filter(slug__icontains=topic)
for match in matches:
topiclist.append({'label': match.slug})
data = json.dumps(topiclist)
return HttpResponse(data, content_type='application/json; charset=utf-8')
|
[
"@",
"require_GET",
"def",
"topic_lookup",
"(",
"request",
")",
":",
"topiclist",
"=",
"[",
"]",
"if",
"request",
".",
"is_ajax",
"(",
")",
":",
"topic",
"=",
"request",
".",
"GET",
".",
"get",
"(",
"'topic'",
",",
"''",
")",
"if",
"topic",
":",
"matches",
"=",
"Document",
".",
"objects",
".",
"filter",
"(",
"slug__icontains",
"=",
"topic",
")",
"for",
"match",
"in",
"matches",
":",
"topiclist",
".",
"append",
"(",
"{",
"'label'",
":",
"match",
".",
"slug",
"}",
")",
"data",
"=",
"json",
".",
"dumps",
"(",
"topiclist",
")",
"return",
"HttpResponse",
"(",
"data",
",",
"content_type",
"=",
"'application/json; charset=utf-8'",
")"
] |
returns partial topic matches .
|
train
| false
|
6,030
|
def GetEmail(prompt):
last_email_file_name = os.path.expanduser('~/.last_codereview_email_address')
last_email = ''
if os.path.exists(last_email_file_name):
try:
last_email_file = open(last_email_file_name, 'r')
last_email = last_email_file.readline().strip('\n')
last_email_file.close()
prompt += (' [%s]' % last_email)
except IOError as e:
pass
email = raw_input((prompt + ': ')).strip()
if email:
try:
last_email_file = open(last_email_file_name, 'w')
last_email_file.write(email)
last_email_file.close()
except IOError as e:
pass
else:
email = last_email
return email
|
[
"def",
"GetEmail",
"(",
"prompt",
")",
":",
"last_email_file_name",
"=",
"os",
".",
"path",
".",
"expanduser",
"(",
"'~/.last_codereview_email_address'",
")",
"last_email",
"=",
"''",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"last_email_file_name",
")",
":",
"try",
":",
"last_email_file",
"=",
"open",
"(",
"last_email_file_name",
",",
"'r'",
")",
"last_email",
"=",
"last_email_file",
".",
"readline",
"(",
")",
".",
"strip",
"(",
"'\\n'",
")",
"last_email_file",
".",
"close",
"(",
")",
"prompt",
"+=",
"(",
"' [%s]'",
"%",
"last_email",
")",
"except",
"IOError",
"as",
"e",
":",
"pass",
"email",
"=",
"raw_input",
"(",
"(",
"prompt",
"+",
"': '",
")",
")",
".",
"strip",
"(",
")",
"if",
"email",
":",
"try",
":",
"last_email_file",
"=",
"open",
"(",
"last_email_file_name",
",",
"'w'",
")",
"last_email_file",
".",
"write",
"(",
"email",
")",
"last_email_file",
".",
"close",
"(",
")",
"except",
"IOError",
"as",
"e",
":",
"pass",
"else",
":",
"email",
"=",
"last_email",
"return",
"email"
] |
prompts the user for their email address and returns it .
|
train
| false
|
6,032
|
def substrings(word, from_beginning_only=False):
w_len = len(word)
w_len_plus_1 = (w_len + 1)
i = 0
while (i < w_len):
j = (i + 2)
while (j < w_len_plus_1):
(yield word[i:j])
j += 1
if from_beginning_only:
return
i += 1
|
[
"def",
"substrings",
"(",
"word",
",",
"from_beginning_only",
"=",
"False",
")",
":",
"w_len",
"=",
"len",
"(",
"word",
")",
"w_len_plus_1",
"=",
"(",
"w_len",
"+",
"1",
")",
"i",
"=",
"0",
"while",
"(",
"i",
"<",
"w_len",
")",
":",
"j",
"=",
"(",
"i",
"+",
"2",
")",
"while",
"(",
"j",
"<",
"w_len_plus_1",
")",
":",
"(",
"yield",
"word",
"[",
"i",
":",
"j",
"]",
")",
"j",
"+=",
"1",
"if",
"from_beginning_only",
":",
"return",
"i",
"+=",
"1"
] |
a generator of all substrings in word greater than 1 character in length .
|
train
| false
|
6,033
|
def report_warning(message):
if (sys.stderr.isatty() and (compat_os_name != u'nt')):
_msg_header = u'\x1b[0;33mWARNING:\x1b[0m'
else:
_msg_header = u'WARNING:'
output = (u'%s %s\n' % (_msg_header, message))
if ((u'b' in getattr(sys.stderr, u'mode', u'')) or (sys.version_info[0] < 3)):
output = output.encode(preferredencoding())
sys.stderr.write(output)
|
[
"def",
"report_warning",
"(",
"message",
")",
":",
"if",
"(",
"sys",
".",
"stderr",
".",
"isatty",
"(",
")",
"and",
"(",
"compat_os_name",
"!=",
"u'nt'",
")",
")",
":",
"_msg_header",
"=",
"u'\\x1b[0;33mWARNING:\\x1b[0m'",
"else",
":",
"_msg_header",
"=",
"u'WARNING:'",
"output",
"=",
"(",
"u'%s %s\\n'",
"%",
"(",
"_msg_header",
",",
"message",
")",
")",
"if",
"(",
"(",
"u'b'",
"in",
"getattr",
"(",
"sys",
".",
"stderr",
",",
"u'mode'",
",",
"u''",
")",
")",
"or",
"(",
"sys",
".",
"version_info",
"[",
"0",
"]",
"<",
"3",
")",
")",
":",
"output",
"=",
"output",
".",
"encode",
"(",
"preferredencoding",
"(",
")",
")",
"sys",
".",
"stderr",
".",
"write",
"(",
"output",
")"
] |
print the message to stderr .
|
train
| false
|
6,034
|
def _check_current_value(gnome_kwargs, value):
current_value = __salt__['gnome.get'](**gnome_kwargs)
return (str(current_value) == str(value))
|
[
"def",
"_check_current_value",
"(",
"gnome_kwargs",
",",
"value",
")",
":",
"current_value",
"=",
"__salt__",
"[",
"'gnome.get'",
"]",
"(",
"**",
"gnome_kwargs",
")",
"return",
"(",
"str",
"(",
"current_value",
")",
"==",
"str",
"(",
"value",
")",
")"
] |
check the current value with the passed value .
|
train
| false
|
6,037
|
def get_logger(name):
old_class = logging.getLoggerClass()
logging.setLoggerClass(logging.Logger)
logger = logging.getLogger(name)
logging.setLoggerClass(old_class)
return logger
|
[
"def",
"get_logger",
"(",
"name",
")",
":",
"old_class",
"=",
"logging",
".",
"getLoggerClass",
"(",
")",
"logging",
".",
"setLoggerClass",
"(",
"logging",
".",
"Logger",
")",
"logger",
"=",
"logging",
".",
"getLogger",
"(",
"name",
")",
"logging",
".",
"setLoggerClass",
"(",
"old_class",
")",
"return",
"logger"
] |
attach a file handler to the logger if there isnt one already .
|
train
| false
|
6,038
|
def get_log_format_types():
ret = dict()
prefix = 'logging/'
with salt.utils.winapi.Com():
try:
connection = wmi.WMI(namespace=_WMI_NAMESPACE)
objs = connection.IISLogModuleSetting()
for obj in objs:
name = str(obj.Name).replace(prefix, '', 1)
ret[name] = str(obj.LogModuleId)
except wmi.x_wmi as error:
_LOG.error('Encountered WMI error: %s', error.com_error)
except (AttributeError, IndexError) as error:
_LOG.error('Error getting IISLogModuleSetting: %s', error)
if (not ret):
_LOG.error('Unable to get log format types.')
return ret
|
[
"def",
"get_log_format_types",
"(",
")",
":",
"ret",
"=",
"dict",
"(",
")",
"prefix",
"=",
"'logging/'",
"with",
"salt",
".",
"utils",
".",
"winapi",
".",
"Com",
"(",
")",
":",
"try",
":",
"connection",
"=",
"wmi",
".",
"WMI",
"(",
"namespace",
"=",
"_WMI_NAMESPACE",
")",
"objs",
"=",
"connection",
".",
"IISLogModuleSetting",
"(",
")",
"for",
"obj",
"in",
"objs",
":",
"name",
"=",
"str",
"(",
"obj",
".",
"Name",
")",
".",
"replace",
"(",
"prefix",
",",
"''",
",",
"1",
")",
"ret",
"[",
"name",
"]",
"=",
"str",
"(",
"obj",
".",
"LogModuleId",
")",
"except",
"wmi",
".",
"x_wmi",
"as",
"error",
":",
"_LOG",
".",
"error",
"(",
"'Encountered WMI error: %s'",
",",
"error",
".",
"com_error",
")",
"except",
"(",
"AttributeError",
",",
"IndexError",
")",
"as",
"error",
":",
"_LOG",
".",
"error",
"(",
"'Error getting IISLogModuleSetting: %s'",
",",
"error",
")",
"if",
"(",
"not",
"ret",
")",
":",
"_LOG",
".",
"error",
"(",
"'Unable to get log format types.'",
")",
"return",
"ret"
] |
get all available log format names and ids .
|
train
| true
|
6,039
|
def list_all_tmux_configs():
for (root, dirs, files) in os.walk(TMUX_CONFIG_DIRECTORY):
dirs[:] = ()
for fname in files:
match = CONFIG_FILE_NAME.match(fname)
if match:
assert (match.group(u'suffix') is None)
(yield (os.path.join(root, fname), CONFIG_MATCHERS[match.group(u'mod')], CONFIG_PRIORITY[match.group(u'mod')], TmuxVersionInfo(int(match.group(u'major')), int(match.group(u'minor')), match.group(u'suffix'))))
|
[
"def",
"list_all_tmux_configs",
"(",
")",
":",
"for",
"(",
"root",
",",
"dirs",
",",
"files",
")",
"in",
"os",
".",
"walk",
"(",
"TMUX_CONFIG_DIRECTORY",
")",
":",
"dirs",
"[",
":",
"]",
"=",
"(",
")",
"for",
"fname",
"in",
"files",
":",
"match",
"=",
"CONFIG_FILE_NAME",
".",
"match",
"(",
"fname",
")",
"if",
"match",
":",
"assert",
"(",
"match",
".",
"group",
"(",
"u'suffix'",
")",
"is",
"None",
")",
"(",
"yield",
"(",
"os",
".",
"path",
".",
"join",
"(",
"root",
",",
"fname",
")",
",",
"CONFIG_MATCHERS",
"[",
"match",
".",
"group",
"(",
"u'mod'",
")",
"]",
",",
"CONFIG_PRIORITY",
"[",
"match",
".",
"group",
"(",
"u'mod'",
")",
"]",
",",
"TmuxVersionInfo",
"(",
"int",
"(",
"match",
".",
"group",
"(",
"u'major'",
")",
")",
",",
"int",
"(",
"match",
".",
"group",
"(",
"u'minor'",
")",
")",
",",
"match",
".",
"group",
"(",
"u'suffix'",
")",
")",
")",
")"
] |
list all version-specific tmux configuration files .
|
train
| false
|
6,041
|
def legacy_path(path):
return urljoin(LEGACY_PYTHON_DOMAIN, path)
|
[
"def",
"legacy_path",
"(",
"path",
")",
":",
"return",
"urljoin",
"(",
"LEGACY_PYTHON_DOMAIN",
",",
"path",
")"
] |
build a path to the same path under the legacy .
|
train
| false
|
6,042
|
def templated_plugin_factory(identifier, template_name, **kwargs):
ns = {u'identifier': identifier, u'template_name': template_name}
ns.update(kwargs)
ns.setdefault(u'name', space_case(identifier).title())
return type(str((u'%sPlugin' % identifier)), (TemplatedPlugin,), ns)
|
[
"def",
"templated_plugin_factory",
"(",
"identifier",
",",
"template_name",
",",
"**",
"kwargs",
")",
":",
"ns",
"=",
"{",
"u'identifier'",
":",
"identifier",
",",
"u'template_name'",
":",
"template_name",
"}",
"ns",
".",
"update",
"(",
"kwargs",
")",
"ns",
".",
"setdefault",
"(",
"u'name'",
",",
"space_case",
"(",
"identifier",
")",
".",
"title",
"(",
")",
")",
"return",
"type",
"(",
"str",
"(",
"(",
"u'%sPlugin'",
"%",
"identifier",
")",
")",
",",
"(",
"TemplatedPlugin",
",",
")",
",",
"ns",
")"
] |
a factory to quickly create simple plugins .
|
train
| false
|
6,043
|
@receiver(models.signals.post_save, sender=CreditProvider)
@receiver(models.signals.post_delete, sender=CreditProvider)
def invalidate_provider_cache(sender, **kwargs):
cache.delete(CreditProvider.CREDIT_PROVIDERS_CACHE_KEY)
|
[
"@",
"receiver",
"(",
"models",
".",
"signals",
".",
"post_save",
",",
"sender",
"=",
"CreditProvider",
")",
"@",
"receiver",
"(",
"models",
".",
"signals",
".",
"post_delete",
",",
"sender",
"=",
"CreditProvider",
")",
"def",
"invalidate_provider_cache",
"(",
"sender",
",",
"**",
"kwargs",
")",
":",
"cache",
".",
"delete",
"(",
"CreditProvider",
".",
"CREDIT_PROVIDERS_CACHE_KEY",
")"
] |
invalidate the cache of credit providers .
|
train
| false
|
6,044
|
def _make_table_stub(client):
if (client.emulator_host is None):
return make_secure_stub(client.credentials, client.user_agent, bigtable_table_admin_pb2.BigtableTableAdminStub, TABLE_ADMIN_HOST)
else:
return make_insecure_stub(bigtable_table_admin_pb2.BigtableTableAdminStub, client.emulator_host)
|
[
"def",
"_make_table_stub",
"(",
"client",
")",
":",
"if",
"(",
"client",
".",
"emulator_host",
"is",
"None",
")",
":",
"return",
"make_secure_stub",
"(",
"client",
".",
"credentials",
",",
"client",
".",
"user_agent",
",",
"bigtable_table_admin_pb2",
".",
"BigtableTableAdminStub",
",",
"TABLE_ADMIN_HOST",
")",
"else",
":",
"return",
"make_insecure_stub",
"(",
"bigtable_table_admin_pb2",
".",
"BigtableTableAdminStub",
",",
"client",
".",
"emulator_host",
")"
] |
creates grpc stub to make requests to the table admin api .
|
train
| false
|
6,045
|
def GetTZCapabilities():
tzi = TimeZoneInfo('Mountain Standard Time')
MissingTZPatch = (datetime.datetime(2007, 11, 2, tzinfo=tzi).utctimetuple() != (2007, 11, 2, 6, 0, 0, 4, 306, 0))
DynamicTZSupport = ((not MissingTZPatch) and (datetime.datetime(2003, 11, 2, tzinfo=tzi).utctimetuple() == (2003, 11, 2, 7, 0, 0, 6, 306, 0)))
del tzi
return vars()
|
[
"def",
"GetTZCapabilities",
"(",
")",
":",
"tzi",
"=",
"TimeZoneInfo",
"(",
"'Mountain Standard Time'",
")",
"MissingTZPatch",
"=",
"(",
"datetime",
".",
"datetime",
"(",
"2007",
",",
"11",
",",
"2",
",",
"tzinfo",
"=",
"tzi",
")",
".",
"utctimetuple",
"(",
")",
"!=",
"(",
"2007",
",",
"11",
",",
"2",
",",
"6",
",",
"0",
",",
"0",
",",
"4",
",",
"306",
",",
"0",
")",
")",
"DynamicTZSupport",
"=",
"(",
"(",
"not",
"MissingTZPatch",
")",
"and",
"(",
"datetime",
".",
"datetime",
"(",
"2003",
",",
"11",
",",
"2",
",",
"tzinfo",
"=",
"tzi",
")",
".",
"utctimetuple",
"(",
")",
"==",
"(",
"2003",
",",
"11",
",",
"2",
",",
"7",
",",
"0",
",",
"0",
",",
"6",
",",
"306",
",",
"0",
")",
")",
")",
"del",
"tzi",
"return",
"vars",
"(",
")"
] |
run a few known tests to determine the capabilities of the time zone database on this machine .
|
train
| false
|
6,047
|
def do_trim_params(params, k_params, alpha, score, passed, trim_mode, size_trim_tol, auto_trim_tol):
trimmed = ([False] * k_params)
if (trim_mode == 'off'):
trimmed = np.array(([False] * k_params))
elif ((trim_mode == 'auto') and (not passed)):
print("Could not trim params automatically due to failed QC check. Trimming using trim_mode == 'size' will still work.")
trimmed = np.array(([False] * k_params))
elif ((trim_mode == 'auto') and passed):
fprime = score(params)
for i in range(k_params):
if (alpha[i] != 0):
if (((alpha[i] - abs(fprime[i])) / alpha[i]) > auto_trim_tol):
params[i] = 0.0
trimmed[i] = True
elif (trim_mode == 'size'):
for i in range(k_params):
if (alpha[i] != 0):
if (abs(params[i]) < size_trim_tol):
params[i] = 0.0
trimmed[i] = True
else:
raise Exception(('trim_mode == %s, which is not recognized' % trim_mode))
return (params, np.asarray(trimmed))
|
[
"def",
"do_trim_params",
"(",
"params",
",",
"k_params",
",",
"alpha",
",",
"score",
",",
"passed",
",",
"trim_mode",
",",
"size_trim_tol",
",",
"auto_trim_tol",
")",
":",
"trimmed",
"=",
"(",
"[",
"False",
"]",
"*",
"k_params",
")",
"if",
"(",
"trim_mode",
"==",
"'off'",
")",
":",
"trimmed",
"=",
"np",
".",
"array",
"(",
"(",
"[",
"False",
"]",
"*",
"k_params",
")",
")",
"elif",
"(",
"(",
"trim_mode",
"==",
"'auto'",
")",
"and",
"(",
"not",
"passed",
")",
")",
":",
"print",
"(",
"\"Could not trim params automatically due to failed QC check. Trimming using trim_mode == 'size' will still work.\"",
")",
"trimmed",
"=",
"np",
".",
"array",
"(",
"(",
"[",
"False",
"]",
"*",
"k_params",
")",
")",
"elif",
"(",
"(",
"trim_mode",
"==",
"'auto'",
")",
"and",
"passed",
")",
":",
"fprime",
"=",
"score",
"(",
"params",
")",
"for",
"i",
"in",
"range",
"(",
"k_params",
")",
":",
"if",
"(",
"alpha",
"[",
"i",
"]",
"!=",
"0",
")",
":",
"if",
"(",
"(",
"(",
"alpha",
"[",
"i",
"]",
"-",
"abs",
"(",
"fprime",
"[",
"i",
"]",
")",
")",
"/",
"alpha",
"[",
"i",
"]",
")",
">",
"auto_trim_tol",
")",
":",
"params",
"[",
"i",
"]",
"=",
"0.0",
"trimmed",
"[",
"i",
"]",
"=",
"True",
"elif",
"(",
"trim_mode",
"==",
"'size'",
")",
":",
"for",
"i",
"in",
"range",
"(",
"k_params",
")",
":",
"if",
"(",
"alpha",
"[",
"i",
"]",
"!=",
"0",
")",
":",
"if",
"(",
"abs",
"(",
"params",
"[",
"i",
"]",
")",
"<",
"size_trim_tol",
")",
":",
"params",
"[",
"i",
"]",
"=",
"0.0",
"trimmed",
"[",
"i",
"]",
"=",
"True",
"else",
":",
"raise",
"Exception",
"(",
"(",
"'trim_mode == %s, which is not recognized'",
"%",
"trim_mode",
")",
")",
"return",
"(",
"params",
",",
"np",
".",
"asarray",
"(",
"trimmed",
")",
")"
] |
trims params that are zero at the theoretical minimum .
|
train
| false
|
6,049
|
def assert_shape(x, expected_shape, msg='Unexpected shape.'):
if (expected_shape is None):
return x
shape = x.shape
tests = []
for i in range(x.ndim):
if (expected_shape[i] is not None):
tests.append(theano.tensor.eq(shape[i], expected_shape[i]))
if tests:
return Assert(msg)(x, *tests)
else:
return x
|
[
"def",
"assert_shape",
"(",
"x",
",",
"expected_shape",
",",
"msg",
"=",
"'Unexpected shape.'",
")",
":",
"if",
"(",
"expected_shape",
"is",
"None",
")",
":",
"return",
"x",
"shape",
"=",
"x",
".",
"shape",
"tests",
"=",
"[",
"]",
"for",
"i",
"in",
"range",
"(",
"x",
".",
"ndim",
")",
":",
"if",
"(",
"expected_shape",
"[",
"i",
"]",
"is",
"not",
"None",
")",
":",
"tests",
".",
"append",
"(",
"theano",
".",
"tensor",
".",
"eq",
"(",
"shape",
"[",
"i",
"]",
",",
"expected_shape",
"[",
"i",
"]",
")",
")",
"if",
"tests",
":",
"return",
"Assert",
"(",
"msg",
")",
"(",
"x",
",",
"*",
"tests",
")",
"else",
":",
"return",
"x"
] |
wraps x in an assert to check its shape .
|
train
| false
|
6,050
|
def modify_parse_args(args):
mods = {}
dels = []
query = []
for arg in args:
if (arg.endswith('!') and ('=' not in arg) and (':' not in arg)):
dels.append(arg[:(-1)])
elif (('=' in arg) and (':' not in arg.split('=', 1)[0])):
(key, val) = arg.split('=', 1)
mods[key] = val
else:
query.append(arg)
return (query, mods, dels)
|
[
"def",
"modify_parse_args",
"(",
"args",
")",
":",
"mods",
"=",
"{",
"}",
"dels",
"=",
"[",
"]",
"query",
"=",
"[",
"]",
"for",
"arg",
"in",
"args",
":",
"if",
"(",
"arg",
".",
"endswith",
"(",
"'!'",
")",
"and",
"(",
"'='",
"not",
"in",
"arg",
")",
"and",
"(",
"':'",
"not",
"in",
"arg",
")",
")",
":",
"dels",
".",
"append",
"(",
"arg",
"[",
":",
"(",
"-",
"1",
")",
"]",
")",
"elif",
"(",
"(",
"'='",
"in",
"arg",
")",
"and",
"(",
"':'",
"not",
"in",
"arg",
".",
"split",
"(",
"'='",
",",
"1",
")",
"[",
"0",
"]",
")",
")",
":",
"(",
"key",
",",
"val",
")",
"=",
"arg",
".",
"split",
"(",
"'='",
",",
"1",
")",
"mods",
"[",
"key",
"]",
"=",
"val",
"else",
":",
"query",
".",
"append",
"(",
"arg",
")",
"return",
"(",
"query",
",",
"mods",
",",
"dels",
")"
] |
split the arguments for the modify subcommand into query parts .
|
train
| false
|
6,051
|
@mock_ec2
def test_describe_dhcp_options_invalid_id():
conn = boto.connect_vpc(u'the_key', u'the_secret')
with assert_raises(EC2ResponseError) as cm:
conn.get_all_dhcp_options([u'1'])
cm.exception.code.should.equal(u'InvalidDhcpOptionID.NotFound')
cm.exception.status.should.equal(400)
cm.exception.request_id.should_not.be.none
|
[
"@",
"mock_ec2",
"def",
"test_describe_dhcp_options_invalid_id",
"(",
")",
":",
"conn",
"=",
"boto",
".",
"connect_vpc",
"(",
"u'the_key'",
",",
"u'the_secret'",
")",
"with",
"assert_raises",
"(",
"EC2ResponseError",
")",
"as",
"cm",
":",
"conn",
".",
"get_all_dhcp_options",
"(",
"[",
"u'1'",
"]",
")",
"cm",
".",
"exception",
".",
"code",
".",
"should",
".",
"equal",
"(",
"u'InvalidDhcpOptionID.NotFound'",
")",
"cm",
".",
"exception",
".",
"status",
".",
"should",
".",
"equal",
"(",
"400",
")",
"cm",
".",
"exception",
".",
"request_id",
".",
"should_not",
".",
"be",
".",
"none"
] |
get error on invalid dhcp_option_id lookup .
|
train
| false
|
6,053
|
def update_counts(s, counts):
for char in s:
if (char in counts):
counts[char] += 1
|
[
"def",
"update_counts",
"(",
"s",
",",
"counts",
")",
":",
"for",
"char",
"in",
"s",
":",
"if",
"(",
"char",
"in",
"counts",
")",
":",
"counts",
"[",
"char",
"]",
"+=",
"1"
] |
adds one to the counts of each appearance of characters in s .
|
train
| false
|
6,055
|
def new_module(name):
return type(sys)(name)
|
[
"def",
"new_module",
"(",
"name",
")",
":",
"return",
"type",
"(",
"sys",
")",
"(",
"name",
")"
] |
return a new empty module object .
|
train
| false
|
6,058
|
def _no_op(name, **kwargs):
return dict(name=name, result=True, changes={}, comment='')
|
[
"def",
"_no_op",
"(",
"name",
",",
"**",
"kwargs",
")",
":",
"return",
"dict",
"(",
"name",
"=",
"name",
",",
"result",
"=",
"True",
",",
"changes",
"=",
"{",
"}",
",",
"comment",
"=",
"''",
")"
] |
no-op state to support state config via the stateconf renderer .
|
train
| true
|
6,060
|
@contextmanager
def patch_open():
mock_open = MagicMock(spec=open)
mock_file = MagicMock(spec=file)
@contextmanager
def stub_open(*args, **kwargs):
mock_open(*args, **kwargs)
(yield mock_file)
with patch('__builtin__.open', stub_open):
(yield (mock_open, mock_file))
|
[
"@",
"contextmanager",
"def",
"patch_open",
"(",
")",
":",
"mock_open",
"=",
"MagicMock",
"(",
"spec",
"=",
"open",
")",
"mock_file",
"=",
"MagicMock",
"(",
"spec",
"=",
"file",
")",
"@",
"contextmanager",
"def",
"stub_open",
"(",
"*",
"args",
",",
"**",
"kwargs",
")",
":",
"mock_open",
"(",
"*",
"args",
",",
"**",
"kwargs",
")",
"(",
"yield",
"mock_file",
")",
"with",
"patch",
"(",
"'__builtin__.open'",
",",
"stub_open",
")",
":",
"(",
"yield",
"(",
"mock_open",
",",
"mock_file",
")",
")"
] |
patch open() to allow mocking both open() itself and the file that is yielded .
|
train
| false
|
6,062
|
def process_java_sources(target, source, env):
shutil.copy2(str(source[0]), str(target[0]))
return None
|
[
"def",
"process_java_sources",
"(",
"target",
",",
"source",
",",
"env",
")",
":",
"shutil",
".",
"copy2",
"(",
"str",
"(",
"source",
"[",
"0",
"]",
")",
",",
"str",
"(",
"target",
"[",
"0",
"]",
")",
")",
"return",
"None"
] |
copy source file into .
|
train
| false
|
6,065
|
def getVector3ByFloatList(floatList, vector3):
if (len(floatList) > 0):
vector3 = getVector3IfNone(vector3)
vector3.x = euclidean.getFloatFromValue(floatList[0])
if (len(floatList) > 1):
vector3 = getVector3IfNone(vector3)
vector3.y = euclidean.getFloatFromValue(floatList[1])
if (len(floatList) > 2):
vector3 = getVector3IfNone(vector3)
vector3.z = euclidean.getFloatFromValue(floatList[2])
return vector3
|
[
"def",
"getVector3ByFloatList",
"(",
"floatList",
",",
"vector3",
")",
":",
"if",
"(",
"len",
"(",
"floatList",
")",
">",
"0",
")",
":",
"vector3",
"=",
"getVector3IfNone",
"(",
"vector3",
")",
"vector3",
".",
"x",
"=",
"euclidean",
".",
"getFloatFromValue",
"(",
"floatList",
"[",
"0",
"]",
")",
"if",
"(",
"len",
"(",
"floatList",
")",
">",
"1",
")",
":",
"vector3",
"=",
"getVector3IfNone",
"(",
"vector3",
")",
"vector3",
".",
"y",
"=",
"euclidean",
".",
"getFloatFromValue",
"(",
"floatList",
"[",
"1",
"]",
")",
"if",
"(",
"len",
"(",
"floatList",
")",
">",
"2",
")",
":",
"vector3",
"=",
"getVector3IfNone",
"(",
"vector3",
")",
"vector3",
".",
"z",
"=",
"euclidean",
".",
"getFloatFromValue",
"(",
"floatList",
"[",
"2",
"]",
")",
"return",
"vector3"
] |
get vector3 by float list .
|
train
| false
|
6,066
|
def cucumber_reports(registry, xml_parent, data):
cucumber_reports = XML.SubElement(xml_parent, 'net.masterthought.jenkins.CucumberReportPublisher')
cucumber_reports.set('plugin', 'cucumber-reports')
mappings = [('json-reports-path', 'jsonReportDirectory', ''), ('plugin-url-path', 'pluginUrlPath', ''), ('file-include-pattern', 'fileIncludePattern', ''), ('file-exclude-pattern', 'fileExcludePattern', ''), ('skipped-fails', 'skippedFails', False), ('pending-fails', 'pendingFails', False), ('undefined-fails', 'undefinedFails', False), ('missing-fails', 'missingFails', False), ('no-flash-charts', 'noFlashCharts', False), ('ignore-failed-tests', 'ignoreFailedTests', False), ('parallel-testing', 'parallelTesting', False)]
helpers.convert_mapping_to_xml(cucumber_reports, data, mappings, fail_required=True)
|
[
"def",
"cucumber_reports",
"(",
"registry",
",",
"xml_parent",
",",
"data",
")",
":",
"cucumber_reports",
"=",
"XML",
".",
"SubElement",
"(",
"xml_parent",
",",
"'net.masterthought.jenkins.CucumberReportPublisher'",
")",
"cucumber_reports",
".",
"set",
"(",
"'plugin'",
",",
"'cucumber-reports'",
")",
"mappings",
"=",
"[",
"(",
"'json-reports-path'",
",",
"'jsonReportDirectory'",
",",
"''",
")",
",",
"(",
"'plugin-url-path'",
",",
"'pluginUrlPath'",
",",
"''",
")",
",",
"(",
"'file-include-pattern'",
",",
"'fileIncludePattern'",
",",
"''",
")",
",",
"(",
"'file-exclude-pattern'",
",",
"'fileExcludePattern'",
",",
"''",
")",
",",
"(",
"'skipped-fails'",
",",
"'skippedFails'",
",",
"False",
")",
",",
"(",
"'pending-fails'",
",",
"'pendingFails'",
",",
"False",
")",
",",
"(",
"'undefined-fails'",
",",
"'undefinedFails'",
",",
"False",
")",
",",
"(",
"'missing-fails'",
",",
"'missingFails'",
",",
"False",
")",
",",
"(",
"'no-flash-charts'",
",",
"'noFlashCharts'",
",",
"False",
")",
",",
"(",
"'ignore-failed-tests'",
",",
"'ignoreFailedTests'",
",",
"False",
")",
",",
"(",
"'parallel-testing'",
",",
"'parallelTesting'",
",",
"False",
")",
"]",
"helpers",
".",
"convert_mapping_to_xml",
"(",
"cucumber_reports",
",",
"data",
",",
"mappings",
",",
"fail_required",
"=",
"True",
")"
] |
yaml: cucumber-reports this plugin creates pretty cucumber-jvm html reports on jenkins .
|
train
| false
|
6,067
|
def output():
return s3_rest_controller()
|
[
"def",
"output",
"(",
")",
":",
"return",
"s3_rest_controller",
"(",
")"
] |
appends s to the response .
|
train
| false
|
6,068
|
def parse_set_union(source, info):
items = [parse_set_symm_diff(source, info)]
while source.match('||'):
items.append(parse_set_symm_diff(source, info))
if (len(items) == 1):
return items[0]
return SetUnion(info, items)
|
[
"def",
"parse_set_union",
"(",
"source",
",",
"info",
")",
":",
"items",
"=",
"[",
"parse_set_symm_diff",
"(",
"source",
",",
"info",
")",
"]",
"while",
"source",
".",
"match",
"(",
"'||'",
")",
":",
"items",
".",
"append",
"(",
"parse_set_symm_diff",
"(",
"source",
",",
"info",
")",
")",
"if",
"(",
"len",
"(",
"items",
")",
"==",
"1",
")",
":",
"return",
"items",
"[",
"0",
"]",
"return",
"SetUnion",
"(",
"info",
",",
"items",
")"
] |
parses a set union .
|
train
| false
|
6,069
|
def result_to_region(result, country):
if ('province' in result):
mapbox_region = result['province']
lookup_args = {'name': mapbox_region['name'], 'country': country}
args = {'mapbox_id': mapbox_region['id']}
args.update(lookup_args)
query = (Q(**lookup_args) | Q(mapbox_id=mapbox_region['id']))
region_qs = Region.objects.filter(query).distinct()
if region_qs.exists():
if (region_qs.count() == 2):
deduplicate_regions(region_qs[0], region_qs[1])
region_qs.update(**args)
region = region_qs[0]
else:
region = Region.objects.create(**args)
return region
|
[
"def",
"result_to_region",
"(",
"result",
",",
"country",
")",
":",
"if",
"(",
"'province'",
"in",
"result",
")",
":",
"mapbox_region",
"=",
"result",
"[",
"'province'",
"]",
"lookup_args",
"=",
"{",
"'name'",
":",
"mapbox_region",
"[",
"'name'",
"]",
",",
"'country'",
":",
"country",
"}",
"args",
"=",
"{",
"'mapbox_id'",
":",
"mapbox_region",
"[",
"'id'",
"]",
"}",
"args",
".",
"update",
"(",
"lookup_args",
")",
"query",
"=",
"(",
"Q",
"(",
"**",
"lookup_args",
")",
"|",
"Q",
"(",
"mapbox_id",
"=",
"mapbox_region",
"[",
"'id'",
"]",
")",
")",
"region_qs",
"=",
"Region",
".",
"objects",
".",
"filter",
"(",
"query",
")",
".",
"distinct",
"(",
")",
"if",
"region_qs",
".",
"exists",
"(",
")",
":",
"if",
"(",
"region_qs",
".",
"count",
"(",
")",
"==",
"2",
")",
":",
"deduplicate_regions",
"(",
"region_qs",
"[",
"0",
"]",
",",
"region_qs",
"[",
"1",
"]",
")",
"region_qs",
".",
"update",
"(",
"**",
"args",
")",
"region",
"=",
"region_qs",
"[",
"0",
"]",
"else",
":",
"region",
"=",
"Region",
".",
"objects",
".",
"create",
"(",
"**",
"args",
")",
"return",
"region"
] |
given one result from mapbox and a country object .
|
train
| false
|
6,070
|
def headersParser(headers):
if (not kb.headerPaths):
kb.headerPaths = {'cookie': os.path.join(paths.SQLMAP_XML_BANNER_PATH, 'cookie.xml'), 'microsoftsharepointteamservices': os.path.join(paths.SQLMAP_XML_BANNER_PATH, 'sharepoint.xml'), 'server': os.path.join(paths.SQLMAP_XML_BANNER_PATH, 'server.xml'), 'servlet-engine': os.path.join(paths.SQLMAP_XML_BANNER_PATH, 'servlet.xml'), 'set-cookie': os.path.join(paths.SQLMAP_XML_BANNER_PATH, 'cookie.xml'), 'x-aspnet-version': os.path.join(paths.SQLMAP_XML_BANNER_PATH, 'x-aspnet-version.xml'), 'x-powered-by': os.path.join(paths.SQLMAP_XML_BANNER_PATH, 'x-powered-by.xml')}
for header in itertools.ifilter((lambda x: (x in kb.headerPaths)), headers):
value = headers[header]
xmlfile = kb.headerPaths[header]
handler = FingerprintHandler(value, kb.headersFp)
parseXmlFile(xmlfile, handler)
parseXmlFile(paths.GENERIC_XML, handler)
|
[
"def",
"headersParser",
"(",
"headers",
")",
":",
"if",
"(",
"not",
"kb",
".",
"headerPaths",
")",
":",
"kb",
".",
"headerPaths",
"=",
"{",
"'cookie'",
":",
"os",
".",
"path",
".",
"join",
"(",
"paths",
".",
"SQLMAP_XML_BANNER_PATH",
",",
"'cookie.xml'",
")",
",",
"'microsoftsharepointteamservices'",
":",
"os",
".",
"path",
".",
"join",
"(",
"paths",
".",
"SQLMAP_XML_BANNER_PATH",
",",
"'sharepoint.xml'",
")",
",",
"'server'",
":",
"os",
".",
"path",
".",
"join",
"(",
"paths",
".",
"SQLMAP_XML_BANNER_PATH",
",",
"'server.xml'",
")",
",",
"'servlet-engine'",
":",
"os",
".",
"path",
".",
"join",
"(",
"paths",
".",
"SQLMAP_XML_BANNER_PATH",
",",
"'servlet.xml'",
")",
",",
"'set-cookie'",
":",
"os",
".",
"path",
".",
"join",
"(",
"paths",
".",
"SQLMAP_XML_BANNER_PATH",
",",
"'cookie.xml'",
")",
",",
"'x-aspnet-version'",
":",
"os",
".",
"path",
".",
"join",
"(",
"paths",
".",
"SQLMAP_XML_BANNER_PATH",
",",
"'x-aspnet-version.xml'",
")",
",",
"'x-powered-by'",
":",
"os",
".",
"path",
".",
"join",
"(",
"paths",
".",
"SQLMAP_XML_BANNER_PATH",
",",
"'x-powered-by.xml'",
")",
"}",
"for",
"header",
"in",
"itertools",
".",
"ifilter",
"(",
"(",
"lambda",
"x",
":",
"(",
"x",
"in",
"kb",
".",
"headerPaths",
")",
")",
",",
"headers",
")",
":",
"value",
"=",
"headers",
"[",
"header",
"]",
"xmlfile",
"=",
"kb",
".",
"headerPaths",
"[",
"header",
"]",
"handler",
"=",
"FingerprintHandler",
"(",
"value",
",",
"kb",
".",
"headersFp",
")",
"parseXmlFile",
"(",
"xmlfile",
",",
"handler",
")",
"parseXmlFile",
"(",
"paths",
".",
"GENERIC_XML",
",",
"handler",
")"
] |
this function calls a class that parses the input http headers to fingerprint the back-end database management system operating system and the web application technology .
|
train
| false
|
6,071
|
def token(page):
index = ((page or 0) * util.getxy().max_results)
k = ((index // 128) - 1)
index -= (128 * k)
f = [8, index]
if ((k > 0) or (index > 127)):
f.append((k + 1))
f += [16, 0]
b64 = base64.b64encode(bytes(f)).decode('utf8')
return b64.strip('=')
|
[
"def",
"token",
"(",
"page",
")",
":",
"index",
"=",
"(",
"(",
"page",
"or",
"0",
")",
"*",
"util",
".",
"getxy",
"(",
")",
".",
"max_results",
")",
"k",
"=",
"(",
"(",
"index",
"//",
"128",
")",
"-",
"1",
")",
"index",
"-=",
"(",
"128",
"*",
"k",
")",
"f",
"=",
"[",
"8",
",",
"index",
"]",
"if",
"(",
"(",
"k",
">",
"0",
")",
"or",
"(",
"index",
">",
"127",
")",
")",
":",
"f",
".",
"append",
"(",
"(",
"k",
"+",
"1",
")",
")",
"f",
"+=",
"[",
"16",
",",
"0",
"]",
"b64",
"=",
"base64",
".",
"b64encode",
"(",
"bytes",
"(",
"f",
")",
")",
".",
"decode",
"(",
"'utf8'",
")",
"return",
"b64",
".",
"strip",
"(",
"'='",
")"
] |
token verification checks for the authorization header and verifies using the verify_user function .
|
train
| false
|
6,072
|
def BatchMailEventFeedFromString(xml_string):
return atom.CreateClassFromXMLString(BatchMailEventFeed, xml_string)
|
[
"def",
"BatchMailEventFeedFromString",
"(",
"xml_string",
")",
":",
"return",
"atom",
".",
"CreateClassFromXMLString",
"(",
"BatchMailEventFeed",
",",
"xml_string",
")"
] |
parse in the batchmaileventfeed from the xml definition .
|
train
| false
|
6,074
|
def chunk(elist, size):
for i in xrange(0, len(elist), size):
(yield elist[i:(i + size)])
|
[
"def",
"chunk",
"(",
"elist",
",",
"size",
")",
":",
"for",
"i",
"in",
"xrange",
"(",
"0",
",",
"len",
"(",
"elist",
")",
",",
"size",
")",
":",
"(",
"yield",
"elist",
"[",
"i",
":",
"(",
"i",
"+",
"size",
")",
"]",
")"
] |
yield chunks of an iterable .
|
train
| false
|
6,076
|
def instance_type_get_by_name(context, name):
return IMPL.instance_type_get_by_name(context, name)
|
[
"def",
"instance_type_get_by_name",
"(",
"context",
",",
"name",
")",
":",
"return",
"IMPL",
".",
"instance_type_get_by_name",
"(",
"context",
",",
"name",
")"
] |
returns a dict describing specific instance_type .
|
train
| false
|
6,077
|
@requires_application()
def test_sys_info():
fname = op.join(temp_dir, 'info.txt')
sys_info(fname)
assert_raises(IOError, sys_info, fname)
with open(fname, 'r') as fid:
out = ''.join(fid.readlines())
keys = ['GL version', 'Python', 'Backend', 'pyglet', 'Platform:']
for key in keys:
assert_in(key, out)
print out
assert_true(('Info-gathering error' not in out))
|
[
"@",
"requires_application",
"(",
")",
"def",
"test_sys_info",
"(",
")",
":",
"fname",
"=",
"op",
".",
"join",
"(",
"temp_dir",
",",
"'info.txt'",
")",
"sys_info",
"(",
"fname",
")",
"assert_raises",
"(",
"IOError",
",",
"sys_info",
",",
"fname",
")",
"with",
"open",
"(",
"fname",
",",
"'r'",
")",
"as",
"fid",
":",
"out",
"=",
"''",
".",
"join",
"(",
"fid",
".",
"readlines",
"(",
")",
")",
"keys",
"=",
"[",
"'GL version'",
",",
"'Python'",
",",
"'Backend'",
",",
"'pyglet'",
",",
"'Platform:'",
"]",
"for",
"key",
"in",
"keys",
":",
"assert_in",
"(",
"key",
",",
"out",
")",
"print",
"out",
"assert_true",
"(",
"(",
"'Info-gathering error'",
"not",
"in",
"out",
")",
")"
] |
test info-showing utility .
|
train
| false
|
6,080
|
def extractErrorMessage(page):
retVal = None
if isinstance(page, basestring):
for regex in ERROR_PARSING_REGEXES:
match = re.search(regex, page, (re.DOTALL | re.IGNORECASE))
if match:
retVal = htmlunescape(match.group('result')).replace('<br>', '\n').strip()
break
return retVal
|
[
"def",
"extractErrorMessage",
"(",
"page",
")",
":",
"retVal",
"=",
"None",
"if",
"isinstance",
"(",
"page",
",",
"basestring",
")",
":",
"for",
"regex",
"in",
"ERROR_PARSING_REGEXES",
":",
"match",
"=",
"re",
".",
"search",
"(",
"regex",
",",
"page",
",",
"(",
"re",
".",
"DOTALL",
"|",
"re",
".",
"IGNORECASE",
")",
")",
"if",
"match",
":",
"retVal",
"=",
"htmlunescape",
"(",
"match",
".",
"group",
"(",
"'result'",
")",
")",
".",
"replace",
"(",
"'<br>'",
",",
"'\\n'",
")",
".",
"strip",
"(",
")",
"break",
"return",
"retVal"
] |
returns reported error message from page if it founds one .
|
train
| false
|
6,081
|
def _es_documents_for(locale, topics=None, products=None):
s = DocumentMappingType.search().values_dict('id', 'document_title', 'url', 'document_parent_id', 'document_summary').filter(document_locale=locale, document_is_archived=False, document_category__in=settings.IA_DEFAULT_CATEGORIES)
for topic in (topics or []):
s = s.filter(topic=topic.slug)
for product in (products or []):
s = s.filter(product=product.slug)
results = s.order_by('document_display_order', '-document_recent_helpful_votes')[:100]
results = DocumentMappingType.reshape(results)
return results
|
[
"def",
"_es_documents_for",
"(",
"locale",
",",
"topics",
"=",
"None",
",",
"products",
"=",
"None",
")",
":",
"s",
"=",
"DocumentMappingType",
".",
"search",
"(",
")",
".",
"values_dict",
"(",
"'id'",
",",
"'document_title'",
",",
"'url'",
",",
"'document_parent_id'",
",",
"'document_summary'",
")",
".",
"filter",
"(",
"document_locale",
"=",
"locale",
",",
"document_is_archived",
"=",
"False",
",",
"document_category__in",
"=",
"settings",
".",
"IA_DEFAULT_CATEGORIES",
")",
"for",
"topic",
"in",
"(",
"topics",
"or",
"[",
"]",
")",
":",
"s",
"=",
"s",
".",
"filter",
"(",
"topic",
"=",
"topic",
".",
"slug",
")",
"for",
"product",
"in",
"(",
"products",
"or",
"[",
"]",
")",
":",
"s",
"=",
"s",
".",
"filter",
"(",
"product",
"=",
"product",
".",
"slug",
")",
"results",
"=",
"s",
".",
"order_by",
"(",
"'document_display_order'",
",",
"'-document_recent_helpful_votes'",
")",
"[",
":",
"100",
"]",
"results",
"=",
"DocumentMappingType",
".",
"reshape",
"(",
"results",
")",
"return",
"results"
] |
es implementation of documents_for .
|
train
| false
|
6,082
|
def calc_parity_vector(parity_vector):
return reduce((lambda x, y: (x ^ y)), parity_vector[1:])
|
[
"def",
"calc_parity_vector",
"(",
"parity_vector",
")",
":",
"return",
"reduce",
"(",
"(",
"lambda",
"x",
",",
"y",
":",
"(",
"x",
"^",
"y",
")",
")",
",",
"parity_vector",
"[",
"1",
":",
"]",
")"
] |
returns even or odd parit for parity vector .
|
train
| false
|
6,083
|
def put_object(url, token=None, container=None, name=None, contents=None, content_length=None, etag=None, chunk_size=None, content_type=None, headers=None, http_conn=None, proxy=None, query_string=None, response_dict=None, service_token=None):
if http_conn:
(parsed, conn) = http_conn
else:
(parsed, conn) = http_connection(url, proxy=proxy)
path = parsed.path
if container:
path = ('%s/%s' % (path.rstrip('/'), quote(container)))
if name:
path = ('%s/%s' % (path.rstrip('/'), quote(name)))
if query_string:
path += ('?' + query_string)
if headers:
headers = dict(headers)
else:
headers = {}
if token:
headers['X-Auth-Token'] = token
if service_token:
headers['X-Service-Token'] = service_token
if etag:
headers['ETag'] = etag.strip('"')
if (content_length is not None):
headers['Content-Length'] = str(content_length)
else:
for (n, v) in headers.items():
if (n.lower() == 'content-length'):
content_length = int(v)
if (content_type is not None):
headers['Content-Type'] = content_type
elif ('Content-Type' not in headers):
headers['Content-Type'] = ''
if (not contents):
headers['Content-Length'] = '0'
if isinstance(contents, (ReadableToIterable, LengthWrapper)):
conn.putrequest(path, headers=headers, data=contents)
elif hasattr(contents, 'read'):
if (chunk_size is None):
chunk_size = 65536
if (content_length is None):
data = ReadableToIterable(contents, chunk_size, md5=False)
else:
data = LengthWrapper(contents, content_length, md5=False)
conn.putrequest(path, headers=headers, data=data)
else:
if (chunk_size is not None):
warn_msg = ('%s object has no "read" method, ignoring chunk_size' % type(contents).__name__)
warnings.warn(warn_msg, stacklevel=2)
if (hasattr(contents, '__iter__') and (not isinstance(contents, (six.text_type, six.binary_type, list, tuple, dict)))):
contents = iter_wrapper(contents)
conn.request('PUT', path, contents, headers)
resp = conn.getresponse()
body = resp.read()
http_log((('%s%s' % (url.replace(parsed.path, ''), path)), 'PUT'), {'headers': headers}, resp, body)
store_response(resp, response_dict)
if ((resp.status < 200) or (resp.status >= 300)):
raise ClientException.from_response(resp, 'Object PUT failed', body)
etag = resp.getheader('etag', '').strip('"')
return etag
|
[
"def",
"put_object",
"(",
"url",
",",
"token",
"=",
"None",
",",
"container",
"=",
"None",
",",
"name",
"=",
"None",
",",
"contents",
"=",
"None",
",",
"content_length",
"=",
"None",
",",
"etag",
"=",
"None",
",",
"chunk_size",
"=",
"None",
",",
"content_type",
"=",
"None",
",",
"headers",
"=",
"None",
",",
"http_conn",
"=",
"None",
",",
"proxy",
"=",
"None",
",",
"query_string",
"=",
"None",
",",
"response_dict",
"=",
"None",
",",
"service_token",
"=",
"None",
")",
":",
"if",
"http_conn",
":",
"(",
"parsed",
",",
"conn",
")",
"=",
"http_conn",
"else",
":",
"(",
"parsed",
",",
"conn",
")",
"=",
"http_connection",
"(",
"url",
",",
"proxy",
"=",
"proxy",
")",
"path",
"=",
"parsed",
".",
"path",
"if",
"container",
":",
"path",
"=",
"(",
"'%s/%s'",
"%",
"(",
"path",
".",
"rstrip",
"(",
"'/'",
")",
",",
"quote",
"(",
"container",
")",
")",
")",
"if",
"name",
":",
"path",
"=",
"(",
"'%s/%s'",
"%",
"(",
"path",
".",
"rstrip",
"(",
"'/'",
")",
",",
"quote",
"(",
"name",
")",
")",
")",
"if",
"query_string",
":",
"path",
"+=",
"(",
"'?'",
"+",
"query_string",
")",
"if",
"headers",
":",
"headers",
"=",
"dict",
"(",
"headers",
")",
"else",
":",
"headers",
"=",
"{",
"}",
"if",
"token",
":",
"headers",
"[",
"'X-Auth-Token'",
"]",
"=",
"token",
"if",
"service_token",
":",
"headers",
"[",
"'X-Service-Token'",
"]",
"=",
"service_token",
"if",
"etag",
":",
"headers",
"[",
"'ETag'",
"]",
"=",
"etag",
".",
"strip",
"(",
"'\"'",
")",
"if",
"(",
"content_length",
"is",
"not",
"None",
")",
":",
"headers",
"[",
"'Content-Length'",
"]",
"=",
"str",
"(",
"content_length",
")",
"else",
":",
"for",
"(",
"n",
",",
"v",
")",
"in",
"headers",
".",
"items",
"(",
")",
":",
"if",
"(",
"n",
".",
"lower",
"(",
")",
"==",
"'content-length'",
")",
":",
"content_length",
"=",
"int",
"(",
"v",
")",
"if",
"(",
"content_type",
"is",
"not",
"None",
")",
":",
"headers",
"[",
"'Content-Type'",
"]",
"=",
"content_type",
"elif",
"(",
"'Content-Type'",
"not",
"in",
"headers",
")",
":",
"headers",
"[",
"'Content-Type'",
"]",
"=",
"''",
"if",
"(",
"not",
"contents",
")",
":",
"headers",
"[",
"'Content-Length'",
"]",
"=",
"'0'",
"if",
"isinstance",
"(",
"contents",
",",
"(",
"ReadableToIterable",
",",
"LengthWrapper",
")",
")",
":",
"conn",
".",
"putrequest",
"(",
"path",
",",
"headers",
"=",
"headers",
",",
"data",
"=",
"contents",
")",
"elif",
"hasattr",
"(",
"contents",
",",
"'read'",
")",
":",
"if",
"(",
"chunk_size",
"is",
"None",
")",
":",
"chunk_size",
"=",
"65536",
"if",
"(",
"content_length",
"is",
"None",
")",
":",
"data",
"=",
"ReadableToIterable",
"(",
"contents",
",",
"chunk_size",
",",
"md5",
"=",
"False",
")",
"else",
":",
"data",
"=",
"LengthWrapper",
"(",
"contents",
",",
"content_length",
",",
"md5",
"=",
"False",
")",
"conn",
".",
"putrequest",
"(",
"path",
",",
"headers",
"=",
"headers",
",",
"data",
"=",
"data",
")",
"else",
":",
"if",
"(",
"chunk_size",
"is",
"not",
"None",
")",
":",
"warn_msg",
"=",
"(",
"'%s object has no \"read\" method, ignoring chunk_size'",
"%",
"type",
"(",
"contents",
")",
".",
"__name__",
")",
"warnings",
".",
"warn",
"(",
"warn_msg",
",",
"stacklevel",
"=",
"2",
")",
"if",
"(",
"hasattr",
"(",
"contents",
",",
"'__iter__'",
")",
"and",
"(",
"not",
"isinstance",
"(",
"contents",
",",
"(",
"six",
".",
"text_type",
",",
"six",
".",
"binary_type",
",",
"list",
",",
"tuple",
",",
"dict",
")",
")",
")",
")",
":",
"contents",
"=",
"iter_wrapper",
"(",
"contents",
")",
"conn",
".",
"request",
"(",
"'PUT'",
",",
"path",
",",
"contents",
",",
"headers",
")",
"resp",
"=",
"conn",
".",
"getresponse",
"(",
")",
"body",
"=",
"resp",
".",
"read",
"(",
")",
"http_log",
"(",
"(",
"(",
"'%s%s'",
"%",
"(",
"url",
".",
"replace",
"(",
"parsed",
".",
"path",
",",
"''",
")",
",",
"path",
")",
")",
",",
"'PUT'",
")",
",",
"{",
"'headers'",
":",
"headers",
"}",
",",
"resp",
",",
"body",
")",
"store_response",
"(",
"resp",
",",
"response_dict",
")",
"if",
"(",
"(",
"resp",
".",
"status",
"<",
"200",
")",
"or",
"(",
"resp",
".",
"status",
">=",
"300",
")",
")",
":",
"raise",
"ClientException",
".",
"from_response",
"(",
"resp",
",",
"'Object PUT failed'",
",",
"body",
")",
"etag",
"=",
"resp",
".",
"getheader",
"(",
"'etag'",
",",
"''",
")",
".",
"strip",
"(",
"'\"'",
")",
"return",
"etag"
] |
for usage with container sync .
|
train
| false
|
6,084
|
def _bidirectional_shortest_path(G, source, target, ignore_nodes=None, ignore_edges=None, weight=None):
results = _bidirectional_pred_succ(G, source, target, ignore_nodes, ignore_edges)
(pred, succ, w) = results
path = []
while (w is not None):
path.append(w)
w = succ[w]
w = pred[path[0]]
while (w is not None):
path.insert(0, w)
w = pred[w]
return (len(path), path)
|
[
"def",
"_bidirectional_shortest_path",
"(",
"G",
",",
"source",
",",
"target",
",",
"ignore_nodes",
"=",
"None",
",",
"ignore_edges",
"=",
"None",
",",
"weight",
"=",
"None",
")",
":",
"results",
"=",
"_bidirectional_pred_succ",
"(",
"G",
",",
"source",
",",
"target",
",",
"ignore_nodes",
",",
"ignore_edges",
")",
"(",
"pred",
",",
"succ",
",",
"w",
")",
"=",
"results",
"path",
"=",
"[",
"]",
"while",
"(",
"w",
"is",
"not",
"None",
")",
":",
"path",
".",
"append",
"(",
"w",
")",
"w",
"=",
"succ",
"[",
"w",
"]",
"w",
"=",
"pred",
"[",
"path",
"[",
"0",
"]",
"]",
"while",
"(",
"w",
"is",
"not",
"None",
")",
":",
"path",
".",
"insert",
"(",
"0",
",",
"w",
")",
"w",
"=",
"pred",
"[",
"w",
"]",
"return",
"(",
"len",
"(",
"path",
")",
",",
"path",
")"
] |
return shortest path between source and target ignoring nodes in the container exclude .
|
train
| false
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.