| id_within_dataset (int64, 1–55.5k) | snippet (string, 19–14.2k chars) | tokens (list, 6–1.63k items) | nl (string, 6–352 chars) | split_within_dataset (1 class) | is_duplicated (bool, 2 classes) |
|---|---|---|---|---|---|
10,450
|
def test_rgb_to_hsl_part_13():
assert (rgb_to_hsl(0, 0, 0) == (0, 0, 0))
assert (rgb_to_hsl(51, 0, 0) == (0, 100, 10))
assert (rgb_to_hsl(102, 0, 0) == (0, 100, 20))
assert (rgb_to_hsl(153, 0, 0) == (0, 100, 30))
assert (rgb_to_hsl(204, 0, 0) == (0, 100, 40))
assert (rgb_to_hsl(255, 0, 0) == (0, 100, 50))
assert (rgb_to_hsl(255, 51, 51) == (0, 100, 60))
assert (rgb_to_hsl(255, 102, 102) == (0, 100, 70))
assert (rgb_to_hsl(255, 153, 153) == (0, 100, 80))
assert (rgb_to_hsl(255, 204, 204) == (0, 100, 90))
assert (rgb_to_hsl(255, 255, 255) == (0, 0, 100))
|
[
"def",
"test_rgb_to_hsl_part_13",
"(",
")",
":",
"assert",
"(",
"rgb_to_hsl",
"(",
"0",
",",
"0",
",",
"0",
")",
"==",
"(",
"0",
",",
"0",
",",
"0",
")",
")",
"assert",
"(",
"rgb_to_hsl",
"(",
"51",
",",
"0",
",",
"0",
")",
"==",
"(",
"0",
",",
"100",
",",
"10",
")",
")",
"assert",
"(",
"rgb_to_hsl",
"(",
"102",
",",
"0",
",",
"0",
")",
"==",
"(",
"0",
",",
"100",
",",
"20",
")",
")",
"assert",
"(",
"rgb_to_hsl",
"(",
"153",
",",
"0",
",",
"0",
")",
"==",
"(",
"0",
",",
"100",
",",
"30",
")",
")",
"assert",
"(",
"rgb_to_hsl",
"(",
"204",
",",
"0",
",",
"0",
")",
"==",
"(",
"0",
",",
"100",
",",
"40",
")",
")",
"assert",
"(",
"rgb_to_hsl",
"(",
"255",
",",
"0",
",",
"0",
")",
"==",
"(",
"0",
",",
"100",
",",
"50",
")",
")",
"assert",
"(",
"rgb_to_hsl",
"(",
"255",
",",
"51",
",",
"51",
")",
"==",
"(",
"0",
",",
"100",
",",
"60",
")",
")",
"assert",
"(",
"rgb_to_hsl",
"(",
"255",
",",
"102",
",",
"102",
")",
"==",
"(",
"0",
",",
"100",
",",
"70",
")",
")",
"assert",
"(",
"rgb_to_hsl",
"(",
"255",
",",
"153",
",",
"153",
")",
"==",
"(",
"0",
",",
"100",
",",
"80",
")",
")",
"assert",
"(",
"rgb_to_hsl",
"(",
"255",
",",
"204",
",",
"204",
")",
"==",
"(",
"0",
",",
"100",
",",
"90",
")",
")",
"assert",
"(",
"rgb_to_hsl",
"(",
"255",
",",
"255",
",",
"255",
")",
"==",
"(",
"0",
",",
"0",
",",
"100",
")",
")"
] |
test rgb to hsl color function .
|
train
| false
|
10,452
|
def filesizeformat(bytes):
try:
bytes = float(bytes)
except (TypeError, ValueError, UnicodeDecodeError):
return (ungettext('%(size)d byte', '%(size)d bytes', 0) % {'size': 0})
filesize_number_format = (lambda value: formats.number_format(round(value, 1), 1))
if (bytes < 1024):
return (ungettext('%(size)d byte', '%(size)d bytes', bytes) % {'size': bytes})
if (bytes < (1024 * 1024)):
return (ugettext('%s KB') % filesize_number_format((bytes / 1024)))
if (bytes < ((1024 * 1024) * 1024)):
return (ugettext('%s MB') % filesize_number_format((bytes / (1024 * 1024))))
if (bytes < (((1024 * 1024) * 1024) * 1024)):
return (ugettext('%s GB') % filesize_number_format((bytes / ((1024 * 1024) * 1024))))
if (bytes < ((((1024 * 1024) * 1024) * 1024) * 1024)):
return (ugettext('%s TB') % filesize_number_format((bytes / (((1024 * 1024) * 1024) * 1024))))
return (ugettext('%s PB') % filesize_number_format((bytes / ((((1024 * 1024) * 1024) * 1024) * 1024))))
|
[
"def",
"filesizeformat",
"(",
"bytes",
")",
":",
"try",
":",
"bytes",
"=",
"float",
"(",
"bytes",
")",
"except",
"(",
"TypeError",
",",
"ValueError",
",",
"UnicodeDecodeError",
")",
":",
"return",
"(",
"ungettext",
"(",
"'%(size)d byte'",
",",
"'%(size)d bytes'",
",",
"0",
")",
"%",
"{",
"'size'",
":",
"0",
"}",
")",
"filesize_number_format",
"=",
"(",
"lambda",
"value",
":",
"formats",
".",
"number_format",
"(",
"round",
"(",
"value",
",",
"1",
")",
",",
"1",
")",
")",
"if",
"(",
"bytes",
"<",
"1024",
")",
":",
"return",
"(",
"ungettext",
"(",
"'%(size)d byte'",
",",
"'%(size)d bytes'",
",",
"bytes",
")",
"%",
"{",
"'size'",
":",
"bytes",
"}",
")",
"if",
"(",
"bytes",
"<",
"(",
"1024",
"*",
"1024",
")",
")",
":",
"return",
"(",
"ugettext",
"(",
"'%s KB'",
")",
"%",
"filesize_number_format",
"(",
"(",
"bytes",
"/",
"1024",
")",
")",
")",
"if",
"(",
"bytes",
"<",
"(",
"(",
"1024",
"*",
"1024",
")",
"*",
"1024",
")",
")",
":",
"return",
"(",
"ugettext",
"(",
"'%s MB'",
")",
"%",
"filesize_number_format",
"(",
"(",
"bytes",
"/",
"(",
"1024",
"*",
"1024",
")",
")",
")",
")",
"if",
"(",
"bytes",
"<",
"(",
"(",
"(",
"1024",
"*",
"1024",
")",
"*",
"1024",
")",
"*",
"1024",
")",
")",
":",
"return",
"(",
"ugettext",
"(",
"'%s GB'",
")",
"%",
"filesize_number_format",
"(",
"(",
"bytes",
"/",
"(",
"(",
"1024",
"*",
"1024",
")",
"*",
"1024",
")",
")",
")",
")",
"if",
"(",
"bytes",
"<",
"(",
"(",
"(",
"(",
"1024",
"*",
"1024",
")",
"*",
"1024",
")",
"*",
"1024",
")",
"*",
"1024",
")",
")",
":",
"return",
"(",
"ugettext",
"(",
"'%s TB'",
")",
"%",
"filesize_number_format",
"(",
"(",
"bytes",
"/",
"(",
"(",
"(",
"1024",
"*",
"1024",
")",
"*",
"1024",
")",
"*",
"1024",
")",
")",
")",
")",
"return",
"(",
"ugettext",
"(",
"'%s PB'",
")",
"%",
"filesize_number_format",
"(",
"(",
"bytes",
"/",
"(",
"(",
"(",
"(",
"1024",
"*",
"1024",
")",
"*",
"1024",
")",
"*",
"1024",
")",
"*",
"1024",
")",
")",
")",
")"
] |
format the value like a human-readable file size .
|
train
| false
|
10,453
|
def _extend_blocks(result, blocks=None):
if (blocks is None):
blocks = []
if isinstance(result, list):
for r in result:
if isinstance(r, list):
blocks.extend(r)
else:
blocks.append(r)
elif isinstance(result, BlockManager):
blocks.extend(result.blocks)
else:
blocks.append(result)
return blocks
|
[
"def",
"_extend_blocks",
"(",
"result",
",",
"blocks",
"=",
"None",
")",
":",
"if",
"(",
"blocks",
"is",
"None",
")",
":",
"blocks",
"=",
"[",
"]",
"if",
"isinstance",
"(",
"result",
",",
"list",
")",
":",
"for",
"r",
"in",
"result",
":",
"if",
"isinstance",
"(",
"r",
",",
"list",
")",
":",
"blocks",
".",
"extend",
"(",
"r",
")",
"else",
":",
"blocks",
".",
"append",
"(",
"r",
")",
"elif",
"isinstance",
"(",
"result",
",",
"BlockManager",
")",
":",
"blocks",
".",
"extend",
"(",
"result",
".",
"blocks",
")",
"else",
":",
"blocks",
".",
"append",
"(",
"result",
")",
"return",
"blocks"
] |
extends the dictionary blocks with *new* blocks in the parent node .
|
train
| false
|
10,454
|
def dllVsExe():
createAssembly('DLL', 'DLLEXE', 1)
compileAssembly((Directory.GetCurrentDirectory() + '\\fooDLL.cs'))
file_name = (Directory.GetCurrentDirectory() + '\\fooEXE.cs')
file = open(file_name, 'w')
print >>file, (cs_ipy % ('DLLEXE', 0))
file.close()
compileExe(file_name)
|
[
"def",
"dllVsExe",
"(",
")",
":",
"createAssembly",
"(",
"'DLL'",
",",
"'DLLEXE'",
",",
"1",
")",
"compileAssembly",
"(",
"(",
"Directory",
".",
"GetCurrentDirectory",
"(",
")",
"+",
"'\\\\fooDLL.cs'",
")",
")",
"file_name",
"=",
"(",
"Directory",
".",
"GetCurrentDirectory",
"(",
")",
"+",
"'\\\\fooEXE.cs'",
")",
"file",
"=",
"open",
"(",
"file_name",
",",
"'w'",
")",
"print",
">>",
"file",
",",
"(",
"cs_ipy",
"%",
"(",
"'DLLEXE'",
",",
"0",
")",
")",
"file",
".",
"close",
"(",
")",
"compileExe",
"(",
"file_name",
")"
] |
creates a dll and exe of the same namespace to see which one ip uses .
|
train
| false
|
10,455
|
def laplacian_kernel(X, Y=None, gamma=None):
(X, Y) = check_pairwise_arrays(X, Y)
if (gamma is None):
gamma = (1.0 / X.shape[1])
K = ((- gamma) * manhattan_distances(X, Y))
np.exp(K, K)
return K
|
[
"def",
"laplacian_kernel",
"(",
"X",
",",
"Y",
"=",
"None",
",",
"gamma",
"=",
"None",
")",
":",
"(",
"X",
",",
"Y",
")",
"=",
"check_pairwise_arrays",
"(",
"X",
",",
"Y",
")",
"if",
"(",
"gamma",
"is",
"None",
")",
":",
"gamma",
"=",
"(",
"1.0",
"/",
"X",
".",
"shape",
"[",
"1",
"]",
")",
"K",
"=",
"(",
"(",
"-",
"gamma",
")",
"*",
"manhattan_distances",
"(",
"X",
",",
"Y",
")",
")",
"np",
".",
"exp",
"(",
"K",
",",
"K",
")",
"return",
"K"
] |
compute the laplacian kernel between x and y .
|
train
| false
|
10,456
|
def DEBUG(x):
LOG_LEVEL('debug')
|
[
"def",
"DEBUG",
"(",
"x",
")",
":",
"LOG_LEVEL",
"(",
"'debug'",
")"
] |
temporary logger during development that is always on .
|
train
| false
|
10,458
|
def batchpipe(command, args, preexec_fn=None, arg_max=sc_arg_max):
base_size = _argmax_base(command)
while args:
room = (arg_max - base_size)
i = 0
while (i < len(args)):
next_size = _argmax_args_size(args[i:(i + 1)])
if ((room - next_size) < 0):
break
room -= next_size
i += 1
sub_args = args[:i]
args = args[i:]
assert len(sub_args)
(yield readpipe((command + sub_args), preexec_fn=preexec_fn))
|
[
"def",
"batchpipe",
"(",
"command",
",",
"args",
",",
"preexec_fn",
"=",
"None",
",",
"arg_max",
"=",
"sc_arg_max",
")",
":",
"base_size",
"=",
"_argmax_base",
"(",
"command",
")",
"while",
"args",
":",
"room",
"=",
"(",
"arg_max",
"-",
"base_size",
")",
"i",
"=",
"0",
"while",
"(",
"i",
"<",
"len",
"(",
"args",
")",
")",
":",
"next_size",
"=",
"_argmax_args_size",
"(",
"args",
"[",
"i",
":",
"(",
"i",
"+",
"1",
")",
"]",
")",
"if",
"(",
"(",
"room",
"-",
"next_size",
")",
"<",
"0",
")",
":",
"break",
"room",
"-=",
"next_size",
"i",
"+=",
"1",
"sub_args",
"=",
"args",
"[",
":",
"i",
"]",
"args",
"=",
"args",
"[",
"i",
":",
"]",
"assert",
"len",
"(",
"sub_args",
")",
"(",
"yield",
"readpipe",
"(",
"(",
"command",
"+",
"sub_args",
")",
",",
"preexec_fn",
"=",
"preexec_fn",
")",
")"
] |
if args is not empty .
|
train
| false
|
10,459
|
def _validate_ami_region_map(blob):
return dict(((AWS_REGIONS.lookupByValue(k), v) for (k, v) in json.loads(blob).items()))
|
[
"def",
"_validate_ami_region_map",
"(",
"blob",
")",
":",
"return",
"dict",
"(",
"(",
"(",
"AWS_REGIONS",
".",
"lookupByValue",
"(",
"k",
")",
",",
"v",
")",
"for",
"(",
"k",
",",
"v",
")",
"in",
"json",
".",
"loads",
"(",
"blob",
")",
".",
"items",
"(",
")",
")",
")"
] |
validate and decode the supplied json encoded mapping .
|
train
| false
|
10,460
|
def template_used(response, template_name):
templates = []
templates += [t.name for t in getattr(response, 'templates', [])]
templates += getattr(response, 'jinja_templates', [])
return (template_name in templates)
|
[
"def",
"template_used",
"(",
"response",
",",
"template_name",
")",
":",
"templates",
"=",
"[",
"]",
"templates",
"+=",
"[",
"t",
".",
"name",
"for",
"t",
"in",
"getattr",
"(",
"response",
",",
"'templates'",
",",
"[",
"]",
")",
"]",
"templates",
"+=",
"getattr",
"(",
"response",
",",
"'jinja_templates'",
",",
"[",
"]",
")",
"return",
"(",
"template_name",
"in",
"templates",
")"
] |
asserts a given template was used first off .
|
train
| false
|
10,463
|
def prefix_encode(a, b):
i = first_diff(a, b)
return (chr(i) + b[i:].encode('utf8'))
|
[
"def",
"prefix_encode",
"(",
"a",
",",
"b",
")",
":",
"i",
"=",
"first_diff",
"(",
"a",
",",
"b",
")",
"return",
"(",
"chr",
"(",
"i",
")",
"+",
"b",
"[",
"i",
":",
"]",
".",
"encode",
"(",
"'utf8'",
")",
")"
] |
compresses string b as an integer representing the prefix it shares with a .
|
train
| false
|
10,464
|
def _replicate_elements(input_iterable, num_times):
if (len(input_iterable) == 1):
return ((input_iterable[0],) * num_times)
return tuple(input_iterable)
|
[
"def",
"_replicate_elements",
"(",
"input_iterable",
",",
"num_times",
")",
":",
"if",
"(",
"len",
"(",
"input_iterable",
")",
"==",
"1",
")",
":",
"return",
"(",
"(",
"input_iterable",
"[",
"0",
"]",
",",
")",
"*",
"num_times",
")",
"return",
"tuple",
"(",
"input_iterable",
")"
] |
replicates entry in input_iterable if input_iterable is of length 1 .
|
train
| false
|
10,468
|
def _sec_to_usec(t_sec):
return int((t_sec * 1000000.0))
|
[
"def",
"_sec_to_usec",
"(",
"t_sec",
")",
":",
"return",
"int",
"(",
"(",
"t_sec",
"*",
"1000000.0",
")",
")"
] |
converts a time in seconds to usec since the epoch .
|
train
| false
|
10,469
|
def strip_control_sequences(string):
global _STRIP_ANSI
if (not _STRIP_ANSI):
from evennia.utils.ansi import strip_raw_ansi as _STRIP_ANSI
return _RE_CONTROL_CHAR.sub('', _STRIP_ANSI(string))
|
[
"def",
"strip_control_sequences",
"(",
"string",
")",
":",
"global",
"_STRIP_ANSI",
"if",
"(",
"not",
"_STRIP_ANSI",
")",
":",
"from",
"evennia",
".",
"utils",
".",
"ansi",
"import",
"strip_raw_ansi",
"as",
"_STRIP_ANSI",
"return",
"_RE_CONTROL_CHAR",
".",
"sub",
"(",
"''",
",",
"_STRIP_ANSI",
"(",
"string",
")",
")"
] |
remove non-print text sequences .
|
train
| false
|
10,470
|
def validate_new_email(user, new_email):
try:
validate_email(new_email)
except ValidationError:
raise ValueError(_('Valid e-mail address required.'))
if (new_email == user.email):
raise ValueError(_('Old email is the same as the new email.'))
if (User.objects.filter(email=new_email).count() != 0):
raise ValueError(_('An account with this e-mail already exists.'))
|
[
"def",
"validate_new_email",
"(",
"user",
",",
"new_email",
")",
":",
"try",
":",
"validate_email",
"(",
"new_email",
")",
"except",
"ValidationError",
":",
"raise",
"ValueError",
"(",
"_",
"(",
"'Valid e-mail address required.'",
")",
")",
"if",
"(",
"new_email",
"==",
"user",
".",
"email",
")",
":",
"raise",
"ValueError",
"(",
"_",
"(",
"'Old email is the same as the new email.'",
")",
")",
"if",
"(",
"User",
".",
"objects",
".",
"filter",
"(",
"email",
"=",
"new_email",
")",
".",
"count",
"(",
")",
"!=",
"0",
")",
":",
"raise",
"ValueError",
"(",
"_",
"(",
"'An account with this e-mail already exists.'",
")",
")"
] |
given a new email for a user .
|
train
| false
|
10,471
|
def _hdu_class_from_header(cls, header):
klass = cls
if header:
for c in reversed(list(itersubclasses(cls))):
try:
if (not (c.__module__.startswith('astropy.io.fits.') or (c in cls._hdu_registry))):
continue
if c.match_header(header):
klass = c
break
except NotImplementedError:
continue
except Exception as exc:
warnings.warn('An exception occurred matching an HDU header to the appropriate HDU type: {0}'.format(exc), AstropyUserWarning)
warnings.warn('The HDU will be treated as corrupted.', AstropyUserWarning)
klass = _CorruptedHDU
del exc
break
return klass
|
[
"def",
"_hdu_class_from_header",
"(",
"cls",
",",
"header",
")",
":",
"klass",
"=",
"cls",
"if",
"header",
":",
"for",
"c",
"in",
"reversed",
"(",
"list",
"(",
"itersubclasses",
"(",
"cls",
")",
")",
")",
":",
"try",
":",
"if",
"(",
"not",
"(",
"c",
".",
"__module__",
".",
"startswith",
"(",
"'astropy.io.fits.'",
")",
"or",
"(",
"c",
"in",
"cls",
".",
"_hdu_registry",
")",
")",
")",
":",
"continue",
"if",
"c",
".",
"match_header",
"(",
"header",
")",
":",
"klass",
"=",
"c",
"break",
"except",
"NotImplementedError",
":",
"continue",
"except",
"Exception",
"as",
"exc",
":",
"warnings",
".",
"warn",
"(",
"'An exception occurred matching an HDU header to the appropriate HDU type: {0}'",
".",
"format",
"(",
"exc",
")",
",",
"AstropyUserWarning",
")",
"warnings",
".",
"warn",
"(",
"'The HDU will be treated as corrupted.'",
",",
"AstropyUserWarning",
")",
"klass",
"=",
"_CorruptedHDU",
"del",
"exc",
"break",
"return",
"klass"
] |
used primarily by _basehdu .
|
train
| false
|
10,472
|
def reset_runtime():
from behave import step_registry
from behave import matchers
step_registry.registry = step_registry.StepRegistry()
step_registry.setup_step_decorators(None, step_registry.registry)
matchers.ParseMatcher.custom_types = {}
matchers.current_matcher = matchers.ParseMatcher
|
[
"def",
"reset_runtime",
"(",
")",
":",
"from",
"behave",
"import",
"step_registry",
"from",
"behave",
"import",
"matchers",
"step_registry",
".",
"registry",
"=",
"step_registry",
".",
"StepRegistry",
"(",
")",
"step_registry",
".",
"setup_step_decorators",
"(",
"None",
",",
"step_registry",
".",
"registry",
")",
"matchers",
".",
"ParseMatcher",
".",
"custom_types",
"=",
"{",
"}",
"matchers",
".",
"current_matcher",
"=",
"matchers",
".",
"ParseMatcher"
] |
reset runtime environment .
|
train
| false
|
10,473
|
def get_xrds_url(resource, request):
host = request.get_host()
location = (((host + '/openid/provider/') + resource) + '/')
if request.is_secure():
return ('https://' + location)
else:
return ('http://' + location)
|
[
"def",
"get_xrds_url",
"(",
"resource",
",",
"request",
")",
":",
"host",
"=",
"request",
".",
"get_host",
"(",
")",
"location",
"=",
"(",
"(",
"(",
"host",
"+",
"'/openid/provider/'",
")",
"+",
"resource",
")",
"+",
"'/'",
")",
"if",
"request",
".",
"is_secure",
"(",
")",
":",
"return",
"(",
"'https://'",
"+",
"location",
")",
"else",
":",
"return",
"(",
"'http://'",
"+",
"location",
")"
] |
return the xrds url for a resource .
|
train
| false
|
10,474
|
def test_init_wrong_classifier():
classifier = 'rnd'
bc = BalanceCascade(classifier=classifier)
assert_raises(NotImplementedError, bc.fit_sample, X, Y)
|
[
"def",
"test_init_wrong_classifier",
"(",
")",
":",
"classifier",
"=",
"'rnd'",
"bc",
"=",
"BalanceCascade",
"(",
"classifier",
"=",
"classifier",
")",
"assert_raises",
"(",
"NotImplementedError",
",",
"bc",
".",
"fit_sample",
",",
"X",
",",
"Y",
")"
] |
test either if an error is raised the classifier provided is unknown .
|
train
| false
|
10,475
|
@utils.arg('--all-tenants', action='store_const', const=1, default=0, help=_('Reset state server(s) in another tenant by name (Admin only).'))
@utils.arg('server', metavar='<server>', nargs='+', help=_('Name or ID of server(s).'))
@utils.arg('--active', action='store_const', dest='state', default='error', const='active', help=_('Request the server be reset to "active" state instead of "error" state (the default).'))
def do_reset_state(cs, args):
failure_flag = False
find_args = {'all_tenants': args.all_tenants}
for server in args.server:
try:
_find_server(cs, server, **find_args).reset_state(args.state)
msg = 'Reset state for server %s succeeded; new state is %s'
print((msg % (server, args.state)))
except Exception as e:
failure_flag = True
msg = ('Reset state for server %s failed: %s' % (server, e))
print(msg)
if failure_flag:
msg = 'Unable to reset the state for the specified server(s).'
raise exceptions.CommandError(msg)
|
[
"@",
"utils",
".",
"arg",
"(",
"'--all-tenants'",
",",
"action",
"=",
"'store_const'",
",",
"const",
"=",
"1",
",",
"default",
"=",
"0",
",",
"help",
"=",
"_",
"(",
"'Reset state server(s) in another tenant by name (Admin only).'",
")",
")",
"@",
"utils",
".",
"arg",
"(",
"'server'",
",",
"metavar",
"=",
"'<server>'",
",",
"nargs",
"=",
"'+'",
",",
"help",
"=",
"_",
"(",
"'Name or ID of server(s).'",
")",
")",
"@",
"utils",
".",
"arg",
"(",
"'--active'",
",",
"action",
"=",
"'store_const'",
",",
"dest",
"=",
"'state'",
",",
"default",
"=",
"'error'",
",",
"const",
"=",
"'active'",
",",
"help",
"=",
"_",
"(",
"'Request the server be reset to \"active\" state instead of \"error\" state (the default).'",
")",
")",
"def",
"do_reset_state",
"(",
"cs",
",",
"args",
")",
":",
"failure_flag",
"=",
"False",
"find_args",
"=",
"{",
"'all_tenants'",
":",
"args",
".",
"all_tenants",
"}",
"for",
"server",
"in",
"args",
".",
"server",
":",
"try",
":",
"_find_server",
"(",
"cs",
",",
"server",
",",
"**",
"find_args",
")",
".",
"reset_state",
"(",
"args",
".",
"state",
")",
"msg",
"=",
"'Reset state for server %s succeeded; new state is %s'",
"print",
"(",
"(",
"msg",
"%",
"(",
"server",
",",
"args",
".",
"state",
")",
")",
")",
"except",
"Exception",
"as",
"e",
":",
"failure_flag",
"=",
"True",
"msg",
"=",
"(",
"'Reset state for server %s failed: %s'",
"%",
"(",
"server",
",",
"e",
")",
")",
"print",
"(",
"msg",
")",
"if",
"failure_flag",
":",
"msg",
"=",
"'Unable to reset the state for the specified server(s).'",
"raise",
"exceptions",
".",
"CommandError",
"(",
"msg",
")"
] |
reset the state of a server .
|
train
| false
|
10,476
|
def _addrdict_to_ip_addrs(addrs):
return ['{0}/{1}'.format(x['IP Address'], x['Subnet'].rsplit('/', 1)[(-1)]) for x in addrs]
|
[
"def",
"_addrdict_to_ip_addrs",
"(",
"addrs",
")",
":",
"return",
"[",
"'{0}/{1}'",
".",
"format",
"(",
"x",
"[",
"'IP Address'",
"]",
",",
"x",
"[",
"'Subnet'",
"]",
".",
"rsplit",
"(",
"'/'",
",",
"1",
")",
"[",
"(",
"-",
"1",
")",
"]",
")",
"for",
"x",
"in",
"addrs",
"]"
] |
extracts a list of ip/cidr expressions from a list of addrdicts .
|
train
| false
|
10,477
|
def auth_basic():
auth = request.authorization
if (not auth):
return (False, '')
user = UserModel.query.filter_by(email=auth.username).first()
auth_ok = False
if (user is not None):
auth_ok = check_password_hash(auth.password.encode('utf-8'), user.password.encode('utf-8'), user.salt)
if (not auth_ok):
return (False, 'Authentication failed. Wrong username or password')
g.user = user
return (True, '')
|
[
"def",
"auth_basic",
"(",
")",
":",
"auth",
"=",
"request",
".",
"authorization",
"if",
"(",
"not",
"auth",
")",
":",
"return",
"(",
"False",
",",
"''",
")",
"user",
"=",
"UserModel",
".",
"query",
".",
"filter_by",
"(",
"email",
"=",
"auth",
".",
"username",
")",
".",
"first",
"(",
")",
"auth_ok",
"=",
"False",
"if",
"(",
"user",
"is",
"not",
"None",
")",
":",
"auth_ok",
"=",
"check_password_hash",
"(",
"auth",
".",
"password",
".",
"encode",
"(",
"'utf-8'",
")",
",",
"user",
".",
"password",
".",
"encode",
"(",
"'utf-8'",
")",
",",
"user",
".",
"salt",
")",
"if",
"(",
"not",
"auth_ok",
")",
":",
"return",
"(",
"False",
",",
"'Authentication failed. Wrong username or password'",
")",
"g",
".",
"user",
"=",
"user",
"return",
"(",
"True",
",",
"''",
")"
] |
callback decorator to require http auth .
|
train
| false
|
10,479
|
def toDouble(bytes):
(right, left) = struct.unpack('<Ii', struct.pack(('B' * 8), *bytes[0:8]))
return (float(left) + (float(right) / (2 ** 32)))
|
[
"def",
"toDouble",
"(",
"bytes",
")",
":",
"(",
"right",
",",
"left",
")",
"=",
"struct",
".",
"unpack",
"(",
"'<Ii'",
",",
"struct",
".",
"pack",
"(",
"(",
"'B'",
"*",
"8",
")",
",",
"*",
"bytes",
"[",
"0",
":",
"8",
"]",
")",
")",
"return",
"(",
"float",
"(",
"left",
")",
"+",
"(",
"float",
"(",
"right",
")",
"/",
"(",
"2",
"**",
"32",
")",
")",
")"
] |
name: todouble args: buffer .
|
train
| false
|
10,481
|
def get_program_marketing_url(programs_config):
return urljoin(settings.MKTG_URLS.get('ROOT'), programs_config.marketing_path).rstrip('/')
|
[
"def",
"get_program_marketing_url",
"(",
"programs_config",
")",
":",
"return",
"urljoin",
"(",
"settings",
".",
"MKTG_URLS",
".",
"get",
"(",
"'ROOT'",
")",
",",
"programs_config",
".",
"marketing_path",
")",
".",
"rstrip",
"(",
"'/'",
")"
] |
build a url to be used when linking to program details on a marketing site .
|
train
| false
|
10,482
|
def distribute_and_over_or(expr):
return _distribute((expr, And, Or))
|
[
"def",
"distribute_and_over_or",
"(",
"expr",
")",
":",
"return",
"_distribute",
"(",
"(",
"expr",
",",
"And",
",",
"Or",
")",
")"
] |
given a sentence s consisting of conjunctions and disjunctions of literals .
|
train
| false
|
10,483
|
def extract_all_time_results(results_string):
pattern = re.compile('(.*?)user (.*?)system (.*?)elapsed')
results = []
for result in pattern.findall(results_string):
results.append(tuple([to_seconds(elt) for elt in result]))
return results
|
[
"def",
"extract_all_time_results",
"(",
"results_string",
")",
":",
"pattern",
"=",
"re",
".",
"compile",
"(",
"'(.*?)user (.*?)system (.*?)elapsed'",
")",
"results",
"=",
"[",
"]",
"for",
"result",
"in",
"pattern",
".",
"findall",
"(",
"results_string",
")",
":",
"results",
".",
"append",
"(",
"tuple",
"(",
"[",
"to_seconds",
"(",
"elt",
")",
"for",
"elt",
"in",
"result",
"]",
")",
")",
"return",
"results"
] |
extract user .
|
train
| false
|
10,484
|
def disk_delete(session, dc_ref, file_path):
LOG.debug('Deleting virtual disk %s', file_path)
delete_disk_task = session._call_method(session.vim, 'DeleteVirtualDisk_Task', session.vim.service_content.virtualDiskManager, name=str(file_path), datacenter=dc_ref)
session._wait_for_task(delete_disk_task)
LOG.info(_LI('Deleted virtual disk %s.'), file_path)
|
[
"def",
"disk_delete",
"(",
"session",
",",
"dc_ref",
",",
"file_path",
")",
":",
"LOG",
".",
"debug",
"(",
"'Deleting virtual disk %s'",
",",
"file_path",
")",
"delete_disk_task",
"=",
"session",
".",
"_call_method",
"(",
"session",
".",
"vim",
",",
"'DeleteVirtualDisk_Task'",
",",
"session",
".",
"vim",
".",
"service_content",
".",
"virtualDiskManager",
",",
"name",
"=",
"str",
"(",
"file_path",
")",
",",
"datacenter",
"=",
"dc_ref",
")",
"session",
".",
"_wait_for_task",
"(",
"delete_disk_task",
")",
"LOG",
".",
"info",
"(",
"_LI",
"(",
"'Deleted virtual disk %s.'",
")",
",",
"file_path",
")"
] |
deletes a virtual disk .
|
train
| false
|
10,485
|
def skip_if_cache_disabled(*sections):
def wrapper(f):
@functools.wraps(f)
def inner(*args, **kwargs):
if (not CONF.cache.enabled):
raise testcase.TestSkipped('Cache globally disabled.')
for s in sections:
conf_sec = getattr(CONF, s, None)
if (conf_sec is not None):
if (not getattr(conf_sec, 'caching', True)):
raise testcase.TestSkipped(('%s caching disabled.' % s))
return f(*args, **kwargs)
return inner
return wrapper
|
[
"def",
"skip_if_cache_disabled",
"(",
"*",
"sections",
")",
":",
"def",
"wrapper",
"(",
"f",
")",
":",
"@",
"functools",
".",
"wraps",
"(",
"f",
")",
"def",
"inner",
"(",
"*",
"args",
",",
"**",
"kwargs",
")",
":",
"if",
"(",
"not",
"CONF",
".",
"cache",
".",
"enabled",
")",
":",
"raise",
"testcase",
".",
"TestSkipped",
"(",
"'Cache globally disabled.'",
")",
"for",
"s",
"in",
"sections",
":",
"conf_sec",
"=",
"getattr",
"(",
"CONF",
",",
"s",
",",
"None",
")",
"if",
"(",
"conf_sec",
"is",
"not",
"None",
")",
":",
"if",
"(",
"not",
"getattr",
"(",
"conf_sec",
",",
"'caching'",
",",
"True",
")",
")",
":",
"raise",
"testcase",
".",
"TestSkipped",
"(",
"(",
"'%s caching disabled.'",
"%",
"s",
")",
")",
"return",
"f",
"(",
"*",
"args",
",",
"**",
"kwargs",
")",
"return",
"inner",
"return",
"wrapper"
] |
skip a test if caching is disabled .
|
train
| false
|
10,486
|
def __names_from_dicts(lod, optional=True):
results = []
if optional:
results.append('<<None>>')
for x in lod:
results.append(x['name'])
results.sort()
return results
|
[
"def",
"__names_from_dicts",
"(",
"lod",
",",
"optional",
"=",
"True",
")",
":",
"results",
"=",
"[",
"]",
"if",
"optional",
":",
"results",
".",
"append",
"(",
"'<<None>>'",
")",
"for",
"x",
"in",
"lod",
":",
"results",
".",
"append",
"(",
"x",
"[",
"'name'",
"]",
")",
"results",
".",
"sort",
"(",
")",
"return",
"results"
] |
tiny helper function .
|
train
| false
|
10,490
|
def is_utf8(p):
utf8 = True
try:
p.decode('ascii')
except:
try:
p.decode('utf-8')
except:
utf8 = False
return utf8
|
[
"def",
"is_utf8",
"(",
"p",
")",
":",
"utf8",
"=",
"True",
"try",
":",
"p",
".",
"decode",
"(",
"'ascii'",
")",
"except",
":",
"try",
":",
"p",
".",
"decode",
"(",
"'utf-8'",
")",
"except",
":",
"utf8",
"=",
"False",
"return",
"utf8"
] |
return true when p is utf-8 or plain ascii .
|
train
| false
|
10,491
|
def _find_monitor_snapshot(cs, snapshot):
return utils.find_resource(cs.monitor_snapshots, snapshot)
|
[
"def",
"_find_monitor_snapshot",
"(",
"cs",
",",
"snapshot",
")",
":",
"return",
"utils",
".",
"find_resource",
"(",
"cs",
".",
"monitor_snapshots",
",",
"snapshot",
")"
] |
get a monitor snapshot by id .
|
train
| false
|
10,492
|
def _validator(code_or_name, validator_type):
if (validator_type == 'error'):
from .errors import codes
from .errors import EXT
elif (validator_type == 'warning'):
from .warnings import codes
from .warnings import EXT
else:
pass
def decorator(func):
def wrapper(*args, **kw):
extra = func(*args, **kw)
if (extra is None):
return []
if isinstance(code_or_name, string_types):
code = EXT
name = ((codes[code][0] + ':') + code_or_name)
else:
code = code_or_name
name = codes[code][0]
text = codes[code][1]
return [(code, name, text, extra)]
wrapper.validator_type = validator_type
return wrapper
return decorator
|
[
"def",
"_validator",
"(",
"code_or_name",
",",
"validator_type",
")",
":",
"if",
"(",
"validator_type",
"==",
"'error'",
")",
":",
"from",
".",
"errors",
"import",
"codes",
"from",
".",
"errors",
"import",
"EXT",
"elif",
"(",
"validator_type",
"==",
"'warning'",
")",
":",
"from",
".",
"warnings",
"import",
"codes",
"from",
".",
"warnings",
"import",
"EXT",
"else",
":",
"pass",
"def",
"decorator",
"(",
"func",
")",
":",
"def",
"wrapper",
"(",
"*",
"args",
",",
"**",
"kw",
")",
":",
"extra",
"=",
"func",
"(",
"*",
"args",
",",
"**",
"kw",
")",
"if",
"(",
"extra",
"is",
"None",
")",
":",
"return",
"[",
"]",
"if",
"isinstance",
"(",
"code_or_name",
",",
"string_types",
")",
":",
"code",
"=",
"EXT",
"name",
"=",
"(",
"(",
"codes",
"[",
"code",
"]",
"[",
"0",
"]",
"+",
"':'",
")",
"+",
"code_or_name",
")",
"else",
":",
"code",
"=",
"code_or_name",
"name",
"=",
"codes",
"[",
"code",
"]",
"[",
"0",
"]",
"text",
"=",
"codes",
"[",
"code",
"]",
"[",
"1",
"]",
"return",
"[",
"(",
"code",
",",
"name",
",",
"text",
",",
"extra",
")",
"]",
"wrapper",
".",
"validator_type",
"=",
"validator_type",
"return",
"wrapper",
"return",
"decorator"
] |
validator that tis tightly coupled to the implementation of the classes here .
|
train
| true
|
10,493
|
def wait_for_successful_query(url, wait_for=300, **kwargs):
starttime = time.time()
while True:
caught_exception = None
result = None
try:
result = query(url=url, **kwargs)
if ((not result.get('Error')) and (not result.get('error'))):
return result
except Exception as exc:
caught_exception = exc
if (time.time() > (starttime + wait_for)):
if ((not result) and caught_exception):
raise caught_exception
return result
|
[
"def",
"wait_for_successful_query",
"(",
"url",
",",
"wait_for",
"=",
"300",
",",
"**",
"kwargs",
")",
":",
"starttime",
"=",
"time",
".",
"time",
"(",
")",
"while",
"True",
":",
"caught_exception",
"=",
"None",
"result",
"=",
"None",
"try",
":",
"result",
"=",
"query",
"(",
"url",
"=",
"url",
",",
"**",
"kwargs",
")",
"if",
"(",
"(",
"not",
"result",
".",
"get",
"(",
"'Error'",
")",
")",
"and",
"(",
"not",
"result",
".",
"get",
"(",
"'error'",
")",
")",
")",
":",
"return",
"result",
"except",
"Exception",
"as",
"exc",
":",
"caught_exception",
"=",
"exc",
"if",
"(",
"time",
".",
"time",
"(",
")",
">",
"(",
"starttime",
"+",
"wait_for",
")",
")",
":",
"if",
"(",
"(",
"not",
"result",
")",
"and",
"caught_exception",
")",
":",
"raise",
"caught_exception",
"return",
"result"
] |
like query but .
|
train
| true
|
10,494
|
def listmailcapfiles():
if ('MAILCAPS' in os.environ):
pathstr = os.environ['MAILCAPS']
mailcaps = pathstr.split(os.pathsep)
else:
if ('HOME' in os.environ):
home = os.environ['HOME']
else:
home = '.'
mailcaps = [(home + '/.mailcap'), '/etc/mailcap', '/usr/etc/mailcap', '/usr/local/etc/mailcap']
return mailcaps
|
[
"def",
"listmailcapfiles",
"(",
")",
":",
"if",
"(",
"'MAILCAPS'",
"in",
"os",
".",
"environ",
")",
":",
"pathstr",
"=",
"os",
".",
"environ",
"[",
"'MAILCAPS'",
"]",
"mailcaps",
"=",
"pathstr",
".",
"split",
"(",
"os",
".",
"pathsep",
")",
"else",
":",
"if",
"(",
"'HOME'",
"in",
"os",
".",
"environ",
")",
":",
"home",
"=",
"os",
".",
"environ",
"[",
"'HOME'",
"]",
"else",
":",
"home",
"=",
"'.'",
"mailcaps",
"=",
"[",
"(",
"home",
"+",
"'/.mailcap'",
")",
",",
"'/etc/mailcap'",
",",
"'/usr/etc/mailcap'",
",",
"'/usr/local/etc/mailcap'",
"]",
"return",
"mailcaps"
] |
return a list of all mailcap files found on the system .
|
train
| false
|
10,495
|
def get_entrance_exam_instructor_task_history(course_id, usage_key=None, student=None):
instructor_tasks = InstructorTask.objects.filter(course_id=course_id)
if ((usage_key is not None) or (student is not None)):
(_, task_key) = encode_entrance_exam_and_student_input(usage_key, student)
instructor_tasks = instructor_tasks.filter(task_key=task_key)
return instructor_tasks.order_by('-id')
|
[
"def",
"get_entrance_exam_instructor_task_history",
"(",
"course_id",
",",
"usage_key",
"=",
"None",
",",
"student",
"=",
"None",
")",
":",
"instructor_tasks",
"=",
"InstructorTask",
".",
"objects",
".",
"filter",
"(",
"course_id",
"=",
"course_id",
")",
"if",
"(",
"(",
"usage_key",
"is",
"not",
"None",
")",
"or",
"(",
"student",
"is",
"not",
"None",
")",
")",
":",
"(",
"_",
",",
"task_key",
")",
"=",
"encode_entrance_exam_and_student_input",
"(",
"usage_key",
",",
"student",
")",
"instructor_tasks",
"=",
"instructor_tasks",
".",
"filter",
"(",
"task_key",
"=",
"task_key",
")",
"return",
"instructor_tasks",
".",
"order_by",
"(",
"'-id'",
")"
] |
returns a query of instructortask objects of historical tasks for a given course .
|
train
| false
|
10,496
|
def h1(individual):
num = ((sin((individual[0] - (individual[1] / 8))) ** 2) + (sin((individual[1] + (individual[0] / 8))) ** 2))
denum = (((((individual[0] - 8.6998) ** 2) + ((individual[1] - 6.7665) ** 2)) ** 0.5) + 1)
return ((num / denum),)
|
[
"def",
"h1",
"(",
"individual",
")",
":",
"num",
"=",
"(",
"(",
"sin",
"(",
"(",
"individual",
"[",
"0",
"]",
"-",
"(",
"individual",
"[",
"1",
"]",
"/",
"8",
")",
")",
")",
"**",
"2",
")",
"+",
"(",
"sin",
"(",
"(",
"individual",
"[",
"1",
"]",
"+",
"(",
"individual",
"[",
"0",
"]",
"/",
"8",
")",
")",
")",
"**",
"2",
")",
")",
"denum",
"=",
"(",
"(",
"(",
"(",
"(",
"individual",
"[",
"0",
"]",
"-",
"8.6998",
")",
"**",
"2",
")",
"+",
"(",
"(",
"individual",
"[",
"1",
"]",
"-",
"6.7665",
")",
"**",
"2",
")",
")",
"**",
"0.5",
")",
"+",
"1",
")",
"return",
"(",
"(",
"num",
"/",
"denum",
")",
",",
")"
] |
simple two-dimensional function containing several local maxima .
|
train
| false
|
10,498
|
def get_stack_report(stack_id, aws_config):
output = aws_output(['cloudformation', 'describe-stacks', '--stack-name', stack_id], aws_config)
results = json.loads(output)
return results['Stacks'][0]
|
[
"def",
"get_stack_report",
"(",
"stack_id",
",",
"aws_config",
")",
":",
"output",
"=",
"aws_output",
"(",
"[",
"'cloudformation'",
",",
"'describe-stacks'",
",",
"'--stack-name'",
",",
"stack_id",
"]",
",",
"aws_config",
")",
"results",
"=",
"json",
".",
"loads",
"(",
"output",
")",
"return",
"results",
"[",
"'Stacks'",
"]",
"[",
"0",
"]"
] |
get information about a cloudformation stack .
|
train
| false
|
10,500
|
def _process_mass_opportunity_form(f):
def wrap(request, *args, **kwargs):
'Wrap'
if ('massform' in request.POST):
for key in request.POST:
if ('mass-opportunity' in key):
try:
opportunity = Opportunity.objects.get(pk=request.POST[key])
form = OpportunityMassActionForm(request.user.profile, request.POST, instance=opportunity)
if (form.is_valid() and request.user.profile.has_permission(opportunity, mode='w')):
form.save()
except:
pass
return f(request, *args, **kwargs)
wrap.__doc__ = f.__doc__
wrap.__name__ = f.__name__
return wrap
|
[
"def",
"_process_mass_opportunity_form",
"(",
"f",
")",
":",
"def",
"wrap",
"(",
"request",
",",
"*",
"args",
",",
"**",
"kwargs",
")",
":",
"if",
"(",
"'massform'",
"in",
"request",
".",
"POST",
")",
":",
"for",
"key",
"in",
"request",
".",
"POST",
":",
"if",
"(",
"'mass-opportunity'",
"in",
"key",
")",
":",
"try",
":",
"opportunity",
"=",
"Opportunity",
".",
"objects",
".",
"get",
"(",
"pk",
"=",
"request",
".",
"POST",
"[",
"key",
"]",
")",
"form",
"=",
"OpportunityMassActionForm",
"(",
"request",
".",
"user",
".",
"profile",
",",
"request",
".",
"POST",
",",
"instance",
"=",
"opportunity",
")",
"if",
"(",
"form",
".",
"is_valid",
"(",
")",
"and",
"request",
".",
"user",
".",
"profile",
".",
"has_permission",
"(",
"opportunity",
",",
"mode",
"=",
"'w'",
")",
")",
":",
"form",
".",
"save",
"(",
")",
"except",
":",
"pass",
"return",
"f",
"(",
"request",
",",
"*",
"args",
",",
"**",
"kwargs",
")",
"wrap",
".",
"__doc__",
"=",
"f",
".",
"__doc__",
"wrap",
".",
"__name__",
"=",
"f",
".",
"__name__",
"return",
"wrap"
] |
pre-process request to handle mass action form for orders .
|
train
| false
|
10,501
|
@register.inclusion_tag('utilities/render_field.html')
def render_field(field, bulk_nullable=False):
return {'field': field, 'bulk_nullable': bulk_nullable}
|
[
"@",
"register",
".",
"inclusion_tag",
"(",
"'utilities/render_field.html'",
")",
"def",
"render_field",
"(",
"field",
",",
"bulk_nullable",
"=",
"False",
")",
":",
"return",
"{",
"'field'",
":",
"field",
",",
"'bulk_nullable'",
":",
"bulk_nullable",
"}"
] |
render a field to a bootstrap layout .
|
train
| false
|
10,502
|
def windows_variant():
from win32api import GetVersionEx
from win32con import VER_PLATFORM_WIN32_NT
import _winreg
vista_plus = x64 = False
(maj, _minor, _buildno, plat, _csd) = GetVersionEx()
if (plat == VER_PLATFORM_WIN32_NT):
vista_plus = (maj > 5)
if vista_plus:
key = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SYSTEM\\CurrentControlSet\\Control\\Session Manager\\Environment')
for n in xrange(_winreg.QueryInfoKey(key)[1]):
(name, value, _val_type) = _winreg.EnumValue(key, n)
if (name == 'PROCESSOR_ARCHITECTURE'):
x64 = (value.upper() == u'AMD64')
break
_winreg.CloseKey(key)
return (vista_plus, x64)
|
[
"def",
"windows_variant",
"(",
")",
":",
"from",
"win32api",
"import",
"GetVersionEx",
"from",
"win32con",
"import",
"VER_PLATFORM_WIN32_NT",
"import",
"_winreg",
"vista_plus",
"=",
"x64",
"=",
"False",
"(",
"maj",
",",
"_minor",
",",
"_buildno",
",",
"plat",
",",
"_csd",
")",
"=",
"GetVersionEx",
"(",
")",
"if",
"(",
"plat",
"==",
"VER_PLATFORM_WIN32_NT",
")",
":",
"vista_plus",
"=",
"(",
"maj",
">",
"5",
")",
"if",
"vista_plus",
":",
"key",
"=",
"_winreg",
".",
"OpenKey",
"(",
"_winreg",
".",
"HKEY_LOCAL_MACHINE",
",",
"'SYSTEM\\\\CurrentControlSet\\\\Control\\\\Session Manager\\\\Environment'",
")",
"for",
"n",
"in",
"xrange",
"(",
"_winreg",
".",
"QueryInfoKey",
"(",
"key",
")",
"[",
"1",
"]",
")",
":",
"(",
"name",
",",
"value",
",",
"_val_type",
")",
"=",
"_winreg",
".",
"EnumValue",
"(",
"key",
",",
"n",
")",
"if",
"(",
"name",
"==",
"'PROCESSOR_ARCHITECTURE'",
")",
":",
"x64",
"=",
"(",
"value",
".",
"upper",
"(",
")",
"==",
"u'AMD64'",
")",
"break",
"_winreg",
".",
"CloseKey",
"(",
"key",
")",
"return",
"(",
"vista_plus",
",",
"x64",
")"
] |
determine windows variant return vista_plus .
|
train
| false
|
10,504
|
def create_contenttypes(app_config, verbosity=2, interactive=True, using=DEFAULT_DB_ALIAS, apps=global_apps, **kwargs):
if (not app_config.models_module):
return
app_label = app_config.label
try:
app_config = apps.get_app_config(app_label)
ContentType = apps.get_model('contenttypes', 'ContentType')
except LookupError:
return
(content_types, app_models) = get_contenttypes_and_models(app_config, using, ContentType)
if (not app_models):
return
cts = [ContentType(app_label=app_label, model=model_name) for (model_name, model) in app_models.items() if (model_name not in content_types)]
ContentType.objects.using(using).bulk_create(cts)
if (verbosity >= 2):
for ct in cts:
print ("Adding content type '%s | %s'" % (ct.app_label, ct.model))
|
[
"def",
"create_contenttypes",
"(",
"app_config",
",",
"verbosity",
"=",
"2",
",",
"interactive",
"=",
"True",
",",
"using",
"=",
"DEFAULT_DB_ALIAS",
",",
"apps",
"=",
"global_apps",
",",
"**",
"kwargs",
")",
":",
"if",
"(",
"not",
"app_config",
".",
"models_module",
")",
":",
"return",
"app_label",
"=",
"app_config",
".",
"label",
"try",
":",
"app_config",
"=",
"apps",
".",
"get_app_config",
"(",
"app_label",
")",
"ContentType",
"=",
"apps",
".",
"get_model",
"(",
"'contenttypes'",
",",
"'ContentType'",
")",
"except",
"LookupError",
":",
"return",
"(",
"content_types",
",",
"app_models",
")",
"=",
"get_contenttypes_and_models",
"(",
"app_config",
",",
"using",
",",
"ContentType",
")",
"if",
"(",
"not",
"app_models",
")",
":",
"return",
"cts",
"=",
"[",
"ContentType",
"(",
"app_label",
"=",
"app_label",
",",
"model",
"=",
"model_name",
")",
"for",
"(",
"model_name",
",",
"model",
")",
"in",
"app_models",
".",
"items",
"(",
")",
"if",
"(",
"model_name",
"not",
"in",
"content_types",
")",
"]",
"ContentType",
".",
"objects",
".",
"using",
"(",
"using",
")",
".",
"bulk_create",
"(",
"cts",
")",
"if",
"(",
"verbosity",
">=",
"2",
")",
":",
"for",
"ct",
"in",
"cts",
":",
"print",
"(",
"\"Adding content type '%s | %s'\"",
"%",
"(",
"ct",
".",
"app_label",
",",
"ct",
".",
"model",
")",
")"
] |
creates content types for models in the given app .
|
train
| false
|
10,505
|
def pythonify(text):
lines = text.split('\n')
lines[0] = ('>>> ' + lines[0])
return ('\n... '.join(lines) + '\n')
|
[
"def",
"pythonify",
"(",
"text",
")",
":",
"lines",
"=",
"text",
".",
"split",
"(",
"'\\n'",
")",
"lines",
"[",
"0",
"]",
"=",
"(",
"'>>> '",
"+",
"lines",
"[",
"0",
"]",
")",
"return",
"(",
"'\\n... '",
".",
"join",
"(",
"lines",
")",
"+",
"'\\n'",
")"
] |
make some text appear as though it was typed in at a python prompt .
|
train
| false
|
10,506
|
def handle_request_files_upload(request):
is_raw = False
upload = list(request.FILES.values())[0]
filename = upload.name
return (upload, filename, is_raw)
|
[
"def",
"handle_request_files_upload",
"(",
"request",
")",
":",
"is_raw",
"=",
"False",
"upload",
"=",
"list",
"(",
"request",
".",
"FILES",
".",
"values",
"(",
")",
")",
"[",
"0",
"]",
"filename",
"=",
"upload",
".",
"name",
"return",
"(",
"upload",
",",
"filename",
",",
"is_raw",
")"
] |
handle request .
|
train
| true
|
10,507
|
def _as_scalar(res, dtype=None):
if (dtype is None):
dtype = config.floatX
if numpy.all(res.type.broadcastable):
while (res.owner and isinstance(res.owner.op, T.DimShuffle)):
res = res.owner.inputs[0]
if res.type.broadcastable:
rval = res.dimshuffle()
else:
rval = res
if (rval.type.dtype in theano.tensor.integer_dtypes):
if (theano.scalar.upcast(res.dtype, dtype) == dtype):
return T.cast(rval, dtype)
else:
return None
return rval
|
[
"def",
"_as_scalar",
"(",
"res",
",",
"dtype",
"=",
"None",
")",
":",
"if",
"(",
"dtype",
"is",
"None",
")",
":",
"dtype",
"=",
"config",
".",
"floatX",
"if",
"numpy",
".",
"all",
"(",
"res",
".",
"type",
".",
"broadcastable",
")",
":",
"while",
"(",
"res",
".",
"owner",
"and",
"isinstance",
"(",
"res",
".",
"owner",
".",
"op",
",",
"T",
".",
"DimShuffle",
")",
")",
":",
"res",
"=",
"res",
".",
"owner",
".",
"inputs",
"[",
"0",
"]",
"if",
"res",
".",
"type",
".",
"broadcastable",
":",
"rval",
"=",
"res",
".",
"dimshuffle",
"(",
")",
"else",
":",
"rval",
"=",
"res",
"if",
"(",
"rval",
".",
"type",
".",
"dtype",
"in",
"theano",
".",
"tensor",
".",
"integer_dtypes",
")",
":",
"if",
"(",
"theano",
".",
"scalar",
".",
"upcast",
"(",
"res",
".",
"dtype",
",",
"dtype",
")",
"==",
"dtype",
")",
":",
"return",
"T",
".",
"cast",
"(",
"rval",
",",
"dtype",
")",
"else",
":",
"return",
"None",
"return",
"rval"
] |
return none or a tensorvariable whose type is in t .
|
train
| false
|
10,508
|
def opt_out(msg_hash):
(email, added) = Email.handler.opt_out(msg_hash)
if (email and added):
_system_email(email, '', Email.Kind.OPTOUT)
return (email, added)
|
[
"def",
"opt_out",
"(",
"msg_hash",
")",
":",
"(",
"email",
",",
"added",
")",
"=",
"Email",
".",
"handler",
".",
"opt_out",
"(",
"msg_hash",
")",
"if",
"(",
"email",
"and",
"added",
")",
":",
"_system_email",
"(",
"email",
",",
"''",
",",
"Email",
".",
"Kind",
".",
"OPTOUT",
")",
"return",
"(",
"email",
",",
"added",
")"
] |
queues an opt-out email .
|
train
| false
|
10,509
|
def _dict_with_group_specs_if_authorized(context, inst_type_query):
inst_type_dict = dict(inst_type_query)
if (not is_admin_context(context)):
del inst_type_dict['group_specs']
else:
group_specs = {x['key']: x['value'] for x in inst_type_query['group_specs']}
inst_type_dict['group_specs'] = group_specs
return inst_type_dict
|
[
"def",
"_dict_with_group_specs_if_authorized",
"(",
"context",
",",
"inst_type_query",
")",
":",
"inst_type_dict",
"=",
"dict",
"(",
"inst_type_query",
")",
"if",
"(",
"not",
"is_admin_context",
"(",
"context",
")",
")",
":",
"del",
"inst_type_dict",
"[",
"'group_specs'",
"]",
"else",
":",
"group_specs",
"=",
"{",
"x",
"[",
"'key'",
"]",
":",
"x",
"[",
"'value'",
"]",
"for",
"x",
"in",
"inst_type_query",
"[",
"'group_specs'",
"]",
"}",
"inst_type_dict",
"[",
"'group_specs'",
"]",
"=",
"group_specs",
"return",
"inst_type_dict"
] |
convert group type query result to dict with spec and rate_limit .
|
train
| false
|
10,510
|
def _csr_gen_triples(A):
nrows = A.shape[0]
(data, indices, indptr) = (A.data, A.indices, A.indptr)
for i in range(nrows):
for j in range(indptr[i], indptr[(i + 1)]):
(yield (i, indices[j], data[j]))
|
[
"def",
"_csr_gen_triples",
"(",
"A",
")",
":",
"nrows",
"=",
"A",
".",
"shape",
"[",
"0",
"]",
"(",
"data",
",",
"indices",
",",
"indptr",
")",
"=",
"(",
"A",
".",
"data",
",",
"A",
".",
"indices",
",",
"A",
".",
"indptr",
")",
"for",
"i",
"in",
"range",
"(",
"nrows",
")",
":",
"for",
"j",
"in",
"range",
"(",
"indptr",
"[",
"i",
"]",
",",
"indptr",
"[",
"(",
"i",
"+",
"1",
")",
"]",
")",
":",
"(",
"yield",
"(",
"i",
",",
"indices",
"[",
"j",
"]",
",",
"data",
"[",
"j",
"]",
")",
")"
] |
converts a scipy sparse matrix in **compressed sparse row** format to an iterable of weighted edge triples .
|
train
| false
|
10,512
|
@db_api.retry_if_session_inactive()
def add_provisioning_component(context, object_id, object_type, entity):
log_dict = {'entity': entity, 'oid': object_id, 'otype': object_type}
standard_attr_id = _get_standard_attr_id(context, object_id, object_type)
if (not standard_attr_id):
return
record = context.session.query(pb_model.ProvisioningBlock).filter_by(standard_attr_id=standard_attr_id, entity=entity).first()
if record:
LOG.debug('Ignored duplicate provisioning block setup for %(otype)s %(oid)s by entity %(entity)s.', log_dict)
return
with context.session.begin(subtransactions=True):
record = pb_model.ProvisioningBlock(standard_attr_id=standard_attr_id, entity=entity)
context.session.add(record)
LOG.debug('Transition to ACTIVE for %(otype)s object %(oid)s will not be triggered until provisioned by entity %(entity)s.', log_dict)
|
[
"@",
"db_api",
".",
"retry_if_session_inactive",
"(",
")",
"def",
"add_provisioning_component",
"(",
"context",
",",
"object_id",
",",
"object_type",
",",
"entity",
")",
":",
"log_dict",
"=",
"{",
"'entity'",
":",
"entity",
",",
"'oid'",
":",
"object_id",
",",
"'otype'",
":",
"object_type",
"}",
"standard_attr_id",
"=",
"_get_standard_attr_id",
"(",
"context",
",",
"object_id",
",",
"object_type",
")",
"if",
"(",
"not",
"standard_attr_id",
")",
":",
"return",
"record",
"=",
"context",
".",
"session",
".",
"query",
"(",
"pb_model",
".",
"ProvisioningBlock",
")",
".",
"filter_by",
"(",
"standard_attr_id",
"=",
"standard_attr_id",
",",
"entity",
"=",
"entity",
")",
".",
"first",
"(",
")",
"if",
"record",
":",
"LOG",
".",
"debug",
"(",
"'Ignored duplicate provisioning block setup for %(otype)s %(oid)s by entity %(entity)s.'",
",",
"log_dict",
")",
"return",
"with",
"context",
".",
"session",
".",
"begin",
"(",
"subtransactions",
"=",
"True",
")",
":",
"record",
"=",
"pb_model",
".",
"ProvisioningBlock",
"(",
"standard_attr_id",
"=",
"standard_attr_id",
",",
"entity",
"=",
"entity",
")",
"context",
".",
"session",
".",
"add",
"(",
"record",
")",
"LOG",
".",
"debug",
"(",
"'Transition to ACTIVE for %(otype)s object %(oid)s will not be triggered until provisioned by entity %(entity)s.'",
",",
"log_dict",
")"
] |
adds a provisioning block by an entity to a given object .
|
train
| false
|
10,513
|
def multiplex(*brules):
def multiplex_brl(expr):
seen = set([])
for brl in brules:
for nexpr in brl(expr):
if (nexpr not in seen):
seen.add(nexpr)
(yield nexpr)
return multiplex_brl
|
[
"def",
"multiplex",
"(",
"*",
"brules",
")",
":",
"def",
"multiplex_brl",
"(",
"expr",
")",
":",
"seen",
"=",
"set",
"(",
"[",
"]",
")",
"for",
"brl",
"in",
"brules",
":",
"for",
"nexpr",
"in",
"brl",
"(",
"expr",
")",
":",
"if",
"(",
"nexpr",
"not",
"in",
"seen",
")",
":",
"seen",
".",
"add",
"(",
"nexpr",
")",
"(",
"yield",
"nexpr",
")",
"return",
"multiplex_brl"
] |
multiplex many branching rules into one .
|
train
| false
|
10,514
|
def _abi_parse_header(header, handle):
head_elem_size = header[4]
head_elem_num = header[5]
head_offset = header[7]
index = 0
while (index < head_elem_num):
start = (head_offset + (index * head_elem_size))
handle.seek(start)
dir_entry = (struct.unpack(_DIRFMT, handle.read(struct.calcsize(_DIRFMT))) + (start,))
index += 1
key = _bytes_to_string(dir_entry[0])
key += str(dir_entry[1])
tag_name = _bytes_to_string(dir_entry[0])
tag_number = dir_entry[1]
elem_code = dir_entry[2]
elem_num = dir_entry[4]
data_size = dir_entry[5]
data_offset = dir_entry[6]
tag_offset = dir_entry[8]
if (data_size <= 4):
data_offset = (tag_offset + 20)
handle.seek(data_offset)
data = handle.read(data_size)
(yield (tag_name, tag_number, _parse_tag_data(elem_code, elem_num, data)))
|
[
"def",
"_abi_parse_header",
"(",
"header",
",",
"handle",
")",
":",
"head_elem_size",
"=",
"header",
"[",
"4",
"]",
"head_elem_num",
"=",
"header",
"[",
"5",
"]",
"head_offset",
"=",
"header",
"[",
"7",
"]",
"index",
"=",
"0",
"while",
"(",
"index",
"<",
"head_elem_num",
")",
":",
"start",
"=",
"(",
"head_offset",
"+",
"(",
"index",
"*",
"head_elem_size",
")",
")",
"handle",
".",
"seek",
"(",
"start",
")",
"dir_entry",
"=",
"(",
"struct",
".",
"unpack",
"(",
"_DIRFMT",
",",
"handle",
".",
"read",
"(",
"struct",
".",
"calcsize",
"(",
"_DIRFMT",
")",
")",
")",
"+",
"(",
"start",
",",
")",
")",
"index",
"+=",
"1",
"key",
"=",
"_bytes_to_string",
"(",
"dir_entry",
"[",
"0",
"]",
")",
"key",
"+=",
"str",
"(",
"dir_entry",
"[",
"1",
"]",
")",
"tag_name",
"=",
"_bytes_to_string",
"(",
"dir_entry",
"[",
"0",
"]",
")",
"tag_number",
"=",
"dir_entry",
"[",
"1",
"]",
"elem_code",
"=",
"dir_entry",
"[",
"2",
"]",
"elem_num",
"=",
"dir_entry",
"[",
"4",
"]",
"data_size",
"=",
"dir_entry",
"[",
"5",
"]",
"data_offset",
"=",
"dir_entry",
"[",
"6",
"]",
"tag_offset",
"=",
"dir_entry",
"[",
"8",
"]",
"if",
"(",
"data_size",
"<=",
"4",
")",
":",
"data_offset",
"=",
"(",
"tag_offset",
"+",
"20",
")",
"handle",
".",
"seek",
"(",
"data_offset",
")",
"data",
"=",
"handle",
".",
"read",
"(",
"data_size",
")",
"(",
"yield",
"(",
"tag_name",
",",
"tag_number",
",",
"_parse_tag_data",
"(",
"elem_code",
",",
"elem_num",
",",
"data",
")",
")",
")"
] |
generator that returns directory contents .
|
train
| false
|
10,515
|
def test_enn_sk_estimator():
check_estimator(RepeatedEditedNearestNeighbours)
|
[
"def",
"test_enn_sk_estimator",
"(",
")",
":",
"check_estimator",
"(",
"RepeatedEditedNearestNeighbours",
")"
] |
test the sklearn estimator compatibility .
|
train
| false
|
10,516
|
def monomial_lcm(A, B):
return tuple([max(a, b) for (a, b) in zip(A, B)])
|
[
"def",
"monomial_lcm",
"(",
"A",
",",
"B",
")",
":",
"return",
"tuple",
"(",
"[",
"max",
"(",
"a",
",",
"b",
")",
"for",
"(",
"a",
",",
"b",
")",
"in",
"zip",
"(",
"A",
",",
"B",
")",
"]",
")"
] |
least common multiple of tuples representing monomials .
|
train
| false
|
10,517
|
@bdd.when('selection is not supported')
def selection_not_supported(qapp):
if qapp.clipboard().supportsSelection():
pytest.skip('OS supports primary selection!')
|
[
"@",
"bdd",
".",
"when",
"(",
"'selection is not supported'",
")",
"def",
"selection_not_supported",
"(",
"qapp",
")",
":",
"if",
"qapp",
".",
"clipboard",
"(",
")",
".",
"supportsSelection",
"(",
")",
":",
"pytest",
".",
"skip",
"(",
"'OS supports primary selection!'",
")"
] |
skip the test if selection is supported .
|
train
| false
|
10,518
|
def test_init_default_scoring():
tpot_obj = TPOTRegressor()
assert (tpot_obj.scoring_function == 'neg_mean_squared_error')
|
[
"def",
"test_init_default_scoring",
"(",
")",
":",
"tpot_obj",
"=",
"TPOTRegressor",
"(",
")",
"assert",
"(",
"tpot_obj",
".",
"scoring_function",
"==",
"'neg_mean_squared_error'",
")"
] |
assert that tpot intitializes with the correct default scoring function .
|
train
| false
|
10,519
|
def flatten_schema(schema, flattened=None, key=None):
flattened = (flattened or {})
old_key = (key or [])
for (key, value) in schema.iteritems():
new_key = (old_key + [key])
if isinstance(value, dict):
flattened = flatten_schema(value, flattened, new_key)
else:
flattened[tuple(new_key)] = value
return flattened
|
[
"def",
"flatten_schema",
"(",
"schema",
",",
"flattened",
"=",
"None",
",",
"key",
"=",
"None",
")",
":",
"flattened",
"=",
"(",
"flattened",
"or",
"{",
"}",
")",
"old_key",
"=",
"(",
"key",
"or",
"[",
"]",
")",
"for",
"(",
"key",
",",
"value",
")",
"in",
"schema",
".",
"iteritems",
"(",
")",
":",
"new_key",
"=",
"(",
"old_key",
"+",
"[",
"key",
"]",
")",
"if",
"isinstance",
"(",
"value",
",",
"dict",
")",
":",
"flattened",
"=",
"flatten_schema",
"(",
"value",
",",
"flattened",
",",
"new_key",
")",
"else",
":",
"flattened",
"[",
"tuple",
"(",
"new_key",
")",
"]",
"=",
"value",
"return",
"flattened"
] |
convert schema into flat dict .
|
train
| false
|
10,521
|
def init(mpstate):
return SerialModule(mpstate)
|
[
"def",
"init",
"(",
"mpstate",
")",
":",
"return",
"SerialModule",
"(",
"mpstate",
")"
] |
initialise module .
|
train
| false
|
10,523
|
def shuffle(lol, seed):
for l in lol:
random.seed(seed)
random.shuffle(l)
|
[
"def",
"shuffle",
"(",
"lol",
",",
"seed",
")",
":",
"for",
"l",
"in",
"lol",
":",
"random",
".",
"seed",
"(",
"seed",
")",
"random",
".",
"shuffle",
"(",
"l",
")"
] |
shuffle mapreduce files using the shuffler service .
|
train
| false
|
10,524
|
@pick_context_manager_writer
def compute_node_create(context, values):
convert_objects_related_datetimes(values)
compute_node_ref = models.ComputeNode()
compute_node_ref.update(values)
compute_node_ref.save(context.session)
return compute_node_ref
|
[
"@",
"pick_context_manager_writer",
"def",
"compute_node_create",
"(",
"context",
",",
"values",
")",
":",
"convert_objects_related_datetimes",
"(",
"values",
")",
"compute_node_ref",
"=",
"models",
".",
"ComputeNode",
"(",
")",
"compute_node_ref",
".",
"update",
"(",
"values",
")",
"compute_node_ref",
".",
"save",
"(",
"context",
".",
"session",
")",
"return",
"compute_node_ref"
] |
create a computenode from the values dictionary .
|
train
| false
|
10,525
|
@lower('+', types.Array)
def array_positive_impl(context, builder, sig, args):
class _UnaryPositiveKernel(_Kernel, ):
def generate(self, *args):
[val] = args
return val
return numpy_ufunc_kernel(context, builder, sig, args, _UnaryPositiveKernel, explicit_output=False)
|
[
"@",
"lower",
"(",
"'+'",
",",
"types",
".",
"Array",
")",
"def",
"array_positive_impl",
"(",
"context",
",",
"builder",
",",
"sig",
",",
"args",
")",
":",
"class",
"_UnaryPositiveKernel",
"(",
"_Kernel",
",",
")",
":",
"def",
"generate",
"(",
"self",
",",
"*",
"args",
")",
":",
"[",
"val",
"]",
"=",
"args",
"return",
"val",
"return",
"numpy_ufunc_kernel",
"(",
"context",
",",
"builder",
",",
"sig",
",",
"args",
",",
"_UnaryPositiveKernel",
",",
"explicit_output",
"=",
"False",
")"
] |
lowering function for + expressions .
|
train
| false
|
10,526
|
def _ResolvePath(included_from, included_path, basepath):
python_lib = '$PYTHON_LIB'
appscale_server = '/root/appscale/AppServer'
included_from = included_from.replace(python_lib, appscale_server)
included_path = included_path.replace(python_lib, appscale_server)
basepath = basepath.replace(python_lib, appscale_server)
path = os.path.join(os.path.dirname(included_from), included_path)
if (not _IsFileOrDirWithFile(path)):
path = os.path.join(basepath, included_path)
if (not _IsFileOrDirWithFile(path)):
path = included_path
if (not _IsFileOrDirWithFile(path)):
return ''
if os.path.isfile(path):
return os.path.normcase(os.path.abspath(path))
return os.path.normcase(os.path.abspath(os.path.join(path, 'include.yaml')))
|
[
"def",
"_ResolvePath",
"(",
"included_from",
",",
"included_path",
",",
"basepath",
")",
":",
"python_lib",
"=",
"'$PYTHON_LIB'",
"appscale_server",
"=",
"'/root/appscale/AppServer'",
"included_from",
"=",
"included_from",
".",
"replace",
"(",
"python_lib",
",",
"appscale_server",
")",
"included_path",
"=",
"included_path",
".",
"replace",
"(",
"python_lib",
",",
"appscale_server",
")",
"basepath",
"=",
"basepath",
".",
"replace",
"(",
"python_lib",
",",
"appscale_server",
")",
"path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"included_from",
")",
",",
"included_path",
")",
"if",
"(",
"not",
"_IsFileOrDirWithFile",
"(",
"path",
")",
")",
":",
"path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"basepath",
",",
"included_path",
")",
"if",
"(",
"not",
"_IsFileOrDirWithFile",
"(",
"path",
")",
")",
":",
"path",
"=",
"included_path",
"if",
"(",
"not",
"_IsFileOrDirWithFile",
"(",
"path",
")",
")",
":",
"return",
"''",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"path",
")",
":",
"return",
"os",
".",
"path",
".",
"normcase",
"(",
"os",
".",
"path",
".",
"abspath",
"(",
"path",
")",
")",
"return",
"os",
".",
"path",
".",
"normcase",
"(",
"os",
".",
"path",
".",
"abspath",
"(",
"os",
".",
"path",
".",
"join",
"(",
"path",
",",
"'include.yaml'",
")",
")",
")"
] |
gets the absolute path of the file to be included .
|
train
| false
|
10,528
|
def update_meta_info(rec_dir, meta_info):
logger.info('Updating meta info')
meta_info_path = os.path.join(rec_dir, 'info.csv')
meta_info_old_path = os.path.join(rec_dir, 'info_old.csv')
shutil.copy2(meta_info_path, meta_info_old_path)
with open(meta_info_path, 'w', newline='') as csvfile:
csv_utils.write_key_value_file(csvfile, meta_info)
|
[
"def",
"update_meta_info",
"(",
"rec_dir",
",",
"meta_info",
")",
":",
"logger",
".",
"info",
"(",
"'Updating meta info'",
")",
"meta_info_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"rec_dir",
",",
"'info.csv'",
")",
"meta_info_old_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"rec_dir",
",",
"'info_old.csv'",
")",
"shutil",
".",
"copy2",
"(",
"meta_info_path",
",",
"meta_info_old_path",
")",
"with",
"open",
"(",
"meta_info_path",
",",
"'w'",
",",
"newline",
"=",
"''",
")",
"as",
"csvfile",
":",
"csv_utils",
".",
"write_key_value_file",
"(",
"csvfile",
",",
"meta_info",
")"
] |
back up the old meta info file , then write the updated meta info .
|
train
| false
|
10,534
|
def test_venonat(session, media_root):
venonat = session.query(tables.PokemonSpecies).filter_by(identifier=u'venonat').one()
accessor = media.PokemonSpeciesMedia(media_root, venonat)
assert (accessor.sprite('yellow') == accessor.sprite('yellow', shiny=True))
|
[
"def",
"test_venonat",
"(",
"session",
",",
"media_root",
")",
":",
"venonat",
"=",
"session",
".",
"query",
"(",
"tables",
".",
"PokemonSpecies",
")",
".",
"filter_by",
"(",
"identifier",
"=",
"u'venonat'",
")",
".",
"one",
"(",
")",
"accessor",
"=",
"media",
".",
"PokemonSpeciesMedia",
"(",
"media_root",
",",
"venonat",
")",
"assert",
"(",
"accessor",
".",
"sprite",
"(",
"'yellow'",
")",
"==",
"accessor",
".",
"sprite",
"(",
"'yellow'",
",",
"shiny",
"=",
"True",
")",
")"
] |
venonat's shiny yellow sprite -- same as non-shiny .
|
train
| false
|
10,535
|
@utils.arg('--host-only', dest='host_only', metavar='<0|1>', nargs='?', type=int, const=1, default=0)
@utils.arg('--project-only', dest='project_only', metavar='<0|1>', nargs='?', type=int, const=1, default=0)
@utils.arg('network', metavar='<network>', help=_('UUID of network.'))
@deprecated_network
def do_network_disassociate(cs, args):
if args.host_only:
cs.networks.disassociate(args.network, True, False)
elif args.project_only:
cs.networks.disassociate(args.network, False, True)
else:
cs.networks.disassociate(args.network, True, True)
|
[
"@",
"utils",
".",
"arg",
"(",
"'--host-only'",
",",
"dest",
"=",
"'host_only'",
",",
"metavar",
"=",
"'<0|1>'",
",",
"nargs",
"=",
"'?'",
",",
"type",
"=",
"int",
",",
"const",
"=",
"1",
",",
"default",
"=",
"0",
")",
"@",
"utils",
".",
"arg",
"(",
"'--project-only'",
",",
"dest",
"=",
"'project_only'",
",",
"metavar",
"=",
"'<0|1>'",
",",
"nargs",
"=",
"'?'",
",",
"type",
"=",
"int",
",",
"const",
"=",
"1",
",",
"default",
"=",
"0",
")",
"@",
"utils",
".",
"arg",
"(",
"'network'",
",",
"metavar",
"=",
"'<network>'",
",",
"help",
"=",
"_",
"(",
"'UUID of network.'",
")",
")",
"@",
"deprecated_network",
"def",
"do_network_disassociate",
"(",
"cs",
",",
"args",
")",
":",
"if",
"args",
".",
"host_only",
":",
"cs",
".",
"networks",
".",
"disassociate",
"(",
"args",
".",
"network",
",",
"True",
",",
"False",
")",
"elif",
"args",
".",
"project_only",
":",
"cs",
".",
"networks",
".",
"disassociate",
"(",
"args",
".",
"network",
",",
"False",
",",
"True",
")",
"else",
":",
"cs",
".",
"networks",
".",
"disassociate",
"(",
"args",
".",
"network",
",",
"True",
",",
"True",
")"
] |
disassociate host and/or project from the given network .
|
train
| false
|
10,536
|
def test_store_except_info_on_eror():
class ItemThatRaises:
def runtest(self):
raise IndexError('TEST')
try:
runner.pytest_runtest_call(ItemThatRaises())
except IndexError:
pass
assert (sys.last_type is IndexError)
assert (sys.last_value.args[0] == 'TEST')
assert sys.last_traceback
|
[
"def",
"test_store_except_info_on_eror",
"(",
")",
":",
"class",
"ItemThatRaises",
":",
"def",
"runtest",
"(",
"self",
")",
":",
"raise",
"IndexError",
"(",
"'TEST'",
")",
"try",
":",
"runner",
".",
"pytest_runtest_call",
"(",
"ItemThatRaises",
"(",
")",
")",
"except",
"IndexError",
":",
"pass",
"assert",
"(",
"sys",
".",
"last_type",
"is",
"IndexError",
")",
"assert",
"(",
"sys",
".",
"last_value",
".",
"args",
"[",
"0",
"]",
"==",
"'TEST'",
")",
"assert",
"sys",
".",
"last_traceback"
] |
test that upon test failure , the exception info is stored on sys.last_type , sys.last_value and sys.last_traceback .
|
train
| false
|
10,537
|
def tr_lang(language_name):
language_code = translation.get_language()
if (language_code is None):
language_code = settings.LANGUAGE_CODE
language_code = translation.to_locale(language_code)
return langdata.tr_lang(language_code)(language_name)
|
[
"def",
"tr_lang",
"(",
"language_name",
")",
":",
"language_code",
"=",
"translation",
".",
"get_language",
"(",
")",
"if",
"(",
"language_code",
"is",
"None",
")",
":",
"language_code",
"=",
"settings",
".",
"LANGUAGE_CODE",
"language_code",
"=",
"translation",
".",
"to_locale",
"(",
"language_code",
")",
"return",
"langdata",
".",
"tr_lang",
"(",
"language_code",
")",
"(",
"language_name",
")"
] |
translates language names .
|
train
| false
|
10,538
|
def stripped_lines(lines, ignore_comments, ignore_docstrings, ignore_imports):
strippedlines = []
docstring = None
for line in lines:
line = line.strip()
if ignore_docstrings:
if ((not docstring) and (line.startswith('"""') or line.startswith("'''"))):
docstring = line[:3]
line = line[3:]
if docstring:
if line.endswith(docstring):
docstring = None
line = ''
if ignore_imports:
if (line.startswith('import ') or line.startswith('from ')):
line = ''
if ignore_comments:
line = line.split('#', 1)[0].strip()
strippedlines.append(line)
return strippedlines
|
[
"def",
"stripped_lines",
"(",
"lines",
",",
"ignore_comments",
",",
"ignore_docstrings",
",",
"ignore_imports",
")",
":",
"strippedlines",
"=",
"[",
"]",
"docstring",
"=",
"None",
"for",
"line",
"in",
"lines",
":",
"line",
"=",
"line",
".",
"strip",
"(",
")",
"if",
"ignore_docstrings",
":",
"if",
"(",
"(",
"not",
"docstring",
")",
"and",
"(",
"line",
".",
"startswith",
"(",
"'\"\"\"'",
")",
"or",
"line",
".",
"startswith",
"(",
"\"'''\"",
")",
")",
")",
":",
"docstring",
"=",
"line",
"[",
":",
"3",
"]",
"line",
"=",
"line",
"[",
"3",
":",
"]",
"if",
"docstring",
":",
"if",
"line",
".",
"endswith",
"(",
"docstring",
")",
":",
"docstring",
"=",
"None",
"line",
"=",
"''",
"if",
"ignore_imports",
":",
"if",
"(",
"line",
".",
"startswith",
"(",
"'import '",
")",
"or",
"line",
".",
"startswith",
"(",
"'from '",
")",
")",
":",
"line",
"=",
"''",
"if",
"ignore_comments",
":",
"line",
"=",
"line",
".",
"split",
"(",
"'#'",
",",
"1",
")",
"[",
"0",
"]",
".",
"strip",
"(",
")",
"strippedlines",
".",
"append",
"(",
"line",
")",
"return",
"strippedlines"
] |
return lines with leading/trailing whitespace and any ignored code features removed .
|
train
| false
|
10,540
|
def gauss_hermite(n, n_digits):
x = Dummy('x')
p = hermite_poly(n, x, polys=True)
p1 = hermite_poly((n - 1), x, polys=True)
xi = []
w = []
for r in p.real_roots():
if isinstance(r, RootOf):
r = r.eval_rational((S(1) / (10 ** (n_digits + 2))))
xi.append(r.n(n_digits))
w.append(((((2 ** (n - 1)) * factorial(n)) * sqrt(pi)) / ((n ** 2) * (p1.subs(x, r) ** 2))).n(n_digits))
return (xi, w)
|
[
"def",
"gauss_hermite",
"(",
"n",
",",
"n_digits",
")",
":",
"x",
"=",
"Dummy",
"(",
"'x'",
")",
"p",
"=",
"hermite_poly",
"(",
"n",
",",
"x",
",",
"polys",
"=",
"True",
")",
"p1",
"=",
"hermite_poly",
"(",
"(",
"n",
"-",
"1",
")",
",",
"x",
",",
"polys",
"=",
"True",
")",
"xi",
"=",
"[",
"]",
"w",
"=",
"[",
"]",
"for",
"r",
"in",
"p",
".",
"real_roots",
"(",
")",
":",
"if",
"isinstance",
"(",
"r",
",",
"RootOf",
")",
":",
"r",
"=",
"r",
".",
"eval_rational",
"(",
"(",
"S",
"(",
"1",
")",
"/",
"(",
"10",
"**",
"(",
"n_digits",
"+",
"2",
")",
")",
")",
")",
"xi",
".",
"append",
"(",
"r",
".",
"n",
"(",
"n_digits",
")",
")",
"w",
".",
"append",
"(",
"(",
"(",
"(",
"(",
"2",
"**",
"(",
"n",
"-",
"1",
")",
")",
"*",
"factorial",
"(",
"n",
")",
")",
"*",
"sqrt",
"(",
"pi",
")",
")",
"/",
"(",
"(",
"n",
"**",
"2",
")",
"*",
"(",
"p1",
".",
"subs",
"(",
"x",
",",
"r",
")",
"**",
"2",
")",
")",
")",
".",
"n",
"(",
"n_digits",
")",
")",
"return",
"(",
"xi",
",",
"w",
")"
] |
computes the gauss-hermite quadrature [1]_ points and weights .
|
train
| false
|
10,542
|
def requeue_job(job_id, connection=None):
from .queue import get_failed_queue
fq = get_failed_queue(connection=connection)
fq.requeue(job_id)
|
[
"def",
"requeue_job",
"(",
"job_id",
",",
"connection",
"=",
"None",
")",
":",
"from",
".",
"queue",
"import",
"get_failed_queue",
"fq",
"=",
"get_failed_queue",
"(",
"connection",
"=",
"connection",
")",
"fq",
".",
"requeue",
"(",
"job_id",
")"
] |
requeues the job with the given job id .
|
train
| false
|
10,543
|
def validate_bool_kwarg(value, arg_name):
if (not (is_bool(value) or (value is None))):
raise ValueError(('For argument "%s" expected type bool, received type %s.' % (arg_name, type(value).__name__)))
return value
|
[
"def",
"validate_bool_kwarg",
"(",
"value",
",",
"arg_name",
")",
":",
"if",
"(",
"not",
"(",
"is_bool",
"(",
"value",
")",
"or",
"(",
"value",
"is",
"None",
")",
")",
")",
":",
"raise",
"ValueError",
"(",
"(",
"'For argument \"%s\" expected type bool, received type %s.'",
"%",
"(",
"arg_name",
",",
"type",
"(",
"value",
")",
".",
"__name__",
")",
")",
")",
"return",
"value"
] |
ensures that the argument passed in arg_name is of type bool or None .
|
train
| false
|
10,544
|
def alsoProvides(object, *interfaces):
directlyProvides(object, directlyProvidedBy(object), *interfaces)
|
[
"def",
"alsoProvides",
"(",
"object",
",",
"*",
"interfaces",
")",
":",
"directlyProvides",
"(",
"object",
",",
"directlyProvidedBy",
"(",
"object",
")",
",",
"*",
"interfaces",
")"
] |
declare interfaces declared directly for an object . the arguments after the object are one or more interfaces or interface specifications .
|
train
| false
|
10,548
|
def itemfreq(a):
(items, inv) = np.unique(a, return_inverse=True)
freq = np.bincount(inv)
return np.array([items, freq]).T
|
[
"def",
"itemfreq",
"(",
"a",
")",
":",
"(",
"items",
",",
"inv",
")",
"=",
"np",
".",
"unique",
"(",
"a",
",",
"return_inverse",
"=",
"True",
")",
"freq",
"=",
"np",
".",
"bincount",
"(",
"inv",
")",
"return",
"np",
".",
"array",
"(",
"[",
"items",
",",
"freq",
"]",
")",
".",
"T"
] |
returns a 2-d array of item frequencies .
|
train
| false
|
10,549
|
def _colorize(color, text):
if (color in DARK_COLORS):
escape = (COLOR_ESCAPE + ('%im' % (DARK_COLORS[color] + 30)))
elif (color in LIGHT_COLORS):
escape = (COLOR_ESCAPE + ('%i;01m' % (LIGHT_COLORS[color] + 30)))
else:
raise ValueError(u'no such color %s', color)
return ((escape + text) + RESET_COLOR)
|
[
"def",
"_colorize",
"(",
"color",
",",
"text",
")",
":",
"if",
"(",
"color",
"in",
"DARK_COLORS",
")",
":",
"escape",
"=",
"(",
"COLOR_ESCAPE",
"+",
"(",
"'%im'",
"%",
"(",
"DARK_COLORS",
"[",
"color",
"]",
"+",
"30",
")",
")",
")",
"elif",
"(",
"color",
"in",
"LIGHT_COLORS",
")",
":",
"escape",
"=",
"(",
"COLOR_ESCAPE",
"+",
"(",
"'%i;01m'",
"%",
"(",
"LIGHT_COLORS",
"[",
"color",
"]",
"+",
"30",
")",
")",
")",
"else",
":",
"raise",
"ValueError",
"(",
"u'no such color %s'",
",",
"color",
")",
"return",
"(",
"(",
"escape",
"+",
"text",
")",
"+",
"RESET_COLOR",
")"
] |
returns a string that prints the given text in the given color in a terminal that is ansi color-aware .
|
train
| false
|
10,550
|
def jnyn_zeros(n, nt):
if (not (isscalar(nt) and isscalar(n))):
raise ValueError('Arguments must be scalars.')
if ((floor(n) != n) or (floor(nt) != nt)):
raise ValueError('Arguments must be integers.')
if (nt <= 0):
raise ValueError('nt > 0')
return specfun.jyzo(abs(n), nt)
|
[
"def",
"jnyn_zeros",
"(",
"n",
",",
"nt",
")",
":",
"if",
"(",
"not",
"(",
"isscalar",
"(",
"nt",
")",
"and",
"isscalar",
"(",
"n",
")",
")",
")",
":",
"raise",
"ValueError",
"(",
"'Arguments must be scalars.'",
")",
"if",
"(",
"(",
"floor",
"(",
"n",
")",
"!=",
"n",
")",
"or",
"(",
"floor",
"(",
"nt",
")",
"!=",
"nt",
")",
")",
":",
"raise",
"ValueError",
"(",
"'Arguments must be integers.'",
")",
"if",
"(",
"nt",
"<=",
"0",
")",
":",
"raise",
"ValueError",
"(",
"'nt > 0'",
")",
"return",
"specfun",
".",
"jyzo",
"(",
"abs",
"(",
"n",
")",
",",
"nt",
")"
] |
compute nt zeros of the bessel functions jn(x) and yn(x) .
|
train
| false
|
10,551
|
def show_std_icons():
app = qapplication()
dialog = ShowStdIcons(None)
dialog.show()
sys.exit(app.exec_())
|
[
"def",
"show_std_icons",
"(",
")",
":",
"app",
"=",
"qapplication",
"(",
")",
"dialog",
"=",
"ShowStdIcons",
"(",
"None",
")",
"dialog",
".",
"show",
"(",
")",
"sys",
".",
"exit",
"(",
"app",
".",
"exec_",
"(",
")",
")"
] |
show all standard icons .
|
train
| true
|
10,552
|
def str_dict(dic):
if isinstance(dic, CaseInsensitiveDictionary):
dic2 = CaseInsensitiveDictionary()
else:
dic2 = {}
for (k, v) in dic.items():
if isinstance(k, unicode):
k = k.encode('utf-8')
if isinstance(v, unicode):
v = v.encode('utf-8')
dic2[k] = v
return dic2
|
[
"def",
"str_dict",
"(",
"dic",
")",
":",
"if",
"isinstance",
"(",
"dic",
",",
"CaseInsensitiveDictionary",
")",
":",
"dic2",
"=",
"CaseInsensitiveDictionary",
"(",
")",
"else",
":",
"dic2",
"=",
"{",
"}",
"for",
"(",
"k",
",",
"v",
")",
"in",
"dic",
".",
"items",
"(",
")",
":",
"if",
"isinstance",
"(",
"k",
",",
"unicode",
")",
":",
"k",
"=",
"k",
".",
"encode",
"(",
"'utf-8'",
")",
"if",
"isinstance",
"(",
"v",
",",
"unicode",
")",
":",
"v",
"=",
"v",
".",
"encode",
"(",
"'utf-8'",
")",
"dic2",
"[",
"k",
"]",
"=",
"v",
"return",
"dic2"
] |
convert keys and values in dic into utf-8-encoded str .
|
train
| false
|
10,553
|
def nzf_get_filename(nzf):
name = nzf.filename
if (not name):
name = nzf.subject
if (not name):
name = ''
return name.lower()
|
[
"def",
"nzf_get_filename",
"(",
"nzf",
")",
":",
"name",
"=",
"nzf",
".",
"filename",
"if",
"(",
"not",
"name",
")",
":",
"name",
"=",
"nzf",
".",
"subject",
"if",
"(",
"not",
"name",
")",
":",
"name",
"=",
"''",
"return",
"name",
".",
"lower",
"(",
")"
] |
return the lowercased filename of the nzf , falling back to its subject .
|
train
| false
|
10,554
|
def check_write_instance_metadata(name, value):
if is_google_instance():
check_run_quick('gcloud compute instances add-metadata {hostname} --zone={zone} --metadata={name}={value}'.format(hostname=socket.gethostname(), zone=check_get_zone(), name=name, value=value))
elif is_aws_instance():
result = check_fetch(os.path.join(AWS_METADATA_URL, 'instance-id'))
id = result.content.strip()
result = check_fetch(os.path.join(AWS_METADATA_URL, 'placement/availability-zone'))
region = result.content.strip()[:(-1)]
command = ['aws ec2 create-tags --resources', id, '--region', region, '--tags Key={key},Value={value}'.format(key=name, value=value)]
check_run_quick(' '.join(command), echo=False)
else:
raise NotImplementedError('This platform does not support metadata.')
|
[
"def",
"check_write_instance_metadata",
"(",
"name",
",",
"value",
")",
":",
"if",
"is_google_instance",
"(",
")",
":",
"check_run_quick",
"(",
"'gcloud compute instances add-metadata {hostname} --zone={zone} --metadata={name}={value}'",
".",
"format",
"(",
"hostname",
"=",
"socket",
".",
"gethostname",
"(",
")",
",",
"zone",
"=",
"check_get_zone",
"(",
")",
",",
"name",
"=",
"name",
",",
"value",
"=",
"value",
")",
")",
"elif",
"is_aws_instance",
"(",
")",
":",
"result",
"=",
"check_fetch",
"(",
"os",
".",
"path",
".",
"join",
"(",
"AWS_METADATA_URL",
",",
"'instance-id'",
")",
")",
"id",
"=",
"result",
".",
"content",
".",
"strip",
"(",
")",
"result",
"=",
"check_fetch",
"(",
"os",
".",
"path",
".",
"join",
"(",
"AWS_METADATA_URL",
",",
"'placement/availability-zone'",
")",
")",
"region",
"=",
"result",
".",
"content",
".",
"strip",
"(",
")",
"[",
":",
"(",
"-",
"1",
")",
"]",
"command",
"=",
"[",
"'aws ec2 create-tags --resources'",
",",
"id",
",",
"'--region'",
",",
"region",
",",
"'--tags Key={key},Value={value}'",
".",
"format",
"(",
"key",
"=",
"name",
",",
"value",
"=",
"value",
")",
"]",
"check_run_quick",
"(",
"' '",
".",
"join",
"(",
"command",
")",
",",
"echo",
"=",
"False",
")",
"else",
":",
"raise",
"NotImplementedError",
"(",
"'This platform does not support metadata.'",
")"
] |
add a name/value pair to our instance metadata .
|
train
| false
|
10,556
|
@env.catch_exceptions
def complete_check():
(row, column) = env.cursor
line = env.lines[(row - 1)]
word_finder = worder.Worder(line, True)
(parent, name, _) = word_finder.get_splitted_primary_before((column - 1))
if parent:
return False
with RopeContext() as ctx:
modules = ctx.importer.get_modules(name)
if (not modules):
return False
if (name in ctx.project.pycore.resource_to_pyobject(ctx.resource)):
return False
if (not env.user_confirm(('Import %s?' % name), True)):
return False
if (len(modules) == 1):
_insert_import(name, modules[0], ctx)
else:
module = env.user_input_choices('With module to import:', *modules)
if module:
_insert_import(name, module, ctx)
|
[
"@",
"env",
".",
"catch_exceptions",
"def",
"complete_check",
"(",
")",
":",
"(",
"row",
",",
"column",
")",
"=",
"env",
".",
"cursor",
"line",
"=",
"env",
".",
"lines",
"[",
"(",
"row",
"-",
"1",
")",
"]",
"word_finder",
"=",
"worder",
".",
"Worder",
"(",
"line",
",",
"True",
")",
"(",
"parent",
",",
"name",
",",
"_",
")",
"=",
"word_finder",
".",
"get_splitted_primary_before",
"(",
"(",
"column",
"-",
"1",
")",
")",
"if",
"parent",
":",
"return",
"False",
"with",
"RopeContext",
"(",
")",
"as",
"ctx",
":",
"modules",
"=",
"ctx",
".",
"importer",
".",
"get_modules",
"(",
"name",
")",
"if",
"(",
"not",
"modules",
")",
":",
"return",
"False",
"if",
"(",
"name",
"in",
"ctx",
".",
"project",
".",
"pycore",
".",
"resource_to_pyobject",
"(",
"ctx",
".",
"resource",
")",
")",
":",
"return",
"False",
"if",
"(",
"not",
"env",
".",
"user_confirm",
"(",
"(",
"'Import %s?'",
"%",
"name",
")",
",",
"True",
")",
")",
":",
"return",
"False",
"if",
"(",
"len",
"(",
"modules",
")",
"==",
"1",
")",
":",
"_insert_import",
"(",
"name",
",",
"modules",
"[",
"0",
"]",
",",
"ctx",
")",
"else",
":",
"module",
"=",
"env",
".",
"user_input_choices",
"(",
"'With module to import:'",
",",
"*",
"modules",
")",
"if",
"module",
":",
"_insert_import",
"(",
"name",
",",
"module",
",",
"ctx",
")"
] |
check whether the name under the cursor can be imported and , if the user confirms , insert the import .
|
train
| false
|
10,558
|
def libvlc_media_player_get_full_chapter_descriptions(p_mi, i_chapters_of_title, pp_chapters):
f = (_Cfunctions.get('libvlc_media_player_get_full_chapter_descriptions', None) or _Cfunction('libvlc_media_player_get_full_chapter_descriptions', ((1,), (1,), (1,)), None, ctypes.c_int, MediaPlayer, ctypes.c_int, ctypes.POINTER(ctypes.POINTER(ChapterDescription))))
return f(p_mi, i_chapters_of_title, pp_chapters)
|
[
"def",
"libvlc_media_player_get_full_chapter_descriptions",
"(",
"p_mi",
",",
"i_chapters_of_title",
",",
"pp_chapters",
")",
":",
"f",
"=",
"(",
"_Cfunctions",
".",
"get",
"(",
"'libvlc_media_player_get_full_chapter_descriptions'",
",",
"None",
")",
"or",
"_Cfunction",
"(",
"'libvlc_media_player_get_full_chapter_descriptions'",
",",
"(",
"(",
"1",
",",
")",
",",
"(",
"1",
",",
")",
",",
"(",
"1",
",",
")",
")",
",",
"None",
",",
"ctypes",
".",
"c_int",
",",
"MediaPlayer",
",",
"ctypes",
".",
"c_int",
",",
"ctypes",
".",
"POINTER",
"(",
"ctypes",
".",
"POINTER",
"(",
"ChapterDescription",
")",
")",
")",
")",
"return",
"f",
"(",
"p_mi",
",",
"i_chapters_of_title",
",",
"pp_chapters",
")"
] |
get the full description of available chapters .
|
train
| false
|
10,560
|
def _validate_partition_boundary(boundary):
try:
for unit in VALID_UNITS:
if boundary.endswith(unit):
return
int(boundary)
except Exception:
raise CommandExecutionError('Invalid partition boundary passed: "{0}"'.format(boundary))
|
[
"def",
"_validate_partition_boundary",
"(",
"boundary",
")",
":",
"try",
":",
"for",
"unit",
"in",
"VALID_UNITS",
":",
"if",
"boundary",
".",
"endswith",
"(",
"unit",
")",
":",
"return",
"int",
"(",
"boundary",
")",
"except",
"Exception",
":",
"raise",
"CommandExecutionError",
"(",
"'Invalid partition boundary passed: \"{0}\"'",
".",
"format",
"(",
"boundary",
")",
")"
] |
ensure valid partition boundaries are supplied .
|
train
| false
|
10,561
|
def list_functions(*args, **kwargs):
if (not args):
return sorted(__salt__)
names = set()
for module in args:
if (('*' in module) or ('.' in module)):
for func in fnmatch.filter(__salt__, module):
names.add(func)
else:
moduledot = (module + '.')
for func in __salt__:
if func.startswith(moduledot):
names.add(func)
return sorted(names)
|
[
"def",
"list_functions",
"(",
"*",
"args",
",",
"**",
"kwargs",
")",
":",
"if",
"(",
"not",
"args",
")",
":",
"return",
"sorted",
"(",
"__salt__",
")",
"names",
"=",
"set",
"(",
")",
"for",
"module",
"in",
"args",
":",
"if",
"(",
"(",
"'*'",
"in",
"module",
")",
"or",
"(",
"'.'",
"in",
"module",
")",
")",
":",
"for",
"func",
"in",
"fnmatch",
".",
"filter",
"(",
"__salt__",
",",
"module",
")",
":",
"names",
".",
"add",
"(",
"func",
")",
"else",
":",
"moduledot",
"=",
"(",
"module",
"+",
"'.'",
")",
"for",
"func",
"in",
"__salt__",
":",
"if",
"func",
".",
"startswith",
"(",
"moduledot",
")",
":",
"names",
".",
"add",
"(",
"func",
")",
"return",
"sorted",
"(",
"names",
")"
] |
list the functions for all modules .
|
train
| true
|
10,562
|
def flatten_output(task):
r = flatten(task.output())
if (not r):
for dep in flatten(task.requires()):
r += flatten_output(dep)
return r
|
[
"def",
"flatten_output",
"(",
"task",
")",
":",
"r",
"=",
"flatten",
"(",
"task",
".",
"output",
"(",
")",
")",
"if",
"(",
"not",
"r",
")",
":",
"for",
"dep",
"in",
"flatten",
"(",
"task",
".",
"requires",
"(",
")",
")",
":",
"r",
"+=",
"flatten_output",
"(",
"dep",
")",
"return",
"r"
] |
lists all output targets by recursively walking output-less tasks .
|
train
| true
|
10,563
|
def left_multiplied_operator(J, d):
J = aslinearoperator(J)
def matvec(x):
return (d * J.matvec(x))
def matmat(X):
return (d * J.matmat(X))
def rmatvec(x):
return J.rmatvec((x.ravel() * d))
return LinearOperator(J.shape, matvec=matvec, matmat=matmat, rmatvec=rmatvec)
|
[
"def",
"left_multiplied_operator",
"(",
"J",
",",
"d",
")",
":",
"J",
"=",
"aslinearoperator",
"(",
"J",
")",
"def",
"matvec",
"(",
"x",
")",
":",
"return",
"(",
"d",
"*",
"J",
".",
"matvec",
"(",
"x",
")",
")",
"def",
"matmat",
"(",
"X",
")",
":",
"return",
"(",
"d",
"*",
"J",
".",
"matmat",
"(",
"X",
")",
")",
"def",
"rmatvec",
"(",
"x",
")",
":",
"return",
"J",
".",
"rmatvec",
"(",
"(",
"x",
".",
"ravel",
"(",
")",
"*",
"d",
")",
")",
"return",
"LinearOperator",
"(",
"J",
".",
"shape",
",",
"matvec",
"=",
"matvec",
",",
"matmat",
"=",
"matmat",
",",
"rmatvec",
"=",
"rmatvec",
")"
] |
return diag(d) J as linearoperator .
|
train
| false
|
10,564
|
def _can_load_course_on_mobile(user, course):
return (is_mobile_available_for_user(user, course) and (_has_staff_access_to_descriptor(user, course, course.id) or _has_fulfilled_all_milestones(user, course.id)))
|
[
"def",
"_can_load_course_on_mobile",
"(",
"user",
",",
"course",
")",
":",
"return",
"(",
"is_mobile_available_for_user",
"(",
"user",
",",
"course",
")",
"and",
"(",
"_has_staff_access_to_descriptor",
"(",
"user",
",",
"course",
",",
"course",
".",
"id",
")",
"or",
"_has_fulfilled_all_milestones",
"(",
"user",
",",
"course",
".",
"id",
")",
")",
")"
] |
checks if a user can view the given course on a mobile device .
|
train
| false
|
10,565
|
def isplit(start_text, iterator):
head = []
iterator = iter(iterator)
for item in iterator:
if item.startswith(start_text):
return (head, item, iterator)
head.append(item)
return (head, None, iterator)
|
[
"def",
"isplit",
"(",
"start_text",
",",
"iterator",
")",
":",
"head",
"=",
"[",
"]",
"iterator",
"=",
"iter",
"(",
"iterator",
")",
"for",
"item",
"in",
"iterator",
":",
"if",
"item",
".",
"startswith",
"(",
"start_text",
")",
":",
"return",
"(",
"head",
",",
"item",
",",
"iterator",
")",
"head",
".",
"append",
"(",
"item",
")",
"return",
"(",
"head",
",",
"None",
",",
"iterator",
")"
] |
behaves like str.split , splitting the iterator at the first item that starts with start_text .
|
train
| false
|
10,566
|
def _get_readable_id(id_name, id_prefix_to_skip):
pos = id_name.find('//')
if (pos != (-1)):
pos += 2
if id_prefix_to_skip:
pos = id_name.find(id_prefix_to_skip, pos)
if (pos != (-1)):
pos += len(id_prefix_to_skip)
pos = id_name.find('/', pos)
if (pos != (-1)):
return id_name[(pos + 1):]
return id_name
|
[
"def",
"_get_readable_id",
"(",
"id_name",
",",
"id_prefix_to_skip",
")",
":",
"pos",
"=",
"id_name",
".",
"find",
"(",
"'//'",
")",
"if",
"(",
"pos",
"!=",
"(",
"-",
"1",
")",
")",
":",
"pos",
"+=",
"2",
"if",
"id_prefix_to_skip",
":",
"pos",
"=",
"id_name",
".",
"find",
"(",
"id_prefix_to_skip",
",",
"pos",
")",
"if",
"(",
"pos",
"!=",
"(",
"-",
"1",
")",
")",
":",
"pos",
"+=",
"len",
"(",
"id_prefix_to_skip",
")",
"pos",
"=",
"id_name",
".",
"find",
"(",
"'/'",
",",
"pos",
")",
"if",
"(",
"pos",
"!=",
"(",
"-",
"1",
")",
")",
":",
"return",
"id_name",
"[",
"(",
"pos",
"+",
"1",
")",
":",
"]",
"return",
"id_name"
] |
simplifies an id to be more human-friendly .
|
train
| true
|
10,567
|
@pytest.mark.parametrize(u'text,is_num', [(u'one', True), (u'ten', True), (u'teneleven', False)])
def test_issue759(en_tokenizer, text, is_num):
tokens = en_tokenizer(text)
assert (tokens[0].like_num == is_num)
|
[
"@",
"pytest",
".",
"mark",
".",
"parametrize",
"(",
"u'text,is_num'",
",",
"[",
"(",
"u'one'",
",",
"True",
")",
",",
"(",
"u'ten'",
",",
"True",
")",
",",
"(",
"u'teneleven'",
",",
"False",
")",
"]",
")",
"def",
"test_issue759",
"(",
"en_tokenizer",
",",
"text",
",",
"is_num",
")",
":",
"tokens",
"=",
"en_tokenizer",
"(",
"text",
")",
"assert",
"(",
"tokens",
"[",
"0",
"]",
".",
"like_num",
"==",
"is_num",
")"
] |
test that numbers are recognised correctly .
|
train
| false
|
10,568
|
def matrix_transpose(matrix):
return matrix.swapaxes((-2), (-1))
|
[
"def",
"matrix_transpose",
"(",
"matrix",
")",
":",
"return",
"matrix",
".",
"swapaxes",
"(",
"(",
"-",
"2",
")",
",",
"(",
"-",
"1",
")",
")"
] |
transpose a matrix or stack of matrices by swapping the last two axes .
|
train
| false
|
10,569
|
def _finish(status_int, response_data=None, content_type=u'text', headers=None):
assert isinstance(status_int, int)
response_msg = u''
if (headers is None):
headers = {}
if (response_data is not None):
headers[u'Content-Type'] = CONTENT_TYPES[content_type]
if (content_type == u'json'):
response_msg = json.dumps(response_data, for_json=True)
else:
response_msg = response_data
if ((status_int == 200) and (u'callback' in request.args) and (request.method == u'GET')):
callback = cgi.escape(request.args[u'callback'])
response_msg = _wrap_jsonp(callback, response_msg)
return make_response((response_msg, status_int, headers))
|
[
"def",
"_finish",
"(",
"status_int",
",",
"response_data",
"=",
"None",
",",
"content_type",
"=",
"u'text'",
",",
"headers",
"=",
"None",
")",
":",
"assert",
"isinstance",
"(",
"status_int",
",",
"int",
")",
"response_msg",
"=",
"u''",
"if",
"(",
"headers",
"is",
"None",
")",
":",
"headers",
"=",
"{",
"}",
"if",
"(",
"response_data",
"is",
"not",
"None",
")",
":",
"headers",
"[",
"u'Content-Type'",
"]",
"=",
"CONTENT_TYPES",
"[",
"content_type",
"]",
"if",
"(",
"content_type",
"==",
"u'json'",
")",
":",
"response_msg",
"=",
"json",
".",
"dumps",
"(",
"response_data",
",",
"for_json",
"=",
"True",
")",
"else",
":",
"response_msg",
"=",
"response_data",
"if",
"(",
"(",
"status_int",
"==",
"200",
")",
"and",
"(",
"u'callback'",
"in",
"request",
".",
"args",
")",
"and",
"(",
"request",
".",
"method",
"==",
"u'GET'",
")",
")",
":",
"callback",
"=",
"cgi",
".",
"escape",
"(",
"request",
".",
"args",
"[",
"u'callback'",
"]",
")",
"response_msg",
"=",
"_wrap_jsonp",
"(",
"callback",
",",
"response_msg",
")",
"return",
"make_response",
"(",
"(",
"response_msg",
",",
"status_int",
",",
"headers",
")",
")"
] |
prepares the response when a controller method has completed .
|
train
| false
|
10,570
|
def _make_scorer(scoring):
from sklearn.metrics import make_scorer, get_scorer
if (scoring is None):
return None
elif isinstance(scoring, str):
return get_scorer(scoring)
else:
return make_scorer(scoring)
|
[
"def",
"_make_scorer",
"(",
"scoring",
")",
":",
"from",
"sklearn",
".",
"metrics",
"import",
"make_scorer",
",",
"get_scorer",
"if",
"(",
"scoring",
"is",
"None",
")",
":",
"return",
"None",
"elif",
"isinstance",
"(",
"scoring",
",",
"str",
")",
":",
"return",
"get_scorer",
"(",
"scoring",
")",
"else",
":",
"return",
"make_scorer",
"(",
"scoring",
")"
] |
make a sklearn scorer from a scoring name or callable .
|
train
| false
|
10,572
|
def test_compute_residual():
(t0, k0) = (0, np.array([5.0]))
results = _compute_fixed_length_solns(model, t0, k0)
for (integrator, numeric_solution) in results.items():
(N, T) = (1000, numeric_solution[:, 0][(-1)])
tmp_grid_pts = np.linspace(t0, T, N)
tmp_residual = model.compute_residual(numeric_solution, tmp_grid_pts, k=5)
expected_residual = np.zeros((N, 2))
actual_residual = tmp_residual
np.testing.assert_almost_equal(expected_residual, actual_residual)
|
[
"def",
"test_compute_residual",
"(",
")",
":",
"(",
"t0",
",",
"k0",
")",
"=",
"(",
"0",
",",
"np",
".",
"array",
"(",
"[",
"5.0",
"]",
")",
")",
"results",
"=",
"_compute_fixed_length_solns",
"(",
"model",
",",
"t0",
",",
"k0",
")",
"for",
"(",
"integrator",
",",
"numeric_solution",
")",
"in",
"results",
".",
"items",
"(",
")",
":",
"(",
"N",
",",
"T",
")",
"=",
"(",
"1000",
",",
"numeric_solution",
"[",
":",
",",
"0",
"]",
"[",
"(",
"-",
"1",
")",
"]",
")",
"tmp_grid_pts",
"=",
"np",
".",
"linspace",
"(",
"t0",
",",
"T",
",",
"N",
")",
"tmp_residual",
"=",
"model",
".",
"compute_residual",
"(",
"numeric_solution",
",",
"tmp_grid_pts",
",",
"k",
"=",
"5",
")",
"expected_residual",
"=",
"np",
".",
"zeros",
"(",
"(",
"N",
",",
"2",
")",
")",
"actual_residual",
"=",
"tmp_residual",
"np",
".",
"testing",
".",
"assert_almost_equal",
"(",
"expected_residual",
",",
"actual_residual",
")"
] |
testing computation of solution residual .
|
train
| false
|
10,574
|
def runReactorWithLogging(config, oldstdout, oldstderr, profiler=None, reactor=None):
if (reactor is None):
from twisted.internet import reactor
try:
if config['profile']:
if (profiler is not None):
profiler.run(reactor)
elif config['debug']:
sys.stdout = oldstdout
sys.stderr = oldstderr
if (runtime.platformType == 'posix'):
signal.signal(signal.SIGUSR2, (lambda *args: pdb.set_trace()))
signal.signal(signal.SIGINT, (lambda *args: pdb.set_trace()))
fixPdb()
pdb.runcall(reactor.run)
else:
reactor.run()
except:
if config['nodaemon']:
file = oldstdout
else:
file = open('TWISTD-CRASH.log', 'a')
traceback.print_exc(file=file)
file.flush()
|
[
"def",
"runReactorWithLogging",
"(",
"config",
",",
"oldstdout",
",",
"oldstderr",
",",
"profiler",
"=",
"None",
",",
"reactor",
"=",
"None",
")",
":",
"if",
"(",
"reactor",
"is",
"None",
")",
":",
"from",
"twisted",
".",
"internet",
"import",
"reactor",
"try",
":",
"if",
"config",
"[",
"'profile'",
"]",
":",
"if",
"(",
"profiler",
"is",
"not",
"None",
")",
":",
"profiler",
".",
"run",
"(",
"reactor",
")",
"elif",
"config",
"[",
"'debug'",
"]",
":",
"sys",
".",
"stdout",
"=",
"oldstdout",
"sys",
".",
"stderr",
"=",
"oldstderr",
"if",
"(",
"runtime",
".",
"platformType",
"==",
"'posix'",
")",
":",
"signal",
".",
"signal",
"(",
"signal",
".",
"SIGUSR2",
",",
"(",
"lambda",
"*",
"args",
":",
"pdb",
".",
"set_trace",
"(",
")",
")",
")",
"signal",
".",
"signal",
"(",
"signal",
".",
"SIGINT",
",",
"(",
"lambda",
"*",
"args",
":",
"pdb",
".",
"set_trace",
"(",
")",
")",
")",
"fixPdb",
"(",
")",
"pdb",
".",
"runcall",
"(",
"reactor",
".",
"run",
")",
"else",
":",
"reactor",
".",
"run",
"(",
")",
"except",
":",
"if",
"config",
"[",
"'nodaemon'",
"]",
":",
"file",
"=",
"oldstdout",
"else",
":",
"file",
"=",
"open",
"(",
"'TWISTD-CRASH.log'",
",",
"'a'",
")",
"traceback",
".",
"print_exc",
"(",
"file",
"=",
"file",
")",
"file",
".",
"flush",
"(",
")"
] |
start the reactor .
|
train
| false
|
10,576
|
def create_manual_course_enrollment(user, course_id, mode, enrolled_by, reason, state_transition):
enrollment_obj = CourseEnrollment.enroll(user, course_id, mode=mode)
ManualEnrollmentAudit.create_manual_enrollment_audit(enrolled_by, user.email, state_transition, reason, enrollment_obj)
log.info(u'user %s enrolled in the course %s', user.username, course_id)
return enrollment_obj
|
[
"def",
"create_manual_course_enrollment",
"(",
"user",
",",
"course_id",
",",
"mode",
",",
"enrolled_by",
",",
"reason",
",",
"state_transition",
")",
":",
"enrollment_obj",
"=",
"CourseEnrollment",
".",
"enroll",
"(",
"user",
",",
"course_id",
",",
"mode",
"=",
"mode",
")",
"ManualEnrollmentAudit",
".",
"create_manual_enrollment_audit",
"(",
"enrolled_by",
",",
"user",
".",
"email",
",",
"state_transition",
",",
"reason",
",",
"enrollment_obj",
")",
"log",
".",
"info",
"(",
"u'user %s enrolled in the course %s'",
",",
"user",
".",
"username",
",",
"course_id",
")",
"return",
"enrollment_obj"
] |
create course enrollment for the given student and create manual enrollment audit trail .
|
train
| false
|
10,577
|
@handle_response_format
@treeio_login_required
def milestone_edit(request, milestone_id, response_format='html'):
milestone = get_object_or_404(Milestone, pk=milestone_id)
project = milestone.project
if (not request.user.profile.has_permission(milestone, mode='w')):
return user_denied(request, message="You don't have access to this Milestone")
if request.POST:
if ('cancel' not in request.POST):
form = MilestoneForm(request.user.profile, None, request.POST, instance=milestone)
if form.is_valid():
milestone = form.save()
return HttpResponseRedirect(reverse('projects_milestone_view', args=[milestone.id]))
else:
return HttpResponseRedirect(reverse('projects_milestone_view', args=[milestone.id]))
else:
form = MilestoneForm(request.user.profile, None, instance=milestone)
context = _get_default_context(request)
context.update({'form': form, 'milestone': milestone, 'project': project})
return render_to_response('projects/milestone_edit', context, context_instance=RequestContext(request), response_format=response_format)
|
[
"@",
"handle_response_format",
"@",
"treeio_login_required",
"def",
"milestone_edit",
"(",
"request",
",",
"milestone_id",
",",
"response_format",
"=",
"'html'",
")",
":",
"milestone",
"=",
"get_object_or_404",
"(",
"Milestone",
",",
"pk",
"=",
"milestone_id",
")",
"project",
"=",
"milestone",
".",
"project",
"if",
"(",
"not",
"request",
".",
"user",
".",
"profile",
".",
"has_permission",
"(",
"milestone",
",",
"mode",
"=",
"'w'",
")",
")",
":",
"return",
"user_denied",
"(",
"request",
",",
"message",
"=",
"\"You don't have access to this Milestone\"",
")",
"if",
"request",
".",
"POST",
":",
"if",
"(",
"'cancel'",
"not",
"in",
"request",
".",
"POST",
")",
":",
"form",
"=",
"MilestoneForm",
"(",
"request",
".",
"user",
".",
"profile",
",",
"None",
",",
"request",
".",
"POST",
",",
"instance",
"=",
"milestone",
")",
"if",
"form",
".",
"is_valid",
"(",
")",
":",
"milestone",
"=",
"form",
".",
"save",
"(",
")",
"return",
"HttpResponseRedirect",
"(",
"reverse",
"(",
"'projects_milestone_view'",
",",
"args",
"=",
"[",
"milestone",
".",
"id",
"]",
")",
")",
"else",
":",
"return",
"HttpResponseRedirect",
"(",
"reverse",
"(",
"'projects_milestone_view'",
",",
"args",
"=",
"[",
"milestone",
".",
"id",
"]",
")",
")",
"else",
":",
"form",
"=",
"MilestoneForm",
"(",
"request",
".",
"user",
".",
"profile",
",",
"None",
",",
"instance",
"=",
"milestone",
")",
"context",
"=",
"_get_default_context",
"(",
"request",
")",
"context",
".",
"update",
"(",
"{",
"'form'",
":",
"form",
",",
"'milestone'",
":",
"milestone",
",",
"'project'",
":",
"project",
"}",
")",
"return",
"render_to_response",
"(",
"'projects/milestone_edit'",
",",
"context",
",",
"context_instance",
"=",
"RequestContext",
"(",
"request",
")",
",",
"response_format",
"=",
"response_format",
")"
] |
milestone edit page .
|
train
| false
|
10,578
|
def dump_graph(root_name, out_name='cg.dot', variable_style=None, function_style=None):
def trigger(trainer):
return (trainer.updater.iteration == 1)
if (variable_style is None):
variable_style = _var_style
if (function_style is None):
function_style = _func_style
@extension.make_extension(trigger=trigger)
def dump_graph(trainer):
var = trainer.observation[root_name]
if (not isinstance(var, variable.Variable)):
raise TypeError('root value is not a Variable')
cg = computational_graph.build_computational_graph([var], variable_style=variable_style, function_style=function_style).dump()
out_path = os.path.join(trainer.out, out_name)
with open(out_path, 'w') as f:
f.write(cg)
return dump_graph
|
[
"def",
"dump_graph",
"(",
"root_name",
",",
"out_name",
"=",
"'cg.dot'",
",",
"variable_style",
"=",
"None",
",",
"function_style",
"=",
"None",
")",
":",
"def",
"trigger",
"(",
"trainer",
")",
":",
"return",
"(",
"trainer",
".",
"updater",
".",
"iteration",
"==",
"1",
")",
"if",
"(",
"variable_style",
"is",
"None",
")",
":",
"variable_style",
"=",
"_var_style",
"if",
"(",
"function_style",
"is",
"None",
")",
":",
"function_style",
"=",
"_func_style",
"@",
"extension",
".",
"make_extension",
"(",
"trigger",
"=",
"trigger",
")",
"def",
"dump_graph",
"(",
"trainer",
")",
":",
"var",
"=",
"trainer",
".",
"observation",
"[",
"root_name",
"]",
"if",
"(",
"not",
"isinstance",
"(",
"var",
",",
"variable",
".",
"Variable",
")",
")",
":",
"raise",
"TypeError",
"(",
"'root value is not a Variable'",
")",
"cg",
"=",
"computational_graph",
".",
"build_computational_graph",
"(",
"[",
"var",
"]",
",",
"variable_style",
"=",
"variable_style",
",",
"function_style",
"=",
"function_style",
")",
".",
"dump",
"(",
")",
"out_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"trainer",
".",
"out",
",",
"out_name",
")",
"with",
"open",
"(",
"out_path",
",",
"'w'",
")",
"as",
"f",
":",
"f",
".",
"write",
"(",
"cg",
")",
"return",
"dump_graph"
] |
returns a trainer extension to dump a computational graph .
|
train
| false
|
10,579
|
def test_qt_log_ignore(qtbot, quteproc):
with qtbot.waitSignal(quteproc.got_error):
quteproc.send_cmd(':message-error "SpellCheck: test"')
|
[
"def",
"test_qt_log_ignore",
"(",
"qtbot",
",",
"quteproc",
")",
":",
"with",
"qtbot",
".",
"waitSignal",
"(",
"quteproc",
".",
"got_error",
")",
":",
"quteproc",
".",
"send_cmd",
"(",
"':message-error \"SpellCheck: test\"'",
")"
] |
make sure the test passes when logging a qt_log_ignore message .
|
train
| false
|
10,580
|
def _pshell(cmd, cwd=None, json_depth=2):
if (u'convertto-json' not in cmd.lower()):
cmd = u'{0} | ConvertTo-Json -Depth {1}'.format(cmd, json_depth)
log.debug(u'DSC: {0}'.format(cmd))
results = __salt__[u'cmd.run_all'](cmd, shell=u'powershell', cwd=cwd, python_shell=True)
if (u'pid' in results):
del results[u'pid']
if ((u'retcode' not in results) or (results[u'retcode'] != 0)):
raise CommandExecutionError(u'Issue executing powershell {0}'.format(cmd), info=results)
try:
ret = json.loads(results[u'stdout'], strict=False)
except ValueError:
raise CommandExecutionError(u'No JSON results from powershell', info=results)
return ret
|
[
"def",
"_pshell",
"(",
"cmd",
",",
"cwd",
"=",
"None",
",",
"json_depth",
"=",
"2",
")",
":",
"if",
"(",
"u'convertto-json'",
"not",
"in",
"cmd",
".",
"lower",
"(",
")",
")",
":",
"cmd",
"=",
"u'{0} | ConvertTo-Json -Depth {1}'",
".",
"format",
"(",
"cmd",
",",
"json_depth",
")",
"log",
".",
"debug",
"(",
"u'DSC: {0}'",
".",
"format",
"(",
"cmd",
")",
")",
"results",
"=",
"__salt__",
"[",
"u'cmd.run_all'",
"]",
"(",
"cmd",
",",
"shell",
"=",
"u'powershell'",
",",
"cwd",
"=",
"cwd",
",",
"python_shell",
"=",
"True",
")",
"if",
"(",
"u'pid'",
"in",
"results",
")",
":",
"del",
"results",
"[",
"u'pid'",
"]",
"if",
"(",
"(",
"u'retcode'",
"not",
"in",
"results",
")",
"or",
"(",
"results",
"[",
"u'retcode'",
"]",
"!=",
"0",
")",
")",
":",
"raise",
"CommandExecutionError",
"(",
"u'Issue executing powershell {0}'",
".",
"format",
"(",
"cmd",
")",
",",
"info",
"=",
"results",
")",
"try",
":",
"ret",
"=",
"json",
".",
"loads",
"(",
"results",
"[",
"u'stdout'",
"]",
",",
"strict",
"=",
"False",
")",
"except",
"ValueError",
":",
"raise",
"CommandExecutionError",
"(",
"u'No JSON results from powershell'",
",",
"info",
"=",
"results",
")",
"return",
"ret"
] |
execute the desired powershell command , ensure that it returns data in json format , and load that into python .
|
train
| false
|
10,581
|
def abort(msg):
from fabric.state import output, env
if (not env.colorize_errors):
red = (lambda x: x)
else:
from colors import red
if output.aborts:
sys.stderr.write(red(('\nFatal error: %s\n' % _encode(msg, sys.stderr))))
sys.stderr.write(red('\nAborting.\n'))
if env.abort_exception:
raise env.abort_exception(msg)
else:
e = SystemExit(1)
e.message = msg
raise e
|
[
"def",
"abort",
"(",
"msg",
")",
":",
"from",
"fabric",
".",
"state",
"import",
"output",
",",
"env",
"if",
"(",
"not",
"env",
".",
"colorize_errors",
")",
":",
"red",
"=",
"(",
"lambda",
"x",
":",
"x",
")",
"else",
":",
"from",
"colors",
"import",
"red",
"if",
"output",
".",
"aborts",
":",
"sys",
".",
"stderr",
".",
"write",
"(",
"red",
"(",
"(",
"'\\nFatal error: %s\\n'",
"%",
"_encode",
"(",
"msg",
",",
"sys",
".",
"stderr",
")",
")",
")",
")",
"sys",
".",
"stderr",
".",
"write",
"(",
"red",
"(",
"'\\nAborting.\\n'",
")",
")",
"if",
"env",
".",
"abort_exception",
":",
"raise",
"env",
".",
"abort_exception",
"(",
"msg",
")",
"else",
":",
"e",
"=",
"SystemExit",
"(",
"1",
")",
"e",
".",
"message",
"=",
"msg",
"raise",
"e"
] |
abort execution : print the message to stderr and raise systemexit .
|
train
| false
|
10,582
|
def test_include_x_axis(Chart):
chart = Chart()
if ((Chart in (Pie, Treemap, Radar, Funnel, Dot, Gauge, Histogram, Box, SolidGauge)) or issubclass(Chart, BaseMap)):
return
if (not chart._dual):
data = (100, 200, 150)
else:
data = ((1, 100), (3, 200), (2, 150))
chart.add('_', data)
q = chart.render_pyquery()
yaxis = ('.axis.%s .guides text' % ('y' if (not getattr(chart, 'horizontal', False)) else 'x'))
if (not isinstance(chart, Bar)):
assert ('0' not in q(yaxis).map(texts))
else:
assert ('0' in q(yaxis).map(texts))
chart.include_x_axis = True
q = chart.render_pyquery()
assert ('0' in q(yaxis).map(texts))
|
[
"def",
"test_include_x_axis",
"(",
"Chart",
")",
":",
"chart",
"=",
"Chart",
"(",
")",
"if",
"(",
"(",
"Chart",
"in",
"(",
"Pie",
",",
"Treemap",
",",
"Radar",
",",
"Funnel",
",",
"Dot",
",",
"Gauge",
",",
"Histogram",
",",
"Box",
",",
"SolidGauge",
")",
")",
"or",
"issubclass",
"(",
"Chart",
",",
"BaseMap",
")",
")",
":",
"return",
"if",
"(",
"not",
"chart",
".",
"_dual",
")",
":",
"data",
"=",
"(",
"100",
",",
"200",
",",
"150",
")",
"else",
":",
"data",
"=",
"(",
"(",
"1",
",",
"100",
")",
",",
"(",
"3",
",",
"200",
")",
",",
"(",
"2",
",",
"150",
")",
")",
"chart",
".",
"add",
"(",
"'_'",
",",
"data",
")",
"q",
"=",
"chart",
".",
"render_pyquery",
"(",
")",
"yaxis",
"=",
"(",
"'.axis.%s .guides text'",
"%",
"(",
"'y'",
"if",
"(",
"not",
"getattr",
"(",
"chart",
",",
"'horizontal'",
",",
"False",
")",
")",
"else",
"'x'",
")",
")",
"if",
"(",
"not",
"isinstance",
"(",
"chart",
",",
"Bar",
")",
")",
":",
"assert",
"(",
"'0'",
"not",
"in",
"q",
"(",
"yaxis",
")",
".",
"map",
"(",
"texts",
")",
")",
"else",
":",
"assert",
"(",
"'0'",
"in",
"q",
"(",
"yaxis",
")",
".",
"map",
"(",
"texts",
")",
")",
"chart",
".",
"include_x_axis",
"=",
"True",
"q",
"=",
"chart",
".",
"render_pyquery",
"(",
")",
"assert",
"(",
"'0'",
"in",
"q",
"(",
"yaxis",
")",
".",
"map",
"(",
"texts",
")",
")"
] |
test x axis inclusion option .
|
train
| false
|
10,583
|
def to_asyncio_future(tornado_future):
tornado_future = convert_yielded(tornado_future)
af = asyncio.Future()
tornado.concurrent.chain_future(tornado_future, af)
return af
|
[
"def",
"to_asyncio_future",
"(",
"tornado_future",
")",
":",
"tornado_future",
"=",
"convert_yielded",
"(",
"tornado_future",
")",
"af",
"=",
"asyncio",
".",
"Future",
"(",
")",
"tornado",
".",
"concurrent",
".",
"chain_future",
"(",
"tornado_future",
",",
"af",
")",
"return",
"af"
] |
convert a tornado future to an asyncio future .
|
train
| false
|
10,586
|
def stonith_show(stonith_id, extra_args=None, cibfile=None):
return item_show(item='stonith', item_id=stonith_id, extra_args=extra_args, cibfile=cibfile)
|
[
"def",
"stonith_show",
"(",
"stonith_id",
",",
"extra_args",
"=",
"None",
",",
"cibfile",
"=",
"None",
")",
":",
"return",
"item_show",
"(",
"item",
"=",
"'stonith'",
",",
"item_id",
"=",
"stonith_id",
",",
"extra_args",
"=",
"extra_args",
",",
"cibfile",
"=",
"cibfile",
")"
] |
show the value of a cluster stonith . stonith_id : name for the stonith resource ; extra_args : additional options for the pcs stonith command ; cibfile : use cibfile instead of the live cib . cli example: .
|
train
| true
|